@plusplusoneplusplus/deep-wiki 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../src/utils/error-utils.ts", "../src/logger.ts", "../src/config-loader.ts", "../../pipeline-core/src/logger.ts", "../../pipeline-core/src/errors/error-codes.ts", "../../pipeline-core/src/errors/pipeline-core-error.ts", "../../pipeline-core/src/errors/index.ts", "../../pipeline-core/src/runtime/cancellation.ts", "../../pipeline-core/src/runtime/timeout.ts", "../../pipeline-core/src/runtime/retry.ts", "../../pipeline-core/src/runtime/policy.ts", "../../pipeline-core/src/runtime/index.ts", "../../pipeline-core/src/utils/file-utils.ts", "../../pipeline-core/src/utils/glob-utils.ts", "../../pipeline-core/src/utils/exec-utils.ts", "../../pipeline-core/src/utils/http-utils.ts", "../../pipeline-core/src/utils/text-matching.ts", "../../pipeline-core/src/utils/ai-response-parser.ts", "../../pipeline-core/src/utils/window-focus-service.ts", "../../pipeline-core/src/ai/cli-utils.ts", "../../pipeline-core/src/utils/external-terminal-launcher.ts", "../../pipeline-core/src/config/defaults.ts", "../../pipeline-core/src/utils/process-monitor.ts", "../../pipeline-core/src/utils/template-engine.ts", "../../pipeline-core/src/utils/index.ts", "../../pipeline-core/src/config/index.ts", "../../pipeline-core/src/ai/model-registry.ts", "../../pipeline-core/src/ai/types.ts", "../../pipeline-core/src/ai/command-types.ts", "../../pipeline-core/src/ai/prompt-builder.ts", "../../pipeline-core/src/ai/program-utils.ts", "../../pipeline-core/src/ai/process-types.ts", "../../pipeline-core/src/ai/session-pool.ts", "../../pipeline-core/src/ai/mcp-config-loader.ts", "../../pipeline-core/src/ai/trusted-folder.ts", "../../pipeline-core/src/ai/timeouts.ts", "../../pipeline-core/src/ai/copilot-sdk-service.ts", "../../pipeline-core/src/ai/index.ts", "../../pipeline-core/src/map-reduce/types.ts", "../../pipeline-core/src/map-reduce/concurrency-limiter.ts", "../../pipeline-core/src/map-reduce/executor.ts", "../../pipeline-core/src/map-reduce/prompt-template.ts", "../../pipeline-core/src/map-reduce/reducers/reducer.ts", "../../pipeline-core/src/map-reduce/reducers/deterministic.ts", "../../pipeline-core/src/map-reduce/reducers/ai-reducer.ts", "../../pipeline-core/src/map-reduce/reducers/hybrid-reducer.ts", "../../pipeline-core/src/map-reduce/reducers/index.ts", "../../pipeline-core/src/map-reduce/splitters/file-splitter.ts", "../../pipeline-core/src/map-reduce/splitters/chunk-splitter.ts", "../../pipeline-core/src/map-reduce/splitters/rule-splitter.ts", "../../pipeline-core/src/map-reduce/splitters/index.ts", "../../pipeline-core/src/map-reduce/jobs/code-review-job.ts", "../../pipeline-core/src/map-reduce/jobs/template-job.ts", "../../pipeline-core/src/map-reduce/temp-file-utils.ts", "../../pipeline-core/src/map-reduce/jobs/prompt-map-job.ts", "../../pipeline-core/src/map-reduce/jobs/index.ts", "../../pipeline-core/src/map-reduce/index.ts", "../../pipeline-core/src/pipeline/types.ts", "../../pipeline-core/src/pipeline/csv-reader.ts", "../../pipeline-core/src/pipeline/template.ts", "../../pipeline-core/src/pipeline/input-generator.ts", "../../pipeline-core/src/pipeline/filter-executor.ts", "../../pipeline-core/src/pipeline/prompt-resolver.ts", "../../pipeline-core/src/pipeline/skill-resolver.ts", "../../pipeline-core/src/pipeline/executor.ts", "../../pipeline-core/src/pipeline/index.ts", "../../pipeline-core/src/queue/types.ts", "../../pipeline-core/src/queue/task-queue-manager.ts", "../../pipeline-core/src/queue/queue-executor.ts", "../../pipeline-core/src/queue/index.ts", "../../pipeline-core/src/index.ts", 
"../src/seeds/prompts.ts", "../src/schemas.ts", "../src/utils/parse-ai-response.ts", "../src/seeds/response-parser.ts", "../src/seeds/heuristic-fallback.ts", "../src/seeds/seeds-session.ts", "../src/seeds/seed-file-parser.ts", "../src/seeds/index.ts", "../src/commands/seeds.ts", "../src/discovery/prompts.ts", "../src/discovery/response-parser.ts", "../src/discovery/discovery-session.ts", "../src/cache/git-utils.ts", "../src/cache/cache-utils.ts", "../src/cache/cache-constants.ts", "../src/cache/discovery-cache.ts", "../src/cache/graph-cache.ts", "../src/cache/consolidation-cache.ts", "../src/cache/analysis-cache.ts", "../src/cache/article-cache.ts", "../src/cache/index.ts", "../src/discovery/large-repo-handler.ts", "../src/discovery/iterative/probe-prompts.ts", "../src/discovery/iterative/probe-response-parser.ts", "../src/discovery/iterative/probe-session.ts", "../src/discovery/iterative/merge-prompts.ts", "../src/discovery/iterative/merge-response-parser.ts", "../src/discovery/iterative/merge-session.ts", "../src/discovery/iterative/iterative-discovery.ts", "../src/discovery/index.ts", "../src/commands/discover.ts", "../src/ai-invoker.ts", "../src/usage-tracker.ts", "../src/commands/phases/discovery-phase.ts", "../src/consolidation/constants.ts", "../src/consolidation/rule-based-consolidator.ts", "../src/consolidation/ai-consolidator.ts", "../src/consolidation/consolidator.ts", "../src/consolidation/index.ts", "../src/commands/phases/consolidation-phase.ts", "../src/analysis/prompts.ts", "../src/analysis/response-parser.ts", "../src/analysis/analysis-executor.ts", "../src/analysis/index.ts", "../src/commands/phases/analysis-phase.ts", "../src/writing/prompts.ts", "../src/writing/reduce-prompts.ts", "../src/writing/article-executor.ts", "../src/writing/file-writer.ts", "../src/rendering/mermaid-zoom.ts", "../src/writing/website-styles.ts", "../src/writing/website-client-script.ts", "../src/writing/website-data.ts", "../src/writing/website-generator.ts", "../src/writing/index.ts", "../src/commands/phases/writing-phase.ts", "../src/commands/phases/website-phase.ts", "../src/commands/phases/index.ts", "../src/commands/generate.ts", "../src/server/wiki-data.ts", "../src/server/ask-handler.ts", "../src/server/explore-handler.ts", "../src/server/api-handlers.ts", "../src/server/router.ts", "../src/server/spa-template.ts", "../src/server/context-builder.ts", "../src/server/websocket.ts", "../src/server/file-watcher.ts", "../src/server/conversation-session-manager.ts", "../src/server/index.ts", "../src/commands/serve.ts", "../src/cli.ts", "../src/index.ts"],
+ "sourcesContent": ["/**\n * Safely extract an error message from an unknown thrown value.\n */\nexport function getErrorMessage(error: unknown): string {\n if (error instanceof Error) {\n return error.message;\n }\n return String(error);\n}\n", "/**\n * CLI Logger\n *\n * Provides colored console output with spinner/progress support for the deep-wiki CLI.\n * Implements the pipeline-core Logger interface and adds CLI-specific features\n * like spinners, status messages, and color-coded output.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport type { Logger } from '@plusplusoneplusplus/pipeline-core';\n\n// ============================================================================\n// ANSI Color Codes\n// ============================================================================\n\nconst COLORS = {\n reset: '\\x1b[0m',\n bold: '\\x1b[1m',\n dim: '\\x1b[2m',\n\n red: '\\x1b[31m',\n green: '\\x1b[32m',\n yellow: '\\x1b[33m',\n blue: '\\x1b[34m',\n magenta: '\\x1b[35m',\n cyan: '\\x1b[36m',\n gray: '\\x1b[90m',\n\n bgRed: '\\x1b[41m',\n bgGreen: '\\x1b[42m',\n bgYellow: '\\x1b[43m',\n} as const;\n\n// ============================================================================\n// Color Helpers\n// ============================================================================\n\nlet colorEnabled = true;\n\n/**\n * Enable or disable colored output\n */\nexport function setColorEnabled(enabled: boolean): void {\n colorEnabled = enabled;\n}\n\n/**\n * Check if colors are enabled\n */\nexport function isColorEnabled(): boolean {\n return colorEnabled;\n}\n\nfunction colorize(color: string, text: string): string {\n if (!colorEnabled) { return text; }\n return `${color}${text}${COLORS.reset}`;\n}\n\nexport function red(text: string): string { return colorize(COLORS.red, text); }\nexport function green(text: string): string { return colorize(COLORS.green, text); }\nexport function yellow(text: string): string { return colorize(COLORS.yellow, text); }\nexport function blue(text: string): string { return colorize(COLORS.blue, text); }\nexport function cyan(text: string): string { return colorize(COLORS.cyan, text); }\nexport function gray(text: string): string { return colorize(COLORS.gray, text); }\nexport function bold(text: string): string { return colorize(COLORS.bold, text); }\nexport function dim(text: string): string { return colorize(COLORS.dim, text); }\nexport function magenta(text: string): string { return colorize(COLORS.magenta, text); }\n\n// ============================================================================\n// Symbols (cross-platform)\n// ============================================================================\n\nconst isWindows = process.platform === 'win32';\n\nexport const SYMBOLS = {\n success: isWindows ? '\u221A' : '\u2713',\n error: isWindows ? '\u00D7' : '\u2717',\n warning: isWindows ? '\u203C' : '\u26A0',\n info: isWindows ? 'i' : '\u2139',\n arrow: isWindows ? '>' : '\u2192',\n bullet: isWindows ? '*' : '\u2022',\n spinner: isWindows\n ? 
['|', '/', '-', '\\\\']\n : ['\u280B', '\u2819', '\u2839', '\u2838', '\u283C', '\u2834', '\u2826', '\u2827', '\u2807', '\u280F'],\n} as const;\n\n// ============================================================================\n// Spinner\n// ============================================================================\n\n/**\n * Simple CLI spinner for showing progress\n */\nexport class Spinner {\n private frameIndex = 0;\n private timer: ReturnType<typeof setInterval> | null = null;\n private _message: string;\n private _isRunning = false;\n\n constructor(message: string = '') {\n this._message = message;\n }\n\n get isRunning(): boolean {\n return this._isRunning;\n }\n\n get message(): string {\n return this._message;\n }\n\n /**\n * Start the spinner with an optional message\n */\n start(message?: string): void {\n if (this._isRunning) { this.stop(); }\n if (message !== undefined) { this._message = message; }\n this._isRunning = true;\n\n // Only animate if TTY\n if (process.stderr.isTTY) {\n this.timer = setInterval(() => {\n const frame = SYMBOLS.spinner[this.frameIndex % SYMBOLS.spinner.length];\n process.stderr.write(`\\r${colorize(COLORS.cyan, frame)} ${this._message}`);\n this.frameIndex++;\n }, 80);\n } else {\n process.stderr.write(`${this._message}\\n`);\n }\n }\n\n /**\n * Update the spinner message\n */\n update(message: string): void {\n this._message = message;\n if (!process.stderr.isTTY && this._isRunning) {\n process.stderr.write(`${message}\\n`);\n }\n }\n\n /**\n * Stop the spinner and show a final message\n */\n stop(finalMessage?: string): void {\n this._isRunning = false;\n if (this.timer) {\n clearInterval(this.timer);\n this.timer = null;\n }\n if (process.stderr.isTTY) {\n process.stderr.write('\\r\\x1b[K'); // Clear the line\n }\n if (finalMessage) {\n process.stderr.write(`${finalMessage}\\n`);\n }\n }\n\n /**\n * Stop with success state\n */\n succeed(message?: string): void {\n const msg = message || this._message;\n this.stop(`${green(SYMBOLS.success)} ${msg}`);\n }\n\n /**\n * Stop with failure state\n */\n fail(message?: string): void {\n const msg = message || this._message;\n this.stop(`${red(SYMBOLS.error)} ${msg}`);\n }\n\n /**\n * Stop with warning state\n */\n warn(message?: string): void {\n const msg = message || this._message;\n this.stop(`${yellow(SYMBOLS.warning)} ${msg}`);\n }\n}\n\n// ============================================================================\n// CLI Logger (implements pipeline-core Logger interface)\n// ============================================================================\n\n/**\n * Verbosity level for CLI output\n */\nexport type VerbosityLevel = 'quiet' | 'normal' | 'verbose';\n\nlet verbosity: VerbosityLevel = 'normal';\n\n/**\n * Set the CLI verbosity level\n */\nexport function setVerbosity(level: VerbosityLevel): void {\n verbosity = level;\n}\n\n/**\n * Get the current verbosity level\n */\nexport function getVerbosity(): VerbosityLevel {\n return verbosity;\n}\n\n/**\n * Create a pipeline-core compatible Logger for CLI usage\n */\nexport function createCLILogger(): Logger {\n return {\n debug(category: string, message: string): void {\n if (verbosity === 'verbose') {\n process.stderr.write(`${gray(`[DEBUG] [${category}]`)} ${message}\\n`);\n }\n },\n info(category: string, message: string): void {\n if (verbosity !== 'quiet') {\n process.stderr.write(`${blue(`[${category}]`)} ${message}\\n`);\n }\n },\n warn(category: string, message: string): void {\n process.stderr.write(`${yellow(`[WARN] [${category}]`)} 
${message}\\n`);\n },\n error(category: string, message: string, error?: Error): void {\n process.stderr.write(`${red(`[ERROR] [${category}]`)} ${message}\\n`);\n if (error && verbosity === 'verbose') {\n process.stderr.write(`${gray(error.stack || error.message)}\\n`);\n }\n },\n };\n}\n\n// ============================================================================\n// Print Helpers (user-facing output)\n// ============================================================================\n\n/**\n * Print a success message to stderr\n */\nexport function printSuccess(message: string): void {\n process.stderr.write(`${green(SYMBOLS.success)} ${message}\\n`);\n}\n\n/**\n * Print an error message to stderr\n */\nexport function printError(message: string): void {\n process.stderr.write(`${red(SYMBOLS.error)} ${message}\\n`);\n}\n\n/**\n * Print a warning message to stderr\n */\nexport function printWarning(message: string): void {\n process.stderr.write(`${yellow(SYMBOLS.warning)} ${message}\\n`);\n}\n\n/**\n * Print an info message to stderr\n */\nexport function printInfo(message: string): void {\n process.stderr.write(`${blue(SYMBOLS.info)} ${message}\\n`);\n}\n\n/**\n * Print a header/title to stderr\n */\nexport function printHeader(title: string): void {\n process.stderr.write(`\\n${bold(title)}\\n`);\n}\n\n/**\n * Print a key-value pair to stderr\n */\nexport function printKeyValue(key: string, value: string): void {\n process.stderr.write(` ${gray(key + ':')} ${value}\\n`);\n}\n", "/**\n * Config Loader\n *\n * Loads and validates a YAML configuration file for the `deep-wiki generate` command.\n * Merges config-file values with CLI flags, resolving per-phase overrides.\n *\n * Resolution order (highest priority first):\n * 1. CLI flags (--model, --timeout, etc.)\n * 2. Phase-specific config (phases.analysis.model)\n * 3. Global config (model)\n * 4. 
Defaults (existing defaults in code)\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport * as yaml from 'js-yaml';\nimport { getErrorMessage } from './utils/error-utils';\nimport type {\n DeepWikiConfigFile,\n GenerateCommandOptions,\n PhaseName,\n PhasesConfig,\n WebsiteTheme,\n} from './types';\n\n// ============================================================================\n// Config Loading\n// ============================================================================\n\n/**\n * Load and parse a YAML configuration file.\n *\n * @param configPath - Absolute or relative path to the YAML config file\n * @returns Parsed config object\n * @throws If the file does not exist, cannot be read, or contains invalid YAML\n */\nexport function loadConfig(configPath: string): DeepWikiConfigFile {\n const absolutePath = path.resolve(configPath);\n\n if (!fs.existsSync(absolutePath)) {\n throw new Error(`Config file not found: ${absolutePath}`);\n }\n\n const content = fs.readFileSync(absolutePath, 'utf-8');\n\n let parsed: unknown;\n try {\n parsed = yaml.load(content);\n } catch (e) {\n throw new Error(`Invalid YAML in config file: ${getErrorMessage(e)}`);\n }\n\n if (parsed === null || parsed === undefined || typeof parsed !== 'object') {\n throw new Error('Config file is empty or not a valid YAML object');\n }\n\n return validateConfig(parsed as Record<string, unknown>);\n}\n\n/**\n * Try to auto-discover a config file in the given directory.\n * Looks for `deep-wiki.config.yaml` or `deep-wiki.config.yml`.\n *\n * @param dir - Directory to search (typically the repo root)\n * @returns Absolute path to config file, or undefined if not found\n */\nexport function discoverConfigFile(dir: string): string | undefined {\n const candidates = ['deep-wiki.config.yaml', 'deep-wiki.config.yml'];\n for (const filename of candidates) {\n const candidate = path.join(dir, filename);\n if (fs.existsSync(candidate)) {\n return candidate;\n }\n }\n return undefined;\n}\n\n// ============================================================================\n// Config Merging\n// ============================================================================\n\n/**\n * Sentinel value indicating a CLI flag was explicitly set.\n * Used to distinguish \"user passed --model X\" from \"model was never set\".\n */\ninterface CLIOverrides {\n /** Fields explicitly set via CLI flags (not defaults) */\n explicitFields: Set<string>;\n}\n\n/**\n * Merge a config file with CLI options.\n * CLI flags override config file values. Config file fills in unset fields.\n *\n * @param config - Parsed config file\n * @param cliOptions - Options from CLI flags\n * @param cliExplicit - Set of field names explicitly provided via CLI (not defaults)\n * @returns Merged GenerateCommandOptions\n */\nexport function mergeConfigWithCLI(\n config: DeepWikiConfigFile,\n cliOptions: GenerateCommandOptions,\n cliExplicit?: Set<string>\n): GenerateCommandOptions {\n const explicit = cliExplicit || new Set<string>();\n\n // Helper: use CLI value if explicitly set, otherwise config value, otherwise existing CLI default\n function resolve<T>(field: string, cliVal: T, configVal: T | undefined): T {\n if (explicit.has(field)) {\n return cliVal;\n }\n return configVal !== undefined ? 
configVal : cliVal;\n }\n\n // Merge phases: config phases are the base, CLI phases (if any) override\n let mergedPhases: PhasesConfig | undefined;\n if (config.phases || cliOptions.phases) {\n mergedPhases = { ...config.phases };\n if (cliOptions.phases) {\n for (const [phase, overrides] of Object.entries(cliOptions.phases)) {\n const phaseName = phase as PhaseName;\n mergedPhases[phaseName] = {\n ...mergedPhases[phaseName],\n ...overrides,\n };\n }\n }\n }\n\n return {\n output: resolve('output', cliOptions.output, config.output),\n model: resolve('model', cliOptions.model, config.model),\n concurrency: resolve('concurrency', cliOptions.concurrency, config.concurrency),\n timeout: resolve('timeout', cliOptions.timeout, config.timeout),\n focus: resolve('focus', cliOptions.focus, config.focus),\n depth: resolve('depth', cliOptions.depth, config.depth),\n force: resolve('force', cliOptions.force, config.force),\n useCache: resolve('useCache', cliOptions.useCache, config.useCache),\n phase: resolve('phase', cliOptions.phase, config.phase),\n verbose: cliOptions.verbose, // always from CLI\n skipWebsite: resolve('skipWebsite', cliOptions.skipWebsite, config.skipWebsite),\n theme: resolve('theme', cliOptions.theme, config.theme as WebsiteTheme | undefined),\n title: resolve('title', cliOptions.title, config.title),\n seeds: resolve('seeds', cliOptions.seeds, config.seeds),\n noCluster: resolve('noCluster', cliOptions.noCluster, config.noCluster),\n strict: resolve('strict', cliOptions.strict, config.strict),\n config: cliOptions.config,\n phases: mergedPhases,\n };\n}\n\n// ============================================================================\n// Per-Phase Resolution\n// ============================================================================\n\n/**\n * Resolve the AI model for a specific phase.\n *\n * Resolution order:\n * 1. Phase-specific config (options.phases[phase].model)\n * 2. Global option (options.model)\n * 3. undefined (use SDK default)\n */\nexport function resolvePhaseModel(\n options: GenerateCommandOptions,\n phase: PhaseName\n): string | undefined {\n return options.phases?.[phase]?.model ?? options.model;\n}\n\n/**\n * Resolve the timeout (in seconds) for a specific phase.\n *\n * Resolution order:\n * 1. Phase-specific config (options.phases[phase].timeout)\n * 2. Global option (options.timeout)\n * 3. undefined (use phase default)\n */\nexport function resolvePhaseTimeout(\n options: GenerateCommandOptions,\n phase: PhaseName\n): number | undefined {\n return options.phases?.[phase]?.timeout ?? options.timeout;\n}\n\n/**\n * Resolve the concurrency for a specific phase.\n *\n * Resolution order:\n * 1. Phase-specific config (options.phases[phase].concurrency)\n * 2. Global option (options.concurrency)\n * 3. undefined (use phase default)\n */\nexport function resolvePhaseConcurrency(\n options: GenerateCommandOptions,\n phase: PhaseName\n): number | undefined {\n return options.phases?.[phase]?.concurrency ?? options.concurrency;\n}\n\n/**\n * Resolve the depth for a specific phase.\n *\n * Resolution order:\n * 1. Phase-specific config (options.phases[phase].depth)\n * 2. Global option (options.depth)\n */\nexport function resolvePhaseDepth(\n options: GenerateCommandOptions,\n phase: PhaseName\n): 'shallow' | 'normal' | 'deep' {\n return options.phases?.[phase]?.depth ?? 
options.depth;\n}\n\n// ============================================================================\n// Validation\n// ============================================================================\n\nconst VALID_DEPTHS = new Set(['shallow', 'normal', 'deep']);\nconst VALID_THEMES = new Set(['light', 'dark', 'auto']);\nconst VALID_PHASE_NAMES = new Set<string>(['discovery', 'consolidation', 'analysis', 'writing']);\n\nfunction fieldLabel(field: string, prefix?: string): string {\n return prefix ? `${prefix}${field}` : `\"${field}\"`;\n}\n\nfunction assignString(raw: Record<string, unknown>, field: string, target: Record<string, unknown>, prefix?: string): void {\n if (raw[field] !== undefined) {\n if (typeof raw[field] !== 'string') {\n throw new Error(`Config error: ${fieldLabel(field, prefix)} must be a string`);\n }\n target[field] = raw[field];\n }\n}\n\nfunction assignBoolean(raw: Record<string, unknown>, field: string, target: Record<string, unknown>, prefix?: string): void {\n if (raw[field] !== undefined) {\n if (typeof raw[field] !== 'boolean') {\n throw new Error(`Config error: ${fieldLabel(field, prefix)} must be a boolean`);\n }\n target[field] = raw[field];\n }\n}\n\nfunction assignPositiveNumber(raw: Record<string, unknown>, field: string, target: Record<string, unknown>, prefix?: string): void {\n if (raw[field] !== undefined) {\n if (typeof raw[field] !== 'number' || !Number.isFinite(raw[field] as number) || (raw[field] as number) < 1) {\n throw new Error(`Config error: ${fieldLabel(field, prefix)} must be a positive number`);\n }\n target[field] = raw[field];\n }\n}\n\nfunction assignEnum(raw: Record<string, unknown>, field: string, target: Record<string, unknown>, validValues: Set<string>, prefix?: string): void {\n if (raw[field] !== undefined) {\n if (typeof raw[field] !== 'string' || !validValues.has(raw[field] as string)) {\n throw new Error(`Config error: ${fieldLabel(field, prefix)} must be one of: ${[...validValues].join(', ')}`);\n }\n target[field] = raw[field];\n }\n}\n\n/**\n * Validate a raw parsed config object and return a typed DeepWikiConfigFile.\n *\n * @param raw - Raw parsed YAML object\n * @returns Validated config\n * @throws If the config contains invalid values\n */\nexport function validateConfig(raw: Record<string, unknown>): DeepWikiConfigFile {\n const config: DeepWikiConfigFile = {};\n\n // String fields\n assignString(raw, 'repoPath', config);\n assignString(raw, 'output', config);\n assignString(raw, 'model', config);\n assignString(raw, 'focus', config);\n assignString(raw, 'seeds', config);\n assignString(raw, 'title', config);\n\n // Number fields\n assignPositiveNumber(raw, 'concurrency', config);\n assignPositiveNumber(raw, 'timeout', config);\n\n // Phase (custom: integer check + range 1-4)\n if (raw.phase !== undefined) {\n if (typeof raw.phase !== 'number' || !Number.isInteger(raw.phase) || raw.phase < 1 || raw.phase > 4) {\n throw new Error('Config error: \"phase\" must be an integer between 1 and 4');\n }\n config.phase = raw.phase;\n }\n\n // Boolean fields\n assignBoolean(raw, 'useCache', config);\n assignBoolean(raw, 'force', config);\n assignBoolean(raw, 'noCluster', config);\n assignBoolean(raw, 'strict', config);\n assignBoolean(raw, 'skipWebsite', config);\n\n // Enum fields\n assignEnum(raw, 'depth', config, VALID_DEPTHS);\n assignEnum(raw, 'theme', config, VALID_THEMES);\n\n // Phases map\n if (raw.phases !== undefined) {\n if (typeof raw.phases !== 'object' || raw.phases === null || Array.isArray(raw.phases)) {\n throw new 
Error('Config error: \"phases\" must be an object');\n }\n\n const phases: PhasesConfig = {};\n for (const [key, value] of Object.entries(raw.phases as Record<string, unknown>)) {\n if (!VALID_PHASE_NAMES.has(key)) {\n throw new Error(`Config error: unknown phase \"${key}\". Valid phases: ${[...VALID_PHASE_NAMES].join(', ')}`);\n }\n\n if (typeof value !== 'object' || value === null || Array.isArray(value)) {\n throw new Error(`Config error: phases.${key} must be an object`);\n }\n\n const phaseRaw = value as Record<string, unknown>;\n const phaseConfig: Record<string, unknown> = {};\n const phasePrefix = `phases.${key}.`;\n\n assignString(phaseRaw, 'model', phaseConfig, phasePrefix);\n assignPositiveNumber(phaseRaw, 'timeout', phaseConfig, phasePrefix);\n assignPositiveNumber(phaseRaw, 'concurrency', phaseConfig, phasePrefix);\n assignEnum(phaseRaw, 'depth', phaseConfig, VALID_DEPTHS, phasePrefix);\n assignBoolean(phaseRaw, 'skipAI', phaseConfig, phasePrefix);\n\n phases[key as PhaseName] = phaseConfig;\n }\n\n config.phases = phases;\n }\n\n return config;\n}\n", "/**\n * Logger abstraction for pipeline-core package.\n * \n * This module provides a simple logger interface that can be implemented\n * by different environments (VS Code, CLI, tests, etc.).\n * \n * Usage:\n * import { getLogger, setLogger, consoleLogger } from 'pipeline-core';\n * \n * // Use default console logger\n * const logger = getLogger();\n * logger.info('AI', 'Processing started');\n * \n * // Or set a custom logger (e.g., VS Code output channel)\n * setLogger(myCustomLogger);\n */\n\n/**\n * Log categories for different subsystems\n */\nexport enum LogCategory {\n /** AI Service operations (Copilot SDK, sessions) */\n AI = 'AI Service',\n /** Map-reduce operations */\n MAP_REDUCE = 'Map-Reduce',\n /** Pipeline execution */\n PIPELINE = 'Pipeline',\n /** Utility operations */\n UTILS = 'Utils',\n /** General operations */\n GENERAL = 'General'\n}\n\n/**\n * Logger interface that can be implemented by different environments.\n */\nexport interface Logger {\n /**\n * Log a debug message (verbose, for development)\n */\n debug(category: string, message: string): void;\n \n /**\n * Log an informational message\n */\n info(category: string, message: string): void;\n \n /**\n * Log a warning message\n */\n warn(category: string, message: string): void;\n \n /**\n * Log an error message with optional Error object\n */\n error(category: string, message: string, error?: Error): void;\n}\n\n/**\n * Console-based logger implementation.\n * Outputs to stdout/stderr with timestamps and categories.\n */\nexport const consoleLogger: Logger = {\n debug: (cat, msg) => console.debug(`[DEBUG] [${cat}] ${msg}`),\n info: (cat, msg) => console.log(`[INFO] [${cat}] ${msg}`),\n warn: (cat, msg) => console.warn(`[WARN] [${cat}] ${msg}`),\n error: (cat, msg, err) => console.error(`[ERROR] [${cat}] ${msg}`, err || ''),\n};\n\n/**\n * Null logger that discards all messages.\n * Useful for tests or when logging should be disabled.\n */\nexport const nullLogger: Logger = {\n debug: () => {},\n info: () => {},\n warn: () => {},\n error: () => {},\n};\n\n/**\n * Global logger instance.\n * Defaults to console logger but can be replaced.\n */\nlet globalLogger: Logger = consoleLogger;\n\n/**\n * Set the global logger instance.\n * Call this during initialization to use a custom logger.\n * \n * @param logger The logger implementation to use\n * \n * @example\n * // In VS Code extension\n * import { setLogger } from 'pipeline-core';\n * import { 
getExtensionLogger } from './shared/extension-logger';\n * \n * setLogger({\n * debug: (cat, msg) => getExtensionLogger().debug(cat, msg),\n * info: (cat, msg) => getExtensionLogger().info(cat, msg),\n * warn: (cat, msg) => getExtensionLogger().warn(cat, msg),\n * error: (cat, msg, err) => getExtensionLogger().error(cat, msg, err),\n * });\n */\nexport function setLogger(logger: Logger): void {\n globalLogger = logger;\n}\n\n/**\n * Get the current global logger instance.\n * \n * @returns The current logger\n */\nexport function getLogger(): Logger {\n return globalLogger;\n}\n\n/**\n * Reset the logger to the default console logger.\n * Primarily useful for testing.\n */\nexport function resetLogger(): void {\n globalLogger = consoleLogger;\n}\n", "/**\n * Error Codes for Pipeline Core\n *\n * Well-known error codes used across the pipeline-core package.\n * These codes provide structured error identification without relying on message parsing.\n *\n * Categories:\n * - Control flow: CANCELLED, TIMEOUT, RETRY_EXHAUSTED\n * - AI operations: AI_INVOCATION_FAILED\n * - Pipeline phases: PIPELINE_*, MAP_REDUCE_*\n * - Queue operations: QUEUE_*\n * - Data operations: CSV_*, TEMPLATE_*, MISSING_VARIABLE\n */\n\n/**\n * Error codes as a const object for type safety and autocompletion\n */\nexport const ErrorCode = {\n // =========================================================================\n // Control Flow\n // =========================================================================\n /** Operation was cancelled by user or system */\n CANCELLED: 'CANCELLED',\n /** Operation exceeded its timeout limit */\n TIMEOUT: 'TIMEOUT',\n /** All retry attempts have been exhausted */\n RETRY_EXHAUSTED: 'RETRY_EXHAUSTED',\n\n // =========================================================================\n // AI Operations\n // =========================================================================\n /** AI invocation failed (SDK, CLI, or other backend) */\n AI_INVOCATION_FAILED: 'AI_INVOCATION_FAILED',\n /** AI response could not be parsed */\n AI_RESPONSE_PARSE_FAILED: 'AI_RESPONSE_PARSE_FAILED',\n /** AI session pool exhausted */\n AI_POOL_EXHAUSTED: 'AI_POOL_EXHAUSTED',\n\n // =========================================================================\n // Pipeline Execution\n // =========================================================================\n /** Generic pipeline execution failure */\n PIPELINE_EXECUTION_FAILED: 'PIPELINE_EXECUTION_FAILED',\n /** Pipeline filter phase failed */\n PIPELINE_FILTER_FAILED: 'PIPELINE_FILTER_FAILED',\n /** Pipeline input validation failed */\n PIPELINE_INPUT_INVALID: 'PIPELINE_INPUT_INVALID',\n /** Pipeline configuration is invalid */\n PIPELINE_CONFIG_INVALID: 'PIPELINE_CONFIG_INVALID',\n\n // =========================================================================\n // Map-Reduce\n // =========================================================================\n /** Split phase failed */\n MAP_REDUCE_SPLIT_FAILED: 'MAP_REDUCE_SPLIT_FAILED',\n /** Map phase failed for one or more items */\n MAP_REDUCE_MAP_FAILED: 'MAP_REDUCE_MAP_FAILED',\n /** Reduce phase failed */\n MAP_REDUCE_REDUCE_FAILED: 'MAP_REDUCE_REDUCE_FAILED',\n\n // =========================================================================\n // Queue Operations\n // =========================================================================\n /** Task exceeded its timeout */\n QUEUE_TASK_TIMEOUT: 'QUEUE_TASK_TIMEOUT',\n /** Task execution failed */\n QUEUE_TASK_FAILED: 'QUEUE_TASK_FAILED',\n /** Queue 
is not running */\n QUEUE_NOT_RUNNING: 'QUEUE_NOT_RUNNING',\n\n // =========================================================================\n // Data Operations\n // =========================================================================\n /** CSV parsing failed */\n CSV_PARSE_ERROR: 'CSV_PARSE_ERROR',\n /** Template rendering failed */\n TEMPLATE_ERROR: 'TEMPLATE_ERROR',\n /** Required template variable is missing */\n MISSING_VARIABLE: 'MISSING_VARIABLE',\n /** Prompt file resolution failed */\n PROMPT_RESOLUTION_FAILED: 'PROMPT_RESOLUTION_FAILED',\n /** Skill resolution failed */\n SKILL_RESOLUTION_FAILED: 'SKILL_RESOLUTION_FAILED',\n /** Input generation failed */\n INPUT_GENERATION_FAILED: 'INPUT_GENERATION_FAILED',\n\n // =========================================================================\n // File System\n // =========================================================================\n /** File not found (wrapper for ENOENT) */\n FILE_NOT_FOUND: 'FILE_NOT_FOUND',\n /** Permission denied (wrapper for EACCES) */\n PERMISSION_DENIED: 'PERMISSION_DENIED',\n /** Generic file system error */\n FILE_SYSTEM_ERROR: 'FILE_SYSTEM_ERROR',\n\n // =========================================================================\n // Unknown / Fallback\n // =========================================================================\n /** Error code could not be determined */\n UNKNOWN: 'UNKNOWN',\n} as const;\n\n/**\n * Type representing valid error codes\n */\nexport type ErrorCodeType = typeof ErrorCode[keyof typeof ErrorCode];\n\n/**\n * Map Node.js system error codes to our error codes\n */\nexport function mapSystemErrorCode(nodeCode: string): ErrorCodeType {\n switch (nodeCode) {\n case 'ENOENT':\n return ErrorCode.FILE_NOT_FOUND;\n case 'EACCES':\n case 'EPERM':\n return ErrorCode.PERMISSION_DENIED;\n case 'ETIMEDOUT':\n case 'ESOCKETTIMEDOUT':\n return ErrorCode.TIMEOUT;\n case 'ECONNREFUSED':\n case 'ENOTFOUND':\n case 'EAI_AGAIN':\n return ErrorCode.AI_INVOCATION_FAILED;\n default:\n if (nodeCode.startsWith('E')) {\n return ErrorCode.FILE_SYSTEM_ERROR;\n }\n return ErrorCode.UNKNOWN;\n }\n}\n", "/**\n * PipelineCoreError\n *\n * Base error class for all pipeline-core errors.\n * Provides structured error information with:\n * - code: Well-known error code for programmatic handling\n * - cause: Original error that caused this error (error chaining)\n * - meta: Additional context metadata\n */\n\nimport { ErrorCode, ErrorCodeType, mapSystemErrorCode } from './error-codes';\nimport { getLogger, LogCategory } from '../logger';\n\n/**\n * Metadata that can be attached to errors for debugging and telemetry\n */\nexport interface ErrorMetadata {\n /** Unique identifier for the execution context */\n executionId?: string;\n /** Phase where the error occurred (input, filter, map, reduce) */\n phase?: string;\n /** Task identifier for queue operations */\n taskId?: string;\n /** Timeout value that was exceeded */\n timeoutMs?: number;\n /** Retry attempt number */\n attempt?: number;\n /** Maximum retry attempts configured */\n maxAttempts?: number;\n /** Item index in a batch operation */\n itemIndex?: number;\n /** Total items in a batch operation */\n totalItems?: number;\n /** File path related to the error */\n filePath?: string;\n /** Additional custom metadata */\n [key: string]: unknown;\n}\n\n/**\n * Base error class for pipeline-core package.\n *\n * @example\n * ```typescript\n * throw new PipelineCoreError('Failed to parse CSV', {\n * code: ErrorCode.CSV_PARSE_ERROR,\n * cause: 
originalError,\n * meta: { filePath: 'input.csv', line: 42 }\n * });\n * ```\n */\nexport class PipelineCoreError extends Error {\n /** Well-known error code for programmatic handling */\n readonly code: ErrorCodeType;\n\n /** Original error that caused this error */\n readonly cause?: unknown;\n\n /** Additional context metadata */\n readonly meta?: ErrorMetadata;\n\n constructor(\n message: string,\n options?: {\n code?: ErrorCodeType;\n cause?: unknown;\n meta?: ErrorMetadata;\n }\n ) {\n super(message);\n\n this.name = 'PipelineCoreError';\n this.code = options?.code ?? ErrorCode.UNKNOWN;\n this.cause = options?.cause;\n this.meta = options?.meta;\n\n // Ensure proper prototype chain for instanceof checks\n Object.setPrototypeOf(this, new.target.prototype);\n }\n\n /**\n * Get a formatted string representation including code and metadata\n */\n toDetailedString(): string {\n const parts = [`[${this.code}] ${this.message}`];\n\n if (this.meta && Object.keys(this.meta).length > 0) {\n parts.push(`Meta: ${JSON.stringify(this.meta)}`);\n }\n\n if (this.cause instanceof Error) {\n parts.push(`Caused by: ${this.cause.message}`);\n } else if (this.cause !== undefined) {\n parts.push(`Caused by: ${String(this.cause)}`);\n }\n\n return parts.join('\\n');\n }\n\n /**\n * Convert to a plain object for serialization\n */\n toJSON(): Record<string, unknown> {\n return {\n name: this.name,\n code: this.code,\n message: this.message,\n meta: this.meta,\n cause: this.cause instanceof Error\n ? { name: this.cause.name, message: this.cause.message }\n : this.cause,\n stack: this.stack,\n };\n }\n}\n\n/**\n * Type guard to check if an error is a PipelineCoreError\n */\nexport function isPipelineCoreError(error: unknown): error is PipelineCoreError {\n return error instanceof PipelineCoreError;\n}\n\n/**\n * Convert any error to a PipelineCoreError.\n * If already a PipelineCoreError, returns as-is.\n * Otherwise wraps the error with appropriate code detection.\n */\nexport function toPipelineCoreError(\n error: unknown,\n defaultCode: ErrorCodeType = ErrorCode.UNKNOWN,\n meta?: ErrorMetadata\n): PipelineCoreError {\n // Already a PipelineCoreError\n if (isPipelineCoreError(error)) {\n // Merge additional meta if provided\n if (meta) {\n return new PipelineCoreError(error.message, {\n code: error.code,\n cause: error.cause,\n meta: { ...error.meta, ...meta },\n });\n }\n return error;\n }\n\n // Regular Error\n if (error instanceof Error) {\n // Try to detect code from Node.js system errors\n const nodeCode = (error as NodeJS.ErrnoException).code;\n const detectedCode = nodeCode ? mapSystemErrorCode(nodeCode) : defaultCode;\n\n return new PipelineCoreError(error.message, {\n code: detectedCode !== ErrorCode.UNKNOWN ? detectedCode : defaultCode,\n cause: error,\n meta,\n });\n }\n\n // String or other primitive\n const message = typeof error === 'string' ? error : String(error);\n return new PipelineCoreError(message, {\n code: defaultCode,\n cause: error,\n meta,\n });\n}\n\n/**\n * Wrap an error with a new message while preserving the original as cause.\n * Useful for adding context at different layers.\n */\nexport function wrapError(\n message: string,\n cause: unknown,\n code?: ErrorCodeType,\n meta?: ErrorMetadata\n): PipelineCoreError {\n // If cause is already a PipelineCoreError, preserve its code unless overridden\n const causeError = isPipelineCoreError(cause) ? cause : undefined;\n const effectiveCode = code ?? causeError?.code ?? ErrorCode.UNKNOWN;\n const effectiveMeta = meta ?? 
causeError?.meta;\n\n return new PipelineCoreError(message, {\n code: effectiveCode,\n cause,\n meta: effectiveMeta,\n });\n}\n\n/**\n * Extract a human-readable message from an error's cause chain\n */\nexport function getErrorCauseMessage(error: unknown, maxDepth = 5): string {\n const messages: string[] = [];\n let current: unknown = error;\n let depth = 0;\n\n while (current && depth < maxDepth) {\n if (current instanceof Error) {\n messages.push(current.message);\n // Only PipelineCoreError has cause property in ES2020\n current = (current as PipelineCoreError).cause;\n } else if (typeof current === 'string') {\n messages.push(current);\n break;\n } else {\n messages.push(String(current));\n break;\n }\n depth++;\n }\n\n return messages.join(' -> ');\n}\n\n/**\n * Log an error with structured information.\n * Uses the global logger and formats PipelineCoreError specially.\n */\nexport function logError(\n category: string,\n message: string,\n error: unknown\n): void {\n const logger = getLogger();\n\n if (isPipelineCoreError(error)) {\n const details = [\n `[${error.code}]`,\n error.message,\n ];\n\n if (error.meta && Object.keys(error.meta).length > 0) {\n details.push(`(${JSON.stringify(error.meta)})`);\n }\n\n logger.error(category, `${message}: ${details.join(' ')}`, error);\n } else if (error instanceof Error) {\n logger.error(category, `${message}: ${error.message}`, error);\n } else {\n logger.error(category, `${message}: ${String(error)}`);\n }\n}\n\n// Re-export for convenience\nexport { ErrorCode, ErrorCodeType, mapSystemErrorCode } from './error-codes';\n", "/**\n * Errors Module - Public API\n *\n * Exports all error types and utilities for the pipeline-core package.\n */\n\n// Error codes\nexport {\n ErrorCode,\n ErrorCodeType,\n mapSystemErrorCode,\n} from './error-codes';\n\n// Core error class and utilities\nexport {\n PipelineCoreError,\n ErrorMetadata,\n isPipelineCoreError,\n toPipelineCoreError,\n wrapError,\n getErrorCauseMessage,\n logError,\n} from './pipeline-core-error';\n", "/**\n * Cancellation Utilities\n *\n * Provides a standard way to check for and handle cancellation across async operations.\n * Works with the existing ConcurrencyLimiter's isCancelled pattern.\n */\n\nimport { PipelineCoreError, ErrorCode, ErrorMetadata } from '../errors';\n\n/**\n * Error thrown when an operation is cancelled.\n * Extends PipelineCoreError with CANCELLED code.\n */\nexport class CancellationError extends PipelineCoreError {\n constructor(message = 'Operation cancelled', meta?: ErrorMetadata) {\n super(message, {\n code: ErrorCode.CANCELLED,\n meta,\n });\n this.name = 'CancellationError';\n }\n}\n\n/**\n * Function type for cancellation check.\n * Returns true if the operation should be cancelled.\n */\nexport type IsCancelledFn = () => boolean;\n\n/**\n * Check if an error is a cancellation error\n */\nexport function isCancellationError(error: unknown): error is CancellationError {\n if (error instanceof CancellationError) {\n return true;\n }\n if (error instanceof PipelineCoreError && error.code === ErrorCode.CANCELLED) {\n return true;\n }\n return false;\n}\n\n/**\n * Throws CancellationError if the operation has been cancelled.\n * Use at strategic points in long-running operations.\n *\n * @param isCancelled Optional function to check cancellation status\n * @param meta Optional metadata to include in the error\n * @throws CancellationError if cancelled\n */\nexport function throwIfCancelled(\n isCancelled?: IsCancelledFn,\n meta?: ErrorMetadata\n): void {\n 
if (isCancelled?.()) {\n throw new CancellationError('Operation cancelled', meta);\n }\n}\n\n/**\n * Create a cancellation token from a function.\n * Useful for wrapping external cancellation sources.\n */\nexport function createCancellationToken(isCancelled?: IsCancelledFn): {\n isCancelled: IsCancelledFn;\n throwIfCancelled: (meta?: ErrorMetadata) => void;\n} {\n const fn: IsCancelledFn = isCancelled ?? (() => false);\n return {\n isCancelled: fn,\n throwIfCancelled: (meta?: ErrorMetadata) => throwIfCancelled(fn, meta),\n };\n}\n", "/**\n * Timeout Utilities\n *\n * Provides a standard way to apply timeouts to async operations.\n * Produces structured PipelineCoreError with TIMEOUT code.\n */\n\nimport { PipelineCoreError, ErrorCode, ErrorMetadata } from '../errors';\nimport { IsCancelledFn, throwIfCancelled } from './cancellation';\n\n/**\n * Error thrown when an operation times out.\n * Extends PipelineCoreError with TIMEOUT code.\n */\nexport class TimeoutError extends PipelineCoreError {\n constructor(message: string, meta?: ErrorMetadata) {\n super(message, {\n code: ErrorCode.TIMEOUT,\n meta,\n });\n this.name = 'TimeoutError';\n }\n}\n\n/**\n * Options for withTimeout\n */\nexport interface TimeoutOptions {\n /** Timeout in milliseconds */\n timeoutMs: number;\n /** Optional callback when timeout occurs (before throwing) */\n onTimeout?: () => void;\n /** Optional cancellation check function */\n isCancelled?: IsCancelledFn;\n /** Optional operation name for error messages */\n operationName?: string;\n /** Additional metadata for the timeout error */\n meta?: ErrorMetadata;\n}\n\n/**\n * Execute an async function with a timeout.\n *\n * @param fn The async function to execute\n * @param options Timeout configuration\n * @returns Promise resolving to the function's result\n * @throws TimeoutError if the timeout is exceeded\n * @throws CancellationError if cancelled\n *\n * @example\n * ```typescript\n * const result = await withTimeout(\n * () => fetchData(),\n * { timeoutMs: 5000, operationName: 'fetchData' }\n * );\n * ```\n */\nexport async function withTimeout<T>(\n fn: () => Promise<T>,\n options: TimeoutOptions\n): Promise<T> {\n const { timeoutMs, onTimeout, isCancelled, operationName, meta } = options;\n\n // Check for immediate cancellation\n throwIfCancelled(isCancelled, meta);\n\n return new Promise<T>((resolve, reject) => {\n let completed = false;\n let timeoutId: ReturnType<typeof setTimeout> | undefined;\n\n // Set up timeout\n timeoutId = setTimeout(() => {\n if (!completed) {\n completed = true;\n onTimeout?.();\n\n const name = operationName ?? 
'Operation';\n reject(\n new TimeoutError(`${name} timed out after ${timeoutMs}ms`, {\n ...meta,\n timeoutMs,\n })\n );\n }\n }, timeoutMs);\n\n // Execute the function\n fn()\n .then((result) => {\n if (!completed) {\n completed = true;\n if (timeoutId) {\n clearTimeout(timeoutId);\n }\n resolve(result);\n }\n })\n .catch((error) => {\n if (!completed) {\n completed = true;\n if (timeoutId) {\n clearTimeout(timeoutId);\n }\n reject(error);\n }\n });\n });\n}\n\n/**\n * Check if an error is a timeout error\n */\nexport function isTimeoutError(error: unknown): error is TimeoutError {\n if (error instanceof TimeoutError) {\n return true;\n }\n if (error instanceof PipelineCoreError && error.code === ErrorCode.TIMEOUT) {\n return true;\n }\n return false;\n}\n\n/**\n * Create a promise that rejects after a timeout.\n * Useful for Promise.race patterns.\n */\nexport function createTimeoutPromise(\n timeoutMs: number,\n operationName?: string,\n meta?: ErrorMetadata\n): Promise<never> {\n return new Promise((_, reject) => {\n setTimeout(() => {\n const name = operationName ?? 'Operation';\n reject(\n new TimeoutError(`${name} timed out after ${timeoutMs}ms`, {\n ...meta,\n timeoutMs,\n })\n );\n }, timeoutMs);\n });\n}\n", "/**\n * Retry Utilities\n *\n * Provides a standard way to retry async operations with configurable backoff.\n * Produces structured PipelineCoreError with RETRY_EXHAUSTED code.\n */\n\nimport { PipelineCoreError, ErrorCode, ErrorMetadata } from '../errors';\nimport { IsCancelledFn, throwIfCancelled, isCancellationError } from './cancellation';\nimport { isTimeoutError } from './timeout';\n\n/**\n * Error thrown when all retry attempts have been exhausted.\n */\nexport class RetryExhaustedError extends PipelineCoreError {\n constructor(\n message: string,\n cause?: unknown,\n meta?: ErrorMetadata\n ) {\n super(message, {\n code: ErrorCode.RETRY_EXHAUSTED,\n cause,\n meta,\n });\n this.name = 'RetryExhaustedError';\n }\n}\n\n/**\n * Backoff strategy type\n */\nexport type BackoffStrategy = 'fixed' | 'exponential' | 'linear';\n\n/**\n * Function called before each retry attempt\n */\nexport type OnAttemptFn = (attempt: number, maxAttempts: number, lastError?: unknown) => void;\n\n/**\n * Function to determine if an error should trigger a retry\n */\nexport type RetryOnFn = (error: unknown, attempt: number) => boolean;\n\n/**\n * Options for withRetry\n */\nexport interface RetryOptions {\n /** Maximum number of attempts (including initial attempt). Default: 3 */\n attempts?: number;\n /** Base delay between retries in milliseconds. Default: 1000 */\n delayMs?: number;\n /** Backoff strategy. Default: 'exponential' */\n backoff?: BackoffStrategy;\n /** Maximum delay between retries (caps exponential/linear growth). Default: 30000 */\n maxDelayMs?: number;\n /** Function to determine if error should trigger retry. 
Default: retry all except cancellation */\n retryOn?: RetryOnFn;\n /** Callback before each attempt */\n onAttempt?: OnAttemptFn;\n /** Optional cancellation check function */\n isCancelled?: IsCancelledFn;\n /** Optional operation name for error messages */\n operationName?: string;\n /** Additional metadata for errors */\n meta?: ErrorMetadata;\n}\n\n/** Default retry options */\nexport const DEFAULT_RETRY_OPTIONS: Required<Omit<RetryOptions, 'retryOn' | 'onAttempt' | 'isCancelled' | 'operationName' | 'meta'>> = {\n attempts: 3,\n delayMs: 1000,\n backoff: 'exponential',\n maxDelayMs: 30000,\n};\n\n/**\n * Default retry predicate - retry everything except cancellation errors\n */\nexport const defaultRetryOn: RetryOnFn = (error: unknown): boolean => {\n // Never retry cancellation\n if (isCancellationError(error)) {\n return false;\n }\n return true;\n};\n\n/**\n * Retry predicate that also retries on timeout\n */\nexport const retryOnTimeout: RetryOnFn = (error: unknown): boolean => {\n if (isCancellationError(error)) {\n return false;\n }\n return isTimeoutError(error);\n};\n\n/**\n * Calculate delay for a given attempt based on backoff strategy\n */\nexport function calculateDelay(\n attempt: number,\n baseDelayMs: number,\n backoff: BackoffStrategy,\n maxDelayMs: number\n): number {\n let delay: number;\n\n switch (backoff) {\n case 'fixed':\n delay = baseDelayMs;\n break;\n case 'linear':\n delay = baseDelayMs * attempt;\n break;\n case 'exponential':\n default:\n delay = baseDelayMs * Math.pow(2, attempt - 1);\n break;\n }\n\n return Math.min(delay, maxDelayMs);\n}\n\n/**\n * Execute an async function with retries.\n *\n * @param fn The async function to execute\n * @param options Retry configuration\n * @returns Promise resolving to the function's result\n * @throws RetryExhaustedError if all attempts fail\n * @throws CancellationError if cancelled\n * @throws Original error if retryOn returns false\n *\n * @example\n * ```typescript\n * const result = await withRetry(\n * () => fetchData(),\n * {\n * attempts: 3,\n * delayMs: 1000,\n * backoff: 'exponential',\n * onAttempt: (attempt) => console.log(`Attempt ${attempt}`)\n * }\n * );\n * ```\n */\nexport async function withRetry<T>(\n fn: () => Promise<T>,\n options?: RetryOptions\n): Promise<T> {\n const {\n attempts = DEFAULT_RETRY_OPTIONS.attempts,\n delayMs = DEFAULT_RETRY_OPTIONS.delayMs,\n backoff = DEFAULT_RETRY_OPTIONS.backoff,\n maxDelayMs = DEFAULT_RETRY_OPTIONS.maxDelayMs,\n retryOn = defaultRetryOn,\n onAttempt,\n isCancelled,\n operationName,\n meta,\n } = options ?? {};\n\n let lastError: unknown;\n\n for (let attempt = 1; attempt <= attempts; attempt++) {\n // Check for cancellation before each attempt\n throwIfCancelled(isCancelled, { ...meta, attempt, maxAttempts: attempts });\n\n // Notify about attempt\n onAttempt?.(attempt, attempts, lastError);\n\n try {\n return await fn();\n } catch (error) {\n lastError = error;\n\n // Check if we should retry\n if (!retryOn(error, attempt)) {\n throw error;\n }\n\n // Check if we have more attempts\n if (attempt < attempts) {\n const delay = calculateDelay(attempt, delayMs, backoff, maxDelayMs);\n await sleep(delay);\n }\n }\n }\n\n // All attempts exhausted\n const name = operationName ?? 
'Operation';\n throw new RetryExhaustedError(\n `${name} failed after ${attempts} attempts`,\n lastError,\n {\n ...meta,\n attempt: attempts,\n maxAttempts: attempts,\n }\n );\n}\n\n/**\n * Check if an error is a retry exhausted error\n */\nexport function isRetryExhaustedError(error: unknown): error is RetryExhaustedError {\n if (error instanceof RetryExhaustedError) {\n return true;\n }\n if (error instanceof PipelineCoreError && error.code === ErrorCode.RETRY_EXHAUSTED) {\n return true;\n }\n return false;\n}\n\n/**\n * Sleep for a specified duration\n */\nfunction sleep(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\n", "/**\n * Policy Runner\n *\n * Composes timeout, retry, and cancellation into a single unified policy.\n * This is the top-level API for running operations with cross-cutting concerns.\n */\n\nimport { ErrorMetadata } from '../errors';\nimport { IsCancelledFn, throwIfCancelled } from './cancellation';\nimport { withTimeout, TimeoutOptions } from './timeout';\nimport { withRetry, RetryOptions, BackoffStrategy } from './retry';\n\n/**\n * Unified policy options combining timeout, retry, and cancellation\n */\nexport interface PolicyOptions {\n // =========================================================================\n // Timeout Configuration\n // =========================================================================\n /** Timeout in milliseconds for each attempt (not total). Optional. */\n timeoutMs?: number;\n\n // =========================================================================\n // Retry Configuration\n // =========================================================================\n /** Whether to retry on failure. Default: false */\n retryOnFailure?: boolean;\n /** Number of retry attempts (including initial). Default: 3 when retryOnFailure is true */\n retryAttempts?: number;\n /** Base delay between retries in milliseconds. Default: 1000 */\n retryDelayMs?: number;\n /** Backoff strategy. Default: 'exponential' */\n backoff?: BackoffStrategy;\n /** Maximum delay between retries. Default: 30000 */\n maxRetryDelayMs?: number;\n\n // =========================================================================\n // Cancellation Configuration\n // =========================================================================\n /** Function to check if operation should be cancelled */\n isCancelled?: IsCancelledFn;\n\n // =========================================================================\n // Metadata\n // =========================================================================\n /** Operation name for error messages */\n operationName?: string;\n /** Additional metadata for errors */\n meta?: ErrorMetadata;\n}\n\n/** Default policy options */\nexport const DEFAULT_POLICY_OPTIONS: Partial<PolicyOptions> = {\n retryOnFailure: false,\n retryAttempts: 3,\n retryDelayMs: 1000,\n backoff: 'exponential',\n maxRetryDelayMs: 30000,\n};\n\n/**\n * Run an async function with a unified policy for timeout, retry, and cancellation.\n *\n * The policy applies in this order:\n * 1. Check for cancellation before starting\n * 2. If retry is enabled, wrap with retry logic\n * 3. 
For each attempt, if timeout is specified, wrap with timeout\n *\n * @param fn The async function to execute\n * @param options Policy configuration\n * @returns Promise resolving to the function's result\n *\n * @example\n * ```typescript\n * // Simple timeout\n * const result = await runWithPolicy(\n * () => fetchData(),\n * { timeoutMs: 5000 }\n * );\n *\n * // Timeout with retry\n * const result = await runWithPolicy(\n * () => fetchData(),\n * {\n * timeoutMs: 5000,\n * retryOnFailure: true,\n * retryAttempts: 3,\n * backoff: 'exponential'\n * }\n * );\n *\n * // With cancellation\n * const result = await runWithPolicy(\n * () => fetchData(),\n * {\n * timeoutMs: 5000,\n * isCancelled: () => shouldCancel\n * }\n * );\n * ```\n */\nexport async function runWithPolicy<T>(\n fn: () => Promise<T>,\n options?: PolicyOptions\n): Promise<T> {\n const {\n timeoutMs,\n retryOnFailure = false,\n retryAttempts = 3,\n retryDelayMs = 1000,\n backoff = 'exponential',\n maxRetryDelayMs = 30000,\n isCancelled,\n operationName,\n meta,\n } = options ?? {};\n\n // Check for immediate cancellation\n throwIfCancelled(isCancelled, meta);\n\n // Build the execution function with timeout if specified\n const executeWithTimeout = timeoutMs\n ? () =>\n withTimeout(fn, {\n timeoutMs,\n isCancelled,\n operationName,\n meta,\n } as TimeoutOptions)\n : fn;\n\n // If retry is enabled, wrap with retry\n if (retryOnFailure) {\n return withRetry(executeWithTimeout, {\n attempts: retryAttempts,\n delayMs: retryDelayMs,\n backoff,\n maxDelayMs: maxRetryDelayMs,\n isCancelled,\n operationName,\n meta,\n } as RetryOptions);\n }\n\n // Just execute (possibly with timeout)\n return executeWithTimeout();\n}\n\n/**\n * Create a policy runner with pre-configured defaults.\n * Useful for creating consistent policies across a module.\n *\n * @example\n * ```typescript\n * const aiPolicy = createPolicyRunner({\n * timeoutMs: 30000,\n * retryOnFailure: true,\n * retryAttempts: 2,\n * operationName: 'AI Invocation'\n * });\n *\n * // Later use:\n * const result = await aiPolicy(() => invokeAI(prompt));\n * ```\n */\nexport function createPolicyRunner(\n defaultOptions: PolicyOptions\n): <T>(fn: () => Promise<T>, overrides?: Partial<PolicyOptions>) => Promise<T> {\n return <T>(fn: () => Promise<T>, overrides?: Partial<PolicyOptions>) =>\n runWithPolicy(fn, { ...defaultOptions, ...overrides });\n}\n", "/**\n * Runtime Module - Public API\n *\n * Exports centralized async policy utilities for timeout, retry, cancellation, and concurrency.\n */\n\n// Cancellation\nexport {\n CancellationError,\n IsCancelledFn,\n isCancellationError,\n throwIfCancelled,\n createCancellationToken,\n} from './cancellation';\n\n// Timeout\nexport {\n TimeoutError,\n TimeoutOptions,\n withTimeout,\n isTimeoutError,\n createTimeoutPromise,\n} from './timeout';\n\n// Retry\nexport {\n RetryExhaustedError,\n BackoffStrategy,\n OnAttemptFn,\n RetryOnFn,\n RetryOptions,\n DEFAULT_RETRY_OPTIONS,\n defaultRetryOn,\n retryOnTimeout,\n calculateDelay,\n withRetry,\n isRetryExhaustedError,\n} from './retry';\n\n// Policy (unified runner)\nexport {\n PolicyOptions,\n DEFAULT_POLICY_OPTIONS,\n runWithPolicy,\n createPolicyRunner,\n} from './policy';\n", "/**\n * File Utilities\n *\n * Centralized file I/O utilities with consistent error handling.\n * Cross-platform compatible (Linux/Mac/Windows).\n * \n * These utilities provide:\n * - Consistent error handling across all file operations\n * - Type-safe return values with explicit error states\n * - YAML file 
reading/writing with proper parsing\n * - Directory operations with recursive support\n */\n\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport * as yaml from 'js-yaml';\n\n/**\n * Result type for file operations that may fail.\n * Provides explicit success/failure states with error information.\n */\nexport interface FileOperationResult<T> {\n success: boolean;\n data?: T;\n error?: Error;\n errorCode?: string;\n}\n\n/**\n * Options for file reading operations\n */\nexport interface ReadFileOptions {\n /** Encoding to use (default: 'utf8') */\n encoding?: BufferEncoding;\n}\n\n/**\n * Options for file writing operations\n */\nexport interface WriteFileOptions {\n /** Encoding to use (default: 'utf8') */\n encoding?: BufferEncoding;\n /** Create parent directories if they don't exist (default: true) */\n createDirs?: boolean;\n}\n\n/**\n * Options for YAML operations\n */\nexport interface YAMLOptions {\n /** Indentation level (default: 2) */\n indent?: number;\n /** Line width for wrapping (-1 for no wrap, default: -1) */\n lineWidth?: number;\n /** Disable YAML references (default: true) */\n noRefs?: boolean;\n}\n\n/**\n * Safely checks if a file or directory exists.\n * \n * @param filePath - Path to check\n * @returns True if the path exists, false otherwise\n * \n * @example\n * ```typescript\n * if (safeExists('/path/to/file.txt')) {\n * // File exists\n * }\n * ```\n */\nexport function safeExists(filePath: string): boolean {\n try {\n return fs.existsSync(filePath);\n } catch {\n // If we can't even check existence, treat as non-existent\n return false;\n }\n}\n\n/**\n * Safely checks if a path is a directory.\n * \n * @param dirPath - Path to check\n * @returns True if the path is a directory, false otherwise\n */\nexport function safeIsDirectory(dirPath: string): boolean {\n try {\n const stats = fs.statSync(dirPath);\n return stats.isDirectory();\n } catch {\n return false;\n }\n}\n\n/**\n * Safely checks if a path is a file.\n * \n * @param filePath - Path to check\n * @returns True if the path is a file, false otherwise\n */\nexport function safeIsFile(filePath: string): boolean {\n try {\n const stats = fs.statSync(filePath);\n return stats.isFile();\n } catch {\n return false;\n }\n}\n\n/**\n * Safely reads a file and returns its contents.\n * \n * @param filePath - Path to the file to read\n * @param options - Optional read options\n * @returns FileOperationResult with the file contents or error information\n * \n * @example\n * ```typescript\n * const result = safeReadFile('/path/to/file.txt');\n * if (result.success) {\n * console.log(result.data);\n * } else {\n * console.error('Failed to read:', result.error?.message);\n * }\n * ```\n */\nexport function safeReadFile(\n filePath: string,\n options: ReadFileOptions = {}\n): FileOperationResult<string> {\n const { encoding = 'utf8' } = options;\n\n try {\n const data = fs.readFileSync(filePath, encoding);\n return { success: true, data };\n } catch (error) {\n const err = error instanceof Error ? 
error : new Error(String(error));\n const errorCode = extractErrorCode(err);\n return { success: false, error: err, errorCode };\n }\n}\n\n/**\n * Safely writes content to a file.\n * \n * @param filePath - Path to the file to write\n * @param content - Content to write\n * @param options - Optional write options\n * @returns FileOperationResult indicating success or failure\n * \n * @example\n * ```typescript\n * const result = safeWriteFile('/path/to/file.txt', 'Hello, World!');\n * if (!result.success) {\n * console.error('Failed to write:', result.error?.message);\n * }\n * ```\n */\nexport function safeWriteFile(\n filePath: string,\n content: string,\n options: WriteFileOptions = {}\n): FileOperationResult<void> {\n const { encoding = 'utf8', createDirs = true } = options;\n\n try {\n // Ensure parent directory exists if requested\n if (createDirs) {\n const dirResult = ensureDirectoryExists(path.dirname(filePath));\n if (!dirResult.success) {\n return dirResult;\n }\n }\n\n fs.writeFileSync(filePath, content, encoding);\n return { success: true };\n } catch (error) {\n const err = error instanceof Error ? error : new Error(String(error));\n const errorCode = extractErrorCode(err);\n return { success: false, error: err, errorCode };\n }\n}\n\n/**\n * Ensures a directory exists, creating it recursively if necessary.\n * \n * @param dirPath - Path to the directory\n * @returns FileOperationResult indicating success or failure\n * \n * @example\n * ```typescript\n * const result = ensureDirectoryExists('/path/to/new/directory');\n * if (result.success) {\n * // Directory now exists\n * }\n * ```\n */\nexport function ensureDirectoryExists(dirPath: string): FileOperationResult<void> {\n try {\n if (!fs.existsSync(dirPath)) {\n fs.mkdirSync(dirPath, { recursive: true });\n }\n return { success: true };\n } catch (error) {\n const err = error instanceof Error ? error : new Error(String(error));\n const errorCode = extractErrorCode(err);\n return { success: false, error: err, errorCode };\n }\n}\n\n/**\n * Safely reads a directory and returns its entries.\n * \n * @param dirPath - Path to the directory to read\n * @param withFileTypes - If true, returns Dirent objects with file type info\n * @returns FileOperationResult with directory entries or error information\n * \n * @example\n * ```typescript\n * const result = safeReadDir('/path/to/directory');\n * if (result.success) {\n * result.data?.forEach(entry => console.log(entry));\n * }\n * ```\n */\nexport function safeReadDir(\n dirPath: string\n): FileOperationResult<string[]>;\nexport function safeReadDir(\n dirPath: string,\n withFileTypes: true\n): FileOperationResult<fs.Dirent[]>;\nexport function safeReadDir(\n dirPath: string,\n withFileTypes: false\n): FileOperationResult<string[]>;\nexport function safeReadDir(\n dirPath: string,\n withFileTypes?: boolean\n): FileOperationResult<string[] | fs.Dirent[]> {\n try {\n if (withFileTypes) {\n const entries = fs.readdirSync(dirPath, { withFileTypes: true });\n return { success: true, data: entries };\n } else {\n const entries = fs.readdirSync(dirPath);\n return { success: true, data: entries };\n }\n } catch (error) {\n const err = error instanceof Error ? 
error : new Error(String(error));\n const errorCode = extractErrorCode(err);\n return { success: false, error: err, errorCode };\n }\n}\n\n/**\n * Safely gets file stats.\n * \n * @param filePath - Path to the file\n * @returns FileOperationResult with fs.Stats or error information\n */\nexport function safeStats(filePath: string): FileOperationResult<fs.Stats> {\n try {\n const stats = fs.statSync(filePath);\n return { success: true, data: stats };\n } catch (error) {\n const err = error instanceof Error ? error : new Error(String(error));\n const errorCode = extractErrorCode(err);\n return { success: false, error: err, errorCode };\n }\n}\n\n/**\n * Reads and parses a YAML file.\n * \n * @param filePath - Path to the YAML file\n * @returns FileOperationResult with parsed YAML content or error information\n * \n * @example\n * ```typescript\n * interface Config {\n * name: string;\n * version: number;\n * }\n * \n * const result = readYAML<Config>('/path/to/config.yaml');\n * if (result.success && result.data) {\n * console.log(result.data.name);\n * }\n * ```\n */\nexport function readYAML<T = unknown>(filePath: string): FileOperationResult<T> {\n const readResult = safeReadFile(filePath);\n if (!readResult.success) {\n return { \n success: false, \n error: readResult.error, \n errorCode: readResult.errorCode \n };\n }\n\n try {\n const parsed = yaml.load(readResult.data!) as T;\n return { success: true, data: parsed };\n } catch (error) {\n const err = error instanceof Error ? error : new Error(String(error));\n // Enhance error message for YAML parse errors\n const yamlError = new Error(`YAML parse error in ${filePath}: ${err.message}`);\n return { success: false, error: yamlError, errorCode: 'YAML_PARSE_ERROR' };\n }\n}\n\n/**\n * Writes data to a YAML file.\n * \n * @param filePath - Path to the YAML file\n * @param data - Data to serialize and write\n * @param options - Optional YAML serialization options\n * @returns FileOperationResult indicating success or failure\n * \n * @example\n * ```typescript\n * const config = { name: 'MyApp', version: 1 };\n * const result = writeYAML('/path/to/config.yaml', config);\n * if (!result.success) {\n * console.error('Failed to write YAML:', result.error?.message);\n * }\n * ```\n */\nexport function writeYAML<T>(\n filePath: string,\n data: T,\n options: YAMLOptions = {}\n): FileOperationResult<void> {\n const { indent = 2, lineWidth = -1, noRefs = true } = options;\n\n try {\n const yamlContent = yaml.dump(data, { indent, lineWidth, noRefs });\n return safeWriteFile(filePath, yamlContent);\n } catch (error) {\n const err = error instanceof Error ? error : new Error(String(error));\n const yamlError = new Error(`YAML serialization error: ${err.message}`);\n return { success: false, error: yamlError, errorCode: 'YAML_DUMP_ERROR' };\n }\n}\n\n/**\n * Safely copies a file from source to destination.\n * \n * @param srcPath - Source file path\n * @param destPath - Destination file path\n * @param createDirs - Create parent directories if they don't exist (default: true)\n * @returns FileOperationResult indicating success or failure\n */\nexport function safeCopyFile(\n srcPath: string,\n destPath: string,\n createDirs: boolean = true\n): FileOperationResult<void> {\n try {\n if (createDirs) {\n const dirResult = ensureDirectoryExists(path.dirname(destPath));\n if (!dirResult.success) {\n return dirResult;\n }\n }\n\n fs.copyFileSync(srcPath, destPath);\n return { success: true };\n } catch (error) {\n const err = error instanceof Error ? 
error : new Error(String(error));\n const errorCode = extractErrorCode(err);\n return { success: false, error: err, errorCode };\n }\n}\n\n/**\n * Safely renames/moves a file or directory.\n * \n * @param oldPath - Current path\n * @param newPath - New path\n * @returns FileOperationResult indicating success or failure\n */\nexport function safeRename(\n oldPath: string,\n newPath: string\n): FileOperationResult<void> {\n try {\n fs.renameSync(oldPath, newPath);\n return { success: true };\n } catch (error) {\n const err = error instanceof Error ? error : new Error(String(error));\n const errorCode = extractErrorCode(err);\n return { success: false, error: err, errorCode };\n }\n}\n\n/**\n * Safely removes a file or directory.\n * \n * @param targetPath - Path to remove\n * @param options - Options for removal\n * @returns FileOperationResult indicating success or failure\n */\nexport function safeRemove(\n targetPath: string,\n options: { recursive?: boolean; force?: boolean } = {}\n): FileOperationResult<void> {\n const { recursive = false, force = false } = options;\n\n try {\n fs.rmSync(targetPath, { recursive, force });\n return { success: true };\n } catch (error) {\n const err = error instanceof Error ? error : new Error(String(error));\n const errorCode = extractErrorCode(err);\n return { success: false, error: err, errorCode };\n }\n}\n\n/**\n * Extracts error code from a Node.js error.\n * \n * @param error - The error to extract code from\n * @returns The error code or 'UNKNOWN'\n */\nfunction extractErrorCode(error: Error): string {\n // Node.js file system errors have a 'code' property\n const nodeError = error as NodeJS.ErrnoException;\n return nodeError.code || 'UNKNOWN';\n}\n\n/**\n * Gets a user-friendly error message for common file operation errors.\n * \n * @param errorCode - The error code from a file operation\n * @param context - Optional context about what operation was being performed\n * @returns A user-friendly error message\n */\nexport function getFileErrorMessage(errorCode: string, context?: string): string {\n const prefix = context ? 
`${context}: ` : '';\n \n switch (errorCode) {\n case 'ENOENT':\n return `${prefix}File or directory not found`;\n case 'EACCES':\n case 'EPERM':\n return `${prefix}Permission denied`;\n case 'EEXIST':\n return `${prefix}File or directory already exists`;\n case 'ENOTDIR':\n return `${prefix}Not a directory`;\n case 'EISDIR':\n return `${prefix}Is a directory`;\n case 'ENOSPC':\n return `${prefix}No space left on device`;\n case 'EMFILE':\n case 'ENFILE':\n return `${prefix}Too many open files`;\n case 'EBUSY':\n return `${prefix}Resource busy or locked`;\n case 'YAML_PARSE_ERROR':\n return `${prefix}Invalid YAML syntax`;\n case 'YAML_DUMP_ERROR':\n return `${prefix}Failed to serialize data to YAML`;\n default:\n return `${prefix}File operation failed`;\n }\n}\n", "/**\n * Glob Utilities\n * \n * Simple glob pattern matching for file discovery.\n * Uses Node.js built-in modules for compatibility.\n */\n\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport { getLogger, LogCategory } from '../logger';\n\n/**\n * Check if a filename matches a simple extension pattern\n * @param filename The filename to check\n * @param extension The extension to match (e.g., \".md\")\n * @returns True if the filename ends with the extension\n */\nfunction matchesExtension(filename: string, extension: string): boolean {\n return filename.toLowerCase().endsWith(extension.toLowerCase());\n}\n\n/**\n * Extract the extension pattern from a glob pattern\n * For patterns like \"**\\/*.md\", returns \".md\"\n * @param pattern The glob pattern\n * @returns The extension or null if not a simple extension pattern\n */\nfunction extractExtension(pattern: string): string | null {\n // Match patterns like **/*.md, *.md, **/*.txt\n const match = pattern.match(/\\*+\\/?\\*(\\.[a-zA-Z0-9]+)$/);\n if (match) {\n return match[1];\n }\n // Match patterns like *.md (without **/)\n const simpleMatch = pattern.match(/^\\*(\\.[a-zA-Z0-9]+)$/);\n if (simpleMatch) {\n return simpleMatch[1];\n }\n return null;\n}\n\n/**\n * Find all files matching a glob pattern in a directory\n * @param pattern The glob pattern (e.g., \"**\\/*.md\")\n * @param baseDir The base directory to search from\n * @returns Array of absolute file paths matching the pattern\n */\nexport function glob(pattern: string, baseDir: string): string[] {\n const results: string[] = [];\n const logger = getLogger();\n\n // For simple extension patterns, use optimized matching\n const extension = extractExtension(pattern);\n\n function walkDir(dir: string): void {\n try {\n const entries = fs.readdirSync(dir, { withFileTypes: true });\n\n for (const entry of entries) {\n const entryPath = path.join(dir, entry.name);\n\n if (entry.isDirectory()) {\n // Skip hidden directories and node_modules\n if (entry.name.startsWith('.') || entry.name === 'node_modules') {\n continue;\n }\n walkDir(entryPath);\n } else if (entry.isFile()) {\n // Check if file matches the pattern\n if (extension) {\n // Use optimized extension matching\n if (matchesExtension(entry.name, extension)) {\n results.push(entryPath);\n }\n } else {\n // Fallback: include all files for complex patterns\n results.push(entryPath);\n }\n }\n }\n } catch (error) {\n // Skip directories we can't read\n logger.warn(LogCategory.UTILS, `Cannot read directory ${dir}: ${error}`);\n }\n }\n\n walkDir(baseDir);\n return results;\n}\n\n/**\n * Get all files with a specific extension in a directory (recursive)\n * @param dir The directory to search\n * @param extension The file extension (e.g., \".md\")\n * 
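@example\n * ```typescript\n * // Illustrative usage sketch only: '/path/to/docs' is a hypothetical directory.\n * const markdownFiles = getFilesWithExtension('/path/to/docs', '.md');\n * ```\n * 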
@returns Array of absolute file paths\n */\nexport function getFilesWithExtension(dir: string, extension: string): string[] {\n return glob(`**/*${extension}`, dir);\n}\n", "/**\n * Utility functions for executing shell commands\n */\n\nimport { exec, ExecOptions } from 'child_process';\n\n/**\n * Execute a shell command asynchronously\n * @param command Command to execute\n * @param options Execution options\n * @returns Promise with stdout and stderr\n */\nexport function execAsync(\n command: string,\n options?: ExecOptions\n): Promise<{ stdout: string; stderr: string }> {\n return new Promise((resolve, reject) => {\n const defaultOptions: ExecOptions = {\n timeout: 30000, // 30 second default timeout\n maxBuffer: 10 * 1024 * 1024, // 10MB buffer\n ...options\n };\n\n exec(command, { ...defaultOptions, encoding: 'utf-8' }, (error, stdout, stderr) => {\n if (error) {\n reject(error);\n } else {\n resolve({ stdout: stdout as string, stderr: stderr as string });\n }\n });\n });\n}\n", "/**\n * Cross-platform HTTP utilities\n * Uses native Node.js https module for maximum compatibility\n */\n\nimport * as https from 'https';\nimport * as http from 'http';\n\nexport interface HttpResponse {\n statusCode: number;\n body: string;\n headers: http.IncomingHttpHeaders;\n}\n\n/**\n * Make an HTTP GET request using native Node.js modules\n * Works on all platforms without external dependencies\n * \n * @param url The URL to fetch\n * @param options Optional request options\n * @returns Promise resolving to the response\n */\nexport function httpGet(url: string, options?: {\n headers?: Record<string, string>;\n timeout?: number;\n}): Promise<HttpResponse> {\n return new Promise((resolve, reject) => {\n const urlObj = new URL(url);\n const isHttps = urlObj.protocol === 'https:';\n const client = isHttps ? https : http;\n\n const requestOptions: https.RequestOptions = {\n hostname: urlObj.hostname,\n port: urlObj.port || (isHttps ? 443 : 80),\n path: urlObj.pathname + urlObj.search,\n method: 'GET',\n headers: {\n 'User-Agent': 'Pipeline-Core',\n 'Accept': 'application/json',\n ...options?.headers\n },\n timeout: options?.timeout || 30000\n };\n\n const req = client.request(requestOptions, (res) => {\n let body = '';\n\n res.setEncoding('utf-8');\n res.on('data', (chunk) => {\n body += chunk;\n });\n\n res.on('end', () => {\n resolve({\n statusCode: res.statusCode || 0,\n body,\n headers: res.headers\n });\n });\n });\n\n req.on('error', (error) => {\n reject(error);\n });\n\n req.on('timeout', () => {\n req.destroy();\n reject(new Error('Request timed out'));\n });\n\n req.end();\n });\n}\n\n/**\n * Download a file from a URL and return its contents as a string\n * Follows redirects automatically\n * \n * @param url The URL to download from\n * @param options Optional request options\n * @returns Promise resolving to the file contents\n */\nexport async function httpDownload(url: string, options?: {\n headers?: Record<string, string>;\n timeout?: number;\n maxRedirects?: number;\n}): Promise<string> {\n const maxRedirects = options?.maxRedirects ?? 
5;\n let currentUrl = url;\n let redirectCount = 0;\n\n while (redirectCount < maxRedirects) {\n const response = await httpGet(currentUrl, options);\n\n // Handle redirects\n if (response.statusCode >= 300 && response.statusCode < 400 && response.headers.location) {\n currentUrl = response.headers.location;\n redirectCount++;\n continue;\n }\n\n if (response.statusCode >= 200 && response.statusCode < 300) {\n return response.body;\n }\n\n throw new Error(`HTTP ${response.statusCode}: ${response.body.substring(0, 200)}`);\n }\n\n throw new Error(`Too many redirects (max: ${maxRedirects})`);\n}\n\n/**\n * Fetch JSON from a URL\n * \n * @param url The URL to fetch\n * @param options Optional request options\n * @returns Promise resolving to the parsed JSON\n */\nexport async function httpGetJson<T = unknown>(url: string, options?: {\n headers?: Record<string, string>;\n timeout?: number;\n}): Promise<T> {\n const response = await httpGet(url, {\n ...options,\n headers: {\n 'Accept': 'application/json',\n ...options?.headers\n }\n });\n\n if (response.statusCode >= 200 && response.statusCode < 300) {\n return JSON.parse(response.body);\n }\n\n // Try to parse error message from JSON response\n try {\n const errorBody = JSON.parse(response.body);\n if (errorBody.message) {\n throw new Error(errorBody.message);\n }\n } catch {\n // Not JSON, use raw body\n }\n\n throw new Error(`HTTP ${response.statusCode}: ${response.body.substring(0, 200)}`);\n}\n", "/**\n * Text Matching Utilities\n * \n * Provides common text matching and manipulation functions used by\n * anchor systems in both markdown-comments and git-diff-comments.\n * \n * These pure functions handle:\n * - Text hashing (djb2 algorithm)\n * - Levenshtein distance calculation\n * - Similarity scoring\n * - Text normalization\n * - Line/offset conversions\n */\n\n/**\n * Configuration for anchor matching operations\n */\nexport interface AnchorMatchConfig {\n /** Number of characters to capture before the selection */\n contextCharsBefore: number;\n /** Number of characters to capture after the selection */\n contextCharsAfter: number;\n /** Minimum similarity threshold for fuzzy matching (0-1) */\n minSimilarityThreshold: number;\n /** Maximum line distance to search when relocating */\n maxLineSearchDistance: number;\n}\n\n/**\n * Default anchor matching configuration\n */\nexport const DEFAULT_ANCHOR_MATCH_CONFIG: AnchorMatchConfig = {\n contextCharsBefore: 100,\n contextCharsAfter: 100,\n minSimilarityThreshold: 0.6,\n maxLineSearchDistance: 50\n};\n\n/**\n * Generate a simple hash for text content\n * Uses a djb2-like algorithm for fast hashing\n */\nexport function hashText(text: string): string {\n let hash = 5381;\n for (let i = 0; i < text.length; i++) {\n hash = ((hash << 5) + hash) + text.charCodeAt(i);\n hash = hash & hash; // Convert to 32-bit integer\n }\n return Math.abs(hash).toString(36);\n}\n\n/**\n * Calculate Levenshtein distance between two strings\n * Used for fuzzy matching\n */\nexport function levenshteinDistance(str1: string, str2: string): number {\n const m = str1.length;\n const n = str2.length;\n\n // Use two rows to optimize space\n let prevRow = new Array(n + 1);\n let currRow = new Array(n + 1);\n\n // Initialize first row\n for (let j = 0; j <= n; j++) {\n prevRow[j] = j;\n }\n\n for (let i = 1; i <= m; i++) {\n currRow[0] = i;\n\n for (let j = 1; j <= n; j++) {\n if (str1[i - 1] === str2[j - 1]) {\n currRow[j] = prevRow[j - 1];\n } else {\n currRow[j] = 1 + Math.min(\n prevRow[j], // deletion\n 
currRow[j - 1], // insertion\n prevRow[j - 1] // substitution\n );\n }\n }\n\n // Swap rows\n [prevRow, currRow] = [currRow, prevRow];\n }\n\n return prevRow[n];\n}\n\n/**\n * Calculate similarity ratio between two strings (0-1)\n * 1 = identical, 0 = completely different\n */\nexport function calculateSimilarity(str1: string, str2: string): number {\n if (str1 === str2) {\n return 1;\n }\n if (str1.length === 0 || str2.length === 0) {\n return 0;\n }\n\n const distance = levenshteinDistance(str1, str2);\n const maxLength = Math.max(str1.length, str2.length);\n\n return 1 - (distance / maxLength);\n}\n\n/**\n * Normalize text for comparison (trim whitespace, normalize line endings)\n */\nexport function normalizeText(text: string): string {\n return text\n .replace(/\\r\\n/g, '\\n')\n .replace(/\\r/g, '\\n')\n .trim();\n}\n\n/**\n * Split document content into lines\n */\nexport function splitIntoLines(content: string): string[] {\n return content.split(/\\r?\\n/);\n}\n\n/**\n * Get character offset in document for a given line and column (1-based)\n */\nexport function getCharOffset(lines: string[], line: number, column: number): number {\n let offset = 0;\n\n for (let i = 0; i < line - 1 && i < lines.length; i++) {\n offset += lines[i].length + 1; // +1 for newline\n }\n\n offset += Math.min(column - 1, lines[line - 1]?.length || 0);\n\n return offset;\n}\n\n/**\n * Convert character offset to line and column (1-based)\n */\nexport function offsetToLineColumn(content: string, offset: number): { line: number; column: number } {\n const lines = splitIntoLines(content);\n let currentOffset = 0;\n\n for (let i = 0; i < lines.length; i++) {\n const lineLength = lines[i].length + 1; // +1 for newline\n\n if (currentOffset + lineLength > offset) {\n return {\n line: i + 1,\n column: offset - currentOffset + 1\n };\n }\n\n currentOffset += lineLength;\n }\n\n // Return last position if offset exceeds content\n return {\n line: lines.length,\n column: (lines[lines.length - 1]?.length || 0) + 1\n };\n}\n\n/**\n * Find all occurrences of a substring in content\n * Returns array of start offsets\n */\nexport function findAllOccurrences(content: string, searchText: string): number[] {\n const occurrences: number[] = [];\n if (!searchText) {\n return occurrences;\n }\n\n let startIndex = 0;\n while (true) {\n const index = content.indexOf(searchText, startIndex);\n if (index === -1) {\n break;\n }\n occurrences.push(index);\n startIndex = index + 1;\n }\n\n return occurrences;\n}\n\n/**\n * Base anchor interface for context-based text matching\n */\nexport interface BaseMatchAnchor {\n /** The exact selected/commented text */\n selectedText: string;\n /** Text appearing before the selection */\n contextBefore: string;\n /** Text appearing after the selection */\n contextAfter: string;\n /** Original line number when the anchor was created (for fallback) */\n originalLine: number;\n /** Hash/fingerprint of the selected text for quick comparison */\n textHash: string;\n}\n\n/**\n * Score a potential match based on context similarity\n */\nexport function scoreMatch(\n content: string,\n matchOffset: number,\n matchLength: number,\n anchor: BaseMatchAnchor,\n config: AnchorMatchConfig = DEFAULT_ANCHOR_MATCH_CONFIG\n): number {\n // Extract context around the match\n const contextBeforeStart = Math.max(0, matchOffset - config.contextCharsBefore);\n const actualContextBefore = content.substring(contextBeforeStart, matchOffset);\n\n const matchEnd = matchOffset + matchLength;\n const contextAfterEnd = 
Math.min(content.length, matchEnd + config.contextCharsAfter);\n const actualContextAfter = content.substring(matchEnd, contextAfterEnd);\n\n // Calculate similarity scores\n const beforeSimilarity = calculateSimilarity(\n normalizeText(anchor.contextBefore),\n normalizeText(actualContextBefore)\n );\n\n const afterSimilarity = calculateSimilarity(\n normalizeText(anchor.contextAfter),\n normalizeText(actualContextAfter)\n );\n\n // Combined score: weighted average\n // Context is very important, so we weight it heavily\n return (beforeSimilarity * 0.4) + (afterSimilarity * 0.4) + 0.2;\n}\n\n/**\n * Find text using fuzzy matching within a search range\n * Returns the best match offset and similarity score\n */\nexport function findFuzzyMatch(\n content: string,\n searchText: string,\n startLine: number,\n config: AnchorMatchConfig = DEFAULT_ANCHOR_MATCH_CONFIG\n): { offset: number; similarity: number } | null {\n const lines = splitIntoLines(content);\n const normalizedSearchText = normalizeText(searchText);\n\n if (!normalizedSearchText) {\n return null;\n }\n\n // Calculate search range\n const minLine = Math.max(0, startLine - 1 - config.maxLineSearchDistance);\n const maxLine = Math.min(lines.length - 1, startLine - 1 + config.maxLineSearchDistance);\n\n let bestMatch: { offset: number; similarity: number } | null = null;\n\n // Search through lines in the range\n for (let lineIdx = minLine; lineIdx <= maxLine; lineIdx++) {\n // Build a window of text to search (include neighboring lines for multi-line matches)\n const windowStart = lineIdx;\n const windowEnd = Math.min(maxLine, lineIdx + Math.ceil(normalizedSearchText.split('\\n').length) + 1);\n\n let windowText = '';\n let windowOffset = 0;\n\n for (let i = 0; i < windowStart; i++) {\n windowOffset += lines[i].length + 1;\n }\n\n for (let i = windowStart; i <= windowEnd; i++) {\n if (i > windowStart) {\n windowText += '\\n';\n }\n windowText += lines[i];\n }\n\n // Try to find the search text or similar text in this window\n const normalizedWindow = normalizeText(windowText);\n\n // First try exact match\n const exactIndex = normalizedWindow.indexOf(normalizedSearchText);\n if (exactIndex !== -1) {\n // Calculate actual offset accounting for potential trimming\n const leadingWhitespace = windowText.length - windowText.trimStart().length;\n const actualOffset = windowOffset + exactIndex + leadingWhitespace;\n\n return { offset: actualOffset, similarity: 1.0 };\n }\n\n // Try fuzzy matching by sliding window\n if (normalizedWindow.length >= normalizedSearchText.length * 0.5) {\n // Check subsequences\n for (let i = 0; i <= normalizedWindow.length - Math.floor(normalizedSearchText.length * 0.5); i++) {\n const substringLength = Math.min(normalizedSearchText.length * 1.5, normalizedWindow.length - i);\n const substring = normalizedWindow.substring(i, i + substringLength);\n\n const similarity = calculateSimilarity(normalizedSearchText, substring);\n\n if (similarity >= config.minSimilarityThreshold) {\n if (!bestMatch || similarity > bestMatch.similarity) {\n bestMatch = {\n offset: windowOffset + i,\n similarity: similarity\n };\n }\n }\n }\n }\n }\n\n return bestMatch;\n}\n\n/**\n * Extract context around a selection for anchor creation\n */\nexport function extractContext(\n content: string,\n startOffset: number,\n endOffset: number,\n config: AnchorMatchConfig = DEFAULT_ANCHOR_MATCH_CONFIG\n): { contextBefore: string; contextAfter: string } {\n // Extract context before\n const contextBeforeStart = Math.max(0, startOffset - 
config.contextCharsBefore);\n const contextBefore = content.substring(contextBeforeStart, startOffset);\n\n // Extract context after\n const contextAfterEnd = Math.min(content.length, endOffset + config.contextCharsAfter);\n const contextAfter = content.substring(endOffset, contextAfterEnd);\n\n return { contextBefore, contextAfter };\n}\n", "/**\n * AI Response Parser - Shared Utilities\n * \n * Comprehensive edge case handling for parsing AI responses into structured data.\n * Handles JSON in various formats, malformed responses, and natural language fallbacks.\n * \n * Used by both yaml-pipeline and map-reduce jobs.\n */\n\n/**\n * Configuration for bracket matching operations\n */\ninterface BracketConfig {\n open: string; // Opening bracket character: '{' or '['\n close: string; // Closing bracket character: '}' or ']'\n name: string; // Type name for error messages: 'object' or 'array'\n}\n\n/** Configuration for JSON object extraction */\nconst OBJECT_BRACKET_CONFIG: BracketConfig = { open: '{', close: '}', name: 'object' };\n\n/** Configuration for JSON array extraction */\nconst ARRAY_BRACKET_CONFIG: BracketConfig = { open: '[', close: ']', name: 'array' };\n\n/**\n * Check if a string has balanced brackets for the given configuration\n */\nfunction hasBalanced(str: string, config: BracketConfig): boolean {\n let depth = 0;\n for (const char of str) {\n if (char === config.open) depth++;\n else if (char === config.close) depth--;\n if (depth < 0) return false;\n }\n return depth === 0;\n}\n\n/**\n * Find all matching bracket positions for the given configuration\n */\nfunction findAllBracketPositions(str: string, config: BracketConfig): Array<{start: number, end: number}> {\n const positions: Array<{start: number, end: number}> = [];\n let depth = 0;\n let start = -1;\n\n for (let i = 0; i < str.length; i++) {\n if (str[i] === config.open) {\n if (depth === 0) start = i;\n depth++;\n } else if (str[i] === config.close) {\n depth--;\n if (depth === 0 && start !== -1) {\n positions.push({ start, end: i });\n start = -1;\n }\n }\n }\n return positions;\n}\n\n/**\n * Try to extract a JSON structure (object or array) from text\n * @param text Text to search in\n * @param config Bracket configuration\n * @param additionalValidation Optional additional validation (e.g., objects must contain ':')\n */\nfunction tryExtractStructure(\n text: string,\n config: BracketConfig,\n additionalValidation?: (candidate: string) => boolean\n): string | null {\n const pattern = new RegExp(`\\\\${config.open}[\\\\s\\\\S]*\\\\${config.close}`);\n const match = text.match(pattern);\n if (match) {\n const candidate = match[0];\n try {\n JSON.parse(candidate);\n return candidate;\n } catch {\n // Try to find valid JSON by checking all bracket pairs\n const positions = findAllBracketPositions(text, config);\n for (let i = positions.length - 1; i >= 0; i--) {\n const {start, end} = positions[i];\n const subCandidate = text.substring(start, end + 1);\n try {\n JSON.parse(subCandidate);\n return subCandidate;\n } catch {\n continue;\n }\n }\n // Return candidate if balanced (and passes additional validation if provided)\n if (hasBalanced(candidate, config) && (!additionalValidation || additionalValidation(candidate))) {\n return candidate;\n }\n }\n }\n return null;\n}\n\n/**\n * Extract JSON from a response string with comprehensive edge case handling\n * Handles JSON in markdown code blocks, inline, malformed responses, and various AI quirks\n * @param response Response string from AI\n * @returns Extracted 
JSON string or null if no valid JSON found\n */\nexport function extractJSON(response: string): string | null {\n if (!response || typeof response !== 'string') {\n return null;\n }\n\n response = response.trim();\n\n // Try markdown code block first (with various language tags)\n const codeBlockPatterns = [\n /```json\\s*([\\s\\S]*?)```/,\n /```javascript\\s*([\\s\\S]*?)```/,\n /```js\\s*([\\s\\S]*?)```/,\n /```\\s*([\\s\\S]*?)```/\n ];\n\n for (const pattern of codeBlockPatterns) {\n const match = response.match(pattern);\n if (match) {\n const extracted = match[1].trim();\n if (extracted && (extracted.startsWith('{') || extracted.startsWith('['))) {\n return extracted;\n }\n }\n }\n\n // Find first occurrence of { and [ to determine which type to try first\n const firstBrace = response.indexOf('{');\n const firstBracket = response.indexOf('[');\n\n // Object extraction requires colon to look like JSON\n const tryExtractObject = () => tryExtractStructure(response, OBJECT_BRACKET_CONFIG, c => c.includes(':'));\n const tryExtractArray = () => tryExtractStructure(response, ARRAY_BRACKET_CONFIG);\n\n // Try to extract based on which comes first in the string\n // This ensures that top-level arrays are detected before embedded arrays in objects\n if (firstBracket !== -1 && (firstBrace === -1 || firstBracket < firstBrace)) {\n // Array appears first - try array then object\n const arrayResult = tryExtractArray();\n if (arrayResult) return arrayResult;\n const objectResult = tryExtractObject();\n if (objectResult) return objectResult;\n } else if (firstBrace !== -1) {\n // Object appears first - try object then array\n const objectResult = tryExtractObject();\n if (objectResult) return objectResult;\n const arrayResult = tryExtractArray();\n if (arrayResult) return arrayResult;\n }\n\n // Try to extract key-value pairs from plain text\n const kvPairs = extractKeyValuePairs(response);\n if (kvPairs) {\n return kvPairs;\n }\n\n return null;\n}\n\n/**\n * Parse AI response and extract only the declared output fields\n * Includes comprehensive error recovery, type coercion, and natural language fallback\n * @param response AI response string\n * @param outputFields Expected field names\n * @returns Object with extracted fields (missing fields become null)\n */\nexport function parseAIResponse(\n response: string,\n outputFields: string[]\n): Record<string, unknown> {\n const jsonStr = extractJSON(response);\n \n if (!jsonStr) {\n // Last resort: extract from natural language\n const extracted = extractFieldsFromNaturalLanguage(response, outputFields);\n if (extracted) {\n return extracted;\n }\n throw new Error('No JSON found in AI response');\n }\n\n let parsed: Record<string, unknown>;\n try {\n parsed = JSON.parse(jsonStr);\n } catch {\n // Try to fix common JSON errors\n const fixed = attemptJSONFix(jsonStr);\n if (fixed) {\n try {\n parsed = JSON.parse(fixed);\n } catch {\n throw new Error(`Invalid JSON in AI response: ${jsonStr.substring(0, 100)}...`);\n }\n } else {\n throw new Error(`Invalid JSON in AI response: ${jsonStr.substring(0, 100)}...`);\n }\n }\n\n // Handle array responses\n if (Array.isArray(parsed)) {\n if (parsed.length === 1 && typeof parsed[0] === 'object' && parsed[0] !== null) {\n parsed = parsed[0] as Record<string, unknown>;\n } else {\n const obj: Record<string, unknown> = {};\n let reconstructed = false;\n for (const item of parsed) {\n if (typeof item === 'object' && item !== null) {\n if ('field' in item && 'value' in item) {\n obj[String(item.field)] = item.value;\n 
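// Remember that at least one {field, value} entry was folded into the object.\n 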
reconstructed = true;\n } else if ('key' in item && 'value' in item) {\n obj[String(item.key)] = item.value;\n reconstructed = true;\n }\n }\n }\n if (reconstructed) {\n parsed = obj;\n } else {\n throw new Error('AI returned array instead of object');\n }\n }\n }\n\n // Extract fields with case-insensitive matching\n const result: Record<string, unknown> = {};\n for (const field of outputFields) {\n if (field in parsed) {\n result[field] = coerceValue(parsed[field]);\n } else {\n const lowerField = field.toLowerCase();\n const matchingKey = Object.keys(parsed).find(k => k.toLowerCase() === lowerField);\n if (matchingKey) {\n result[field] = coerceValue(parsed[matchingKey]);\n } else {\n result[field] = null;\n }\n }\n }\n return result;\n}\n\n/**\n * Extract key-value pairs from plain text response and convert to JSON\n * Handles cases where AI returns \"field: value\" format instead of JSON\n */\nfunction extractKeyValuePairs(response: string): string | null {\n const lines = response.split('\\n');\n const kvPairs: Record<string, string> = {};\n let foundAny = false;\n\n for (const line of lines) {\n const kvMatch = line.match(/^\\s*([a-zA-Z_][a-zA-Z0-9_]*)\\s*[:=\\-]\\s*(.+?)\\s*$/);\n if (kvMatch) {\n const key = kvMatch[1].trim();\n let value = kvMatch[2].trim().replace(/[,;]$/, '');\n if ((value.startsWith('\"') && value.endsWith('\"')) || \n (value.startsWith(\"'\") && value.endsWith(\"'\"))) {\n value = value.slice(1, -1);\n }\n kvPairs[key] = value;\n foundAny = true;\n }\n }\n\n return foundAny ? JSON.stringify(kvPairs) : null;\n}\n\n/**\n * Attempt to fix common JSON formatting errors\n */\nfunction attemptJSONFix(jsonStr: string): string | null {\n try {\n let fixed = jsonStr.replace(/'/g, '\"');\n fixed = fixed.replace(/([{,]\\s*)([a-zA-Z_][a-zA-Z0-9_]*)\\s*:/g, '$1\"$2\":');\n fixed = fixed.replace(/,(\\s*[}\\]])/g, '$1');\n fixed = fixed.replace(/\"\\s*\\n\\s*\"/g, '\",\\n\"');\n JSON.parse(fixed);\n return fixed;\n } catch {\n return null;\n }\n}\n\n/**\n * Coerce values to appropriate types (convert strings to numbers/booleans where obvious)\n */\nfunction coerceValue(value: unknown): unknown {\n if (value === null || value === undefined) return null;\n if (typeof value !== 'string') return value;\n \n const str = (value as string).toLowerCase().trim();\n \n if (str === 'true' || str === 'yes') return true;\n if (str === 'false' || str === 'no') return false;\n if (str === 'null' || str === 'none' || str === 'n/a' || str === '') return null;\n \n if (/^-?\\d+\\.?\\d*$/.test(str)) {\n const num = parseFloat(str);\n if (!isNaN(num)) return num;\n }\n \n return value;\n}\n\n/**\n * Extract field values from natural language response (last resort)\n * Handles cases where AI completely ignores JSON format instruction\n */\nfunction extractFieldsFromNaturalLanguage(response: string, outputFields: string[]): Record<string, unknown> | null {\n const result: Record<string, unknown> = {};\n let foundAny = false;\n\n for (const field of outputFields) {\n // Patterns ordered from most specific to least specific\n // Use word boundary or limited capture to avoid over-matching\n // Note: Using (?:\\n|$) to match end of line OR end of string\n const patterns = [\n // Quoted value after colon/equals: field: \"value\" or field = 'value'\n new RegExp(`${field}\\\\s*[:=]\\\\s*[\"']([^\"']+)[\"']`, 'i'),\n // Markdown bold: **field**: value\n new RegExp(`\\\\*\\\\*${field}\\\\*\\\\*\\\\s*[:=]\\\\s*([^\\\\n,;]+?)(?:\\\\s+and\\\\s+|\\\\s*[,;]|\\\\s*\\\\n|\\\\s*$)`, 'i'),\n // field: value (stop 
at \"and\", comma, semicolon, newline, or end of string)\n new RegExp(`${field}\\\\s*[:=]\\\\s*([^\\\\n,;]+?)(?:\\\\s+and\\\\s+|\\\\s*[,;]|\\\\s*\\\\n|\\\\s*$)`, 'i'),\n // field is value (stop at \"and\", comma, period, newline, or end of string)\n new RegExp(`${field}\\\\s+is\\\\s+([^\\\\n,;.]+?)(?:\\\\s+and\\\\s+|\\\\s*[,;.]|\\\\s*\\\\n|\\\\s*$)`, 'i'),\n // field - value\n new RegExp(`${field}\\\\s*-\\\\s*([^\\\\n,;]+?)(?:\\\\s+and\\\\s+|\\\\s*[,;]|\\\\s*\\\\n|\\\\s*$)`, 'i'),\n ];\n\n for (const pattern of patterns) {\n const match = response.match(pattern);\n if (match) {\n let value = match[1].trim();\n // Clean up markdown formatting and quotes\n value = value.replace(/[*_`]/g, '').replace(/^[\"']|[\"']$/g, '');\n result[field] = coerceValue(value);\n foundAny = true;\n break;\n }\n }\n\n if (!(field in result)) {\n result[field] = null;\n }\n }\n\n return foundAny ? result : null;\n}\n", "/**\n * Window Focus Service\n *\n * Platform-specific service for bringing terminal windows to the foreground.\n * Currently only supports Windows (win32) platform.\n *\n * On Windows, each terminal window (cmd, PowerShell) is a separate process,\n * so we can use the PID to focus the specific window using PowerShell's\n * AppActivate method.\n */\n\nimport { spawn, SpawnOptions } from 'child_process';\nimport { InteractiveSession, TerminalType, WindowFocusResult } from './terminal-types';\n\n/**\n * Window Focus Service\n *\n * Handles platform-specific window focusing for interactive sessions.\n * Currently only implemented for Windows.\n */\nexport class WindowFocusService {\n private platform: NodeJS.Platform;\n private spawnFn: typeof spawn;\n\n constructor(\n platform?: NodeJS.Platform,\n spawnFn?: typeof spawn\n ) {\n this.platform = platform ?? process.platform;\n this.spawnFn = spawnFn ?? spawn;\n }\n\n /**\n * Check if window focusing is supported on the current platform\n */\n isSupported(): boolean {\n return this.platform === 'win32';\n }\n\n /**\n * Check if a specific terminal type supports window focusing\n * On Windows, cmd and PowerShell have separate processes per window.\n * Windows Terminal is trickier as it uses a single process for tabs.\n */\n isTerminalSupported(terminalType: TerminalType): boolean {\n if (this.platform !== 'win32') {\n return false;\n }\n\n // cmd and PowerShell have separate processes per window\n // Windows Terminal uses a single process, so PID-based focusing is less reliable\n return terminalType === 'cmd' || terminalType === 'powershell';\n }\n\n /**\n * Focus the window associated with an interactive session\n *\n * @param session The interactive session to focus\n * @returns Promise resolving to the focus result\n */\n async focusSession(session: InteractiveSession): Promise<WindowFocusResult> {\n // Check platform support\n if (!this.isSupported()) {\n return {\n success: false,\n error: `Window focusing is not supported on ${this.platform}. 
Only Windows is currently supported.`\n };\n }\n\n // Check if session has a PID\n if (!session.pid) {\n return {\n success: false,\n error: 'Session does not have a process ID'\n };\n }\n\n // Check if session is active\n if (session.status !== 'active' && session.status !== 'starting') {\n return {\n success: false,\n error: `Cannot focus session with status: ${session.status}`\n };\n }\n\n // Focus using the appropriate method for the terminal type\n return this.focusWindowByPid(session.pid, session.terminalType);\n }\n\n /**\n * Focus a window by its process ID using PowerShell\n *\n * Uses WScript.Shell's AppActivate method which works with PIDs\n * for cmd and PowerShell windows.\n */\n private async focusWindowByPid(pid: number, terminalType: TerminalType): Promise<WindowFocusResult> {\n return new Promise((resolve) => {\n // PowerShell command to activate window by PID\n // AppActivate accepts a PID and brings the window to foreground\n const psCommand = `(New-Object -ComObject WScript.Shell).AppActivate(${pid})`;\n\n const spawnOptions: SpawnOptions = {\n shell: true,\n stdio: ['pipe', 'pipe', 'pipe']\n };\n\n try {\n const child = this.spawnFn('powershell', ['-Command', psCommand], spawnOptions);\n\n let stdout = '';\n let stderr = '';\n\n child.stdout?.on('data', (data) => {\n stdout += data.toString();\n });\n\n child.stderr?.on('data', (data) => {\n stderr += data.toString();\n });\n\n child.on('close', (code) => {\n if (code === 0) {\n // AppActivate returns True if successful, False if window not found\n const result = stdout.trim().toLowerCase();\n if (result === 'true') {\n resolve({ success: true });\n } else {\n // Window not found - might be closed or PID changed\n resolve({\n success: false,\n error: `Window with PID ${pid} not found. The terminal may have been closed.`\n });\n }\n } else {\n resolve({\n success: false,\n error: `PowerShell command failed with code ${code}: ${stderr}`\n });\n }\n });\n\n child.on('error', (error) => {\n resolve({\n success: false,\n error: `Failed to spawn PowerShell: ${error.message}`\n });\n });\n } catch (error) {\n const errorMessage = error instanceof Error ? error.message : String(error);\n resolve({\n success: false,\n error: `Failed to focus window: ${errorMessage}`\n });\n }\n });\n }\n\n /**\n * Get the current platform\n */\n getPlatform(): NodeJS.Platform {\n return this.platform;\n }\n}\n\n/**\n * Singleton instance for convenience\n */\nlet defaultService: WindowFocusService | undefined;\n\n/**\n * Get the default WindowFocusService instance\n */\nexport function getWindowFocusService(): WindowFocusService {\n if (!defaultService) {\n defaultService = new WindowFocusService();\n }\n return defaultService;\n}\n\n/**\n * Reset the default service (useful for testing)\n */\nexport function resetWindowFocusService(): void {\n defaultService = undefined;\n}\n", "/**\n * CLI Utilities\n *\n * Shared utility functions for building and executing CLI commands\n * across the AI service module.\n */\n\nimport * as os from 'os';\nimport * as path from 'path';\nimport * as fs from 'fs';\nimport { InteractiveToolType } from './types';\n\n/** Threshold for prompt length before switching to file-based delivery */\nexport const PROMPT_LENGTH_THRESHOLD = 500;\n\n/**\n * Pattern matching characters that are problematic for shell escaping.\n * Alphanumeric, spaces, and basic punctuation (. , : ; - _ @) are safe.\n *\n * Problematic characters include:\n * - Quotes: ' \" `\n * - Shell variables/expansion: $ ! 
%\n * - Escape characters: \\\n * - Redirection/pipes: < > |\n * - Command separators: &\n * - Grouping: ( ) { } [ ]\n * - Comments/special: # * ? ~\n * - Whitespace: \\n \\r \\t (newlines and tabs)\n */\nexport const PROBLEMATIC_CHARS_PATTERN = /['\"$!%`\\\\<>|&(){}\\[\\]#*?~\\n\\r\\t]/;\n\n/** Base flags for copilot CLI commands */\nexport const COPILOT_BASE_FLAGS = '--allow-all-tools --allow-all-paths --disable-builtin-mcps';\n\n/**\n * Escape a string for safe use in shell commands.\n *\n * Platform-specific escaping:\n * - Windows (cmd.exe): Uses double quotes, escapes internal double quotes by doubling them (\"\")\n * - Unix/macOS: Uses single quotes, escapes internal single quotes with '\\''\n *\n * Windows-specific considerations:\n * - Newlines (\\n, \\r\\n) break cmd.exe commands, so they are converted to literal '\\n' strings\n * - Exclamation marks (!) are escaped with caret (^!) to prevent delayed expansion issues\n * - Percent signs (%) are escaped by doubling (%%)\n * - Carriage returns (\\r) are removed\n *\n * @param str - The string to escape\n * @param platform - Optional platform override for testing (defaults to process.platform)\n * @returns The escaped string safe for shell use\n */\nexport function escapeShellArg(str: string, platform?: NodeJS.Platform): string {\n const isWindows = (platform ?? process.platform) === 'win32';\n\n if (isWindows) {\n // Windows cmd.exe escaping:\n // - Use double quotes to wrap the argument\n // - Escape internal double quotes by doubling them (\"\")\n // - Escape percent signs by doubling them (%%)\n // - Escape exclamation marks with caret (^!) for delayed expansion safety\n // - Convert newlines to literal '\\n' strings since cmd.exe can't handle actual newlines\n // - Remove carriage returns (\\r) as they cause issues\n const escaped = str\n .replace(/\\r\\n/g, '\\\\n') // Convert Windows line endings to literal \\n first\n .replace(/\\r/g, '') // Remove any remaining carriage returns\n .replace(/\\n/g, '\\\\n') // Convert Unix newlines to literal \\n\n .replace(/%/g, '%%') // Escape percent signs (environment variable expansion)\n .replace(/!/g, '^!') // Escape exclamation marks (delayed expansion)\n .replace(/\"/g, '\"\"'); // Escape double quotes by doubling\n\n return `\"${escaped}\"`;\n } else {\n // Unix/macOS shell escaping:\n // In single quotes, the only character that needs escaping is the single quote itself.\n // We handle it by ending the single-quoted string, adding an escaped single quote,\n // and starting a new single-quoted string: ' -> '\\''\n //\n // Newlines, tabs, backslashes, etc. 
are preserved literally in single quotes,\n // which is exactly what we want for passing to copilot CLI.\n const escaped = str.replace(/'/g, \"'\\\\''\");\n\n // Wrap in single quotes for shell safety\n return `'${escaped}'`;\n }\n}\n\n/**\n * Determine if a prompt should use file-based delivery instead of direct shell argument.\n *\n * Uses direct prompt when:\n * - Prompt is short (under PROMPT_LENGTH_THRESHOLD)\n * - AND contains only safe characters (alphanumeric, spaces, basic punctuation)\n *\n * Uses file-based delivery when:\n * - Prompt is long (over PROMPT_LENGTH_THRESHOLD)\n * - OR contains any shell-problematic characters\n *\n * @param prompt - The prompt to evaluate\n * @returns true if file-based delivery should be used, false for direct prompt\n */\nexport function shouldUseFileDelivery(prompt: string): boolean {\n if (prompt.length > PROMPT_LENGTH_THRESHOLD) {\n return true;\n }\n return PROBLEMATIC_CHARS_PATTERN.test(prompt);\n}\n\n/**\n * Write prompt content to a temporary file.\n * The OS handles cleanup via the temp directory lifecycle.\n *\n * File naming includes timestamp and random suffix to avoid collisions.\n * Uses UTF-8 encoding for proper Unicode support.\n *\n * @param prompt - The prompt content to write\n * @returns The absolute path to the created temp file\n */\nexport function writePromptToTempFile(prompt: string): string {\n const tmpDir = os.tmpdir();\n const timestamp = Date.now();\n const randomSuffix = Math.random().toString(36).slice(2, 8);\n const filename = `copilot-prompt-${timestamp}-${randomSuffix}.txt`;\n const filepath = path.join(tmpDir, filename);\n fs.writeFileSync(filepath, prompt, 'utf-8');\n return filepath;\n}\n\n/**\n * Result of building a CLI command, including metadata about the delivery method used.\n */\nexport interface BuildCliCommandResult {\n /** The complete command string to execute */\n command: string;\n /** The delivery method used for the prompt */\n deliveryMethod: 'direct' | 'file' | 'resume';\n /** Path to the temp file if file-based delivery was used */\n tempFilePath?: string;\n}\n\n/**\n * Options for building a CLI command\n */\nexport interface BuildCliCommandOptions {\n /** Initial prompt to send */\n prompt?: string;\n /** Model to use (e.g., 'gpt-4') */\n model?: string;\n /** Platform override for shell escaping (defaults to process.platform) */\n platform?: NodeJS.Platform;\n /** Session ID to resume (for session resume functionality) */\n resumeSessionId?: string;\n}\n\n/**\n * Build the CLI command string for the specified tool.\n *\n * This is the shared command builder used by both interactive (terminal)\n * and non-interactive (child process) modes.\n *\n * Uses smart prompt delivery:\n * - Direct: For short, simple prompts without shell-problematic characters\n * - File-based: For long prompts or those containing special characters\n * - Resume: For resuming an existing session with --resume flag\n *\n * @param tool - The CLI tool to use ('copilot' or 'claude')\n * @param options - Optional command options\n * @returns Object containing the command string and delivery metadata\n */\nexport function buildCliCommand(\n tool: InteractiveToolType,\n options?: BuildCliCommandOptions\n): BuildCliCommandResult {\n const baseCommand = tool === 'copilot' ? 'copilot' : 'claude';\n const { prompt, model, platform, resumeSessionId } = options ?? {};\n const modelFlag = model ? 
` --model ${model}` : '';\n\n // Session resume mode: use --resume flag\n if (resumeSessionId) {\n return {\n command: `${baseCommand} ${COPILOT_BASE_FLAGS}${modelFlag} --resume=${resumeSessionId}`,\n deliveryMethod: 'resume'\n };\n }\n\n if (!prompt) {\n return {\n command: `${baseCommand} ${COPILOT_BASE_FLAGS}${modelFlag}`,\n deliveryMethod: 'direct'\n };\n }\n\n // Determine delivery method based on prompt characteristics\n if (shouldUseFileDelivery(prompt)) {\n // File-based delivery: write prompt to temp file\n const tempFilePath = writePromptToTempFile(prompt);\n\n // Build a simple redirection prompt - the CLI tool will read the file\n // File paths only need escaping for spaces, which is handled by quotes\n const redirectPrompt = `Follow the instructions in ${tempFilePath}`;\n const escapedRedirect = escapeShellArg(redirectPrompt, platform);\n\n return {\n command: `${baseCommand} ${COPILOT_BASE_FLAGS}${modelFlag} -i ${escapedRedirect}`,\n deliveryMethod: 'file',\n tempFilePath\n };\n }\n\n // Direct delivery: escape and pass prompt directly\n const escapedPrompt = escapeShellArg(prompt, platform);\n\n return {\n command: `${baseCommand} ${COPILOT_BASE_FLAGS}${modelFlag} -i ${escapedPrompt}`,\n deliveryMethod: 'direct'\n };\n}\n", "/**\n * External Terminal Launcher\n *\n * Platform-specific logic for launching external terminal applications\n * with AI CLI tools (copilot, claude) for interactive sessions.\n *\n * Supports:\n * - macOS: Terminal.app, iTerm2\n * - Windows: Windows Terminal, cmd.exe, PowerShell\n * - Linux: gnome-terminal, konsole, xfce4-terminal, xterm\n */\n\nimport { spawn, execSync, SpawnOptions } from 'child_process';\nimport {\n ExternalTerminalLaunchOptions,\n ExternalTerminalLaunchResult,\n TerminalType\n} from './terminal-types';\nimport { buildCliCommand } from '../ai/cli-utils';\n\n/**\n * Terminal configuration for each platform\n */\ninterface TerminalConfig {\n /** Command to check if terminal is available */\n checkCommand: string;\n /** Function to build spawn arguments */\n buildSpawnArgs: (cwd: string, command: string) => { cmd: string; args: string[] };\n}\n\n/**\n * Terminal configurations by platform and terminal type\n */\nconst TERMINAL_CONFIGS: Record<string, Record<string, TerminalConfig>> = {\n darwin: {\n 'iterm': {\n checkCommand: 'osascript -e \\'application \"iTerm\" exists\\'',\n buildSpawnArgs: (cwd: string, command: string) => ({\n cmd: 'osascript',\n args: [\n '-e', `tell application \"iTerm\"`,\n '-e', `create window with default profile`,\n '-e', `tell current session of current window`,\n '-e', `write text \"cd ${escapeAppleScript(cwd)} && ${escapeAppleScript(command)}\"`,\n '-e', `end tell`,\n '-e', `end tell`\n ]\n })\n },\n 'terminal.app': {\n checkCommand: 'osascript -e \\'application \"Terminal\" exists\\'',\n buildSpawnArgs: (cwd: string, command: string) => ({\n cmd: 'osascript',\n args: [\n '-e', `tell application \"Terminal\"`,\n '-e', `do script \"cd ${escapeAppleScript(cwd)} && ${escapeAppleScript(command)}\"`,\n '-e', `activate`,\n '-e', `end tell`\n ]\n })\n }\n },\n win32: {\n 'windows-terminal': {\n checkCommand: 'where wt',\n buildSpawnArgs: (cwd: string, command: string) => ({\n cmd: 'wt',\n args: ['-d', cwd, 'cmd', '/k', command]\n })\n },\n 'powershell': {\n checkCommand: 'where powershell',\n buildSpawnArgs: (cwd: string, command: string) => ({\n cmd: 'powershell',\n args: ['-NoExit', '-Command', `Set-Location '${cwd}'; ${command}`]\n })\n },\n 'cmd': {\n checkCommand: 'where cmd',\n buildSpawnArgs: (cwd: 
string, command: string) => ({\n cmd: 'cmd',\n args: ['/k', `cd /d \"${cwd}\" && ${command}`]\n })\n }\n },\n linux: {\n 'gnome-terminal': {\n checkCommand: 'which gnome-terminal',\n buildSpawnArgs: (cwd: string, command: string) => ({\n cmd: 'gnome-terminal',\n args: ['--working-directory', cwd, '--', 'bash', '-c', `${command}; exec bash`]\n })\n },\n 'konsole': {\n checkCommand: 'which konsole',\n buildSpawnArgs: (cwd: string, command: string) => ({\n cmd: 'konsole',\n args: ['--workdir', cwd, '-e', 'bash', '-c', `${command}; exec bash`]\n })\n },\n 'xfce4-terminal': {\n checkCommand: 'which xfce4-terminal',\n buildSpawnArgs: (cwd: string, command: string) => ({\n cmd: 'xfce4-terminal',\n args: ['--working-directory', cwd, '-e', `bash -c \"${command}; exec bash\"`]\n })\n },\n 'xterm': {\n checkCommand: 'which xterm',\n buildSpawnArgs: (cwd: string, command: string) => ({\n cmd: 'xterm',\n args: ['-e', `cd \"${cwd}\" && ${command} && exec bash`]\n })\n }\n }\n};\n\n/**\n * Order of terminal preference by platform\n */\nconst TERMINAL_PREFERENCE_ORDER: Record<string, TerminalType[]> = {\n darwin: ['iterm', 'terminal.app'],\n win32: ['windows-terminal', 'powershell', 'cmd'],\n linux: ['gnome-terminal', 'konsole', 'xfce4-terminal', 'xterm']\n};\n\n/**\n * Escape a string for use in AppleScript double-quoted strings.\n * Only backslashes and double quotes need escaping.\n * Single quotes are safe inside AppleScript double-quoted strings.\n */\nfunction escapeAppleScript(str: string): string {\n return str\n .replace(/\\\\/g, '\\\\\\\\')\n .replace(/\"/g, '\\\\\"');\n}\n\n\n/**\n * External Terminal Launcher\n *\n * Handles platform-specific detection and launching of external terminals\n * for interactive AI CLI sessions.\n */\nexport class ExternalTerminalLauncher {\n private platform: NodeJS.Platform;\n private terminalCache: Map<TerminalType, boolean> = new Map();\n\n // Dependency injection points for testing\n private execSyncFn: typeof execSync;\n private spawnFn: typeof spawn;\n\n constructor(\n platform?: NodeJS.Platform,\n execSyncFn?: typeof execSync,\n spawnFn?: typeof spawn\n ) {\n this.platform = platform ?? process.platform;\n this.execSyncFn = execSyncFn ?? execSync;\n this.spawnFn = spawnFn ?? 
spawn;\n }\n\n /**\n * Get the current platform\n */\n getPlatform(): NodeJS.Platform {\n return this.platform;\n }\n\n /**\n * Check if a specific terminal is available on the system\n */\n isTerminalAvailable(terminalType: TerminalType): boolean {\n // Check cache first\n const cached = this.terminalCache.get(terminalType);\n if (cached !== undefined) {\n return cached;\n }\n\n const platformKey = this.getPlatformKey();\n const configs = TERMINAL_CONFIGS[platformKey];\n\n if (!configs || !configs[terminalType]) {\n this.terminalCache.set(terminalType, false);\n return false;\n }\n\n const config = configs[terminalType];\n\n try {\n this.execSyncFn(config.checkCommand, {\n encoding: 'utf-8',\n stdio: ['pipe', 'pipe', 'pipe'],\n timeout: 5000\n });\n this.terminalCache.set(terminalType, true);\n return true;\n } catch {\n this.terminalCache.set(terminalType, false);\n return false;\n }\n }\n\n /**\n * Detect the best available terminal for the current platform\n */\n detectTerminal(): TerminalType {\n const platformKey = this.getPlatformKey();\n const preferenceOrder = TERMINAL_PREFERENCE_ORDER[platformKey];\n\n if (!preferenceOrder) {\n return 'unknown';\n }\n\n for (const terminal of preferenceOrder) {\n if (this.isTerminalAvailable(terminal)) {\n return terminal;\n }\n }\n\n return 'unknown';\n }\n\n /**\n * Get all available terminals for the current platform\n */\n getAvailableTerminals(): TerminalType[] {\n const platformKey = this.getPlatformKey();\n const preferenceOrder = TERMINAL_PREFERENCE_ORDER[platformKey];\n\n if (!preferenceOrder) {\n return [];\n }\n\n return preferenceOrder.filter(terminal => this.isTerminalAvailable(terminal));\n }\n\n /**\n * Launch an external terminal with the specified options\n */\n async launch(options: ExternalTerminalLaunchOptions): Promise<ExternalTerminalLaunchResult> {\n const { workingDirectory, tool, initialPrompt, preferredTerminal, model, resumeSessionId } = options;\n\n // Determine which terminal to use\n let terminalType: TerminalType;\n\n if (preferredTerminal && this.isTerminalAvailable(preferredTerminal)) {\n terminalType = preferredTerminal;\n } else {\n terminalType = this.detectTerminal();\n }\n\n if (terminalType === 'unknown') {\n return {\n success: false,\n terminalType: 'unknown',\n error: `No supported terminal found for platform: ${this.platform}`\n };\n }\n\n // Build the CLI command (with optional session resume)\n const result = buildCliCommand(tool, { prompt: initialPrompt, model, resumeSessionId });\n const command = result.command;\n\n // Get the terminal configuration\n const platformKey = this.getPlatformKey();\n const config = TERMINAL_CONFIGS[platformKey]?.[terminalType];\n\n if (!config) {\n return {\n success: false,\n terminalType,\n error: `Terminal configuration not found for: ${terminalType}`\n };\n }\n\n try {\n const { cmd, args } = config.buildSpawnArgs(workingDirectory, command);\n\n const spawnOptions: SpawnOptions = {\n detached: true,\n stdio: 'ignore',\n // On Windows, we need shell: true for some commands\n shell: this.platform === 'win32'\n };\n\n const child = this.spawnFn(cmd, args, spawnOptions);\n\n // Unref to allow the parent process to exit independently\n child.unref();\n\n return {\n success: true,\n terminalType,\n pid: child.pid\n };\n } catch (error) {\n const errorMessage = error instanceof Error ? 
error.message : String(error);\n return {\n success: false,\n terminalType,\n error: `Failed to launch terminal: ${errorMessage}`\n };\n }\n }\n\n /**\n * Get the platform key for configuration lookup\n */\n private getPlatformKey(): string {\n // Normalize platform to our supported keys\n if (this.platform === 'darwin') {\n return 'darwin';\n } else if (this.platform === 'win32') {\n return 'win32';\n } else {\n // All other Unix-like systems use linux config\n return 'linux';\n }\n }\n\n /**\n * Clear the terminal availability cache\n */\n clearCache(): void {\n this.terminalCache.clear();\n }\n\n /**\n * Get supported terminals for a specific platform\n */\n static getSupportedTerminals(platform: NodeJS.Platform): TerminalType[] {\n let platformKey: string;\n if (platform === 'darwin') {\n platformKey = 'darwin';\n } else if (platform === 'win32') {\n platformKey = 'win32';\n } else {\n platformKey = 'linux';\n }\n\n return TERMINAL_PREFERENCE_ORDER[platformKey] ?? [];\n }\n}\n\n/**\n * Singleton instance for convenience\n */\nlet defaultLauncher: ExternalTerminalLauncher | undefined;\n\n/**\n * Get the default ExternalTerminalLauncher instance\n */\nexport function getExternalTerminalLauncher(): ExternalTerminalLauncher {\n if (!defaultLauncher) {\n defaultLauncher = new ExternalTerminalLauncher();\n }\n return defaultLauncher;\n}\n\n/**\n * Reset the default launcher (useful for testing)\n */\nexport function resetExternalTerminalLauncher(): void {\n defaultLauncher = undefined;\n}\n", "/**\n * Centralized Defaults\n *\n * Single source of truth for all DEFAULT_* constants used across the pipeline-core package.\n * This consolidates scattered defaults for better discoverability and maintainability.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\n// ============================================================================\n// Timeouts\n// ============================================================================\n\n/**\n * Default AI request timeout (30 minutes).\n * Used for individual AI calls in pipelines, map-reduce, and queue tasks.\n */\nexport const DEFAULT_AI_TIMEOUT_MS = 30 * 60 * 1000; // 30 minutes\n\n/**\n * Default poll interval for process monitoring (5 seconds).\n */\nexport const DEFAULT_POLL_INTERVAL_MS = 5000;\n\n// ============================================================================\n// Concurrency & Parallelism\n// ============================================================================\n\n/**\n * Default parallel limit for pipeline map operations.\n */\nexport const DEFAULT_PARALLEL_LIMIT = 5;\n\n/**\n * Default maximum concurrency for map-reduce operations.\n */\nexport const DEFAULT_MAX_CONCURRENCY = 5;\n\n// ============================================================================\n// Session Pool\n// ============================================================================\n\n/**\n * Default maximum number of sessions in the pool.\n */\nexport const DEFAULT_MAX_SESSIONS = 5;\n\n/**\n * Default idle timeout for pool sessions (5 minutes).\n */\nexport const DEFAULT_IDLE_TIMEOUT_MS = 300000;\n\n/**\n * Default minimum number of sessions to keep in the pool.\n */\nexport const DEFAULT_MIN_SESSIONS = 0;\n\n/**\n * Default cleanup interval for the session pool (1 minute).\n */\nexport const DEFAULT_CLEANUP_INTERVAL_MS = 60000;\n\n/**\n * Default timeout for acquiring a session from the pool (30 seconds).\n */\nexport const DEFAULT_ACQUIRE_TIMEOUT_MS = 30000;\n\n// 
============================================================================\n// Chunk Splitter\n// ============================================================================\n\n/**\n * Default maximum chunk size in characters.\n */\nexport const DEFAULT_CHUNK_MAX_SIZE = 4000;\n\n/**\n * Default overlap size between chunks in characters.\n */\nexport const DEFAULT_CHUNK_OVERLAP_SIZE = 200;\n\n/**\n * Default chunk strategy.\n */\nexport const DEFAULT_CHUNK_STRATEGY: 'character' | 'line' | 'paragraph' = 'character';\n\n/**\n * Default setting for preserving boundaries in chunk splitting.\n */\nexport const DEFAULT_CHUNK_PRESERVE_BOUNDARIES = true;\n\n// ============================================================================\n// CSV Reader\n// ============================================================================\n\n/**\n * Default CSV delimiter.\n */\nexport const DEFAULT_CSV_DELIMITER = ',';\n\n/**\n * Default CSV quote character.\n */\nexport const DEFAULT_CSV_QUOTE = '\"';\n\n/**\n * Default setting for CSV header presence.\n */\nexport const DEFAULT_CSV_HAS_HEADER = true;\n\n/**\n * Default setting for skipping empty lines in CSV.\n */\nexport const DEFAULT_CSV_SKIP_EMPTY_LINES = true;\n\n/**\n * Default setting for trimming fields in CSV.\n */\nexport const DEFAULT_CSV_TRIM_FIELDS = true;\n\n// ============================================================================\n// Queue Executor\n// ============================================================================\n\n/**\n * Default number of retry attempts for queue tasks.\n */\nexport const DEFAULT_RETRY_ATTEMPTS = 3;\n\n/**\n * Default delay between retries in milliseconds.\n */\nexport const DEFAULT_RETRY_DELAY_MS = 1000;\n\n/**\n * Default maximum concurrent tasks in the queue executor.\n */\nexport const DEFAULT_QUEUE_MAX_CONCURRENT = 3;\n\n/**\n * Default process on startup setting.\n */\nexport const DEFAULT_QUEUE_PROCESS_ON_STARTUP = true;\n\n/**\n * Default auto-start setting for queue manager.\n */\nexport const DEFAULT_QUEUE_AUTO_START = true;\n\n/**\n * Default setting for automatic queue persistence.\n */\nexport const DEFAULT_QUEUE_AUTO_PERSIST = true;\n\n// ============================================================================\n// Skills\n// ============================================================================\n\n/**\n * Default directory for skill definitions.\n */\nexport const DEFAULT_SKILLS_DIRECTORY = '.github/skills';\n\n// ============================================================================\n// Text Matching\n// ============================================================================\n\n/**\n * Default fuzzy match threshold (0-1).\n */\nexport const DEFAULT_FUZZY_MATCH_THRESHOLD = 0.7;\n\n/**\n * Default context lines to include around matches.\n */\nexport const DEFAULT_CONTEXT_LINES = 3;\n\n/**\n * Default case sensitivity for text matching.\n */\nexport const DEFAULT_CASE_SENSITIVE = false;\n", "/**\n * Process Monitor\n *\n * Monitors external terminal processes to detect when they terminate.\n * Uses platform-specific commands to check if processes are still running.\n *\n * Platform-specific checks:\n * - Windows: tasklist /FI \"PID eq X\"\n * - macOS/Linux: ps -p X (exit code 0 = running)\n */\n\nimport { execSync } from 'child_process';\nimport { getLogger, LogCategory } from '../logger';\n\n/**\n * Disposable interface for cleanup\n * Compatible with VS Code's Disposable interface\n */\nexport interface Disposable {\n dispose(): void;\n}\n\n/**\n * Result of 
checking if a process is running\n */\nexport interface ProcessCheckResult {\n /** Whether the process is currently running */\n isRunning: boolean;\n /** Error message if the check failed */\n error?: string;\n}\n\n/**\n * Configuration options for ProcessMonitor\n */\nexport interface ProcessMonitorOptions {\n /** Poll interval in milliseconds (default: 5000) */\n pollIntervalMs?: number;\n /** Platform override for testing (default: process.platform) */\n platform?: NodeJS.Platform;\n /** Custom exec function for testing */\n execSyncFn?: typeof execSync;\n}\n\n/**\n * Monitored session entry\n */\ninterface MonitoredSession {\n /** Process ID to monitor */\n pid: number;\n /** Callback to invoke when process terminates */\n onTerminated: () => void;\n}\n\n// Re-export for backward compatibility\nexport { DEFAULT_POLL_INTERVAL_MS } from '../config/defaults';\n\n// Import for internal use\nimport { DEFAULT_POLL_INTERVAL_MS } from '../config/defaults';\n\n/**\n * ProcessMonitor\n *\n * Monitors external terminal processes and notifies when they terminate.\n * Uses polling with platform-specific commands to check process status.\n */\nexport class ProcessMonitor implements Disposable {\n private checkInterval?: ReturnType<typeof setInterval>;\n private monitoredSessions: Map<string, MonitoredSession> = new Map();\n private readonly pollIntervalMs: number;\n private readonly platform: NodeJS.Platform;\n private readonly execSyncFn: typeof execSync;\n private isDisposed: boolean = false;\n\n constructor(options: ProcessMonitorOptions = {}) {\n this.pollIntervalMs = options.pollIntervalMs ?? DEFAULT_POLL_INTERVAL_MS;\n this.platform = options.platform ?? process.platform;\n this.execSyncFn = options.execSyncFn ?? execSync;\n }\n\n /**\n * Check if a process is currently running\n *\n * @param pid Process ID to check\n * @returns ProcessCheckResult indicating if the process is running\n */\n isProcessRunning(pid: number): ProcessCheckResult {\n if (pid <= 0) {\n return { isRunning: false, error: 'Invalid PID' };\n }\n\n try {\n if (this.platform === 'win32') {\n return this.checkWindowsProcess(pid);\n } else {\n return this.checkUnixProcess(pid);\n }\n } catch (error) {\n return {\n isRunning: false,\n error: error instanceof Error ? 
error.message : String(error)\n };\n }\n }\n\n /**\n * Check if a process is running on Windows\n */\n private checkWindowsProcess(pid: number): ProcessCheckResult {\n try {\n // tasklist returns exit code 0 if process found, non-zero if not found\n // We use /FI to filter by PID and /NH to skip headers\n const output = this.execSyncFn(`tasklist /FI \"PID eq ${pid}\" /NH`, {\n encoding: 'utf8',\n windowsHide: true,\n timeout: 5000\n });\n\n // If the output contains the PID, the process is running\n // tasklist returns \"INFO: No tasks are running which match the specified criteria.\"\n // when no process is found\n const isRunning = !output.includes('No tasks are running') &&\n output.includes(String(pid));\n\n return { isRunning };\n } catch (error) {\n // If tasklist fails, assume process is not running\n return { isRunning: false };\n }\n }\n\n /**\n * Check if a process is running on Unix (macOS/Linux)\n */\n private checkUnixProcess(pid: number): ProcessCheckResult {\n try {\n // ps -p returns exit code 0 if process exists, 1 if not\n this.execSyncFn(`ps -p ${pid}`, {\n encoding: 'utf8',\n timeout: 5000,\n stdio: ['pipe', 'pipe', 'pipe']\n });\n\n // If we get here without error, the process is running\n return { isRunning: true };\n } catch {\n // ps -p returns non-zero exit code if process doesn't exist\n return { isRunning: false };\n }\n }\n\n /**\n * Start monitoring a session's process\n *\n * @param sessionId Unique identifier for the session\n * @param pid Process ID to monitor\n * @param onTerminated Callback to invoke when the process terminates\n */\n startMonitoring(sessionId: string, pid: number, onTerminated: () => void): void {\n if (this.isDisposed) {\n return;\n }\n\n if (pid <= 0) {\n // Invalid PID, don't monitor\n return;\n }\n\n // Store the session for monitoring\n this.monitoredSessions.set(sessionId, { pid, onTerminated });\n\n // Start the polling interval if not already running\n if (!this.checkInterval && this.monitoredSessions.size > 0) {\n this.startPolling();\n }\n }\n\n /**\n * Stop monitoring a session's process\n *\n * @param sessionId Session ID to stop monitoring\n */\n stopMonitoring(sessionId: string): void {\n this.monitoredSessions.delete(sessionId);\n\n // Stop polling if no more sessions to monitor\n if (this.monitoredSessions.size === 0 && this.checkInterval) {\n this.stopPolling();\n }\n }\n\n /**\n * Get the number of sessions currently being monitored\n */\n getMonitoredSessionCount(): number {\n return this.monitoredSessions.size;\n }\n\n /**\n * Check if a specific session is being monitored\n */\n isMonitoring(sessionId: string): boolean {\n return this.monitoredSessions.has(sessionId);\n }\n\n /**\n * Start the polling interval\n */\n private startPolling(): void {\n if (this.checkInterval || this.isDisposed) {\n return;\n }\n\n this.checkInterval = setInterval(() => {\n this.checkAllProcesses();\n }, this.pollIntervalMs);\n }\n\n /**\n * Stop the polling interval\n */\n private stopPolling(): void {\n if (this.checkInterval) {\n clearInterval(this.checkInterval);\n this.checkInterval = undefined;\n }\n }\n\n /**\n * Check all monitored processes and invoke callbacks for terminated ones\n */\n private checkAllProcesses(): void {\n const terminatedSessions: string[] = [];\n\n for (const [sessionId, { pid, onTerminated }] of this.monitoredSessions.entries()) {\n const result = this.isProcessRunning(pid);\n\n if (!result.isRunning) {\n terminatedSessions.push(sessionId);\n\n // Invoke the callback\n try {\n onTerminated();\n } catch 
(error) {\n // Log but don't throw - we want to continue checking other processes\n getLogger().error(LogCategory.UTILS, `Error in termination callback for session ${sessionId}`, error instanceof Error ? error : undefined);\n }\n }\n }\n\n // Remove terminated sessions from monitoring\n for (const sessionId of terminatedSessions) {\n this.monitoredSessions.delete(sessionId);\n }\n\n // Stop polling if no more sessions to monitor\n if (this.monitoredSessions.size === 0 && this.checkInterval) {\n this.stopPolling();\n }\n }\n\n /**\n * Force an immediate check of all processes (useful for testing)\n */\n checkNow(): void {\n this.checkAllProcesses();\n }\n\n /**\n * Dispose of resources\n */\n dispose(): void {\n this.isDisposed = true;\n this.stopPolling();\n this.monitoredSessions.clear();\n }\n}\n\n/**\n * Singleton instance for convenience\n */\nlet defaultMonitor: ProcessMonitor | undefined;\n\n/**\n * Get the default ProcessMonitor instance\n */\nexport function getProcessMonitor(): ProcessMonitor {\n if (!defaultMonitor) {\n defaultMonitor = new ProcessMonitor();\n }\n return defaultMonitor;\n}\n\n/**\n * Reset the default monitor (useful for testing)\n */\nexport function resetProcessMonitor(): void {\n if (defaultMonitor) {\n defaultMonitor.dispose();\n defaultMonitor = undefined;\n }\n}\n", "/**\n * Template Engine\n *\n * Shared template variable substitution logic for prompts across the codebase.\n * Provides a centralized implementation of {{variable}} placeholder replacement.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport { PipelineCoreError, ErrorCode } from '../errors';\n\n/**\n * Regular expression to match {{variable}} placeholders.\n * Shared across all template substitution implementations.\n */\nexport const TEMPLATE_VARIABLE_REGEX = /\\{\\{(\\w+)\\}\\}/g;\n\n/**\n * Special template variables that are automatically provided by the system\n * and should not be validated against item fields.\n *\n * - ITEMS: JSON array of all input items (available in map phase)\n * - RESULTS: JSON array of map results (available in reduce phase)\n * - RESULTS_FILE: Path to temp file with results (available in reduce phase)\n * - COUNT: Total count of items/results\n * - SUCCESS_COUNT: Count of successful items\n * - FAILURE_COUNT: Count of failed items\n */\nexport const SPECIAL_VARIABLES = new Set([\n 'ITEMS',\n 'RESULTS',\n 'RESULTS_FILE',\n 'COUNT',\n 'SUCCESS_COUNT',\n 'FAILURE_COUNT'\n]);\n\n/**\n * Options for template variable substitution\n */\nexport interface SubstituteVariablesOptions {\n /**\n * If true, throws on missing variables; if false, behavior depends on missingValueBehavior.\n * @default false\n */\n strict?: boolean;\n\n /**\n * Behavior when a variable is missing and strict mode is false.\n * - 'empty': Replace with empty string (default, backward compatible)\n * - 'preserve': Leave the {{variable}} placeholder as-is\n */\n missingValueBehavior?: 'empty' | 'preserve';\n\n /**\n * If true, special system variables (ITEMS, RESULTS, etc.) are always preserved\n * even in strict mode.\n * @default true\n */\n preserveSpecialVariables?: boolean;\n}\n\n/**\n * Error thrown when a template variable is missing in strict mode\n */\nexport class TemplateVariableError extends PipelineCoreError {\n /** Name of the missing variable */\n readonly variableName?: string;\n\n constructor(\n message: string,\n variableName?: string\n ) {\n super(message, {\n code: ErrorCode.MISSING_VARIABLE,\n meta: variableName ? 
{ variableName } : undefined,\n });\n this.name = 'TemplateVariableError';\n this.variableName = variableName;\n }\n}\n\n/**\n * Substitute template variables in a string with values from a variables object.\n *\n * @param template Template string with {{variable}} placeholders\n * @param variables Object containing variable values (key-value pairs)\n * @param options Substitution options\n * @returns String with variables substituted\n * @throws TemplateVariableError if strict mode is enabled and a variable is missing\n *\n * @example\n * ```typescript\n * // Basic usage\n * substituteVariables('Hello {{name}}!', { name: 'World' });\n * // Returns: 'Hello World!'\n *\n * // Strict mode\n * substituteVariables('Hello {{name}}!', {}, { strict: true });\n * // Throws: TemplateVariableError\n *\n * // Preserve missing variables\n * substituteVariables('Hello {{name}}!', {}, { missingValueBehavior: 'preserve' });\n * // Returns: 'Hello {{name}}!'\n * ```\n */\nexport function substituteVariables(\n template: string,\n variables: Record<string, unknown>,\n options: SubstituteVariablesOptions = {}\n): string {\n const {\n strict = false,\n missingValueBehavior = 'empty',\n preserveSpecialVariables = true\n } = options;\n\n // Need to reset lastIndex since we're using a global regex\n const regex = new RegExp(TEMPLATE_VARIABLE_REGEX.source, 'g');\n\n return template.replace(regex, (match, variableName: string) => {\n // Handle special system variables - always preserve them\n if (preserveSpecialVariables && SPECIAL_VARIABLES.has(variableName)) {\n return match;\n }\n\n // Check if variable exists\n if (variableName in variables) {\n const value = variables[variableName];\n // Handle null/undefined explicitly\n if (value === null || value === undefined) {\n return '';\n }\n // Handle objects by JSON stringifying them\n if (typeof value === 'object') {\n return JSON.stringify(value);\n }\n return String(value);\n }\n\n // Variable not found\n if (strict) {\n throw new TemplateVariableError(\n `Missing variable \"${variableName}\" in template`,\n variableName\n );\n }\n\n // Non-strict: behavior depends on option\n if (missingValueBehavior === 'preserve') {\n return match;\n }\n return ''; // 'empty' is the default\n });\n}\n\n/**\n * Extract all variable names from a template string.\n *\n * @param template Template string to analyze\n * @param excludeSpecial If true, excludes special system variables (ITEMS, RESULTS, etc.)\n * @returns Array of unique variable names found in the template\n *\n * @example\n * ```typescript\n * extractVariables('Hello {{name}}, you have {{count}} messages');\n * // Returns: ['name', 'count']\n *\n * extractVariables('Items: {{ITEMS}}, Name: {{name}}', true);\n * // Returns: ['name'] (ITEMS is excluded)\n *\n * extractVariables('Items: {{ITEMS}}, Name: {{name}}', false);\n * // Returns: ['ITEMS', 'name']\n * ```\n */\nexport function extractVariables(template: string, excludeSpecial: boolean = true): string[] {\n const variables = new Set<string>();\n const regex = new RegExp(TEMPLATE_VARIABLE_REGEX.source, 'g');\n const matches = template.matchAll(regex);\n\n for (const match of matches) {\n const varName = match[1];\n // Optionally exclude special system-provided variables\n if (!excludeSpecial || !SPECIAL_VARIABLES.has(varName)) {\n variables.add(varName);\n }\n }\n\n return Array.from(variables);\n}\n\n/**\n * Check if a template contains any variables.\n *\n * @param template Template string to check\n * @returns True if the template contains at least one 
{{variable}} placeholder\n */\nexport function hasVariables(template: string): boolean {\n // Use a fresh, non-global regex so the shared global regex's lastIndex state cannot skew repeated checks\n const regex = new RegExp(TEMPLATE_VARIABLE_REGEX.source);\n return regex.test(template);\n}\n\n/**\n * Check if a template contains any of the specified variables.\n *\n * @param template Template string to check\n * @param variableNames Variable names to look for\n * @returns True if the template contains any of the specified variables\n */\nexport function containsVariables(template: string, variableNames: string[]): boolean {\n const found = extractVariables(template, false);\n return variableNames.some(name => found.includes(name));\n}\n\n/**\n * Validate that all required variables are present in a variables object.\n *\n * @param template Template string\n * @param variables Variables object to validate\n * @returns Object with validation result and missing variables\n */\nexport function validateVariables(\n template: string,\n variables: Record<string, unknown>\n): { valid: boolean; missingVariables: string[] } {\n const requiredVariables = extractVariables(template, true); // Exclude special vars\n const missingVariables = requiredVariables.filter(v => !(v in variables));\n\n return {\n valid: missingVariables.length === 0,\n missingVariables\n };\n}\n", "/**\n * Utils Module - Public API\n * \n * Exports all utility functions for file operations, HTTP requests,\n * text matching, and AI response parsing.\n */\n\n// File utilities\nexport {\n FileOperationResult,\n ReadFileOptions,\n WriteFileOptions,\n YAMLOptions,\n safeExists,\n safeIsDirectory,\n safeIsFile,\n safeReadFile,\n safeWriteFile,\n ensureDirectoryExists,\n safeReadDir,\n safeStats,\n readYAML,\n writeYAML,\n safeCopyFile,\n safeRename,\n safeRemove,\n getFileErrorMessage\n} from './file-utils';\n\n// Glob utilities\nexport {\n glob,\n getFilesWithExtension\n} from './glob-utils';\n\n// Exec utilities\nexport {\n execAsync\n} from './exec-utils';\n\n// HTTP utilities\nexport {\n HttpResponse,\n httpGet,\n httpDownload,\n httpGetJson\n} from './http-utils';\n\n// Text matching utilities\nexport {\n AnchorMatchConfig,\n DEFAULT_ANCHOR_MATCH_CONFIG,\n BaseMatchAnchor,\n hashText,\n levenshteinDistance,\n calculateSimilarity,\n normalizeText,\n splitIntoLines,\n getCharOffset,\n offsetToLineColumn,\n findAllOccurrences,\n scoreMatch,\n findFuzzyMatch,\n extractContext\n} from './text-matching';\n\n// AI response parser\nexport {\n extractJSON,\n parseAIResponse\n} from './ai-response-parser';\n\n// Terminal types\nexport {\n TerminalType,\n InteractiveSessionStatus,\n InteractiveSession,\n ExternalTerminalLaunchOptions,\n ExternalTerminalLaunchResult,\n WindowFocusResult\n} from './terminal-types';\n\n// Window focus service\nexport {\n WindowFocusService,\n getWindowFocusService,\n resetWindowFocusService\n} from './window-focus-service';\n\n// External terminal launcher\nexport {\n ExternalTerminalLauncher,\n getExternalTerminalLauncher,\n resetExternalTerminalLauncher\n} from './external-terminal-launcher';\n\n// Process monitor\nexport {\n Disposable,\n ProcessCheckResult,\n ProcessMonitorOptions,\n ProcessMonitor,\n getProcessMonitor,\n resetProcessMonitor,\n DEFAULT_POLL_INTERVAL_MS\n} from './process-monitor';\n\n// Template engine\nexport {\n TEMPLATE_VARIABLE_REGEX,\n SPECIAL_VARIABLES,\n SubstituteVariablesOptions,\n TemplateVariableError,\n substituteVariables,\n extractVariables as extractTemplateVariables,\n hasVariables,\n containsVariables,\n validateVariables\n} from './template-engine';\n", "/**\n * Config Module - Public API\n *\n * Exports all 
configuration constants and defaults.\n */\n\n// Defaults\nexport {\n // Timeouts\n DEFAULT_AI_TIMEOUT_MS,\n DEFAULT_POLL_INTERVAL_MS,\n // Concurrency\n DEFAULT_PARALLEL_LIMIT,\n DEFAULT_MAX_CONCURRENCY,\n // Session Pool\n DEFAULT_MAX_SESSIONS,\n DEFAULT_IDLE_TIMEOUT_MS,\n DEFAULT_MIN_SESSIONS,\n DEFAULT_CLEANUP_INTERVAL_MS,\n DEFAULT_ACQUIRE_TIMEOUT_MS,\n // Chunk Splitter\n DEFAULT_CHUNK_MAX_SIZE,\n DEFAULT_CHUNK_OVERLAP_SIZE,\n DEFAULT_CHUNK_STRATEGY,\n DEFAULT_CHUNK_PRESERVE_BOUNDARIES,\n // CSV Reader\n DEFAULT_CSV_DELIMITER,\n DEFAULT_CSV_QUOTE,\n DEFAULT_CSV_HAS_HEADER,\n DEFAULT_CSV_SKIP_EMPTY_LINES,\n DEFAULT_CSV_TRIM_FIELDS,\n // Queue Executor\n DEFAULT_RETRY_ATTEMPTS,\n DEFAULT_RETRY_DELAY_MS,\n DEFAULT_QUEUE_MAX_CONCURRENT,\n DEFAULT_QUEUE_PROCESS_ON_STARTUP,\n DEFAULT_QUEUE_AUTO_START,\n DEFAULT_QUEUE_AUTO_PERSIST,\n // Skills\n DEFAULT_SKILLS_DIRECTORY,\n // Text Matching\n DEFAULT_FUZZY_MATCH_THRESHOLD,\n DEFAULT_CONTEXT_LINES,\n DEFAULT_CASE_SENSITIVE\n} from './defaults';\n", "/**\n * Central AI Model Registry\n *\n * Single source of truth for all AI model definitions used across the codebase.\n * When adding, updating, or removing a model, only this file needs to change\n * (plus `package.json` enum for VS Code settings UI).\n *\n * Design:\n * - `MODEL_REGISTRY` is the authoritative list of supported models.\n * - `VALID_MODELS` and `AIModel` are derived from the registry.\n * - Helper functions provide display labels, descriptions, and lookups.\n * - The first model in the registry is considered the default/recommended model.\n */\n\n// ============================================================================\n// Model Definition Interface\n// ============================================================================\n\n/**\n * Complete definition of an AI model.\n */\nexport interface ModelDefinition {\n /** Unique model identifier sent to the API (e.g., 'claude-sonnet-4.5') */\n readonly id: string;\n /** Human-readable display label (e.g., 'Claude Sonnet 4.5') */\n readonly label: string;\n /** Short description for UI display (e.g., '(Recommended)') */\n readonly description: string;\n /** Performance/cost tier */\n readonly tier: 'fast' | 'standard' | 'premium';\n /** Whether the model is deprecated but kept for backward compatibility */\n readonly deprecated?: boolean;\n}\n\n// ============================================================================\n// Model Registry (Source of Truth)\n// ============================================================================\n\n/**\n * The authoritative list of all supported AI models.\n * Order matters: the first entry is the default/recommended model.\n *\n * To add a new model:\n * 1. Add an entry here\n * 2. Update the `package.json` enum (for VS Code settings UI)\n * 3. 
All types, helpers, and tests will automatically pick it up\n */\nconst MODEL_DEFINITIONS: readonly ModelDefinition[] = [\n {\n id: 'claude-sonnet-4.5',\n label: 'Claude Sonnet 4.5',\n description: '(Recommended)',\n tier: 'standard',\n },\n {\n id: 'claude-haiku-4.5',\n label: 'Claude Haiku 4.5',\n description: '(Fast)',\n tier: 'fast',\n },\n {\n id: 'claude-opus-4.6',\n label: 'Claude Opus 4.6',\n description: '(Premium)',\n tier: 'premium',\n },\n {\n id: 'gpt-5.2',\n label: 'GPT-5.2',\n description: '',\n tier: 'standard',\n },\n {\n id: 'gpt-5.1-codex-max',\n label: 'GPT-5.1 Codex Max',\n description: '',\n tier: 'premium',\n },\n {\n id: 'gemini-3-pro-preview',\n label: 'Gemini 3 Pro',\n description: '(Preview)',\n tier: 'standard',\n },\n] as const;\n\n/**\n * The model registry indexed by model ID for fast lookups.\n */\nexport const MODEL_REGISTRY: ReadonlyMap<string, ModelDefinition> = new Map(\n MODEL_DEFINITIONS.map(m => [m.id, m])\n);\n\n// ============================================================================\n// Derived Constants (used across the codebase)\n// ============================================================================\n\n/**\n * All valid model IDs as a tuple. Derived from MODEL_REGISTRY.\n * This replaces the previously hand-maintained VALID_MODELS array.\n */\nexport const VALID_MODELS = MODEL_DEFINITIONS.map(m => m.id) as unknown as readonly [\n 'claude-sonnet-4.5',\n 'claude-haiku-4.5',\n 'claude-opus-4.6',\n 'gpt-5.2',\n 'gpt-5.1-codex-max',\n 'gemini-3-pro-preview',\n];\n\n/**\n * Union type of all valid model IDs.\n */\nexport type AIModel = typeof VALID_MODELS[number];\n\n/**\n * The default/recommended model ID (first entry in registry).\n */\nexport const DEFAULT_MODEL_ID: AIModel = MODEL_DEFINITIONS[0].id as AIModel;\n\n// ============================================================================\n// Helper Functions\n// ============================================================================\n\n/**\n * Get the display label for a model ID.\n * @returns The label, or the raw ID if not found in registry\n */\nexport function getModelLabel(modelId: string): string {\n return MODEL_REGISTRY.get(modelId)?.label ?? modelId;\n}\n\n/**\n * Get the description for a model ID.\n * @returns The description, or empty string if not found\n */\nexport function getModelDescription(modelId: string): string {\n return MODEL_REGISTRY.get(modelId)?.description ?? 
'';\n}\n\n/**\n * Get the full model definition for a model ID.\n * @returns The definition, or undefined if not found\n */\nexport function getModelDefinition(modelId: string): ModelDefinition | undefined {\n return MODEL_REGISTRY.get(modelId);\n}\n\n/**\n * Get all model definitions (ordered).\n */\nexport function getAllModels(): readonly ModelDefinition[] {\n return MODEL_DEFINITIONS;\n}\n\n/**\n * Get all active (non-deprecated) model definitions.\n */\nexport function getActiveModels(): readonly ModelDefinition[] {\n return MODEL_DEFINITIONS.filter(m => !m.deprecated);\n}\n\n/**\n * Check if a string is a valid model ID.\n */\nexport function isValidModelId(id: string): id is AIModel {\n return MODEL_REGISTRY.has(id);\n}\n\n/**\n * Get model count.\n */\nexport function getModelCount(): number {\n return MODEL_DEFINITIONS.length;\n}\n\n/**\n * Get models filtered by tier.\n */\nexport function getModelsByTier(tier: ModelDefinition['tier']): readonly ModelDefinition[] {\n return MODEL_DEFINITIONS.filter(m => m.tier === tier);\n}\n", "/**\n * AI Service Types (Pure Node.js)\n * \n * Core types for AI service operations. These types are VS Code-free\n * and can be used in CLI tools, tests, and other Node.js environments.\n */\n\n/**\n * Supported AI backends for invocation.\n * - 'copilot-sdk': Use the @github/copilot-sdk for structured JSON-RPC communication\n * - 'copilot-cli': Use the copilot CLI via child process (legacy)\n * - 'clipboard': Copy prompt to clipboard for manual use\n */\nexport type AIBackendType = 'copilot-sdk' | 'copilot-cli' | 'clipboard';\n\n/**\n * Valid AI model options for Copilot CLI.\n * Derived from the central model registry (model-registry.ts).\n */\nexport { VALID_MODELS, AIModel, DEFAULT_MODEL_ID } from './model-registry';\n\n// Re-export model registry helpers and types for convenience\nexport {\n ModelDefinition,\n MODEL_REGISTRY,\n getModelLabel,\n getModelDescription,\n getModelDefinition,\n getAllModels,\n getActiveModels,\n isValidModelId,\n getModelCount,\n getModelsByTier\n} from './model-registry';\n\n/**\n * Result of an AI invocation\n */\nexport interface AIInvocationResult {\n /** Whether the invocation was successful */\n success: boolean;\n /** The response text from the AI (if successful) */\n response?: string;\n /** Error message (if failed) */\n error?: string;\n}\n\n/**\n * Default prompt templates for different instruction types\n */\nexport const DEFAULT_PROMPTS = {\n clarify: `Please clarify the following snippet with more depth.\n\n- Explain what it does in plain language.\n- Walk through the key steps, including control flow and data flow.\n- State any assumptions you are making from limited context.\n- Call out ambiguities and ask up to 3 targeted questions.\n- Suggest 2 to 3 concrete next checks, such as what to inspect or test next.\n\nSnippet`,\n goDeeper: `Please provide an in-depth explanation and analysis of the following snippet.\n\nGo beyond a summary and explore the surrounding implications.\n\n- Intent and responsibilities in the broader system.\n- Step-by-step control flow and data flow.\n- Edge cases and failure modes, including correctness, security, and performance.\n- Likely dependencies and impacts, and what else to inspect.\n- Concrete improvements or refactors with tradeoffs.\n- How to validate, including focused tests, repro steps, or logs.\n\nSnippet`,\n customDefault: 'Please explain the following snippet'\n} as const;\n\n/**\n * Supported CLI tools for interactive sessions\n */\nexport type 
InteractiveToolType = 'copilot' | 'claude';\n", "/**\n * AI Command Types\n *\n * Type definitions for configurable AI commands.\n * These are pure types with no VS Code dependencies.\n */\n\nimport { DEFAULT_PROMPTS } from './types';\n\n/**\n * Mode for AI command execution\n * - 'comment': AI response is added as a comment in the document (default)\n * - 'interactive': Opens an interactive AI session in external terminal\n * - 'background': Runs in background via SDK, tracks progress in AI Processes panel\n */\nexport type AICommandMode = 'comment' | 'interactive' | 'background';\n\n/**\n * An AI command that can be invoked from the review editor\n */\nexport interface AICommand {\n /** Unique identifier for the command */\n id: string;\n\n /** Display label in menus */\n label: string;\n\n /** Emoji or codicon icon (optional) */\n icon?: string;\n\n /**\n * Prompt template. Supports variables:\n * - {{selection}} - The selected text\n * - {{file}} - The file path\n * - {{heading}} - The nearest heading above selection\n * - {{context}} - Surrounding content\n */\n prompt: string;\n\n /** Display order in menus (lower = first) */\n order?: number;\n\n /** If true, shows input dialog for custom prompt */\n isCustomInput?: boolean;\n\n /** Label prefix for AI response comments */\n responseLabel?: string;\n\n /** Comment type for styling differentiation */\n commentType?: 'ai-clarification' | 'ai-critique' | 'ai-suggestion' | 'ai-question';\n}\n\n/**\n * Configuration structure for AI commands in settings\n */\nexport interface AICommandsConfig {\n commands: AICommand[];\n}\n\n/**\n * Default AI commands when none are configured\n */\nexport const DEFAULT_AI_COMMANDS: AICommand[] = [\n {\n id: 'clarify',\n label: 'Clarify',\n icon: '\uD83D\uDCA1',\n prompt: DEFAULT_PROMPTS.clarify,\n order: 1,\n commentType: 'ai-clarification',\n responseLabel: '\uD83E\uDD16 **AI Clarification:**'\n },\n {\n id: 'go-deeper',\n label: 'Go Deeper',\n icon: '\uD83D\uDD0D',\n prompt: DEFAULT_PROMPTS.goDeeper,\n order: 2,\n commentType: 'ai-clarification',\n responseLabel: '\uD83D\uDD0D **AI Deep Analysis:**'\n },\n {\n id: 'custom',\n label: 'Custom...',\n icon: '\uD83D\uDCAC',\n prompt: DEFAULT_PROMPTS.customDefault,\n order: 99,\n isCustomInput: true,\n responseLabel: '\uD83E\uDD16 **AI Response:**'\n }\n];\n\n/**\n * Serialized format of AI command for sending to webview\n */\nexport interface SerializedAICommand {\n id: string;\n label: string;\n icon?: string;\n order?: number;\n isCustomInput?: boolean;\n /** Prompt text shown in hover preview tooltip */\n prompt?: string;\n}\n\n/**\n * Serialized format of AI menu configuration for webview\n * Contains both comment and interactive mode commands\n */\nexport interface SerializedAIMenuConfig {\n /** Commands for \"Ask AI to Comment\" menu */\n commentCommands: SerializedAICommand[];\n /** Commands for \"Ask AI Interactively\" menu */\n interactiveCommands: SerializedAICommand[];\n}\n\n/**\n * Convert AICommand to serialized format for webview\n */\nexport function serializeCommand(command: AICommand): SerializedAICommand {\n return {\n id: command.id,\n label: command.label,\n icon: command.icon,\n order: command.order,\n isCustomInput: command.isCustomInput,\n prompt: command.prompt\n };\n}\n\n/**\n * Convert array of AICommands to serialized format\n */\nexport function serializeCommands(commands: AICommand[]): SerializedAICommand[] {\n return commands.map(serializeCommand);\n}\n", "/**\n * Prompt Builder (Pure Implementation)\n *\n * Pure template 
variable substitution for AI prompts.\n * No VS Code dependencies - can be used in CLI tools and other environments.\n */\n\nimport { substituteVariables } from '../utils/template-engine';\n\n/**\n * Context for building prompts\n */\nexport interface PromptContext {\n /** The selected text to process */\n selectedText: string;\n /** File path being reviewed */\n filePath: string;\n /** Surrounding content for context */\n surroundingContent?: string;\n /** Nearest heading above selection */\n nearestHeading?: string | null;\n /** All document headings */\n headings?: string[];\n}\n\n/**\n * Prompt-specific variable names used in AI prompts.\n * These map to PromptContext fields.\n */\nconst PROMPT_VARIABLE_NAMES = ['selection', 'file', 'heading', 'context', 'headings'] as const;\n\n/**\n * Substitute template variables in a prompt\n *\n * Template variables supported:\n * - {{selection}} - The selected text\n * - {{file}} - The file path\n * - {{heading}} - The nearest heading above selection\n * - {{context}} - Surrounding content\n * - {{headings}} - All document headings (comma-separated)\n *\n * @param template - The prompt template with variables\n * @param context - The context for variable substitution\n * @returns The prompt with variables substituted\n */\nexport function substitutePromptVariables(template: string, context: PromptContext): string {\n // Build variables object from context\n const variables: Record<string, string> = {\n selection: context.selectedText,\n file: context.filePath,\n heading: context.nearestHeading ?? '',\n context: context.surroundingContent ?? '',\n headings: context.headings?.join(', ') ?? ''\n };\n\n return substituteVariables(template, variables, {\n strict: false,\n missingValueBehavior: 'empty',\n preserveSpecialVariables: false\n });\n}\n\n/**\n * Build a prompt from a template and context\n *\n * If the prompt contains template variables, they are substituted.\n * Otherwise, a simple format is used: \"{prompt} \"{selection}\" in the file {file}\"\n *\n * @param promptTemplate - The prompt template\n * @param context - The context for variable substitution\n * @param isCustomInstruction - Whether this is a custom instruction (affects simple format)\n * @returns The built prompt string\n */\nexport function buildPromptFromContext(\n promptTemplate: string,\n context: PromptContext,\n isCustomInstruction: boolean = false\n): string {\n // Apply template variable substitutions\n let prompt = substitutePromptVariables(promptTemplate, context);\n\n // Append the selected text and file path if not using template variables\n // This maintains backward compatibility with simple prompts\n if (!prompt.includes('{{')) {\n // Simple prompt format: \"{prompt} \"{selection}\" in the file {file}\"\n if (isCustomInstruction) {\n return `${prompt}: \"${context.selectedText}\" in the file ${context.filePath}`;\n }\n return `${prompt} \"${context.selectedText}\" in the file ${context.filePath}`;\n }\n\n return prompt;\n}\n\n/**\n * Check if a prompt template uses template variables\n */\nexport function usesTemplateVariables(template: string): boolean {\n const pattern = new RegExp(`\\\\{\\\\{(${PROMPT_VARIABLE_NAMES.join('|')})\\\\}\\\\}`);\n return pattern.test(template);\n}\n\n/**\n * Get available template variables\n */\nexport function getAvailableVariables(): { name: string; description: string }[] {\n return [\n { name: '{{selection}}', description: 'The selected text' },\n { name: '{{file}}', description: 'The file path being reviewed' },\n { name: 
'{{heading}}', description: 'The nearest heading above the selection' },\n { name: '{{context}}', description: 'Surrounding content for context' },\n { name: '{{headings}}', description: 'All document headings (comma-separated)' }\n ];\n}\n", "/**\n * Program Utilities\n *\n * Pure Node.js utilities for checking program availability and parsing CLI output.\n * No VS Code dependencies.\n */\n\nimport { execSync } from 'child_process';\nimport { getLogger } from '../logger';\n\n/** Cache for program existence checks to avoid repeated lookups */\nconst programExistsCache = new Map<string, { exists: boolean; path?: string; error?: string }>();\n\n/**\n * Check if a program/command exists in the system PATH.\n * Results are cached to avoid repeated lookups.\n * \n * Platform-specific implementation:\n * - Windows: Uses `where` command\n * - Unix/macOS: Uses `which` command\n * \n * @param programName - The name of the program to check (e.g., 'copilot', 'git')\n * @param platform - Optional platform override for testing (defaults to process.platform)\n * @returns Object with exists boolean and optional path where program was found\n */\nexport function checkProgramExists(\n programName: string,\n platform?: NodeJS.Platform\n): { exists: boolean; path?: string; error?: string } {\n // Create cache key that includes platform to handle cross-platform testing\n const cacheKey = `${programName}:${platform ?? process.platform}`;\n\n // Return cached result if available\n const cached = programExistsCache.get(cacheKey);\n if (cached !== undefined) {\n return cached;\n }\n\n const isWindows = (platform ?? process.platform) === 'win32';\n const checkCommand = isWindows ? `where ${programName}` : `which ${programName}`;\n\n let result: { exists: boolean; path?: string; error?: string };\n\n const logger = getLogger();\n \n try {\n const output = execSync(checkCommand, {\n encoding: 'utf-8',\n stdio: ['pipe', 'pipe', 'pipe'],\n timeout: 5000 // 5 second timeout for the check\n });\n\n // Parse the result - get the first line (path to the program)\n const programPath = output.trim().split('\\n')[0].trim();\n\n result = {\n exists: true,\n path: programPath\n };\n \n logger.debug('ProgramCheck', `Program '${programName}' found at: ${programPath}`);\n } catch (error) {\n // Command failed - program not found\n const errorMsg = `'${programName}' is not installed or not found in PATH. Please install it first.`;\n result = {\n exists: false,\n error: errorMsg\n };\n \n logger.debug('ProgramCheck', `Program '${programName}' not found: ${errorMsg}`);\n }\n\n // Cache the result\n programExistsCache.set(cacheKey, result);\n return result;\n}\n\n/**\n * Clear the program existence cache.\n * Useful for testing or when the user installs a program and wants to retry.\n * \n * @param programName - Optional program name to clear. 
If not provided, clears entire cache.\n */\nexport function clearProgramExistsCache(programName?: string): void {\n if (programName) {\n // Clear all entries for this program (all platforms)\n for (const key of programExistsCache.keys()) {\n if (key.startsWith(`${programName}:`)) {\n programExistsCache.delete(key);\n }\n }\n } else {\n programExistsCache.clear();\n }\n}\n\n/**\n * Parse the copilot CLI output to extract the response text.\n * Removes the status lines, tool operations, and usage statistics.\n * \n * @param output - Raw output from copilot CLI\n * @returns The extracted response text\n */\nexport function parseCopilotOutput(output: string): string {\n const lines = output.split('\\n');\n const resultLines: string[] = [];\n let inContent = false;\n\n for (const line of lines) {\n // Skip ANSI escape codes and clean the line\n const cleanLine = line.replace(/\\x1b\\[[0-9;]*m/g, '').trim();\n\n // Skip empty lines at the start\n if (!inContent && cleanLine === '') {\n continue;\n }\n\n // Skip copilot status/operation lines\n // \u2713 = success, \u2717 = failure, \u2514 = tree branch (sub-info)\n if (cleanLine.startsWith('\u2713') ||\n cleanLine.startsWith('\u2717') ||\n cleanLine.startsWith('\u2514') ||\n cleanLine.startsWith('\u251C')) {\n continue;\n }\n\n // Skip error/info messages from copilot tools\n if (cleanLine.startsWith('Invalid session') ||\n cleanLine.includes('session ID') ||\n cleanLine.startsWith('Error:') ||\n cleanLine.startsWith('Warning:')) {\n continue;\n }\n\n // Skip lines that look like tool invocations or file operations\n if (cleanLine.match(/^(Read|Glob|Search|List|Edit|Write|Delete|Run)\\s/i)) {\n continue;\n }\n\n // Stop at usage statistics\n if (cleanLine.startsWith('Total usage') ||\n cleanLine.startsWith('Total duration') ||\n cleanLine.startsWith('Total code changes') ||\n cleanLine.startsWith('Usage by model')) {\n break;\n }\n\n // Start capturing content\n inContent = true;\n resultLines.push(cleanLine);\n }\n\n // Trim trailing empty lines\n while (resultLines.length > 0 && resultLines[resultLines.length - 1] === '') {\n resultLines.pop();\n }\n\n return resultLines.join('\\n').trim();\n}\n", "/**\n * AI Process Types (Pure Node.js)\n *\n * Pure types for AI process tracking and management.\n * No VS Code dependencies - can be used in CLI tools and other environments.\n */\n\nimport { AIBackendType } from './types';\n\n/**\n * Supported AI tools for invocation\n */\nexport type AIToolType = 'copilot-cli' | 'clipboard';\n\n/**\n * Status of an AI process\n */\nexport type AIProcessStatus = 'queued' | 'running' | 'completed' | 'failed' | 'cancelled';\n\n/**\n * Type of AI process - extensible via string union\n * Core types: 'clarification' | 'discovery'\n * Feature modules can register additional types via the generic metadata system\n */\nexport type AIProcessType = 'clarification' | 'code-review' | 'discovery' | 'code-review-group' | 'pipeline-execution' | 'pipeline-item' | string;\n\n/**\n * Generic metadata interface that feature modules can extend.\n * This allows ai-service to remain decoupled from specific feature implementations.\n */\nexport interface GenericProcessMetadata {\n /** Type identifier for the metadata (matches AIProcessType) */\n type: string;\n /** Feature-specific data stored as key-value pairs */\n [key: string]: unknown;\n}\n\n/**\n * Generic group metadata interface for grouped processes.\n * Feature modules can extend this for specific group tracking needs.\n */\nexport interface GenericGroupMetadata extends 
GenericProcessMetadata {\n /** Child process IDs in this group */\n childProcessIds: string[];\n}\n\n/**\n * Options for registering a generic typed process\n */\nexport interface TypedProcessOptions {\n /** The process type identifier */\n type: AIProcessType;\n /** ID prefix for generated process IDs (e.g., 'review' -> 'review-1-timestamp') */\n idPrefix?: string;\n /** Feature-specific metadata */\n metadata?: GenericProcessMetadata;\n /** Parent process ID for grouped processes */\n parentProcessId?: string;\n /** Initial status for the process (default: 'running'). Use 'queued' for queue systems. */\n initialStatus?: 'queued' | 'running';\n}\n\n/**\n * Options for registering a generic process group\n */\nexport interface ProcessGroupOptions {\n /** The group type identifier */\n type: AIProcessType;\n /** ID prefix for generated group IDs */\n idPrefix?: string;\n /** Feature-specific metadata (will have childProcessIds added) */\n metadata?: Omit<GenericGroupMetadata, 'childProcessIds'>;\n}\n\n/**\n * Options for completing a process group\n */\nexport interface CompleteGroupOptions {\n /** Summary result text */\n result: string;\n /** Structured result as JSON string */\n structuredResult: string;\n /** Feature-specific execution statistics */\n executionStats?: Record<string, unknown>;\n}\n\n// ============================================================================\n// LEGACY TYPES - Kept for backward compatibility\n// These types are deprecated and will be removed in a future version.\n// Feature modules should define their own metadata types.\n// ============================================================================\n\n/**\n * @deprecated Use GenericProcessMetadata with type='code-review' instead.\n * This type is kept for backward compatibility with existing code.\n * Code review specific metadata - defined here temporarily for compatibility.\n */\nexport interface CodeReviewProcessMetadata {\n /** Type of review */\n reviewType: 'commit' | 'pending' | 'staged' | 'range';\n /** Commit SHA (for commit reviews) */\n commitSha?: string;\n /** Commit message */\n commitMessage?: string;\n /** Rules used for the review */\n rulesUsed: string[];\n /** Diff statistics */\n diffStats?: {\n files: number;\n additions: number;\n deletions: number;\n };\n}\n\n/**\n * Discovery process specific metadata\n */\nexport interface DiscoveryProcessMetadata {\n /** Feature description being searched */\n featureDescription: string;\n /** Keywords used in the search */\n keywords?: string[];\n /** Target group path (if scoped to a group) */\n targetGroupPath?: string;\n /** Search scope settings */\n scope?: {\n includeSourceFiles: boolean;\n includeDocs: boolean;\n includeConfigFiles: boolean;\n includeGitHistory: boolean;\n };\n /** Number of results found */\n resultCount?: number;\n}\n\n/**\n * @deprecated Use GenericGroupMetadata with type='code-review-group' instead.\n * This type is kept for backward compatibility with existing code.\n * Metadata for grouped code review processes (master process)\n */\nexport interface CodeReviewGroupMetadata {\n /** Type of review */\n reviewType: 'commit' | 'pending' | 'staged' | 'range';\n /** Commit SHA (for commit reviews) */\n commitSha?: string;\n /** Commit message */\n commitMessage?: string;\n /** All rules being reviewed */\n rulesUsed: string[];\n /** Diff statistics */\n diffStats?: {\n files: number;\n additions: number;\n deletions: number;\n };\n /** Child process IDs (individual rule reviews) */\n childProcessIds: string[];\n 
/** Execution statistics */\n executionStats?: {\n totalRules: number;\n successfulRules: number;\n failedRules: number;\n totalTimeMs: number;\n };\n}\n\n/**\n * A tracked AI process\n */\nexport interface AIProcess {\n /** Unique identifier */\n id: string;\n /** Type of process */\n type: AIProcessType;\n /** Preview of the prompt (first ~50 chars) */\n promptPreview: string;\n /** Full prompt text */\n fullPrompt: string;\n /** Current status */\n status: AIProcessStatus;\n /** When the process started */\n startTime: Date;\n /** When the process ended (if finished) */\n endTime?: Date;\n /** Error message if failed */\n error?: string;\n /** The AI response if completed */\n result?: string;\n /** Path to the file containing the full result */\n resultFilePath?: string;\n /** Path to the file containing raw stdout from the AI tool */\n rawStdoutFilePath?: string;\n\n // ========================================================================\n // Generic metadata (preferred for new features)\n // ========================================================================\n\n /** Generic feature-specific metadata. Feature modules should use this. */\n metadata?: GenericProcessMetadata;\n\n /** Generic group metadata for grouped processes */\n groupMetadata?: GenericGroupMetadata;\n\n // ========================================================================\n // Legacy metadata fields (kept for backward compatibility)\n // New features should use `metadata` and `groupMetadata` instead.\n // ========================================================================\n\n /** @deprecated Use metadata with type='code-review' instead */\n codeReviewMetadata?: CodeReviewProcessMetadata;\n /** Discovery specific metadata (if type is 'discovery') */\n discoveryMetadata?: DiscoveryProcessMetadata;\n /** @deprecated Use groupMetadata with type='code-review-group' instead */\n codeReviewGroupMetadata?: CodeReviewGroupMetadata;\n /** Parsed structured result (for code reviews) */\n structuredResult?: string; // JSON string of CodeReviewResult\n /** Parent process ID (for child processes in a group) */\n parentProcessId?: string;\n\n // ========================================================================\n // Session Resume Fields (Added 2026-01)\n // ========================================================================\n\n /** SDK session ID for resuming sessions (only for copilot-sdk backend) */\n sdkSessionId?: string;\n /** Backend type used for this process */\n backend?: AIBackendType;\n /** Working directory used for the original session */\n workingDirectory?: string;\n}\n\n/**\n * Serialized format of AIProcess for persistence (Date -> ISO string)\n */\nexport interface SerializedAIProcess {\n id: string;\n type?: AIProcessType;\n promptPreview: string;\n fullPrompt: string;\n status: AIProcessStatus;\n startTime: string; // ISO string\n endTime?: string; // ISO string\n error?: string;\n result?: string;\n resultFilePath?: string;\n rawStdoutFilePath?: string;\n /** Generic feature-specific metadata */\n metadata?: GenericProcessMetadata;\n /** Generic group metadata for grouped processes */\n groupMetadata?: GenericGroupMetadata;\n /** @deprecated Use metadata instead */\n codeReviewMetadata?: CodeReviewProcessMetadata;\n discoveryMetadata?: DiscoveryProcessMetadata;\n /** @deprecated Use groupMetadata instead */\n codeReviewGroupMetadata?: CodeReviewGroupMetadata;\n structuredResult?: string;\n parentProcessId?: string;\n\n // 
========================================================================\n // Session Resume Fields (Added 2026-01)\n // ========================================================================\n\n /** SDK session ID for resuming sessions (only for copilot-sdk backend) */\n sdkSessionId?: string;\n /** Backend type used for this process */\n backend?: AIBackendType;\n /** Working directory used for the original session */\n workingDirectory?: string;\n}\n\n/**\n * Extended AIProcess with session resume fields (internal use)\n * These fields are tracked in-memory and persisted for session resume functionality.\n */\nexport interface TrackedProcessFields {\n /** SDK session ID for resuming sessions */\n sdkSessionId?: string;\n /** Backend type used for this process */\n backend?: AIBackendType;\n /** Working directory used for the original session */\n workingDirectory?: string;\n}\n\n/**\n * Convert AIProcess to serialized format for storage\n */\nexport function serializeProcess(process: AIProcess & Partial<TrackedProcessFields>): SerializedAIProcess {\n return {\n id: process.id,\n type: process.type,\n promptPreview: process.promptPreview,\n fullPrompt: process.fullPrompt,\n status: process.status,\n startTime: process.startTime.toISOString(),\n endTime: process.endTime?.toISOString(),\n error: process.error,\n result: process.result,\n resultFilePath: process.resultFilePath,\n rawStdoutFilePath: process.rawStdoutFilePath,\n metadata: process.metadata,\n groupMetadata: process.groupMetadata,\n codeReviewMetadata: process.codeReviewMetadata,\n discoveryMetadata: process.discoveryMetadata,\n codeReviewGroupMetadata: process.codeReviewGroupMetadata,\n structuredResult: process.structuredResult,\n parentProcessId: process.parentProcessId,\n // Session resume fields\n sdkSessionId: process.sdkSessionId,\n backend: process.backend,\n workingDirectory: process.workingDirectory\n };\n}\n\n/**\n * Convert serialized format back to AIProcess\n */\nexport function deserializeProcess(serialized: SerializedAIProcess): AIProcess {\n return {\n id: serialized.id,\n type: serialized.type || 'clarification',\n promptPreview: serialized.promptPreview,\n fullPrompt: serialized.fullPrompt,\n status: serialized.status,\n startTime: new Date(serialized.startTime),\n endTime: serialized.endTime ? 
new Date(serialized.endTime) : undefined,\n error: serialized.error,\n result: serialized.result,\n resultFilePath: serialized.resultFilePath,\n rawStdoutFilePath: serialized.rawStdoutFilePath,\n metadata: serialized.metadata,\n groupMetadata: serialized.groupMetadata,\n codeReviewMetadata: serialized.codeReviewMetadata,\n discoveryMetadata: serialized.discoveryMetadata,\n codeReviewGroupMetadata: serialized.codeReviewGroupMetadata,\n structuredResult: serialized.structuredResult,\n parentProcessId: serialized.parentProcessId,\n // Session resume fields\n sdkSessionId: serialized.sdkSessionId,\n backend: serialized.backend,\n workingDirectory: serialized.workingDirectory\n };\n}\n\n/**\n * Event types for process changes\n */\nexport type ProcessEventType = 'process-added' | 'process-updated' | 'process-removed' | 'processes-cleared';\n\n/**\n * Process change event\n */\nexport interface ProcessEvent {\n type: ProcessEventType;\n process?: AIProcess;\n}\n\n/**\n * Process count statistics\n */\nexport interface ProcessCounts {\n queued: number;\n running: number;\n completed: number;\n failed: number;\n cancelled: number;\n}\n", "/**\n * Session Pool for Copilot SDK\n *\n * Manages a pool of reusable Copilot SDK sessions for efficient concurrent request handling.\n * This pool provides:\n * - Session reuse to avoid creation overhead\n * - Concurrency limiting to prevent resource exhaustion\n * - Idle timeout cleanup to free unused sessions\n * - Graceful shutdown with proper session cleanup\n *\n * @see https://github.com/github/copilot-sdk\n */\n\nimport { getLogger, LogCategory } from '../logger';\n\n/**\n * Interface for a Copilot SDK session.\n * Defined here to avoid direct type dependency on the SDK.\n */\nexport interface IPoolableSession {\n /** Unique session identifier */\n sessionId: string;\n /**\n * Send a message and wait for the session to become idle.\n * @param options - Message options including prompt\n * @param timeout - Timeout in milliseconds (SDK default: 60000)\n */\n sendAndWait(options: { prompt: string }, timeout?: number): Promise<{ data?: { content?: string } }>;\n /** Destroy the session and release resources */\n destroy(): Promise<void>;\n}\n\n/**\n * Factory function type for creating new sessions.\n * This allows the pool to be decoupled from the actual SDK client.\n */\nexport type SessionFactory = () => Promise<IPoolableSession>;\n\n/**\n * Configuration options for the session pool.\n */\nexport interface SessionPoolOptions {\n /** Maximum number of sessions in the pool (default: 5) */\n maxSessions?: number;\n /** Idle timeout in milliseconds before a session is destroyed (default: 300000 = 5 minutes) */\n idleTimeoutMs?: number;\n /** Minimum number of sessions to keep in the pool even when idle (default: 0) */\n minSessions?: number;\n /** How often to check for idle sessions in milliseconds (default: 60000 = 1 minute) */\n cleanupIntervalMs?: number;\n}\n\n/**\n * Internal representation of a pooled session with metadata.\n */\ninterface PooledSession {\n /** The actual SDK session */\n session: IPoolableSession;\n /** Whether the session is currently in use */\n inUse: boolean;\n /** Timestamp when the session was last used (for idle timeout) */\n lastUsedAt: number;\n /** Timestamp when the session was created */\n createdAt: number;\n}\n\n/**\n * Statistics about the session pool.\n */\nexport interface SessionPoolStats {\n /** Total number of sessions in the pool */\n totalSessions: number;\n /** Number of sessions currently in use */\n 
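// Round-trip sketch for the two serialization helpers defined above:
// serializeProcess() turns the Date fields into ISO strings so the record
// survives JSON.stringify, and deserializeProcess() turns them back into
// Dates. The file path and fs usage are illustrative assumptions, not part of
// this module.
import * as fs from 'fs';

function saveProcess(proc: AIProcess, filePath: string): void {
  const serialized: SerializedAIProcess = serializeProcess(proc);
  fs.writeFileSync(filePath, JSON.stringify(serialized, null, 2), 'utf-8');
}

function loadProcess(filePath: string): AIProcess {
  const parsed = JSON.parse(fs.readFileSync(filePath, 'utf-8')) as SerializedAIProcess;
  return deserializeProcess(parsed);   // startTime / endTime are Dates again
}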
inUseSessions: number;\n /** Number of idle sessions available */\n idleSessions: number;\n /** Number of requests waiting for a session */\n waitingRequests: number;\n /** Maximum sessions allowed */\n maxSessions: number;\n /** Idle timeout in milliseconds */\n idleTimeoutMs: number;\n}\n\n/**\n * A waiter for a session when the pool is exhausted.\n */\ninterface SessionWaiter {\n resolve: (session: IPoolableSession) => void;\n reject: (error: Error) => void;\n timeoutId?: ReturnType<typeof setTimeout>;\n}\n\n/**\n * Session pool for managing reusable Copilot SDK sessions.\n *\n * Usage:\n * ```typescript\n * const pool = new SessionPool(createSession, { maxSessions: 5 });\n *\n * // Acquire a session\n * const session = await pool.acquire();\n * try {\n * const result = await session.sendAndWait({ prompt: 'Hello' });\n * } finally {\n * // Release the session back to the pool\n * pool.release(session);\n * }\n *\n * // Clean up when done\n * await pool.dispose();\n * ```\n */\nexport class SessionPool {\n private readonly sessions: Map<string, PooledSession> = new Map();\n private readonly waiters: SessionWaiter[] = [];\n private readonly sessionFactory: SessionFactory;\n private readonly maxSessions: number;\n private readonly idleTimeoutMs: number;\n private readonly minSessions: number;\n private readonly cleanupIntervalMs: number;\n private cleanupTimer?: ReturnType<typeof setInterval>;\n private disposed = false;\n\n /** Default maximum sessions */\n public static readonly DEFAULT_MAX_SESSIONS = 5;\n /** Default idle timeout (5 minutes) */\n public static readonly DEFAULT_IDLE_TIMEOUT_MS = 300000;\n /** Default minimum sessions to keep */\n public static readonly DEFAULT_MIN_SESSIONS = 0;\n /** Default cleanup interval (1 minute) */\n public static readonly DEFAULT_CLEANUP_INTERVAL_MS = 60000;\n /** Default acquire timeout (30 seconds) */\n public static readonly DEFAULT_ACQUIRE_TIMEOUT_MS = 30000;\n\n /**\n * Create a new session pool.\n *\n * @param sessionFactory Factory function to create new sessions\n * @param options Pool configuration options\n */\n constructor(sessionFactory: SessionFactory, options: SessionPoolOptions = {}) {\n this.sessionFactory = sessionFactory;\n this.maxSessions = options.maxSessions ?? SessionPool.DEFAULT_MAX_SESSIONS;\n this.idleTimeoutMs = options.idleTimeoutMs ?? SessionPool.DEFAULT_IDLE_TIMEOUT_MS;\n this.minSessions = options.minSessions ?? SessionPool.DEFAULT_MIN_SESSIONS;\n this.cleanupIntervalMs = options.cleanupIntervalMs ?? 
SessionPool.DEFAULT_CLEANUP_INTERVAL_MS;\n\n // Validate options\n if (this.maxSessions < 1) {\n throw new Error('maxSessions must be at least 1');\n }\n if (this.minSessions > this.maxSessions) {\n throw new Error('minSessions cannot exceed maxSessions');\n }\n if (this.idleTimeoutMs < 0) {\n throw new Error('idleTimeoutMs cannot be negative');\n }\n\n // Start the cleanup timer\n this.startCleanupTimer();\n\n const logger = getLogger();\n logger.debug(LogCategory.AI, `SessionPool: Created with maxSessions=${this.maxSessions}, idleTimeoutMs=${this.idleTimeoutMs}`);\n }\n\n /**\n * Acquire a session from the pool.\n * If no idle session is available and the pool is not at capacity, a new session is created.\n * If the pool is at capacity, this will wait until a session becomes available.\n *\n * @param timeoutMs Maximum time to wait for a session (default: 30 seconds)\n * @returns A session from the pool\n * @throws Error if the pool is disposed or timeout is reached\n */\n public async acquire(timeoutMs: number = SessionPool.DEFAULT_ACQUIRE_TIMEOUT_MS): Promise<IPoolableSession> {\n if (this.disposed) {\n throw new Error('SessionPool has been disposed');\n }\n\n const logger = getLogger();\n logger.debug(LogCategory.AI, `SessionPool: Acquiring session (total=${this.sessions.size}, inUse=${this.getInUseCount()})`);\n\n // Try to find an idle session\n const idleSession = this.findIdleSession();\n if (idleSession) {\n idleSession.inUse = true;\n idleSession.lastUsedAt = Date.now();\n logger.debug(LogCategory.AI, `SessionPool: Reusing idle session ${idleSession.session.sessionId}`);\n return idleSession.session;\n }\n\n // If we can create a new session, do so\n if (this.sessions.size < this.maxSessions) {\n const session = await this.createAndAddSession();\n logger.debug(LogCategory.AI, `SessionPool: Created new session ${session.sessionId}`);\n return session;\n }\n\n // Otherwise, wait for a session to become available\n logger.debug(LogCategory.AI, `SessionPool: Pool at capacity, waiting for available session`);\n return this.waitForSession(timeoutMs);\n }\n\n /**\n * Release a session back to the pool.\n * The session becomes available for reuse by other requests.\n *\n * @param session The session to release\n */\n public release(session: IPoolableSession): void {\n if (this.disposed) {\n // If disposed, just destroy the session\n this.destroySession(session).catch(() => {\n // Ignore errors during dispose\n });\n return;\n }\n\n const pooledSession = this.sessions.get(session.sessionId);\n if (!pooledSession) {\n // Session not in pool, destroy it\n const logger = getLogger();\n logger.debug(LogCategory.AI, `SessionPool: Session ${session.sessionId} not in pool, destroying`);\n this.destroySession(session).catch(() => {\n // Ignore errors\n });\n return;\n }\n\n const logger = getLogger();\n\n // Check if there are waiters\n if (this.waiters.length > 0) {\n const waiter = this.waiters.shift()!;\n if (waiter.timeoutId) {\n clearTimeout(waiter.timeoutId);\n }\n pooledSession.lastUsedAt = Date.now();\n logger.debug(LogCategory.AI, `SessionPool: Passing session ${session.sessionId} to waiting request`);\n waiter.resolve(session);\n return;\n }\n\n // No waiters, mark as idle\n pooledSession.inUse = false;\n pooledSession.lastUsedAt = Date.now();\n logger.debug(LogCategory.AI, `SessionPool: Released session ${session.sessionId} back to pool`);\n }\n\n /**\n * Destroy a specific session and remove it from the pool.\n * Use this when a session is in an error state and should not be reused.\n 
*\n * @param session The session to destroy\n */\n public async destroy(session: IPoolableSession): Promise<void> {\n const logger = getLogger();\n logger.debug(LogCategory.AI, `SessionPool: Destroying session ${session.sessionId}`);\n\n this.sessions.delete(session.sessionId);\n await this.destroySession(session);\n }\n\n /**\n * Get statistics about the pool.\n *\n * @returns Current pool statistics\n */\n public getStats(): SessionPoolStats {\n const inUseSessions = this.getInUseCount();\n return {\n totalSessions: this.sessions.size,\n inUseSessions,\n idleSessions: this.sessions.size - inUseSessions,\n waitingRequests: this.waiters.length,\n maxSessions: this.maxSessions,\n idleTimeoutMs: this.idleTimeoutMs\n };\n }\n\n /**\n * Check if the pool has been disposed.\n */\n public isDisposed(): boolean {\n return this.disposed;\n }\n\n /**\n * Dispose of the pool and all sessions.\n * After disposal, the pool cannot be used.\n */\n public async dispose(): Promise<void> {\n if (this.disposed) {\n return;\n }\n\n const logger = getLogger();\n logger.debug(LogCategory.AI, 'SessionPool: Disposing pool');\n\n this.disposed = true;\n\n // Stop the cleanup timer\n if (this.cleanupTimer) {\n clearInterval(this.cleanupTimer);\n this.cleanupTimer = undefined;\n }\n\n // Reject all waiters\n for (const waiter of this.waiters) {\n if (waiter.timeoutId) {\n clearTimeout(waiter.timeoutId);\n }\n waiter.reject(new Error('SessionPool has been disposed'));\n }\n this.waiters.length = 0;\n\n // Destroy all sessions\n const destroyPromises: Promise<void>[] = [];\n for (const [, pooledSession] of this.sessions) {\n destroyPromises.push(this.destroySession(pooledSession.session));\n }\n this.sessions.clear();\n\n await Promise.allSettled(destroyPromises);\n logger.debug(LogCategory.AI, 'SessionPool: Disposed');\n }\n\n /**\n * Manually trigger cleanup of idle sessions.\n * This is automatically called on a timer, but can be called manually for testing.\n */\n public async cleanupIdleSessions(): Promise<number> {\n if (this.disposed) {\n return 0;\n }\n\n const logger = getLogger();\n const now = Date.now();\n const sessionsToRemove: string[] = [];\n\n // Find sessions that have been idle too long\n for (const [sessionId, pooledSession] of this.sessions) {\n if (!pooledSession.inUse) {\n const idleTime = now - pooledSession.lastUsedAt;\n if (idleTime > this.idleTimeoutMs) {\n // Keep minimum sessions\n const idleCount = this.sessions.size - this.getInUseCount();\n const currentIdleAfterRemoval = idleCount - sessionsToRemove.length - 1;\n if (currentIdleAfterRemoval >= this.minSessions) {\n sessionsToRemove.push(sessionId);\n }\n }\n }\n }\n\n // Remove and destroy idle sessions\n for (const sessionId of sessionsToRemove) {\n const pooledSession = this.sessions.get(sessionId);\n if (pooledSession) {\n this.sessions.delete(sessionId);\n await this.destroySession(pooledSession.session);\n logger.debug(LogCategory.AI, `SessionPool: Cleaned up idle session ${sessionId}`);\n }\n }\n\n if (sessionsToRemove.length > 0) {\n logger.debug(LogCategory.AI, `SessionPool: Cleaned up ${sessionsToRemove.length} idle sessions`);\n }\n\n return sessionsToRemove.length;\n }\n\n // ========================================================================\n // Private Methods\n // ========================================================================\n\n /**\n * Find an idle session in the pool.\n */\n private findIdleSession(): PooledSession | undefined {\n for (const [, pooledSession] of this.sessions) {\n if 
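// Sketch of the error-handling pattern suggested by destroy() above: release a
// healthy session back to the pool, but destroy one that failed so it is not
// reused. createSession stands in for a real SessionFactory and the timeout
// value is illustrative.
declare const createSession: SessionFactory;

async function askOnce(pool: SessionPool, prompt: string): Promise<string> {
  const session = await pool.acquire();
  try {
    const result = await session.sendAndWait({ prompt }, 60000);
    pool.release(session);               // healthy: make it reusable
    return result?.data?.content ?? '';
  } catch (error) {
    await pool.destroy(session);         // broken: drop it from the pool
    throw error;
  }
}

// const pool = new SessionPool(createSession, { maxSessions: 3, idleTimeoutMs: 120000 });
// const answer = await askOnce(pool, 'Summarize this repository');
// await pool.dispose();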
(!pooledSession.inUse) {\n return pooledSession;\n }\n }\n return undefined;\n }\n\n /**\n * Get the count of sessions currently in use.\n */\n private getInUseCount(): number {\n let count = 0;\n for (const [, pooledSession] of this.sessions) {\n if (pooledSession.inUse) {\n count++;\n }\n }\n return count;\n }\n\n /**\n * Create a new session and add it to the pool.\n */\n private async createAndAddSession(): Promise<IPoolableSession> {\n const session = await this.sessionFactory();\n const now = Date.now();\n\n this.sessions.set(session.sessionId, {\n session,\n inUse: true,\n lastUsedAt: now,\n createdAt: now\n });\n\n return session;\n }\n\n /**\n * Wait for a session to become available.\n */\n private waitForSession(timeoutMs: number): Promise<IPoolableSession> {\n return new Promise((resolve, reject) => {\n const waiter: SessionWaiter = { resolve, reject };\n\n // Set up timeout\n waiter.timeoutId = setTimeout(() => {\n const index = this.waiters.indexOf(waiter);\n if (index !== -1) {\n this.waiters.splice(index, 1);\n }\n reject(new Error(`Timeout waiting for session after ${timeoutMs}ms`));\n }, timeoutMs);\n\n this.waiters.push(waiter);\n });\n }\n\n /**\n * Destroy a session (internal helper).\n */\n private async destroySession(session: IPoolableSession): Promise<void> {\n try {\n await session.destroy();\n } catch (error) {\n const logger = getLogger();\n logger.debug(LogCategory.AI, `SessionPool: Error destroying session ${session.sessionId}: ${error}`);\n }\n }\n\n /**\n * Start the cleanup timer.\n */\n private startCleanupTimer(): void {\n if (this.cleanupIntervalMs > 0) {\n this.cleanupTimer = setInterval(() => {\n this.cleanupIdleSessions().catch(() => {\n // Ignore cleanup errors\n });\n }, this.cleanupIntervalMs);\n\n // Don't let the timer prevent Node from exiting\n if (this.cleanupTimer.unref) {\n this.cleanupTimer.unref();\n }\n }\n }\n}\n", "/**\n * MCP Config Loader\n * \n * Utility for loading MCP server configuration from the user's home directory.\n * The config file is located at ~/.copilot/mcp-config.json and follows the same\n * format used by the Copilot CLI.\n * \n * Features:\n * - Cross-platform home directory resolution\n * - Graceful handling of missing files\n * - JSON parsing with error handling\n * - Config caching to avoid repeated file reads\n */\n\nimport * as os from 'os';\nimport * as path from 'path';\nimport * as fs from 'fs';\nimport { MCPServerConfig } from './copilot-sdk-service';\nimport { getLogger, LogCategory } from '../logger';\n\n/**\n * Structure of the MCP config file (~/.copilot/mcp-config.json)\n */\nexport interface MCPConfigFile {\n /** Map of server names to their configurations */\n mcpServers?: Record<string, MCPServerConfig>;\n}\n\n/**\n * Result of loading the MCP config\n */\nexport interface MCPConfigLoadResult {\n /** Whether the config was loaded successfully */\n success: boolean;\n /** The loaded MCP server configurations (empty object if not found or error) */\n mcpServers: Record<string, MCPServerConfig>;\n /** Path to the config file that was checked */\n configPath: string;\n /** Error message if loading failed */\n error?: string;\n /** Whether the config file exists */\n fileExists: boolean;\n}\n\n/** Default config file path relative to home directory */\nconst CONFIG_DIR = '.copilot';\nconst CONFIG_FILE = 'mcp-config.json';\n\n/** Cached config to avoid repeated file reads */\nlet cachedConfig: MCPConfigLoadResult | null = null;\n\n/** Override for home directory (used for testing) */\nlet 
homeDirectoryOverride: string | null = null;\n\n/**\n * Set an override for the home directory.\n * This is primarily used for testing purposes.\n * \n * @param dir - The directory to use as home, or null to use the system default\n */\nexport function setHomeDirectoryOverride(dir: string | null): void {\n homeDirectoryOverride = dir;\n // Clear cache when home directory changes\n cachedConfig = null;\n}\n\n/**\n * Get the user's home directory in a cross-platform manner.\n * If a home directory override is set (for testing), that is returned instead.\n * \n * @returns The home directory path\n */\nexport function getHomeDirectory(): string {\n // Return override if set (for testing)\n if (homeDirectoryOverride !== null) {\n return homeDirectoryOverride;\n }\n // os.homedir() handles all platforms correctly:\n // - Windows: %USERPROFILE% or %HOMEDRIVE%%HOMEPATH%\n // - macOS/Linux: $HOME or from /etc/passwd\n return os.homedir();\n}\n\n/**\n * Get the path to the MCP config file.\n * \n * @returns The full path to ~/.copilot/mcp-config.json\n */\nexport function getMcpConfigPath(): string {\n return path.join(getHomeDirectory(), CONFIG_DIR, CONFIG_FILE);\n}\n\n/**\n * Load MCP server configuration from the default config file.\n * Results are cached after the first successful load.\n * \n * @param forceReload - If true, bypass the cache and reload from disk\n * @returns The load result with MCP server configurations\n */\nexport function loadDefaultMcpConfig(forceReload = false): MCPConfigLoadResult {\n const logger = getLogger();\n const configPath = getMcpConfigPath();\n\n // Return cached config if available and not forcing reload\n if (cachedConfig && !forceReload) {\n logger.debug(LogCategory.AI, 'MCPConfigLoader: Returning cached config');\n return cachedConfig;\n }\n\n logger.debug(LogCategory.AI, `MCPConfigLoader: Loading config from ${configPath}`);\n\n // Check if file exists\n if (!fs.existsSync(configPath)) {\n logger.debug(LogCategory.AI, 'MCPConfigLoader: Config file not found (this is normal if not configured)');\n cachedConfig = {\n success: true,\n mcpServers: {},\n configPath,\n fileExists: false\n };\n return cachedConfig;\n }\n\n try {\n // Read and parse the config file\n const content = fs.readFileSync(configPath, 'utf-8');\n const config: MCPConfigFile = JSON.parse(content);\n\n // Validate the structure\n const mcpServers = config.mcpServers || {};\n \n // Log what we found\n const serverCount = Object.keys(mcpServers).length;\n logger.debug(LogCategory.AI, `MCPConfigLoader: Loaded ${serverCount} MCP server(s) from config`);\n\n cachedConfig = {\n success: true,\n mcpServers,\n configPath,\n fileExists: true\n };\n return cachedConfig;\n\n } catch (error) {\n const errorMessage = error instanceof Error ? 
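// Sketch of the caching behaviour described above: the first call reads
// ~/.copilot/mcp-config.json (or records that it is missing) and caches the
// result; later calls reuse the cache unless forceReload is passed. The
// temp-directory test setup is an illustrative assumption.
import * as os from 'os';

const first = loadDefaultMcpConfig();        // reads from disk (or notes fileExists: false)
const cached = loadDefaultMcpConfig();       // served from cache, no file I/O
const fresh = loadDefaultMcpConfig(true);    // forceReload: re-reads the file
console.log(first.configPath, cached.fileExists, Object.keys(fresh.mcpServers).length);

// In tests, point the loader at a scratch home directory; this also clears the cache.
setHomeDirectoryOverride(os.tmpdir());
console.log(loadDefaultMcpConfig().configPath);
setHomeDirectoryOverride(null);              // restore the real home directory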
error.message : String(error);\n logger.warn(LogCategory.AI, `MCPConfigLoader: Failed to parse config file: ${errorMessage}`);\n\n cachedConfig = {\n success: false,\n mcpServers: {},\n configPath,\n fileExists: true,\n error: `Failed to parse MCP config: ${errorMessage}`\n };\n return cachedConfig;\n }\n}\n\n/**\n * Load MCP config asynchronously.\n * This is a convenience wrapper for async contexts.\n * \n * @param forceReload - If true, bypass the cache and reload from disk\n * @returns Promise resolving to the load result\n */\nexport async function loadDefaultMcpConfigAsync(forceReload = false): Promise<MCPConfigLoadResult> {\n return loadDefaultMcpConfig(forceReload);\n}\n\n/**\n * Merge MCP server configurations.\n * Explicit configurations take precedence over default configurations.\n * \n * @param defaultConfig - Default MCP servers from config file\n * @param explicitConfig - Explicit MCP servers passed in options\n * @returns Merged configuration with explicit taking precedence\n */\nexport function mergeMcpConfigs(\n defaultConfig: Record<string, MCPServerConfig>,\n explicitConfig?: Record<string, MCPServerConfig>\n): Record<string, MCPServerConfig> {\n // If no explicit config, return default\n if (!explicitConfig) {\n return { ...defaultConfig };\n }\n\n // If explicit config is empty object, it means \"disable all MCP servers\"\n // This is a special case documented in the SDK\n if (Object.keys(explicitConfig).length === 0) {\n return {};\n }\n\n // Merge with explicit taking precedence\n return {\n ...defaultConfig,\n ...explicitConfig\n };\n}\n\n/**\n * Clear the cached MCP config.\n * Useful for testing or when the config file might have changed.\n */\nexport function clearMcpConfigCache(): void {\n const logger = getLogger();\n logger.debug(LogCategory.AI, 'MCPConfigLoader: Clearing config cache');\n cachedConfig = null;\n}\n\n/**\n * Check if an MCP config file exists at the default location.\n * \n * @returns True if the config file exists\n */\nexport function mcpConfigExists(): boolean {\n return fs.existsSync(getMcpConfigPath());\n}\n\n/**\n * Get the cached config without loading from disk.\n * Returns null if no config has been loaded yet.\n * \n * @returns The cached config or null\n */\nexport function getCachedMcpConfig(): MCPConfigLoadResult | null {\n return cachedConfig;\n}\n", "/**\n * Trusted Folder Management\n *\n * Manages the `trusted_folders` list in `~/.copilot/config.json` to\n * programmatically bypass the interactive folder trust confirmation dialog\n * that the Copilot CLI shows when working in a new directory.\n *\n * The config directory is determined by `XDG_CONFIG_HOME` or defaults\n * to `~/.copilot`.\n */\n\nimport * as path from 'path';\nimport * as fs from 'fs';\nimport * as os from 'os';\nimport { getLogger, LogCategory } from '../logger';\n\n/** Config directory name under home */\nconst CONFIG_DIR = '.copilot';\n/** Config file name */\nconst CONFIG_FILE = 'config.json';\n\n/** Override for home directory (used for testing) */\nlet homeDirectoryOverride: string | null = null;\n\n/**\n * Set an override for the home directory.\n * Primarily used for testing purposes.\n */\nexport function setTrustedFolderHomeOverride(dir: string | null): void {\n homeDirectoryOverride = dir;\n}\n\n/**\n * Get the Copilot config directory path.\n * Respects XDG_CONFIG_HOME if set, otherwise uses ~/.copilot.\n */\nfunction getConfigDir(): string {\n const home = homeDirectoryOverride ?? os.homedir();\n return process.env['XDG_CONFIG_HOME'] ?? 
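// Worked example of the merge precedence documented for mergeMcpConfigs above:
// explicit entries override defaults with the same name, and an explicit empty
// object disables every server. The server names and commands are made up for
// illustration.
const defaults: Record<string, MCPServerConfig> = {
  search: { type: 'local', command: 'mcp-search', tools: ['*'] },
  docs:   { type: 'sse', url: 'http://localhost:8000/sse', tools: ['*'] },
};

const explicit: Record<string, MCPServerConfig> = {
  search: { type: 'local', command: 'mcp-search-v2', tools: ['*'] },
};

const merged   = mergeMcpConfigs(defaults, explicit);   // 'search' comes from explicit, 'docs' from defaults
const copy     = mergeMcpConfigs(defaults, undefined);  // shallow copy of defaults
const disabled = mergeMcpConfigs(defaults, {});         // {} means disable all MCP servers
console.log(Object.keys(merged), Object.keys(copy), Object.keys(disabled));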
path.join(home, CONFIG_DIR);\n}\n\n/**\n * Get the full path to the Copilot config file.\n */\nexport function getCopilotConfigPath(): string {\n return path.join(getConfigDir(), CONFIG_FILE);\n}\n\n/**\n * Read and parse the Copilot config file.\n * Returns an empty object if the file doesn't exist or is invalid.\n */\nfunction readConfig(configPath: string): Record<string, unknown> {\n try {\n if (!fs.existsSync(configPath)) {\n return {};\n }\n const content = fs.readFileSync(configPath, 'utf-8');\n const parsed = JSON.parse(content);\n return typeof parsed === 'object' && parsed !== null ? parsed : {};\n } catch {\n return {};\n }\n}\n\n/**\n * Write the config object back to disk.\n * Creates the directory if it doesn't exist.\n */\nfunction writeConfig(configPath: string, config: Record<string, unknown>): void {\n const dir = path.dirname(configPath);\n if (!fs.existsSync(dir)) {\n fs.mkdirSync(dir, { recursive: true });\n }\n fs.writeFileSync(configPath, JSON.stringify(config, null, 2) + '\\n', 'utf-8');\n}\n\n/**\n * Normalize a folder path for comparison and storage.\n * Resolves to absolute and removes trailing separators.\n */\nfunction normalizeFolderPath(folder: string): string {\n let resolved = path.resolve(folder);\n // Remove trailing separator (but not root like \"/\" or \"C:\\\")\n while (resolved.length > 1 && (resolved.endsWith(path.sep) || resolved.endsWith('/'))) {\n resolved = resolved.slice(0, -1);\n }\n return resolved;\n}\n\n/**\n * Check whether a folder is already trusted (present in trusted_folders).\n */\nexport function isFolderTrusted(folder: string): boolean {\n const configPath = getCopilotConfigPath();\n const config = readConfig(configPath);\n const trustedFolders = Array.isArray(config['trusted_folders']) ? config['trusted_folders'] as string[] : [];\n const normalized = normalizeFolderPath(folder);\n return trustedFolders.some(f => normalizeFolderPath(f) === normalized);\n}\n\n/**\n * Ensure a folder is registered as trusted in `~/.copilot/config.json`.\n *\n * If the folder is already in `trusted_folders`, this is a no-op.\n * Otherwise, the folder is appended to the list and the config file is\n * written back to disk.\n *\n * This prevents the Copilot CLI from showing the interactive\n * \"Confirm folder trust\" dialog when creating sessions for new directories.\n *\n * @param folder - The folder path to trust\n */\nexport function ensureFolderTrusted(folder: string): void {\n const logger = getLogger();\n const normalized = normalizeFolderPath(folder);\n const configPath = getCopilotConfigPath();\n\n try {\n const config = readConfig(configPath);\n const trustedFolders = Array.isArray(config['trusted_folders']) ? 
config['trusted_folders'] as string[] : [];\n\n // Check if already trusted\n if (trustedFolders.some(f => normalizeFolderPath(f) === normalized)) {\n logger.debug(LogCategory.AI, `TrustedFolder: '${normalized}' is already trusted`);\n return;\n }\n\n // Add and persist\n trustedFolders.push(normalized);\n config['trusted_folders'] = trustedFolders;\n writeConfig(configPath, config);\n logger.debug(LogCategory.AI, `TrustedFolder: Added '${normalized}' to trusted_folders`);\n } catch (error) {\n // Non-fatal: if we can't update config, the trust dialog will appear\n logger.debug(LogCategory.AI, `TrustedFolder: Failed to update config: ${error}`);\n }\n}\n", "/**\n * AI Timeouts\n *\n * Re-exports the default AI timeout from centralized defaults.\n * Kept for backward compatibility.\n */\nexport { DEFAULT_AI_TIMEOUT_MS } from '../config/defaults';\n", "/**\n * Copilot SDK Service\n *\n * Provides a wrapper around the @github/copilot-sdk for structured AI interactions.\n * This service manages the SDK client lifecycle and provides a clean API for\n * sending messages and managing sessions.\n *\n * Key Features:\n * - Creates a new client per working directory (cwd is set at client init time)\n * - Lazy initialization with ESM dynamic import workaround\n * - Graceful fallback when SDK is unavailable\n * - Session-per-request pattern for simple one-off requests\n *\n * @see https://github.com/github/copilot-sdk\n */\n\nimport * as path from 'path';\nimport * as fs from 'fs';\nimport { AIInvocationResult } from './types';\nimport { getLogger, LogCategory } from '../logger';\n// Note: SessionPool is kept for backward compatibility but not used for clarification requests\nimport { SessionPool, IPoolableSession, SessionPoolStats } from './session-pool';\nimport { loadDefaultMcpConfig, mergeMcpConfigs } from './mcp-config-loader';\nimport { ensureFolderTrusted } from './trusted-folder';\nimport { DEFAULT_AI_TIMEOUT_MS } from './timeouts';\n\n/**\n * Base configuration for MCP (Model Context Protocol) servers.\n * Contains common fields shared by all server types.\n */\nexport interface MCPServerConfigBase {\n /** List of tools to enable from this server. Use [\"*\"] for all tools. 
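// Small usage sketch for the trusted-folder helpers above: register a
// workspace folder before creating SDK sessions so the Copilot CLI does not
// show its folder-trust prompt. The workspace path is a hypothetical example.
const workspaceRoot = '/home/user/projects/my-repo';

if (!isFolderTrusted(workspaceRoot)) {
  // Appends the normalized path to trusted_folders in the Copilot config file.
  ensureFolderTrusted(workspaceRoot);
}

console.log(`Copilot config: ${getCopilotConfigPath()}`);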
*/\n tools?: string[];\n /** Server type: \"local\" | \"stdio\" | \"http\" | \"sse\" */\n type?: 'local' | 'stdio' | 'http' | 'sse';\n /** Optional timeout in milliseconds */\n timeout?: number;\n /** Whether the server is enabled */\n enabled?: boolean;\n}\n\n/**\n * Configuration for local/stdio MCP servers.\n * These servers are spawned as child processes.\n */\nexport interface MCPLocalServerConfig extends MCPServerConfigBase {\n /** Server type: \"local\" or \"stdio\" (default if not specified) */\n type?: 'local' | 'stdio';\n /** Server command or executable path */\n command: string;\n /** Arguments to pass to the server */\n args?: string[];\n /** Environment variables for the server */\n env?: Record<string, string>;\n /** Working directory for the server process */\n cwd?: string;\n}\n\n/**\n * Configuration for remote MCP servers (HTTP or SSE).\n * These servers are accessed over the network.\n */\nexport interface MCPRemoteServerConfig extends MCPServerConfigBase {\n /** Server type: \"http\" or \"sse\" */\n type: 'http' | 'sse';\n /** URL of the remote server */\n url: string;\n /** Optional HTTP headers for authentication or other purposes */\n headers?: Record<string, string>;\n}\n\n/**\n * MCP (Model Context Protocol) server configuration.\n * Supports both local (command-based) and remote (HTTP/SSE) servers.\n * \n * @example Local server\n * ```typescript\n * const localServer: MCPServerConfig = {\n * type: 'local',\n * command: 'my-mcp-server',\n * args: ['--port', '8080'],\n * tools: ['*']\n * };\n * ```\n * \n * @example Remote SSE server\n * ```typescript\n * const remoteServer: MCPServerConfig = {\n * type: 'sse',\n * url: 'http://localhost:8000/sse',\n * headers: { 'Authorization': 'Bearer token' },\n * tools: ['*']\n * };\n * ```\n */\nexport type MCPServerConfig = MCPLocalServerConfig | MCPRemoteServerConfig;\n\n/**\n * Options for controlling MCP tools at the session level.\n * These options map directly to the SDK's SessionConfig parameters.\n * \n * Tool filtering behavior:\n * - If `availableTools` is specified, only those tools are available (whitelist mode)\n * - If `excludedTools` is specified, those tools are disabled (blacklist mode)\n * - `availableTools` takes precedence over `excludedTools` if both are specified\n * - If neither is specified, all tools are available (default SDK behavior)\n */\nexport interface MCPControlOptions {\n /**\n * Whitelist of tool names to make available.\n * When specified, only these tools will be available for the session.\n * Takes precedence over `excludedTools`.\n * \n * @example ['bash', 'view', 'edit'] - Only allow these specific tools\n */\n availableTools?: string[];\n\n /**\n * Blacklist of tool names to exclude.\n * When specified, these tools will be disabled for the session.\n * Ignored if `availableTools` is also specified.\n * \n * @example ['github_*', 'mcp_*'] - Disable all github and mcp tools\n */\n excludedTools?: string[];\n\n /**\n * Custom MCP server configurations.\n * Allows overriding or adding MCP servers for the session.\n * Pass an empty object `{}` to disable all MCP servers.\n * \n * @example { 'my-server': { command: 'my-mcp-server', args: ['--port', '8080'] } }\n */\n mcpServers?: Record<string, MCPServerConfig>;\n}\n\n/**\n * Options for sending a message via the SDK\n */\nexport interface SendMessageOptions {\n /** The prompt to send */\n prompt: string;\n /** Optional model override (e.g., 'gpt-5', 'claude-sonnet-4.5') */\n model?: string;\n /** Optional working directory for context (set 
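// Illustration of the tool-filtering rules documented for MCPControlOptions:
// availableTools is a whitelist and takes precedence over excludedTools. The
// tool names are the examples used in the doc comments above.
const whitelistOnly: MCPControlOptions = {
  availableTools: ['bash', 'view', 'edit'],   // only these tools are exposed
};

const blacklistOnly: MCPControlOptions = {
  excludedTools: ['github_*', 'mcp_*'],       // everything except these patterns
};

const both: MCPControlOptions = {
  availableTools: ['view'],
  excludedTools: ['view'],                    // ignored: availableTools wins
};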
at client level) */\n workingDirectory?: string;\n /** Optional timeout in milliseconds (default: 1800000 = 30 minutes) */\n timeoutMs?: number;\n /** Use session pool for efficient parallel requests (default: false) */\n usePool?: boolean;\n /** Enable streaming for real-time response chunks (default: false) */\n streaming?: boolean;\n\n // ========================================================================\n // MCP Control Options (Session-level tool filtering)\n // ========================================================================\n\n /**\n * Whitelist of tool names to make available.\n * When specified, only these tools will be available for the session.\n * Takes precedence over `excludedTools`.\n * \n * Note: Only applies to direct sessions (usePool: false).\n * Session pool sessions use default tool configuration.\n * \n * @example ['bash', 'view', 'edit'] - Only allow these specific tools\n */\n availableTools?: string[];\n\n /**\n * Blacklist of tool names to exclude.\n * When specified, these tools will be disabled for the session.\n * Ignored if `availableTools` is also specified.\n * \n * Note: Only applies to direct sessions (usePool: false).\n * Session pool sessions use default tool configuration.\n * \n * @example ['github_*', 'mcp_*'] - Disable all github and mcp tools\n */\n excludedTools?: string[];\n\n /**\n * Custom MCP server configurations.\n * Allows overriding or adding MCP servers for the session.\n * Pass an empty object `{}` to disable all MCP servers.\n * \n * Note: Only applies to direct sessions (usePool: false).\n * Session pool sessions use default MCP configuration.\n * \n * @example { 'my-server': { command: 'my-mcp-server', args: ['--port', '8080'] } }\n */\n mcpServers?: Record<string, MCPServerConfig>;\n\n /**\n * Whether to automatically load MCP server configuration from ~/.copilot/mcp-config.json.\n * When enabled, the default config is loaded and merged with any explicit mcpServers option.\n * Explicit mcpServers take precedence over the default config.\n * \n * Note: Only applies to direct sessions (usePool: false).\n * Session pool sessions do not load default MCP config.\n * \n * @default true\n */\n loadDefaultMcpConfig?: boolean;\n\n /**\n * Handler for permission requests from the Copilot CLI.\n * When the AI needs permission to perform operations (file reads/writes, shell commands, etc.),\n * this handler is called to approve or deny the request.\n * \n * Without a handler, all permission requests are denied by default.\n * \n * Note: Only applies to direct sessions (usePool: false).\n * Session pool sessions use default permission handling (deny all).\n * \n * @example\n * // Approve all permissions\n * onPermissionRequest: () => ({ kind: 'approved' })\n * \n * @example\n * // Selective approval\n * onPermissionRequest: (request) => {\n * if (request.kind === 'read') return { kind: 'approved' };\n * return { kind: 'denied-by-rules' };\n * }\n */\n onPermissionRequest?: PermissionHandler;\n\n /**\n * Callback invoked for each streaming chunk as it arrives from the SDK.\n * When provided, streaming mode is automatically enabled.\n * \n * The callback receives each `assistant.message_delta` chunk in real-time,\n * enabling true streaming to web UIs via SSE or WebSocket.\n * \n * Callback errors are caught and logged but do not break the streaming flow.\n * The final return value of `sendMessage()` still contains the full response.\n * \n * Note: Only works with direct sessions (usePool: false), since pool sessions\n * don't support 
per-request streaming configuration.\n * \n * @example\n * ```typescript\n * const result = await service.sendMessage({\n * prompt: 'Analyze this code',\n * onStreamingChunk: (chunk) => {\n * res.write(`data: ${JSON.stringify({ type: 'chunk', content: chunk })}\\n\\n`);\n * },\n * });\n * // result.response still contains the full response\n * ```\n */\n onStreamingChunk?: (chunk: string) => void;\n}\n\n/**\n * Aggregated token usage data from SDK events.\n *\n * Accumulated from `assistant.usage` events (per-turn) and\n * `session.usage_info` events (session-level quota info).\n */\nexport interface TokenUsage {\n /** Total input tokens consumed across all turns */\n inputTokens: number;\n /** Total output tokens generated across all turns */\n outputTokens: number;\n /** Total cache-read tokens across all turns */\n cacheReadTokens: number;\n /** Total cache-write tokens across all turns */\n cacheWriteTokens: number;\n /** Sum of inputTokens + outputTokens */\n totalTokens: number;\n /** Cumulative cost across all turns (if reported by the SDK) */\n cost?: number;\n /** Cumulative duration in ms across all turns (if reported by the SDK) */\n duration?: number;\n /** Number of assistant.usage events received (one per turn) */\n turnCount: number;\n /** Session-level token limit (last seen from session.usage_info) */\n tokenLimit?: number;\n /** Session-level current token count (last seen from session.usage_info) */\n currentTokens?: number;\n}\n\n/**\n * Result from SDK invocation, extends AIInvocationResult with SDK-specific fields\n */\nexport interface SDKInvocationResult extends AIInvocationResult {\n /** Session ID used for this request (if session was created) */\n sessionId?: string;\n /** Raw SDK response data */\n rawResponse?: unknown;\n /** Aggregated token usage data (undefined when no usage events were received) */\n tokenUsage?: TokenUsage;\n}\n\n/**\n * Internal result from sendWithStreaming, including token usage.\n */\ninterface StreamingResult {\n response: string;\n tokenUsage?: TokenUsage;\n /** Number of assistant turns completed during the session.\n * A value > 0 with an empty response indicates the AI performed\n * work via tool execution (file edits, shell commands) without\n * producing a text summary. 
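// Sketch of consuming the aggregated TokenUsage attached to an
// SDKInvocationResult. tokenUsage is undefined when no usage events were
// received, so callers should guard for that; the formatting below is
// illustrative only.
function describeUsage(result: SDKInvocationResult): string {
  if (!result.tokenUsage) {
    return 'no usage events received';
  }
  const u = result.tokenUsage;
  const quota = u.tokenLimit !== undefined && u.currentTokens !== undefined
    ? ` (${u.currentTokens}/${u.tokenLimit} session tokens)`
    : '';
  return `${u.turnCount} turn(s), ${u.inputTokens} in / ${u.outputTokens} out, ` +
         `total ${u.totalTokens}${quota}`;
}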
*/\n turnCount: number;\n}\n\n/**\n * SDK availability check result\n */\nexport interface SDKAvailabilityResult {\n /** Whether the SDK is available and can be used */\n available: boolean;\n /** Path to the SDK if found */\n sdkPath?: string;\n /** Error message if not available */\n error?: string;\n}\n\n/**\n * Options for creating a CopilotClient\n */\ninterface ICopilotClientOptions {\n /** Working directory for the CLI process */\n cwd?: string;\n}\n\n/**\n * Permission request from the Copilot CLI.\n * Maps to SDK's PermissionRequest interface.\n */\nexport interface PermissionRequest {\n /** Type of permission being requested */\n kind: 'shell' | 'write' | 'mcp' | 'read' | 'url';\n /** Associated tool call ID (if applicable) */\n toolCallId?: string;\n /** Additional request-specific data */\n [key: string]: unknown;\n}\n\n/**\n * Result of a permission request.\n * Maps to SDK's PermissionRequestResult interface.\n */\nexport interface PermissionRequestResult {\n /** The decision kind */\n kind: 'approved' | 'denied-by-rules' | 'denied-no-approval-rule-and-could-not-request-from-user' | 'denied-interactively-by-user';\n /** Optional rules that led to this decision */\n rules?: unknown[];\n}\n\n/**\n * Handler function for permission requests.\n */\nexport type PermissionHandler = (\n request: PermissionRequest,\n invocation: { sessionId: string }\n) => Promise<PermissionRequestResult> | PermissionRequestResult;\n\n/**\n * Options for creating a session.\n * Maps to the SDK's SessionConfig interface.\n */\ninterface ISessionOptions {\n /** AI model to use (e.g., 'gpt-5', 'claude-sonnet-4.5') */\n model?: string;\n /** Enable streaming for real-time response chunks */\n streaming?: boolean;\n /** Whitelist of tool names to make available (takes precedence over excludedTools) */\n availableTools?: string[];\n /** Blacklist of tool names to exclude */\n excludedTools?: string[];\n /** Custom MCP server configurations */\n mcpServers?: Record<string, MCPServerConfig>;\n /** Handler for permission requests from the CLI */\n onPermissionRequest?: PermissionHandler;\n}\n\n/**\n * Interface for the CopilotClient from @github/copilot-sdk\n * We define this interface to avoid direct type dependency on the SDK\n */\ninterface ICopilotClient {\n createSession(options?: ISessionOptions): Promise<ICopilotSession>;\n stop(): Promise<void>;\n}\n\n/**\n * Interface for the CopilotSession from @github/copilot-sdk\n */\ninterface ICopilotSession {\n sessionId: string;\n /**\n * Send a message and wait for the session to become idle.\n * @param options - Message options including prompt\n * @param timeout - Timeout in milliseconds (SDK default: 60000)\n */\n sendAndWait(options: { prompt: string }, timeout?: number): Promise<{ data?: { content?: string } }>;\n destroy(): Promise<void>;\n /** Event handler for streaming responses. Returns an unsubscribe function. */\n on?(handler: (event: ISessionEvent) => void): (() => void);\n /** Send a message without waiting (for streaming) */\n send?(options: { prompt: string }): Promise<void>;\n}\n\n/**\n * Interface for session events (streaming)\n * \n * The Copilot SDK fires events with `type` as a plain string (e.g., \"session.idle\"),\n * not as an object with a `.value` property.\n * \n * Known event types:\n * - \"session.idle\" - Session finished processing (data: {})\n * - \"session.error\" - Session error (data: { message, stack? 
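// A hedged sketch of a PermissionHandler matching the types above: read and
// url requests are approved, everything else (shell, write, mcp) is denied by
// rule. The logging is illustrative; real handlers might prompt the user
// instead of deciding silently.
const readOnlyPermissions: PermissionHandler = (request, invocation) => {
  const safe = request.kind === 'read' || request.kind === 'url';
  console.log(`[${invocation.sessionId}] ${request.kind} -> ${safe ? 'approved' : 'denied'}`);
  return { kind: safe ? 'approved' : 'denied-by-rules' };
};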
})\n * - \"assistant.message\" - Final assistant message (data: { messageId, content })\n * - \"assistant.message_delta\" - Streaming chunk (data: { messageId, deltaContent })\n * - \"assistant.turn_end\" - Turn ended (data: { turnId })\n * - \"assistant.usage\" - Per-turn token usage (data: { inputTokens, outputTokens, ... })\n * - \"session.usage_info\" - Session-level quota info (data: { tokenLimit, currentTokens })\n * \n * Completion detection order:\n * 1. `session.idle` settles immediately\n * 2. `assistant.turn_end` starts a 500ms grace period, then settles if content exists\n */\ninterface ISessionEvent {\n type: string;\n data?: {\n content?: string;\n deltaContent?: string;\n message?: string;\n stack?: string;\n turnId?: string;\n // Token usage fields (from assistant.usage)\n inputTokens?: number;\n outputTokens?: number;\n cacheReadTokens?: number;\n cacheWriteTokens?: number;\n cost?: number;\n duration?: number;\n // Session quota fields (from session.usage_info)\n tokenLimit?: number;\n currentTokens?: number;\n };\n}\n\n/**\n * Configuration options for the session pool.\n * These are passed to the service to avoid VS Code dependencies.\n */\nexport interface SessionPoolConfig {\n /** Maximum number of concurrent sessions in the pool (default: 5) */\n maxSessions?: number;\n /** Idle timeout in milliseconds before sessions are destroyed (default: 300000 = 5 minutes) */\n idleTimeoutMs?: number;\n}\n\n/**\n * Default session pool configuration values.\n * These match the VS Code setting defaults.\n */\nexport const DEFAULT_SESSION_POOL_CONFIG: Required<SessionPoolConfig> = {\n maxSessions: 5,\n idleTimeoutMs: 300000\n};\n\n/**\n * Singleton service for interacting with the Copilot SDK.\n * \n * Creates a new client per working directory since the SDK's `cwd` option\n * is set at client initialization time (not per-request).\n *\n * Usage:\n * ```typescript\n * const service = CopilotSDKService.getInstance();\n * if (await service.isAvailable()) {\n * // Request with working directory (creates client with cwd, then session)\n * const result = await service.sendMessage({ \n * prompt: 'Hello',\n * workingDirectory: '/path/to/project'\n * });\n * }\n * ```\n */\nexport class CopilotSDKService {\n private static instance: CopilotSDKService | null = null;\n\n private client: ICopilotClient | null = null;\n private clientCwd: string | undefined = undefined;\n private sdkModule: { CopilotClient: new (options?: ICopilotClientOptions) => ICopilotClient } | null = null;\n private initializationPromise: Promise<void> | null = null;\n private availabilityCache: SDKAvailabilityResult | null = null;\n private sessionPool: SessionPool | null = null;\n private sessionPoolConfig: Required<SessionPoolConfig> = { ...DEFAULT_SESSION_POOL_CONFIG };\n private disposed = false;\n\n /** Map of active sessions for cancellation support */\n private activeSessions: Map<string, ICopilotSession> = new Map();\n\n /** Default timeout for SDK requests */\n private static readonly DEFAULT_TIMEOUT_MS = DEFAULT_AI_TIMEOUT_MS;\n\n private constructor() {\n // Private constructor for singleton pattern\n }\n\n /**\n * Get the singleton instance of CopilotSDKService\n */\n public static getInstance(): CopilotSDKService {\n if (!CopilotSDKService.instance) {\n CopilotSDKService.instance = new CopilotSDKService();\n }\n return CopilotSDKService.instance;\n }\n\n /**\n * Reset the singleton instance (primarily for testing)\n */\n public static resetInstance(): void {\n if (CopilotSDKService.instance) {\n 
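// Simplified sketch (not the actual sendWithStreaming implementation) of the
// completion-detection order documented above: session.idle settles
// immediately, while assistant.turn_end settles after a 500ms grace period and
// only once some content has arrived. It assumes the optional on()/send()
// members are present, as the real code checks before taking this path.
function waitForStreamedResponse(session: ICopilotSession, prompt: string): Promise<string> {
  return new Promise<string>((resolve, reject) => {
    let content = '';
    let graceTimer: ReturnType<typeof setTimeout> | undefined;

    const unsubscribe = session.on!((event: ISessionEvent) => {
      switch (event.type) {
        case 'assistant.message_delta':
          content += event.data?.deltaContent ?? '';      // streaming chunk
          break;
        case 'assistant.message':
          content = event.data?.content ?? content;       // final message wins
          break;
        case 'session.idle':                              // 1. settle immediately
          if (graceTimer) { clearTimeout(graceTimer); }
          unsubscribe();
          resolve(content);
          break;
        case 'assistant.turn_end':                        // 2. settle after the grace period
          if (graceTimer) { clearTimeout(graceTimer); }
          graceTimer = setTimeout(() => {
            if (content) {
              unsubscribe();
              resolve(content);
            }
          }, 500);
          break;
        case 'session.error':
          if (graceTimer) { clearTimeout(graceTimer); }
          unsubscribe();
          reject(new Error(event.data?.message ?? 'Copilot session error'));
          break;
      }
    });

    session.send!({ prompt }).catch(reject);
  });
}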
CopilotSDKService.instance.dispose();\n CopilotSDKService.instance = null;\n }\n }\n\n /**\n * Configure the session pool settings.\n * Call this before using the session pool to override default values.\n * Typically called during extension activation with values from VS Code settings.\n *\n * @param config Session pool configuration\n */\n public configureSessionPool(config: SessionPoolConfig): void {\n this.sessionPoolConfig = {\n maxSessions: config.maxSessions ?? DEFAULT_SESSION_POOL_CONFIG.maxSessions,\n idleTimeoutMs: config.idleTimeoutMs ?? DEFAULT_SESSION_POOL_CONFIG.idleTimeoutMs\n };\n\n const logger = getLogger();\n logger.debug(\n LogCategory.AI,\n `CopilotSDKService: Session pool configured with maxSessions=${this.sessionPoolConfig.maxSessions}, idleTimeoutMs=${this.sessionPoolConfig.idleTimeoutMs}`\n );\n }\n\n /**\n * Check if the Copilot SDK is available and can be used.\n * Results are cached after the first check.\n *\n * @returns Availability result with status and optional error\n */\n public async isAvailable(): Promise<SDKAvailabilityResult> {\n if (this.disposed) {\n return { available: false, error: 'Service has been disposed' };\n }\n\n if (this.availabilityCache) {\n return this.availabilityCache;\n }\n\n const logger = getLogger();\n logger.debug(LogCategory.AI, 'CopilotSDKService: Checking SDK availability');\n\n try {\n const sdkPath = this.findSDKPath();\n if (!sdkPath) {\n this.availabilityCache = {\n available: false,\n error: 'Copilot SDK not found. Please ensure @github/copilot-sdk is installed.'\n };\n logger.debug(LogCategory.AI, 'CopilotSDKService: SDK not found');\n return this.availabilityCache;\n }\n\n // Try to load the SDK module to verify it works\n await this.loadSDKModule(sdkPath);\n\n this.availabilityCache = {\n available: true,\n sdkPath\n };\n logger.debug(LogCategory.AI, `CopilotSDKService: SDK available at: ${sdkPath}`);\n return this.availabilityCache;\n\n } catch (error) {\n const errorMessage = error instanceof Error ? error.message : String(error);\n this.availabilityCache = {\n available: false,\n error: `Failed to load Copilot SDK: ${errorMessage}`\n };\n logger.error(LogCategory.AI, 'CopilotSDKService: SDK availability check failed', error instanceof Error ? 
error : undefined);\n return this.availabilityCache;\n }\n }\n\n /**\n * Clear the availability cache, forcing a re-check on next isAvailable() call.\n * Useful when the SDK might have been installed after initial check.\n */\n public clearAvailabilityCache(): void {\n this.availabilityCache = null;\n }\n\n /**\n * Ensure the SDK client is initialized with the specified working directory.\n * If the working directory changes, a new client is created.\n * Uses lazy initialization to avoid startup overhead.\n *\n * @param cwd Optional working directory for the client\n * @throws Error if SDK is not available or initialization fails\n */\n public async ensureClient(cwd?: string): Promise<ICopilotClient> {\n if (this.disposed) {\n throw new Error('CopilotSDKService has been disposed');\n }\n\n // Check if we can reuse the existing client (same cwd)\n if (this.client && this.clientCwd === cwd) {\n return this.client;\n }\n\n // If cwd changed, stop the old client first\n if (this.client && this.clientCwd !== cwd) {\n const logger = getLogger();\n logger.debug(LogCategory.AI, `CopilotSDKService: Working directory changed from '${this.clientCwd}' to '${cwd}', creating new client`);\n try {\n await this.client.stop();\n } catch (error) {\n logger.debug(LogCategory.AI, `CopilotSDKService: Warning: Error stopping old client: ${error}`);\n }\n this.client = null;\n this.clientCwd = undefined;\n }\n\n // Use a promise to prevent concurrent initialization\n if (this.initializationPromise) {\n await this.initializationPromise;\n if (this.client && this.clientCwd === cwd) {\n return this.client;\n }\n }\n\n const logger = getLogger();\n logger.debug(LogCategory.AI, `CopilotSDKService: Initializing SDK client with cwd: ${cwd || '(default)'}`);\n\n this.initializationPromise = this.initializeClient(cwd);\n await this.initializationPromise;\n this.initializationPromise = null;\n\n if (!this.client) {\n throw new Error('Failed to initialize Copilot SDK client');\n }\n\n return this.client;\n }\n\n /**\n * Send a message to Copilot via the SDK.\n * By default, creates a new session for each request (session-per-request pattern).\n * When usePool is true, uses the session pool for efficient parallel requests.\n *\n * @param options Message options including prompt and optional settings\n * @returns Invocation result with response or error\n */\n public async sendMessage(options: SendMessageOptions): Promise<SDKInvocationResult> {\n if (options.usePool) {\n return this.sendMessageWithPool(options);\n }\n return this.sendMessageDirect(options);\n }\n\n /**\n * Send a message using a session from the pool.\n * This is more efficient for parallel workloads as sessions are reused.\n *\n * @param options Message options including prompt and optional settings\n * @returns Invocation result with response or error\n */\n private async sendMessageWithPool(options: SendMessageOptions): Promise<SDKInvocationResult> {\n const logger = getLogger();\n const startTime = Date.now();\n\n // Check availability first\n const availability = await this.isAvailable();\n if (!availability.available) {\n return {\n success: false,\n error: availability.error || 'Copilot SDK is not available'\n };\n }\n\n let session: IPoolableSession | null = null;\n let shouldDestroySession = false;\n\n try {\n const pool = await this.ensureSessionPool();\n const timeoutMs = options.timeoutMs ?? 
CopilotSDKService.DEFAULT_TIMEOUT_MS;\n\n logger.debug(LogCategory.AI, 'CopilotSDKService: Acquiring session from pool');\n session = await pool.acquire(timeoutMs);\n logger.debug(LogCategory.AI, `CopilotSDKService [${session.sessionId}]: Acquired session from pool`);\n\n // Send the message with timeout\n const result = await this.sendWithTimeout(session, options.prompt, timeoutMs);\n\n const response = result?.data?.content || '';\n const durationMs = Date.now() - startTime;\n\n logger.debug(LogCategory.AI, `CopilotSDKService [${session.sessionId}]: Pooled request completed in ${durationMs}ms`);\n\n if (!response) {\n return {\n success: false,\n error: 'No response received from Copilot SDK',\n sessionId: session.sessionId\n };\n }\n\n return {\n success: true,\n response,\n sessionId: session.sessionId,\n rawResponse: result\n };\n\n } catch (error) {\n const errorMessage = error instanceof Error ? error.message : String(error);\n const durationMs = Date.now() - startTime;\n\n logger.error(LogCategory.AI, `CopilotSDKService [${session?.sessionId ?? 'no-session'}]: Pooled request failed after ${durationMs}ms`, error instanceof Error ? error : undefined);\n\n // Mark session for destruction on error (don't reuse potentially broken sessions)\n shouldDestroySession = true;\n\n return {\n success: false,\n error: `Copilot SDK error: ${errorMessage}`,\n sessionId: session?.sessionId\n };\n\n } finally {\n // Release or destroy session\n if (session && this.sessionPool) {\n if (shouldDestroySession) {\n try {\n await this.sessionPool.destroy(session);\n logger.debug(LogCategory.AI, `CopilotSDKService [${session.sessionId}]: Session destroyed after error`);\n } catch (destroyError) {\n logger.debug(LogCategory.AI, `CopilotSDKService [${session.sessionId}]: Warning: Error destroying session: ${destroyError}`);\n }\n } else {\n this.sessionPool.release(session);\n logger.debug(LogCategory.AI, `CopilotSDKService [${session.sessionId}]: Session released back to pool`);\n }\n }\n }\n }\n\n /**\n * Send a message directly (creates client with cwd, creates session, destroys session).\n * This creates a fresh client with the specified working directory.\n *\n * @param options Message options including prompt and optional settings\n * @returns Invocation result with response or error\n */\n private async sendMessageDirect(options: SendMessageOptions): Promise<SDKInvocationResult> {\n const logger = getLogger();\n const startTime = Date.now();\n\n // Check availability first\n const availability = await this.isAvailable();\n if (!availability.available) {\n return {\n success: false,\n error: availability.error || 'Copilot SDK is not available'\n };\n }\n\n let session: ICopilotSession | null = null;\n\n try {\n // Create/reuse client with the specified working directory\n const client = await this.ensureClient(options.workingDirectory);\n\n // Build session options\n const sessionOptions: ISessionOptions = {};\n if (options.model) {\n sessionOptions.model = options.model;\n }\n if (options.streaming) {\n sessionOptions.streaming = options.streaming;\n }\n\n // MCP control options (tool filtering)\n if (options.availableTools) {\n sessionOptions.availableTools = options.availableTools;\n }\n if (options.excludedTools) {\n sessionOptions.excludedTools = options.excludedTools;\n }\n\n // Load and merge MCP server configurations\n // Default is to load from ~/.copilot/mcp-config.json unless explicitly disabled\n const shouldLoadDefaultMcp = options.loadDefaultMcpConfig !== false;\n if (shouldLoadDefaultMcp 
|| options.mcpServers !== undefined) {\n let finalMcpServers: Record<string, MCPServerConfig> | undefined;\n\n if (shouldLoadDefaultMcp) {\n // Load default config from ~/.copilot/mcp-config.json\n const defaultConfig = loadDefaultMcpConfig();\n logger.debug(LogCategory.AI, `CopilotSDKService: Default MCP config load result: success=${defaultConfig.success}, fileExists=${defaultConfig.fileExists}, serverCount=${Object.keys(defaultConfig.mcpServers).length}`);\n if (defaultConfig.error) {\n logger.debug(LogCategory.AI, `CopilotSDKService: Default MCP config error: ${defaultConfig.error}`);\n }\n if (defaultConfig.success && Object.keys(defaultConfig.mcpServers).length > 0) {\n logger.debug(LogCategory.AI, `CopilotSDKService: Loaded ${Object.keys(defaultConfig.mcpServers).length} default MCP server(s): ${JSON.stringify(defaultConfig.mcpServers)}`);\n }\n // Merge with explicit config (explicit takes precedence)\n finalMcpServers = mergeMcpConfigs(defaultConfig.mcpServers, options.mcpServers);\n } else if (options.mcpServers !== undefined) {\n // Only use explicit config\n finalMcpServers = options.mcpServers;\n }\n\n if (finalMcpServers && Object.keys(finalMcpServers).length > 0) {\n sessionOptions.mcpServers = finalMcpServers;\n logger.debug(LogCategory.AI, `CopilotSDKService: Using ${Object.keys(finalMcpServers).length} MCP server(s): ${Object.keys(finalMcpServers).join(', ')}`);\n logger.debug(LogCategory.AI, `CopilotSDKService: MCP servers config: ${JSON.stringify(finalMcpServers)}`);\n } else if (options.mcpServers !== undefined && Object.keys(options.mcpServers).length === 0) {\n // Explicit empty object means disable all MCP servers\n sessionOptions.mcpServers = {};\n logger.debug(LogCategory.AI, 'CopilotSDKService: MCP servers explicitly disabled');\n }\n }\n\n // Permission handler\n if (options.onPermissionRequest) {\n sessionOptions.onPermissionRequest = options.onPermissionRequest;\n }\n\n const sessionOptionsStr = Object.keys(sessionOptions).length > 0 \n ? JSON.stringify(sessionOptions) \n : '(default)';\n logger.debug(LogCategory.AI, `CopilotSDKService: Creating session (cwd: ${options.workingDirectory || '(default)'}, options: ${sessionOptionsStr})`);\n\n session = await client.createSession(sessionOptions);\n logger.debug(LogCategory.AI, `CopilotSDKService: Session created: ${session.sessionId}`);\n\n // Track the session for potential cancellation\n this.trackSession(session);\n\n // Send the message with timeout\n const timeoutMs = options.timeoutMs ?? 
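// Usage sketches for the MCP-related options handled in sendMessageDirect()
// above. The prompts and the 'tracker' server definition are illustrative, not
// real servers.
async function mcpOptionExamples(): Promise<void> {
  const service = CopilotSDKService.getInstance();

  // Default: servers from ~/.copilot/mcp-config.json are loaded and merged in.
  await service.sendMessage({ prompt: 'List the MCP tools you can use.' });

  // Explicit servers are merged on top of the defaults (explicit wins on name clashes).
  await service.sendMessage({
    prompt: 'Query the issue tracker.',
    mcpServers: { tracker: { type: 'local', command: 'tracker-mcp', tools: ['*'] } },
  });

  // An explicit empty object disables every MCP server for this request.
  await service.sendMessage({ prompt: 'Plain completion only.', mcpServers: {} });

  // Do not read ~/.copilot/mcp-config.json for this request.
  await service.sendMessage({ prompt: 'No default servers.', loadDefaultMcpConfig: false });
}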
CopilotSDKService.DEFAULT_TIMEOUT_MS;\n\n // Use streaming mode if enabled and supported, OR if timeout > 120s,\n // OR if an onStreamingChunk callback is provided\n // (SDK's sendAndWait has hardcoded 120s timeout for session.idle)\n let response: string;\n let tokenUsage: TokenUsage | undefined;\n let turnCount = 0;\n if ((options.streaming || options.onStreamingChunk || timeoutMs > 120000) && session.on && session.send) {\n const streamingResult = await this.sendWithStreaming(session, options.prompt, timeoutMs, options.onStreamingChunk);\n response = streamingResult.response;\n tokenUsage = streamingResult.tokenUsage;\n turnCount = streamingResult.turnCount;\n } else {\n const result = await this.sendWithTimeout(session, options.prompt, timeoutMs);\n response = result?.data?.content || '';\n }\n\n const durationMs = Date.now() - startTime;\n\n logger.debug(LogCategory.AI, `CopilotSDKService [${session.sessionId}]: Request completed in ${durationMs}ms`);\n\n if (!response) {\n // For tool-heavy sessions (e.g., impl skill), the AI may complete\n // all work via tool execution (file edits, shell commands) without\n // producing a text summary. If turns occurred, the work was done\n // successfully \u2014 treat empty text as success, not failure.\n if (turnCount > 0) {\n logger.debug(LogCategory.AI, `CopilotSDKService [${session.sessionId}]: Empty text response but ${turnCount} turns completed \u2014 treating as success (tool-based execution)`);\n return {\n success: true,\n response: '',\n sessionId: session.sessionId,\n tokenUsage,\n };\n }\n return {\n success: false,\n error: 'No response received from Copilot SDK',\n sessionId: session.sessionId,\n tokenUsage,\n };\n }\n\n return {\n success: true,\n response,\n sessionId: session.sessionId,\n tokenUsage,\n };\n\n } catch (error) {\n const errorMessage = error instanceof Error ? error.message : String(error);\n const durationMs = Date.now() - startTime;\n\n logger.error(LogCategory.AI, `CopilotSDKService [${session?.sessionId ?? 'no-session'}]: Request failed after ${durationMs}ms`, error instanceof Error ? 
error : undefined);\n\n return {\n success: false,\n error: `Copilot SDK error: ${errorMessage}`,\n sessionId: session?.sessionId\n };\n\n } finally {\n // Clean up session\n if (session) {\n // Untrack the session first\n this.untrackSession(session.sessionId);\n try {\n await session.destroy();\n logger.debug(LogCategory.AI, `CopilotSDKService [${session.sessionId}]: Session destroyed`);\n } catch (destroyError) {\n logger.debug(LogCategory.AI, `CopilotSDKService [${session.sessionId}]: Warning: Error destroying session: ${destroyError}`);\n }\n }\n }\n }\n\n /**\n * Get the session pool, creating it if necessary.\n * The pool is lazily initialized on first use.\n *\n * @returns The session pool\n * @throws Error if SDK is not available\n */\n private async ensureSessionPool(): Promise<SessionPool> {\n if (this.disposed) {\n throw new Error('CopilotSDKService has been disposed');\n }\n\n if (this.sessionPool) {\n return this.sessionPool;\n }\n\n const logger = getLogger();\n logger.debug(LogCategory.AI, 'CopilotSDKService: Creating session pool');\n\n // Ensure client is initialized first\n const client = await this.ensureClient();\n\n // Create the session pool with a factory that creates sessions from the client\n this.sessionPool = new SessionPool(\n async () => {\n const session = await client.createSession();\n return session as IPoolableSession;\n },\n {\n maxSessions: this.sessionPoolConfig.maxSessions,\n idleTimeoutMs: this.sessionPoolConfig.idleTimeoutMs\n }\n );\n\n logger.debug(LogCategory.AI, 'CopilotSDKService: Session pool created');\n return this.sessionPool;\n }\n\n /**\n * Get statistics about the session pool.\n * Returns null if the pool has not been initialized.\n *\n * @returns Pool statistics or null\n */\n public getPoolStats(): SessionPoolStats | null {\n return this.sessionPool?.getStats() ?? null;\n }\n\n /**\n * Check if the session pool is active.\n *\n * @returns True if the pool exists and is not disposed\n */\n public hasActivePool(): boolean {\n return this.sessionPool !== null && !this.sessionPool.isDisposed();\n }\n\n /**\n * Abort an active session by its ID.\n * This destroys the session and removes it from tracking.\n * Used for cancellation support in the AI Processes panel.\n *\n * @param sessionId The session ID to abort\n * @returns True if the session was found and aborted, false otherwise\n */\n public async abortSession(sessionId: string): Promise<boolean> {\n const logger = getLogger();\n \n const session = this.activeSessions.get(sessionId);\n if (!session) {\n logger.debug(LogCategory.AI, `CopilotSDKService [${sessionId}]: Session not found for abort`);\n return false;\n }\n\n logger.debug(LogCategory.AI, `CopilotSDKService [${sessionId}]: Aborting session`);\n\n try {\n await session.destroy();\n this.activeSessions.delete(sessionId);\n logger.debug(LogCategory.AI, `CopilotSDKService [${sessionId}]: Session aborted successfully`);\n return true;\n } catch (error) {\n logger.error(LogCategory.AI, `CopilotSDKService [${sessionId}]: Error aborting session`, error instanceof Error ? 
error : undefined);\n // Still remove from tracking even if destroy failed\n this.activeSessions.delete(sessionId);\n return false;\n }\n }\n\n /**\n * Check if a session is currently active.\n *\n * @param sessionId The session ID to check\n * @returns True if the session is active\n */\n public hasActiveSession(sessionId: string): boolean {\n return this.activeSessions.has(sessionId);\n }\n\n /**\n * Get the count of currently active sessions.\n *\n * @returns Number of active sessions\n */\n public getActiveSessionCount(): number {\n return this.activeSessions.size;\n }\n\n /**\n * Track an active session for potential cancellation.\n * Called internally when a session is created.\n *\n * @param session The session to track\n */\n private trackSession(session: ICopilotSession): void {\n this.activeSessions.set(session.sessionId, session);\n }\n\n /**\n * Untrack a session (called when session is destroyed normally).\n *\n * @param sessionId The session ID to untrack\n */\n private untrackSession(sessionId: string): void {\n this.activeSessions.delete(sessionId);\n }\n\n /**\n * Clean up resources. Should be called when the extension deactivates.\n */\n public async cleanup(): Promise<void> {\n const logger = getLogger();\n logger.debug(LogCategory.AI, 'CopilotSDKService: Cleaning up SDK service');\n\n // Abort all active sessions first\n const abortPromises: Promise<void>[] = [];\n for (const [sessionId] of this.activeSessions) {\n abortPromises.push(this.abortSession(sessionId).then(() => {}));\n }\n await Promise.allSettled(abortPromises);\n this.activeSessions.clear();\n\n // Dispose session pool\n if (this.sessionPool) {\n try {\n await this.sessionPool.dispose();\n logger.debug(LogCategory.AI, 'CopilotSDKService: Session pool disposed');\n } catch (error) {\n logger.debug(LogCategory.AI, `CopilotSDKService: Warning: Error disposing session pool: ${error}`);\n }\n this.sessionPool = null;\n }\n\n if (this.client) {\n try {\n await this.client.stop();\n logger.debug(LogCategory.AI, 'CopilotSDKService: Client stopped');\n } catch (error) {\n logger.debug(LogCategory.AI, `CopilotSDKService: Warning: Error stopping client: ${error}`);\n }\n this.client = null;\n this.clientCwd = undefined;\n }\n\n this.sdkModule = null;\n this.availabilityCache = null;\n }\n\n /**\n * Dispose of the service and release all resources.\n */\n public dispose(): void {\n this.disposed = true;\n // Fire and forget cleanup\n this.cleanup().catch(() => {\n // Ignore cleanup errors during dispose\n });\n }\n\n /**\n * Find the SDK package path by checking multiple possible locations.\n * This handles both development and packaged extension scenarios.\n */\n private findSDKPath(): string | undefined {\n const possiblePaths = [\n // Development: running from dist/\n path.join(__dirname, '..', 'node_modules', '@github', 'copilot-sdk'),\n // Development: running from out/shortcuts/ai-service\n path.join(__dirname, '..', '..', '..', 'node_modules', '@github', 'copilot-sdk'),\n // Packaged extension\n path.join(__dirname, 'node_modules', '@github', 'copilot-sdk'),\n // Workspace root (for development)\n path.join(__dirname, '..', '..', '..', '..', 'node_modules', '@github', 'copilot-sdk'),\n ];\n\n for (const testPath of possiblePaths) {\n const indexPath = path.join(testPath, 'dist', 'index.js');\n if (fs.existsSync(indexPath)) {\n return testPath;\n }\n }\n\n return undefined;\n }\n\n /**\n * Load the SDK module using ESM dynamic import workaround.\n * This is necessary because webpack transforms import() in ways 
that break ESM loading.\n */\n private async loadSDKModule(sdkPath: string): Promise<void> {\n if (this.sdkModule) {\n return;\n }\n\n const sdkIndexPath = path.join(sdkPath, 'dist', 'index.js');\n\n // Import using file URL for ESM compatibility\n // Use Function constructor to bypass webpack's import() transformation\n const { pathToFileURL } = await import('url');\n const sdkUrl = pathToFileURL(sdkIndexPath).href;\n\n // Bypass webpack's import transformation using Function constructor\n // This is necessary because webpack transforms import() in ways that break ESM loading\n // eslint-disable-next-line @typescript-eslint/no-implied-eval\n const dynamicImport = new Function('specifier', 'return import(specifier)');\n const sdk = await dynamicImport(sdkUrl);\n\n if (!sdk.CopilotClient) {\n throw new Error('CopilotClient not found in SDK module');\n }\n\n this.sdkModule = sdk;\n }\n\n /**\n * Initialize the SDK client with optional working directory.\n * \n * @param cwd Optional working directory for the CLI process\n */\n private async initializeClient(cwd?: string): Promise<void> {\n const sdkPath = this.findSDKPath();\n if (!sdkPath) {\n throw new Error('Copilot SDK not found');\n }\n\n await this.loadSDKModule(sdkPath);\n\n if (!this.sdkModule) {\n throw new Error('SDK module not loaded');\n }\n\n // Create client with cwd option if specified\n const options: ICopilotClientOptions = {};\n if (cwd) {\n options.cwd = cwd;\n // Pre-register the working directory as trusted to bypass the\n // interactive folder trust confirmation dialog\n try {\n ensureFolderTrusted(cwd);\n } catch {\n // Non-fatal: trust dialog will appear if this fails\n }\n }\n\n const logger = getLogger();\n logger.debug(LogCategory.AI, `CopilotSDKService: Creating CopilotClient with options: ${JSON.stringify(options)}`);\n\n this.client = new this.sdkModule.CopilotClient(options);\n this.clientCwd = cwd;\n }\n\n /**\n * Send a message with timeout support (non-streaming).\n * WARNING: SDK's sendAndWait has a hardcoded 120-second timeout for session.idle event.\n * For longer timeouts, use sendWithStreaming instead (automatically done for timeoutMs > 120s).\n */\n private async sendWithTimeout(\n session: ICopilotSession,\n prompt: string,\n timeoutMs: number\n ): Promise<{ data?: { content?: string } }> {\n // Pass timeout directly to SDK's sendAndWait method\n // Note: SDK internally limits this to 120 seconds for the session.idle event\n return session.sendAndWait({ prompt }, timeoutMs);\n }\n\n /**\n * Send a message with streaming support.\n * Accumulates deltaContent chunks until a completion event fires.\n * \n * The Copilot SDK fires events with `event.type` as a plain string:\n * - \"assistant.message_delta\" with `data.deltaContent` for streaming chunks\n * - \"assistant.message\" with `data.content` for the final message\n * - \"assistant.turn_end\" with `data.turnId` when the turn is complete\n * - \"session.idle\" with empty data when the session finishes processing\n * - \"session.error\" with `data.message` for errors\n * \n * Completion is detected by:\n * 1. `session.idle` \u2014 the most explicit signal that the session is done\n * 2. 
`assistant.turn_end` \u2014 indicates the assistant's turn ended; used as a\n * fallback completion signal because some SDK versions may not fire\n * `session.idle` reliably or may delay it significantly.\n * \n * When `assistant.turn_end` fires and we already have content (from deltas\n * or a final message), we schedule a short grace period to allow a\n * `session.idle` or `assistant.message` event to arrive. If nothing else\n * arrives within the grace period, we settle with the content we have.\n */\n private async sendWithStreaming(\n session: ICopilotSession,\n prompt: string,\n timeoutMs: number,\n onStreamingChunk?: (chunk: string) => void\n ): Promise<StreamingResult> {\n return new Promise((resolve, reject) => {\n const logger = getLogger();\n const sid = session.sessionId;\n let response = '';\n // Accumulate ALL assistant.message content across turns.\n // With multi-turn MCP tool usage, the AI may produce multiple messages\n // (e.g., \"Let me read the files...\" on turn 1, then the actual JSON on turn 2+).\n // We keep ALL messages so we don't lose the final output.\n let allMessages: string[] = [];\n let settled = false;\n let turnEndGraceTimer: ReturnType<typeof setTimeout> | null = null;\n let turnCount = 0;\n\n // Token usage accumulator\n let usageInputTokens = 0;\n let usageOutputTokens = 0;\n let usageCacheReadTokens = 0;\n let usageCacheWriteTokens = 0;\n let usageCost: number | undefined;\n let usageDuration: number | undefined;\n let usageTurnCount = 0;\n let usageTokenLimit: number | undefined;\n let usageCurrentTokens: number | undefined;\n\n const cleanup = () => {\n if (unsubscribe) {\n unsubscribe();\n }\n clearTimeout(timeoutId);\n if (turnEndGraceTimer) {\n clearTimeout(turnEndGraceTimer);\n turnEndGraceTimer = null;\n }\n };\n\n const settle = (resolver: (value: StreamingResult) => void, value: StreamingResult) => {\n if (!settled) {\n settled = true;\n cleanup();\n resolver(value);\n }\n };\n\n const settleError = (error: Error) => {\n if (!settled) {\n settled = true;\n cleanup();\n reject(error);\n }\n };\n\n const buildTokenUsage = (): TokenUsage | undefined => {\n if (usageTurnCount === 0) {\n return undefined;\n }\n return {\n inputTokens: usageInputTokens,\n outputTokens: usageOutputTokens,\n cacheReadTokens: usageCacheReadTokens,\n cacheWriteTokens: usageCacheWriteTokens,\n totalTokens: usageInputTokens + usageOutputTokens,\n cost: usageCost,\n duration: usageDuration,\n turnCount: usageTurnCount,\n tokenLimit: usageTokenLimit,\n currentTokens: usageCurrentTokens,\n };\n };\n\n const settleWithResult = () => {\n // Join ALL non-empty messages across turns to preserve the full\n // conversation narrative. For tool-heavy sessions (e.g., impl skill),\n // intermediate messages like \"I'll read the files...\", \"Making changes\n // to X...\", \"All tests pass\" provide valuable context for the final\n // report. Fall back to accumulated delta response if no messages exist.\n const joinedMessages = allMessages.length > 0\n ? 
allMessages.filter(m => m.trim()).join('\\n\\n')\n : '';\n const result = joinedMessages || response;\n logger.debug(LogCategory.AI, `CopilotSDKService [${sid}]: Streaming completed (${result.length} chars, ${turnCount} turns, ${allMessages.length} messages)`);\n settle(resolve, { response: result, tokenUsage: buildTokenUsage(), turnCount });\n };\n\n const timeoutId = setTimeout(() => {\n settleError(new Error(`Request timed out after ${timeoutMs}ms`));\n }, timeoutMs);\n\n // Set up event handler for streaming\n // SDK's session.on() returns an unsubscribe function\n const unsubscribe = session.on!((event: ISessionEvent) => {\n const eventType = event.type;\n\n if (eventType === 'assistant.message_delta') {\n // Accumulate streaming chunks\n const delta = event.data?.deltaContent || '';\n response += delta;\n // Invoke the streaming callback if provided\n if (onStreamingChunk && delta) {\n try {\n onStreamingChunk(delta);\n } catch (cbError) {\n logger.debug(LogCategory.AI, `CopilotSDKService [${sid}]: onStreamingChunk callback error: ${cbError}`);\n }\n }\n } else if (eventType === 'assistant.message') {\n // Accumulate messages across turns.\n // Each turn may produce an assistant.message event.\n // With MCP tools, the first message(s) may be tool-use intent\n // while the final message contains the actual output.\n const messageContent = event.data?.content || '';\n if (messageContent) {\n allMessages.push(messageContent);\n }\n logger.debug(LogCategory.AI, `CopilotSDKService [${sid}]: Received message #${allMessages.length} (${messageContent.length} chars)`);\n // If no delta chunks were received but we have a streaming callback,\n // emit the message as a single chunk so SSE consumers get content\n if (onStreamingChunk && messageContent && !response) {\n try {\n onStreamingChunk(messageContent);\n } catch (cbError) {\n logger.debug(LogCategory.AI, `CopilotSDKService [${sid}]: onStreamingChunk callback error: ${cbError}`);\n }\n }\n } else if (eventType === 'assistant.turn_start') {\n // A new turn is starting \u2014 cancel any pending turn_end grace timer.\n // This is critical for multi-turn MCP tool conversations:\n // after the AI uses tools, the SDK fires turn_end then immediately\n // starts a new turn (turn_start) to process tool results. If we\n // don't cancel the grace timer, we'd settle with just the intent\n // message from the first turn instead of waiting for the full response.\n if (turnEndGraceTimer) {\n clearTimeout(turnEndGraceTimer);\n turnEndGraceTimer = null;\n logger.debug(LogCategory.AI, `CopilotSDKService [${sid}]: Cancelled turn_end grace timer \u2014 new turn starting`);\n }\n } else if (eventType === 'assistant.turn_end') {\n // Turn ended \u2014 the assistant finished its current turn.\n // In multi-turn conversations (MCP tool usage), there can be many turns:\n // Turn 1: AI expresses intent + tool calls \u2192 turn_end \u2192 tool execution \u2192 turn_start\n // Turn 2: AI processes tool results + more tool calls \u2192 turn_end \u2192 tool execution \u2192 turn_start\n // ...\n // Turn N: AI produces final output \u2192 turn_end \u2192 session.idle\n //\n // We prefer settling on session.idle which signals the entire conversation\n // is done. The turn_end grace period is only a safety net for sessions\n // that don't fire session.idle.\n turnCount++;\n logger.debug(LogCategory.AI, `CopilotSDKService [${sid}]: Turn ${turnCount} ended (${allMessages.length} messages so far)`);\n\n // Start a grace timer. 
If a new turn starts (turn_start), this timer\n // will be cancelled. If nothing else happens, we settle after the grace period.\n if (!settled && !turnEndGraceTimer) {\n turnEndGraceTimer = setTimeout(() => {\n turnEndGraceTimer = null;\n if (!settled && (allMessages.length > 0 || response)) {\n logger.debug(LogCategory.AI, `CopilotSDKService [${sid}]: Settling after turn_end grace period (turn ${turnCount})`);\n settleWithResult();\n }\n }, 2000); // 2 second grace period to allow tool execution + new turn\n }\n } else if (eventType === 'session.idle') {\n // Session finished processing \u2014 settle immediately\n logger.debug(LogCategory.AI, `CopilotSDKService [${sid}]: Session idle after ${turnCount} turns`);\n settleWithResult();\n } else if (eventType === 'session.error') {\n // Session error\n const errorMessage = event.data?.message || 'Unknown session error';\n logger.error(LogCategory.AI, `CopilotSDKService [${sid}]: Session error: ${errorMessage}`);\n settleError(new Error(`Copilot session error: ${errorMessage}`));\n } else if (eventType === 'assistant.usage') {\n // Per-turn token usage \u2014 accumulate across turns\n usageTurnCount++;\n usageInputTokens += event.data?.inputTokens ?? 0;\n usageOutputTokens += event.data?.outputTokens ?? 0;\n usageCacheReadTokens += event.data?.cacheReadTokens ?? 0;\n usageCacheWriteTokens += event.data?.cacheWriteTokens ?? 0;\n if (event.data?.cost != null) {\n usageCost = (usageCost ?? 0) + event.data.cost;\n }\n if (event.data?.duration != null) {\n usageDuration = (usageDuration ?? 0) + event.data.duration;\n }\n logger.debug(LogCategory.AI, `CopilotSDKService [${sid}]: Usage turn ${usageTurnCount}: in=${event.data?.inputTokens ?? 0} out=${event.data?.outputTokens ?? 0}`);\n } else if (eventType === 'session.usage_info') {\n // Session-level quota info \u2014 store last-seen values\n if (event.data?.tokenLimit != null) {\n usageTokenLimit = event.data.tokenLimit;\n }\n if (event.data?.currentTokens != null) {\n usageCurrentTokens = event.data.currentTokens;\n }\n logger.debug(LogCategory.AI, `CopilotSDKService [${sid}]: Session usage info: limit=${usageTokenLimit} current=${usageCurrentTokens}`);\n }\n });\n\n // Send the message (without waiting)\n session.send!({ prompt }).catch(error => {\n settleError(error instanceof Error ? 
error : new Error(String(error)));\n });\n });\n }\n}\n\n// ============================================================================\n// Convenience Functions\n// ============================================================================\n\n/**\n * Get the singleton CopilotSDKService instance.\n * Convenience function for cleaner imports.\n */\nexport function getCopilotSDKService(): CopilotSDKService {\n return CopilotSDKService.getInstance();\n}\n\n/**\n * Reset the CopilotSDKService singleton (primarily for testing).\n */\nexport function resetCopilotSDKService(): void {\n CopilotSDKService.resetInstance();\n}\n\n// ============================================================================\n// Permission Handler Helpers\n// ============================================================================\n\n/**\n * Permission handler that approves all permission requests.\n * \n * **WARNING**: This allows the AI to perform any operation without restrictions:\n * - Read/write any file\n * - Execute any shell command\n * - Access any URL\n * - Use any MCP server\n * \n * Only use this in trusted environments or for testing purposes.\n * \n * @example\n * ```typescript\n * const result = await copilotSDKService.sendMessage({\n * prompt: 'List files in the current directory',\n * onPermissionRequest: approveAllPermissions\n * });\n * ```\n */\nexport const approveAllPermissions: PermissionHandler = () => {\n return { kind: 'approved' };\n};\n\n/**\n * Permission handler that denies all permission requests.\n * This is the default behavior when no handler is provided.\n * \n * @example\n * ```typescript\n * const result = await copilotSDKService.sendMessage({\n * prompt: 'Just answer this question',\n * onPermissionRequest: denyAllPermissions\n * });\n * ```\n */\nexport const denyAllPermissions: PermissionHandler = () => {\n return { kind: 'denied-by-rules' };\n};\n", "/**\n * AI Module - Public API\n * \n * Exports AI service components for interacting with the Copilot SDK.\n */\n\n// Types\nexport {\n AIBackendType,\n AIModel,\n VALID_MODELS,\n AIInvocationResult,\n DEFAULT_PROMPTS,\n InteractiveToolType,\n DEFAULT_MODEL_ID,\n // Model registry exports\n ModelDefinition,\n MODEL_REGISTRY,\n getModelLabel,\n getModelDescription,\n getModelDefinition,\n getAllModels,\n getActiveModels,\n isValidModelId,\n getModelCount,\n getModelsByTier\n} from './types';\n\n// AI Command Types\nexport {\n AICommand,\n AICommandMode,\n AICommandsConfig,\n DEFAULT_AI_COMMANDS,\n SerializedAICommand,\n SerializedAIMenuConfig,\n serializeCommand,\n serializeCommands\n} from './command-types';\n\n// Prompt Builder (Pure)\nexport {\n PromptContext,\n substitutePromptVariables,\n buildPromptFromContext,\n usesTemplateVariables,\n getAvailableVariables\n} from './prompt-builder';\n\n// Program Utilities\nexport {\n checkProgramExists,\n clearProgramExistsCache,\n parseCopilotOutput\n} from './program-utils';\n\n// Process Types\nexport {\n AIToolType,\n AIProcessStatus,\n AIProcessType,\n GenericProcessMetadata,\n GenericGroupMetadata,\n TypedProcessOptions,\n ProcessGroupOptions,\n CompleteGroupOptions,\n CodeReviewProcessMetadata,\n DiscoveryProcessMetadata,\n CodeReviewGroupMetadata,\n AIProcess,\n SerializedAIProcess,\n TrackedProcessFields,\n serializeProcess,\n deserializeProcess,\n ProcessEventType,\n ProcessEvent,\n ProcessCounts\n} from './process-types';\n\n// Session Pool\nexport {\n SessionPool,\n IPoolableSession,\n SessionFactory,\n SessionPoolOptions,\n SessionPoolStats\n} from 
'./session-pool';\n\n// CLI Utilities\nexport {\n PROMPT_LENGTH_THRESHOLD,\n PROBLEMATIC_CHARS_PATTERN,\n COPILOT_BASE_FLAGS,\n escapeShellArg,\n shouldUseFileDelivery,\n writePromptToTempFile,\n buildCliCommand,\n BuildCliCommandResult,\n BuildCliCommandOptions\n} from './cli-utils';\n\n// Copilot SDK Service\nexport {\n CopilotSDKService,\n getCopilotSDKService,\n resetCopilotSDKService,\n TokenUsage,\n MCPServerConfigBase,\n MCPLocalServerConfig,\n MCPRemoteServerConfig,\n MCPServerConfig,\n MCPControlOptions,\n SendMessageOptions,\n SDKInvocationResult,\n SDKAvailabilityResult,\n PermissionRequest,\n PermissionRequestResult,\n PermissionHandler,\n SessionPoolConfig,\n DEFAULT_SESSION_POOL_CONFIG,\n approveAllPermissions,\n denyAllPermissions\n} from './copilot-sdk-service';\n\n// Default timeouts\nexport { DEFAULT_AI_TIMEOUT_MS } from './timeouts';\n\n// MCP Config Loader\nexport {\n MCPConfigFile,\n MCPConfigLoadResult,\n getHomeDirectory,\n getMcpConfigPath,\n loadDefaultMcpConfig,\n loadDefaultMcpConfigAsync,\n mergeMcpConfigs,\n clearMcpConfigCache,\n mcpConfigExists,\n getCachedMcpConfig,\n setHomeDirectoryOverride\n} from './mcp-config-loader';\n\n// Trusted Folder Management\nexport {\n ensureFolderTrusted,\n isFolderTrusted,\n getCopilotConfigPath,\n setTrustedFolderHomeOverride\n} from './trusted-folder';\n", "/**\n * Map-Reduce Framework Types\n *\n * Core types and interfaces for the map-reduce AI workflow framework.\n * Provides a reusable execution pipeline for AI map-reduce jobs with support\n * for pluggable splitters, mappers, reducers, and prompt templates.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\n/**\n * A single work item to be processed by the mapper\n */\nexport interface WorkItem<TInput> {\n /** Unique identifier for this work item */\n id: string;\n /** The input data for this work item */\n data: TInput;\n /** Optional metadata about this work item */\n metadata?: Record<string, unknown>;\n}\n\n/**\n * Context provided to mapper functions during execution\n */\nexport interface MapContext {\n /** Unique ID for this map execution */\n executionId: string;\n /** Total number of work items being processed */\n totalItems: number;\n /** Index of this item (0-based) */\n itemIndex: number;\n /** Optional parent group ID for process tracking */\n parentGroupId?: string;\n /** Cancellation token (if supported) */\n isCancelled?: () => boolean;\n}\n\n/**\n * Result from a single map operation\n */\nexport interface MapResult<TMapOutput> {\n /** Work item ID this result corresponds to */\n workItemId: string;\n /** Whether the map operation succeeded */\n success: boolean;\n /** The output from the mapper (if successful) */\n output?: TMapOutput;\n /** Error message (if failed) */\n error?: string;\n /** Time taken for this map operation in ms */\n executionTimeMs: number;\n /** Optional process ID for tracking */\n processId?: string;\n}\n\n/**\n * Context provided to reducer functions during execution\n */\nexport interface ReduceContext {\n /** Unique ID for this reduce execution */\n executionId: string;\n /** Total execution time of map phase in ms */\n mapPhaseTimeMs: number;\n /** Number of successful map operations */\n successfulMaps: number;\n /** Number of failed map operations */\n failedMaps: number;\n /** Optional custom context data */\n customContext?: Record<string, unknown>;\n /** Optional process tracker for AI reduce tracking */\n processTracker?: ProcessTracker;\n /** Optional parent group ID for process tracking */\n 
parentGroupId?: string;\n}\n\n/**\n * Result from the reduce operation\n */\nexport interface ReduceResult<TReduceOutput> {\n /** The final output from the reducer */\n output: TReduceOutput;\n /** Statistics about the reduce operation */\n stats: ReduceStats;\n}\n\n/**\n * Statistics about the reduce phase\n */\nexport interface ReduceStats {\n /** Number of inputs before deduplication/reduction */\n inputCount: number;\n /** Number of outputs after reduction */\n outputCount: number;\n /** Number of items merged/deduplicated */\n mergedCount: number;\n /** Time taken for reduce phase in ms */\n reduceTimeMs: number;\n /** Whether AI-powered reduce was used */\n usedAIReduce: boolean;\n}\n\n/**\n * Mode for the reduce phase\n */\nexport type ReduceMode = 'deterministic' | 'ai' | 'hybrid';\n\n/**\n * Options for map-reduce job execution\n */\nexport interface MapReduceOptions {\n /** Maximum number of concurrent map operations (default: 5) */\n maxConcurrency: number;\n /** Mode for the reduce phase (default: 'deterministic') */\n reduceMode: ReduceMode;\n /** Whether to show progress updates (default: true) */\n showProgress: boolean;\n /** Whether to retry failed map operations (default: false) */\n retryOnFailure: boolean;\n /** Number of retry attempts for failed operations (default: 1) */\n retryAttempts?: number;\n /** \n * Timeout for each map operation in ms (default: 1800000 = 30 minutes).\n * On timeout, the system automatically retries once with doubled timeout value.\n */\n timeoutMs?: number;\n /** Optional job name for display/logging */\n jobName?: string;\n}\n\n/**\n * Default options for map-reduce execution\n */\nimport { DEFAULT_AI_TIMEOUT_MS, DEFAULT_MAX_CONCURRENCY } from '../config/defaults';\n\nexport const DEFAULT_MAP_REDUCE_OPTIONS: MapReduceOptions = {\n maxConcurrency: DEFAULT_MAX_CONCURRENCY,\n reduceMode: 'deterministic',\n showProgress: true,\n retryOnFailure: false,\n retryAttempts: 1,\n timeoutMs: DEFAULT_AI_TIMEOUT_MS\n};\n\n/**\n * Interface for a splitter that divides input into work items\n */\nexport interface Splitter<TInput, TWorkItemData> {\n /**\n * Split the input into work items\n * @param input The input to split\n * @returns Array of work items\n */\n split(input: TInput): WorkItem<TWorkItemData>[];\n}\n\n/**\n * Interface for a mapper that processes individual work items\n */\nexport interface Mapper<TWorkItemData, TMapOutput> {\n /**\n * Process a single work item\n * @param item The work item to process\n * @param context Context for the map operation\n * @returns Promise resolving to the map output\n */\n map(item: WorkItem<TWorkItemData>, context: MapContext): Promise<TMapOutput>;\n}\n\n/**\n * Interface for a reducer that aggregates map outputs\n */\nexport interface Reducer<TMapOutput, TReduceOutput> {\n /**\n * Reduce multiple map outputs into a single result\n * @param results Array of map results\n * @param context Context for the reduce operation\n * @returns Promise resolving to the reduce result\n */\n reduce(\n results: MapResult<TMapOutput>[],\n context: ReduceContext\n ): Promise<ReduceResult<TReduceOutput>>;\n}\n\n/**\n * Interface for a complete map-reduce job\n */\nexport interface MapReduceJob<TInput, TWorkItemData, TMapOutput, TReduceOutput> {\n /** Unique identifier for this job type */\n id: string;\n /** Display name for the job */\n name: string;\n /** Splitter that divides input into work items */\n splitter: Splitter<TInput, TWorkItemData>;\n /** Mapper that processes individual work items */\n mapper: 
Mapper<TWorkItemData, TMapOutput>;\n /** Reducer that aggregates map outputs */\n reducer: Reducer<TMapOutput, TReduceOutput>;\n /** Optional prompt template for map operations */\n promptTemplate?: PromptTemplate;\n /** Job-specific options (merged with defaults) */\n options?: Partial<MapReduceOptions>;\n}\n\n/**\n * Progress callback for tracking job execution\n */\nexport type ProgressCallback = (progress: JobProgress) => void;\n\n/**\n * Progress information during job execution\n */\nexport interface JobProgress {\n /** Current phase of execution */\n phase: 'splitting' | 'mapping' | 'reducing' | 'complete';\n /** Total number of work items */\n totalItems: number;\n /** Number of completed items */\n completedItems: number;\n /** Number of failed items */\n failedItems: number;\n /** Progress percentage (0-100) */\n percentage: number;\n /** Optional message for display */\n message?: string;\n}\n\n/**\n * Result of a map-reduce job execution\n */\nexport interface MapReduceResult<TMapOutput, TReduceOutput> {\n /** Whether the overall job succeeded */\n success: boolean;\n /** The final reduced output */\n output?: TReduceOutput;\n /** Results from individual map operations */\n mapResults: MapResult<TMapOutput>[];\n /** Statistics about the reduce phase */\n reduceStats?: ReduceStats;\n /** Total execution time in ms */\n totalTimeMs: number;\n /** Execution statistics */\n executionStats: ExecutionStats;\n /** Error message if job failed */\n error?: string;\n}\n\n/**\n * Execution statistics for the job\n */\nexport interface ExecutionStats {\n /** Total number of work items */\n totalItems: number;\n /** Number of successful map operations */\n successfulMaps: number;\n /** Number of failed map operations */\n failedMaps: number;\n /** Time spent in map phase */\n mapPhaseTimeMs: number;\n /** Time spent in reduce phase */\n reducePhaseTimeMs: number;\n /** Max concurrency used */\n maxConcurrency: number;\n}\n\n/**\n * Prompt template for generating prompts from work items\n */\nexport interface PromptTemplate {\n /** The template string with {{variable}} placeholders */\n template: string;\n /** Required variables that must be provided */\n requiredVariables: string[];\n /** Optional system prompt */\n systemPrompt?: string;\n /** Optional function to parse the AI response */\n responseParser?: (response: string) => unknown;\n}\n\n/**\n * Options for prompt rendering\n */\nexport interface PromptRenderOptions {\n /** Variables to substitute in the template */\n variables: Record<string, string | number | boolean>;\n /** Whether to include system prompt */\n includeSystemPrompt?: boolean;\n}\n\n/**\n * AI invocation function type\n */\nexport type AIInvoker = (prompt: string, options?: AIInvokerOptions) => Promise<AIInvokerResult>;\n\n/**\n * Options for AI invocation\n */\nexport interface AIInvokerOptions {\n /** Model to use (optional, uses default if not specified) */\n model?: string;\n /** Working directory for execution */\n workingDirectory?: string;\n /** Timeout in ms */\n timeoutMs?: number;\n}\n\n/**\n * Result from AI invocation\n */\nexport interface AIInvokerResult {\n /** Whether the invocation succeeded */\n success: boolean;\n /** The AI response (if successful) */\n response?: string;\n /** Error message (if failed) */\n error?: string;\n /** SDK session ID if the request was made via SDK (for session resume) */\n sessionId?: string;\n /** Token usage data from the SDK (if available) */\n tokenUsage?: 
import('../ai/copilot-sdk-service').TokenUsage;\n}\n\n/**\n * Session metadata for session resume functionality\n */\nexport interface SessionMetadata {\n /** SDK session ID for resuming sessions */\n sessionId?: string;\n /** Backend type used for this process */\n backend?: 'copilot-sdk' | 'copilot-cli' | 'clipboard';\n /** Working directory used for the session */\n workingDirectory?: string;\n}\n\n/**\n * Process tracking hooks for integration with AI process manager\n */\nexport interface ProcessTracker {\n /**\n * Register a new process for tracking\n * @param description Description of the process\n * @param parentGroupId Optional parent group ID\n * @returns Process ID\n */\n registerProcess(description: string, parentGroupId?: string): string;\n\n /**\n * Update process status\n * @param processId Process ID\n * @param status New status\n * @param response Optional response\n * @param error Optional error\n * @param structuredResult Optional structured result (JSON string)\n */\n updateProcess(\n processId: string,\n status: 'running' | 'completed' | 'failed',\n response?: string,\n error?: string,\n structuredResult?: string\n ): void;\n\n /**\n * Attach session metadata to a process for session resume functionality.\n * This should be called after the AI invocation completes with the session ID.\n * @param processId Process ID\n * @param metadata Session metadata (sessionId, backend, workingDirectory)\n */\n attachSessionMetadata?(processId: string, metadata: SessionMetadata): void;\n\n /**\n * Register a group of processes\n * @param description Description of the group\n * @returns Group ID\n */\n registerGroup(description: string): string;\n\n /**\n * Complete a process group\n * @param groupId Group ID\n * @param summary Summary text\n * @param stats Execution statistics\n */\n completeGroup(\n groupId: string,\n summary: string,\n stats: ExecutionStats\n ): void;\n}\n\n/**\n * Callback invoked after each individual map item completes (success or failure).\n * Receives the original work item and its result.\n */\nexport type ItemCompleteCallback<TWorkItemData = unknown, TMapOutput = unknown> = (\n item: WorkItem<TWorkItemData>,\n result: MapResult<TMapOutput>\n) => void;\n\n/**\n * Executor options that combine job options with runtime options\n */\nexport interface ExecutorOptions extends MapReduceOptions {\n /** AI invoker function for map operations */\n aiInvoker: AIInvoker;\n /** Optional process tracker for integration */\n processTracker?: ProcessTracker;\n /** Optional progress callback */\n onProgress?: ProgressCallback;\n /** Optional cancellation check function - returns true if execution should be cancelled */\n isCancelled?: () => boolean;\n /**\n * Optional callback invoked after each individual map item completes.\n * Useful for incremental saving of results (e.g., per-module cache writes).\n * Called for both successful and failed items.\n */\n onItemComplete?: ItemCompleteCallback;\n}\n", "/**\n * ConcurrencyLimiter\n *\n * Controls parallel execution of async tasks with a configurable concurrency limit.\n * Prevents overwhelming APIs with too many simultaneous requests.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport { CancellationError as RuntimeCancellationError, IsCancelledFn } from '../runtime/cancellation';\n\n/**\n * Error thrown when an operation is cancelled.\n * Re-exported from runtime for backward compatibility.\n */\nexport class CancellationError extends RuntimeCancellationError {\n constructor(message = 'Operation 
cancelled') {\n super(message);\n // Keep 'CancellationError' name for backward compatibility\n this.name = 'CancellationError';\n }\n}\n\n/**\n * A limiter that controls the maximum number of concurrent async operations.\n * Uses a queue-based approach to manage pending tasks.\n */\nexport class ConcurrencyLimiter {\n private running = 0;\n private queue: Array<() => void> = [];\n\n /**\n * Creates a new ConcurrencyLimiter\n * @param maxConcurrency Maximum number of concurrent operations (default: 5)\n */\n constructor(private readonly maxConcurrency: number = 5) {\n if (maxConcurrency < 1) {\n throw new Error('maxConcurrency must be at least 1');\n }\n }\n\n /**\n * Get the current number of running tasks\n */\n get runningCount(): number {\n return this.running;\n }\n\n /**\n * Get the current number of queued tasks\n */\n get queuedCount(): number {\n return this.queue.length;\n }\n\n /**\n * Get the maximum concurrency limit\n */\n get limit(): number {\n return this.maxConcurrency;\n }\n\n /**\n * Execute a single async function with concurrency limiting.\n * If the limit is reached, the function will be queued until a slot is available.\n *\n * @param fn The async function to execute\n * @param isCancelled Optional function to check if operation should be cancelled\n * @returns Promise that resolves with the function's result\n */\n async run<T>(fn: () => Promise<T>, isCancelled?: () => boolean): Promise<T> {\n // Check for cancellation before acquiring slot\n if (isCancelled?.()) {\n throw new CancellationError();\n }\n\n await this.acquire();\n\n // Check for cancellation after acquiring slot but before executing\n if (isCancelled?.()) {\n this.release();\n throw new CancellationError();\n }\n\n try {\n return await fn();\n } finally {\n this.release();\n }\n }\n\n /**\n * Execute multiple async tasks with concurrency limiting.\n * Similar to Promise.all but respects the maxConcurrency limit.\n *\n * @param tasks Array of functions that return promises\n * @param isCancelled Optional function to check if operation should be cancelled\n * @returns Promise that resolves with array of results (in same order as input)\n */\n async all<T>(tasks: Array<() => Promise<T>>, isCancelled?: () => boolean): Promise<T[]> {\n return Promise.all(tasks.map(task => this.run(task, isCancelled)));\n }\n\n /**\n * Execute multiple async tasks with concurrency limiting, settling all promises.\n * Similar to Promise.allSettled but respects the maxConcurrency limit.\n *\n * @param tasks Array of functions that return promises\n * @param isCancelled Optional function to check if operation should be cancelled\n * @returns Promise that resolves with array of settled results\n */\n async allSettled<T>(tasks: Array<() => Promise<T>>, isCancelled?: () => boolean): Promise<PromiseSettledResult<T>[]> {\n return Promise.all(\n tasks.map(task =>\n this.run(task, isCancelled)\n .then(value => ({ status: 'fulfilled' as const, value }))\n .catch(reason => ({ status: 'rejected' as const, reason }))\n )\n );\n }\n\n /**\n * Acquire a slot for execution.\n * If maxConcurrency is reached, this will wait until a slot is available.\n */\n private acquire(): Promise<void> {\n if (this.running < this.maxConcurrency) {\n this.running++;\n return Promise.resolve();\n }\n\n // Queue the request and wait for a slot\n return new Promise<void>(resolve => {\n this.queue.push(resolve);\n });\n }\n\n /**\n * Release a slot after execution completes.\n * If there are queued tasks, the next one will be started.\n */\n private 
release(): void {\n this.running--;\n\n // Start next queued task if any\n const next = this.queue.shift();\n if (next) {\n this.running++;\n next();\n }\n }\n}\n\n// Re-export for backward compatibility\nexport { DEFAULT_MAX_CONCURRENCY } from '../config/defaults';\n", "/**\n * Map-Reduce Executor\n *\n * Executes map-reduce jobs with configurable concurrency, progress tracking,\n * and optional process manager integration.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport { ConcurrencyLimiter, CancellationError } from './concurrency-limiter';\nimport {\n DEFAULT_MAP_REDUCE_OPTIONS,\n ExecutionStats,\n ExecutorOptions,\n JobProgress,\n MapContext,\n MapReduceJob,\n MapReduceOptions,\n MapReduceResult,\n MapResult,\n ReduceContext,\n WorkItem,\n SessionMetadata\n} from './types';\n\n/**\n * Generates a unique execution ID\n */\nfunction generateExecutionId(): string {\n return `mr-${Date.now()}-${Math.random().toString(36).substring(2, 9)}`;\n}\n\n/**\n * MapReduceExecutor\n *\n * Executes map-reduce jobs with:\n * - Configurable concurrency limiting\n * - Progress tracking and callbacks\n * - Optional AI process manager integration\n * - Retry support for failed operations\n * - Timeout handling\n */\nexport class MapReduceExecutor {\n private limiter: ConcurrencyLimiter;\n private options: ExecutorOptions;\n\n constructor(options: ExecutorOptions) {\n this.options = {\n ...DEFAULT_MAP_REDUCE_OPTIONS,\n ...options\n };\n this.limiter = new ConcurrencyLimiter(this.options.maxConcurrency);\n }\n\n /**\n * Execute a map-reduce job\n * @param job The job to execute\n * @param input The input to process\n * @returns Promise resolving to the job result\n */\n async execute<TInput, TWorkItemData, TMapOutput, TReduceOutput>(\n job: MapReduceJob<TInput, TWorkItemData, TMapOutput, TReduceOutput>,\n input: TInput\n ): Promise<MapReduceResult<TMapOutput, TReduceOutput>> {\n const executionId = generateExecutionId();\n const startTime = Date.now();\n\n // Merge job options with executor options\n const options: MapReduceOptions = {\n ...this.options,\n ...job.options\n };\n\n // Report initial progress\n this.reportProgress({\n phase: 'splitting',\n totalItems: 0,\n completedItems: 0,\n failedItems: 0,\n percentage: 0,\n message: 'Splitting input into work items...'\n });\n\n // 1. Split Phase\n let workItems: WorkItem<TWorkItemData>[];\n try {\n workItems = job.splitter.split(input);\n } catch (error) {\n const errorMsg = error instanceof Error ? error.message : String(error);\n return this.createFailedResult(startTime, `Split phase failed: ${errorMsg}`);\n }\n\n if (workItems.length === 0) {\n return this.createEmptyResult(startTime);\n }\n\n // Register group process if tracker is available\n let groupId: string | undefined;\n if (this.options.processTracker && workItems.length > 1) {\n const description = options.jobName || job.name;\n groupId = this.options.processTracker.registerGroup(description);\n }\n\n // Report split complete\n this.reportProgress({\n phase: 'mapping',\n totalItems: workItems.length,\n completedItems: 0,\n failedItems: 0,\n percentage: 0,\n message: `Processing ${workItems.length} items (max ${options.maxConcurrency} concurrent)...`\n });\n\n // 2. 
Map Phase\n const mapStartTime = Date.now();\n let mapResults: MapResult<TMapOutput>[];\n try {\n mapResults = await this.executeMapPhase(\n job,\n workItems,\n executionId,\n options,\n groupId\n );\n } catch (error) {\n if (error instanceof CancellationError) {\n const mapPhaseTimeMs = Date.now() - mapStartTime;\n return this.createCancelledResult(startTime, mapPhaseTimeMs, workItems.length, options.maxConcurrency);\n }\n throw error;\n }\n const mapPhaseTimeMs = Date.now() - mapStartTime;\n\n // Calculate map statistics\n const successfulMaps = mapResults.filter(r => r.success).length;\n const failedMaps = mapResults.filter(r => !r.success).length;\n\n // Report map complete\n this.reportProgress({\n phase: 'reducing',\n totalItems: workItems.length,\n completedItems: successfulMaps,\n failedItems: failedMaps,\n percentage: 90,\n message: 'Aggregating results...'\n });\n\n // 3. Reduce Phase\n const reduceStartTime = Date.now();\n const reduceContext: ReduceContext = {\n executionId,\n mapPhaseTimeMs,\n successfulMaps,\n failedMaps,\n processTracker: this.options.processTracker,\n parentGroupId: groupId\n };\n\n let reduceResult;\n try {\n reduceResult = await job.reducer.reduce(mapResults, reduceContext);\n } catch (error) {\n const errorMsg = error instanceof Error ? error.message : String(error);\n const totalTimeMs = Date.now() - startTime;\n\n // Still return partial results from map phase\n return {\n success: false,\n mapResults,\n totalTimeMs,\n executionStats: {\n totalItems: workItems.length,\n successfulMaps,\n failedMaps,\n mapPhaseTimeMs,\n reducePhaseTimeMs: Date.now() - reduceStartTime,\n maxConcurrency: options.maxConcurrency\n },\n error: `Reduce phase failed: ${errorMsg}`\n };\n }\n\n const reducePhaseTimeMs = Date.now() - reduceStartTime;\n const totalTimeMs = Date.now() - startTime;\n\n // Build execution stats\n const executionStats: ExecutionStats = {\n totalItems: workItems.length,\n successfulMaps,\n failedMaps,\n mapPhaseTimeMs,\n reducePhaseTimeMs,\n maxConcurrency: options.maxConcurrency\n };\n\n // Complete group process if registered\n if (this.options.processTracker && groupId) {\n this.options.processTracker.completeGroup(\n groupId,\n `Completed: ${successfulMaps}/${workItems.length} items processed`,\n executionStats\n );\n }\n\n // Report complete\n this.reportProgress({\n phase: 'complete',\n totalItems: workItems.length,\n completedItems: successfulMaps,\n failedItems: failedMaps,\n percentage: 100,\n message: `Complete: ${successfulMaps} succeeded, ${failedMaps} failed`\n });\n\n // Build the result\n const overallSuccess = failedMaps === 0;\n const result: MapReduceResult<TMapOutput, TReduceOutput> = {\n success: overallSuccess,\n output: reduceResult.output,\n mapResults,\n reduceStats: reduceResult.stats,\n totalTimeMs,\n executionStats\n };\n\n // Add error message if there were failures\n if (!overallSuccess) {\n // Get error messages from failed items\n const failedResults = mapResults.filter(r => !r.success);\n if (failedResults.length === 1) {\n result.error = `1 item failed: ${failedResults[0].error || 'Unknown error'}`;\n } else {\n // Collect unique error messages\n const uniqueErrors = [...new Set(failedResults.map(r => r.error || 'Unknown error'))];\n if (uniqueErrors.length === 1) {\n result.error = `${failedResults.length} items failed: ${uniqueErrors[0]}`;\n } else {\n result.error = `${failedResults.length} items failed with ${uniqueErrors.length} different errors`;\n }\n }\n }\n\n return result;\n }\n\n /**\n * Execute the map phase 
with concurrency limiting\n */\n private async executeMapPhase<TWorkItemData, TMapOutput>(\n job: MapReduceJob<unknown, TWorkItemData, TMapOutput, unknown>,\n workItems: WorkItem<TWorkItemData>[],\n executionId: string,\n options: MapReduceOptions,\n parentGroupId?: string\n ): Promise<MapResult<TMapOutput>[]> {\n let completedCount = 0;\n let failedCount = 0;\n let cancelled = false;\n\n // Create tasks for each work item\n const tasks = workItems.map((item, index) => {\n return () => {\n // Check for cancellation before starting this task\n if (cancelled || this.options.isCancelled?.()) {\n cancelled = true;\n // Return a cancelled result instead of throwing\n return Promise.resolve<MapResult<TMapOutput>>({\n workItemId: item.id,\n success: false,\n error: 'Operation cancelled',\n executionTimeMs: 0\n });\n }\n\n return this.executeMapItem(\n job,\n item,\n {\n executionId,\n totalItems: workItems.length,\n itemIndex: index,\n parentGroupId,\n isCancelled: this.options.isCancelled\n },\n options\n ).then(result => {\n // Update progress\n if (result.success) {\n completedCount++;\n } else {\n failedCount++;\n }\n\n this.reportProgress({\n phase: 'mapping',\n totalItems: workItems.length,\n completedItems: completedCount,\n failedItems: failedCount,\n percentage: Math.round(((completedCount + failedCount) / workItems.length) * 85),\n message: `Processed ${completedCount + failedCount}/${workItems.length} items...`\n });\n\n // Notify per-item completion (for incremental saving, etc.)\n if (this.options.onItemComplete) {\n try {\n this.options.onItemComplete(item, result);\n } catch {\n // Don't let callback errors affect the pipeline\n }\n }\n\n return result;\n });\n };\n });\n\n // Execute with concurrency limit and cancellation support\n try {\n return await this.limiter.all(tasks, this.options.isCancelled);\n } catch (error) {\n if (error instanceof CancellationError) {\n // Return cancelled results for any remaining items\n const processedCount = completedCount + failedCount;\n const cancelledResults: MapResult<TMapOutput>[] = [];\n for (let i = processedCount; i < workItems.length; i++) {\n cancelledResults.push({\n workItemId: workItems[i].id,\n success: false,\n error: 'Operation cancelled',\n executionTimeMs: 0\n });\n }\n throw error; // Re-throw to propagate cancellation\n }\n throw error;\n }\n }\n\n /**\n * Execute a single map item with retry support\n */\n private async executeMapItem<TWorkItemData, TMapOutput>(\n job: MapReduceJob<unknown, TWorkItemData, TMapOutput, unknown>,\n item: WorkItem<TWorkItemData>,\n context: MapContext,\n options: MapReduceOptions\n ): Promise<MapResult<TMapOutput>> {\n const startTime = Date.now();\n const maxAttempts = options.retryOnFailure ? 
(options.retryAttempts || 1) + 1 : 1;\n const baseTimeoutMs = options.timeoutMs || DEFAULT_MAP_REDUCE_OPTIONS.timeoutMs;\n\n // Register process if tracker available\n let processId: string | undefined;\n if (this.options.processTracker) {\n processId = this.options.processTracker.registerProcess(\n `Processing item ${context.itemIndex + 1}/${context.totalItems}`,\n context.parentGroupId\n );\n }\n\n for (let attempt = 0; attempt < maxAttempts; attempt++) {\n try {\n // Try with timeout, including timeout retry with doubled value\n const output = await this.executeMapItemWithTimeoutRetry<TWorkItemData, TMapOutput>(\n job,\n item,\n context,\n baseTimeoutMs\n );\n\n const executionTimeMs = Date.now() - startTime;\n\n // Update process status with structured result\n if (this.options.processTracker && processId) {\n // Serialize the output for structured result storage\n let structuredResult: string | undefined;\n try {\n structuredResult = JSON.stringify(output);\n } catch {\n // Ignore serialization errors\n }\n this.options.processTracker.updateProcess(processId, 'completed', undefined, undefined, structuredResult);\n \n // Attach session metadata if the output contains sessionId (for session resume)\n // This is used by pipeline items to enable session resume functionality\n if (this.options.processTracker.attachSessionMetadata) {\n const outputWithSession = output as { sessionId?: string };\n if (outputWithSession?.sessionId) {\n this.options.processTracker.attachSessionMetadata(processId, {\n sessionId: outputWithSession.sessionId,\n backend: 'copilot-sdk' // If we have a sessionId, it came from SDK\n });\n }\n }\n }\n\n return {\n workItemId: item.id,\n success: true,\n output,\n executionTimeMs,\n processId\n };\n } catch (error) {\n const errorMsg = error instanceof Error ? error.message : String(error);\n\n // If this was the last attempt, return failure\n if (attempt === maxAttempts - 1) {\n const executionTimeMs = Date.now() - startTime;\n\n // Update process status\n if (this.options.processTracker && processId) {\n this.options.processTracker.updateProcess(processId, 'failed', undefined, errorMsg);\n }\n\n return {\n workItemId: item.id,\n success: false,\n error: errorMsg,\n executionTimeMs,\n processId\n };\n }\n\n // Wait before retry (exponential backoff)\n await this.delay(1000 * (attempt + 1));\n }\n }\n\n // Should never reach here, but TypeScript needs this\n return {\n workItemId: item.id,\n success: false,\n error: 'Unexpected error in map execution',\n executionTimeMs: Date.now() - startTime,\n processId\n };\n }\n\n /**\n * Execute a map item with timeout retry support.\n * On timeout, retries once with doubled timeout value.\n */\n private async executeMapItemWithTimeoutRetry<TWorkItemData, TMapOutput>(\n job: MapReduceJob<unknown, TWorkItemData, TMapOutput, unknown>,\n item: WorkItem<TWorkItemData>,\n context: MapContext,\n baseTimeoutMs: number | undefined\n ): Promise<TMapOutput> {\n // First attempt with base timeout\n try {\n return await this.executeMapItemWithTimeout<TWorkItemData, TMapOutput>(\n job,\n item,\n context,\n baseTimeoutMs\n );\n } catch (error) {\n // Check if it's a timeout error\n const isTimeoutError = error instanceof Error && \n error.message.includes('timed out after');\n\n // If not a timeout error, re-throw immediately\n if (!isTimeoutError) {\n throw error;\n }\n\n // Timeout occurred - retry once with doubled timeout\n const doubledTimeoutMs = baseTimeoutMs ? 
baseTimeoutMs * 2 : undefined;\n \n // Second attempt with doubled timeout (no further retries for timeout)\n return await this.executeMapItemWithTimeout<TWorkItemData, TMapOutput>(\n job,\n item,\n context,\n doubledTimeoutMs\n );\n }\n }\n\n /**\n * Execute a map item with a specific timeout\n */\n private async executeMapItemWithTimeout<TWorkItemData, TMapOutput>(\n job: MapReduceJob<unknown, TWorkItemData, TMapOutput, unknown>,\n item: WorkItem<TWorkItemData>,\n context: MapContext,\n timeoutMs: number | undefined\n ): Promise<TMapOutput> {\n const mapPromise = job.mapper.map(item, context);\n\n if (timeoutMs && timeoutMs > 0) {\n return await Promise.race([\n mapPromise,\n this.createTimeoutPromise<TMapOutput>(timeoutMs)\n ]);\n } else {\n return await mapPromise;\n }\n }\n\n /**\n * Create a timeout promise that rejects after the specified time\n */\n private createTimeoutPromise<T>(timeoutMs: number): Promise<T> {\n return new Promise((_, reject) => {\n setTimeout(() => {\n reject(new Error(`Operation timed out after ${timeoutMs}ms`));\n }, timeoutMs);\n });\n }\n\n /**\n * Delay for a specified number of milliseconds\n */\n private delay(ms: number): Promise<void> {\n return new Promise(resolve => setTimeout(resolve, ms));\n }\n\n /**\n * Report progress to the callback if configured\n */\n private reportProgress(progress: JobProgress): void {\n if (this.options.onProgress) {\n this.options.onProgress(progress);\n }\n }\n\n /**\n * Create a failed result\n */\n private createFailedResult<TMapOutput, TReduceOutput>(\n startTime: number,\n error: string\n ): MapReduceResult<TMapOutput, TReduceOutput> {\n return {\n success: false,\n mapResults: [],\n totalTimeMs: Date.now() - startTime,\n executionStats: {\n totalItems: 0,\n successfulMaps: 0,\n failedMaps: 0,\n mapPhaseTimeMs: 0,\n reducePhaseTimeMs: 0,\n maxConcurrency: this.options.maxConcurrency\n },\n error\n };\n }\n\n /**\n * Create an empty result (no work items)\n */\n private createEmptyResult<TMapOutput, TReduceOutput>(\n startTime: number\n ): MapReduceResult<TMapOutput, TReduceOutput> {\n return {\n success: true,\n output: undefined,\n mapResults: [],\n reduceStats: {\n inputCount: 0,\n outputCount: 0,\n mergedCount: 0,\n reduceTimeMs: 0,\n usedAIReduce: false\n },\n totalTimeMs: Date.now() - startTime,\n executionStats: {\n totalItems: 0,\n successfulMaps: 0,\n failedMaps: 0,\n mapPhaseTimeMs: 0,\n reducePhaseTimeMs: 0,\n maxConcurrency: this.options.maxConcurrency\n }\n };\n }\n\n /**\n * Create a cancelled result\n */\n private createCancelledResult<TMapOutput, TReduceOutput>(\n startTime: number,\n mapPhaseTimeMs: number,\n totalItems: number,\n maxConcurrency: number\n ): MapReduceResult<TMapOutput, TReduceOutput> {\n return {\n success: false,\n mapResults: [],\n totalTimeMs: Date.now() - startTime,\n executionStats: {\n totalItems,\n successfulMaps: 0,\n failedMaps: 0,\n mapPhaseTimeMs,\n reducePhaseTimeMs: 0,\n maxConcurrency\n },\n error: 'Operation cancelled'\n };\n }\n}\n\n/**\n * Create a new MapReduceExecutor with the given options\n */\nexport function createExecutor(options: ExecutorOptions): MapReduceExecutor {\n return new MapReduceExecutor(options);\n}\n", "/**\n * Prompt Template\n *\n * Lightweight template system for building prompts from templates with variable substitution.\n * Supports required variables validation and optional response parsing.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport { PromptRenderOptions, PromptTemplate } from './types';\nimport {\n 
TEMPLATE_VARIABLE_REGEX,\n substituteVariables,\n extractVariables as extractTemplateVariables\n} from '../utils/template-engine';\nimport { PipelineCoreError, ErrorCode } from '../errors';\n\n// Re-export PromptTemplate for convenience\nexport type { PromptTemplate } from './types';\n\n/**\n * Error thrown when a required variable is missing\n */\nexport class MissingVariableError extends PipelineCoreError {\n /** Name of the missing variable */\n readonly variableName: string;\n /** Name of the template (if available) */\n readonly templateName?: string;\n\n constructor(\n variableName: string,\n templateName?: string\n ) {\n const context = templateName ? ` in template \"${templateName}\"` : '';\n super(`Missing required variable \"${variableName}\"${context}`, {\n code: ErrorCode.MISSING_VARIABLE,\n meta: {\n variableName,\n ...(templateName && { templateName }),\n },\n });\n this.name = 'MissingVariableError';\n this.variableName = variableName;\n this.templateName = templateName;\n }\n}\n\n/**\n * Error thrown when template rendering fails\n */\nexport class TemplateRenderError extends PipelineCoreError {\n constructor(\n message: string,\n cause?: Error\n ) {\n super(message, {\n code: ErrorCode.TEMPLATE_ERROR,\n cause,\n });\n this.name = 'TemplateRenderError';\n }\n}\n\n/**\n * Render a prompt template with the given variables\n * @param template The prompt template to render\n * @param options Render options including variables\n * @returns The rendered prompt string\n * @throws MissingVariableError if a required variable is missing\n * @throws TemplateRenderError if rendering fails\n */\nexport function renderTemplate(\n template: PromptTemplate,\n options: PromptRenderOptions\n): string {\n const { variables, includeSystemPrompt = false } = options;\n\n // Validate required variables\n for (const required of template.requiredVariables) {\n if (!(required in variables) || variables[required] === undefined || variables[required] === null) {\n throw new MissingVariableError(required);\n }\n }\n\n try {\n // Perform variable substitution using shared engine\n // Use 'preserve' mode for missing variables (they may be optional)\n let rendered = substituteVariables(template.template, variables, {\n strict: false,\n missingValueBehavior: 'preserve',\n preserveSpecialVariables: false // Don't treat any as special in this context\n });\n\n // Prepend system prompt if requested\n if (includeSystemPrompt && template.systemPrompt) {\n rendered = `${template.systemPrompt}\\n\\n${rendered}`;\n }\n\n return rendered;\n } catch (error) {\n throw new TemplateRenderError(\n 'Failed to render template',\n error instanceof Error ? error : undefined\n );\n }\n}\n\n/**\n * Create a new prompt template\n * @param config Template configuration\n * @returns PromptTemplate instance\n */\nexport function createTemplate(config: {\n template: string;\n requiredVariables?: string[];\n systemPrompt?: string;\n responseParser?: (response: string) => unknown;\n}): PromptTemplate {\n // Auto-detect required variables from template if not provided\n const requiredVariables = config.requiredVariables ?? 
extractVariables(config.template);\n\n return {\n template: config.template,\n requiredVariables,\n systemPrompt: config.systemPrompt,\n responseParser: config.responseParser\n };\n}\n\n/**\n * Extract variable names from a template string\n * @param template The template string\n * @returns Array of variable names found in the template\n */\nexport function extractVariables(template: string): string[] {\n // Use shared implementation but don't exclude any variables\n return extractTemplateVariables(template, false);\n}\n\n/**\n * Validate that a template has all required variables defined\n * @param template The template to validate\n * @returns Object with valid flag and any missing variables\n */\nexport function validateTemplate(template: PromptTemplate): {\n valid: boolean;\n missingInTemplate: string[];\n undeclaredVariables: string[];\n} {\n const templateVariables = extractVariables(template.template);\n const requiredSet = new Set(template.requiredVariables);\n const templateSet = new Set(templateVariables);\n\n // Find required variables not in template\n const missingInTemplate = template.requiredVariables.filter(v => !templateSet.has(v));\n\n // Find template variables not declared as required\n const undeclaredVariables = templateVariables.filter(v => !requiredSet.has(v));\n\n return {\n valid: missingInTemplate.length === 0,\n missingInTemplate,\n undeclaredVariables\n };\n}\n\n/**\n * Compose multiple templates into one\n * @param templates Array of templates to compose\n * @param separator Separator between templates (default: '\\n\\n')\n * @returns Combined template\n */\nexport function composeTemplates(\n templates: PromptTemplate[],\n separator: string = '\\n\\n'\n): PromptTemplate {\n const combinedTemplate = templates.map(t => t.template).join(separator);\n const combinedRequired = Array.from(\n new Set(templates.flatMap(t => t.requiredVariables))\n );\n\n // Use first template's system prompt if available\n const systemPrompt = templates.find(t => t.systemPrompt)?.systemPrompt;\n\n return {\n template: combinedTemplate,\n requiredVariables: combinedRequired,\n systemPrompt\n };\n}\n\n/**\n * Built-in template helpers\n */\nexport const TemplateHelpers = {\n /**\n * Escape special characters in a string for use in templates\n */\n escape(str: string): string {\n return str\n .replace(/\\\\/g, '\\\\\\\\')\n .replace(/\\{/g, '\\\\{')\n .replace(/\\}/g, '\\\\}');\n },\n\n /**\n * Truncate a string to a maximum length\n */\n truncate(str: string, maxLength: number, suffix: string = '...'): string {\n if (str.length <= maxLength) {\n return str;\n }\n return str.slice(0, maxLength - suffix.length) + suffix;\n },\n\n /**\n * Indent all lines in a string\n */\n indent(str: string, spaces: number = 2): string {\n const indent = ' '.repeat(spaces);\n return str.split('\\n').map(line => indent + line).join('\\n');\n },\n\n /**\n * Convert an object to a formatted string for use in prompts\n */\n formatObject(obj: Record<string, unknown>, indent: number = 0): string {\n const indentStr = ' '.repeat(indent);\n const lines: string[] = [];\n\n for (const [key, value] of Object.entries(obj)) {\n if (typeof value === 'object' && value !== null && !Array.isArray(value)) {\n lines.push(`${indentStr}${key}:`);\n lines.push(this.formatObject(value as Record<string, unknown>, indent + 2));\n } else if (Array.isArray(value)) {\n lines.push(`${indentStr}${key}:`);\n for (const item of value) {\n lines.push(`${indentStr} - ${String(item)}`);\n }\n } else {\n lines.push(`${indentStr}${key}: 
${String(value)}`);\n }\n }\n\n return lines.join('\\n');\n }\n};\n\n/**\n * Common response parsers\n */\nexport const ResponseParsers = {\n /**\n * Parse JSON from a response\n */\n json<T>(response: string): T {\n // Try to extract JSON from markdown code blocks first\n const jsonMatch = response.match(/```(?:json)?\\s*([\\s\\S]*?)```/);\n if (jsonMatch) {\n return JSON.parse(jsonMatch[1].trim());\n }\n\n // Try to find JSON object or array\n const objectMatch = response.match(/\\{[\\s\\S]*\\}/);\n if (objectMatch) {\n return JSON.parse(objectMatch[0]);\n }\n\n const arrayMatch = response.match(/\\[[\\s\\S]*\\]/);\n if (arrayMatch) {\n return JSON.parse(arrayMatch[0]);\n }\n\n throw new Error('No JSON found in response');\n },\n\n /**\n * Parse a list from a response (one item per line or bullet points)\n */\n list(response: string): string[] {\n const lines = response.split('\\n');\n const items: string[] = [];\n\n for (const line of lines) {\n // Remove bullet points, numbers, and leading whitespace\n const cleaned = line.replace(/^\\s*[-*\u2022]\\s*/, '')\n .replace(/^\\s*\\d+[.)]\\s*/, '')\n .trim();\n\n if (cleaned) {\n items.push(cleaned);\n }\n }\n\n return items;\n },\n\n /**\n * Parse key-value pairs from a response\n */\n keyValue(response: string): Record<string, string> {\n const result: Record<string, string> = {};\n const lines = response.split('\\n');\n\n for (const line of lines) {\n const match = line.match(/^\\s*([^:]+):\\s*(.+)\\s*$/);\n if (match) {\n result[match[1].trim()] = match[2].trim();\n }\n }\n\n return result;\n }\n};\n", "/**\n * Base Reducer Interface and Abstract Class\n *\n * Defines the core reducer interface and provides a base implementation\n * for creating custom reducers.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport {\n MapResult,\n ReduceContext,\n ReduceResult,\n ReduceStats,\n Reducer\n} from '../types';\n\n/**\n * Abstract base class for reducers\n * Provides common functionality and a template for implementing reducers\n */\nexport abstract class BaseReducer<TMapOutput, TReduceOutput> implements Reducer<TMapOutput, TReduceOutput> {\n /**\n * Reduce multiple map outputs into a single result\n */\n abstract reduce(\n results: MapResult<TMapOutput>[],\n context: ReduceContext\n ): Promise<ReduceResult<TReduceOutput>>;\n\n /**\n * Extract successful outputs from map results\n */\n protected extractSuccessfulOutputs(results: MapResult<TMapOutput>[]): TMapOutput[] {\n return results\n .filter(r => r.success && r.output !== undefined)\n .map(r => r.output!);\n }\n\n /**\n * Create reduce stats\n */\n protected createStats(\n inputCount: number,\n outputCount: number,\n reduceTimeMs: number,\n usedAIReduce: boolean\n ): ReduceStats {\n return {\n inputCount,\n outputCount,\n mergedCount: inputCount - outputCount,\n reduceTimeMs,\n usedAIReduce\n };\n }\n\n /**\n * Create an empty result\n */\n protected createEmptyResult(defaultOutput: TReduceOutput): ReduceResult<TReduceOutput> {\n return {\n output: defaultOutput,\n stats: {\n inputCount: 0,\n outputCount: 0,\n mergedCount: 0,\n reduceTimeMs: 0,\n usedAIReduce: false\n }\n };\n }\n}\n\n/**\n * Identity reducer - passes through outputs unchanged\n */\nexport class IdentityReducer<T> extends BaseReducer<T, T[]> {\n async reduce(\n results: MapResult<T>[],\n context: ReduceContext\n ): Promise<ReduceResult<T[]>> {\n const startTime = Date.now();\n const outputs = this.extractSuccessfulOutputs(results);\n const reduceTimeMs = Date.now() - startTime;\n\n return {\n output: 
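// Usage sketch for the prompt-template module above. The {{name}} placeholder syntax is
// an assumption based on prompts used elsewhere in this package (the actual syntax is
// defined by the shared template-engine), and the relative import path is illustrative.
import { createTemplate, renderTemplate, ResponseParsers } from './prompt-template';

const summaryTemplate = createTemplate({
  template: 'Summarize the following diff:\n\n{{diff}}',
  systemPrompt: 'You are a concise reviewer.'
  // requiredVariables is omitted, so createTemplate() auto-detects them from the template.
});

const prompt = renderTemplate(summaryTemplate, {
  variables: { diff: '+ added line\n- removed line' },
  includeSystemPrompt: true // prepends the system prompt to the rendered text
});

// ResponseParsers.list() strips bullets/numbering and returns one entry per line.
const items = ResponseParsers.list('- first point\n- second point');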
outputs,\n stats: this.createStats(outputs.length, outputs.length, reduceTimeMs, false)\n };\n }\n}\n\n/**\n * Flattening reducer - flattens array outputs into a single array\n */\nexport class FlattenReducer<T> extends BaseReducer<T[], T[]> {\n async reduce(\n results: MapResult<T[]>[],\n context: ReduceContext\n ): Promise<ReduceResult<T[]>> {\n const startTime = Date.now();\n const arrays = this.extractSuccessfulOutputs(results);\n const flattened = arrays.flat();\n const reduceTimeMs = Date.now() - startTime;\n\n return {\n output: flattened,\n stats: this.createStats(\n arrays.reduce((sum, arr) => sum + arr.length, 0),\n flattened.length,\n reduceTimeMs,\n false\n )\n };\n }\n}\n\n/**\n * Aggregating reducer - combines outputs using a custom aggregation function\n */\nexport class AggregatingReducer<TMapOutput, TReduceOutput> extends BaseReducer<TMapOutput, TReduceOutput> {\n constructor(\n private aggregator: (outputs: TMapOutput[]) => TReduceOutput,\n private defaultOutput: TReduceOutput\n ) {\n super();\n }\n\n async reduce(\n results: MapResult<TMapOutput>[],\n context: ReduceContext\n ): Promise<ReduceResult<TReduceOutput>> {\n const startTime = Date.now();\n const outputs = this.extractSuccessfulOutputs(results);\n\n if (outputs.length === 0) {\n return this.createEmptyResult(this.defaultOutput);\n }\n\n const aggregated = this.aggregator(outputs);\n const reduceTimeMs = Date.now() - startTime;\n\n return {\n output: aggregated,\n stats: this.createStats(outputs.length, 1, reduceTimeMs, false)\n };\n }\n}\n\n// Re-export types\nexport type { Reducer, ReduceResult, ReduceStats, ReduceContext } from '../types';\n", "/**\n * Deterministic Reducer\n *\n * A code-based reducer that performs deduplication and aggregation\n * without AI calls. Fast, consistent, and reproducible.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport {\n MapResult,\n ReduceContext,\n ReduceResult,\n ReduceStats\n} from '../types';\nimport { BaseReducer } from './reducer';\n\n/**\n * Interface for items that can be deduplicated\n */\nexport interface Deduplicatable {\n /** Unique identifier */\n id?: string;\n /** Content for key generation */\n [key: string]: unknown;\n}\n\n/**\n * Options for the deterministic reducer\n */\nexport interface DeterministicReducerOptions<T> {\n /**\n * Function to generate a deduplication key for an item\n * Items with the same key will be considered duplicates\n */\n getKey: (item: T) => string;\n\n /**\n * Function to merge two duplicate items into one\n * @param existing The existing item\n * @param newItem The new duplicate item\n * @returns The merged item\n */\n merge: (existing: T, newItem: T) => T;\n\n /**\n * Optional function to sort the final results\n */\n sort?: (a: T, b: T) => number;\n\n /**\n * Optional function to create a summary from the results\n */\n summarize?: (items: T[]) => Record<string, unknown>;\n}\n\n/**\n * Result type for deterministic reducer including summary\n */\nexport interface DeterministicReduceOutput<T> {\n /** Deduplicated items */\n items: T[];\n /** Summary statistics/data */\n summary?: Record<string, unknown>;\n}\n\n/**\n * Deterministic reducer that uses code-based logic for deduplication.\n * Fast, consistent, and doesn't require additional API calls.\n */\nexport class DeterministicReducer<T extends Deduplicatable> extends BaseReducer<T[], DeterministicReduceOutput<T>> {\n constructor(private options: DeterministicReducerOptions<T>) {\n super();\n }\n\n /**\n * Reduce findings using deterministic code-based 
logic\n */\n async reduce(\n results: MapResult<T[]>[],\n context: ReduceContext\n ): Promise<ReduceResult<DeterministicReduceOutput<T>>> {\n const startTime = Date.now();\n\n // Collect all items from successful results\n const allItems: T[] = [];\n for (const result of results) {\n if (result.success && result.output) {\n allItems.push(...result.output);\n }\n }\n\n const originalCount = allItems.length;\n\n // Deduplicate items\n const dedupedItems = this.deduplicateItems(allItems);\n\n // Sort if sorter provided\n if (this.options.sort) {\n dedupedItems.sort(this.options.sort);\n }\n\n // Create summary if summarizer provided\n const summary = this.options.summarize\n ? this.options.summarize(dedupedItems)\n : undefined;\n\n const reduceTimeMs = Date.now() - startTime;\n\n return {\n output: {\n items: dedupedItems,\n summary\n },\n stats: {\n inputCount: originalCount,\n outputCount: dedupedItems.length,\n mergedCount: originalCount - dedupedItems.length,\n reduceTimeMs,\n usedAIReduce: false\n }\n };\n }\n\n /**\n * Deduplicate items based on key and merge duplicates\n */\n private deduplicateItems(items: T[]): T[] {\n const seen = new Map<string, T>();\n\n for (const item of items) {\n const key = this.options.getKey(item);\n\n if (seen.has(key)) {\n // Merge with existing item\n const existing = seen.get(key)!;\n const merged = this.options.merge(existing, item);\n seen.set(key, merged);\n } else {\n seen.set(key, item);\n }\n }\n\n return Array.from(seen.values());\n }\n}\n\n/**\n * Factory function to create a deterministic reducer\n */\nexport function createDeterministicReducer<T extends Deduplicatable>(\n options: DeterministicReducerOptions<T>\n): DeterministicReducer<T> {\n return new DeterministicReducer(options);\n}\n\n/**\n * Simple string-based deduplication reducer\n * Deduplicates string arrays and returns unique strings\n */\nexport class StringDeduplicationReducer extends BaseReducer<string[], { items: string[]; count: number }> {\n private caseSensitive: boolean;\n\n constructor(caseSensitive: boolean = true) {\n super();\n this.caseSensitive = caseSensitive;\n }\n\n async reduce(\n results: MapResult<string[]>[],\n context: ReduceContext\n ): Promise<ReduceResult<{ items: string[]; count: number }>> {\n const startTime = Date.now();\n\n const allStrings: string[] = [];\n for (const result of results) {\n if (result.success && result.output) {\n allStrings.push(...result.output);\n }\n }\n\n const seen = new Set<string>();\n const unique: string[] = [];\n\n for (const str of allStrings) {\n const key = this.caseSensitive ? 
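// Usage sketch for createDeterministicReducer() above. The Finding shape and the relative
// import path are illustrative; in practice the MapResult inputs come from the map phase
// rather than being built by hand.
import { createDeterministicReducer } from './reducers/deterministic';

interface Finding {
  id?: string;
  file: string;
  line: number;
  message: string;
  [key: string]: unknown; // satisfies the Deduplicatable constraint
}

const findingReducer = createDeterministicReducer<Finding>({
  getKey: f => `${f.file}:${f.line}:${f.message.toLowerCase()}`, // duplicates share this key
  merge: (existing, next) => ({ ...existing, ...next }),          // later fields win
  sort: (a, b) => a.file.localeCompare(b.file) || a.line - b.line,
  summarize: items => ({ total: items.length })
});

// findingReducer.reduce(mapResults, context) returns { items, summary } plus ReduceStats
// with mergedCount = inputCount - outputCount.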
str : str.toLowerCase();\n if (!seen.has(key)) {\n seen.add(key);\n unique.push(str);\n }\n }\n\n const reduceTimeMs = Date.now() - startTime;\n\n return {\n output: {\n items: unique,\n count: unique.length\n },\n stats: {\n inputCount: allStrings.length,\n outputCount: unique.length,\n mergedCount: allStrings.length - unique.length,\n reduceTimeMs,\n usedAIReduce: false\n }\n };\n }\n}\n\n/**\n * Numeric aggregation reducer\n * Aggregates numeric values with sum, average, min, max, etc.\n */\nexport class NumericAggregationReducer extends BaseReducer<number[], {\n sum: number;\n average: number;\n min: number;\n max: number;\n count: number;\n}> {\n async reduce(\n results: MapResult<number[]>[],\n context: ReduceContext\n ): Promise<ReduceResult<{\n sum: number;\n average: number;\n min: number;\n max: number;\n count: number;\n }>> {\n const startTime = Date.now();\n\n const allNumbers: number[] = [];\n for (const result of results) {\n if (result.success && result.output) {\n allNumbers.push(...result.output);\n }\n }\n\n if (allNumbers.length === 0) {\n return {\n output: {\n sum: 0,\n average: 0,\n min: 0,\n max: 0,\n count: 0\n },\n stats: {\n inputCount: 0,\n outputCount: 0,\n mergedCount: 0,\n reduceTimeMs: Date.now() - startTime,\n usedAIReduce: false\n }\n };\n }\n\n const sum = allNumbers.reduce((a, b) => a + b, 0);\n const average = sum / allNumbers.length;\n const min = Math.min(...allNumbers);\n const max = Math.max(...allNumbers);\n\n const reduceTimeMs = Date.now() - startTime;\n\n return {\n output: {\n sum,\n average,\n min,\n max,\n count: allNumbers.length\n },\n stats: {\n inputCount: allNumbers.length,\n outputCount: 4, // sum, average, min, max\n mergedCount: allNumbers.length - 1,\n reduceTimeMs,\n usedAIReduce: false\n }\n };\n }\n}\n", "/**\n * AI-Powered Reducer\n *\n * A reducer that uses AI to intelligently synthesize and deduplicate results.\n * Falls back to deterministic reduction on failure.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport {\n AIInvoker,\n MapResult,\n ReduceContext,\n ReduceResult,\n ReduceStats\n} from '../types';\nimport { BaseReducer } from './reducer';\nimport { ResponseParsers } from '../prompt-template';\nimport { getLogger, LogCategory } from '../../logger';\n\n/**\n * Options for the AI reducer\n */\nexport interface AIReducerOptions<TMapOutput, TReduceOutput> {\n /**\n * AI invoker function for making the reduce call\n */\n aiInvoker: AIInvoker;\n\n /**\n * Function to build the reduce prompt from map outputs\n */\n buildPrompt: (outputs: TMapOutput[], context: ReduceContext) => string;\n\n /**\n * Function to parse the AI response into the reduce output\n */\n parseResponse: (response: string, originalOutputs: TMapOutput[]) => TReduceOutput;\n\n /**\n * Fallback reducer to use when AI fails\n */\n fallbackReducer: BaseReducer<TMapOutput, TReduceOutput>;\n\n /**\n * Optional model to use for the AI call\n */\n model?: string;\n}\n\n/**\n * AI-powered reducer that uses an additional AI call to synthesize results.\n * Provides intelligent deduplication, conflict resolution, and prioritization.\n */\nexport class AIReducer<TMapOutput, TReduceOutput> extends BaseReducer<TMapOutput, TReduceOutput> {\n constructor(private options: AIReducerOptions<TMapOutput, TReduceOutput>) {\n super();\n }\n\n /**\n * Reduce using AI-powered synthesis\n */\n async reduce(\n results: MapResult<TMapOutput>[],\n context: ReduceContext\n ): Promise<ReduceResult<TReduceOutput>> {\n const startTime = Date.now();\n const outputs = 
this.extractSuccessfulOutputs(results);\n\n // If no outputs, use fallback\n if (outputs.length === 0) {\n const fallbackResult = await this.options.fallbackReducer.reduce(results, context);\n return {\n ...fallbackResult,\n stats: {\n ...fallbackResult.stats,\n usedAIReduce: false\n }\n };\n }\n\n // Build the reduce prompt\n const prompt = this.options.buildPrompt(outputs, context);\n\n try {\n // Invoke AI\n const aiResult = await this.options.aiInvoker(prompt, {\n model: this.options.model\n });\n\n if (aiResult.success && aiResult.response) {\n // Parse the response\n const output = this.options.parseResponse(aiResult.response, outputs);\n const reduceTimeMs = Date.now() - startTime;\n\n return {\n output,\n stats: {\n inputCount: outputs.length,\n outputCount: 1,\n mergedCount: outputs.length - 1,\n reduceTimeMs,\n usedAIReduce: true\n }\n };\n }\n\n // AI failed, use fallback\n getLogger().warn(LogCategory.MAP_REDUCE, `AI reduce failed, falling back to deterministic: ${aiResult.error}`);\n return this.fallbackWithStats(results, context, startTime);\n\n } catch (error) {\n // On any error, use fallback\n getLogger().warn(LogCategory.MAP_REDUCE, `AI reduce error, falling back to deterministic: ${error instanceof Error ? error.message : String(error)}`);\n return this.fallbackWithStats(results, context, startTime);\n }\n }\n\n /**\n * Run fallback reducer and update stats\n */\n private async fallbackWithStats(\n results: MapResult<TMapOutput>[],\n context: ReduceContext,\n startTime: number\n ): Promise<ReduceResult<TReduceOutput>> {\n const fallbackResult = await this.options.fallbackReducer.reduce(results, context);\n const reduceTimeMs = Date.now() - startTime;\n\n return {\n ...fallbackResult,\n stats: {\n ...fallbackResult.stats,\n reduceTimeMs,\n usedAIReduce: false\n }\n };\n }\n}\n\n/**\n * Factory function to create an AI reducer\n */\nexport function createAIReducer<TMapOutput, TReduceOutput>(\n options: AIReducerOptions<TMapOutput, TReduceOutput>\n): AIReducer<TMapOutput, TReduceOutput> {\n return new AIReducer(options);\n}\n\n/**\n * Generic AI synthesis reducer for text outputs\n * Synthesizes multiple text outputs into a single coherent summary\n */\nexport interface TextSynthesisOutput {\n /** Synthesized summary */\n summary: string;\n /** Key points extracted */\n keyPoints: string[];\n /** Original count */\n originalCount: number;\n}\n\n/**\n * Options for text synthesis reducer\n */\nexport interface TextSynthesisOptions {\n /** AI invoker function */\n aiInvoker: AIInvoker;\n /** Optional custom prompt prefix */\n promptPrefix?: string;\n /** Optional model to use */\n model?: string;\n}\n\n/**\n * Create a text synthesis reducer that combines text outputs using AI\n */\nexport function createTextSynthesisReducer(\n options: TextSynthesisOptions\n): AIReducer<string, TextSynthesisOutput> {\n // Create a simple fallback reducer\n const fallbackReducer = new class extends BaseReducer<string, TextSynthesisOutput> {\n async reduce(\n results: MapResult<string>[],\n context: ReduceContext\n ): Promise<ReduceResult<TextSynthesisOutput>> {\n const outputs = this.extractSuccessfulOutputs(results);\n return {\n output: {\n summary: outputs.join('\\n\\n---\\n\\n'),\n keyPoints: outputs.slice(0, 5),\n originalCount: outputs.length\n },\n stats: this.createStats(outputs.length, 1, 0, false)\n };\n }\n }();\n\n return createAIReducer<string, TextSynthesisOutput>({\n aiInvoker: options.aiInvoker,\n model: options.model,\n fallbackReducer,\n\n buildPrompt: (outputs, context) => 
{\n const prefix = options.promptPrefix || 'Synthesize the following inputs into a coherent summary:';\n const numberedOutputs = outputs.map((o, i) => `[${i + 1}] ${o}`).join('\\n\\n');\n\n return `${prefix}\n\n${numberedOutputs}\n\nPlease provide:\n1. A concise summary that combines all the key information\n2. A list of key points (as a JSON array of strings)\n\nFormat your response as JSON:\n{\n \"summary\": \"Your synthesized summary here\",\n \"keyPoints\": [\"Point 1\", \"Point 2\", \"...\"]\n}`;\n },\n\n parseResponse: (response, originalOutputs) => {\n try {\n const parsed = ResponseParsers.json<{ summary: string; keyPoints: string[] }>(response);\n return {\n summary: parsed.summary || '',\n keyPoints: parsed.keyPoints || [],\n originalCount: originalOutputs.length\n };\n } catch {\n // If parsing fails, return raw response as summary\n return {\n summary: response,\n keyPoints: [],\n originalCount: originalOutputs.length\n };\n }\n }\n });\n}\n", "/**\n * Hybrid Reducer\n *\n * Combines deterministic reduction with AI polishing.\n * First performs code-based deduplication, then uses AI to refine the results.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport {\n AIInvoker,\n MapResult,\n ReduceContext,\n ReduceResult,\n ReduceStats\n} from '../types';\nimport { BaseReducer } from './reducer';\nimport { DeterministicReducer, DeterministicReducerOptions, DeterministicReduceOutput, Deduplicatable } from './deterministic';\nimport { getLogger, LogCategory } from '../../logger';\n\n/**\n * Options for the hybrid reducer\n */\nexport interface HybridReducerOptions<T extends Deduplicatable, TPolished> {\n /**\n * Options for the deterministic reduction phase\n */\n deterministicOptions: DeterministicReducerOptions<T>;\n\n /**\n * AI invoker for the polishing phase\n */\n aiInvoker: AIInvoker;\n\n /**\n * Function to build the polishing prompt from deterministic results\n */\n buildPolishPrompt: (deterministicOutput: DeterministicReduceOutput<T>, context: ReduceContext) => string;\n\n /**\n * Function to parse the polished AI response\n */\n parsePolishedResponse: (response: string, deterministicOutput: DeterministicReduceOutput<T>) => TPolished;\n\n /**\n * Function to create output when AI polishing is skipped or fails\n */\n createFallbackOutput: (deterministicOutput: DeterministicReduceOutput<T>) => TPolished;\n\n /**\n * Optional model to use for AI polishing\n */\n model?: string;\n\n /**\n * Whether to skip AI polishing if deterministic output is empty\n * Default: true\n */\n skipPolishIfEmpty?: boolean;\n}\n\n/**\n * Hybrid reducer that combines deterministic reduction with AI polishing.\n * \n * Flow:\n * 1. Deterministic reduction (deduplication, merging)\n * 2. 
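// Usage sketch for createTextSynthesisReducer() above. The AIInvoker shape is inferred
// from how this module calls it (a prompt plus an options object with an optional model,
// resolving to { success, response?, error? }); the stub below is an assumption and may
// need adjusting to the real AIInvoker type. The import path is illustrative.
import { createTextSynthesisReducer } from './reducers/ai-reducer';

const stubInvoker = async (_prompt: string, _opts?: { model?: string }) => ({
  success: true,
  response: '{"summary":"Two notes about logging.","keyPoints":["note 1","note 2"]}'
});

const synthesizer = createTextSynthesisReducer({
  aiInvoker: stubInvoker as never, // loose cast only because the stub is hand-rolled here
  promptPrefix: 'Combine these review notes into one summary:',
  model: 'gpt-4o' // illustrative model name, not a package default
});

// synthesizer.reduce(mapResults, context) parses the JSON response via ResponseParsers.json()
// and falls back to joining the raw outputs if the AI call or parsing fails.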
AI polishing (summarization, prioritization, formatting)\n */\nexport class HybridReducer<T extends Deduplicatable, TPolished> extends BaseReducer<T[], TPolished> {\n private deterministicReducer: DeterministicReducer<T>;\n\n constructor(private options: HybridReducerOptions<T, TPolished>) {\n super();\n this.deterministicReducer = new DeterministicReducer(options.deterministicOptions);\n }\n\n /**\n * Reduce using hybrid approach\n */\n async reduce(\n results: MapResult<T[]>[],\n context: ReduceContext\n ): Promise<ReduceResult<TPolished>> {\n const startTime = Date.now();\n\n // Step 1: Deterministic reduction\n const deterministicResult = await this.deterministicReducer.reduce(results, context);\n const deterministicOutput = deterministicResult.output;\n const deterministicTimeMs = deterministicResult.stats.reduceTimeMs;\n\n // Skip AI polishing if empty and configured to skip\n const skipPolishIfEmpty = this.options.skipPolishIfEmpty ?? true;\n if (skipPolishIfEmpty && deterministicOutput.items.length === 0) {\n const reduceTimeMs = Date.now() - startTime;\n return {\n output: this.options.createFallbackOutput(deterministicOutput),\n stats: {\n ...deterministicResult.stats,\n reduceTimeMs,\n usedAIReduce: false\n }\n };\n }\n\n // Step 2: AI polishing\n const polishStartTime = Date.now();\n const prompt = this.options.buildPolishPrompt(deterministicOutput, context);\n\n try {\n const aiResult = await this.options.aiInvoker(prompt, {\n model: this.options.model\n });\n\n if (aiResult.success && aiResult.response) {\n const polishedOutput = this.options.parsePolishedResponse(\n aiResult.response,\n deterministicOutput\n );\n const reduceTimeMs = Date.now() - startTime;\n\n return {\n output: polishedOutput,\n stats: {\n inputCount: deterministicResult.stats.inputCount,\n outputCount: deterministicResult.stats.outputCount,\n mergedCount: deterministicResult.stats.mergedCount,\n reduceTimeMs,\n usedAIReduce: true\n }\n };\n }\n\n // AI failed, use fallback\n getLogger().warn(LogCategory.MAP_REDUCE, `AI polishing failed, using deterministic result: ${aiResult.error}`);\n return this.createFallbackResult(deterministicOutput, deterministicResult.stats, startTime);\n\n } catch (error) {\n getLogger().warn(LogCategory.MAP_REDUCE, `AI polishing error, using deterministic result: ${error instanceof Error ? 
error.message : String(error)}`);\n return this.createFallbackResult(deterministicOutput, deterministicResult.stats, startTime);\n }\n }\n\n /**\n * Create a fallback result using the deterministic output\n */\n private createFallbackResult(\n deterministicOutput: DeterministicReduceOutput<T>,\n deterministicStats: ReduceStats,\n startTime: number\n ): ReduceResult<TPolished> {\n const reduceTimeMs = Date.now() - startTime;\n\n return {\n output: this.options.createFallbackOutput(deterministicOutput),\n stats: {\n ...deterministicStats,\n reduceTimeMs,\n usedAIReduce: false\n }\n };\n }\n}\n\n/**\n * Factory function to create a hybrid reducer\n */\nexport function createHybridReducer<T extends Deduplicatable, TPolished>(\n options: HybridReducerOptions<T, TPolished>\n): HybridReducer<T, TPolished> {\n return new HybridReducer(options);\n}\n\n/**\n * Simple polished output interface for common use cases\n */\nexport interface SimplePolishedOutput<T> {\n /** The processed items */\n items: T[];\n /** AI-generated summary */\n summary: string;\n /** Statistics */\n stats: {\n originalCount: number;\n processedCount: number;\n dedupedCount: number;\n };\n}\n\n/**\n * Create a simple hybrid reducer with default polishing behavior\n */\nexport function createSimpleHybridReducer<T extends Deduplicatable>(\n deterministicOptions: DeterministicReducerOptions<T>,\n aiInvoker: AIInvoker,\n formatForPrompt: (items: T[]) => string,\n model?: string\n): HybridReducer<T, SimplePolishedOutput<T>> {\n return createHybridReducer<T, SimplePolishedOutput<T>>({\n deterministicOptions,\n aiInvoker,\n model,\n\n buildPolishPrompt: (deterministicOutput, context) => {\n const formatted = formatForPrompt(deterministicOutput.items);\n return `Review and summarize the following ${deterministicOutput.items.length} items:\n\n${formatted}\n\nProvide a brief summary (2-3 sentences) of the key findings.`;\n },\n\n parsePolishedResponse: (response, deterministicOutput) => {\n return {\n items: deterministicOutput.items,\n summary: response.trim(),\n stats: {\n originalCount: deterministicOutput.items.length,\n processedCount: deterministicOutput.items.length,\n dedupedCount: 0\n }\n };\n },\n\n createFallbackOutput: (deterministicOutput) => {\n return {\n items: deterministicOutput.items,\n summary: `Found ${deterministicOutput.items.length} items.`,\n stats: {\n originalCount: deterministicOutput.items.length,\n processedCount: deterministicOutput.items.length,\n dedupedCount: 0\n }\n };\n }\n });\n}\n", "/**\n * Reducers Module\n *\n * Exports all reducer implementations and utilities.\n */\n\n// Base reducer\nexport {\n BaseReducer,\n IdentityReducer,\n FlattenReducer,\n AggregatingReducer\n} from './reducer';\n\n// Deterministic reducer\nexport {\n DeterministicReducer,\n createDeterministicReducer,\n StringDeduplicationReducer,\n NumericAggregationReducer\n} from './deterministic';\nexport type {\n Deduplicatable,\n DeterministicReducerOptions,\n DeterministicReduceOutput\n} from './deterministic';\n\n// AI reducer\nexport {\n AIReducer,\n createAIReducer,\n createTextSynthesisReducer\n} from './ai-reducer';\nexport type {\n AIReducerOptions,\n TextSynthesisOutput,\n TextSynthesisOptions\n} from './ai-reducer';\n\n// Hybrid reducer\nexport {\n HybridReducer,\n createHybridReducer,\n createSimpleHybridReducer\n} from './hybrid-reducer';\nexport type {\n HybridReducerOptions,\n SimplePolishedOutput\n} from './hybrid-reducer';\n", "/**\n * File Splitter\n *\n * Splits input by files for file-based processing.\n * 
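// Usage sketch for createSimpleHybridReducer() above: deterministic deduplication first,
// then a single AI call that turns the surviving items into a short summary. The Note
// shape, the inline invoker and the import path are illustrative assumptions.
import { createSimpleHybridReducer } from './reducers/hybrid-reducer';

interface Note {
  id?: string;
  text: string;
  [key: string]: unknown;
}

const hybrid = createSimpleHybridReducer<Note>(
  {
    getKey: n => n.text.trim().toLowerCase(), // case-insensitive dedup key
    merge: (existing, _next) => existing      // keep the first occurrence
  },
  async (prompt, _opts) => ({ success: true, response: `Reviewed ${prompt.length} characters.` }),
  notes => notes.map(n => `- ${n.text}`).join('\n') // formatForPrompt
);

// If the AI polish step fails, hybrid.reduce() returns the deduplicated items with the
// "Found N items." fallback summary from createFallbackOutput above.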
Handles both arrays of files and file-containing objects.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport { Splitter, WorkItem } from '../types';\n\n/**\n * A file item with path and optional content\n */\nexport interface FileItem {\n /** File path (relative or absolute) */\n path: string;\n /** File content (optional, may be loaded separately) */\n content?: string;\n /** Additional metadata */\n metadata?: Record<string, unknown>;\n}\n\n/**\n * Input for file splitter\n */\nexport interface FileInput {\n /** Array of files to process */\n files: FileItem[];\n /** Common context to include with each work item */\n context?: Record<string, unknown>;\n}\n\n/**\n * Work item data for file processing\n */\nexport interface FileWorkItemData {\n /** The file being processed */\n file: FileItem;\n /** Common context from input */\n context?: Record<string, unknown>;\n}\n\n/**\n * Options for file splitter\n */\nexport interface FileSplitterOptions {\n /**\n * Function to generate work item ID from file\n * Default: uses file path\n */\n generateId?: (file: FileItem, index: number) => string;\n\n /**\n * Filter function to exclude certain files\n */\n filter?: (file: FileItem) => boolean;\n\n /**\n * Maximum number of files per work item (for batching)\n * Default: 1 (one file per work item)\n */\n batchSize?: number;\n}\n\n/**\n * Splitter that creates a work item for each file\n */\nexport class FileSplitter implements Splitter<FileInput, FileWorkItemData> {\n constructor(private options: FileSplitterOptions = {}) {}\n\n split(input: FileInput): WorkItem<FileWorkItemData>[] {\n const { files, context } = input;\n const { generateId, filter, batchSize = 1 } = this.options;\n\n // Apply filter if provided\n const filteredFiles = filter\n ? files.filter(filter)\n : files;\n\n // Generate work items\n const workItems: WorkItem<FileWorkItemData>[] = [];\n\n if (batchSize === 1) {\n // One file per work item\n for (let i = 0; i < filteredFiles.length; i++) {\n const file = filteredFiles[i];\n const id = generateId\n ? generateId(file, i)\n : `file-${i}-${this.normalizePathForId(file.path)}`;\n\n workItems.push({\n id,\n data: {\n file,\n context\n },\n metadata: {\n index: i,\n totalFiles: filteredFiles.length\n }\n });\n }\n } else {\n // Batch files\n for (let i = 0; i < filteredFiles.length; i += batchSize) {\n const batch = filteredFiles.slice(i, i + batchSize);\n // For batched files, create a work item for each file in batch\n // (This maintains compatibility but allows for future batch processing)\n for (let j = 0; j < batch.length; j++) {\n const file = batch[j];\n const globalIndex = i + j;\n const id = generateId\n ? 
generateId(file, globalIndex)\n : `file-${globalIndex}-${this.normalizePathForId(file.path)}`;\n\n workItems.push({\n id,\n data: {\n file,\n context\n },\n metadata: {\n index: globalIndex,\n batchIndex: Math.floor(i / batchSize),\n totalFiles: filteredFiles.length\n }\n });\n }\n }\n }\n\n return workItems;\n }\n\n /**\n * Normalize a file path for use in ID\n * Makes it safe and consistent across platforms\n */\n private normalizePathForId(path: string): string {\n // Replace path separators and special characters\n return path\n .replace(/[/\\\\]/g, '-')\n .replace(/[^a-zA-Z0-9-_.]/g, '')\n .toLowerCase()\n .slice(0, 50); // Limit length\n }\n}\n\n/**\n * Factory function to create a file splitter\n */\nexport function createFileSplitter(options?: FileSplitterOptions): FileSplitter {\n return new FileSplitter(options);\n}\n\n/**\n * Create a file splitter with extension filter\n */\nexport function createExtensionFilteredSplitter(\n extensions: string[],\n options?: Omit<FileSplitterOptions, 'filter'>\n): FileSplitter {\n const normalizedExtensions = extensions.map(ext =>\n ext.startsWith('.') ? ext.toLowerCase() : `.${ext.toLowerCase()}`\n );\n\n return new FileSplitter({\n ...options,\n filter: (file) => {\n const ext = getFileExtension(file.path).toLowerCase();\n return normalizedExtensions.includes(ext);\n }\n });\n}\n\n/**\n * Get the file extension from a path\n */\nfunction getFileExtension(path: string): string {\n const lastDot = path.lastIndexOf('.');\n if (lastDot === -1 || lastDot === path.length - 1) {\n return '';\n }\n return path.slice(lastDot);\n}\n\n/**\n * Batched file splitter that groups multiple files into single work items\n */\nexport interface BatchedFileWorkItemData {\n /** Array of files in this batch */\n files: FileItem[];\n /** Common context from input */\n context?: Record<string, unknown>;\n /** Batch index */\n batchIndex: number;\n}\n\n/**\n * Splitter that creates work items with batches of files\n */\nexport class BatchedFileSplitter implements Splitter<FileInput, BatchedFileWorkItemData> {\n constructor(\n private batchSize: number = 5,\n private options: Omit<FileSplitterOptions, 'batchSize'> = {}\n ) {}\n\n split(input: FileInput): WorkItem<BatchedFileWorkItemData>[] {\n const { files, context } = input;\n const { filter } = this.options;\n\n // Apply filter if provided\n const filteredFiles = filter\n ? 
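// Usage sketch for the file splitters above. createExtensionFilteredSplitter() keeps only
// files whose extension matches one of the given extensions; the import path is illustrative.
import { createExtensionFilteredSplitter } from './splitters/file-splitter';

const tsSplitter = createExtensionFilteredSplitter(['ts', 'tsx']);

const fileWorkItems = tsSplitter.split({
  files: [
    { path: 'src/index.ts', content: 'export {};' },
    { path: 'README.md', content: '# readme' } // filtered out: not .ts/.tsx
  ],
  context: { branch: 'main' } // shared context attached to every work item
});

// Each work item id is derived from the (normalized) file path, and metadata records
// the file index and the total number of files after filtering.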
files.filter(filter)\n : files;\n\n const workItems: WorkItem<BatchedFileWorkItemData>[] = [];\n const totalBatches = Math.ceil(filteredFiles.length / this.batchSize);\n\n for (let i = 0; i < filteredFiles.length; i += this.batchSize) {\n const batch = filteredFiles.slice(i, i + this.batchSize);\n const batchIndex = Math.floor(i / this.batchSize);\n\n workItems.push({\n id: `batch-${batchIndex}`,\n data: {\n files: batch,\n context,\n batchIndex\n },\n metadata: {\n batchIndex,\n totalBatches,\n filesInBatch: batch.length,\n totalFiles: filteredFiles.length\n }\n });\n }\n\n return workItems;\n }\n}\n\n/**\n * Factory function to create a batched file splitter\n */\nexport function createBatchedFileSplitter(\n batchSize: number,\n options?: Omit<FileSplitterOptions, 'batchSize'>\n): BatchedFileSplitter {\n return new BatchedFileSplitter(batchSize, options);\n}\n", "/**\n * Chunk Splitter\n *\n * Splits large content into smaller chunks for processing.\n * Useful for processing large files or texts that exceed model context limits.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport { Splitter, WorkItem } from '../types';\nimport {\n DEFAULT_CHUNK_MAX_SIZE,\n DEFAULT_CHUNK_OVERLAP_SIZE,\n DEFAULT_CHUNK_STRATEGY,\n DEFAULT_CHUNK_PRESERVE_BOUNDARIES\n} from '../../config/defaults';\n\n/**\n * Input for chunk splitter\n */\nexport interface ChunkInput {\n /** The content to split into chunks */\n content: string;\n /** Optional source identifier (e.g., file path) */\n source?: string;\n /** Additional context to include with each chunk */\n context?: Record<string, unknown>;\n}\n\n/**\n * Work item data for chunk processing\n */\nexport interface ChunkWorkItemData {\n /** The chunk content */\n content: string;\n /** Chunk index (0-based) */\n chunkIndex: number;\n /** Total number of chunks */\n totalChunks: number;\n /** Source identifier */\n source?: string;\n /** Common context from input */\n context?: Record<string, unknown>;\n /** Start position in original content */\n startOffset?: number;\n /** End position in original content */\n endOffset?: number;\n}\n\n/**\n * Options for chunk splitter\n */\nexport interface ChunkSplitterOptions {\n /**\n * Maximum size of each chunk (in characters)\n * Default: 4000\n */\n maxChunkSize: number;\n\n /**\n * Number of characters to overlap between chunks\n * Default: 200\n */\n overlapSize: number;\n\n /**\n * Strategy for splitting\n * - 'character': Split by character count\n * - 'line': Split by line count\n * - 'paragraph': Split by paragraph (double newlines)\n * - 'sentence': Split by sentence boundaries\n * Default: 'character'\n */\n strategy: 'character' | 'line' | 'paragraph' | 'sentence';\n\n /**\n * Whether to preserve boundaries (lines, paragraphs, etc.)\n * When true, chunks may be smaller than maxChunkSize to avoid\n * breaking boundaries\n * Default: true\n */\n preserveBoundaries: boolean;\n}\n\n/**\n * Default chunk splitter options\n */\nconst DEFAULT_CHUNK_OPTIONS: ChunkSplitterOptions = {\n maxChunkSize: DEFAULT_CHUNK_MAX_SIZE,\n overlapSize: DEFAULT_CHUNK_OVERLAP_SIZE,\n strategy: DEFAULT_CHUNK_STRATEGY,\n preserveBoundaries: DEFAULT_CHUNK_PRESERVE_BOUNDARIES\n};\n\n/**\n * Splitter that divides content into smaller chunks\n */\nexport class ChunkSplitter implements Splitter<ChunkInput, ChunkWorkItemData> {\n private options: ChunkSplitterOptions;\n\n constructor(options: Partial<ChunkSplitterOptions> = {}) {\n this.options = { ...DEFAULT_CHUNK_OPTIONS, ...options };\n }\n\n split(input: ChunkInput): 
WorkItem<ChunkWorkItemData>[] {\n const { content, source, context } = input;\n\n if (!content || content.length === 0) {\n return [];\n }\n\n const chunks = this.splitContent(content);\n\n return chunks.map((chunk, index) => ({\n id: `chunk-${index}-${source || 'content'}`,\n data: {\n content: chunk.content,\n chunkIndex: index,\n totalChunks: chunks.length,\n source,\n context,\n startOffset: chunk.startOffset,\n endOffset: chunk.endOffset\n },\n metadata: {\n chunkIndex: index,\n totalChunks: chunks.length,\n chunkSize: chunk.content.length\n }\n }));\n }\n\n /**\n * Split content based on the configured strategy\n */\n private splitContent(content: string): Array<{\n content: string;\n startOffset: number;\n endOffset: number;\n }> {\n switch (this.options.strategy) {\n case 'line':\n return this.splitByLines(content);\n case 'paragraph':\n return this.splitByParagraphs(content);\n case 'sentence':\n return this.splitBySentences(content);\n case 'character':\n default:\n return this.splitByCharacters(content);\n }\n }\n\n /**\n * Split content by character count\n */\n private splitByCharacters(content: string): Array<{\n content: string;\n startOffset: number;\n endOffset: number;\n }> {\n const { maxChunkSize, overlapSize, preserveBoundaries } = this.options;\n const chunks: Array<{ content: string; startOffset: number; endOffset: number }> = [];\n\n // Ensure we make progress by limiting effective overlap\n const effectiveOverlap = Math.min(overlapSize, Math.floor(maxChunkSize / 2));\n\n let startOffset = 0;\n\n while (startOffset < content.length) {\n let endOffset = Math.min(startOffset + maxChunkSize, content.length);\n\n // Try to preserve boundaries if configured\n if (preserveBoundaries && endOffset < content.length) {\n // Look for a good break point (newline, period, space)\n const breakPoints = ['\\n\\n', '\\n', '. 
', ' '];\n const searchStart = Math.max(startOffset + 1, endOffset - 200);\n for (const breakPoint of breakPoints) {\n const lastBreak = content.lastIndexOf(breakPoint, endOffset);\n if (lastBreak >= searchStart) {\n endOffset = lastBreak + breakPoint.length;\n break;\n }\n }\n }\n\n // Ensure we always make progress\n if (endOffset <= startOffset) {\n endOffset = Math.min(startOffset + maxChunkSize, content.length);\n }\n\n chunks.push({\n content: content.slice(startOffset, endOffset),\n startOffset,\n endOffset\n });\n\n // If we've reached the end, stop\n if (endOffset >= content.length) {\n break;\n }\n\n // Move to next chunk with overlap, ensuring we make progress\n const nextStart = endOffset - effectiveOverlap;\n startOffset = Math.max(nextStart, startOffset + 1);\n }\n\n return chunks;\n }\n\n /**\n * Split content by lines\n */\n private splitByLines(content: string): Array<{\n content: string;\n startOffset: number;\n endOffset: number;\n }> {\n const { maxChunkSize, overlapSize } = this.options;\n const lines = content.split('\\n');\n const chunks: Array<{ content: string; startOffset: number; endOffset: number }> = [];\n\n let currentChunk: string[] = [];\n let currentSize = 0;\n let startOffset = 0;\n let lineStartOffset = 0;\n\n for (let i = 0; i < lines.length; i++) {\n const line = lines[i];\n const lineSize = line.length + 1; // +1 for newline\n\n if (currentSize + lineSize > maxChunkSize && currentChunk.length > 0) {\n // Save current chunk\n const chunkContent = currentChunk.join('\\n');\n chunks.push({\n content: chunkContent,\n startOffset,\n endOffset: startOffset + chunkContent.length\n });\n\n // Start new chunk with overlap\n const overlapLines = Math.max(1, Math.floor(overlapSize / 50)); // Estimate lines for overlap\n const overlapStart = Math.max(0, currentChunk.length - overlapLines);\n currentChunk = currentChunk.slice(overlapStart);\n currentSize = currentChunk.reduce((sum, l) => sum + l.length + 1, 0);\n startOffset = lineStartOffset - currentSize;\n }\n\n currentChunk.push(line);\n currentSize += lineSize;\n lineStartOffset += lineSize;\n }\n\n // Add final chunk\n if (currentChunk.length > 0) {\n const chunkContent = currentChunk.join('\\n');\n chunks.push({\n content: chunkContent,\n startOffset,\n endOffset: startOffset + chunkContent.length\n });\n }\n\n return chunks;\n }\n\n /**\n * Split content by paragraphs (double newlines)\n */\n private splitByParagraphs(content: string): Array<{\n content: string;\n startOffset: number;\n endOffset: number;\n }> {\n const { maxChunkSize } = this.options;\n const paragraphs = content.split(/\\n\\n+/);\n const chunks: Array<{ content: string; startOffset: number; endOffset: number }> = [];\n\n let currentChunk: string[] = [];\n let currentSize = 0;\n let startOffset = 0;\n let paragraphStartOffset = 0;\n\n for (const paragraph of paragraphs) {\n const paragraphSize = paragraph.length + 2; // +2 for \\n\\n\n\n if (currentSize + paragraphSize > maxChunkSize && currentChunk.length > 0) {\n // Save current chunk\n const chunkContent = currentChunk.join('\\n\\n');\n chunks.push({\n content: chunkContent,\n startOffset,\n endOffset: startOffset + chunkContent.length\n });\n\n // Start new chunk (no overlap for paragraph mode)\n currentChunk = [];\n currentSize = 0;\n startOffset = paragraphStartOffset;\n }\n\n currentChunk.push(paragraph);\n currentSize += paragraphSize;\n paragraphStartOffset += paragraphSize;\n }\n\n // Add final chunk\n if (currentChunk.length > 0) {\n const chunkContent = 
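// Worked example of the character-chunking arithmetic above (commentary only, not package
// code): with maxChunkSize = 10 and overlapSize = 4, effectiveOverlap = min(4, floor(10 / 2))
// = 4, so ignoring boundary preservation the first chunk covers offsets [0, 10) and the next
// chunk starts at 10 - 4 = 6, i.e. consecutive chunks share 4 characters. Because the next
// start is clamped to startOffset + 1, the loop always advances even when a break-point
// search pulls endOffset back toward startOffset.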
currentChunk.join('\\n\\n');\n chunks.push({\n content: chunkContent,\n startOffset,\n endOffset: startOffset + chunkContent.length\n });\n }\n\n return chunks;\n }\n\n /**\n * Split content by sentences\n */\n private splitBySentences(content: string): Array<{\n content: string;\n startOffset: number;\n endOffset: number;\n }> {\n const { maxChunkSize, overlapSize } = this.options;\n // Simple sentence splitting - matches period/exclamation/question followed by space and capital\n const sentencePattern = /[.!?]+\\s+(?=[A-Z])/g;\n const sentences: string[] = [];\n let lastIndex = 0;\n let match;\n\n while ((match = sentencePattern.exec(content)) !== null) {\n sentences.push(content.slice(lastIndex, match.index + match[0].length - 1));\n lastIndex = match.index + match[0].length - 1;\n }\n if (lastIndex < content.length) {\n sentences.push(content.slice(lastIndex));\n }\n\n const chunks: Array<{ content: string; startOffset: number; endOffset: number }> = [];\n let currentChunk: string[] = [];\n let currentSize = 0;\n let startOffset = 0;\n let sentenceStartOffset = 0;\n\n for (const sentence of sentences) {\n const sentenceSize = sentence.length;\n\n if (currentSize + sentenceSize > maxChunkSize && currentChunk.length > 0) {\n // Save current chunk\n const chunkContent = currentChunk.join(' ');\n chunks.push({\n content: chunkContent,\n startOffset,\n endOffset: startOffset + chunkContent.length\n });\n\n // Start new chunk with some overlap\n const overlapSentences = Math.max(1, Math.floor(overlapSize / 100));\n const overlapStart = Math.max(0, currentChunk.length - overlapSentences);\n currentChunk = currentChunk.slice(overlapStart);\n currentSize = currentChunk.reduce((sum, s) => sum + s.length + 1, 0);\n startOffset = sentenceStartOffset - currentSize;\n }\n\n currentChunk.push(sentence);\n currentSize += sentenceSize + 1;\n sentenceStartOffset += sentenceSize + 1;\n }\n\n // Add final chunk\n if (currentChunk.length > 0) {\n const chunkContent = currentChunk.join(' ');\n chunks.push({\n content: chunkContent,\n startOffset,\n endOffset: startOffset + chunkContent.length\n });\n }\n\n return chunks;\n }\n}\n\n/**\n * Factory function to create a chunk splitter\n */\nexport function createChunkSplitter(options?: Partial<ChunkSplitterOptions>): ChunkSplitter {\n return new ChunkSplitter(options);\n}\n\n/**\n * Create a line-based chunk splitter\n */\nexport function createLineChunkSplitter(\n maxChunkSize: number = 4000,\n overlapSize: number = 200\n): ChunkSplitter {\n return new ChunkSplitter({\n maxChunkSize,\n overlapSize,\n strategy: 'line',\n preserveBoundaries: true\n });\n}\n\n/**\n * Create a paragraph-based chunk splitter\n */\nexport function createParagraphChunkSplitter(maxChunkSize: number = 4000): ChunkSplitter {\n return new ChunkSplitter({\n maxChunkSize,\n overlapSize: 0, // No overlap for paragraphs\n strategy: 'paragraph',\n preserveBoundaries: true\n });\n}\n", "/**\n * Rule Splitter\n *\n * Splits input by rules for rule-based processing.\n * Designed for code review and similar rule-based workflows.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport { Splitter, WorkItem } from '../types';\n\n/**\n * A rule definition\n */\nexport interface Rule {\n /** Unique identifier for the rule */\n id: string;\n /** Rule filename */\n filename: string;\n /** Full path to the rule file */\n path: string;\n /** Rule content (the rule definition text) */\n content: string;\n /** Optional parsed front matter metadata */\n frontMatter?: Record<string, 
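// Usage sketch for the chunk splitter above. Unspecified options fall back to the
// DEFAULT_CHUNK_* values from config/defaults; the import path is illustrative.
import { createChunkSplitter } from './splitters/chunk-splitter';

const longDocumentText = Array.from({ length: 50 }, (_, i) => `Line ${i} of the document.`).join('\n');

const chunker = createChunkSplitter({
  maxChunkSize: 200,
  overlapSize: 40,
  strategy: 'line',        // accumulate whole lines until the size budget is reached
  preserveBoundaries: true
});

const chunks = chunker.split({
  content: longDocumentText,
  source: 'docs/guide.md'  // used in the generated work item ids
});

// Every work item carries chunkIndex/totalChunks plus start/end offsets into the original text.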
unknown>;\n}\n\n/**\n * Input for rule splitter\n */\nexport interface RuleInput {\n /** Array of rules to process */\n rules: Rule[];\n /** The content to review against rules (e.g., diff) */\n targetContent: string;\n /** Additional context to include with each work item */\n context?: Record<string, unknown>;\n}\n\n/**\n * Work item data for rule processing\n */\nexport interface RuleWorkItemData {\n /** The rule being applied */\n rule: Rule;\n /** The content to review against this rule */\n targetContent: string;\n /** Common context from input */\n context?: Record<string, unknown>;\n}\n\n/**\n * Options for rule splitter\n */\nexport interface RuleSplitterOptions {\n /**\n * Function to generate work item ID from rule\n * Default: uses rule id or filename\n */\n generateId?: (rule: Rule, index: number) => string;\n\n /**\n * Filter function to exclude certain rules\n */\n filter?: (rule: Rule) => boolean;\n\n /**\n * Function to validate a rule before including it\n * Returns true if valid, false to skip\n */\n validate?: (rule: Rule) => boolean;\n\n /**\n * Sort function for rules (determines processing order)\n */\n sort?: (a: Rule, b: Rule) => number;\n}\n\n/**\n * Splitter that creates a work item for each rule\n */\nexport class RuleSplitter implements Splitter<RuleInput, RuleWorkItemData> {\n constructor(private options: RuleSplitterOptions = {}) {}\n\n split(input: RuleInput): WorkItem<RuleWorkItemData>[] {\n const { rules, targetContent, context } = input;\n const { generateId, filter, validate, sort } = this.options;\n\n // Filter rules\n let processedRules = filter\n ? rules.filter(filter)\n : [...rules];\n\n // Validate rules\n if (validate) {\n processedRules = processedRules.filter(validate);\n }\n\n // Sort rules\n if (sort) {\n processedRules.sort(sort);\n }\n\n // Generate work items\n return processedRules.map((rule, index) => {\n const id = generateId\n ? 
generateId(rule, index)\n : `rule-${rule.id || this.sanitizeFilename(rule.filename)}`;\n\n return {\n id,\n data: {\n rule,\n targetContent,\n context\n },\n metadata: {\n ruleId: rule.id,\n ruleFilename: rule.filename,\n rulePath: rule.path,\n index,\n totalRules: processedRules.length,\n frontMatter: rule.frontMatter\n }\n };\n });\n }\n\n /**\n * Sanitize filename for use in ID\n */\n private sanitizeFilename(filename: string): string {\n return filename\n .replace(/\\.[^/.]+$/, '') // Remove extension\n .replace(/[^a-zA-Z0-9-_]/g, '-') // Replace special chars\n .toLowerCase();\n }\n}\n\n/**\n * Factory function to create a rule splitter\n */\nexport function createRuleSplitter(options?: RuleSplitterOptions): RuleSplitter {\n return new RuleSplitter(options);\n}\n\n/**\n * Create a rule splitter with alphabetical sorting\n */\nexport function createAlphabeticRuleSplitter(options?: Omit<RuleSplitterOptions, 'sort'>): RuleSplitter {\n return new RuleSplitter({\n ...options,\n sort: (a, b) => a.filename.localeCompare(b.filename)\n });\n}\n\n/**\n * Create a rule splitter with priority-based sorting\n * Rules with lower priority numbers are processed first\n */\nexport function createPriorityRuleSplitter(\n getPriority: (rule: Rule) => number,\n options?: Omit<RuleSplitterOptions, 'sort'>\n): RuleSplitter {\n return new RuleSplitter({\n ...options,\n sort: (a, b) => getPriority(a) - getPriority(b)\n });\n}\n\n/**\n * Create a rule splitter that filters by file patterns\n * Only includes rules that apply to the given file extensions\n */\nexport function createPatternFilteredRuleSplitter(\n fileExtensions: string[],\n options?: Omit<RuleSplitterOptions, 'filter'>\n): RuleSplitter {\n const normalizedExtensions = new Set(\n fileExtensions.map(ext =>\n ext.startsWith('.') ? ext.toLowerCase() : `.${ext.toLowerCase()}`\n )\n );\n\n return new RuleSplitter({\n ...options,\n filter: (rule) => {\n // Check if rule has appliesTo in front matter\n const appliesTo = rule.frontMatter?.['applies-to'] as string[] | undefined;\n if (!appliesTo || !Array.isArray(appliesTo)) {\n return true; // Include rules without pattern restrictions\n }\n\n // Check if any pattern matches our extensions\n for (const pattern of appliesTo) {\n // Handle glob patterns like *.ts, *.js\n if (pattern.startsWith('*.')) {\n const ext = pattern.slice(1).toLowerCase();\n if (normalizedExtensions.has(ext)) {\n return true;\n }\n }\n }\n\n return false;\n }\n });\n}\n\n/**\n * Batch rule splitter that groups multiple rules into single work items\n */\nexport interface BatchedRuleWorkItemData {\n /** Array of rules in this batch */\n rules: Rule[];\n /** The content to review against these rules */\n targetContent: string;\n /** Common context from input */\n context?: Record<string, unknown>;\n /** Batch index */\n batchIndex: number;\n}\n\n/**\n * Splitter that creates work items with batches of rules\n */\nexport class BatchedRuleSplitter implements Splitter<RuleInput, BatchedRuleWorkItemData> {\n constructor(\n private batchSize: number = 3,\n private options: Omit<RuleSplitterOptions, 'generateId'> = {}\n ) {}\n\n split(input: RuleInput): WorkItem<BatchedRuleWorkItemData>[] {\n const { rules, targetContent, context } = input;\n const { filter, validate, sort } = this.options;\n\n // Filter and validate rules\n let processedRules = filter\n ? 
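// Usage sketch for the rule splitters above. createPatternFilteredRuleSplitter() keeps rules
// whose front-matter "applies-to" globs (e.g. "*.ts") match the given extensions; rules
// without that field are always included. The import path is illustrative.
import { createPatternFilteredRuleSplitter } from './splitters/rule-splitter';

const ruleSplitter = createPatternFilteredRuleSplitter(['ts']);

const ruleWorkItems = ruleSplitter.split({
  rules: [
    {
      id: 'no-console',
      filename: 'no-console.md',
      path: 'rules/no-console.md',
      content: 'Do not commit console.log calls.',
      frontMatter: { 'applies-to': ['*.ts', '*.tsx'] }
    }
  ],
  targetContent: '+ console.log("debug")', // typically a diff to review
  context: { commitSha: 'abc123' }
});

// One work item per matching rule, each pairing that rule with the shared target content.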
rules.filter(filter)\n : [...rules];\n\n if (validate) {\n processedRules = processedRules.filter(validate);\n }\n\n if (sort) {\n processedRules.sort(sort);\n }\n\n const workItems: WorkItem<BatchedRuleWorkItemData>[] = [];\n const totalBatches = Math.ceil(processedRules.length / this.batchSize);\n\n for (let i = 0; i < processedRules.length; i += this.batchSize) {\n const batch = processedRules.slice(i, i + this.batchSize);\n const batchIndex = Math.floor(i / this.batchSize);\n\n workItems.push({\n id: `rule-batch-${batchIndex}`,\n data: {\n rules: batch,\n targetContent,\n context,\n batchIndex\n },\n metadata: {\n batchIndex,\n totalBatches,\n rulesInBatch: batch.length,\n totalRules: processedRules.length,\n ruleFilenames: batch.map(r => r.filename)\n }\n });\n }\n\n return workItems;\n }\n}\n\n/**\n * Factory function to create a batched rule splitter\n */\nexport function createBatchedRuleSplitter(\n batchSize: number,\n options?: Omit<RuleSplitterOptions, 'generateId'>\n): BatchedRuleSplitter {\n return new BatchedRuleSplitter(batchSize, options);\n}\n", "/**\n * Splitters Module\n *\n * Exports all splitter implementations and utilities.\n */\n\n// File splitter\nexport {\n FileSplitter,\n createFileSplitter,\n createExtensionFilteredSplitter,\n BatchedFileSplitter,\n createBatchedFileSplitter\n} from './file-splitter';\nexport type {\n FileItem,\n FileInput,\n FileWorkItemData,\n FileSplitterOptions,\n BatchedFileWorkItemData\n} from './file-splitter';\n\n// Chunk splitter\nexport {\n ChunkSplitter,\n createChunkSplitter,\n createLineChunkSplitter,\n createParagraphChunkSplitter\n} from './chunk-splitter';\nexport type {\n ChunkInput,\n ChunkWorkItemData,\n ChunkSplitterOptions\n} from './chunk-splitter';\n\n// Rule splitter\nexport {\n RuleSplitter,\n createRuleSplitter,\n createAlphabeticRuleSplitter,\n createPriorityRuleSplitter,\n createPatternFilteredRuleSplitter,\n BatchedRuleSplitter,\n createBatchedRuleSplitter\n} from './rule-splitter';\nexport type {\n Rule,\n RuleInput,\n RuleWorkItemData,\n RuleSplitterOptions,\n BatchedRuleWorkItemData\n} from './rule-splitter';\n", "/**\n * Code Review Job\n *\n * Map-reduce job wrapper for AI-powered code review.\n * Reviews code changes against a set of coding rules.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport {\n AIInvoker,\n MapContext,\n Mapper,\n MapReduceJob,\n MapResult,\n ReduceContext,\n ReduceResult,\n WorkItem\n} from '../types';\nimport { Rule, RuleInput, RuleSplitter, RuleWorkItemData } from '../splitters';\nimport { BaseReducer, Deduplicatable, DeterministicReducer, DeterministicReducerOptions, DeterministicReduceOutput } from '../reducers';\n\n/**\n * Severity levels for code review findings\n */\nexport type ReviewSeverity = 'error' | 'warning' | 'info' | 'suggestion';\n\n/**\n * A single code review finding\n */\nexport interface ReviewFinding extends Deduplicatable {\n /** Unique identifier */\n id: string;\n /** Severity level */\n severity: ReviewSeverity;\n /** Rule that generated this finding */\n rule: string;\n /** Source rule file */\n ruleFile?: string;\n /** File path */\n file?: string;\n /** Line number */\n line?: number;\n /** Description of the issue */\n description: string;\n /** Code snippet */\n codeSnippet?: string;\n /** Suggested fix */\n suggestion?: string;\n /** Additional explanation */\n explanation?: string;\n}\n\n/**\n * Result from a single rule review (map output)\n */\nexport interface RuleReviewResult {\n /** The rule that was checked */\n rule: Rule;\n /** 
Whether the review succeeded */\n success: boolean;\n /** Error message if failed */\n error?: string;\n /** Findings from this rule */\n findings: ReviewFinding[];\n /** Raw AI response */\n rawResponse?: string;\n /** Overall assessment */\n assessment?: 'pass' | 'needs-attention' | 'fail';\n}\n\n/**\n * Summary of review results\n */\nexport interface ReviewSummary {\n /** Total findings count */\n totalFindings: number;\n /** Count by severity */\n bySeverity: {\n error: number;\n warning: number;\n info: number;\n suggestion: number;\n };\n /** Count by rule */\n byRule: Record<string, number>;\n /** Overall assessment */\n overallAssessment: 'pass' | 'needs-attention' | 'fail';\n /** Summary text */\n summaryText: string;\n}\n\n/**\n * Final reduced output for code review\n */\nexport interface CodeReviewOutput {\n /** Deduplicated findings */\n findings: ReviewFinding[];\n /** Summary statistics */\n summary: ReviewSummary;\n}\n\n/**\n * Input for code review job\n */\nexport interface CodeReviewInput {\n /** The diff content to review */\n diff: string;\n /** Array of rules to check against */\n rules: Rule[];\n /** Additional context */\n context?: {\n commitSha?: string;\n commitMessage?: string;\n filesChanged?: number;\n isHotfix?: boolean;\n repositoryRoot?: string;\n };\n}\n\n/**\n * Options for code review job\n */\nexport interface CodeReviewJobOptions {\n /** AI invoker function */\n aiInvoker: AIInvoker;\n /** Whether to use AI-powered reduce (default: false) */\n useAIReduce?: boolean;\n /** Custom prompt template for rule reviews */\n promptTemplate?: string;\n /** Custom response parser */\n responseParser?: (response: string, rule: Rule) => ReviewFinding[];\n}\n\n/**\n * Default prompt template for single-rule review\n */\nconst DEFAULT_REVIEW_PROMPT_TEMPLATE = `You are a code reviewer checking for ONE specific rule.\n\n## Rule: {{ruleName}}\n{{ruleContent}}\n\n## Instructions\n1. Review the diff below for violations of THIS RULE ONLY\n2. For each violation found, provide:\n - severity: ERROR, WARNING, INFO, or SUGGESTION\n - file: the file path\n - line: the line number\n - description: what's wrong\n - suggestion: how to fix it\n3. If no violations found, return an empty findings array\n4. 
Be precise - only flag clear violations\n\n## Diff to Review\n\\`\\`\\`diff\n{{diff}}\n\\`\\`\\`\n\n## Output Format\nReturn JSON:\n{\n \"assessment\": \"pass\" | \"needs-attention\" | \"fail\",\n \"findings\": [\n {\n \"severity\": \"error|warning|info|suggestion\",\n \"file\": \"path/to/file.ts\",\n \"line\": 42,\n \"description\": \"Description of the problem\",\n \"suggestion\": \"How to fix it\"\n }\n ]\n}`;\n\n/**\n * Mapper for code review - reviews a single rule\n */\nclass CodeReviewMapper implements Mapper<RuleWorkItemData, RuleReviewResult> {\n constructor(\n private aiInvoker: AIInvoker,\n private promptTemplate: string,\n private responseParser?: (response: string, rule: Rule) => ReviewFinding[]\n ) {}\n\n async map(\n item: WorkItem<RuleWorkItemData>,\n context: MapContext\n ): Promise<RuleReviewResult> {\n const { rule, targetContent } = item.data;\n\n // Build prompt\n const prompt = this.buildPrompt(rule, targetContent);\n\n try {\n // Get model from rule's front matter\n const model = rule.frontMatter?.model as string | undefined;\n\n // Invoke AI\n const result = await this.aiInvoker(prompt, { model });\n\n if (result.success && result.response) {\n const findings = this.parseResponse(result.response, rule);\n const assessment = this.determineAssessment(findings);\n\n return {\n rule,\n success: true,\n findings,\n rawResponse: result.response,\n assessment\n };\n }\n\n return {\n rule,\n success: false,\n error: result.error || 'Unknown error',\n findings: []\n };\n } catch (error) {\n return {\n rule,\n success: false,\n error: error instanceof Error ? error.message : String(error),\n findings: []\n };\n }\n }\n\n private buildPrompt(rule: Rule, diff: string): string {\n return this.promptTemplate\n .replace(/\\{\\{ruleName\\}\\}/g, rule.filename)\n .replace(/\\{\\{ruleContent\\}\\}/g, rule.content)\n .replace(/\\{\\{diff\\}\\}/g, diff);\n }\n\n private parseResponse(response: string, rule: Rule): ReviewFinding[] {\n if (this.responseParser) {\n return this.responseParser(response, rule);\n }\n\n return this.defaultParseResponse(response, rule);\n }\n\n private defaultParseResponse(response: string, rule: Rule): ReviewFinding[] {\n try {\n // Try to extract JSON from response\n const jsonMatch = response.match(/\\{[\\s\\S]*\\}/);\n if (!jsonMatch) {\n return [];\n }\n\n const parsed = JSON.parse(jsonMatch[0]);\n const findings: ReviewFinding[] = [];\n\n if (parsed.findings && Array.isArray(parsed.findings)) {\n for (let i = 0; i < parsed.findings.length; i++) {\n const f = parsed.findings[i];\n findings.push({\n id: `${rule.filename}-${i}`,\n severity: this.mapSeverity(f.severity),\n rule: rule.filename,\n ruleFile: rule.filename,\n file: f.file,\n line: f.line,\n description: f.description || f.issue || '',\n codeSnippet: f.code || f.codeSnippet,\n suggestion: f.suggestion,\n explanation: f.explanation\n });\n }\n }\n\n return findings;\n } catch {\n return [];\n }\n }\n\n private mapSeverity(severity: string): ReviewSeverity {\n const lower = (severity || '').toLowerCase();\n if (lower === 'error' || lower === 'critical') {\n return 'error';\n }\n if (lower === 'warning' || lower === 'major') {\n return 'warning';\n }\n if (lower === 'info' || lower === 'minor') {\n return 'info';\n }\n return 'suggestion';\n }\n\n private determineAssessment(findings: ReviewFinding[]): 'pass' | 'needs-attention' | 'fail' {\n if (findings.some(f => f.severity === 'error')) {\n return 'fail';\n }\n if (findings.some(f => f.severity === 'warning')) {\n return 'needs-attention';\n }\n 
return 'pass';\n }\n}\n\n/**\n * Reducer for code review - aggregates findings from all rules\n */\nclass CodeReviewReducer extends BaseReducer<RuleReviewResult, CodeReviewOutput> {\n private deterministicReducer: DeterministicReducer<ReviewFinding>;\n\n constructor() {\n super();\n\n const options: DeterministicReducerOptions<ReviewFinding> = {\n getKey: (finding) => {\n const file = finding.file || 'global';\n const line = finding.line || 0;\n const descNormalized = (finding.description || '')\n .toLowerCase()\n .replace(/\\s+/g, ' ')\n .trim()\n .substring(0, 100);\n return `${file}:${line}:${descNormalized}`;\n },\n\n merge: (existing, newFinding) => {\n const severityRank: Record<ReviewSeverity, number> = {\n 'error': 4,\n 'warning': 3,\n 'info': 2,\n 'suggestion': 1\n };\n\n const keepNew = severityRank[newFinding.severity] > severityRank[existing.severity];\n const base = keepNew ? newFinding : existing;\n const other = keepNew ? existing : newFinding;\n\n return {\n ...base,\n rule: base.rule === other.rule ? base.rule : `${base.rule}, ${other.rule}`,\n suggestion: (base.suggestion?.length || 0) >= (other.suggestion?.length || 0)\n ? base.suggestion\n : other.suggestion,\n explanation: (base.explanation?.length || 0) >= (other.explanation?.length || 0)\n ? base.explanation\n : other.explanation\n };\n },\n\n sort: (a, b) => {\n const severityOrder: Record<ReviewSeverity, number> = {\n 'error': 0,\n 'warning': 1,\n 'info': 2,\n 'suggestion': 3\n };\n\n const severityDiff = severityOrder[a.severity] - severityOrder[b.severity];\n if (severityDiff !== 0) {\n return severityDiff;\n }\n\n const fileA = a.file || '';\n const fileB = b.file || '';\n const fileDiff = fileA.localeCompare(fileB);\n if (fileDiff !== 0) {\n return fileDiff;\n }\n\n return (a.line || 0) - (b.line || 0);\n },\n\n summarize: (items) => {\n const bySeverity = { error: 0, warning: 0, info: 0, suggestion: 0 };\n const byRule: Record<string, number> = {};\n\n for (const finding of items) {\n bySeverity[finding.severity]++;\n byRule[finding.rule] = (byRule[finding.rule] || 0) + 1;\n }\n\n return { bySeverity, byRule };\n }\n };\n\n this.deterministicReducer = new DeterministicReducer(options);\n }\n\n async reduce(\n results: MapResult<RuleReviewResult>[],\n context: ReduceContext\n ): Promise<ReduceResult<CodeReviewOutput>> {\n const startTime = Date.now();\n\n // Collect all findings from successful results\n const allFindings: ReviewFinding[][] = [];\n const ruleResults: RuleReviewResult[] = [];\n\n for (const result of results) {\n if (result.success && result.output) {\n const ruleResult = result.output;\n ruleResults.push(ruleResult);\n\n if (ruleResult.success && ruleResult.findings) {\n // Tag findings with rule file\n const taggedFindings = ruleResult.findings.map(f => ({\n ...f,\n ruleFile: ruleResult.rule.filename,\n rule: f.rule || ruleResult.rule.filename\n }));\n allFindings.push(taggedFindings);\n }\n }\n }\n\n // Use deterministic reducer to deduplicate\n const mockMapResults: MapResult<ReviewFinding[]>[] = allFindings.map((findings, i) => ({\n workItemId: `findings-${i}`,\n success: true,\n output: findings,\n executionTimeMs: 0\n }));\n\n const deterministicResult = await this.deterministicReducer.reduce(mockMapResults, context);\n const dedupedFindings = deterministicResult.output.items;\n\n // Create summary\n const summary = this.createSummary(dedupedFindings, ruleResults);\n\n const reduceTimeMs = Date.now() - startTime;\n\n return {\n output: {\n findings: dedupedFindings,\n summary\n },\n 
stats: {\n inputCount: allFindings.reduce((sum, arr) => sum + arr.length, 0),\n outputCount: dedupedFindings.length,\n mergedCount: deterministicResult.stats.mergedCount,\n reduceTimeMs,\n usedAIReduce: false\n }\n };\n }\n\n private createSummary(findings: ReviewFinding[], ruleResults: RuleReviewResult[]): ReviewSummary {\n const bySeverity = { error: 0, warning: 0, info: 0, suggestion: 0 };\n const byRule: Record<string, number> = {};\n\n for (const finding of findings) {\n bySeverity[finding.severity]++;\n byRule[finding.rule] = (byRule[finding.rule] || 0) + 1;\n }\n\n // Determine overall assessment\n let overallAssessment: 'pass' | 'needs-attention' | 'fail' = 'pass';\n\n if (bySeverity.error > 0) {\n overallAssessment = 'fail';\n } else if (bySeverity.warning > 0) {\n overallAssessment = 'needs-attention';\n }\n\n // Also check individual rule assessments\n for (const result of ruleResults) {\n if (result.assessment === 'fail') {\n overallAssessment = 'fail';\n break;\n }\n if (result.assessment === 'needs-attention' && overallAssessment !== 'fail') {\n overallAssessment = 'needs-attention';\n }\n }\n\n // Generate summary text\n const failedRules = ruleResults.filter(r => !r.success).length;\n let summaryText: string;\n\n if (failedRules > 0) {\n summaryText = `Reviewed against ${ruleResults.length} rules (${failedRules} failed). `;\n } else {\n summaryText = `Reviewed against ${ruleResults.length} rules. `;\n }\n\n if (findings.length === 0) {\n summaryText += 'No issues found.';\n } else {\n summaryText += `Found ${findings.length} issue(s): ${bySeverity.error} error(s), ${bySeverity.warning} warning(s), ${bySeverity.info} info, ${bySeverity.suggestion} suggestion(s).`;\n }\n\n return {\n totalFindings: findings.length,\n bySeverity,\n byRule,\n overallAssessment,\n summaryText\n };\n }\n}\n\n/**\n * Create a code review job\n */\nexport function createCodeReviewJob(\n options: CodeReviewJobOptions\n): MapReduceJob<CodeReviewInput, RuleWorkItemData, RuleReviewResult, CodeReviewOutput> {\n const promptTemplate = options.promptTemplate || DEFAULT_REVIEW_PROMPT_TEMPLATE;\n\n // Create splitter that converts CodeReviewInput to RuleInput\n const ruleSplitter = new RuleSplitter();\n\n // Create a wrapper splitter for CodeReviewInput\n const splitter = {\n split: (input: CodeReviewInput) => {\n const ruleInput: RuleInput = {\n rules: input.rules,\n targetContent: input.diff,\n context: input.context\n };\n return ruleSplitter.split(ruleInput);\n }\n };\n\n return {\n id: 'code-review',\n name: 'Code Review',\n splitter,\n mapper: new CodeReviewMapper(options.aiInvoker, promptTemplate, options.responseParser),\n reducer: new CodeReviewReducer(),\n options: {\n maxConcurrency: 5,\n reduceMode: options.useAIReduce ? 
'ai' : 'deterministic',\n showProgress: true,\n retryOnFailure: false\n }\n };\n}\n\n// Re-export types\nexport type { Rule, RuleInput, RuleWorkItemData } from '../splitters';\n", "/**\n * Template Job\n *\n * Helper for creating list + template prompt workflows.\n * Applies a prompt template to each item in a list.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport {\n AIInvoker,\n MapContext,\n Mapper,\n MapReduceJob,\n MapResult,\n ReduceContext,\n ReduceResult,\n Splitter,\n WorkItem\n} from '../types';\nimport { createTemplate, renderTemplate, MissingVariableError } from '../prompt-template';\nimport { PromptTemplate } from '../types';\nimport { BaseReducer, FlattenReducer } from '../reducers';\n\n/**\n * A single item in the template job input\n */\nexport interface TemplateItem {\n /** Unique identifier for this item */\n id?: string;\n /** Variables to substitute in the template */\n variables: Record<string, string | number | boolean>;\n /** Optional metadata */\n metadata?: Record<string, unknown>;\n}\n\n/**\n * Input for template job\n */\nexport interface TemplateJobInput {\n /** Items to process */\n items: TemplateItem[];\n /** Global variables available to all items */\n globalVariables?: Record<string, string | number | boolean>;\n}\n\n/**\n * Work item data for template processing\n */\nexport interface TemplateWorkItemData {\n /** The template item */\n item: TemplateItem;\n /** Global variables */\n globalVariables?: Record<string, string | number | boolean>;\n}\n\n/**\n * Result from processing a single template item\n */\nexport interface TemplateItemResult<TOutput = string> {\n /** Item ID */\n itemId: string;\n /** Whether processing succeeded */\n success: boolean;\n /** The output (if successful) */\n output?: TOutput;\n /** Error message (if failed) */\n error?: string;\n /** Raw AI response */\n rawResponse?: string;\n}\n\n/**\n * Options for template job\n */\nexport interface TemplateJobOptions<TOutput = string> {\n /** AI invoker function */\n aiInvoker: AIInvoker;\n /** Template string with {{variable}} placeholders */\n template: string;\n /** Required variables that must be present */\n requiredVariables?: string[];\n /** Optional system prompt */\n systemPrompt?: string;\n /** Function to parse the AI response */\n responseParser?: (response: string) => TOutput;\n /** Custom reducer (default: flatten results) */\n reducer?: BaseReducer<TemplateItemResult<TOutput>, unknown>;\n /** Optional model to use */\n model?: string;\n}\n\n/**\n * Splitter for template jobs - creates a work item for each input item\n */\nclass TemplateSplitter implements Splitter<TemplateJobInput, TemplateWorkItemData> {\n split(input: TemplateJobInput): WorkItem<TemplateWorkItemData>[] {\n return input.items.map((item, index) => ({\n id: item.id || `item-${index}`,\n data: {\n item,\n globalVariables: input.globalVariables\n },\n metadata: {\n index,\n totalItems: input.items.length,\n ...item.metadata\n }\n }));\n }\n}\n\n/**\n * Mapper for template jobs - applies template and invokes AI\n */\nclass TemplateMapper<TOutput> implements Mapper<TemplateWorkItemData, TemplateItemResult<TOutput>> {\n private promptTemplate: PromptTemplate;\n\n constructor(\n private aiInvoker: AIInvoker,\n private options: {\n template: string;\n requiredVariables?: string[];\n systemPrompt?: string;\n responseParser?: (response: string) => TOutput;\n model?: string;\n }\n ) {\n this.promptTemplate = createTemplate({\n template: options.template,\n requiredVariables: 
options.requiredVariables,\n systemPrompt: options.systemPrompt\n });\n }\n\n async map(\n workItem: WorkItem<TemplateWorkItemData>,\n context: MapContext\n ): Promise<TemplateItemResult<TOutput>> {\n const { item, globalVariables } = workItem.data;\n const itemId = item.id || workItem.id;\n\n // Merge global and item variables\n const variables = {\n ...globalVariables,\n ...item.variables\n };\n\n try {\n // Render the prompt\n const prompt = renderTemplate(this.promptTemplate, {\n variables,\n includeSystemPrompt: !!this.options.systemPrompt\n });\n\n // Invoke AI\n const result = await this.aiInvoker(prompt, {\n model: this.options.model\n });\n\n if (result.success && result.response) {\n // Parse response if parser provided\n const output = this.options.responseParser\n ? this.options.responseParser(result.response)\n : result.response as unknown as TOutput;\n\n return {\n itemId,\n success: true,\n output,\n rawResponse: result.response\n };\n }\n\n return {\n itemId,\n success: false,\n error: result.error || 'Unknown error',\n rawResponse: result.response\n };\n } catch (error) {\n if (error instanceof MissingVariableError) {\n return {\n itemId,\n success: false,\n error: `Missing variable: ${error.variableName}`\n };\n }\n\n return {\n itemId,\n success: false,\n error: error instanceof Error ? error.message : String(error)\n };\n }\n }\n}\n\n/**\n * Default reducer for template jobs - collects all results\n */\nclass TemplateResultsReducer<TOutput> extends BaseReducer<TemplateItemResult<TOutput>, TemplateItemResult<TOutput>[]> {\n async reduce(\n results: MapResult<TemplateItemResult<TOutput>>[],\n context: ReduceContext\n ): Promise<ReduceResult<TemplateItemResult<TOutput>[]>> {\n const startTime = Date.now();\n\n const outputs = results\n .filter(r => r.success && r.output)\n .map(r => r.output!);\n\n const reduceTimeMs = Date.now() - startTime;\n\n return {\n output: outputs,\n stats: {\n inputCount: results.length,\n outputCount: outputs.length,\n mergedCount: 0,\n reduceTimeMs,\n usedAIReduce: false\n }\n };\n }\n}\n\n/**\n * Create a template job\n */\nexport function createTemplateJob<TOutput = string>(\n options: TemplateJobOptions<TOutput>\n): MapReduceJob<TemplateJobInput, TemplateWorkItemData, TemplateItemResult<TOutput>, TemplateItemResult<TOutput>[]> {\n return {\n id: 'template-job',\n name: 'Template Processing',\n splitter: new TemplateSplitter(),\n mapper: new TemplateMapper<TOutput>(options.aiInvoker, {\n template: options.template,\n requiredVariables: options.requiredVariables,\n systemPrompt: options.systemPrompt,\n responseParser: options.responseParser,\n model: options.model\n }),\n reducer: new TemplateResultsReducer<TOutput>(),\n options: {\n maxConcurrency: 5,\n reduceMode: 'deterministic',\n showProgress: true,\n retryOnFailure: false\n }\n };\n}\n\n/**\n * Create a simple string template job (no custom parsing)\n */\nexport function createSimpleTemplateJob(\n aiInvoker: AIInvoker,\n template: string,\n options?: {\n systemPrompt?: string;\n model?: string;\n maxConcurrency?: number;\n }\n): MapReduceJob<TemplateJobInput, TemplateWorkItemData, TemplateItemResult<string>, TemplateItemResult<string>[]> {\n return createTemplateJob({\n aiInvoker,\n template,\n systemPrompt: options?.systemPrompt,\n model: options?.model\n });\n}\n\n/**\n * Create a JSON template job with type-safe parsing\n */\nexport function createJsonTemplateJob<TOutput>(\n aiInvoker: AIInvoker,\n template: string,\n options?: {\n systemPrompt?: string;\n model?: string;\n 
validator?: (obj: unknown) => obj is TOutput;\n }\n): MapReduceJob<TemplateJobInput, TemplateWorkItemData, TemplateItemResult<TOutput>, TemplateItemResult<TOutput>[]> {\n const responseParser = (response: string): TOutput => {\n // Try to extract JSON from response\n const jsonMatch = response.match(/```(?:json)?\\s*([\\s\\S]*?)```/);\n const jsonStr = jsonMatch ? jsonMatch[1].trim() : response;\n\n // Find JSON object or array\n const objectMatch = jsonStr.match(/\\{[\\s\\S]*\\}/);\n const arrayMatch = jsonStr.match(/\\[[\\s\\S]*\\]/);\n const toParse = objectMatch?.[0] || arrayMatch?.[0] || jsonStr;\n\n const parsed = JSON.parse(toParse);\n\n if (options?.validator && !options.validator(parsed)) {\n throw new Error('Response validation failed');\n }\n\n return parsed;\n };\n\n return createTemplateJob<TOutput>({\n aiInvoker,\n template,\n systemPrompt: options?.systemPrompt,\n model: options?.model,\n responseParser\n });\n}\n\n/**\n * Create a list processing template job\n * Useful for processing a list of items and getting structured results\n */\nexport function createListProcessingJob<TInput, TOutput>(\n aiInvoker: AIInvoker,\n config: {\n /** Template with {{item}} placeholder for each list item */\n template: string;\n /** System prompt */\n systemPrompt?: string;\n /** Function to convert input items to template variables */\n itemToVariables: (item: TInput, index: number) => Record<string, string | number | boolean>;\n /** Function to parse AI response */\n responseParser: (response: string) => TOutput;\n /** Model to use */\n model?: string;\n }\n): {\n createInput: (items: TInput[], globalVariables?: Record<string, string | number | boolean>) => TemplateJobInput;\n job: MapReduceJob<TemplateJobInput, TemplateWorkItemData, TemplateItemResult<TOutput>, TemplateItemResult<TOutput>[]>;\n} {\n const job = createTemplateJob<TOutput>({\n aiInvoker,\n template: config.template,\n systemPrompt: config.systemPrompt,\n responseParser: config.responseParser,\n model: config.model\n });\n\n const createInput = (items: TInput[], globalVariables?: Record<string, string | number | boolean>): TemplateJobInput => ({\n items: items.map((item, index) => ({\n id: `item-${index}`,\n variables: config.itemToVariables(item, index)\n })),\n globalVariables\n });\n\n return { createInput, job };\n}\n", "/**\n * Temp File Utilities for Map-Reduce\n *\n * Provides cross-platform temp file management for passing large data\n * to AI processes without shell escaping issues.\n *\n * Key features:\n * - Cross-platform path handling (Windows/Unix)\n * - Automatic cleanup on success or failure\n * - Unique file naming to avoid collisions\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as fs from 'fs';\nimport * as os from 'os';\nimport * as path from 'path';\nimport { getLogger, LogCategory } from '../logger';\n\n/** Directory name for map-reduce temp files */\nconst TEMP_DIR_NAME = 'vscode-shortcuts-mapreduce';\n\n/**\n * Result of creating a temp file\n */\nexport interface TempFileResult {\n /** Absolute path to the temp file */\n filePath: string;\n /** Cleanup function to delete the file */\n cleanup: () => void;\n}\n\n/**\n * Ensure the temp directory exists\n * @returns The temp directory path, or undefined if creation failed\n */\nexport function ensureTempDir(): string | undefined {\n const tempDir = path.join(os.tmpdir(), TEMP_DIR_NAME);\n try {\n if (!fs.existsSync(tempDir)) {\n fs.mkdirSync(tempDir, { recursive: true });\n }\n return tempDir;\n } catch (error) {\n 
getLogger().error(LogCategory.UTILS, 'Failed to create temp directory', error instanceof Error ? error : undefined);\n return undefined;\n }\n}\n\n/**\n * Generate a unique temp file name\n * @param prefix Optional prefix for the filename\n * @param extension File extension (default: .json)\n * @returns Unique filename\n */\nexport function generateTempFileName(prefix: string = 'results', extension: string = '.json'): string {\n const timestamp = Date.now();\n const random = Math.random().toString(36).substring(2, 8);\n return `${prefix}_${timestamp}_${random}${extension}`;\n}\n\n/**\n * Write content to a temp file\n *\n * @param content The content to write\n * @param prefix Optional prefix for the filename\n * @param extension File extension (default: .json)\n * @returns TempFileResult with file path and cleanup function, or undefined on failure\n */\nexport function writeTempFile(\n content: string,\n prefix: string = 'results',\n extension: string = '.json'\n): TempFileResult | undefined {\n const tempDir = ensureTempDir();\n if (!tempDir) {\n return undefined;\n }\n\n const fileName = generateTempFileName(prefix, extension);\n const filePath = path.join(tempDir, fileName);\n\n try {\n // Write with UTF-8 encoding - works on both Windows and Unix\n fs.writeFileSync(filePath, content, { encoding: 'utf8' });\n\n return {\n filePath,\n cleanup: () => cleanupTempFile(filePath)\n };\n } catch (error) {\n getLogger().error(LogCategory.UTILS, 'Failed to write temp file', error instanceof Error ? error : undefined);\n return undefined;\n }\n}\n\n/**\n * Clean up a temp file\n * @param filePath Path to the file to delete\n * @returns true if deleted successfully, false otherwise\n */\nexport function cleanupTempFile(filePath: string): boolean {\n try {\n if (fs.existsSync(filePath)) {\n fs.unlinkSync(filePath);\n }\n return true;\n } catch (error) {\n getLogger().error(LogCategory.UTILS, 'Failed to cleanup temp file', error instanceof Error ? error : undefined);\n return false;\n }\n}\n\n/**\n * Clean up all temp files in the temp directory\n * Useful for cleanup on extension deactivation\n * @returns Number of files cleaned up\n */\nexport function cleanupAllTempFiles(): number {\n const tempDir = path.join(os.tmpdir(), TEMP_DIR_NAME);\n let count = 0;\n\n try {\n if (!fs.existsSync(tempDir)) {\n return 0;\n }\n\n const files = fs.readdirSync(tempDir);\n for (const file of files) {\n const filePath = path.join(tempDir, file);\n try {\n fs.unlinkSync(filePath);\n count++;\n } catch {\n // Ignore individual file deletion errors\n }\n }\n } catch (error) {\n getLogger().error(LogCategory.UTILS, 'Failed to cleanup temp directory', error instanceof Error ? error : undefined);\n }\n\n return count;\n}\n\n/**\n * Read content from a temp file\n * @param filePath Path to the file to read\n * @returns File content, or undefined on failure\n */\nexport function readTempFile(filePath: string): string | undefined {\n try {\n return fs.readFileSync(filePath, { encoding: 'utf8' });\n } catch (error) {\n getLogger().error(LogCategory.UTILS, 'Failed to read temp file', error instanceof Error ? 
error : undefined);\n return undefined;\n }\n}\n\n/**\n * Check if a path looks like a temp file created by this module\n * @param filePath Path to check\n * @returns true if it's a temp file path\n */\nexport function isTempFilePath(filePath: string): boolean {\n const tempDir = path.join(os.tmpdir(), TEMP_DIR_NAME);\n return filePath.startsWith(tempDir);\n}\n\n/**\n * Get the temp directory path (for testing)\n * @returns The temp directory path\n */\nexport function getTempDirPath(): string {\n return path.join(os.tmpdir(), TEMP_DIR_NAME);\n}\n", "/**\n * Prompt Map Job\n *\n * Generic map-reduce job that applies a prompt template to a list of items.\n * Each item's fields are substituted into the template, sent to AI, and results collected.\n *\n * This is a core reusable job type - input sources (CSV, JSON, git, etc.) are handled\n * by the caller (e.g., yaml-pipeline module).\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport {\n AIInvoker,\n MapContext,\n Mapper,\n MapReduceJob,\n MapResult,\n ReduceContext,\n ReduceResult,\n Splitter,\n WorkItem\n} from '../types';\nimport { BaseReducer } from '../reducers';\nimport { \n extractJSON as sharedExtractJSON, \n parseAIResponse as sharedParseAIResponse \n} from '../../utils/ai-response-parser';\nimport { writeTempFile, TempFileResult } from '../temp-file-utils';\nimport { getLogger, LogCategory } from '../../logger';\n\n/**\n * A generic item with string key-value pairs for template substitution\n */\nexport interface PromptItem {\n [key: string]: string;\n}\n\n/**\n * Input for the prompt map job\n */\nexport interface PromptMapInput {\n /** Items to process */\n items: PromptItem[];\n /** Prompt template with {{variable}} placeholders */\n promptTemplate: string;\n /** Expected output field names from AI */\n outputFields: string[];\n}\n\n/**\n * Work item data passed to the mapper\n */\nexport interface PromptWorkItemData {\n /** The item with template variables */\n item: PromptItem;\n /** Prompt template */\n promptTemplate: string;\n /** Expected output fields */\n outputFields: string[];\n /** Original index in input */\n index: number;\n /** All items in the input (for {{ITEMS}} template variable) */\n allItems: PromptItem[];\n}\n\n/**\n * Result from processing a single item (map output)\n */\nexport interface PromptMapResult {\n /** The original input item */\n item: PromptItem;\n /** The AI-generated output (with declared fields) - empty object in text mode */\n output: Record<string, unknown>;\n /** Raw text output when in text mode (no output fields specified) */\n rawText?: string;\n /** Whether processing succeeded */\n success: boolean;\n /** Error message if failed */\n error?: string;\n /** Raw AI response */\n rawResponse?: string;\n /** SDK session ID for session resume functionality */\n sessionId?: string;\n /** Token usage from the AI call (if available) */\n tokenUsage?: import('../../ai/copilot-sdk-service').TokenUsage;\n}\n\n/**\n * Output format for the reduce phase\n * - 'list': Markdown formatted list\n * - 'table': Markdown table\n * - 'json': JSON array of results\n * - 'csv': CSV format\n * - 'ai': AI-powered synthesis of results\n * - 'text': Pure text concatenation (for non-structured AI responses)\n */\nexport type OutputFormat = 'list' | 'table' | 'json' | 'csv' | 'ai' | 'text';\n\n/**\n * Final aggregated output from reduce phase\n */\nexport interface PromptMapOutput {\n /** All processed results */\n results: PromptMapResult[];\n /** Formatted output string */\n formattedOutput: 
string;\n /** Summary statistics */\n summary: PromptMapSummary;\n}\n\n/**\n * Execution summary\n */\nexport interface PromptMapSummary {\n /** Total items processed */\n totalItems: number;\n /** Successfully processed items */\n successfulItems: number;\n /** Failed items */\n failedItems: number;\n /** Output field names */\n outputFields: string[];\n}\n\n/**\n * Options for creating a prompt map job\n */\nexport interface PromptMapJobOptions {\n /** AI invoker function */\n aiInvoker: AIInvoker;\n /** Output format (default: 'list') */\n outputFormat?: OutputFormat;\n /** Model to use */\n model?: string;\n /** Maximum concurrent AI calls */\n maxConcurrency?: number;\n /** AI reduce prompt template (required if outputFormat is 'ai') */\n aiReducePrompt?: string;\n /** AI reduce output fields (required if outputFormat is 'ai') */\n aiReduceOutput?: string[];\n /** Model to use for AI reduce (optional, defaults to job model) */\n aiReduceModel?: string;\n /** Parameters for AI reduce prompt substitution (from input.parameters) */\n aiReduceParameters?: Record<string, string>;\n}\n\n// ============================================================================\n// Template utilities\n// ============================================================================\n\nconst TEMPLATE_VARIABLE_REGEX = /\\{\\{(\\w+)\\}\\}/g;\n\n/**\n * Substitute template variables with values from a pipeline item\n * \n * Supports special variable {{ITEMS}} which is replaced with JSON array of all items.\n * This allows prompts to reference the full context of all items being processed.\n * \n * @param template Template string with {{variable}} placeholders\n * @param item Current pipeline item containing values\n * @param allItems Optional array of all items (for {{ITEMS}} variable)\n * @returns Substituted string\n */\nfunction substituteTemplate(template: string, item: PromptItem, allItems?: PromptItem[]): string {\n return template.replace(TEMPLATE_VARIABLE_REGEX, (_, variableName) => {\n // Handle special {{ITEMS}} variable - returns JSON array of all items\n if (variableName === 'ITEMS' && allItems) {\n return JSON.stringify(allItems, null, 2);\n }\n return variableName in item ? 
item[variableName] : '';\n });\n}\n\nfunction buildFullPrompt(userPrompt: string, outputFields: string[]): string {\n if (outputFields.length === 0) {\n return userPrompt;\n }\n return `${userPrompt}\n\nReturn JSON with these fields: ${outputFields.join(', ')}`;\n}\n\n/**\n * Extract JSON from response - delegates to shared utility\n */\nfunction extractJSON(response: string): string | null {\n return sharedExtractJSON(response);\n}\n\n/**\n * Parse AI response - delegates to shared utility\n */\nfunction parseAIResponse(response: string, outputFields: string[]): Record<string, unknown> {\n return sharedParseAIResponse(response, outputFields);\n}\n\n// ============================================================================\n// Splitter\n// ============================================================================\n\nclass PromptMapSplitter implements Splitter<PromptMapInput, PromptWorkItemData> {\n split(input: PromptMapInput): WorkItem<PromptWorkItemData>[] {\n return input.items.map((item, index) => ({\n id: `item-${index}`,\n data: {\n item,\n promptTemplate: input.promptTemplate,\n outputFields: input.outputFields,\n index,\n allItems: input.items\n },\n metadata: { index, totalItems: input.items.length }\n }));\n }\n}\n\n// ============================================================================\n// Mapper\n// ============================================================================\n\nclass PromptMapMapper implements Mapper<PromptWorkItemData, PromptMapResult> {\n constructor(\n private aiInvoker: AIInvoker,\n private modelTemplate?: string\n ) {}\n\n async map(\n workItem: WorkItem<PromptWorkItemData>,\n _context: MapContext\n ): Promise<PromptMapResult> {\n const { item, promptTemplate, outputFields, allItems } = workItem.data;\n const isTextMode = !outputFields || outputFields.length === 0;\n\n try {\n const substituted = substituteTemplate(promptTemplate, item, allItems);\n const prompt = buildFullPrompt(substituted, outputFields);\n \n // Support template substitution in model (e.g., \"{{model}}\" reads from item.model)\n // Ensure modelTemplate is a string before substitution\n let model: string | undefined;\n if (this.modelTemplate && typeof this.modelTemplate === 'string') {\n const substitutedModel = substituteTemplate(this.modelTemplate, item, allItems);\n model = substitutedModel || undefined;\n }\n \n const result = await this.aiInvoker(prompt, { model });\n\n if (result.success && result.response) {\n // Text mode - return raw response without JSON parsing\n if (isTextMode) {\n return {\n item,\n output: {},\n rawText: result.response,\n success: true,\n rawResponse: result.response,\n sessionId: result.sessionId,\n tokenUsage: result.tokenUsage,\n };\n }\n\n // Structured mode - parse JSON response\n try {\n const output = parseAIResponse(result.response, outputFields);\n return { item, output, success: true, rawResponse: result.response, sessionId: result.sessionId, tokenUsage: result.tokenUsage };\n } catch (parseError) {\n const logger = getLogger();\n logger.debug(LogCategory.MAP_REDUCE, `PromptMapMapper: Failed to parse AI response for item ${workItem.id}. Response (${result.response.length} chars): ${result.response.substring(0, 500)}`);\n return {\n item,\n output: this.emptyOutput(outputFields),\n success: false,\n error: `Failed to parse AI response: ${parseError instanceof Error ? 
parseError.message : String(parseError)}`,\n rawResponse: result.response,\n sessionId: result.sessionId,\n tokenUsage: result.tokenUsage,\n };\n }\n }\n\n return {\n item,\n output: isTextMode ? {} : this.emptyOutput(outputFields),\n success: false,\n error: result.error || 'AI invocation failed',\n rawResponse: result.response,\n sessionId: result.sessionId,\n tokenUsage: result.tokenUsage,\n };\n } catch (error) {\n return {\n item,\n output: isTextMode ? {} : this.emptyOutput(workItem.data.outputFields),\n success: false,\n error: error instanceof Error ? error.message : String(error),\n rawResponse: undefined // No AI response available when exception occurs before AI call\n };\n }\n }\n\n private emptyOutput(fields: string[]): Record<string, unknown> {\n const output: Record<string, unknown> = {};\n for (const field of fields) output[field] = null;\n return output;\n }\n}\n\n// ============================================================================\n// Formatting utilities\n// ============================================================================\n\nfunction formatValue(value: unknown): string {\n if (value === null || value === undefined) return 'null';\n if (typeof value === 'string') return value.length > 50 ? value.substring(0, 47) + '...' : value;\n if (typeof value === 'boolean') return value ? 'true' : 'false';\n if (typeof value === 'number') return String(value);\n if (Array.isArray(value)) return `[${value.length} items]`;\n if (typeof value === 'object') return JSON.stringify(value);\n return String(value);\n}\n\nfunction truncate(value: string, max: number = 30): string {\n return value.length <= max ? value : value.substring(0, max - 3) + '...';\n}\n\nfunction formatAsList(results: PromptMapResult[], summary: PromptMapSummary): string {\n const lines: string[] = [`## Results (${summary.totalItems} items)`, ''];\n if (summary.failedItems > 0) lines.push(`**Warning: ${summary.failedItems} items failed**`, '');\n\n results.forEach((r, i) => {\n lines.push(`### Item ${i + 1}`);\n lines.push(`**Input:** ${Object.entries(r.item).map(([k, v]) => `${k}=${truncate(v)}`).join(', ')}`);\n if (r.success) {\n lines.push(`**Output:** ${Object.entries(r.output).map(([k, v]) => `${k}=${formatValue(v)}`).join(', ')}`);\n } else {\n lines.push(`**Error:** ${r.error || 'Unknown error'}`);\n }\n lines.push('');\n });\n\n lines.push('---', `**Stats:** ${summary.successfulItems} succeeded, ${summary.failedItems} failed`);\n return lines.join('\\n');\n}\n\nfunction formatAsTable(results: PromptMapResult[]): string {\n if (results.length === 0) return 'No results to display.';\n\n const inKeys = [...new Set(results.flatMap(r => Object.keys(r.item)))];\n const outKeys = [...new Set(results.flatMap(r => Object.keys(r.output)))];\n const headers = ['#', ...inKeys.map(k => `[in] ${k}`), ...outKeys.map(k => `[out] ${k}`), 'Status'];\n\n const lines = [\n '| ' + headers.join(' | ') + ' |',\n '| ' + headers.map(() => '---').join(' | ') + ' |'\n ];\n\n results.forEach((r, i) => {\n const cells = [\n String(i + 1),\n ...inKeys.map(k => truncate(r.item[k] ?? '', 20)),\n ...outKeys.map(k => formatValue(r.output[k])),\n r.success ? 
'OK' : 'FAIL'\n ];\n lines.push('| ' + cells.join(' | ') + ' |');\n });\n\n return lines.join('\\n');\n}\n\nfunction formatAsJSON(results: PromptMapResult[]): string {\n return JSON.stringify(results.map(r => ({\n input: r.item,\n output: r.output,\n success: r.success,\n ...(r.error && { error: r.error })\n })), null, 2);\n}\n\nfunction escapeCSV(value: string): string {\n return (value.includes(',') || value.includes('\"') || value.includes('\\n'))\n ? `\"${value.replace(/\"/g, '\"\"')}\"`\n : value;\n}\n\nfunction formatAsCSV(results: PromptMapResult[]): string {\n if (results.length === 0) return '';\n\n const inKeys = [...new Set(results.flatMap(r => Object.keys(r.item)))];\n const outKeys = [...new Set(results.flatMap(r => Object.keys(r.output)))];\n const headers = [...inKeys, ...outKeys.map(k => `out_${k}`), 'success'];\n\n const lines = [headers.join(',')];\n for (const r of results) {\n const values = [\n ...inKeys.map(k => escapeCSV(r.item[k] ?? '')),\n ...outKeys.map(k => escapeCSV(formatValue(r.output[k]))),\n r.success ? 'true' : 'false'\n ];\n lines.push(values.join(','));\n }\n return lines.join('\\n');\n}\n\n/**\n * Format results as pure text - concatenates rawText or stringified output\n * Used for text mode where AI responses are not structured JSON\n */\nfunction formatAsText(results: PromptMapResult[]): string {\n const successfulResults = results.filter(r => r.success);\n if (successfulResults.length === 0) {\n return 'No successful results.';\n }\n\n // For single result, return just the text without separators\n if (successfulResults.length === 1) {\n const r = successfulResults[0];\n return r.rawText || JSON.stringify(r.output, null, 2);\n }\n\n // For multiple results, add separators\n return successfulResults\n .map((r, i) => {\n const text = r.rawText || JSON.stringify(r.output, null, 2);\n return `--- Item ${i + 1} ---\\n${text}`;\n })\n .join('\\n\\n');\n}\n\n// ============================================================================\n// Reducer\n// ============================================================================\n\nclass PromptMapReducer extends BaseReducer<PromptMapResult, PromptMapOutput> {\n constructor(\n private outputFormat: OutputFormat = 'list',\n private outputFields: string[] = [],\n private aiInvoker?: AIInvoker,\n private aiReducePrompt?: string,\n private aiReduceOutput?: string[],\n private aiReduceModel?: string,\n private aiReduceParameters?: Record<string, string>\n ) {\n super();\n }\n\n async reduce(\n results: MapResult<PromptMapResult>[],\n context: ReduceContext\n ): Promise<ReduceResult<PromptMapOutput>> {\n const startTime = Date.now();\n\n const itemResults = results.filter(r => r.output).map(r => r.output!);\n const successfulItems = itemResults.filter(r => r.success).length;\n const failedItems = itemResults.filter(r => !r.success).length;\n\n const summary: PromptMapSummary = {\n totalItems: itemResults.length,\n successfulItems,\n failedItems,\n outputFields: this.outputFields\n };\n\n // Handle AI reduce\n if (this.outputFormat === 'ai') {\n return await this.performAIReduce(itemResults, summary, results.length, startTime, context);\n }\n\n // Handle deterministic reduce\n let formattedOutput: string;\n switch (this.outputFormat) {\n case 'table': formattedOutput = formatAsTable(itemResults); break;\n case 'json': formattedOutput = formatAsJSON(itemResults); break;\n case 'csv': formattedOutput = formatAsCSV(itemResults); break;\n case 'text': formattedOutput = formatAsText(itemResults); break;\n default: 
formattedOutput = formatAsList(itemResults, summary);\n }\n\n return {\n output: { results: itemResults, formattedOutput, summary },\n stats: {\n inputCount: results.length,\n outputCount: itemResults.length,\n mergedCount: 0,\n reduceTimeMs: Date.now() - startTime,\n usedAIReduce: false\n }\n };\n }\n\n private async performAIReduce(\n itemResults: PromptMapResult[],\n summary: PromptMapSummary,\n inputCount: number,\n startTime: number,\n context: ReduceContext\n ): Promise<ReduceResult<PromptMapOutput>> {\n if (!this.aiInvoker || !this.aiReducePrompt) {\n throw new Error('AI reduce requires aiInvoker and aiReducePrompt');\n }\n\n const isTextMode = !this.aiReduceOutput || this.aiReduceOutput.length === 0;\n\n // Register reduce process for tracking\n // Note: parentGroupId may be undefined for single-item pipelines, but we still track the process\n let reduceProcessId: string | undefined;\n if (context.processTracker) {\n reduceProcessId = context.processTracker.registerProcess(\n 'AI Reduce: Synthesizing results',\n context.parentGroupId\n );\n }\n\n // Build prompt with template substitution\n const successfulResults = itemResults.filter(r => r.success);\n\n // For text mode map results, use rawText; otherwise use structured output\n const resultsForPrompt = successfulResults.map(r => {\n if (r.rawText !== undefined) {\n return r.rawText;\n }\n return r.output;\n });\n const resultsString = JSON.stringify(resultsForPrompt, null, 2);\n\n // Check if prompt uses {{RESULTS_FILE}} - if so, write to temp file\n // This avoids shell escaping issues on Windows where newlines in JSON\n // get converted to literal \\n, breaking JSON structure\n let tempFileResult: TempFileResult | undefined;\n let prompt = this.aiReducePrompt;\n\n if (prompt.includes('{{RESULTS_FILE}}')) {\n tempFileResult = writeTempFile(resultsString, 'ai-reduce-results', '.json');\n if (tempFileResult) {\n prompt = prompt.replace(/\\{\\{RESULTS_FILE\\}\\}/g, tempFileResult.filePath);\n } else {\n // Fallback to inline if temp file creation fails\n getLogger().warn(LogCategory.MAP_REDUCE, 'Failed to create temp file for RESULTS_FILE, falling back to inline RESULTS');\n prompt = prompt.replace(/\\{\\{RESULTS_FILE\\}\\}/g, resultsString);\n }\n }\n\n // Replace {{RESULTS}} with inline JSON (original behavior)\n prompt = prompt\n .replace(/\\{\\{RESULTS\\}\\}/g, resultsString)\n .replace(/\\{\\{COUNT\\}\\}/g, String(summary.totalItems))\n .replace(/\\{\\{SUCCESS_COUNT\\}\\}/g, String(summary.successfulItems))\n .replace(/\\{\\{FAILURE_COUNT\\}\\}/g, String(summary.failedItems));\n\n // Substitute input parameters\n if (this.aiReduceParameters) {\n for (const [key, value] of Object.entries(this.aiReduceParameters)) {\n prompt = prompt.replace(new RegExp(`\\\\{\\\\{${key}\\\\}\\\\}`, 'g'), value);\n }\n }\n\n // In text mode, don't append JSON format instruction\n const fullPrompt = isTextMode ? 
prompt : buildFullPrompt(prompt, this.aiReduceOutput!);\n\n // Call AI and ensure temp file cleanup\n let aiResult;\n try {\n aiResult = await this.aiInvoker(fullPrompt, { model: this.aiReduceModel });\n } finally {\n // Always cleanup temp file after AI call completes\n if (tempFileResult) {\n tempFileResult.cleanup();\n }\n }\n\n if (!aiResult.success || !aiResult.response) {\n // Update process as failed\n if (context.processTracker && reduceProcessId) {\n context.processTracker.updateProcess(\n reduceProcessId,\n 'failed',\n undefined,\n aiResult.error || 'Unknown error'\n );\n }\n throw new Error(`AI reduce failed: ${aiResult.error || 'Unknown error'}`);\n }\n\n // Text mode - return raw AI response without JSON parsing\n if (isTextMode) {\n // Update process as completed\n if (context.processTracker && reduceProcessId) {\n context.processTracker.updateProcess(\n reduceProcessId,\n 'completed',\n aiResult.response,\n undefined,\n JSON.stringify({ mode: 'text', outputLength: aiResult.response.length })\n );\n }\n return {\n output: {\n results: itemResults,\n formattedOutput: aiResult.response,\n summary: {\n ...summary,\n outputFields: []\n }\n },\n stats: {\n inputCount,\n outputCount: 1,\n mergedCount: summary.successfulItems,\n reduceTimeMs: Date.now() - startTime,\n usedAIReduce: true\n }\n };\n }\n\n // Structured mode - parse AI response as JSON\n let aiOutput: Record<string, unknown>;\n try {\n aiOutput = parseAIResponse(aiResult.response, this.aiReduceOutput!);\n } catch (parseError) {\n // Update process as failed\n if (context.processTracker && reduceProcessId) {\n context.processTracker.updateProcess(\n reduceProcessId,\n 'failed',\n undefined,\n parseError instanceof Error ? parseError.message : String(parseError)\n );\n }\n throw new Error(`Failed to parse AI reduce response: ${parseError instanceof Error ? 
parseError.message : String(parseError)}`);\n }\n\n // Format output as JSON string\n const formattedOutput = JSON.stringify(aiOutput, null, 2);\n\n // Update process as completed\n if (context.processTracker && reduceProcessId) {\n context.processTracker.updateProcess(\n reduceProcessId,\n 'completed',\n formattedOutput,\n undefined,\n JSON.stringify(aiOutput)\n );\n }\n\n return {\n output: {\n results: itemResults,\n formattedOutput,\n summary: {\n ...summary,\n outputFields: this.aiReduceOutput!\n }\n },\n stats: {\n inputCount,\n outputCount: 1, // AI reduce produces single synthesized output\n mergedCount: summary.successfulItems,\n reduceTimeMs: Date.now() - startTime,\n usedAIReduce: true\n }\n };\n }\n}\n\n// ============================================================================\n// Factory functions\n// ============================================================================\n\n/**\n * Create a prompt map job\n */\nexport function createPromptMapJob(\n options: PromptMapJobOptions\n): MapReduceJob<PromptMapInput, PromptWorkItemData, PromptMapResult, PromptMapOutput> {\n return {\n id: 'prompt-map',\n name: 'Prompt Map',\n splitter: new PromptMapSplitter(),\n mapper: new PromptMapMapper(options.aiInvoker, options.model),\n reducer: new PromptMapReducer(\n options.outputFormat || 'list',\n [],\n options.aiInvoker,\n options.aiReducePrompt,\n options.aiReduceOutput,\n options.aiReduceModel,\n options.aiReduceParameters\n ),\n options: {\n maxConcurrency: options.maxConcurrency || 5,\n reduceMode: 'deterministic',\n showProgress: true,\n retryOnFailure: false\n }\n };\n}\n\n/**\n * Helper to create job input\n */\nexport function createPromptMapInput(\n items: PromptItem[],\n promptTemplate: string,\n outputFields: string[]\n): PromptMapInput {\n return { items, promptTemplate, outputFields };\n}\n", "/**\n * Jobs Module\n *\n * Exports all job implementations and utilities.\n */\n\n// Code review job\nexport {\n createCodeReviewJob\n} from './code-review-job';\nexport type {\n ReviewSeverity,\n ReviewFinding,\n RuleReviewResult,\n ReviewSummary,\n CodeReviewOutput,\n CodeReviewInput,\n CodeReviewJobOptions\n} from './code-review-job';\n\n// Template job\nexport {\n createTemplateJob,\n createSimpleTemplateJob,\n createJsonTemplateJob,\n createListProcessingJob\n} from './template-job';\nexport type {\n TemplateItem,\n TemplateJobInput,\n TemplateWorkItemData,\n TemplateItemResult,\n TemplateJobOptions\n} from './template-job';\n\n// Prompt map job (generic item + prompt template processing)\nexport {\n createPromptMapJob,\n createPromptMapInput\n} from './prompt-map-job';\nexport type {\n PromptItem,\n PromptMapInput,\n PromptWorkItemData,\n PromptMapResult,\n PromptMapOutput,\n PromptMapSummary,\n PromptMapJobOptions,\n OutputFormat\n} from './prompt-map-job';\n", "/**\n * Map-Reduce AI Framework\n *\n * A reusable framework for AI map-reduce workflows.\n * Provides pluggable splitters, mappers, reducers, and prompt templates\n * with consistent UI/process tracking.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\n// Core types\nexport type {\n WorkItem,\n MapContext,\n MapResult,\n ReduceContext,\n ReduceResult,\n ReduceStats,\n ReduceMode,\n MapReduceOptions,\n Splitter,\n Mapper,\n Reducer,\n MapReduceJob,\n ProgressCallback,\n JobProgress,\n MapReduceResult,\n ExecutionStats,\n PromptTemplate,\n PromptRenderOptions,\n AIInvoker,\n AIInvokerOptions,\n AIInvokerResult,\n ProcessTracker,\n ExecutorOptions,\n SessionMetadata,\n ItemCompleteCallback\n} from 
'./types';\nexport { DEFAULT_MAP_REDUCE_OPTIONS } from './types';\n\n// Executor\nexport { MapReduceExecutor, createExecutor } from './executor';\n\n// Concurrency limiter\nexport { ConcurrencyLimiter, CancellationError, DEFAULT_MAX_CONCURRENCY } from './concurrency-limiter';\n\n// Prompt template\nexport {\n renderTemplate,\n createTemplate,\n extractVariables,\n validateTemplate,\n composeTemplates,\n TemplateHelpers,\n ResponseParsers,\n MissingVariableError,\n TemplateRenderError\n} from './prompt-template';\n\n// Reducers\nexport {\n // Base reducers\n BaseReducer,\n IdentityReducer,\n FlattenReducer,\n AggregatingReducer,\n // Deterministic reducer\n DeterministicReducer,\n createDeterministicReducer,\n StringDeduplicationReducer,\n NumericAggregationReducer,\n // AI reducer\n AIReducer,\n createAIReducer,\n createTextSynthesisReducer,\n // Hybrid reducer\n HybridReducer,\n createHybridReducer,\n createSimpleHybridReducer\n} from './reducers';\nexport type {\n Deduplicatable,\n DeterministicReducerOptions,\n DeterministicReduceOutput,\n AIReducerOptions,\n TextSynthesisOutput,\n TextSynthesisOptions,\n HybridReducerOptions,\n SimplePolishedOutput\n} from './reducers';\n\n// Splitters\nexport {\n // File splitter\n FileSplitter,\n createFileSplitter,\n createExtensionFilteredSplitter,\n BatchedFileSplitter,\n createBatchedFileSplitter,\n // Chunk splitter\n ChunkSplitter,\n createChunkSplitter,\n createLineChunkSplitter,\n createParagraphChunkSplitter,\n // Rule splitter\n RuleSplitter,\n createRuleSplitter,\n createAlphabeticRuleSplitter,\n createPriorityRuleSplitter,\n createPatternFilteredRuleSplitter,\n BatchedRuleSplitter,\n createBatchedRuleSplitter\n} from './splitters';\nexport type {\n FileItem,\n FileInput,\n FileWorkItemData,\n FileSplitterOptions,\n BatchedFileWorkItemData,\n ChunkInput,\n ChunkWorkItemData,\n ChunkSplitterOptions,\n Rule,\n RuleInput,\n RuleWorkItemData,\n RuleSplitterOptions,\n BatchedRuleWorkItemData\n} from './splitters';\n\n// Jobs\nexport {\n createCodeReviewJob,\n createTemplateJob,\n createSimpleTemplateJob,\n createJsonTemplateJob,\n createListProcessingJob,\n createPromptMapJob,\n createPromptMapInput\n} from './jobs';\nexport type {\n ReviewSeverity,\n ReviewFinding,\n RuleReviewResult,\n ReviewSummary,\n CodeReviewOutput,\n CodeReviewInput,\n CodeReviewJobOptions,\n TemplateItem,\n TemplateJobInput,\n TemplateWorkItemData,\n TemplateItemResult,\n TemplateJobOptions,\n PromptItem,\n PromptMapInput,\n PromptWorkItemData,\n PromptMapResult,\n PromptMapOutput,\n PromptMapSummary,\n PromptMapJobOptions,\n OutputFormat\n} from './jobs';\n\n// Temp file utilities\nexport {\n writeTempFile,\n readTempFile,\n cleanupTempFile,\n cleanupAllTempFiles,\n ensureTempDir,\n generateTempFileName,\n isTempFilePath,\n getTempDirPath\n} from './temp-file-utils';\nexport type { TempFileResult } from './temp-file-utils';\n", "/**\n * YAML Pipeline Framework Types\n *\n * Configuration types for YAML-based pipeline definitions.\n * Execution types are re-exported from the map-reduce framework.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport type { OutputFormat as MROutputFormat, PromptItem as MRPromptItem } from '../map-reduce/jobs/prompt-map-job';\n\n// Re-export execution types from map-reduce framework\nexport type {\n AIInvoker,\n AIInvokerOptions,\n AIInvokerResult,\n ProcessTracker,\n SessionMetadata,\n ExecutorOptions,\n JobProgress,\n MapReduceResult\n} from '../map-reduce';\n\nexport type {\n PromptItem,\n PromptMapResult,\n 
PromptMapInput,\n PromptMapOutput,\n PromptMapSummary,\n PromptMapJobOptions,\n OutputFormat\n} from '../map-reduce';\n\n/**\n * Pipeline configuration as defined in YAML file\n */\nexport interface PipelineConfig {\n /** Name of the pipeline */\n name: string;\n /**\n * Optional working directory for AI SDK sessions.\n * Controls the file access context for AI calls (not CSV/prompt resolution).\n * \n * - Absolute paths are used as-is\n * - Relative paths are resolved relative to the pipeline package directory\n * - If omitted, callers use their own default (VS Code uses workspaceRoot, CLI uses --workspace-root or pipeline dir)\n */\n workingDirectory?: string;\n /** Input configuration */\n input: InputConfig;\n /** Optional filter phase configuration */\n filter?: FilterConfig;\n /** Map phase configuration */\n map: MapConfig;\n /** Reduce phase configuration */\n reduce: ReduceConfig;\n}\n\n/**\n * CSV source configuration for loading items from a file\n */\nexport interface CSVSource {\n /** Source type - currently only 'csv' */\n type: 'csv';\n /** Path to CSV file (relative to pipeline directory or absolute) */\n path: string;\n /** CSV delimiter (default: \",\") */\n delimiter?: string;\n}\n\n/**\n * Type guard to check if a value is a CSVSource\n */\nexport function isCSVSource(value: unknown): value is CSVSource {\n return (\n typeof value === 'object' &&\n value !== null &&\n 'type' in value &&\n (value as CSVSource).type === 'csv' &&\n 'path' in value &&\n typeof (value as CSVSource).path === 'string'\n );\n}\n\n/**\n * Parameter definition for pipeline input\n */\nexport interface PipelineParameter {\n /** Parameter name (used as {{name}} in templates) */\n name: string;\n /** Parameter value */\n value: string;\n}\n\n/**\n * Configuration for AI-generated inputs\n * \n * Allows users to generate pipeline input items using AI based on a natural language prompt.\n * The AI will return items matching the specified schema.\n */\nexport interface GenerateInputConfig {\n /** \n * Natural language prompt describing items to generate.\n * Include count in the prompt (e.g., \"Generate 10 test cases for...\")\n */\n prompt: string;\n /** \n * Field names for each generated item.\n * These will be the keys in each generated object.\n */\n schema: string[];\n /**\n * Optional model to use for generation.\n * If not specified, uses the default model.\n */\n model?: string;\n}\n\n/**\n * Type guard to check if a value is a GenerateInputConfig\n */\nexport function isGenerateConfig(value: unknown): value is GenerateInputConfig {\n return (\n typeof value === 'object' &&\n value !== null &&\n 'prompt' in value &&\n typeof (value as GenerateInputConfig).prompt === 'string' &&\n 'schema' in value &&\n Array.isArray((value as GenerateInputConfig).schema)\n );\n}\n\n/**\n * Input configuration - supports inline items, CSV file, inline list for fanout, or AI-generated items\n * \n * Input is always a list of items. 
You can either:\n * - Provide the list inline in YAML via `items`\n * - Load from CSV file via `from` (CSVSource)\n * - Provide a simple list via `from` (array) - useful for multi-model fanout\n * - Generate items using AI via `generate` (GenerateInputConfig)\n * \n * Must have exactly one of `items`, `from`, or `generate`.\n * \n * Optional `parameters` can define static values available to all items\n * in the map phase template (e.g., {{paramName}}).\n * \n * Multi-model fanout example:\n * ```yaml\n * input:\n * from:\n * - model: gpt-4\n * - model: claude-sonnet\n * parameters:\n * - name: code\n * value: \"function add(a, b) { return a + b; }\"\n * ```\n * \n * AI-generated input example:\n * ```yaml\n * input:\n * generate:\n * prompt: \"Generate 10 test cases for user login validation\"\n * schema:\n * - testName\n * - input\n * - expected\n * ```\n */\nexport interface InputConfig {\n /** Direct list of items (inline) */\n items?: MRPromptItem[];\n\n /** \n * Load items from source:\n * - CSVSource: Load from CSV file\n * - PromptItem[]: Inline list (useful for multi-model fanout with parameters)\n */\n from?: CSVSource | MRPromptItem[];\n\n /** \n * Generate items using AI based on a prompt and schema.\n * The user will be able to review and edit generated items before execution.\n */\n generate?: GenerateInputConfig;\n\n /** Limit number of items to process (default: all) */\n limit?: number;\n\n /** Static parameters available to all items in the map template */\n parameters?: PipelineParameter[];\n}\n\n/**\n * Map phase configuration\n */\nexport interface MapConfig {\n /** \n * Prompt template with {{column}} placeholders.\n * Either `prompt` or `promptFile` must be specified (mutually exclusive).\n * \n * When batchSize > 1, use {{ITEMS}} to access the batch as a JSON array.\n */\n prompt?: string;\n /** \n * Path to a prompt file. Supports:\n * - Bare filename: \"analyze.prompt.md\" (searches pipeline dir, prompts/ subfolder, shared prompts)\n * - Relative path: \"prompts/map.prompt.md\" (relative to pipeline directory)\n * - Parent path: \"../shared/prompts/common.prompt.md\" (relative to pipeline directory)\n * - Absolute path: \"/absolute/path/prompt.md\"\n * \n * Either `prompt` or `promptFile` must be specified (mutually exclusive).\n */\n promptFile?: string;\n /**\n * Optional skill to attach as additional context/guidance.\n * Skills are located at `.github/skills/{name}/SKILL.md`.\n * \n * When specified, the skill's prompt content is prepended to the main prompt\n * as recommended guidance for the AI to follow.\n * \n * Example: `skill: \"go-deep\"` \u2192 loads `.github/skills/go-deep/SKILL.md`\n * \n * Can be combined with either `prompt` or `promptFile`.\n */\n skill?: string;\n /** Output field names expected from AI. If omitted, text mode is used (raw AI response) */\n output?: string[];\n /** Maximum concurrent AI calls (default: 5) */\n parallel?: number;\n /** \n * Model to use for AI calls. 
Supports {{variable}} template syntax for per-item models.\n * \n * Static model example: `model: \"gpt-4\"`\n * Dynamic model example: `model: \"{{model}}\"` (reads from item's model field)\n */\n model?: string;\n /** \n * Timeout for each AI call in milliseconds (default: 1800000 = 30 minutes).\n * On timeout, the system automatically retries once with doubled timeout value.\n */\n timeoutMs?: number;\n /**\n * Number of items to process per AI call (default: 1).\n * \n * When batchSize > 1:\n * - Items are grouped into batches of the specified size\n * - Use {{ITEMS}} in the prompt to access the batch as a JSON array\n * - AI must return a JSON array with one result per input item\n * - If AI returns wrong count, the batch is marked as failed\n * \n * Example with 95 items and batchSize: 10:\n * - 10 AI calls instead of 95 (9 batches of 10, 1 batch of 5)\n * - Progress shows \"Processing batch 3/10...\"\n * \n * Backward compatible: default is 1 (current behavior).\n */\n batchSize?: number;\n}\n\n/**\n * Reduce phase configuration\n */\nexport interface ReduceConfig {\n /** Reduce type / output format (includes 'text' for pure text concatenation) */\n type: MROutputFormat;\n /** \n * AI prompt template (required if type is 'ai', unless promptFile is specified).\n * Either `prompt` or `promptFile` must be specified for AI reduce (mutually exclusive).\n */\n prompt?: string;\n /** \n * Path to a prompt file for AI reduce. Supports:\n * - Bare filename: \"summarize.prompt.md\" (searches pipeline dir, prompts/ subfolder, shared prompts)\n * - Relative path: \"prompts/reduce.prompt.md\" (relative to pipeline directory)\n * - Parent path: \"../shared/prompts/common.prompt.md\" (relative to pipeline directory)\n * - Absolute path: \"/absolute/path/prompt.md\"\n * \n * Either `prompt` or `promptFile` must be specified for AI reduce (mutually exclusive).\n */\n promptFile?: string;\n /**\n * Optional skill to attach as additional context/guidance for AI reduce.\n * Skills are located at `.github/skills/{name}/SKILL.md`.\n * \n * When specified, the skill's prompt content is prepended to the reduce prompt\n * as recommended guidance for the AI to follow.\n * \n * Example: `skill: \"summarizer\"` \u2192 loads `.github/skills/summarizer/SKILL.md`\n * \n * Can be combined with either `prompt` or `promptFile`.\n */\n skill?: string;\n /** AI output fields. If omitted with type 'ai', returns raw AI text response */\n output?: string[];\n /** Model to use for AI reduce (optional) */\n model?: string;\n}\n\n/**\n * CSV parsing options\n */\nexport interface CSVParseOptions {\n /** Delimiter character (default: \",\") */\n delimiter?: string;\n /** Whether first row is headers (default: true) */\n hasHeaders?: boolean;\n /** Encoding (default: \"utf-8\") */\n encoding?: BufferEncoding;\n}\n\n/**\n * CSV parsing result\n */\nexport interface CSVParseResult {\n /** Parsed items */\n items: MRPromptItem[];\n /** Column headers */\n headers: string[];\n /** Number of rows (excluding header) */\n rowCount: number;\n}\n\n/**\n * Filter operators for rule-based filtering\n */\nexport type FilterOperator = \n | 'equals' | 'not_equals'\n | 'in' | 'not_in'\n | 'contains' | 'not_contains'\n | 'greater_than' | 'less_than' | 'gte' | 'lte'\n | 'matches';\n\n/**\n * Single filter rule for rule-based filtering\n */\nexport interface FilterRule {\n /** Field name to evaluate */\n field: string;\n /** Comparison operator */\n operator: FilterOperator;\n /** Single value for comparison (for equals, greater_than, etc.) 
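For example, a rule such as { field: 'severity', operator: 'equals', value: 'high' } (hypothetical field and value) keeps only items whose severity field equals 'high'. 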
*/\n value?: any;\n /** Multiple values for comparison (for in, not_in) */\n values?: any[];\n /** Regex pattern (for matches operator) */\n pattern?: string;\n}\n\n/**\n * Rule-based filter configuration\n */\nexport interface RuleFilterConfig {\n /** List of filter rules */\n rules: FilterRule[];\n /** How to combine multiple rules (default: \"all\") */\n mode?: 'all' | 'any';\n}\n\n/**\n * AI-based filter configuration\n */\nexport interface AIFilterConfig {\n /** Prompt template with {{field}} placeholders */\n prompt: string;\n /** Output fields - must include 'include' boolean */\n output?: string[];\n /** Maximum concurrent AI calls (default: 5) */\n parallel?: number;\n /** Optional model to use */\n model?: string;\n /** Timeout per AI call in milliseconds (default: 30000 = 30s) */\n timeoutMs?: number;\n}\n\n/**\n * Filter configuration - optional phase between input and map\n */\nexport interface FilterConfig {\n /** Filter type */\n type: 'rule' | 'ai' | 'hybrid';\n /** Rule-based filter configuration (required for rule/hybrid) */\n rule?: RuleFilterConfig;\n /** AI-based filter configuration (required for ai/hybrid) */\n ai?: AIFilterConfig;\n /** For hybrid: how to combine rule and AI (default: \"and\") */\n combineMode?: 'and' | 'or';\n}\n\n/**\n * Filter statistics\n */\nexport interface FilterStats {\n /** Total input items */\n totalItems: number;\n /** Number of items included */\n includedCount: number;\n /** Number of items excluded */\n excludedCount: number;\n /** Execution time in milliseconds */\n executionTimeMs: number;\n /** Filter type used */\n filterType: 'rule' | 'ai' | 'hybrid';\n}\n\n/**\n * Result from filter execution\n */\nexport interface FilterResult {\n /** Items that passed the filter */\n included: MRPromptItem[];\n /** Items that were filtered out */\n excluded: MRPromptItem[];\n /** Filter statistics */\n stats: FilterStats;\n}\n", "/**\n * CSV Reader\n *\n * Parses CSV files into pipeline items. 
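Each data row becomes one item keyed by header name; e.g. (hypothetical data) the row 'alice,3' under headers 'name,count' parses to { name: 'alice', count: '3' }. 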
Handles various CSV formats\n * and edge cases like quoted values, escaped characters, etc.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport { CSVParseOptions, CSVParseResult, PromptItem } from './types';\nimport {\n DEFAULT_CSV_DELIMITER,\n DEFAULT_CSV_HAS_HEADER\n} from '../config/defaults';\nimport { PipelineCoreError, ErrorCode } from '../errors';\n\n/**\n * Default CSV parsing options\n */\nexport const DEFAULT_CSV_OPTIONS: Required<CSVParseOptions> = {\n delimiter: DEFAULT_CSV_DELIMITER,\n hasHeaders: DEFAULT_CSV_HAS_HEADER,\n encoding: 'utf-8'\n};\n\n/**\n * Error thrown for CSV parsing issues\n */\nexport class CSVParseError extends PipelineCoreError {\n /** Line number where the error occurred */\n readonly lineNumber?: number;\n /** Column index where the error occurred */\n readonly columnIndex?: number;\n\n constructor(\n message: string,\n lineNumber?: number,\n columnIndex?: number\n ) {\n super(message, {\n code: ErrorCode.CSV_PARSE_ERROR,\n meta: {\n ...(lineNumber !== undefined && { lineNumber }),\n ...(columnIndex !== undefined && { columnIndex }),\n },\n });\n this.name = 'CSVParseError';\n this.lineNumber = lineNumber;\n this.columnIndex = columnIndex;\n }\n}\n\n/**\n * Parse a CSV string into an array of pipeline items\n * @param content CSV content as string\n * @param options Parsing options\n * @returns Parsed CSV result with items and headers\n */\nexport function parseCSVContent(content: string, options?: CSVParseOptions): CSVParseResult {\n // Filter out undefined values from options before merging\n const filteredOptions = options ? Object.fromEntries(\n Object.entries(options).filter(([, v]) => v !== undefined)\n ) as CSVParseOptions : undefined;\n\n const opts = { ...DEFAULT_CSV_OPTIONS, ...filteredOptions };\n\n // Normalize line endings\n const normalizedContent = content.replace(/\\r\\n/g, '\\n').replace(/\\r/g, '\\n');\n\n // Parse into rows\n const rows = parseCSVRows(normalizedContent, opts.delimiter);\n\n if (rows.length === 0) {\n return {\n items: [],\n headers: [],\n rowCount: 0\n };\n }\n\n let headers: string[];\n let dataRows: string[][];\n\n if (opts.hasHeaders) {\n headers = rows[0].map(h => h.trim());\n dataRows = rows.slice(1);\n } else {\n // Generate default headers (col0, col1, etc.)\n const numCols = rows[0].length;\n headers = Array.from({ length: numCols }, (_, i) => `col${i}`);\n dataRows = rows;\n }\n\n // Validate headers are unique\n const headerSet = new Set<string>();\n for (const header of headers) {\n if (headerSet.has(header)) {\n throw new CSVParseError(`Duplicate header: \"${header}\"`);\n }\n headerSet.add(header);\n }\n\n // Convert rows to items\n const items: PromptItem[] = dataRows.map((row, rowIndex) => {\n const item: PromptItem = {};\n for (let i = 0; i < headers.length; i++) {\n item[headers[i]] = row[i] !== undefined ? 
row[i] : '';\n }\n return item;\n });\n\n return {\n items,\n headers,\n rowCount: items.length\n };\n}\n\n/**\n * Parse CSV content into rows (array of arrays)\n * Handles quoted values, escaped quotes, and multi-line values\n */\nfunction parseCSVRows(content: string, delimiter: string): string[][] {\n const rows: string[][] = [];\n let currentRow: string[] = [];\n let currentCell = '';\n let inQuotes = false;\n let i = 0;\n\n while (i < content.length) {\n const char = content[i];\n const nextChar = content[i + 1];\n\n if (inQuotes) {\n if (char === '\"') {\n if (nextChar === '\"') {\n // Escaped quote \"\"\n currentCell += '\"';\n i += 2;\n } else {\n // End of quoted field\n inQuotes = false;\n i++;\n }\n } else {\n // Character inside quotes (including newlines)\n currentCell += char;\n i++;\n }\n } else {\n if (char === '\"') {\n // Start of quoted field\n inQuotes = true;\n i++;\n } else if (char === delimiter) {\n // End of cell\n currentRow.push(currentCell);\n currentCell = '';\n i++;\n } else if (char === '\\n') {\n // End of row\n currentRow.push(currentCell);\n if (currentRow.length > 0 || currentRow.some(c => c.length > 0)) {\n rows.push(currentRow);\n }\n currentRow = [];\n currentCell = '';\n i++;\n } else {\n currentCell += char;\n i++;\n }\n }\n }\n\n // Don't forget the last cell/row\n if (currentCell.length > 0 || currentRow.length > 0) {\n currentRow.push(currentCell);\n if (currentRow.length > 0) {\n rows.push(currentRow);\n }\n }\n\n return rows;\n}\n\n/**\n * Read and parse a CSV file\n * @param filePath Path to CSV file\n * @param options Parsing options\n * @returns Parsed CSV result\n */\nexport async function readCSVFile(\n filePath: string,\n options?: CSVParseOptions\n): Promise<CSVParseResult> {\n // Filter out undefined values from options before merging\n const filteredOptions = options ? Object.fromEntries(\n Object.entries(options).filter(([, v]) => v !== undefined)\n ) as CSVParseOptions : undefined;\n\n const opts = { ...DEFAULT_CSV_OPTIONS, ...filteredOptions };\n\n try {\n const content = await fs.promises.readFile(filePath, { encoding: opts.encoding });\n return parseCSVContent(content, opts);\n } catch (error) {\n if ((error as NodeJS.ErrnoException).code === 'ENOENT') {\n throw new CSVParseError(`CSV file not found: ${filePath}`);\n }\n throw error;\n }\n}\n\n/**\n * Read and parse a CSV file synchronously\n * @param filePath Path to CSV file\n * @param options Parsing options\n * @returns Parsed CSV result\n */\nexport function readCSVFileSync(\n filePath: string,\n options?: CSVParseOptions\n): CSVParseResult {\n // Filter out undefined values from options before merging\n const filteredOptions = options ? 
Object.fromEntries(\n Object.entries(options).filter(([, v]) => v !== undefined)\n ) as CSVParseOptions : undefined;\n\n const opts = { ...DEFAULT_CSV_OPTIONS, ...filteredOptions };\n\n try {\n const content = fs.readFileSync(filePath, { encoding: opts.encoding });\n return parseCSVContent(content, opts);\n } catch (error) {\n if ((error as NodeJS.ErrnoException).code === 'ENOENT') {\n throw new CSVParseError(`CSV file not found: ${filePath}`);\n }\n throw error;\n }\n}\n\n/**\n * Resolve a CSV file path relative to a base directory.\n * For pipeline packages, this should be the package directory (where pipeline.yaml lives).\n * \n * @param csvPath Path from config (may be relative)\n * @param baseDirectory Base directory for resolution (typically the pipeline package directory)\n * @returns Absolute file path\n * \n * @example\n * // Given packageDir = '/workspace/.vscode/pipelines/run-tests'\n * resolveCSVPath('input.csv', packageDir) // => '/workspace/.vscode/pipelines/run-tests/input.csv'\n * resolveCSVPath('data/files.csv', packageDir) // => '/workspace/.vscode/pipelines/run-tests/data/files.csv'\n * resolveCSVPath('../shared/common.csv', packageDir) // => '/workspace/.vscode/pipelines/shared/common.csv'\n * resolveCSVPath('/absolute/path.csv', packageDir) // => '/absolute/path.csv'\n */\nexport function resolveCSVPath(csvPath: string, baseDirectory: string): string {\n if (path.isAbsolute(csvPath)) {\n return csvPath;\n }\n return path.resolve(baseDirectory, csvPath);\n}\n\n/**\n * Validate CSV headers against expected columns\n * @param headers Actual headers from CSV\n * @param expectedColumns Expected column names\n * @returns Object with validation result and missing columns\n */\nexport function validateCSVHeaders(\n headers: string[],\n expectedColumns: string[]\n): { valid: boolean; missingColumns: string[] } {\n const headerSet = new Set(headers);\n const missingColumns = expectedColumns.filter(col => !headerSet.has(col));\n\n return {\n valid: missingColumns.length === 0,\n missingColumns\n };\n}\n\n/**\n * Get a preview of CSV data (first N rows)\n * @param result CSV parse result\n * @param maxRows Maximum rows to preview (default: 5)\n * @returns Preview items\n */\nexport function getCSVPreview(\n result: CSVParseResult,\n maxRows: number = 5\n): PromptItem[] {\n return result.items.slice(0, maxRows);\n}\n", "/**\n * Template Engine\n *\n * Simple template substitution for pipeline prompts.\n * Replaces {{column}} placeholders with values from pipeline items.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport { PromptItem } from './types';\nimport { \n extractJSON as sharedExtractJSON, \n parseAIResponse as sharedParseAIResponse \n} from '../utils/ai-response-parser';\nimport {\n TEMPLATE_VARIABLE_REGEX,\n SPECIAL_VARIABLES,\n TemplateVariableError,\n extractVariables as extractTemplateVariables,\n validateVariables\n} from '../utils/template-engine';\nimport { PipelineCoreError, ErrorCode } from '../errors';\n\n/**\n * Error thrown when a template variable is missing\n */\nexport class TemplateError extends PipelineCoreError {\n /** Name of the variable that caused the error */\n readonly variableName?: string;\n\n constructor(\n message: string,\n variableName?: string\n ) {\n super(message, {\n code: ErrorCode.TEMPLATE_ERROR,\n meta: variableName ? 
{ variableName } : undefined,\n });\n this.name = 'TemplateError';\n this.variableName = variableName;\n }\n}\n\n/**\n * Options for template substitution\n */\nexport interface SubstituteTemplateOptions {\n /** If true, throws on missing variables; if false, leaves as empty string */\n strict?: boolean;\n /** All items in the input (for {{ITEMS}} template variable) */\n allItems?: PromptItem[];\n}\n\n/**\n * Substitute template variables with values from a pipeline item\n * \n * Supports special variable {{ITEMS}} which is replaced with JSON array of all items.\n * This allows prompts to reference the full context of all items being processed.\n * \n * @param template Template string with {{variable}} placeholders\n * @param item Pipeline item containing values\n * @param strictOrOptions If boolean, strict mode; if object, full options\n * @returns Substituted string\n */\nexport function substituteTemplate(\n template: string,\n item: PromptItem,\n strictOrOptions: boolean | SubstituteTemplateOptions = false\n): string {\n // Handle backward compatibility: boolean for strict mode\n const options: SubstituteTemplateOptions = typeof strictOrOptions === 'boolean'\n ? { strict: strictOrOptions }\n : strictOrOptions;\n \n const { strict = false, allItems } = options;\n \n // Create a fresh regex instance to avoid issues with global flag and lastIndex\n const regex = new RegExp(TEMPLATE_VARIABLE_REGEX.source, 'g');\n \n return template.replace(regex, (match, variableName) => {\n // Handle special {{ITEMS}} variable - returns JSON array of all items\n if (variableName === 'ITEMS' && allItems) {\n return JSON.stringify(allItems, null, 2);\n }\n \n // Handle special system variables that are not in item (don't error in strict mode)\n if (SPECIAL_VARIABLES.has(variableName)) {\n // In non-strict mode, return placeholder; in strict mode, also return placeholder\n // since these are system-provided at runtime\n return match;\n }\n \n if (variableName in item) {\n return item[variableName];\n }\n\n if (strict) {\n throw new TemplateError(\n `Missing variable \"${variableName}\" in template`,\n variableName\n );\n }\n\n // Non-strict: replace with empty string\n return '';\n });\n}\n\n/**\n * Extract all variable names from a template\n * @param template Template string\n * @param excludeSpecial If true, excludes special system variables (ITEMS, RESULTS, etc.)\n * @returns Array of unique variable names\n */\nexport function extractVariables(template: string, excludeSpecial: boolean = true): string[] {\n return extractTemplateVariables(template, excludeSpecial);\n}\n\n/**\n * Validate that a pipeline item has all required template variables\n * @param template Template string\n * @param item Pipeline item to validate\n * @returns Object with validation result and missing variables\n */\nexport function validateItemForTemplate(\n template: string,\n item: PromptItem\n): { valid: boolean; missingVariables: string[] } {\n return validateVariables(template, item);\n}\n\n/**\n * Build the full prompt for AI by appending output field instructions\n * @param userPrompt User's prompt template (already substituted)\n * @param outputFields Field names expected in AI response\n * @returns Full prompt with JSON output instruction\n */\nexport function buildFullPrompt(userPrompt: string, outputFields: string[]): string {\n if (outputFields.length === 0) {\n return userPrompt;\n }\n\n const fieldsStr = outputFields.join(', ');\n return `${userPrompt}\n\nReturn JSON with these fields: ${fieldsStr}`;\n}\n\n/**\n * 
Build a complete prompt from template, item, and output fields\n * Combines template substitution and output instruction appending\n * @param template Prompt template with {{variable}} placeholders\n * @param item Pipeline item with values\n * @param outputFields Expected output field names\n * @param strict Strict mode for variable validation\n * @returns Complete prompt ready for AI\n */\nexport function buildPromptFromTemplate(\n template: string,\n item: PromptItem,\n outputFields: string[],\n strict: boolean = false\n): string {\n const substituted = substituteTemplate(template, item, strict);\n return buildFullPrompt(substituted, outputFields);\n}\n\n/**\n * Parse JSON response from AI, extracting only the declared fields\n * Wrapper that adds TemplateError for backward compatibility\n * @param response AI response string\n * @param outputFields Expected field names\n * @returns Object with extracted fields (missing fields become null)\n */\nexport function parseAIResponse(\n response: string,\n outputFields: string[]\n): Record<string, unknown> {\n try {\n return sharedParseAIResponse(response, outputFields);\n } catch (error) {\n throw new TemplateError(error instanceof Error ? error.message : String(error));\n }\n}\n\n/**\n * Extract JSON from a response string\n * Re-exported from shared utilities\n * @param response Response string\n * @returns Extracted JSON string or null\n */\nexport function extractJSON(response: string): string | null {\n return sharedExtractJSON(response);\n}\n\n/**\n * Escape special characters in a value for safe template use\n * @param value Value to escape\n * @returns Escaped value\n */\nexport function escapeTemplateValue(value: string): string {\n return value\n .replace(/\\\\/g, '\\\\\\\\')\n .replace(/\\{/g, '\\\\{')\n .replace(/\\}/g, '\\\\}');\n}\n\n/**\n * Preview how a template will render with sample values\n * @param template Template string\n * @param item Sample item\n * @param maxLength Maximum output length\n * @param allItems Optional array of all items (for {{ITEMS}} variable preview)\n * @returns Preview string\n */\nexport function previewTemplate(\n template: string,\n item: PromptItem,\n maxLength: number = 200,\n allItems?: PromptItem[]\n): string {\n try {\n const result = substituteTemplate(template, item, { strict: false, allItems });\n if (result.length > maxLength) {\n return result.substring(0, maxLength) + '...';\n }\n return result;\n } catch {\n return `[Error rendering template]`;\n }\n}\n", "/**\n * Input Generator\n *\n * AI-powered input generation for pipeline items.\n * Constructs prompts from user configuration and parses AI responses into items.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport { GenerateInputConfig, PromptItem, AIInvoker } from './types';\nimport { extractJSON } from '../utils/ai-response-parser';\nimport { PipelineCoreError, ErrorCode } from '../errors';\n\n/**\n * Error thrown when input generation fails\n */\nexport class InputGenerationError extends PipelineCoreError {\n constructor(\n message: string,\n cause?: Error\n ) {\n super(message, {\n code: ErrorCode.INPUT_GENERATION_FAILED,\n cause,\n });\n this.name = 'InputGenerationError';\n }\n}\n\n/**\n * Result of generating input items\n */\nexport interface GenerateInputResult {\n /** Whether generation was successful */\n success: boolean;\n /** Generated items (if successful) */\n items?: PromptItem[];\n /** Error message (if failed) */\n error?: string;\n /** Raw AI response for debugging */\n rawResponse?: 
string;\n}\n\n/**\n * A generated item with selection state for the review UI\n */\nexport interface GeneratedItem {\n /** The actual item data */\n data: PromptItem;\n /** Whether this item is selected for execution */\n selected: boolean;\n}\n\n/**\n * State for the preview webview when using generate\n */\nexport type GenerateState =\n | { status: 'initial' }\n | { status: 'generating' }\n | { status: 'review'; items: GeneratedItem[] }\n | { status: 'error'; message: string };\n\n/**\n * Build the AI prompt for generating input items\n * \n * @param config The generate configuration from the pipeline\n * @returns The constructed prompt to send to AI\n */\nexport function buildGeneratePrompt(config: GenerateInputConfig): string {\n const { prompt, schema } = config;\n \n // Build the field list\n const fieldsList = schema.join(', ');\n \n // Build example object\n const exampleObj: Record<string, string> = {};\n for (const field of schema) {\n exampleObj[field] = '...';\n }\n const exampleJson = JSON.stringify(exampleObj, null, 2);\n \n return `${prompt}\n\nReturn a JSON array where each object has these fields: ${fieldsList}\n\nExample format:\n[\n ${exampleJson},\n ...\n]\n\nIMPORTANT: Return ONLY the JSON array, no additional text or explanation.`;\n}\n\n/**\n * Parse the AI response into generated items\n * \n * @param response The raw AI response\n * @param schema The expected field names\n * @returns Parsed items array\n * @throws InputGenerationError if parsing fails\n */\nexport function parseGenerateResponse(\n response: string,\n schema: string[]\n): PromptItem[] {\n // Try to extract JSON from the response\n const jsonStr = extractJSON(response);\n \n if (!jsonStr) {\n throw new InputGenerationError(\n 'AI response does not contain valid JSON. Expected a JSON array.',\n undefined\n );\n }\n \n let parsed: unknown;\n try {\n parsed = JSON.parse(jsonStr);\n } catch (e) {\n throw new InputGenerationError(\n `Failed to parse JSON from AI response: ${e instanceof Error ? e.message : String(e)}`,\n e instanceof Error ? e : undefined\n );\n }\n \n // Validate it's an array\n if (!Array.isArray(parsed)) {\n throw new InputGenerationError(\n `AI response is not an array. Got: ${typeof parsed}`\n );\n }\n \n // Validate and normalize each item\n const items: PromptItem[] = [];\n for (let i = 0; i < parsed.length; i++) {\n const rawItem = parsed[i];\n \n if (typeof rawItem !== 'object' || rawItem === null) {\n throw new InputGenerationError(\n `Item at index ${i} is not an object. Got: ${typeof rawItem}`\n );\n }\n \n // Create normalized item with all schema fields\n const item: PromptItem = {};\n for (const field of schema) {\n if (field in rawItem) {\n // Convert value to string for consistency\n const value = (rawItem as Record<string, unknown>)[field];\n item[field] = value === null || value === undefined \n ? '' \n : String(value);\n } else {\n // Missing field - set to empty string\n item[field] = '';\n }\n }\n \n items.push(item);\n }\n \n return items;\n}\n\n/**\n * Generate input items using AI\n * \n * @param config The generate configuration\n * @param aiInvoker Function to invoke AI\n * @returns Generation result with items or error\n */\nexport async function generateInputItems(\n config: GenerateInputConfig,\n aiInvoker: AIInvoker\n): Promise<GenerateInputResult> {\n // Build the prompt\n const prompt = buildGeneratePrompt(config);\n \n // Invoke AI with optional model from config\n const aiResult = await aiInvoker(prompt, config.model ? 
{ model: config.model } : undefined);\n \n if (!aiResult.success) {\n return {\n success: false,\n error: aiResult.error || 'AI invocation failed',\n rawResponse: aiResult.response\n };\n }\n \n if (!aiResult.response) {\n return {\n success: false,\n error: 'AI returned empty response'\n };\n }\n \n // Parse the response\n try {\n const items = parseGenerateResponse(aiResult.response, config.schema);\n return {\n success: true,\n items,\n rawResponse: aiResult.response\n };\n } catch (e) {\n return {\n success: false,\n error: e instanceof InputGenerationError ? e.message : String(e),\n rawResponse: aiResult.response\n };\n }\n}\n\n/**\n * Convert generated items to GeneratedItem array with selection state\n * All items are selected by default\n * \n * @param items The generated items\n * @returns Items wrapped with selection state\n */\nexport function toGeneratedItems(items: PromptItem[]): GeneratedItem[] {\n return items.map(data => ({\n data,\n selected: true\n }));\n}\n\n/**\n * Filter generated items to only those that are selected\n * \n * @param items The generated items with selection state\n * @returns Only the selected item data\n */\nexport function getSelectedItems(items: GeneratedItem[]): PromptItem[] {\n return items.filter(item => item.selected).map(item => item.data);\n}\n\n/**\n * Create an empty item matching the schema\n * \n * @param schema The field names\n * @returns Empty item with all fields set to empty string\n */\nexport function createEmptyItem(schema: string[]): PromptItem {\n const item: PromptItem = {};\n for (const field of schema) {\n item[field] = '';\n }\n return item;\n}\n\n/**\n * Validate that a generate config is well-formed\n * \n * @param config The config to validate\n * @returns Validation result with errors if invalid\n */\nexport function validateGenerateConfig(\n config: GenerateInputConfig\n): { valid: boolean; errors: string[] } {\n const errors: string[] = [];\n \n if (!config.prompt || typeof config.prompt !== 'string') {\n errors.push('Generate config requires a \"prompt\" string');\n } else if (config.prompt.trim().length === 0) {\n errors.push('Generate config \"prompt\" cannot be empty');\n }\n \n if (!config.schema || !Array.isArray(config.schema)) {\n errors.push('Generate config requires a \"schema\" array');\n } else if (config.schema.length === 0) {\n errors.push('Generate config \"schema\" must have at least one field');\n } else {\n // Validate each schema field\n for (let i = 0; i < config.schema.length; i++) {\n const field = config.schema[i];\n if (typeof field !== 'string') {\n errors.push(`Schema field at index ${i} must be a string`);\n } else if (field.trim().length === 0) {\n errors.push(`Schema field at index ${i} cannot be empty`);\n } else if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(field)) {\n errors.push(`Schema field \"${field}\" must be a valid identifier (letters, numbers, underscore, not starting with number)`);\n }\n }\n \n // Check for duplicates\n const seen = new Set<string>();\n for (const field of config.schema) {\n if (seen.has(field)) {\n errors.push(`Duplicate schema field: \"${field}\"`);\n }\n seen.add(field);\n }\n }\n \n return {\n valid: errors.length === 0,\n errors\n };\n}\n", "/**\n * Filter Executor\n *\n * Implements rule-based, AI-based, and hybrid filtering for pipeline items.\n * The filter phase reduces the number of items before the expensive map phase.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport {\n AIInvoker,\n AIFilterConfig,\n FilterConfig,\n FilterResult,\n 
FilterRule,\n FilterStats,\n PromptItem,\n RuleFilterConfig,\n ProcessTracker\n} from './types';\nimport { substituteTemplate } from './template';\nimport { getLogger, LogCategory } from '../logger';\n\n/**\n * Options for filter execution\n */\nexport interface FilterExecuteOptions {\n /** AI invoker function (required for ai/hybrid filters) */\n aiInvoker?: AIInvoker;\n /** Optional process tracker for AI process manager integration */\n processTracker?: ProcessTracker;\n /** Progress callback */\n onProgress?: (progress: FilterProgress) => void;\n /** Optional cancellation check function */\n isCancelled?: () => boolean;\n}\n\n/**\n * Progress information for filter execution\n */\nexport interface FilterProgress {\n /** Current phase */\n phase: 'rule' | 'ai';\n /** Items processed so far */\n processed: number;\n /** Total items to process */\n total: number;\n /** Number of items included so far */\n included: number;\n /** Number of items excluded so far */\n excluded: number;\n}\n\n/**\n * Execute filter phase on input items\n * \n * @param items Input items to filter\n * @param filterConfig Filter configuration\n * @param options Execution options\n * @returns Filtered items with metadata\n */\nexport async function executeFilter(\n items: PromptItem[],\n filterConfig: FilterConfig,\n options: FilterExecuteOptions\n): Promise<FilterResult> {\n switch (filterConfig.type) {\n case 'rule':\n if (!filterConfig.rule) {\n throw new Error('Rule filter requires \"rule\" configuration');\n }\n return executeRuleFilter(items, filterConfig.rule, options);\n \n case 'ai':\n if (!filterConfig.ai) {\n throw new Error('AI filter requires \"ai\" configuration');\n }\n if (!options.aiInvoker) {\n throw new Error('AI filter requires aiInvoker in options');\n }\n return executeAIFilter(items, filterConfig.ai, options);\n \n case 'hybrid':\n if (!filterConfig.rule || !filterConfig.ai) {\n throw new Error('Hybrid filter requires both \"rule\" and \"ai\" configuration');\n }\n if (!options.aiInvoker) {\n throw new Error('Hybrid filter requires aiInvoker in options');\n }\n return executeHybridFilter(items, filterConfig, options);\n \n default:\n throw new Error(`Unknown filter type: ${(filterConfig as any).type}`);\n }\n}\n\n/**\n * Execute rule-based filter (synchronous, fast)\n */\nexport async function executeRuleFilter(\n items: PromptItem[],\n config: RuleFilterConfig,\n options: FilterExecuteOptions\n): Promise<FilterResult> {\n const startTime = Date.now();\n const included: PromptItem[] = [];\n const excluded: PromptItem[] = [];\n\n for (let i = 0; i < items.length; i++) {\n // Check for cancellation\n if (options.isCancelled?.()) {\n throw new Error('Filter execution cancelled');\n }\n\n const item = items[i];\n const passed = evaluateAllRules(item, config);\n \n if (passed) {\n included.push(item);\n } else {\n excluded.push(item);\n }\n\n // Report progress\n if (options.onProgress && (i % 100 === 0 || i === items.length - 1)) {\n options.onProgress({\n phase: 'rule',\n processed: i + 1,\n total: items.length,\n included: included.length,\n excluded: excluded.length\n });\n }\n }\n\n return {\n included,\n excluded,\n stats: {\n totalItems: items.length,\n includedCount: included.length,\n excludedCount: excluded.length,\n executionTimeMs: Date.now() - startTime,\n filterType: 'rule'\n }\n };\n}\n\n/**\n * Execute AI-based filter (asynchronous, uses AI calls)\n */\nexport async function executeAIFilter(\n items: PromptItem[],\n config: AIFilterConfig,\n options: FilterExecuteOptions\n): 
Promise<FilterResult> {\n const startTime = Date.now();\n const included: PromptItem[] = [];\n const excluded: PromptItem[] = [];\n const parallelLimit = config.parallel ?? 5;\n const timeoutMs = config.timeoutMs ?? 30000; // 30 seconds default\n \n // Process items in parallel batches\n for (let i = 0; i < items.length; i += parallelLimit) {\n // Check for cancellation\n if (options.isCancelled?.()) {\n throw new Error('Filter execution cancelled');\n }\n\n const batch = items.slice(i, Math.min(i + parallelLimit, items.length));\n const results = await Promise.all(\n batch.map(item => evaluateAIRule(item, config, options.aiInvoker!, timeoutMs))\n );\n\n // Categorize results\n for (let j = 0; j < batch.length; j++) {\n if (results[j]) {\n included.push(batch[j]);\n } else {\n excluded.push(batch[j]);\n }\n }\n\n // Report progress\n if (options.onProgress) {\n options.onProgress({\n phase: 'ai',\n processed: Math.min(i + parallelLimit, items.length),\n total: items.length,\n included: included.length,\n excluded: excluded.length\n });\n }\n }\n\n return {\n included,\n excluded,\n stats: {\n totalItems: items.length,\n includedCount: included.length,\n excludedCount: excluded.length,\n executionTimeMs: Date.now() - startTime,\n filterType: 'ai'\n }\n };\n}\n\n/**\n * Execute hybrid filter (rule-based pre-filter + AI confirmation)\n */\nexport async function executeHybridFilter(\n items: PromptItem[],\n config: FilterConfig,\n options: FilterExecuteOptions\n): Promise<FilterResult> {\n const startTime = Date.now();\n const combineMode = config.combineMode ?? 'and';\n\n // Step 1: Apply rule filter\n const ruleResult = await executeRuleFilter(items, config.rule!, options);\n\n if (combineMode === 'or') {\n // OR mode: AI filter evaluates excluded items, include if AI passes\n const aiResult = await executeAIFilter(ruleResult.excluded, config.ai!, options);\n \n return {\n included: [...ruleResult.included, ...aiResult.included],\n excluded: aiResult.excluded,\n stats: {\n totalItems: items.length,\n includedCount: ruleResult.included.length + aiResult.included.length,\n excludedCount: aiResult.excluded.length,\n executionTimeMs: Date.now() - startTime,\n filterType: 'hybrid'\n }\n };\n } else {\n // AND mode (default): AI filter evaluates included items, keep only if AI passes\n const aiResult = await executeAIFilter(ruleResult.included, config.ai!, options);\n \n return {\n included: aiResult.included,\n excluded: [...ruleResult.excluded, ...aiResult.excluded],\n stats: {\n totalItems: items.length,\n includedCount: aiResult.included.length,\n excludedCount: ruleResult.excluded.length + aiResult.excluded.length,\n executionTimeMs: Date.now() - startTime,\n filterType: 'hybrid'\n }\n };\n }\n}\n\n/**\n * Evaluate all rules for an item\n */\nfunction evaluateAllRules(item: PromptItem, config: RuleFilterConfig): boolean {\n const mode = config.mode ?? 
'all';\n \n if (mode === 'all') {\n // AND: Every rule must pass\n return config.rules.every(rule => evaluateRule(item, rule));\n } else {\n // OR: At least one rule must pass\n return config.rules.some(rule => evaluateRule(item, rule));\n }\n}\n\n/**\n * Evaluate a single rule against an item\n */\nfunction evaluateRule(item: PromptItem, rule: FilterRule): boolean {\n const fieldValue = getNestedValue(item, rule.field);\n \n // Handle missing field - treat as false\n if (fieldValue === undefined || fieldValue === null) {\n return false;\n }\n \n switch (rule.operator) {\n case 'equals':\n return fieldValue === rule.value;\n \n case 'not_equals':\n return fieldValue !== rule.value;\n \n case 'in':\n return rule.values?.includes(fieldValue) ?? false;\n \n case 'not_in':\n return !rule.values?.includes(fieldValue);\n \n case 'contains':\n return String(fieldValue).toLowerCase()\n .includes(String(rule.value).toLowerCase());\n \n case 'not_contains':\n return !String(fieldValue).toLowerCase()\n .includes(String(rule.value).toLowerCase());\n \n case 'greater_than':\n return Number(fieldValue) > Number(rule.value);\n \n case 'less_than':\n return Number(fieldValue) < Number(rule.value);\n \n case 'gte':\n return Number(fieldValue) >= Number(rule.value);\n \n case 'lte':\n return Number(fieldValue) <= Number(rule.value);\n \n case 'matches':\n if (!rule.pattern) {\n throw new Error('matches operator requires pattern');\n }\n const regex = new RegExp(rule.pattern);\n return regex.test(String(fieldValue));\n \n default:\n throw new Error(`Unknown operator: ${rule.operator}`);\n }\n}\n\n/**\n * Get nested value from object using dot notation (e.g., \"user.role\")\n */\nfunction getNestedValue(item: any, path: string): any {\n return path.split('.').reduce((obj, key) => obj?.[key], item);\n}\n\n/**\n * Evaluate an item using AI\n * Returns true if item should be included\n */\nasync function evaluateAIRule(\n item: PromptItem,\n config: AIFilterConfig,\n aiInvoker: AIInvoker,\n timeoutMs: number\n): Promise<boolean> {\n try {\n // Render prompt with item data\n const prompt = substituteTemplate(config.prompt, item);\n \n // Call AI\n const result = await aiInvoker(prompt, {\n model: config.model,\n timeoutMs\n });\n\n if (!result.success) {\n // On error, default to excluding the item\n getLogger().error(LogCategory.PIPELINE, `AI filter error for item: ${result.error}`);\n return false;\n }\n\n // Parse response\n let response: any;\n if (config.output && config.output.length > 0) {\n // Structured output expected\n try {\n response = JSON.parse(result.response || '');\n } catch {\n // Failed to parse JSON, default to exclude\n getLogger().error(LogCategory.PIPELINE, `Failed to parse AI filter response as JSON: ${result.response}`);\n return false;\n }\n } else {\n // Text mode - check for affirmative response\n response = { include: /\\b(yes|true|include|pass)\\b/i.test(result.response || '') };\n }\n\n // Check for 'include' field\n if (typeof response.include === 'boolean') {\n return response.include;\n }\n\n // Fallback: if no clear include field, default to false\n getLogger().error(LogCategory.PIPELINE, `AI filter response missing 'include' field: ${JSON.stringify(response)}`);\n return false;\n\n } catch (error) {\n getLogger().error(LogCategory.PIPELINE, `AI filter exception: ${error instanceof Error ? 
error.message : String(error)}`);\n return false;\n }\n}\n", "/**\n * Prompt File Resolver\n *\n * Resolves and loads prompt files for YAML pipelines.\n * Supports relative paths, search order for bare filenames, and frontmatter stripping.\n *\n * Path Resolution Strategy (same as CSV resolution):\n * - Relative paths: resolved from pipeline package directory\n * - Absolute paths: used as-is\n * - Bare filenames: searched in order (pipeline dir, prompts/ subfolder, shared prompts)\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport { PipelineCoreError, ErrorCode } from '../errors';\n\n/**\n * Error thrown for prompt file resolution issues\n */\nexport class PromptResolverError extends PipelineCoreError {\n /** Paths that were searched when resolving the prompt */\n readonly searchedPaths?: string[];\n\n constructor(\n message: string,\n searchedPaths?: string[]\n ) {\n super(message, {\n code: ErrorCode.PROMPT_RESOLUTION_FAILED,\n meta: searchedPaths ? { searchedPaths } : undefined,\n });\n this.name = 'PromptResolverError';\n this.searchedPaths = searchedPaths;\n }\n}\n\n/**\n * Result of prompt file resolution\n */\nexport interface PromptResolutionResult {\n /** The resolved prompt content (frontmatter stripped) */\n content: string;\n /** The absolute path where the prompt was found */\n resolvedPath: string;\n /** Whether frontmatter was stripped */\n hadFrontmatter: boolean;\n}\n\n/**\n * Frontmatter regex pattern\n * Matches YAML frontmatter at the start of a file:\n * ---\n * key: value\n * ---\n */\nconst FRONTMATTER_REGEX = /^---\\r?\\n[\\s\\S]*?\\r?\\n---\\r?\\n?/;\n\n/**\n * Check if a path contains path separators (indicating it's not a bare filename)\n */\nfunction hasPathSeparators(filePath: string): boolean {\n return filePath.includes('/') || filePath.includes('\\\\');\n}\n\n/**\n * Get search paths for a bare filename (no path separators)\n * \n * Search order:\n * 1. Pipeline package directory - {pipelineDir}/filename\n * 2. prompts/ subfolder - {pipelineDir}/prompts/filename\n * 3. Shared prompts folder - {pipelinesRoot}/prompts/filename\n * \n * @param filename Bare filename without path separators\n * @param pipelineDirectory Pipeline package directory (where pipeline.yaml lives)\n * @returns Array of paths to search, in order\n */\nexport function getSearchPaths(filename: string, pipelineDirectory: string): string[] {\n const paths: string[] = [];\n\n // 1. Pipeline package directory\n paths.push(path.join(pipelineDirectory, filename));\n\n // 2. prompts/ subfolder within pipeline package\n paths.push(path.join(pipelineDirectory, 'prompts', filename));\n\n // 3. 
Shared prompts folder (sibling to pipeline package)\n // pipelinesRoot is the parent of pipelineDirectory\n const pipelinesRoot = path.dirname(pipelineDirectory);\n paths.push(path.join(pipelinesRoot, 'prompts', filename));\n\n return paths;\n}\n\n/**\n * Resolve a prompt file path to an absolute path\n * \n * Resolution rules:\n * - Absolute paths: returned as-is\n * - Paths with separators (e.g., \"prompts/map.prompt.md\"): resolved relative to pipelineDirectory\n * - Bare filenames (e.g., \"analyze.prompt.md\"): searched using getSearchPaths()\n * \n * @param promptFile Path or filename from config\n * @param pipelineDirectory Pipeline package directory (where pipeline.yaml lives)\n * @returns Absolute path to the prompt file\n * @throws PromptResolverError if file not found\n */\nexport function resolvePromptPath(promptFile: string, pipelineDirectory: string): string {\n // Absolute path - use as-is\n if (path.isAbsolute(promptFile)) {\n if (!fs.existsSync(promptFile)) {\n throw new PromptResolverError(\n `Prompt file not found: ${promptFile}`,\n [promptFile]\n );\n }\n return promptFile;\n }\n\n // Path with separators - resolve relative to pipeline directory\n if (hasPathSeparators(promptFile)) {\n const resolvedPath = path.resolve(pipelineDirectory, promptFile);\n if (!fs.existsSync(resolvedPath)) {\n throw new PromptResolverError(\n `Prompt file not found: ${promptFile}`,\n [resolvedPath]\n );\n }\n return resolvedPath;\n }\n\n // Bare filename - search in order\n const searchPaths = getSearchPaths(promptFile, pipelineDirectory);\n for (const searchPath of searchPaths) {\n if (fs.existsSync(searchPath)) {\n return searchPath;\n }\n }\n\n // Not found anywhere\n throw new PromptResolverError(\n `Prompt file \"${promptFile}\" not found. Searched paths:\\n - ${searchPaths.join('\\n - ')}`,\n searchPaths\n );\n}\n\n/**\n * Extract prompt content from file content, stripping frontmatter if present\n * \n * Frontmatter format:\n * ---\n * version: 1.0\n * description: Bug analysis prompt\n * variables: [title, description, priority]\n * ---\n * \n * Actual prompt content starts here...\n * \n * @param fileContent Raw file content\n * @returns Object with content and whether frontmatter was stripped\n */\nexport function extractPromptContent(fileContent: string): { content: string; hadFrontmatter: boolean } {\n const match = fileContent.match(FRONTMATTER_REGEX);\n \n if (match) {\n const content = fileContent.slice(match[0].length).trim();\n return {\n content,\n hadFrontmatter: true\n };\n }\n\n return {\n content: fileContent.trim(),\n hadFrontmatter: false\n };\n}\n\n/**\n * Resolve and load a prompt file\n * \n * This is the main API for loading prompts from files.\n * \n * @param promptFile Path or filename from config\n * @param pipelineDirectory Pipeline package directory (where pipeline.yaml lives)\n * @returns Prompt content string (frontmatter stripped)\n * @throws PromptResolverError if file not found or empty\n * \n * @example\n * // Simple - prompt in same folder\n * const prompt = await resolvePromptFile('analyze.prompt.md', '/path/to/pipeline');\n * \n * // With prompts subfolder\n * const prompt = await resolvePromptFile('prompts/map.prompt.md', '/path/to/pipeline');\n * \n * // Using shared prompts\n * const prompt = await resolvePromptFile('../shared/prompts/common.prompt.md', '/path/to/pipeline');\n */\nexport async function resolvePromptFile(\n promptFile: string,\n pipelineDirectory: string\n): Promise<string> {\n const resolvedPath = resolvePromptPath(promptFile, 
pipelineDirectory);\n \n try {\n const fileContent = await fs.promises.readFile(resolvedPath, 'utf-8');\n const { content } = extractPromptContent(fileContent);\n \n if (!content) {\n throw new PromptResolverError(\n `Prompt file is empty after stripping frontmatter: ${promptFile}`,\n [resolvedPath]\n );\n }\n \n return content;\n } catch (error) {\n if (error instanceof PromptResolverError) {\n throw error;\n }\n throw new PromptResolverError(\n `Failed to read prompt file \"${promptFile}\": ${error instanceof Error ? error.message : String(error)}`,\n [resolvedPath]\n );\n }\n}\n\n/**\n * Resolve and load a prompt file synchronously\n * \n * @param promptFile Path or filename from config\n * @param pipelineDirectory Pipeline package directory (where pipeline.yaml lives)\n * @returns Prompt content string (frontmatter stripped)\n * @throws PromptResolverError if file not found or empty\n */\nexport function resolvePromptFileSync(\n promptFile: string,\n pipelineDirectory: string\n): string {\n const resolvedPath = resolvePromptPath(promptFile, pipelineDirectory);\n \n try {\n const fileContent = fs.readFileSync(resolvedPath, 'utf-8');\n const { content } = extractPromptContent(fileContent);\n \n if (!content) {\n throw new PromptResolverError(\n `Prompt file is empty after stripping frontmatter: ${promptFile}`,\n [resolvedPath]\n );\n }\n \n return content;\n } catch (error) {\n if (error instanceof PromptResolverError) {\n throw error;\n }\n throw new PromptResolverError(\n `Failed to read prompt file \"${promptFile}\": ${error instanceof Error ? error.message : String(error)}`,\n [resolvedPath]\n );\n }\n}\n\n/**\n * Resolve and load a prompt file with full result details\n * \n * @param promptFile Path or filename from config\n * @param pipelineDirectory Pipeline package directory (where pipeline.yaml lives)\n * @returns Full resolution result with content, path, and frontmatter info\n * @throws PromptResolverError if file not found or empty\n */\nexport async function resolvePromptFileWithDetails(\n promptFile: string,\n pipelineDirectory: string\n): Promise<PromptResolutionResult> {\n const resolvedPath = resolvePromptPath(promptFile, pipelineDirectory);\n \n try {\n const fileContent = await fs.promises.readFile(resolvedPath, 'utf-8');\n const { content, hadFrontmatter } = extractPromptContent(fileContent);\n \n if (!content) {\n throw new PromptResolverError(\n `Prompt file is empty after stripping frontmatter: ${promptFile}`,\n [resolvedPath]\n );\n }\n \n return {\n content,\n resolvedPath,\n hadFrontmatter\n };\n } catch (error) {\n if (error instanceof PromptResolverError) {\n throw error;\n }\n throw new PromptResolverError(\n `Failed to read prompt file \"${promptFile}\": ${error instanceof Error ? 
error.message : String(error)}`,\n [resolvedPath]\n );\n }\n}\n\n/**\n * Check if a prompt file exists (without loading it)\n * \n * @param promptFile Path or filename from config\n * @param pipelineDirectory Pipeline package directory\n * @returns True if the file exists at any of the search locations\n */\nexport function promptFileExists(promptFile: string, pipelineDirectory: string): boolean {\n try {\n resolvePromptPath(promptFile, pipelineDirectory);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Validate that a prompt file can be resolved (for config validation)\n * \n * @param promptFile Path or filename from config\n * @param pipelineDirectory Pipeline package directory\n * @returns Validation result with error message if invalid\n */\nexport function validatePromptFile(\n promptFile: string,\n pipelineDirectory: string\n): { valid: boolean; error?: string; searchedPaths?: string[] } {\n try {\n resolvePromptPath(promptFile, pipelineDirectory);\n return { valid: true };\n } catch (error) {\n if (error instanceof PromptResolverError) {\n return {\n valid: false,\n error: error.message,\n searchedPaths: error.searchedPaths\n };\n }\n return {\n valid: false,\n error: error instanceof Error ? error.message : String(error)\n };\n }\n}\n", "/**\n * Skill Resolver\n *\n * Resolves and loads skill prompts from the .github/skills/ directory.\n * Skills are organized as directories containing a SKILL.md file.\n *\n * Skill Structure:\n * .github/skills/\n * \u251C\u2500\u2500 go-deep/\n * \u2502 \u2514\u2500\u2500 SKILL.md # THE prompt/skill definition (required)\n * \u251C\u2500\u2500 summarizer/\n * \u2502 \u2514\u2500\u2500 SKILL.md\n *\n * Resolution: skill: \"go-deep\" \u2192 .github/skills/go-deep/SKILL.md\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport { extractPromptContent } from './prompt-resolver';\nimport { DEFAULT_SKILLS_DIRECTORY as SKILLS_DIR_DEFAULT } from '../config/defaults';\nimport { PipelineCoreError, ErrorCode } from '../errors';\n\n// Re-export for backward compatibility\nexport const DEFAULT_SKILLS_DIRECTORY = SKILLS_DIR_DEFAULT;\n\n/**\n * Standard skill filename within a skill directory (required)\n */\nexport const SKILL_PROMPT_FILENAME = 'SKILL.md';\n\n/**\n * Error thrown for skill resolution issues\n */\nexport class SkillResolverError extends PipelineCoreError {\n /** Name of the skill that failed to resolve */\n readonly skillName: string;\n /** Path that was searched */\n readonly searchedPath?: string;\n\n constructor(\n message: string,\n skillName: string,\n searchedPath?: string\n ) {\n super(message, {\n code: ErrorCode.SKILL_RESOLUTION_FAILED,\n meta: {\n skillName,\n ...(searchedPath && { searchedPath }),\n },\n });\n this.name = 'SkillResolverError';\n this.skillName = skillName;\n this.searchedPath = searchedPath;\n }\n}\n\n/**\n * Result of skill resolution\n */\nexport interface SkillResolutionResult {\n /** The resolved prompt content (frontmatter stripped) */\n content: string;\n /** The absolute path to the skill's SKILL.md */\n resolvedPath: string;\n /** The skill directory path */\n skillDirectory: string;\n /** Whether frontmatter was stripped from the prompt */\n hadFrontmatter: boolean;\n /** Skill metadata from SKILL.md frontmatter */\n metadata?: SkillMetadata;\n}\n\n/**\n * Skill metadata parsed from SKILL.md frontmatter\n */\nexport interface SkillMetadata {\n /** Skill name (from frontmatter or directory name) */\n name?: string;\n /** 
Skill description */\n description?: string;\n /** Skill version */\n version?: string;\n /** Expected input variables */\n variables?: string[];\n /** Expected output fields */\n output?: string[];\n /** Raw metadata content */\n raw?: string;\n}\n\n/**\n * Get the skills directory path\n * \n * @param workspaceRoot The workspace root directory\n * @param customPath Optional custom skills directory path (relative or absolute)\n * @returns Absolute path to the skills directory\n */\nexport function getSkillsDirectory(workspaceRoot: string, customPath?: string): string {\n if (customPath) {\n if (path.isAbsolute(customPath)) {\n return customPath;\n }\n return path.resolve(workspaceRoot, customPath);\n }\n return path.resolve(workspaceRoot, DEFAULT_SKILLS_DIRECTORY);\n}\n\n/**\n * Get the path to a specific skill's directory\n * \n * @param skillName Name of the skill\n * @param workspaceRoot The workspace root directory\n * @param customSkillsPath Optional custom skills directory path\n * @returns Absolute path to the skill directory\n */\nexport function getSkillDirectory(\n skillName: string,\n workspaceRoot: string,\n customSkillsPath?: string\n): string {\n const skillsDir = getSkillsDirectory(workspaceRoot, customSkillsPath);\n return path.join(skillsDir, skillName);\n}\n\n/**\n * Get the path to a skill's SKILL.md file\n * \n * @param skillName Name of the skill\n * @param workspaceRoot The workspace root directory\n * @param customSkillsPath Optional custom skills directory path\n * @returns Absolute path to the skill's SKILL.md file\n */\nexport function getSkillPromptPath(\n skillName: string,\n workspaceRoot: string,\n customSkillsPath?: string\n): string {\n const skillDir = getSkillDirectory(skillName, workspaceRoot, customSkillsPath);\n return path.join(skillDir, SKILL_PROMPT_FILENAME);\n}\n\n/**\n * Check if a skill exists\n * \n * @param skillName Name of the skill\n * @param workspaceRoot The workspace root directory\n * @param customSkillsPath Optional custom skills directory path\n * @returns True if the skill's SKILL.md exists\n */\nexport function skillExists(\n skillName: string,\n workspaceRoot: string,\n customSkillsPath?: string\n): boolean {\n const promptPath = getSkillPromptPath(skillName, workspaceRoot, customSkillsPath);\n return fs.existsSync(promptPath);\n}\n\n/**\n * List all available skills\n * \n * @param workspaceRoot The workspace root directory\n * @param customSkillsPath Optional custom skills directory path\n * @returns Array of skill names\n */\nexport function listSkills(\n workspaceRoot: string,\n customSkillsPath?: string\n): string[] {\n const skillsDir = getSkillsDirectory(workspaceRoot, customSkillsPath);\n \n if (!fs.existsSync(skillsDir)) {\n return [];\n }\n\n try {\n const entries = fs.readdirSync(skillsDir, { withFileTypes: true });\n return entries\n .filter(entry => {\n if (!entry.isDirectory()) {\n return false;\n }\n // Check if the directory contains a SKILL.md file\n const promptPath = path.join(skillsDir, entry.name, SKILL_PROMPT_FILENAME);\n return fs.existsSync(promptPath);\n })\n .map(entry => entry.name)\n .sort();\n } catch {\n return [];\n }\n}\n\n/**\n * Parse skill metadata from SKILL.md content\n * \n * @param content Raw SKILL.md content\n * @returns Parsed metadata\n */\nfunction parseSkillMetadata(content: string): SkillMetadata {\n const metadata: SkillMetadata = { raw: content };\n \n // Try to extract YAML frontmatter\n const frontmatterMatch = content.match(/^---\\r?\\n([\\s\\S]*?)\\r?\\n---/);\n if 
(frontmatterMatch) {\n const frontmatter = frontmatterMatch[1];\n \n // Simple YAML parsing for common fields\n const nameMatch = frontmatter.match(/^name:\\s*[\"']?(.+?)[\"']?\\s*$/m);\n if (nameMatch) metadata.name = nameMatch[1];\n \n const descMatch = frontmatter.match(/^description:\\s*[\"']?(.+?)[\"']?\\s*$/m);\n if (descMatch) metadata.description = descMatch[1];\n \n const versionMatch = frontmatter.match(/^version:\\s*[\"']?(.+?)[\"']?\\s*$/m);\n if (versionMatch) metadata.version = versionMatch[1];\n \n // Parse variables array\n const variablesMatch = frontmatter.match(/^variables:\\s*\\[([^\\]]+)\\]/m);\n if (variablesMatch) {\n metadata.variables = variablesMatch[1]\n .split(',')\n .map(v => v.trim().replace(/[\"']/g, ''))\n .filter(v => v.length > 0);\n }\n \n // Parse output array\n const outputMatch = frontmatter.match(/^output:\\s*\\[([^\\]]+)\\]/m);\n if (outputMatch) {\n metadata.output = outputMatch[1]\n .split(',')\n .map(v => v.trim().replace(/[\"']/g, ''))\n .filter(v => v.length > 0);\n }\n }\n \n return metadata;\n}\n\n/**\n * Load skill metadata from SKILL.md file content\n * \n * @param fileContent The content of the SKILL.md file\n * @returns Skill metadata parsed from frontmatter\n */\nfunction loadSkillMetadataFromContent(fileContent: string): SkillMetadata | undefined {\n if (!fileContent) {\n return undefined;\n }\n \n try {\n return parseSkillMetadata(fileContent);\n } catch {\n return undefined;\n }\n}\n\n/**\n * Resolve and load a skill's prompt\n * \n * This is the main API for loading skill prompts.\n * \n * @param skillName Name of the skill (e.g., \"go-deep\")\n * @param workspaceRoot The workspace root directory\n * @param customSkillsPath Optional custom skills directory path\n * @returns Prompt content string (frontmatter stripped)\n * @throws SkillResolverError if skill not found or empty\n * \n * @example\n * // Load a skill prompt\n * const prompt = await resolveSkill('go-deep', '/path/to/workspace');\n */\nexport async function resolveSkill(\n skillName: string,\n workspaceRoot: string,\n customSkillsPath?: string\n): Promise<string> {\n const result = await resolveSkillWithDetails(skillName, workspaceRoot, customSkillsPath);\n return result.content;\n}\n\n/**\n * Resolve and load a skill's prompt synchronously\n * \n * @param skillName Name of the skill\n * @param workspaceRoot The workspace root directory\n * @param customSkillsPath Optional custom skills directory path\n * @returns Prompt content string (frontmatter stripped)\n * @throws SkillResolverError if skill not found or empty\n */\nexport function resolveSkillSync(\n skillName: string,\n workspaceRoot: string,\n customSkillsPath?: string\n): string {\n const result = resolveSkillWithDetailsSync(skillName, workspaceRoot, customSkillsPath);\n return result.content;\n}\n\n/**\n * Resolve and load a skill with full details\n * \n * @param skillName Name of the skill\n * @param workspaceRoot The workspace root directory\n * @param customSkillsPath Optional custom skills directory path\n * @returns Full resolution result with content, paths, and metadata\n * @throws SkillResolverError if skill not found or empty\n */\nexport async function resolveSkillWithDetails(\n skillName: string,\n workspaceRoot: string,\n customSkillsPath?: string\n): Promise<SkillResolutionResult> {\n // Validate skill name\n if (!skillName || typeof skillName !== 'string') {\n throw new SkillResolverError('Skill name must be a non-empty string', skillName || '');\n }\n \n // Sanitize skill name (prevent path 
traversal)\n if (skillName.includes('/') || skillName.includes('\\\\') || skillName.includes('..')) {\n throw new SkillResolverError(\n `Invalid skill name \"${skillName}\": skill names cannot contain path separators or \"..\"`,\n skillName\n );\n }\n \n const skillDirectory = getSkillDirectory(skillName, workspaceRoot, customSkillsPath);\n const promptPath = path.join(skillDirectory, SKILL_PROMPT_FILENAME);\n \n // Check if skill directory exists\n if (!fs.existsSync(skillDirectory)) {\n const skillsDir = getSkillsDirectory(workspaceRoot, customSkillsPath);\n throw new SkillResolverError(\n `Skill \"${skillName}\" not found. Expected directory: ${skillDirectory}\\n` +\n `Skills should be located in: ${skillsDir}`,\n skillName,\n skillDirectory\n );\n }\n \n // Check if SKILL.md exists\n if (!fs.existsSync(promptPath)) {\n throw new SkillResolverError(\n `Skill \"${skillName}\" is missing SKILL.md. Expected: ${promptPath}`,\n skillName,\n promptPath\n );\n }\n \n try {\n const fileContent = await fs.promises.readFile(promptPath, 'utf-8');\n const { content, hadFrontmatter } = extractPromptContent(fileContent);\n \n if (!content) {\n throw new SkillResolverError(\n `Skill \"${skillName}\" has empty SKILL.md after stripping frontmatter`,\n skillName,\n promptPath\n );\n }\n \n // Extract metadata from the same SKILL.md file content\n const metadata = loadSkillMetadataFromContent(fileContent);\n \n return {\n content,\n resolvedPath: promptPath,\n skillDirectory,\n hadFrontmatter,\n metadata\n };\n } catch (error) {\n if (error instanceof SkillResolverError) {\n throw error;\n }\n throw new SkillResolverError(\n `Failed to read skill \"${skillName}\": ${error instanceof Error ? error.message : String(error)}`,\n skillName,\n promptPath\n );\n }\n}\n\n/**\n * Resolve and load a skill with full details synchronously\n */\nexport function resolveSkillWithDetailsSync(\n skillName: string,\n workspaceRoot: string,\n customSkillsPath?: string\n): SkillResolutionResult {\n // Validate skill name\n if (!skillName || typeof skillName !== 'string') {\n throw new SkillResolverError('Skill name must be a non-empty string', skillName || '');\n }\n \n // Sanitize skill name (prevent path traversal)\n if (skillName.includes('/') || skillName.includes('\\\\') || skillName.includes('..')) {\n throw new SkillResolverError(\n `Invalid skill name \"${skillName}\": skill names cannot contain path separators or \"..\"`,\n skillName\n );\n }\n \n const skillDirectory = getSkillDirectory(skillName, workspaceRoot, customSkillsPath);\n const promptPath = path.join(skillDirectory, SKILL_PROMPT_FILENAME);\n \n // Check if skill directory exists\n if (!fs.existsSync(skillDirectory)) {\n const skillsDir = getSkillsDirectory(workspaceRoot, customSkillsPath);\n throw new SkillResolverError(\n `Skill \"${skillName}\" not found. Expected directory: ${skillDirectory}\\n` +\n `Skills should be located in: ${skillsDir}`,\n skillName,\n skillDirectory\n );\n }\n \n // Check if SKILL.md exists\n if (!fs.existsSync(promptPath)) {\n throw new SkillResolverError(\n `Skill \"${skillName}\" is missing SKILL.md. 
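// Sketch of the skill-name guard above in isolation; isSafeSkillName is an
// illustrative helper, not an export of this module. It mirrors the check
// used by resolveSkillWithDetails/resolveSkillWithDetailsSync, so names
// containing path separators or '..' never reach the filesystem.
function isSafeSkillName(name: string): boolean {
  return name.length > 0
    && !name.includes('/')
    && !name.includes('\\')
    && !name.includes('..');
}

// isSafeSkillName('go-deep')     -> true
// isSafeSkillName('../secrets')  -> false (parent traversal)
// isSafeSkillName('a/b')         -> false (path separator)
// isSafeSkillName('a\\b')        -> false (Windows separator)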
Expected: ${promptPath}`,\n skillName,\n promptPath\n );\n }\n \n try {\n const fileContent = fs.readFileSync(promptPath, 'utf-8');\n const { content, hadFrontmatter } = extractPromptContent(fileContent);\n \n if (!content) {\n throw new SkillResolverError(\n `Skill \"${skillName}\" has empty SKILL.md after stripping frontmatter`,\n skillName,\n promptPath\n );\n }\n \n // Extract metadata from the same SKILL.md file content\n const metadata = loadSkillMetadataFromContent(fileContent);\n \n return {\n content,\n resolvedPath: promptPath,\n skillDirectory,\n hadFrontmatter,\n metadata\n };\n } catch (error) {\n if (error instanceof SkillResolverError) {\n throw error;\n }\n throw new SkillResolverError(\n `Failed to read skill \"${skillName}\": ${error instanceof Error ? error.message : String(error)}`,\n skillName,\n promptPath\n );\n }\n}\n\n/**\n * Validate that a skill can be resolved (for config validation)\n * \n * @param skillName Name of the skill\n * @param workspaceRoot The workspace root directory\n * @param customSkillsPath Optional custom skills directory path\n * @returns Validation result with error message if invalid\n */\nexport function validateSkill(\n skillName: string,\n workspaceRoot: string,\n customSkillsPath?: string\n): { valid: boolean; error?: string; skillPath?: string } {\n try {\n const promptPath = getSkillPromptPath(skillName, workspaceRoot, customSkillsPath);\n \n // Validate skill name\n if (!skillName || typeof skillName !== 'string') {\n return { valid: false, error: 'Skill name must be a non-empty string' };\n }\n \n // Sanitize skill name\n if (skillName.includes('/') || skillName.includes('\\\\') || skillName.includes('..')) {\n return {\n valid: false,\n error: `Invalid skill name \"${skillName}\": skill names cannot contain path separators or \"..\"`\n };\n }\n \n if (!fs.existsSync(promptPath)) {\n const skillsDir = getSkillsDirectory(workspaceRoot, customSkillsPath);\n return {\n valid: false,\n error: `Skill \"${skillName}\" not found at ${promptPath}. Skills should be in ${skillsDir}`,\n skillPath: promptPath\n };\n }\n \n return { valid: true, skillPath: promptPath };\n } catch (error) {\n return {\n valid: false,\n error: error instanceof Error ? 
error.message : String(error)\n };\n }\n}\n", "/**\n * Pipeline Executor\n *\n * Executes YAML-defined pipelines using the map-reduce framework.\n * This is a thin wrapper that converts PipelineConfig to map-reduce job execution.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport {\n createExecutor,\n ExecutorOptions,\n MapReduceResult,\n JobProgress,\n createPromptMapJob,\n createPromptMapInput,\n PromptItem,\n PromptMapResult,\n PromptMapOutput,\n PromptMapSummary\n} from '../map-reduce';\nimport { DEFAULT_AI_TIMEOUT_MS, DEFAULT_PARALLEL_LIMIT } from '../config/defaults';\nimport { readCSVFile, resolveCSVPath } from './csv-reader';\nimport { extractVariables } from './template';\nimport { substituteVariables } from '../utils/template-engine';\nimport {\n AIInvoker,\n CSVSource,\n isCSVSource,\n isGenerateConfig,\n PipelineConfig,\n PipelineParameter,\n ProcessTracker,\n FilterResult\n} from './types';\nimport { validateGenerateConfig } from './input-generator';\nimport { executeFilter } from './filter-executor';\nimport { resolvePromptFile } from './prompt-resolver';\nimport { resolveSkill } from './skill-resolver';\nimport { PipelineCoreError, ErrorCode } from '../errors';\nimport { getLogger, LogCategory } from '../logger';\n\n// Re-export for backward compatibility\nexport { DEFAULT_PARALLEL_LIMIT } from '../config/defaults';\n\n/**\n * Error thrown for pipeline execution issues\n */\nexport class PipelineExecutionError extends PipelineCoreError {\n /** Phase where the error occurred */\n readonly phase?: 'input' | 'filter' | 'map' | 'reduce';\n\n constructor(\n message: string,\n phase?: 'input' | 'filter' | 'map' | 'reduce'\n ) {\n super(message, {\n code: ErrorCode.PIPELINE_EXECUTION_FAILED,\n meta: phase ? { phase } : undefined,\n });\n this.name = 'PipelineExecutionError';\n this.phase = phase;\n }\n}\n\n/**\n * Options for executing a pipeline\n */\nexport interface ExecutePipelineOptions {\n /** AI invoker function */\n aiInvoker: AIInvoker;\n /** \n * Pipeline directory for resolving relative paths (package directory where pipeline.yaml lives).\n * All CSV and resource paths in the pipeline config are resolved relative to this directory.\n */\n pipelineDirectory: string;\n /**\n * Workspace root directory for resolving skills.\n * Skills are located at {workspaceRoot}/.github/skills/{name}/SKILL.md.\n * If not provided, defaults to pipelineDirectory's grandparent (assuming standard .vscode/pipelines/ structure).\n */\n workspaceRoot?: string;\n /** Optional process tracker for AI process manager integration */\n processTracker?: ProcessTracker;\n /** Progress callback */\n onProgress?: (progress: JobProgress) => void;\n /** Optional cancellation check function - returns true if execution should be cancelled */\n isCancelled?: () => boolean;\n}\n\n/**\n * Result type from pipeline execution\n */\nexport interface PipelineExecutionResult extends MapReduceResult<PromptMapResult, PromptMapOutput> {\n /** Filter result if filter was used */\n filterResult?: FilterResult;\n}\n\n/**\n * Resolved prompts from config (either inline or from files)\n */\ninterface ResolvedPrompts {\n mapPrompt: string;\n reducePrompt?: string;\n}\n\n/**\n * Execute a pipeline from a YAML configuration\n * \n * @param config Pipeline configuration (parsed from YAML)\n * @param options Execution options\n * @returns Map-reduce result containing pipeline output\n */\nexport async function executePipeline(\n config: PipelineConfig,\n options: ExecutePipelineOptions\n): 
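// Sketch of wiring up ExecutePipelineOptions with a stub invoker. The AIInvoker
// result shape here is inferred from how aiInvoker is called later in this file
// (aiInvoker(prompt, { model }) resolving to { success, response, ... }); treat
// the stub as an assumption rather than the canonical interface.
const stubInvoker: AIInvoker = async (prompt, opts) => {
  // Echo stub: a real invoker would call an AI CLI/SDK here.
  return {
    success: true,
    response: `[model=${opts?.model ?? 'default'}] ${prompt.slice(0, 40)}...`
  };
};

const exampleOptions: ExecutePipelineOptions = {
  aiInvoker: stubInvoker,
  pipelineDirectory: '/repo/.vscode/pipelines/my-pipeline', // package dir holding pipeline.yaml
  workspaceRoot: '/repo',                                   // where .github/skills lives
  onProgress: p => console.log(`${p.phase}: ${p.percentage}%`),
  isCancelled: () => false
};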
Promise<PipelineExecutionResult> {\n // Validate config\n validatePipelineConfig(config);\n\n // Resolve prompts (from inline, files, or skills)\n const prompts = await resolvePrompts(config, options.pipelineDirectory, options.workspaceRoot);\n\n // Load items from input source\n let items = await loadInputItems(config, options.pipelineDirectory);\n\n // Apply limit and merge parameters\n items = prepareItems(items, config, prompts.mapPrompt);\n\n // Execute the pipeline with resolved prompts and items\n return executeWithItems(config, items, prompts, options);\n}\n\n/**\n * Execute a pipeline with pre-approved items\n * \n * This function bypasses the normal input loading and uses provided items directly.\n * Used when items have been generated via AI and approved by the user.\n * \n * @param config Pipeline configuration (parsed from YAML)\n * @param items Pre-approved items to process\n * @param options Execution options\n * @returns Map-reduce result containing pipeline output\n */\nexport async function executePipelineWithItems(\n config: PipelineConfig,\n items: PromptItem[],\n options: ExecutePipelineOptions\n): Promise<PipelineExecutionResult> {\n // Validate basic config structure (but skip input validation since we're using pre-approved items)\n validatePipelineConfigForExecution(config);\n\n // Resolve prompts (from inline, files, or skills)\n const prompts = await resolvePrompts(config, options.pipelineDirectory, options.workspaceRoot);\n\n // Apply limit and merge parameters to provided items\n const processItems = prepareItems(items, config, prompts.mapPrompt);\n\n // Execute the pipeline with resolved prompts and items\n return executeWithItems(config, processItems, prompts, options);\n}\n\n/**\n * Validate pipeline configuration for execution (without input source validation)\n * Used when executing with pre-approved items.\n */\nfunction validatePipelineConfigForExecution(config: PipelineConfig): void {\n if (!config.name) {\n throw new PipelineExecutionError('Pipeline config missing \"name\"');\n }\n\n validateMapConfig(config);\n validateReduceConfig(config);\n}\n\n// ============================================================================\n// Helper Functions\n// ============================================================================\n\n/**\n * Derive workspace root from pipeline directory if not provided.\n * Assumes standard structure: {workspaceRoot}/.vscode/pipelines/{package}/\n */\nfunction deriveWorkspaceRoot(pipelineDirectory: string, providedWorkspaceRoot?: string): string {\n if (providedWorkspaceRoot) {\n return providedWorkspaceRoot;\n }\n // Go up from pipeline package directory to workspace root\n // .vscode/pipelines/my-pipeline/ -> workspace root (3 levels up)\n const path = require('path');\n return path.resolve(pipelineDirectory, '..', '..', '..');\n}\n\n/**\n * Build a prompt with optional skill context prepended\n * \n * When a skill is attached, the skill's prompt content is prepended as guidance:\n * ```\n * [Skill Guidance: {skillName}]\n * {skill prompt content}\n * \n * [Task]\n * {main prompt}\n * ```\n */\nfunction buildPromptWithSkill(mainPrompt: string, skillContent?: string, skillName?: string): string {\n if (!skillContent || !skillName) {\n return mainPrompt;\n }\n \n return `[Skill Guidance: ${skillName}]\n${skillContent}\n\n[Task]\n${mainPrompt}`;\n}\n\n/**\n * Resolve all prompts from config (either inline or from files, with optional skill context)\n */\nasync function resolvePrompts(\n config: PipelineConfig,\n 
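// Illustrative call of buildPromptWithSkill above: with a skill attached, the
// skill text is prepended as guidance; without one, the main prompt is returned
// unchanged. The strings are invented for the example.
const combinedPrompt = buildPromptWithSkill(
  'Summarize {{file}} in three bullet points.',
  'Prefer primary sources. Cite line numbers.',
  'go-deep'
);
// combinedPrompt ===
// '[Skill Guidance: go-deep]\n' +
// 'Prefer primary sources. Cite line numbers.\n' +
// '\n' +
// '[Task]\n' +
// 'Summarize {{file}} in three bullet points.'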
pipelineDirectory: string,\n workspaceRoot?: string\n): Promise<ResolvedPrompts> {\n const effectiveWorkspaceRoot = deriveWorkspaceRoot(pipelineDirectory, workspaceRoot);\n \n let mapPrompt: string;\n try {\n // Resolve main prompt (either inline or from file)\n let mainMapPrompt: string;\n if (config.map.prompt) {\n mainMapPrompt = config.map.prompt;\n } else if (config.map.promptFile) {\n mainMapPrompt = await resolvePromptFile(config.map.promptFile, pipelineDirectory);\n } else {\n throw new PipelineExecutionError('Map phase must have either \"prompt\" or \"promptFile\"', 'map');\n }\n \n // Optionally load and attach skill context\n let skillContent: string | undefined;\n if (config.map.skill) {\n try {\n skillContent = await resolveSkill(config.map.skill, effectiveWorkspaceRoot);\n } catch (error) {\n throw new PipelineExecutionError(\n `Failed to resolve map skill \"${config.map.skill}\": ${error instanceof Error ? error.message : String(error)}`,\n 'map'\n );\n }\n }\n \n mapPrompt = buildPromptWithSkill(mainMapPrompt, skillContent, config.map.skill);\n } catch (error) {\n if (error instanceof PipelineExecutionError) {\n throw error;\n }\n throw new PipelineExecutionError(\n `Failed to resolve map prompt: ${error instanceof Error ? error.message : String(error)}`,\n 'map'\n );\n }\n\n let reducePrompt: string | undefined;\n if (config.reduce.type === 'ai') {\n try {\n // Resolve main reduce prompt (either inline or from file)\n let mainReducePrompt: string;\n if (config.reduce.prompt) {\n mainReducePrompt = config.reduce.prompt;\n } else if (config.reduce.promptFile) {\n mainReducePrompt = await resolvePromptFile(config.reduce.promptFile, pipelineDirectory);\n } else {\n throw new PipelineExecutionError('AI reduce must have either \"prompt\" or \"promptFile\"', 'reduce');\n }\n \n // Optionally load and attach skill context\n let skillContent: string | undefined;\n if (config.reduce.skill) {\n try {\n skillContent = await resolveSkill(config.reduce.skill, effectiveWorkspaceRoot);\n } catch (error) {\n throw new PipelineExecutionError(\n `Failed to resolve reduce skill \"${config.reduce.skill}\": ${error instanceof Error ? error.message : String(error)}`,\n 'reduce'\n );\n }\n }\n \n reducePrompt = buildPromptWithSkill(mainReducePrompt, skillContent, config.reduce.skill);\n } catch (error) {\n if (error instanceof PipelineExecutionError) {\n throw error;\n }\n throw new PipelineExecutionError(\n `Failed to resolve reduce prompt: ${error instanceof Error ? 
error.message : String(error)}`,\n 'reduce'\n );\n }\n }\n\n return { mapPrompt, reducePrompt };\n}\n\n/**\n * Load items from input source (inline items, CSV, or inline array)\n */\nasync function loadInputItems(config: PipelineConfig, pipelineDirectory: string): Promise<PromptItem[]> {\n try {\n if (config.input.items) {\n return config.input.items;\n }\n \n if (config.input.from) {\n if (isCSVSource(config.input.from)) {\n const csvPath = resolveCSVPath(config.input.from.path, pipelineDirectory);\n const result = await readCSVFile(csvPath, {\n delimiter: config.input.from.delimiter\n });\n return result.items;\n }\n \n if (Array.isArray(config.input.from)) {\n return config.input.from;\n }\n \n throw new PipelineExecutionError('Invalid \"from\" configuration', 'input');\n }\n \n throw new PipelineExecutionError('Input must have either \"items\" or \"from\"', 'input');\n } catch (error) {\n if (error instanceof PipelineExecutionError) {\n throw error;\n }\n throw new PipelineExecutionError(\n `Failed to read input: ${error instanceof Error ? error.message : String(error)}`,\n 'input'\n );\n }\n}\n\n/**\n * Prepare items by applying limit, merging parameters, and validating template variables\n */\nfunction prepareItems(items: PromptItem[], config: PipelineConfig, mapPrompt: string): PromptItem[] {\n // Apply limit\n const limit = config.input.limit ?? items.length;\n let result = items.slice(0, limit);\n\n // Merge parameters into each item (parameters take lower precedence than item fields)\n if (config.input.parameters && config.input.parameters.length > 0) {\n const paramValues = convertParametersToObject(config.input.parameters);\n result = result.map(item => ({ ...paramValues, ...item }));\n }\n\n // Validate that items have required template variables\n if (result.length > 0) {\n const templateVars = extractVariables(mapPrompt);\n const firstItem = result[0];\n const missingVars = templateVars.filter(v => !(v in firstItem));\n if (missingVars.length > 0) {\n throw new PipelineExecutionError(\n `Items missing required fields: ${missingVars.join(', ')}`,\n 'input'\n );\n }\n }\n\n return result;\n}\n\n/**\n * Execute the pipeline with resolved prompts and prepared items\n * This is the core execution logic shared by both executePipeline and executePipelineWithItems\n */\nasync function executeWithItems(\n config: PipelineConfig,\n items: PromptItem[],\n prompts: ResolvedPrompts,\n options: ExecutePipelineOptions\n): Promise<PipelineExecutionResult> {\n let processItems = items;\n \n // Filter Phase (optional): Filter items before map phase\n let filterResult: FilterResult | undefined;\n if (config.filter) {\n try {\n filterResult = await executeFilter(processItems, config.filter, {\n aiInvoker: options.aiInvoker,\n processTracker: options.processTracker,\n onProgress: (progress) => {\n options.onProgress?.({\n phase: 'splitting',\n totalItems: progress.total,\n completedItems: progress.processed,\n failedItems: 0,\n percentage: Math.round((progress.processed / progress.total) * 100)\n });\n },\n isCancelled: options.isCancelled\n });\n\n processItems = filterResult.included;\n\n getLogger().info(\n LogCategory.PIPELINE,\n `Filter: ${filterResult.stats.includedCount}/${filterResult.stats.totalItems} items passed ` +\n `(${filterResult.stats.excludedCount} excluded, ${filterResult.stats.executionTimeMs}ms)`\n );\n\n if (processItems.length === 0) {\n getLogger().warn(LogCategory.PIPELINE, 'Filter excluded all items - map phase will have no work');\n }\n } catch (error) {\n if (error 
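// Sketch of prepareItems semantics, with invented values. Parameters are spread
// first, so a field already present on an item wins over a parameter of the
// same name, and input.limit is applied before the merge.
const exampleInputItems = [
  { file: 'src/a.ts', style: 'terse' },
  { file: 'src/b.ts' }
];
// With input.parameters = [{ name: 'style', value: 'detailed' }] and
// map.prompt = 'Review {{file}} in a {{style}} style.':
//   item 0 keeps style 'terse'    (item field overrides the parameter)
//   item 1 gets style 'detailed'  (filled in from parameters)
// If the prompt also referenced {{branch}}, prepareItems would throw
// PipelineExecutionError('Items missing required fields: branch', 'input')
// because the first item has no such field.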
instanceof PipelineExecutionError) {\n throw error;\n }\n throw new PipelineExecutionError(\n `Failed to execute filter: ${error instanceof Error ? error.message : String(error)}`,\n 'filter'\n );\n }\n }\n\n // Check if batch mode is enabled\n const batchSize = config.map.batchSize ?? 1;\n \n if (batchSize > 1) {\n // Batch mode: process items in batches\n return executeBatchMode(config, processItems, prompts, options, filterResult);\n }\n\n // Standard mode: process items individually\n return executeStandardMode(config, processItems, prompts, options, filterResult);\n}\n\n/**\n * Execute pipeline in standard mode (one item per AI call)\n */\nasync function executeStandardMode(\n config: PipelineConfig,\n processItems: PromptItem[],\n prompts: ResolvedPrompts,\n options: ExecutePipelineOptions,\n filterResult?: FilterResult\n): Promise<PipelineExecutionResult> {\n const parallelLimit = config.map.parallel ?? DEFAULT_PARALLEL_LIMIT;\n const timeoutMs = config.map.timeoutMs ?? DEFAULT_AI_TIMEOUT_MS;\n\n const executorOptions: ExecutorOptions = {\n aiInvoker: options.aiInvoker,\n maxConcurrency: parallelLimit,\n reduceMode: 'deterministic',\n showProgress: true,\n retryOnFailure: false,\n processTracker: options.processTracker,\n onProgress: options.onProgress,\n jobName: config.name,\n timeoutMs,\n isCancelled: options.isCancelled\n };\n\n const executor = createExecutor(executorOptions);\n\n const reduceParameters = config.input.parameters\n ? convertParametersToObject(config.input.parameters)\n : undefined;\n\n const job = createPromptMapJob({\n aiInvoker: options.aiInvoker,\n outputFormat: config.reduce.type,\n model: config.map.model,\n maxConcurrency: parallelLimit,\n ...(config.reduce.type === 'ai' && {\n aiReducePrompt: prompts.reducePrompt,\n aiReduceOutput: config.reduce.output,\n aiReduceModel: config.reduce.model,\n aiReduceParameters: reduceParameters\n })\n });\n\n const jobInput = createPromptMapInput(\n processItems,\n prompts.mapPrompt,\n config.map.output || []\n );\n\n try {\n const result = await executor.execute(job, jobInput);\n return { ...result, filterResult };\n } catch (error) {\n throw new PipelineExecutionError(\n `Pipeline execution failed: ${error instanceof Error ? 
error.message : String(error)}`,\n 'map'\n );\n }\n}\n\n/**\n * Split items into batches of specified size\n */\nfunction splitIntoBatches(items: PromptItem[], batchSize: number): PromptItem[][] {\n const batches: PromptItem[][] = [];\n for (let i = 0; i < items.length; i += batchSize) {\n batches.push(items.slice(i, i + batchSize));\n }\n return batches;\n}\n\n/**\n * Substitute model template variables using item values.\n * Used for dynamic model selection based on item properties.\n *\n * @param modelTemplate The model template string (e.g., \"gpt-4\" or \"{{model}}\")\n * @param item The item to use for variable substitution\n * @returns The substituted model string, or undefined if result is empty\n */\nfunction substituteModelTemplate(\n modelTemplate: string | undefined,\n item: Record<string, unknown>\n): string | undefined {\n if (!modelTemplate || typeof modelTemplate !== 'string') {\n return undefined;\n }\n const substituted = substituteVariables(modelTemplate, item, {\n strict: false,\n missingValueBehavior: 'empty',\n preserveSpecialVariables: false\n });\n return substituted || undefined;\n}\n\n/**\n * Execute pipeline in batch mode (multiple items per AI call)\n * \n * In batch mode:\n * - Items are grouped into batches of `batchSize`\n * - Each batch is sent to AI as a single call with {{ITEMS}} containing the batch\n * - AI must return a JSON array with one result per input item\n * - Results are flattened back into individual PromptMapResult objects\n */\nasync function executeBatchMode(\n config: PipelineConfig,\n processItems: PromptItem[],\n prompts: ResolvedPrompts,\n options: ExecutePipelineOptions,\n filterResult?: FilterResult\n): Promise<PipelineExecutionResult> {\n const batchSize = config.map.batchSize ?? 1;\n const parallelLimit = config.map.parallel ?? DEFAULT_PARALLEL_LIMIT;\n const timeoutMs = config.map.timeoutMs ?? DEFAULT_AI_TIMEOUT_MS;\n const outputFields = config.map.output || [];\n const isTextMode = outputFields.length === 0;\n\n // Split items into batches\n const batches = splitIntoBatches(processItems, batchSize);\n const totalBatches = batches.length;\n\n // Register group process if tracker is available\n let groupId: string | undefined;\n if (options.processTracker && totalBatches > 1) {\n groupId = options.processTracker.registerGroup(`${config.name} (${totalBatches} batches)`);\n }\n\n // Report initial progress\n options.onProgress?.({\n phase: 'mapping',\n totalItems: totalBatches,\n completedItems: 0,\n failedItems: 0,\n percentage: 0,\n message: `Processing ${totalBatches} batches (${processItems.length} items, batch size ${batchSize})...`\n });\n\n // Process batches with concurrency limit\n const startTime = Date.now();\n const allResults: PromptMapResult[] = [];\n let completedBatches = 0;\n let failedBatches = 0;\n\n // Create a simple concurrency limiter for batch processing\n const processBatch = async (batch: PromptItem[], batchIndex: number): Promise<PromptMapResult[]> => {\n // Check for cancellation\n if (options.isCancelled?.()) {\n return batch.map(item => ({\n item,\n output: isTextMode ? 
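// Illustrative calls of the batch helpers above; item values are invented.
// splitIntoBatches keeps input order and lets the last batch run short;
// substituteModelTemplate enables per-item model selection via {{...}} templates.
const exampleBatches = splitIntoBatches(
  [{ id: '1' }, { id: '2' }, { id: '3' }, { id: '4' }, { id: '5' }],
  2
);
// -> [ [ {id:'1'}, {id:'2'} ], [ {id:'3'}, {id:'4'} ], [ {id:'5'} ] ]

const exampleModel = substituteModelTemplate('{{model}}', { model: 'gpt-4o', id: '1' });
// -> 'gpt-4o'; a plain string like 'gpt-4' passes through unchanged, and an
//    unresolvable template yields undefined (missing values become empty strings).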
{} : createEmptyOutput(outputFields),\n success: false,\n error: 'Operation cancelled'\n }));\n }\n\n // Register batch process\n let processId: string | undefined;\n if (options.processTracker) {\n processId = options.processTracker.registerProcess(\n `Processing batch ${batchIndex + 1}/${totalBatches} (${batch.length} items)`,\n groupId\n );\n }\n\n try {\n // Build the prompt with {{ITEMS}} containing the batch\n const batchPrompt = buildBatchPrompt(prompts.mapPrompt, batch, outputFields);\n \n // Resolve model (use first item for template substitution if model is templated)\n const model = substituteModelTemplate(config.map.model, batch[0]);\n\n // Call AI with timeout\n const aiResult = await Promise.race([\n options.aiInvoker(batchPrompt, { model }),\n createTimeoutPromise(timeoutMs, batchIndex, totalBatches)\n ]);\n\n if (!aiResult.success || !aiResult.response) {\n // AI call failed - mark all items in batch as failed\n if (options.processTracker && processId) {\n options.processTracker.updateProcess(processId, 'failed', undefined, aiResult.error || 'AI invocation failed');\n }\n return batch.map(item => ({\n item,\n output: isTextMode ? {} : createEmptyOutput(outputFields),\n success: false,\n error: aiResult.error || 'AI invocation failed',\n rawResponse: aiResult.response,\n sessionId: aiResult.sessionId\n }));\n }\n\n // Parse batch response\n const batchResults = parseBatchResponse(\n aiResult.response,\n batch,\n outputFields,\n isTextMode,\n aiResult.sessionId\n );\n\n // Update process status\n if (options.processTracker && processId) {\n const successCount = batchResults.filter(r => r.success).length;\n options.processTracker.updateProcess(\n processId,\n 'completed',\n `${successCount}/${batch.length} items succeeded`,\n undefined,\n JSON.stringify(batchResults.map(r => r.output))\n );\n }\n\n return batchResults;\n } catch (error) {\n const errorMsg = error instanceof Error ? error.message : String(error);\n \n // Check if it's a timeout - retry with doubled timeout\n if (errorMsg.includes('timed out')) {\n try {\n const batchPrompt = buildBatchPrompt(prompts.mapPrompt, batch, outputFields);\n const model = substituteModelTemplate(config.map.model, batch[0]);\n\n const aiResult = await Promise.race([\n options.aiInvoker(batchPrompt, { model }),\n createTimeoutPromise(timeoutMs * 2, batchIndex, totalBatches)\n ]);\n\n if (aiResult.success && aiResult.response) {\n const batchResults = parseBatchResponse(\n aiResult.response,\n batch,\n outputFields,\n isTextMode,\n aiResult.sessionId\n );\n\n if (options.processTracker && processId) {\n const successCount = batchResults.filter(r => r.success).length;\n options.processTracker.updateProcess(\n processId,\n 'completed',\n `${successCount}/${batch.length} items succeeded (after retry)`,\n undefined,\n JSON.stringify(batchResults.map(r => r.output))\n );\n }\n\n return batchResults;\n }\n } catch (retryError) {\n // Retry also failed\n }\n }\n\n // Mark all items in batch as failed\n if (options.processTracker && processId) {\n options.processTracker.updateProcess(processId, 'failed', undefined, errorMsg);\n }\n return batch.map(item => ({\n item,\n output: isTextMode ? 
{} : createEmptyOutput(outputFields),\n success: false,\n error: errorMsg\n }));\n }\n };\n\n // Process batches with concurrency limit\n const batchPromises: Promise<PromptMapResult[]>[] = [];\n const activeBatches: Promise<void>[] = [];\n\n for (let i = 0; i < batches.length; i++) {\n const batch = batches[i];\n \n // Wait if we've reached the concurrency limit\n if (activeBatches.length >= parallelLimit) {\n await Promise.race(activeBatches);\n }\n\n const batchPromise = processBatch(batch, i).then(results => {\n allResults.push(...results);\n \n // Update progress\n const hasFailures = results.some(r => !r.success);\n if (hasFailures) {\n failedBatches++;\n } else {\n completedBatches++;\n }\n\n options.onProgress?.({\n phase: 'mapping',\n totalItems: totalBatches,\n completedItems: completedBatches,\n failedItems: failedBatches,\n percentage: Math.round(((completedBatches + failedBatches) / totalBatches) * 85),\n message: `Processing batch ${completedBatches + failedBatches}/${totalBatches}...`\n });\n\n return results;\n });\n\n batchPromises.push(batchPromise);\n \n // Track active batch for concurrency limiting\n const activePromise = batchPromise.then(() => {\n const index = activeBatches.indexOf(activePromise);\n if (index > -1) {\n activeBatches.splice(index, 1);\n }\n });\n activeBatches.push(activePromise);\n }\n\n // Wait for all batches to complete\n await Promise.all(batchPromises);\n\n const mapPhaseTimeMs = Date.now() - startTime;\n\n // Calculate statistics\n const successfulMaps = allResults.filter(r => r.success).length;\n const failedMaps = allResults.filter(r => !r.success).length;\n\n // Report map complete\n options.onProgress?.({\n phase: 'reducing',\n totalItems: processItems.length,\n completedItems: successfulMaps,\n failedItems: failedMaps,\n percentage: 90,\n message: 'Aggregating results...'\n });\n\n // Execute reduce phase\n const reduceStartTime = Date.now();\n const reduceParameters = config.input.parameters\n ? 
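// Generic sketch of the concurrency pattern used above (the activeBatches +
// Promise.race loop): start work until `limit` tasks are in flight, then wait
// for any one to settle before starting the next. mapWithLimit is illustrative,
// not an export of this package.
async function mapWithLimit<T, R>(
  inputs: T[],
  limit: number,
  worker: (input: T, index: number) => Promise<R>
): Promise<R[]> {
  const results: Promise<R>[] = [];
  const active: Promise<void>[] = [];

  for (let i = 0; i < inputs.length; i++) {
    if (active.length >= limit) {
      // Wait until any in-flight task settles, freeing a slot.
      await Promise.race(active);
    }
    const task = worker(inputs[i], i);
    results.push(task);
    // Release the slot whether the worker resolves or rejects.
    const slot: Promise<void> = task.then(
      () => { active.splice(active.indexOf(slot), 1); },
      () => { active.splice(active.indexOf(slot), 1); }
    );
    active.push(slot);
  }
  return Promise.all(results);
}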
convertParametersToObject(config.input.parameters)\n : undefined;\n\n const reduceResult = await executeReducePhase(\n allResults,\n config,\n prompts,\n options,\n reduceParameters,\n groupId\n );\n\n const reducePhaseTimeMs = Date.now() - reduceStartTime;\n const totalTimeMs = Date.now() - startTime;\n\n // Build execution stats\n const executionStats = {\n totalItems: processItems.length,\n successfulMaps,\n failedMaps,\n mapPhaseTimeMs,\n reducePhaseTimeMs,\n maxConcurrency: parallelLimit\n };\n\n // Complete group process if registered\n if (options.processTracker && groupId) {\n options.processTracker.completeGroup(\n groupId,\n `Completed: ${successfulMaps}/${processItems.length} items processed in ${totalBatches} batches`,\n executionStats\n );\n }\n\n // Report complete\n options.onProgress?.({\n phase: 'complete',\n totalItems: processItems.length,\n completedItems: successfulMaps,\n failedItems: failedMaps,\n percentage: 100,\n message: `Complete: ${successfulMaps} succeeded, ${failedMaps} failed (${totalBatches} batches)`\n });\n\n // Build map results for compatibility with existing result structure\n const mapResults = allResults.map(r => ({\n workItemId: `item-${allResults.indexOf(r)}`,\n success: r.success,\n output: r,\n error: r.error,\n executionTimeMs: 0 // Not tracked per-item in batch mode\n }));\n\n const overallSuccess = failedMaps === 0;\n const result: PipelineExecutionResult = {\n success: overallSuccess,\n output: reduceResult,\n mapResults,\n reduceStats: {\n inputCount: allResults.length,\n outputCount: reduceResult ? 1 : 0,\n mergedCount: successfulMaps,\n reduceTimeMs: reducePhaseTimeMs,\n usedAIReduce: config.reduce.type === 'ai'\n },\n totalTimeMs,\n executionStats,\n filterResult\n };\n\n if (!overallSuccess) {\n const failedResults = allResults.filter(r => !r.success);\n if (failedResults.length === 1) {\n result.error = `1 item failed: ${failedResults[0].error || 'Unknown error'}`;\n } else {\n const uniqueErrors = [...new Set(failedResults.map(r => r.error || 'Unknown error'))];\n if (uniqueErrors.length === 1) {\n result.error = `${failedResults.length} items failed: ${uniqueErrors[0]}`;\n } else {\n result.error = `${failedResults.length} items failed with ${uniqueErrors.length} different errors`;\n }\n }\n }\n\n return result;\n}\n\n/**\n * Build the prompt for a batch, substituting {{ITEMS}} with the batch JSON\n * and other template variables from the first item (for parameters)\n */\nfunction buildBatchPrompt(promptTemplate: string, batch: PromptItem[], outputFields: string[]): string {\n // Replace {{ITEMS}} with the batch JSON\n const batchJson = JSON.stringify(batch, null, 2);\n let prompt = promptTemplate.replace(/\\{\\{ITEMS\\}\\}/g, batchJson);\n \n // Substitute other template variables from the first item\n // This allows parameters (which are merged into all items) to be used in the prompt\n if (batch.length > 0) {\n const firstItem = batch[0];\n prompt = prompt.replace(/\\{\\{(\\w+)\\}\\}/g, (match, varName) => {\n // Skip special variables that are handled elsewhere\n if (['ITEMS', 'RESULTS', 'RESULTS_FILE', 'COUNT', 'SUCCESS_COUNT', 'FAILURE_COUNT'].includes(varName)) {\n return match;\n }\n return varName in firstItem ? firstItem[varName] : match;\n });\n }\n \n // Add output instruction if we have output fields\n if (outputFields.length > 0) {\n prompt += `\\n\\nReturn a JSON array with ${batch.length} objects, one for each input item. 
Each object must have these fields: ${outputFields.join(', ')}`;\n }\n \n return prompt;\n}\n\n/**\n * Create an empty output object with null values for all fields\n */\nfunction createEmptyOutput(fields: string[]): Record<string, unknown> {\n const output: Record<string, unknown> = {};\n for (const field of fields) {\n output[field] = null;\n }\n return output;\n}\n\n/**\n * Create a timeout promise for batch processing\n */\nfunction createTimeoutPromise(timeoutMs: number, batchIndex: number, totalBatches: number): Promise<never> {\n return new Promise((_, reject) => {\n setTimeout(() => {\n reject(new Error(`Batch ${batchIndex + 1}/${totalBatches} timed out after ${timeoutMs}ms`));\n }, timeoutMs);\n });\n}\n\n/**\n * Parse the AI response for a batch\n * \n * Expected response format:\n * - JSON array with one object per input item\n * - Each object contains the output fields\n * \n * If the response count doesn't match the batch size, all items are marked as failed.\n */\nfunction parseBatchResponse(\n response: string,\n batch: PromptItem[],\n outputFields: string[],\n isTextMode: boolean,\n sessionId?: string\n): PromptMapResult[] {\n // Text mode - not supported for batch processing\n if (isTextMode) {\n return batch.map(item => ({\n item,\n output: {},\n rawText: response,\n success: true,\n rawResponse: response,\n sessionId\n }));\n }\n\n try {\n // Extract JSON array from response\n const jsonMatch = response.match(/\\[[\\s\\S]*\\]/);\n if (!jsonMatch) {\n throw new Error('Response does not contain a JSON array');\n }\n\n const parsed = JSON.parse(jsonMatch[0]);\n \n if (!Array.isArray(parsed)) {\n throw new Error('Parsed response is not an array');\n }\n\n // Validate count matches\n if (parsed.length !== batch.length) {\n const errorMsg = `AI returned ${parsed.length} results but batch has ${batch.length} items`;\n return batch.map(item => ({\n item,\n output: createEmptyOutput(outputFields),\n success: false,\n error: errorMsg,\n rawResponse: response,\n sessionId\n }));\n }\n\n // Map results to items\n return batch.map((item, index) => {\n const resultObj = parsed[index];\n \n if (typeof resultObj !== 'object' || resultObj === null) {\n return {\n item,\n output: createEmptyOutput(outputFields),\n success: false,\n error: `Result at index ${index} is not an object`,\n rawResponse: response,\n sessionId\n };\n }\n\n // Extract only the declared output fields\n const output: Record<string, unknown> = {};\n for (const field of outputFields) {\n output[field] = field in resultObj ? resultObj[field] : null;\n }\n\n return {\n item,\n output,\n success: true,\n rawResponse: response,\n sessionId\n };\n });\n } catch (error) {\n const errorMsg = `Failed to parse batch response: ${error instanceof Error ? 
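// Sketch of the batch response contract enforced by parseBatchResponse below.
// With map.output = ['severity', 'summary'] and a 2-item batch, the instruction
// appended in buildBatchPrompt asks the AI for a JSON array with exactly one
// object per input item. The response text is invented.
const exampleBatchResponse = `Here are the results:
[
  { "severity": "high", "summary": "SQL built by string concatenation" },
  { "severity": "low",  "summary": "Unused import", "extra": "ignored" }
]`;
// parseBatchResponse(exampleBatchResponse, batchOfTwo, ['severity', 'summary'], false)
// -> two successful PromptMapResult entries; the undeclared "extra" field is
//    dropped, and the surrounding prose is tolerated because only the [...]
//    block is parsed. If the array had 1 or 3 entries instead, every item in
//    the batch would be marked failed with createEmptyOutput() placeholders.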
error.message : String(error)}`;\n return batch.map(item => ({\n item,\n output: createEmptyOutput(outputFields),\n success: false,\n error: errorMsg,\n rawResponse: response,\n sessionId\n }));\n }\n}\n\n/**\n * Execute the reduce phase for batch mode results\n */\nasync function executeReducePhase(\n results: PromptMapResult[],\n config: PipelineConfig,\n prompts: ResolvedPrompts,\n options: ExecutePipelineOptions,\n reduceParameters?: Record<string, string>,\n parentGroupId?: string\n): Promise<PromptMapOutput> {\n const outputFields = config.map.output || [];\n const successfulItems = results.filter(r => r.success).length;\n const failedItems = results.filter(r => !r.success).length;\n\n const summary: PromptMapSummary = {\n totalItems: results.length,\n successfulItems,\n failedItems,\n outputFields\n };\n\n // Handle AI reduce\n if (config.reduce.type === 'ai' && prompts.reducePrompt) {\n return await performAIReduce(\n results,\n summary,\n prompts.reducePrompt,\n config.reduce.output,\n config.reduce.model,\n reduceParameters,\n options,\n parentGroupId\n );\n }\n\n // Handle deterministic reduce\n const formattedOutput = formatResults(results, summary, config.reduce.type);\n\n return {\n results,\n formattedOutput,\n summary\n };\n}\n\n/**\n * Perform AI-powered reduce for batch mode\n */\nasync function performAIReduce(\n results: PromptMapResult[],\n summary: PromptMapSummary,\n reducePrompt: string,\n reduceOutput?: string[],\n reduceModel?: string,\n reduceParameters?: Record<string, string>,\n options?: ExecutePipelineOptions,\n parentGroupId?: string\n): Promise<PromptMapOutput> {\n const isTextMode = !reduceOutput || reduceOutput.length === 0;\n\n // Register reduce process\n let reduceProcessId: string | undefined;\n if (options?.processTracker) {\n reduceProcessId = options.processTracker.registerProcess(\n 'AI Reduce: Synthesizing results',\n parentGroupId\n );\n }\n\n // Build prompt with template substitution\n const successfulResults = results.filter(r => r.success);\n const resultsForPrompt = successfulResults.map(r => r.rawText !== undefined ? 
r.rawText : r.output);\n const resultsString = JSON.stringify(resultsForPrompt, null, 2);\n\n let prompt = reducePrompt\n .replace(/\\{\\{RESULTS\\}\\}/g, resultsString)\n .replace(/\\{\\{COUNT\\}\\}/g, String(summary.totalItems))\n .replace(/\\{\\{SUCCESS_COUNT\\}\\}/g, String(summary.successfulItems))\n .replace(/\\{\\{FAILURE_COUNT\\}\\}/g, String(summary.failedItems));\n\n // Substitute input parameters\n if (reduceParameters) {\n for (const [key, value] of Object.entries(reduceParameters)) {\n prompt = prompt.replace(new RegExp(`\\\\{\\\\{${key}\\\\}\\\\}`, 'g'), value);\n }\n }\n\n // Add output instruction if not text mode\n if (!isTextMode) {\n prompt += `\\n\\nReturn JSON with these fields: ${reduceOutput!.join(', ')}`;\n }\n\n // Call AI\n const aiResult = await options?.aiInvoker(prompt, { model: reduceModel });\n\n if (!aiResult?.success || !aiResult.response) {\n if (options?.processTracker && reduceProcessId) {\n options.processTracker.updateProcess(\n reduceProcessId,\n 'failed',\n undefined,\n aiResult?.error || 'Unknown error'\n );\n }\n throw new PipelineExecutionError(\n `AI reduce failed: ${aiResult?.error || 'Unknown error'}`,\n 'reduce'\n );\n }\n\n // Text mode - return raw response\n if (isTextMode) {\n if (options?.processTracker && reduceProcessId) {\n options.processTracker.updateProcess(\n reduceProcessId,\n 'completed',\n aiResult.response\n );\n }\n return {\n results,\n formattedOutput: aiResult.response,\n summary: { ...summary, outputFields: [] }\n };\n }\n\n // Parse structured response\n try {\n const jsonMatch = aiResult.response.match(/\\{[\\s\\S]*\\}/);\n if (!jsonMatch) {\n throw new Error('Response does not contain JSON object');\n }\n const parsed = JSON.parse(jsonMatch[0]);\n const formattedOutput = JSON.stringify(parsed, null, 2);\n\n if (options?.processTracker && reduceProcessId) {\n options.processTracker.updateProcess(\n reduceProcessId,\n 'completed',\n formattedOutput,\n undefined,\n JSON.stringify(parsed)\n );\n }\n\n return {\n results,\n formattedOutput,\n summary: { ...summary, outputFields: reduceOutput! }\n };\n } catch (error) {\n if (options?.processTracker && reduceProcessId) {\n options.processTracker.updateProcess(\n reduceProcessId,\n 'failed',\n undefined,\n error instanceof Error ? error.message : String(error)\n );\n }\n throw new PipelineExecutionError(\n `Failed to parse AI reduce response: ${error instanceof Error ? 
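// Sketch of the placeholders available to an AI reduce prompt. performAIReduce
// substitutes these tokens before calling the invoker; the prompt text and the
// {{team}} parameter are invented, the placeholder names come from the code above.
const exampleReducePrompt = [
  'You reviewed {{COUNT}} files ({{SUCCESS_COUNT}} succeeded, {{FAILURE_COUNT}} failed).',
  'Here are the per-file findings as JSON:',
  '{{RESULTS}}',
  'Write a release-readiness assessment for {{team}}.' // {{team}} filled from input.parameters
].join('\n');
// With reduce.output = ['verdict', 'risks'], an instruction is appended asking
// for JSON with exactly those fields; with no reduce.output, the raw response
// text is returned as formattedOutput.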
error.message : String(error)}`,\n 'reduce'\n );\n }\n}\n\n/**\n * Format results based on reduce type\n */\nfunction formatResults(\n results: PromptMapResult[],\n summary: PromptMapSummary,\n reduceType: string\n): string {\n switch (reduceType) {\n case 'table':\n return formatAsTable(results);\n case 'json':\n return formatAsJSON(results);\n case 'csv':\n return formatAsCSV(results);\n case 'text':\n return formatAsText(results);\n default:\n return formatAsList(results, summary);\n }\n}\n\n// Formatting utilities for batch mode reduce\nfunction formatAsList(results: PromptMapResult[], summary: PromptMapSummary): string {\n const lines: string[] = [`## Results (${summary.totalItems} items)`, ''];\n if (summary.failedItems > 0) {\n lines.push(`**Warning: ${summary.failedItems} items failed**`, '');\n }\n\n results.forEach((r, i) => {\n lines.push(`### Item ${i + 1}`);\n const inputStr = Object.entries(r.item).map(([k, v]) => `${k}=${truncate(v, 30)}`).join(', ');\n lines.push(`**Input:** ${inputStr}`);\n if (r.success) {\n const outputStr = Object.entries(r.output).map(([k, v]) => `${k}=${formatValue(v)}`).join(', ');\n lines.push(`**Output:** ${outputStr}`);\n } else {\n lines.push(`**Error:** ${r.error || 'Unknown error'}`);\n }\n lines.push('');\n });\n\n lines.push('---', `**Stats:** ${summary.successfulItems} succeeded, ${summary.failedItems} failed`);\n return lines.join('\\n');\n}\n\nfunction formatAsTable(results: PromptMapResult[]): string {\n if (results.length === 0) return 'No results to display.';\n\n const inKeys = [...new Set(results.flatMap(r => Object.keys(r.item)))];\n const outKeys = [...new Set(results.flatMap(r => Object.keys(r.output)))];\n const headers = ['#', ...inKeys.map(k => `[in] ${k}`), ...outKeys.map(k => `[out] ${k}`), 'Status'];\n\n const lines = [\n '| ' + headers.join(' | ') + ' |',\n '| ' + headers.map(() => '---').join(' | ') + ' |'\n ];\n\n results.forEach((r, i) => {\n const cells = [\n String(i + 1),\n ...inKeys.map(k => truncate(r.item[k] ?? '', 20)),\n ...outKeys.map(k => formatValue(r.output[k])),\n r.success ? 'OK' : 'FAIL'\n ];\n lines.push('| ' + cells.join(' | ') + ' |');\n });\n\n return lines.join('\\n');\n}\n\nfunction formatAsJSON(results: PromptMapResult[]): string {\n return JSON.stringify(results.map(r => ({\n input: r.item,\n output: r.output,\n success: r.success,\n ...(r.error && { error: r.error })\n })), null, 2);\n}\n\nfunction formatAsCSV(results: PromptMapResult[]): string {\n if (results.length === 0) return '';\n\n const inKeys = [...new Set(results.flatMap(r => Object.keys(r.item)))];\n const outKeys = [...new Set(results.flatMap(r => Object.keys(r.output)))];\n const headers = [...inKeys, ...outKeys.map(k => `out_${k}`), 'success'];\n\n const lines = [headers.join(',')];\n for (const r of results) {\n const values = [\n ...inKeys.map(k => escapeCSV(r.item[k] ?? '')),\n ...outKeys.map(k => escapeCSV(formatValue(r.output[k]))),\n r.success ? 
'true' : 'false'\n ];\n lines.push(values.join(','));\n }\n return lines.join('\\n');\n}\n\nfunction formatAsText(results: PromptMapResult[]): string {\n const successfulResults = results.filter(r => r.success);\n if (successfulResults.length === 0) {\n return 'No successful results.';\n }\n\n if (successfulResults.length === 1) {\n const r = successfulResults[0];\n return r.rawText || JSON.stringify(r.output, null, 2);\n }\n\n return successfulResults\n .map((r, i) => {\n const text = r.rawText || JSON.stringify(r.output, null, 2);\n return `--- Item ${i + 1} ---\\n${text}`;\n })\n .join('\\n\\n');\n}\n\nfunction formatValue(value: unknown): string {\n if (value === null || value === undefined) return 'null';\n if (typeof value === 'string') return value.length > 50 ? value.substring(0, 47) + '...' : value;\n if (typeof value === 'boolean') return value ? 'true' : 'false';\n if (typeof value === 'number') return String(value);\n if (Array.isArray(value)) return `[${value.length} items]`;\n if (typeof value === 'object') return JSON.stringify(value);\n return String(value);\n}\n\nfunction truncate(value: string, max: number = 30): string {\n return value.length <= max ? value : value.substring(0, max - 3) + '...';\n}\n\nfunction escapeCSV(value: string): string {\n return (value.includes(',') || value.includes('\"') || value.includes('\\n'))\n ? `\"${value.replace(/\"/g, '\"\"')}\"`\n : value;\n}\n\n/**\n * Convert parameters array to object for merging with items\n */\nfunction convertParametersToObject(parameters: PipelineParameter[]): Record<string, string> {\n const result: Record<string, string> = {};\n for (const param of parameters) {\n result[param.name] = param.value;\n }\n return result;\n}\n\n// ============================================================================\n// Validation Functions\n// ============================================================================\n\n/**\n * Validate map configuration (prompt/promptFile and optional skill)\n */\nfunction validateMapConfig(config: PipelineConfig): void {\n if (!config.map) {\n throw new PipelineExecutionError('Pipeline config missing \"map\"');\n }\n\n // Validate prompt configuration (must have exactly one of prompt or promptFile)\n const hasPrompt = !!config.map.prompt;\n const hasPromptFile = !!config.map.promptFile;\n \n if (!hasPrompt && !hasPromptFile) {\n throw new PipelineExecutionError('Pipeline config must have either \"map.prompt\" or \"map.promptFile\"');\n }\n if (hasPrompt && hasPromptFile) {\n throw new PipelineExecutionError('Pipeline config cannot have both \"map.prompt\" and \"map.promptFile\"');\n }\n\n // Validate skill name if provided (skill is optional and can be combined with prompt/promptFile)\n if (config.map.skill !== undefined && typeof config.map.skill !== 'string') {\n throw new PipelineExecutionError('Pipeline config \"map.skill\" must be a string');\n }\n\n // map.output is optional - if omitted, text mode is used\n if (config.map.output !== undefined && !Array.isArray(config.map.output)) {\n throw new PipelineExecutionError('Pipeline config \"map.output\" must be an array if provided');\n }\n\n // Validate batchSize if provided\n if (config.map.batchSize !== undefined) {\n if (typeof config.map.batchSize !== 'number' || !Number.isInteger(config.map.batchSize)) {\n throw new PipelineExecutionError('Pipeline config \"map.batchSize\" must be a positive integer');\n }\n if (config.map.batchSize < 1) {\n throw new PipelineExecutionError('Pipeline config \"map.batchSize\" must be at least 
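// Illustrative calls of the formatting helpers above; values are invented.
// escapeCSV only quotes when needed and doubles embedded quotes; formatValue
// compacts long strings, arrays, and objects for table/CSV cells.
const exampleCsvCells = [
  escapeCSV('plain'),      // -> plain
  escapeCSV('a,b'),        // -> "a,b"          (quoted because of the comma)
  escapeCSV('say "hi"')    // -> "say ""hi"""   (embedded quotes doubled)
];
const exampleCompactValue = formatValue(['x', 'y', 'z']); // -> '[3 items]'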
1');\n }\n // When batchSize > 1, prompt should contain {{ITEMS}}\n if (config.map.batchSize > 1) {\n const prompt = config.map.prompt || '';\n if (!prompt.includes('{{ITEMS}}')) {\n getLogger().warn(LogCategory.PIPELINE, 'Warning: batchSize > 1 but prompt does not contain {{ITEMS}}. Consider using {{ITEMS}} to access batch items.');\n }\n }\n }\n}\n\n/**\n * Validate reduce configuration\n */\nfunction validateReduceConfig(config: PipelineConfig): void {\n if (!config.reduce) {\n throw new PipelineExecutionError('Pipeline config missing \"reduce\"');\n }\n\n const validReduceTypes = ['list', 'table', 'json', 'csv', 'ai', 'text'];\n if (!validReduceTypes.includes(config.reduce.type)) {\n throw new PipelineExecutionError(\n `Unsupported reduce type: ${config.reduce.type}. Supported types: ${validReduceTypes.join(', ')}`\n );\n }\n\n // Validate AI reduce configuration\n if (config.reduce.type === 'ai') {\n const hasPrompt = !!config.reduce.prompt;\n const hasPromptFile = !!config.reduce.promptFile;\n \n if (!hasPrompt && !hasPromptFile) {\n throw new PipelineExecutionError(\n 'Pipeline config must have either \"reduce.prompt\" or \"reduce.promptFile\" when reduce.type is \"ai\"'\n );\n }\n if (hasPrompt && hasPromptFile) {\n throw new PipelineExecutionError('Pipeline config cannot have both \"reduce.prompt\" and \"reduce.promptFile\"');\n }\n \n // Validate skill name if provided (skill is optional and can be combined with prompt/promptFile)\n if (config.reduce.skill !== undefined && typeof config.reduce.skill !== 'string') {\n throw new PipelineExecutionError('Pipeline config \"reduce.skill\" must be a string');\n }\n \n if (config.reduce.output !== undefined && !Array.isArray(config.reduce.output)) {\n throw new PipelineExecutionError('Pipeline config \"reduce.output\" must be an array if provided');\n }\n }\n}\n\n/**\n * Validate input configuration\n */\nfunction validateInputConfig(config: PipelineConfig): void {\n if (!config.input) {\n throw new PipelineExecutionError('Pipeline config missing \"input\"');\n }\n\n // Count how many input sources are specified\n const hasItems = !!config.input.items;\n const hasFrom = !!config.input.from;\n const hasGenerate = !!config.input.generate;\n const sourceCount = [hasItems, hasFrom, hasGenerate].filter(Boolean).length;\n\n if (sourceCount === 0) {\n throw new PipelineExecutionError('Input must have one of \"items\", \"from\", or \"generate\"');\n }\n if (sourceCount > 1) {\n throw new PipelineExecutionError('Input can only have one of \"items\", \"from\", or \"generate\"');\n }\n\n // Validate generate config if present\n if (hasGenerate) {\n if (!isGenerateConfig(config.input.generate)) {\n throw new PipelineExecutionError('Invalid generate configuration');\n }\n const validation = validateGenerateConfig(config.input.generate);\n if (!validation.valid) {\n throw new PipelineExecutionError(\n `Invalid generate configuration: ${validation.errors.join('; ')}`\n );\n }\n throw new PipelineExecutionError(\n 'Pipelines with \"generate\" input require interactive approval. Use the Pipeline Preview to generate and approve items first.',\n 'input'\n );\n }\n\n // Validate from source if present\n if (config.input.from) {\n if (!Array.isArray(config.input.from) && !isCSVSource(config.input.from)) {\n const fromObj = config.input.from as Record<string, unknown>;\n if (fromObj.type && fromObj.type !== 'csv') {\n throw new PipelineExecutionError(\n `Unsupported source type: ${fromObj.type}. 
Only \"csv\" is supported.`\n );\n }\n throw new PipelineExecutionError(\n 'Invalid \"from\" configuration. Must be either a CSV source {type: \"csv\", path: \"...\"} or an inline array.'\n );\n }\n if (isCSVSource(config.input.from) && !config.input.from.path) {\n throw new PipelineExecutionError('Pipeline config missing \"input.from.path\"');\n }\n }\n\n // Validate inline items if present\n if (config.input.items && !Array.isArray(config.input.items)) {\n throw new PipelineExecutionError('Pipeline config \"input.items\" must be an array');\n }\n\n // Validate parameters if present\n if (config.input.parameters) {\n if (!Array.isArray(config.input.parameters)) {\n throw new PipelineExecutionError('Pipeline config \"input.parameters\" must be an array');\n }\n for (const param of config.input.parameters) {\n if (!param.name || typeof param.name !== 'string') {\n throw new PipelineExecutionError('Each parameter must have a \"name\" string');\n }\n if (param.value === undefined || param.value === null) {\n throw new PipelineExecutionError(`Parameter \"${param.name}\" must have a \"value\"`);\n }\n }\n }\n}\n\n/**\n * Validate full pipeline configuration (including input)\n */\nfunction validatePipelineConfig(config: PipelineConfig): void {\n if (!config.name) {\n throw new PipelineExecutionError('Pipeline config missing \"name\"');\n }\n\n validateInputConfig(config);\n validateMapConfig(config);\n validateReduceConfig(config);\n}\n\n/**\n * Parse a YAML pipeline configuration\n */\nexport async function parsePipelineYAML(yamlContent: string): Promise<PipelineConfig> {\n const yaml = await import('js-yaml');\n const config = yaml.load(yamlContent) as PipelineConfig;\n validatePipelineConfig(config);\n return config;\n}\n\n/**\n * Parse a YAML pipeline configuration synchronously\n */\nexport function parsePipelineYAMLSync(yamlContent: string): PipelineConfig {\n // eslint-disable-next-line @typescript-eslint/no-var-requires\n const yaml = require('js-yaml');\n const config = yaml.load(yamlContent) as PipelineConfig;\n validatePipelineConfig(config);\n return config;\n}\n", "/**\n * Pipeline Module - Public API\n *\n * YAML-based pipeline execution framework.\n * Provides configuration types, execution, and utilities for AI pipelines.\n */\n\n// Types\nexport type {\n PipelineConfig,\n InputConfig,\n MapConfig,\n ReduceConfig,\n FilterConfig,\n CSVSource,\n CSVParseOptions,\n CSVParseResult,\n PipelineParameter,\n GenerateInputConfig,\n FilterOperator,\n FilterRule,\n RuleFilterConfig,\n AIFilterConfig,\n FilterStats,\n FilterResult,\n // Re-exported from map-reduce\n AIInvoker,\n AIInvokerOptions,\n AIInvokerResult,\n ProcessTracker,\n SessionMetadata,\n ExecutorOptions,\n JobProgress,\n MapReduceResult,\n PromptItem,\n PromptMapResult,\n PromptMapInput,\n PromptMapOutput,\n PromptMapSummary,\n PromptMapJobOptions,\n OutputFormat\n} from './types';\nexport { isCSVSource, isGenerateConfig } from './types';\n\n// Executor\nexport {\n executePipeline,\n executePipelineWithItems,\n parsePipelineYAML,\n parsePipelineYAMLSync,\n PipelineExecutionError,\n DEFAULT_PARALLEL_LIMIT\n} from './executor';\nexport type { ExecutePipelineOptions, PipelineExecutionResult } from './executor';\n\n// CSV Reader\nexport {\n parseCSVContent,\n readCSVFile,\n readCSVFileSync,\n resolveCSVPath,\n validateCSVHeaders,\n getCSVPreview,\n CSVParseError,\n DEFAULT_CSV_OPTIONS\n} from './csv-reader';\n\n// Template Engine\nexport {\n substituteTemplate,\n extractVariables,\n validateItemForTemplate,\n buildFullPrompt,\n 
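// Sketch of a minimal configuration that passes the validation above. The YAML
// is invented but shaped to satisfy validatePipelineConfig: a name, exactly one
// input source, a map prompt, and a supported reduce type.
const examplePipelineYaml = `
name: review-files
input:
  items:
    - file: src/a.ts
    - file: src/b.ts
  parameters:
    - name: style
      value: terse
map:
  prompt: "Review {{file}} in a {{style}} style."
  output: [severity, summary]
  parallel: 2
reduce:
  type: table
`;

const exampleParsedConfig = parsePipelineYAMLSync(examplePipelineYaml);
// exampleParsedConfig.name === 'review-files'; passing it to executePipeline
// with the options sketched earlier would run two map calls and aggregate the
// outputs into a table.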
buildPromptFromTemplate,\n parseAIResponse,\n extractJSON,\n escapeTemplateValue,\n previewTemplate,\n TemplateError\n} from './template';\nexport type { SubstituteTemplateOptions } from './template';\n\n// Filter Executor\nexport {\n executeFilter,\n executeRuleFilter,\n executeAIFilter,\n executeHybridFilter\n} from './filter-executor';\nexport type { FilterExecuteOptions, FilterProgress } from './filter-executor';\n\n// Prompt Resolver\nexport {\n resolvePromptFile,\n resolvePromptFileSync,\n resolvePromptFileWithDetails,\n resolvePromptPath,\n getSearchPaths,\n extractPromptContent,\n promptFileExists,\n validatePromptFile,\n PromptResolverError\n} from './prompt-resolver';\nexport type { PromptResolutionResult } from './prompt-resolver';\n\n// Skill Resolver\nexport {\n resolveSkill,\n resolveSkillSync,\n resolveSkillWithDetails,\n resolveSkillWithDetailsSync,\n getSkillsDirectory,\n getSkillDirectory,\n getSkillPromptPath,\n skillExists,\n listSkills,\n validateSkill,\n SkillResolverError,\n DEFAULT_SKILLS_DIRECTORY,\n SKILL_PROMPT_FILENAME\n} from './skill-resolver';\nexport type { SkillResolutionResult, SkillMetadata } from './skill-resolver';\n\n// Input Generator\nexport {\n generateInputItems,\n buildGeneratePrompt,\n parseGenerateResponse,\n toGeneratedItems,\n getSelectedItems,\n createEmptyItem,\n validateGenerateConfig,\n InputGenerationError\n} from './input-generator';\nexport type { GenerateInputResult, GeneratedItem, GenerateState } from './input-generator';\n", "/**\n * Queue Types\n *\n * Type definitions for the AI task queue system.\n * These types are used by TaskQueueManager and QueueExecutor.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\n// ============================================================================\n// Core Types\n// ============================================================================\n\n/**\n * Type of task that can be queued\n */\nexport type TaskType =\n | 'follow-prompt'\n | 'resolve-comments'\n | 'code-review'\n | 'ai-clarification'\n | 'custom';\n\n/**\n * Priority level for queued tasks\n * Higher priority tasks are executed first\n */\nexport type TaskPriority = 'high' | 'normal' | 'low';\n\n/**\n * Status of a queued task\n */\nexport type QueueStatus =\n | 'queued' // Waiting in queue\n | 'running' // Currently executing\n | 'completed' // Finished successfully\n | 'failed' // Finished with error\n | 'cancelled'; // Cancelled by user\n\n// ============================================================================\n// Payload Types\n// ============================================================================\n\n/**\n * Payload for follow-prompt tasks\n */\nexport interface FollowPromptPayload {\n /** Path to the prompt file */\n promptFilePath: string;\n /** Optional path to the plan file */\n planFilePath?: string;\n /** Optional skill name to use */\n skillName?: string;\n /** Optional additional context */\n additionalContext?: string;\n /** Working directory for execution */\n workingDirectory?: string;\n}\n\n/**\n * Payload for resolve-comments tasks\n */\nexport interface ResolveCommentsPayload {\n /** URI of the document containing comments */\n documentUri: string;\n /** IDs of comments to resolve */\n commentIds: string[];\n /** Template for generating the prompt */\n promptTemplate: string;\n}\n\n/**\n * Payload for code-review tasks\n */\nexport interface CodeReviewPayload {\n /** Commit SHA to review (optional) */\n commitSha?: string;\n /** Type of diff to review */\n diffType: 'staged' | 
'pending' | 'commit';\n /** Path to the rules folder */\n rulesFolder: string;\n /** Working directory for the review */\n workingDirectory?: string;\n}\n\n/**\n * Payload for AI clarification tasks\n */\nexport interface AIClarificationPayload {\n /** The prompt to send to AI (if pre-built) */\n prompt?: string;\n /** Working directory for execution */\n workingDirectory?: string;\n /** Optional model to use */\n model?: string;\n /** Selected text for clarification */\n selectedText?: string;\n /** File path containing the selection */\n filePath?: string;\n /** Start line of selection */\n startLine?: number;\n /** End line of selection */\n endLine?: number;\n /** Surrounding lines for context */\n surroundingLines?: string;\n /** Nearest heading in the document */\n nearestHeading?: string | null;\n /** Instruction type (clarify, go-deeper, custom) */\n instructionType?: string;\n /** Custom instruction text */\n customInstruction?: string;\n /** Content from prompt file */\n promptFileContent?: string;\n /** Skill name if using a skill */\n skillName?: string;\n}\n\n/**\n * Payload for custom tasks\n */\nexport interface CustomTaskPayload {\n /** Custom data for the task */\n data: Record<string, unknown>;\n}\n\n/**\n * Union of all payload types\n */\nexport type TaskPayload =\n | FollowPromptPayload\n | ResolveCommentsPayload\n | CodeReviewPayload\n | AIClarificationPayload\n | CustomTaskPayload;\n\n// ============================================================================\n// Task Configuration\n// ============================================================================\n\n/**\n * Configuration for task execution\n */\nexport interface TaskExecutionConfig {\n /** AI model to use */\n model?: string;\n /** Timeout in milliseconds */\n timeoutMs?: number;\n /** Whether to retry on failure */\n retryOnFailure?: boolean;\n /** Number of retry attempts */\n retryAttempts?: number;\n /** Delay between retries in milliseconds */\n retryDelayMs?: number;\n}\n\n/**\n * Default task execution configuration\n */\nimport {\n DEFAULT_AI_TIMEOUT_MS,\n DEFAULT_RETRY_ATTEMPTS,\n DEFAULT_RETRY_DELAY_MS\n} from '../config/defaults';\n\nexport const DEFAULT_TASK_CONFIG: TaskExecutionConfig = {\n timeoutMs: DEFAULT_AI_TIMEOUT_MS,\n retryOnFailure: false,\n retryAttempts: DEFAULT_RETRY_ATTEMPTS,\n retryDelayMs: DEFAULT_RETRY_DELAY_MS,\n};\n\n// ============================================================================\n// Queued Task\n// ============================================================================\n\n/**\n * A task that has been queued for execution\n */\nexport interface QueuedTask<TPayload extends TaskPayload = TaskPayload, TResult = unknown> {\n /** Unique identifier for the task */\n id: string;\n /** Type of task */\n type: TaskType;\n /** Priority level */\n priority: TaskPriority;\n /** Current status */\n status: QueueStatus;\n /** Timestamp when task was created */\n createdAt: number;\n /** Timestamp when execution started */\n startedAt?: number;\n /** Timestamp when execution completed */\n completedAt?: number;\n\n /** Task-specific payload */\n payload: TPayload;\n\n /** Execution configuration */\n config: TaskExecutionConfig;\n\n /** Display name for the task (shown in UI) */\n displayName?: string;\n\n /** Links to AIProcess when running */\n processId?: string;\n\n /** Result of execution (when completed) */\n result?: TResult;\n\n /** Error message (when failed) */\n error?: string;\n\n /** Number of retry attempts made */\n retryCount?: 
number;\n}\n\n/**\n * Input for creating a new queued task (without auto-generated fields)\n */\nexport type CreateTaskInput<TPayload extends TaskPayload = TaskPayload> = Omit<\n QueuedTask<TPayload>,\n 'id' | 'createdAt' | 'status' | 'startedAt' | 'completedAt' | 'result' | 'error' | 'retryCount'\n>;\n\n/**\n * Partial update for a queued task\n */\nexport type TaskUpdate<TPayload extends TaskPayload = TaskPayload, TResult = unknown> = Partial<\n Pick<\n QueuedTask<TPayload, TResult>,\n 'status' | 'startedAt' | 'completedAt' | 'processId' | 'result' | 'error' | 'retryCount' | 'priority' | 'displayName'\n >\n>;\n\n// ============================================================================\n// Queue Events\n// ============================================================================\n\n/**\n * Type of queue change event\n */\nexport type QueueChangeType =\n | 'added'\n | 'removed'\n | 'updated'\n | 'reordered'\n | 'cleared'\n | 'paused'\n | 'resumed';\n\n/**\n * Event emitted when the queue changes\n */\nexport interface QueueChangeEvent {\n /** Type of change */\n type: QueueChangeType;\n /** ID of the affected task (if applicable) */\n taskId?: string;\n /** The affected task (if applicable) */\n task?: QueuedTask;\n /** Timestamp of the event */\n timestamp: number;\n}\n\n/**\n * Event types for the queue event emitter\n */\nexport interface QueueEvents {\n change: (event: QueueChangeEvent) => void;\n taskAdded: (task: QueuedTask) => void;\n taskRemoved: (task: QueuedTask) => void;\n taskUpdated: (task: QueuedTask, updates: TaskUpdate) => void;\n taskStarted: (task: QueuedTask) => void;\n taskCompleted: (task: QueuedTask, result: unknown) => void;\n taskFailed: (task: QueuedTask, error: Error) => void;\n taskCancelled: (task: QueuedTask) => void;\n paused: () => void;\n resumed: () => void;\n}\n\n// ============================================================================\n// Executor Types\n// ============================================================================\n\n/**\n * Result of task execution\n */\nexport interface TaskExecutionResult<TResult = unknown> {\n /** Whether execution was successful */\n success: boolean;\n /** Result data (if successful) */\n result?: TResult;\n /** Error (if failed) */\n error?: Error;\n /** Duration of execution in milliseconds */\n durationMs: number;\n}\n\n/**\n * Abstract task executor interface\n * Implement this for different backends (AI service, mock, etc.)\n */\nexport interface TaskExecutor<TResult = unknown> {\n /**\n * Execute a queued task\n * @param task The task to execute\n * @returns Promise resolving to the execution result\n */\n execute(task: QueuedTask): Promise<TaskExecutionResult<TResult>>;\n\n /**\n * Cancel a running task (optional)\n * @param taskId ID of the task to cancel\n */\n cancel?(taskId: string): void;\n}\n\n/**\n * Options for the queue executor\n */\nexport interface QueueExecutorOptions {\n /** Maximum concurrent task executions (default: 1) */\n maxConcurrency?: number;\n /** Whether to auto-start processing (default: true) */\n autoStart?: boolean;\n}\n\n/**\n * Default queue executor options\n */\nexport const DEFAULT_EXECUTOR_OPTIONS: Required<QueueExecutorOptions> = {\n maxConcurrency: 1,\n autoStart: true,\n};\n\n// ============================================================================\n// Queue Manager Types\n// ============================================================================\n\n/**\n * Options for the task queue manager\n */\nexport interface TaskQueueManagerOptions {\n 
/** Maximum queue size (0 = unlimited, default: 0) */\n maxQueueSize?: number;\n /** Whether to keep completed tasks in history (default: true) */\n keepHistory?: boolean;\n /** Maximum history size (default: 100) */\n maxHistorySize?: number;\n}\n\n/**\n * Default queue manager options\n */\nexport const DEFAULT_QUEUE_MANAGER_OPTIONS: Required<TaskQueueManagerOptions> = {\n maxQueueSize: 0,\n keepHistory: true,\n maxHistorySize: 100,\n};\n\n/**\n * Statistics about the queue\n */\nexport interface QueueStats {\n /** Number of tasks waiting in queue */\n queued: number;\n /** Number of tasks currently running */\n running: number;\n /** Number of completed tasks (in history) */\n completed: number;\n /** Number of failed tasks (in history) */\n failed: number;\n /** Number of cancelled tasks (in history) */\n cancelled: number;\n /** Total tasks processed */\n total: number;\n /** Whether the queue is paused */\n isPaused: boolean;\n}\n\n// ============================================================================\n// Priority Helpers\n// ============================================================================\n\n/**\n * Numeric values for priority comparison\n * Higher value = higher priority\n */\nexport const PRIORITY_VALUES: Record<TaskPriority, number> = {\n high: 3,\n normal: 2,\n low: 1,\n};\n\n/**\n * Compare two tasks by priority (for sorting)\n * Returns negative if a should come before b\n */\nexport function comparePriority(a: QueuedTask, b: QueuedTask): number {\n const priorityDiff = PRIORITY_VALUES[b.priority] - PRIORITY_VALUES[a.priority];\n if (priorityDiff !== 0) {\n return priorityDiff;\n }\n // Same priority: earlier created task comes first (FIFO within priority)\n return a.createdAt - b.createdAt;\n}\n\n// ============================================================================\n// Type Guards\n// ============================================================================\n\n/**\n * Check if a payload is a FollowPromptPayload\n */\nexport function isFollowPromptPayload(payload: TaskPayload): payload is FollowPromptPayload {\n return 'promptFilePath' in payload;\n}\n\n/**\n * Check if a payload is a ResolveCommentsPayload\n */\nexport function isResolveCommentsPayload(payload: TaskPayload): payload is ResolveCommentsPayload {\n return 'documentUri' in payload && 'commentIds' in payload;\n}\n\n/**\n * Check if a payload is a CodeReviewPayload\n */\nexport function isCodeReviewPayload(payload: TaskPayload): payload is CodeReviewPayload {\n return 'diffType' in payload && 'rulesFolder' in payload;\n}\n\n/**\n * Check if a payload is an AIClarificationPayload\n */\nexport function isAIClarificationPayload(payload: TaskPayload): payload is AIClarificationPayload {\n return 'prompt' in payload && !('data' in payload);\n}\n\n/**\n * Check if a payload is a CustomTaskPayload\n */\nexport function isCustomTaskPayload(payload: TaskPayload): payload is CustomTaskPayload {\n return 'data' in payload;\n}\n\n/**\n * Generate a unique task ID\n */\nexport function generateTaskId(): string {\n return `queue-${Date.now()}-${Math.random().toString(36).substring(2, 9)}`;\n}\n", "/**\n * TaskQueueManager\n *\n * Manages a queue of tasks with priority-based ordering.\n * Provides operations for enqueue, dequeue, reorder, and queue control.\n *\n * Uses Node.js EventEmitter for cross-platform compatibility.\n * In-memory storage only - queue resets when process restarts.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport { EventEmitter } from 'events';\nimport 
{\n QueuedTask,\n CreateTaskInput,\n TaskUpdate,\n TaskPayload,\n QueueChangeEvent,\n QueueChangeType,\n QueueStats,\n TaskQueueManagerOptions,\n DEFAULT_QUEUE_MANAGER_OPTIONS,\n comparePriority,\n generateTaskId,\n} from './types';\n\n/**\n * Task queue manager for managing AI task execution queue\n */\nexport class TaskQueueManager extends EventEmitter {\n /** Queue of pending tasks (sorted by priority) */\n private queue: QueuedTask[] = [];\n /** Currently running tasks */\n private running: Map<string, QueuedTask> = new Map();\n /** History of completed/failed/cancelled tasks */\n private history: QueuedTask[] = [];\n /** Whether the queue is paused */\n private paused = false;\n /** Configuration options */\n private readonly options: Required<TaskQueueManagerOptions>;\n\n constructor(options: TaskQueueManagerOptions = {}) {\n super();\n this.options = { ...DEFAULT_QUEUE_MANAGER_OPTIONS, ...options };\n }\n\n // ========================================================================\n // Core Operations\n // ========================================================================\n\n /**\n * Add a task to the queue\n * @param input Task input (without auto-generated fields)\n * @returns The ID of the queued task\n */\n enqueue<TPayload extends TaskPayload = TaskPayload>(\n input: CreateTaskInput<TPayload>\n ): string {\n // Check queue size limit\n if (this.options.maxQueueSize > 0 && this.queue.length >= this.options.maxQueueSize) {\n throw new Error(`Queue is full (max size: ${this.options.maxQueueSize})`);\n }\n\n const task: QueuedTask<TPayload> = {\n ...input,\n id: generateTaskId(),\n status: 'queued',\n createdAt: Date.now(),\n retryCount: 0,\n };\n\n // Insert in priority order\n this.insertByPriority(task);\n\n // Emit events\n this.emitChange('added', task);\n this.emit('taskAdded', task);\n\n return task.id;\n }\n\n /**\n * Remove and return the next task from the queue\n * @returns The next task, or undefined if queue is empty\n */\n dequeue(): QueuedTask | undefined {\n if (this.queue.length === 0) {\n return undefined;\n }\n\n const task = this.queue.shift()!;\n return task;\n }\n\n /**\n * Get the next task without removing it\n * @returns The next task, or undefined if queue is empty\n */\n peek(): QueuedTask | undefined {\n return this.queue[0];\n }\n\n // ========================================================================\n // Queue Access\n // ========================================================================\n\n /**\n * Get all tasks (queued + running + history)\n */\n getAll(): QueuedTask[] {\n return [...this.queue, ...Array.from(this.running.values()), ...this.history];\n }\n\n /**\n * Get all queued tasks (waiting for execution)\n */\n getQueued(): QueuedTask[] {\n return [...this.queue];\n }\n\n /**\n * Get all running tasks\n */\n getRunning(): QueuedTask[] {\n return Array.from(this.running.values());\n }\n\n /**\n * Get completed tasks from history\n */\n getCompleted(): QueuedTask[] {\n return this.history.filter(t => t.status === 'completed');\n }\n\n /**\n * Get failed tasks from history\n */\n getFailed(): QueuedTask[] {\n return this.history.filter(t => t.status === 'failed');\n }\n\n /**\n * Get cancelled tasks from history\n */\n getCancelled(): QueuedTask[] {\n return this.history.filter(t => t.status === 'cancelled');\n }\n\n /**\n * Get history (completed + failed + cancelled)\n */\n getHistory(): QueuedTask[] {\n return [...this.history];\n }\n\n /**\n * Get the number of queued tasks\n */\n size(): number {\n return 
this.queue.length;\n }\n\n /**\n * Get queue statistics\n */\n getStats(): QueueStats {\n return {\n queued: this.queue.length,\n running: this.running.size,\n completed: this.history.filter(t => t.status === 'completed').length,\n failed: this.history.filter(t => t.status === 'failed').length,\n cancelled: this.history.filter(t => t.status === 'cancelled').length,\n total: this.queue.length + this.running.size + this.history.length,\n isPaused: this.paused,\n };\n }\n\n // ========================================================================\n // Task Operations\n // ========================================================================\n\n /**\n * Get a task by ID (searches all: queued, running, history)\n */\n getTask(id: string): QueuedTask | undefined {\n // Check queue\n const queued = this.queue.find(t => t.id === id);\n if (queued) return queued;\n\n // Check running\n const running = this.running.get(id);\n if (running) return running;\n\n // Check history\n return this.history.find(t => t.id === id);\n }\n\n /**\n * Update a task's properties\n * @param id Task ID\n * @param updates Partial updates to apply\n * @returns true if task was found and updated\n */\n updateTask(id: string, updates: TaskUpdate): boolean {\n // Try to find in queue\n const queueIndex = this.queue.findIndex(t => t.id === id);\n if (queueIndex !== -1) {\n const task = this.queue[queueIndex];\n Object.assign(task, updates);\n\n // Re-sort if priority changed\n if (updates.priority !== undefined) {\n this.queue.splice(queueIndex, 1);\n this.insertByPriority(task);\n }\n\n this.emitChange('updated', task);\n this.emit('taskUpdated', task, updates);\n return true;\n }\n\n // Try to find in running\n const running = this.running.get(id);\n if (running) {\n Object.assign(running, updates);\n this.emitChange('updated', running);\n this.emit('taskUpdated', running, updates);\n return true;\n }\n\n // Try to find in history\n const historyIndex = this.history.findIndex(t => t.id === id);\n if (historyIndex !== -1) {\n const task = this.history[historyIndex];\n Object.assign(task, updates);\n this.emitChange('updated', task);\n this.emit('taskUpdated', task, updates);\n return true;\n }\n\n return false;\n }\n\n /**\n * Remove a task from the queue (only works for queued tasks)\n * @param id Task ID\n * @returns true if task was found and removed\n */\n removeTask(id: string): boolean {\n const index = this.queue.findIndex(t => t.id === id);\n if (index === -1) {\n return false;\n }\n\n const [task] = this.queue.splice(index, 1);\n this.emitChange('removed', task);\n this.emit('taskRemoved', task);\n return true;\n }\n\n /**\n * Cancel a task (works for queued or running tasks)\n * @param id Task ID\n * @returns true if task was found and cancelled\n */\n cancelTask(id: string): boolean {\n // Try to cancel from queue\n const queueIndex = this.queue.findIndex(t => t.id === id);\n if (queueIndex !== -1) {\n const [task] = this.queue.splice(queueIndex, 1);\n task.status = 'cancelled';\n task.completedAt = Date.now();\n this.addToHistory(task);\n this.emitChange('removed', task);\n this.emit('taskCancelled', task);\n return true;\n }\n\n // Try to cancel running task\n const running = this.running.get(id);\n if (running) {\n running.status = 'cancelled';\n running.completedAt = Date.now();\n this.running.delete(id);\n this.addToHistory(running);\n this.emitChange('updated', running);\n this.emit('taskCancelled', running);\n return true;\n }\n\n return false;\n }\n\n // 
========================================================================\n // Task State Transitions (used by executor)\n // ========================================================================\n\n /**\n * Mark a task as started (moves from queue to running)\n * @param id Task ID\n * @returns The task if found and started\n */\n markStarted(id: string): QueuedTask | undefined {\n const index = this.queue.findIndex(t => t.id === id);\n if (index === -1) {\n return undefined;\n }\n\n const [task] = this.queue.splice(index, 1);\n task.status = 'running';\n task.startedAt = Date.now();\n this.running.set(id, task);\n\n this.emitChange('updated', task);\n this.emit('taskStarted', task);\n return task;\n }\n\n /**\n * Mark a task as completed (moves from running to history)\n * @param id Task ID\n * @param result The result of execution\n * @returns The task if found and completed\n */\n markCompleted(id: string, result?: unknown): QueuedTask | undefined {\n const task = this.running.get(id);\n if (!task) {\n return undefined;\n }\n\n task.status = 'completed';\n task.completedAt = Date.now();\n task.result = result;\n this.running.delete(id);\n this.addToHistory(task);\n\n this.emitChange('updated', task);\n this.emit('taskCompleted', task, result);\n return task;\n }\n\n /**\n * Mark a task as failed (moves from running to history)\n * @param id Task ID\n * @param error The error that occurred\n * @returns The task if found and marked as failed\n */\n markFailed(id: string, error: Error | string): QueuedTask | undefined {\n const task = this.running.get(id);\n if (!task) {\n return undefined;\n }\n\n task.status = 'failed';\n task.completedAt = Date.now();\n task.error = typeof error === 'string' ? error : error.message;\n this.running.delete(id);\n this.addToHistory(task);\n\n this.emitChange('updated', task);\n this.emit('taskFailed', task, typeof error === 'string' ? new Error(error) : error);\n return task;\n }\n\n /**\n * Increment retry count and optionally re-queue the task\n * @param id Task ID\n * @param requeue Whether to re-queue the task\n * @returns The task if found\n */\n markRetry(id: string, requeue: boolean = true): QueuedTask | undefined {\n const task = this.running.get(id);\n if (!task) {\n return undefined;\n }\n\n task.retryCount = (task.retryCount || 0) + 1;\n\n if (requeue) {\n task.status = 'queued';\n task.startedAt = undefined;\n this.running.delete(id);\n this.insertByPriority(task);\n this.emitChange('reordered', task);\n }\n\n return task;\n }\n\n // ========================================================================\n // Reordering\n // ========================================================================\n\n /**\n * Move a task to the top of the queue (highest priority position)\n * @param id Task ID\n * @returns true if task was found and moved\n */\n moveToTop(id: string): boolean {\n const index = this.queue.findIndex(t => t.id === id);\n if (index === -1 || index === 0) {\n return index === 0; // Already at top\n }\n\n const [task] = this.queue.splice(index, 1);\n // Set to high priority and earliest timestamp to ensure it's first\n task.priority = 'high';\n task.createdAt = this.queue.length > 0 ? 
this.queue[0].createdAt - 1 : Date.now();\n this.queue.unshift(task);\n\n this.emitChange('reordered', task);\n return true;\n }\n\n /**\n * Move a task up one position in the queue\n * @param id Task ID\n * @returns true if task was found and moved\n */\n moveUp(id: string): boolean {\n const index = this.queue.findIndex(t => t.id === id);\n if (index <= 0) {\n return false;\n }\n\n // Swap with previous task\n [this.queue[index - 1], this.queue[index]] = [this.queue[index], this.queue[index - 1]];\n\n this.emitChange('reordered', this.queue[index - 1]);\n return true;\n }\n\n /**\n * Move a task down one position in the queue\n * @param id Task ID\n * @returns true if task was found and moved\n */\n moveDown(id: string): boolean {\n const index = this.queue.findIndex(t => t.id === id);\n if (index === -1 || index >= this.queue.length - 1) {\n return false;\n }\n\n // Swap with next task\n [this.queue[index], this.queue[index + 1]] = [this.queue[index + 1], this.queue[index]];\n\n this.emitChange('reordered', this.queue[index + 1]);\n return true;\n }\n\n /**\n * Get the position of a task in the queue (1-based)\n * @param id Task ID\n * @returns Position (1-based) or -1 if not found\n */\n getPosition(id: string): number {\n const index = this.queue.findIndex(t => t.id === id);\n return index === -1 ? -1 : index + 1;\n }\n\n // ========================================================================\n // Queue Control\n // ========================================================================\n\n /**\n * Pause queue processing\n * Running tasks continue, but no new tasks will be started\n */\n pause(): void {\n if (!this.paused) {\n this.paused = true;\n this.emitChange('paused');\n this.emit('paused');\n }\n }\n\n /**\n * Resume queue processing\n */\n resume(): void {\n if (this.paused) {\n this.paused = false;\n this.emitChange('resumed');\n this.emit('resumed');\n }\n }\n\n /**\n * Check if queue is paused\n */\n isPaused(): boolean {\n return this.paused;\n }\n\n /**\n * Clear all queued tasks (does not affect running or history)\n */\n clear(): void {\n const clearedTasks = [...this.queue];\n this.queue = [];\n\n // Move all to history as cancelled\n for (const task of clearedTasks) {\n task.status = 'cancelled';\n task.completedAt = Date.now();\n this.addToHistory(task);\n }\n\n this.emitChange('cleared');\n }\n\n /**\n * Clear history\n */\n clearHistory(): void {\n this.history = [];\n this.emitChange('cleared');\n }\n\n /**\n * Reset the queue manager (clears everything)\n */\n reset(): void {\n this.queue = [];\n this.running.clear();\n this.history = [];\n this.paused = false;\n this.emitChange('cleared');\n }\n\n // ========================================================================\n // Private Helpers\n // ========================================================================\n\n /**\n * Insert a task into the queue maintaining priority order\n */\n private insertByPriority(task: QueuedTask): void {\n // Find insertion point using binary search\n let low = 0;\n let high = this.queue.length;\n\n while (low < high) {\n const mid = Math.floor((low + high) / 2);\n if (comparePriority(task, this.queue[mid]) < 0) {\n high = mid;\n } else {\n low = mid + 1;\n }\n }\n\n this.queue.splice(low, 0, task);\n }\n\n /**\n * Add a task to history, respecting max history size\n */\n private addToHistory(task: QueuedTask): void {\n if (!this.options.keepHistory) {\n return;\n }\n\n this.history.unshift(task);\n\n // Trim history if needed\n if (this.history.length > 
this.options.maxHistorySize) {\n this.history = this.history.slice(0, this.options.maxHistorySize);\n }\n }\n\n /**\n * Emit a queue change event\n */\n private emitChange(type: QueueChangeType, task?: QueuedTask): void {\n const event: QueueChangeEvent = {\n type,\n taskId: task?.id,\n task,\n timestamp: Date.now(),\n };\n this.emit('change', event);\n }\n}\n\n/**\n * Create a new TaskQueueManager instance\n */\nexport function createTaskQueueManager(\n options?: TaskQueueManagerOptions\n): TaskQueueManager {\n return new TaskQueueManager(options);\n}\n", "/**\n * QueueExecutor\n *\n * Executes tasks from the queue with configurable concurrency.\n * Uses ConcurrencyLimiter for execution control.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport { EventEmitter } from 'events';\nimport { ConcurrencyLimiter, CancellationError } from '../map-reduce/concurrency-limiter';\nimport { TaskQueueManager } from './task-queue-manager';\nimport {\n QueuedTask,\n TaskExecutor,\n TaskExecutionResult,\n QueueExecutorOptions,\n DEFAULT_EXECUTOR_OPTIONS,\n DEFAULT_TASK_CONFIG,\n} from './types';\n\n/**\n * Executor that processes tasks from a queue\n */\nexport class QueueExecutor extends EventEmitter {\n /** The queue manager to pull tasks from */\n private readonly queueManager: TaskQueueManager;\n /** The task executor implementation */\n private readonly taskExecutor: TaskExecutor;\n /** Concurrency limiter for parallel execution */\n private limiter: ConcurrencyLimiter;\n /** Whether the executor is running */\n private running = false;\n /** Whether stop was requested */\n private stopRequested = false;\n /** Set of task IDs that have been requested to cancel */\n private cancelledTasks: Set<string> = new Set();\n /** Current configuration */\n private options: Required<QueueExecutorOptions>;\n /** Processing loop promise */\n private processingPromise: Promise<void> | null = null;\n\n constructor(\n queueManager: TaskQueueManager,\n taskExecutor: TaskExecutor,\n options: QueueExecutorOptions = {}\n ) {\n super();\n this.queueManager = queueManager;\n this.taskExecutor = taskExecutor;\n this.options = { ...DEFAULT_EXECUTOR_OPTIONS, ...options };\n this.limiter = new ConcurrencyLimiter(this.options.maxConcurrency);\n\n // Listen to queue events\n this.setupQueueListeners();\n\n // Auto-start if configured\n if (this.options.autoStart) {\n this.start();\n }\n }\n\n // ========================================================================\n // Lifecycle\n // ========================================================================\n\n /**\n * Start processing tasks from the queue\n */\n start(): void {\n if (this.running) {\n return;\n }\n\n this.running = true;\n this.stopRequested = false;\n this.emit('started');\n\n // Start the processing loop\n this.processingPromise = this.processLoop();\n }\n\n /**\n * Stop processing tasks (running tasks will complete)\n */\n stop(): void {\n if (!this.running) {\n return;\n }\n\n this.stopRequested = true;\n this.running = false;\n this.emit('stopped');\n }\n\n /**\n * Check if the executor is running\n */\n isRunning(): boolean {\n return this.running;\n }\n\n /**\n * Wait for all currently running tasks to complete\n */\n async waitForCompletion(): Promise<void> {\n if (this.processingPromise) {\n await this.processingPromise;\n }\n }\n\n // ========================================================================\n // Configuration\n // ========================================================================\n\n /**\n * Set the maximum 
concurrency\n * @param n New concurrency limit\n */\n setMaxConcurrency(n: number): void {\n if (n < 1) {\n throw new Error('maxConcurrency must be at least 1');\n }\n this.options.maxConcurrency = n;\n this.limiter = new ConcurrencyLimiter(n);\n }\n\n /**\n * Get the current maximum concurrency\n */\n getMaxConcurrency(): number {\n return this.options.maxConcurrency;\n }\n\n // ========================================================================\n // Task Cancellation\n // ========================================================================\n\n /**\n * Request cancellation of a specific task\n * @param taskId ID of the task to cancel\n */\n cancelTask(taskId: string): void {\n this.cancelledTasks.add(taskId);\n\n // Also cancel in queue manager\n this.queueManager.cancelTask(taskId);\n\n // Notify executor if it has cancel support\n if (this.taskExecutor.cancel) {\n this.taskExecutor.cancel(taskId);\n }\n }\n\n /**\n * Check if a task has been cancelled\n */\n isTaskCancelled(taskId: string): boolean {\n return this.cancelledTasks.has(taskId);\n }\n\n // ========================================================================\n // Processing Loop\n // ========================================================================\n\n /**\n * Main processing loop\n */\n private async processLoop(): Promise<void> {\n while (this.running && !this.stopRequested) {\n // Check if queue is paused\n if (this.queueManager.isPaused()) {\n await this.delay(100);\n continue;\n }\n\n // Check if we have capacity\n if (this.limiter.runningCount >= this.limiter.limit) {\n await this.delay(50);\n continue;\n }\n\n // Try to get next task\n const task = this.queueManager.peek();\n if (!task) {\n // No tasks, wait a bit\n await this.delay(100);\n continue;\n }\n\n // Start executing the task (don't await - let it run in parallel)\n this.executeTask(task).catch(error => {\n // Log error but don't crash the loop\n this.emit('error', error);\n });\n\n // Small delay to prevent tight loop\n await this.delay(10);\n }\n }\n\n /**\n * Execute a single task\n */\n private async executeTask(task: QueuedTask): Promise<void> {\n const taskId = task.id;\n\n // Check if already cancelled\n if (this.cancelledTasks.has(taskId)) {\n return;\n }\n\n // Mark as started in queue manager\n const startedTask = this.queueManager.markStarted(taskId);\n if (!startedTask) {\n // Task was removed from queue\n return;\n }\n\n this.emit('taskStarted', startedTask);\n\n // Create cancellation checker\n const isCancelled = () => this.cancelledTasks.has(taskId);\n\n try {\n // Execute with concurrency limiting\n const result = await this.limiter.run(\n () => this.executeWithTimeout(startedTask),\n isCancelled\n );\n\n // Check if cancelled during execution\n if (isCancelled()) {\n // Already marked as cancelled by cancelTask()\n this.cancelledTasks.delete(taskId);\n return;\n }\n\n if (result.success) {\n this.queueManager.markCompleted(taskId, result.result);\n this.emit('taskCompleted', startedTask, result.result);\n } else {\n await this.handleTaskFailure(startedTask, result.error || new Error('Unknown error'));\n }\n } catch (error) {\n if (error instanceof CancellationError) {\n // Task was cancelled\n this.cancelledTasks.delete(taskId);\n this.emit('taskCancelled', startedTask);\n } else {\n await this.handleTaskFailure(startedTask, error as Error);\n }\n }\n }\n\n /**\n * Execute a task with timeout\n */\n private async executeWithTimeout(task: QueuedTask): Promise<TaskExecutionResult> {\n const timeoutMs = 
task.config.timeoutMs ?? DEFAULT_TASK_CONFIG.timeoutMs!;\n\n const startTime = Date.now();\n\n // Create timeout promise\n const timeoutPromise = new Promise<TaskExecutionResult>((_, reject) => {\n setTimeout(() => {\n reject(new Error(`Task timed out after ${timeoutMs}ms`));\n }, timeoutMs);\n });\n\n // Race between execution and timeout\n try {\n const result = await Promise.race([\n this.taskExecutor.execute(task),\n timeoutPromise,\n ]);\n\n return {\n ...result,\n durationMs: Date.now() - startTime,\n };\n } catch (error) {\n return {\n success: false,\n error: error as Error,\n durationMs: Date.now() - startTime,\n };\n }\n }\n\n /**\n * Handle task failure with retry logic\n */\n private async handleTaskFailure(task: QueuedTask, error: Error): Promise<void> {\n const config = task.config;\n const retryCount = task.retryCount || 0;\n const maxRetries = config.retryAttempts ?? DEFAULT_TASK_CONFIG.retryAttempts!;\n\n if (config.retryOnFailure && retryCount < maxRetries) {\n // Retry the task\n const retryDelay = config.retryDelayMs ?? DEFAULT_TASK_CONFIG.retryDelayMs!;\n await this.delay(retryDelay);\n\n this.queueManager.markRetry(task.id, true);\n this.emit('taskRetry', task, retryCount + 1);\n } else {\n // Mark as failed\n this.queueManager.markFailed(task.id, error);\n this.emit('taskFailed', task, error);\n }\n }\n\n // ========================================================================\n // Event Listeners\n // ========================================================================\n\n /**\n * Set up listeners for queue events\n */\n private setupQueueListeners(): void {\n // When queue is resumed, make sure we're processing\n this.queueManager.on('resumed', () => {\n if (this.running && !this.processingPromise) {\n this.processingPromise = this.processLoop();\n }\n });\n }\n\n // ========================================================================\n // Utilities\n // ========================================================================\n\n /**\n * Delay helper\n */\n private delay(ms: number): Promise<void> {\n return new Promise(resolve => setTimeout(resolve, ms));\n }\n\n /**\n * Dispose of the executor\n */\n dispose(): void {\n this.stop();\n this.cancelledTasks.clear();\n this.removeAllListeners();\n }\n}\n\n/**\n * Create a new QueueExecutor instance\n */\nexport function createQueueExecutor(\n queueManager: TaskQueueManager,\n taskExecutor: TaskExecutor,\n options?: QueueExecutorOptions\n): QueueExecutor {\n return new QueueExecutor(queueManager, taskExecutor, options);\n}\n\n/**\n * A simple pass-through executor for testing\n * Executes tasks by calling a provided function\n */\nexport class SimpleTaskExecutor implements TaskExecutor {\n private readonly executeFn: (task: QueuedTask) => Promise<unknown>;\n private readonly cancelledTasks: Set<string> = new Set();\n\n constructor(executeFn: (task: QueuedTask) => Promise<unknown>) {\n this.executeFn = executeFn;\n }\n\n async execute(task: QueuedTask): Promise<TaskExecutionResult> {\n if (this.cancelledTasks.has(task.id)) {\n return {\n success: false,\n error: new CancellationError(),\n durationMs: 0,\n };\n }\n\n const startTime = Date.now();\n\n try {\n const result = await this.executeFn(task);\n return {\n success: true,\n result,\n durationMs: Date.now() - startTime,\n };\n } catch (error) {\n return {\n success: false,\n error: error as Error,\n durationMs: Date.now() - startTime,\n };\n }\n }\n\n cancel(taskId: string): void {\n this.cancelledTasks.add(taskId);\n }\n}\n\n/**\n * Create a simple task 
executor\n */\nexport function createSimpleTaskExecutor(\n executeFn: (task: QueuedTask) => Promise<unknown>\n): SimpleTaskExecutor {\n return new SimpleTaskExecutor(executeFn);\n}\n", "/**\n * Queue Module\n *\n * AI task queue system for managing and executing tasks with priority-based ordering.\n *\n * Key Features:\n * - Priority-based task ordering (high > normal > low)\n * - Configurable concurrency control\n * - Pause/resume queue processing\n * - Task cancellation support\n * - Event-driven architecture (Node.js EventEmitter)\n * - In-memory storage (no persistence)\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n *\n * @example\n * ```typescript\n * import {\n * createTaskQueueManager,\n * createQueueExecutor,\n * createSimpleTaskExecutor\n * } from 'pipeline-core';\n *\n * // Create queue manager\n * const queueManager = createTaskQueueManager();\n *\n * // Create executor with simple task handler\n * const taskExecutor = createSimpleTaskExecutor(async (task) => {\n * // Your task execution logic\n * return { result: 'done' };\n * });\n *\n * // Create queue executor\n * const executor = createQueueExecutor(queueManager, taskExecutor, {\n * maxConcurrency: 1\n * });\n *\n * // Enqueue a task\n * const taskId = queueManager.enqueue({\n * type: 'follow-prompt',\n * priority: 'normal',\n * payload: { promptFilePath: '/path/to/prompt.md' },\n * config: { timeoutMs: 60000 }\n * });\n *\n * // Listen for completion\n * executor.on('taskCompleted', (task, result) => {\n * console.log(`Task ${task.id} completed:`, result);\n * });\n * ```\n */\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport {\n // Core types\n TaskType,\n TaskPriority,\n QueueStatus,\n\n // Payload types\n FollowPromptPayload,\n ResolveCommentsPayload,\n CodeReviewPayload,\n AIClarificationPayload,\n CustomTaskPayload,\n TaskPayload,\n\n // Task configuration\n TaskExecutionConfig,\n DEFAULT_TASK_CONFIG,\n\n // Queued task\n QueuedTask,\n CreateTaskInput,\n TaskUpdate,\n\n // Events\n QueueChangeType,\n QueueChangeEvent,\n QueueEvents,\n\n // Executor types\n TaskExecutionResult,\n TaskExecutor,\n QueueExecutorOptions,\n DEFAULT_EXECUTOR_OPTIONS,\n\n // Queue manager types\n TaskQueueManagerOptions,\n DEFAULT_QUEUE_MANAGER_OPTIONS,\n QueueStats,\n\n // Priority helpers\n PRIORITY_VALUES,\n comparePriority,\n\n // Type guards\n isFollowPromptPayload,\n isResolveCommentsPayload,\n isCodeReviewPayload,\n isAIClarificationPayload,\n isCustomTaskPayload,\n\n // Utilities\n generateTaskId,\n} from './types';\n\n// ============================================================================\n// Task Queue Manager\n// ============================================================================\n\nexport {\n TaskQueueManager,\n createTaskQueueManager,\n} from './task-queue-manager';\n\n// ============================================================================\n// Queue Executor\n// ============================================================================\n\nexport {\n QueueExecutor,\n createQueueExecutor,\n SimpleTaskExecutor,\n createSimpleTaskExecutor,\n} from './queue-executor';\n", "/**\n * pipeline-core\n *\n * AI pipeline execution engine with map-reduce framework.\n * A pure Node.js package for building and executing AI-powered data processing pipelines.\n *\n * Key Features:\n * - YAML-based pipeline configuration\n * - Map-reduce execution framework\n * - Copilot SDK 
integration\n * - Cross-platform compatible (Linux/Mac/Windows)\n *\n * @example\n * ```typescript\n * import { executePipeline, setLogger, consoleLogger } from 'pipeline-core';\n *\n * // Configure logging\n * setLogger(consoleLogger);\n *\n * // Execute a pipeline\n * const result = await executePipeline(config, {\n * aiInvoker: async (prompt) => {\n * // Your AI invocation logic\n * return { success: true, response: '...' };\n * },\n * pipelineDirectory: '/path/to/pipeline'\n * });\n * ```\n */\n\n// ============================================================================\n// Logger\n// ============================================================================\n\nexport {\n Logger,\n LogCategory,\n consoleLogger,\n nullLogger,\n setLogger,\n getLogger,\n resetLogger\n} from './logger';\n\n// ============================================================================\n// Errors\n// ============================================================================\n\nexport {\n // Error codes\n ErrorCode,\n ErrorCodeType,\n mapSystemErrorCode,\n // Core error class\n PipelineCoreError,\n ErrorMetadata,\n isPipelineCoreError,\n toPipelineCoreError,\n wrapError,\n getErrorCauseMessage,\n logError,\n} from './errors';\n\n// ============================================================================\n// Runtime (Async Policies)\n// ============================================================================\n\nexport {\n // Cancellation\n CancellationError,\n IsCancelledFn,\n isCancellationError,\n throwIfCancelled,\n createCancellationToken,\n // Timeout\n TimeoutError,\n TimeoutOptions,\n withTimeout,\n isTimeoutError,\n createTimeoutPromise,\n // Retry\n RetryExhaustedError,\n BackoffStrategy,\n OnAttemptFn,\n RetryOnFn,\n RetryOptions,\n DEFAULT_RETRY_OPTIONS,\n defaultRetryOn,\n retryOnTimeout,\n calculateDelay,\n withRetry,\n isRetryExhaustedError,\n // Policy (unified runner)\n PolicyOptions,\n DEFAULT_POLICY_OPTIONS,\n runWithPolicy,\n createPolicyRunner,\n} from './runtime';\n\n// ============================================================================\n// Utils\n// ============================================================================\n\nexport {\n // File utilities\n FileOperationResult,\n ReadFileOptions,\n WriteFileOptions,\n YAMLOptions,\n safeExists,\n safeIsDirectory,\n safeIsFile,\n safeReadFile,\n safeWriteFile,\n ensureDirectoryExists,\n safeReadDir,\n safeStats,\n readYAML,\n writeYAML,\n safeCopyFile,\n safeRename,\n safeRemove,\n getFileErrorMessage,\n // Glob utilities\n glob,\n getFilesWithExtension,\n // Exec utilities\n execAsync,\n // HTTP utilities\n HttpResponse,\n httpGet,\n httpDownload,\n httpGetJson,\n // Text matching utilities\n AnchorMatchConfig,\n DEFAULT_ANCHOR_MATCH_CONFIG,\n BaseMatchAnchor,\n hashText,\n levenshteinDistance,\n calculateSimilarity,\n normalizeText,\n splitIntoLines,\n getCharOffset,\n offsetToLineColumn,\n findAllOccurrences,\n scoreMatch,\n findFuzzyMatch,\n extractContext,\n // AI response parser\n extractJSON,\n parseAIResponse,\n // Terminal types\n TerminalType,\n InteractiveSessionStatus,\n InteractiveSession,\n ExternalTerminalLaunchOptions,\n ExternalTerminalLaunchResult,\n WindowFocusResult,\n // Window focus service\n WindowFocusService,\n getWindowFocusService,\n resetWindowFocusService,\n // External terminal launcher\n ExternalTerminalLauncher,\n getExternalTerminalLauncher,\n resetExternalTerminalLauncher,\n // Process monitor\n Disposable,\n ProcessCheckResult,\n ProcessMonitorOptions,\n ProcessMonitor,\n 
getProcessMonitor,\n resetProcessMonitor,\n DEFAULT_POLL_INTERVAL_MS,\n // Template engine\n TEMPLATE_VARIABLE_REGEX,\n SPECIAL_VARIABLES,\n SubstituteVariablesOptions,\n TemplateVariableError,\n substituteVariables,\n extractTemplateVariables,\n hasVariables,\n containsVariables,\n validateVariables\n} from './utils';\n\n// ============================================================================\n// Config (Centralized Defaults)\n// ============================================================================\n\nexport {\n // Timeouts\n DEFAULT_AI_TIMEOUT_MS,\n // Concurrency\n DEFAULT_PARALLEL_LIMIT,\n DEFAULT_MAX_CONCURRENCY,\n // Session Pool\n DEFAULT_MAX_SESSIONS,\n DEFAULT_IDLE_TIMEOUT_MS,\n DEFAULT_MIN_SESSIONS,\n DEFAULT_CLEANUP_INTERVAL_MS,\n DEFAULT_ACQUIRE_TIMEOUT_MS,\n // Chunk Splitter\n DEFAULT_CHUNK_MAX_SIZE,\n DEFAULT_CHUNK_OVERLAP_SIZE,\n DEFAULT_CHUNK_STRATEGY,\n DEFAULT_CHUNK_PRESERVE_BOUNDARIES,\n // CSV Reader\n DEFAULT_CSV_DELIMITER,\n DEFAULT_CSV_QUOTE,\n DEFAULT_CSV_HAS_HEADER,\n DEFAULT_CSV_SKIP_EMPTY_LINES,\n DEFAULT_CSV_TRIM_FIELDS,\n // Queue Executor\n DEFAULT_RETRY_ATTEMPTS,\n DEFAULT_RETRY_DELAY_MS,\n DEFAULT_QUEUE_MAX_CONCURRENT,\n DEFAULT_QUEUE_PROCESS_ON_STARTUP,\n DEFAULT_QUEUE_AUTO_START,\n DEFAULT_QUEUE_AUTO_PERSIST,\n // Skills\n DEFAULT_SKILLS_DIRECTORY,\n // Text Matching\n DEFAULT_FUZZY_MATCH_THRESHOLD,\n DEFAULT_CONTEXT_LINES,\n DEFAULT_CASE_SENSITIVE\n} from './config';\n\n// ============================================================================\n// AI Service\n// ============================================================================\n\nexport {\n // Types\n AIBackendType,\n AIModel,\n VALID_MODELS,\n AIInvocationResult,\n DEFAULT_PROMPTS,\n InteractiveToolType,\n DEFAULT_MODEL_ID,\n // Model registry\n ModelDefinition,\n MODEL_REGISTRY,\n getModelLabel,\n getModelDescription,\n getModelDefinition,\n getAllModels,\n getActiveModels,\n isValidModelId,\n getModelCount,\n getModelsByTier,\n // AI Command Types\n AICommand,\n AICommandMode,\n AICommandsConfig,\n DEFAULT_AI_COMMANDS,\n SerializedAICommand,\n SerializedAIMenuConfig,\n serializeCommand,\n serializeCommands,\n // Prompt Builder (Pure)\n PromptContext,\n substitutePromptVariables,\n buildPromptFromContext,\n usesTemplateVariables,\n getAvailableVariables,\n // Program Utilities\n checkProgramExists,\n clearProgramExistsCache,\n parseCopilotOutput,\n // Process Types\n AIToolType,\n AIProcessStatus,\n AIProcessType,\n GenericProcessMetadata,\n GenericGroupMetadata,\n TypedProcessOptions,\n ProcessGroupOptions,\n CompleteGroupOptions,\n CodeReviewProcessMetadata,\n DiscoveryProcessMetadata,\n CodeReviewGroupMetadata,\n AIProcess,\n SerializedAIProcess,\n TrackedProcessFields,\n serializeProcess,\n deserializeProcess,\n ProcessEventType,\n ProcessEvent,\n ProcessCounts,\n // Session Pool\n SessionPool,\n IPoolableSession,\n SessionFactory,\n SessionPoolOptions,\n SessionPoolStats,\n // CLI Utilities\n PROMPT_LENGTH_THRESHOLD,\n PROBLEMATIC_CHARS_PATTERN,\n COPILOT_BASE_FLAGS,\n escapeShellArg,\n shouldUseFileDelivery,\n writePromptToTempFile,\n buildCliCommand,\n BuildCliCommandResult,\n BuildCliCommandOptions,\n // Copilot SDK Service\n CopilotSDKService,\n getCopilotSDKService,\n resetCopilotSDKService,\n TokenUsage,\n MCPServerConfigBase,\n MCPLocalServerConfig,\n MCPRemoteServerConfig,\n MCPServerConfig,\n MCPControlOptions,\n SendMessageOptions,\n SDKInvocationResult,\n SDKAvailabilityResult,\n PermissionRequest,\n PermissionRequestResult,\n PermissionHandler,\n 
SessionPoolConfig,\n DEFAULT_SESSION_POOL_CONFIG,\n approveAllPermissions,\n denyAllPermissions,\n // DEFAULT_AI_TIMEOUT_MS is exported from ./config\n // MCP Config Loader\n MCPConfigFile,\n MCPConfigLoadResult,\n getHomeDirectory,\n getMcpConfigPath,\n loadDefaultMcpConfig,\n loadDefaultMcpConfigAsync,\n mergeMcpConfigs,\n clearMcpConfigCache,\n mcpConfigExists,\n getCachedMcpConfig,\n setHomeDirectoryOverride\n} from './ai';\n\n// ============================================================================\n// Map-Reduce Framework\n// ============================================================================\n\nexport {\n // Core types\n WorkItem,\n MapContext,\n MapResult,\n ReduceContext,\n ReduceResult,\n ReduceStats,\n ReduceMode,\n MapReduceOptions,\n Splitter,\n Mapper,\n Reducer,\n MapReduceJob,\n ProgressCallback,\n JobProgress,\n MapReduceResult,\n ExecutionStats,\n PromptTemplate,\n PromptRenderOptions,\n AIInvoker,\n AIInvokerOptions,\n AIInvokerResult,\n ProcessTracker,\n ExecutorOptions,\n SessionMetadata,\n ItemCompleteCallback,\n DEFAULT_MAP_REDUCE_OPTIONS,\n // Executor\n MapReduceExecutor,\n createExecutor,\n // Concurrency limiter\n ConcurrencyLimiter,\n // CancellationError is now exported from ./runtime\n // DEFAULT_MAX_CONCURRENCY is exported from ./config\n // Prompt template\n renderTemplate,\n createTemplate,\n extractVariables,\n validateTemplate,\n composeTemplates,\n TemplateHelpers,\n ResponseParsers,\n MissingVariableError,\n TemplateRenderError,\n // Reducers\n BaseReducer,\n IdentityReducer,\n FlattenReducer,\n AggregatingReducer,\n DeterministicReducer,\n createDeterministicReducer,\n StringDeduplicationReducer,\n NumericAggregationReducer,\n AIReducer,\n createAIReducer,\n createTextSynthesisReducer,\n HybridReducer,\n createHybridReducer,\n createSimpleHybridReducer,\n Deduplicatable,\n DeterministicReducerOptions,\n DeterministicReduceOutput,\n AIReducerOptions,\n TextSynthesisOutput,\n TextSynthesisOptions,\n HybridReducerOptions,\n SimplePolishedOutput,\n // Splitters\n FileSplitter,\n createFileSplitter,\n createExtensionFilteredSplitter,\n BatchedFileSplitter,\n createBatchedFileSplitter,\n ChunkSplitter,\n createChunkSplitter,\n createLineChunkSplitter,\n createParagraphChunkSplitter,\n RuleSplitter,\n createRuleSplitter,\n createAlphabeticRuleSplitter,\n createPriorityRuleSplitter,\n createPatternFilteredRuleSplitter,\n BatchedRuleSplitter,\n createBatchedRuleSplitter,\n FileItem,\n FileInput,\n FileWorkItemData,\n FileSplitterOptions,\n BatchedFileWorkItemData,\n ChunkInput,\n ChunkWorkItemData,\n ChunkSplitterOptions,\n Rule,\n RuleInput,\n RuleWorkItemData,\n RuleSplitterOptions,\n BatchedRuleWorkItemData,\n // Jobs\n createCodeReviewJob,\n createTemplateJob,\n createSimpleTemplateJob,\n createJsonTemplateJob,\n createListProcessingJob,\n createPromptMapJob,\n createPromptMapInput,\n ReviewSeverity,\n ReviewFinding,\n RuleReviewResult,\n ReviewSummary,\n CodeReviewOutput,\n CodeReviewInput,\n CodeReviewJobOptions,\n TemplateItem,\n TemplateJobInput,\n TemplateWorkItemData,\n TemplateItemResult,\n TemplateJobOptions,\n PromptItem,\n PromptMapInput,\n PromptWorkItemData,\n PromptMapResult,\n PromptMapOutput,\n PromptMapSummary,\n PromptMapJobOptions,\n OutputFormat,\n // Temp file utilities\n writeTempFile,\n readTempFile,\n cleanupTempFile,\n cleanupAllTempFiles,\n ensureTempDir,\n generateTempFileName,\n isTempFilePath,\n getTempDirPath,\n TempFileResult\n} from './map-reduce';\n\n// 
============================================================================\n// Pipeline Framework\n// ============================================================================\n\nexport {\n // Types\n PipelineConfig,\n InputConfig,\n MapConfig,\n ReduceConfig,\n FilterConfig,\n CSVSource,\n CSVParseOptions,\n CSVParseResult,\n PipelineParameter,\n GenerateInputConfig,\n FilterOperator,\n FilterRule,\n RuleFilterConfig,\n AIFilterConfig,\n FilterStats,\n FilterResult,\n isCSVSource,\n isGenerateConfig,\n // Executor\n executePipeline,\n executePipelineWithItems,\n parsePipelineYAML,\n parsePipelineYAMLSync,\n PipelineExecutionError,\n // DEFAULT_PARALLEL_LIMIT is exported from ./config\n ExecutePipelineOptions,\n PipelineExecutionResult,\n // CSV Reader\n parseCSVContent,\n readCSVFile,\n readCSVFileSync,\n resolveCSVPath,\n validateCSVHeaders,\n getCSVPreview,\n CSVParseError,\n DEFAULT_CSV_OPTIONS,\n // Template Engine\n substituteTemplate,\n validateItemForTemplate,\n buildFullPrompt,\n buildPromptFromTemplate,\n escapeTemplateValue,\n previewTemplate,\n TemplateError,\n SubstituteTemplateOptions,\n // Filter Executor\n executeFilter,\n executeRuleFilter,\n executeAIFilter,\n executeHybridFilter,\n FilterExecuteOptions,\n FilterProgress,\n // Prompt Resolver\n resolvePromptFile,\n resolvePromptFileSync,\n resolvePromptFileWithDetails,\n resolvePromptPath,\n getSearchPaths,\n extractPromptContent,\n promptFileExists,\n validatePromptFile,\n PromptResolverError,\n PromptResolutionResult,\n // Skill Resolver\n resolveSkill,\n resolveSkillSync,\n resolveSkillWithDetails,\n resolveSkillWithDetailsSync,\n getSkillsDirectory,\n getSkillDirectory,\n getSkillPromptPath,\n skillExists,\n listSkills,\n validateSkill,\n SkillResolverError,\n // DEFAULT_SKILLS_DIRECTORY is exported from ./config\n SKILL_PROMPT_FILENAME,\n SkillResolutionResult,\n SkillMetadata,\n // Input Generator\n generateInputItems,\n buildGeneratePrompt,\n parseGenerateResponse,\n toGeneratedItems,\n getSelectedItems,\n createEmptyItem,\n validateGenerateConfig,\n InputGenerationError,\n GenerateInputResult,\n GeneratedItem,\n GenerateState\n} from './pipeline';\n\n// ============================================================================\n// Queue System\n// ============================================================================\n\nexport {\n // Core types\n TaskType,\n TaskPriority,\n QueueStatus,\n\n // Payload types\n FollowPromptPayload,\n ResolveCommentsPayload,\n CodeReviewPayload,\n AIClarificationPayload,\n CustomTaskPayload,\n TaskPayload,\n\n // Task configuration\n TaskExecutionConfig,\n DEFAULT_TASK_CONFIG,\n\n // Queued task\n QueuedTask,\n CreateTaskInput,\n TaskUpdate,\n\n // Events\n QueueChangeType,\n QueueChangeEvent,\n QueueEvents,\n\n // Executor types\n TaskExecutionResult,\n TaskExecutor,\n QueueExecutorOptions,\n DEFAULT_EXECUTOR_OPTIONS,\n\n // Queue manager types\n TaskQueueManagerOptions,\n DEFAULT_QUEUE_MANAGER_OPTIONS,\n QueueStats,\n\n // Priority helpers\n PRIORITY_VALUES,\n comparePriority,\n\n // Type guards\n isFollowPromptPayload,\n isResolveCommentsPayload,\n isCodeReviewPayload,\n isAIClarificationPayload,\n isCustomTaskPayload,\n\n // Utilities\n generateTaskId,\n\n // Task Queue Manager\n TaskQueueManager,\n createTaskQueueManager,\n\n // Queue Executor\n QueueExecutor,\n createQueueExecutor,\n SimpleTaskExecutor,\n createSimpleTaskExecutor,\n} from './queue';\n", "/**\n * Seeds Phase \u2014 Prompt Templates\n *\n * Prompt templates for Phase 0 topic seed generation. 
These guide the AI\n * to scan a repository and identify architectural topics/concerns.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\n// ============================================================================\n// Seeds Prompt\n// ============================================================================\n\n/**\n * Build the topic seeds prompt for a repository.\n *\n * @param repoPath - Absolute path to the repository\n * @param maxTopics - Maximum number of topics to generate\n * @returns The rendered prompt string\n */\nexport function buildSeedsPrompt(repoPath: string, maxTopics: number): string {\n return `You are a senior software architect analyzing a codebase to identify feature-level topics and concerns.\nYou have access to grep, glob, and view tools to explore the repository at ${repoPath}.\n\n## Your Task\n\nScan the repository and identify up to ${maxTopics} distinct feature-level topics. Each topic should describe a **user-facing capability, architectural concern, or behavioral pattern** \u2014 NOT a file name or directory path.\n\n## Exploration Strategy\n\nFollow these steps in order \u2014 understanding PURPOSE before STRUCTURE is critical:\n\n1. **Documentation first**: Read README.md, CONTRIBUTING.md, ARCHITECTURE.md, or similar files to understand what the project DOES and what features it provides.\n2. **Package manifests**: Read key configuration files to understand the project:\n - Node.js: package.json, tsconfig.json, webpack.config.js\n - Rust: Cargo.toml\n - Go: go.mod, go.sum\n - Python: pyproject.toml, setup.py, requirements.txt\n - Java/Kotlin: pom.xml, build.gradle\n - General: Makefile, Dockerfile, .github/workflows/\n3. **Entry points**: Read main entry point files (index.ts, main.go, main.rs, app.py) to understand what the project exposes and how features are wired together.\n4. **Directory structure**: Run glob(\"*\") to see the overall layout, then examine top-level directories to confirm feature areas you identified from docs.\n5. **CI/CD configs**: Check .github/workflows/, .gitlab-ci.yml, or similar for build/test patterns.\n6. **Config files**: Look for configuration directories (config/, conf/, etc.) that might indicate separate concerns.\n\n## Naming Guidance\n\nTopics should describe WHAT the code does, not WHERE it lives.\n\n**Good topic names** (feature-focused):\n- \"inline-code-review\" (describes the feature)\n- \"ai-powered-analysis\" (describes the capability)\n- \"real-time-sync\" (describes the behavior)\n- \"plugin-architecture\" (describes the pattern)\n\n**Bad topic names** (file/path mirrors \u2014 DO NOT USE):\n- \"extension-entry-point\" (just echoes a file name)\n- \"tree-items\" (just echoes a file name)\n- \"types-and-interfaces\" (describes a code artifact, not a feature)\n- \"src-utils\" (just a directory path)\n\n## Anti-Patterns \u2014 AVOID These\n\n- Do NOT name topics after individual files (e.g., \"file-system-watcher\" for a single watcher file)\n- Do NOT name topics after directory paths (e.g., \"src-shortcuts-code-review\")\n- Do NOT create topics for generic code artifacts like \"types\", \"utilities\", \"helpers\", \"constants\"\n- Do NOT create a topic for every directory \u2014 group related directories into feature-level concerns\n\n## Output Format\n\nReturn a **single JSON object** matching this schema exactly. Do NOT wrap it in markdown code blocks. 
Return raw JSON only.\n\n{\n \"topics\": [\n {\n \"topic\": \"string \u2014 short kebab-case identifier describing the FEATURE (e.g., authentication, api-gateway, real-time-sync)\",\n \"description\": \"string \u2014 1-2 sentence description of what this feature/concern does for users or the system\",\n \"hints\": [\"string \u2014 search terms/keywords to find related code\", \"another hint\"]\n }\n ]\n}\n\n## Rules\n\n- Generate up to ${maxTopics} topics\n- Topic IDs must be unique lowercase kebab-case identifiers that describe features (e.g., \"authentication\", \"api-gateway\", \"pipeline-execution\")\n- Each topic should represent a distinct user-facing feature, architectural concern, or behavioral pattern\n- Hints should be an array of 2-5 search terms that would help locate code related to this topic\n- Focus on what the code DOES for users or the system, not on file/directory names\n- If the repository is small, you may generate fewer topics\n- If the repository is large, prioritize the most important or central topics\n- Topics should be useful for guiding breadth-first exploration of the codebase\n- When in doubt, ask \"what feature does this enable?\" rather than \"what file is this?\"`;\n}\n", "/**\n * Deep Wiki Generator \u2014 JSON Schemas\n *\n * JSON schema strings used to instruct the AI on expected output format.\n * These are embedded in prompts to guide structured AI responses.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\n// ============================================================================\n// Module Graph Schema (Phase 1 output)\n// ============================================================================\n\n/**\n * JSON schema string for the ModuleGraph type.\n * Used in the discovery prompt to specify expected output format.\n */\nexport const MODULE_GRAPH_SCHEMA = `{\n \"project\": {\n \"name\": \"string \u2014 project name from config files\",\n \"description\": \"string \u2014 brief description from README or config\",\n \"language\": \"string \u2014 primary programming language\",\n \"buildSystem\": \"string \u2014 build system (e.g., npm + webpack, cargo, go modules)\",\n \"entryPoints\": [\"string \u2014 entry point file paths relative to repo root\"]\n },\n \"modules\": [\n {\n \"id\": \"string \u2014 unique kebab-case identifier describing the FEATURE (e.g., 'auth-engine', 'pipeline-executor'), NOT the file/directory path (avoid 'src-auth', 'packages-core-src')\",\n \"name\": \"string \u2014 human-readable name describing what this module DOES for users/system (e.g., 'Authentication Engine', 'Pipeline Executor'), NOT the file name\",\n \"path\": \"string \u2014 path relative to repo root (e.g., src/auth/)\",\n \"purpose\": \"string \u2014 what this module does for users or the system (feature-focused, not 'contains files in src/auth')\",\n \"keyFiles\": [\"string \u2014 key file paths relative to repo root\"],\n \"dependencies\": [\"string \u2014 IDs of modules this depends on\"],\n \"dependents\": [\"string \u2014 IDs of modules that depend on this\"],\n \"complexity\": \"low | medium | high\",\n \"category\": \"string \u2014 must match one of the declared categories\"\n }\n ],\n \"categories\": [\n {\n \"name\": \"string \u2014 category identifier\",\n \"description\": \"string \u2014 short description\"\n }\n ],\n \"architectureNotes\": \"string \u2014 free-text summary of the overall architecture\"\n}`;\n\n// ============================================================================\n// Structural Scan Schema (Large repo 
first pass)\n// ============================================================================\n\n/**\n * JSON schema string for the structural scan output (large repo first pass).\n */\nexport const STRUCTURAL_SCAN_SCHEMA = `{\n \"fileCount\": \"number \u2014 estimated total number of files\",\n \"areas\": [\n {\n \"name\": \"string \u2014 descriptive area name focusing on FUNCTIONALITY (e.g., 'AI Pipeline Engine' not just 'packages/core')\",\n \"path\": \"string \u2014 path relative to repo root\",\n \"description\": \"string \u2014 what this area DOES, not just what directory it is\"\n }\n ],\n \"projectInfo\": {\n \"name\": \"string \u2014 project name if found\",\n \"description\": \"string \u2014 project description if found\",\n \"language\": \"string \u2014 primary language if determinable\",\n \"buildSystem\": \"string \u2014 build system if determinable\"\n }\n}`;\n\n// ============================================================================\n// Module Analysis Schema (Phase 3 output)\n// ============================================================================\n\n/**\n * JSON schema string for the ModuleAnalysis type.\n * Used in analysis prompts to specify expected output format.\n */\nexport const MODULE_ANALYSIS_SCHEMA = `{\n \"moduleId\": \"string \u2014 must match the module ID provided\",\n \"overview\": \"string \u2014 high-level overview paragraph\",\n \"keyConcepts\": [\n {\n \"name\": \"string \u2014 concept name\",\n \"description\": \"string \u2014 what this concept represents\",\n \"codeRef\": \"string (optional) \u2014 file path or file:line reference\"\n }\n ],\n \"publicAPI\": [\n {\n \"name\": \"string \u2014 function/class/constant name\",\n \"signature\": \"string \u2014 type signature or declaration\",\n \"description\": \"string \u2014 what it does\"\n }\n ],\n \"internalArchitecture\": \"string \u2014 description of internal structure and design\",\n \"dataFlow\": \"string \u2014 how data moves through this module\",\n \"patterns\": [\"string \u2014 design patterns identified (e.g., Factory, Observer, Middleware)\"],\n \"errorHandling\": \"string \u2014 error handling strategy description\",\n \"codeExamples\": [\n {\n \"title\": \"string \u2014 short title\",\n \"code\": \"string \u2014 the code snippet\",\n \"file\": \"string (optional) \u2014 file path relative to repo root\",\n \"lines\": [0, 0]\n }\n ],\n \"dependencies\": {\n \"internal\": [\n {\n \"module\": \"string \u2014 module ID\",\n \"usage\": \"string \u2014 how this module uses it\"\n }\n ],\n \"external\": [\n {\n \"package\": \"string \u2014 package name\",\n \"usage\": \"string \u2014 how this module uses it\"\n }\n ]\n },\n \"suggestedDiagram\": \"string \u2014 Mermaid diagram code (e.g., graph TD; A-->B)\",\n \"sourceFiles\": [\"string \u2014 all file paths examined during analysis, relative to repo root\"]\n}`;\n\n/**\n * JSON schema string for the reduce output (Phase 4 index/architecture generation).\n */\nexport const REDUCE_OUTPUT_SCHEMA = `{\n \"index\": \"string \u2014 full markdown content for index.md (categorized TOC, project overview, module summaries)\",\n \"architecture\": \"string \u2014 full markdown content for architecture.md (high-level Mermaid diagram, layer descriptions)\",\n \"gettingStarted\": \"string \u2014 full markdown content for getting-started.md (setup, build, run instructions)\"\n}`;\n\n// ============================================================================\n// Module Analysis Validation Helpers\n// 
============================================================================\n\n/**\n * Required fields for a valid ModuleAnalysis\n */\nexport const MODULE_ANALYSIS_REQUIRED_FIELDS = ['moduleId', 'overview'] as const;\n\n/**\n * Valid Mermaid diagram type keywords that a diagram should start with\n */\nexport const VALID_MERMAID_KEYWORDS = [\n 'graph', 'flowchart', 'sequenceDiagram', 'classDiagram', 'stateDiagram',\n 'erDiagram', 'gantt', 'pie', 'gitGraph', 'journey', 'mindmap',\n 'timeline', 'quadrantChart', 'sankey', 'xychart', 'block',\n] as const;\n\n/**\n * Check if a string looks like a valid Mermaid diagram (starts with a known keyword).\n */\nexport function isValidMermaidDiagram(diagram: string): boolean {\n if (!diagram || typeof diagram !== 'string') {\n return false;\n }\n const trimmed = diagram.trim();\n return VALID_MERMAID_KEYWORDS.some(keyword =>\n trimmed.startsWith(keyword) || trimmed.startsWith(`${keyword}-`)\n );\n}\n\n// ============================================================================\n// Validation Helpers\n// ============================================================================\n\n/**\n * Required fields for a valid ModuleGraph\n */\nexport const MODULE_GRAPH_REQUIRED_FIELDS = ['project', 'modules', 'categories'] as const;\n\n/**\n * Required fields for a valid ProjectInfo\n */\nexport const PROJECT_INFO_REQUIRED_FIELDS = ['name', 'language'] as const;\n\n/**\n * Required fields for a valid ModuleInfo\n */\nexport const MODULE_INFO_REQUIRED_FIELDS = ['id', 'name', 'path'] as const;\n\n/**\n * Valid complexity values\n */\nexport const VALID_COMPLEXITY_VALUES = ['low', 'medium', 'high'] as const;\n\n/**\n * Validate that a module ID is in the correct format (lowercase kebab-case)\n */\nexport function isValidModuleId(id: string): boolean {\n return /^[a-z][a-z0-9]*(-[a-z0-9]+)*$/.test(id);\n}\n\n/**\n * Normalize a string into a valid module ID (lowercase kebab-case)\n */\nexport function normalizeModuleId(input: string): string {\n return input\n .toLowerCase()\n .replace(/[^a-z0-9]+/g, '-')\n .replace(/^-+|-+$/g, '')\n .replace(/-{2,}/g, '-') || 'unknown';\n}\n", "/**\n * Shared AI Response Parsing Utility\n *\n * Consolidates the repeated JSON extraction + validation pattern\n * used across multiple response parsers in deep-wiki.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport { extractJSON } from '@plusplusoneplusplus/pipeline-core';\nimport { getErrorMessage } from './error-utils';\n\nexport interface ParseOptions {\n /** Context string for error messages (e.g., 'discovery', 'probe') */\n context: string;\n /** Whether to attempt JSON repair on parse failure. Default: false */\n repair?: boolean;\n}\n\n/**\n * Validates an AI response string, extracts JSON, parses it, and validates it's an object.\n * Throws descriptive errors at each step.\n */\nexport function parseAIJsonResponse(response: string | undefined | null, options: ParseOptions): Record<string, unknown> {\n const { context, repair = false } = options;\n\n if (!response || typeof response !== 'string') {\n throw new Error(`Empty or invalid response from AI (${context})`);\n }\n\n const jsonStr = extractJSON(response);\n if (!jsonStr) {\n throw new Error(`No JSON found in AI response (${context}). 
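// Editorial sketch (not part of the published bundle): standalone copies of the
// two module-ID helpers defined above, with a small demo showing how a path-like
// name is normalized into a valid kebab-case ID. The sample inputs are invented.
function exampleIsValidModuleId(id: string): boolean {
  return /^[a-z][a-z0-9]*(-[a-z0-9]+)*$/.test(id);
}

function exampleNormalizeModuleId(input: string): string {
  return input
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, '-')
    .replace(/^-+|-+$/g, '')
    .replace(/-{2,}/g, '-') || 'unknown';
}

console.log(exampleIsValidModuleId('auth-engine'));            // true
console.log(exampleIsValidModuleId('Auth_Engine'));            // false (uppercase / underscore)
console.log(exampleNormalizeModuleId('packages/Core/src'));    // "packages-core-src"
console.log(exampleNormalizeModuleId('***'));                  // "unknown" (nothing usable left)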
The AI may not have returned structured output.`);\n }\n\n let parsed: unknown;\n try {\n parsed = JSON.parse(jsonStr);\n } catch (parseError) {\n if (repair) {\n const fixed = attemptJsonRepair(jsonStr);\n if (fixed) {\n try {\n parsed = JSON.parse(fixed);\n } catch {\n throw new Error(`Invalid JSON in ${context} response: ${getErrorMessage(parseError)}`);\n }\n } else {\n throw new Error(`Invalid JSON in ${context} response: ${getErrorMessage(parseError)}`);\n }\n } else {\n throw new Error(`Invalid JSON in ${context} response: ${getErrorMessage(parseError)}`);\n }\n }\n\n if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {\n throw new Error(`${context} response is not a JSON object`);\n }\n\n return parsed as Record<string, unknown>;\n}\n\n/**\n * Attempt to repair common JSON formatting issues.\n */\nexport function attemptJsonRepair(jsonStr: string): string | null {\n try {\n let fixed = jsonStr;\n // Replace single quotes with double quotes\n fixed = fixed.replace(/'/g, '\"');\n // Quote unquoted keys\n fixed = fixed.replace(/([{,]\\s*)([a-zA-Z_][a-zA-Z0-9_]*)\\s*:/g, '$1\"$2\":');\n // Remove trailing commas\n fixed = fixed.replace(/,(\\s*[}\\]])/g, '$1');\n // Fix missing commas between properties\n fixed = fixed.replace(/\"\\s*\\n\\s*\"/g, '\",\\n\"');\n JSON.parse(fixed);\n return fixed;\n } catch {\n return null;\n }\n}\n", "/**\n * Seeds Phase \u2014 Response Parser\n *\n * Parses and validates AI JSON responses into TopicSeed structures.\n * Handles JSON extraction from markdown, validation, normalization,\n * and error recovery.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport type { TopicSeed } from '../types';\nimport { normalizeModuleId } from '../schemas';\nimport { parseAIJsonResponse } from '../utils/parse-ai-response';\n\n// ============================================================================\n// Response Parsing\n// ============================================================================\n\n/**\n * Parse an AI response into an array of TopicSeed objects.\n *\n * Handles:\n * 1. Raw JSON \u2192 parse directly\n * 2. JSON in markdown code blocks \u2192 extract and parse\n * 3. Multiple JSON blocks \u2192 take the largest one\n * 4. Trailing text after JSON \u2192 strip and parse\n * 5. Invalid JSON \u2192 attempt repair\n * 6. 
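// Editorial sketch (not part of the published bundle): a standalone copy of the
// attemptJsonRepair helper above, exercised against two common failure modes
// (single quotes plus trailing commas, and unquoted keys). Inputs are invented.
function exampleAttemptJsonRepair(jsonStr: string): string | null {
  try {
    let fixed = jsonStr;
    fixed = fixed.replace(/'/g, '"');                                            // single → double quotes
    fixed = fixed.replace(/([{,]\s*)([a-zA-Z_][a-zA-Z0-9_]*)\s*:/g, '$1"$2":');  // quote bare keys
    fixed = fixed.replace(/,(\s*[}\]])/g, '$1');                                 // drop trailing commas
    fixed = fixed.replace(/"\s*\n\s*"/g, '",\n"');                               // missing commas between lines
    JSON.parse(fixed);                                                           // verify the repair parses
    return fixed;
  } catch {
    return null;
  }
}

console.log(exampleAttemptJsonRepair("{'topic': 'auth', 'hints': ['login',],}"));
// → {"topic": "auth", "hints": ["login"]}
console.log(exampleAttemptJsonRepair('{topics: [1, 2,]}'));
// → {"topics": [1, 2]}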
Missing required fields \u2192 skip invalid entries with warnings\n *\n * @param response - Raw AI response string\n * @returns Parsed and validated TopicSeed array\n * @throws Error if response cannot be parsed into valid seeds\n */\nexport function parseSeedsResponse(response: string): TopicSeed[] {\n const obj = parseAIJsonResponse(response, { context: 'seeds', repair: true });\n if (!('topics' in obj)) {\n throw new Error(\"Missing 'topics' field in AI response\");\n }\n\n return parseTopicsArray(obj.topics);\n}\n\n// ============================================================================\n// Topics Array Parsing\n// ============================================================================\n\n/**\n * Parse and validate an array of TopicSeed objects.\n */\nfunction parseTopicsArray(raw: unknown): TopicSeed[] {\n if (!Array.isArray(raw)) {\n throw new Error(\"'topics' field must be an array\");\n }\n\n const topics: TopicSeed[] = [];\n const warnings: string[] = [];\n\n for (let i = 0; i < raw.length; i++) {\n const item = raw[i];\n if (typeof item !== 'object' || item === null) {\n warnings.push(`Skipping invalid topic at index ${i}: not an object`);\n continue;\n }\n\n const obj = item as Record<string, unknown>;\n\n // Check required fields\n if (typeof obj.topic !== 'string' || !obj.topic) {\n warnings.push(`Skipping topic at index ${i}: missing or invalid 'topic' field`);\n continue;\n }\n\n if (typeof obj.description !== 'string' || !obj.description) {\n warnings.push(`Skipping topic at index ${i}: missing or invalid 'description' field`);\n continue;\n }\n\n // Normalize topic ID to kebab-case\n const topicId = normalizeModuleId(String(obj.topic));\n\n // Parse hints (can be array or comma-separated string)\n let hints: string[] = [];\n if (Array.isArray(obj.hints)) {\n hints = obj.hints\n .filter(h => typeof h === 'string')\n .map(h => String(h).trim())\n .filter(h => h.length > 0);\n } else if (typeof obj.hints === 'string') {\n // Split comma-separated string\n hints = obj.hints\n .split(',')\n .map(h => h.trim())\n .filter(h => h.length > 0);\n } else {\n // Default: use topic name as hint\n hints = [topicId];\n }\n\n // Ensure at least one hint\n if (hints.length === 0) {\n hints = [topicId];\n }\n\n topics.push({\n topic: topicId,\n description: String(obj.description).trim(),\n hints,\n });\n }\n\n // Log warnings to stderr for visibility\n if (warnings.length > 0) {\n for (const w of warnings) {\n process.stderr.write(`[WARN] ${w}\\n`);\n }\n }\n\n // Deduplicate by topic ID\n const seenIds = new Set<string>();\n const deduplicated: TopicSeed[] = [];\n for (const topic of topics) {\n if (seenIds.has(topic.topic)) {\n warnings.push(`Duplicate topic ID '${topic.topic}', keeping first occurrence`);\n continue;\n }\n seenIds.add(topic.topic);\n deduplicated.push(topic);\n }\n\n return deduplicated;\n}\n\n\n", "/**\n * Seeds Phase \u2014 Heuristic Fallback\n *\n * Directory-name-based fallback for generating topic seeds when AI\n * under-generates or fails. 
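// Editorial sketch (not part of the published bundle): the input/output contract
// of parseSeedsResponse in miniature. The response text and topic values are
// invented; the local TopicSeed shape mirrors the fields the parser above fills.
interface ExampleTopicSeed {
  topic: string;        // normalized kebab-case ID
  description: string;
  hints: string[];      // always at least one entry
}

// Per the handling notes above, a markdown-wrapped response like this is accepted:
// the JSON is extracted, the comma-separated hints string is split, and the topic
// ID is normalized to kebab-case.
const sampleSeedsResponse = [
  'Here are the topics:',
  '```json',
  '{ "topics": [ { "topic": "Auth Engine", "description": "Login and sessions", "hints": "login, token" } ] }',
  '```',
].join('\n');

// Expected result of parseSeedsResponse(sampleSeedsResponse):
const expectedSeeds: ExampleTopicSeed[] = [
  { topic: 'auth-engine', description: 'Login and sessions', hints: ['login', 'token'] },
];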
Creates seeds from top-level directory names.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport type { TopicSeed } from '../types';\nimport { normalizeModuleId } from '../schemas';\n\n// ============================================================================\n// Constants\n// ============================================================================\n\n/**\n * Common directories to exclude from topic seed generation.\n * These are typically build artifacts, dependencies, or non-code directories.\n */\nconst EXCLUDED_DIRS = new Set([\n 'node_modules',\n '.git',\n 'dist',\n 'build',\n 'out',\n 'target',\n 'vendor',\n '.vscode',\n '.idea',\n '.vs',\n 'coverage',\n '.nyc_output',\n '.cache',\n 'tmp',\n 'temp',\n '.tmp',\n '.temp',\n 'bin',\n 'obj',\n '.next',\n '.nuxt',\n '.svelte-kit',\n '__pycache__',\n '.pytest_cache',\n '.mypy_cache',\n '.tox',\n 'venv',\n 'env',\n '.venv',\n '.env',\n 'Cargo.lock',\n 'yarn.lock',\n 'package-lock.json',\n '.DS_Store',\n 'Thumbs.db',\n]);\n\n// ============================================================================\n// Heuristic Fallback\n// ============================================================================\n\n/**\n * Generate topic seeds from directory names as a fallback.\n *\n * Scans top-level directories in the repository and creates a TopicSeed\n * for each directory that isn't in the exclusion list.\n *\n * @param repoPath - Absolute path to the repository\n * @returns Array of TopicSeed objects generated from directory names\n */\nexport function generateHeuristicSeeds(repoPath: string): TopicSeed[] {\n const seeds: TopicSeed[] = [];\n\n try {\n const entries = fs.readdirSync(repoPath, { withFileTypes: true });\n\n for (const entry of entries) {\n // Only process directories\n if (!entry.isDirectory()) {\n continue;\n }\n\n const dirName = entry.name;\n\n // Skip excluded directories\n if (EXCLUDED_DIRS.has(dirName)) {\n continue;\n }\n\n // Skip hidden directories (except those explicitly allowed)\n if (dirName.startsWith('.') && !EXCLUDED_DIRS.has(dirName)) {\n // Allow some common hidden directories that might be topics\n // but skip most\n continue;\n }\n\n // Normalize directory name to topic ID\n const topicId = normalizeModuleId(dirName);\n\n // Skip if normalization resulted in empty or invalid ID\n if (!topicId || topicId === 'unknown') {\n continue;\n }\n\n // Create a seed from the directory name\n seeds.push({\n topic: topicId,\n description: `Code related to ${dirName} directory`,\n hints: [dirName, topicId],\n });\n }\n } catch (error) {\n // If we can't read the directory, return empty array\n // The caller should handle this gracefully\n if ((error as NodeJS.ErrnoException).code === 'ENOENT') {\n return [];\n }\n throw error;\n }\n\n return seeds;\n}\n", "/**\n * Seeds Phase \u2014 SDK Session Orchestration\n *\n * Orchestrates the Copilot SDK session for topic seed generation.\n * Creates a direct session with MCP tools (grep, glob, view),\n * sends the seeds prompt, and parses the response.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport {\n getCopilotSDKService,\n type SendMessageOptions,\n type PermissionRequest,\n type PermissionRequestResult,\n} from '@plusplusoneplusplus/pipeline-core';\nimport type { SeedsCommandOptions, TopicSeed } from '../types';\nimport { buildSeedsPrompt } from './prompts';\nimport { parseSeedsResponse } from './response-parser';\nimport { generateHeuristicSeeds } from 
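// Editorial sketch (not part of the published bundle): the per-directory mapping
// that generateHeuristicSeeds above applies, pulled out as a pure function so the
// fallback's output shape is easy to see. The exclusion, hidden-directory, and
// 'unknown'-ID checks from the real implementation are elided; the input is invented.
function exampleSeedFromDirectory(dirName: string): { topic: string; description: string; hints: string[] } {
  // Condensed copy of normalizeModuleId: lowercase, collapse non-alphanumerics to '-', trim.
  const topicId = dirName.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-+|-+$/g, '') || 'unknown';
  return {
    topic: topicId,
    description: `Code related to ${dirName} directory`,
    hints: [dirName, topicId],
  };
}

console.log(exampleSeedFromDirectory('api_gateway'));
// → { topic: 'api-gateway', description: 'Code related to api_gateway directory', hints: ['api_gateway', 'api-gateway'] }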
'./heuristic-fallback';\nimport { printInfo, printWarning, gray } from '../logger';\nimport { getErrorMessage } from '../utils/error-utils';\n\n// ============================================================================\n// Constants\n// ============================================================================\n\n/** Default timeout for seeds session: 2 minutes */\nconst DEFAULT_SEEDS_TIMEOUT_MS = 120_000;\n\n/** Available tools for seeds (read-only file exploration) */\nconst SEEDS_TOOLS = ['view', 'grep', 'glob'];\n\n// ============================================================================\n// Permission Handler\n// ============================================================================\n\n/**\n * Read-only permission handler for seeds sessions.\n * Allows file reads, denies everything else (writes, shell, MCP, URLs).\n */\nfunction readOnlyPermissions(request: PermissionRequest): PermissionRequestResult {\n if (request.kind === 'read') {\n return { kind: 'approved' };\n }\n return { kind: 'denied-by-rules' };\n}\n\n// ============================================================================\n// Seeds Session\n// ============================================================================\n\n/**\n * Run a seeds generation session against a repository.\n *\n * Creates a direct SDK session with read-only MCP tools, sends the\n * seeds prompt, and parses the AI response into TopicSeed array.\n * Falls back to heuristic directory-based generation if AI under-generates.\n *\n * @param repoPath - Absolute path to the repository\n * @param options - Seeds command options (maxTopics, model, verbose)\n * @returns Array of TopicSeed objects\n * @throws Error if SDK is unavailable, AI times out, or response is malformed\n */\nexport async function runSeedsSession(\n repoPath: string,\n options: Pick<SeedsCommandOptions, 'maxTopics' | 'model' | 'verbose'>\n): Promise<TopicSeed[]> {\n const service = getCopilotSDKService();\n\n // Check SDK availability\n printInfo('Checking Copilot SDK availability...');\n const availability = await service.isAvailable();\n if (!availability) {\n throw new SeedsError(\n 'Copilot SDK is not available. Ensure GitHub Copilot is installed and authenticated.',\n 'sdk-unavailable'\n );\n }\n\n // Build the prompt\n printInfo(`Building seeds prompt ${gray(`(max topics: ${options.maxTopics})`)}`);\n const prompt = buildSeedsPrompt(repoPath, options.maxTopics);\n\n // Configure the SDK session\n const sendOptions: SendMessageOptions = {\n prompt,\n workingDirectory: repoPath,\n availableTools: SEEDS_TOOLS,\n onPermissionRequest: readOnlyPermissions,\n usePool: false, // Direct session for MCP tool access\n timeoutMs: DEFAULT_SEEDS_TIMEOUT_MS,\n };\n\n // Set model if specified\n if (options.model) {\n sendOptions.model = options.model;\n }\n\n // Send the message\n printInfo('Sending seeds prompt to AI \u2014 exploring repository structure...');\n const result = await service.sendMessage(sendOptions);\n\n if (!result.success) {\n const errorMsg = result.error || 'Unknown SDK error';\n if (errorMsg.toLowerCase().includes('timeout')) {\n throw new SeedsError(\n `Seeds generation timed out after ${DEFAULT_SEEDS_TIMEOUT_MS / 1000}s. 
` +\n 'Falling back to directory-based heuristic.',\n 'timeout'\n );\n }\n throw new SeedsError(`AI seeds generation failed: ${errorMsg}`, 'ai-error');\n }\n\n if (!result.response) {\n throw new SeedsError('AI returned empty response', 'empty-response');\n }\n\n // Parse the response into TopicSeed array\n printInfo('Parsing AI response into topic seeds...');\n let seeds: TopicSeed[];\n try {\n seeds = parseSeedsResponse(result.response);\n } catch (parseError) {\n // On parse failure, fall back to heuristic\n if (options.verbose) {\n process.stderr.write(\n `[WARN] Failed to parse AI response: ${getErrorMessage(parseError)}. Falling back to heuristic.\\n`\n );\n }\n return generateHeuristicSeeds(repoPath);\n }\n\n // Limit to maxTopics if AI over-generated\n if (seeds.length > options.maxTopics) {\n if (options.verbose) {\n process.stderr.write(\n `[WARN] AI generated ${seeds.length} topics (maximum: ${options.maxTopics}). Truncating to ${options.maxTopics}.\\n`\n );\n }\n return seeds.slice(0, options.maxTopics);\n }\n\n return seeds;\n}\n\n// ============================================================================\n// Error Types\n// ============================================================================\n\n/**\n * Error type for seeds phase failures.\n */\nexport class SeedsError extends Error {\n constructor(\n message: string,\n public readonly code: 'sdk-unavailable' | 'timeout' | 'ai-error' | 'empty-response' | 'parse-error'\n ) {\n super(message);\n this.name = 'SeedsError';\n }\n}\n", "/**\n * Seeds Phase \u2014 Seed File Parser\n *\n * Parses seed files in JSON or CSV format into TopicSeed arrays.\n * Supports both SeedsOutput JSON format and CSV with topic,description,hints columns.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport type { TopicSeed, SeedsOutput } from '../types';\nimport { normalizeModuleId } from '../schemas';\nimport { getErrorMessage } from '../utils/error-utils';\n\n// ============================================================================\n// Seed File Parsing\n// ============================================================================\n\n/**\n * Parse a seed file (JSON or CSV) into an array of TopicSeed objects.\n *\n * @param filePath - Path to the seed file\n * @returns Array of TopicSeed objects\n * @throws Error if file doesn't exist, is empty, or has invalid format\n */\nexport function parseSeedFile(filePath: string): TopicSeed[] {\n const absolutePath = path.resolve(filePath);\n\n // Check file exists\n if (!fs.existsSync(absolutePath)) {\n throw new Error(`Seed file does not exist: ${absolutePath}`);\n }\n\n const stats = fs.statSync(absolutePath);\n if (!stats.isFile()) {\n throw new Error(`Seed file path is not a file: ${absolutePath}`);\n }\n\n // Read file content\n const content = fs.readFileSync(absolutePath, 'utf-8').trim();\n if (!content) {\n throw new Error(`Seed file is empty: ${absolutePath}`);\n }\n\n // Detect format by extension or content\n const ext = path.extname(absolutePath).toLowerCase();\n if (ext === '.json' || content.trim().startsWith('{')) {\n return parseJsonSeedFile(content, absolutePath);\n } else if (ext === '.csv' || content.includes(',')) {\n return parseCsvSeedFile(content, absolutePath);\n } else {\n // Try JSON first, fall back to CSV\n try {\n return parseJsonSeedFile(content, absolutePath);\n } catch {\n return parseCsvSeedFile(content, absolutePath);\n }\n }\n}\n\n// 
============================================================================\n// JSON Parsing\n// ============================================================================\n\n/**\n * Parse a JSON seed file (SeedsOutput format).\n */\nfunction parseJsonSeedFile(content: string, filePath: string): TopicSeed[] {\n let parsed: unknown;\n try {\n parsed = JSON.parse(content);\n } catch (error) {\n throw new Error(`Invalid JSON in seed file ${filePath}: ${getErrorMessage(error)}`);\n }\n\n if (typeof parsed !== 'object' || parsed === null) {\n throw new Error(`Seed file ${filePath} does not contain a JSON object`);\n }\n\n const obj = parsed as Record<string, unknown>;\n\n // Check if it's a SeedsOutput format (has topics array)\n if ('topics' in obj && Array.isArray(obj.topics)) {\n return parseTopicsArray(obj.topics, filePath);\n }\n\n // Otherwise, assume it's a direct array of topics\n if (Array.isArray(parsed)) {\n return parseTopicsArray(parsed, filePath);\n }\n\n throw new Error(`Seed file ${filePath} must contain a 'topics' array or be an array of topics`);\n}\n\n/**\n * Parse an array of topic objects into TopicSeed array.\n */\nfunction parseTopicsArray(raw: unknown[], filePath: string): TopicSeed[] {\n const seeds: TopicSeed[] = [];\n\n for (let i = 0; i < raw.length; i++) {\n const item = raw[i];\n if (typeof item !== 'object' || item === null) {\n throw new Error(`Invalid topic at index ${i} in ${filePath}: not an object`);\n }\n\n const obj = item as Record<string, unknown>;\n\n // Validate required fields\n if (typeof obj.topic !== 'string' || !obj.topic) {\n throw new Error(`Invalid topic at index ${i} in ${filePath}: missing or invalid 'topic' field`);\n }\n\n if (typeof obj.description !== 'string' || !obj.description) {\n throw new Error(`Invalid topic at index ${i} in ${filePath}: missing or invalid 'description' field`);\n }\n\n // Parse hints (can be array or comma-separated string)\n let hints: string[] = [];\n if (Array.isArray(obj.hints)) {\n hints = obj.hints\n .filter(h => typeof h === 'string')\n .map(h => String(h).trim())\n .filter(h => h.length > 0);\n } else if (typeof obj.hints === 'string') {\n hints = obj.hints\n .split(',')\n .map(h => h.trim())\n .filter(h => h.length > 0);\n } else {\n hints = [normalizeModuleId(String(obj.topic))];\n }\n\n if (hints.length === 0) {\n hints = [normalizeModuleId(String(obj.topic))];\n }\n\n seeds.push({\n topic: normalizeModuleId(String(obj.topic)),\n description: String(obj.description).trim(),\n hints,\n });\n }\n\n return seeds;\n}\n\n// ============================================================================\n// CSV Parsing\n// ============================================================================\n\n/**\n * Parse a CSV seed file with columns: topic,description,hints\n */\nfunction parseCsvSeedFile(content: string, filePath: string): TopicSeed[] {\n const lines = content.split('\\n').map(line => line.trim()).filter(line => line.length > 0);\n\n if (lines.length === 0) {\n throw new Error(`CSV seed file ${filePath} is empty`);\n }\n\n // Parse header\n const headerLine = lines[0];\n const headers = parseCsvLine(headerLine);\n\n // Find column indices\n const topicIdx = headers.findIndex(h => h.toLowerCase() === 'topic');\n const descIdx = headers.findIndex(h => h.toLowerCase() === 'description' || h.toLowerCase() === 'desc');\n const hintsIdx = headers.findIndex(h => h.toLowerCase() === 'hints' || h.toLowerCase() === 'hint');\n\n if (topicIdx === -1) {\n throw new Error(`CSV seed file ${filePath} missing 
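// Editorial sketch (not part of the published bundle): the two JSON shapes that
// parseJsonSeedFile above accepts. The seed contents are invented examples.

// Shape 1 — SeedsOutput-style object with a "topics" array:
const objectStyleSeedFile = `{
  "topics": [
    { "topic": "auth-engine", "description": "Login and sessions", "hints": ["login", "token"] }
  ]
}`;

// Shape 2 — a bare array of topic objects; hints may also be a comma-separated string:
const arrayStyleSeedFile = `[
  { "topic": "auth-engine", "description": "Login and sessions", "hints": "login, token" }
]`;

// Both shapes yield the same TopicSeed[]: topic IDs are normalized to kebab-case,
// string hints are split on commas, and missing hints default to the topic ID.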
'topic' column`);\n }\n if (descIdx === -1) {\n throw new Error(`CSV seed file ${filePath} missing 'description' column`);\n }\n\n // Parse data rows\n const seeds: TopicSeed[] = [];\n for (let i = 1; i < lines.length; i++) {\n const row = parseCsvLine(lines[i]);\n\n if (row.length <= Math.max(topicIdx, descIdx)) {\n throw new Error(`CSV seed file ${filePath} row ${i + 1} has insufficient columns`);\n }\n\n const topic = normalizeModuleId(row[topicIdx].trim());\n const description = row[descIdx].trim();\n\n if (!topic || !description) {\n throw new Error(`CSV seed file ${filePath} row ${i + 1} has empty topic or description`);\n }\n\n // Parse hints (comma-separated in CSV)\n let hints: string[] = [];\n if (hintsIdx !== -1 && row[hintsIdx]) {\n hints = row[hintsIdx]\n .split(',')\n .map(h => h.trim())\n .filter(h => h.length > 0);\n }\n\n if (hints.length === 0) {\n hints = [topic];\n }\n\n seeds.push({\n topic,\n description,\n hints,\n });\n }\n\n return seeds;\n}\n\n/**\n * Parse a CSV line, handling quoted fields with commas.\n */\nfunction parseCsvLine(line: string): string[] {\n const fields: string[] = [];\n let current = '';\n let inQuotes = false;\n\n for (let i = 0; i < line.length; i++) {\n const char = line[i];\n const nextChar = line[i + 1];\n\n if (char === '\"') {\n if (inQuotes && nextChar === '\"') {\n // Escaped quote\n current += '\"';\n i++; // Skip next quote\n } else {\n // Toggle quote state\n inQuotes = !inQuotes;\n }\n } else if (char === ',' && !inQuotes) {\n // Field separator\n fields.push(current);\n current = '';\n } else {\n current += char;\n }\n }\n\n // Add last field\n fields.push(current);\n\n return fields;\n}\n", "/**\n * Seeds Phase \u2014 Public API\n *\n * Exports the main seeds generation function and related types.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nexport { runSeedsSession as generateTopicSeeds } from './seeds-session';\nexport { parseSeedFile } from './seed-file-parser';\nexport type { TopicSeed, SeedsOutput, SeedsCommandOptions } from '../types';\nexport { SeedsError } from './seeds-session';\n", "/**\n * Seeds Command\n *\n * Implements the `deep-wiki seeds <repo-path>` command.\n * Generates topic seeds for breadth-first discovery (Phase 0).\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as path from 'path';\nimport * as fs from 'fs';\nimport type { SeedsCommandOptions, SeedsOutput } from '../types';\nimport { generateTopicSeeds, SeedsError } from '../seeds';\nimport {\n Spinner,\n printSuccess,\n printError,\n printWarning,\n printInfo,\n printHeader,\n printKeyValue,\n bold,\n cyan,\n gray,\n} from '../logger';\nimport { EXIT_CODES } from '../cli';\nimport { getErrorMessage } from '../utils/error-utils';\n\n// ============================================================================\n// Constants\n// ============================================================================\n\n/** Deep-wiki version for seeds output */\nconst DEEP_WIKI_VERSION = '1.0.0';\n\n// ============================================================================\n// Execute Seeds Command\n// ============================================================================\n\n/**\n * Execute the seeds command.\n *\n * @param repoPath - Path to the local git repository\n * @param options - Command options\n * @returns Exit code\n */\nexport async function executeSeeds(\n repoPath: string,\n options: SeedsCommandOptions\n): Promise<number> {\n // Resolve to absolute path\n const absoluteRepoPath = 
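// Editorial sketch (not part of the published bundle): a standalone copy of the
// parseCsvLine helper above, exercised on rows with a quoted comma-containing
// field and an escaped quote. The row contents are invented.
function exampleParseCsvLine(line: string): string[] {
  const fields: string[] = [];
  let current = '';
  let inQuotes = false;

  for (let i = 0; i < line.length; i++) {
    const char = line[i];
    const nextChar = line[i + 1];

    if (char === '"') {
      if (inQuotes && nextChar === '"') {
        current += '"';        // escaped quote ("") inside a quoted field
        i++;                   // skip the second quote
      } else {
        inQuotes = !inQuotes;  // opening or closing quote
      }
    } else if (char === ',' && !inQuotes) {
      fields.push(current);    // unquoted comma ends the field
      current = '';
    } else {
      current += char;
    }
  }
  fields.push(current);        // last field
  return fields;
}

console.log(exampleParseCsvLine('auth-engine,"Login, sessions, and tokens","login, token"'));
// → [ 'auth-engine', 'Login, sessions, and tokens', 'login, token' ]
console.log(exampleParseCsvLine('csv-parser,"Handles ""quoted"" fields",parser'));
// → [ 'csv-parser', 'Handles "quoted" fields', 'parser' ]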
path.resolve(repoPath);\n\n // Validate the repo path exists\n if (!fs.existsSync(absoluteRepoPath)) {\n printError(`Repository path does not exist: ${absoluteRepoPath}`);\n return EXIT_CODES.CONFIG_ERROR;\n }\n\n if (!fs.statSync(absoluteRepoPath).isDirectory()) {\n printError(`Repository path is not a directory: ${absoluteRepoPath}`);\n return EXIT_CODES.CONFIG_ERROR;\n }\n\n // Print header\n printHeader('Deep Wiki \u2014 Seeds Generation (Phase 0)');\n printKeyValue('Repository', absoluteRepoPath);\n printKeyValue('Output File', options.output);\n printKeyValue('Max Topics', String(options.maxTopics));\n if (options.model) {\n printKeyValue('Model', options.model);\n }\n process.stderr.write('\\n');\n\n // Run seed generation\n const spinner = new Spinner();\n spinner.start('Generating topic seeds...');\n\n try {\n const seeds = await generateTopicSeeds(absoluteRepoPath, {\n maxTopics: options.maxTopics,\n model: options.model,\n verbose: options.verbose,\n });\n\n spinner.succeed('Seeds generation complete');\n\n // Print summary to stderr\n process.stderr.write('\\n');\n printHeader('Seeds Summary');\n printKeyValue('Topics Found', String(seeds.length));\n\n if (options.verbose) {\n process.stderr.write('\\n');\n printInfo('Topics:');\n for (const seed of seeds) {\n process.stderr.write(\n ` ${cyan(seed.topic)} ${gray('\u2014')} ${seed.description}\\n`\n );\n }\n } else {\n // Print topic list to stderr (non-verbose)\n process.stderr.write('\\n');\n printInfo('Topics:');\n for (const seed of seeds) {\n process.stderr.write(` ${cyan(seed.topic)}\\n`);\n }\n }\n\n // Create output structure\n const output: SeedsOutput = {\n version: DEEP_WIKI_VERSION,\n timestamp: Date.now(),\n repoPath: absoluteRepoPath,\n topics: seeds,\n };\n\n // Write output file\n const outputPath = path.resolve(options.output);\n const outputDir = path.dirname(outputPath);\n\n try {\n fs.mkdirSync(outputDir, { recursive: true });\n fs.writeFileSync(outputPath, JSON.stringify(output, null, 2), 'utf-8');\n process.stderr.write('\\n');\n printSuccess(`Seeds written to ${bold(outputPath)}`);\n } catch (writeError) {\n printWarning(`Could not write to file: ${getErrorMessage(writeError)}`);\n printInfo('Outputting to stdout instead');\n // Fall back to stdout\n process.stdout.write(JSON.stringify(output, null, 2) + '\\n');\n }\n\n return EXIT_CODES.SUCCESS;\n\n } catch (error) {\n spinner.fail('Seeds generation failed');\n\n if (error instanceof SeedsError) {\n switch (error.code) {\n case 'sdk-unavailable':\n printError(error.message);\n printInfo('Setup instructions:');\n printInfo(' 1. Install GitHub Copilot extension');\n printInfo(' 2. Sign in with your GitHub account');\n printInfo(' 3. Ensure Copilot has SDK access');\n return EXIT_CODES.AI_UNAVAILABLE;\n\n case 'timeout':\n printError(error.message);\n return EXIT_CODES.EXECUTION_ERROR;\n\n default:\n printError(error.message);\n return EXIT_CODES.EXECUTION_ERROR;\n }\n }\n\n printError(getErrorMessage(error));\n if (options.verbose && error instanceof Error && error.stack) {\n process.stderr.write(`${gray(error.stack)}\\n`);\n }\n return EXIT_CODES.EXECUTION_ERROR;\n }\n}\n", "/**\n * Discovery Phase \u2014 Prompt Templates\n *\n * Prompt templates for the discovery phase. 
These guide the AI to explore\n * a repository and produce a structured ModuleGraph JSON.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport { MODULE_GRAPH_SCHEMA, STRUCTURAL_SCAN_SCHEMA } from '../schemas';\n\n// ============================================================================\n// Discovery Prompt\n// ============================================================================\n\n/**\n * Build the main discovery prompt for a repository.\n *\n * @param repoPath - Absolute path to the repository\n * @param focus - Optional subtree to focus on\n * @returns The rendered prompt string\n */\nexport function buildDiscoveryPrompt(repoPath: string, focus?: string): string {\n const focusSection = focus\n ? `\\n## Focus Area\\n\\nFocus your analysis on the subtree: ${focus}\\nOnly include modules within or directly related to this area.\\nStill read top-level config files (package.json, README, etc.) for project context.\\n`\n : '';\n\n return `You are a senior software architect analyzing a codebase to produce a comprehensive, feature-oriented module graph.\nYou have access to grep, glob, and view tools to explore the repository at ${repoPath}.\n\n## Your Task\n\nAnalyze the repository and produce a JSON object describing its module structure, dependencies, and architecture. Modules should represent **features, capabilities, and architectural concerns** \u2014 not just files or directories.\n\n## Exploration Strategy\n\nFollow these steps in order \u2014 understand PURPOSE before STRUCTURE:\n\n1. **Documentation first**: Read README.md, ARCHITECTURE.md, or similar files for project context, features, and architecture overview.\n2. **Config files**: Read key configuration files to determine the project type:\n - Node.js: package.json, tsconfig.json, webpack.config.js\n - Rust: Cargo.toml\n - Go: go.mod, go.sum\n - Python: pyproject.toml, setup.py, requirements.txt\n - Java/Kotlin: pom.xml, build.gradle\n - General: Makefile, Dockerfile, .github/workflows/\n3. **Entry points**: Identify and read main entry point files (index.ts, main.go, main.rs, app.py, etc.) to understand what features are wired together.\n4. **File structure**: Run glob(\"**/*\") or glob(\"*\") to understand the overall directory layout and approximate file count.\n5. **Dependency mapping**: Use grep for import/require/use patterns to map dependencies between modules.\n - TypeScript/JavaScript: grep for \"import .* from\" or \"require(\"\n - Go: grep for \"import\" blocks\n - Rust: grep for \"use \" and \"mod \"\n - Python: grep for \"import \" and \"from .* import\"\n6. 
**Monorepo detection**: For monorepos, identify sub-packages and their relationships.\n - Check for workspaces in package.json\n - Check for packages/ or libs/ directories\n - Each sub-package with its own config file is likely a separate module.\n${focusSection}\n## Module Naming Guidance\n\nModule IDs and names should describe WHAT the code does, not WHERE it lives.\n\n**Good module IDs** (feature-focused):\n- \"inline-code-review\" \u2014 describes the feature\n- \"ai-pipeline-engine\" \u2014 describes the capability\n- \"workspace-shortcuts\" \u2014 describes user-facing functionality\n- \"config-migration\" \u2014 describes the architectural concern\n\n**Bad module IDs** (path mirrors \u2014 DO NOT USE):\n- \"src-shortcuts-code-review\" \u2014 just a directory path turned into kebab-case\n- \"packages-deep-wiki-src-cache\" \u2014 echoes the file path\n- \"extension-entry-point\" \u2014 just the file name\n- \"types-and-interfaces\" \u2014 describes a code artifact, not a feature\n\n## Output Format\n\nReturn a **single JSON object** matching this schema exactly. Do NOT wrap it in markdown code blocks. Return raw JSON only.\n\n${MODULE_GRAPH_SCHEMA}\n\n## Rules\n\n- Module IDs must be unique lowercase kebab-case identifiers describing the FEATURE (e.g., \"auth-service\", \"pipeline-execution\", \"real-time-sync\")\n- Do NOT derive module IDs from file paths or directory names \u2014 describe what the module DOES\n- All paths must be relative to the repo root (no absolute paths)\n- Dependencies and dependents must reference other module IDs that exist in the modules array\n- Complexity: \"low\" = simple utility/config, \"medium\" = moderate logic, \"high\" = complex business logic\n- Every module's category must match one of the declared categories\n- architectureNotes should be a 2-4 sentence summary of the overall architecture pattern\n- Include at least 1-3 key files per module (the most important files for understanding it)\n- If you can't determine a field, use a reasonable default rather than leaving it empty\n- Group related files into feature-level modules \u2014 do NOT create one module per file`;\n}\n\n// ============================================================================\n// Structural Scan Prompt (Large Repo First Pass)\n// ============================================================================\n\n/**\n * Build the structural scan prompt for large repositories.\n * This is the first pass that identifies top-level areas without deep-diving.\n *\n * @param repoPath - Absolute path to the repository\n * @returns The rendered prompt string\n */\nexport function buildStructuralScanPrompt(repoPath: string): string {\n return `You are a senior software architect performing a quick structural scan of a large codebase.\nYou have access to grep, glob, and view tools to explore the repository at ${repoPath}.\n\n## Your Task\n\nThis is a LARGE repository. Perform a quick scan to identify the top-level structure WITHOUT deep-diving into any area. Focus on understanding what each area DOES, not just what directory it is.\n\n## Steps\n\n1. Read top-level README.md and config files (package.json, Cargo.toml, go.mod, pyproject.toml, etc.) to understand the project's purpose and features.\n2. Run glob(\"*\") to see top-level files and directories.\n3. Run glob(\"*/\") or similar to identify major subdirectories.\n4. For each major directory, run glob(\"<dir>/*\") to get a sense of its contents (do NOT recurse deeply).\n5. 
Estimate the total file count based on what you see.\n\n## Area Naming Guidance\n\nArea names should describe the FUNCTIONALITY of each area, not just echo the directory name.\n\n**Good**: \"AI Pipeline Engine (packages/core)\" \u2014 describes what it does\n**Bad**: \"packages/core\" \u2014 just the directory path\n\nWhen the directory name is already descriptive (e.g., \"authentication/\"), keep it. When it's generic (e.g., \"src/\", \"lib/\", \"pkg/\"), describe what it contains.\n\n## Output Format\n\nReturn a **single JSON object** matching this schema exactly. Do NOT wrap it in markdown code blocks. Return raw JSON only.\n\n${STRUCTURAL_SCAN_SCHEMA}\n\n## Rules\n\n- List only TOP-LEVEL areas (don't go more than 2 levels deep)\n- Estimate fileCount based on directory sizes you observe\n- Area descriptions should explain what the area DOES, not just restate the directory name\n- Keep descriptions brief (1 sentence each)\n- Include all significant directories (skip node_modules, .git, dist, build, vendor, etc.)`;\n}\n\n// ============================================================================\n// Focused Discovery Prompt (Large Repo Second Pass)\n// ============================================================================\n\n/**\n * Build a focused discovery prompt for a specific area of a large repository.\n * Used in the second pass where each top-level area gets its own session.\n *\n * @param repoPath - Absolute path to the repository\n * @param areaPath - Path of the area to focus on\n * @param areaDescription - Description of the area\n * @param projectName - Name of the overall project\n * @returns The rendered prompt string\n */\nexport function buildFocusedDiscoveryPrompt(\n repoPath: string,\n areaPath: string,\n areaDescription: string,\n projectName: string\n): string {\n return `You are a senior software architect analyzing a specific area of the ${projectName} codebase.\nYou have access to grep, glob, and view tools to explore the repository at ${repoPath}.\n\n## Your Task\n\nAnalyze the \"${areaPath}\" directory in detail. This area is described as: ${areaDescription}\n\nFocus on identifying the **features, capabilities, and behavioral patterns** within this area \u2014 not just listing its files.\n\n## Steps\n\n1. Read any README, docs, or config files within \"${areaPath}\" to understand the area's purpose.\n2. Run glob(\"${areaPath}/**/*\") to see all files in this area.\n3. Read key entry points and config files within this area.\n4. Identify feature-level sub-modules, their purposes, and dependencies.\n5. Use grep to trace imports/exports to understand internal and cross-area dependencies.\n\n## Module Naming Guidance\n\nModule IDs should describe WHAT the code does, not echo directory paths.\n\n**Good**: \"core-auth-engine\", \"pipeline-executor\", \"cache-invalidation\"\n**Bad**: \"packages-core-src-auth\", \"src-pipeline\", \"cache-index\" (path mirrors)\n\n## Output Format\n\nReturn a **single JSON object** matching this schema exactly. Do NOT wrap it in markdown code blocks. 
Return raw JSON only.\n\n${MODULE_GRAPH_SCHEMA}\n\n## Rules\n\n- Module IDs should be prefixed with the area name and describe the FEATURE (e.g., \"core-auth-engine\", \"core-data-pipeline\")\n- Do NOT derive module IDs from file paths \u2014 describe what the module DOES\n- Paths must be relative to the repo root (include the area path prefix)\n- Dependencies may reference modules outside this area \u2014 use your best guess for their IDs\n- For cross-area dependencies, use the convention: area-name + \"-\" + module-name\n- architectureNotes should describe the architecture of THIS area specifically\n- Categories should be specific to this area's contents\n- Group related files into feature-level modules \u2014 do NOT create one module per file`;\n}\n", "/**\n * Discovery Phase \u2014 Response Parser\n *\n * Parses and validates AI JSON responses into ModuleGraph structures.\n * Handles JSON extraction from markdown, validation, normalization,\n * and error recovery.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport { extractJSON } from '@plusplusoneplusplus/pipeline-core';\nimport type { ModuleGraph, ModuleInfo, ProjectInfo, CategoryInfo, TopLevelArea, StructuralScanResult } from '../types';\nimport {\n MODULE_GRAPH_REQUIRED_FIELDS,\n PROJECT_INFO_REQUIRED_FIELDS,\n MODULE_INFO_REQUIRED_FIELDS,\n VALID_COMPLEXITY_VALUES,\n isValidModuleId,\n normalizeModuleId,\n} from '../schemas';\nimport { parseAIJsonResponse, attemptJsonRepair } from '../utils/parse-ai-response';\n\n// ============================================================================\n// Module Graph Parsing\n// ============================================================================\n\n/**\n * Parse an AI response into a ModuleGraph.\n *\n * Handles:\n * 1. Raw JSON \u2192 parse directly\n * 2. JSON in markdown code blocks \u2192 extract and parse\n * 3. Multiple JSON blocks \u2192 take the largest one\n * 4. Trailing text after JSON \u2192 strip and parse\n * 5. Invalid JSON \u2192 attempt repair\n * 6. 
Missing required fields \u2192 fill defaults, warn\n *\n * @param response - Raw AI response string\n * @returns Parsed and validated ModuleGraph\n * @throws Error if response cannot be parsed into a valid graph\n */\nexport function parseModuleGraphResponse(response: string): ModuleGraph {\n const parsed = parseAIJsonResponse(response, { context: 'discovery', repair: true });\n\n // Step 3: Validate and normalize\n return validateAndNormalizeGraph(parsed);\n}\n\n// ============================================================================\n// Structural Scan Parsing\n// ============================================================================\n\n/**\n * Parse an AI response into a StructuralScanResult.\n *\n * @param response - Raw AI response string\n * @returns Parsed structural scan result\n * @throws Error if response cannot be parsed\n */\nexport function parseStructuralScanResponse(response: string): StructuralScanResult {\n if (!response || typeof response !== 'string') {\n throw new Error('Empty or invalid response from AI');\n }\n\n const jsonStr = extractJSON(response);\n if (!jsonStr) {\n throw new Error('No JSON found in structural scan response');\n }\n\n let parsed: unknown;\n try {\n parsed = JSON.parse(jsonStr);\n } catch {\n const fixed = attemptJsonRepair(jsonStr);\n if (fixed) {\n parsed = JSON.parse(fixed);\n } else {\n throw new Error('Invalid JSON in structural scan response');\n }\n }\n\n if (typeof parsed !== 'object' || parsed === null) {\n throw new Error('Structural scan response is not a JSON object');\n }\n\n const raw = parsed as Record<string, unknown>;\n\n return {\n fileCount: typeof raw.fileCount === 'number' ? raw.fileCount : 0,\n areas: parseAreas(raw.areas),\n projectInfo: parsePartialProjectInfo(raw.projectInfo),\n };\n}\n\n// ============================================================================\n// Validation and Normalization\n// ============================================================================\n\n/**\n * Validate and normalize a raw parsed object into a ModuleGraph.\n */\nfunction validateAndNormalizeGraph(raw: Record<string, unknown>): ModuleGraph {\n const warnings: string[] = [];\n\n // Check required top-level fields\n for (const field of MODULE_GRAPH_REQUIRED_FIELDS) {\n if (!(field in raw)) {\n if (field === 'categories') {\n raw.categories = [];\n warnings.push(`Missing '${field}' field, using empty default`);\n } else {\n throw new Error(`Missing required field '${field}' in module graph`);\n }\n }\n }\n\n // Parse project info\n const project = parseProjectInfo(raw.project);\n\n // Parse modules\n const modules = parseModules(raw.modules, warnings);\n\n // Parse categories\n const categories = parseCategories(raw.categories, warnings);\n\n // Parse architecture notes\n const architectureNotes = typeof raw.architectureNotes === 'string'\n ? 
raw.architectureNotes\n : '';\n\n // Post-processing: ensure module categories match declared categories\n const categoryNames = new Set(categories.map(c => c.name));\n for (const mod of modules) {\n if (mod.category && !categoryNames.has(mod.category)) {\n // Auto-add missing category\n categories.push({ name: mod.category, description: `Auto-generated category for ${mod.category}` });\n categoryNames.add(mod.category);\n warnings.push(`Auto-added missing category '${mod.category}'`);\n }\n }\n\n // Post-processing: validate dependency references\n const moduleIds = new Set(modules.map(m => m.id));\n for (const mod of modules) {\n mod.dependencies = mod.dependencies.filter(dep => {\n if (!moduleIds.has(dep)) {\n warnings.push(`Module '${mod.id}' references unknown dependency '${dep}', removing`);\n return false;\n }\n return true;\n });\n mod.dependents = mod.dependents.filter(dep => {\n if (!moduleIds.has(dep)) {\n warnings.push(`Module '${mod.id}' references unknown dependent '${dep}', removing`);\n return false;\n }\n return true;\n });\n }\n\n // Deduplicate modules by ID\n const seenIds = new Set<string>();\n const deduplicatedModules: ModuleInfo[] = [];\n for (const mod of modules) {\n if (seenIds.has(mod.id)) {\n warnings.push(`Duplicate module ID '${mod.id}', keeping first occurrence`);\n continue;\n }\n seenIds.add(mod.id);\n deduplicatedModules.push(mod);\n }\n\n if (warnings.length > 0) {\n // Log warnings to stderr for visibility\n for (const w of warnings) {\n process.stderr.write(`[WARN] ${w}\\n`);\n }\n }\n\n return {\n project,\n modules: deduplicatedModules,\n categories,\n architectureNotes,\n };\n}\n\n/**\n * Parse and validate ProjectInfo.\n */\nfunction parseProjectInfo(raw: unknown): ProjectInfo {\n if (typeof raw !== 'object' || raw === null) {\n throw new Error(\"Missing or invalid 'project' field in module graph\");\n }\n\n const obj = raw as Record<string, unknown>;\n\n // Validate required fields exist (use defaults if missing)\n for (const field of PROJECT_INFO_REQUIRED_FIELDS) {\n if (!(field in obj) || typeof obj[field] !== 'string') {\n // Use defaults for missing fields\n }\n }\n\n return {\n name: String(obj.name || 'unknown'),\n description: String(obj.description || ''),\n language: String(obj.language || 'unknown'),\n buildSystem: String(obj.buildSystem || 'unknown'),\n entryPoints: parseStringArray(obj.entryPoints),\n };\n}\n\n/**\n * Parse and validate an array of ModuleInfo.\n */\nfunction parseModules(raw: unknown, warnings: string[]): ModuleInfo[] {\n if (!Array.isArray(raw)) {\n throw new Error(\"'modules' field must be an array\");\n }\n\n const modules: ModuleInfo[] = [];\n\n for (let i = 0; i < raw.length; i++) {\n const item = raw[i];\n if (typeof item !== 'object' || item === null) {\n warnings.push(`Skipping invalid module at index ${i}`);\n continue;\n }\n\n const obj = item as Record<string, unknown>;\n\n // Check required fields\n let hasRequired = true;\n for (const field of MODULE_INFO_REQUIRED_FIELDS) {\n if (!(field in obj) || typeof obj[field] !== 'string') {\n warnings.push(`Module at index ${i} missing required field '${field}', skipping`);\n hasRequired = false;\n break;\n }\n }\n if (!hasRequired) { continue; }\n\n // Normalize module ID\n let id = String(obj.id);\n if (!isValidModuleId(id)) {\n const normalized = normalizeModuleId(id);\n warnings.push(`Normalized module ID '${id}' \u2192 '${normalized}'`);\n id = normalized;\n }\n\n // Normalize path (remove leading ./ and trailing /)\n let modulePath = String(obj.path || '');\n 
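// Editorial sketch (not part of the published bundle): the post-processing idea
// used by validateAndNormalizeGraph above, reduced to its two core steps — drop
// dependency references to module IDs that do not exist, and keep only the first
// module for a duplicated ID. The module data is invented.
interface ExampleMiniModule { id: string; dependencies: string[] }

function examplePostProcess(modules: ExampleMiniModule[]): ExampleMiniModule[] {
  const ids = new Set(modules.map(m => m.id));
  const seen = new Set<string>();
  const result: ExampleMiniModule[] = [];
  for (const mod of modules) {
    if (seen.has(mod.id)) { continue; }  // duplicate ID → keep first occurrence
    seen.add(mod.id);
    result.push({ ...mod, dependencies: mod.dependencies.filter(dep => ids.has(dep)) });
  }
  return result;
}

console.log(examplePostProcess([
  { id: 'auth-engine', dependencies: ['config-loader', 'does-not-exist'] },
  { id: 'config-loader', dependencies: [] },
  { id: 'auth-engine', dependencies: [] },  // duplicate, dropped
]));
// → [ { id: 'auth-engine', dependencies: ['config-loader'] }, { id: 'config-loader', dependencies: [] } ]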
modulePath = normalizePath(modulePath);\n\n // Validate complexity\n let complexity = String(obj.complexity || 'medium').toLowerCase();\n if (!VALID_COMPLEXITY_VALUES.includes(complexity as typeof VALID_COMPLEXITY_VALUES[number])) {\n warnings.push(`Module '${id}' has invalid complexity '${complexity}', defaulting to 'medium'`);\n complexity = 'medium';\n }\n\n modules.push({\n id,\n name: String(obj.name),\n path: modulePath,\n purpose: String(obj.purpose || ''),\n keyFiles: parseStringArray(obj.keyFiles).map(normalizePath),\n dependencies: parseStringArray(obj.dependencies),\n dependents: parseStringArray(obj.dependents),\n complexity: complexity as ModuleInfo['complexity'],\n category: String(obj.category || 'general'),\n });\n }\n\n return modules;\n}\n\n/**\n * Parse and validate an array of CategoryInfo.\n */\nfunction parseCategories(raw: unknown, warnings: string[]): CategoryInfo[] {\n if (!Array.isArray(raw)) {\n warnings.push(\"'categories' is not an array, using empty default\");\n return [];\n }\n\n const categories: CategoryInfo[] = [];\n\n for (const item of raw) {\n if (typeof item !== 'object' || item === null) { continue; }\n const obj = item as Record<string, unknown>;\n\n if (typeof obj.name !== 'string' || !obj.name) { continue; }\n\n categories.push({\n name: String(obj.name),\n description: String(obj.description || ''),\n });\n }\n\n return categories;\n}\n\n/**\n * Parse areas from structural scan response.\n */\nfunction parseAreas(raw: unknown): TopLevelArea[] {\n if (!Array.isArray(raw)) { return []; }\n\n const areas: TopLevelArea[] = [];\n for (const item of raw) {\n if (typeof item !== 'object' || item === null) { continue; }\n const obj = item as Record<string, unknown>;\n\n areas.push({\n name: String(obj.name || ''),\n path: normalizePath(String(obj.path || '')),\n description: String(obj.description || ''),\n });\n }\n\n return areas;\n}\n\n/**\n * Parse partial ProjectInfo from structural scan response.\n */\nfunction parsePartialProjectInfo(raw: unknown): Partial<ProjectInfo> {\n if (typeof raw !== 'object' || raw === null) { return {}; }\n const obj = raw as Record<string, unknown>;\n\n const result: Partial<ProjectInfo> = {};\n if (typeof obj.name === 'string') { result.name = obj.name; }\n if (typeof obj.description === 'string') { result.description = obj.description; }\n if (typeof obj.language === 'string') { result.language = obj.language; }\n if (typeof obj.buildSystem === 'string') { result.buildSystem = obj.buildSystem; }\n if (Array.isArray(obj.entryPoints)) { result.entryPoints = parseStringArray(obj.entryPoints); }\n\n return result;\n}\n\n// ============================================================================\n// Utility Helpers\n// ============================================================================\n\n/**\n * Safely parse an unknown value as a string array.\n */\nfunction parseStringArray(raw: unknown): string[] {\n if (!Array.isArray(raw)) { return []; }\n return raw\n .filter(item => typeof item === 'string')\n .map(item => String(item));\n}\n\n/**\n * Normalize a file path: remove leading ./, convert backslashes to forward slashes.\n */\nexport function normalizePath(p: string): string {\n return p\n .replace(/\\\\/g, '/')\n .replace(/^\\.\\//, '')\n .replace(/\\/+/g, '/');\n}\n", "/**\n * Discovery Phase \u2014 SDK Session Orchestration\n *\n * Orchestrates the Copilot SDK session for repository discovery.\n * Creates a direct session with MCP tools (grep, glob, view),\n * sends the discovery prompt, and parses the 
response.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport {\n getCopilotSDKService,\n type SendMessageOptions,\n type PermissionRequest,\n type PermissionRequestResult,\n type TokenUsage,\n} from '@plusplusoneplusplus/pipeline-core';\nimport type { DiscoveryOptions, ModuleGraph } from '../types';\nimport { buildDiscoveryPrompt } from './prompts';\nimport { parseModuleGraphResponse } from './response-parser';\nimport { printInfo, printWarning, gray } from '../logger';\nimport { getErrorMessage } from '../utils/error-utils';\n\n// ============================================================================\n// Constants\n// ============================================================================\n\n/** Default timeout for discovery session: 5 minutes */\nconst DEFAULT_DISCOVERY_TIMEOUT_MS = 300_000;\n\n/** Available tools for discovery (read-only file exploration) */\nconst DISCOVERY_TOOLS = ['view', 'grep', 'glob'];\n\n// ============================================================================\n// Permission Handler\n// ============================================================================\n\n/**\n * Read-only permission handler for discovery sessions.\n * Allows file reads, denies everything else (writes, shell, MCP, URLs).\n */\nfunction readOnlyPermissions(request: PermissionRequest): PermissionRequestResult {\n if (request.kind === 'read') {\n return { kind: 'approved' };\n }\n return { kind: 'denied-by-rules' };\n}\n\n// ============================================================================\n// Types\n// ============================================================================\n\n/**\n * Result from a discovery session, including token usage.\n */\nexport interface DiscoverySessionResult {\n /** The parsed module graph */\n graph: ModuleGraph;\n /** Aggregated token usage across all SDK calls (initial + retry) */\n tokenUsage?: TokenUsage;\n}\n\n// ============================================================================\n// Discovery Session\n// ============================================================================\n\n/**\n * Run a discovery session against a repository.\n *\n * Creates a direct SDK session with read-only MCP tools, sends the\n * discovery prompt, and parses the AI response into a ModuleGraph.\n *\n * @param options - Discovery options (repoPath, model, timeout, focus)\n * @returns The parsed ModuleGraph\n * @throws Error if SDK is unavailable, AI times out, or response is malformed\n */\nexport async function runDiscoverySession(options: DiscoveryOptions): Promise<DiscoverySessionResult> {\n const service = getCopilotSDKService();\n\n // Check SDK availability\n printInfo('Checking Copilot SDK availability...');\n const availability = await service.isAvailable();\n if (!availability) {\n throw new DiscoveryError(\n 'Copilot SDK is not available. Ensure GitHub Copilot is installed and authenticated.',\n 'sdk-unavailable'\n );\n }\n\n // Build the prompt\n printInfo(`Building discovery prompt ${options.focus ? 
`with focus: ${options.focus}` : 'for full repository'}...`);\n const prompt = buildDiscoveryPrompt(options.repoPath, options.focus);\n\n // Configure the SDK session\n const timeoutMs = options.timeout || DEFAULT_DISCOVERY_TIMEOUT_MS;\n const sendOptions: SendMessageOptions = {\n prompt,\n workingDirectory: options.repoPath,\n availableTools: DISCOVERY_TOOLS,\n onPermissionRequest: readOnlyPermissions,\n usePool: false, // Direct session for MCP tool access\n timeoutMs,\n };\n\n // Set model if specified\n if (options.model) {\n sendOptions.model = options.model;\n }\n\n // Send the message\n printInfo(`Sending discovery prompt to AI ${gray(`(timeout: ${timeoutMs / 1000}s, tools: ${DISCOVERY_TOOLS.join(', ')})`)}`);\n const result = await service.sendMessage(sendOptions);\n\n if (!result.success) {\n const errorMsg = result.error || 'Unknown SDK error';\n if (errorMsg.toLowerCase().includes('timeout')) {\n throw new DiscoveryError(\n `Discovery timed out after ${timeoutMs / 1000}s. ` +\n 'Try increasing --timeout or using --focus to narrow the scope.',\n 'timeout'\n );\n }\n throw new DiscoveryError(`AI discovery failed: ${errorMsg}`, 'ai-error');\n }\n\n if (!result.response) {\n throw new DiscoveryError('AI returned empty response', 'empty-response');\n }\n\n // Parse the response into a ModuleGraph\n printInfo('Parsing AI response into module graph...');\n try {\n const graph = parseModuleGraphResponse(result.response);\n printInfo(`Parsed ${graph.modules.length} modules across ${graph.categories.length} categories`);\n return { graph, tokenUsage: result.tokenUsage };\n } catch (parseError) {\n // On parse failure, retry once with a stricter prompt\n printWarning(`Failed to parse response: ${getErrorMessage(parseError)}. Retrying with stricter prompt...`);\n const retryPrompt = prompt + '\\n\\nIMPORTANT: Your previous response was not valid JSON. Please return ONLY a raw JSON object. No markdown, no explanation, just JSON.';\n\n const retryOptions: SendMessageOptions = {\n ...sendOptions,\n prompt: retryPrompt,\n };\n\n const retryResult = await service.sendMessage(retryOptions);\n\n if (!retryResult.success || !retryResult.response) {\n throw new DiscoveryError(\n `Failed to parse AI response: ${getErrorMessage(parseError)}`,\n 'parse-error'\n );\n }\n\n const graph = parseModuleGraphResponse(retryResult.response);\n printInfo(`Retry succeeded \u2014 parsed ${graph.modules.length} modules`);\n // Merge tokenUsage from both attempts\n const mergedUsage = mergeTokenUsage(result.tokenUsage, retryResult.tokenUsage);\n return { graph, tokenUsage: mergedUsage };\n }\n}\n\n// ============================================================================\n// Helpers\n// ============================================================================\n\n/**\n * Merge two TokenUsage objects by summing their fields.\n */\nfunction mergeTokenUsage(a?: TokenUsage, b?: TokenUsage): TokenUsage | undefined {\n if (!a && !b) { return undefined; }\n if (!a) { return b; }\n if (!b) { return a; }\n return {\n inputTokens: a.inputTokens + b.inputTokens,\n outputTokens: a.outputTokens + b.outputTokens,\n cacheReadTokens: a.cacheReadTokens + b.cacheReadTokens,\n cacheWriteTokens: a.cacheWriteTokens + b.cacheWriteTokens,\n totalTokens: a.totalTokens + b.totalTokens,\n cost: (a.cost ?? 0) + (b.cost ?? 
0) || undefined,\n turnCount: a.turnCount + b.turnCount,\n };\n}\n\n// ============================================================================\n// Error Types\n// ============================================================================\n\n/**\n * Error type for discovery phase failures.\n */\nexport class DiscoveryError extends Error {\n constructor(\n message: string,\n public readonly code: 'sdk-unavailable' | 'timeout' | 'ai-error' | 'empty-response' | 'parse-error'\n ) {\n super(message);\n this.name = 'DiscoveryError';\n }\n}\n", "/**\n * Cache Layer \u2014 Git Utilities\n *\n * Provides git-related utilities for cache invalidation.\n * Uses `git rev-parse HEAD` for repo-wide hash detection,\n * `git log -1 --format=%H -- <folder>` for subfolder-scoped hash,\n * and `git diff --name-only` for change detection.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as path from 'path';\nimport { execAsync } from '@plusplusoneplusplus/pipeline-core';\n\n// ============================================================================\n// Git Root Detection\n// ============================================================================\n\n/**\n * Get the git root directory for a path.\n *\n * @param repoPath - Path inside a git repository\n * @returns The absolute path to the git root, or null if not inside a git repo\n */\nexport async function getGitRoot(repoPath: string): Promise<string | null> {\n try {\n const result = await execAsync('git rev-parse --show-toplevel', { cwd: repoPath });\n const root = result.stdout.trim();\n if (root.length > 0) {\n return root;\n }\n return null;\n } catch {\n return null;\n }\n}\n\n// ============================================================================\n// Git Hash Detection\n// ============================================================================\n\n/**\n * Get the current HEAD hash for a git repository.\n *\n * @param repoPath - Path to the git repository\n * @returns The HEAD hash string, or null if not a git repo\n */\nexport async function getRepoHeadHash(repoPath: string): Promise<string | null> {\n try {\n const result = await execAsync('git rev-parse HEAD', { cwd: repoPath });\n const hash = result.stdout.trim();\n // Validate it looks like a git hash\n if (/^[0-9a-f]{40}$/.test(hash)) {\n return hash;\n }\n return null;\n } catch {\n return null;\n }\n}\n\n/**\n * Get a folder-scoped HEAD hash for a path.\n *\n * When `repoPath` is a subfolder of a git repo (not the repo root), returns\n * the hash of the last commit that touched files within that subfolder via\n * `git log -1 --format=%H -- <folder>`. 
This prevents cache invalidation\n * when unrelated parts of the repo change.\n *\n * When `repoPath` IS the git root, falls back to `git rev-parse HEAD`\n * (same as `getRepoHeadHash`).\n *\n * @param repoPath - Path to the git repository or subfolder\n * @returns The scoped hash string, or null if not a git repo\n */\nexport async function getFolderHeadHash(repoPath: string): Promise<string | null> {\n try {\n const gitRoot = await getGitRoot(repoPath);\n if (!gitRoot) {\n return null;\n }\n\n const resolvedRepo = path.resolve(repoPath);\n const resolvedRoot = path.resolve(gitRoot);\n\n // If repoPath IS the git root, fall back to repo-wide HEAD\n if (resolvedRepo === resolvedRoot) {\n return getRepoHeadHash(repoPath);\n }\n\n // Subfolder: get the last commit that touched this folder\n // Use the relative path from git root to the subfolder\n const relativePath = path.relative(resolvedRoot, resolvedRepo).replace(/\\\\/g, '/');\n const result = await execAsync(\n `git log -1 --format=%H -- \"${relativePath}\"`,\n { cwd: resolvedRoot }\n );\n const hash = result.stdout.trim();\n\n // Validate it looks like a git hash\n if (/^[0-9a-f]{40}$/.test(hash)) {\n return hash;\n }\n\n // No commits touching this folder \u2014 fall back to repo HEAD\n return getRepoHeadHash(repoPath);\n } catch {\n return null;\n }\n}\n\n// ============================================================================\n// Change Detection\n// ============================================================================\n\n/**\n * Get the list of files that changed since a given git hash.\n *\n * When `scopePath` is provided, the returned file list is filtered to only\n * include files under the scope path, and paths are remapped to be relative\n * to `scopePath` instead of the git root. This is essential for subfolder\n * cache invalidation where module paths in the graph are relative to the\n * subfolder, not the git root.\n *\n * @param repoPath - Path to the git repository\n * @param sinceHash - Git hash to compare against\n * @param scopePath - Optional subfolder to scope results to. When provided,\n * only files under this path are returned, with paths\n * relative to it.\n * @returns Array of changed file paths, or null on error\n */\nexport async function getChangedFiles(\n repoPath: string,\n sinceHash: string,\n scopePath?: string\n): Promise<string[] | null> {\n try {\n const result = await execAsync(`git diff --name-only ${sinceHash} HEAD`, { cwd: repoPath });\n let files = result.stdout\n .trim()\n .split('\\n')\n .filter(line => line.length > 0);\n\n // If a scope path is specified, filter and remap paths\n if (scopePath) {\n const gitRoot = await getGitRoot(repoPath);\n if (gitRoot) {\n const resolvedScope = path.resolve(scopePath);\n const resolvedRoot = path.resolve(gitRoot);\n // Get the scope's path relative to git root (forward slashes)\n const scopeRelative = path.relative(resolvedRoot, resolvedScope).replace(/\\\\/g, '/');\n\n if (scopeRelative && scopeRelative !== '.') {\n const prefix = scopeRelative + '/';\n files = files\n .filter(f => {\n const normalized = f.replace(/\\\\/g, '/');\n return normalized.startsWith(prefix) || normalized === scopeRelative;\n })\n .map(f => {\n const normalized = f.replace(/\\\\/g, '/');\n return normalized.startsWith(prefix) ? 
normalized.slice(prefix.length) : normalized;\n });\n }\n // If scopeRelative is empty or '.', repoPath IS the git root \u2014 no filtering needed\n }\n }\n\n return files;\n } catch {\n return null;\n }\n}\n\n/**\n * Check if a repository has any changes since a given hash.\n *\n * @param repoPath - Path to the git repository\n * @param sinceHash - Git hash to compare against\n * @returns True if there are changes, false if unchanged, null on error\n */\nexport async function hasChanges(repoPath: string, sinceHash: string): Promise<boolean | null> {\n const files = await getChangedFiles(repoPath, sinceHash);\n if (files === null) {\n return null;\n }\n return files.length > 0;\n}\n\n/**\n * Check if git is available in the system PATH.\n *\n * @returns True if git command is available\n */\nexport async function isGitAvailable(): Promise<boolean> {\n try {\n await execAsync('git --version');\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Check if a path is inside a git repository.\n *\n * @param dirPath - Path to check\n * @returns True if inside a git repo\n */\nexport async function isGitRepo(dirPath: string): Promise<boolean> {\n try {\n const result = await execAsync('git rev-parse --is-inside-work-tree', { cwd: dirPath });\n return result.stdout.trim() === 'true';\n } catch {\n return false;\n }\n}\n", "/**\n * Cache Layer \u2014 Shared Utilities\n *\n * Provides low-level read/write/clear/scan primitives used by all cache modules\n * (graph, consolidation, analysis, article, discovery). Eliminates code duplication\n * across cache operations while keeping phase-specific validation logic inline.\n *\n * Key features:\n * - Atomic writes (write to temp file, then rename) to prevent partial writes on crash\n * - Generic read with optional validation predicate\n * - Generic scan for batch cache lookups\n * - Unified error handling (return null / false on any error)\n *\n * Internal-only: not exported from the package.\n */\n\nimport * as fs from 'fs';\nimport * as path from 'path';\n\n// ============================================================================\n// Read Primitives\n// ============================================================================\n\n/**\n * Read and parse a JSON cache file.\n * Returns null on missing file, corrupted JSON, or any I/O error.\n *\n * @param cachePath - Absolute path to the cache file\n * @returns Parsed data of type T, or null on error\n */\nexport function readCacheFile<T>(cachePath: string): T | null {\n try {\n if (!fs.existsSync(cachePath)) {\n return null;\n }\n const content = fs.readFileSync(cachePath, 'utf-8');\n return JSON.parse(content) as T;\n } catch {\n return null; // Graceful degradation\n }\n}\n\n/**\n * Read a cache file and validate with a custom predicate.\n * Returns null if the file is missing, corrupted, or fails validation.\n *\n * @param cachePath - Absolute path to the cache file\n * @param validate - Predicate that returns true if the data is valid\n * @returns Parsed and validated data of type T, or null\n */\nexport function readCacheFileIf<T>(\n cachePath: string,\n validate: (data: T) => boolean\n): T | null {\n const data = readCacheFile<T>(cachePath);\n if (data === null) {\n return null;\n }\n return validate(data) ? 
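The git utilities above compose naturally: detect a repo, compute a folder-scoped hash, then list changes scoped to that folder. A minimal sketch of that composition follows; the import path and the logging wrapper are assumptions.

import { getGitRoot, getFolderHeadHash, getChangedFiles, isGitRepo } from './cache/git-utils'; // path assumed

async function describeRepoState(repoPath: string): Promise<void> {
  if (!(await isGitRepo(repoPath))) {
    console.log('Not inside a git repository; caching will be skipped');
    return;
  }
  const root = await getGitRoot(repoPath);
  const scopedHash = await getFolderHeadHash(repoPath); // folder-scoped when repoPath is a subfolder
  console.log(`git root: ${root}, scoped HEAD: ${scopedHash}`);

  if (scopedHash) {
    // Passing repoPath as scopePath remaps results to be relative to repoPath
    const changed = await getChangedFiles(repoPath, scopedHash, repoPath);
    console.log('changed files:', changed ?? '(could not determine)');
  }
}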
data : null;\n}\n\n// ============================================================================\n// Write Primitives\n// ============================================================================\n\n/**\n * Write a JSON cache file atomically, creating parent directories as needed.\n *\n * Uses atomic write pattern (write to .tmp, then rename) to prevent\n * partial writes on crash. This is strictly safer than direct writeFileSync.\n *\n * @param cachePath - Absolute path to the cache file\n * @param data - Data to serialize and write\n */\nexport function writeCacheFile<T>(cachePath: string, data: T): void {\n const dir = path.dirname(cachePath);\n fs.mkdirSync(dir, { recursive: true });\n\n const tempPath = cachePath + '.tmp';\n fs.writeFileSync(tempPath, JSON.stringify(data, null, 2), 'utf-8');\n fs.renameSync(tempPath, cachePath);\n}\n\n// ============================================================================\n// Clear Primitives\n// ============================================================================\n\n/**\n * Delete a single cache file.\n *\n * @param cachePath - Absolute path to the file to delete\n * @returns True if the file was deleted, false if it didn't exist or on error\n */\nexport function clearCacheFile(cachePath: string): boolean {\n if (!fs.existsSync(cachePath)) {\n return false;\n }\n try {\n fs.unlinkSync(cachePath);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Delete a cache directory recursively.\n *\n * @param dirPath - Absolute path to the directory to delete\n * @returns True if the directory was deleted, false if it didn't exist or on error\n */\nexport function clearCacheDir(dirPath: string): boolean {\n if (!fs.existsSync(dirPath)) {\n return false;\n }\n try {\n fs.rmSync(dirPath, { recursive: true, force: true });\n return true;\n } catch {\n return false;\n }\n}\n\n// ============================================================================\n// Scan Primitives\n// ============================================================================\n\n/**\n * Scan for individually cached items by ID.\n *\n * Generic scanner that covers all scan/scanAny variants:\n * - Resolves each ID to a file path via `pathResolver`\n * - Reads and validates via `validator`\n * - Extracts the inner data via `extractor`\n *\n * @param ids - IDs to look up in the cache\n * @param pathResolver - Maps an ID to a cache file path (or null if not found)\n * @param validator - Returns true if the cached data is valid\n * @param extractor - Extracts the inner result from the cached wrapper\n * @returns Object with `found` (valid results) and `missing` (IDs not in cache or invalid)\n */\nexport function scanCacheItems<TCache, TResult>(\n ids: string[],\n pathResolver: (id: string) => string | null,\n validator: (cached: TCache) => boolean,\n extractor: (cached: TCache) => TResult\n): { found: TResult[]; missing: string[] } {\n const found: TResult[] = [];\n const missing: string[] = [];\n\n for (const id of ids) {\n const cachePath = pathResolver(id);\n if (!cachePath) {\n missing.push(id);\n continue;\n }\n\n const cached = readCacheFile<TCache>(cachePath);\n if (cached && validator(cached)) {\n found.push(extractor(cached));\n } else {\n missing.push(id);\n }\n }\n\n return { found, missing };\n}\n\n/**\n * Scan for individually cached items by ID, returning results as a Map.\n *\n * Similar to `scanCacheItems` but returns a Map<string, TResult> instead of an array.\n * Used by discovery cache functions that return Map-based results (probes, areas).\n 
*\n * @param ids - IDs to look up in the cache\n * @param pathResolver - Maps an ID to a cache file path (or null if not found)\n * @param validator - Returns true if the cached data is valid\n * @param extractor - Extracts the inner result from the cached wrapper\n * @returns Object with `found` (Map of valid results) and `missing` (IDs not in cache or invalid)\n */\nexport function scanCacheItemsMap<TCache, TResult>(\n ids: string[],\n pathResolver: (id: string) => string | null,\n validator: (cached: TCache) => boolean,\n extractor: (cached: TCache) => TResult\n): { found: Map<string, TResult>; missing: string[] } {\n const found = new Map<string, TResult>();\n const missing: string[] = [];\n\n for (const id of ids) {\n const cachePath = pathResolver(id);\n if (!cachePath) {\n missing.push(id);\n continue;\n }\n\n const cached = readCacheFile<TCache>(cachePath);\n if (cached && validator(cached)) {\n found.set(id, extractor(cached));\n } else {\n missing.push(id);\n }\n }\n\n return { found, missing };\n}\n", "/**\n * Cache Layer \u2014 Shared Constants\n *\n * Central location for cache directory names, file names, and version constants.\n * Used by all domain-specific cache modules to avoid duplication.\n */\n\nimport * as path from 'path';\n\n/** Name of the cache directory */\nexport const CACHE_DIR_NAME = '.wiki-cache';\n\n/** Name of the cached module graph file */\nexport const GRAPH_CACHE_FILE = 'module-graph.json';\n\n/** Subdirectory for per-module analysis cache */\nexport const ANALYSES_DIR = 'analyses';\n\n/** Subdirectory for per-module article cache */\nexport const ARTICLES_DIR = 'articles';\n\n/** Name of the cached consolidated graph file */\nexport const CONSOLIDATED_GRAPH_FILE = 'consolidated-graph.json';\n\n/** Metadata file for the analyses cache */\nexport const ANALYSES_METADATA_FILE = '_metadata.json';\n\n/** Metadata file for reduce-phase article cache */\nexport const REDUCE_METADATA_FILE = '_reduce-metadata.json';\n\n/** Prefix for reduce article cache files */\nexport const REDUCE_ARTICLE_PREFIX = '_reduce-';\n\n/** Current version for cache metadata */\nexport const CACHE_VERSION = '1.0.0';\n\n/**\n * Get the cache directory path.\n *\n * @param outputDir - Output directory (the cache is stored inside it)\n * @returns Absolute path to the cache directory\n */\nexport function getCacheDir(outputDir: string): string {\n return path.join(path.resolve(outputDir), CACHE_DIR_NAME);\n}\n", "/**\n * Discovery Cache \u2014 Intermediate Discovery Artifacts\n *\n * Caches intermediate results from the discovery phase (Phase 1):\n * seeds, probe results, structural scans, area sub-graphs, and\n * round progress metadata. 
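The shared primitives above (and getCacheDir from the constants module) are the building blocks every domain cache uses. A minimal round-trip sketch, assuming a hypothetical wrapper type and file name; cache-utils is internal to the package, so the import is shown only to illustrate the primitives.

import * as path from 'path';
import { readCacheFileIf, writeCacheFile } from './cache/cache-utils'; // internal module; path assumed
import { getCacheDir } from './cache/cache-constants'; // path assumed

// Hypothetical wrapper mirroring the { data, gitHash, timestamp } pattern used by the real caches.
interface CachedNotes { data: string[]; gitHash: string; timestamp: number; }

function notesRoundTrip(outputDir: string, gitHash: string): string[] | null {
  const file = path.join(getCacheDir(outputDir), 'notes.json'); // 'notes.json' is made up for illustration
  writeCacheFile<CachedNotes>(file, { data: ['a', 'b'], gitHash, timestamp: Date.now() });
  // Read back only if the git hash still matches (returns null otherwise)
  const cached = readCacheFileIf<CachedNotes>(file, (d) => d.gitHash === gitHash);
  return cached?.data ?? null;
}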
Enables crash recovery and avoids\n * redundant AI calls on retry.\n *\n * Cache structure:\n * .wiki-cache/\n * \u251C\u2500\u2500 discovery/\n * \u2502 \u251C\u2500\u2500 _metadata.json # Round progress, convergence state\n * \u2502 \u251C\u2500\u2500 seeds.json # Cached auto-generated seeds\n * \u2502 \u251C\u2500\u2500 structural-scan.json # Large-repo structural scan\n * \u2502 \u251C\u2500\u2500 probes/ # Per-topic probe results\n * \u2502 \u2502 \u251C\u2500\u2500 auth.json\n * \u2502 \u2502 \u2514\u2500\u2500 ...\n * \u2502 \u2514\u2500\u2500 areas/ # Per-area sub-graphs (large repo)\n * \u2502 \u251C\u2500\u2500 frontend.json\n * \u2502 \u2514\u2500\u2500 ...\n *\n * Each file wraps its payload with { data, gitHash, timestamp } for invalidation.\n * Follows the same pattern as saveAnalysis() / scanIndividualAnalysesCache().\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as path from 'path';\nimport type {\n TopicSeed,\n StructuralScanResult,\n ModuleGraph,\n} from '../types';\nimport type {\n TopicProbeResult,\n} from '../discovery/iterative/types';\nimport type {\n CachedProbeResult,\n CachedSeeds,\n CachedStructuralScan,\n CachedAreaGraph,\n DiscoveryProgressMetadata,\n} from './types';\nimport { normalizeModuleId } from '../schemas';\nimport { readCacheFile, readCacheFileIf, writeCacheFile, clearCacheDir, scanCacheItemsMap } from './cache-utils';\n\n// ============================================================================\n// Constants\n// ============================================================================\n\n/** Name of the cache directory (must match index.ts) */\nconst CACHE_DIR_NAME = '.wiki-cache';\n\n/** Subdirectory for discovery cache */\nconst DISCOVERY_DIR = 'discovery';\n\n/** Subdirectory for per-topic probe results */\nconst PROBES_DIR = 'probes';\n\n/** Subdirectory for per-area sub-graphs */\nconst AREAS_DIR = 'areas';\n\n/** File name for discovery progress metadata */\nconst METADATA_FILE = '_metadata.json';\n\n/** File name for cached seeds */\nconst SEEDS_FILE = 'seeds.json';\n\n/** File name for cached structural scan */\nconst STRUCTURAL_SCAN_FILE = 'structural-scan.json';\n\n// ============================================================================\n// Cache Paths\n// ============================================================================\n\n/**\n * Get the discovery cache directory path.\n *\n * @param outputDir - Output directory (the cache is stored inside it)\n * @returns Absolute path to the discovery cache directory\n */\nexport function getDiscoveryCacheDir(outputDir: string): string {\n return path.join(path.resolve(outputDir), CACHE_DIR_NAME, DISCOVERY_DIR);\n}\n\n/**\n * Get the probes cache directory.\n */\nfunction getProbesCacheDir(outputDir: string): string {\n return path.join(getDiscoveryCacheDir(outputDir), PROBES_DIR);\n}\n\n/**\n * Get the areas cache directory.\n */\nfunction getAreasCacheDir(outputDir: string): string {\n return path.join(getDiscoveryCacheDir(outputDir), AREAS_DIR);\n}\n\n/**\n * Get the path to a single cached probe result.\n */\nfunction getProbeCachePath(outputDir: string, topic: string): string {\n const slug = normalizeModuleId(topic);\n return path.join(getProbesCacheDir(outputDir), `${slug}.json`);\n}\n\n/**\n * Get the path to a single cached area sub-graph.\n */\nfunction getAreaCachePath(outputDir: string, areaId: string): string {\n const slug = normalizeModuleId(areaId);\n return path.join(getAreasCacheDir(outputDir), `${slug}.json`);\n}\n\n// Local helpers 
(atomicWriteFileSync and safeReadJSON) have been replaced\n// by shared primitives from cache-utils.ts (writeCacheFile and readCacheFile).\n\n// ============================================================================\n// Seeds Cache\n// ============================================================================\n\n/**\n * Save auto-generated seeds to the cache.\n *\n * @param seeds - The generated topic seeds\n * @param outputDir - Output directory\n * @param gitHash - Current git hash\n */\nexport function saveSeedsCache(\n seeds: TopicSeed[],\n outputDir: string,\n gitHash: string\n): void {\n writeCacheFile<CachedSeeds>(path.join(getDiscoveryCacheDir(outputDir), SEEDS_FILE), {\n seeds,\n gitHash,\n timestamp: Date.now(),\n });\n}\n\n/**\n * Get cached seeds if valid (git hash matches).\n *\n * @param outputDir - Output directory\n * @param gitHash - Current git hash for validation\n * @returns Cached seeds, or null if cache miss\n */\nexport function getCachedSeeds(\n outputDir: string,\n gitHash: string\n): TopicSeed[] | null {\n const cached = readCacheFileIf<CachedSeeds>(\n path.join(getDiscoveryCacheDir(outputDir), SEEDS_FILE),\n (d) => !!d.seeds && d.gitHash === gitHash\n );\n return cached?.seeds ?? null;\n}\n\n/**\n * Get cached seeds regardless of git hash (--use-cache mode).\n *\n * @param outputDir - Output directory\n * @returns Cached seeds, or null if not found\n */\nexport function getCachedSeedsAny(\n outputDir: string\n): TopicSeed[] | null {\n const cached = readCacheFileIf<CachedSeeds>(\n path.join(getDiscoveryCacheDir(outputDir), SEEDS_FILE),\n (d) => !!d.seeds\n );\n return cached?.seeds ?? null;\n}\n\n// ============================================================================\n// Probe Results Cache\n// ============================================================================\n\n/**\n * Save a single probe result to the cache.\n *\n * @param topic - The topic that was probed\n * @param result - The probe result\n * @param outputDir - Output directory\n * @param gitHash - Current git hash\n */\nexport function saveProbeResult(\n topic: string,\n result: TopicProbeResult,\n outputDir: string,\n gitHash: string\n): void {\n writeCacheFile<CachedProbeResult>(getProbeCachePath(outputDir, topic), {\n probeResult: result,\n gitHash,\n timestamp: Date.now(),\n });\n}\n\n/**\n * Get a cached probe result if valid (git hash matches).\n *\n * @param topic - The topic to look up\n * @param outputDir - Output directory\n * @param gitHash - Current git hash for validation\n * @returns Cached probe result, or null if cache miss\n */\nexport function getCachedProbeResult(\n topic: string,\n outputDir: string,\n gitHash: string\n): TopicProbeResult | null {\n const cached = readCacheFileIf<CachedProbeResult>(\n getProbeCachePath(outputDir, topic),\n (d) => !!d.probeResult && d.gitHash === gitHash\n );\n return cached?.probeResult ?? 
null;\n}\n\n/**\n * Scan for cached probe results across multiple topics.\n *\n * @param topics - Topic names to scan\n * @param outputDir - Output directory\n * @param gitHash - Current git hash for validation\n * @returns Object with `found` (valid cached probes) and `missing` (topics not found or stale)\n */\nexport function scanCachedProbes(\n topics: string[],\n outputDir: string,\n gitHash: string\n): { found: Map<string, TopicProbeResult>; missing: string[] } {\n return scanCacheItemsMap<CachedProbeResult, TopicProbeResult>(\n topics,\n (topic) => getProbeCachePath(outputDir, topic),\n (cached) => !!cached.probeResult && cached.gitHash === gitHash,\n (cached) => cached.probeResult\n );\n}\n\n/**\n * Scan for cached probe results regardless of git hash (--use-cache mode).\n */\nexport function scanCachedProbesAny(\n topics: string[],\n outputDir: string\n): { found: Map<string, TopicProbeResult>; missing: string[] } {\n return scanCacheItemsMap<CachedProbeResult, TopicProbeResult>(\n topics,\n (topic) => getProbeCachePath(outputDir, topic),\n (cached) => !!cached.probeResult,\n (cached) => cached.probeResult\n );\n}\n\n// ============================================================================\n// Structural Scan Cache (Large Repo)\n// ============================================================================\n\n/**\n * Save a structural scan result to the cache.\n *\n * @param scan - The structural scan result\n * @param outputDir - Output directory\n * @param gitHash - Current git hash\n */\nexport function saveStructuralScan(\n scan: StructuralScanResult,\n outputDir: string,\n gitHash: string\n): void {\n writeCacheFile<CachedStructuralScan>(path.join(getDiscoveryCacheDir(outputDir), STRUCTURAL_SCAN_FILE), {\n scanResult: scan,\n gitHash,\n timestamp: Date.now(),\n });\n}\n\n/**\n * Get a cached structural scan if valid (git hash matches).\n *\n * @param outputDir - Output directory\n * @param gitHash - Current git hash for validation\n * @returns Cached scan result, or null if cache miss\n */\nexport function getCachedStructuralScan(\n outputDir: string,\n gitHash: string\n): StructuralScanResult | null {\n const cached = readCacheFileIf<CachedStructuralScan>(\n path.join(getDiscoveryCacheDir(outputDir), STRUCTURAL_SCAN_FILE),\n (d) => !!d.scanResult && d.gitHash === gitHash\n );\n return cached?.scanResult ?? null;\n}\n\n/**\n * Get a cached structural scan regardless of git hash (--use-cache mode).\n */\nexport function getCachedStructuralScanAny(\n outputDir: string\n): StructuralScanResult | null {\n const cached = readCacheFileIf<CachedStructuralScan>(\n path.join(getDiscoveryCacheDir(outputDir), STRUCTURAL_SCAN_FILE),\n (d) => !!d.scanResult\n );\n return cached?.scanResult ?? 
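The probe cache functions above support a scan-then-fill pattern: reuse valid cached probes and only send the missing topics to the AI. A sketch of that pattern, assuming an import path and a hypothetical probeTopic callback standing in for the real probe call.

import { scanCachedProbes, saveProbeResult } from './cache/discovery-cache'; // path assumed
import type { TopicProbeResult } from './discovery/iterative/types'; // path assumed

async function probeWithCache(
  topics: string[],
  outputDir: string,
  gitHash: string,
  probeTopic: (topic: string) => Promise<TopicProbeResult> // hypothetical stand-in
): Promise<Map<string, TopicProbeResult>> {
  const { found, missing } = scanCachedProbes(topics, outputDir, gitHash);
  for (const topic of missing) {
    const result = await probeTopic(topic); // only cache misses hit the AI
    saveProbeResult(topic, result, outputDir, gitHash);
    found.set(topic, result);
  }
  return found;
}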
null;\n}\n\n// ============================================================================\n// Area Sub-Graph Cache (Large Repo)\n// ============================================================================\n\n/**\n * Save an area sub-graph to the cache.\n *\n * @param areaId - The area identifier (path or slug)\n * @param graph - The area's sub-graph\n * @param outputDir - Output directory\n * @param gitHash - Current git hash\n */\nexport function saveAreaSubGraph(\n areaId: string,\n graph: ModuleGraph,\n outputDir: string,\n gitHash: string\n): void {\n writeCacheFile<CachedAreaGraph>(getAreaCachePath(outputDir, areaId), {\n graph,\n gitHash,\n timestamp: Date.now(),\n });\n}\n\n/**\n * Get a cached area sub-graph if valid (git hash matches).\n *\n * @param areaId - The area identifier\n * @param outputDir - Output directory\n * @param gitHash - Current git hash for validation\n * @returns Cached sub-graph, or null if cache miss\n */\nexport function getCachedAreaSubGraph(\n areaId: string,\n outputDir: string,\n gitHash: string\n): ModuleGraph | null {\n const cached = readCacheFileIf<CachedAreaGraph>(\n getAreaCachePath(outputDir, areaId),\n (d) => !!d.graph && d.gitHash === gitHash\n );\n return cached?.graph ?? null;\n}\n\n/**\n * Scan for cached area sub-graphs across multiple area IDs.\n *\n * @param areaIds - Area identifiers to scan\n * @param outputDir - Output directory\n * @param gitHash - Current git hash for validation\n * @returns Object with `found` (valid cached graphs) and `missing` (area IDs not found or stale)\n */\nexport function scanCachedAreas(\n areaIds: string[],\n outputDir: string,\n gitHash: string\n): { found: Map<string, ModuleGraph>; missing: string[] } {\n return scanCacheItemsMap<CachedAreaGraph, ModuleGraph>(\n areaIds,\n (areaId) => getAreaCachePath(outputDir, areaId),\n (cached) => !!cached.graph && cached.gitHash === gitHash,\n (cached) => cached.graph\n );\n}\n\n/**\n * Scan for cached area sub-graphs regardless of git hash (--use-cache mode).\n */\nexport function scanCachedAreasAny(\n areaIds: string[],\n outputDir: string\n): { found: Map<string, ModuleGraph>; missing: string[] } {\n return scanCacheItemsMap<CachedAreaGraph, ModuleGraph>(\n areaIds,\n (areaId) => getAreaCachePath(outputDir, areaId),\n (cached) => !!cached.graph,\n (cached) => cached.graph\n );\n}\n\n// ============================================================================\n// Discovery Progress Metadata\n// ============================================================================\n\n/**\n * Save discovery progress metadata for round resumption.\n *\n * @param metadata - The progress metadata\n * @param outputDir - Output directory\n */\nexport function saveDiscoveryMetadata(\n metadata: DiscoveryProgressMetadata,\n outputDir: string\n): void {\n writeCacheFile(path.join(getDiscoveryCacheDir(outputDir), METADATA_FILE), metadata);\n}\n\n/**\n * Get discovery progress metadata.\n *\n * @param outputDir - Output directory\n * @returns Metadata, or null if not found or corrupted\n */\nexport function getDiscoveryMetadata(\n outputDir: string\n): DiscoveryProgressMetadata | null {\n return readCacheFile<DiscoveryProgressMetadata>(path.join(getDiscoveryCacheDir(outputDir), METADATA_FILE));\n}\n\n// ============================================================================\n// Cleanup\n// ============================================================================\n\n/**\n * Clear all discovery cache artifacts.\n *\n * @param outputDir - Output directory\n * @returns True 
if cache was cleared, false if no cache existed\n */\nexport function clearDiscoveryCache(outputDir: string): boolean {\n return clearCacheDir(getDiscoveryCacheDir(outputDir));\n}\n", "/**\n * Graph Cache \u2014 Module Graph Discovery Results\n *\n * Caches the module graph produced by Phase 1 (discovery).\n * Uses git HEAD hash for invalidation.\n */\n\nimport * as path from 'path';\nimport type {\n ModuleGraph,\n} from '../types';\nimport type {\n CachedGraph,\n CacheMetadata,\n} from './types';\nimport { getFolderHeadHash } from './git-utils';\nimport { readCacheFileIf, writeCacheFile } from './cache-utils';\nimport { getCacheDir, CACHE_VERSION, GRAPH_CACHE_FILE } from './cache-constants';\n\n// ============================================================================\n// Paths\n// ============================================================================\n\n/**\n * Get the path to the cached module graph file.\n *\n * @param outputDir - Output directory\n * @returns Absolute path to the cached graph file\n */\nexport function getGraphCachePath(outputDir: string): string {\n return path.join(getCacheDir(outputDir), GRAPH_CACHE_FILE);\n}\n\n// ============================================================================\n// Read\n// ============================================================================\n\n/**\n * Get a cached module graph if it exists and is still valid (git hash matches).\n *\n * @param repoPath - Path to the git repository\n * @param outputDir - Output directory containing the cache\n * @returns The cached graph if valid, or null if cache miss\n */\nexport async function getCachedGraph(repoPath: string, outputDir: string): Promise<CachedGraph | null> {\n const cached = readCacheFileIf<CachedGraph>(\n getGraphCachePath(outputDir),\n (d) => !!d.metadata && !!d.graph\n );\n if (!cached) {\n return null;\n }\n\n try {\n const currentHash = await getFolderHeadHash(repoPath);\n if (!currentHash || currentHash !== cached.metadata.gitHash) {\n return null;\n }\n } catch {\n return null;\n }\n\n return cached;\n}\n\n/**\n * Get a cached module graph regardless of git hash (skip hash validation).\n *\n * @param outputDir - Output directory containing the cache\n * @returns The cached graph if it exists and is structurally valid, or null\n */\nexport function getCachedGraphAny(outputDir: string): CachedGraph | null {\n return readCacheFileIf<CachedGraph>(\n getGraphCachePath(outputDir),\n (d) => !!d.metadata && !!d.graph\n );\n}\n\n// ============================================================================\n// Write\n// ============================================================================\n\n/**\n * Save a module graph to the cache.\n *\n * @param repoPath - Path to the git repository\n * @param graph - The module graph to cache\n * @param outputDir - Output directory for the cache\n * @param focus - Optional focus area used during discovery\n */\nexport async function saveGraph(\n repoPath: string,\n graph: ModuleGraph,\n outputDir: string,\n focus?: string\n): Promise<void> {\n const currentHash = await getFolderHeadHash(repoPath);\n if (!currentHash) {\n // Can't determine git hash \u2014 skip caching\n return;\n }\n\n const metadata: CacheMetadata = {\n gitHash: currentHash,\n timestamp: Date.now(),\n version: CACHE_VERSION,\n focus,\n };\n\n writeCacheFile<CachedGraph>(getGraphCachePath(outputDir), { metadata, graph });\n}\n", "/**\n * Consolidation Cache \u2014 Consolidated Module Graph\n *\n * Caches the consolidated graph produced after merging 
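The graph cache above wraps Phase 1 output with hash-validated reuse. A minimal sketch of how a caller might pair getCachedGraph with saveGraph; the import paths and the runDiscovery callback are assumptions.

import { getCachedGraph, saveGraph } from './cache/graph-cache'; // path assumed
import type { ModuleGraph } from './types'; // path assumed

async function discoverWithGraphCache(
  repoPath: string,
  outputDir: string,
  runDiscovery: () => Promise<ModuleGraph>, // hypothetical stand-in for Phase 1
  focus?: string
): Promise<ModuleGraph> {
  const cached = await getCachedGraph(repoPath, outputDir);
  if (cached) {
    return cached.graph; // git hash matched, so Phase 1 output is reused
  }
  const graph = await runDiscovery();
  await saveGraph(repoPath, graph, outputDir, focus); // no-op when the hash cannot be determined
  return graph;
}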
discovery results.\n * Validates against both git hash and input module count.\n */\n\nimport * as path from 'path';\nimport type {\n ModuleGraph,\n} from '../types';\nimport type {\n CachedConsolidation,\n} from './types';\nimport { getFolderHeadHash } from './git-utils';\nimport { readCacheFileIf, writeCacheFile, clearCacheFile } from './cache-utils';\nimport { getCacheDir, CONSOLIDATED_GRAPH_FILE } from './cache-constants';\n\n// ============================================================================\n// Paths\n// ============================================================================\n\n/**\n * Get the path to the cached consolidated graph file.\n *\n * @param outputDir - Output directory\n * @returns Absolute path to the consolidated graph cache file\n */\nexport function getConsolidatedGraphCachePath(outputDir: string): string {\n return path.join(getCacheDir(outputDir), CONSOLIDATED_GRAPH_FILE);\n}\n\n// ============================================================================\n// Read\n// ============================================================================\n\n/**\n * Get a cached consolidated graph if it exists and is still valid.\n *\n * The cache is valid when:\n * - The git hash matches current HEAD\n * - The input module count matches (discovery produced the same graph)\n *\n * @param repoPath - Path to the git repository\n * @param outputDir - Output directory containing the cache\n * @param inputModuleCount - Number of modules in the pre-consolidation graph\n * @returns The cached consolidated graph if valid, or null if cache miss\n */\nexport async function getCachedConsolidation(\n repoPath: string,\n outputDir: string,\n inputModuleCount: number\n): Promise<CachedConsolidation | null> {\n const cached = readCacheFileIf<CachedConsolidation>(\n getConsolidatedGraphCachePath(outputDir),\n (d) => !!d.graph && !!d.gitHash && !!d.inputModuleCount && d.inputModuleCount === inputModuleCount\n );\n if (!cached) {\n return null;\n }\n\n try {\n const currentHash = await getFolderHeadHash(repoPath);\n if (!currentHash || currentHash !== cached.gitHash) {\n return null;\n }\n } catch {\n return null;\n }\n\n return cached;\n}\n\n/**\n * Get a cached consolidated graph regardless of git hash (--use-cache mode).\n *\n * Still validates the input module count so we don't reuse a consolidation\n * from a different discovery result.\n *\n * @param outputDir - Output directory containing the cache\n * @param inputModuleCount - Number of modules in the pre-consolidation graph\n * @returns The cached consolidated graph if structurally valid, or null\n */\nexport function getCachedConsolidationAny(\n outputDir: string,\n inputModuleCount: number\n): CachedConsolidation | null {\n return readCacheFileIf<CachedConsolidation>(\n getConsolidatedGraphCachePath(outputDir),\n (d) => !!d.graph && !!d.gitHash && !!d.inputModuleCount && d.inputModuleCount === inputModuleCount\n );\n}\n\n// ============================================================================\n// Write\n// ============================================================================\n\n/**\n * Save a consolidated graph to the cache.\n *\n * @param repoPath - Path to the git repository\n * @param graph - The consolidated module graph\n * @param outputDir - Output directory for the cache\n * @param inputModuleCount - Number of modules before consolidation\n */\nexport async function saveConsolidation(\n repoPath: string,\n graph: ModuleGraph,\n outputDir: string,\n inputModuleCount: number\n): Promise<void> {\n 
const currentHash = await getFolderHeadHash(repoPath);\n if (!currentHash) {\n return; // Can't determine git hash\n }\n\n writeCacheFile<CachedConsolidation>(getConsolidatedGraphCachePath(outputDir), {\n graph,\n gitHash: currentHash,\n inputModuleCount,\n timestamp: Date.now(),\n });\n}\n\n// ============================================================================\n// Invalidation\n// ============================================================================\n\n/**\n * Clear the consolidated graph cache.\n *\n * @param outputDir - Output directory\n * @returns True if cache was cleared, false if no cache existed\n */\nexport function clearConsolidationCache(outputDir: string): boolean {\n return clearCacheFile(getConsolidatedGraphCachePath(outputDir));\n}\n", "/**\n * Analysis Cache \u2014 Per-Module Analysis Results\n *\n * Caches per-module analysis results from Phase 3.\n * Supports individual and bulk operations, crash recovery scanning,\n * and metadata-based validation.\n */\n\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport type {\n ModuleAnalysis,\n} from '../types';\nimport type {\n CachedAnalysis,\n AnalysisCacheMetadata,\n} from './types';\nimport { getFolderHeadHash } from './git-utils';\nimport { readCacheFile, readCacheFileIf, writeCacheFile, clearCacheDir, scanCacheItems } from './cache-utils';\nimport { getCacheDir, CACHE_VERSION, ANALYSES_DIR, ANALYSES_METADATA_FILE } from './cache-constants';\n\n// ============================================================================\n// Paths\n// ============================================================================\n\n/**\n * Get the analyses cache directory.\n */\nexport function getAnalysesCacheDir(outputDir: string): string {\n return path.join(getCacheDir(outputDir), ANALYSES_DIR);\n}\n\n/**\n * Get the path to a single cached analysis file.\n */\nexport function getAnalysisCachePath(outputDir: string, moduleId: string): string {\n return path.join(getAnalysesCacheDir(outputDir), `${moduleId}.json`);\n}\n\n/**\n * Get the path to the analyses metadata file.\n */\nexport function getAnalysesMetadataPath(outputDir: string): string {\n return path.join(getAnalysesCacheDir(outputDir), ANALYSES_METADATA_FILE);\n}\n\n// ============================================================================\n// Read\n// ============================================================================\n\n/**\n * Get a single cached module analysis.\n *\n * @param moduleId - Module ID to look up\n * @param outputDir - Output directory\n * @returns The cached analysis, or null if not found\n */\nexport function getCachedAnalysis(moduleId: string, outputDir: string): ModuleAnalysis | null {\n const cached = readCacheFileIf<CachedAnalysis>(\n getAnalysisCachePath(outputDir, moduleId),\n (d) => !!d.analysis && !!d.analysis.moduleId\n );\n return cached?.analysis ?? 
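The consolidation cache above keys on both the git hash and the input module count. A sketch of the intended call pattern, with an assumed import path and a hypothetical consolidate callback standing in for the real merge step.

import { getCachedConsolidation, saveConsolidation } from './cache/consolidation-cache'; // path assumed
import type { ModuleGraph } from './types'; // path assumed

async function consolidateWithCache(
  repoPath: string,
  outputDir: string,
  discovered: ModuleGraph,
  consolidate: (g: ModuleGraph) => Promise<ModuleGraph> // hypothetical stand-in
): Promise<ModuleGraph> {
  const inputCount = discovered.modules.length;
  const cached = await getCachedConsolidation(repoPath, outputDir, inputCount);
  if (cached) {
    return cached.graph; // same HEAD and same input size, so the merge is reused
  }
  const merged = await consolidate(discovered);
  await saveConsolidation(repoPath, merged, outputDir, inputCount);
  return merged;
}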
null;\n}\n\n/**\n * Get all cached analyses if the cache is valid.\n *\n * @param outputDir - Output directory\n * @returns Array of cached analyses, or null if cache is invalid/missing\n */\nexport function getCachedAnalyses(outputDir: string): ModuleAnalysis[] | null {\n const metadata = readCacheFileIf<AnalysisCacheMetadata>(\n getAnalysesMetadataPath(outputDir),\n (d) => !!d.gitHash && !!d.moduleCount\n );\n if (!metadata) {\n return null;\n }\n\n // Read all analysis files\n const analysesDir = getAnalysesCacheDir(outputDir);\n const analyses: ModuleAnalysis[] = [];\n\n try {\n const files = fs.readdirSync(analysesDir);\n for (const file of files) {\n if (file === ANALYSES_METADATA_FILE || !file.endsWith('.json')) {\n continue;\n }\n\n const cached = readCacheFileIf<CachedAnalysis>(\n path.join(analysesDir, file),\n (d) => !!d.analysis && !!d.analysis.moduleId\n );\n if (cached) {\n analyses.push(cached.analysis);\n }\n }\n } catch {\n return null;\n }\n\n return analyses.length > 0 ? analyses : null;\n}\n\n/**\n * Get the analyses cache metadata (for hash checking).\n */\nexport function getAnalysesCacheMetadata(outputDir: string): AnalysisCacheMetadata | null {\n return readCacheFile<AnalysisCacheMetadata>(getAnalysesMetadataPath(outputDir));\n}\n\n// ============================================================================\n// Write\n// ============================================================================\n\n/**\n * Save a single module analysis to the cache.\n *\n * @param moduleId - Module ID\n * @param analysis - The analysis to cache\n * @param outputDir - Output directory\n * @param gitHash - Git hash when the analysis was produced\n */\nexport function saveAnalysis(\n moduleId: string,\n analysis: ModuleAnalysis,\n outputDir: string,\n gitHash: string\n): void {\n writeCacheFile<CachedAnalysis>(getAnalysisCachePath(outputDir, moduleId), {\n analysis,\n gitHash,\n timestamp: Date.now(),\n });\n}\n\n/**\n * Save all analyses to the cache (bulk save with metadata).\n *\n * @param analyses - All module analyses\n * @param outputDir - Output directory\n * @param repoPath - Path to the git repository\n */\nexport async function saveAllAnalyses(\n analyses: ModuleAnalysis[],\n outputDir: string,\n repoPath: string\n): Promise<void> {\n const currentHash = await getFolderHeadHash(repoPath);\n if (!currentHash) {\n return; // Can't determine git hash\n }\n\n // Write individual analysis files\n for (const analysis of analyses) {\n saveAnalysis(analysis.moduleId, analysis, outputDir, currentHash);\n }\n\n // Write metadata\n writeCacheFile<AnalysisCacheMetadata>(getAnalysesMetadataPath(outputDir), {\n gitHash: currentHash,\n timestamp: Date.now(),\n version: CACHE_VERSION,\n moduleCount: analyses.length,\n });\n}\n\n// ============================================================================\n// Scan (Crash Recovery)\n// ============================================================================\n\n/**\n * Scan for individually cached analyses (even without metadata).\n *\n * This is used for crash recovery: if the process was interrupted before\n * `saveAllAnalyses` wrote the metadata file, individual per-module files\n * may still exist from incremental saves via `onItemComplete`.\n *\n * @param moduleIds - Module IDs to look for in the cache\n * @param outputDir - Output directory\n * @param currentGitHash - Current git hash for validation (modules cached with\n * a different hash are considered stale and excluded)\n * @returns Object with `found` (valid cached 
analyses) and `missing` (module IDs not found or stale)\n */\nexport function scanIndividualAnalysesCache(\n moduleIds: string[],\n outputDir: string,\n currentGitHash: string\n): { found: ModuleAnalysis[]; missing: string[] } {\n return scanCacheItems<CachedAnalysis, ModuleAnalysis>(\n moduleIds,\n (id) => getAnalysisCachePath(outputDir, id),\n (cached) => !!cached.analysis && !!cached.analysis.moduleId && cached.gitHash === currentGitHash,\n (cached) => cached.analysis\n );\n}\n\n/**\n * Scan for individually cached analyses, ignoring git hash validation.\n *\n * @param moduleIds - Module IDs to look for in the cache\n * @param outputDir - Output directory\n * @returns Object with `found` (valid cached analyses) and `missing` (module IDs not found)\n */\nexport function scanIndividualAnalysesCacheAny(\n moduleIds: string[],\n outputDir: string\n): { found: ModuleAnalysis[]; missing: string[] } {\n return scanCacheItems<CachedAnalysis, ModuleAnalysis>(\n moduleIds,\n (id) => getAnalysisCachePath(outputDir, id),\n (cached) => !!cached.analysis && !!cached.analysis.moduleId,\n (cached) => cached.analysis\n );\n}\n\n// ============================================================================\n// Invalidation\n// ============================================================================\n\n/**\n * Clear all cached analyses.\n *\n * @param outputDir - Output directory\n * @returns True if cache was cleared, false if no cache existed\n */\nexport function clearAnalysesCache(outputDir: string): boolean {\n return clearCacheDir(getAnalysesCacheDir(outputDir));\n}\n", "/**\n * Article Cache \u2014 Per-Module Article Results and Reduce-Phase Articles\n *\n * Caches per-module articles from Phase 4 and reduce-phase synthesis articles.\n * Supports flat and area-scoped directory layouts, crash recovery scanning,\n * re-stamping for incremental invalidation, and metadata-based validation.\n */\n\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport type {\n GeneratedArticle,\n} from '../types';\nimport type {\n CachedArticle,\n AnalysisCacheMetadata,\n} from './types';\nimport { getFolderHeadHash } from './git-utils';\nimport { readCacheFile, readCacheFileIf, writeCacheFile, clearCacheDir, scanCacheItems } from './cache-utils';\nimport { getCacheDir, CACHE_VERSION, ARTICLES_DIR, ANALYSES_METADATA_FILE, REDUCE_METADATA_FILE, REDUCE_ARTICLE_PREFIX } from './cache-constants';\n\n// ============================================================================\n// Paths\n// ============================================================================\n\n/**\n * Get the articles cache directory.\n */\nexport function getArticlesCacheDir(outputDir: string): string {\n return path.join(getCacheDir(outputDir), ARTICLES_DIR);\n}\n\n/**\n * Get the path to a single cached article file.\n * When areaId is provided, articles are cached under `articles/{area-id}/{module-id}.json`.\n * Without areaId, articles are cached as `articles/{module-id}.json` (backward compat).\n */\nexport function getArticleCachePath(outputDir: string, moduleId: string, areaId?: string): string {\n if (areaId) {\n return path.join(getArticlesCacheDir(outputDir), areaId, `${moduleId}.json`);\n }\n return path.join(getArticlesCacheDir(outputDir), `${moduleId}.json`);\n}\n\n/**\n * Get the path to the articles metadata file.\n */\nexport function getArticlesMetadataPath(outputDir: string): string {\n return path.join(getArticlesCacheDir(outputDir), ANALYSES_METADATA_FILE);\n}\n\n// 
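The crash-recovery scan above lets a Phase 3 run resume from whatever an interrupted run already wrote. A sketch of that flow, assuming import paths and a hypothetical analyzeModule callback for the per-module AI call.

import { scanIndividualAnalysesCache, saveAnalysis, saveAllAnalyses } from './cache/analysis-cache'; // path assumed
import type { ModuleAnalysis } from './types'; // path assumed

async function analyzeWithRecovery(
  moduleIds: string[],
  repoPath: string,
  outputDir: string,
  gitHash: string,
  analyzeModule: (id: string) => Promise<ModuleAnalysis> // hypothetical stand-in
): Promise<ModuleAnalysis[]> {
  // Reuse valid per-module files, then analyze only what is missing or stale.
  const { found, missing } = scanIndividualAnalysesCache(moduleIds, outputDir, gitHash);
  const results = [...found];
  for (const id of missing) {
    const analysis = await analyzeModule(id);
    saveAnalysis(id, analysis, outputDir, gitHash); // incremental save, like onItemComplete
    results.push(analysis);
  }
  await saveAllAnalyses(results, outputDir, repoPath); // writes the bulk metadata file
  return results;
}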
============================================================================\n// Reduce Article Paths\n// ============================================================================\n\n/**\n * Get the path to the reduce articles metadata file.\n */\nexport function getReduceMetadataPath(outputDir: string): string {\n return path.join(getArticlesCacheDir(outputDir), REDUCE_METADATA_FILE);\n}\n\n/**\n * Get the cache path for a reduce-phase article.\n *\n * Naming convention:\n * - `_reduce-index.json` for index article\n * - `_reduce-architecture.json` for architecture article\n * - `_reduce-getting-started.json` for getting-started article\n * - `_reduce-area-{areaId}-index.json` for area-index article\n * - `_reduce-area-{areaId}-architecture.json` for area-architecture article\n *\n * @param outputDir - Output directory\n * @param articleType - Article type (e.g., 'index', 'architecture', 'getting-started')\n * @param areaId - Optional area ID for area-scoped reduce articles\n * @returns Absolute path to the reduce article cache file\n */\nexport function getReduceArticleCachePath(\n outputDir: string,\n articleType: string,\n areaId?: string\n): string {\n const filename = areaId\n ? `${REDUCE_ARTICLE_PREFIX}area-${areaId}-${articleType}.json`\n : `${REDUCE_ARTICLE_PREFIX}${articleType}.json`;\n return path.join(getArticlesCacheDir(outputDir), filename);\n}\n\n// ============================================================================\n// Read\n// ============================================================================\n\n/**\n * Get a single cached module article.\n * Checks area-scoped path first (if areaId provided), then flat path.\n *\n * @param moduleId - Module ID to look up\n * @param outputDir - Output directory\n * @param areaId - Optional area ID for hierarchical lookup\n * @returns The cached article, or null if not found\n */\nexport function getCachedArticle(moduleId: string, outputDir: string, areaId?: string): GeneratedArticle | null {\n // Try area-scoped path first, then flat path\n const pathsToTry = areaId\n ? 
[getArticleCachePath(outputDir, moduleId, areaId), getArticleCachePath(outputDir, moduleId)]\n : [getArticleCachePath(outputDir, moduleId)];\n\n for (const cachePath of pathsToTry) {\n const cached = readCacheFileIf<CachedArticle>(\n cachePath,\n (d) => !!d.article && !!d.article.slug\n );\n if (cached) {\n return cached.article;\n }\n }\n\n return null;\n}\n\n/**\n * Get all cached articles if the cache is valid (has metadata).\n * Supports both flat and area-scoped directory layouts.\n *\n * @param outputDir - Output directory\n * @returns Array of cached articles, or null if cache is invalid/missing\n */\nexport function getCachedArticles(outputDir: string): GeneratedArticle[] | null {\n const metadata = readCacheFileIf<AnalysisCacheMetadata>(\n getArticlesMetadataPath(outputDir),\n (d) => !!d.gitHash && !!d.moduleCount\n );\n if (!metadata) {\n return null;\n }\n\n // Read all article files (flat + area-scoped)\n const articlesDir = getArticlesCacheDir(outputDir);\n const articles: GeneratedArticle[] = [];\n const articleValidator = (d: CachedArticle) => !!d.article && !!d.article.slug;\n\n try {\n const entries = fs.readdirSync(articlesDir, { withFileTypes: true });\n for (const entry of entries) {\n // Skip metadata and reduce-phase files\n if (entry.name === ANALYSES_METADATA_FILE || entry.name.startsWith(REDUCE_ARTICLE_PREFIX)) {\n continue;\n }\n\n if (entry.isFile() && entry.name.endsWith('.json')) {\n // Flat layout: articles/{module-id}.json\n const cached = readCacheFileIf<CachedArticle>(path.join(articlesDir, entry.name), articleValidator);\n if (cached) {\n articles.push(cached.article);\n }\n } else if (entry.isDirectory()) {\n // Area-scoped layout: articles/{area-id}/{module-id}.json\n const areaDir = path.join(articlesDir, entry.name);\n try {\n const areaFiles = fs.readdirSync(areaDir);\n for (const file of areaFiles) {\n if (!file.endsWith('.json')) { continue; }\n const cached = readCacheFileIf<CachedArticle>(path.join(areaDir, file), articleValidator);\n if (cached) {\n articles.push(cached.article);\n }\n }\n } catch {\n // Skip inaccessible area directories\n }\n }\n }\n } catch {\n return null;\n }\n\n return articles.length > 0 ? articles : null;\n}\n\n/**\n * Get the articles cache metadata (for hash checking).\n */\nexport function getArticlesCacheMetadata(outputDir: string): AnalysisCacheMetadata | null {\n return readCacheFile<AnalysisCacheMetadata>(getArticlesMetadataPath(outputDir));\n}\n\n// ============================================================================\n// Reduce Article Read\n// ============================================================================\n\n/**\n * Get the reduce articles cache metadata (for hash checking).\n */\nexport function getReduceCacheMetadata(outputDir: string): AnalysisCacheMetadata | null {\n return readCacheFile<AnalysisCacheMetadata>(getReduceMetadataPath(outputDir));\n}\n\n/**\n * Get all cached reduce-phase articles if the cache is valid.\n *\n * Reads all `_reduce-*.json` files (excluding `_reduce-metadata.json`) from the\n * articles cache directory. Validates against the provided git hash if specified.\n *\n * @param outputDir - Output directory\n * @param gitHash - Optional git hash for validation. 
If provided, only returns\n * articles if the reduce metadata git hash matches.\n * @returns Array of cached reduce articles, or null if cache miss\n */\nexport function getCachedReduceArticles(\n outputDir: string,\n gitHash?: string\n): GeneratedArticle[] | null {\n // Check reduce metadata first\n const metadata = getReduceCacheMetadata(outputDir);\n if (!metadata) {\n return null;\n }\n\n // Validate git hash if provided\n if (gitHash && metadata.gitHash !== gitHash) {\n return null;\n }\n\n // Read all _reduce-*.json files (excluding metadata)\n const articlesDir = getArticlesCacheDir(outputDir);\n if (!fs.existsSync(articlesDir)) {\n return null;\n }\n\n const articles: GeneratedArticle[] = [];\n\n try {\n const files = fs.readdirSync(articlesDir);\n for (const file of files) {\n if (\n !file.startsWith(REDUCE_ARTICLE_PREFIX) ||\n file === REDUCE_METADATA_FILE ||\n !file.endsWith('.json')\n ) {\n continue;\n }\n\n const cached = readCacheFileIf<CachedArticle>(\n path.join(articlesDir, file),\n (d) => !!d.article && !!d.article.slug\n );\n if (cached) {\n articles.push(cached.article);\n }\n }\n } catch {\n return null;\n }\n\n return articles.length > 0 ? articles : null;\n}\n\n// ============================================================================\n// Write\n// ============================================================================\n\n/**\n * Save a single module article to the cache.\n * Area-scoped articles are cached under `articles/{area-id}/{module-id}.json`.\n *\n * @param moduleId - Module ID\n * @param article - The article to cache\n * @param outputDir - Output directory\n * @param gitHash - Git hash when the article was generated\n */\nexport function saveArticle(\n moduleId: string,\n article: GeneratedArticle,\n outputDir: string,\n gitHash: string\n): void {\n writeCacheFile<CachedArticle>(getArticleCachePath(outputDir, moduleId, article.areaId), {\n article,\n gitHash,\n timestamp: Date.now(),\n });\n}\n\n/**\n * Save all articles to the cache (bulk save with metadata).\n *\n * @param articles - All module articles (only 'module' type articles are cached)\n * @param outputDir - Output directory\n * @param repoPath - Path to the git repository\n */\nexport async function saveAllArticles(\n articles: GeneratedArticle[],\n outputDir: string,\n repoPath: string\n): Promise<void> {\n const currentHash = await getFolderHeadHash(repoPath);\n if (!currentHash) {\n return; // Can't determine git hash\n }\n\n // Only cache module-type articles (not index/architecture/getting-started/area-*)\n const moduleArticles = articles.filter(a => a.type === 'module' && a.moduleId);\n\n // Write individual article files (saveArticle handles area subdirectories)\n for (const article of moduleArticles) {\n saveArticle(article.moduleId!, article, outputDir, currentHash);\n }\n\n // Write metadata\n writeCacheFile<AnalysisCacheMetadata>(getArticlesMetadataPath(outputDir), {\n gitHash: currentHash,\n timestamp: Date.now(),\n version: CACHE_VERSION,\n moduleCount: moduleArticles.length,\n });\n}\n\n// ============================================================================\n// Reduce Article Write\n// ============================================================================\n\n/**\n * Save reduce-phase articles to the cache.\n *\n * Filters the provided articles to only reduce-type articles (NOT 'module'),\n * writes each to a `_reduce-{type}.json` file, and writes reduce metadata\n * with the git hash and count.\n *\n * @param articles - All articles (will be filtered to 
reduce types only)\n * @param outputDir - Output directory\n * @param gitHash - Git hash when the articles were generated\n */\nexport function saveReduceArticles(\n articles: GeneratedArticle[],\n outputDir: string,\n gitHash: string\n): void {\n // Only cache reduce-type articles (not 'module')\n const reduceArticles = articles.filter(a => a.type !== 'module');\n if (reduceArticles.length === 0) {\n return;\n }\n\n // Write individual reduce article files\n for (const article of reduceArticles) {\n writeCacheFile<CachedArticle>(getReduceArticleCachePath(outputDir, article.type, article.areaId), {\n article,\n gitHash,\n timestamp: Date.now(),\n });\n }\n\n // Write reduce metadata\n writeCacheFile<AnalysisCacheMetadata>(getReduceMetadataPath(outputDir), {\n gitHash,\n timestamp: Date.now(),\n version: CACHE_VERSION,\n moduleCount: reduceArticles.length,\n });\n}\n\n// ============================================================================\n// Scan (Crash Recovery)\n// ============================================================================\n\n/**\n * Find all possible cache paths for a module article (checks area subdirectories + flat).\n * Returns the first existing path, or null if none found.\n */\nfunction findArticleCachePath(outputDir: string, moduleId: string): string | null {\n // Check flat path first\n const flatPath = getArticleCachePath(outputDir, moduleId);\n if (fs.existsSync(flatPath)) {\n return flatPath;\n }\n\n // Check area subdirectories\n const articlesDir = getArticlesCacheDir(outputDir);\n if (fs.existsSync(articlesDir)) {\n try {\n const entries = fs.readdirSync(articlesDir, { withFileTypes: true });\n for (const entry of entries) {\n if (entry.isDirectory() && entry.name !== '_metadata.json') {\n const areaPath = path.join(articlesDir, entry.name, `${moduleId}.json`);\n if (fs.existsSync(areaPath)) {\n return areaPath;\n }\n }\n }\n } catch {\n // Ignore errors scanning area dirs\n }\n }\n\n return null;\n}\n\n/**\n * Scan for individually cached articles (even without metadata).\n *\n * This is used for crash recovery: if the process was interrupted before\n * `saveAllArticles` wrote the metadata file, individual per-module files\n * may still exist from incremental saves via `onItemComplete`.\n *\n * Supports both flat (`articles/{module-id}.json`) and area-scoped\n * (`articles/{area-id}/{module-id}.json`) cache layouts.\n *\n * @param moduleIds - Module IDs to look for in the cache\n * @param outputDir - Output directory\n * @param currentGitHash - Current git hash for validation (modules cached with\n * a different hash are considered stale and excluded)\n * @returns Object with `found` (valid cached articles) and `missing` (module IDs not found or stale)\n */\nexport function scanIndividualArticlesCache(\n moduleIds: string[],\n outputDir: string,\n currentGitHash: string\n): { found: GeneratedArticle[]; missing: string[] } {\n return scanCacheItems<CachedArticle, GeneratedArticle>(\n moduleIds,\n (id) => findArticleCachePath(outputDir, id),\n (cached) => !!cached.article && !!cached.article.slug && cached.gitHash === currentGitHash,\n (cached) => cached.article\n );\n}\n\n/**\n * Scan for individually cached articles, ignoring git hash validation.\n *\n * Supports both flat and area-scoped cache layouts.\n *\n * @param moduleIds - Module IDs to look for in the cache\n * @param outputDir - Output directory\n * @returns Object with `found` (valid cached articles) and `missing` (module IDs not found)\n */\nexport function 
scanIndividualArticlesCacheAny(\n moduleIds: string[],\n outputDir: string\n): { found: GeneratedArticle[]; missing: string[] } {\n return scanCacheItems<CachedArticle, GeneratedArticle>(\n moduleIds,\n (id) => findArticleCachePath(outputDir, id),\n (cached) => !!cached.article && !!cached.article.slug,\n (cached) => cached.article\n );\n}\n\n// ============================================================================\n// Re-stamping\n// ============================================================================\n\n/**\n * Re-stamp cached articles for unchanged modules with a new git hash.\n *\n * This is the key operation for Phase 4 incremental invalidation:\n * after Phase 3 identifies which modules changed, unchanged module articles\n * are re-stamped (their gitHash updated) so they pass validation on the\n * current run. Only I/O \u2014 no AI calls needed.\n *\n * @param moduleIds - Module IDs whose articles should be re-stamped\n * @param outputDir - Output directory (cache lives here)\n * @param newGitHash - The current git hash to stamp onto the articles\n * @returns Number of articles successfully re-stamped\n */\nexport function restampArticles(\n moduleIds: string[],\n outputDir: string,\n newGitHash: string\n): number {\n let restamped = 0;\n\n for (const moduleId of moduleIds) {\n const cachePath = findArticleCachePath(outputDir, moduleId);\n if (!cachePath) {\n continue; // No cached article for this module \u2014 it will be regenerated\n }\n\n const cached = readCacheFile<CachedArticle>(cachePath);\n if (!cached || !cached.article || !cached.article.slug) {\n continue; // Corrupted or invalid \u2014 skip, will be regenerated\n }\n\n // Already has the correct hash \u2014 no need to re-write\n if (cached.gitHash === newGitHash) {\n restamped++;\n continue;\n }\n\n // Re-stamp: write back with updated git hash (same article content)\n writeCacheFile<CachedArticle>(cachePath, {\n article: cached.article,\n gitHash: newGitHash,\n timestamp: Date.now(),\n });\n restamped++;\n }\n\n return restamped;\n}\n\n// ============================================================================\n// Invalidation\n// ============================================================================\n\n/**\n * Clear all cached articles (including area subdirectories).\n *\n * @param outputDir - Output directory\n * @returns True if cache was cleared, false if no cache existed\n */\nexport function clearArticlesCache(outputDir: string): boolean {\n return clearCacheDir(getArticlesCacheDir(outputDir));\n}\n", "/**\n * Cache Layer \u2014 Barrel Re-export\n *\n * Re-exports all cache functions from domain-specific modules.\n * Consumers can import everything from 'cache/' or 'cache/index'.\n *\n * Also contains cross-domain functions (getModulesNeedingReanalysis, clearCache,\n * hasCachedGraph) that depend on multiple cache domains.\n */\n\nimport type { ModuleGraph } from '../types';\nimport { getFolderHeadHash, getChangedFiles } from './git-utils';\nimport { clearCacheFile } from './cache-utils';\n\n// Re-export constants and getCacheDir\nexport { getCacheDir } from './cache-constants';\n\n// Re-export cache utilities\nexport * from './cache-utils';\n\n// Re-export git utilities\nexport { getRepoHeadHash, getFolderHeadHash, getGitRoot, getChangedFiles, hasChanges, isGitAvailable, isGitRepo } from './git-utils';\n\n// Re-export discovery cache functions\nexport {\n getDiscoveryCacheDir,\n saveSeedsCache,\n getCachedSeeds,\n getCachedSeedsAny,\n saveProbeResult,\n getCachedProbeResult,\n 
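The reduce-article cache above pairs saveReduceArticles with getCachedReduceArticles. A minimal round-trip sketch; the import paths and the synthesize callback are assumptions.

import { saveReduceArticles, getCachedReduceArticles } from './cache/article-cache'; // path assumed
import type { GeneratedArticle } from './types'; // path assumed

async function reduceWithCache(
  outputDir: string,
  gitHash: string,
  synthesize: () => Promise<GeneratedArticle[]> // hypothetical stand-in for reduce-phase synthesis
): Promise<GeneratedArticle[]> {
  const cached = getCachedReduceArticles(outputDir, gitHash);
  if (cached) {
    return cached; // reduce metadata git hash matched
  }
  const articles = await synthesize();
  saveReduceArticles(articles, outputDir, gitHash); // only non-'module' articles are written
  return articles;
}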
scanCachedProbes,\n scanCachedProbesAny,\n saveStructuralScan,\n getCachedStructuralScan,\n getCachedStructuralScanAny,\n saveAreaSubGraph,\n getCachedAreaSubGraph,\n scanCachedAreas,\n scanCachedAreasAny,\n saveDiscoveryMetadata,\n getDiscoveryMetadata,\n clearDiscoveryCache,\n} from './discovery-cache';\n\n// Re-export graph cache functions\nexport * from './graph-cache';\n\n// Re-export consolidation cache functions\nexport * from './consolidation-cache';\n\n// Re-export analysis cache functions\nexport * from './analysis-cache';\n\n// Re-export article cache functions\nexport * from './article-cache';\n\n// ============================================================================\n// Cross-Domain Functions\n// ============================================================================\n\n// Import from domain modules for use in cross-domain functions\nimport { getGraphCachePath, getCachedGraph } from './graph-cache';\nimport { getAnalysesCacheMetadata } from './analysis-cache';\n\n/**\n * Determine which modules need re-analysis based on git changes.\n *\n * Algorithm:\n * 1. Get changed files since the cached git hash\n * 2. For each module, check if any changed file falls under module.path or matches module.keyFiles\n * 3. Return affected module IDs\n *\n * @param graph - Module graph\n * @param outputDir - Output directory (for cache access)\n * @param repoPath - Path to the git repository\n * @returns Array of module IDs that need re-analysis, or null if full rebuild needed\n */\nexport async function getModulesNeedingReanalysis(\n graph: ModuleGraph,\n outputDir: string,\n repoPath: string\n): Promise<string[] | null> {\n // Get cached analyses metadata\n const metadata = getAnalysesCacheMetadata(outputDir);\n if (!metadata || !metadata.gitHash) {\n // No cache \u2014 full rebuild\n return null;\n }\n\n // Get current git hash (subfolder-scoped if repoPath is a subfolder)\n const currentHash = await getFolderHeadHash(repoPath);\n if (!currentHash) {\n // Can't determine hash \u2014 full rebuild\n return null;\n }\n\n // If same hash, nothing needs re-analysis\n if (metadata.gitHash === currentHash) {\n return [];\n }\n\n // Get changed files, scoped to repoPath so paths align with module paths in the graph\n const changedFiles = await getChangedFiles(repoPath, metadata.gitHash, repoPath);\n if (changedFiles === null) {\n // Can't determine changes \u2014 full rebuild\n return null;\n }\n\n if (changedFiles.length === 0) {\n return [];\n }\n\n // Normalize changed file paths (forward slashes)\n const normalizedChanged = changedFiles.map(f => f.replace(/\\\\/g, '/'));\n\n // Check each module\n const affectedModules: string[] = [];\n for (const module of graph.modules) {\n const modulePath = module.path.replace(/\\\\/g, '/').replace(/\\/$/, '');\n const keyFiles = module.keyFiles.map(f => f.replace(/\\\\/g, '/'));\n\n const isAffected = normalizedChanged.some(changedFile => {\n // Check if changed file is under the module's path\n if (changedFile.startsWith(modulePath + '/') || changedFile === modulePath) {\n return true;\n }\n\n // Check if changed file matches any key file\n if (keyFiles.some(kf => changedFile === kf || changedFile.endsWith('/' + kf))) {\n return true;\n }\n\n return false;\n });\n\n if (isAffected) {\n affectedModules.push(module.id);\n }\n }\n\n return affectedModules;\n}\n\n// ============================================================================\n// Graph Cache Invalidation\n// 
============================================================================\n\n/**\n * Clear the graph cache for a given output directory.\n *\n * @param outputDir - Output directory containing the cache\n * @returns True if cache was cleared, false if no cache existed\n */\nexport function clearCache(outputDir: string): boolean {\n return clearCacheFile(getGraphCachePath(outputDir));\n}\n\n/**\n * Check if a valid graph cache exists for the given configuration.\n *\n * @param repoPath - Path to the git repository\n * @param outputDir - Output directory\n * @returns True if a valid (non-expired) cache exists\n */\nexport async function hasCachedGraph(repoPath: string, outputDir: string): Promise<boolean> {\n const cached = await getCachedGraph(repoPath, outputDir);\n return cached !== null;\n}\n", "/**\n * Discovery Phase \u2014 Large Repo Handler\n *\n * Handles multi-round discovery for large repositories (3000+ files).\n * First pass identifies top-level structure, second pass drills into each area.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport {\n getCopilotSDKService,\n type SendMessageOptions,\n type PermissionRequest,\n type PermissionRequestResult,\n} from '@plusplusoneplusplus/pipeline-core';\nimport type {\n DiscoveryOptions,\n ModuleGraph,\n ModuleInfo,\n CategoryInfo,\n AreaInfo,\n StructuralScanResult,\n TopLevelArea,\n} from '../types';\nimport { normalizeModuleId } from '../schemas';\nimport { getErrorMessage } from '../utils/error-utils';\nimport { buildStructuralScanPrompt, buildFocusedDiscoveryPrompt } from './prompts';\nimport { parseStructuralScanResponse, parseModuleGraphResponse } from './response-parser';\nimport { printInfo, printWarning, gray, cyan } from '../logger';\nimport {\n getCachedStructuralScan,\n getCachedStructuralScanAny,\n saveStructuralScan,\n getCachedAreaSubGraph,\n saveAreaSubGraph,\n} from '../cache';\n\n// ============================================================================\n// Constants\n// ============================================================================\n\n/** File count threshold for triggering multi-round discovery */\nexport const LARGE_REPO_THRESHOLD = 3000;\n\n/** Default timeout for structural scan (shorter than full discovery) */\nconst STRUCTURAL_SCAN_TIMEOUT_MS = 120_000; // 2 minutes\n\n/** Default timeout per area drill-down */\nconst PER_AREA_TIMEOUT_MS = 180_000; // 3 minutes\n\n/** Available tools for discovery (read-only file exploration) */\nconst DISCOVERY_TOOLS = ['view', 'grep', 'glob'];\n\n// ============================================================================\n// Permission Handler\n// ============================================================================\n\n/**\n * Read-only permission handler.\n */\nfunction readOnlyPermissions(request: PermissionRequest): PermissionRequestResult {\n if (request.kind === 'read') {\n return { kind: 'approved' };\n }\n return { kind: 'denied-by-rules' };\n}\n\n// ============================================================================\n// File Count Estimation\n// ============================================================================\n\n/**\n * Estimate the number of files in a repository by counting glob results.\n * Uses a fast glob pattern to avoid reading file contents.\n *\n * @param repoPath - Path to the repository\n * @returns Estimated file count, or -1 if estimation fails\n */\nexport async function estimateFileCount(repoPath: string): Promise<number> {\n const service = getCopilotSDKService();\n\n 
printInfo('Estimating repository file count...');\n const result = await service.sendMessage({\n prompt: `Count the approximate number of files in this repository. Run glob(\"**/*\") and count the results. Respond with ONLY a single number, nothing else.`,\n workingDirectory: repoPath,\n availableTools: ['glob'],\n onPermissionRequest: readOnlyPermissions,\n usePool: false,\n timeoutMs: 30_000,\n });\n\n if (!result.success || !result.response) {\n printWarning('Could not estimate file count');\n return -1;\n }\n\n // Extract number from response\n const match = result.response.trim().match(/(\\d+)/);\n const count = match ? parseInt(match[1], 10) : -1;\n if (count > 0) {\n printInfo(`Repository contains ~${count} files ${gray(`(threshold: ${LARGE_REPO_THRESHOLD})`)}`);\n }\n return count;\n}\n\n/**\n * Check if a repository is large enough to require multi-round discovery.\n *\n * @param repoPath - Path to the repository\n * @returns True if the repo has more files than the threshold\n */\nexport async function isLargeRepo(repoPath: string): Promise<boolean> {\n const count = await estimateFileCount(repoPath);\n return count > LARGE_REPO_THRESHOLD;\n}\n\n// ============================================================================\n// Multi-Round Discovery\n// ============================================================================\n\n/**\n * Perform multi-round discovery for a large repository.\n *\n * Round 1: Structural scan \u2014 identify top-level areas\n * Round 2: Per-area drill-down \u2014 focused discovery for each area (sequential)\n * Final: Merge all sub-graphs into a unified ModuleGraph\n *\n * @param options - Discovery options\n * @returns Merged ModuleGraph\n */\nexport async function discoverLargeRepo(options: DiscoveryOptions): Promise<ModuleGraph> {\n const cacheEnabled = !!options.outputDir;\n const gitHash = options.gitHash;\n const useCache = options.useCache ?? false;\n\n // Round 1: Structural scan (check cache first)\n printInfo('Large repo detected \u2014 using multi-round discovery');\n printInfo('Round 1: Running structural scan to identify top-level areas...');\n\n let scanResult: StructuralScanResult | null = null;\n\n if (cacheEnabled) {\n scanResult = (useCache || !gitHash)\n ? getCachedStructuralScanAny(options.outputDir!)\n : getCachedStructuralScan(options.outputDir!, gitHash!);\n\n if (scanResult) {\n printInfo(`Using cached structural scan (${scanResult.areas.length} areas)`);\n }\n }\n\n if (!scanResult) {\n scanResult = await performStructuralScan(options);\n\n // Save to cache\n if (cacheEnabled && gitHash) {\n try {\n saveStructuralScan(scanResult, options.outputDir!, gitHash);\n } catch {\n // Non-fatal: cache write failed\n }\n }\n }\n\n if (scanResult.areas.length === 0) {\n throw new Error('Structural scan found no top-level areas. 
The repository may be empty or inaccessible.');\n }\n\n printInfo(`Structural scan found ${scanResult.areas.length} areas: ${scanResult.areas.map(a => cyan(a.name)).join(', ')}`);\n\n // Round 2: Per-area drill-down (sequential to avoid overloading the SDK)\n printInfo('Round 2: Per-area drill-down...');\n const subGraphs: ModuleGraph[] = [];\n const projectName = scanResult.projectInfo.name || 'project';\n\n for (let i = 0; i < scanResult.areas.length; i++) {\n const area = scanResult.areas[i];\n const areaSlug = normalizeModuleId(area.path);\n\n // Check area cache\n let cachedArea: ModuleGraph | null = null;\n if (cacheEnabled && gitHash) {\n cachedArea = getCachedAreaSubGraph(areaSlug, options.outputDir!, gitHash);\n }\n\n if (cachedArea) {\n printInfo(` Area \"${area.name}\" loaded from cache (${cachedArea.modules.length} modules)`);\n subGraphs.push(cachedArea);\n continue;\n }\n\n printInfo(` Discovering area ${i + 1}/${scanResult.areas.length}: ${cyan(area.name)} ${gray(`(${area.path})`)}`);\n try {\n const subGraph = await discoverArea(options, area, projectName);\n printInfo(` Found ${subGraph.modules.length} modules`);\n subGraphs.push(subGraph);\n\n // Save area sub-graph to cache\n if (cacheEnabled && gitHash) {\n try {\n saveAreaSubGraph(areaSlug, subGraph, options.outputDir!, gitHash);\n } catch {\n // Non-fatal: cache write failed\n }\n }\n } catch (error) {\n // Log error but continue with other areas\n printWarning(`Failed to discover area '${area.name}': ${getErrorMessage(error)}`);\n }\n }\n\n if (subGraphs.length === 0) {\n throw new Error('All area discoveries failed. Cannot produce a module graph.');\n }\n\n // Merge sub-graphs\n printInfo(`Merging ${subGraphs.length} area sub-graphs...`);\n const merged = mergeSubGraphs(subGraphs, scanResult);\n printInfo(`Merged result: ${merged.modules.length} modules, ${merged.categories.length} categories`);\n return merged;\n}\n\n// ============================================================================\n// Round 1: Structural Scan\n// ============================================================================\n\n/**\n * Perform the structural scan (first pass).\n */\nasync function performStructuralScan(options: DiscoveryOptions): Promise<StructuralScanResult> {\n const service = getCopilotSDKService();\n const prompt = buildStructuralScanPrompt(options.repoPath);\n\n const sendOptions: SendMessageOptions = {\n prompt,\n workingDirectory: options.repoPath,\n availableTools: DISCOVERY_TOOLS,\n onPermissionRequest: readOnlyPermissions,\n usePool: false,\n timeoutMs: STRUCTURAL_SCAN_TIMEOUT_MS,\n };\n\n if (options.model) {\n sendOptions.model = options.model;\n }\n\n const result = await service.sendMessage(sendOptions);\n\n if (!result.success || !result.response) {\n throw new Error(`Structural scan failed: ${result.error || 'empty response'}`);\n }\n\n return parseStructuralScanResponse(result.response);\n}\n\n// ============================================================================\n// Round 2: Per-Area Drill-Down\n// ============================================================================\n\n/**\n * Discover a single area of a large repository.\n */\nasync function discoverArea(\n options: DiscoveryOptions,\n area: TopLevelArea,\n projectName: string\n): Promise<ModuleGraph> {\n const service = getCopilotSDKService();\n const prompt = buildFocusedDiscoveryPrompt(\n options.repoPath,\n area.path,\n area.description,\n projectName\n );\n\n const sendOptions: SendMessageOptions = {\n prompt,\n workingDirectory: 
options.repoPath,\n availableTools: DISCOVERY_TOOLS,\n onPermissionRequest: readOnlyPermissions,\n usePool: false,\n timeoutMs: PER_AREA_TIMEOUT_MS,\n };\n\n if (options.model) {\n sendOptions.model = options.model;\n }\n\n const result = await service.sendMessage(sendOptions);\n\n if (!result.success || !result.response) {\n throw new Error(`Area discovery failed for '${area.name}': ${result.error || 'empty response'}`);\n }\n\n return parseModuleGraphResponse(result.response);\n}\n\n// ============================================================================\n// Sub-Graph Merging\n// ============================================================================\n\n/**\n * Merge multiple sub-graphs from area discoveries into a unified ModuleGraph.\n *\n * - Deduplicates modules by ID\n * - Tags each module with its area slug\n * - Populates graph.areas from TopLevelArea[]\n * - Merges categories (deduplicating by name)\n * - Resolves cross-area dependencies\n * - Combines architecture notes\n */\nexport function mergeSubGraphs(\n subGraphs: ModuleGraph[],\n scanResult: StructuralScanResult\n): ModuleGraph {\n // Merge project info (take from first sub-graph, supplement with scan result)\n const firstProject = subGraphs[0].project;\n const project = {\n name: scanResult.projectInfo.name || firstProject.name,\n description: scanResult.projectInfo.description || firstProject.description,\n language: scanResult.projectInfo.language || firstProject.language,\n buildSystem: scanResult.projectInfo.buildSystem || firstProject.buildSystem,\n entryPoints: firstProject.entryPoints,\n };\n\n // Build area-to-graph mapping for tagging modules with their area\n // Each sub-graph corresponds to one area (same order as scanResult.areas)\n const areaModuleMap = new Map<string, string[]>();\n\n // Merge modules (deduplicate by ID) and tag with area slug\n const moduleMap = new Map<string, ModuleInfo>();\n for (let i = 0; i < subGraphs.length; i++) {\n const graph = subGraphs[i];\n const area = scanResult.areas[i];\n const areaSlug = area ? normalizeModuleId(area.path) : undefined;\n\n for (const mod of graph.modules) {\n if (!moduleMap.has(mod.id)) {\n // Tag module with its area\n const taggedMod = areaSlug ? { ...mod, area: areaSlug } : mod;\n moduleMap.set(mod.id, taggedMod);\n\n // Track which modules belong to each area\n if (areaSlug) {\n if (!areaModuleMap.has(areaSlug)) {\n areaModuleMap.set(areaSlug, []);\n }\n areaModuleMap.get(areaSlug)!.push(mod.id);\n }\n }\n }\n }\n const modules = Array.from(moduleMap.values());\n\n // Merge categories (deduplicate by name)\n const categoryMap = new Map<string, CategoryInfo>();\n for (const graph of subGraphs) {\n for (const cat of graph.categories) {\n if (!categoryMap.has(cat.name)) {\n categoryMap.set(cat.name, cat);\n }\n }\n }\n const categories = Array.from(categoryMap.values());\n\n // Validate cross-area dependencies\n const moduleIds = new Set(modules.map(m => m.id));\n for (const mod of modules) {\n mod.dependencies = mod.dependencies.filter(dep => moduleIds.has(dep));\n mod.dependents = mod.dependents.filter(dep => moduleIds.has(dep));\n }\n\n // Combine architecture notes\n const architectureNotes = subGraphs\n .map(g => g.architectureNotes)\n .filter(Boolean)\n .join('\\n\\n');\n\n // Build AreaInfo[] from TopLevelArea[] + module assignments\n const areas: AreaInfo[] | undefined = scanResult.areas.length > 0\n ? 
scanResult.areas.map(topLevelArea => {\n const areaSlug = normalizeModuleId(topLevelArea.path);\n return {\n id: areaSlug,\n name: topLevelArea.name,\n path: topLevelArea.path,\n description: topLevelArea.description,\n modules: areaModuleMap.get(areaSlug) || [],\n };\n })\n : undefined;\n\n return {\n project,\n modules,\n categories,\n architectureNotes,\n ...(areas ? { areas } : {}),\n };\n}\n", "/**\n * Iterative Discovery \u2014 Probe Prompts\n *\n * Prompt templates for per-topic probe sessions.\n * Each probe searches the codebase for evidence of a specific topic.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport type { TopicSeed } from '../../types';\n\n// ============================================================================\n// Topic Probe Prompt\n// ============================================================================\n\n/**\n * JSON schema for TopicProbeResult.\n */\nconst TOPIC_PROBE_RESULT_SCHEMA = `{\n \"topic\": \"string \u2014 the topic that was probed\",\n \"foundModules\": [\n {\n \"id\": \"string \u2014 suggested module ID (kebab-case) describing the FEATURE, not the file path\",\n \"name\": \"string \u2014 human-readable name describing what this module DOES\",\n \"path\": \"string \u2014 path relative to repo root\",\n \"purpose\": \"string \u2014 what this module does for users or the system\",\n \"keyFiles\": [\"string \u2014 key file paths relative to repo root\"],\n \"evidence\": \"string \u2014 behavioral evidence: what functions/APIs/data flows prove this belongs to the topic\",\n \"lineRanges\": [[number, number]] \u2014 optional line ranges for monolithic files\n }\n ],\n \"discoveredTopics\": [\n {\n \"topic\": \"string \u2014 topic name (kebab-case) describing the FEATURE concern\",\n \"description\": \"string \u2014 what this feature/concern does\",\n \"hints\": [\"string \u2014 search hints\"],\n \"source\": \"string \u2014 where it was discovered (e.g., file path)\"\n }\n ],\n \"dependencies\": [\"string \u2014 IDs of other topics this topic depends on\"],\n \"confidence\": \"number \u2014 confidence level (0-1)\"\n}`;\n\n/**\n * Build the prompt for a per-topic probe session.\n *\n * @param repoPath - Absolute path to the repository\n * @param topic - The topic seed to probe\n * @param focus - Optional subtree to focus on\n * @returns The rendered prompt string\n */\nexport function buildProbePrompt(\n repoPath: string,\n topic: TopicSeed,\n focus?: string\n): string {\n const focusSection = focus\n ? `\\n## Focus Area\\n\\nFocus your analysis on the subtree: ${focus}\\nOnly include modules within or directly related to this area.\\n`\n : '';\n\n const hintsList = topic.hints.length > 0\n ? topic.hints.map(h => `- ${h}`).join('\\n')\n : `- ${topic.topic}`;\n\n return `You are investigating the topic \"${topic.topic}\" in this codebase.\nYou have access to grep, glob, and view tools to explore the repository at ${repoPath}.\n\n## Topic Description\n\n${topic.description}\n\n## Search Hints\n\nUse these keywords to find related code:\n${hintsList}\n${focusSection}\n## Your Task\n\n1. Use \\`grep\\` to search for hint keywords across the codebase\n2. Use \\`view\\` to read files that match your searches\n3. For large files, sample sections rather than reading the entire file\n4. Identify feature-level modules belonging to this topic (group related files together)\n5. Note any ADJACENT topics you discover (related but distinct concerns)\n6. 
Return JSON matching the TopicProbeResult schema\n\n## Exploration Strategy\n\n- Start with broad grep searches using the hints\n- Read key files that match (entry points, config files, main implementation files)\n- Focus on BEHAVIORAL evidence: what functions are called, what APIs are exposed, what data flows through the code\n- For monolithic files, identify specific line ranges that belong to this topic\n- Look for patterns: imports, exports, function names, class names, API surfaces, event handlers\n- If you find related but distinct topics, add them to discoveredTopics\n\n## Module Naming Guidance\n\nModule IDs should describe WHAT the code does, not echo file/directory names.\n\n**Good**: \"session-pool-manager\", \"yaml-pipeline-executor\", \"comment-anchoring\"\n**Bad**: \"src-ai-service\", \"pipeline-core-index\", \"comment-anchor\" (just the file name)\n\n## Output Format\n\nReturn a **single JSON object** matching this schema exactly. Do NOT wrap it in markdown code blocks. Return raw JSON only.\n\n${TOPIC_PROBE_RESULT_SCHEMA}\n\n## Rules\n\n- Module IDs must be unique lowercase kebab-case identifiers describing the FEATURE\n- Do NOT derive module IDs from file paths \u2014 describe what the module DOES\n- All paths must be relative to the repo root (no absolute paths)\n- Confidence should reflect how certain you are that you found all relevant code (0.0 = uncertain, 1.0 = very confident)\n- discoveredTopics should only include NEW topics not already in the seed list\n- dependencies should reference other topic IDs, not module IDs\n- For large monolithic files, use lineRanges to specify which sections belong to this topic\n- evidence should cite behavioral proof: function calls, API surfaces, data flows \u2014 not just \"found in file X\"`;\n}\n", "/**\n * Iterative Discovery \u2014 Probe Response Parser\n *\n * Parses AI responses from topic probe sessions into TopicProbeResult.\n * Handles JSON extraction, validation, and normalization.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport type { TopicProbeResult, ProbeFoundModule, DiscoveredTopic } from './types';\nimport { normalizeModuleId } from '../../schemas';\nimport { parseAIJsonResponse } from '../../utils/parse-ai-response';\n\n// ============================================================================\n// Probe Response Parsing\n// ============================================================================\n\n/**\n * Parse an AI response into a TopicProbeResult.\n *\n * @param response - Raw AI response string\n * @param topic - The topic that was probed (for validation)\n * @returns Parsed TopicProbeResult\n * @throws Error if response cannot be parsed\n */\nexport function parseProbeResponse(response: string, topic: string): TopicProbeResult {\n const obj = parseAIJsonResponse(response, { context: 'probe' });\n\n // Validate required fields\n if (typeof obj.topic !== 'string') {\n throw new Error('Missing or invalid \"topic\" field in probe response');\n }\n\n if (!Array.isArray(obj.foundModules)) {\n throw new Error('Missing or invalid \"foundModules\" field in probe response');\n }\n\n // Parse foundModules\n const foundModules: ProbeFoundModule[] = [];\n for (let i = 0; i < obj.foundModules.length; i++) {\n const item = obj.foundModules[i];\n if (typeof item !== 'object' || item === null) {\n continue; // Skip invalid items\n }\n\n const mod = item as Record<string, unknown>;\n\n // Required fields\n if (typeof mod.id !== 'string' || typeof mod.name !== 'string' || typeof mod.path !== 
'string') {\n continue; // Skip modules missing required fields\n }\n\n // Normalize module ID\n const id = normalizeModuleId(String(mod.id));\n\n // Parse lineRanges if present\n let lineRanges: [number, number][] | undefined;\n if (Array.isArray(mod.lineRanges)) {\n const ranges: [number, number][] = [];\n for (const range of mod.lineRanges) {\n if (Array.isArray(range) && range.length === 2 &&\n typeof range[0] === 'number' && typeof range[1] === 'number') {\n ranges.push([range[0], range[1]]);\n }\n }\n if (ranges.length > 0) {\n lineRanges = ranges;\n }\n }\n\n foundModules.push({\n id,\n name: String(mod.name),\n path: String(mod.path),\n purpose: String(mod.purpose || ''),\n keyFiles: parseStringArray(mod.keyFiles),\n evidence: String(mod.evidence || ''),\n lineRanges,\n });\n }\n\n // Parse discoveredTopics (optional)\n const discoveredTopics: DiscoveredTopic[] = [];\n if (Array.isArray(obj.discoveredTopics)) {\n for (const item of obj.discoveredTopics) {\n if (typeof item !== 'object' || item === null) {\n continue;\n }\n const dt = item as Record<string, unknown>;\n if (typeof dt.topic === 'string' && typeof dt.description === 'string') {\n discoveredTopics.push({\n topic: normalizeModuleId(String(dt.topic)),\n description: String(dt.description),\n hints: parseStringArray(dt.hints),\n source: String(dt.source || ''),\n });\n }\n }\n }\n\n // Parse dependencies (optional)\n const dependencies: string[] = parseStringArray(obj.dependencies);\n\n // Parse confidence (default to 0.5 if not provided)\n let confidence = 0.5;\n if (typeof obj.confidence === 'number') {\n confidence = Math.max(0, Math.min(1, obj.confidence));\n }\n\n return {\n topic: String(obj.topic),\n foundModules,\n discoveredTopics,\n dependencies,\n confidence,\n };\n}\n\n// ============================================================================\n// Helpers\n// ============================================================================\n\n/**\n * Safely parse an unknown value as a string array.\n */\nfunction parseStringArray(raw: unknown): string[] {\n if (!Array.isArray(raw)) {\n return [];\n }\n return raw\n .filter(item => typeof item === 'string')\n .map(item => String(item));\n}\n", "/**\n * Iterative Discovery \u2014 Probe Session\n *\n * Runs a single topic probe session using the Copilot SDK.\n * Creates a direct session with MCP tools and parses the response.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport {\n getCopilotSDKService,\n type SendMessageOptions,\n type PermissionRequest,\n type PermissionRequestResult,\n} from '@plusplusoneplusplus/pipeline-core';\nimport type { TopicSeed } from '../../types';\nimport type { TopicProbeResult } from './types';\nimport { buildProbePrompt } from './probe-prompts';\nimport { parseProbeResponse } from './probe-response-parser';\nimport { printInfo, printWarning, gray } from '../../logger';\nimport { getErrorMessage } from '../../utils/error-utils';\n\n// ============================================================================\n// Constants\n// ============================================================================\n\n/** Default timeout for probe session: 2 minutes */\nconst DEFAULT_PROBE_TIMEOUT_MS = 120_000;\n\n/** Available tools for probe (read-only file exploration) */\nconst PROBE_TOOLS = ['view', 'grep', 'glob'];\n\n// ============================================================================\n// Permission Handler\n// ============================================================================\n\n/**\n * Read-only 
permission handler for probe sessions.\n * Allows file reads, denies everything else (writes, shell, MCP, URLs).\n */\nfunction readOnlyPermissions(request: PermissionRequest): PermissionRequestResult {\n if (request.kind === 'read') {\n return { kind: 'approved' };\n }\n return { kind: 'denied-by-rules' };\n}\n\n// ============================================================================\n// Probe Session\n// ============================================================================\n\n/**\n * Run a single topic probe session.\n *\n * @param repoPath - Absolute path to the repository\n * @param topic - The topic seed to probe\n * @param options - Probe options (model, timeout, focus)\n * @returns TopicProbeResult (empty result on failure, doesn't throw)\n */\nexport async function runTopicProbe(\n repoPath: string,\n topic: TopicSeed,\n options: {\n model?: string;\n timeout?: number;\n focus?: string;\n } = {}\n): Promise<TopicProbeResult> {\n const service = getCopilotSDKService();\n\n // Check SDK availability\n const availability = await service.isAvailable();\n if (!availability) {\n // Return empty result on SDK unavailability\n return {\n topic: topic.topic,\n foundModules: [],\n discoveredTopics: [],\n dependencies: [],\n confidence: 0,\n };\n }\n\n // Build the prompt\n const prompt = buildProbePrompt(repoPath, topic, options.focus);\n\n // Configure the SDK session\n const sendOptions: SendMessageOptions = {\n prompt,\n workingDirectory: repoPath,\n availableTools: PROBE_TOOLS,\n onPermissionRequest: readOnlyPermissions,\n usePool: false, // Direct session for MCP tool access\n timeoutMs: options.timeout || DEFAULT_PROBE_TIMEOUT_MS,\n };\n\n // Set model if specified\n if (options.model) {\n sendOptions.model = options.model;\n }\n\n try {\n // Send the message\n printInfo(` Probing topic: ${topic.topic} ${gray(`(timeout: ${(options.timeout || DEFAULT_PROBE_TIMEOUT_MS) / 1000}s)`)}`);\n const result = await service.sendMessage(sendOptions);\n\n if (!result.success || !result.response) {\n // Return empty result on failure\n printWarning(` Probe failed for \"${topic.topic}\": ${result.error || 'empty response'}`);\n return {\n topic: topic.topic,\n foundModules: [],\n discoveredTopics: [],\n dependencies: [],\n confidence: 0,\n };\n }\n\n // Parse the response\n const parsed = parseProbeResponse(result.response, topic.topic);\n printInfo(` Probe \"${topic.topic}\" found ${parsed.foundModules.length} modules ${gray(`(confidence: ${parsed.confidence})`)}`);\n return parsed;\n } catch (error) {\n // Return empty result on error (don't crash the loop)\n printWarning(` Probe error for \"${topic.topic}\": ${getErrorMessage(error)}`);\n return {\n topic: topic.topic,\n foundModules: [],\n discoveredTopics: [],\n dependencies: [],\n confidence: 0,\n };\n }\n}\n", "/**\n * Iterative Discovery \u2014 Merge Prompts\n *\n * Prompt templates for the merge + gap analysis session.\n * Merges probe results, identifies gaps, and determines convergence.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport type { ModuleGraph } from '../../types';\nimport type { TopicProbeResult } from './types';\nimport { MODULE_GRAPH_SCHEMA } from '../../schemas';\n\n// ============================================================================\n// Merge Prompt\n// ============================================================================\n\n/**\n * JSON schema for MergeResult.\n */\nconst MERGE_RESULT_SCHEMA = `{\n \"graph\": ${MODULE_GRAPH_SCHEMA.replace(/\\n/g, '\\n ')},\n 
\"newTopics\": [\n {\n \"topic\": \"string \u2014 topic name (kebab-case)\",\n \"description\": \"string \u2014 description\",\n \"hints\": [\"string \u2014 search hints\"]\n }\n ],\n \"converged\": \"boolean \u2014 whether convergence was reached\",\n \"coverage\": \"number \u2014 coverage estimate (0-1)\",\n \"reason\": \"string \u2014 reason for convergence or why not converged\"\n}`;\n\n/**\n * Build the merge + gap analysis prompt.\n *\n * @param repoPath - Absolute path to the repository\n * @param probeResults - All probe results from the current round\n * @param existingGraph - Existing partial graph (if any, from prior rounds)\n * @returns The rendered prompt string\n */\nexport function buildMergePrompt(\n repoPath: string,\n probeResults: TopicProbeResult[],\n existingGraph: ModuleGraph | null\n): string {\n const probeResultsJson = JSON.stringify(probeResults, null, 2);\n const existingGraphJson = existingGraph ? JSON.stringify(existingGraph, null, 2) : null;\n\n const existingGraphSection = existingGraph\n ? `\\n## Existing Graph (from prior rounds)\\n\\n${existingGraphJson}\\n\\nMerge new findings into this existing graph.`\n : '\\n## First Round\\n\\nThis is the first round. Build the initial graph from the probe results.';\n\n return `You are merging topic probe results and analyzing coverage gaps in the codebase at ${repoPath}.\nYou have access to grep, glob, and view tools to explore the repository.\n\n## Probe Results (Current Round)\n\n${probeResultsJson}\n${existingGraphSection}\n## Your Task\n\n1. **Merge all probe results** into a coherent ModuleGraph:\n - Combine modules found across different probes\n - Resolve overlapping module claims (same files claimed by multiple topics)\n - Deduplicate modules with the same ID or path\n - Merge dependencies and dependents\n - Ensure module IDs are unique and normalized\n\n2. **Identify coverage gaps**:\n - Use glob(\"**/*\") to see all directories/files\n - Identify directories/files that NO probe touched\n - Estimate what percentage of the codebase is covered (coverage: 0-1)\n\n3. **Collect discovered topics**:\n - Gather all discoveredTopics from all probes\n - Deduplicate topics (same topic from multiple sources)\n - Filter out topics that are too vague or already covered\n\n4. **Determine convergence**:\n - converged=true if: coverage >= 0.8 AND no new topics discovered\n - converged=true if: all major areas have been probed and no gaps remain\n - converged=false if: significant gaps exist or new topics were discovered\n\n## Output Format\n\nReturn a **single JSON object** matching this schema exactly. Do NOT wrap it in markdown code blocks. 
Return raw JSON only.\n\n${MERGE_RESULT_SCHEMA}\n\n## Rules\n\n- Module IDs must be unique lowercase kebab-case identifiers\n- All paths must be relative to the repo root\n- When resolving overlaps, prefer the probe with higher confidence\n- coverage should be a realistic estimate (0.0 = nothing covered, 1.0 = fully covered)\n- newTopics should only include topics worth probing in the next round\n- reason should explain why convergence was reached or not (e.g., \"coverage 0.85, no new topics\" or \"coverage 0.6, 3 new topics discovered\")\n- If this is the first round, build a complete graph structure even if coverage is low`;\n}\n", "/**\n * Iterative Discovery \u2014 Merge Response Parser\n *\n * Parses AI responses from merge sessions into MergeResult.\n * Handles JSON extraction, validation, and normalization.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport type { ModuleGraph, TopicSeed } from '../../types';\nimport type { MergeResult } from './types';\nimport { parseModuleGraphResponse } from '../response-parser';\nimport { normalizeModuleId } from '../../schemas';\nimport { parseAIJsonResponse } from '../../utils/parse-ai-response';\nimport { getErrorMessage } from '../../utils/error-utils';\n\n// ============================================================================\n// Merge Response Parsing\n// ============================================================================\n\n/**\n * Parse an AI response into a MergeResult.\n *\n * @param response - Raw AI response string\n * @returns Parsed MergeResult\n * @throws Error if response cannot be parsed\n */\nexport function parseMergeResponse(response: string): MergeResult {\n const obj = parseAIJsonResponse(response, { context: 'merge' });\n\n // Validate required fields\n if (typeof obj.graph !== 'object' || obj.graph === null) {\n throw new Error('Missing or invalid \"graph\" field in merge response');\n }\n\n // Parse graph using existing parser\n let graph: ModuleGraph;\n try {\n graph = parseModuleGraphResponse(JSON.stringify(obj.graph));\n } catch (parseError) {\n throw new Error(`Invalid graph in merge response: ${getErrorMessage(parseError)}`);\n }\n\n // Parse newTopics (optional, defaults to empty array)\n const newTopics: TopicSeed[] = [];\n if (Array.isArray(obj.newTopics)) {\n for (const item of obj.newTopics) {\n if (typeof item !== 'object' || item === null) {\n continue;\n }\n const topic = item as Record<string, unknown>;\n if (typeof topic.topic === 'string' && typeof topic.description === 'string') {\n newTopics.push({\n topic: normalizeModuleId(String(topic.topic)),\n description: String(topic.description),\n hints: parseStringArray(topic.hints),\n });\n }\n }\n }\n\n // Parse converged (required boolean)\n const converged = typeof obj.converged === 'boolean' ? obj.converged : false;\n\n // Parse coverage (default to 0 if not provided)\n let coverage = 0;\n if (typeof obj.coverage === 'number') {\n coverage = Math.max(0, Math.min(1, obj.coverage));\n }\n\n // Parse reason (default to empty string)\n const reason = typeof obj.reason === 'string' ? 
String(obj.reason) : '';\n\n return {\n graph,\n newTopics,\n converged,\n coverage,\n reason,\n };\n}\n\n// ============================================================================\n// Helpers\n// ============================================================================\n\n/**\n * Safely parse an unknown value as a string array.\n */\nfunction parseStringArray(raw: unknown): string[] {\n if (!Array.isArray(raw)) {\n return [];\n }\n return raw\n .filter(item => typeof item === 'string')\n .map(item => String(item));\n}\n", "/**\n * Iterative Discovery \u2014 Merge Session\n *\n * Runs the merge + gap analysis session using the Copilot SDK.\n * Merges probe results, identifies gaps, and determines convergence.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport {\n getCopilotSDKService,\n type SendMessageOptions,\n type PermissionRequest,\n type PermissionRequestResult,\n} from '@plusplusoneplusplus/pipeline-core';\nimport type { ModuleGraph, ModuleInfo, CategoryInfo } from '../../types';\nimport type { TopicProbeResult, MergeResult } from './types';\nimport { normalizeModuleId, isValidModuleId } from '../../schemas';\nimport { buildMergePrompt } from './merge-prompts';\nimport { parseMergeResponse } from './merge-response-parser';\nimport { printInfo, printWarning, gray } from '../../logger';\nimport { getErrorMessage } from '../../utils/error-utils';\n\n// ============================================================================\n// Constants\n// ============================================================================\n\n/** Default timeout for merge session: 3 minutes */\nconst DEFAULT_MERGE_TIMEOUT_MS = 180_000;\n\n/** Available tools for merge (read-only file exploration) */\nconst MERGE_TOOLS = ['view', 'grep', 'glob'];\n\n// ============================================================================\n// Permission Handler\n// ============================================================================\n\n/**\n * Read-only permission handler for merge sessions.\n * Allows file reads, denies everything else (writes, shell, MCP, URLs).\n */\nfunction readOnlyPermissions(request: PermissionRequest): PermissionRequestResult {\n if (request.kind === 'read') {\n return { kind: 'approved' };\n }\n return { kind: 'denied-by-rules' };\n}\n\n// ============================================================================\n// Merge Session\n// ============================================================================\n\n/**\n * Run the merge + gap analysis session.\n *\n * @param repoPath - Absolute path to the repository\n * @param probeResults - All probe results from the current round\n * @param existingGraph - Existing partial graph (if any, from prior rounds)\n * @param options - Merge options (model, timeout)\n * @returns MergeResult (partial result on failure, doesn't throw)\n */\nexport async function mergeProbeResults(\n repoPath: string,\n probeResults: TopicProbeResult[],\n existingGraph: ModuleGraph | null,\n options: {\n model?: string;\n timeout?: number;\n } = {}\n): Promise<MergeResult> {\n const service = getCopilotSDKService();\n\n // Check SDK availability\n const availability = await service.isAvailable();\n if (!availability) {\n printWarning('SDK unavailable \u2014 using local merge fallback');\n return buildLocalMergeResult(probeResults, existingGraph, 'SDK unavailable');\n }\n\n // Build the prompt\n const prompt = buildMergePrompt(repoPath, probeResults, existingGraph);\n\n // Configure the SDK session\n const sendOptions: 
SendMessageOptions = {\n prompt,\n workingDirectory: repoPath,\n availableTools: MERGE_TOOLS,\n onPermissionRequest: readOnlyPermissions,\n usePool: false, // Direct session for MCP tool access\n timeoutMs: options.timeout || DEFAULT_MERGE_TIMEOUT_MS,\n };\n\n // Set model if specified\n if (options.model) {\n sendOptions.model = options.model;\n }\n\n try {\n // Send the message\n const validProbes = probeResults.filter(r => r && r.foundModules.length > 0).length;\n printInfo(` Sending merge prompt ${gray(`(${validProbes} valid probes, ${existingGraph ? existingGraph.modules.length + ' existing modules' : 'no prior graph'})`)}`);\n const result = await service.sendMessage(sendOptions);\n\n if (!result.success || !result.response) {\n printWarning(`Merge session failed: ${result.error || 'empty response'} \u2014 using local merge fallback`);\n return buildLocalMergeResult(probeResults, existingGraph, 'Merge session failed');\n }\n\n // Parse the response\n const mergeResult = parseMergeResponse(result.response);\n\n // Guard: if AI merge returned fewer modules than probes found, use local merge\n const probeModuleCount = probeResults.reduce((sum, r) => sum + (r?.foundModules?.length || 0), 0);\n if (mergeResult.graph.modules.length === 0 && probeModuleCount > 0) {\n printWarning(`AI merge returned 0 modules but probes found ${probeModuleCount} \u2014 using local merge fallback`);\n return buildLocalMergeResult(probeResults, existingGraph, 'AI merge returned empty graph');\n }\n\n return mergeResult;\n } catch (error) {\n printWarning(`Merge session error: ${getErrorMessage(error)} \u2014 using local merge fallback`);\n return buildLocalMergeResult(probeResults, existingGraph, `Merge session error: ${getErrorMessage(error)}`);\n }\n}\n\n// ============================================================================\n// Local Merge Fallback\n// ============================================================================\n\n/**\n * Build a MergeResult locally from probe data when the AI merge fails.\n * Deduplicates modules by ID, infers categories, and collects new topics.\n */\nfunction buildLocalMergeResult(\n probeResults: TopicProbeResult[],\n existingGraph: ModuleGraph | null,\n reason: string,\n): MergeResult {\n const moduleMap = new Map<string, ModuleInfo>();\n const categorySet = new Set<string>();\n\n // Incorporate existing graph modules first\n if (existingGraph) {\n for (const mod of existingGraph.modules) {\n moduleMap.set(mod.id, mod);\n if (mod.category) {\n categorySet.add(mod.category);\n }\n }\n }\n\n // Merge probe results into modules\n for (const probe of probeResults) {\n if (!probe || !probe.foundModules) { continue; }\n\n for (const found of probe.foundModules) {\n let id = found.id;\n if (!isValidModuleId(id)) {\n id = normalizeModuleId(id);\n }\n\n if (moduleMap.has(id)) { continue; } // Keep first occurrence\n\n const category = probe.topic || 'general';\n categorySet.add(category);\n\n moduleMap.set(id, {\n id,\n name: found.name,\n path: found.path,\n purpose: found.purpose,\n keyFiles: found.keyFiles || [],\n dependencies: [],\n dependents: [],\n complexity: 'medium',\n category,\n });\n }\n }\n\n const modules = Array.from(moduleMap.values());\n const categories: CategoryInfo[] = Array.from(categorySet).map(name => ({\n name,\n description: '',\n }));\n\n // Collect new topics from probes\n const seenTopics = new Set(probeResults.map(p => p?.topic).filter(Boolean));\n const newTopics: { topic: string; description: string; hints: string[] }[] = [];\n for (const 
probe of probeResults) {\n if (!probe?.discoveredTopics) { continue; }\n for (const dt of probe.discoveredTopics) {\n if (!seenTopics.has(dt.topic)) {\n seenTopics.add(dt.topic);\n newTopics.push({\n topic: normalizeModuleId(dt.topic),\n description: dt.description,\n hints: dt.hints || [],\n });\n }\n }\n }\n\n const project = existingGraph?.project || {\n name: 'unknown',\n description: '',\n language: 'unknown',\n buildSystem: 'unknown',\n entryPoints: [],\n };\n\n printInfo(` Local merge: ${modules.length} modules, ${categories.length} categories`);\n\n return {\n graph: {\n project,\n modules,\n categories,\n architectureNotes: existingGraph?.architectureNotes || '',\n },\n newTopics,\n converged: newTopics.length === 0,\n coverage: 0,\n reason: `Local merge fallback: ${reason}`,\n };\n}\n", "/**\n * Iterative Discovery \u2014 Main Convergence Loop\n *\n * Implements breadth-first iterative discovery using topic seeds.\n * Runs parallel probes, merges results, and iterates until convergence.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport type { ModuleGraph, TopicSeed } from '../../types';\nimport type { IterativeDiscoveryOptions, TopicProbeResult } from './types';\nimport { runTopicProbe } from './probe-session';\nimport { mergeProbeResults } from './merge-session';\nimport { printInfo, printWarning, gray, cyan } from '../../logger';\nimport {\n scanCachedProbes,\n scanCachedProbesAny,\n saveProbeResult,\n saveDiscoveryMetadata,\n getDiscoveryMetadata,\n} from '../../cache';\n\n// ============================================================================\n// Concurrency Control\n// ============================================================================\n\n/**\n * Run tasks in parallel with a concurrency limit.\n *\n * @param items - Items to process\n * @param concurrency - Maximum parallel tasks\n * @param fn - Function to run for each item\n * @returns Array of results (in order)\n */\nasync function runParallel<T, R>(\n items: T[],\n concurrency: number,\n fn: (item: T) => Promise<R>\n): Promise<R[]> {\n const results: R[] = new Array(items.length);\n const executing: Promise<void>[] = [];\n\n for (let i = 0; i < items.length; i++) {\n const item = items[i];\n const index = i;\n const promise = fn(item)\n .then(result => {\n results[index] = result;\n })\n .catch(() => {\n // Handle errors gracefully\n })\n .finally(() => {\n // Remove this promise from executing array\n const idx = executing.indexOf(promise);\n if (idx !== -1) {\n executing.splice(idx, 1);\n }\n });\n\n executing.push(promise);\n\n if (executing.length >= concurrency) {\n await Promise.race(executing);\n }\n }\n\n // Wait for remaining tasks\n await Promise.all(executing);\n\n return results;\n}\n\n// ============================================================================\n// Iterative Discovery\n// ============================================================================\n\n/**\n * Run iterative breadth-first discovery.\n *\n * Flow:\n * 1. Load seeds (already provided in options)\n * 2. Run N parallel probe sessions (one per topic)\n * 3. Merge probe results + identify gaps + discover new topics\n * 4. Iterate until convergence (no new topics, good coverage, or max rounds)\n * 5. Return final ModuleGraph\n *\n * @param options - Iterative discovery options\n * @returns Final ModuleGraph\n */\nexport async function runIterativeDiscovery(\n options: IterativeDiscoveryOptions\n): Promise<ModuleGraph> {\n const maxRounds = options.maxRounds ?? 
3;\n const concurrency = options.concurrency ?? 5;\n const coverageThreshold = options.coverageThreshold ?? 0.8;\n\n let currentTopics: TopicSeed[] = [...options.seeds];\n let currentGraph: ModuleGraph | null = null;\n let round = 0;\n\n // Handle empty seeds\n if (currentTopics.length === 0) {\n return {\n project: {\n name: 'unknown',\n description: '',\n language: 'unknown',\n buildSystem: 'unknown',\n entryPoints: [],\n },\n modules: [],\n categories: [],\n architectureNotes: 'No seeds provided for iterative discovery.',\n };\n }\n\n // Cache configuration\n const cacheEnabled = !!options.outputDir;\n const gitHash = options.gitHash;\n const useCache = options.useCache ?? false;\n\n // Check for round resumption from metadata\n if (cacheEnabled) {\n const metadata = getDiscoveryMetadata(options.outputDir!);\n if (metadata && metadata.gitHash === gitHash && metadata.currentRound > 0) {\n round = metadata.currentRound - 1; // Will be incremented at loop start\n printInfo(`Resuming from round ${metadata.currentRound} (${metadata.completedTopics.length} topics completed)`);\n }\n }\n\n while (round < maxRounds && currentTopics.length > 0) {\n round++;\n\n printInfo(`Round ${round}/${maxRounds}: Probing ${currentTopics.length} topics ${gray(`(concurrency: ${concurrency})`)}`);\n if (currentTopics.length <= 10) {\n printInfo(` Topics: ${currentTopics.map(t => cyan(t.topic)).join(', ')}`);\n }\n\n // Check probe cache \u2014 skip already-completed probes\n let cachedProbes = new Map<string, TopicProbeResult>();\n let topicsToProbe = currentTopics;\n\n if (cacheEnabled) {\n const topicNames = currentTopics.map(t => t.topic);\n const scanResult = (useCache || !gitHash)\n ? scanCachedProbesAny(topicNames, options.outputDir!)\n : scanCachedProbes(topicNames, options.outputDir!, gitHash!);\n\n cachedProbes = scanResult.found;\n topicsToProbe = currentTopics.filter(t => scanResult.missing.includes(t.topic));\n\n if (cachedProbes.size > 0) {\n printInfo(` Loaded ${cachedProbes.size} probes from cache, ${topicsToProbe.length} remaining`);\n }\n }\n\n // Run parallel probes only for uncached topics\n let freshProbeResults: TopicProbeResult[] = [];\n if (topicsToProbe.length > 0) {\n freshProbeResults = await runParallel(\n topicsToProbe,\n concurrency,\n async (topic) => {\n const result = await runTopicProbe(options.repoPath, topic, {\n model: options.model,\n timeout: options.probeTimeout,\n focus: options.focus,\n });\n // Save probe result to cache immediately after completion\n if (cacheEnabled && gitHash && result) {\n try {\n saveProbeResult(topic.topic, result, options.outputDir!, gitHash);\n } catch {\n // Non-fatal: cache write failed\n }\n }\n return result;\n }\n );\n }\n\n // Combine cached + fresh probe results (in original topic order)\n const allProbeResults: TopicProbeResult[] = currentTopics.map(t => {\n const cached = cachedProbes.get(t.topic);\n if (cached) {\n return cached;\n }\n const fresh = freshProbeResults.find(r => r?.topic === t.topic);\n return fresh ?? 
{\n topic: t.topic,\n foundModules: [],\n discoveredTopics: [],\n dependencies: [],\n confidence: 0,\n };\n });\n\n // Count successful probes\n const successfulProbes = allProbeResults.filter(r => r && r.foundModules.length > 0).length;\n const totalModulesFound = allProbeResults.reduce((sum, r) => sum + (r?.foundModules?.length || 0), 0);\n printInfo(` Probes completed: ${successfulProbes}/${currentTopics.length} successful, ${totalModulesFound} modules found`);\n\n // Merge results\n printInfo(' Merging probe results...');\n const mergeResult = await mergeProbeResults(\n options.repoPath,\n allProbeResults,\n currentGraph,\n {\n model: options.model,\n timeout: options.mergeTimeout,\n }\n );\n\n currentGraph = mergeResult.graph;\n printInfo(` Merged graph: ${currentGraph.modules.length} modules, coverage: ${(mergeResult.coverage * 100).toFixed(0)}%`);\n\n // Save round progress to metadata\n if (cacheEnabled && gitHash) {\n try {\n saveDiscoveryMetadata({\n gitHash,\n timestamp: Date.now(),\n mode: 'iterative',\n currentRound: round,\n maxRounds,\n completedTopics: currentTopics.map(t => t.topic),\n pendingTopics: mergeResult.newTopics.map(t => t.topic),\n converged: mergeResult.converged,\n coverage: mergeResult.coverage,\n }, options.outputDir!);\n } catch {\n // Non-fatal: metadata save failed\n }\n }\n\n // Check convergence\n if (mergeResult.converged) {\n printInfo(` Converged${mergeResult.reason ? ` \u2014 ${mergeResult.reason}` : ''}`);\n break;\n }\n\n // Check coverage threshold\n if (mergeResult.coverage >= coverageThreshold && mergeResult.newTopics.length === 0) {\n printInfo(` Coverage threshold reached (${(mergeResult.coverage * 100).toFixed(0)}% >= ${(coverageThreshold * 100).toFixed(0)}%)`);\n break;\n }\n\n // Next round: probe newly discovered topics\n if (mergeResult.newTopics.length > 0) {\n printInfo(` Discovered ${mergeResult.newTopics.length} new topics for next round`);\n }\n currentTopics = mergeResult.newTopics.map(t => ({\n topic: t.topic,\n description: t.description,\n hints: t.hints,\n }));\n }\n\n // Return final graph (should never be null due to empty seeds check)\n return currentGraph!;\n}\n", "/**\n * Discovery Phase \u2014 Public API\n *\n * Main entry point for the discovery phase (Phase 1).\n * Analyzes a local repository and produces a ModuleGraph JSON\n * describing the codebase structure, modules, and dependencies.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport type { DiscoveryOptions, DiscoveryResult } from '../types';\nimport { runDiscoverySession } from './discovery-session';\nimport { isLargeRepo, discoverLargeRepo } from './large-repo-handler';\nimport { printInfo } from '../logger';\n\n// Re-export key types and functions\nexport { DiscoveryError } from './discovery-session';\nexport type { DiscoverySessionResult } from './discovery-session';\nexport { LARGE_REPO_THRESHOLD, mergeSubGraphs } from './large-repo-handler';\nexport { parseModuleGraphResponse, parseStructuralScanResponse, normalizePath } from './response-parser';\nexport { buildDiscoveryPrompt, buildStructuralScanPrompt, buildFocusedDiscoveryPrompt } from './prompts';\nexport { runIterativeDiscovery } from './iterative/iterative-discovery';\n\n/**\n * Discover the module graph for a repository.\n *\n * This is the main entry point for Phase 1 of the deep-wiki pipeline.\n * It analyzes the repository and returns a structured ModuleGraph.\n *\n * For large repositories (3000+ files), it automatically uses multi-round\n * discovery: first a structural scan, 
then per-area drill-downs.\n *\n * @param options - Discovery options (repoPath is required)\n * @returns DiscoveryResult containing the ModuleGraph and timing info\n */\nexport async function discoverModuleGraph(options: DiscoveryOptions): Promise<DiscoveryResult> {\n const startTime = Date.now();\n\n let graph;\n\n // Check if the repo is large enough for multi-round discovery\n const large = await isLargeRepo(options.repoPath);\n\n if (large) {\n graph = await discoverLargeRepo(options);\n } else {\n printInfo('Standard-size repo \u2014 running single-pass discovery');\n const sessionResult = await runDiscoverySession(options);\n graph = sessionResult.graph;\n\n const duration = Date.now() - startTime;\n return {\n graph,\n duration,\n tokenUsage: sessionResult.tokenUsage,\n };\n }\n\n const duration = Date.now() - startTime;\n\n return {\n graph,\n duration,\n };\n}\n", "/**\n * Discover Command\n *\n * Implements the `deep-wiki discover <repo-path>` command.\n * Runs Phase 1 (Discovery) to produce a ModuleGraph JSON.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as path from 'path';\nimport * as fs from 'fs';\nimport type { DiscoverCommandOptions } from '../types';\nimport { discoverModuleGraph, DiscoveryError, runIterativeDiscovery } from '../discovery';\nimport {\n getCachedGraph,\n getCachedGraphAny,\n saveGraph,\n getFolderHeadHash,\n saveSeedsCache,\n getCachedSeeds,\n getCachedSeedsAny,\n clearDiscoveryCache,\n} from '../cache';\nimport { generateTopicSeeds, parseSeedFile } from '../seeds';\nimport {\n Spinner,\n printSuccess,\n printError,\n printWarning,\n printInfo,\n printHeader,\n printKeyValue,\n bold,\n green,\n cyan,\n gray,\n} from '../logger';\nimport { EXIT_CODES } from '../cli';\nimport { getErrorMessage } from '../utils/error-utils';\n\n// ============================================================================\n// Execute Discover Command\n// ============================================================================\n\n/**\n * Execute the discover command.\n *\n * @param repoPath - Path to the local git repository\n * @param options - Command options\n * @returns Exit code\n */\nexport async function executeDiscover(\n repoPath: string,\n options: DiscoverCommandOptions\n): Promise<number> {\n // Resolve to absolute path\n const absoluteRepoPath = path.resolve(repoPath);\n\n // Validate the repo path exists\n if (!fs.existsSync(absoluteRepoPath)) {\n printError(`Repository path does not exist: ${absoluteRepoPath}`);\n return EXIT_CODES.CONFIG_ERROR;\n }\n\n if (!fs.statSync(absoluteRepoPath).isDirectory()) {\n printError(`Repository path is not a directory: ${absoluteRepoPath}`);\n return EXIT_CODES.CONFIG_ERROR;\n }\n\n // Print header\n printHeader('Deep Wiki \u2014 Discovery Phase');\n printKeyValue('Repository', absoluteRepoPath);\n if (options.focus) {\n printKeyValue('Focus', options.focus);\n }\n if (options.model) {\n printKeyValue('Model', options.model);\n }\n process.stderr.write('\\n');\n\n // Get git hash for cache operations (subfolder-scoped when applicable)\n let currentGitHash: string | null = null;\n try {\n currentGitHash = await getFolderHeadHash(absoluteRepoPath);\n } catch {\n // Non-fatal: caching won't work but discovery continues\n }\n\n // Clear discovery cache if --force\n if (options.force) {\n clearDiscoveryCache(options.output);\n }\n\n // Check cache (unless --force)\n if (!options.force) {\n try {\n const cached = options.useCache\n ? 
getCachedGraphAny(options.output)\n : await getCachedGraph(absoluteRepoPath, options.output);\n if (cached) {\n printSuccess('Found cached module graph (git hash matches)');\n printKeyValue('Modules', String(cached.graph.modules.length));\n printKeyValue('Categories', String(cached.graph.categories.length));\n\n // Output JSON to stdout\n const jsonOutput = JSON.stringify(cached.graph, null, 2);\n process.stdout.write(jsonOutput + '\\n');\n\n return EXIT_CODES.SUCCESS;\n }\n } catch {\n // Cache check failed, continue with fresh discovery\n }\n }\n\n // Run discovery\n const spinner = new Spinner();\n spinner.start('Discovering module graph...');\n\n try {\n let result;\n\n // Check if iterative discovery is requested\n if (options.seeds) {\n // Load or generate seeds\n let seeds;\n if (options.seeds === 'auto') {\n // Check for cached seeds first\n if (!options.force && currentGitHash) {\n const cachedSeeds = options.useCache\n ? getCachedSeedsAny(options.output)\n : getCachedSeeds(options.output, currentGitHash);\n if (cachedSeeds && cachedSeeds.length > 0) {\n seeds = cachedSeeds;\n printInfo(`Using ${seeds.length} cached seeds`);\n }\n }\n\n if (!seeds) {\n spinner.update('Generating topic seeds...');\n seeds = await generateTopicSeeds(absoluteRepoPath, {\n maxTopics: 50,\n model: options.model,\n verbose: options.verbose,\n });\n\n // Cache the generated seeds\n if (currentGitHash) {\n try {\n saveSeedsCache(seeds, options.output, currentGitHash);\n } catch {\n // Non-fatal\n }\n }\n }\n\n spinner.succeed(`Generated ${seeds.length} topic seeds`);\n spinner.start('Running iterative discovery...');\n } else {\n // Parse seed file (file-based seeds don't need caching)\n seeds = parseSeedFile(options.seeds);\n printInfo(`Loaded ${seeds.length} seeds from ${options.seeds}`);\n spinner.update('Running iterative discovery...');\n }\n\n // Run iterative discovery with cache options\n const graph = await runIterativeDiscovery({\n repoPath: absoluteRepoPath,\n seeds,\n model: options.model,\n probeTimeout: options.timeout ? options.timeout * 1000 : undefined,\n mergeTimeout: options.timeout ? options.timeout * 1000 * 1.5 : undefined, // Merge takes longer\n concurrency: 5,\n maxRounds: 3,\n coverageThreshold: 0.8,\n focus: options.focus,\n outputDir: options.output,\n gitHash: currentGitHash ?? undefined,\n useCache: options.useCache,\n });\n\n result = {\n graph,\n duration: 0, // Iterative discovery doesn't track duration yet\n };\n } else {\n // Standard discovery (pass cache options for large-repo handler)\n result = await discoverModuleGraph({\n repoPath: absoluteRepoPath,\n model: options.model,\n timeout: options.timeout ? options.timeout * 1000 : undefined,\n focus: options.focus,\n outputDir: options.output,\n gitHash: currentGitHash ?? 
undefined,\n useCache: options.useCache,\n });\n }\n\n spinner.succeed('Discovery complete');\n\n // Print summary to stderr\n const { graph, duration } = result;\n process.stderr.write('\\n');\n printHeader('Discovery Summary');\n printKeyValue('Project', graph.project.name);\n printKeyValue('Language', graph.project.language);\n printKeyValue('Build System', graph.project.buildSystem);\n printKeyValue('Modules', String(graph.modules.length));\n printKeyValue('Categories', String(graph.categories.length));\n printKeyValue('Duration', formatDuration(duration));\n\n if (options.verbose) {\n process.stderr.write('\\n');\n printInfo('Modules:');\n for (const mod of graph.modules) {\n process.stderr.write(\n ` ${cyan(mod.id)} ${gray('\u2014')} ${mod.purpose} ${gray(`[${mod.complexity}]`)}\\n`\n );\n }\n }\n\n // Save to cache\n try {\n await saveGraph(absoluteRepoPath, graph, options.output, options.focus);\n if (options.verbose) {\n printInfo('Cached module graph for future use');\n }\n } catch {\n if (options.verbose) {\n printWarning('Failed to cache module graph (non-fatal)');\n }\n }\n\n // Write output file\n const jsonOutput = JSON.stringify(graph, null, 2);\n const outputDir = path.resolve(options.output);\n const outputFile = path.join(outputDir, 'module-graph.json');\n\n try {\n fs.mkdirSync(outputDir, { recursive: true });\n fs.writeFileSync(outputFile, jsonOutput, 'utf-8');\n process.stderr.write('\\n');\n printSuccess(`Module graph written to ${bold(outputFile)}`);\n } catch (writeError) {\n printWarning(`Could not write to file: ${getErrorMessage(writeError)}`);\n printInfo('Outputting to stdout instead');\n }\n\n // Also write to stdout for piping\n process.stdout.write(jsonOutput + '\\n');\n\n return EXIT_CODES.SUCCESS;\n\n } catch (error) {\n spinner.fail('Discovery failed');\n\n if (error instanceof DiscoveryError) {\n switch (error.code) {\n case 'sdk-unavailable':\n printError(error.message);\n printInfo('Setup instructions:');\n printInfo(' 1. Install GitHub Copilot extension');\n printInfo(' 2. Sign in with your GitHub account');\n printInfo(' 3. 
Ensure Copilot has SDK access');\n return EXIT_CODES.AI_UNAVAILABLE;\n\n case 'timeout':\n printError(error.message);\n return EXIT_CODES.EXECUTION_ERROR;\n\n default:\n printError(error.message);\n return EXIT_CODES.EXECUTION_ERROR;\n }\n }\n\n printError(getErrorMessage(error));\n if (options.verbose && error instanceof Error && error.stack) {\n process.stderr.write(`${gray(error.stack)}\\n`);\n }\n return EXIT_CODES.EXECUTION_ERROR;\n }\n}\n\n// ============================================================================\n// Helpers\n// ============================================================================\n\n/**\n * Format a duration in milliseconds to a human-readable string.\n */\nfunction formatDuration(ms: number): string {\n if (ms < 1000) {\n return `${ms}ms`;\n }\n const seconds = Math.round(ms / 1000);\n if (seconds < 60) {\n return `${seconds}s`;\n }\n const minutes = Math.floor(seconds / 60);\n const remainingSeconds = seconds % 60;\n return `${minutes}m ${remainingSeconds}s`;\n}\n", "/**\n * AI Invoker Factory\n *\n * Creates AIInvoker instances for different phases of deep-wiki generation.\n * Phase 3 (Analysis) uses direct sessions with MCP tools for code investigation.\n * Phase 4 (Writing) uses direct sessions without tools for article generation.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport {\n getCopilotSDKService,\n} from '@plusplusoneplusplus/pipeline-core';\nimport type {\n AIInvoker,\n AIInvokerOptions,\n AIInvokerResult,\n SendMessageOptions,\n} from '@plusplusoneplusplus/pipeline-core';\n\n// ============================================================================\n// Types\n// ============================================================================\n\n/**\n * Options for creating an analysis invoker (Phase 3).\n * Uses direct sessions with MCP tools for code investigation.\n */\nexport interface AnalysisInvokerOptions {\n /** Absolute path to the repository (working directory for MCP tools) */\n repoPath: string;\n /** AI model to use */\n model?: string;\n /** Timeout per module in milliseconds (default: 180000 = 3 min) */\n timeoutMs?: number;\n}\n\n/**\n * Options for creating a writing invoker (Phase 4).\n * Uses direct sessions without tools for article generation.\n */\nexport interface WritingInvokerOptions {\n /** AI model to use */\n model?: string;\n /** Timeout per article in milliseconds (default: 120000 = 2 min) */\n timeoutMs?: number;\n}\n\n/**\n * Result from checking AI availability.\n */\nexport interface AIAvailabilityResult {\n available: boolean;\n reason?: string;\n}\n\n// ============================================================================\n// Constants\n// ============================================================================\n\n/** Default timeout for analysis per module (3 minutes) */\nconst DEFAULT_ANALYSIS_TIMEOUT_MS = 180_000;\n\n/** Default timeout for writing per article (2 minutes) */\nconst DEFAULT_WRITING_TIMEOUT_MS = 120_000;\n\n/** MCP tools available during analysis */\nconst ANALYSIS_TOOLS = ['view', 'grep', 'glob'];\n\n// ============================================================================\n// AI Availability Check\n// ============================================================================\n\n/**\n * Check if the Copilot SDK is available for AI operations.\n */\nexport async function checkAIAvailability(): Promise<AIAvailabilityResult> {\n try {\n const service = getCopilotSDKService();\n const result = await service.isAvailable();\n return {\n available: 
result.available,\n reason: result.error,\n };\n } catch (error) {\n return {\n available: false,\n reason: error instanceof Error ? error.message : String(error),\n };\n }\n}\n\n// ============================================================================\n// Analysis Invoker (Phase 3)\n// ============================================================================\n\n/**\n * Create an AIInvoker for Phase 3 (Deep Analysis).\n *\n * Uses direct sessions (usePool: false) with read-only MCP tools\n * (view, grep, glob) so the AI can investigate source code.\n * Permissions: approve reads, deny everything else.\n */\nexport function createAnalysisInvoker(options: AnalysisInvokerOptions): AIInvoker {\n const service = getCopilotSDKService();\n\n return async (prompt: string, invokerOptions?: AIInvokerOptions): Promise<AIInvokerResult> => {\n try {\n const model = invokerOptions?.model || options.model;\n const timeoutMs = invokerOptions?.timeoutMs || options.timeoutMs || DEFAULT_ANALYSIS_TIMEOUT_MS;\n\n const sendOptions: SendMessageOptions = {\n prompt,\n model,\n workingDirectory: options.repoPath,\n timeoutMs,\n usePool: false, // Direct session \u2014 MCP tools require it\n availableTools: ANALYSIS_TOOLS,\n onPermissionRequest: (req) =>\n req.kind === 'read' ? { kind: 'approved' } : { kind: 'denied-by-rules' },\n loadDefaultMcpConfig: false, // Don't load user's MCP config\n };\n\n const result = await service.sendMessage(sendOptions);\n\n return {\n success: result.success,\n response: result.response || '',\n error: result.error,\n tokenUsage: result.tokenUsage,\n };\n } catch (error) {\n return {\n success: false,\n response: '',\n error: error instanceof Error ? error.message : String(error),\n };\n }\n };\n}\n\n// ============================================================================\n// Writing Invoker (Phase 4)\n// ============================================================================\n\n/**\n * Create an AIInvoker for Phase 4 (Article Writing).\n *\n * Uses direct sessions (usePool: false) without tools for\n * article generation. No MCP tools are needed since all context\n * is provided in the prompt.\n */\nexport function createWritingInvoker(options: WritingInvokerOptions): AIInvoker {\n const service = getCopilotSDKService();\n\n return async (prompt: string, invokerOptions?: AIInvokerOptions): Promise<AIInvokerResult> => {\n try {\n const model = invokerOptions?.model || options.model;\n const timeoutMs = invokerOptions?.timeoutMs || options.timeoutMs || DEFAULT_WRITING_TIMEOUT_MS;\n\n const sendOptions: SendMessageOptions = {\n prompt,\n model,\n timeoutMs,\n usePool: false, // Direct session \u2014 consistent with all deep-wiki phases\n loadDefaultMcpConfig: false, // Writing doesn't need MCP; avoid user's global MCP config\n };\n\n const result = await service.sendMessage(sendOptions);\n\n return {\n success: result.success,\n response: result.response || '',\n error: result.error,\n tokenUsage: result.tokenUsage,\n };\n } catch (error) {\n return {\n success: false,\n response: '',\n error: error instanceof Error ? 
error.message : String(error),\n };\n }\n };\n}\n\n// ============================================================================\n// Consolidation Invoker (Phase 2)\n// ============================================================================\n\n/**\n * Options for creating a consolidation invoker (Phase 2).\n * Uses direct sessions without tools for semantic clustering.\n */\nexport interface ConsolidationInvokerOptions {\n /** Working directory for SDK session (typically the output directory) */\n workingDirectory: string;\n /** AI model to use */\n model?: string;\n /** Timeout for clustering session in milliseconds (default: 120000 = 2 min) */\n timeoutMs?: number;\n}\n\n/** Default timeout for consolidation clustering (2 minutes) */\nconst DEFAULT_CONSOLIDATION_TIMEOUT_MS = 120_000;\n\n/**\n * Create an AIInvoker for Phase 2 (Module Consolidation).\n *\n * Uses direct sessions (usePool: false) without tools.\n * The AI only needs to analyze the module list and return clusters.\n */\nexport function createConsolidationInvoker(options: ConsolidationInvokerOptions): AIInvoker {\n const service = getCopilotSDKService();\n\n return async (prompt: string, invokerOptions?: AIInvokerOptions): Promise<AIInvokerResult> => {\n try {\n const model = invokerOptions?.model || options.model;\n const timeoutMs = invokerOptions?.timeoutMs || options.timeoutMs || DEFAULT_CONSOLIDATION_TIMEOUT_MS;\n\n const sendOptions: SendMessageOptions = {\n prompt,\n model,\n timeoutMs,\n workingDirectory: options.workingDirectory,\n usePool: false,\n loadDefaultMcpConfig: false,\n };\n\n const result = await service.sendMessage(sendOptions);\n\n return {\n success: result.success,\n response: result.response || '',\n error: result.error,\n tokenUsage: result.tokenUsage,\n };\n } catch (error) {\n return {\n success: false,\n response: '',\n error: error instanceof Error ? 
error.message : String(error),\n };\n }\n };\n}\n", "/**\n * Token Usage Tracker\n *\n * Accumulates TokenUsage data across multiple AI calls, organized by phase.\n * Produces a per-phase and total summary for CLI display and JSON export.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport type { TokenUsage } from '@plusplusoneplusplus/pipeline-core';\n\n// ============================================================================\n// Types\n// ============================================================================\n\n/** Phases tracked by the UsageTracker */\nexport type TrackedPhase = 'discovery' | 'consolidation' | 'analysis' | 'writing';\n\n/** Per-phase accumulated usage data */\nexport interface PhaseUsage {\n inputTokens: number;\n outputTokens: number;\n cacheReadTokens: number;\n cacheWriteTokens: number;\n totalTokens: number;\n cost: number | null;\n calls: number;\n cached: boolean;\n}\n\n/** JSON report structure */\nexport interface UsageReport {\n timestamp: string;\n model?: string;\n phases: Record<TrackedPhase, PhaseUsage>;\n total: {\n inputTokens: number;\n outputTokens: number;\n cacheReadTokens: number;\n cacheWriteTokens: number;\n totalTokens: number;\n cost: number | null;\n calls: number;\n };\n}\n\n// ============================================================================\n// UsageTracker\n// ============================================================================\n\n/**\n * Accumulates TokenUsage across multiple AI calls, grouped by phase.\n */\nexport class UsageTracker {\n private phases: Map<TrackedPhase, PhaseUsage> = new Map();\n\n /**\n * Record token usage from a single AI call.\n */\n addUsage(phase: TrackedPhase, usage?: TokenUsage): void {\n const current = this.getOrCreatePhase(phase);\n current.calls += 1;\n if (usage) {\n current.inputTokens += usage.inputTokens;\n current.outputTokens += usage.outputTokens;\n current.cacheReadTokens += usage.cacheReadTokens;\n current.cacheWriteTokens += usage.cacheWriteTokens;\n current.totalTokens += usage.totalTokens;\n if (usage.cost != null) {\n current.cost = (current.cost ?? 0) + usage.cost;\n }\n }\n }\n\n /**\n * Mark a phase as having been loaded from cache (0 AI calls).\n */\n markCached(phase: TrackedPhase): void {\n const current = this.getOrCreatePhase(phase);\n current.cached = true;\n }\n\n /**\n * Get usage data for a specific phase.\n */\n getPhaseUsage(phase: TrackedPhase): PhaseUsage {\n return this.getOrCreatePhase(phase);\n }\n\n /**\n * Get the total across all phases.\n */\n getTotal(): UsageReport['total'] {\n let inputTokens = 0;\n let outputTokens = 0;\n let cacheReadTokens = 0;\n let cacheWriteTokens = 0;\n let totalTokens = 0;\n let cost: number | null = null;\n let calls = 0;\n\n for (const usage of this.phases.values()) {\n inputTokens += usage.inputTokens;\n outputTokens += usage.outputTokens;\n cacheReadTokens += usage.cacheReadTokens;\n cacheWriteTokens += usage.cacheWriteTokens;\n totalTokens += usage.totalTokens;\n if (usage.cost != null) {\n cost = (cost ?? 
0) + usage.cost;\n }\n calls += usage.calls;\n }\n\n return { inputTokens, outputTokens, cacheReadTokens, cacheWriteTokens, totalTokens, cost, calls };\n }\n\n /**\n * Check if any usage data has been recorded.\n */\n hasUsage(): boolean {\n const total = this.getTotal();\n return total.calls > 0 || total.totalTokens > 0;\n }\n\n /**\n * Build the full JSON report.\n */\n toReport(model?: string): UsageReport {\n const allPhases: TrackedPhase[] = ['discovery', 'consolidation', 'analysis', 'writing'];\n const phases = {} as Record<TrackedPhase, PhaseUsage>;\n for (const phase of allPhases) {\n phases[phase] = this.getOrCreatePhase(phase);\n }\n\n return {\n timestamp: new Date().toISOString(),\n model,\n phases,\n total: this.getTotal(),\n };\n }\n\n /**\n * Format token count with commas for CLI display.\n */\n static formatTokens(n: number): string {\n return n.toLocaleString('en-US');\n }\n\n /**\n * Format cost as dollar amount.\n */\n static formatCost(cost: number | null): string {\n if (cost == null) { return 'N/A'; }\n return `$${cost.toFixed(2)}`;\n }\n\n // ========================================================================\n // Private\n // ========================================================================\n\n private getOrCreatePhase(phase: TrackedPhase): PhaseUsage {\n let existing = this.phases.get(phase);\n if (!existing) {\n existing = {\n inputTokens: 0,\n outputTokens: 0,\n cacheReadTokens: 0,\n cacheWriteTokens: 0,\n totalTokens: 0,\n cost: null,\n calls: 0,\n cached: false,\n };\n this.phases.set(phase, existing);\n }\n return existing;\n }\n}\n", "/**\n * Phase 1: Discovery\n *\n * Discovers the module graph of a repository using AI-powered analysis.\n */\n\nimport * as path from 'path';\nimport * as fs from 'fs';\nimport type { GenerateCommandOptions, ModuleGraph } from '../../types';\nimport type { TokenUsage } from '@plusplusoneplusplus/pipeline-core';\nimport { resolvePhaseModel, resolvePhaseTimeout, resolvePhaseConcurrency } from '../../config-loader';\nimport { discoverModuleGraph, runIterativeDiscovery } from '../../discovery';\nimport { generateTopicSeeds, parseSeedFile } from '../../seeds';\nimport {\n getCachedGraph,\n getCachedGraphAny,\n saveGraph,\n getFolderHeadHash,\n saveSeedsCache,\n getCachedSeeds,\n getCachedSeedsAny,\n clearDiscoveryCache,\n} from '../../cache';\nimport {\n Spinner,\n printSuccess,\n printError,\n printWarning,\n printInfo,\n printHeader,\n} from '../../logger';\nimport { EXIT_CODES } from '../../cli';\nimport { getErrorMessage } from '../../utils/error-utils';\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface Phase1Result {\n graph?: ModuleGraph;\n duration: number;\n exitCode?: number;\n tokenUsage?: TokenUsage;\n}\n\n// ============================================================================\n// Phase 1: Discovery\n// ============================================================================\n\nexport async function runPhase1(\n repoPath: string,\n options: GenerateCommandOptions,\n isCancelled: () => boolean\n): Promise<Phase1Result> {\n const startTime = Date.now();\n\n process.stderr.write('\\n');\n printHeader('Phase 1: Discovery');\n\n // Get git hash for cache operations (subfolder-scoped when applicable)\n let currentGitHash: string | null = null;\n try {\n currentGitHash = await getFolderHeadHash(repoPath);\n } catch {\n // Non-fatal\n }\n\n // Clear discovery 
cache if --force\n if (options.force) {\n clearDiscoveryCache(options.output);\n }\n\n // Check cache (unless --force)\n if (!options.force) {\n try {\n const cached = options.useCache\n ? getCachedGraphAny(options.output)\n : await getCachedGraph(repoPath, options.output);\n if (cached) {\n const duration = Date.now() - startTime;\n printSuccess(`Using cached module graph (${cached.graph.modules.length} modules)`);\n return { graph: cached.graph, duration };\n }\n } catch {\n // Cache read failed \u2014 continue with discovery\n }\n }\n\n const spinner = new Spinner();\n spinner.start('Discovering module graph...');\n\n try {\n let result;\n\n // Resolve per-phase settings for discovery\n const discoveryModel = resolvePhaseModel(options, 'discovery');\n const discoveryTimeout = resolvePhaseTimeout(options, 'discovery');\n const discoveryConcurrency = resolvePhaseConcurrency(options, 'discovery');\n\n // Check if iterative discovery is requested\n if (options.seeds) {\n // Load or generate seeds\n let seeds;\n if (options.seeds === 'auto') {\n // Check for cached seeds first\n if (!options.force && currentGitHash) {\n const cachedSeeds = options.useCache\n ? getCachedSeedsAny(options.output)\n : getCachedSeeds(options.output, currentGitHash);\n if (cachedSeeds && cachedSeeds.length > 0) {\n seeds = cachedSeeds;\n printInfo(`Using ${seeds.length} cached seeds`);\n }\n }\n\n if (!seeds) {\n spinner.update('Generating topic seeds...');\n seeds = await generateTopicSeeds(repoPath, {\n maxTopics: 50,\n model: discoveryModel,\n verbose: options.verbose,\n });\n\n // Cache the generated seeds\n if (currentGitHash) {\n try {\n saveSeedsCache(seeds, options.output, currentGitHash);\n } catch {\n // Non-fatal\n }\n }\n }\n\n spinner.succeed(`Generated ${seeds.length} topic seeds`);\n spinner.start('Running iterative discovery...');\n } else {\n // Parse seed file (file-based seeds don't need caching)\n seeds = parseSeedFile(options.seeds);\n printInfo(`Loaded ${seeds.length} seeds from ${options.seeds}`);\n spinner.update('Running iterative discovery...');\n }\n\n // Run iterative discovery with cache options\n const graph = await runIterativeDiscovery({\n repoPath,\n seeds,\n model: discoveryModel,\n probeTimeout: discoveryTimeout ? discoveryTimeout * 1000 : undefined,\n mergeTimeout: discoveryTimeout ? discoveryTimeout * 1000 * 1.5 : undefined, // Merge takes longer\n concurrency: discoveryConcurrency || 5,\n maxRounds: 3,\n coverageThreshold: 0.8,\n focus: options.focus,\n outputDir: options.output,\n gitHash: currentGitHash ?? undefined,\n useCache: options.useCache,\n });\n\n result = {\n graph,\n duration: 0, // Iterative discovery doesn't track duration yet\n };\n } else {\n // Standard discovery (pass cache options for large-repo handler)\n result = await discoverModuleGraph({\n repoPath,\n model: discoveryModel,\n timeout: discoveryTimeout ? discoveryTimeout * 1000 : undefined,\n focus: options.focus,\n outputDir: options.output,\n gitHash: currentGitHash ?? 
undefined,\n useCache: options.useCache,\n });\n }\n\n spinner.succeed(`Discovery complete \u2014 ${result.graph.modules.length} modules found`);\n\n // Save to cache\n try {\n await saveGraph(repoPath, result.graph, options.output, options.focus);\n } catch {\n if (options.verbose) {\n printWarning('Failed to cache module graph (non-fatal)');\n }\n }\n\n // Also write module-graph.json to output\n const outputDir = path.resolve(options.output);\n const outputFile = path.join(outputDir, 'module-graph.json');\n try {\n fs.mkdirSync(outputDir, { recursive: true });\n fs.writeFileSync(outputFile, JSON.stringify(result.graph, null, 2), 'utf-8');\n } catch {\n // Non-fatal\n }\n\n return { graph: result.graph, duration: Date.now() - startTime, tokenUsage: result.tokenUsage };\n } catch (error) {\n spinner.fail('Discovery failed');\n printError(getErrorMessage(error));\n return { duration: Date.now() - startTime, exitCode: EXIT_CODES.EXECUTION_ERROR };\n }\n}\n", "/**\n * Shared constants for module consolidation.\n */\n\nexport const COMPLEXITY_LEVELS: Record<string, number> = { low: 0, medium: 1, high: 2 };\nexport const COMPLEXITY_NAMES: Record<number, string> = { 2: 'high', 1: 'medium', 0: 'low' };\n\n/**\n * Resolve the highest complexity level from a set of modules.\n */\nexport function resolveMaxComplexity(modules: { complexity: string }[]): 'low' | 'medium' | 'high' {\n let max = 0;\n for (const m of modules) {\n const level = COMPLEXITY_LEVELS[m.complexity] ?? 0;\n if (level > max) { max = level; }\n }\n return (COMPLEXITY_NAMES[max] ?? 'low') as 'low' | 'medium' | 'high';\n}\n", "/**\n * Rule-Based Module Consolidator\n *\n * Merges fine-grained modules by directory proximity.\n * This is the fast, deterministic first pass of the hybrid consolidation.\n *\n * Algorithm:\n * 1. Group modules by their parent directory path\n * 2. Merge modules sharing the same directory into a single module\n * 3. Fix up dependency references (old IDs \u2192 new merged IDs)\n * 4. 
Re-derive categories from merged modules\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as path from 'path';\nimport type { ModuleInfo, ModuleGraph, CategoryInfo } from '../types';\nimport { normalizeModuleId } from '../schemas';\nimport { resolveMaxComplexity } from './constants';\n\n// ============================================================================\n// Types\n// ============================================================================\n\n/**\n * Intermediate grouping of modules by directory.\n */\ninterface DirectoryGroup {\n /** Normalized directory path (e.g., \"src/shortcuts/tasks-viewer\") */\n dirPath: string;\n /** Modules in this directory */\n modules: ModuleInfo[];\n}\n\n// ============================================================================\n// Public API\n// ============================================================================\n\n/**\n * Consolidate modules by directory proximity.\n *\n * Modules sharing the same parent directory are merged into a single module.\n * The merged module inherits the union of keyFiles, dependencies, dependents,\n * and picks the highest complexity level.\n *\n * @param graph - The original module graph from discovery\n * @returns A new module graph with consolidated modules\n */\nexport function consolidateByDirectory(graph: ModuleGraph): ModuleGraph {\n const modules = graph.modules;\n\n if (modules.length === 0) {\n return graph;\n }\n\n // Step 1: Group modules by parent directory\n const groups = groupModulesByDirectory(modules);\n\n // Step 2: Merge each group into a single module\n const mergedModules: ModuleInfo[] = [];\n const idMapping = new Map<string, string>(); // old ID \u2192 new ID\n\n for (const group of groups) {\n if (group.modules.length === 1) {\n // Single module in directory \u2014 keep as-is\n const mod = group.modules[0];\n idMapping.set(mod.id, mod.id);\n mergedModules.push(mod);\n } else {\n // Multiple modules \u2014 merge\n const merged = mergeModuleGroup(group);\n for (const mod of group.modules) {\n idMapping.set(mod.id, merged.id);\n }\n mergedModules.push(merged);\n }\n }\n\n // Step 3: Fix up dependency references\n const fixedModules = fixDependencyReferences(mergedModules, idMapping);\n\n // Step 4: Re-derive categories\n const categories = deriveCategories(fixedModules);\n\n return {\n ...graph,\n modules: fixedModules,\n categories,\n };\n}\n\n// ============================================================================\n// Internal Helpers\n// ============================================================================\n\n/**\n * Get the parent directory of a module's path.\n * Handles both file paths and directory paths.\n */\nexport function getModuleDirectory(modulePath: string): string {\n // Normalize path separators\n const normalized = modulePath.replace(/\\\\/g, '/');\n\n // Remove trailing slash\n const cleaned = normalized.replace(/\\/$/, '');\n\n // If path looks like a file (has extension), get its directory\n const lastSegment = cleaned.split('/').pop() || '';\n if (lastSegment.includes('.')) {\n return path.posix.dirname(cleaned);\n }\n\n // It's already a directory path\n return cleaned;\n}\n\n/**\n * Group modules by their parent directory path.\n */\nfunction groupModulesByDirectory(modules: ModuleInfo[]): DirectoryGroup[] {\n const dirMap = new Map<string, ModuleInfo[]>();\n\n for (const mod of modules) {\n const dir = getModuleDirectory(mod.path);\n if (!dirMap.has(dir)) {\n dirMap.set(dir, []);\n }\n dirMap.get(dir)!.push(mod);\n 
}\n\n return Array.from(dirMap.entries()).map(([dirPath, mods]) => ({\n dirPath,\n modules: mods,\n }));\n}\n\n/**\n * Merge multiple modules in the same directory into a single module.\n */\nfunction mergeModuleGroup(group: DirectoryGroup): ModuleInfo {\n const { dirPath, modules } = group;\n\n // Derive a name from the directory path\n const dirName = dirPath.split('/').pop() || dirPath;\n const id = normalizeModuleId(dirPath);\n const name = dirName\n .split('-')\n .map(w => w.charAt(0).toUpperCase() + w.slice(1))\n .join(' ');\n\n // Union of all key files\n const keyFiles = deduplicateStrings(\n modules.flatMap(m => m.keyFiles)\n );\n\n // Union of all dependency IDs (will be fixed up later)\n const allDeps = deduplicateStrings(\n modules.flatMap(m => m.dependencies)\n );\n // Remove self-references (modules within this group)\n const selfIds = new Set(modules.map(m => m.id));\n const dependencies = allDeps.filter(d => !selfIds.has(d));\n\n // Union of all dependent IDs\n const allDependents = deduplicateStrings(\n modules.flatMap(m => m.dependents)\n );\n const dependents = allDependents.filter(d => !selfIds.has(d));\n\n // Pick highest complexity\n const complexity = pickHighestComplexity(modules);\n\n // Pick most common category\n const category = pickMostCommonCategory(modules);\n\n // Combine purposes\n const purpose = combinePurposes(modules);\n\n // Track provenance\n const mergedFrom = modules.map(m => m.id);\n\n // Preserve area if all modules share the same area\n const areas = new Set(modules.map(m => m.area).filter(Boolean));\n const area = areas.size === 1 ? [...areas][0] : undefined;\n\n return {\n id,\n name,\n path: dirPath.endsWith('/') ? dirPath : dirPath + '/',\n purpose,\n keyFiles,\n dependencies,\n dependents,\n complexity,\n category,\n area,\n mergedFrom,\n };\n}\n\n/**\n * Fix dependency and dependent references to use merged IDs.\n * Also remove self-references created by merging.\n */\nfunction fixDependencyReferences(\n modules: ModuleInfo[],\n idMapping: Map<string, string>\n): ModuleInfo[] {\n const moduleIds = new Set(modules.map(m => m.id));\n\n return modules.map(mod => ({\n ...mod,\n dependencies: deduplicateStrings(\n mod.dependencies\n .map(d => idMapping.get(d) || d)\n .filter(d => d !== mod.id && moduleIds.has(d))\n ),\n dependents: deduplicateStrings(\n mod.dependents\n .map(d => idMapping.get(d) || d)\n .filter(d => d !== mod.id && moduleIds.has(d))\n ),\n }));\n}\n\n/**\n * Derive fresh categories from the merged modules.\n */\nfunction deriveCategories(modules: ModuleInfo[]): CategoryInfo[] {\n const categoryMap = new Map<string, Set<string>>();\n\n for (const mod of modules) {\n if (!categoryMap.has(mod.category)) {\n categoryMap.set(mod.category, new Set());\n }\n categoryMap.get(mod.category)!.add(mod.id);\n }\n\n return Array.from(categoryMap.entries()).map(([name, moduleIds]) => ({\n name,\n description: `Contains ${moduleIds.size} module(s)`,\n }));\n}\n\n// ============================================================================\n// Utility Helpers\n// ============================================================================\n\nfunction deduplicateStrings(arr: string[]): string[] {\n return [...new Set(arr)];\n}\n\nfunction pickHighestComplexity(modules: ModuleInfo[]): 'low' | 'medium' | 'high' {\n return resolveMaxComplexity(modules);\n}\n\nfunction pickMostCommonCategory(modules: ModuleInfo[]): string {\n const counts = new Map<string, number>();\n for (const m of modules) {\n counts.set(m.category, (counts.get(m.category) || 
0) + 1);\n }\n let best = modules[0].category;\n let bestCount = 0;\n for (const [cat, count] of counts) {\n if (count > bestCount) {\n best = cat;\n bestCount = count;\n }\n }\n return best;\n}\n\nfunction combinePurposes(modules: ModuleInfo[]): string {\n if (modules.length === 1) {\n return modules[0].purpose;\n }\n // Use first module's purpose as base, mention others are included\n const unique = deduplicateStrings(modules.map(m => m.purpose));\n if (unique.length === 1) {\n return unique[0];\n }\n // Combine up to 3 purposes, truncate rest\n const shown = unique.slice(0, 3);\n const remaining = unique.length - shown.length;\n const combined = shown.join('; ');\n return remaining > 0\n ? `${combined} (+${remaining} more)`\n : combined;\n}\n", "/**\n * AI-Assisted Module Consolidator\n *\n * Uses a single AI session to semantically cluster pre-consolidated modules\n * into a target number of high-level groups. This is the second pass of the\n * hybrid consolidation, running after the rule-based pass.\n *\n * The AI receives a compact module list and returns cluster assignments.\n * Modules within each cluster are then programmatically merged.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport type { AIInvoker } from '@plusplusoneplusplus/pipeline-core';\nimport { extractJSON } from '@plusplusoneplusplus/pipeline-core';\nimport type { ModuleInfo, ModuleGraph, CategoryInfo } from '../types';\nimport type { ClusterGroup } from './types';\nimport { normalizeModuleId } from '../schemas';\nimport { resolveMaxComplexity } from './constants';\n\n// ============================================================================\n// Constants\n// ============================================================================\n\n/** Default target module count */\nconst DEFAULT_TARGET_COUNT = 50;\n\n/** Default timeout for AI clustering session */\nconst DEFAULT_CLUSTERING_TIMEOUT_MS = 120_000;\n\n// ============================================================================\n// Public API\n// ============================================================================\n\n/**\n * Options for AI-assisted clustering.\n */\nexport interface AIClusteringOptions {\n /** AI invoker for the clustering session */\n aiInvoker: AIInvoker;\n /** Target number of modules after clustering (default: 50) */\n targetCount?: number;\n /** AI model to use */\n model?: string;\n /** Timeout in milliseconds (default: 120000) */\n timeoutMs?: number;\n}\n\n/**\n * Cluster modules using AI semantic analysis.\n *\n * Sends the module list to AI, which groups semantically related modules.\n * Then programmatically merges each cluster into a single module.\n *\n * @param graph - Module graph (typically after rule-based consolidation)\n * @param options - AI clustering options\n * @returns Consolidated module graph\n */\nexport async function clusterWithAI(\n graph: ModuleGraph,\n options: AIClusteringOptions\n): Promise<ModuleGraph> {\n const { aiInvoker, model } = options;\n const targetCount = options.targetCount || DEFAULT_TARGET_COUNT;\n const timeoutMs = options.timeoutMs || DEFAULT_CLUSTERING_TIMEOUT_MS;\n\n const modules = graph.modules;\n\n // Skip if already at or below target\n if (modules.length <= targetCount) {\n return graph;\n }\n\n // Build the clustering prompt\n const prompt = buildClusteringPrompt(modules, graph.project.name, targetCount);\n\n // Call AI\n const result = await aiInvoker(prompt, { model, timeoutMs });\n\n if (!result.success || !result.response) {\n // AI failed \u2014 
return graph unchanged\n return graph;\n }\n\n // Parse the cluster assignments\n const clusters = parseClusterResponse(result.response, modules);\n\n if (clusters.length === 0) {\n // Parse failed \u2014 return unchanged\n return graph;\n }\n\n // Merge modules according to clusters\n return applyClusterMerge(graph, clusters);\n}\n\n// ============================================================================\n// Prompt Building\n// ============================================================================\n\n/**\n * Build the clustering prompt for AI.\n * Sends a compact module list and asks for semantic groupings.\n */\nexport function buildClusteringPrompt(\n modules: ModuleInfo[],\n projectName: string,\n targetCount: number\n): string {\n // Build compact module list\n const moduleList = modules\n .map(m => `- ${m.id}: ${m.path} \u2014 ${m.purpose}`)\n .join('\\n');\n\n return `You are analyzing the codebase of \"${projectName}\" which has ${modules.length} modules.\nYour task is to cluster semantically related modules into ${targetCount} (or fewer) high-level groups for documentation purposes.\n\n## Current Modules\n\n${moduleList}\n\n## Instructions\n\nGroup these modules into approximately ${targetCount} clusters based on:\n1. **Functional cohesion** \u2014 modules that serve the same feature or subsystem\n2. **Directory proximity** \u2014 modules in related paths\n3. **Dependency relationships** \u2014 tightly coupled modules\n\nRules:\n- Every module ID must appear in exactly one cluster\n- Each cluster should have a descriptive name and purpose\n- Prefer fewer, broader clusters over many small ones\n- A cluster can have a single module if it's truly standalone\n\n## Output Format\n\nReturn a JSON object with this exact structure:\n\n\\`\\`\\`json\n{\n \"clusters\": [\n {\n \"id\": \"string \u2014 kebab-case cluster ID\",\n \"name\": \"string \u2014 human-readable cluster name\",\n \"memberIds\": [\"module-id-1\", \"module-id-2\"],\n \"purpose\": \"string \u2014 one-sentence purpose of this cluster\"\n }\n ]\n}\n\\`\\`\\`\n\nReturn ONLY the JSON, no other text.`;\n}\n\n// ============================================================================\n// Response Parsing\n// ============================================================================\n\n/**\n * Parse the AI response into ClusterGroup objects.\n * Validates that all module IDs are accounted for.\n */\nexport function parseClusterResponse(\n response: string,\n modules: ModuleInfo[]\n): ClusterGroup[] {\n const json = extractJSON(response);\n if (!json) {\n return [];\n }\n\n let parsed: unknown;\n try {\n parsed = typeof json === 'string' ? JSON.parse(json) : json;\n } catch {\n return [];\n }\n\n const data = parsed as Record<string, unknown>;\n const rawClusters = data?.clusters;\n if (!Array.isArray(rawClusters)) {\n return [];\n }\n\n const validModuleIds = new Set(modules.map(m => m.id));\n const assignedIds = new Set<string>();\n const clusters: ClusterGroup[] = [];\n\n for (const raw of rawClusters) {\n if (!raw || typeof raw !== 'object') { continue; }\n const r = raw as Record<string, unknown>;\n\n const id = typeof r.id === 'string' ? normalizeModuleId(r.id) : '';\n const name = typeof r.name === 'string' ? r.name : '';\n const purpose = typeof r.purpose === 'string' ? r.purpose : '';\n const memberIds = Array.isArray(r.memberIds)\n ? 
(r.memberIds as unknown[])\n .filter((mid): mid is string => typeof mid === 'string' && validModuleIds.has(mid))\n .filter(mid => !assignedIds.has(mid))\n : [];\n\n if (id && memberIds.length > 0) {\n for (const mid of memberIds) {\n assignedIds.add(mid);\n }\n clusters.push({ id, name: name || id, memberIds, purpose });\n }\n }\n\n // Assign any unassigned modules to their own singleton cluster\n for (const mod of modules) {\n if (!assignedIds.has(mod.id)) {\n clusters.push({\n id: mod.id,\n name: mod.name,\n memberIds: [mod.id],\n purpose: mod.purpose,\n });\n }\n }\n\n return clusters;\n}\n\n// ============================================================================\n// Cluster Merging\n// ============================================================================\n\n/**\n * Apply cluster assignments by merging modules within each cluster.\n */\nexport function applyClusterMerge(\n graph: ModuleGraph,\n clusters: ClusterGroup[]\n): ModuleGraph {\n const moduleMap = new Map(graph.modules.map(m => [m.id, m]));\n const idMapping = new Map<string, string>(); // old ID \u2192 cluster ID\n const mergedModules: ModuleInfo[] = [];\n\n for (const cluster of clusters) {\n const members = cluster.memberIds\n .map(id => moduleMap.get(id))\n .filter((m): m is ModuleInfo => m !== undefined);\n\n if (members.length === 0) { continue; }\n\n if (members.length === 1) {\n // Singleton \u2014 keep as-is\n const mod = members[0];\n idMapping.set(mod.id, mod.id);\n mergedModules.push(mod);\n } else {\n // Merge members into cluster module\n const merged = mergeClusterMembers(cluster, members);\n for (const mod of members) {\n idMapping.set(mod.id, cluster.id);\n }\n mergedModules.push(merged);\n }\n }\n\n // Fix up dependency references\n const moduleIds = new Set(mergedModules.map(m => m.id));\n const fixedModules = mergedModules.map(mod => ({\n ...mod,\n dependencies: dedup(\n mod.dependencies\n .map(d => idMapping.get(d) || d)\n .filter(d => d !== mod.id && moduleIds.has(d))\n ),\n dependents: dedup(\n mod.dependents\n .map(d => idMapping.get(d) || d)\n .filter(d => d !== mod.id && moduleIds.has(d))\n ),\n }));\n\n // Re-derive categories\n const categories = deriveFreshCategories(fixedModules);\n\n return {\n ...graph,\n modules: fixedModules,\n categories,\n };\n}\n\n// ============================================================================\n// Helpers\n// ============================================================================\n\nfunction mergeClusterMembers(cluster: ClusterGroup, members: ModuleInfo[]): ModuleInfo {\n const selfIds = new Set(members.map(m => m.id));\n\n const keyFiles = dedup(members.flatMap(m => m.keyFiles));\n const dependencies = dedup(\n members.flatMap(m => m.dependencies).filter(d => !selfIds.has(d))\n );\n const dependents = dedup(\n members.flatMap(m => m.dependents).filter(d => !selfIds.has(d))\n );\n\n // Pick highest complexity\n const complexity = resolveMaxComplexity(members);\n\n // Pick most common category\n const catCounts = new Map<string, number>();\n for (const m of members) {\n catCounts.set(m.category, (catCounts.get(m.category) || 0) + 1);\n }\n let category = members[0].category;\n let bestCount = 0;\n for (const [cat, count] of catCounts) {\n if (count > bestCount) { category = cat; bestCount = count; }\n }\n\n // Use shortest path as representative\n const shortestPath = members\n .map(m => m.path)\n .sort((a, b) => a.length - b.length)[0];\n\n // Collect all mergedFrom (flatten if already merged)\n const mergedFrom = dedup(\n members.flatMap(m 
=> m.mergedFrom || [m.id])\n );\n\n // Preserve area if consistent\n const areas = new Set(members.map(m => m.area).filter(Boolean));\n const area = areas.size === 1 ? [...areas][0] : undefined;\n\n return {\n id: cluster.id,\n name: cluster.name,\n path: shortestPath,\n purpose: cluster.purpose || members.map(m => m.purpose).slice(0, 2).join('; '),\n keyFiles,\n dependencies,\n dependents,\n complexity,\n category,\n area,\n mergedFrom,\n };\n}\n\nfunction dedup(arr: string[]): string[] {\n return [...new Set(arr)];\n}\n\nfunction deriveFreshCategories(modules: ModuleInfo[]): CategoryInfo[] {\n const categoryMap = new Map<string, number>();\n for (const mod of modules) {\n categoryMap.set(mod.category, (categoryMap.get(mod.category) || 0) + 1);\n }\n return Array.from(categoryMap.entries()).map(([name, count]) => ({\n name,\n description: `Contains ${count} module(s)`,\n }));\n}\n", "/**\n * Module Consolidation Orchestrator\n *\n * Public API for Phase 2 \u2014 combines rule-based directory consolidation\n * with AI-assisted semantic clustering to reduce module count.\n *\n * Pipeline: Rule-based \u2192 AI clustering (optional)\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport type { AIInvoker } from '@plusplusoneplusplus/pipeline-core';\nimport type { ModuleGraph } from '../types';\nimport type { ConsolidationOptions, ConsolidationResult } from './types';\nimport { consolidateByDirectory } from './rule-based-consolidator';\nimport { clusterWithAI } from './ai-consolidator';\n\n// ============================================================================\n// Constants\n// ============================================================================\n\n/** Default target module count for AI clustering */\nconst DEFAULT_TARGET_MODULE_COUNT = 50;\n\n// ============================================================================\n// Public API\n// ============================================================================\n\n/**\n * Run the full hybrid consolidation pipeline.\n *\n * 1. Rule-based pass: merge modules by directory proximity\n * 2. 
AI-assisted pass: semantic clustering to target count (optional)\n *\n * @param graph - The original module graph from Phase 1 (Discovery)\n * @param aiInvoker - AI invoker for semantic clustering (null to skip AI)\n * @param options - Consolidation options\n * @returns Consolidation result with new graph and stats\n */\nexport async function consolidateModules(\n graph: ModuleGraph,\n aiInvoker: AIInvoker | null,\n options: ConsolidationOptions = {}\n): Promise<ConsolidationResult> {\n const startTime = Date.now();\n const originalCount = graph.modules.length;\n const targetCount = options.targetModuleCount || DEFAULT_TARGET_MODULE_COUNT;\n\n // Step 1: Rule-based consolidation\n const ruleBasedGraph = consolidateByDirectory(graph);\n const afterRuleBasedCount = ruleBasedGraph.modules.length;\n\n // Step 2: AI-assisted clustering (if enabled and needed)\n let finalGraph = ruleBasedGraph;\n\n if (!options.skipAI && aiInvoker && afterRuleBasedCount > targetCount) {\n try {\n finalGraph = await clusterWithAI(ruleBasedGraph, {\n aiInvoker,\n targetCount,\n model: options.model,\n timeoutMs: options.timeoutMs,\n });\n } catch {\n // AI clustering failed \u2014 use rule-based result\n finalGraph = ruleBasedGraph;\n }\n }\n\n return {\n graph: finalGraph,\n originalCount,\n afterRuleBasedCount,\n finalCount: finalGraph.modules.length,\n duration: Date.now() - startTime,\n };\n}\n", "/**\n * Module Consolidation \u2014 Public API\n *\n * Phase 2: Reduces the number of modules from discovery before\n * running the expensive analysis phase. Uses a hybrid approach:\n * 1. Rule-based directory consolidation (fast, deterministic)\n * 2. AI-assisted semantic clustering (one AI session)\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nexport { consolidateModules } from './consolidator';\nexport { consolidateByDirectory, getModuleDirectory } from './rule-based-consolidator';\nexport { clusterWithAI, buildClusteringPrompt, parseClusterResponse, applyClusterMerge } from './ai-consolidator';\n", "/**\n * Phase 2: Module Consolidation\n *\n * Reduces the module graph by consolidating related modules using rule-based and AI-powered clustering.\n */\n\nimport * as path from 'path';\nimport * as fs from 'fs';\nimport type { GenerateCommandOptions, ModuleGraph } from '../../types';\nimport type { AIInvoker } from '@plusplusoneplusplus/pipeline-core';\nimport { resolvePhaseModel, resolvePhaseTimeout } from '../../config-loader';\nimport { consolidateModules } from '../../consolidation';\nimport { createConsolidationInvoker } from '../../ai-invoker';\nimport { UsageTracker } from '../../usage-tracker';\nimport {\n getCachedConsolidation,\n getCachedConsolidationAny,\n saveConsolidation,\n} from '../../cache';\nimport {\n Spinner,\n printSuccess,\n printWarning,\n printInfo,\n printHeader,\n} from '../../logger';\nimport { getErrorMessage } from '../../utils/error-utils';\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface Phase2ConsolidationResult {\n graph: ModuleGraph;\n duration: number;\n}\n\n// ============================================================================\n// Phase 2: Module Consolidation\n// ============================================================================\n\nexport async function runPhase2Consolidation(\n repoPath: string,\n graph: ModuleGraph,\n options: GenerateCommandOptions,\n usageTracker?: UsageTracker\n): 
Promise<Phase2ConsolidationResult> {\n const startTime = Date.now();\n\n process.stderr.write('\\n');\n printHeader('Phase 2: Consolidation');\n printInfo(`Input: ${graph.modules.length} modules`);\n\n const outputDir = path.resolve(options.output);\n const inputModuleCount = graph.modules.length;\n\n // Check consolidation cache (skip when --force)\n if (!options.force) {\n const cached = options.useCache\n ? getCachedConsolidationAny(outputDir, inputModuleCount)\n : await getCachedConsolidation(repoPath, outputDir, inputModuleCount);\n\n if (cached) {\n printSuccess(\n `Using cached consolidation (${inputModuleCount} \u2192 ${cached.graph.modules.length} modules)`\n );\n usageTracker?.markCached('consolidation');\n return { graph: cached.graph, duration: Date.now() - startTime };\n }\n }\n\n const spinner = new Spinner();\n spinner.start('Consolidating modules...');\n\n try {\n // Resolve per-phase settings for consolidation\n const consolidationModel = resolvePhaseModel(options, 'consolidation');\n const consolidationTimeout = resolvePhaseTimeout(options, 'consolidation');\n const consolidationSkipAI = options.phases?.consolidation?.skipAI;\n\n // Create AI invoker for semantic clustering (uses output dir as cwd)\n fs.mkdirSync(outputDir, { recursive: true });\n const baseInvoker = createConsolidationInvoker({\n workingDirectory: outputDir,\n model: consolidationModel,\n timeoutMs: consolidationTimeout ? consolidationTimeout * 1000 : undefined,\n });\n\n // Wrap invoker to capture token usage\n const aiInvoker: AIInvoker = async (prompt, opts) => {\n const result = await baseInvoker(prompt, opts);\n usageTracker?.addUsage('consolidation', result.tokenUsage);\n return result;\n };\n\n const result = await consolidateModules(graph, aiInvoker, {\n model: consolidationModel,\n timeoutMs: consolidationTimeout ? consolidationTimeout * 1000 : undefined,\n skipAI: consolidationSkipAI,\n });\n\n spinner.succeed(\n `Consolidation complete: ${result.originalCount} \u2192 ${result.afterRuleBasedCount} (rule-based) \u2192 ${result.finalCount} modules`\n );\n\n // Save consolidation result to cache\n await saveConsolidation(repoPath, result.graph, outputDir, inputModuleCount);\n\n return { graph: result.graph, duration: Date.now() - startTime };\n } catch (error) {\n spinner.warn('Consolidation failed \u2014 using original modules');\n if (options.verbose) {\n printWarning(getErrorMessage(error));\n }\n return { graph, duration: Date.now() - startTime };\n }\n}\n", "/**\n * Analysis Prompt Templates\n *\n * Prompt templates for Phase 3 (Deep Analysis). Each module is analyzed\n * by an AI session with MCP tool access. Three depth variants control\n * the level of investigation detail.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport { MODULE_ANALYSIS_SCHEMA } from '../schemas';\n\n// ============================================================================\n// Depth Variants\n// ============================================================================\n\n/**\n * Shallow analysis: overview + public API only.\n * Fastest, least detailed \u2014 suitable for large repos or quick surveys.\n */\nconst SHALLOW_INVESTIGATION_STEPS = `\nUse the grep, glob, and view tools to investigate this module:\n\n1. Read the main entry file(s) and understand the public API\n2. Identify the module's primary purpose and key abstractions\n\nReturn a JSON object matching the schema below. 
For shallow analysis, you may leave\ninternalArchitecture, dataFlow, and errorHandling as brief one-sentence summaries.\nKeep codeExamples to 1 example maximum.`;\n\n/**\n * Normal analysis: full 7-step investigation.\n * Balanced depth \u2014 default for most projects.\n */\nconst NORMAL_INVESTIGATION_STEPS = `\nUse the grep, glob, and view tools to deeply investigate this module:\n\n1. Read all key files and understand the public API\n2. Trace the main control flow and data flow\n3. Identify design patterns and coding conventions\n4. Find error handling strategies\n5. Extract 2-3 illustrative code examples\n6. Map internal dependencies to external packages\n7. Suggest a Mermaid diagram showing the module's internal structure\n\nReturn a JSON object matching the schema below.`;\n\n/**\n * Deep analysis: exhaustive investigation including performance and edge cases.\n * Most thorough \u2014 suitable for critical modules or small repos.\n */\nconst DEEP_INVESTIGATION_STEPS = `\nUse the grep, glob, and view tools to exhaustively investigate this module:\n\n1. Read ALL files in the module, not just key files\n2. Map the complete public API with full type signatures\n3. Trace every control flow path and data flow\n4. Identify ALL design patterns and coding conventions\n5. Analyze error handling, edge cases, and error recovery strategies\n6. Extract 3-5 illustrative code examples covering different aspects\n7. Map ALL internal dependencies and external packages with usage details\n8. Analyze performance characteristics and potential bottlenecks\n9. Identify any security considerations or sensitive operations\n10. Suggest a detailed Mermaid diagram showing the module's internal structure\n\nReturn a JSON object matching the schema below. Be thorough and comprehensive \u2014\ninclude all details you can find.`;\n\n// ============================================================================\n// Template\n// ============================================================================\n\n/**\n * Get the investigation steps for a given depth level.\n */\nexport function getInvestigationSteps(depth: 'shallow' | 'normal' | 'deep'): string {\n switch (depth) {\n case 'shallow': return SHALLOW_INVESTIGATION_STEPS;\n case 'deep': return DEEP_INVESTIGATION_STEPS;\n default: return NORMAL_INVESTIGATION_STEPS;\n }\n}\n\n/**\n * Build the full analysis prompt template.\n *\n * Uses {{variable}} placeholders that will be substituted by the map-reduce framework:\n * - {{moduleName}}, {{moduleId}}, {{modulePath}}, {{purpose}}\n * - {{keyFiles}}, {{dependencies}}, {{dependents}}\n * - {{complexity}}, {{category}}, {{projectName}}, {{architectureNotes}}\n *\n * @param depth Analysis depth level\n * @returns Prompt template string with {{variable}} placeholders\n */\nexport function buildAnalysisPromptTemplate(depth: 'shallow' | 'normal' | 'deep'): string {\n const steps = getInvestigationSteps(depth);\n\n return `You are analyzing module \"{{moduleName}}\" in the {{projectName}} codebase.\n\nModule ID: {{moduleId}}\nModule path: {{modulePath}}\nPurpose: {{purpose}}\nComplexity: {{complexity}}\nCategory: {{category}}\nKey files: {{keyFiles}}\nDependencies (other modules): {{dependencies}}\nDependents (modules that depend on this): {{dependents}}\n\nArchitecture context:\n{{architectureNotes}}\n${steps}\n\n**Output JSON Schema:**\n\\`\\`\\`json\n${MODULE_ANALYSIS_SCHEMA}\n\\`\\`\\`\n\nIMPORTANT:\n- The \"moduleId\" field MUST be exactly \"{{moduleId}}\"\n- All file paths should be relative to the repository 
root\n- The \"suggestedDiagram\" field should contain valid Mermaid syntax\n- The \"sourceFiles\" field should list ALL files you read or examined during analysis\n- If you cannot determine a field, use an empty string or empty array as appropriate\n- Return ONLY the JSON object, no additional text before or after`;\n}\n\n/**\n * Get the list of output fields expected from the analysis prompt.\n * These fields are used by the map-reduce framework to parse AI responses.\n */\nexport function getAnalysisOutputFields(): string[] {\n return [\n 'moduleId',\n 'overview',\n 'keyConcepts',\n 'publicAPI',\n 'internalArchitecture',\n 'dataFlow',\n 'patterns',\n 'errorHandling',\n 'codeExamples',\n 'dependencies',\n 'suggestedDiagram',\n 'sourceFiles',\n ];\n}\n", "/**\n * Analysis Response Parser\n *\n * Parses AI responses from Phase 3 analysis into structured ModuleAnalysis objects.\n * Handles JSON extraction from markdown code blocks, field validation, default filling,\n * and Mermaid diagram validation.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport type {\n ModuleAnalysis,\n} from '../types';\nimport type {\n KeyConcept,\n PublicAPIEntry,\n CodeExample,\n InternalDependency,\n ExternalDependency,\n} from './types';\nimport { isValidMermaidDiagram } from '../schemas';\n\n// ============================================================================\n// JSON Extraction\n// ============================================================================\n\n/**\n * Extract JSON from a response that may contain markdown code blocks.\n * Tries multiple strategies:\n * 1. Direct JSON parse\n * 2. Extract from ```json ... ``` code block\n * 3. Extract from ``` ... ``` code block\n * 4. Find the first { ... } block\n */\nexport function extractJSON(response: string): unknown | null {\n if (!response || typeof response !== 'string') {\n return null;\n }\n\n const trimmed = response.trim();\n\n // Strategy 1: Direct JSON parse\n try {\n return JSON.parse(trimmed);\n } catch {\n // Continue to next strategy\n }\n\n // Strategy 2: Extract from ```json ... ``` code block (flexible whitespace)\n // Handles preamble text before the code block and optional newlines\n const jsonBlockMatch = trimmed.match(/```json\\s*([\\s\\S]*?)\\s*```/);\n if (jsonBlockMatch) {\n const content = jsonBlockMatch[1].trim();\n if (content) {\n try {\n return JSON.parse(content);\n } catch {\n // Continue to next strategy\n }\n }\n }\n\n // Strategy 3: Extract from ``` ... ``` code block (flexible whitespace)\n const codeBlockMatch = trimmed.match(/```\\s*([\\s\\S]*?)\\s*```/);\n if (codeBlockMatch) {\n const content = codeBlockMatch[1].trim();\n if (content) {\n try {\n return JSON.parse(content);\n } catch {\n // Continue to next strategy\n }\n }\n }\n\n // Strategy 4: Find the last ```json block (for multi-block responses)\n // AI sometimes includes earlier code blocks with non-JSON content\n const allJsonBlocks = [...trimmed.matchAll(/```json\\s*([\\s\\S]*?)\\s*```/g)];\n for (let i = allJsonBlocks.length - 1; i >= 0; i--) {\n const content = allJsonBlocks[i][1].trim();\n if (content) {\n try {\n return JSON.parse(content);\n } catch {\n continue;\n }\n }\n }\n\n // Strategy 5: Find the first { ... 
} block (greedy)\n const firstBrace = trimmed.indexOf('{');\n const lastBrace = trimmed.lastIndexOf('}');\n if (firstBrace !== -1 && lastBrace > firstBrace) {\n try {\n return JSON.parse(trimmed.substring(firstBrace, lastBrace + 1));\n } catch {\n // All strategies failed\n }\n }\n\n return null;\n}\n\n// ============================================================================\n// Field Normalizers\n// ============================================================================\n\n/**\n * Ensure a value is a non-empty string, or return the default.\n */\nfunction ensureString(value: unknown, defaultValue: string = ''): string {\n if (typeof value === 'string') {\n return value;\n }\n return defaultValue;\n}\n\n/**\n * Ensure a value is an array, or return an empty array.\n */\nfunction ensureArray<T>(value: unknown): T[] {\n if (Array.isArray(value)) {\n return value as T[];\n }\n return [];\n}\n\n/**\n * Normalize a KeyConcept from raw data.\n */\nfunction normalizeKeyConcept(raw: unknown): KeyConcept | null {\n if (!raw || typeof raw !== 'object') { return null; }\n const obj = raw as Record<string, unknown>;\n if (!obj.name || typeof obj.name !== 'string') { return null; }\n\n return {\n name: obj.name,\n description: ensureString(obj.description),\n codeRef: typeof obj.codeRef === 'string' ? obj.codeRef : undefined,\n };\n}\n\n/**\n * Normalize a PublicAPIEntry from raw data.\n */\nfunction normalizePublicAPIEntry(raw: unknown): PublicAPIEntry | null {\n if (!raw || typeof raw !== 'object') { return null; }\n const obj = raw as Record<string, unknown>;\n if (!obj.name || typeof obj.name !== 'string') { return null; }\n\n return {\n name: obj.name,\n signature: ensureString(obj.signature),\n description: ensureString(obj.description),\n };\n}\n\n/**\n * Normalize a CodeExample from raw data.\n */\nfunction normalizeCodeExample(raw: unknown): CodeExample | null {\n if (!raw || typeof raw !== 'object') { return null; }\n const obj = raw as Record<string, unknown>;\n if (!obj.title || typeof obj.title !== 'string') { return null; }\n\n const example: CodeExample = {\n title: obj.title,\n code: ensureString(obj.code),\n };\n\n if (typeof obj.file === 'string' && obj.file.length > 0) {\n example.file = normalizeFilePath(obj.file);\n }\n\n if (Array.isArray(obj.lines) && obj.lines.length === 2) {\n const start = Number(obj.lines[0]);\n const end = Number(obj.lines[1]);\n if (!isNaN(start) && !isNaN(end) && start >= 0 && end >= start) {\n example.lines = [start, end];\n }\n }\n\n return example;\n}\n\n/**\n * Normalize an InternalDependency from raw data.\n */\nfunction normalizeInternalDependency(raw: unknown): InternalDependency | null {\n if (!raw || typeof raw !== 'object') { return null; }\n const obj = raw as Record<string, unknown>;\n if (!obj.module || typeof obj.module !== 'string') { return null; }\n\n return {\n module: obj.module,\n usage: ensureString(obj.usage),\n };\n}\n\n/**\n * Normalize an ExternalDependency from raw data.\n */\nfunction normalizeExternalDependency(raw: unknown): ExternalDependency | null {\n if (!raw || typeof raw !== 'object') { return null; }\n const obj = raw as Record<string, unknown>;\n if (!obj.package || typeof obj.package !== 'string') { return null; }\n\n return {\n package: obj.package,\n usage: ensureString(obj.usage),\n };\n}\n\n/**\n * Normalize a file path to be relative to the repo root.\n * Removes leading ./ or / prefixes.\n */\nfunction normalizeFilePath(filePath: string): string {\n // Remove leading ./ or /\n let normalized = 
filePath.replace(/^\\.\\//, '').replace(/^\\//, '');\n // Normalize backslashes to forward slashes (Windows compat)\n normalized = normalized.replace(/\\\\/g, '/');\n return normalized;\n}\n\n/**\n * Normalize a Mermaid diagram string.\n * Returns empty string if invalid.\n */\nfunction normalizeMermaidDiagram(value: unknown): string {\n if (typeof value !== 'string' || !value.trim()) {\n return '';\n }\n\n // Strip markdown code block wrapper if present\n let diagram = value.trim();\n const mermaidBlockMatch = diagram.match(/```(?:mermaid)?\\s*\\n([\\s\\S]*?)\\n\\s*```/);\n if (mermaidBlockMatch) {\n diagram = mermaidBlockMatch[1].trim();\n }\n\n // Basic validation\n if (!isValidMermaidDiagram(diagram)) {\n return '';\n }\n\n return diagram;\n}\n\n// ============================================================================\n// Main Parser\n// ============================================================================\n\n/**\n * Parse an AI response string into a ModuleAnalysis object.\n *\n * @param response The raw AI response text\n * @param expectedModuleId The expected moduleId (for validation)\n * @returns Parsed ModuleAnalysis\n * @throws Error if the response cannot be parsed at all\n */\nexport function parseAnalysisResponse(response: string, expectedModuleId: string): ModuleAnalysis {\n const parsed = extractJSON(response);\n if (!parsed || typeof parsed !== 'object') {\n throw new Error('Failed to extract JSON from analysis response');\n }\n\n const raw = parsed as Record<string, unknown>;\n\n // Validate moduleId \u2014 use expected if missing or mismatched\n const moduleId = typeof raw.moduleId === 'string' ? raw.moduleId : expectedModuleId;\n\n // Build the ModuleAnalysis with defaults for missing fields\n const analysis: ModuleAnalysis = {\n moduleId,\n overview: ensureString(raw.overview, 'No overview available.'),\n keyConcepts: ensureArray(raw.keyConcepts)\n .map(normalizeKeyConcept)\n .filter((c): c is KeyConcept => c !== null),\n publicAPI: ensureArray(raw.publicAPI)\n .map(normalizePublicAPIEntry)\n .filter((e): e is PublicAPIEntry => e !== null),\n internalArchitecture: ensureString(raw.internalArchitecture),\n dataFlow: ensureString(raw.dataFlow),\n patterns: ensureArray<string>(raw.patterns).filter(\n (p): p is string => typeof p === 'string' && p.length > 0\n ),\n errorHandling: ensureString(raw.errorHandling),\n codeExamples: ensureArray(raw.codeExamples)\n .map(normalizeCodeExample)\n .filter((e): e is CodeExample => e !== null),\n dependencies: {\n internal: [],\n external: [],\n },\n suggestedDiagram: normalizeMermaidDiagram(raw.suggestedDiagram),\n };\n\n // Parse dependencies object\n if (raw.dependencies && typeof raw.dependencies === 'object') {\n const deps = raw.dependencies as Record<string, unknown>;\n analysis.dependencies.internal = ensureArray(deps.internal)\n .map(normalizeInternalDependency)\n .filter((d): d is InternalDependency => d !== null);\n analysis.dependencies.external = ensureArray(deps.external)\n .map(normalizeExternalDependency)\n .filter((d): d is ExternalDependency => d !== null);\n }\n\n return analysis;\n}\n", "/**\n * Analysis Executor\n *\n * Orchestrates Phase 3 (Deep Analysis) using the MapReduceExecutor\n * from pipeline-core. 
Converts ModuleInfo items into PromptItems,\n * runs parallel AI sessions with MCP tools, and parses results\n * into ModuleAnalysis objects.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport {\n createPromptMapJob,\n createPromptMapInput,\n createExecutor,\n getLogger,\n LogCategory,\n} from '@plusplusoneplusplus/pipeline-core';\nimport type {\n AIInvoker,\n PromptItem,\n PromptMapResult,\n PromptMapOutput,\n JobProgress,\n ItemCompleteCallback,\n} from '@plusplusoneplusplus/pipeline-core';\nimport type { ModuleInfo, ModuleGraph, ModuleAnalysis } from '../types';\nimport { buildAnalysisPromptTemplate, getAnalysisOutputFields } from './prompts';\nimport { parseAnalysisResponse } from './response-parser';\n\n// ============================================================================\n// Types\n// ============================================================================\n\n/**\n * Options for running the analysis executor.\n */\nexport interface AnalysisExecutorOptions {\n /** AI invoker configured for analysis (with MCP tools) */\n aiInvoker: AIInvoker;\n /** Module graph from Phase 1 (Discovery) */\n graph: ModuleGraph;\n /** Analysis depth */\n depth: 'shallow' | 'normal' | 'deep';\n /** Maximum concurrent AI sessions (default: 5) */\n concurrency?: number;\n /** Timeout per module in milliseconds */\n timeoutMs?: number;\n /** Number of retry attempts for failed map operations (default: 1) */\n retryAttempts?: number;\n /** AI model to use */\n model?: string;\n /** Progress callback */\n onProgress?: (progress: JobProgress) => void;\n /** Cancellation check function */\n isCancelled?: () => boolean;\n /**\n * Optional callback invoked after each individual module analysis completes.\n * Useful for incremental per-module cache writes during long-running analysis.\n */\n onItemComplete?: ItemCompleteCallback;\n}\n\n/**\n * Result of the analysis executor.\n */\nexport interface AnalysisExecutorResult {\n /** Successfully parsed analyses */\n analyses: ModuleAnalysis[];\n /** Module IDs that failed analysis */\n failedModuleIds: string[];\n /** Total duration in milliseconds */\n duration: number;\n}\n\n// ============================================================================\n// Module \u2192 PromptItem Conversion\n// ============================================================================\n\n/**\n * Convert a ModuleInfo into a PromptItem for template substitution.\n * PromptItem requires flat string key-value pairs.\n */\nexport function moduleToPromptItem(module: ModuleInfo, graph: ModuleGraph): PromptItem {\n return {\n moduleId: module.id,\n moduleName: module.name,\n modulePath: module.path,\n purpose: module.purpose,\n keyFiles: module.keyFiles.join(', '),\n dependencies: module.dependencies.join(', ') || 'none',\n dependents: module.dependents.join(', ') || 'none',\n complexity: module.complexity,\n category: module.category,\n projectName: graph.project.name,\n architectureNotes: graph.architectureNotes || 'No architecture notes available.',\n };\n}\n\n// ============================================================================\n// Analysis Executor\n// ============================================================================\n\n/**\n * Run the analysis executor on all modules in the graph.\n *\n * Uses MapReduceExecutor from pipeline-core with:\n * - PromptMapJob for template substitution + AI invocation\n * - Analysis prompt template (depth-dependent)\n * - Structured JSON output parsing\n *\n * @param options Executor options\n * 
@returns Analysis results (successes + failures)\n */\nexport async function runAnalysisExecutor(\n options: AnalysisExecutorOptions\n): Promise<AnalysisExecutorResult> {\n const startTime = Date.now();\n const {\n aiInvoker,\n graph,\n depth,\n concurrency = 5,\n timeoutMs,\n retryAttempts = 1,\n model,\n onProgress,\n isCancelled,\n onItemComplete,\n } = options;\n\n const modules = graph.modules;\n if (modules.length === 0) {\n return { analyses: [], failedModuleIds: [], duration: 0 };\n }\n\n // Build the prompt template and output fields (shared across all rounds)\n const promptTemplate = buildAnalysisPromptTemplate(depth);\n const outputFields = getAnalysisOutputFields();\n\n // Run initial analysis round\n const { analyses, failedModuleIds } = await executeAnalysisRound({\n modules, graph, aiInvoker, promptTemplate, outputFields,\n concurrency, timeoutMs, model, onProgress, isCancelled, onItemComplete,\n });\n\n // Retry failed modules (up to retryAttempts rounds)\n if (failedModuleIds.length > 0 && retryAttempts > 0) {\n const logger = getLogger();\n let remainingFailed = [...failedModuleIds];\n\n for (let attempt = 0; attempt < retryAttempts && remainingFailed.length > 0; attempt++) {\n if (isCancelled?.()) break;\n\n logger.debug(LogCategory.MAP_REDUCE, `Retrying ${remainingFailed.length} failed module(s) (attempt ${attempt + 1}/${retryAttempts})`);\n\n // Get the modules that failed\n const retryModules = modules.filter(m => remainingFailed.includes(m.id));\n\n const retryResult = await executeAnalysisRound({\n modules: retryModules, graph, aiInvoker, promptTemplate, outputFields,\n concurrency, timeoutMs, model, onProgress, isCancelled, onItemComplete,\n });\n\n // Add newly succeeded analyses\n analyses.push(...retryResult.analyses);\n\n // Update remaining failures\n remainingFailed = retryResult.failedModuleIds;\n }\n\n // Replace failedModuleIds with the final set of failures\n failedModuleIds.length = 0;\n failedModuleIds.push(...remainingFailed);\n }\n\n return {\n analyses,\n failedModuleIds,\n duration: Date.now() - startTime,\n };\n}\n\n/**\n * Options for a single analysis round.\n */\ninterface AnalysisRoundOptions {\n modules: ModuleInfo[];\n graph: ModuleGraph;\n aiInvoker: AIInvoker;\n promptTemplate: string;\n outputFields: string[];\n concurrency: number;\n timeoutMs?: number;\n model?: string;\n onProgress?: (progress: JobProgress) => void;\n isCancelled?: () => boolean;\n onItemComplete?: ItemCompleteCallback;\n}\n\n/**\n * Execute a single round of analysis for the given modules.\n * Returns successfully parsed analyses and the IDs of modules that failed.\n */\nasync function executeAnalysisRound(\n options: AnalysisRoundOptions\n): Promise<{ analyses: ModuleAnalysis[]; failedModuleIds: string[] }> {\n const { modules, graph, aiInvoker, promptTemplate, outputFields, concurrency, timeoutMs, model, onProgress, isCancelled, onItemComplete } = options;\n // Convert modules to PromptItems\n const items: PromptItem[] = modules.map(m => moduleToPromptItem(m, graph));\n\n // Create prompt map input\n const input = createPromptMapInput(items, promptTemplate, outputFields);\n\n // Create the job\n const job = createPromptMapJob({\n aiInvoker,\n outputFormat: 'json',\n model,\n maxConcurrency: concurrency,\n });\n\n // Create the executor (no executor-level retry \u2014 we handle retry at the analysis level)\n const executor = createExecutor({\n aiInvoker,\n maxConcurrency: concurrency,\n reduceMode: 'deterministic',\n showProgress: true,\n retryOnFailure: false,\n 
timeoutMs,\n jobName: 'Deep Analysis',\n onProgress,\n isCancelled,\n onItemComplete,\n });\n\n // Execute map-reduce\n const result = await executor.execute(job, input);\n\n // Parse results into ModuleAnalysis objects\n const analyses: ModuleAnalysis[] = [];\n const failedModuleIds: string[] = [];\n\n if (result.output) {\n const logger = getLogger();\n const output = result.output as PromptMapOutput;\n for (const mapResult of output.results) {\n const moduleId = mapResult.item.moduleId;\n\n if (mapResult.success && mapResult.rawResponse) {\n try {\n const analysis = parseAnalysisResponse(mapResult.rawResponse, moduleId);\n analyses.push(analysis);\n } catch (parseErr1) {\n // Parse failed \u2014 try with the output fields\n try {\n const analysis = parseOutputAsAnalysis(mapResult.output, moduleId);\n analyses.push(analysis);\n } catch (parseErr2) {\n logger.debug(LogCategory.MAP_REDUCE, `Analysis parse failed for module \"${moduleId}\". rawResponse (${mapResult.rawResponse.length} chars): ${mapResult.rawResponse.substring(0, 500)}`);\n logger.debug(LogCategory.MAP_REDUCE, ` Parse error 1: ${parseErr1 instanceof Error ? parseErr1.message : String(parseErr1)}`);\n logger.debug(LogCategory.MAP_REDUCE, ` Parse error 2: ${parseErr2 instanceof Error ? parseErr2.message : String(parseErr2)}`);\n failedModuleIds.push(moduleId);\n }\n }\n } else if (mapResult.success && mapResult.output) {\n // No raw response but has parsed output\n try {\n const analysis = parseOutputAsAnalysis(mapResult.output, moduleId);\n analyses.push(analysis);\n } catch {\n failedModuleIds.push(moduleId);\n }\n } else if (!mapResult.success && mapResult.rawResponse) {\n // Map-reduce reported failure (e.g. pipeline-core JSON parse failed),\n // but raw response is available \u2014 try deep-wiki's more tolerant parser\n try {\n const analysis = parseAnalysisResponse(mapResult.rawResponse, moduleId);\n analyses.push(analysis);\n logger.debug(LogCategory.MAP_REDUCE, `Analysis recovered for module \"${moduleId}\" from rawResponse (pipeline-core parse had failed: ${mapResult.error})`);\n } catch (recoveryErr) {\n // Recovery also failed \u2014 try output fields as last resort\n try {\n const analysis = parseOutputAsAnalysis(mapResult.output, moduleId);\n analyses.push(analysis);\n } catch {\n logger.debug(LogCategory.MAP_REDUCE, `Analysis failed for module \"${moduleId}\": success=${mapResult.success}, error=${mapResult.error || 'none'}, rawResponse=${mapResult.rawResponse.length} chars: ${mapResult.rawResponse.substring(0, 300)}`);\n logger.debug(LogCategory.MAP_REDUCE, ` Recovery parse error: ${recoveryErr instanceof Error ? recoveryErr.message : String(recoveryErr)}`);\n failedModuleIds.push(moduleId);\n }\n }\n } else {\n logger.debug(LogCategory.MAP_REDUCE, `Analysis failed for module \"${moduleId}\": success=${mapResult.success}, error=${mapResult.error || 'none'}, rawResponse=${mapResult.rawResponse ? 
`${mapResult.rawResponse.length} chars: ${mapResult.rawResponse.substring(0, 300)}` : 'none'}`);\n failedModuleIds.push(moduleId);\n }\n }\n } else {\n // All failed\n for (const module of modules) {\n failedModuleIds.push(module.id);\n }\n }\n\n return { analyses, failedModuleIds };\n}\n\n// ============================================================================\n// Helpers\n// ============================================================================\n\n/**\n * Parse a map result output (Record<string, unknown>) as a ModuleAnalysis.\n * Used as a fallback when rawResponse parsing fails but we have structured output.\n */\nfunction parseOutputAsAnalysis(\n output: Record<string, unknown>,\n expectedModuleId: string\n): ModuleAnalysis {\n // Wrap the output in a JSON string and pass through the main parser\n const jsonStr = JSON.stringify(output);\n return parseAnalysisResponse(jsonStr, expectedModuleId);\n}\n", "/**\n * Analysis Module \u2014 Public API\n *\n * Phase 3 (Deep Analysis) entry point. Converts ModuleGraph modules\n * into PromptItems and runs parallel AI sessions with MCP tools\n * to produce detailed ModuleAnalysis results.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport type { AnalysisOptions, AnalysisResult, ModuleAnalysis } from '../types';\nimport type { AIInvoker, JobProgress, ItemCompleteCallback } from '@plusplusoneplusplus/pipeline-core';\nimport { runAnalysisExecutor } from './analysis-executor';\n\n// Re-export for convenience\nexport { parseAnalysisResponse, extractJSON } from './response-parser';\nexport { buildAnalysisPromptTemplate, getAnalysisOutputFields, getInvestigationSteps } from './prompts';\nexport { moduleToPromptItem, runAnalysisExecutor } from './analysis-executor';\nexport type { AnalysisExecutorOptions, AnalysisExecutorResult } from './analysis-executor';\n\n// ============================================================================\n// Public API\n// ============================================================================\n\n/**\n * Analyze all modules in the graph using AI with MCP tool access.\n *\n * @param options Analysis options\n * @param aiInvoker Configured AI invoker for analysis (with MCP tools)\n * @param onProgress Optional progress callback\n * @param isCancelled Optional cancellation check\n * @param onItemComplete Optional per-item completion callback for incremental saving\n * @returns Analysis results\n */\nexport async function analyzeModules(\n options: AnalysisOptions,\n aiInvoker: AIInvoker,\n onProgress?: (progress: JobProgress) => void,\n isCancelled?: () => boolean,\n onItemComplete?: ItemCompleteCallback,\n): Promise<AnalysisResult> {\n const startTime = Date.now();\n\n const result = await runAnalysisExecutor({\n aiInvoker,\n graph: options.graph,\n depth: options.depth || 'normal',\n concurrency: options.concurrency || 5,\n timeoutMs: options.timeout || 180_000,\n model: options.model,\n onProgress,\n isCancelled,\n onItemComplete,\n });\n\n return {\n analyses: result.analyses,\n duration: Date.now() - startTime,\n };\n}\n", "/**\n * Phase 3: Deep Analysis\n *\n * Performs AI-powered analysis of each module, with incremental caching support.\n */\n\nimport type { GenerateCommandOptions, ModuleGraph, ModuleAnalysis } from '../../types';\nimport type { AIInvoker } from '@plusplusoneplusplus/pipeline-core';\nimport { resolvePhaseModel, resolvePhaseTimeout, resolvePhaseConcurrency, resolvePhaseDepth } from '../../config-loader';\nimport { analyzeModules, parseAnalysisResponse } from 
'../../analysis';\nimport { createAnalysisInvoker } from '../../ai-invoker';\nimport { UsageTracker } from '../../usage-tracker';\nimport {\n getCachedAnalyses,\n saveAllAnalyses,\n getModulesNeedingReanalysis,\n getCachedAnalysis,\n saveAnalysis,\n getFolderHeadHash,\n scanIndividualAnalysesCache,\n scanIndividualAnalysesCacheAny,\n} from '../../cache';\nimport {\n Spinner,\n printSuccess,\n printError,\n printWarning,\n printInfo,\n printHeader,\n} from '../../logger';\nimport { getErrorMessage } from '../../utils/error-utils';\nimport { EXIT_CODES } from '../../cli';\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface Phase3AnalysisResult {\n analyses?: ModuleAnalysis[];\n duration: number;\n exitCode?: number;\n /** Module IDs that were re-analyzed (not loaded from cache) in this run.\n * Empty array means all modules were cached; undefined means unknown. */\n reanalyzedModuleIds?: string[];\n}\n\n// ============================================================================\n// Phase 3: Deep Analysis\n// ============================================================================\n\nexport async function runPhase3Analysis(\n repoPath: string,\n graph: ModuleGraph,\n options: GenerateCommandOptions,\n isCancelled: () => boolean,\n usageTracker?: UsageTracker\n): Promise<Phase3AnalysisResult> {\n const startTime = Date.now();\n\n process.stderr.write('\\n');\n printHeader('Phase 3: Deep Analysis');\n\n // Resolve per-phase settings for analysis\n const analysisModel = resolvePhaseModel(options, 'analysis');\n const analysisTimeout = resolvePhaseTimeout(options, 'analysis');\n const analysisConcurrency = resolvePhaseConcurrency(options, 'analysis') || 5;\n const analysisDepth = resolvePhaseDepth(options, 'analysis');\n const concurrency = analysisConcurrency;\n\n // Determine which modules need analysis\n let modulesToAnalyze = graph.modules;\n let cachedAnalyses: ModuleAnalysis[] = [];\n\n if (!options.force) {\n if (options.useCache) {\n // --use-cache: load all cached analyses regardless of git hash\n const allModuleIds = graph.modules.map(m => m.id);\n const { found, missing } = scanIndividualAnalysesCacheAny(\n allModuleIds, options.output\n );\n\n if (found.length > 0) {\n cachedAnalyses = found;\n modulesToAnalyze = graph.modules.filter(\n m => missing.includes(m.id)\n );\n\n if (missing.length === 0) {\n printSuccess(`All ${found.length} module analyses loaded from cache`);\n } else {\n printInfo(`Loaded ${found.length} cached analyses, ${missing.length} remaining`);\n }\n }\n } else {\n // Try incremental rebuild\n const needingReanalysis = await getModulesNeedingReanalysis(\n graph, options.output, repoPath\n );\n\n if (needingReanalysis !== null) {\n if (needingReanalysis.length === 0) {\n // All modules are up-to-date\n const allCached = getCachedAnalyses(options.output);\n if (allCached && allCached.length > 0) {\n printSuccess(`All ${allCached.length} module analyses are up-to-date (cached)`);\n usageTracker?.markCached('analysis');\n return { analyses: allCached, duration: Date.now() - startTime, reanalyzedModuleIds: [] };\n }\n } else {\n // Partial rebuild\n printInfo(`${needingReanalysis.length} modules changed, ${graph.modules.length - needingReanalysis.length} cached`);\n\n // Load cached analyses for unchanged modules\n for (const module of graph.modules) {\n if (!needingReanalysis.includes(module.id)) {\n const cached = 
getCachedAnalysis(module.id, options.output);\n if (cached) {\n cachedAnalyses.push(cached);\n } else {\n // Cache miss for this module \u2014 add to re-analyze list\n needingReanalysis.push(module.id);\n }\n }\n }\n\n // Only analyze changed modules\n modulesToAnalyze = graph.modules.filter(\n m => needingReanalysis.includes(m.id)\n );\n }\n } else {\n // No metadata (full rebuild indicated) \u2014 but check for partial cache\n // from a previous interrupted run that saved modules incrementally.\n const currentHash = await getFolderHeadHash(repoPath);\n if (currentHash) {\n const allModuleIds = graph.modules.map(m => m.id);\n const { found, missing } = scanIndividualAnalysesCache(\n allModuleIds, options.output, currentHash\n );\n\n if (found.length > 0) {\n printInfo(`Recovered ${found.length} module analyses from partial cache, ${missing.length} remaining`);\n cachedAnalyses = found;\n modulesToAnalyze = graph.modules.filter(\n m => missing.includes(m.id)\n );\n }\n }\n }\n }\n }\n\n if (modulesToAnalyze.length === 0 && cachedAnalyses.length > 0) {\n printSuccess(`All analyses loaded from cache (${cachedAnalyses.length} modules)`);\n usageTracker?.markCached('analysis');\n return { analyses: cachedAnalyses, duration: Date.now() - startTime, reanalyzedModuleIds: [] };\n }\n\n // Create analysis invoker (MCP-enabled, direct sessions)\n const baseAnalysisInvoker = createAnalysisInvoker({\n repoPath,\n model: analysisModel,\n timeoutMs: analysisTimeout ? analysisTimeout * 1000 : undefined,\n });\n\n // Wrap invoker to capture token usage\n const analysisInvoker: AIInvoker = async (prompt, opts) => {\n const result = await baseAnalysisInvoker(prompt, opts);\n usageTracker?.addUsage('analysis', result.tokenUsage);\n return result;\n };\n\n // Get git hash once upfront for per-module incremental saves (subfolder-scoped)\n let gitHash: string | null = null;\n try {\n gitHash = await getFolderHeadHash(repoPath);\n } catch {\n // Non-fatal: incremental saves won't work but analysis continues\n }\n\n const spinner = new Spinner();\n spinner.start(`Analyzing ${modulesToAnalyze.length} modules (${concurrency} parallel)...`);\n\n try {\n // Build a sub-graph with only the modules to analyze\n const subGraph = {\n ...graph,\n modules: modulesToAnalyze,\n };\n\n const result = await analyzeModules(\n {\n graph: subGraph,\n model: analysisModel,\n timeout: analysisTimeout ? 
analysisTimeout * 1000 : undefined,\n concurrency,\n depth: analysisDepth,\n repoPath,\n },\n analysisInvoker,\n (progress) => {\n if (progress.phase === 'mapping') {\n spinner.update(\n `Analyzing modules: ${progress.completedItems}/${progress.totalItems} ` +\n `(${progress.failedItems} failed)`\n );\n }\n },\n isCancelled,\n // Per-module incremental save callback\n (item, mapResult) => {\n if (!gitHash || !mapResult.success || !mapResult.output) {\n return;\n }\n try {\n // Extract moduleId and rawResponse from the PromptMapResult\n const output = mapResult.output as { item?: { moduleId?: string }; rawResponse?: string };\n const moduleId = output?.item?.moduleId;\n const rawResponse = output?.rawResponse;\n if (moduleId && rawResponse) {\n const analysis = parseAnalysisResponse(rawResponse, moduleId);\n saveAnalysis(moduleId, analysis, options.output, gitHash);\n }\n } catch {\n // Non-fatal: per-module save failed, bulk save at end will catch it\n }\n },\n );\n\n // Merge fresh + cached\n const allAnalyses = [...cachedAnalyses, ...result.analyses];\n\n if (result.analyses.length === 0 && modulesToAnalyze.length > 0) {\n spinner.fail('All module analyses failed');\n printError('No modules could be analyzed. Check your AI SDK setup or try reducing scope with --focus.');\n return { duration: Date.now() - startTime, exitCode: EXIT_CODES.EXECUTION_ERROR };\n }\n\n const failedCount = modulesToAnalyze.length - result.analyses.length;\n if (failedCount > 0) {\n spinner.warn(`Analysis complete \u2014 ${result.analyses.length} succeeded, ${failedCount} failed`);\n\n // Strict mode: fail the phase if any module failed after retries\n if (options.strict !== false) {\n // Determine which modules failed\n const succeededIds = new Set(result.analyses.map(a => a.moduleId));\n const failedModuleIds = modulesToAnalyze\n .filter(m => !succeededIds.has(m.id))\n .map(m => m.id);\n printError(\n `Strict mode: ${failedCount} module(s) failed analysis: ${failedModuleIds.join(', ')}. ` +\n `Use --no-strict to continue with partial results.`\n );\n return { duration: Date.now() - startTime, exitCode: EXIT_CODES.EXECUTION_ERROR };\n }\n } else {\n spinner.succeed(`Analysis complete \u2014 ${result.analyses.length} modules analyzed`);\n }\n\n // Save to cache (writes metadata + any modules not yet saved incrementally)\n try {\n await saveAllAnalyses(allAnalyses, options.output, repoPath);\n } catch {\n if (options.verbose) {\n printWarning('Failed to cache analyses (non-fatal)');\n }\n }\n\n return {\n analyses: allAnalyses,\n duration: Date.now() - startTime,\n reanalyzedModuleIds: modulesToAnalyze.map(m => m.id),\n };\n } catch (error) {\n spinner.fail('Analysis failed');\n printError(getErrorMessage(error));\n return { duration: Date.now() - startTime, exitCode: EXIT_CODES.EXECUTION_ERROR };\n }\n}\n", "/**\n * Article Writing Prompt Templates\n *\n * Prompt templates for Phase 4 (Article Generation). Each module's analysis\n * is converted into a markdown article. 
Templates include cross-link\n * information and Mermaid diagram integration.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport type { ModuleAnalysis, ModuleGraph, ModuleInfo, AreaInfo } from '../types';\n\n// ============================================================================\n// Simplified Graph for Cross-Linking\n// ============================================================================\n\n/**\n * Build a simplified module graph for cross-link reference.\n * Contains only names, IDs, and paths \u2014 not full analysis data.\n */\nexport function buildSimplifiedGraph(graph: ModuleGraph): string {\n const simplified = graph.modules.map(m => ({\n id: m.id,\n name: m.name,\n path: m.path,\n category: m.category,\n }));\n return JSON.stringify(simplified, null, 2);\n}\n\n// ============================================================================\n// Depth Variants\n// ============================================================================\n\nconst SHALLOW_STYLE = `\nWrite a concise article (500-800 words) following this exact section structure:\n\n1. **Title & Overview** \u2014 Start with a level-1 heading and a short overview paragraph summarizing the module's purpose and role in the project.\n2. **Table of Contents** \u2014 Include a bullet list of anchor links to every major section (e.g., \\`- [Public API](#public-api)\\`).\n3. **Purpose & Scope** \u2014 Brief description of what this module does and where it fits.\n4. **Public API** \u2014 Table format with function/class names, signatures, and descriptions.\n5. **Usage Example** \u2014 A basic code example showing how to use this module.\n6. **Dependencies** \u2014 List key internal and external dependencies.\n7. **Sources & References** \u2014 A \"Sources\" section at the end listing the source files examined (from the analysis data's sourceFiles field), formatted as a bullet list of repo-relative file paths.`;\n\nconst NORMAL_STYLE = `\nWrite a comprehensive article (800-1500 words) following this exact section structure:\n\n1. **Title & Overview** \u2014 Start with a level-1 heading and a short overview paragraph summarizing the module's purpose and role in the project.\n2. **Table of Contents** \u2014 Include a bullet list of anchor links to every major section (e.g., \\`- [Architecture](#architecture)\\`).\n3. **Purpose & Scope** \u2014 What this module does, why it exists, and its responsibilities.\n4. **Architecture** \u2014 Internal design, component structure, and design patterns used. Include a Mermaid diagram if the analysis suggests one.\n5. **Public API Reference** \u2014 Table with function/class names, signatures, and descriptions.\n6. **Data Flow** \u2014 How data moves through this module, with clear explanation of inputs and outputs.\n7. **Usage Examples** \u2014 Code examples with fenced code blocks and language tags.\n8. **Dependencies** \u2014 Internal module dependencies and external package dependencies with usage context.\n9. **Sources & References** \u2014 A \"Sources\" section at the end listing ALL source files examined (from the analysis data's sourceFiles field), formatted as a bullet list of repo-relative file paths.`;\n\nconst DEEP_STYLE = `\nWrite a thorough, detailed article (1500-3000 words) following this exact section structure:\n\n1. **Title & Overview** \u2014 Start with a level-1 heading and a short overview paragraph summarizing the module's purpose, role, and significance in the project.\n2. 
**Table of Contents** \u2014 Include a bullet list of anchor links to every major section and subsection (e.g., \\`- [Architecture](#architecture)\\`, \\` - [Design Patterns](#design-patterns)\\`).\n3. **Purpose & Scope** \u2014 What this module does, why it exists, its responsibilities, and its context within the broader system.\n4. **Architecture** \u2014 Detailed internal design walkthrough with:\n - Component structure and relationships\n - Design patterns identified (### Design Patterns subsection)\n - Mermaid diagrams (architecture + data flow if possible)\n5. **Public API Reference** \u2014 Complete API reference with full type signatures, parameter descriptions, return values, and usage notes. Use tables.\n6. **Data Flow & Control Flow** \u2014 In-depth explanation of how data and control move through this module, including edge cases.\n7. **Error Handling** \u2014 Error handling patterns, recovery strategies, and error propagation.\n8. **Performance Considerations** \u2014 Performance characteristics, potential bottlenecks, and optimization notes.\n9. **Usage Examples** \u2014 Multiple code examples covering different aspects, with fenced code blocks, language tags, and file path references.\n10. **Dependencies** \u2014 Detailed analysis of internal module dependencies and external packages, including how each is used.\n11. **Related Modules** \u2014 Cross-references to related modules with brief descriptions of relationships.\n12. **Sources & References** \u2014 A \"Sources\" section at the end listing ALL source files examined (from the analysis data's sourceFiles field), formatted as a bullet list of repo-relative file paths.`;\n\n// ============================================================================\n// Module Article Prompt\n// ============================================================================\n\n/**\n * Get the style guide for a given depth.\n */\nexport function getArticleStyleGuide(depth: 'shallow' | 'normal' | 'deep'): string {\n switch (depth) {\n case 'shallow': return SHALLOW_STYLE;\n case 'deep': return DEEP_STYLE;\n default: return NORMAL_STYLE;\n }\n}\n\n/**\n * Build the prompt for generating a single module article.\n *\n * @param analysis The module's analysis data\n * @param graph The full module graph (for cross-linking)\n * @param depth Article depth\n * @returns Complete prompt string\n */\nexport function buildModuleArticlePrompt(\n analysis: ModuleAnalysis,\n graph: ModuleGraph,\n depth: 'shallow' | 'normal' | 'deep'\n): string {\n const simplifiedGraph = buildSimplifiedGraph(graph);\n const styleGuide = getArticleStyleGuide(depth);\n\n // Find module info for additional context\n const moduleInfo = graph.modules.find(m => m.id === analysis.moduleId);\n const moduleName = moduleInfo?.name || analysis.moduleId;\n const areaId = moduleInfo?.area;\n const crossLinkRules = buildCrossLinkRules(areaId);\n\n return `You are writing a wiki article for the \"${moduleName}\" module.\n\n## Analysis Data\n\nThe following is a detailed analysis of this module:\n\n\\`\\`\\`json\n${JSON.stringify(analysis, null, 2)}\n\\`\\`\\`\n\n## Module Graph (for cross-linking)\n\nUse this to create cross-references to other modules:\n\n\\`\\`\\`json\n${simplifiedGraph}\n\\`\\`\\`\n\n## Instructions\n${styleGuide}\n\n${crossLinkRules}\n\n## Mermaid Diagrams\n\nIf the analysis includes a suggestedDiagram, include it in the article wrapped in:\n\\`\\`\\`mermaid\n(diagram content)\n\\`\\`\\`\n\n## Format\n\n- Use GitHub-Flavored Markdown\n- Start with a level-1 
heading: # ${moduleName}\n- Follow with a short overview summary paragraph (2-3 sentences)\n- Include a **Table of Contents** section with anchor links to all major sections\n- Use proper heading hierarchy (## for sections, ### for subsections) \u2014 keep headers anchor-friendly (lowercase, hyphenated)\n- Use fenced code blocks with language tags for code examples\n- Use tables for API references where appropriate\n- Include file path references (e.g., \\`src/module/file.ts:42\\`) when citing code\n- Include Mermaid diagrams wrapped in \\`\\`\\`mermaid blocks where analysis suggests them\n- End with a ## Sources section listing source file paths as a bullet list\n\nDo NOT write, create, or save any files to disk. Return ONLY the markdown content in your response.`;\n}\n\n/**\n * Build cross-linking rules based on whether areas exist (hierarchical layout).\n *\n * @param areaId - The area this module belongs to (undefined for flat layout)\n * @returns Cross-linking rules string for prompt\n */\nexport function buildCrossLinkRules(areaId?: string): string {\n if (!areaId) {\n // Flat layout (small repos)\n return `## Cross-Linking Rules\n\n- Link to other modules using relative paths: [Module Name](./modules/module-id.md)\n- For the index page, link as: [Module Name](./modules/module-id.md)\n- Use the module graph above to find valid module IDs for links\n- Only link to modules that actually exist in the graph`;\n }\n\n // Hierarchical layout (large repos with areas)\n return `## Cross-Linking Rules\n\n- This article is located at: areas/${areaId}/modules/<this-module>.md\n- Link to modules in the SAME area: [Module Name](./module-id.md) (they are sibling files)\n- Link to modules in OTHER areas: [Module Name](../../other-area-id/modules/module-id.md)\n- Link to this area's index: [Area Index](../index.md)\n- Link to the project index: [Project Index](../../../index.md)\n- Use the module graph above to find valid module IDs and their areas for links\n- Only link to modules that actually exist in the graph`;\n}\n\n/**\n * Build the prompt template for the map-reduce framework.\n * Uses {{variable}} placeholders for template substitution.\n *\n * @param depth Article depth\n * @param areaId Optional area ID for hierarchical cross-linking\n * @returns Prompt template string\n */\nexport function buildModuleArticlePromptTemplate(depth: 'shallow' | 'normal' | 'deep', areaId?: string): string {\n const styleGuide = getArticleStyleGuide(depth);\n const crossLinkRules = buildCrossLinkRules(areaId);\n\n return `You are writing a wiki article for the \"{{moduleName}}\" module.\n\n## Analysis Data\n\nThe following is a detailed analysis of this module:\n\n\\`\\`\\`json\n{{analysis}}\n\\`\\`\\`\n\n## Module Graph (for cross-linking)\n\nUse this to create cross-references to other modules:\n\n\\`\\`\\`json\n{{moduleGraph}}\n\\`\\`\\`\n\n## Instructions\n${styleGuide}\n\n${crossLinkRules}\n\n## Mermaid Diagrams\n\nIf the analysis includes a suggestedDiagram, include it in the article wrapped in:\n\\`\\`\\`mermaid\n(diagram content)\n\\`\\`\\`\n\n## Format\n\n- Use GitHub-Flavored Markdown\n- Start with a level-1 heading: # {{moduleName}}\n- Follow with a short overview summary paragraph (2-3 sentences)\n- Include a **Table of Contents** section with anchor links to all major sections\n- Use proper heading hierarchy (## for sections, ### for subsections) \u2014 keep headers anchor-friendly (lowercase, hyphenated)\n- Use fenced code blocks with language tags for code examples\n- Use tables for API 
references where appropriate\n- Include file path references (e.g., \\`src/module/file.ts:42\\`) when citing code\n- Include Mermaid diagrams wrapped in \\`\\`\\`mermaid blocks where analysis suggests them\n- End with a ## Sources section listing source file paths as a bullet list\n\nDo NOT write, create, or save any files to disk. Return ONLY the markdown content in your response.`;\n}\n", "/**\n * Reduce Prompt Templates\n *\n * Prompt templates for the reduce phase of Phase 4 (Article Generation).\n * The AI reducer receives module summaries and generates:\n * - index.md: Categorized table of contents with module summaries\n * - architecture.md: High-level architecture with Mermaid diagrams\n * - getting-started.md: Setup, build, and run instructions\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\n/**\n * Build the reduce prompt for generating index, architecture, and getting-started pages.\n *\n * Template variables (substituted by the map-reduce framework):\n * - {{RESULTS}}: JSON array of module summaries (not full articles)\n * - {{COUNT}}: Total number of modules\n * - {{SUCCESS_COUNT}}: Successfully analyzed modules\n * - {{FAILURE_COUNT}}: Failed modules\n * - {{projectName}}: Project name\n * - {{projectDescription}}: Project description\n * - {{buildSystem}}: Build system\n * - {{language}}: Primary language\n *\n * @returns Reduce prompt template string\n */\nexport function buildReducePromptTemplate(): string {\n return `You are generating overview pages for a codebase wiki.\n\n## Project Information\n\n- **Project:** {{projectName}}\n- **Description:** {{projectDescription}}\n- **Language:** {{language}}\n- **Build System:** {{buildSystem}}\n\n## Module Articles\n\nThe following {{COUNT}} modules have been analyzed and documented:\n\n{{RESULTS}}\n\n## Task\n\nGenerate THREE pages as a single JSON object. Each page should be a complete markdown document.\n\n### 1. index.md\n\nCreate a comprehensive index page following DeepWiki structure:\n- Project title (level-1 heading) and a short overview summary paragraph (2-3 sentences)\n- **Table of Contents** with anchor links to every section on the page\n- Categorized module listing: group modules by category, each with a brief (1-2 sentence) summary\n- Links to module articles using: [Module Name](./modules/module-id.md)\n- Quick start section pointing to getting-started.md\n- Use proper heading hierarchy (## for categories, ### where needed)\n\n### 2. architecture.md\n\nCreate an architecture overview page following DeepWiki structure:\n- Title (level-1 heading) and short overview summary\n- **Table of Contents** with anchor links to all sections\n- System Overview section describing the high-level architecture\n- High-level Mermaid component/flowchart diagram showing module relationships\n- Architectural Layers section describing tiers and boundaries\n- Key Design Decisions section covering patterns and rationale\n- Data Flow Overview section explaining cross-module data flow\n- Module Interaction Summary section\n- **Sources** section at the end listing the module files/paths that informed the architecture\n\n### 3. 
getting-started.md\n\nCreate a getting started guide following DeepWiki structure:\n- Title (level-1 heading) and short overview summary\n- **Table of Contents** with anchor links to all sections\n- Prerequisites section (language, tools, versions)\n- Installation / Setup section with step-by-step instructions\n- Build Instructions section\n- Running the Project section\n- Key Entry Points section describing where to start reading code\n- Links to relevant module articles\n- **Sources** section at the end referencing relevant config/setup files\n\n## Output Format\n\nReturn a JSON object with exactly three fields:\n\\`\\`\\`json\n{\n \"index\": \"full markdown content for index.md\",\n \"architecture\": \"full markdown content for architecture.md\",\n \"gettingStarted\": \"full markdown content for getting-started.md\"\n}\n\\`\\`\\`\n\nIMPORTANT:\n- All links to modules must use the format: [Module Name](./modules/module-id.md)\n- Mermaid diagrams should use \\`\\`\\`mermaid code blocks\n- Each page should be a complete, standalone markdown document\n- Use proper heading hierarchy starting with # for each page\n- Every page MUST include a Table of Contents section with anchor links after the overview paragraph\n- Keep heading text anchor-friendly (descriptive, consistent casing)\n- architecture.md and getting-started.md should end with a ## Sources section\n\nDo NOT write, create, or save any files to disk. Return ONLY the JSON object in your response.`;\n}\n\n/**\n * Get the output fields for the reduce phase.\n */\nexport function getReduceOutputFields(): string[] {\n return ['index', 'architecture', 'gettingStarted'];\n}\n\n/**\n * Build a concise module summary for the reduce phase.\n * We don't send full articles to the reducer \u2014 just names and overviews.\n */\nexport function buildModuleSummaryForReduce(\n moduleId: string,\n moduleName: string,\n category: string,\n overview: string\n): string {\n return JSON.stringify({\n id: moduleId,\n name: moduleName,\n category,\n overview: overview.substring(0, 500), // Truncate for token efficiency\n });\n}\n\n// ============================================================================\n// Area-Level Reduce Prompt (Hierarchical \u2014 Large Repos Only)\n// ============================================================================\n\n/**\n * Build the reduce prompt for generating area-level index and architecture pages.\n * Used in the 2-tier reduce for large repos: per-area reduce first, then project-level.\n *\n * Template variables:\n * - {{RESULTS}}: JSON array of module summaries for this area only\n * - {{COUNT}}: Number of modules in this area\n * - {{SUCCESS_COUNT}}: Successfully analyzed modules\n * - {{FAILURE_COUNT}}: Failed modules\n * - {{areaName}}: Area name\n * - {{areaDescription}}: Area description\n * - {{areaPath}}: Area path\n * - {{projectName}}: Project name\n *\n * @returns Reduce prompt template string\n */\nexport function buildAreaReducePromptTemplate(): string {\n return `You are generating overview pages for the \"{{areaName}}\" area of a codebase wiki.\n\n## Area Information\n\n- **Area:** {{areaName}}\n- **Path:** {{areaPath}}\n- **Description:** {{areaDescription}}\n- **Project:** {{projectName}}\n\n## Module Articles\n\nThe following {{COUNT}} modules in this area have been analyzed and documented:\n\n{{RESULTS}}\n\n## Task\n\nGenerate TWO pages as a single JSON object. Each page should be a complete markdown document.\n\n### 1. 
index.md (Area Index)\n\nCreate an area-level index page following DeepWiki structure:\n- Area name (level-1 heading) and a short overview summary paragraph (2-3 sentences)\n- **Table of Contents** with anchor links to every section on the page\n- Module listing with brief (1-2 sentence) summary for each module\n- Links to module articles using: [Module Name](./modules/module-id.md)\n- Overview of how modules in this area interact\n\n### 2. architecture.md (Area Architecture)\n\nCreate an area-level architecture page following DeepWiki structure:\n- Title (level-1 heading) and short overview summary\n- **Table of Contents** with anchor links to all sections\n- Mermaid component diagram showing module relationships within this area\n- Description of the area's internal architecture\n- Key design decisions specific to this area\n- Data flow between modules in this area\n- External dependencies (modules from other areas this area depends on)\n- **Sources** section at the end listing the key source files in this area\n\n## Output Format\n\nReturn a JSON object with exactly two fields:\n\\`\\`\\`json\n{\n \"index\": \"full markdown content for index.md\",\n \"architecture\": \"full markdown content for architecture.md\"\n}\n\\`\\`\\`\n\nIMPORTANT:\n- All links to modules WITHIN this area must use: [Module Name](./modules/module-id.md)\n- Links to modules in OTHER areas must use: [Module Name](../../other-area-id/modules/module-id.md)\n- Mermaid diagrams should use \\`\\`\\`mermaid code blocks\n- Each page should be a complete, standalone markdown document\n- Use proper heading hierarchy starting with # for each page\n- Every page MUST include a Table of Contents section with anchor links after the overview paragraph\n- Keep heading text anchor-friendly (descriptive, consistent casing)\n- architecture.md should end with a ## Sources section\n\nDo NOT write, create, or save any files to disk. Return ONLY the JSON object in your response.`;\n}\n\n/**\n * Get the output fields for the area-level reduce phase.\n */\nexport function getAreaReduceOutputFields(): string[] {\n return ['index', 'architecture'];\n}\n\n// ============================================================================\n// Project-Level Reduce Prompt (Hierarchical \u2014 Large Repos Only)\n// ============================================================================\n\n/**\n * Build the project-level reduce prompt for large repos with area hierarchy.\n * Receives area summaries instead of raw module summaries.\n *\n * Template variables:\n * - {{RESULTS}}: JSON array of area summaries\n * - {{COUNT}}: Number of areas\n * - {{SUCCESS_COUNT}}: Successfully processed areas\n * - {{FAILURE_COUNT}}: Failed areas\n * - {{projectName}}: Project name\n * - {{projectDescription}}: Project description\n * - {{buildSystem}}: Build system\n * - {{language}}: Primary language\n *\n * @returns Reduce prompt template string\n */\nexport function buildHierarchicalReducePromptTemplate(): string {\n return `You are generating project-level overview pages for a large codebase wiki.\nThis project uses a hierarchical structure organized by areas.\n\n## Project Information\n\n- **Project:** {{projectName}}\n- **Description:** {{projectDescription}}\n- **Language:** {{language}}\n- **Build System:** {{buildSystem}}\n\n## Areas\n\nThe project is organized into {{COUNT}} top-level areas:\n\n{{RESULTS}}\n\n## Task\n\nGenerate THREE pages as a single JSON object. Each page should be a complete markdown document.\n\n### 1. 
index.md (Project Index)\n\nCreate a project-level index page following DeepWiki structure:\n- Project title (level-1 heading) and a short overview summary paragraph (2-3 sentences)\n- **Table of Contents** with anchor links to every section on the page\n- Table of areas with brief descriptions and links: [Area Name](./areas/area-id/index.md)\n- Quick start section pointing to getting-started.md\n- High-level project structure overview\n\n### 2. architecture.md (Project Architecture)\n\nCreate a project-level architecture overview following DeepWiki structure:\n- Title (level-1 heading) and short overview summary\n- **Table of Contents** with anchor links to all sections\n- System Overview section describing the high-level architecture\n- High-level Mermaid diagram showing area relationships\n- Architectural Layers section describing tiers and boundaries\n- How areas interact with each other\n- Key design decisions and patterns at the project level\n- **Sources** section at the end referencing key project-level config/entry files\n\n### 3. getting-started.md (Getting Started)\n\nCreate a getting started guide following DeepWiki structure:\n- Title (level-1 heading) and short overview summary\n- **Table of Contents** with anchor links to all sections\n- Prerequisites section (language, tools, versions)\n- Installation / Setup section with step-by-step instructions\n- Build Instructions section\n- Running the Project section\n- Key entry points organized by area\n- Links to relevant area indexes\n- **Sources** section at the end referencing relevant config/setup files\n\n## Output Format\n\nReturn a JSON object with exactly three fields:\n\\`\\`\\`json\n{\n \"index\": \"full markdown content for index.md\",\n \"architecture\": \"full markdown content for architecture.md\",\n \"gettingStarted\": \"full markdown content for getting-started.md\"\n}\n\\`\\`\\`\n\nIMPORTANT:\n- Links to areas must use: [Area Name](./areas/area-id/index.md)\n- Links to area architecture: [Area Architecture](./areas/area-id/architecture.md)\n- Links to specific modules: [Module Name](./areas/area-id/modules/module-id.md)\n- Mermaid diagrams should use \\`\\`\\`mermaid code blocks\n- Each page should be a complete, standalone markdown document\n- Use proper heading hierarchy starting with # for each page\n- Every page MUST include a Table of Contents section with anchor links after the overview paragraph\n- Keep heading text anchor-friendly (descriptive, consistent casing)\n- architecture.md and getting-started.md should end with a ## Sources section\n\nDo NOT write, create, or save any files to disk. Return ONLY the JSON object in your response.`;\n}\n", "/**\n * Article Executor\n *\n * Orchestrates Phase 4 (Article Generation) using the MapReduceExecutor\n * from pipeline-core. Runs two stages:\n * 1. Map: Generate per-module markdown articles (text mode, no structured output)\n * 2. 
Reduce: AI generates index, architecture, and getting-started pages\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport {\n createPromptMapJob,\n createPromptMapInput,\n createExecutor,\n} from '@plusplusoneplusplus/pipeline-core';\nimport type {\n AIInvoker,\n PromptItem,\n PromptMapOutput,\n JobProgress,\n ItemCompleteCallback,\n} from '@plusplusoneplusplus/pipeline-core';\nimport type {\n ModuleGraph,\n ModuleAnalysis,\n GeneratedArticle,\n} from '../types';\nimport { buildModuleArticlePromptTemplate, buildSimplifiedGraph } from './prompts';\nimport {\n buildReducePromptTemplate,\n getReduceOutputFields,\n buildModuleSummaryForReduce,\n buildAreaReducePromptTemplate,\n getAreaReduceOutputFields,\n buildHierarchicalReducePromptTemplate,\n} from './reduce-prompts';\nimport { normalizeModuleId } from '../schemas';\nimport type { AreaInfo } from '../types';\n\n// ============================================================================\n// Types\n// ============================================================================\n\n/**\n * Options for the article executor.\n */\nexport interface ArticleExecutorOptions {\n /** AI invoker for writing (session pool, no tools) */\n aiInvoker: AIInvoker;\n /** Module graph from Phase 1 (Discovery) */\n graph: ModuleGraph;\n /** Per-module analyses from Phase 3 (Analysis) */\n analyses: ModuleAnalysis[];\n /** Article depth */\n depth: 'shallow' | 'normal' | 'deep';\n /** Maximum concurrent AI sessions (default: 10) */\n concurrency?: number;\n /** Timeout per article in milliseconds */\n timeoutMs?: number;\n /** AI model to use */\n model?: string;\n /** Progress callback */\n onProgress?: (progress: JobProgress) => void;\n /** Cancellation check */\n isCancelled?: () => boolean;\n /**\n * Optional callback invoked after each individual article completes.\n * Useful for incremental per-article cache writes during long-running generation.\n */\n onItemComplete?: ItemCompleteCallback;\n}\n\n/**\n * Result of the article executor.\n */\nexport interface ArticleExecutorResult {\n /** Generated articles (module + index pages) */\n articles: GeneratedArticle[];\n /** Module IDs that failed article generation */\n failedModuleIds: string[];\n /** Total duration in milliseconds */\n duration: number;\n}\n\n// ============================================================================\n// Analysis \u2192 PromptItem Conversion\n// ============================================================================\n\n/**\n * Convert an analysis into a PromptItem for the article template.\n * Uses text mode (no output fields) so the AI returns raw markdown.\n */\nexport function analysisToPromptItem(\n analysis: ModuleAnalysis,\n graph: ModuleGraph\n): PromptItem {\n const moduleInfo = graph.modules.find(m => m.id === analysis.moduleId);\n const moduleName = moduleInfo?.name || analysis.moduleId;\n\n return {\n moduleId: analysis.moduleId,\n moduleName,\n analysis: JSON.stringify(analysis, null, 2),\n moduleGraph: buildSimplifiedGraph(graph),\n };\n}\n\n// ============================================================================\n// Article Executor\n// ============================================================================\n\n/**\n * Run the article executor to generate wiki articles.\n * Detects if `graph.areas` exists (large repo mode) and switches to hierarchical execution:\n * - If areas: group analyses by area \u2192 per-area map-reduce \u2192 project-level reduce\n * - If no areas: existing flat map-reduce (backward compat)\n *\n 
* @param options Executor options\n * @returns Generated articles\n */\nexport async function runArticleExecutor(\n options: ArticleExecutorOptions\n): Promise<ArticleExecutorResult> {\n const { graph } = options;\n\n // Detect hierarchical mode\n if (graph.areas && graph.areas.length > 0) {\n return runHierarchicalArticleExecutor(options);\n }\n\n return runFlatArticleExecutor(options);\n}\n\n/**\n * Flat article executor \u2014 original behavior for small repos without areas.\n */\nasync function runFlatArticleExecutor(\n options: ArticleExecutorOptions\n): Promise<ArticleExecutorResult> {\n const startTime = Date.now();\n const {\n aiInvoker,\n graph,\n analyses,\n depth,\n concurrency = 5,\n timeoutMs,\n model,\n onProgress,\n isCancelled,\n onItemComplete,\n } = options;\n\n if (analyses.length === 0) {\n return { articles: [], failedModuleIds: [], duration: 0 };\n }\n\n // Convert analyses to PromptItems\n const items: PromptItem[] = analyses.map(a => analysisToPromptItem(a, graph));\n\n // Build the article prompt template (text mode \u2014 no output fields)\n const promptTemplate = buildModuleArticlePromptTemplate(depth);\n\n // Create prompt map input\n const input = createPromptMapInput(items, promptTemplate, []);\n\n // Map phase only \u2014 reduce is done separately with module summaries\n // to avoid exceeding token limits (full articles can be very large)\n const job = createPromptMapJob({\n aiInvoker,\n outputFormat: 'list',\n model,\n maxConcurrency: concurrency,\n });\n\n // Create the executor\n const executor = createExecutor({\n aiInvoker,\n maxConcurrency: concurrency,\n reduceMode: 'deterministic',\n showProgress: true,\n retryOnFailure: false,\n timeoutMs,\n jobName: 'Article Generation',\n onProgress,\n isCancelled,\n onItemComplete,\n });\n\n // Execute map phase\n const result = await executor.execute(job, input);\n\n // Collect module articles from map results\n const articles: GeneratedArticle[] = [];\n const failedModuleIds: string[] = [];\n\n if (result.output) {\n const output = result.output as PromptMapOutput;\n for (const mapResult of output.results) {\n const moduleId = mapResult.item.moduleId;\n const moduleInfo = graph.modules.find(m => m.id === moduleId);\n const moduleName = moduleInfo?.name || moduleId;\n\n if (mapResult.success && (mapResult.rawText || mapResult.rawResponse)) {\n const content = mapResult.rawText || mapResult.rawResponse || '';\n articles.push({\n type: 'module',\n slug: normalizeModuleId(moduleId),\n title: moduleName,\n content,\n moduleId,\n });\n } else {\n failedModuleIds.push(moduleId);\n }\n }\n }\n\n // Separate reduce phase: use compact module summaries (not full articles)\n // to stay within model token limits\n const moduleSummaries = analyses.map(a => {\n const mod = graph.modules.find(m => m.id === a.moduleId);\n return buildModuleSummaryForReduce(\n a.moduleId,\n mod?.name || a.moduleId,\n mod?.category || 'uncategorized',\n a.overview\n );\n });\n\n const reduceInput = createPromptMapInput(\n moduleSummaries.map((summary, i) => ({\n summary,\n moduleId: analyses[i].moduleId,\n })),\n '{{summary}}',\n []\n );\n\n const reduceJob = createPromptMapJob({\n aiInvoker,\n outputFormat: 'ai',\n model,\n maxConcurrency: 1,\n aiReducePrompt: buildReducePromptTemplate(),\n aiReduceOutput: getReduceOutputFields(),\n aiReduceModel: model,\n aiReduceParameters: {\n projectName: graph.project.name,\n projectDescription: graph.project.description || 'No description available',\n buildSystem: graph.project.buildSystem || 'Unknown',\n 
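// These keys appear to correspond to the {{projectName}}, {{projectDescription}}, {{buildSystem}},\n        // and {{language}} placeholders documented in buildReducePromptTemplate (./reduce-prompts).\n        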
language: graph.project.language || 'Unknown',\n },\n });\n\n const reduceExecutor = createExecutor({\n aiInvoker,\n maxConcurrency: 1,\n reduceMode: 'deterministic',\n showProgress: false,\n retryOnFailure: false,\n timeoutMs,\n jobName: 'Index Generation',\n onProgress,\n isCancelled,\n });\n\n try {\n const reduceResult = await reduceExecutor.execute(reduceJob, reduceInput);\n const reduceOutput = reduceResult.output as PromptMapOutput | undefined;\n const formattedOutput = reduceOutput?.formattedOutput;\n\n if (formattedOutput) {\n const parsed = JSON.parse(formattedOutput) as Record<string, string>;\n\n if (parsed.index) {\n articles.push({\n type: 'index',\n slug: 'index',\n title: `${graph.project.name} Wiki`,\n content: parsed.index,\n });\n }\n\n if (parsed.architecture) {\n articles.push({\n type: 'architecture',\n slug: 'architecture',\n title: 'Architecture Overview',\n content: parsed.architecture,\n });\n }\n\n if (parsed.gettingStarted) {\n articles.push({\n type: 'getting-started',\n slug: 'getting-started',\n title: 'Getting Started',\n content: parsed.gettingStarted,\n });\n }\n } else {\n articles.push(...generateStaticIndexPages(graph, analyses));\n }\n } catch {\n articles.push(...generateStaticIndexPages(graph, analyses));\n }\n\n return {\n articles,\n failedModuleIds,\n duration: Date.now() - startTime,\n };\n}\n\n// ============================================================================\n// Hierarchical Article Executor (Large Repos with Areas)\n// ============================================================================\n\n/** Result of grouping analyses by area. */\nexport interface AreaGrouping {\n moduleAreaMap: Map<string, string>;\n analysesByArea: Map<string, ModuleAnalysis[]>;\n unassignedAnalyses: ModuleAnalysis[];\n}\n\n/** Result of the module map phase. */\ninterface ModuleMapResult {\n articles: GeneratedArticle[];\n failedIds: Set<string>;\n}\n\n/** Result of a single area reduce phase. 
*/\ninterface AreaReduceResult {\n articles: GeneratedArticle[];\n areaSummary: { areaId: string; name: string; description: string; summary: string; moduleCount: number };\n}\n\n/**\n * Group analyses by their area assignment.\n * Builds module\u2192area mapping and buckets analyses accordingly.\n */\nexport function groupAnalysesByArea(\n analyses: ModuleAnalysis[],\n areas: AreaInfo[]\n): AreaGrouping {\n const moduleAreaMap = new Map<string, string>();\n for (const area of areas) {\n for (const moduleId of area.modules) {\n moduleAreaMap.set(moduleId, area.id);\n }\n }\n\n const analysesByArea = new Map<string, ModuleAnalysis[]>();\n const unassignedAnalyses: ModuleAnalysis[] = [];\n for (const analysis of analyses) {\n const areaId = moduleAreaMap.get(analysis.moduleId);\n if (areaId) {\n if (!analysesByArea.has(areaId)) {\n analysesByArea.set(areaId, []);\n }\n analysesByArea.get(areaId)!.push(analysis);\n } else {\n unassignedAnalyses.push(analysis);\n }\n }\n\n return { moduleAreaMap, analysesByArea, unassignedAnalyses };\n}\n\n/**\n * Run the unified map phase across all modules, tagging results with their area.\n */\nasync function runModuleMapPhase(\n options: ArticleExecutorOptions,\n analyses: ModuleAnalysis[],\n graph: ModuleGraph,\n moduleAreaMap: Map<string, string>\n): Promise<ModuleMapResult> {\n const {\n aiInvoker,\n depth,\n concurrency = 5,\n timeoutMs,\n model,\n onProgress,\n isCancelled,\n onItemComplete,\n } = options;\n\n const allItems: PromptItem[] = analyses.map(a => analysisToPromptItem(a, graph));\n const defaultPromptTemplate = buildModuleArticlePromptTemplate(depth);\n const input = createPromptMapInput(allItems, defaultPromptTemplate, []);\n\n const job = createPromptMapJob({\n aiInvoker,\n outputFormat: 'list',\n model,\n maxConcurrency: concurrency,\n });\n\n const executor = createExecutor({\n aiInvoker,\n maxConcurrency: concurrency,\n reduceMode: 'deterministic',\n showProgress: true,\n retryOnFailure: false,\n timeoutMs,\n jobName: 'Article Generation (Hierarchical)',\n onProgress,\n isCancelled,\n onItemComplete,\n });\n\n const mapResult = await executor.execute(job, input);\n\n const articles: GeneratedArticle[] = [];\n const failedIds = new Set<string>();\n\n if (mapResult.output) {\n const output = mapResult.output as PromptMapOutput;\n for (const result of output.results) {\n const moduleId = result.item.moduleId;\n const moduleInfo = graph.modules.find(m => m.id === moduleId);\n const moduleName = moduleInfo?.name || moduleId;\n const areaId = moduleAreaMap.get(moduleId);\n\n if (result.success && (result.rawText || result.rawResponse)) {\n const content = result.rawText || result.rawResponse || '';\n articles.push({\n type: 'module',\n slug: normalizeModuleId(moduleId),\n title: moduleName,\n content,\n moduleId,\n areaId,\n });\n } else {\n failedIds.add(moduleId);\n }\n }\n }\n\n return { articles, failedIds };\n}\n\n/**\n * Run reduce for a single area: generates area index and architecture articles.\n * Falls back to static pages on failure.\n */\nasync function runAreaReducePhase(\n area: AreaInfo,\n areaAnalyses: ModuleAnalysis[],\n graph: ModuleGraph,\n options: ArticleExecutorOptions\n): Promise<AreaReduceResult> {\n const { aiInvoker, timeoutMs, model, isCancelled } = options;\n\n const areaModuleSummaries = areaAnalyses.map(a => {\n const mod = graph.modules.find(m => m.id === a.moduleId);\n return buildModuleSummaryForReduce(\n a.moduleId,\n mod?.name || a.moduleId,\n mod?.category || 'uncategorized',\n a.overview\n );\n });\n\n 
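// Note (editorial, inferred from this file): '{{summary}}' below is a pass-through map template;\n // each module summary is forwarded unchanged, and the actual area-level synthesis happens in the\n // AI reduce step configured via aiReducePrompt / aiReduceOutput.\n 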
const areaReduceInput = createPromptMapInput(\n areaModuleSummaries.map((summary, i) => ({\n summary,\n moduleId: areaAnalyses[i].moduleId,\n })),\n '{{summary}}',\n []\n );\n\n const areaReduceJob = createPromptMapJob({\n aiInvoker,\n outputFormat: 'ai',\n model,\n maxConcurrency: 1,\n aiReducePrompt: buildAreaReducePromptTemplate(),\n aiReduceOutput: getAreaReduceOutputFields(),\n aiReduceModel: model,\n aiReduceParameters: {\n areaName: area.name,\n areaDescription: area.description,\n areaPath: area.path,\n projectName: graph.project.name,\n },\n });\n\n const areaReduceExecutor = createExecutor({\n aiInvoker,\n maxConcurrency: 1,\n reduceMode: 'deterministic',\n showProgress: false,\n retryOnFailure: false,\n timeoutMs,\n jobName: `Area Reduce: ${area.name}`,\n isCancelled,\n });\n\n const fallbackSummary = {\n areaId: area.id,\n name: area.name,\n description: area.description,\n summary: area.description,\n moduleCount: areaAnalyses.length,\n };\n\n try {\n const areaResult = await areaReduceExecutor.execute(areaReduceJob, areaReduceInput);\n const areaOutput = areaResult.output as PromptMapOutput | undefined;\n const formattedOutput = areaOutput?.formattedOutput;\n\n if (formattedOutput) {\n const parsed = JSON.parse(formattedOutput) as Record<string, string>;\n const articles: GeneratedArticle[] = [];\n\n let areaSummary = fallbackSummary;\n if (parsed.index) {\n articles.push({\n type: 'area-index',\n slug: 'index',\n title: `${area.name} \u2014 Overview`,\n content: parsed.index,\n areaId: area.id,\n });\n areaSummary = {\n areaId: area.id,\n name: area.name,\n description: area.description,\n summary: parsed.index.substring(0, 1000),\n moduleCount: areaAnalyses.length,\n };\n }\n\n if (parsed.architecture) {\n articles.push({\n type: 'area-architecture',\n slug: 'architecture',\n title: `${area.name} \u2014 Architecture`,\n content: parsed.architecture,\n areaId: area.id,\n });\n }\n\n return { articles, areaSummary };\n } else {\n return {\n articles: generateStaticAreaPages(area, areaAnalyses, graph),\n areaSummary: fallbackSummary,\n };\n }\n } catch {\n return {\n articles: generateStaticAreaPages(area, areaAnalyses, graph),\n areaSummary: fallbackSummary,\n };\n }\n}\n\n/**\n * Run project-level reduce across all area summaries.\n * Generates top-level index, architecture, and getting-started articles.\n * Falls back to static pages on failure.\n */\nasync function runProjectReducePhase(\n areaSummaries: Array<{ areaId: string; name: string; description: string; summary: string; moduleCount: number }>,\n areas: AreaInfo[],\n graph: ModuleGraph,\n options: ArticleExecutorOptions\n): Promise<GeneratedArticle[]> {\n const { aiInvoker, timeoutMs, model, isCancelled } = options;\n\n const projectReduceItems = areaSummaries.map(s => ({\n areaId: s.areaId,\n areaName: s.name,\n summary: JSON.stringify(s),\n }));\n\n const projectReduceInput = createPromptMapInput(\n projectReduceItems,\n '{{summary}}',\n []\n );\n\n const projectReduceJob = createPromptMapJob({\n aiInvoker,\n outputFormat: 'ai',\n model,\n maxConcurrency: 1,\n aiReducePrompt: buildHierarchicalReducePromptTemplate(),\n aiReduceOutput: getReduceOutputFields(),\n aiReduceModel: model,\n aiReduceParameters: {\n projectName: graph.project.name,\n projectDescription: graph.project.description || 'No description available',\n buildSystem: graph.project.buildSystem || 'Unknown',\n language: graph.project.language || 'Unknown',\n },\n });\n\n const projectReduceExecutor = createExecutor({\n aiInvoker,\n maxConcurrency: 
1,\n reduceMode: 'deterministic',\n showProgress: false,\n retryOnFailure: false,\n timeoutMs,\n jobName: 'Project Reduce',\n isCancelled,\n });\n\n try {\n const projectResult = await projectReduceExecutor.execute(projectReduceJob, projectReduceInput);\n const projectOutput = projectResult.output as PromptMapOutput | undefined;\n const formattedOutput = projectOutput?.formattedOutput;\n\n if (formattedOutput) {\n const parsed = JSON.parse(formattedOutput) as Record<string, string>;\n const articles: GeneratedArticle[] = [];\n\n if (parsed.index) {\n articles.push({\n type: 'index',\n slug: 'index',\n title: `${graph.project.name} Wiki`,\n content: parsed.index,\n });\n }\n\n if (parsed.architecture) {\n articles.push({\n type: 'architecture',\n slug: 'architecture',\n title: 'Architecture Overview',\n content: parsed.architecture,\n });\n }\n\n if (parsed.gettingStarted) {\n articles.push({\n type: 'getting-started',\n slug: 'getting-started',\n title: 'Getting Started',\n content: parsed.gettingStarted,\n });\n }\n\n return articles;\n } else {\n return generateStaticHierarchicalIndexPages(graph, areas, areaSummaries);\n }\n } catch {\n return generateStaticHierarchicalIndexPages(graph, areas, areaSummaries);\n }\n}\n\n/**\n * Hierarchical article executor for large repos with areas.\n * Orchestrates a 3-step pipeline:\n * 1. Map: Generate per-module articles (grouped by area)\n * 2. Per-area reduce: Generate area index + area architecture\n * 3. Project-level reduce: Generate project index + architecture + getting-started\n */\nasync function runHierarchicalArticleExecutor(\n options: ArticleExecutorOptions\n): Promise<ArticleExecutorResult> {\n const startTime = Date.now();\n const { graph, analyses } = options;\n\n if (analyses.length === 0) {\n return { articles: [], failedModuleIds: [], duration: 0 };\n }\n\n const areas = graph.areas!;\n\n // Step 1: Group analyses by area\n const { moduleAreaMap, analysesByArea } = groupAnalysesByArea(analyses, areas);\n\n // Step 2: Generate per-module articles\n const mapResult = await runModuleMapPhase(options, analyses, graph, moduleAreaMap);\n\n // Step 3: Per-area reduce\n const areaSummaries: Array<{ areaId: string; name: string; description: string; summary: string; moduleCount: number }> = [];\n for (const area of areas) {\n const areaAnalyses = analysesByArea.get(area.id) || [];\n if (areaAnalyses.length === 0) { continue; }\n\n const result = await runAreaReducePhase(area, areaAnalyses, graph, options);\n mapResult.articles.push(...result.articles);\n areaSummaries.push(result.areaSummary);\n }\n\n // Step 4: Project-level reduce\n const projectArticles = await runProjectReducePhase(areaSummaries, areas, graph, options);\n mapResult.articles.push(...projectArticles);\n\n return {\n articles: mapResult.articles,\n failedModuleIds: [...mapResult.failedIds],\n duration: Date.now() - startTime,\n };\n}\n\n// ============================================================================\n// Static Fallback\n// ============================================================================\n\n/**\n * Generate static area-level pages when area AI reduce fails.\n */\nexport function generateStaticAreaPages(\n area: AreaInfo,\n analyses: ModuleAnalysis[],\n graph: ModuleGraph\n): GeneratedArticle[] {\n const articles: GeneratedArticle[] = [];\n\n // Area index\n const indexLines: string[] = [\n `# ${area.name}`,\n '',\n area.description || '',\n '',\n '## Modules',\n '',\n ];\n\n for (const a of analyses) {\n const mod = graph.modules.find(m => m.id 
=== a.moduleId);\n const name = mod?.name || a.moduleId;\n const slug = normalizeModuleId(a.moduleId);\n indexLines.push(`- [${name}](./modules/${slug}.md) \u2014 ${a.overview.substring(0, 100)}`);\n }\n\n articles.push({\n type: 'area-index',\n slug: 'index',\n title: `${area.name} \u2014 Overview`,\n content: indexLines.join('\\n'),\n areaId: area.id,\n });\n\n // Area architecture placeholder\n articles.push({\n type: 'area-architecture',\n slug: 'architecture',\n title: `${area.name} \u2014 Architecture`,\n content: [\n `# ${area.name} \u2014 Architecture`,\n '',\n area.description || 'No architecture description available.',\n ].join('\\n'),\n areaId: area.id,\n });\n\n return articles;\n}\n\n/**\n * Generate static project-level index pages for hierarchical layout.\n */\nexport function generateStaticHierarchicalIndexPages(\n graph: ModuleGraph,\n areas: AreaInfo[],\n areaSummaries: Array<{ areaId: string; name: string; description: string; moduleCount: number }>\n): GeneratedArticle[] {\n const articles: GeneratedArticle[] = [];\n\n // Project index\n const indexLines: string[] = [\n `# ${graph.project.name}`,\n '',\n graph.project.description || '',\n '',\n '## Areas',\n '',\n ];\n\n for (const summary of areaSummaries) {\n indexLines.push(`- [${summary.name}](./areas/${summary.areaId}/index.md) \u2014 ${summary.description} (${summary.moduleCount} modules)`);\n }\n\n articles.push({\n type: 'index',\n slug: 'index',\n title: `${graph.project.name} Wiki`,\n content: indexLines.join('\\n'),\n });\n\n // Architecture placeholder\n articles.push({\n type: 'architecture',\n slug: 'architecture',\n title: 'Architecture Overview',\n content: [\n '# Architecture Overview',\n '',\n `${graph.project.name} is built with ${graph.project.language} using ${graph.project.buildSystem}.`,\n '',\n graph.architectureNotes || 'No architecture notes available.',\n ].join('\\n'),\n });\n\n return articles;\n}\n\n/**\n * Generate static index pages when AI reduce fails.\n * Produces a basic TOC and architecture placeholder.\n */\nexport function generateStaticIndexPages(\n graph: ModuleGraph,\n analyses: ModuleAnalysis[]\n): GeneratedArticle[] {\n const articles: GeneratedArticle[] = [];\n\n // Static index\n const indexLines: string[] = [\n `# ${graph.project.name}`,\n '',\n graph.project.description || '',\n '',\n '## Modules',\n '',\n ];\n\n // Group by category\n const byCategory = new Map<string, ModuleAnalysis[]>();\n for (const a of analyses) {\n const mod = graph.modules.find(m => m.id === a.moduleId);\n const category = mod?.category || 'uncategorized';\n if (!byCategory.has(category)) {\n byCategory.set(category, []);\n }\n byCategory.get(category)!.push(a);\n }\n\n for (const [category, mods] of byCategory) {\n indexLines.push(`### ${category}`, '');\n for (const a of mods) {\n const mod = graph.modules.find(m => m.id === a.moduleId);\n const name = mod?.name || a.moduleId;\n const slug = normalizeModuleId(a.moduleId);\n indexLines.push(`- [${name}](./modules/${slug}.md) \u2014 ${a.overview.substring(0, 100)}`);\n }\n indexLines.push('');\n }\n\n articles.push({\n type: 'index',\n slug: 'index',\n title: `${graph.project.name} Wiki`,\n content: indexLines.join('\\n'),\n });\n\n // Static architecture placeholder\n articles.push({\n type: 'architecture',\n slug: 'architecture',\n title: 'Architecture Overview',\n content: [\n '# Architecture Overview',\n '',\n `${graph.project.name} is built with ${graph.project.language} using ${graph.project.buildSystem}.`,\n '',\n graph.architectureNotes || 
'No architecture notes available.',\n ].join('\\n'),\n });\n\n return articles;\n}\n", "/**\n * File Writer\n *\n * Writes generated wiki articles to disk in a structured directory layout:\n * wiki/\n * \u251C\u2500\u2500 index.md\n * \u251C\u2500\u2500 architecture.md\n * \u251C\u2500\u2500 getting-started.md\n * \u2514\u2500\u2500 modules/\n * \u251C\u2500\u2500 auth.md\n * \u251C\u2500\u2500 database.md\n * \u2514\u2500\u2500 ...\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport type { WikiOutput, GeneratedArticle } from '../types';\n\n// ============================================================================\n// Constants\n// ============================================================================\n\n/** Subdirectory for module articles */\nconst MODULES_DIR = 'modules';\n\n/** Subdirectory for area articles */\nconst AREAS_DIR = 'areas';\n\n// ============================================================================\n// File Writer\n// ============================================================================\n\n/**\n * Write all wiki articles to the output directory.\n *\n * Creates the directory structure and writes each article as a .md file.\n * UTF-8 encoding with LF line endings. Overwrites existing files.\n *\n * Supports both flat layout (small repos):\n * wiki/modules/auth.md\n * And hierarchical layout (large repos with areas):\n * wiki/areas/core/modules/auth.md\n *\n * @param output The wiki output containing all articles\n * @param outputDir The output directory path\n * @returns Array of written file paths\n */\nexport function writeWikiOutput(output: WikiOutput, outputDir: string): string[] {\n const resolvedDir = path.resolve(outputDir);\n const modulesDir = path.join(resolvedDir, MODULES_DIR);\n const writtenPaths: string[] = [];\n\n // Ensure directories exist\n fs.mkdirSync(resolvedDir, { recursive: true });\n fs.mkdirSync(modulesDir, { recursive: true });\n\n // Collect unique area IDs to create area directories\n const areaIds = new Set<string>();\n for (const article of output.articles) {\n if (article.areaId) {\n areaIds.add(article.areaId);\n }\n }\n\n // Create area directories if needed\n for (const areaId of areaIds) {\n const areaModulesDir = path.join(resolvedDir, AREAS_DIR, areaId, MODULES_DIR);\n fs.mkdirSync(areaModulesDir, { recursive: true });\n }\n\n for (const article of output.articles) {\n const filePath = getArticleFilePath(article, resolvedDir);\n\n // Ensure parent directory exists (for safety with deeply nested paths)\n fs.mkdirSync(path.dirname(filePath), { recursive: true });\n\n // Normalize line endings to LF\n const content = normalizeLineEndings(article.content);\n\n // Write file\n fs.writeFileSync(filePath, content, 'utf-8');\n writtenPaths.push(filePath);\n }\n\n return writtenPaths;\n}\n\n// ============================================================================\n// Path Helpers\n// ============================================================================\n\n/**\n * Get the file path for an article based on its type, slug, and optional areaId.\n *\n * For articles with areaId set (hierarchical layout):\n * - module \u2192 areas/{areaId}/modules/{slug}.md\n * - area-index \u2192 areas/{areaId}/index.md\n * - area-architecture \u2192 areas/{areaId}/architecture.md\n *\n * For articles without areaId (flat layout):\n * - module \u2192 modules/{slug}.md\n * - index \u2192 index.md\n * - architecture \u2192 architecture.md\n * - 
getting-started \u2192 getting-started.md\n */\nexport function getArticleFilePath(article: GeneratedArticle, outputDir: string): string {\n const slug = slugify(article.slug);\n\n switch (article.type) {\n case 'module':\n if (article.areaId) {\n return path.join(outputDir, AREAS_DIR, article.areaId, MODULES_DIR, `${slug}.md`);\n }\n return path.join(outputDir, MODULES_DIR, `${slug}.md`);\n case 'area-index':\n return path.join(outputDir, AREAS_DIR, article.areaId!, 'index.md');\n case 'area-architecture':\n return path.join(outputDir, AREAS_DIR, article.areaId!, 'architecture.md');\n case 'index':\n return path.join(outputDir, 'index.md');\n case 'architecture':\n return path.join(outputDir, 'architecture.md');\n case 'getting-started':\n return path.join(outputDir, 'getting-started.md');\n default:\n return path.join(outputDir, `${slug}.md`);\n }\n}\n\n/**\n * Slugify a string for use as a filename.\n * Converts to lowercase, replaces non-alphanumeric chars with hyphens,\n * and trims leading/trailing hyphens.\n */\nexport function slugify(input: string): string {\n return input\n .toLowerCase()\n .replace(/[^a-z0-9]+/g, '-')\n .replace(/^-+|-+$/g, '')\n .replace(/-{2,}/g, '-') || 'untitled';\n}\n\n/**\n * Normalize line endings to LF (Unix-style).\n */\nexport function normalizeLineEndings(content: string): string {\n return content.replace(/\\r\\n/g, '\\n').replace(/\\r/g, '\\n');\n}\n", "/**\n * Shared Mermaid Zoom/Pan Module\n *\n * Provides unified CSS, HTML, and JS strings for mermaid diagram\n * zoom/pan controls used by both the SPA template (serve mode) and\n * the static website generator.\n *\n * Harmonization decisions:\n * - Container class: `mermaid-viewport` (more descriptive)\n * - Label: Parameterized (default: \"Diagram\")\n * - mousemove/mouseup: Attached to `document` (robust \u2014 allows dragging outside container)\n * - Null checks on buttons: Yes (safer)\n * - initMermaid return: Always returns a Promise\n * - Transition: 0.15s ease-out (smoother)\n * - Dragging: `transition: none` on `.mermaid-svg-wrapper` during drag\n * - CSS vars: Uses shared deep-wiki CSS var names\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\n// ============================================================================\n// CSS \u2014 getMermaidZoomStyles()\n// ============================================================================\n\n/**\n * Returns the CSS for mermaid container, toolbar, zoom buttons, viewport,\n * svg-wrapper, and drag states. 
Merged from both spa-template.ts and\n * website-generator.ts, removing duplicates.\n *\n * @returns CSS string (no surrounding `<style>` tags)\n */\nexport function getMermaidZoomStyles(): string {\n return `/* Mermaid diagrams \u2014 base */\n .markdown-body pre.mermaid {\n background: transparent;\n border: none;\n padding: 0;\n margin: 0;\n text-align: center;\n }\n .markdown-body pre.mermaid svg {\n max-width: 100%;\n height: auto;\n }\n\n /* Mermaid container with zoom/pan support */\n .markdown-body .mermaid-container {\n position: relative;\n margin: 24px 0;\n border: 1px solid var(--content-border);\n border-radius: 8px;\n overflow: hidden;\n background: var(--code-bg);\n max-width: 100%;\n width: 100%;\n }\n .mermaid-toolbar {\n display: flex;\n align-items: center;\n padding: 6px 12px;\n background: var(--code-bg);\n border-bottom: 1px solid var(--content-border);\n gap: 4px;\n user-select: none;\n }\n .mermaid-toolbar-label {\n font-size: 11px;\n font-weight: 600;\n text-transform: uppercase;\n letter-spacing: 0.05em;\n color: var(--content-muted);\n margin-right: auto;\n }\n .mermaid-zoom-btn {\n background: var(--copy-btn-bg);\n border: 1px solid var(--content-border);\n cursor: pointer;\n padding: 2px 8px;\n border-radius: 4px;\n font-size: 14px;\n font-weight: 600;\n line-height: 1.2;\n transition: background-color 0.15s, border-color 0.15s;\n color: var(--content-text);\n min-width: 28px;\n text-align: center;\n }\n .mermaid-zoom-btn:hover {\n background: var(--copy-btn-hover-bg);\n border-color: var(--sidebar-active-border);\n }\n .mermaid-zoom-btn:active {\n transform: scale(0.95);\n }\n .mermaid-zoom-level {\n font-size: 11px;\n font-weight: 500;\n color: var(--content-muted);\n min-width: 42px;\n text-align: center;\n padding: 0 4px;\n }\n .mermaid-zoom-reset {\n font-size: 12px;\n }\n .mermaid-viewport {\n overflow: hidden;\n cursor: grab;\n min-height: 200px;\n position: relative;\n }\n .mermaid-viewport:active {\n cursor: grabbing;\n }\n .mermaid-viewport.mermaid-dragging {\n cursor: grabbing;\n }\n .mermaid-svg-wrapper {\n transform-origin: 0 0;\n transition: transform 0.15s ease-out;\n display: inline-block;\n padding: 24px;\n }\n .mermaid-viewport.mermaid-dragging .mermaid-svg-wrapper {\n transition: none;\n }`;\n}\n\n// ============================================================================\n// HTML \u2014 getMermaidContainerHtml()\n// ============================================================================\n\n/**\n * Returns the HTML string for a mermaid container with toolbar and zoom controls.\n * The `mermaidCode` is placed inside a `<pre class=\"mermaid\">` element within the\n * viewport's svg-wrapper.\n *\n * @param mermaidCode - The raw mermaid diagram source code\n * @param label - The toolbar label (default: \"Diagram\")\n * @returns HTML string for the mermaid container\n */\nexport function getMermaidContainerHtml(mermaidCode: string, label = 'Diagram'): string {\n return '<div class=\"mermaid-container\">' +\n '<div class=\"mermaid-toolbar\">' +\n '<span class=\"mermaid-toolbar-label\">' + escapeHtml(label) + '</span>' +\n '<button class=\"mermaid-zoom-btn mermaid-zoom-out\" title=\"Zoom out\">\\\\u2212</button>' +\n '<span class=\"mermaid-zoom-level\">100%</span>' +\n '<button class=\"mermaid-zoom-btn mermaid-zoom-in\" title=\"Zoom in\">+</button>' +\n '<button class=\"mermaid-zoom-btn mermaid-zoom-reset\" title=\"Reset view\">\\\\u27F2</button>' +\n '</div>' +\n '<div class=\"mermaid-viewport\">' +\n '<div class=\"mermaid-svg-wrapper\">' 
+\n '<pre class=\"mermaid\">' + mermaidCode + '</pre>' +\n '</div>' +\n '</div>' +\n '</div>';\n}\n\n// ============================================================================\n// JavaScript \u2014 getMermaidZoomScript()\n// ============================================================================\n\n/**\n * Returns the JS string for the `initMermaidZoom()` function definition.\n * Includes zoom constants, button handlers, Ctrl/Cmd + wheel zoom toward cursor,\n * and drag panning with document-level mousemove/mouseup (robust for dragging\n * outside the viewport).\n *\n * Called after `mermaid.run()` completes.\n *\n * @returns JavaScript string (no surrounding `<script>` tags)\n */\nexport function getMermaidZoomScript(): string {\n return `\n var MERMAID_MIN_ZOOM = 0.25;\n var MERMAID_MAX_ZOOM = 4;\n var MERMAID_ZOOM_STEP = 0.25;\n\n function initMermaidZoom() {\n document.querySelectorAll('.mermaid-container').forEach(function(container) {\n var viewport = container.querySelector('.mermaid-viewport');\n var svgWrapper = container.querySelector('.mermaid-svg-wrapper');\n if (!viewport || !svgWrapper) return;\n\n var state = { scale: 1, translateX: 0, translateY: 0, isDragging: false, dragStartX: 0, dragStartY: 0, lastTX: 0, lastTY: 0 };\n\n function applyTransform() {\n svgWrapper.style.transform = 'translate(' + state.translateX + 'px, ' + state.translateY + 'px) scale(' + state.scale + ')';\n var display = container.querySelector('.mermaid-zoom-level');\n if (display) display.textContent = Math.round(state.scale * 100) + '%';\n }\n\n // Zoom in\n var zoomInBtn = container.querySelector('.mermaid-zoom-in');\n if (zoomInBtn) {\n zoomInBtn.addEventListener('click', function(e) {\n e.stopPropagation();\n state.scale = Math.min(MERMAID_MAX_ZOOM, state.scale + MERMAID_ZOOM_STEP);\n applyTransform();\n });\n }\n\n // Zoom out\n var zoomOutBtn = container.querySelector('.mermaid-zoom-out');\n if (zoomOutBtn) {\n zoomOutBtn.addEventListener('click', function(e) {\n e.stopPropagation();\n state.scale = Math.max(MERMAID_MIN_ZOOM, state.scale - MERMAID_ZOOM_STEP);\n applyTransform();\n });\n }\n\n // Reset\n var resetBtn = container.querySelector('.mermaid-zoom-reset');\n if (resetBtn) {\n resetBtn.addEventListener('click', function(e) {\n e.stopPropagation();\n state.scale = 1;\n state.translateX = 0;\n state.translateY = 0;\n applyTransform();\n });\n }\n\n // Ctrl/Cmd + mouse wheel zoom toward cursor\n viewport.addEventListener('wheel', function(e) {\n if (!e.ctrlKey && !e.metaKey) return;\n e.preventDefault();\n e.stopPropagation();\n var delta = e.deltaY > 0 ? 
-MERMAID_ZOOM_STEP : MERMAID_ZOOM_STEP;\n var newScale = Math.max(MERMAID_MIN_ZOOM, Math.min(MERMAID_MAX_ZOOM, state.scale + delta));\n if (newScale !== state.scale) {\n var rect = viewport.getBoundingClientRect();\n var mx = e.clientX - rect.left;\n var my = e.clientY - rect.top;\n var px = (mx - state.translateX) / state.scale;\n var py = (my - state.translateY) / state.scale;\n state.scale = newScale;\n state.translateX = mx - px * state.scale;\n state.translateY = my - py * state.scale;\n applyTransform();\n }\n }, { passive: false });\n\n // Mouse drag panning\n viewport.addEventListener('mousedown', function(e) {\n if (e.button !== 0) return;\n state.isDragging = true;\n state.dragStartX = e.clientX;\n state.dragStartY = e.clientY;\n state.lastTX = state.translateX;\n state.lastTY = state.translateY;\n viewport.classList.add('mermaid-dragging');\n e.preventDefault();\n });\n\n document.addEventListener('mousemove', function(e) {\n if (!state.isDragging) return;\n state.translateX = state.lastTX + (e.clientX - state.dragStartX);\n state.translateY = state.lastTY + (e.clientY - state.dragStartY);\n applyTransform();\n });\n\n document.addEventListener('mouseup', function() {\n if (!state.isDragging) return;\n state.isDragging = false;\n viewport.classList.remove('mermaid-dragging');\n });\n });\n }`;\n}\n\n// ============================================================================\n// Internal Helpers\n// ============================================================================\n\n/**\n * Escape HTML special characters (for the toolbar label).\n */\nfunction escapeHtml(str: string): string {\n return str\n .replace(/&/g, '&amp;')\n .replace(/</g, '&lt;')\n .replace(/>/g, '&gt;')\n .replace(/\"/g, '&quot;');\n}\n", "/**\n * Website Styles\n *\n * CSS generation for the standalone HTML website.\n * Extracted from website-generator.ts for maintainability.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport { getMermaidZoomStyles } from '../rendering/mermaid-zoom';\n\n// ============================================================================\n// Public API\n// ============================================================================\n\n/**\n * Generate the CSS styles for the website template.\n * @returns CSS string to embed in <style> tag\n */\nexport function getStyles(): string {\n return ` :root {\n --sidebar-bg: #1e293b;\n --sidebar-header-bg: #0f172a;\n --sidebar-border: #334155;\n --sidebar-text: #e2e8f0;\n --sidebar-muted: #94a3b8;\n --sidebar-hover: #334155;\n --sidebar-active-border: #3b82f6;\n --content-bg: #ffffff;\n --content-text: #1e293b;\n --content-muted: #64748b;\n --content-border: #e2e8f0;\n --header-bg: #ffffff;\n --header-shadow: rgba(0,0,0,0.05);\n --code-bg: #f1f5f9;\n --code-border: #e2e8f0;\n --link-color: #2563eb;\n --badge-high-bg: #ef4444;\n --badge-medium-bg: #f59e0b;\n --badge-low-bg: #22c55e;\n --card-bg: #ffffff;\n --card-border: #e2e8f0;\n --card-hover-border: #3b82f6;\n --stat-bg: #f8fafc;\n --stat-border: #3b82f6;\n --copy-btn-bg: rgba(0,0,0,0.05);\n --copy-btn-hover-bg: rgba(0,0,0,0.1);\n --search-bg: #334155;\n --search-text: #e2e8f0;\n --search-placeholder: #94a3b8;\n }\n\n .dark-theme,\n html[data-theme=\"dark\"] {\n --content-bg: #0f172a;\n --content-text: #e2e8f0;\n --content-muted: #94a3b8;\n --content-border: #334155;\n --header-bg: #1e293b;\n --header-shadow: rgba(0,0,0,0.2);\n --code-bg: #1e293b;\n --code-border: #334155;\n --link-color: #60a5fa;\n --card-bg: #1e293b;\n --card-border: #334155;\n --stat-bg: 
#1e293b;\n --copy-btn-bg: rgba(255,255,255,0.08);\n --copy-btn-hover-bg: rgba(255,255,255,0.15);\n }\n\n @media (prefers-color-scheme: dark) {\n html[data-theme=\"auto\"] {\n --content-bg: #0f172a;\n --content-text: #e2e8f0;\n --content-muted: #94a3b8;\n --content-border: #334155;\n --header-bg: #1e293b;\n --header-shadow: rgba(0,0,0,0.2);\n --code-bg: #1e293b;\n --code-border: #334155;\n --link-color: #60a5fa;\n --card-bg: #1e293b;\n --card-border: #334155;\n --stat-bg: #1e293b;\n --copy-btn-bg: rgba(255,255,255,0.08);\n --copy-btn-hover-bg: rgba(255,255,255,0.15);\n }\n }\n\n * { margin: 0; padding: 0; box-sizing: border-box; }\n\n body {\n font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, sans-serif;\n display: flex;\n height: 100vh;\n overflow: hidden;\n background: var(--content-bg);\n color: var(--content-text);\n }\n\n /* Sidebar */\n .sidebar {\n width: 280px;\n min-width: 280px;\n background: var(--sidebar-bg);\n color: var(--sidebar-text);\n overflow-y: auto;\n border-right: 1px solid var(--sidebar-border);\n transition: margin-left 0.3s;\n }\n\n .sidebar.hidden { margin-left: -280px; }\n\n .sidebar-header {\n padding: 20px;\n background: var(--sidebar-header-bg);\n border-bottom: 1px solid var(--sidebar-border);\n }\n\n .sidebar-header h1 { font-size: 18px; margin-bottom: 8px; }\n .sidebar-header p { font-size: 12px; color: var(--sidebar-muted); line-height: 1.4; }\n\n .nav-section { padding: 12px 0; border-bottom: 1px solid var(--sidebar-border); }\n .nav-section h3 {\n font-size: 11px;\n text-transform: uppercase;\n letter-spacing: 0.05em;\n color: var(--sidebar-muted);\n padding: 8px 20px;\n font-weight: 600;\n }\n\n .nav-item {\n padding: 8px 20px;\n cursor: pointer;\n transition: background 0.15s;\n font-size: 14px;\n border-left: 3px solid transparent;\n display: block;\n }\n\n .nav-item:hover { background: var(--sidebar-hover); }\n .nav-item.active { background: var(--sidebar-hover); border-left-color: var(--sidebar-active-border); }\n .nav-item-name { display: block; color: var(--sidebar-text); margin-bottom: 2px; }\n .nav-item-path { display: block; font-size: 11px; color: var(--sidebar-muted); }\n\n /* Area-based sidebar (DeepWiki-style hierarchy) */\n .nav-area-group { padding: 2px 0; }\n .nav-area-item {\n padding: 8px 20px;\n cursor: pointer;\n font-size: 14px;\n font-weight: 500;\n color: var(--sidebar-text);\n display: block;\n transition: background 0.15s;\n }\n .nav-area-item:hover { background: var(--sidebar-hover); }\n .nav-area-item.active { background: var(--sidebar-hover); border-left: 3px solid var(--sidebar-active-border); }\n\n .nav-area-children { padding-left: 8px; }\n .nav-area-module {\n padding: 6px 20px 6px 28px;\n cursor: pointer;\n font-size: 13px;\n color: var(--sidebar-muted);\n display: block;\n transition: background 0.15s, color 0.15s;\n }\n .nav-area-module:hover { background: var(--sidebar-hover); color: var(--sidebar-text); }\n .nav-area-module.active { background: var(--sidebar-hover); color: var(--sidebar-text); border-left: 3px solid var(--sidebar-active-border); }\n\n .complexity-badge {\n display: inline-block;\n padding: 1px 6px;\n border-radius: 3px;\n font-size: 10px;\n font-weight: 600;\n margin-left: 6px;\n color: white;\n }\n .complexity-high { background: var(--badge-high-bg); }\n .complexity-medium { background: var(--badge-medium-bg); }\n .complexity-low { background: var(--badge-low-bg); }\n\n .search-box { margin: 12px 16px; }\n .search-box input {\n width: 100%;\n padding: 8px 
12px;\n border: 1px solid var(--sidebar-border);\n border-radius: 6px;\n background: var(--search-bg);\n color: var(--search-text);\n font-size: 13px;\n outline: none;\n }\n .search-box input::placeholder { color: var(--search-placeholder); }\n .search-box input:focus { border-color: var(--sidebar-active-border); }\n\n /* Content */\n .content {\n flex: 1;\n display: flex;\n flex-direction: column;\n overflow: hidden;\n min-width: 0;\n }\n\n .content-header {\n background: var(--header-bg);\n padding: 16px 32px;\n border-bottom: 1px solid var(--content-border);\n box-shadow: 0 1px 3px var(--header-shadow);\n display: flex;\n justify-content: space-between;\n align-items: center;\n }\n\n .header-left { display: flex; align-items: center; gap: 12px; }\n .breadcrumb { font-size: 13px; color: var(--content-muted); margin-bottom: 4px; }\n .content-title { font-size: 24px; color: var(--content-text); }\n\n .sidebar-toggle {\n background: none;\n border: 1px solid var(--content-border);\n border-radius: 6px;\n padding: 6px 10px;\n cursor: pointer;\n font-size: 18px;\n color: var(--content-muted);\n }\n .sidebar-toggle:hover { background: var(--code-bg); }\n\n .theme-toggle {\n background: none;\n border: 1px solid var(--content-border);\n border-radius: 6px;\n padding: 6px 10px;\n cursor: pointer;\n font-size: 18px;\n color: var(--content-muted);\n }\n .theme-toggle:hover { background: var(--code-bg); }\n\n .content-body {\n flex: 1;\n overflow-y: auto;\n overflow-x: hidden;\n padding: 32px;\n background: var(--content-bg);\n }\n\n /* Markdown styles */\n .markdown-body { max-width: 900px; margin: 0 auto; line-height: 1.6; overflow-wrap: break-word; word-wrap: break-word; }\n .markdown-body h1 { margin-top: 32px; margin-bottom: 16px; font-size: 2em; border-bottom: 1px solid var(--content-border); padding-bottom: 8px; }\n .markdown-body h1:first-child { margin-top: 0; }\n .markdown-body h2 { margin-top: 28px; margin-bottom: 16px; font-size: 1.5em; border-bottom: 1px solid var(--content-border); padding-bottom: 6px; }\n .markdown-body h3 { margin-top: 24px; margin-bottom: 12px; font-size: 1.25em; }\n .markdown-body h4 { margin-top: 20px; margin-bottom: 8px; font-size: 1.1em; }\n .markdown-body p { margin-bottom: 16px; }\n .markdown-body > *:last-child { margin-bottom: 0; }\n .markdown-body code {\n background: var(--code-bg);\n padding: 2px 6px;\n border-radius: 4px;\n font-size: 85%;\n font-family: 'SFMono-Regular', Consolas, 'Liberation Mono', Menlo, monospace;\n }\n .markdown-body pre {\n background: var(--code-bg);\n border: 1px solid var(--code-border);\n padding: 16px;\n border-radius: 8px;\n overflow-x: auto;\n margin-bottom: 16px;\n position: relative;\n }\n .markdown-body pre code { background: none; padding: 0; border-radius: 0; font-size: 13px; }\n .markdown-body table { border-collapse: collapse; width: 100%; margin: 16px 0; display: block; overflow-x: auto; }\n .markdown-body table th, .markdown-body table td {\n border: 1px solid var(--content-border);\n padding: 8px 12px;\n text-align: left;\n }\n .markdown-body table th { background: var(--code-bg); font-weight: 600; }\n .markdown-body ul, .markdown-body ol { margin-bottom: 16px; padding-left: 2em; }\n .markdown-body li { margin-bottom: 6px; }\n .markdown-body li > p { margin-bottom: 6px; }\n .markdown-body a { color: var(--link-color); text-decoration: none; }\n .markdown-body a:hover { text-decoration: underline; }\n .markdown-body blockquote {\n border-left: 4px solid var(--content-border);\n padding: 8px 16px;\n margin: 16px 
0;\n color: var(--content-muted);\n }\n .markdown-body img { max-width: 100%; border-radius: 8px; }\n .markdown-body hr { border: none; border-top: 1px solid var(--content-border); margin: 24px 0; }\n\n /* Heading anchors */\n .heading-anchor {\n color: var(--content-muted);\n text-decoration: none;\n margin-left: 8px;\n opacity: 0;\n transition: opacity 0.15s;\n font-weight: 400;\n }\n .markdown-body h1:hover .heading-anchor,\n .markdown-body h2:hover .heading-anchor,\n .markdown-body h3:hover .heading-anchor,\n .markdown-body h4:hover .heading-anchor { opacity: 1; }\n\n /* Copy button for code blocks */\n .copy-btn {\n position: absolute;\n top: 8px;\n right: 8px;\n background: var(--copy-btn-bg);\n border: 1px solid var(--code-border);\n border-radius: 4px;\n padding: 4px 8px;\n cursor: pointer;\n font-size: 12px;\n color: var(--content-muted);\n opacity: 0;\n transition: opacity 0.15s;\n }\n .markdown-body pre:hover .copy-btn { opacity: 1; }\n .copy-btn:hover { background: var(--copy-btn-hover-bg); }\n\n /* Home view */\n .home-view { max-width: 900px; margin: 0 auto; }\n .project-stats {\n display: grid;\n grid-template-columns: repeat(auto-fit, minmax(180px, 1fr));\n gap: 16px;\n margin: 24px 0;\n }\n .stat-card {\n background: var(--stat-bg);\n padding: 16px;\n border-radius: 8px;\n border-left: 4px solid var(--stat-border);\n }\n .stat-card h3 { font-size: 13px; color: var(--content-muted); margin-bottom: 6px; font-weight: 500; }\n .stat-card .value { font-size: 28px; font-weight: 700; color: var(--content-text); }\n .stat-card .value.small { font-size: 16px; }\n\n .module-grid {\n display: grid;\n grid-template-columns: repeat(auto-fill, minmax(240px, 1fr));\n gap: 12px;\n margin-top: 24px;\n }\n .module-card {\n background: var(--card-bg);\n border: 1px solid var(--card-border);\n border-radius: 8px;\n padding: 14px;\n cursor: pointer;\n transition: border-color 0.15s, box-shadow 0.15s;\n }\n .module-card:hover {\n border-color: var(--card-hover-border);\n box-shadow: 0 4px 12px rgba(0,0,0,0.08);\n }\n .module-card h4 { margin-bottom: 6px; font-size: 14px; }\n .module-card p { font-size: 12px; color: var(--content-muted); line-height: 1.4; }\n\n${getMermaidZoomStyles()}\n\n /* Responsive */\n @media (max-width: 768px) {\n .sidebar { position: fixed; z-index: 100; height: 100vh; }\n .sidebar.hidden { margin-left: -280px; }\n .content-header { padding: 12px 16px; }\n .content-body { padding: 16px; }\n .markdown-body .mermaid-container {\n max-width: 100%;\n width: 100%;\n }\n }`;\n}\n", "/**\n * Website Client Script\n *\n * Client-side JavaScript generation for the standalone HTML website.\n * Extracted from website-generator.ts for maintainability.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport type { WebsiteTheme } from '../types';\nimport { getMermaidZoomScript } from '../rendering/mermaid-zoom';\n\n// ============================================================================\n// Public API\n// ============================================================================\n\n/**\n * Generate the client-side JavaScript for the website template.\n * @param enableSearch - Whether to include search functionality\n * @param defaultTheme - The default theme setting\n * @returns JavaScript string to embed in <script> tag\n */\nexport function getScript(enableSearch: boolean, defaultTheme: WebsiteTheme): string {\n return ` // ====================================================================\n // Deep Wiki Viewer\n // 
====================================================================\n\n let moduleGraph = null;\n let currentModuleId = null;\n let currentTheme = '${defaultTheme}';\n let mermaidInitialized = false;\n\n // Initialize\n try {\n moduleGraph = MODULE_GRAPH;\n initTheme();\n initializeSidebar();\n showHome(true);\n // Use replaceState for initial load to avoid extra history entry\n history.replaceState({ type: 'home' }, '', location.pathname);\n } catch(err) {\n document.getElementById('content').innerHTML =\n '<p style=\"color: red;\">Error loading module graph: ' + err.message + '</p>';\n }\n\n // ================================================================\n // Browser History (Back/Forward)\n // ================================================================\n\n window.addEventListener('popstate', function(e) {\n var state = e.state;\n if (!state) {\n showHome(true);\n return;\n }\n if (state.type === 'home') {\n showHome(true);\n } else if (state.type === 'module' && state.id) {\n loadModule(state.id, true);\n } else if (state.type === 'special' && state.key && state.title) {\n loadSpecialPage(state.key, state.title, true);\n } else {\n showHome(true);\n }\n });\n\n // ================================================================\n // Theme\n // ================================================================\n\n function initTheme() {\n const saved = localStorage.getItem('deep-wiki-theme');\n if (saved) {\n currentTheme = saved;\n document.documentElement.setAttribute('data-theme', currentTheme);\n }\n updateThemeStyles();\n }\n\n function toggleTheme() {\n if (currentTheme === 'auto') {\n currentTheme = 'dark';\n } else if (currentTheme === 'dark') {\n currentTheme = 'light';\n } else {\n currentTheme = 'auto';\n }\n document.documentElement.setAttribute('data-theme', currentTheme);\n localStorage.setItem('deep-wiki-theme', currentTheme);\n updateThemeStyles();\n // Re-render current content to apply new highlight theme\n if (currentModuleId) {\n loadModule(currentModuleId);\n }\n }\n\n function updateThemeStyles() {\n const isDark = currentTheme === 'dark' ||\n (currentTheme === 'auto' && window.matchMedia('(prefers-color-scheme: dark)').matches);\n const lightSheet = document.getElementById('hljs-light');\n const darkSheet = document.getElementById('hljs-dark');\n if (lightSheet) lightSheet.disabled = isDark;\n if (darkSheet) darkSheet.disabled = !isDark;\n\n const btn = document.getElementById('theme-toggle');\n if (btn) btn.textContent = isDark ? 
'\\\\u2600' : '\\\\u263E';\n }\n\n // Listen for system theme changes\n window.matchMedia('(prefers-color-scheme: dark)').addEventListener('change', updateThemeStyles);\n\n document.getElementById('theme-toggle').addEventListener('click', toggleTheme);\n document.getElementById('sidebar-toggle').addEventListener('click', function() {\n document.getElementById('sidebar').classList.toggle('hidden');\n });\n\n // ================================================================\n // Sidebar\n // ================================================================\n\n function initializeSidebar() {\n document.getElementById('project-name').textContent = moduleGraph.project.name;\n document.getElementById('project-description').textContent = moduleGraph.project.description;\n\n var navContainer = document.getElementById('nav-container');\n var hasAreas = moduleGraph.areas && moduleGraph.areas.length > 0;\n\n // Home link\n var homeSection = document.createElement('div');\n homeSection.className = 'nav-section';\n homeSection.innerHTML =\n '<div class=\"nav-item active\" data-id=\"__home\" onclick=\"showHome()\">' +\n '<span class=\"nav-item-name\">Home</span></div>';\n\n // Overview pages\n if (typeof MARKDOWN_DATA !== 'undefined') {\n if (MARKDOWN_DATA['__index']) {\n homeSection.innerHTML +=\n '<div class=\"nav-item\" data-id=\"__index\" onclick=\"loadSpecialPage(\\\\'__index\\\\', \\\\'Index\\\\')\">' +\n '<span class=\"nav-item-name\">Index</span></div>';\n }\n if (MARKDOWN_DATA['__architecture']) {\n homeSection.innerHTML +=\n '<div class=\"nav-item\" data-id=\"__architecture\" onclick=\"loadSpecialPage(\\\\'__architecture\\\\', \\\\'Architecture\\\\')\">' +\n '<span class=\"nav-item-name\">Architecture</span></div>';\n }\n if (MARKDOWN_DATA['__getting-started']) {\n homeSection.innerHTML +=\n '<div class=\"nav-item\" data-id=\"__getting-started\" onclick=\"loadSpecialPage(\\\\'__getting-started\\\\', \\\\'Getting Started\\\\')\">' +\n '<span class=\"nav-item-name\">Getting Started</span></div>';\n }\n }\n navContainer.appendChild(homeSection);\n\n if (hasAreas) {\n // DeepWiki-style: areas as top-level, modules indented underneath\n buildAreaSidebar(navContainer);\n } else {\n // Fallback: category-based grouping\n buildCategorySidebar(navContainer);\n }\n${enableSearch ? `\n // Search\n document.getElementById('search').addEventListener('input', function(e) {\n var query = e.target.value.toLowerCase();\n document.querySelectorAll('.nav-area-module[data-id], .nav-item[data-id]').forEach(function(item) {\n var id = item.getAttribute('data-id');\n if (id === '__home' || id === '__index' || id === '__architecture' || id === '__getting-started') {\n return;\n }\n var text = item.textContent.toLowerCase();\n item.style.display = text.includes(query) ? '' : 'none';\n });\n // Hide area headers when no children match\n document.querySelectorAll('.nav-area-group').forEach(function(group) {\n var visibleChildren = group.querySelectorAll('.nav-area-module:not([style*=\"display: none\"])');\n var areaItem = group.querySelector('.nav-area-item');\n if (areaItem) {\n areaItem.style.display = visibleChildren.length === 0 ? 'none' : '';\n }\n var childrenEl = group.querySelector('.nav-area-children');\n if (childrenEl) {\n childrenEl.style.display = visibleChildren.length === 0 ? 
'none' : '';\n }\n });\n // Show/hide category section headers\n document.querySelectorAll('.nav-section').forEach(function(section) {\n var visibleItems = section.querySelectorAll('.nav-item[data-id]:not([style*=\"display: none\"])');\n var header = section.querySelector('h3');\n if (header) {\n header.style.display = visibleItems.length === 0 ? 'none' : '';\n }\n });\n });` : ''}\n }\n\n // Build area-based sidebar (DeepWiki-style hierarchy)\n function buildAreaSidebar(navContainer) {\n var areaModules = {};\n moduleGraph.areas.forEach(function(area) {\n areaModules[area.id] = [];\n });\n\n moduleGraph.modules.forEach(function(mod) {\n var areaId = mod.area;\n if (areaId && areaModules[areaId]) {\n areaModules[areaId].push(mod);\n } else {\n var found = false;\n moduleGraph.areas.forEach(function(area) {\n if (area.modules && area.modules.indexOf(mod.id) !== -1) {\n areaModules[area.id].push(mod);\n found = true;\n }\n });\n if (!found) {\n if (!areaModules['__other']) areaModules['__other'] = [];\n areaModules['__other'].push(mod);\n }\n }\n });\n\n moduleGraph.areas.forEach(function(area) {\n var modules = areaModules[area.id] || [];\n if (modules.length === 0) return;\n\n var group = document.createElement('div');\n group.className = 'nav-area-group';\n\n var areaItem = document.createElement('div');\n areaItem.className = 'nav-area-item';\n areaItem.setAttribute('data-area-id', area.id);\n areaItem.innerHTML = escapeHtml(area.name);\n group.appendChild(areaItem);\n\n var childrenEl = document.createElement('div');\n childrenEl.className = 'nav-area-children';\n\n modules.forEach(function(mod) {\n var item = document.createElement('div');\n item.className = 'nav-area-module';\n item.setAttribute('data-id', mod.id);\n item.innerHTML = escapeHtml(mod.name);\n item.onclick = function() { loadModule(mod.id); };\n childrenEl.appendChild(item);\n });\n\n group.appendChild(childrenEl);\n navContainer.appendChild(group);\n });\n\n var otherModules = areaModules['__other'] || [];\n if (otherModules.length > 0) {\n var group = document.createElement('div');\n group.className = 'nav-area-group';\n var areaItem = document.createElement('div');\n areaItem.className = 'nav-area-item';\n areaItem.innerHTML = 'Other';\n group.appendChild(areaItem);\n\n var childrenEl = document.createElement('div');\n childrenEl.className = 'nav-area-children';\n otherModules.forEach(function(mod) {\n var item = document.createElement('div');\n item.className = 'nav-area-module';\n item.setAttribute('data-id', mod.id);\n item.innerHTML = escapeHtml(mod.name);\n item.onclick = function() { loadModule(mod.id); };\n childrenEl.appendChild(item);\n });\n group.appendChild(childrenEl);\n navContainer.appendChild(group);\n }\n }\n\n // Build category-based sidebar (fallback, uses same visual style as area-based)\n function buildCategorySidebar(navContainer) {\n var categories = {};\n moduleGraph.modules.forEach(function(mod) {\n var cat = mod.category || 'other';\n if (!categories[cat]) categories[cat] = [];\n categories[cat].push(mod);\n });\n\n Object.keys(categories).sort().forEach(function(category) {\n var group = document.createElement('div');\n group.className = 'nav-area-group';\n\n var catItem = document.createElement('div');\n catItem.className = 'nav-area-item';\n catItem.innerHTML = escapeHtml(category);\n group.appendChild(catItem);\n\n var childrenEl = document.createElement('div');\n childrenEl.className = 'nav-area-children';\n\n categories[category].forEach(function(mod) {\n var item = 
document.createElement('div');\n item.className = 'nav-area-module';\n item.setAttribute('data-id', mod.id);\n item.innerHTML = escapeHtml(mod.name);\n item.onclick = function() { loadModule(mod.id); };\n childrenEl.appendChild(item);\n });\n\n group.appendChild(childrenEl);\n navContainer.appendChild(group);\n });\n }\n\n function setActive(id) {\n document.querySelectorAll('.nav-item, .nav-area-module, .nav-area-item').forEach(function(el) {\n el.classList.remove('active');\n });\n var target = document.querySelector('.nav-item[data-id=\"' + id + '\"]') ||\n document.querySelector('.nav-area-module[data-id=\"' + id + '\"]');\n if (target) target.classList.add('active');\n }\n\n // ================================================================\n // Content\n // ================================================================\n\n function showHome(skipHistory) {\n currentModuleId = null;\n setActive('__home');\n document.getElementById('breadcrumb').textContent = 'Home';\n document.getElementById('content-title').textContent = 'Project Overview';\n if (!skipHistory) {\n history.pushState({ type: 'home' }, '', location.pathname);\n }\n\n var stats = {\n modules: moduleGraph.modules.length,\n categories: (moduleGraph.categories || []).length,\n language: moduleGraph.project.language,\n buildSystem: moduleGraph.project.buildSystem,\n };\n\n var html = '<div class=\"home-view\">' +\n '<p style=\"font-size: 15px; color: var(--content-muted); margin-bottom: 24px;\">' +\n escapeHtml(moduleGraph.project.description) + '</p>' +\n '<div class=\"project-stats\">' +\n '<div class=\"stat-card\"><h3>Modules</h3><div class=\"value\">' + stats.modules + '</div></div>' +\n '<div class=\"stat-card\"><h3>Categories</h3><div class=\"value\">' + stats.categories + '</div></div>' +\n '<div class=\"stat-card\"><h3>Language</h3><div class=\"value small\">' + escapeHtml(stats.language) + '</div></div>' +\n '<div class=\"stat-card\"><h3>Build System</h3><div class=\"value small\">' + escapeHtml(stats.buildSystem) + '</div></div>' +\n '</div>';\n\n if (moduleGraph.project.entryPoints && moduleGraph.project.entryPoints.length > 0) {\n html += '<h3 style=\"margin-top: 24px; margin-bottom: 12px;\">Entry Points</h3><ul>';\n moduleGraph.project.entryPoints.forEach(function(ep) {\n html += '<li><code>' + escapeHtml(ep) + '</code></li>';\n });\n html += '</ul>';\n }\n\n var hasAreas = moduleGraph.areas && moduleGraph.areas.length > 0;\n if (hasAreas) {\n moduleGraph.areas.forEach(function(area) {\n var areaModules = moduleGraph.modules.filter(function(mod) {\n if (mod.area === area.id) return true;\n return area.modules && area.modules.indexOf(mod.id) !== -1;\n });\n if (areaModules.length === 0) return;\n\n html += '<h3 style=\"margin-top: 24px; margin-bottom: 12px;\">' + escapeHtml(area.name) + '</h3>';\n if (area.description) {\n html += '<p style=\"color: var(--content-muted); margin-bottom: 12px; font-size: 14px;\">' +\n escapeHtml(area.description) + '</p>';\n }\n html += '<div class=\"module-grid\">';\n areaModules.forEach(function(mod) {\n html += '<div class=\"module-card\" onclick=\"loadModule(\\\\'' +\n mod.id.replace(/'/g, \"\\\\\\\\'\") + '\\\\')\">' +\n '<h4>' + escapeHtml(mod.name) +\n ' <span class=\"complexity-badge complexity-' + mod.complexity + '\">' +\n mod.complexity + '</span></h4>' +\n '<p>' + escapeHtml(mod.purpose) + '</p></div>';\n });\n html += '</div>';\n });\n\n var assignedIds = new Set();\n moduleGraph.areas.forEach(function(area) {\n moduleGraph.modules.forEach(function(mod) {\n if 
(mod.area === area.id || (area.modules && area.modules.indexOf(mod.id) !== -1)) {\n assignedIds.add(mod.id);\n }\n });\n });\n var unassigned = moduleGraph.modules.filter(function(mod) { return !assignedIds.has(mod.id); });\n if (unassigned.length > 0) {\n html += '<h3 style=\"margin-top: 24px; margin-bottom: 12px;\">Other</h3><div class=\"module-grid\">';\n unassigned.forEach(function(mod) {\n html += '<div class=\"module-card\" onclick=\"loadModule(\\\\'' +\n mod.id.replace(/'/g, \"\\\\\\\\'\") + '\\\\')\">' +\n '<h4>' + escapeHtml(mod.name) +\n ' <span class=\"complexity-badge complexity-' + mod.complexity + '\">' +\n mod.complexity + '</span></h4>' +\n '<p>' + escapeHtml(mod.purpose) + '</p></div>';\n });\n html += '</div>';\n }\n } else {\n html += '<h3 style=\"margin-top: 24px; margin-bottom: 12px;\">All Modules</h3>' +\n '<div class=\"module-grid\">';\n moduleGraph.modules.forEach(function(mod) {\n html += '<div class=\"module-card\" onclick=\"loadModule(\\\\'' +\n mod.id.replace(/'/g, \"\\\\\\\\'\") + '\\\\')\">' +\n '<h4>' + escapeHtml(mod.name) +\n ' <span class=\"complexity-badge complexity-' + mod.complexity + '\">' +\n mod.complexity + '</span></h4>' +\n '<p>' + escapeHtml(mod.purpose) + '</p></div>';\n });\n html += '</div>';\n }\n\n html += '</div>';\n\n document.getElementById('content').innerHTML = html;\n }\n\n function loadModule(moduleId, skipHistory) {\n var mod = moduleGraph.modules.find(function(m) { return m.id === moduleId; });\n if (!mod) return;\n\n currentModuleId = moduleId;\n setActive(moduleId);\n\n document.getElementById('breadcrumb').textContent = mod.category + ' / ' + mod.name;\n document.getElementById('content-title').textContent = mod.name;\n if (!skipHistory) {\n history.pushState({ type: 'module', id: moduleId }, '', location.pathname + '#module-' + encodeURIComponent(moduleId));\n }\n\n var markdown = (typeof MARKDOWN_DATA !== 'undefined') ? 
MARKDOWN_DATA[moduleId] : null;\n if (markdown) {\n renderMarkdownContent(markdown);\n } else {\n document.getElementById('content').innerHTML =\n '<div class=\"markdown-body\">' +\n '<h2>' + escapeHtml(mod.name) + '</h2>' +\n '<p><strong>Purpose:</strong> ' + escapeHtml(mod.purpose) + '</p>' +\n '<p><strong>Path:</strong> <code>' + escapeHtml(mod.path) + '</code></p>' +\n '<p><strong>Complexity:</strong> ' + mod.complexity + '</p>' +\n '<h3>Key Files</h3><ul>' +\n mod.keyFiles.map(function(f) { return '<li><code>' + escapeHtml(f) + '</code></li>'; }).join('') +\n '</ul>' +\n '<h3>Dependencies</h3><ul>' +\n mod.dependencies.map(function(d) { return '<li>' + escapeHtml(d) + '</li>'; }).join('') +\n '</ul></div>';\n }\n // Scroll content to top\n document.querySelector('.content-body').scrollTop = 0;\n }\n\n function loadSpecialPage(key, title, skipHistory) {\n currentModuleId = null;\n setActive(key);\n document.getElementById('breadcrumb').textContent = title;\n document.getElementById('content-title').textContent = title;\n if (!skipHistory) {\n history.pushState({ type: 'special', key: key, title: title }, '', location.pathname + '#' + encodeURIComponent(key));\n }\n\n var markdown = MARKDOWN_DATA[key];\n if (markdown) {\n renderMarkdownContent(markdown);\n } else {\n document.getElementById('content').innerHTML = '<p>Content not available.</p>';\n }\n document.querySelector('.content-body').scrollTop = 0;\n }\n\n // ================================================================\n // Markdown Rendering\n // ================================================================\n\n function renderMarkdownContent(markdown) {\n var html = marked.parse(markdown);\n var container = document.getElementById('content');\n container.innerHTML = '<div class=\"markdown-body\">' + html + '</div>';\n\n var body = container.querySelector('.markdown-body');\n\n // Syntax highlighting\n body.querySelectorAll('pre code').forEach(function(block) {\n // Check for mermaid\n if (block.classList.contains('language-mermaid')) {\n var pre = block.parentElement;\n pre.classList.add('mermaid');\n pre.textContent = block.textContent;\n pre.removeAttribute('style');\n // Build zoom/pan container (shared structure from mermaid-zoom)\n var mContainer = document.createElement('div');\n mContainer.className = 'mermaid-container';\n mContainer.innerHTML =\n '<div class=\"mermaid-toolbar\">' +\n '<span class=\"mermaid-toolbar-label\">Diagram</span>' +\n '<button class=\"mermaid-zoom-btn mermaid-zoom-out\" title=\"Zoom out\">\\\\u2212</button>' +\n '<span class=\"mermaid-zoom-level\">100%</span>' +\n '<button class=\"mermaid-zoom-btn mermaid-zoom-in\" title=\"Zoom in\">+</button>' +\n '<button class=\"mermaid-zoom-btn mermaid-zoom-reset\" title=\"Reset view\">\\\\u27F2</button>' +\n '</div>' +\n '<div class=\"mermaid-viewport\">' +\n '<div class=\"mermaid-svg-wrapper\"></div>' +\n '</div>';\n pre.parentNode.insertBefore(mContainer, pre);\n mContainer.querySelector('.mermaid-svg-wrapper').appendChild(pre);\n } else {\n hljs.highlightElement(block);\n addCopyButton(block.parentElement);\n }\n });\n\n // Add anchor links to headings\n body.querySelectorAll('h1, h2, h3, h4').forEach(function(heading) {\n var id = heading.textContent.toLowerCase()\n .replace(/[^a-z0-9]+/g, '-')\n .replace(/^-+|-+$/g, '');\n heading.id = id;\n var anchor = document.createElement('a');\n anchor.className = 'heading-anchor';\n anchor.href = '#' + id;\n anchor.textContent = '#';\n anchor.setAttribute('aria-label', 'Link to ' + heading.textContent);\n 
heading.appendChild(anchor);\n });\n\n // Render mermaid then attach zoom controls\n initMermaid().then(function() { initMermaidZoom(); });\n\n // Intercept internal .md links\n container.addEventListener('click', function(e) {\n var target = e.target;\n while (target && target !== container) {\n if (target.tagName === 'A') break;\n target = target.parentElement;\n }\n if (!target || target.tagName !== 'A') return;\n var href = target.getAttribute('href');\n if (!href || !href.match(/\\\\.md(#.*)?$/)) return;\n // Don't intercept external links\n if (/^https?:\\\\/\\\\//.test(href)) return;\n\n e.preventDefault();\n var hashPart = '';\n var hashIdx = href.indexOf('#');\n if (hashIdx !== -1) {\n hashPart = href.substring(hashIdx + 1);\n href = href.substring(0, hashIdx);\n }\n\n // Extract slug from the href path\n var slug = href.replace(/^(\\\\.\\\\/|\\\\.\\\\.\\\\/)*/, '').replace(/^modules\\\\//, '').replace(/\\\\.md$/, '');\n\n // Check special pages\n var specialPages = {\n 'index': { key: '__index', title: 'Index' },\n 'architecture': { key: '__architecture', title: 'Architecture' },\n 'getting-started': { key: '__getting-started', title: 'Getting Started' }\n };\n if (specialPages[slug]) {\n loadSpecialPage(specialPages[slug].key, specialPages[slug].title);\n return;\n }\n\n // Try to find matching module ID\n var matchedId = findModuleIdBySlugClient(slug);\n if (matchedId) {\n loadModule(matchedId);\n if (hashPart) {\n setTimeout(function() {\n var el = document.getElementById(hashPart);\n if (el) el.scrollIntoView({ behavior: 'smooth' });\n }, 100);\n }\n }\n });\n }\n\n // Client-side module ID lookup by slug\n function findModuleIdBySlugClient(slug) {\n var normalized = slug.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-+|-+$/g, '');\n for (var i = 0; i < moduleGraph.modules.length; i++) {\n var mod = moduleGraph.modules[i];\n var modSlug = mod.id.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-+|-+$/g, '');\n if (modSlug === normalized) return mod.id;\n }\n return null;\n }\n\n function addCopyButton(pre) {\n var btn = document.createElement('button');\n btn.className = 'copy-btn';\n btn.textContent = 'Copy';\n btn.setAttribute('aria-label', 'Copy code');\n btn.onclick = function() {\n var code = pre.querySelector('code');\n var text = code ? code.textContent : pre.textContent;\n navigator.clipboard.writeText(text).then(function() {\n btn.textContent = 'Copied!';\n setTimeout(function() { btn.textContent = 'Copy'; }, 2000);\n });\n };\n pre.appendChild(btn);\n }\n\n function initMermaid() {\n var mermaidBlocks = document.querySelectorAll('.mermaid');\n if (mermaidBlocks.length === 0) return Promise.resolve();\n\n var isDark = currentTheme === 'dark' ||\n (currentTheme === 'auto' && window.matchMedia('(prefers-color-scheme: dark)').matches);\n\n mermaid.initialize({\n startOnLoad: false,\n theme: isDark ? 
'dark' : 'default',\n securityLevel: 'loose',\n flowchart: {\n useMaxWidth: false,\n htmlLabels: true,\n curve: 'basis',\n padding: 15,\n nodeSpacing: 50,\n rankSpacing: 50,\n },\n fontSize: 14,\n });\n return mermaid.run({ nodes: mermaidBlocks });\n }\n\n // ================================================================\n // Mermaid Zoom & Pan (shared via mermaid-zoom module)\n // ================================================================\n${getMermaidZoomScript()}\n\n // ================================================================\n // Utility\n // ================================================================\n\n function escapeHtml(str) {\n if (!str) return '';\n return String(str)\n .replace(/&/g, '&amp;')\n .replace(/</g, '&lt;')\n .replace(/>/g, '&gt;')\n .replace(/\"/g, '&quot;');\n }`;\n}\n", "/**\n * Website Data\n *\n * Data reading, serialization, and utility functions for the website generator.\n * Extracted from website-generator.ts for maintainability.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport type { ModuleGraph } from '../types';\n\n// ============================================================================\n// Module Graph Reader\n// ============================================================================\n\n/**\n * Read module-graph.json from the wiki directory.\n * @param wikiDir - Resolved wiki directory path\n * @returns Parsed module graph\n */\nexport function readModuleGraph(wikiDir: string): ModuleGraph {\n const graphPath = path.join(wikiDir, 'module-graph.json');\n if (!fs.existsSync(graphPath)) {\n throw new Error(`module-graph.json not found in ${wikiDir}`);\n }\n\n const content = fs.readFileSync(graphPath, 'utf-8');\n return JSON.parse(content) as ModuleGraph;\n}\n\n// ============================================================================\n// Markdown Reader\n// ============================================================================\n\n/**\n * Read all markdown files for modules from the wiki directory.\n *\n * Supports both flat and hierarchical layouts:\n * - Flat: modules/{slug}.md\n * - Hierarchical: areas/{areaId}/modules/{slug}.md\n *\n * Also reads top-level markdown files (index.md, architecture.md, getting-started.md)\n * and area-level index/architecture files.\n *\n * @param wikiDir - Resolved wiki directory path\n * @param moduleGraph - The module graph (for module ID mapping)\n * @returns Map of module ID to markdown content\n */\nexport function readMarkdownFiles(\n wikiDir: string,\n moduleGraph: ModuleGraph\n): Record<string, string> {\n const data: Record<string, string> = {};\n\n // Read top-level markdown files\n const topLevelFiles = ['index.md', 'architecture.md', 'getting-started.md'];\n for (const file of topLevelFiles) {\n const filePath = path.join(wikiDir, file);\n if (fs.existsSync(filePath)) {\n const key = path.basename(file, '.md');\n data[`__${key}`] = fs.readFileSync(filePath, 'utf-8');\n }\n }\n\n // Read flat-layout module files\n const modulesDir = path.join(wikiDir, 'modules');\n if (fs.existsSync(modulesDir) && fs.statSync(modulesDir).isDirectory()) {\n const files = fs.readdirSync(modulesDir).filter(f => f.endsWith('.md'));\n for (const file of files) {\n const slug = path.basename(file, '.md');\n const moduleId = findModuleIdBySlug(slug, moduleGraph);\n const key = moduleId || slug;\n data[key] = fs.readFileSync(path.join(modulesDir, file), 'utf-8');\n }\n }\n\n // Read hierarchical-layout area files\n 
const areasDir = path.join(wikiDir, 'areas');\n if (fs.existsSync(areasDir) && fs.statSync(areasDir).isDirectory()) {\n const areaDirs = fs.readdirSync(areasDir).filter(d =>\n fs.statSync(path.join(areasDir, d)).isDirectory()\n );\n\n for (const areaId of areaDirs) {\n const areaDir = path.join(areasDir, areaId);\n\n // Area-level files\n for (const file of ['index.md', 'architecture.md']) {\n const filePath = path.join(areaDir, file);\n if (fs.existsSync(filePath)) {\n const key = path.basename(file, '.md');\n data[`__area_${areaId}_${key}`] = fs.readFileSync(filePath, 'utf-8');\n }\n }\n\n // Area module files\n const areaModulesDir = path.join(areaDir, 'modules');\n if (fs.existsSync(areaModulesDir) && fs.statSync(areaModulesDir).isDirectory()) {\n const files = fs.readdirSync(areaModulesDir).filter(f => f.endsWith('.md'));\n for (const file of files) {\n const slug = path.basename(file, '.md');\n const moduleId = findModuleIdBySlug(slug, moduleGraph);\n const key = moduleId || slug;\n data[key] = fs.readFileSync(path.join(areaModulesDir, file), 'utf-8');\n }\n }\n }\n }\n\n return data;\n}\n\n// ============================================================================\n// Data Embedding\n// ============================================================================\n\n/**\n * Generate the embedded-data.js content.\n *\n * Produces a JavaScript file that defines two global constants:\n * - MODULE_GRAPH: The module graph JSON\n * - MARKDOWN_DATA: Map of module ID to markdown content\n *\n * Uses JSON.stringify with sorted keys for deterministic output.\n *\n * @param moduleGraph - The module graph\n * @param markdownData - Map of module ID to markdown content\n * @returns JavaScript source code\n */\nexport function generateEmbeddedData(\n moduleGraph: ModuleGraph,\n markdownData: Record<string, string>\n): string {\n // Sort keys for deterministic output\n const sortedGraph = stableStringify(moduleGraph);\n const sortedMarkdown = stableStringify(markdownData);\n\n return `// Auto-generated by deep-wiki. 
Do not edit manually.\\nconst MODULE_GRAPH = ${sortedGraph};\\nconst MARKDOWN_DATA = ${sortedMarkdown};\\n`;\n}\n\n// ============================================================================\n// Serialization Helpers\n// ============================================================================\n\n/**\n * JSON.stringify with sorted keys for deterministic output.\n */\nexport function stableStringify(value: unknown): string {\n return JSON.stringify(value, sortedReplacer, 2);\n}\n\n/**\n * JSON replacer that sorts object keys.\n */\nfunction sortedReplacer(_key: string, value: unknown): unknown {\n if (value !== null && typeof value === 'object' && !Array.isArray(value)) {\n const sorted: Record<string, unknown> = {};\n for (const k of Object.keys(value as Record<string, unknown>).sort()) {\n sorted[k] = (value as Record<string, unknown>)[k];\n }\n return sorted;\n }\n return value;\n}\n\n// ============================================================================\n// Helpers\n// ============================================================================\n\n/**\n * Find a module ID by its slug.\n * Matches by normalizing the module ID to a slug.\n */\nfunction findModuleIdBySlug(slug: string, moduleGraph: ModuleGraph): string | null {\n const normalized = slug.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-+|-+$/g, '');\n for (const mod of moduleGraph.modules) {\n const modSlug = mod.id.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-+|-+$/g, '');\n if (modSlug === normalized) {\n return mod.id;\n }\n }\n return null;\n}\n\n/**\n * Escape HTML special characters.\n */\nexport function escapeHtml(str: string): string {\n return str\n .replace(/&/g, '&amp;')\n .replace(/</g, '&lt;')\n .replace(/>/g, '&gt;')\n .replace(/\"/g, '&quot;');\n}\n", "/**\n * Website Generator\n *\n * Generates a standalone HTML website from the wiki output.\n * The generated website includes:\n * - Embedded module graph and markdown data (no CORS issues with file://)\n * - Syntax highlighting via highlight.js CDN\n * - Mermaid diagram rendering via mermaid.js CDN\n * - Markdown rendering via marked.js CDN\n * - Responsive sidebar navigation with search\n * - Dark/light/auto theme support\n * - Copy buttons for code blocks\n * - Anchor links for headings\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport type { WebsiteOptions, WebsiteTheme } from '../types';\nimport { getStyles } from './website-styles';\nimport { getScript } from './website-client-script';\nimport { escapeHtml, readModuleGraph, readMarkdownFiles, generateEmbeddedData } from './website-data';\n\n// Re-export for backward compatibility\nexport { readModuleGraph, readMarkdownFiles, generateEmbeddedData, stableStringify } from './website-data';\n\n// ============================================================================\n// Constants\n// ============================================================================\n\n/** Default theme when not specified */\nconst DEFAULT_THEME: WebsiteTheme = 'auto';\n\n/** Filename for the generated website */\nconst INDEX_HTML_FILENAME = 'index.html';\n\n/** Filename for embedded data */\nconst EMBEDDED_DATA_FILENAME = 'embedded-data.js';\n\n// ============================================================================\n// Public API\n// ============================================================================\n\n/**\n * Generate a standalone HTML website from wiki output.\n *\n * Reads module-graph.json and all 
module markdown files from the wiki directory,\n * then generates index.html with embedded data for offline viewing.\n *\n * @param wikiDir - Path to the wiki output directory (contains module-graph.json and modules/)\n * @param options - Website generation options\n * @returns Paths to the generated files\n */\nexport function generateWebsite(wikiDir: string, options?: WebsiteOptions): string[] {\n const resolvedDir = path.resolve(wikiDir);\n\n // Read module graph\n const moduleGraph = readModuleGraph(resolvedDir);\n\n // Read all markdown files\n const markdownData = readMarkdownFiles(resolvedDir, moduleGraph);\n\n // Determine effective options\n const theme = options?.theme || DEFAULT_THEME;\n const title = options?.title || moduleGraph.project.name;\n const enableSearch = !options?.noSearch;\n\n // Generate embedded data JS\n const embeddedDataContent = generateEmbeddedData(moduleGraph, markdownData);\n const embeddedDataPath = path.join(resolvedDir, EMBEDDED_DATA_FILENAME);\n fs.writeFileSync(embeddedDataPath, embeddedDataContent, 'utf-8');\n\n // Generate HTML\n let htmlContent: string;\n if (options?.customTemplate) {\n const templatePath = path.resolve(options.customTemplate);\n if (!fs.existsSync(templatePath)) {\n throw new Error(`Custom template not found: ${templatePath}`);\n }\n htmlContent = fs.readFileSync(templatePath, 'utf-8');\n } else {\n htmlContent = generateHtmlTemplate({ theme, title, enableSearch });\n }\n\n const htmlPath = path.join(resolvedDir, INDEX_HTML_FILENAME);\n fs.writeFileSync(htmlPath, htmlContent, 'utf-8');\n\n return [htmlPath, embeddedDataPath];\n}\n\n// ============================================================================\n// HTML Template Generator\n// ============================================================================\n\ninterface TemplateOptions {\n theme: WebsiteTheme;\n title: string;\n enableSearch: boolean;\n}\n\n/**\n * Generate the index.html content from the built-in template.\n *\n * @param options - Template options\n * @returns Complete HTML content\n */\nexport function generateHtmlTemplate(options: TemplateOptions): string {\n const { theme, title, enableSearch } = options;\n\n const themeClass = theme === 'auto' ? '' : `class=\"${theme}-theme\"`;\n const themeMetaTag = theme === 'auto'\n ? '<meta name=\"color-scheme\" content=\"light dark\">'\n : '';\n\n return `<!DOCTYPE html>\n<html lang=\"en\" ${themeClass} data-theme=\"${theme}\">\n<head>\n <meta charset=\"UTF-8\">\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">\n ${themeMetaTag}\n <title>${escapeHtml(title)} \u2014 Wiki</title>\n\n <!-- Syntax Highlighting -->\n <link rel=\"stylesheet\" href=\"https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/github.min.css\" id=\"hljs-light\">\n <link rel=\"stylesheet\" href=\"https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/github-dark.min.css\" id=\"hljs-dark\" disabled>\n <script src=\"https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/highlight.min.js\"></script>\n\n <!-- Mermaid Diagrams -->\n <script src=\"https://cdn.jsdelivr.net/npm/mermaid@10/dist/mermaid.min.js\"></script>\n\n <!-- Markdown Parser -->\n <script src=\"https://cdn.jsdelivr.net/npm/marked/marked.min.js\"></script>\n\n <style>\n${getStyles()}\n </style>\n</head>\n<body>\n <div class=\"sidebar\" id=\"sidebar\">\n <div class=\"sidebar-header\">\n <h1 id=\"project-name\">${escapeHtml(title)}</h1>\n <p id=\"project-description\"></p>\n </div>\n${enableSearch ? 
` <div class=\"search-box\">\n <input type=\"text\" id=\"search\" placeholder=\"Search modules...\" aria-label=\"Search modules\">\n </div>` : ''}\n <div id=\"nav-container\"></div>\n </div>\n\n <div class=\"content\">\n <div class=\"content-header\">\n <div class=\"header-left\">\n <button class=\"sidebar-toggle\" id=\"sidebar-toggle\" aria-label=\"Toggle sidebar\">&#9776;</button>\n <div>\n <div class=\"breadcrumb\" id=\"breadcrumb\">Home</div>\n <h2 class=\"content-title\" id=\"content-title\">Project Overview</h2>\n </div>\n </div>\n <button class=\"theme-toggle\" id=\"theme-toggle\" aria-label=\"Toggle theme\">&#9790;</button>\n </div>\n <div class=\"content-body\">\n <div id=\"content\" class=\"markdown-body\"></div>\n </div>\n </div>\n\n <script src=\"embedded-data.js\"></script>\n <script>\n${getScript(enableSearch, theme)}\n </script>\n</body>\n</html>`;\n}\n", "/**\n * Writing Module \u2014 Public API\n *\n * Phase 4 (Article Generation) entry point. Converts ModuleAnalysis results\n * into markdown wiki articles and generates index/architecture overview pages.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport type { WritingOptions, WikiOutput } from '../types';\nimport type { AIInvoker, JobProgress, ItemCompleteCallback } from '@plusplusoneplusplus/pipeline-core';\nimport { runArticleExecutor } from './article-executor';\n\n// Re-export for convenience\nexport { buildModuleArticlePrompt, buildModuleArticlePromptTemplate, buildSimplifiedGraph, getArticleStyleGuide, buildCrossLinkRules } from './prompts';\nexport { buildReducePromptTemplate, getReduceOutputFields, buildModuleSummaryForReduce, buildAreaReducePromptTemplate, getAreaReduceOutputFields, buildHierarchicalReducePromptTemplate } from './reduce-prompts';\nexport { runArticleExecutor, analysisToPromptItem, generateStaticIndexPages, generateStaticAreaPages, generateStaticHierarchicalIndexPages } from './article-executor';\nexport { writeWikiOutput, getArticleFilePath, slugify, normalizeLineEndings } from './file-writer';\nexport { generateWebsite, generateEmbeddedData, generateHtmlTemplate, readModuleGraph, readMarkdownFiles, stableStringify } from './website-generator';\nexport type { ArticleExecutorOptions, ArticleExecutorResult } from './article-executor';\n\n// ============================================================================\n// Public API\n// ============================================================================\n\n/**\n * Generate wiki articles from module analyses.\n *\n * Runs a map-reduce job:\n * - Map: Per-module article generation (text mode, raw markdown)\n * - Reduce: AI-generated index, architecture, and getting-started pages\n *\n * @param options Writing options\n * @param aiInvoker Configured AI invoker for writing (session pool)\n * @param onProgress Optional progress callback\n * @param isCancelled Optional cancellation check\n * @param onItemComplete Optional per-item completion callback for incremental saving\n * @returns Wiki output with all articles\n */\nexport async function generateArticles(\n options: WritingOptions,\n aiInvoker: AIInvoker,\n onProgress?: (progress: JobProgress) => void,\n isCancelled?: () => boolean,\n onItemComplete?: ItemCompleteCallback,\n): Promise<WikiOutput> {\n const startTime = Date.now();\n\n const result = await runArticleExecutor({\n aiInvoker,\n graph: options.graph,\n analyses: options.analyses,\n depth: options.depth || 'normal',\n concurrency: options.concurrency || 5,\n timeoutMs: options.timeout || 120_000,\n model: 
options.model,\n onProgress,\n isCancelled,\n onItemComplete,\n });\n\n return {\n articles: result.articles,\n duration: Date.now() - startTime,\n failedModuleIds: result.failedModuleIds.length > 0 ? result.failedModuleIds : undefined,\n };\n}\n", "/**\n * Phase 4: Article Generation (Writing)\n *\n * Generates wiki articles from module analyses, with incremental caching and reduce-phase support.\n */\n\nimport * as path from 'path';\nimport * as fs from 'fs';\nimport type { GenerateCommandOptions, ModuleGraph, ModuleAnalysis, GeneratedArticle } from '../../types';\nimport { extractJSON, type AIInvoker } from '@plusplusoneplusplus/pipeline-core';\nimport { resolvePhaseModel, resolvePhaseTimeout, resolvePhaseConcurrency, resolvePhaseDepth } from '../../config-loader';\nimport {\n generateArticles,\n writeWikiOutput,\n buildReducePromptTemplate,\n generateStaticIndexPages,\n} from '../../writing';\nimport { createWritingInvoker } from '../../ai-invoker';\nimport { normalizeModuleId } from '../../schemas';\nimport { UsageTracker } from '../../usage-tracker';\nimport {\n saveArticle,\n saveAllArticles,\n scanIndividualArticlesCache,\n scanIndividualArticlesCacheAny,\n getCachedReduceArticles,\n saveReduceArticles,\n getFolderHeadHash,\n restampArticles,\n} from '../../cache';\nimport {\n Spinner,\n printSuccess,\n printError,\n printWarning,\n printInfo,\n printHeader,\n bold,\n gray,\n} from '../../logger';\nimport { EXIT_CODES } from '../../cli';\nimport { getErrorMessage } from '../../utils/error-utils';\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface Phase4WritingResult {\n articlesWritten: number;\n duration: number;\n exitCode?: number;\n}\n\n// ============================================================================\n// Phase 4: Article Generation\n// ============================================================================\n\nexport async function runPhase4Writing(\n repoPath: string,\n graph: ModuleGraph,\n analyses: ModuleAnalysis[],\n options: GenerateCommandOptions,\n isCancelled: () => boolean,\n usageTracker?: UsageTracker,\n reanalyzedModuleIds?: string[]\n): Promise<Phase4WritingResult> {\n const startTime = Date.now();\n\n process.stderr.write('\\n');\n printHeader('Phase 4: Article Generation');\n\n // Resolve per-phase settings for writing\n const writingModel = resolvePhaseModel(options, 'writing');\n const writingTimeout = resolvePhaseTimeout(options, 'writing');\n const writingConcurrency = resolvePhaseConcurrency(options, 'writing');\n const writingDepth = resolvePhaseDepth(options, 'writing');\n const concurrency = writingConcurrency ? 
Math.min(writingConcurrency * 2, 20) : 5;\n\n // Get git hash once upfront for per-article incremental saves (subfolder-scoped)\n let gitHash: string | null = null;\n try {\n gitHash = await getFolderHeadHash(repoPath);\n } catch {\n // Non-fatal: incremental saves won't work but generation continues\n }\n\n // Determine which modules need article generation\n let analysesToGenerate = analyses;\n let cachedArticles: GeneratedArticle[] = [];\n\n if (!options.force) {\n const moduleIds = analyses\n .map(a => a.moduleId)\n .filter(id => !!id);\n\n // Re-stamp unchanged module articles with the new git hash BEFORE scanning.\n // This is the key to Phase 4 incremental invalidation: modules that were NOT\n // re-analyzed in Phase 3 get their cached articles re-stamped so they pass\n // the git hash validation in scanIndividualArticlesCache().\n //\n // Skip re-stamping when:\n // - No git hash available\n // - reanalyzedModuleIds is undefined (Phase 3 was skipped)\n // - reanalyzedModuleIds is empty (nothing changed, articles already valid)\n // - --use-cache mode (articles are loaded regardless of hash)\n if (gitHash && reanalyzedModuleIds !== undefined && reanalyzedModuleIds.length > 0 && !options.useCache) {\n const unchangedModuleIds = moduleIds.filter(\n id => !reanalyzedModuleIds.includes(id)\n );\n if (unchangedModuleIds.length > 0) {\n const restamped = restampArticles(unchangedModuleIds, options.output, gitHash);\n if (restamped > 0 && options.verbose) {\n printInfo(`Re-stamped ${restamped} unchanged module articles with current git hash`);\n }\n }\n }\n\n // Scan for individually cached articles (handles crash recovery too)\n const { found, missing } = options.useCache\n ? scanIndividualArticlesCacheAny(moduleIds, options.output)\n : gitHash\n ? scanIndividualArticlesCache(moduleIds, options.output, gitHash)\n : { found: [] as GeneratedArticle[], missing: [...moduleIds] };\n\n if (found.length > 0) {\n cachedArticles = found;\n\n if (missing.length === 0) {\n // All module articles are cached \u2014 skip map phase\n printSuccess(`All ${found.length} module articles loaded from cache`);\n } else {\n printInfo(`Recovered ${found.length} cached articles, ${missing.length} remaining`);\n }\n\n // Only generate articles for modules NOT in cache\n analysesToGenerate = analyses.filter(\n a => missing.includes(a.moduleId)\n );\n }\n }\n\n // Create writing invoker (session pool, no tools)\n const baseWritingInvoker = createWritingInvoker({\n model: writingModel,\n timeoutMs: writingTimeout ? writingTimeout * 1000 : undefined,\n });\n\n // Wrap invoker to capture token usage\n const writingInvoker: AIInvoker = async (prompt, opts) => {\n const result = await baseWritingInvoker(prompt, opts);\n usageTracker?.addUsage('writing', result.tokenUsage);\n return result;\n };\n\n const spinner = new Spinner();\n\n try {\n let freshArticles: GeneratedArticle[] = [];\n\n if (analysesToGenerate.length > 0) {\n // Generate articles for modules that are not cached\n spinner.start(`Generating articles for ${analysesToGenerate.length} modules...`);\n\n const wikiOutput = await generateArticles(\n {\n graph,\n analyses: analysesToGenerate,\n model: writingModel,\n concurrency,\n timeout: writingTimeout ? 
writingTimeout * 1000 : undefined,\n depth: writingDepth,\n },\n writingInvoker,\n (progress) => {\n if (progress.phase === 'mapping') {\n spinner.update(\n `Generating articles: ${progress.completedItems}/${progress.totalItems}`\n );\n } else if (progress.phase === 'reducing') {\n spinner.update('Generating index and overview pages...');\n }\n },\n isCancelled,\n // Per-article incremental save callback\n (item, mapResult) => {\n if (!gitHash || !mapResult.success) {\n return;\n }\n try {\n const output = mapResult.output as { item?: { moduleId?: string }; rawText?: string; rawResponse?: string };\n const moduleId = output?.item?.moduleId;\n const content = output?.rawText || output?.rawResponse;\n if (moduleId && content) {\n const moduleInfo = graph.modules.find(m => m.id === moduleId);\n const article: GeneratedArticle = {\n type: 'module',\n slug: normalizeModuleId(moduleId),\n title: moduleInfo?.name || moduleId,\n content,\n moduleId,\n areaId: moduleInfo?.area,\n };\n saveArticle(moduleId, article, options.output, gitHash);\n }\n } catch {\n // Non-fatal: per-article save failed, bulk save at end will catch it\n }\n },\n );\n\n // Separate module articles from reduce-generated articles\n freshArticles = wikiOutput.articles;\n\n // Check for failed articles\n const failedArticleModuleIds = wikiOutput.failedModuleIds || [];\n if (failedArticleModuleIds.length > 0) {\n spinner.warn(\n `Article generation: ${freshArticles.length} succeeded, ${failedArticleModuleIds.length} failed`\n );\n\n // Strict mode: fail the phase if any article failed\n if (options.strict !== false) {\n printError(\n `Strict mode: ${failedArticleModuleIds.length} module(s) failed article generation: ` +\n `${failedArticleModuleIds.join(', ')}. Use --no-strict to continue with partial results.`\n );\n return {\n articlesWritten: 0,\n duration: Date.now() - startTime,\n exitCode: EXIT_CODES.EXECUTION_ERROR,\n };\n }\n } else {\n spinner.succeed(`Generated ${freshArticles.length} articles`);\n }\n }\n\n // Merge cached + fresh module articles\n // Module-type articles are the per-module ones; all others are reduce/area artifacts\n const moduleTypes = new Set(['module']);\n const freshModuleArticles = freshArticles.filter(a => moduleTypes.has(a.type));\n const reduceArticles = freshArticles.filter(a => !moduleTypes.has(a.type));\n const allModuleArticles = [...cachedArticles, ...freshModuleArticles];\n\n // If we had cached articles but skipped generation, we still need the reduce phase\n // (index/architecture/getting-started) which depends on ALL module articles.\n // Try to load reduce articles from cache first \u2014 skip reduce phase if cached.\n //\n // Reduce skip criteria:\n // - Skip reduce ONLY when NO modules were re-analyzed (truly nothing changed)\n // - When reanalyzedModuleIds is undefined (Phase 3 was skipped via --phase 4),\n // fall back to old behavior: skip reduce when all articles are cached\n const nothingChanged = reanalyzedModuleIds !== undefined\n ? reanalyzedModuleIds.length === 0\n : analysesToGenerate.length === 0;\n\n if (nothingChanged && cachedArticles.length > 0) {\n // Try loading cached reduce articles\n let cachedReduceArticles: GeneratedArticle[] | null = null;\n if (!options.force) {\n cachedReduceArticles = options.useCache\n ? getCachedReduceArticles(options.output)\n : (gitHash ? 
getCachedReduceArticles(options.output, gitHash) : null);\n }\n\n if (cachedReduceArticles && cachedReduceArticles.length > 0) {\n // All reduce articles loaded from cache \u2014 skip reduce phase entirely\n reduceArticles.push(...cachedReduceArticles);\n printSuccess(\n `All ${cachedArticles.length} module articles + ${cachedReduceArticles.length} reduce articles loaded from cache`\n );\n } else {\n // Reduce articles not cached \u2014 generate them (reduce-only; don't re-generate module articles)\n spinner.start('Generating index and overview pages...');\n\n const reduceOnly = await generateReduceOnlyArticles(\n graph,\n analyses,\n writingInvoker,\n writingModel,\n writingTimeout ? writingTimeout * 1000 : undefined,\n );\n reduceArticles.push(...reduceOnly);\n\n // Cache the newly generated reduce articles\n if (gitHash && reduceOnly.length > 0) {\n try {\n saveReduceArticles(reduceOnly, options.output, gitHash);\n } catch {\n if (options.verbose) {\n printWarning('Failed to cache reduce articles (non-fatal)');\n }\n }\n }\n\n spinner.succeed('Generated index and overview pages');\n }\n } else if (analysesToGenerate.length === 0 && cachedArticles.length > 0 && !nothingChanged) {\n // Modules were re-analyzed but all articles were re-stamped/cached.\n // We still need to regenerate reduce articles because module content changed.\n spinner.start('Regenerating index and overview pages (module content changed)...');\n\n const reduceOnly = await generateReduceOnlyArticles(\n graph,\n analyses,\n writingInvoker,\n writingModel,\n writingTimeout ? writingTimeout * 1000 : undefined,\n );\n reduceArticles.push(...reduceOnly);\n\n // Cache the newly generated reduce articles\n if (gitHash && reduceOnly.length > 0) {\n try {\n saveReduceArticles(reduceOnly, options.output, gitHash);\n } catch {\n if (options.verbose) {\n printWarning('Failed to cache reduce articles (non-fatal)');\n }\n }\n }\n\n spinner.succeed('Regenerated index and overview pages');\n }\n\n // Cache reduce articles from the map+reduce pass (when articles were freshly generated)\n if (reduceArticles.length > 0 && analysesToGenerate.length > 0 && gitHash) {\n try {\n saveReduceArticles(reduceArticles, options.output, gitHash);\n } catch {\n if (options.verbose) {\n printWarning('Failed to cache reduce articles (non-fatal)');\n }\n }\n }\n\n // Combine all articles for writing\n const allArticles = [...allModuleArticles, ...reduceArticles];\n\n // Write to disk\n const outputDir = path.resolve(options.output);\n try {\n const wikiOutput = { articles: allArticles, duration: Date.now() - startTime };\n const writtenPaths = writeWikiOutput(wikiOutput, outputDir);\n printSuccess(`Wrote ${writtenPaths.length} files to ${bold(outputDir)}`);\n\n if (options.verbose) {\n for (const p of writtenPaths) {\n printInfo(` ${gray(path.relative(outputDir, p))}`);\n }\n }\n } catch (writeError) {\n printError(`Failed to write files: ${getErrorMessage(writeError)}`);\n return {\n articlesWritten: 0,\n duration: Date.now() - startTime,\n exitCode: EXIT_CODES.EXECUTION_ERROR,\n };\n }\n\n // Save article cache metadata (marks cache as \"complete\")\n try {\n await saveAllArticles(allModuleArticles, options.output, repoPath);\n } catch {\n if (options.verbose) {\n printWarning('Failed to cache articles (non-fatal)');\n }\n }\n\n return {\n articlesWritten: allArticles.length,\n duration: Date.now() - startTime,\n };\n } catch (error) {\n spinner.fail('Article generation failed');\n printError(getErrorMessage(error));\n return {\n articlesWritten: 0,\n 
duration: Date.now() - startTime,\n exitCode: EXIT_CODES.EXECUTION_ERROR,\n };\n }\n}\n\n// ============================================================================\n// Helpers\n// ============================================================================\n\n/**\n * Generate reduce-only articles (index/architecture/getting-started) without re-generating module articles.\n */\nexport async function generateReduceOnlyArticles(\n graph: ModuleGraph,\n analyses: ModuleAnalysis[],\n writingInvoker: AIInvoker,\n model?: string,\n timeoutMs?: number,\n): Promise<GeneratedArticle[]> {\n if (analyses.length === 0) {\n return [];\n }\n\n // Provide compact per-module summaries to the reducer (avoid re-generating module articles).\n const resultsForPrompt = analyses.map(a => {\n const mod = graph.modules.find(m => m.id === a.moduleId);\n return {\n id: a.moduleId,\n name: mod?.name || a.moduleId,\n category: mod?.category || 'uncategorized',\n overview: (a.overview || '').substring(0, 500),\n };\n });\n\n const resultsString = JSON.stringify(resultsForPrompt, null, 2);\n\n let prompt = buildReducePromptTemplate();\n\n prompt = prompt\n .replace(/\\{\\{RESULTS\\}\\}/g, resultsString)\n .replace(/\\{\\{COUNT\\}\\}/g, String(resultsForPrompt.length))\n .replace(/\\{\\{SUCCESS_COUNT\\}\\}/g, String(resultsForPrompt.length))\n .replace(/\\{\\{FAILURE_COUNT\\}\\}/g, '0');\n\n const reduceParameters: Record<string, string> = {\n projectName: graph.project.name,\n projectDescription: graph.project.description || 'No description available',\n buildSystem: graph.project.buildSystem || 'Unknown',\n language: graph.project.language || 'Unknown',\n };\n\n for (const [key, value] of Object.entries(reduceParameters)) {\n prompt = prompt.replace(new RegExp(`\\\\{\\\\{${key}\\\\}\\\\}`, 'g'), value);\n }\n\n const aiResult = await writingInvoker(prompt, { model, timeoutMs });\n if (!aiResult.success || !aiResult.response) {\n return generateStaticIndexPages(graph, analyses);\n }\n\n const jsonStr = extractJSON(aiResult.response);\n if (!jsonStr) {\n return generateStaticIndexPages(graph, analyses);\n }\n\n try {\n const parsed = JSON.parse(jsonStr) as Record<string, string>;\n const articles: GeneratedArticle[] = [];\n\n if (parsed.index) {\n articles.push({\n type: 'index',\n slug: 'index',\n title: `${graph.project.name} Wiki`,\n content: parsed.index,\n });\n }\n\n if (parsed.architecture) {\n articles.push({\n type: 'architecture',\n slug: 'architecture',\n title: 'Architecture Overview',\n content: parsed.architecture,\n });\n }\n\n if (parsed.gettingStarted) {\n articles.push({\n type: 'getting-started',\n slug: 'getting-started',\n title: 'Getting Started',\n content: parsed.gettingStarted,\n });\n }\n\n return articles.length > 0 ? 
articles : generateStaticIndexPages(graph, analyses);\n } catch {\n return generateStaticIndexPages(graph, analyses);\n }\n}\n", "/**\n * Phase 5: Website Generation\n *\n * Generates a static HTML website from the wiki markdown files.\n */\n\nimport * as path from 'path';\nimport type { GenerateCommandOptions } from '../../types';\nimport { generateWebsite } from '../../writing';\nimport {\n Spinner,\n printWarning,\n printHeader,\n} from '../../logger';\nimport { getErrorMessage } from '../../utils/error-utils';\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface Phase5WebsiteResult {\n success: boolean;\n duration: number;\n}\n\n// ============================================================================\n// Phase 5: Website Generation\n// ============================================================================\n\nexport function runPhase5Website(options: GenerateCommandOptions): Phase5WebsiteResult {\n const startTime = Date.now();\n\n process.stderr.write('\\n');\n printHeader('Phase 5: Website Generation');\n\n const spinner = new Spinner();\n spinner.start('Generating website...');\n\n try {\n const outputDir = path.resolve(options.output);\n const files = generateWebsite(outputDir, {\n theme: options.theme,\n title: options.title,\n });\n\n spinner.succeed(`Website generated (${files.length} files)`);\n return { success: true, duration: Date.now() - startTime };\n } catch (error) {\n spinner.fail('Website generation failed');\n printWarning(`Website generation failed: ${getErrorMessage(error)}`);\n printWarning('Wiki markdown files were still written successfully.');\n return { success: false, duration: Date.now() - startTime };\n }\n}\n", "/**\n * Phase runners barrel export\n *\n * Re-exports all phase runner functions and their associated types.\n */\n\nexport { runPhase1 } from './discovery-phase';\nexport type { Phase1Result } from './discovery-phase';\n\nexport { runPhase2Consolidation } from './consolidation-phase';\nexport type { Phase2ConsolidationResult } from './consolidation-phase';\n\nexport { runPhase3Analysis } from './analysis-phase';\nexport type { Phase3AnalysisResult } from './analysis-phase';\n\nexport { runPhase4Writing, generateReduceOnlyArticles } from './writing-phase';\nexport type { Phase4WritingResult } from './writing-phase';\n\nexport { runPhase5Website } from './website-phase';\nexport type { Phase5WebsiteResult } from './website-phase';\n", "/**\n * Generate Command\n *\n * Implements the `deep-wiki generate <repo-path>` command.\n * Full pipeline wiki generation:\n * Phase 1: Discovery \u2192 ModuleGraph\n * Phase 2: Consolidation \u2192 Reduced ModuleGraph\n * Phase 3: Analysis \u2192 ModuleAnalysis[] (incremental with cache)\n * Phase 4: Writing \u2192 Wiki articles on disk\n * Phase 5: Website \u2192 Static HTML website\n *\n * Phase runner functions are in `./phases/`.\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as path from 'path';\nimport * as fs from 'fs';\nimport type { GenerateCommandOptions, ModuleGraph, ModuleAnalysis } from '../types';\nimport { checkAIAvailability } from '../ai-invoker';\nimport { UsageTracker } from '../usage-tracker';\nimport type { TrackedPhase } from '../usage-tracker';\nimport {\n getCachedGraphAny,\n getCachedGraph,\n getCachedAnalyses,\n} from '../cache';\nimport {\n printSuccess,\n printError,\n printWarning,\n printInfo,\n printHeader,\n 
printKeyValue,\n bold,\n gray,\n} from '../logger';\nimport { EXIT_CODES } from '../cli';\nimport {\n runPhase1,\n runPhase2Consolidation,\n runPhase3Analysis,\n runPhase4Writing,\n runPhase5Website,\n} from './phases';\n\n// ============================================================================\n// Execute Generate Command\n// ============================================================================\n\n/**\n * Execute the generate command \u2014 full pipeline wiki generation.\n *\n * @param repoPath - Path to the local git repository\n * @param options - Command options\n * @returns Exit code\n */\nexport async function executeGenerate(\n repoPath: string,\n options: GenerateCommandOptions\n): Promise<number> {\n const startTime = Date.now();\n\n // Resolve to absolute path\n const absoluteRepoPath = path.resolve(repoPath);\n\n // Validate the repo path exists\n if (!fs.existsSync(absoluteRepoPath)) {\n printError(`Repository path does not exist: ${absoluteRepoPath}`);\n return EXIT_CODES.CONFIG_ERROR;\n }\n\n if (!fs.statSync(absoluteRepoPath).isDirectory()) {\n printError(`Repository path is not a directory: ${absoluteRepoPath}`);\n return EXIT_CODES.CONFIG_ERROR;\n }\n\n // Validate phase option\n const startPhase = options.phase || 1;\n if (startPhase < 1 || startPhase > 4) {\n printError(`Invalid --phase value: ${startPhase}. Must be 1, 2, 3, or 4.`);\n return EXIT_CODES.CONFIG_ERROR;\n }\n\n // Print header\n printHeader('Deep Wiki \\u2014 Full Generation');\n printKeyValue('Repository', absoluteRepoPath);\n printKeyValue('Output', path.resolve(options.output));\n printKeyValue('Depth', options.depth);\n if (options.focus) { printKeyValue('Focus', options.focus); }\n if (options.model) { printKeyValue('Model', options.model); }\n if (options.concurrency) { printKeyValue('Concurrency', String(options.concurrency)); }\n if (startPhase > 1) { printKeyValue('Starting Phase', String(startPhase)); }\n if (options.force) { printKeyValue('Force', 'yes (ignoring all caches)'); }\n if (options.useCache) { printKeyValue('Use Cache', 'yes (ignoring git hash)'); }\n if (options.strict === false) { printKeyValue('Strict', 'no (partial failures allowed)'); }\n if (options.config) { printKeyValue('Config', options.config); }\n\n // Print per-phase overrides if configured\n if (options.phases) {\n const phaseNames: Array<{ key: import('../types').PhaseName; label: string }> = [\n { key: 'discovery', label: 'Phase 1 (Discovery)' },\n { key: 'consolidation', label: 'Phase 2 (Consolidation)' },\n { key: 'analysis', label: 'Phase 3 (Analysis)' },\n { key: 'writing', label: 'Phase 4 (Writing)' },\n ];\n for (const { key, label } of phaseNames) {\n const phaseConfig = options.phases[key];\n if (phaseConfig) {\n const parts: string[] = [];\n if (phaseConfig.model) { parts.push(`model=${phaseConfig.model}`); }\n if (phaseConfig.timeout) { parts.push(`timeout=${phaseConfig.timeout}s`); }\n if (phaseConfig.concurrency) { parts.push(`concurrency=${phaseConfig.concurrency}`); }\n if (phaseConfig.depth) { parts.push(`depth=${phaseConfig.depth}`); }\n if (phaseConfig.skipAI) { parts.push('skipAI'); }\n if (parts.length > 0) {\n printKeyValue(label, parts.join(', '));\n }\n }\n }\n }\n\n process.stderr.write('\\n');\n\n // Check AI availability\n const availability = await checkAIAvailability();\n if (!availability.available) {\n printError(`Copilot SDK is not available: ${availability.reason || 'Unknown reason'}`);\n printInfo('Setup instructions:');\n printInfo(' 1. 
Install GitHub Copilot extension');\n printInfo(' 2. Sign in with your GitHub account');\n printInfo(' 3. Ensure Copilot has SDK access');\n return EXIT_CODES.AI_UNAVAILABLE;\n }\n\n // Set up cancellation\n let cancelled = false;\n const isCancelled = () => cancelled;\n const sigintHandler = () => {\n if (cancelled) {\n process.exit(EXIT_CODES.CANCELLED);\n }\n cancelled = true;\n printWarning('Cancellation requested \u2014 finishing current operations...');\n };\n process.on('SIGINT', sigintHandler);\n\n try {\n // Token usage tracker\n const usageTracker = new UsageTracker();\n\n // ================================================================\n // Phase 1: Discovery\n // ================================================================\n let graph: ModuleGraph;\n let phase1Duration = 0;\n\n if (startPhase <= 1) {\n const phase1Result = await runPhase1(absoluteRepoPath, options, isCancelled);\n if (phase1Result.exitCode !== undefined) {\n return phase1Result.exitCode;\n }\n graph = phase1Result.graph!;\n phase1Duration = phase1Result.duration;\n if (phase1Result.tokenUsage) {\n usageTracker.addUsage('discovery', phase1Result.tokenUsage);\n }\n } else {\n // Load from cache\n const cached = options.useCache\n ? getCachedGraphAny(options.output)\n : await getCachedGraph(absoluteRepoPath, options.output);\n if (!cached) {\n printError(`No cached module graph found. Run without --phase (or --phase 1) first.`);\n return EXIT_CODES.CONFIG_ERROR;\n }\n graph = cached.graph;\n printSuccess(`Loaded cached module graph (${graph.modules.length} modules)`);\n usageTracker.markCached('discovery');\n }\n\n if (isCancelled()) {\n return EXIT_CODES.CANCELLED;\n }\n\n // ================================================================\n // Phase 2: Consolidation\n // ================================================================\n let phase2Duration = 0;\n\n if (!options.noCluster && graph.modules.length > 0 && startPhase <= 2) {\n const phase2Result = await runPhase2Consolidation(absoluteRepoPath, graph, options, usageTracker);\n graph = phase2Result.graph;\n phase2Duration = phase2Result.duration;\n }\n\n if (isCancelled()) {\n return EXIT_CODES.CANCELLED;\n }\n\n // ================================================================\n // Phase 3: Deep Analysis\n // ================================================================\n let analyses: ModuleAnalysis[];\n let phase3Duration = 0;\n\n let reanalyzedModuleIds: string[] | undefined;\n\n if (startPhase <= 3) {\n const phase3Result = await runPhase3Analysis(\n absoluteRepoPath, graph, options, isCancelled, usageTracker\n );\n if (phase3Result.exitCode !== undefined) {\n return phase3Result.exitCode;\n }\n analyses = phase3Result.analyses!;\n phase3Duration = phase3Result.duration;\n reanalyzedModuleIds = phase3Result.reanalyzedModuleIds;\n } else {\n // Load from cache\n const cached = getCachedAnalyses(options.output);\n if (!cached || cached.length === 0) {\n printError(`No cached analyses found. 
Run with --phase 3 (or without --phase) first.`);\n return EXIT_CODES.CONFIG_ERROR;\n }\n analyses = cached;\n printSuccess(`Loaded ${analyses.length} cached module analyses`);\n usageTracker.markCached('analysis');\n }\n\n if (isCancelled()) {\n return EXIT_CODES.CANCELLED;\n }\n\n // ================================================================\n // Phase 4: Article Generation\n // ================================================================\n const phase4Result = await runPhase4Writing(\n absoluteRepoPath, graph, analyses, options, isCancelled, usageTracker, reanalyzedModuleIds\n );\n if (phase4Result.exitCode !== undefined) {\n return phase4Result.exitCode;\n }\n\n // ================================================================\n // Phase 5: Website Generation\n // ================================================================\n let websiteGenerated = false;\n let phase5Duration = 0;\n\n if (!options.skipWebsite) {\n const phase5Result = runPhase5Website(options);\n websiteGenerated = phase5Result.success;\n phase5Duration = phase5Result.duration;\n }\n\n // ================================================================\n // Summary\n // ================================================================\n const totalDuration = Date.now() - startTime;\n process.stderr.write('\\n');\n printHeader('Generation Summary');\n printKeyValue('Modules Discovered', String(graph.modules.length));\n if (graph.areas && graph.areas.length > 0) {\n printKeyValue('Areas', String(graph.areas.length));\n printKeyValue('Layout', 'Hierarchical (3-level)');\n }\n printKeyValue('Modules Analyzed', String(analyses.length));\n printKeyValue('Articles Written', String(phase4Result.articlesWritten));\n if (websiteGenerated) {\n printKeyValue('Website', 'Generated');\n }\n if (phase1Duration > 0) { printKeyValue('Phase 1 Duration', formatDuration(phase1Duration)); }\n if (phase2Duration > 0) { printKeyValue('Phase 2 Duration', formatDuration(phase2Duration)); }\n if (phase3Duration > 0) { printKeyValue('Phase 3 Duration', formatDuration(phase3Duration)); }\n printKeyValue('Phase 4 Duration', formatDuration(phase4Result.duration));\n if (phase5Duration > 0) { printKeyValue('Phase 5 Duration', formatDuration(phase5Duration)); }\n printKeyValue('Total Duration', formatDuration(totalDuration));\n\n // Token usage summary\n if (usageTracker.hasUsage()) {\n process.stderr.write('\\n');\n printTokenUsageSummary(usageTracker);\n\n // Save JSON report\n try {\n const cacheDir = path.join(path.resolve(options.output), '.wiki-cache');\n fs.mkdirSync(cacheDir, { recursive: true });\n const report = usageTracker.toReport(options.model);\n fs.writeFileSync(\n path.join(cacheDir, 'usage-report.json'),\n JSON.stringify(report, null, 2),\n 'utf-8'\n );\n } catch {\n // Non-fatal\n }\n }\n\n process.stderr.write('\\n');\n printSuccess(`Wiki generated at ${bold(path.resolve(options.output))}`);\n if (websiteGenerated) {\n printSuccess(`Website: ${bold(path.join(path.resolve(options.output), 'index.html'))}`);\n }\n\n return EXIT_CODES.SUCCESS;\n\n } finally {\n process.removeListener('SIGINT', sigintHandler);\n }\n}\n\n// ============================================================================\n// Helpers\n// ============================================================================\n\n/**\n * Format a duration in milliseconds to a human-readable string.\n */\nfunction formatDuration(ms: number): string {\n if (ms < 1000) {\n return `${ms}ms`;\n }\n const seconds = Math.round(ms / 1000);\n if (seconds < 60) {\n 
return `${seconds}s`;\n }\n const minutes = Math.floor(seconds / 60);\n const remainingSeconds = seconds % 60;\n return `${minutes}m ${remainingSeconds}s`;\n}\n\n/**\n * Print a token usage summary table to stderr.\n */\nfunction printTokenUsageSummary(tracker: UsageTracker): void {\n const fmt = UsageTracker.formatTokens;\n\n const phases: Array<{ label: string; phase: TrackedPhase }> = [\n { label: 'Phase 1 (Discovery)', phase: 'discovery' },\n { label: 'Phase 2 (Consolidation)', phase: 'consolidation' },\n { label: 'Phase 3 (Analysis)', phase: 'analysis' },\n { label: 'Phase 4 (Writing)', phase: 'writing' },\n ];\n\n printInfo('\u2500\u2500 Token Usage \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500');\n\n for (const { label, phase } of phases) {\n const u = tracker.getPhaseUsage(phase);\n if (u.cached && u.calls === 0) {\n printKeyValue(label, gray('cached'));\n } else if (u.calls > 0) {\n printKeyValue(\n label,\n `${fmt(u.inputTokens)} in / ${fmt(u.outputTokens)} out / ${fmt(u.totalTokens)} total`\n );\n }\n }\n\n const total = tracker.getTotal();\n printInfo('\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500');\n printKeyValue(\n 'Total Tokens',\n `${fmt(total.inputTokens)} in / ${fmt(total.outputTokens)} out / ${fmt(total.totalTokens)} total`\n );\n if (total.cost != null) {\n printKeyValue('Total Cost', UsageTracker.formatCost(total.cost));\n }\n printKeyValue('AI Calls', String(total.calls));\n}\n", "/**\n * Wiki Data Layer\n *\n * Reads and caches wiki data (module graph, markdown articles, analyses)\n * from the wiki output directory on disk.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport type { ModuleGraph, ModuleInfo, ModuleAnalysis } from '../types';\n\n// ============================================================================\n// Types\n// ============================================================================\n\n/**\n * Module summary returned by the /api/modules endpoint.\n */\nexport interface ModuleSummary {\n id: string;\n name: string;\n category: string;\n complexity: string;\n path: string;\n purpose: string;\n}\n\n/**\n * Module detail returned by the /api/modules/:id endpoint.\n */\nexport interface ModuleDetail {\n module: ModuleInfo;\n markdown: string;\n analysis?: ModuleAnalysis;\n}\n\n/**\n * Special page returned by the /api/pages/:key endpoint.\n */\nexport interface SpecialPage {\n key: string;\n title: string;\n markdown: string;\n}\n\n// ============================================================================\n// WikiData Class\n// ============================================================================\n\n/**\n * Reads and caches wiki data from the wiki output directory.\n *\n * Data is loaded eagerly on construction and can be refreshed via reload().\n */\nexport class WikiData {\n private wikiDir: string;\n private _graph: ModuleGraph | null = null;\n private _markdown: Record<string, string> = {};\n private _analyses: Map<string, ModuleAnalysis> = new Map();\n\n 
constructor(wikiDir: string) {\n this.wikiDir = path.resolve(wikiDir);\n }\n\n /**\n * Load all wiki data from disk. Call on startup and after rebuilds.\n */\n load(): void {\n this._graph = this.readModuleGraph();\n this._markdown = this.readMarkdownFiles();\n this._analyses = this.readAnalyses();\n }\n\n /**\n * Reload wiki data from disk (alias for load).\n */\n reload(): void {\n this.load();\n }\n\n /**\n * Get the full module graph.\n */\n get graph(): ModuleGraph {\n if (!this._graph) {\n throw new Error('Wiki data not loaded. Call load() first.');\n }\n return this._graph;\n }\n\n /**\n * Get the wiki directory path.\n */\n get dir(): string {\n return this.wikiDir;\n }\n\n /**\n * Get summaries for all modules.\n */\n getModuleSummaries(): ModuleSummary[] {\n return this.graph.modules.map(mod => ({\n id: mod.id,\n name: mod.name,\n category: mod.category,\n complexity: mod.complexity,\n path: mod.path,\n purpose: mod.purpose,\n }));\n }\n\n /**\n * Get detailed info for a single module.\n */\n getModuleDetail(moduleId: string): ModuleDetail | null {\n const mod = this.graph.modules.find(m => m.id === moduleId);\n if (!mod) {\n return null;\n }\n\n return {\n module: mod,\n markdown: this._markdown[moduleId] || '',\n analysis: this._analyses.get(moduleId),\n };\n }\n\n /**\n * Get a special page by key (index, architecture, getting-started).\n */\n getSpecialPage(key: string): SpecialPage | null {\n const TITLES: Record<string, string> = {\n 'index': 'Index',\n 'architecture': 'Architecture',\n 'getting-started': 'Getting Started',\n };\n\n const internalKey = `__${key}`;\n const markdown = this._markdown[internalKey];\n if (!markdown) {\n return null;\n }\n\n return {\n key,\n title: TITLES[key] || key,\n markdown,\n };\n }\n\n /**\n * Get all markdown data (used by SPA template for embedding).\n */\n getMarkdownData(): Record<string, string> {\n return { ...this._markdown };\n }\n\n /**\n * Check if wiki data has been loaded.\n */\n get isLoaded(): boolean {\n return this._graph !== null;\n }\n\n // ========================================================================\n // Private: Disk Readers\n // ========================================================================\n\n private readModuleGraph(): ModuleGraph {\n const graphPath = path.join(this.wikiDir, 'module-graph.json');\n if (!fs.existsSync(graphPath)) {\n throw new Error(`module-graph.json not found in ${this.wikiDir}`);\n }\n const content = fs.readFileSync(graphPath, 'utf-8');\n return JSON.parse(content) as ModuleGraph;\n }\n\n private readMarkdownFiles(): Record<string, string> {\n const data: Record<string, string> = {};\n\n // Read top-level markdown files\n const topLevelFiles = ['index.md', 'architecture.md', 'getting-started.md'];\n for (const file of topLevelFiles) {\n const filePath = path.join(this.wikiDir, file);\n if (fs.existsSync(filePath)) {\n const key = path.basename(file, '.md');\n data[`__${key}`] = fs.readFileSync(filePath, 'utf-8');\n }\n }\n\n // Read flat-layout module files\n const modulesDir = path.join(this.wikiDir, 'modules');\n if (fs.existsSync(modulesDir) && fs.statSync(modulesDir).isDirectory()) {\n const files = fs.readdirSync(modulesDir).filter(f => f.endsWith('.md'));\n for (const file of files) {\n const slug = path.basename(file, '.md');\n const moduleId = this.findModuleIdBySlug(slug);\n data[moduleId || slug] = fs.readFileSync(path.join(modulesDir, file), 'utf-8');\n }\n }\n\n // Read hierarchical-layout area files\n const areasDir = path.join(this.wikiDir, 'areas');\n if 
(fs.existsSync(areasDir) && fs.statSync(areasDir).isDirectory()) {\n const areaDirs = fs.readdirSync(areasDir).filter(d =>\n fs.statSync(path.join(areasDir, d)).isDirectory()\n );\n\n for (const areaId of areaDirs) {\n const areaDir = path.join(areasDir, areaId);\n\n // Area-level files\n for (const file of ['index.md', 'architecture.md']) {\n const filePath = path.join(areaDir, file);\n if (fs.existsSync(filePath)) {\n const key = path.basename(file, '.md');\n data[`__area_${areaId}_${key}`] = fs.readFileSync(filePath, 'utf-8');\n }\n }\n\n // Area module files\n const areaModulesDir = path.join(areaDir, 'modules');\n if (fs.existsSync(areaModulesDir) && fs.statSync(areaModulesDir).isDirectory()) {\n const files = fs.readdirSync(areaModulesDir).filter(f => f.endsWith('.md'));\n for (const file of files) {\n const slug = path.basename(file, '.md');\n const moduleId = this.findModuleIdBySlug(slug);\n data[moduleId || slug] = fs.readFileSync(path.join(areaModulesDir, file), 'utf-8');\n }\n }\n }\n }\n\n return data;\n }\n\n private readAnalyses(): Map<string, ModuleAnalysis> {\n const analyses = new Map<string, ModuleAnalysis>();\n\n // Try to read from cache directory\n const cacheDir = path.join(this.wikiDir, '.wiki-cache', 'analyses');\n if (!fs.existsSync(cacheDir) || !fs.statSync(cacheDir).isDirectory()) {\n return analyses;\n }\n\n const files = fs.readdirSync(cacheDir).filter(f => f.endsWith('.json'));\n for (const file of files) {\n try {\n const content = fs.readFileSync(path.join(cacheDir, file), 'utf-8');\n const parsed = JSON.parse(content);\n // Handle both direct analysis and cached analysis formats\n const analysis: ModuleAnalysis = parsed.analysis || parsed;\n if (analysis.moduleId) {\n analyses.set(analysis.moduleId, analysis);\n }\n } catch {\n // Skip invalid files\n }\n }\n\n return analyses;\n }\n\n private findModuleIdBySlug(slug: string): string | null {\n if (!this._graph) { return null; }\n const normalized = slug.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-+|-+$/g, '');\n for (const mod of this._graph.modules) {\n const modSlug = mod.id.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-+|-+$/g, '');\n if (modSlug === normalized) {\n return mod.id;\n }\n }\n return null;\n }\n}\n", "/**\n * Ask Handler\n *\n * POST /api/ask \u2014 AI Q&A endpoint with SSE streaming.\n * Takes a user question + optional conversation history, retrieves\n * relevant module context via TF-IDF, and streams an AI answer via\n * Server-Sent Events.\n *\n * Multi-turn conversation is supported in two modes:\n * 1. Session-based (preferred): Client sends `sessionId` \u2014 server reuses\n * the same ConversationSessionManager session across turns.\n * 2. History-based (fallback): Client sends full `conversationHistory` \u2014\n * server embeds it in the prompt (legacy behavior).\n */\n\nimport type { IncomingMessage, ServerResponse } from 'http';\nimport { ContextBuilder } from './context-builder';\nimport type { ConversationSessionManager } from './conversation-session-manager';\n\n// ============================================================================\n// Types\n// ============================================================================\n\n/** A single message in a conversation turn. */\nexport interface ConversationMessage {\n role: 'user' | 'assistant';\n content: string;\n}\n\n/** Request body for POST /api/ask. */\nexport interface AskRequest {\n question: string;\n /** Optional session ID for multi-turn conversations. 
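With the `WikiData` class now complete, a minimal usage sketch looks like the following; the wiki directory path and the module id are placeholders, not values shipped with the package:

```ts
// Sketch: load a wiki output directory and read from it.
import { WikiData } from './wiki-data';

const data = new WikiData('./wiki-output'); // placeholder path
data.load(); // throws if module-graph.json is missing

console.log(data.graph.modules.length, 'modules loaded');
for (const summary of data.getModuleSummaries()) {
  console.log(`${summary.name} [${summary.category}/${summary.complexity}]`);
}

const detail = data.getModuleDetail('pipeline-core'); // hypothetical module id
if (detail) {
  console.log(detail.markdown.slice(0, 200));
}

const page = data.getSpecialPage('architecture');
console.log(page?.title); // "Architecture" when architecture.md exists on disk
```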
*/\n sessionId?: string;\n /** Legacy: full conversation history (used when no sessionId). */\n conversationHistory?: ConversationMessage[];\n}\n\n/** Options for the ask handler. */\nexport interface AskHandlerOptions {\n contextBuilder: ContextBuilder;\n sendMessage: AskAIFunction;\n model?: string;\n workingDirectory?: string;\n /** Session manager for multi-turn conversations. */\n sessionManager?: ConversationSessionManager;\n}\n\n/**\n * Abstraction over the AI SDK's sendMessage for testability.\n * Returns the full response string.\n * When `onStreamingChunk` is provided, each delta chunk is emitted in real-time\n * while the function still resolves with the complete response.\n */\nexport type AskAIFunction = (prompt: string, options?: {\n model?: string;\n workingDirectory?: string;\n /** Callback invoked for each streaming chunk as it arrives. */\n onStreamingChunk?: (chunk: string) => void;\n}) => Promise<string>;\n\n// ============================================================================\n// Handler\n// ============================================================================\n\n/**\n * Handle POST /api/ask \u2014 streamed as SSE.\n *\n * SSE protocol:\n * data: {\"type\":\"context\",\"moduleIds\":[\"mod1\",\"mod2\"]}\n * data: {\"type\":\"chunk\",\"content\":\"Some partial answer...\"}\n * data: {\"type\":\"done\",\"fullResponse\":\"Full answer text\"}\n * data: {\"type\":\"error\",\"message\":\"Something went wrong\"}\n */\nexport async function handleAskRequest(\n req: IncomingMessage,\n res: ServerResponse,\n options: AskHandlerOptions,\n): Promise<void> {\n // Parse body\n const body = await readBody(req);\n let askReq: AskRequest;\n try {\n askReq = JSON.parse(body);\n } catch {\n res.writeHead(400, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: 'Invalid JSON body' }));\n return;\n }\n\n if (!askReq.question || typeof askReq.question !== 'string') {\n res.writeHead(400, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: 'Missing or invalid \"question\" field' }));\n return;\n }\n\n // Set SSE headers\n res.writeHead(200, {\n 'Content-Type': 'text/event-stream',\n 'Cache-Control': 'no-cache',\n 'Connection': 'keep-alive',\n 'Access-Control-Allow-Origin': '*',\n });\n\n try {\n // 1. Retrieve context\n const context = options.contextBuilder.retrieve(askReq.question);\n\n // Send context event\n sendSSE(res, {\n type: 'context',\n moduleIds: context.moduleIds,\n });\n\n // 2. 
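`AskAIFunction` is the seam that keeps the ask handler testable: any backend can be plugged in as long as it resolves with the full response and, when `onStreamingChunk` is provided, also emits deltas. A minimal stub implementation, useful in tests (the `fakeCompletion` helper is a stand-in, not part of the package):

```ts
// Stub AskAIFunction: streams the answer in small deltas, then resolves with it.
import type { AskAIFunction } from './ask-handler';

async function fakeCompletion(prompt: string): Promise<string> {
  return `Echo: ${prompt.slice(0, 40)}...`; // placeholder "AI" answer
}

export const stubSendMessage: AskAIFunction = async (prompt, options) => {
  const full = await fakeCompletion(prompt);
  // Emit deltas so the SSE streaming path in the handler is exercised.
  if (options?.onStreamingChunk) {
    for (let i = 0; i < full.length; i += 16) {
      options.onStreamingChunk(full.slice(i, i + 16));
    }
  }
  return full; // the handler still receives the complete response
};
```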
Determine session mode vs legacy mode\n const sessionManager = options.sessionManager;\n let sessionId = askReq.sessionId;\n let isSessionMode = false;\n\n if (sessionManager) {\n if (sessionId) {\n // Try to reuse existing session\n const existing = sessionManager.get(sessionId);\n if (existing) {\n isSessionMode = true;\n } else {\n // Session expired/not found \u2014 create a new one\n sessionId = undefined;\n }\n }\n\n if (!sessionId) {\n // Create new session\n const newSession = sessionManager.create();\n if (newSession) {\n sessionId = newSession.sessionId;\n isSessionMode = true;\n }\n // If null (max reached), fall back to stateless mode\n }\n }\n\n let fullResponse: string;\n\n if (isSessionMode && sessionManager && sessionId) {\n // Session mode: build prompt WITHOUT conversation history (SDK retains context)\n const prompt = buildAskPrompt(\n askReq.question,\n context.contextText,\n context.graphSummary,\n undefined, // No history needed \u2014 session retains context\n );\n\n const result = await sessionManager.send(sessionId, prompt, {\n model: options.model,\n workingDirectory: options.workingDirectory,\n onStreamingChunk: (chunk) => {\n sendSSE(res, { type: 'chunk', content: chunk });\n },\n });\n\n fullResponse = result.response;\n } else {\n // Legacy stateless mode: embed conversation history in prompt\n const prompt = buildAskPrompt(\n askReq.question,\n context.contextText,\n context.graphSummary,\n askReq.conversationHistory,\n );\n\n fullResponse = await options.sendMessage(prompt, {\n model: options.model,\n workingDirectory: options.workingDirectory,\n onStreamingChunk: (chunk) => {\n sendSSE(res, { type: 'chunk', content: chunk });\n },\n });\n }\n\n // 3. Send done event (include sessionId so client can reuse it)\n sendSSE(res, {\n type: 'done',\n fullResponse,\n ...(sessionId ? { sessionId } : {}),\n });\n\n } catch (err) {\n const message = err instanceof Error ? err.message : 'Unknown error';\n sendSSE(res, { type: 'error', message });\n }\n\n res.end();\n}\n\n// ============================================================================\n// Prompt Building\n// ============================================================================\n\n/**\n * Build the AI prompt for Q&A.\n */\nexport function buildAskPrompt(\n question: string,\n contextText: string,\n graphSummary: string,\n conversationHistory?: ConversationMessage[],\n): string {\n const parts: string[] = [];\n\n parts.push('You are a knowledgeable assistant for a software project wiki.');\n parts.push('Answer the user\\'s question based on the provided module documentation and architecture context.');\n parts.push('If the documentation doesn\\'t contain enough information to answer, say so clearly.');\n parts.push('Use markdown formatting in your response. Reference specific modules by name when relevant.');\n parts.push('');\n\n // Architecture overview\n parts.push('## Architecture Overview');\n parts.push('');\n parts.push(graphSummary);\n parts.push('');\n\n // Relevant module documentation\n if (contextText) {\n parts.push('## Relevant Module Documentation');\n parts.push('');\n parts.push(contextText);\n parts.push('');\n }\n\n // Conversation history\n if (conversationHistory && conversationHistory.length > 0) {\n parts.push('## Conversation History');\n parts.push('');\n for (const msg of conversationHistory) {\n const role = msg.role === 'user' ? 
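The handler above frames its output as newline-delimited `data:` events (`context`, `chunk`, `done`, `error`), with `done` carrying the `sessionId` to reuse on the next turn. A rough client-side consumer, assuming fetch streaming in a browser and minimal error handling:

```ts
// Sketch: consume the POST /api/ask SSE stream and accumulate the answer.
async function ask(question: string, sessionId?: string): Promise<string> {
  const res = await fetch('/api/ask', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ question, sessionId }),
  });
  const reader = res.body!.getReader();
  const decoder = new TextDecoder();
  let buffer = '';
  let answer = '';
  let idx: number;

  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    // Frames are separated by a blank line and prefixed with "data: ".
    while ((idx = buffer.indexOf('\n\n')) !== -1) {
      const frame = buffer.slice(0, idx);
      buffer = buffer.slice(idx + 2);
      if (!frame.startsWith('data: ')) continue;
      const event = JSON.parse(frame.slice(6));
      if (event.type === 'chunk') answer += event.content;
      if (event.type === 'done') sessionId = event.sessionId ?? sessionId; // a real client would store this for the next turn
      if (event.type === 'error') throw new Error(event.message);
    }
  }
  return answer;
}
```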
'User' : 'Assistant';\n parts.push(`**${role}:** ${msg.content}`);\n parts.push('');\n }\n }\n\n // Current question\n parts.push('## Current Question');\n parts.push('');\n parts.push(question);\n\n return parts.join('\\n');\n}\n\n// ============================================================================\n// SSE Utilities\n// ============================================================================\n\n/**\n * Send a Server-Sent Event.\n */\nexport function sendSSE(res: ServerResponse, data: Record<string, unknown>): void {\n res.write(`data: ${JSON.stringify(data)}\\n\\n`);\n}\n\n/**\n * Chunk text into smaller pieces for streaming simulation.\n */\nexport function chunkText(text: string, chunkSize: number): string[] {\n if (!text) return [];\n const chunks: string[] = [];\n for (let i = 0; i < text.length; i += chunkSize) {\n chunks.push(text.slice(i, i + chunkSize));\n }\n return chunks;\n}\n\n// ============================================================================\n// Body Reader\n// ============================================================================\n\nfunction readBody(req: IncomingMessage): Promise<string> {\n return new Promise((resolve, reject) => {\n let body = '';\n req.on('data', (chunk: Buffer) => {\n body += chunk.toString();\n });\n req.on('end', () => resolve(body));\n req.on('error', reject);\n });\n}\n", "/**\n * Explore Handler\n *\n * POST /api/explore/:moduleId \u2014 On-demand deep-dive into a module.\n * Creates a focused AI session that analyzes the module in depth,\n * optionally answering a specific user question.\n *\n * Streams the result as SSE events:\n * data: {\"type\":\"status\",\"message\":\"Analyzing module...\"}\n * data: {\"type\":\"chunk\",\"text\":\"## Deep Analysis\\n\\n...\"}\n * data: {\"type\":\"done\",\"fullResponse\":\"...\"}\n * data: {\"type\":\"error\",\"message\":\"Something went wrong\"}\n */\n\nimport type { IncomingMessage, ServerResponse } from 'http';\nimport type { WikiData } from './wiki-data';\nimport { sendSSE } from './ask-handler';\nimport type { AskAIFunction } from './ask-handler';\n\n// ============================================================================\n// Types\n// ============================================================================\n\n/** Request body for POST /api/explore/:moduleId. */\nexport interface ExploreRequest {\n question?: string;\n depth?: 'normal' | 'deep';\n}\n\n/** Options for the explore handler. 
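For the legacy stateless path, `buildAskPrompt` embeds the whole conversation in the prompt. A small example of the call, with placeholder context strings, showing the section order it produces:

```ts
// Example call; contextText and graphSummary are placeholders.
import { buildAskPrompt } from './ask-handler';

const prompt = buildAskPrompt(
  'How does the retry policy interact with timeouts?',
  '### runtime\nRetry and timeout helpers...',          // contextText (placeholder)
  'Project with 12 modules; core depends on utils, ...', // graphSummary (placeholder)
  [
    { role: 'user', content: 'What does the runtime module do?' },
    { role: 'assistant', content: 'It provides cancellation, timeout and retry helpers.' },
  ],
);
// prompt contains, in order: the instructions, "## Architecture Overview",
// "## Relevant Module Documentation", "## Conversation History", "## Current Question".
```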
*/\nexport interface ExploreHandlerOptions {\n wikiData: WikiData;\n sendMessage: AskAIFunction;\n model?: string;\n workingDirectory?: string;\n}\n\n// ============================================================================\n// Handler\n// ============================================================================\n\n/**\n * Handle POST /api/explore/:moduleId \u2014 streamed as SSE.\n */\nexport async function handleExploreRequest(\n req: IncomingMessage,\n res: ServerResponse,\n moduleId: string,\n options: ExploreHandlerOptions,\n): Promise<void> {\n // Parse body\n const body = await readBody(req);\n let exploreReq: ExploreRequest = {};\n if (body.trim()) {\n try {\n exploreReq = JSON.parse(body);\n } catch {\n res.writeHead(400, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: 'Invalid JSON body' }));\n return;\n }\n }\n\n // Validate module exists\n const graph = options.wikiData.graph;\n const mod = graph.modules.find(m => m.id === moduleId);\n if (!mod) {\n res.writeHead(404, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: `Module not found: ${moduleId}` }));\n return;\n }\n\n // Set SSE headers\n res.writeHead(200, {\n 'Content-Type': 'text/event-stream',\n 'Cache-Control': 'no-cache',\n 'Connection': 'keep-alive',\n 'Access-Control-Allow-Origin': '*',\n });\n\n try {\n // 1. Send status\n sendSSE(res, { type: 'status', message: `Analyzing ${mod.name} module...` });\n\n // 2. Load existing analysis\n const detail = options.wikiData.getModuleDetail(moduleId);\n const existingMarkdown = detail?.markdown || '';\n\n // 3. Build explore prompt\n const prompt = buildExplorePrompt(mod, existingMarkdown, graph, exploreReq);\n\n // 4. Call AI with native streaming \u2014 chunks are sent as SSE events in real-time\n const fullResponse = await options.sendMessage(prompt, {\n model: options.model,\n workingDirectory: options.workingDirectory,\n onStreamingChunk: (chunk) => {\n sendSSE(res, { type: 'chunk', text: chunk });\n },\n });\n\n // 5. Done\n sendSSE(res, { type: 'done', fullResponse });\n\n } catch (err) {\n const message = err instanceof Error ? err.message : 'Unknown error';\n sendSSE(res, { type: 'error', message });\n }\n\n res.end();\n}\n\n// ============================================================================\n// Prompt Building\n// ============================================================================\n\n/**\n * Build the AI prompt for deep-dive exploration.\n */\nexport function buildExplorePrompt(\n mod: { id: string; name: string; category: string; path: string; purpose: string; keyFiles: string[]; dependencies: string[]; dependents: string[] },\n existingMarkdown: string,\n graph: { project: { name: string; description: string; language: string }; modules: Array<{ id: string; name: string; purpose: string; dependencies: string[] }> },\n request: ExploreRequest,\n): string {\n const parts: string[] = [];\n\n const depth = request.depth || 'normal';\n const isDeep = depth === 'deep';\n\n parts.push(`You are conducting a ${isDeep ? 
'deep' : 'focused'} analysis of the \"${mod.name}\" module.`);\n parts.push('Provide detailed technical insights with code-level specifics.');\n parts.push('Use markdown formatting with headers, code blocks, and lists.');\n parts.push('');\n\n // Module context\n parts.push('## Module Information');\n parts.push('');\n parts.push(`- **Name:** ${mod.name}`);\n parts.push(`- **ID:** ${mod.id}`);\n parts.push(`- **Category:** ${mod.category}`);\n parts.push(`- **Path:** ${mod.path}`);\n parts.push(`- **Purpose:** ${mod.purpose}`);\n parts.push(`- **Key Files:** ${mod.keyFiles.join(', ')}`);\n parts.push(`- **Dependencies:** ${mod.dependencies.length > 0 ? mod.dependencies.join(', ') : 'none'}`);\n parts.push(`- **Dependents:** ${mod.dependents.length > 0 ? mod.dependents.join(', ') : 'none'}`);\n parts.push('');\n\n // Existing analysis\n if (existingMarkdown) {\n parts.push('## Existing Analysis');\n parts.push('');\n parts.push(existingMarkdown);\n parts.push('');\n }\n\n // Architecture context\n parts.push('## Project Architecture');\n parts.push('');\n parts.push(`Project: ${graph.project.name} (${graph.project.language})`);\n for (const m of graph.modules) {\n const deps = m.dependencies.length > 0 ? ` \u2192 ${m.dependencies.join(', ')}` : '';\n parts.push(` - ${m.name}: ${m.purpose}${deps}`);\n }\n parts.push('');\n\n // User question or default exploration\n if (request.question) {\n parts.push('## User Question');\n parts.push('');\n parts.push(request.question);\n } else if (isDeep) {\n parts.push('## Deep Analysis Task');\n parts.push('');\n parts.push('Provide a comprehensive deep-dive analysis covering:');\n parts.push('1. Internal architecture and design patterns');\n parts.push('2. Key algorithms and data structures');\n parts.push('3. Error handling strategies');\n parts.push('4. Performance characteristics and potential bottlenecks');\n parts.push('5. Integration points with other modules');\n parts.push('6. 
Potential improvements and technical debt');\n } else {\n parts.push('## Analysis Task');\n parts.push('');\n parts.push('Provide a focused analysis covering the most important aspects of this module,');\n parts.push('including architecture, key patterns, and how it integrates with the rest of the system.');\n }\n\n return parts.join('\\n');\n}\n\n// ============================================================================\n// Utilities\n// ============================================================================\n\nfunction readBody(req: IncomingMessage): Promise<string> {\n return new Promise((resolve, reject) => {\n let body = '';\n req.on('data', (chunk: Buffer) => { body += chunk.toString(); });\n req.on('end', () => resolve(body));\n req.on('error', reject);\n });\n}\n", "/**\n * API Handlers\n *\n * Handles all /api/* routes for the deep-wiki server.\n * Provides REST endpoints for module graph, modules, and special pages.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as http from 'http';\nimport type { WikiData } from './wiki-data';\nimport { sendJson, send404, send400 } from './router';\nimport type { ContextBuilder } from './context-builder';\nimport type { AskAIFunction } from './ask-handler';\nimport { handleAskRequest } from './ask-handler';\nimport { getErrorMessage } from '../utils/error-utils';\nimport { handleExploreRequest } from './explore-handler';\nimport type { ConversationSessionManager } from './conversation-session-manager';\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface ApiHandlerContext {\n wikiData: WikiData;\n aiEnabled: boolean;\n repoPath?: string;\n /** Context builder for AI Q&A (only when AI is enabled) */\n contextBuilder?: ContextBuilder;\n /** AI SDK send function (only when AI is enabled) */\n aiSendMessage?: AskAIFunction;\n /** AI model override */\n aiModel?: string;\n /** Working directory for AI sessions */\n aiWorkingDirectory?: string;\n /** Session manager for multi-turn conversations */\n sessionManager?: ConversationSessionManager;\n}\n\n// ============================================================================\n// Main API Router\n// ============================================================================\n\n/**\n * Route an API request to the appropriate handler.\n */\nexport function handleApiRequest(\n req: http.IncomingMessage,\n res: http.ServerResponse,\n pathname: string,\n method: string,\n context: ApiHandlerContext\n): void {\n const { wikiData } = context;\n\n // GET /api/graph\n if (method === 'GET' && pathname === '/api/graph') {\n handleGetGraph(res, wikiData);\n return;\n }\n\n // GET /api/modules\n if (method === 'GET' && pathname === '/api/modules') {\n handleGetModules(res, wikiData);\n return;\n }\n\n // GET /api/modules/:id\n const moduleMatch = pathname.match(/^\\/api\\/modules\\/(.+)$/);\n if (method === 'GET' && moduleMatch) {\n const moduleId = decodeURIComponent(moduleMatch[1]);\n handleGetModuleById(res, wikiData, moduleId);\n return;\n }\n\n // GET /api/pages/:key\n const pageMatch = pathname.match(/^\\/api\\/pages\\/(.+)$/);\n if (method === 'GET' && pageMatch) {\n const key = decodeURIComponent(pageMatch[1]);\n handleGetPage(res, wikiData, key);\n return;\n }\n\n // POST /api/ask \u2014 AI Q&A (gated by --ai flag)\n if (method === 'POST' && pathname === '/api/ask') {\n if (!context.aiEnabled) {\n send400(res, 'AI features are 
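Putting the explore handler together: the request body follows `ExploreRequest`, and the response is an SSE stream of `status` / `chunk` / `done` / `error` events, where chunk text arrives under `text` (the ask endpoint uses `content`). A brief sketch with a placeholder module id:

```ts
// Sketch: request a deep dive for a hypothetical "runtime" module.
import type { ExploreRequest } from './explore-handler';

async function exploreRuntime(): Promise<Response> {
  const body: ExploreRequest = { question: 'Where is cancellation handled?', depth: 'deep' };
  return fetch(`/api/explore/${encodeURIComponent('runtime')}`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  });
  // The returned body is parsed the same way as the /api/ask stream shown earlier.
}
```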
not enabled. Start the server with --ai flag.');\n return;\n }\n if (!context.contextBuilder || !context.aiSendMessage) {\n send400(res, 'AI service is not configured.');\n return;\n }\n handleAskRequest(req, res, {\n contextBuilder: context.contextBuilder,\n sendMessage: context.aiSendMessage,\n model: context.aiModel,\n workingDirectory: context.aiWorkingDirectory,\n sessionManager: context.sessionManager,\n }).catch(() => {\n if (!res.headersSent) {\n sendJson(res, { error: 'Internal server error' }, 500);\n }\n });\n return;\n }\n\n // DELETE /api/ask/session/:id \u2014 Destroy a conversation session\n const sessionDeleteMatch = pathname.match(/^\\/api\\/ask\\/session\\/(.+)$/);\n if (method === 'DELETE' && sessionDeleteMatch) {\n if (!context.sessionManager) {\n send400(res, 'Session management is not enabled.');\n return;\n }\n const sessionId = decodeURIComponent(sessionDeleteMatch[1]);\n const destroyed = context.sessionManager.destroy(sessionId);\n sendJson(res, { destroyed, sessionId });\n return;\n }\n\n // POST /api/explore/:id \u2014 Deep dive (gated by --ai flag)\n const exploreMatch = pathname.match(/^\\/api\\/explore\\/(.+)$/);\n if (method === 'POST' && exploreMatch) {\n if (!context.aiEnabled) {\n send400(res, 'AI features are not enabled. Start the server with --ai flag.');\n return;\n }\n if (!context.aiSendMessage) {\n send400(res, 'AI service is not configured.');\n return;\n }\n const exploreModuleId = decodeURIComponent(exploreMatch[1]);\n handleExploreRequest(req, res, exploreModuleId, {\n wikiData: context.wikiData,\n sendMessage: context.aiSendMessage,\n model: context.aiModel,\n workingDirectory: context.aiWorkingDirectory,\n }).catch(() => {\n if (!res.headersSent) {\n sendJson(res, { error: 'Internal server error' }, 500);\n }\n });\n return;\n }\n\n // 404 for unknown API routes\n send404(res, `Unknown API endpoint: ${method} ${pathname}`);\n}\n\n// ============================================================================\n// Handler Implementations\n// ============================================================================\n\n/**\n * GET /api/graph \u2014 Returns the full module graph JSON.\n */\nfunction handleGetGraph(res: http.ServerResponse, wikiData: WikiData): void {\n try {\n sendJson(res, wikiData.graph);\n } catch (error) {\n sendJson(res, { error: getErrorMessage(error) }, 500);\n }\n}\n\n/**\n * GET /api/modules \u2014 Returns a list of module summaries.\n */\nfunction handleGetModules(res: http.ServerResponse, wikiData: WikiData): void {\n try {\n const summaries = wikiData.getModuleSummaries();\n sendJson(res, summaries);\n } catch (error) {\n sendJson(res, { error: getErrorMessage(error) }, 500);\n }\n}\n\n/**\n * GET /api/modules/:id \u2014 Returns detail for a single module.\n */\nfunction handleGetModuleById(\n res: http.ServerResponse,\n wikiData: WikiData,\n moduleId: string\n): void {\n try {\n const detail = wikiData.getModuleDetail(moduleId);\n if (!detail) {\n send404(res, `Module not found: ${moduleId}`);\n return;\n }\n sendJson(res, detail);\n } catch (error) {\n sendJson(res, { error: getErrorMessage(error) }, 500);\n }\n}\n\n/**\n * GET /api/pages/:key \u2014 Returns a special page.\n */\nfunction handleGetPage(\n res: http.ServerResponse,\n wikiData: WikiData,\n key: string\n): void {\n try {\n const page = wikiData.getSpecialPage(key);\n if (!page) {\n send404(res, `Page not found: ${key}`);\n return;\n }\n sendJson(res, page);\n } catch (error) {\n sendJson(res, { error: getErrorMessage(error) }, 500);\n }\n}\n", "/**\n 
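As a compact recap of the surface wired up by `handleApiRequest` above (no new functionality, just the routes as registered):

```ts
// Reference list of the API routes handled above.
const API_ROUTES = [
  { method: 'GET',    path: '/api/graph',           note: 'full module graph' },
  { method: 'GET',    path: '/api/modules',         note: 'module summaries' },
  { method: 'GET',    path: '/api/modules/:id',     note: 'module detail (404 if unknown)' },
  { method: 'GET',    path: '/api/pages/:key',      note: 'special pages: index, architecture, getting-started' },
  { method: 'POST',   path: '/api/ask',             note: 'AI Q&A over SSE (requires --ai)' },
  { method: 'DELETE', path: '/api/ask/session/:id', note: 'destroy a conversation session' },
  { method: 'POST',   path: '/api/explore/:id',     note: 'deep-dive analysis over SSE (requires --ai)' },
] as const;
```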
* HTTP Router\n *\n * Simple request routing for the deep-wiki server.\n * Routes requests to static file serving or API handlers.\n * Uses only Node.js built-in modules (http, fs, path, url).\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as http from 'http';\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport * as url from 'url';\nimport type { WikiData } from './wiki-data';\nimport { handleApiRequest } from './api-handlers';\nimport type { ContextBuilder } from './context-builder';\nimport type { AskAIFunction } from './ask-handler';\nimport type { ConversationSessionManager } from './conversation-session-manager';\n\n// ============================================================================\n// Constants\n// ============================================================================\n\n/** MIME types for static file serving */\nconst MIME_TYPES: Record<string, string> = {\n '.html': 'text/html; charset=utf-8',\n '.js': 'application/javascript; charset=utf-8',\n '.css': 'text/css; charset=utf-8',\n '.json': 'application/json; charset=utf-8',\n '.png': 'image/png',\n '.jpg': 'image/jpeg',\n '.jpeg': 'image/jpeg',\n '.gif': 'image/gif',\n '.svg': 'image/svg+xml',\n '.ico': 'image/x-icon',\n '.woff': 'font/woff',\n '.woff2': 'font/woff2',\n '.ttf': 'font/ttf',\n '.md': 'text/markdown; charset=utf-8',\n};\n\n/** Default MIME type for unknown extensions */\nconst DEFAULT_MIME = 'application/octet-stream';\n\n// ============================================================================\n// Router\n// ============================================================================\n\n/**\n * Options for the router.\n */\nexport interface RouterOptions {\n /** Wiki data layer */\n wikiData: WikiData;\n /** SPA HTML content (served at / and for SPA fallback) */\n spaHtml: string;\n /** Whether AI features are enabled */\n aiEnabled: boolean;\n /** Repo path (needed for AI features) */\n repoPath?: string;\n /** Context builder for AI Q&A (only when AI is enabled) */\n contextBuilder?: ContextBuilder;\n /** AI SDK send function (only when AI is enabled) */\n aiSendMessage?: AskAIFunction;\n /** AI model override */\n aiModel?: string;\n /** Working directory for AI sessions */\n aiWorkingDirectory?: string;\n /** Session manager for multi-turn conversations */\n sessionManager?: ConversationSessionManager;\n}\n\n/**\n * Create a request handler (listener) for the HTTP server.\n *\n * Routes:\n * GET / \u2192 SPA shell (modified index.html)\n * GET /api/* \u2192 API handlers\n * GET /static/* \u2192 Static files from wiki dir\n * GET /* \u2192 SPA fallback (for client-side routing)\n */\nexport function createRequestHandler(\n options: RouterOptions\n): (req: http.IncomingMessage, res: http.ServerResponse) => void {\n const { wikiData, spaHtml, aiEnabled, repoPath, contextBuilder, aiSendMessage, aiModel, aiWorkingDirectory, sessionManager } = options;\n\n return (req: http.IncomingMessage, res: http.ServerResponse) => {\n const parsedUrl = url.parse(req.url || '/', true);\n const pathname = decodeURIComponent(parsedUrl.pathname || '/');\n const method = req.method?.toUpperCase() || 'GET';\n\n // CORS headers for API requests\n res.setHeader('Access-Control-Allow-Origin', '*');\n res.setHeader('Access-Control-Allow-Methods', 'GET, POST, DELETE, OPTIONS');\n res.setHeader('Access-Control-Allow-Headers', 'Content-Type');\n\n // Handle CORS preflight\n if (method === 'OPTIONS') {\n res.writeHead(204);\n res.end();\n return;\n }\n\n // API routes\n if 
(pathname.startsWith('/api/')) {\n handleApiRequest(req, res, pathname, method, {\n wikiData,\n aiEnabled,\n repoPath,\n contextBuilder,\n aiSendMessage,\n aiModel,\n aiWorkingDirectory,\n sessionManager,\n });\n return;\n }\n\n // Static files from wiki directory (embedded-data.js, etc.)\n if (pathname !== '/' && pathname !== '/index.html') {\n const filePath = path.join(wikiData.dir, pathname);\n if (serveStaticFile(filePath, res)) {\n return;\n }\n }\n\n // SPA shell (index page or fallback for client-side routing)\n res.writeHead(200, { 'Content-Type': 'text/html; charset=utf-8' });\n res.end(spaHtml);\n };\n}\n\n// ============================================================================\n// Static File Server\n// ============================================================================\n\n/**\n * Serve a static file from the given path.\n * Returns true if the file was served, false if not found.\n */\nfunction serveStaticFile(filePath: string, res: http.ServerResponse): boolean {\n // Security: prevent directory traversal\n const normalizedPath = path.normalize(filePath);\n\n if (!fs.existsSync(normalizedPath)) {\n return false;\n }\n\n try {\n const stat = fs.statSync(normalizedPath);\n if (!stat.isFile()) {\n return false;\n }\n\n const ext = path.extname(normalizedPath).toLowerCase();\n const contentType = MIME_TYPES[ext] || DEFAULT_MIME;\n\n res.writeHead(200, {\n 'Content-Type': contentType,\n 'Content-Length': stat.size,\n 'Cache-Control': 'public, max-age=3600',\n });\n\n const stream = fs.createReadStream(normalizedPath);\n stream.pipe(res);\n stream.on('error', () => {\n if (!res.headersSent) {\n res.writeHead(500);\n }\n res.end();\n });\n\n return true;\n } catch {\n return false;\n }\n}\n\n// ============================================================================\n// Helpers\n// ============================================================================\n\n/**\n * Send a JSON response.\n */\nexport function sendJson(res: http.ServerResponse, data: unknown, statusCode = 200): void {\n const body = JSON.stringify(data);\n res.writeHead(statusCode, {\n 'Content-Type': 'application/json; charset=utf-8',\n 'Content-Length': Buffer.byteLength(body),\n });\n res.end(body);\n}\n\n/**\n * Send a 404 Not Found response.\n */\nexport function send404(res: http.ServerResponse, message = 'Not Found'): void {\n sendJson(res, { error: message }, 404);\n}\n\n/**\n * Send a 400 Bad Request response.\n */\nexport function send400(res: http.ServerResponse, message = 'Bad Request'): void {\n sendJson(res, { error: message }, 400);\n}\n\n/**\n * Send a 500 Internal Server Error response.\n */\nexport function send500(res: http.ServerResponse, message = 'Internal Server Error'): void {\n sendJson(res, { error: message }, 500);\n}\n\n/**\n * Read the request body as a string.\n */\nexport function readBody(req: http.IncomingMessage): Promise<string> {\n return new Promise((resolve, reject) => {\n const chunks: Buffer[] = [];\n req.on('data', (chunk: Buffer) => chunks.push(chunk));\n req.on('end', () => resolve(Buffer.concat(chunks).toString('utf-8')));\n req.on('error', reject);\n });\n}\n", "/**\n * SPA Template for Server Mode\n *\n * Generates a DeepWiki-style SPA that fetches data from the server's REST API.\n * Designed to match the real DeepWiki (deepwiki.com) UI:\n * - Top navigation bar with project name and dark/light toggle\n * - Collapsible left sidebar with nested navigation\n * - \"Relevant source files\" collapsible per article\n * - \"On this page\" right-hand TOC\n 
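`createRequestHandler` returns a plain Node listener, so standing up a read-only server (no AI wiring) is a few lines. The port, directory, and relative module paths below are assumptions for illustration:

```ts
// Sketch: serve a generated wiki directory with the router above.
import * as http from 'http';
import { WikiData } from './wiki-data';
import { createRequestHandler } from './router';
import { generateSpaHtml } from './spa-template'; // module path assumed

const wikiData = new WikiData('./wiki-output'); // placeholder path
wikiData.load();

const spaHtml = generateSpaHtml({
  theme: 'auto',
  title: wikiData.graph.project.name,
  enableSearch: true,
  enableAI: false, // no contextBuilder/aiSendMessage in this sketch
  enableGraph: true,
});

const server = http.createServer(
  createRequestHandler({ wikiData, spaHtml, aiEnabled: false }),
);
server.listen(3000, () => console.log('wiki server on http://localhost:3000'));
```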
* - Bottom \"Ask AI\" input bar (like DeepWiki's \"Ask Devin\")\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport type { WebsiteTheme } from '../types';\nimport { getMermaidZoomStyles, getMermaidZoomScript } from '../rendering/mermaid-zoom';\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface SpaTemplateOptions {\n /** Website theme */\n theme: WebsiteTheme;\n /** Project title */\n title: string;\n /** Enable search */\n enableSearch: boolean;\n /** Enable AI features (Ask panel) */\n enableAI: boolean;\n /** Enable interactive dependency graph */\n enableGraph: boolean;\n /** Enable watch mode (WebSocket live reload) */\n enableWatch?: boolean;\n}\n\n// ============================================================================\n// Public API\n// ============================================================================\n\n/**\n * Generate the SPA HTML for server mode.\n */\nexport function generateSpaHtml(options: SpaTemplateOptions): string {\n const { theme, title, enableSearch, enableAI, enableGraph, enableWatch = false } = options;\n\n const themeClass = theme === 'auto' ? '' : `class=\"${theme}-theme\"`;\n const themeMetaTag = theme === 'auto'\n ? '<meta name=\"color-scheme\" content=\"light dark\">'\n : '';\n\n return `<!DOCTYPE html>\n<html lang=\"en\" ${themeClass} data-theme=\"${theme}\">\n<head>\n <meta charset=\"UTF-8\">\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">\n ${themeMetaTag}\n <title>${escapeHtml(title)} \u2014 Wiki</title>\n\n <!-- Syntax Highlighting -->\n <link rel=\"stylesheet\" href=\"https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/github.min.css\" id=\"hljs-light\">\n <link rel=\"stylesheet\" href=\"https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/github-dark.min.css\" id=\"hljs-dark\" disabled>\n <script src=\"https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/highlight.min.js\"></script>\n\n <!-- Mermaid Diagrams -->\n <script src=\"https://cdn.jsdelivr.net/npm/mermaid@10/dist/mermaid.min.js\"></script>\n\n <!-- Markdown Parser -->\n <script src=\"https://cdn.jsdelivr.net/npm/marked/marked.min.js\"></script>\n\n${enableGraph ? ` <!-- D3.js for interactive dependency graph -->\n <script src=\"https://cdn.jsdelivr.net/npm/d3@7/dist/d3.min.js\"></script>` : ''}\n\n <style>\n${getSpaStyles(enableAI)}\n </style>\n</head>\n<body>\n <!-- Top Navigation Bar -->\n <header class=\"top-bar\" id=\"top-bar\">\n <div class=\"top-bar-left\">\n <span class=\"top-bar-logo\">DeepWiki</span>\n <span class=\"top-bar-project\" id=\"top-bar-project\">${escapeHtml(title)}</span>\n </div>\n <div class=\"top-bar-right\">\n <button class=\"top-bar-btn\" id=\"theme-toggle\" aria-label=\"Toggle theme\" title=\"Toggle theme\">&#9790;</button>\n </div>\n </header>\n\n <div class=\"app-layout\">\n <!-- Left Sidebar -->\n <aside class=\"sidebar\" id=\"sidebar\">\n <button class=\"sidebar-collapse-btn\" id=\"sidebar-collapse\" aria-label=\"Collapse sidebar\" title=\"Collapse sidebar\">&#x25C0;</button>\n${enableSearch ? 
` <div class=\"search-box\">\n <input type=\"text\" id=\"search\" placeholder=\"Search modules...\" aria-label=\"Search modules\">\n </div>` : ''}\n <nav id=\"nav-container\" class=\"sidebar-nav\"></nav>\n </aside>\n\n <!-- Main Content Area -->\n <main class=\"main-content\" id=\"main-content\">\n <div class=\"content-scroll\" id=\"content-scroll\">\n <div class=\"content-layout\">\n <article class=\"article\" id=\"article\">\n <div id=\"content\" class=\"markdown-body\">\n <div class=\"loading\">Loading wiki data...</div>\n </div>\n </article>\n\n <!-- Right TOC Sidebar -->\n <aside class=\"toc-sidebar\" id=\"toc-sidebar\">\n <div class=\"toc-container\" id=\"toc-container\">\n <h4 class=\"toc-title\">On this page</h4>\n <nav id=\"toc-nav\" class=\"toc-nav\"></nav>\n </div>\n </aside>\n </div>\n </div>\n </main>\n </div>\n\n${enableAI ? ` <!-- Floating Ask AI Widget -->\n <div class=\"ask-widget\" id=\"ask-widget\">\n <div class=\"ask-widget-header hidden\" id=\"ask-widget-header\">\n <span class=\"ask-widget-title\">Ask AI</span>\n <div class=\"ask-widget-actions\">\n <button class=\"ask-widget-clear\" id=\"ask-clear\" title=\"Clear conversation\">Clear</button>\n <button class=\"ask-widget-close\" id=\"ask-close\" aria-label=\"Close\">&times;</button>\n </div>\n </div>\n <div class=\"ask-messages hidden\" id=\"ask-messages\"></div>\n <div class=\"ask-widget-input\">\n <span class=\"ask-widget-label\" id=\"ask-widget-label\">Ask AI about <strong id=\"ask-bar-subject\">${escapeHtml(title)}</strong></span>\n <div class=\"ask-widget-input-row\">\n <textarea class=\"ask-widget-textarea\" id=\"ask-textarea\" placeholder=\"Ask about this codebase...\" rows=\"1\"></textarea>\n <button class=\"ask-widget-send\" id=\"ask-widget-send\" aria-label=\"Send question\">&#10148;</button>\n </div>\n </div>\n </div>` : ''}\n\n${enableWatch ? 
` <div class=\"live-reload-bar\" id=\"live-reload-bar\"></div>` : ''}\n\n <script>\n${getSpaScript({ enableSearch, enableAI, enableGraph, enableWatch, defaultTheme: theme })}\n </script>\n</body>\n</html>`;\n}\n\n// ============================================================================\n// Styles\n// ============================================================================\n\nfunction getSpaStyles(enableAI: boolean): string {\n let styles = ` :root {\n --sidebar-bg: #ffffff;\n --sidebar-header-bg: #ffffff;\n --sidebar-border: #e5e7eb;\n --sidebar-text: #1f2937;\n --sidebar-muted: #6b7280;\n --sidebar-hover: #f3f4f6;\n --sidebar-active-bg: #eff6ff;\n --sidebar-active-text: #2563eb;\n --sidebar-active-border: #2563eb;\n --content-bg: #ffffff;\n --content-text: #1f2937;\n --content-muted: #6b7280;\n --content-border: #e5e7eb;\n --header-bg: #ffffff;\n --header-shadow: rgba(0,0,0,0.06);\n --code-bg: #f3f4f6;\n --code-border: #e5e7eb;\n --link-color: #2563eb;\n --badge-high-bg: #ef4444;\n --badge-medium-bg: #f59e0b;\n --badge-low-bg: #22c55e;\n --card-bg: #ffffff;\n --card-border: #e5e7eb;\n --card-hover-border: #2563eb;\n --stat-bg: #f9fafb;\n --stat-border: #2563eb;\n --copy-btn-bg: rgba(0,0,0,0.05);\n --copy-btn-hover-bg: rgba(0,0,0,0.1);\n --search-bg: #f3f4f6;\n --search-text: #1f2937;\n --search-placeholder: #9ca3af;\n --topbar-bg: #18181b;\n --topbar-text: #ffffff;\n --topbar-muted: #a1a1aa;\n --source-pill-bg: #f3f4f6;\n --source-pill-border: #e5e7eb;\n --source-pill-text: #374151;\n --toc-active: #2563eb;\n --toc-text: #6b7280;\n --toc-hover: #374151;\n --ask-bar-bg: #f9fafb;\n --ask-bar-border: #e5e7eb;\n }\n\n .dark-theme,\n html[data-theme=\"dark\"] {\n --sidebar-bg: #111827;\n --sidebar-header-bg: #111827;\n --sidebar-border: #1f2937;\n --sidebar-text: #e5e7eb;\n --sidebar-muted: #9ca3af;\n --sidebar-hover: #1f2937;\n --sidebar-active-bg: #1e3a5f;\n --sidebar-active-text: #60a5fa;\n --content-bg: #0f172a;\n --content-text: #e5e7eb;\n --content-muted: #9ca3af;\n --content-border: #1f2937;\n --header-bg: #111827;\n --header-shadow: rgba(0,0,0,0.3);\n --code-bg: #1e293b;\n --code-border: #334155;\n --link-color: #60a5fa;\n --card-bg: #1e293b;\n --card-border: #334155;\n --stat-bg: #1e293b;\n --copy-btn-bg: rgba(255,255,255,0.08);\n --copy-btn-hover-bg: rgba(255,255,255,0.15);\n --search-bg: #1f2937;\n --search-text: #e5e7eb;\n --search-placeholder: #6b7280;\n --source-pill-bg: #1e293b;\n --source-pill-border: #334155;\n --source-pill-text: #d1d5db;\n --toc-active: #60a5fa;\n --toc-text: #9ca3af;\n --toc-hover: #e5e7eb;\n --ask-bar-bg: #111827;\n --ask-bar-border: #1f2937;\n }\n\n @media (prefers-color-scheme: dark) {\n html[data-theme=\"auto\"] {\n --sidebar-bg: #111827;\n --sidebar-header-bg: #111827;\n --sidebar-border: #1f2937;\n --sidebar-text: #e5e7eb;\n --sidebar-muted: #9ca3af;\n --sidebar-hover: #1f2937;\n --sidebar-active-bg: #1e3a5f;\n --sidebar-active-text: #60a5fa;\n --content-bg: #0f172a;\n --content-text: #e5e7eb;\n --content-muted: #9ca3af;\n --content-border: #1f2937;\n --header-bg: #111827;\n --header-shadow: rgba(0,0,0,0.3);\n --code-bg: #1e293b;\n --code-border: #334155;\n --link-color: #60a5fa;\n --card-bg: #1e293b;\n --card-border: #334155;\n --stat-bg: #1e293b;\n --copy-btn-bg: rgba(255,255,255,0.08);\n --copy-btn-hover-bg: rgba(255,255,255,0.15);\n --search-bg: #1f2937;\n --search-text: #e5e7eb;\n --search-placeholder: #6b7280;\n --source-pill-bg: #1e293b;\n --source-pill-border: #334155;\n --source-pill-text: #d1d5db;\n --toc-active: #60a5fa;\n 
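The light/dark palettes above are switched purely through CSS variables keyed off `html[data-theme]` (with `auto` deferring to `prefers-color-scheme`). The real toggle logic lives in `getSpaScript()`, which is not shown here; one way the `#theme-toggle` button could drive these selectors, as an assumption only:

```ts
// Hypothetical toggle sketch: flips data-theme so the dark-theme variables apply.
// Does not cover the highlight.js stylesheet swap the template also links.
const toggle = document.getElementById('theme-toggle');
toggle?.addEventListener('click', () => {
  const html = document.documentElement;
  const next = html.getAttribute('data-theme') === 'dark' ? 'light' : 'dark';
  html.setAttribute('data-theme', next);
});
```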
--toc-text: #9ca3af;\n --toc-hover: #e5e7eb;\n --ask-bar-bg: #111827;\n --ask-bar-border: #1f2937;\n }\n }\n\n * { margin: 0; padding: 0; box-sizing: border-box; }\n\n body {\n font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, sans-serif;\n display: flex;\n flex-direction: column;\n height: 100vh;\n overflow: hidden;\n background: var(--content-bg);\n color: var(--content-text);\n }\n\n /* ========== Top Bar ========== */\n .top-bar {\n height: 48px;\n background: var(--topbar-bg);\n color: var(--topbar-text);\n display: flex;\n align-items: center;\n justify-content: space-between;\n padding: 0 16px;\n flex-shrink: 0;\n z-index: 200;\n }\n .top-bar-left {\n display: flex;\n align-items: center;\n gap: 8px;\n font-size: 14px;\n }\n .top-bar-logo {\n font-weight: 700;\n font-size: 15px;\n letter-spacing: -0.01em;\n }\n .top-bar-project {\n color: var(--topbar-muted);\n font-weight: 400;\n }\n .top-bar-right {\n display: flex;\n align-items: center;\n gap: 8px;\n }\n .top-bar-btn {\n background: transparent;\n border: none;\n color: var(--topbar-muted);\n cursor: pointer;\n font-size: 18px;\n padding: 4px 8px;\n border-radius: 6px;\n transition: color 0.15s, background 0.15s;\n }\n .top-bar-btn:hover {\n color: var(--topbar-text);\n background: rgba(255,255,255,0.1);\n }\n\n /* ========== App Layout ========== */\n .app-layout {\n flex: 1;\n display: flex;\n overflow: hidden;\n min-height: 0;\n }\n\n /* ========== Sidebar ========== */\n .sidebar {\n width: 260px;\n min-width: 260px;\n background: var(--sidebar-bg);\n border-right: 1px solid var(--sidebar-border);\n overflow-y: auto;\n overflow-x: hidden;\n transition: width 0.25s, min-width 0.25s;\n position: relative;\n flex-shrink: 0;\n }\n .sidebar.collapsed {\n width: 0;\n min-width: 0;\n overflow: hidden;\n border-right: none;\n }\n .sidebar-collapse-btn {\n position: absolute;\n top: 10px;\n right: -14px;\n width: 28px;\n height: 28px;\n border-radius: 50%;\n background: var(--sidebar-bg);\n border: 1px solid var(--sidebar-border);\n color: var(--sidebar-muted);\n cursor: pointer;\n display: flex;\n align-items: center;\n justify-content: center;\n z-index: 101;\n font-size: 12px;\n transition: background 0.15s, transform 0.25s;\n padding: 0;\n line-height: 1;\n }\n .sidebar-collapse-btn:hover { background: var(--sidebar-hover); }\n .sidebar.collapsed .sidebar-collapse-btn {\n right: -36px;\n background: var(--content-bg);\n border-color: var(--content-border);\n }\n .sidebar.collapsed .sidebar-collapse-btn:hover { background: var(--code-bg); }\n\n .search-box { padding: 12px 14px 8px; }\n .search-box input {\n width: 100%;\n padding: 7px 10px;\n border: 1px solid var(--sidebar-border);\n border-radius: 6px;\n background: var(--search-bg);\n color: var(--search-text);\n font-size: 13px;\n outline: none;\n }\n .search-box input::placeholder { color: var(--search-placeholder); }\n .search-box input:focus { border-color: var(--sidebar-active-border); }\n\n .sidebar-nav { padding: 4px 0 16px; }\n\n .nav-section { padding: 4px 0; }\n .nav-section-title {\n font-size: 13px;\n font-weight: 600;\n color: var(--sidebar-text);\n padding: 8px 14px 4px;\n cursor: pointer;\n user-select: none;\n display: flex;\n align-items: center;\n gap: 4px;\n }\n .nav-section-title:hover { color: var(--sidebar-active-text); }\n .nav-section-arrow {\n font-size: 10px;\n transition: transform 0.2s;\n color: var(--sidebar-muted);\n }\n .nav-section.collapsed .nav-section-arrow { transform: rotate(-90deg); }\n 
.nav-section.collapsed .nav-section-items { display: none; }\n\n /* Area-based sidebar (DeepWiki-style hierarchy) */\n .nav-area-item {\n padding: 8px 14px;\n cursor: pointer;\n font-size: 14px;\n font-weight: 500;\n color: var(--sidebar-text);\n border-radius: 4px;\n margin: 1px 6px;\n display: flex;\n align-items: center;\n transition: background 0.1s, color 0.1s;\n }\n .nav-area-item:hover { background: var(--sidebar-hover); }\n .nav-area-item.active {\n background: var(--sidebar-active-bg);\n color: var(--sidebar-active-text);\n font-weight: 600;\n }\n .nav-area-item .nav-item-name { flex: 1; min-width: 0; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }\n\n .nav-area-children { padding-left: 8px; }\n .nav-area-module {\n padding: 5px 14px 5px 20px;\n cursor: pointer;\n font-size: 13px;\n color: var(--sidebar-muted);\n border-radius: 4px;\n margin: 1px 6px;\n display: flex;\n align-items: center;\n transition: background 0.1s, color 0.1s;\n }\n .nav-area-module:hover { background: var(--sidebar-hover); color: var(--sidebar-text); }\n .nav-area-module.active {\n background: var(--sidebar-active-bg);\n color: var(--sidebar-active-text);\n font-weight: 500;\n border-left: 3px solid var(--sidebar-active-border);\n }\n .nav-area-module .nav-item-name { flex: 1; min-width: 0; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }\n\n /* Category timestamp in sidebar (for non-area repos) */\n .nav-last-indexed {\n font-size: 12px;\n color: var(--sidebar-muted);\n padding: 12px 14px 4px;\n }\n\n .nav-item {\n padding: 6px 14px 6px 24px;\n cursor: pointer;\n font-size: 13px;\n color: var(--sidebar-text);\n border-radius: 4px;\n margin: 1px 6px;\n display: flex;\n align-items: center;\n justify-content: space-between;\n transition: background 0.1s, color 0.1s;\n }\n .nav-item:hover { background: var(--sidebar-hover); }\n .nav-item.active {\n background: var(--sidebar-active-bg);\n color: var(--sidebar-active-text);\n font-weight: 500;\n }\n .nav-item-name { flex: 1; min-width: 0; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }\n\n .complexity-badge {\n display: inline-block;\n padding: 1px 5px;\n border-radius: 3px;\n font-size: 10px;\n font-weight: 600;\n color: white;\n flex-shrink: 0;\n margin-left: 6px;\n }\n .complexity-high { background: var(--badge-high-bg); }\n .complexity-medium { background: var(--badge-medium-bg); }\n .complexity-low { background: var(--badge-low-bg); }\n\n /* ========== Main Content ========== */\n .main-content {\n flex: 1;\n display: flex;\n flex-direction: column;\n overflow: hidden;\n min-width: 0;\n position: relative;\n }\n\n .content-scroll {\n flex: 1;\n overflow-y: auto;\n overflow-x: hidden;\n }\n\n .content-layout {\n display: flex;\n margin: 0 auto;\n padding: 0 20px;\n min-height: 100%;\n }\n\n .article {\n flex: 1;\n min-width: 0;\n padding: 32px 40px 120px;\n }\n\n /* ========== Source Files Section ========== */\n .source-files-section {\n margin-bottom: 20px;\n border: 1px solid var(--content-border);\n border-radius: 8px;\n overflow: hidden;\n }\n .source-files-toggle {\n display: flex;\n align-items: center;\n gap: 8px;\n padding: 10px 14px;\n background: var(--stat-bg);\n border: none;\n width: 100%;\n cursor: pointer;\n font-size: 13px;\n color: var(--content-text);\n font-weight: 500;\n text-align: left;\n }\n .source-files-toggle:hover { background: var(--code-bg); }\n .source-files-arrow {\n font-size: 10px;\n transition: transform 0.2s;\n color: var(--content-muted);\n }\n 
.source-files-section.expanded .source-files-arrow { transform: rotate(90deg); }\n .source-files-list {\n display: none;\n padding: 10px 14px;\n gap: 6px;\n flex-wrap: wrap;\n }\n .source-files-section.expanded .source-files-list { display: flex; }\n .source-pill {\n display: inline-flex;\n align-items: center;\n gap: 4px;\n padding: 4px 10px;\n background: var(--source-pill-bg);\n border: 1px solid var(--source-pill-border);\n border-radius: 14px;\n font-size: 12px;\n color: var(--source-pill-text);\n cursor: default;\n }\n .source-pill-icon {\n font-size: 11px;\n color: var(--content-muted);\n }\n .source-pill-lines {\n background: var(--code-bg);\n border-radius: 8px;\n padding: 1px 6px;\n font-size: 10px;\n font-weight: 600;\n color: var(--content-muted);\n margin-left: 2px;\n }\n\n /* ========== TOC Sidebar ========== */\n .toc-sidebar {\n width: 220px;\n flex-shrink: 0;\n padding: 32px 16px 32px 0;\n position: sticky;\n top: 0;\n align-self: flex-start;\n max-height: calc(100vh - 48px);\n overflow-y: auto;\n }\n .toc-container {\n border-left: 1px solid var(--content-border);\n padding-left: 16px;\n }\n .toc-title {\n font-size: 12px;\n font-weight: 600;\n color: var(--content-muted);\n text-transform: uppercase;\n letter-spacing: 0.04em;\n margin-bottom: 10px;\n }\n .toc-nav a {\n display: block;\n padding: 3px 0;\n font-size: 13px;\n color: var(--toc-text);\n text-decoration: none;\n line-height: 1.5;\n transition: color 0.15s;\n border-left: 2px solid transparent;\n padding-left: 0;\n margin-left: -17px;\n padding-left: 15px;\n }\n .toc-nav a:hover { color: var(--toc-hover); }\n .toc-nav a.active {\n color: var(--toc-active);\n border-left-color: var(--toc-active);\n font-weight: 500;\n }\n .toc-nav a.toc-h3 { padding-left: 27px; font-size: 12px; }\n .toc-nav a.toc-h4 { padding-left: 39px; font-size: 12px; }\n\n /* ========== Markdown Body ========== */\n .markdown-body { line-height: 1.7; overflow-wrap: break-word; }\n .markdown-body h1 { margin-top: 32px; margin-bottom: 16px; font-size: 1.85em; font-weight: 700; border-bottom: 1px solid var(--content-border); padding-bottom: 8px; }\n .markdown-body h1:first-child { margin-top: 0; }\n .markdown-body h2 { margin-top: 28px; margin-bottom: 14px; font-size: 1.4em; font-weight: 600; border-bottom: 1px solid var(--content-border); padding-bottom: 6px; }\n .markdown-body h3 { margin-top: 22px; margin-bottom: 10px; font-size: 1.15em; font-weight: 600; }\n .markdown-body h4 { margin-top: 18px; margin-bottom: 8px; font-size: 1em; font-weight: 600; }\n .markdown-body p { margin-bottom: 14px; }\n .markdown-body > *:last-child { margin-bottom: 0; }\n .markdown-body code {\n background: var(--code-bg);\n padding: 2px 6px;\n border-radius: 4px;\n font-size: 85%;\n font-family: 'SFMono-Regular', Consolas, 'Liberation Mono', Menlo, monospace;\n }\n .markdown-body pre {\n background: var(--code-bg);\n border: 1px solid var(--code-border);\n padding: 14px;\n border-radius: 8px;\n overflow-x: auto;\n margin-bottom: 14px;\n position: relative;\n }\n .markdown-body pre code { background: none; padding: 0; border-radius: 0; font-size: 13px; }\n .markdown-body table { border-collapse: collapse; width: 100%; margin: 14px 0; display: block; overflow-x: auto; }\n .markdown-body table th, .markdown-body table td {\n border: 1px solid var(--content-border);\n padding: 8px 12px;\n text-align: left;\n }\n .markdown-body table th { background: var(--code-bg); font-weight: 600; }\n .markdown-body ul, .markdown-body ol { margin-bottom: 14px; padding-left: 2em; 
}\n .markdown-body li { margin-bottom: 4px; }\n .markdown-body a { color: var(--link-color); text-decoration: none; }\n .markdown-body a:hover { text-decoration: underline; }\n .markdown-body blockquote {\n border-left: 4px solid var(--content-border);\n padding: 8px 16px;\n margin: 14px 0;\n color: var(--content-muted);\n }\n .markdown-body img { max-width: 100%; border-radius: 8px; }\n .markdown-body hr { border: none; border-top: 1px solid var(--content-border); margin: 24px 0; }\n\n .heading-anchor {\n color: var(--content-muted);\n text-decoration: none;\n margin-left: 8px;\n opacity: 0;\n transition: opacity 0.15s;\n font-weight: 400;\n }\n .markdown-body h1:hover .heading-anchor,\n .markdown-body h2:hover .heading-anchor,\n .markdown-body h3:hover .heading-anchor,\n .markdown-body h4:hover .heading-anchor { opacity: 1; }\n\n .copy-btn {\n position: absolute;\n top: 8px;\n right: 8px;\n background: var(--copy-btn-bg);\n border: 1px solid var(--code-border);\n border-radius: 4px;\n padding: 3px 8px;\n cursor: pointer;\n font-size: 11px;\n color: var(--content-muted);\n opacity: 0;\n transition: opacity 0.15s;\n }\n .markdown-body pre:hover .copy-btn { opacity: 1; }\n .copy-btn:hover { background: var(--copy-btn-hover-bg); }\n\n /* ========== Home View ========== */\n .home-view { max-width: 100%; }\n .project-stats {\n display: grid;\n grid-template-columns: repeat(auto-fit, minmax(160px, 1fr));\n gap: 12px;\n margin: 20px 0;\n }\n .stat-card {\n background: var(--stat-bg);\n padding: 14px;\n border-radius: 8px;\n border-left: 4px solid var(--stat-border);\n }\n .stat-card h3 { font-size: 12px; color: var(--content-muted); margin-bottom: 4px; font-weight: 500; }\n .stat-card .value { font-size: 24px; font-weight: 700; color: var(--content-text); }\n .stat-card .value.small { font-size: 15px; }\n\n .module-grid {\n display: grid;\n grid-template-columns: repeat(auto-fill, minmax(220px, 1fr));\n gap: 10px;\n margin-top: 20px;\n }\n .module-card {\n background: var(--card-bg);\n border: 1px solid var(--card-border);\n border-radius: 8px;\n padding: 12px;\n cursor: pointer;\n transition: border-color 0.15s, box-shadow 0.15s;\n }\n .module-card:hover {\n border-color: var(--card-hover-border);\n box-shadow: 0 2px 8px rgba(0,0,0,0.06);\n }\n .module-card h4 { margin-bottom: 4px; font-size: 14px; }\n .module-card p { font-size: 12px; color: var(--content-muted); line-height: 1.4; }\n\n .loading {\n text-align: center;\n padding: 48px;\n color: var(--content-muted);\n font-size: 15px;\n }\n\n${getMermaidZoomStyles()}\n\n /* ========== Dependency Graph ========== */\n .graph-container { width: 100%; height: 100%; position: relative; }\n .graph-container svg { width: 100%; height: 100%; }\n .graph-toolbar {\n position: absolute;\n top: 12px;\n right: 12px;\n display: flex;\n gap: 6px;\n z-index: 10;\n }\n .graph-toolbar button {\n background: var(--card-bg);\n border: 1px solid var(--content-border);\n border-radius: 4px;\n padding: 4px 10px;\n cursor: pointer;\n font-size: 13px;\n color: var(--content-text);\n }\n .graph-toolbar button:hover { background: var(--code-bg); }\n .graph-legend {\n position: absolute;\n bottom: 12px;\n left: 12px;\n background: var(--card-bg);\n border: 1px solid var(--content-border);\n border-radius: 8px;\n padding: 10px 14px;\n z-index: 10;\n font-size: 12px;\n }\n .graph-legend-title { font-weight: 600; margin-bottom: 6px; color: var(--content-text); }\n .graph-legend-item {\n display: flex;\n align-items: center;\n gap: 6px;\n margin-bottom: 4px;\n cursor: 
pointer;\n user-select: none;\n }\n .graph-legend-item.disabled { opacity: 0.3; }\n .graph-legend-swatch { width: 12px; height: 12px; border-radius: 3px; }\n .graph-node text { font-size: 11px; fill: var(--content-text); pointer-events: none; }\n .graph-link { stroke: var(--content-border); stroke-opacity: 0.6; }\n .graph-link-arrow { fill: var(--content-border); fill-opacity: 0.6; }\n .graph-tooltip {\n position: absolute;\n background: var(--card-bg);\n border: 1px solid var(--content-border);\n border-radius: 6px;\n padding: 8px 12px;\n font-size: 12px;\n pointer-events: none;\n z-index: 20;\n box-shadow: 0 4px 12px rgba(0,0,0,0.15);\n max-width: 250px;\n }\n .graph-tooltip-name { font-weight: 600; margin-bottom: 4px; }\n .graph-tooltip-purpose { color: var(--content-muted); line-height: 1.4; }\n\n /* ========== Deep Dive ========== */\n .deep-dive-btn {\n display: inline-flex;\n align-items: center;\n gap: 6px;\n background: var(--stat-bg);\n color: var(--link-color);\n border: 1px solid var(--content-border);\n border-radius: 6px;\n padding: 6px 14px;\n cursor: pointer;\n font-size: 13px;\n font-weight: 500;\n margin-bottom: 16px;\n transition: background 0.15s, border-color 0.15s;\n }\n .deep-dive-btn:hover { background: var(--code-bg); border-color: var(--link-color); }\n .deep-dive-section {\n margin-top: 16px;\n padding: 14px;\n background: var(--stat-bg);\n border: 1px solid var(--content-border);\n border-radius: 8px;\n }\n .deep-dive-input-area { display: flex; gap: 8px; margin-bottom: 12px; }\n .deep-dive-input {\n flex: 1;\n padding: 8px 12px;\n border: 1px solid var(--content-border);\n border-radius: 6px;\n font-size: 13px;\n background: var(--content-bg);\n color: var(--content-text);\n outline: none;\n font-family: inherit;\n }\n .deep-dive-input:focus { border-color: var(--sidebar-active-border); }\n .deep-dive-input::placeholder { color: var(--content-muted); }\n .deep-dive-submit {\n background: var(--sidebar-active-border);\n color: white;\n border: none;\n border-radius: 6px;\n padding: 8px 14px;\n cursor: pointer;\n font-size: 13px;\n white-space: nowrap;\n }\n .deep-dive-submit:hover { opacity: 0.9; }\n .deep-dive-submit:disabled { opacity: 0.5; cursor: not-allowed; }\n .deep-dive-result { margin-top: 12px; }\n .deep-dive-status { font-size: 12px; color: var(--content-muted); margin-bottom: 8px; }\n\n /* ========== Live Reload ========== */\n .live-reload-bar {\n position: fixed;\n top: 0;\n left: 0;\n right: 0;\n z-index: 1000;\n padding: 8px 16px;\n font-size: 13px;\n text-align: center;\n transition: transform 0.3s;\n transform: translateY(-100%);\n }\n .live-reload-bar.visible { transform: translateY(0); }\n .live-reload-bar.rebuilding { background: var(--badge-medium-bg); color: white; }\n .live-reload-bar.reloaded { background: var(--badge-low-bg); color: white; }\n .live-reload-bar.error { background: var(--badge-high-bg); color: white; }\n\n /* ========== Responsive ========== */\n @media (max-width: 1024px) {\n .toc-sidebar { display: none; }\n }\n @media (max-width: 768px) {\n .sidebar { position: fixed; z-index: 100; height: calc(100vh - 48px); top: 48px; }\n .sidebar.collapsed { width: 0; min-width: 0; }\n .sidebar-collapse-btn { display: none; }\n .article { padding: 16px 20px; }\n }`;\n\n if (enableAI) {\n styles += `\n\n /* ========== Ask AI Floating Widget ========== */\n .ask-widget {\n position: fixed;\n bottom: 24px;\n left: 50%;\n transform: translateX(-50%);\n z-index: 200;\n background: var(--ask-bar-bg);\n border-radius: 16px;\n border: 1px 
solid var(--ask-bar-border);\n box-shadow: 0 4px 24px rgba(0, 0, 0, 0.12);\n width: 720px;\n max-width: calc(100vw - 40px);\n display: flex;\n flex-direction: column;\n max-height: calc(100vh - 100px);\n transition: box-shadow 0.2s;\n }\n .ask-widget.expanded {\n box-shadow: 0 8px 40px rgba(0, 0, 0, 0.18);\n }\n\n /* Widget header (visible when expanded) */\n .ask-widget-header {\n display: flex;\n justify-content: space-between;\n align-items: center;\n padding: 10px 16px;\n border-bottom: 1px solid var(--content-border);\n flex-shrink: 0;\n }\n .ask-widget-header.hidden { display: none; }\n .ask-widget-title { font-size: 13px; font-weight: 600; color: var(--content-text); }\n .ask-widget-actions { display: flex; gap: 6px; align-items: center; }\n .ask-widget-clear {\n background: none;\n border: 1px solid var(--content-border);\n border-radius: 4px;\n padding: 2px 8px;\n cursor: pointer;\n font-size: 11px;\n color: var(--content-muted);\n }\n .ask-widget-clear:hover { background: var(--code-bg); }\n .ask-widget-close {\n background: none;\n border: none;\n font-size: 18px;\n cursor: pointer;\n color: var(--content-muted);\n padding: 0 4px;\n line-height: 1;\n }\n .ask-widget-close:hover { color: var(--content-text); }\n\n /* Messages area (visible when expanded) */\n .ask-messages {\n flex: 1;\n overflow-y: auto;\n padding: 16px;\n min-height: 0;\n }\n .ask-messages.hidden { display: none; }\n .ask-message { margin-bottom: 14px; line-height: 1.5; }\n .ask-message-user {\n background: var(--sidebar-active-border);\n color: white;\n padding: 8px 12px;\n border-radius: 12px 12px 4px 12px;\n max-width: 85%;\n margin-left: auto;\n word-wrap: break-word;\n }\n .ask-message-assistant {\n background: var(--code-bg);\n color: var(--content-text);\n padding: 10px 14px;\n border-radius: 12px 12px 12px 4px;\n max-width: 95%;\n word-wrap: break-word;\n }\n .ask-message-assistant .markdown-body { max-width: 100%; font-size: 13px; line-height: 1.5; }\n .ask-message-assistant .markdown-body p { margin-bottom: 8px; }\n .ask-message-assistant .markdown-body p:last-child { margin-bottom: 0; }\n .ask-message-context {\n font-size: 11px;\n color: var(--content-muted);\n margin-bottom: 8px;\n padding: 6px 10px;\n background: var(--stat-bg);\n border-radius: 6px;\n border: 1px solid var(--content-border);\n }\n .ask-message-context a { color: var(--link-color); cursor: pointer; text-decoration: none; }\n .ask-message-context a:hover { text-decoration: underline; }\n .ask-message-error {\n color: var(--badge-high-bg);\n font-size: 12px;\n padding: 8px 12px;\n background: var(--code-bg);\n border-radius: 8px;\n border: 1px solid var(--badge-high-bg);\n }\n .ask-message-typing {\n display: inline-block;\n color: var(--content-muted);\n font-size: 12px;\n }\n .ask-message-typing::after {\n content: '...';\n animation: typing 1s infinite;\n }\n @keyframes typing {\n 0%, 33% { content: '.'; }\n 34%, 66% { content: '..'; }\n 67%, 100% { content: '...'; }\n }\n\n /* Input area (always visible) */\n .ask-widget-input {\n padding: 12px 16px;\n flex-shrink: 0;\n }\n .ask-widget-label {\n display: block;\n font-size: 12px;\n color: var(--content-muted);\n margin-bottom: 6px;\n }\n .ask-widget-label strong { color: var(--content-text); }\n .ask-widget.expanded .ask-widget-label { display: none; }\n .ask-widget.expanded .ask-widget-input {\n border-top: 1px solid var(--content-border);\n }\n .ask-widget-input-row {\n display: flex;\n gap: 8px;\n }\n .ask-widget-textarea {\n flex: 1;\n padding: 9px 14px;\n border: 1px solid 
var(--content-border);\n border-radius: 8px;\n font-size: 14px;\n background: var(--content-bg);\n color: var(--content-text);\n outline: none;\n resize: none;\n min-height: 38px;\n max-height: 120px;\n font-family: inherit;\n line-height: 1.4;\n }\n .ask-widget-textarea:focus { border-color: var(--sidebar-active-border); }\n .ask-widget-textarea::placeholder { color: var(--content-muted); }\n .ask-widget-send {\n background: var(--sidebar-active-border);\n color: white;\n border: none;\n border-radius: 8px;\n padding: 0 14px;\n cursor: pointer;\n font-size: 16px;\n flex-shrink: 0;\n align-self: flex-end;\n height: 38px;\n }\n .ask-widget-send:hover { opacity: 0.9; }\n .ask-widget-send:disabled { opacity: 0.5; cursor: not-allowed; }\n\n /* ========== Ask AI Responsive ========== */\n @media (max-width: 768px) {\n .ask-widget { bottom: 12px; border-radius: 12px; }\n .ask-widget-label { font-size: 11px; margin-bottom: 4px; }\n }`;\n }\n\n return styles;\n}\n\n// ============================================================================\n// JavaScript\n// ============================================================================\n\ninterface ScriptOptions {\n enableSearch: boolean;\n enableAI: boolean;\n enableGraph: boolean;\n enableWatch: boolean;\n defaultTheme: WebsiteTheme;\n}\n\nfunction getSpaScript(opts: ScriptOptions): string {\n return ` // ====================================================================\n // Deep Wiki \u2014 Server Mode SPA\n // ====================================================================\n\n var moduleGraph = null;\n var currentModuleId = null;\n var currentTheme = '${opts.defaultTheme}';\n var markdownCache = {};\n\n // Initialize\n init();\n\n async function init() {\n try {\n var res = await fetch('/api/graph');\n if (!res.ok) throw new Error('Failed to load module graph');\n moduleGraph = await res.json();\n\n initTheme();\n initializeSidebar();\n showHome(true);\n history.replaceState({ type: 'home' }, '', location.pathname);\n } catch(err) {\n document.getElementById('content').innerHTML =\n '<p style=\"color: red;\">Error loading wiki data: ' + err.message + '</p>';\n }\n }\n\n // ================================================================\n // Browser History\n // ================================================================\n\n window.addEventListener('popstate', function(e) {\n var state = e.state;\n if (!state) { showHome(true); return; }\n if (state.type === 'home') showHome(true);\n else if (state.type === 'module' && state.id) loadModule(state.id, true);\n else if (state.type === 'special' && state.key && state.title) loadSpecialPage(state.key, state.title, true);\n else if (state.type === 'graph') { if (typeof showGraph === 'function') showGraph(true); else showHome(true); }\n else showHome(true);\n });\n\n // ================================================================\n // Theme\n // ================================================================\n\n function initTheme() {\n var saved = localStorage.getItem('deep-wiki-theme');\n if (saved) {\n currentTheme = saved;\n document.documentElement.setAttribute('data-theme', currentTheme);\n }\n updateThemeStyles();\n }\n\n function toggleTheme() {\n if (currentTheme === 'auto') currentTheme = 'dark';\n else if (currentTheme === 'dark') currentTheme = 'light';\n else currentTheme = 'auto';\n document.documentElement.setAttribute('data-theme', currentTheme);\n localStorage.setItem('deep-wiki-theme', currentTheme);\n updateThemeStyles();\n }\n\n function updateThemeStyles() {\n 
var isDark = currentTheme === 'dark' ||\n (currentTheme === 'auto' && window.matchMedia('(prefers-color-scheme: dark)').matches);\n var ls = document.getElementById('hljs-light');\n var ds = document.getElementById('hljs-dark');\n if (ls) ls.disabled = isDark;\n if (ds) ds.disabled = !isDark;\n var btn = document.getElementById('theme-toggle');\n if (btn) btn.textContent = isDark ? '\\\\u2600' : '\\\\u263E';\n }\n\n window.matchMedia('(prefers-color-scheme: dark)').addEventListener('change', updateThemeStyles);\n document.getElementById('theme-toggle').addEventListener('click', toggleTheme);\n\n // Sidebar collapse\n document.getElementById('sidebar-collapse').addEventListener('click', function() {\n var sidebar = document.getElementById('sidebar');\n var isCollapsed = sidebar.classList.toggle('collapsed');\n updateSidebarCollapseBtn(isCollapsed);\n localStorage.setItem('deep-wiki-sidebar-collapsed', isCollapsed ? 'true' : 'false');\n });\n\n function updateSidebarCollapseBtn(isCollapsed) {\n var btn = document.getElementById('sidebar-collapse');\n if (isCollapsed) {\n btn.innerHTML = '&#x25B6;';\n btn.title = 'Expand sidebar';\n btn.setAttribute('aria-label', 'Expand sidebar');\n } else {\n btn.innerHTML = '&#x25C0;';\n btn.title = 'Collapse sidebar';\n btn.setAttribute('aria-label', 'Collapse sidebar');\n }\n }\n\n // Restore sidebar collapsed state\n (function restoreSidebarState() {\n var saved = localStorage.getItem('deep-wiki-sidebar-collapsed');\n if (saved === 'true') {\n document.getElementById('sidebar').classList.add('collapsed');\n updateSidebarCollapseBtn(true);\n }\n })();\n\n // ================================================================\n // Sidebar Navigation\n // ================================================================\n\n function initializeSidebar() {\n document.getElementById('top-bar-project').textContent = moduleGraph.project.name;\n\n var navContainer = document.getElementById('nav-container');\n var hasAreas = moduleGraph.areas && moduleGraph.areas.length > 0;\n\n // Home + special items\n var homeSection = document.createElement('div');\n homeSection.className = 'nav-section';\n homeSection.innerHTML =\n '<div class=\"nav-item active\" data-id=\"__home\" onclick=\"showHome()\">' +\n '<span class=\"nav-item-name\">Overview</span></div>' +\n${opts.enableGraph ? ` '<div class=\"nav-item\" data-id=\"__graph\" onclick=\"showGraph()\">' +\n '<span class=\"nav-item-name\">Dependency Graph</span></div>' +` : ''}\n '';\n navContainer.appendChild(homeSection);\n\n if (hasAreas) {\n // DeepWiki-style: areas as top-level, modules indented underneath\n buildAreaSidebar(navContainer);\n } else {\n // Fallback: category-based grouping\n buildCategorySidebar(navContainer);\n }\n${opts.enableSearch ? `\n document.getElementById('search').addEventListener('input', function(e) {\n var query = e.target.value.toLowerCase();\n // Search area-based items\n document.querySelectorAll('.nav-area-module[data-id], .nav-item[data-id]').forEach(function(item) {\n var id = item.getAttribute('data-id');\n if (id === '__home' || id === '__graph') return;\n var text = item.textContent.toLowerCase();\n item.style.display = text.includes(query) ? 
'' : 'none';\n });\n // Hide area headers when no children match\n document.querySelectorAll('.nav-area-group').forEach(function(group) {\n var visibleChildren = group.querySelectorAll('.nav-area-module:not([style*=\"display: none\"])');\n var areaItem = group.querySelector('.nav-area-item');\n if (areaItem) {\n areaItem.style.display = visibleChildren.length === 0 ? 'none' : '';\n }\n var childrenEl = group.querySelector('.nav-area-children');\n if (childrenEl) {\n childrenEl.style.display = visibleChildren.length === 0 ? 'none' : '';\n }\n });\n // Hide category sections when no children match\n document.querySelectorAll('.nav-section').forEach(function(section) {\n var title = section.querySelector('.nav-section-title');\n if (!title) return;\n var visible = section.querySelectorAll('.nav-item[data-id]:not([style*=\"display: none\"])');\n title.style.display = visible.length === 0 ? 'none' : '';\n });\n });` : ''}\n }\n\n // Build area-based sidebar (DeepWiki-style hierarchy)\n function buildAreaSidebar(navContainer) {\n // Build a map of area ID \u2192 area info\n var areaMap = {};\n moduleGraph.areas.forEach(function(area) {\n areaMap[area.id] = area;\n });\n\n // Build a map of area ID \u2192 modules\n var areaModules = {};\n moduleGraph.areas.forEach(function(area) {\n areaModules[area.id] = [];\n });\n\n // Assign modules to their areas\n moduleGraph.modules.forEach(function(mod) {\n var areaId = mod.area;\n if (areaId && areaModules[areaId]) {\n areaModules[areaId].push(mod);\n } else {\n // Try to find area by module ID listed in area.modules\n var found = false;\n moduleGraph.areas.forEach(function(area) {\n if (area.modules && area.modules.indexOf(mod.id) !== -1) {\n areaModules[area.id].push(mod);\n found = true;\n }\n });\n if (!found) {\n // Put unassigned modules in an \"Other\" group\n if (!areaModules['__other']) areaModules['__other'] = [];\n areaModules['__other'].push(mod);\n }\n }\n });\n\n // Render each area with its modules\n moduleGraph.areas.forEach(function(area) {\n var modules = areaModules[area.id] || [];\n if (modules.length === 0) return;\n\n var group = document.createElement('div');\n group.className = 'nav-area-group';\n\n // Area header (top-level item)\n var areaItem = document.createElement('div');\n areaItem.className = 'nav-area-item';\n areaItem.setAttribute('data-area-id', area.id);\n areaItem.innerHTML = '<span class=\"nav-item-name\">' + escapeHtml(area.name) + '</span>';\n group.appendChild(areaItem);\n\n // Module children (indented)\n var childrenEl = document.createElement('div');\n childrenEl.className = 'nav-area-children';\n\n modules.forEach(function(mod) {\n var item = document.createElement('div');\n item.className = 'nav-area-module';\n item.setAttribute('data-id', mod.id);\n item.innerHTML = '<span class=\"nav-item-name\">' + escapeHtml(mod.name) + '</span>';\n item.onclick = function() { loadModule(mod.id); };\n childrenEl.appendChild(item);\n });\n\n group.appendChild(childrenEl);\n navContainer.appendChild(group);\n });\n\n // Render unassigned modules if any\n var otherModules = areaModules['__other'] || [];\n if (otherModules.length > 0) {\n var group = document.createElement('div');\n group.className = 'nav-area-group';\n var areaItem = document.createElement('div');\n areaItem.className = 'nav-area-item';\n areaItem.innerHTML = '<span class=\"nav-item-name\">Other</span>';\n group.appendChild(areaItem);\n\n var childrenEl = document.createElement('div');\n childrenEl.className = 'nav-area-children';\n 
otherModules.forEach(function(mod) {\n var item = document.createElement('div');\n item.className = 'nav-area-module';\n item.setAttribute('data-id', mod.id);\n item.innerHTML = '<span class=\"nav-item-name\">' + escapeHtml(mod.name) + '</span>';\n item.onclick = function() { loadModule(mod.id); };\n childrenEl.appendChild(item);\n });\n group.appendChild(childrenEl);\n navContainer.appendChild(group);\n }\n }\n\n // Build category-based sidebar (fallback for non-area repos)\n // Uses the same visual style as area-based sidebar (DeepWiki-style)\n function buildCategorySidebar(navContainer) {\n var categories = {};\n moduleGraph.modules.forEach(function(mod) {\n var cat = mod.category || 'other';\n if (!categories[cat]) categories[cat] = [];\n categories[cat].push(mod);\n });\n\n Object.keys(categories).sort().forEach(function(category) {\n var group = document.createElement('div');\n group.className = 'nav-area-group';\n\n // Category header (same style as area header)\n var catItem = document.createElement('div');\n catItem.className = 'nav-area-item';\n catItem.innerHTML = '<span class=\"nav-item-name\">' + escapeHtml(category) + '</span>';\n group.appendChild(catItem);\n\n // Module children (indented)\n var childrenEl = document.createElement('div');\n childrenEl.className = 'nav-area-children';\n\n categories[category].forEach(function(mod) {\n var item = document.createElement('div');\n item.className = 'nav-area-module';\n item.setAttribute('data-id', mod.id);\n item.innerHTML = '<span class=\"nav-item-name\">' + escapeHtml(mod.name) + '</span>';\n item.onclick = function() { loadModule(mod.id); };\n childrenEl.appendChild(item);\n });\n\n group.appendChild(childrenEl);\n navContainer.appendChild(group);\n });\n }\n\n function setActive(id) {\n document.querySelectorAll('.nav-item, .nav-area-module, .nav-area-item').forEach(function(el) {\n el.classList.remove('active');\n });\n var target = document.querySelector('.nav-item[data-id=\"' + id + '\"]') ||\n document.querySelector('.nav-area-module[data-id=\"' + id + '\"]');\n if (target) target.classList.add('active');\n }\n\n // ================================================================\n // Content Loading\n // ================================================================\n\n function showHome(skipHistory) {\n currentModuleId = null;\n setActive('__home');\n document.getElementById('toc-nav').innerHTML = '';\n if (!skipHistory) {\n history.pushState({ type: 'home' }, '', location.pathname);\n }\n${opts.enableAI ? 
` updateAskSubject(moduleGraph.project.name);` : ''}\n\n var stats = {\n modules: moduleGraph.modules.length,\n categories: (moduleGraph.categories || []).length,\n language: moduleGraph.project.language,\n buildSystem: moduleGraph.project.buildSystem,\n };\n\n var html = '<div class=\"home-view\">' +\n '<h1>' + escapeHtml(moduleGraph.project.name) + '</h1>' +\n '<p style=\"font-size: 15px; color: var(--content-muted); margin-bottom: 24px;\">' +\n escapeHtml(moduleGraph.project.description) + '</p>' +\n '<div class=\"project-stats\">' +\n '<div class=\"stat-card\"><h3>Modules</h3><div class=\"value\">' + stats.modules + '</div></div>' +\n '<div class=\"stat-card\"><h3>Categories</h3><div class=\"value\">' + stats.categories + '</div></div>' +\n '<div class=\"stat-card\"><h3>Language</h3><div class=\"value small\">' + escapeHtml(stats.language) + '</div></div>' +\n '<div class=\"stat-card\"><h3>Build System</h3><div class=\"value small\">' + escapeHtml(stats.buildSystem) + '</div></div>' +\n '</div>';\n\n var hasAreas = moduleGraph.areas && moduleGraph.areas.length > 0;\n if (hasAreas) {\n // Group modules by area for the overview\n moduleGraph.areas.forEach(function(area) {\n var areaModules = moduleGraph.modules.filter(function(mod) {\n if (mod.area === area.id) return true;\n return area.modules && area.modules.indexOf(mod.id) !== -1;\n });\n if (areaModules.length === 0) return;\n\n html += '<h3 style=\"margin-top: 24px; margin-bottom: 12px;\">' + escapeHtml(area.name) + '</h3>';\n if (area.description) {\n html += '<p style=\"color: var(--content-muted); margin-bottom: 12px; font-size: 14px;\">' +\n escapeHtml(area.description) + '</p>';\n }\n html += '<div class=\"module-grid\">';\n areaModules.forEach(function(mod) {\n html += '<div class=\"module-card\" onclick=\"loadModule(\\\\'' +\n mod.id.replace(/'/g, \"\\\\\\\\'\") + '\\\\')\">' +\n '<h4>' + escapeHtml(mod.name) +\n ' <span class=\"complexity-badge complexity-' + mod.complexity + '\">' +\n mod.complexity + '</span></h4>' +\n '<p>' + escapeHtml(mod.purpose) + '</p></div>';\n });\n html += '</div>';\n });\n\n // Show unassigned modules if any\n var assignedIds = new Set();\n moduleGraph.areas.forEach(function(area) {\n moduleGraph.modules.forEach(function(mod) {\n if (mod.area === area.id || (area.modules && area.modules.indexOf(mod.id) !== -1)) {\n assignedIds.add(mod.id);\n }\n });\n });\n var unassigned = moduleGraph.modules.filter(function(mod) { return !assignedIds.has(mod.id); });\n if (unassigned.length > 0) {\n html += '<h3 style=\"margin-top: 24px; margin-bottom: 12px;\">Other</h3><div class=\"module-grid\">';\n unassigned.forEach(function(mod) {\n html += '<div class=\"module-card\" onclick=\"loadModule(\\\\'' +\n mod.id.replace(/'/g, \"\\\\\\\\'\") + '\\\\')\">' +\n '<h4>' + escapeHtml(mod.name) +\n ' <span class=\"complexity-badge complexity-' + mod.complexity + '\">' +\n mod.complexity + '</span></h4>' +\n '<p>' + escapeHtml(mod.purpose) + '</p></div>';\n });\n html += '</div>';\n }\n } else {\n html += '<h3 style=\"margin-top: 24px; margin-bottom: 12px;\">All Modules</h3><div class=\"module-grid\">';\n moduleGraph.modules.forEach(function(mod) {\n html += '<div class=\"module-card\" onclick=\"loadModule(\\\\'' +\n mod.id.replace(/'/g, \"\\\\\\\\'\") + '\\\\')\">' +\n '<h4>' + escapeHtml(mod.name) +\n ' <span class=\"complexity-badge complexity-' + mod.complexity + '\">' +\n mod.complexity + '</span></h4>' +\n '<p>' + escapeHtml(mod.purpose) + '</p></div>';\n });\n html += '</div>';\n }\n\n html += '</div>';\n\n 
document.getElementById('content').innerHTML = html;\n document.getElementById('content-scroll').scrollTop = 0;\n }\n\n async function loadModule(moduleId, skipHistory) {\n var mod = moduleGraph.modules.find(function(m) { return m.id === moduleId; });\n if (!mod) return;\n\n currentModuleId = moduleId;\n setActive(moduleId);\n if (!skipHistory) {\n history.pushState({ type: 'module', id: moduleId }, '', location.pathname + '#module-' + encodeURIComponent(moduleId));\n }\n${opts.enableAI ? ` updateAskSubject(mod.name);` : ''}\n\n // Check cache\n if (markdownCache[moduleId]) {\n renderModulePage(mod, markdownCache[moduleId]);\n document.getElementById('content-scroll').scrollTop = 0;\n return;\n }\n\n // Fetch from API\n document.getElementById('content').innerHTML = '<div class=\"loading\">Loading module...</div>';\n try {\n var res = await fetch('/api/modules/' + encodeURIComponent(moduleId));\n if (!res.ok) throw new Error('Failed to load module');\n var data = await res.json();\n if (data.markdown) {\n markdownCache[moduleId] = data.markdown;\n renderModulePage(mod, data.markdown);\n } else {\n document.getElementById('content').innerHTML =\n '<div class=\"markdown-body\"><h2>' + escapeHtml(mod.name) + '</h2>' +\n '<p>' + escapeHtml(mod.purpose) + '</p></div>';\n }\n } catch(err) {\n document.getElementById('content').innerHTML =\n '<p style=\"color: red;\">Error loading module: ' + err.message + '</p>';\n }\n document.getElementById('content-scroll').scrollTop = 0;\n }\n\n function renderModulePage(mod, markdown) {\n var html = '';\n\n // Source files section\n if (mod.keyFiles && mod.keyFiles.length > 0) {\n html += '<div class=\"source-files-section\" id=\"source-files\">' +\n '<button class=\"source-files-toggle\" onclick=\"toggleSourceFiles()\">' +\n '<span class=\"source-files-arrow\">&#x25B6;</span> Relevant source files' +\n '</button>' +\n '<div class=\"source-files-list\">';\n mod.keyFiles.forEach(function(f) {\n html += '<span class=\"source-pill\"><span class=\"source-pill-icon\">&#9671;</span> ' +\n escapeHtml(f) + '</span>';\n });\n html += '</div></div>';\n }\n\n // Markdown content\n html += '<div class=\"markdown-body\">' + marked.parse(markdown) + '</div>';\n document.getElementById('content').innerHTML = html;\n\n // Post-processing\n processMarkdownContent();\n buildToc();\n${opts.enableAI ? 
` addDeepDiveButton(mod.id);` : ''}\n }\n\n function toggleSourceFiles() {\n var section = document.getElementById('source-files');\n if (section) section.classList.toggle('expanded');\n }\n\n async function loadSpecialPage(key, title, skipHistory) {\n currentModuleId = null;\n setActive(key);\n if (!skipHistory) {\n history.pushState({ type: 'special', key: key, title: title }, '', location.pathname + '#' + encodeURIComponent(key));\n }\n\n var cacheKey = '__page_' + key;\n if (markdownCache[cacheKey]) {\n renderMarkdownContent(markdownCache[cacheKey]);\n buildToc();\n document.getElementById('content-scroll').scrollTop = 0;\n return;\n }\n\n document.getElementById('content').innerHTML = '<div class=\"loading\">Loading page...</div>';\n try {\n var res = await fetch('/api/pages/' + encodeURIComponent(key));\n if (!res.ok) throw new Error('Page not found');\n var data = await res.json();\n markdownCache[cacheKey] = data.markdown;\n renderMarkdownContent(data.markdown);\n buildToc();\n } catch(err) {\n document.getElementById('content').innerHTML = '<p>Content not available.</p>';\n }\n document.getElementById('content-scroll').scrollTop = 0;\n }\n\n // ================================================================\n // Markdown Rendering\n // ================================================================\n\n function renderMarkdownContent(markdown) {\n var html = marked.parse(markdown);\n var container = document.getElementById('content');\n container.innerHTML = '<div class=\"markdown-body\">' + html + '</div>';\n processMarkdownContent();\n }\n\n function processMarkdownContent() {\n var container = document.getElementById('content');\n var body = container.querySelector('.markdown-body');\n if (!body) return;\n\n body.querySelectorAll('pre code').forEach(function(block) {\n if (block.classList.contains('language-mermaid')) {\n var pre = block.parentElement;\n var mermaidCode = block.textContent;\n // Create container with zoom controls (shared structure from mermaid-zoom)\n var mContainer = document.createElement('div');\n mContainer.className = 'mermaid-container';\n mContainer.innerHTML =\n '<div class=\"mermaid-toolbar\">' +\n '<span class=\"mermaid-toolbar-label\">Diagram</span>' +\n '<button class=\"mermaid-zoom-btn mermaid-zoom-out\" title=\"Zoom out\">\\\\u2212</button>' +\n '<span class=\"mermaid-zoom-level\">100%</span>' +\n '<button class=\"mermaid-zoom-btn mermaid-zoom-in\" title=\"Zoom in\">+</button>' +\n '<button class=\"mermaid-zoom-btn mermaid-zoom-reset\" title=\"Reset view\">\\\\u27F2</button>' +\n '</div>' +\n '<div class=\"mermaid-viewport\">' +\n '<div class=\"mermaid-svg-wrapper\">' +\n '<pre class=\"mermaid\">' + mermaidCode + '</pre>' +\n '</div>' +\n '</div>';\n pre.parentNode.replaceChild(mContainer, pre);\n } else {\n hljs.highlightElement(block);\n addCopyButton(block.parentElement);\n }\n });\n\n body.querySelectorAll('h1, h2, h3, h4').forEach(function(heading) {\n var id = heading.textContent.toLowerCase()\n .replace(/[^a-z0-9]+/g, '-')\n .replace(/^-+|-+$/g, '');\n heading.id = id;\n var anchor = document.createElement('a');\n anchor.className = 'heading-anchor';\n anchor.href = '#' + id;\n anchor.textContent = '#';\n heading.appendChild(anchor);\n });\n\n initMermaid();\n\n // Intercept internal .md links and route through SPA navigation\n body.addEventListener('click', function(e) {\n var target = e.target;\n while (target && target !== body) {\n if (target.tagName === 'A') break;\n target = target.parentElement;\n }\n if (!target || target.tagName 
!== 'A') return;\n var href = target.getAttribute('href');\n if (!href || !href.match(/\\\\.md(#.*)?$/)) return;\n // Don't intercept external links\n if (/^https?:\\\\/\\\\//.test(href)) return;\n\n e.preventDefault();\n var hashPart = '';\n var hashIdx = href.indexOf('#');\n if (hashIdx !== -1) {\n hashPart = href.substring(hashIdx + 1);\n href = href.substring(0, hashIdx);\n }\n\n // Extract slug from the href path\n // Handle patterns like:\n // ./modules/module-id.md\n // ./module-id.md\n // ../../other-area/modules/module-id.md\n // ./areas/area-id/index.md\n // ../index.md\n var slug = href.replace(/^(\\\\.\\\\.\\\\/|\\\\.\\\\/)*/g, '')\n .replace(/^areas\\\\/[^/]+\\\\/modules\\\\//, '')\n .replace(/^areas\\\\/[^/]+\\\\//, '')\n .replace(/^modules\\\\//, '')\n .replace(/\\\\.md$/, '');\n\n // Check special pages\n var specialPages = {\n 'index': { key: '__index', title: 'Index' },\n 'architecture': { key: '__architecture', title: 'Architecture' },\n 'getting-started': { key: '__getting-started', title: 'Getting Started' }\n };\n if (specialPages[slug]) {\n loadSpecialPage(specialPages[slug].key, specialPages[slug].title);\n return;\n }\n\n // Try to find matching module ID\n var matchedId = findModuleIdBySlugClient(slug);\n if (matchedId) {\n loadModule(matchedId);\n if (hashPart) {\n setTimeout(function() {\n var el = document.getElementById(hashPart);\n if (el) el.scrollIntoView({ behavior: 'smooth' });\n }, 100);\n }\n }\n });\n }\n\n // Client-side module ID lookup by slug\n function findModuleIdBySlugClient(slug) {\n var normalized = slug.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-+|-+$/g, '');\n for (var i = 0; i < moduleGraph.modules.length; i++) {\n var mod = moduleGraph.modules[i];\n var modSlug = mod.id.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-+|-+$/g, '');\n if (modSlug === normalized) return mod.id;\n }\n return null;\n }\n\n function addCopyButton(pre) {\n var btn = document.createElement('button');\n btn.className = 'copy-btn';\n btn.textContent = 'Copy';\n btn.onclick = function() {\n var code = pre.querySelector('code');\n var text = code ? code.textContent : pre.textContent;\n navigator.clipboard.writeText(text).then(function() {\n btn.textContent = 'Copied!';\n setTimeout(function() { btn.textContent = 'Copy'; }, 2000);\n });\n };\n pre.appendChild(btn);\n }\n\n function initMermaid() {\n var blocks = document.querySelectorAll('.mermaid');\n if (blocks.length === 0) return Promise.resolve();\n\n var isDark = currentTheme === 'dark' ||\n (currentTheme === 'auto' && window.matchMedia('(prefers-color-scheme: dark)').matches);\n\n mermaid.initialize({\n startOnLoad: false,\n theme: isDark ? 
'dark' : 'default',\n securityLevel: 'loose',\n flowchart: { useMaxWidth: false, htmlLabels: true, curve: 'basis' },\n fontSize: 14,\n });\n return mermaid.run({ nodes: blocks }).then(function() {\n initMermaidZoom();\n });\n }\n\n // ================================================================\n // Mermaid Zoom & Pan (shared via mermaid-zoom module)\n // ================================================================\n${getMermaidZoomScript()}\n\n // ================================================================\n // Table of Contents\n // ================================================================\n\n function buildToc() {\n var tocNav = document.getElementById('toc-nav');\n tocNav.innerHTML = '';\n var body = document.querySelector('#content .markdown-body');\n if (!body) return;\n\n var headings = body.querySelectorAll('h2, h3, h4');\n headings.forEach(function(heading) {\n if (!heading.id) return;\n var link = document.createElement('a');\n link.href = '#' + heading.id;\n link.textContent = heading.textContent.replace(/#$/, '').trim();\n var level = heading.tagName.toLowerCase();\n if (level === 'h3') link.className = 'toc-h3';\n if (level === 'h4') link.className = 'toc-h4';\n link.onclick = function(e) {\n e.preventDefault();\n var target = document.getElementById(heading.id);\n if (target) {\n target.scrollIntoView({ behavior: 'smooth', block: 'start' });\n }\n };\n tocNav.appendChild(link);\n });\n\n // Scroll spy\n setupScrollSpy();\n }\n\n function setupScrollSpy() {\n var scrollEl = document.getElementById('content-scroll');\n if (!scrollEl) return;\n scrollEl.addEventListener('scroll', updateActiveToc);\n }\n\n function updateActiveToc() {\n var tocLinks = document.querySelectorAll('#toc-nav a');\n if (tocLinks.length === 0) return;\n\n var scrollEl = document.getElementById('content-scroll');\n var scrollTop = scrollEl.scrollTop;\n var activeId = null;\n\n var headings = document.querySelectorAll('#content .markdown-body h2, #content .markdown-body h3, #content .markdown-body h4');\n headings.forEach(function(h) {\n if (h.offsetTop - 80 <= scrollTop) {\n activeId = h.id;\n }\n });\n\n tocLinks.forEach(function(link) {\n var href = link.getAttribute('href');\n if (href === '#' + activeId) {\n link.classList.add('active');\n } else {\n link.classList.remove('active');\n }\n });\n }\n\n // ================================================================\n // Utility\n // ================================================================\n\n function escapeHtml(str) {\n if (!str) return '';\n return String(str).replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/\"/g, '&quot;');\n }\n${opts.enableGraph ? 
`\n // ================================================================\n // Interactive Dependency Graph (D3.js)\n // ================================================================\n\n var graphRendered = false;\n var disabledCategories = new Set();\n\n var CATEGORY_COLORS = [\n '#3b82f6', '#ef4444', '#22c55e', '#f59e0b', '#8b5cf6',\n '#ec4899', '#06b6d4', '#f97316', '#14b8a6', '#6366f1',\n ];\n\n var COMPLEXITY_RADIUS = { low: 8, medium: 12, high: 18 };\n\n function getCategoryColor(category, allCategories) {\n var idx = allCategories.indexOf(category);\n return CATEGORY_COLORS[idx % CATEGORY_COLORS.length];\n }\n\n function showGraph(skipHistory) {\n currentModuleId = null;\n setActive('__graph');\n document.getElementById('toc-nav').innerHTML = '';\n if (!skipHistory) {\n history.pushState({ type: 'graph' }, '', location.pathname + '#graph');\n }\n\n var article = document.getElementById('article');\n article.style.maxWidth = '100%';\n article.style.padding = '0';\n\n var container = document.getElementById('content');\n container.innerHTML = '<div class=\"graph-container\" id=\"graph-container\">' +\n '<div class=\"graph-toolbar\">' +\n '<button id=\"graph-zoom-in\" title=\"Zoom in\">+</button>' +\n '<button id=\"graph-zoom-out\" title=\"Zoom out\">\\\\u2212</button>' +\n '<button id=\"graph-zoom-reset\" title=\"Reset view\">Reset</button>' +\n '</div>' +\n '<div class=\"graph-legend\" id=\"graph-legend\"></div>' +\n '<div class=\"graph-tooltip\" id=\"graph-tooltip\" style=\"display:none;\"></div>' +\n '</div>';\n\n // Make graph fill the available space\n var gc = document.getElementById('graph-container');\n gc.style.height = (article.parentElement.parentElement.clientHeight - 48) + 'px';\n\n renderGraph();\n }\n\n function renderGraph() {\n if (typeof d3 === 'undefined') return;\n\n var container = document.getElementById('graph-container');\n if (!container) return;\n\n var width = container.clientWidth || 800;\n var height = container.clientHeight || 600;\n\n var allCategories = [];\n moduleGraph.modules.forEach(function(m) {\n if (allCategories.indexOf(m.category) === -1) allCategories.push(m.category);\n });\n allCategories.sort();\n\n var legendEl = document.getElementById('graph-legend');\n legendEl.innerHTML = '<div class=\"graph-legend-title\">Categories</div>';\n allCategories.forEach(function(cat) {\n var color = getCategoryColor(cat, allCategories);\n var item = document.createElement('div');\n item.className = 'graph-legend-item';\n item.setAttribute('data-category', cat);\n item.innerHTML = '<div class=\"graph-legend-swatch\" style=\"background:' + color + '\"></div>' +\n '<span>' + escapeHtml(cat) + '</span>';\n item.onclick = function() {\n if (disabledCategories.has(cat)) {\n disabledCategories.delete(cat);\n item.classList.remove('disabled');\n } else {\n disabledCategories.add(cat);\n item.classList.add('disabled');\n }\n updateGraphVisibility();\n };\n legendEl.appendChild(item);\n });\n\n var nodes = moduleGraph.modules.map(function(m) {\n return { id: m.id, name: m.name, category: m.category, complexity: m.complexity, path: m.path, purpose: m.purpose };\n });\n\n var nodeIds = new Set(nodes.map(function(n) { return n.id; }));\n var links = [];\n moduleGraph.modules.forEach(function(m) {\n (m.dependencies || []).forEach(function(dep) {\n if (nodeIds.has(dep)) {\n links.push({ source: m.id, target: dep });\n }\n });\n });\n\n var svg = d3.select('#graph-container')\n .append('svg')\n .attr('width', width)\n .attr('height', height);\n\n 
svg.append('defs').append('marker')\n .attr('id', 'arrowhead')\n .attr('viewBox', '0 -5 10 10')\n .attr('refX', 20)\n .attr('refY', 0)\n .attr('markerWidth', 6)\n .attr('markerHeight', 6)\n .attr('orient', 'auto')\n .append('path')\n .attr('d', 'M0,-5L10,0L0,5')\n .attr('class', 'graph-link-arrow');\n\n var g = svg.append('g');\n\n var link = g.selectAll('.graph-link')\n .data(links)\n .join('line')\n .attr('class', 'graph-link')\n .attr('marker-end', 'url(#arrowhead)');\n\n var node = g.selectAll('.graph-node')\n .data(nodes)\n .join('g')\n .attr('class', 'graph-node')\n .style('cursor', 'pointer')\n .call(d3.drag()\n .on('start', dragstarted)\n .on('drag', dragged)\n .on('end', dragended));\n\n node.append('circle')\n .attr('r', function(d) { return COMPLEXITY_RADIUS[d.complexity] || 10; })\n .attr('fill', function(d) { return getCategoryColor(d.category, allCategories); })\n .attr('stroke', '#fff')\n .attr('stroke-width', 1.5);\n\n node.append('text')\n .attr('dx', function(d) { return (COMPLEXITY_RADIUS[d.complexity] || 10) + 4; })\n .attr('dy', 4)\n .text(function(d) { return d.name; });\n\n node.on('click', function(event, d) {\n event.stopPropagation();\n // Restore article styles before loading module\n var article = document.getElementById('article');\n article.style.maxWidth = '';\n article.style.padding = '';\n loadModule(d.id);\n });\n\n var tooltip = document.getElementById('graph-tooltip');\n node.on('mouseover', function(event, d) {\n tooltip.style.display = 'block';\n tooltip.innerHTML = '<div class=\"graph-tooltip-name\">' + escapeHtml(d.name) + '</div>' +\n '<div class=\"graph-tooltip-purpose\">' + escapeHtml(d.purpose) + '</div>' +\n '<div style=\"margin-top:4px;font-size:11px;color:var(--content-muted);\">' +\n 'Complexity: ' + d.complexity + '</div>';\n });\n node.on('mousemove', function(event) {\n tooltip.style.left = (event.pageX + 12) + 'px';\n tooltip.style.top = (event.pageY - 12) + 'px';\n });\n node.on('mouseout', function() { tooltip.style.display = 'none'; });\n\n var simulation = d3.forceSimulation(nodes)\n .force('link', d3.forceLink(links).id(function(d) { return d.id; }).distance(100))\n .force('charge', d3.forceManyBody().strength(-300))\n .force('center', d3.forceCenter(width / 2, height / 2))\n .force('collision', d3.forceCollide().radius(function(d) { return (COMPLEXITY_RADIUS[d.complexity] || 10) + 8; }))\n .on('tick', function() {\n link.attr('x1', function(d) { return d.source.x; })\n .attr('y1', function(d) { return d.source.y; })\n .attr('x2', function(d) { return d.target.x; })\n .attr('y2', function(d) { return d.target.y; });\n node.attr('transform', function(d) { return 'translate(' + d.x + ',' + d.y + ')'; });\n });\n\n var zoom = d3.zoom()\n .scaleExtent([0.1, 4])\n .on('zoom', function(event) { g.attr('transform', event.transform); });\n\n svg.call(zoom);\n\n document.getElementById('graph-zoom-in').onclick = function() { svg.transition().call(zoom.scaleBy, 1.3); };\n document.getElementById('graph-zoom-out').onclick = function() { svg.transition().call(zoom.scaleBy, 0.7); };\n document.getElementById('graph-zoom-reset').onclick = function() { svg.transition().call(zoom.transform, d3.zoomIdentity); };\n\n window._graphNode = node;\n window._graphLink = link;\n\n function dragstarted(event, d) {\n if (!event.active) simulation.alphaTarget(0.3).restart();\n d.fx = d.x; d.fy = d.y;\n }\n function dragged(event, d) { d.fx = event.x; d.fy = event.y; }\n function dragended(event, d) {\n if (!event.active) simulation.alphaTarget(0);\n d.fx = 
null; d.fy = null;\n }\n\n graphRendered = true;\n }\n\n function updateGraphVisibility() {\n if (!window._graphNode) return;\n window._graphNode.style('display', function(d) {\n return disabledCategories.has(d.category) ? 'none' : null;\n });\n window._graphLink.style('display', function(d) {\n var src = typeof d.source === 'object' ? d.source : { category: '' };\n var tgt = typeof d.target === 'object' ? d.target : { category: '' };\n return (disabledCategories.has(src.category) || disabledCategories.has(tgt.category)) ? 'none' : null;\n });\n }` : ''}\n${opts.enableAI ? `\n // ================================================================\n // Ask AI\n // ================================================================\n\n var conversationHistory = [];\n var askStreaming = false;\n var askPanelOpen = false;\n var currentSessionId = null;\n\n function updateAskSubject(name) {\n var el = document.getElementById('ask-bar-subject');\n if (el) el.textContent = name;\n }\n\n // Widget controls\n document.getElementById('ask-close').addEventListener('click', collapseWidget);\n document.getElementById('ask-clear').addEventListener('click', function() {\n if (currentSessionId) {\n fetch('/api/ask/session/' + encodeURIComponent(currentSessionId), { method: 'DELETE' }).catch(function() {});\n currentSessionId = null;\n }\n conversationHistory = [];\n document.getElementById('ask-messages').innerHTML = '';\n });\n document.getElementById('ask-widget-send').addEventListener('click', askPanelSend);\n document.getElementById('ask-textarea').addEventListener('keydown', function(e) {\n if (e.key === 'Enter' && !e.shiftKey) {\n e.preventDefault();\n askPanelSend();\n }\n });\n document.getElementById('ask-textarea').addEventListener('input', function() {\n this.style.height = 'auto';\n this.style.height = Math.min(this.scrollHeight, 120) + 'px';\n });\n\n function expandWidget() {\n if (askPanelOpen) return;\n askPanelOpen = true;\n var widget = document.getElementById('ask-widget');\n widget.classList.add('expanded');\n document.getElementById('ask-widget-header').classList.remove('hidden');\n document.getElementById('ask-messages').classList.remove('hidden');\n }\n\n function collapseWidget() {\n askPanelOpen = false;\n var widget = document.getElementById('ask-widget');\n widget.classList.remove('expanded');\n document.getElementById('ask-widget-header').classList.add('hidden');\n document.getElementById('ask-messages').classList.add('hidden');\n }\n\n function askPanelSend() {\n if (askStreaming) return;\n var input = document.getElementById('ask-textarea');\n var question = input.value.trim();\n if (!question) return;\n\n expandWidget();\n\n input.value = '';\n input.style.height = 'auto';\n\n appendAskMessage('user', question);\n conversationHistory.push({ role: 'user', content: question });\n\n askStreaming = true;\n document.getElementById('ask-widget-send').disabled = true;\n\n var typingEl = appendAskTyping();\n\n var requestBody = { question: question };\n if (currentSessionId) {\n requestBody.sessionId = currentSessionId;\n } else {\n requestBody.conversationHistory = conversationHistory.slice(0, -1);\n }\n\n fetch('/api/ask', {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify(requestBody),\n }).then(function(response) {\n if (!response.ok) {\n return response.json().then(function(err) {\n throw new Error(err.error || 'Request failed');\n });\n }\n\n var reader = response.body.getReader();\n var decoder = new TextDecoder();\n var buffer = '';\n var 
fullResponse = '';\n var contextShown = false;\n var responseEl = null;\n\n function processChunk(result) {\n if (result.done) {\n if (buffer.trim()) {\n var remaining = buffer.trim();\n if (remaining.startsWith('data: ')) {\n try {\n var data = JSON.parse(remaining.slice(6));\n if (data.type === 'chunk') {\n fullResponse += data.content;\n if (!responseEl) responseEl = appendAskAssistantStreaming('');\n updateAskAssistantStreaming(responseEl, fullResponse);\n } else if (data.type === 'done') {\n fullResponse = data.fullResponse || fullResponse;\n if (data.sessionId) currentSessionId = data.sessionId;\n }\n } catch(e) {}\n }\n }\n finishStreaming(fullResponse, typingEl);\n return;\n }\n\n buffer += decoder.decode(result.value, { stream: true });\n var lines = buffer.split('\\\\n');\n buffer = lines.pop() || '';\n\n for (var i = 0; i < lines.length; i++) {\n var line = lines[i].trim();\n if (!line.startsWith('data: ')) continue;\n try {\n var data = JSON.parse(line.slice(6));\n if (data.type === 'context' && !contextShown) {\n contextShown = true;\n appendAskContext(data.moduleIds);\n } else if (data.type === 'chunk') {\n if (typingEl && typingEl.parentNode) {\n typingEl.parentNode.removeChild(typingEl);\n typingEl = null;\n }\n fullResponse += data.content;\n if (!responseEl) responseEl = appendAskAssistantStreaming('');\n updateAskAssistantStreaming(responseEl, fullResponse);\n } else if (data.type === 'done') {\n fullResponse = data.fullResponse || fullResponse;\n if (data.sessionId) currentSessionId = data.sessionId;\n finishStreaming(fullResponse, typingEl);\n return;\n } else if (data.type === 'error') {\n appendAskError(data.message);\n finishStreaming('', typingEl);\n return;\n }\n } catch(e) {}\n }\n\n return reader.read().then(processChunk);\n }\n\n return reader.read().then(processChunk);\n }).catch(function(err) {\n if (typingEl && typingEl.parentNode) typingEl.parentNode.removeChild(typingEl);\n appendAskError(err.message || 'Failed to connect');\n finishStreaming('', null);\n });\n }\n\n function finishStreaming(fullResponse, typingEl) {\n if (typingEl && typingEl.parentNode) typingEl.parentNode.removeChild(typingEl);\n askStreaming = false;\n document.getElementById('ask-widget-send').disabled = false;\n if (fullResponse) {\n conversationHistory.push({ role: 'assistant', content: fullResponse });\n }\n }\n\n function appendAskMessage(role, content) {\n var messages = document.getElementById('ask-messages');\n var div = document.createElement('div');\n div.className = 'ask-message';\n var inner = document.createElement('div');\n inner.className = 'ask-message-' + role;\n inner.textContent = content;\n div.appendChild(inner);\n messages.appendChild(div);\n messages.scrollTop = messages.scrollHeight;\n return div;\n }\n\n function appendAskAssistantStreaming(content) {\n var messages = document.getElementById('ask-messages');\n var div = document.createElement('div');\n div.className = 'ask-message';\n var inner = document.createElement('div');\n inner.className = 'ask-message-assistant';\n inner.innerHTML = '<div class=\"markdown-body\">' + (typeof marked !== 'undefined' ? marked.parse(content) : escapeHtml(content)) + '</div>';\n div.appendChild(inner);\n messages.appendChild(div);\n messages.scrollTop = messages.scrollHeight;\n return inner;\n }\n\n function updateAskAssistantStreaming(el, content) {\n if (!el) return;\n el.innerHTML = '<div class=\"markdown-body\">' + (typeof marked !== 'undefined' ? 
marked.parse(content) : escapeHtml(content)) + '</div>';\n var messages = document.getElementById('ask-messages');\n messages.scrollTop = messages.scrollHeight;\n }\n\n function appendAskContext(moduleIds) {\n if (!moduleIds || moduleIds.length === 0) return;\n var messages = document.getElementById('ask-messages');\n var div = document.createElement('div');\n div.className = 'ask-message-context';\n var links = moduleIds.map(function(id) {\n var mod = moduleGraph.modules.find(function(m) { return m.id === id; });\n var name = mod ? mod.name : id;\n return '<a onclick=\"loadModule(\\\\'' + id.replace(/'/g, \"\\\\\\\\'\") + '\\\\')\">' + escapeHtml(name) + '</a>';\n });\n div.innerHTML = 'Context: ' + links.join(', ');\n messages.appendChild(div);\n messages.scrollTop = messages.scrollHeight;\n }\n\n function appendAskTyping() {\n var messages = document.getElementById('ask-messages');\n var div = document.createElement('div');\n div.className = 'ask-message';\n var inner = document.createElement('div');\n inner.className = 'ask-message-typing';\n inner.textContent = 'Thinking';\n div.appendChild(inner);\n messages.appendChild(div);\n messages.scrollTop = messages.scrollHeight;\n return div;\n }\n\n function appendAskError(message) {\n var messages = document.getElementById('ask-messages');\n var div = document.createElement('div');\n div.className = 'ask-message-error';\n div.textContent = 'Error: ' + message;\n messages.appendChild(div);\n messages.scrollTop = messages.scrollHeight;\n }\n\n // Deep Dive (Explore Further)\n var deepDiveStreaming = false;\n\n function addDeepDiveButton(moduleId) {\n var content = document.getElementById('content');\n if (!content) return;\n var markdownBody = content.querySelector('.markdown-body');\n if (!markdownBody) return;\n\n var btn = document.createElement('button');\n btn.className = 'deep-dive-btn';\n btn.innerHTML = '&#128269; Explore Further';\n btn.onclick = function() { toggleDeepDiveSection(moduleId, btn); };\n markdownBody.insertBefore(btn, markdownBody.firstChild);\n }\n\n function toggleDeepDiveSection(moduleId, btn) {\n var existing = document.getElementById('deep-dive-section');\n if (existing) { existing.parentNode.removeChild(existing); return; }\n\n var section = document.createElement('div');\n section.id = 'deep-dive-section';\n section.className = 'deep-dive-section';\n section.innerHTML =\n '<div class=\"deep-dive-input-area\">' +\n '<input type=\"text\" class=\"deep-dive-input\" id=\"deep-dive-input\" ' +\n 'placeholder=\"Ask a specific question about this module... (optional)\">' +\n '<button class=\"deep-dive-submit\" id=\"deep-dive-submit\">Explore</button>' +\n '</div>' +\n '<div class=\"deep-dive-result\" id=\"deep-dive-result\"></div>';\n\n btn.insertAdjacentElement('afterend', section);\n\n document.getElementById('deep-dive-submit').onclick = function() { startDeepDive(moduleId); };\n document.getElementById('deep-dive-input').addEventListener('keydown', function(e) {\n if (e.key === 'Enter') { e.preventDefault(); startDeepDive(moduleId); }\n });\n document.getElementById('deep-dive-input').focus();\n }\n\n function startDeepDive(moduleId) {\n if (deepDiveStreaming) return;\n deepDiveStreaming = true;\n\n var input = document.getElementById('deep-dive-input');\n var submitBtn = document.getElementById('deep-dive-submit');\n var resultDiv = document.getElementById('deep-dive-result');\n var question = input ? 
input.value.trim() : '';\n\n submitBtn.disabled = true;\n resultDiv.innerHTML = '<div class=\"deep-dive-status\">Analyzing module...</div>';\n\n var body = {};\n if (question) body.question = question;\n body.depth = 'deep';\n\n fetch('/api/explore/' + encodeURIComponent(moduleId), {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify(body),\n }).then(function(response) {\n if (!response.ok) {\n return response.json().then(function(err) { throw new Error(err.error || 'Request failed'); });\n }\n\n var reader = response.body.getReader();\n var decoder = new TextDecoder();\n var buffer = '';\n var fullResponse = '';\n\n function processChunk(result) {\n if (result.done) {\n if (buffer.trim()) {\n var remaining = buffer.trim();\n if (remaining.startsWith('data: ')) {\n try {\n var data = JSON.parse(remaining.slice(6));\n if (data.type === 'chunk') fullResponse += data.text;\n else if (data.type === 'done') fullResponse = data.fullResponse || fullResponse;\n } catch(e) {}\n }\n }\n finishDeepDive(fullResponse, resultDiv, submitBtn);\n return;\n }\n\n buffer += decoder.decode(result.value, { stream: true });\n var lines = buffer.split('\\\\n');\n buffer = lines.pop() || '';\n\n for (var i = 0; i < lines.length; i++) {\n var line = lines[i].trim();\n if (!line.startsWith('data: ')) continue;\n try {\n var data = JSON.parse(line.slice(6));\n if (data.type === 'status') {\n resultDiv.innerHTML = '<div class=\"deep-dive-status\">' + escapeHtml(data.message) + '</div>';\n } else if (data.type === 'chunk') {\n fullResponse += data.text;\n resultDiv.innerHTML = '<div class=\"markdown-body\">' +\n (typeof marked !== 'undefined' ? marked.parse(fullResponse) : escapeHtml(fullResponse)) + '</div>';\n } else if (data.type === 'done') {\n fullResponse = data.fullResponse || fullResponse;\n finishDeepDive(fullResponse, resultDiv, submitBtn);\n return;\n } else if (data.type === 'error') {\n resultDiv.innerHTML = '<div class=\"ask-message-error\">Error: ' + escapeHtml(data.message) + '</div>';\n finishDeepDive('', resultDiv, submitBtn);\n return;\n }\n } catch(e) {}\n }\n\n return reader.read().then(processChunk);\n }\n\n return reader.read().then(processChunk);\n }).catch(function(err) {\n resultDiv.innerHTML = '<div class=\"ask-message-error\">Error: ' + escapeHtml(err.message) + '</div>';\n finishDeepDive('', resultDiv, submitBtn);\n });\n }\n\n function finishDeepDive(fullResponse, resultDiv, submitBtn) {\n deepDiveStreaming = false;\n if (submitBtn) submitBtn.disabled = false;\n if (fullResponse && resultDiv) {\n resultDiv.innerHTML = '<div class=\"markdown-body\">' +\n (typeof marked !== 'undefined' ? marked.parse(fullResponse) : escapeHtml(fullResponse)) + '</div>';\n resultDiv.querySelectorAll('pre code').forEach(function(block) { hljs.highlightElement(block); });\n }\n }\n\n // Keyboard shortcuts\n document.addEventListener('keydown', function(e) {\n if ((e.ctrlKey || e.metaKey) && e.key === 'b') {\n e.preventDefault();\n document.getElementById('sidebar-collapse').click();\n }\n if ((e.ctrlKey || e.metaKey) && e.key === 'i') {\n e.preventDefault();\n if (askPanelOpen) collapseWidget();\n else { expandWidget(); document.getElementById('ask-textarea').focus(); }\n }\n if (e.key === 'Escape') {\n if (askPanelOpen) collapseWidget();\n }\n });` : ''}\n${opts.enableWatch ? 
`\n // ================================================================\n // WebSocket Live Reload\n // ================================================================\n\n var wsReconnectTimer = null;\n var wsReconnectDelay = 1000;\n\n function connectWebSocket() {\n var protocol = location.protocol === 'https:' ? 'wss:' : 'ws:';\n var wsUrl = protocol + '//' + location.host + '/ws';\n var ws = new WebSocket(wsUrl);\n\n ws.onopen = function() {\n wsReconnectDelay = 1000;\n setInterval(function() {\n if (ws.readyState === WebSocket.OPEN) {\n ws.send(JSON.stringify({ type: 'ping' }));\n }\n }, 30000);\n };\n\n ws.onmessage = function(event) {\n try {\n var msg = JSON.parse(event.data);\n handleWsMessage(msg);\n } catch(e) {}\n };\n\n ws.onclose = function() {\n wsReconnectTimer = setTimeout(function() {\n wsReconnectDelay = Math.min(wsReconnectDelay * 2, 30000);\n connectWebSocket();\n }, wsReconnectDelay);\n };\n\n ws.onerror = function() {};\n }\n\n function handleWsMessage(msg) {\n var bar = document.getElementById('live-reload-bar');\n if (!bar) return;\n\n if (msg.type === 'rebuilding') {\n bar.className = 'live-reload-bar visible rebuilding';\n bar.textContent = 'Rebuilding: ' + (msg.modules || []).join(', ') + '...';\n } else if (msg.type === 'reload') {\n bar.className = 'live-reload-bar visible reloaded';\n bar.textContent = 'Updated: ' + (msg.modules || []).join(', ');\n (msg.modules || []).forEach(function(id) { delete markdownCache[id]; });\n if (currentModuleId && (msg.modules || []).indexOf(currentModuleId) !== -1) {\n loadModule(currentModuleId, true);\n }\n setTimeout(function() { bar.className = 'live-reload-bar'; }, 3000);\n } else if (msg.type === 'error') {\n bar.className = 'live-reload-bar visible error';\n bar.textContent = 'Error: ' + (msg.message || 'Unknown error');\n setTimeout(function() { bar.className = 'live-reload-bar'; }, 5000);\n }\n }\n\n connectWebSocket();` : ''}`;\n\n}\n\n// ============================================================================\n// Helpers\n// ============================================================================\n\nfunction escapeHtml(str: string): string {\n return str\n .replace(/&/g, '&amp;')\n .replace(/</g, '&lt;')\n .replace(/>/g, '&gt;')\n .replace(/\"/g, '&quot;');\n}\n", "/**\n * Context Builder\n *\n * TF-IDF indexing and context retrieval for the AI Q&A feature.\n * Builds an in-memory index of module articles on startup and\n * retrieves the most relevant modules for a given question.\n *\n * No external dependencies \u2014 TF-IDF is ~100 lines.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport type { ModuleGraph } from '../types';\n\n// ============================================================================\n// Types\n// ============================================================================\n\n/**\n * A document in the TF-IDF index.\n */\ninterface IndexedDocument {\n /** Module ID */\n moduleId: string;\n /** Module name */\n name: string;\n /** Module category */\n category: string;\n /** Tokenized terms with their TF values */\n termFrequencies: Map<string, number>;\n /** Total number of terms in the document */\n termCount: number;\n}\n\n/**\n * Context retrieval result.\n */\nexport interface RetrievedContext {\n /** Module IDs selected as context */\n moduleIds: string[];\n /** Markdown content for the selected modules */\n contextText: string;\n /** Module graph summary for architectural context */\n graphSummary: string;\n}\n\n// 
============================================================================\n// Stop Words\n// ============================================================================\n\n/** Boost factor applied when a module name matches a query term */\nconst NAME_MATCH_BOOST = 1.5;\n\nconst STOP_WORDS = new Set([\n 'the', 'a', 'an', 'and', 'or', 'but', 'in', 'on', 'at', 'to', 'for',\n 'of', 'with', 'by', 'from', 'is', 'are', 'was', 'were', 'be', 'been',\n 'being', 'have', 'has', 'had', 'do', 'does', 'did', 'will', 'would',\n 'could', 'should', 'may', 'might', 'shall', 'can', 'this', 'that',\n 'these', 'those', 'it', 'its', 'i', 'we', 'you', 'he', 'she', 'they',\n 'me', 'us', 'him', 'her', 'them', 'my', 'our', 'your', 'his', 'their',\n 'what', 'which', 'who', 'whom', 'how', 'when', 'where', 'why', 'not',\n 'no', 'if', 'then', 'else', 'so', 'as', 'just', 'also', 'than',\n 'very', 'too', 'more', 'most', 'each', 'every', 'all', 'any', 'some',\n 'about', 'up', 'out', 'into', 'over', 'after', 'before', 'between',\n]);\n\n// ============================================================================\n// ContextBuilder Class\n// ============================================================================\n\n/**\n * Builds a TF-IDF index from module articles and retrieves relevant context.\n */\nexport class ContextBuilder {\n private documents: IndexedDocument[] = [];\n private inverseDocFreq: Map<string, number> = new Map();\n private graph: ModuleGraph;\n private markdownData: Record<string, string>;\n\n constructor(graph: ModuleGraph, markdownData: Record<string, string>) {\n this.graph = graph;\n this.markdownData = markdownData;\n this.buildIndex();\n }\n\n /**\n * Retrieve the most relevant modules for a question.\n *\n * @param question - The user's question\n * @param maxModules - Maximum number of modules to return (default: 5)\n * @returns Retrieved context with module IDs, markdown, and graph summary\n */\n retrieve(question: string, maxModules = 5): RetrievedContext {\n const queryTerms = tokenize(question);\n\n // Score each document\n const scores: Array<{ moduleId: string; score: number }> = [];\n for (const doc of this.documents) {\n let score = 0;\n for (const term of queryTerms) {\n const tf = doc.termFrequencies.get(term) || 0;\n const idf = this.inverseDocFreq.get(term) || 0;\n score += tf * idf;\n }\n\n // Boost if module name matches a query term\n const nameLower = doc.name.toLowerCase();\n for (const term of queryTerms) {\n if (nameLower.includes(term)) {\n score *= NAME_MATCH_BOOST;\n }\n }\n\n if (score > 0) {\n scores.push({ moduleId: doc.moduleId, score });\n }\n }\n\n // Sort by score descending\n scores.sort((a, b) => b.score - a.score);\n\n // Select top-K\n const topModules = scores.slice(0, maxModules);\n const selectedIds = topModules.map(s => s.moduleId);\n\n // Expand with 1-hop dependency neighbors if we have room\n const expandedIds = new Set(selectedIds);\n if (selectedIds.length < maxModules) {\n for (const moduleId of selectedIds) {\n const mod = this.graph.modules.find(m => m.id === moduleId);\n if (mod) {\n for (const dep of mod.dependencies) {\n if (expandedIds.size >= maxModules) break;\n expandedIds.add(dep);\n }\n for (const dep of mod.dependents) {\n if (expandedIds.size >= maxModules) break;\n expandedIds.add(dep);\n }\n }\n }\n }\n\n const finalIds = Array.from(expandedIds);\n\n // Build context text\n const contextParts: string[] = [];\n for (const moduleId of finalIds) {\n const markdown = this.markdownData[moduleId];\n if (markdown) {\n 
contextParts.push(`## Module: ${moduleId}\\n\\n${markdown}`);\n }\n }\n\n // Build graph summary\n const graphSummary = this.buildGraphSummary();\n\n return {\n moduleIds: finalIds,\n contextText: contextParts.join('\\n\\n---\\n\\n'),\n graphSummary,\n };\n }\n\n /**\n * Get the number of indexed documents.\n */\n get documentCount(): number {\n return this.documents.length;\n }\n\n /**\n * Get the vocabulary size.\n */\n get vocabularySize(): number {\n return this.inverseDocFreq.size;\n }\n\n // ========================================================================\n // Private: Index Building\n // ========================================================================\n\n private buildIndex(): void {\n // Index each module article\n for (const mod of this.graph.modules) {\n const markdown = this.markdownData[mod.id] || '';\n // Combine module metadata with markdown content for better matching\n const text = [\n mod.name,\n mod.purpose,\n mod.category,\n mod.path,\n mod.keyFiles.join(' '),\n markdown,\n ].join(' ');\n\n const terms = tokenize(text);\n const termFrequencies = new Map<string, number>();\n\n for (const term of terms) {\n termFrequencies.set(term, (termFrequencies.get(term) || 0) + 1);\n }\n\n // Normalize term frequencies\n const termCount = terms.length;\n if (termCount > 0) {\n for (const [term, count] of termFrequencies) {\n termFrequencies.set(term, count / termCount);\n }\n }\n\n this.documents.push({\n moduleId: mod.id,\n name: mod.name,\n category: mod.category,\n termFrequencies,\n termCount,\n });\n }\n\n // Compute IDF for each term\n const N = this.documents.length;\n const docFreq = new Map<string, number>();\n\n for (const doc of this.documents) {\n for (const term of doc.termFrequencies.keys()) {\n docFreq.set(term, (docFreq.get(term) || 0) + 1);\n }\n }\n\n for (const [term, df] of docFreq) {\n // IDF = log(N / df) + 1 (smoothed)\n this.inverseDocFreq.set(term, Math.log(N / df) + 1);\n }\n }\n\n private buildGraphSummary(): string {\n const lines: string[] = [];\n lines.push(`Project: ${this.graph.project.name}`);\n lines.push(`Description: ${this.graph.project.description}`);\n lines.push(`Language: ${this.graph.project.language}`);\n lines.push(`Modules: ${this.graph.modules.length}`);\n lines.push('');\n lines.push('Module Graph:');\n\n for (const mod of this.graph.modules) {\n const deps = mod.dependencies.length > 0\n ? 
` \u2192 depends on: ${mod.dependencies.join(', ')}`\n : '';\n lines.push(` - ${mod.name} (${mod.id}): ${mod.purpose}${deps}`);\n }\n\n return lines.join('\\n');\n }\n}\n\n// ============================================================================\n// Tokenization\n// ============================================================================\n\n/**\n * Tokenize text into lowercase terms, removing stop words and short words.\n */\nexport function tokenize(text: string): string[] {\n return text\n .toLowerCase()\n .replace(/[^a-z0-9\\s-_]/g, ' ')\n .split(/\\s+/)\n .filter(word =>\n word.length >= 2 &&\n !STOP_WORDS.has(word)\n );\n}\n", "/**\n * WebSocket Server\n *\n * Raw WebSocket implementation using Node.js http upgrade event.\n * No external dependencies \u2014 implements the WebSocket handshake\n * and frame protocol directly (~80 lines).\n *\n * Used for live-reload notifications when --watch is enabled.\n *\n * Messages (Server \u2192 Client):\n * { type: \"reload\", modules: string[] }\n * { type: \"rebuilding\", modules: string[] }\n * { type: \"error\", message: string }\n *\n * Messages (Client \u2192 Server):\n * { type: \"ping\" }\n */\n\nimport * as http from 'http';\nimport * as crypto from 'crypto';\nimport type { Socket } from 'net';\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface WSClient {\n socket: Socket;\n send: (data: string) => void;\n close: () => void;\n}\n\nexport interface WSMessage {\n type: string;\n modules?: string[];\n message?: string;\n}\n\nexport type WSMessageHandler = (client: WSClient, message: WSMessage) => void;\n\n// ============================================================================\n// WebSocketServer\n// ============================================================================\n\n/**\n * Minimal WebSocket server that attaches to an existing HTTP server.\n */\nexport class WebSocketServer {\n private clients: Set<WSClient> = new Set();\n private messageHandler?: WSMessageHandler;\n\n get clientCount(): number {\n return this.clients.size;\n }\n\n /**\n * Attach the WebSocket server to an HTTP server.\n * Handles upgrade requests to /ws.\n */\n attach(server: http.Server): void {\n server.on('upgrade', (req: http.IncomingMessage, socket: Socket, head: Buffer) => {\n if (req.url !== '/ws') {\n socket.destroy();\n return;\n }\n\n const key = req.headers['sec-websocket-key'];\n if (!key) {\n socket.destroy();\n return;\n }\n\n // Perform WebSocket handshake\n const acceptKey = crypto\n .createHash('sha1')\n .update(key + '258EAFA5-E914-47DA-95CA-5AB5DC11E65B')\n .digest('base64');\n\n socket.write(\n 'HTTP/1.1 101 Switching Protocols\\r\\n' +\n 'Upgrade: websocket\\r\\n' +\n 'Connection: Upgrade\\r\\n' +\n `Sec-WebSocket-Accept: ${acceptKey}\\r\\n` +\n '\\r\\n',\n );\n\n const client: WSClient = {\n socket,\n send: (data: string) => {\n try {\n sendFrame(socket, data);\n } catch {\n // Ignore send errors on closed sockets\n }\n },\n close: () => {\n try {\n socket.end();\n } catch {\n // Ignore\n }\n this.clients.delete(client);\n },\n };\n\n this.clients.add(client);\n\n socket.on('data', (buf: Buffer) => {\n try {\n const message = decodeFrame(buf);\n if (message !== null && this.messageHandler) {\n const parsed = JSON.parse(message);\n this.messageHandler(client, parsed);\n }\n } catch {\n // Ignore parse errors\n }\n });\n\n const removeClient = () => {\n 
this.clients.delete(client);\n };\n\n socket.on('close', removeClient);\n socket.on('end', removeClient);\n socket.on('error', removeClient);\n });\n }\n\n /**\n * Register a handler for incoming messages.\n */\n onMessage(handler: WSMessageHandler): void {\n this.messageHandler = handler;\n }\n\n /**\n * Broadcast a message to all connected clients.\n */\n broadcast(message: WSMessage): void {\n const data = JSON.stringify(message);\n for (const client of this.clients) {\n client.send(data);\n }\n }\n\n /**\n * Close all connections.\n */\n closeAll(): void {\n for (const client of this.clients) {\n client.close();\n }\n this.clients.clear();\n }\n}\n\n// ============================================================================\n// WebSocket Frame Encoding/Decoding\n// ============================================================================\n\n/**\n * Send a text frame over the socket.\n */\nfunction sendFrame(socket: Socket, data: string): void {\n const payload = Buffer.from(data, 'utf-8');\n const length = payload.length;\n\n let header: Buffer;\n\n if (length < 126) {\n header = Buffer.alloc(2);\n header[0] = 0x81; // FIN + text opcode\n header[1] = length;\n } else if (length < 65536) {\n header = Buffer.alloc(4);\n header[0] = 0x81;\n header[1] = 126;\n header.writeUInt16BE(length, 2);\n } else {\n header = Buffer.alloc(10);\n header[0] = 0x81;\n header[1] = 127;\n // Write as two 32-bit values for compatibility\n header.writeUInt32BE(0, 2);\n header.writeUInt32BE(length, 6);\n }\n\n socket.write(Buffer.concat([header, payload]));\n}\n\n/**\n * Decode a WebSocket text frame.\n * Returns the decoded text or null if the frame is a close/binary/etc.\n */\nfunction decodeFrame(buf: Buffer): string | null {\n if (buf.length < 2) return null;\n\n const opcode = buf[0] & 0x0f;\n\n // Only handle text frames (opcode 1)\n if (opcode !== 1) return null;\n\n const masked = (buf[1] & 0x80) !== 0;\n let payloadLength = buf[1] & 0x7f;\n let offset = 2;\n\n if (payloadLength === 126) {\n if (buf.length < 4) return null;\n payloadLength = buf.readUInt16BE(2);\n offset = 4;\n } else if (payloadLength === 127) {\n if (buf.length < 10) return null;\n // Read lower 32 bits only (enough for our messages)\n payloadLength = buf.readUInt32BE(6);\n offset = 10;\n }\n\n if (masked) {\n if (buf.length < offset + 4 + payloadLength) return null;\n const maskKey = buf.slice(offset, offset + 4);\n offset += 4;\n const payload = buf.slice(offset, offset + payloadLength);\n for (let i = 0; i < payload.length; i++) {\n payload[i] ^= maskKey[i % 4];\n }\n return payload.toString('utf-8');\n }\n\n if (buf.length < offset + payloadLength) return null;\n return buf.slice(offset, offset + payloadLength).toString('utf-8');\n}\n", "/**\n * File Watcher\n *\n * Watches a repository directory for changes and triggers incremental\n * rebuilds. Uses fs.watch (recursive) with debouncing to avoid\n * excessive rebuilds during rapid file saves.\n *\n * When changes are detected:\n * 1. Debounce for 2 seconds\n * 2. Determine which modules are affected\n * 3. 
Notify callback with affected module IDs\n */\n\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport type { ModuleGraph } from '../types';\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface FileWatcherOptions {\n /** Path to the repository to watch */\n repoPath: string;\n /** Wiki output directory (to reload data after rebuild) */\n wikiDir: string;\n /** Module graph for determining affected modules */\n moduleGraph: ModuleGraph;\n /** Debounce interval in milliseconds (default: 2000) */\n debounceMs?: number;\n /** Callback when changes are detected */\n onChange: (affectedModuleIds: string[]) => void;\n /** Optional callback for errors */\n onError?: (error: Error) => void;\n}\n\n/** Default debounce interval in milliseconds */\nconst DEFAULT_DEBOUNCE_MS = 2000;\n\n// ============================================================================\n// FileWatcher\n// ============================================================================\n\nexport class FileWatcher {\n private watcher: fs.FSWatcher | null = null;\n private debounceTimer: ReturnType<typeof setTimeout> | null = null;\n private changedFiles: Set<string> = new Set();\n private options: FileWatcherOptions;\n private _isWatching = false;\n\n constructor(options: FileWatcherOptions) {\n this.options = options;\n }\n\n /**\n * Start watching the repository for changes.\n */\n start(): void {\n if (this._isWatching) return;\n\n const { repoPath, debounceMs = DEFAULT_DEBOUNCE_MS } = this.options;\n\n try {\n this.watcher = fs.watch(repoPath, { recursive: true }, (eventType, filename) => {\n if (!filename) return;\n\n // Ignore common non-source files\n if (shouldIgnore(filename)) return;\n\n this.changedFiles.add(filename);\n\n // Debounce\n if (this.debounceTimer) {\n clearTimeout(this.debounceTimer);\n }\n this.debounceTimer = setTimeout(() => {\n this.processChanges();\n }, debounceMs);\n });\n\n this.watcher.on('error', (err) => {\n if (this.options.onError) {\n this.options.onError(err instanceof Error ? err : new Error(String(err)));\n }\n });\n\n this._isWatching = true;\n } catch (err) {\n if (this.options.onError) {\n this.options.onError(err instanceof Error ? 
err : new Error(String(err)));\n }\n }\n }\n\n /**\n * Stop watching.\n */\n stop(): void {\n if (this.debounceTimer) {\n clearTimeout(this.debounceTimer);\n this.debounceTimer = null;\n }\n if (this.watcher) {\n this.watcher.close();\n this.watcher = null;\n }\n this.changedFiles.clear();\n this._isWatching = false;\n }\n\n /**\n * Whether the watcher is currently active.\n */\n get isWatching(): boolean {\n return this._isWatching;\n }\n\n // ========================================================================\n // Private\n // ========================================================================\n\n private processChanges(): void {\n const files = Array.from(this.changedFiles);\n this.changedFiles.clear();\n\n // Determine which modules are affected\n const affectedIds = this.findAffectedModules(files);\n\n if (affectedIds.length > 0) {\n this.options.onChange(affectedIds);\n }\n }\n\n /**\n * Determine which modules are affected by the changed files.\n *\n * A module is affected if any changed file is within the module's path.\n */\n private findAffectedModules(changedFiles: string[]): string[] {\n const affected = new Set<string>();\n\n for (const file of changedFiles) {\n const normalizedFile = file.replace(/\\\\/g, '/');\n\n for (const mod of this.options.moduleGraph.modules) {\n const modulePath = mod.path.replace(/\\\\/g, '/');\n\n // Check if the changed file is within the module's directory\n if (normalizedFile.startsWith(modulePath + '/') || normalizedFile === modulePath) {\n affected.add(mod.id);\n continue;\n }\n\n // Also check key files\n for (const keyFile of mod.keyFiles) {\n const normalizedKeyFile = keyFile.replace(/\\\\/g, '/');\n if (normalizedFile === normalizedKeyFile || normalizedFile.endsWith('/' + normalizedKeyFile)) {\n affected.add(mod.id);\n break;\n }\n }\n }\n }\n\n return Array.from(affected);\n }\n}\n\n// ============================================================================\n// Helpers\n// ============================================================================\n\n/**\n * Patterns to ignore (node_modules, .git, build artifacts, etc.)\n */\nconst IGNORE_PATTERNS = [\n 'node_modules',\n '.git',\n '.wiki-cache',\n 'dist',\n 'build',\n 'out',\n '.next',\n '.nuxt',\n '__pycache__',\n '.pytest_cache',\n '.tox',\n 'target',\n '.DS_Store',\n 'thumbs.db',\n '.env',\n];\n\nfunction shouldIgnore(filename: string): boolean {\n const normalized = filename.replace(/\\\\/g, '/');\n const parts = normalized.split('/');\n\n for (const part of parts) {\n if (IGNORE_PATTERNS.includes(part)) return true;\n }\n\n // Ignore common generated/temp files\n if (normalized.endsWith('.map') ||\n normalized.endsWith('.lock') ||\n normalized.endsWith('.log')) {\n return true;\n }\n\n return false;\n}\n", "/**\n * Conversation Session Manager\n *\n * Manages server-side conversation sessions for the Ask AI feature.\n * Each session wraps an AskAIFunction and tracks turn history so\n * follow-up questions can reuse the same AI context.\n *\n * Features:\n * - Session creation with auto-generated IDs\n * - Session lookup and reuse for multi-turn conversations\n * - Auto-cleanup of idle sessions (configurable timeout)\n * - Max concurrent sessions limit\n * - Per-session mutex to prevent concurrent sends\n */\n\nimport type { AskAIFunction } from './ask-handler';\n\n// ============================================================================\n// Types\n// ============================================================================\n\n/** A single conversation session. 
*/\nexport interface ConversationSession {\n /** Unique session identifier */\n sessionId: string;\n /** Number of AI turns completed */\n turnCount: number;\n /** Timestamp of last activity */\n lastUsedAt: number;\n /** Timestamp of session creation */\n createdAt: number;\n /** Whether a send is currently in progress */\n busy: boolean;\n}\n\n/** Options for creating the ConversationSessionManager. */\nexport interface ConversationSessionManagerOptions {\n /** The AI send function to use for all sessions */\n sendMessage: AskAIFunction;\n /** Max idle time in ms before auto-cleanup (default: 600000 = 10 minutes) */\n idleTimeoutMs?: number;\n /** Max concurrent sessions (default: 5) */\n maxSessions?: number;\n /** Cleanup interval in ms (default: 60000 = 1 minute) */\n cleanupIntervalMs?: number;\n}\n\n/** Result of sending a message through a session. */\nexport interface SessionSendResult {\n /** The AI response */\n response: string;\n /** The session ID (same as input, or new if created) */\n sessionId: string;\n}\n\n// ============================================================================\n// Constants\n// ============================================================================\n\nconst DEFAULT_IDLE_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes\nconst DEFAULT_MAX_SESSIONS = 5;\nconst DEFAULT_CLEANUP_INTERVAL_MS = 60 * 1000; // 1 minute\n\n// ============================================================================\n// Manager\n// ============================================================================\n\n/**\n * Manages conversation sessions for multi-turn AI Q&A.\n *\n * Each session tracks its turn count and last-used time. Sessions are\n * automatically cleaned up when idle for too long. A per-session mutex\n * prevents concurrent AI calls on the same session.\n */\nexport class ConversationSessionManager {\n private readonly sessions = new Map<string, ConversationSession>();\n private readonly sendMessage: AskAIFunction;\n private readonly idleTimeoutMs: number;\n private readonly maxSessions: number;\n private cleanupTimer: ReturnType<typeof setInterval> | null = null;\n\n constructor(options: ConversationSessionManagerOptions) {\n this.sendMessage = options.sendMessage;\n this.idleTimeoutMs = options.idleTimeoutMs ?? DEFAULT_IDLE_TIMEOUT_MS;\n this.maxSessions = options.maxSessions ?? DEFAULT_MAX_SESSIONS;\n\n const cleanupIntervalMs = options.cleanupIntervalMs ?? 
DEFAULT_CLEANUP_INTERVAL_MS;\n this.cleanupTimer = setInterval(() => this.cleanupIdleSessions(), cleanupIntervalMs);\n // Don't prevent Node.js from exiting\n if (this.cleanupTimer.unref) {\n this.cleanupTimer.unref();\n }\n }\n\n /**\n * Create a new conversation session.\n * @returns The new session, or null if max sessions reached.\n */\n create(): ConversationSession | null {\n if (this.sessions.size >= this.maxSessions) {\n // Try to evict the oldest idle session\n const evicted = this.evictOldestIdle();\n if (!evicted) {\n return null;\n }\n }\n\n const sessionId = generateSessionId();\n const session: ConversationSession = {\n sessionId,\n turnCount: 0,\n lastUsedAt: Date.now(),\n createdAt: Date.now(),\n busy: false,\n };\n\n this.sessions.set(sessionId, session);\n return session;\n }\n\n /**\n * Get an existing session by ID.\n * @returns The session, or undefined if not found.\n */\n get(sessionId: string): ConversationSession | undefined {\n return this.sessions.get(sessionId);\n }\n\n /**\n * Send a message using a session.\n * If the session is busy, rejects with an error.\n */\n async send(\n sessionId: string,\n prompt: string,\n options?: {\n model?: string;\n workingDirectory?: string;\n onStreamingChunk?: (chunk: string) => void;\n },\n ): Promise<SessionSendResult> {\n const session = this.sessions.get(sessionId);\n if (!session) {\n throw new Error(`Session not found: ${sessionId}`);\n }\n\n if (session.busy) {\n throw new Error(`Session is busy: ${sessionId}`);\n }\n\n session.busy = true;\n try {\n const response = await this.sendMessage(prompt, {\n model: options?.model,\n workingDirectory: options?.workingDirectory,\n onStreamingChunk: options?.onStreamingChunk,\n });\n\n session.turnCount++;\n session.lastUsedAt = Date.now();\n\n return { response, sessionId };\n } finally {\n session.busy = false;\n }\n }\n\n /**\n * Destroy a specific session.\n */\n destroy(sessionId: string): boolean {\n return this.sessions.delete(sessionId);\n }\n\n /**\n * Destroy all sessions and stop the cleanup timer.\n */\n destroyAll(): void {\n this.sessions.clear();\n if (this.cleanupTimer) {\n clearInterval(this.cleanupTimer);\n this.cleanupTimer = null;\n }\n }\n\n /**\n * Get the number of active sessions.\n */\n get size(): number {\n return this.sessions.size;\n }\n\n /**\n * Get all session IDs.\n */\n get sessionIds(): string[] {\n return Array.from(this.sessions.keys());\n }\n\n /**\n * Remove sessions that have been idle for longer than idleTimeoutMs.\n */\n private cleanupIdleSessions(): void {\n const now = Date.now();\n for (const [id, session] of this.sessions) {\n if (!session.busy && (now - session.lastUsedAt) > this.idleTimeoutMs) {\n this.sessions.delete(id);\n }\n }\n }\n\n /**\n * Evict the oldest idle session to make room for a new one.\n * @returns true if a session was evicted.\n */\n private evictOldestIdle(): boolean {\n let oldestId: string | null = null;\n let oldestTime = Infinity;\n\n for (const [id, session] of this.sessions) {\n if (!session.busy && session.lastUsedAt < oldestTime) {\n oldestTime = session.lastUsedAt;\n oldestId = id;\n }\n }\n\n if (oldestId) {\n this.sessions.delete(oldestId);\n return true;\n }\n return false;\n }\n}\n\n// ============================================================================\n// Helpers\n// ============================================================================\n\n/**\n * Generate a random session ID.\n */\nfunction generateSessionId(): string {\n const chars = 'abcdefghijklmnopqrstuvwxyz0123456789';\n let 
id = '';\n for (let i = 0; i < 12; i++) {\n id += chars[Math.floor(Math.random() * chars.length)];\n }\n return id;\n}\n", "/**\n * Deep Wiki Interactive Server\n *\n * Creates and manages an HTTP server that serves the wiki with\n * interactive exploration capabilities.\n *\n * Uses only Node.js built-in modules (http, fs, path) and\n * the existing pipeline-core dependency.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as http from 'http';\nimport { WikiData } from './wiki-data';\nimport { createRequestHandler } from './router';\nimport { generateSpaHtml } from './spa-template';\nimport { ContextBuilder } from './context-builder';\nimport { WebSocketServer } from './websocket';\nimport { FileWatcher } from './file-watcher';\nimport type { AskAIFunction } from './ask-handler';\nimport { ConversationSessionManager } from './conversation-session-manager';\nimport type { WebsiteTheme } from '../types';\n\n// ============================================================================\n// Types\n// ============================================================================\n\n/**\n * Options for creating the wiki server.\n */\nexport interface WikiServerOptions {\n /** Path to the wiki output directory */\n wikiDir: string;\n /** Port to listen on (default: 3000) */\n port?: number;\n /** Host/address to bind to (default: 'localhost') */\n host?: string;\n /** Enable AI features (Q&A, deep dive) */\n aiEnabled?: boolean;\n /** Path to the repository (needed for AI features and watch mode) */\n repoPath?: string;\n /** Website theme */\n theme?: WebsiteTheme;\n /** Override project title */\n title?: string;\n /** AI SDK send function (required when aiEnabled=true) */\n aiSendMessage?: AskAIFunction;\n /** AI model override */\n aiModel?: string;\n /** Enable watch mode for live reload */\n watch?: boolean;\n /** Debounce interval for file watcher in ms (default: 2000) */\n watchDebounceMs?: number;\n}\n\n/**\n * A running wiki server instance.\n */\nexport interface WikiServer {\n /** The underlying HTTP server */\n server: http.Server;\n /** The wiki data layer */\n wikiData: WikiData;\n /** The context builder for AI Q&A (only when AI is enabled) */\n contextBuilder?: ContextBuilder;\n /** The conversation session manager (only when AI is enabled) */\n sessionManager?: ConversationSessionManager;\n /** The WebSocket server (only when watch mode is enabled) */\n wsServer?: WebSocketServer;\n /** The file watcher (only when watch mode is enabled) */\n fileWatcher?: FileWatcher;\n /** The port the server is listening on */\n port: number;\n /** The host the server is bound to */\n host: string;\n /** URL to access the server */\n url: string;\n /** Stop the server */\n close: () => Promise<void>;\n}\n\n// ============================================================================\n// Public API\n// ============================================================================\n\n/**\n * Create and start the wiki server.\n *\n * @param options - Server options\n * @returns A running WikiServer instance\n */\nexport async function createServer(options: WikiServerOptions): Promise<WikiServer> {\n const port = options.port !== undefined ? 
options.port : 3000;\n const host = options.host || 'localhost';\n const aiEnabled = options.aiEnabled || false;\n const theme = options.theme || 'auto';\n\n // Load wiki data\n const wikiData = new WikiData(options.wikiDir);\n wikiData.load();\n\n // Determine title\n const title = options.title || wikiData.graph.project.name;\n\n // Build context index for AI Q&A\n let contextBuilder: ContextBuilder | undefined;\n let sessionManager: ConversationSessionManager | undefined;\n if (aiEnabled) {\n const markdownData = wikiData.getMarkdownData();\n contextBuilder = new ContextBuilder(wikiData.graph, markdownData);\n\n if (options.aiSendMessage) {\n sessionManager = new ConversationSessionManager({\n sendMessage: options.aiSendMessage,\n });\n }\n }\n\n // Generate SPA HTML\n const spaHtml = generateSpaHtml({\n theme,\n title,\n enableSearch: true,\n enableAI: aiEnabled,\n enableGraph: true,\n enableWatch: !!(options.watch && options.repoPath),\n });\n\n // Create HTTP server\n const handler = createRequestHandler({\n wikiData,\n spaHtml,\n aiEnabled,\n repoPath: options.repoPath,\n contextBuilder,\n aiSendMessage: options.aiSendMessage,\n aiModel: options.aiModel,\n aiWorkingDirectory: options.repoPath,\n sessionManager,\n });\n\n const server = http.createServer(handler);\n\n // Set up WebSocket server for live reload\n let wsServer: WebSocketServer | undefined;\n let fileWatcher: FileWatcher | undefined;\n\n if (options.watch && options.repoPath) {\n wsServer = new WebSocketServer();\n wsServer.attach(server);\n\n // Handle ping from clients\n wsServer.onMessage((client, msg) => {\n if (msg.type === 'ping') {\n client.send(JSON.stringify({ type: 'pong' }));\n }\n });\n\n // Set up file watcher\n fileWatcher = new FileWatcher({\n repoPath: options.repoPath,\n wikiDir: options.wikiDir,\n moduleGraph: wikiData.graph,\n debounceMs: options.watchDebounceMs,\n onChange: (affectedModuleIds) => {\n // Notify clients about rebuild\n wsServer!.broadcast({ type: 'rebuilding', modules: affectedModuleIds });\n\n // Reload wiki data\n try {\n wikiData.reload();\n\n // Rebuild context index if AI is enabled\n if (aiEnabled && contextBuilder) {\n const markdownData = wikiData.getMarkdownData();\n const newBuilder = new ContextBuilder(wikiData.graph, markdownData);\n // Note: we can't reassign contextBuilder since it's const,\n // but the router already has a reference, so we just notify\n }\n\n wsServer!.broadcast({ type: 'reload', modules: affectedModuleIds });\n } catch (err) {\n const msg = err instanceof Error ? err.message : 'Unknown error';\n wsServer!.broadcast({ type: 'error', message: msg });\n }\n },\n onError: (err) => {\n wsServer!.broadcast({ type: 'error', message: err.message });\n },\n });\n\n fileWatcher.start();\n }\n\n // Start listening\n await new Promise<void>((resolve, reject) => {\n server.on('error', reject);\n server.listen(port, host, () => resolve());\n });\n\n // Get actual port (important when port 0 is used for random port)\n const address = server.address();\n const actualPort = typeof address === 'object' && address ? 
address.port : port;\n const url = `http://${host}:${actualPort}`;\n\n return {\n server,\n wikiData,\n contextBuilder,\n sessionManager,\n wsServer,\n fileWatcher,\n port: actualPort,\n host,\n url,\n close: async () => {\n if (sessionManager) {\n sessionManager.destroyAll();\n }\n if (fileWatcher) {\n fileWatcher.stop();\n }\n if (wsServer) {\n wsServer.closeAll();\n }\n await new Promise<void>((resolve, reject) => {\n server.close((err) => {\n if (err) { reject(err); }\n else { resolve(); }\n });\n });\n },\n };\n}\n\n// Re-export types and modules used by consumers\nexport { WikiData } from './wiki-data';\nexport { generateSpaHtml } from './spa-template';\nexport { ContextBuilder } from './context-builder';\nexport { WebSocketServer } from './websocket';\nexport { FileWatcher } from './file-watcher';\nexport type { SpaTemplateOptions } from './spa-template';\nexport type { ModuleSummary, ModuleDetail, SpecialPage } from './wiki-data';\nexport { ConversationSessionManager } from './conversation-session-manager';\nexport type { ConversationSession, ConversationSessionManagerOptions, SessionSendResult } from './conversation-session-manager';\nexport type { AskAIFunction, AskRequest, ConversationMessage } from './ask-handler';\nexport type { ExploreRequest } from './explore-handler';\nexport type { RetrievedContext } from './context-builder';\nexport type { WSClient, WSMessage } from './websocket';\nexport type { FileWatcherOptions } from './file-watcher';\n", "/**\n * Serve Command\n *\n * Implements the `deep-wiki serve <wiki-dir>` command.\n * Starts an HTTP server to host the wiki with interactive features.\n *\n * Options:\n * --port <n> Port to listen on (default: 3000)\n * --host <addr> Bind address (default: localhost)\n * --generate <repo> Generate wiki before serving\n * --watch Watch repo for changes (requires --generate)\n * --no-ai Disable AI Q&A and deep-dive features (enabled by default)\n * --model <model> AI model for Q&A sessions\n * --open Open browser on start\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport * as path from 'path';\nimport * as fs from 'fs';\nimport { createServer } from '../server';\nimport type { AskAIFunction } from '../server';\nimport { EXIT_CODES } from '../cli';\nimport {\n printSuccess,\n printError,\n printWarning,\n printInfo,\n printHeader,\n printKeyValue,\n bold,\n} from '../logger';\nimport type { ServeCommandOptions } from '../server/types';\nimport { getErrorMessage } from '../utils/error-utils';\n\n// ============================================================================\n// Execute Serve Command\n// ============================================================================\n\n/**\n * Execute the serve command.\n *\n * @param wikiDir - Path to the wiki output directory\n * @param options - Command options\n * @returns Exit code (never returns normally \u2014 server runs until SIGINT)\n */\nexport async function executeServe(\n wikiDir: string,\n options: ServeCommandOptions\n): Promise<number> {\n const resolvedWikiDir = path.resolve(wikiDir);\n\n // ================================================================\n // Optional: Generate wiki before serving\n // ================================================================\n if (options.generate) {\n const repoPath = path.resolve(options.generate);\n\n if (!fs.existsSync(repoPath)) {\n printError(`Repository path does not exist: ${repoPath}`);\n return EXIT_CODES.CONFIG_ERROR;\n }\n\n printHeader('Generating wiki before serving...');\n\n try {\n const { 
executeGenerate } = await import('./generate');\n const exitCode = await executeGenerate(repoPath, {\n output: resolvedWikiDir,\n model: options.model,\n depth: 'normal',\n force: false,\n useCache: true,\n verbose: false,\n });\n\n if (exitCode !== EXIT_CODES.SUCCESS) {\n printError('Wiki generation failed. Cannot serve.');\n return exitCode;\n }\n } catch (error) {\n printError(`Wiki generation failed: ${getErrorMessage(error)}`);\n return EXIT_CODES.EXECUTION_ERROR;\n }\n }\n\n // ================================================================\n // Validate wiki directory\n // ================================================================\n if (!fs.existsSync(resolvedWikiDir)) {\n printError(`Wiki directory does not exist: ${resolvedWikiDir}`);\n printInfo('Run `deep-wiki generate <repo-path>` first, or use `--generate <repo-path>`.');\n return EXIT_CODES.CONFIG_ERROR;\n }\n\n const graphPath = path.join(resolvedWikiDir, 'module-graph.json');\n if (!fs.existsSync(graphPath)) {\n printError(`module-graph.json not found in ${resolvedWikiDir}`);\n printInfo('The wiki directory does not contain generated wiki data.');\n printInfo('Run `deep-wiki generate <repo-path>` first, or use `--generate <repo-path>`.');\n return EXIT_CODES.CONFIG_ERROR;\n }\n\n // ================================================================\n // Watch mode validation\n // ================================================================\n if (options.watch && !options.generate) {\n printWarning('--watch requires --generate <repo-path>. Ignoring --watch.');\n }\n\n // ================================================================\n // Initialize AI service if enabled\n // ================================================================\n const aiEnabled = options.ai !== false; // Default to true\n let aiSendMessage: AskAIFunction | undefined;\n\n if (aiEnabled) {\n try {\n aiSendMessage = await createAISendFunction(options.model, resolvedWikiDir);\n printInfo('AI service initialized successfully.');\n } catch (error) {\n const errMsg = error instanceof Error ? error.message : String(error);\n printWarning(`AI service unavailable: ${errMsg}`);\n printWarning('Server will start without AI features.');\n }\n }\n\n // ================================================================\n // Start server\n // ================================================================\n printHeader('Deep Wiki \u2014 Interactive Server');\n printKeyValue('Wiki Directory', resolvedWikiDir);\n printKeyValue('Port', String(options.port || 3000));\n printKeyValue('Host', options.host || 'localhost');\n printKeyValue('AI Features', aiSendMessage ? 'Enabled' : aiEnabled ? 'Unavailable' : 'Disabled');\n if (options.watch && options.generate) { printKeyValue('Watch Mode', 'Enabled'); }\n process.stderr.write('\\n');\n\n try {\n const wiki = await createServer({\n wikiDir: resolvedWikiDir,\n port: options.port || 3000,\n host: options.host || 'localhost',\n aiEnabled: !!aiSendMessage,\n aiSendMessage,\n aiModel: options.model,\n repoPath: options.generate ? 
path.resolve(options.generate) : resolvedWikiDir,\n theme: (options.theme as 'light' | 'dark' | 'auto') || 'auto',\n title: options.title,\n });\n\n printSuccess(`Server running at ${bold(wiki.url)}`);\n printInfo('Press Ctrl+C to stop.');\n\n // Open browser if requested\n if (options.open) {\n openBrowser(wiki.url);\n }\n\n // Wait for SIGINT/SIGTERM\n await new Promise<void>((resolve) => {\n const shutdown = async () => {\n process.stderr.write('\\n');\n printInfo('Shutting down server...');\n await wiki.close();\n printSuccess('Server stopped.');\n resolve();\n };\n\n process.on('SIGINT', () => void shutdown());\n process.on('SIGTERM', () => void shutdown());\n });\n\n return EXIT_CODES.SUCCESS;\n\n } catch (error) {\n const errMsg = getErrorMessage(error);\n if (errMsg.includes('EADDRINUSE')) {\n printError(`Port ${options.port || 3000} is already in use. Try a different port with --port.`);\n } else {\n printError(`Failed to start server: ${errMsg}`);\n }\n return EXIT_CODES.EXECUTION_ERROR;\n }\n}\n\n// ============================================================================\n// AI Initialization\n// ============================================================================\n\n/**\n * Create an AskAIFunction that wraps the Copilot SDK service.\n *\n * Uses direct sessions (usePool: false) without MCP tools \u2014\n * all wiki context is provided in the prompt by the context builder.\n *\n * @param defaultModel - Default AI model override\n * @param defaultWorkingDirectory - Default working directory for SDK sessions (typically the wiki directory)\n * @returns A function matching the AskAIFunction signature\n */\nasync function createAISendFunction(\n defaultModel?: string,\n defaultWorkingDirectory?: string,\n): Promise<AskAIFunction> {\n const { getCopilotSDKService } = await import('@plusplusoneplusplus/pipeline-core');\n const service = getCopilotSDKService();\n\n // Verify the service is available before returning the function\n const availability = await service.isAvailable();\n if (!availability.available) {\n throw new Error(availability.error || 'Copilot SDK is not available');\n }\n\n return async (prompt: string, options?: { model?: string; workingDirectory?: string; onStreamingChunk?: (chunk: string) => void }): Promise<string> => {\n const result = await service.sendMessage({\n prompt,\n model: options?.model || defaultModel,\n workingDirectory: options?.workingDirectory || defaultWorkingDirectory,\n usePool: false,\n loadDefaultMcpConfig: false,\n onStreamingChunk: options?.onStreamingChunk,\n });\n\n if (!result.success) {\n throw new Error(result.error || 'AI request failed');\n }\n\n return result.response || '';\n };\n}\n\n// ============================================================================\n// Helpers\n// ============================================================================\n\n/**\n * Open the default browser to the given URL.\n * Cross-platform: uses `open` on macOS, `start` on Windows, `xdg-open` on Linux.\n */\nfunction openBrowser(url: string): void {\n const { exec } = require('child_process') as typeof import('child_process');\n\n const platform = process.platform;\n let command: string;\n\n if (platform === 'darwin') {\n command = `open \"${url}\"`;\n } else if (platform === 'win32') {\n command = `start \"\" \"${url}\"`;\n } else {\n command = `xdg-open \"${url}\"`;\n }\n\n exec(command, (error: Error | null) => {\n if (error) {\n printWarning(`Could not open browser automatically. 
Open ${url} manually.`);\n }\n });\n}\n", "/**\n * CLI Argument Parser\n *\n * Defines the CLI commands and options using Commander.\n * Routes parsed arguments to the appropriate command handlers.\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport { Command } from 'commander';\nimport { getErrorMessage } from './utils/error-utils';\nimport { setColorEnabled, setVerbosity, printInfo } from './logger';\nimport { loadConfig, mergeConfigWithCLI, discoverConfigFile } from './config-loader';\n\n// ============================================================================\n// Exit Codes\n// ============================================================================\n\nexport const EXIT_CODES = {\n SUCCESS: 0,\n EXECUTION_ERROR: 1,\n CONFIG_ERROR: 2,\n AI_UNAVAILABLE: 3,\n CANCELLED: 130,\n} as const;\n\n// ============================================================================\n// CLI Setup\n// ============================================================================\n\n/**\n * Create and configure the CLI program\n */\nexport function createProgram(): Command {\n const program = new Command();\n\n program\n .name('deep-wiki')\n .description('Auto-generate comprehensive wikis for any codebase')\n .version('1.0.0');\n\n // ========================================================================\n // deep-wiki seeds <repo-path>\n // ========================================================================\n\n program\n .command('seeds')\n .description('Generate topic seeds for breadth-first discovery')\n .argument('<repo-path>', 'Path to the local git repository')\n .option('-o, --output <path>', 'Output file path', 'seeds.json')\n .option('--max-topics <n>', 'Maximum number of topics to generate', (v: string) => parseInt(v, 10), 50)\n .option('-m, --model <model>', 'AI model to use')\n .option('-v, --verbose', 'Verbose logging', false)\n .option('--no-color', 'Disable colored output')\n .action(async (repoPath: string, opts: Record<string, unknown>) => {\n applyGlobalOptions(opts);\n const { executeSeeds } = await import('./commands/seeds');\n const exitCode = await executeSeeds(repoPath, {\n output: opts.output as string,\n maxTopics: (opts.maxTopics as number) || 50,\n model: opts.model as string | undefined,\n verbose: Boolean(opts.verbose),\n });\n process.exit(exitCode);\n });\n\n // ========================================================================\n // deep-wiki discover <repo-path>\n // ========================================================================\n\n program\n .command('discover')\n .description('Discover module graph for a repository')\n .argument('<repo-path>', 'Path to the local git repository')\n .option('-o, --output <path>', 'Output directory for results', './wiki')\n .option('-m, --model <model>', 'AI model to use')\n .option('-t, --timeout <seconds>', 'Timeout in seconds for discovery', (v: string) => parseInt(v, 10))\n .option('--focus <path>', 'Focus discovery on a specific subtree')\n .option('--seeds <path>', 'Path to seeds file for breadth-first discovery, or \"auto\" to generate')\n .option('--force', 'Ignore cache, regenerate discovery', false)\n .option('--use-cache', 'Use existing cache regardless of git hash', false)\n .option('-v, --verbose', 'Verbose logging', false)\n .option('--no-color', 'Disable colored output')\n .action(async (repoPath: string, opts: Record<string, unknown>) => {\n applyGlobalOptions(opts);\n\n // Lazy-load to avoid loading heavy deps when just checking --help\n const { executeDiscover } = await 
import('./commands/discover');\n const exitCode = await executeDiscover(repoPath, {\n output: opts.output as string,\n model: opts.model as string | undefined,\n timeout: opts.timeout as number | undefined,\n focus: opts.focus as string | undefined,\n seeds: opts.seeds as string | undefined,\n force: Boolean(opts.force),\n useCache: Boolean(opts.useCache),\n verbose: Boolean(opts.verbose),\n });\n process.exit(exitCode);\n });\n\n // ========================================================================\n // deep-wiki generate <repo-path> (stub for future phases)\n // ========================================================================\n\n program\n .command('generate')\n .description('Generate full wiki for a repository (Discovery \u2192 Analysis \u2192 Articles \u2192 Website)')\n .argument('<repo-path>', 'Path to the local git repository')\n .option('-o, --output <path>', 'Output directory for wiki', './wiki')\n .option('-m, --model <model>', 'AI model to use')\n .option('-c, --concurrency <number>', 'Number of parallel AI sessions', (v: string) => parseInt(v, 10))\n .option('-t, --timeout <seconds>', 'Timeout in seconds per phase', (v: string) => parseInt(v, 10))\n .option('--focus <path>', 'Focus on a specific subtree')\n .option('--seeds <path>', 'Path to seeds file for breadth-first discovery, or \"auto\" to generate')\n .option('--depth <level>', 'Article detail level: shallow, normal, deep', 'normal')\n .option('--force', 'Ignore cache, regenerate everything', false)\n .option('--use-cache', 'Use existing cache regardless of git hash', false)\n .option('--phase <number>', 'Start from phase N (uses cached prior phases)', (v: string) => parseInt(v, 10))\n .option('--skip-website', 'Skip website generation (Phase 5)', false)\n .option('--no-cluster', 'Skip module consolidation (keep original granularity)')\n .option('--no-strict', 'Allow partial failures (default: strict, any failure aborts)')\n .option('--theme <theme>', 'Website theme: light, dark, auto', 'auto')\n .option('--title <title>', 'Override project name in website title')\n .option('--config <path>', 'Path to YAML configuration file (deep-wiki.config.yaml)')\n .option('-v, --verbose', 'Verbose logging', false)\n .option('--no-color', 'Disable colored output')\n .action(async (repoPath: string, opts: Record<string, unknown>, cmd: Command) => {\n applyGlobalOptions(opts);\n\n // Build base CLI options\n let cliOptions: import('./types').GenerateCommandOptions = {\n output: opts.output as string,\n model: opts.model as string | undefined,\n concurrency: opts.concurrency as number | undefined,\n timeout: opts.timeout as number | undefined,\n focus: opts.focus as string | undefined,\n seeds: opts.seeds as string | undefined,\n depth: (opts.depth as 'shallow' | 'normal' | 'deep') || 'normal',\n force: Boolean(opts.force),\n useCache: Boolean(opts.useCache),\n phase: opts.phase as number | undefined,\n skipWebsite: Boolean(opts.skipWebsite),\n noCluster: opts.cluster === false,\n strict: opts.strict !== false,\n theme: (opts.theme as 'light' | 'dark' | 'auto') || undefined,\n title: opts.title as string | undefined,\n verbose: Boolean(opts.verbose),\n config: opts.config as string | undefined,\n };\n\n // Load config file if --config is specified, or auto-discover\n const configPath = cliOptions.config || discoverConfigFile(repoPath);\n if (configPath) {\n try {\n const config = loadConfig(configPath);\n // Determine which CLI flags were explicitly set (not defaults)\n const explicitFields = getExplicitFields(cmd, 
opts);\n cliOptions = mergeConfigWithCLI(config, cliOptions, explicitFields);\n if (cliOptions.verbose) {\n printInfo(`Loaded config from ${configPath}`);\n }\n } catch (e) {\n // If --config was explicitly passed, this is a fatal error\n if (opts.config) {\n process.stderr.write(`Error: ${getErrorMessage(e)}\\n`);\n process.exit(EXIT_CODES.CONFIG_ERROR);\n }\n // Auto-discovered config with errors \u2014 warn and continue\n if (cliOptions.verbose) {\n process.stderr.write(`Warning: Ignoring config file: ${getErrorMessage(e)}\\n`);\n }\n }\n }\n\n const { executeGenerate } = await import('./commands/generate');\n const exitCode = await executeGenerate(repoPath, cliOptions);\n process.exit(exitCode);\n });\n\n // ========================================================================\n // deep-wiki serve <wiki-dir>\n // ========================================================================\n\n program\n .command('serve')\n .description('Start an interactive server to explore the wiki')\n .argument('<wiki-dir>', 'Path to the wiki output directory')\n .option('-p, --port <number>', 'Port to listen on', (v: string) => parseInt(v, 10), 3000)\n .option('-H, --host <address>', 'Bind address', 'localhost')\n .option('-g, --generate <repo-path>', 'Generate wiki before serving')\n .option('-w, --watch', 'Watch repo for changes (requires --generate)', false)\n .option('--no-ai', 'Disable AI Q&A and deep-dive features')\n .option('-m, --model <model>', 'AI model for Q&A sessions')\n .option('--open', 'Open browser automatically', false)\n .option('--theme <theme>', 'Website theme: light, dark, auto', 'auto')\n .option('--title <title>', 'Override project name in website title')\n .option('-v, --verbose', 'Verbose logging', false)\n .option('--no-color', 'Disable colored output')\n .action(async (wikiDir: string, opts: Record<string, unknown>) => {\n applyGlobalOptions(opts);\n\n const { executeServe } = await import('./commands/serve');\n const exitCode = await executeServe(wikiDir, {\n port: opts.port as number | undefined,\n host: opts.host as string | undefined,\n generate: opts.generate as string | undefined,\n watch: Boolean(opts.watch),\n ai: Boolean(opts.ai),\n model: opts.model as string | undefined,\n open: Boolean(opts.open),\n theme: opts.theme as string | undefined,\n title: opts.title as string | undefined,\n verbose: Boolean(opts.verbose),\n });\n process.exit(exitCode);\n });\n\n return program;\n}\n\n// ============================================================================\n// Helpers\n// ============================================================================\n\n/**\n * Apply global options (colors, verbosity) based on CLI flags\n */\nfunction applyGlobalOptions(opts: Record<string, unknown>): void {\n // Handle --no-color: commander sets color: false when --no-color is used\n if (opts.color === false) {\n setColorEnabled(false);\n }\n\n // Also respect NO_COLOR env variable\n if (process.env.NO_COLOR !== undefined) {\n setColorEnabled(false);\n }\n\n // Set verbosity\n if (opts.verbose) {\n setVerbosity('verbose');\n }\n}\n\n/**\n * Determine which CLI option fields were explicitly set by the user (not defaults).\n * Uses Commander's internal state to distinguish user-provided values from defaults.\n *\n * @param cmd - The Commander Command instance\n * @param opts - The parsed options object\n * @returns Set of field names that were explicitly provided\n */\nfunction getExplicitFields(cmd: Command, opts: Record<string, unknown>): Set<string> {\n const explicit = new 
Set<string>();\n\n // Commander tracks which options were explicitly set via setOptionValueWithSource\n // We can check the source: 'cli' means user passed it, 'default' means it's a default\n for (const option of cmd.options) {\n const key = option.attributeName();\n const source = cmd.getOptionValueSource(key);\n if (source === 'cli') {\n // Map Commander's attribute names to our field names\n explicit.add(key);\n }\n }\n\n return explicit;\n}\n", "\n/**\n * Deep Wiki Generator CLI Entry Point\n *\n * Standalone CLI tool for auto-generating comprehensive wikis for any codebase.\n * Uses @plusplusoneplusplus/pipeline-core for AI interactions and SDK management.\n *\n * Usage:\n * deep-wiki discover <repo-path> Discover module graph for a repository\n * deep-wiki generate <repo-path> Generate full wiki (stub, future phases)\n *\n * Cross-platform compatible (Linux/Mac/Windows).\n */\n\nimport { createProgram, EXIT_CODES } from './cli';\nimport { printError } from './logger';\n\nasync function main(): Promise<void> {\n try {\n const program = createProgram();\n await program.parseAsync(process.argv);\n } catch (error) {\n if (error instanceof Error) {\n printError(error.message);\n } else {\n printError(String(error));\n }\n process.exit(EXIT_CODES.EXECUTION_ERROR);\n }\n}\n\nmain();\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAGO,SAAS,gBAAgB,OAAwB;AACpD,MAAI,iBAAiB,OAAO;AACxB,WAAO,MAAM;AAAA,EACjB;AACA,SAAO,OAAO,KAAK;AACvB;AARA;AAAA;AAAA;AAAA;AAAA;;;AC2CO,SAAS,gBAAgB,SAAwB;AACpD,iBAAe;AACnB;AASA,SAAS,SAAS,OAAe,MAAsB;AACnD,MAAI,CAAC,cAAc;AAAE,WAAO;AAAA,EAAM;AAClC,SAAO,GAAG,KAAK,GAAG,IAAI,GAAG,OAAO,KAAK;AACzC;AAEO,SAAS,IAAI,MAAsB;AAAE,SAAO,SAAS,OAAO,KAAK,IAAI;AAAG;AACxE,SAAS,MAAM,MAAsB;AAAE,SAAO,SAAS,OAAO,OAAO,IAAI;AAAG;AAC5E,SAAS,OAAO,MAAsB;AAAE,SAAO,SAAS,OAAO,QAAQ,IAAI;AAAG;AAC9E,SAAS,KAAK,MAAsB;AAAE,SAAO,SAAS,OAAO,MAAM,IAAI;AAAG;AAC1E,SAAS,KAAK,MAAsB;AAAE,SAAO,SAAS,OAAO,MAAM,IAAI;AAAG;AAC1E,SAAS,KAAK,MAAsB;AAAE,SAAO,SAAS,OAAO,MAAM,IAAI;AAAG;AAC1E,SAAS,KAAK,MAAsB;AAAE,SAAO,SAAS,OAAO,MAAM,IAAI;AAAG;AAqI1E,SAAS,aAAa,OAA6B;AACtD,cAAY;AAChB;AA2CO,SAAS,aAAa,SAAuB;AAChD,UAAQ,OAAO,MAAM,GAAG,MAAM,QAAQ,OAAO,CAAC,IAAI,OAAO;AAAA,CAAI;AACjE;AAKO,SAAS,WAAW,SAAuB;AAC9C,UAAQ,OAAO,MAAM,GAAG,IAAI,QAAQ,KAAK,CAAC,IAAI,OAAO;AAAA,CAAI;AAC7D;AAKO,SAAS,aAAa,SAAuB;AAChD,UAAQ,OAAO,MAAM,GAAG,OAAO,QAAQ,OAAO,CAAC,IAAI,OAAO;AAAA,CAAI;AAClE;AAKO,SAAS,UAAU,SAAuB;AAC7C,UAAQ,OAAO,MAAM,GAAG,KAAK,QAAQ,IAAI,CAAC,IAAI,OAAO;AAAA,CAAI;AAC7D;AAKO,SAAS,YAAY,OAAqB;AAC7C,UAAQ,OAAO,MAAM;AAAA,EAAK,KAAK,KAAK,CAAC;AAAA,CAAI;AAC7C;AAKO,SAAS,cAAc,KAAa,OAAqB;AAC5D,UAAQ,OAAO,MAAM,KAAK,KAAK,MAAM,GAAG,CAAC,IAAI,KAAK;AAAA,CAAI;AAC1D;AAxRA,IAgBM,QAsBF,cAmCE,WAEO,SAmBA,SAmGT;AAjMJ;AAAA;AAAA;AAgBA,IAAM,SAAS;AAAA,MACX,OAAO;AAAA,MACP,MAAM;AAAA,MACN,KAAK;AAAA,MAEL,KAAK;AAAA,MACL,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,MAAM;AAAA,MACN,SAAS;AAAA,MACT,MAAM;AAAA,MACN,MAAM;AAAA,MAEN,OAAO;AAAA,MACP,SAAS;AAAA,MACT,UAAU;AAAA,IACd;AAMA,IAAI,eAAe;AAmCnB,IAAM,YAAY,QAAQ,aAAa;AAEhC,IAAM,UAAU;AAAA,MACnB,SAAS,YAAY,WAAM;AAAA,MAC3B,OAAO,YAAY,SAAM;AAAA,MACzB,SAAS,YAAY,WAAM;AAAA,MAC3B,MAAM,YAAY,MAAM;AAAA,MACxB,OAAO,YAAY,MAAM;AAAA,MACzB,QAAQ,YAAY,MAAM;AAAA,MAC1B,SAAS,YACH,CAAC,KAAK,KAAK,KAAK,IAAI,IACpB,CAAC,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,QAAG;AAAA,IAC3D;AASO,IAAM,UAAN,MAAc;AAAA,MAMjB,YAAY,UAAkB,IAAI;AALlC,aAAQ,aAAa;AACrB,aAAQ,QAA+C;AAEvD,aAAQ,aAAa;AAGjB,aAAK,WAAW;AAAA,MACpB;AAAA,MAEA,IAAI,YAAqB;AACrB,eAAO,KAAK;AAAA,MAChB;AAAA,MAEA,IAAI,UAAkB;AAClB,eAAO,KAAK;AAAA,MAChB;AAAA;AAAA;AAAA;AAAA,MAKA,MAAM,SAAwB;AAC1B,YAAI,KAAK,YAAY;AAAE,eAAK,KAAK;AAAA,QAAG;AACpC,YAAI,YAAY,QAAW;AAAE,eAAK,WAAW;AAAA,QAAS;AACtD,aAAK,aAAa;AAGlB,YAAI,QAAQ,OAAO,OAAO;AACtB,eAAK,QAAQ,YAAY,MAAM;AAC3B,kBAAM,QAAQ,QAAQ,QAAQ,KAAK,aAAa,QAAQ,QAAQ,MAAM;AACtE,oBAAQ,OAAO,MAAM,KAAK,SAAS,OAAO,MAAM,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;AACzE,iBAAK;AAAA,UACT,GAAG,EAAE;AAAA,QACT,OAAO;AACH,kBAAQ,OAAO,MAAM,GAAG,KAAK,QAAQ;AAAA,CAAI;AAAA,QAC7C;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA,MAKA,OAAO,SAAuB;AAC1B,aAAK,WAAW;AAChB,YAAI,CAAC,QAAQ,OAAO,SAAS,KAAK,YAAY;AAC1C,kBAAQ,OAAO,MAAM,GAAG,OAAO;AAAA,CAAI;AAAA,QACvC;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA,MAKA,KAAK,cAA6B;AAC9B,aAAK,aAAa;AAClB,YAAI,KAAK,OAAO;AACZ,wBAAc,KAAK,KAAK;AACxB,eAAK,QAAQ;AAAA,QACjB;AACA,YAAI,QAAQ,OAAO,OAAO;AACtB,kBAAQ,OAAO,MAAM,UAAU;AAAA,QACnC;AACA,YAAI,cAAc;AACd,kBAAQ,OAAO,MAAM,GAAG,YAAY;AAAA,CAAI;AAAA,QAC5C;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA,MAKA,QAAQ,SAAwB;AAC5B,cAAM,MAAM,WAAW,KAAK;AAC5B,aAAK,KAAK,GAAG,MAAM,QAAQ,OAAO,CAAC,IAAI,GAAG,EAAE;AAAA,MAChD;AAAA;AAAA;AAAA;AAAA,MAKA,KAAK,SAAwB;AACzB,cAAM,MAAM,WAAW,KAAK;AAC5B,aAAK,KAAK,GAAG,IAAI,QAAQ,KAAK,CAAC,IAAI,GAAG,EAAE;AAAA,MAC5C;AAAA;AAAA;AAAA;AAAA,MAKA,KAAK,SAAwB;AACzB,cAAM,MAAM,WAAW,KAAK;AAC5B,aAAK,KAAK,GAAG,OAAO,QAAQ,OAAO,CAAC,IAAI,GAAG,EAAE;AAAA,MACjD;AAAA,IACJ;AAWA,IAAI,YAA4B;AAAA;AAAA;;;AC3JzB,SAAS,WAAW,YAAwC;AAC/D,QAAM,eAAoB,aAAQ,UAAU;AAE5C,MAAI,CAAI,cAAW,YAAY,GAAG;AAC9B,UAAM,IAAI,MAAM,0BAA0B,YAAY,EAAE;A
AAA,EAC5D;AAEA,QAAM,UAAa,gBAAa,cAAc,OAAO;AAErD,MAAI;AACJ,MAAI;AACA,aAAc,UAAK,OAAO;AAAA,EAC9B,SAAS,GAAG;AACR,UAAM,IAAI,MAAM,gCAAgC,gBAAgB,CAAC,CAAC,EAAE;AAAA,EACxE;AAEA,MAAI,WAAW,QAAQ,WAAW,UAAa,OAAO,WAAW,UAAU;AACvE,UAAM,IAAI,MAAM,iDAAiD;AAAA,EACrE;AAEA,SAAO,eAAe,MAAiC;AAC3D;AASO,SAAS,mBAAmB,KAAiC;AAChE,QAAM,aAAa,CAAC,yBAAyB,sBAAsB;AACnE,aAAW,YAAY,YAAY;AAC/B,UAAM,YAAiB,UAAK,KAAK,QAAQ;AACzC,QAAO,cAAW,SAAS,GAAG;AAC1B,aAAO;AAAA,IACX;AAAA,EACJ;AACA,SAAO;AACX;AAwBO,SAAS,mBACZ,QACA,YACA,aACsB;AACtB,QAAM,WAAW,eAAe,oBAAI,IAAY;AAGhD,WAASA,UAAW,OAAe,QAAW,WAA6B;AACvE,QAAI,SAAS,IAAI,KAAK,GAAG;AACrB,aAAO;AAAA,IACX;AACA,WAAO,cAAc,SAAY,YAAY;AAAA,EACjD;AAGA,MAAI;AACJ,MAAI,OAAO,UAAU,WAAW,QAAQ;AACpC,mBAAe,EAAE,GAAG,OAAO,OAAO;AAClC,QAAI,WAAW,QAAQ;AACnB,iBAAW,CAAC,OAAO,SAAS,KAAK,OAAO,QAAQ,WAAW,MAAM,GAAG;AAChE,cAAM,YAAY;AAClB,qBAAa,SAAS,IAAI;AAAA,UACtB,GAAG,aAAa,SAAS;AAAA,UACzB,GAAG;AAAA,QACP;AAAA,MACJ;AAAA,IACJ;AAAA,EACJ;AAEA,SAAO;AAAA,IACH,QAAQA,UAAQ,UAAU,WAAW,QAAQ,OAAO,MAAM;AAAA,IAC1D,OAAOA,UAAQ,SAAS,WAAW,OAAO,OAAO,KAAK;AAAA,IACtD,aAAaA,UAAQ,eAAe,WAAW,aAAa,OAAO,WAAW;AAAA,IAC9E,SAASA,UAAQ,WAAW,WAAW,SAAS,OAAO,OAAO;AAAA,IAC9D,OAAOA,UAAQ,SAAS,WAAW,OAAO,OAAO,KAAK;AAAA,IACtD,OAAOA,UAAQ,SAAS,WAAW,OAAO,OAAO,KAAK;AAAA,IACtD,OAAOA,UAAQ,SAAS,WAAW,OAAO,OAAO,KAAK;AAAA,IACtD,UAAUA,UAAQ,YAAY,WAAW,UAAU,OAAO,QAAQ;AAAA,IAClE,OAAOA,UAAQ,SAAS,WAAW,OAAO,OAAO,KAAK;AAAA,IACtD,SAAS,WAAW;AAAA;AAAA,IACpB,aAAaA,UAAQ,eAAe,WAAW,aAAa,OAAO,WAAW;AAAA,IAC9E,OAAOA,UAAQ,SAAS,WAAW,OAAO,OAAO,KAAiC;AAAA,IAClF,OAAOA,UAAQ,SAAS,WAAW,OAAO,OAAO,KAAK;AAAA,IACtD,OAAOA,UAAQ,SAAS,WAAW,OAAO,OAAO,KAAK;AAAA,IACtD,WAAWA,UAAQ,aAAa,WAAW,WAAW,OAAO,SAAS;AAAA,IACtE,QAAQA,UAAQ,UAAU,WAAW,QAAQ,OAAO,MAAM;AAAA,IAC1D,QAAQ,WAAW;AAAA,IACnB,QAAQ;AAAA,EACZ;AACJ;AAcO,SAAS,kBACZ,SACA,OACkB;AAClB,SAAO,QAAQ,SAAS,KAAK,GAAG,SAAS,QAAQ;AACrD;AAUO,SAAS,oBACZ,SACA,OACkB;AAClB,SAAO,QAAQ,SAAS,KAAK,GAAG,WAAW,QAAQ;AACvD;AAUO,SAAS,wBACZ,SACA,OACkB;AAClB,SAAO,QAAQ,SAAS,KAAK,GAAG,eAAe,QAAQ;AAC3D;AASO,SAAS,kBACZ,SACA,OAC6B;AAC7B,SAAO,QAAQ,SAAS,KAAK,GAAG,SAAS,QAAQ;AACrD;AAUA,SAAS,WAAW,OAAe,QAAyB;AACxD,SAAO,SAAS,GAAG,MAAM,GAAG,KAAK,KAAK,IAAI,KAAK;AACnD;AAEA,SAAS,aAAa,KAA8B,OAAe,QAAiC,QAAuB;AACvH,MAAI,IAAI,KAAK,MAAM,QAAW;AAC1B,QAAI,OAAO,IAAI,KAAK,MAAM,UAAU;AAChC,YAAM,IAAI,MAAM,iBAAiB,WAAW,OAAO,MAAM,CAAC,mBAAmB;AAAA,IACjF;AACA,WAAO,KAAK,IAAI,IAAI,KAAK;AAAA,EAC7B;AACJ;AAEA,SAAS,cAAc,KAA8B,OAAe,QAAiC,QAAuB;AACxH,MAAI,IAAI,KAAK,MAAM,QAAW;AAC1B,QAAI,OAAO,IAAI,KAAK,MAAM,WAAW;AACjC,YAAM,IAAI,MAAM,iBAAiB,WAAW,OAAO,MAAM,CAAC,oBAAoB;AAAA,IAClF;AACA,WAAO,KAAK,IAAI,IAAI,KAAK;AAAA,EAC7B;AACJ;AAEA,SAAS,qBAAqB,KAA8B,OAAe,QAAiC,QAAuB;AAC/H,MAAI,IAAI,KAAK,MAAM,QAAW;AAC1B,QAAI,OAAO,IAAI,KAAK,MAAM,YAAY,CAAC,OAAO,SAAS,IAAI,KAAK,CAAW,KAAM,IAAI,KAAK,IAAe,GAAG;AACxG,YAAM,IAAI,MAAM,iBAAiB,WAAW,OAAO,MAAM,CAAC,4BAA4B;AAAA,IAC1F;AACA,WAAO,KAAK,IAAI,IAAI,KAAK;AAAA,EAC7B;AACJ;AAEA,SAAS,WAAW,KAA8B,OAAe,QAAiC,aAA0B,QAAuB;AAC/I,MAAI,IAAI,KAAK,MAAM,QAAW;AAC1B,QAAI,OAAO,IAAI,KAAK,MAAM,YAAY,CAAC,YAAY,IAAI,IAAI,KAAK,CAAW,GAAG;AAC1E,YAAM,IAAI,MAAM,iBAAiB,WAAW,OAAO,MAAM,CAAC,oBAAoB,CAAC,GAAG,WAAW,EAAE,KAAK,IAAI,CAAC,EAAE;AAAA,IAC/G;AACA,WAAO,KAAK,IAAI,IAAI,KAAK;AAAA,EAC7B;AACJ;AASO,SAAS,eAAe,KAAkD;AAC7E,QAAM,SAA6B,CAAC;AAGpC,eAAa,KAAK,YAAY,MAAM;AACpC,eAAa,KAAK,UAAU,MAAM;AAClC,eAAa,KAAK,SAAS,MAAM;AACjC,eAAa,KAAK,SAAS,MAAM;AACjC,eAAa,KAAK,SAAS,MAAM;AACjC,eAAa,KAAK,SAAS,MAAM;AAGjC,uBAAqB,KAAK,eAAe,MAAM;AAC/C,uBAAqB,KAAK,WAAW,MAAM;AAG3C,MAAI,IAAI,UAAU,QAAW;AACzB,QAAI,OAAO,IAAI,UAAU,YAAY,CAAC,OAAO,UAAU,IAAI,KAAK,KAAK,IAAI,QAAQ,KAAK,IAAI,QAAQ,GAAG;AACjG,YAAM,IAAI,MAAM,0DAA0D;AAAA,IAC9E;AACA,WAAO,QAAQ,IAAI;AAAA,EACvB;AAGA,gBAAc,KAAK,YAAY,M
AAM;AACrC,gBAAc,KAAK,SAAS,MAAM;AAClC,gBAAc,KAAK,aAAa,MAAM;AACtC,gBAAc,KAAK,UAAU,MAAM;AACnC,gBAAc,KAAK,eAAe,MAAM;AAGxC,aAAW,KAAK,SAAS,QAAQ,YAAY;AAC7C,aAAW,KAAK,SAAS,QAAQ,YAAY;AAG7C,MAAI,IAAI,WAAW,QAAW;AAC1B,QAAI,OAAO,IAAI,WAAW,YAAY,IAAI,WAAW,QAAQ,MAAM,QAAQ,IAAI,MAAM,GAAG;AACpF,YAAM,IAAI,MAAM,0CAA0C;AAAA,IAC9D;AAEA,UAAM,SAAuB,CAAC;AAC9B,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,IAAI,MAAiC,GAAG;AAC9E,UAAI,CAAC,kBAAkB,IAAI,GAAG,GAAG;AAC7B,cAAM,IAAI,MAAM,gCAAgC,GAAG,oBAAoB,CAAC,GAAG,iBAAiB,EAAE,KAAK,IAAI,CAAC,EAAE;AAAA,MAC9G;AAEA,UAAI,OAAO,UAAU,YAAY,UAAU,QAAQ,MAAM,QAAQ,KAAK,GAAG;AACrE,cAAM,IAAI,MAAM,wBAAwB,GAAG,oBAAoB;AAAA,MACnE;AAEA,YAAM,WAAW;AACjB,YAAM,cAAuC,CAAC;AAC9C,YAAM,cAAc,UAAU,GAAG;AAEjC,mBAAa,UAAU,SAAS,aAAa,WAAW;AACxD,2BAAqB,UAAU,WAAW,aAAa,WAAW;AAClE,2BAAqB,UAAU,eAAe,aAAa,WAAW;AACtE,iBAAW,UAAU,SAAS,aAAa,cAAc,WAAW;AACpE,oBAAc,UAAU,UAAU,aAAa,WAAW;AAE1D,aAAO,GAAgB,IAAI;AAAA,IAC/B;AAEA,WAAO,SAAS;AAAA,EACpB;AAEA,SAAO;AACX;AAlVA,IAeA,IACA,MACA,MA2MM,cACA,cACA;AA9NN;AAAA;AAAA;AAeA,SAAoB;AACpB,WAAsB;AACtB,WAAsB;AACtB;AA0MA,IAAM,eAAe,oBAAI,IAAI,CAAC,WAAW,UAAU,MAAM,CAAC;AAC1D,IAAM,eAAe,oBAAI,IAAI,CAAC,SAAS,QAAQ,MAAM,CAAC;AACtD,IAAM,oBAAoB,oBAAI,IAAY,CAAC,aAAa,iBAAiB,YAAY,SAAS,CAAC;AAAA;AAAA;;;;;;;;ACtH/F,IAAAC,SAAA,YAAA;AASA,IAAAA,SAAA,YAAAC;AAQA,IAAAD,SAAA,cAAA;AArGA,QAAYE;AAAZ,KAAA,SAAYA,cAAW;AAEnB,MAAAA,aAAA,IAAA,IAAA;AAEA,MAAAA,aAAA,YAAA,IAAA;AAEA,MAAAA,aAAA,UAAA,IAAA;AAEA,MAAAA,aAAA,OAAA,IAAA;AAEA,MAAAA,aAAA,SAAA,IAAA;IACJ,GAXYA,iBAAWF,SAAA,cAAXE,eAAW,CAAA,EAAA;AA0CV,IAAAF,SAAA,gBAAwB;MACjC,OAAO,CAAC,KAAK,QAAQ,QAAQ,MAAM,YAAY,GAAG,KAAK,GAAG,EAAE;MAC5D,MAAM,CAAC,KAAK,QAAQ,QAAQ,IAAI,WAAW,GAAG,KAAK,GAAG,EAAE;MACxD,MAAM,CAAC,KAAK,QAAQ,QAAQ,KAAK,WAAW,GAAG,KAAK,GAAG,EAAE;MACzD,OAAO,CAAC,KAAK,KAAK,QAAQ,QAAQ,MAAM,YAAY,GAAG,KAAK,GAAG,IAAI,OAAO,EAAE;;AAOnE,IAAAA,SAAA,aAAqB;MAC9B,OAAO,MAAK;MAAE;MACd,MAAM,MAAK;MAAE;MACb,MAAM,MAAK;MAAE;MACb,OAAO,MAAK;MAAE;;AAOlB,QAAI,eAAuBA,SAAA;AAoB3B,aAAgB,UAAU,QAAc;AACpC,qBAAe;IACnB;AAOA,aAAgBC,aAAS;AACrB,aAAO;IACX;AAMA,aAAgB,cAAW;AACvB,qBAAeD,SAAA;IACnB;;;;;;;;;;ACZA,IAAAG,SAAA,qBAAA;AA9Fa,IAAAA,SAAA,YAAY;;;;;MAKrB,WAAW;;MAEX,SAAS;;MAET,iBAAiB;;;;;MAMjB,sBAAsB;;MAEtB,0BAA0B;;MAE1B,mBAAmB;;;;;MAMnB,2BAA2B;;MAE3B,wBAAwB;;MAExB,wBAAwB;;MAExB,yBAAyB;;;;;MAMzB,yBAAyB;;MAEzB,uBAAuB;;MAEvB,0BAA0B;;;;;MAM1B,oBAAoB;;MAEpB,mBAAmB;;MAEnB,mBAAmB;;;;;MAMnB,iBAAiB;;MAEjB,gBAAgB;;MAEhB,kBAAkB;;MAElB,0BAA0B;;MAE1B,yBAAyB;;MAEzB,yBAAyB;;;;;MAMzB,gBAAgB;;MAEhB,mBAAmB;;MAEnB,mBAAmB;;;;;MAMnB,SAAS;;AAWb,aAAgB,mBAAmB,UAAgB;AAC/C,cAAQ,UAAU;QACd,KAAK;AACD,iBAAOA,SAAA,UAAU;QACrB,KAAK;QACL,KAAK;AACD,iBAAOA,SAAA,UAAU;QACrB,KAAK;QACL,KAAK;AACD,iBAAOA,SAAA,UAAU;QACrB,KAAK;QACL,KAAK;QACL,KAAK;AACD,iBAAOA,SAAA,UAAU;QACrB;AACI,cAAI,SAAS,WAAW,GAAG,GAAG;AAC1B,mBAAOA,SAAA,UAAU;UACrB;AACA,iBAAOA,SAAA,UAAU;MACzB;IACJ;;;;;;;;;;ACZA,IAAAC,SAAA,sBAAA;AASA,IAAAA,SAAA,sBAAA;AA4CA,IAAAA,SAAA,YAAA;AAqBA,IAAAA,SAAA,uBAAA;AA2BA,IAAAA,SAAA,WAAA;AAlNA,QAAA,gBAAA;AACA,QAAA,WAAA;AAwCA,QAAa,oBAAb,cAAuC,MAAK;MAUxC,YACI,SACA,SAIC;AAED,cAAM,OAAO;AAEb,aAAK,OAAO;AACZ,aAAK,OAAO,SAAS,QAAQ,cAAA,UAAU;AACvC,aAAK,QAAQ,SAAS;AACtB,aAAK,OAAO,SAAS;AAGrB,eAAO,eAAe,MAAM,WAAW,SAAS;MACpD;;;;MAKA,mBAAgB;AACZ,cAAM,QAAQ,CAAC,IAAI,KAAK,IAAI,KAAK,KAAK,OAAO,EAAE;AAE/C,YAAI,KAAK,QAAQ,OAAO,KAAK,KAAK,IAAI,EAAE,SAAS,GAAG;AAChD,gBAAM,KAAK,SAAS,KAAK,UAAU,KAAK,IAAI,CAAC,EAAE;QACnD;AAEA,YAAI,KAAK,iBAAiB,OAAO;AAC7B,gBAAM,KAAK,cAAc,KAAK,MAAM,OAAO,EAAE;QACjD,WAAW,KAAK,UAAU,QAAW;AACjC,gBAAM,KAAK,cAAc,OAAO,KAAK,KAAK,CAAC,EAAE;QACjD;AAEA,eAAO,MAAM,KAAK,IAAI;MAC1B;;;;MAKA,SAAM;AACF,eAAO;UACH,MAAM,KAAK;UACX,MAAM,KAAK;UACX,SAAS,KAAK;UACd,MAAM,KAAK;UACX,OAAO,KAAK,i
BAAiB,QACvB,EAAE,MAAM,KAAK,MAAM,MAAM,SAAS,KAAK,MAAM,QAAO,IACpD,KAAK;UACX,OAAO,KAAK;;MAEpB;;AA9DJ,IAAAA,SAAA,oBAAA;AAoEA,aAAgB,oBAAoB,OAAc;AAC9C,aAAO,iBAAiB;IAC5B;AAOA,aAAgB,oBACZ,OACA,cAA6B,cAAA,UAAU,SACvC,MAAoB;AAGpB,UAAI,oBAAoB,KAAK,GAAG;AAE5B,YAAI,MAAM;AACN,iBAAO,IAAI,kBAAkB,MAAM,SAAS;YACxC,MAAM,MAAM;YACZ,OAAO,MAAM;YACb,MAAM,EAAE,GAAG,MAAM,MAAM,GAAG,KAAI;WACjC;QACL;AACA,eAAO;MACX;AAGA,UAAI,iBAAiB,OAAO;AAExB,cAAM,WAAY,MAAgC;AAClD,cAAM,eAAe,YAAW,GAAA,cAAA,oBAAmB,QAAQ,IAAI;AAE/D,eAAO,IAAI,kBAAkB,MAAM,SAAS;UACxC,MAAM,iBAAiB,cAAA,UAAU,UAAU,eAAe;UAC1D,OAAO;UACP;SACH;MACL;AAGA,YAAM,UAAU,OAAO,UAAU,WAAW,QAAQ,OAAO,KAAK;AAChE,aAAO,IAAI,kBAAkB,SAAS;QAClC,MAAM;QACN,OAAO;QACP;OACH;IACL;AAMA,aAAgB,UACZ,SACA,OACA,MACA,MAAoB;AAGpB,YAAM,aAAa,oBAAoB,KAAK,IAAI,QAAQ;AACxD,YAAM,gBAAgB,QAAQ,YAAY,QAAQ,cAAA,UAAU;AAC5D,YAAM,gBAAgB,QAAQ,YAAY;AAE1C,aAAO,IAAI,kBAAkB,SAAS;QAClC,MAAM;QACN;QACA,MAAM;OACT;IACL;AAKA,aAAgB,qBAAqB,OAAgB,WAAW,GAAC;AAC7D,YAAM,WAAqB,CAAA;AAC3B,UAAI,UAAmB;AACvB,UAAI,QAAQ;AAEZ,aAAO,WAAW,QAAQ,UAAU;AAChC,YAAI,mBAAmB,OAAO;AAC1B,mBAAS,KAAK,QAAQ,OAAO;AAE7B,oBAAW,QAA8B;QAC7C,WAAW,OAAO,YAAY,UAAU;AACpC,mBAAS,KAAK,OAAO;AACrB;QACJ,OAAO;AACH,mBAAS,KAAK,OAAO,OAAO,CAAC;AAC7B;QACJ;AACA;MACJ;AAEA,aAAO,SAAS,KAAK,MAAM;IAC/B;AAMA,aAAgB,SACZ,UACA,SACA,OAAc;AAEd,YAAM,UAAS,GAAA,SAAA,WAAS;AAExB,UAAI,oBAAoB,KAAK,GAAG;AAC5B,cAAM,UAAU;UACZ,IAAI,MAAM,IAAI;UACd,MAAM;;AAGV,YAAI,MAAM,QAAQ,OAAO,KAAK,MAAM,IAAI,EAAE,SAAS,GAAG;AAClD,kBAAQ,KAAK,IAAI,KAAK,UAAU,MAAM,IAAI,CAAC,GAAG;QAClD;AAEA,eAAO,MAAM,UAAU,GAAG,OAAO,KAAK,QAAQ,KAAK,GAAG,CAAC,IAAI,KAAK;MACpE,WAAW,iBAAiB,OAAO;AAC/B,eAAO,MAAM,UAAU,GAAG,OAAO,KAAK,MAAM,OAAO,IAAI,KAAK;MAChE,OAAO;AACH,eAAO,MAAM,UAAU,GAAG,OAAO,KAAK,OAAO,KAAK,CAAC,EAAE;MACzD;IACJ;AAGA,QAAA,gBAAA;AAAS,WAAA,eAAAA,UAAA,aAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,cAAA;IAAS,EAAA,CAAA;AAAiB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,cAAA;IAAkB,EAAA,CAAA;;;;;;;;;;AC/OrD,QAAA,gBAAA;AACI,WAAA,eAAAC,UAAA,aAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,cAAA;IAAS,EAAA,CAAA;AAET,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,cAAA;IAAkB,EAAA,CAAA;AAItB,QAAA,wBAAA;AACI,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,sBAAA;IAAiB,EAAA,CAAA;AAEjB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,sBAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,sBAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,aAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,sBAAA;IAAS,EAAA,CAAA;AACT,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,sBAAA;IAAoB,EAAA,CAAA;AACpB,WAAA,eAAAA,UAAA,YAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,sBAAA;IAAQ,EAAA,CAAA;;;;;;;;;;ACWZ,IAAAC,SAAA,sBAAA;AAkBA,IAAAA,SAAA,mBAAA;AAaA,IAAAA,SAAA,0BAAA;AAxDA,QAAA,WAAA;AAMA,QAAa,oBAAb,cAAuC,SAAA,kBAAiB;MACpD,YAAY,UAAU,uBAAuB,MAAoB;AAC7D,cAAM,SAAS;UACX,MAAM,SAAA,UAAU;UAChB;SACH;AACD,aAAK,OAAO;MAChB;;AAPJ,IAAAA,SAAA,oBAAA;AAmBA,aAAgB,oBAAoB,OAAc;AAC9C,UAAI,iBAAiB,mBAAmB;AACpC,eAAO;MACX;AACA,UAAI,iBAAiB,SAAA,qBAAqB,MAAM,SAAS,SAAA,UAAU,WAAW;AAC1E,eAAO;MACX;AACA,aAAO;IACX;AAUA,aAAgB,iBACZ,aACA,MAAoB;AAEpB,UAAI,cAAa,GAAI;AACjB,cAAM,IAAI,kBAAkB,uBAAuB,IAAI;MAC3D;IACJ;AAMA,aAAgB,wBAAwB,aAA2B;AAI/D,YAAM,KAAoB,gBAAgB,MAAM;AAChD,aAAO;QACH,aAAa;QACb,kBAAkB,CAAC,SAAyB,iBAAiB,IAAI,IAAI;;IAE7E;;;;;;;;;;ACfA,IAAAC,SAAA,cAAA;AAuDA,IAAAA,SAAA,iBAAA;AAcA,IAAAA,SAAA,uBAAA;AAvHA,QAAA,WAAA;AACA,QAAA,iBAAA;AAMA,QAAa,eAAb,cAAkC,SAAA,kBAAiB;MAC/C,YAAY,SAAiB,MAAoB;AAC7C,cAAM,SAAS;UACX,MAAM,SAAA,UAAU;UAChB;SACH;AACD,aAAK,OAAO;MAChB;;AAPJ,IAAAA,SAAA,eAAA;AA2CO,mBAAe,YAClB,IACA,SAAuB;AAEvB,YAAM,EAAE,WAAW,WAAW,aAAa,eAAe,KAAI,IAAK;AAGnE,OAAA,GAAA,eAAA,kBAAiB,a
AAa,IAAI;AAElC,aAAO,IAAI,QAAW,CAACC,WAAS,WAAU;AACtC,YAAI,YAAY;AAChB,YAAI;AAGJ,oBAAY,WAAW,MAAK;AACxB,cAAI,CAAC,WAAW;AACZ,wBAAY;AACZ,wBAAW;AAEX,kBAAM,OAAO,iBAAiB;AAC9B,mBACI,IAAI,aAAa,GAAG,IAAI,oBAAoB,SAAS,MAAM;cACvD,GAAG;cACH;aACH,CAAC;UAEV;QACJ,GAAG,SAAS;AAGZ,WAAE,EACG,KAAK,CAAC,WAAU;AACb,cAAI,CAAC,WAAW;AACZ,wBAAY;AACZ,gBAAI,WAAW;AACX,2BAAa,SAAS;YAC1B;AACA,YAAAA,UAAQ,MAAM;UAClB;QACJ,CAAC,EACA,MAAM,CAAC,UAAS;AACb,cAAI,CAAC,WAAW;AACZ,wBAAY;AACZ,gBAAI,WAAW;AACX,2BAAa,SAAS;YAC1B;AACA,mBAAO,KAAK;UAChB;QACJ,CAAC;MACT,CAAC;IACL;AAKA,aAAgB,eAAe,OAAc;AACzC,UAAI,iBAAiB,cAAc;AAC/B,eAAO;MACX;AACA,UAAI,iBAAiB,SAAA,qBAAqB,MAAM,SAAS,SAAA,UAAU,SAAS;AACxE,eAAO;MACX;AACA,aAAO;IACX;AAMA,aAAgB,qBACZ,WACA,eACA,MAAoB;AAEpB,aAAO,IAAI,QAAQ,CAAC,GAAG,WAAU;AAC7B,mBAAW,MAAK;AACZ,gBAAM,OAAO,iBAAiB;AAC9B,iBACI,IAAI,aAAa,GAAG,IAAI,oBAAoB,SAAS,MAAM;YACvD,GAAG;YACH;WACH,CAAC;QAEV,GAAG,SAAS;MAChB,CAAC;IACL;;;;;;;;;;AC1CA,IAAAC,SAAA,iBAAA;AA+CA,IAAAA,SAAA,YAAA;AA2DA,IAAAA,SAAA,wBAAA;AAvMA,QAAA,WAAA;AACA,QAAA,iBAAA;AACA,QAAA,YAAA;AAKA,QAAa,sBAAb,cAAyC,SAAA,kBAAiB;MACtD,YACI,SACA,OACA,MAAoB;AAEpB,cAAM,SAAS;UACX,MAAM,SAAA,UAAU;UAChB;UACA;SACH;AACD,aAAK,OAAO;MAChB;;AAZJ,IAAAA,SAAA,sBAAA;AAuDa,IAAAA,SAAA,wBAA0H;MACnI,UAAU;MACV,SAAS;MACT,SAAS;MACT,YAAY;;AAMT,QAAM,iBAA4B,CAAC,UAA2B;AAEjE,WAAI,GAAA,eAAA,qBAAoB,KAAK,GAAG;AAC5B,eAAO;MACX;AACA,aAAO;IACX;AANa,IAAAA,SAAA,iBAAc;AAWpB,QAAM,iBAA4B,CAAC,UAA2B;AACjE,WAAI,GAAA,eAAA,qBAAoB,KAAK,GAAG;AAC5B,eAAO;MACX;AACA,cAAO,GAAA,UAAA,gBAAe,KAAK;IAC/B;AALa,IAAAA,SAAA,iBAAc;AAU3B,aAAgB,eACZ,SACA,aACA,SACA,YAAkB;AAElB,UAAI;AAEJ,cAAQ,SAAS;QACb,KAAK;AACD,kBAAQ;AACR;QACJ,KAAK;AACD,kBAAQ,cAAc;AACtB;QACJ,KAAK;QACL;AACI,kBAAQ,cAAc,KAAK,IAAI,GAAG,UAAU,CAAC;AAC7C;MACR;AAEA,aAAO,KAAK,IAAI,OAAO,UAAU;IACrC;AAyBO,mBAAe,UAClB,IACA,SAAsB;AAEtB,YAAM,EACF,WAAWA,SAAA,sBAAsB,UACjC,UAAUA,SAAA,sBAAsB,SAChC,UAAUA,SAAA,sBAAsB,SAChC,aAAaA,SAAA,sBAAsB,YACnC,UAAUA,SAAA,gBACV,WACA,aACA,eACA,KAAI,IACJ,WAAW,CAAA;AAEf,UAAI;AAEJ,eAAS,UAAU,GAAG,WAAW,UAAU,WAAW;AAElD,SAAA,GAAA,eAAA,kBAAiB,aAAa,EAAE,GAAG,MAAM,SAAS,aAAa,SAAQ,CAAE;AAGzE,oBAAY,SAAS,UAAU,SAAS;AAExC,YAAI;AACA,iBAAO,MAAM,GAAE;QACnB,SAAS,OAAO;AACZ,sBAAY;AAGZ,cAAI,CAAC,QAAQ,OAAO,OAAO,GAAG;AAC1B,kBAAM;UACV;AAGA,cAAI,UAAU,UAAU;AACpB,kBAAM,QAAQ,eAAe,SAAS,SAAS,SAAS,UAAU;AAClE,kBAAM,MAAM,KAAK;UACrB;QACJ;MACJ;AAGA,YAAM,OAAO,iBAAiB;AAC9B,YAAM,IAAI,oBACN,GAAG,IAAI,iBAAiB,QAAQ,aAChC,WACA;QACI,GAAG;QACH,SAAS;QACT,aAAa;OAChB;IAET;AAKA,aAAgB,sBAAsB,OAAc;AAChD,UAAI,iBAAiB,qBAAqB;AACtC,eAAO;MACX;AACA,UAAI,iBAAiB,SAAA,qBAAqB,MAAM,SAAS,SAAA,UAAU,iBAAiB;AAChF,eAAO;MACX;AACA,aAAO;IACX;AAKA,aAAS,MAAM,IAAU;AACrB,aAAO,IAAI,QAAQ,CAACC,cAAY,WAAWA,WAAS,EAAE,CAAC;IAC3D;;;;;;;;;;ACxHA,IAAAC,SAAA,gBAAA;AAgEA,IAAAA,SAAA,qBAAA;AA7JA,QAAA,iBAAA;AACA,QAAA,YAAA;AACA,QAAA,UAAA;AA0Ca,IAAAA,SAAA,yBAAiD;MAC1D,gBAAgB;MAChB,eAAe;MACf,cAAc;MACd,SAAS;MACT,iBAAiB;;AA4Cd,mBAAe,cAClB,IACA,SAAuB;AAEvB,YAAM,EACF,WACA,iBAAiB,OACjB,gBAAgB,GAChB,eAAe,KACf,UAAU,eACV,kBAAkB,KAClB,aACA,eACA,KAAI,IACJ,WAAW,CAAA;AAGf,OAAA,GAAA,eAAA,kBAAiB,aAAa,IAAI;AAGlC,YAAM,qBAAqB,YACrB,OACE,GAAA,UAAA,aAAY,IAAI;QACZ;QACA;QACA;QACA;OACe,IACrB;AAGN,UAAI,gBAAgB;AAChB,gBAAO,GAAA,QAAA,WAAU,oBAAoB;UACjC,UAAU;UACV,SAAS;UACT;UACA,YAAY;UACZ;UACA;UACA;SACa;MACrB;AAGA,aAAO,mBAAkB;IAC7B;AAmBA,aAAgB,mBACZ,gBAA6B;AAE7B,aAAO,CAAI,IAAsB,cAC7B,cAAc,IAAI,EAAE,GAAG,gBAAgB,GAAG,UAAS,CAAE;IAC7D;;;;;;;;;;ACnKA,QAAA,iBAAA;AACI,WAAA,eAAAC,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,eAAA;IAAiB,EAAA,CAAA;AAEjB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,eAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,eAAA;IAAgB,
EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,eAAA;IAAuB,EAAA,CAAA;AAI3B,QAAA,YAAA;AACI,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAY,EAAA,CAAA;AAEZ,WAAA,eAAAA,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAW,EAAA,CAAA;AACX,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAoB,EAAA,CAAA;AAIxB,QAAA,UAAA;AACI,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAmB,EAAA,CAAA;AAKnB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,aAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAS,EAAA,CAAA;AACT,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAqB,EAAA,CAAA;AAIzB,QAAA,WAAA;AAEI,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAsB,EAAA,CAAA;AACtB,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAkB,EAAA,CAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC2BtB,IAAAC,SAAA,aAAA;AAeA,IAAAA,SAAA,kBAAA;AAeA,IAAAA,SAAA,aAAA;AA0BA,IAAAA,SAAA,eAAA;AAgCA,IAAAA,SAAA,gBAAA;AAuCA,IAAAA,SAAA,wBAAA;AAuCA,IAAAA,SAAA,cAAA;AAyBA,IAAAA,SAAA,YAAA;AA8BA,IAAAA,SAAA,WAAA;AAsCA,IAAAA,SAAA,YAAA;AAyBA,IAAAA,SAAA,eAAA;AA6BA,IAAAA,SAAA,aAAA;AAqBA,IAAAA,SAAA,aAAA;AAmCA,IAAAA,SAAA,sBAAA;AA3aA,QAAAC,OAAA,aAAA,QAAA,IAAA,CAAA;AACA,QAAAC,SAAA,aAAA,QAAA,MAAA,CAAA;AACA,QAAAC,QAAA,aAAA,QAAA,SAAA,CAAA;AAwDA,aAAgB,WAAW,UAAgB;AACvC,UAAI;AACA,eAAOF,KAAG,WAAW,QAAQ;MACjC,QAAQ;AAEJ,eAAO;MACX;IACJ;AAQA,aAAgB,gBAAgB,SAAe;AAC3C,UAAI;AACA,cAAM,QAAQA,KAAG,SAAS,OAAO;AACjC,eAAO,MAAM,YAAW;MAC5B,QAAQ;AACJ,eAAO;MACX;IACJ;AAQA,aAAgB,WAAW,UAAgB;AACvC,UAAI;AACA,cAAM,QAAQA,KAAG,SAAS,QAAQ;AAClC,eAAO,MAAM,OAAM;MACvB,QAAQ;AACJ,eAAO;MACX;IACJ;AAmBA,aAAgB,aACZ,UACA,UAA2B,CAAA,GAAE;AAE7B,YAAM,EAAE,WAAW,OAAM,IAAK;AAE9B,UAAI;AACA,cAAM,OAAOA,KAAG,aAAa,UAAU,QAAQ;AAC/C,eAAO,EAAE,SAAS,MAAM,KAAI;MAChC,SAAS,OAAO;AACZ,cAAM,MAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AACpE,cAAM,YAAY,iBAAiB,GAAG;AACtC,eAAO,EAAE,SAAS,OAAO,OAAO,KAAK,UAAS;MAClD;IACJ;AAkBA,aAAgB,cACZ,UACA,SACA,UAA4B,CAAA,GAAE;AAE9B,YAAM,EAAE,WAAW,QAAQ,aAAa,KAAI,IAAK;AAEjD,UAAI;AAEA,YAAI,YAAY;AACZ,gBAAM,YAAY,sBAAsBC,OAAK,QAAQ,QAAQ,CAAC;AAC9D,cAAI,CAAC,UAAU,SAAS;AACpB,mBAAO;UACX;QACJ;AAEA,QAAAD,KAAG,cAAc,UAAU,SAAS,QAAQ;AAC5C,eAAO,EAAE,SAAS,KAAI;MAC1B,SAAS,OAAO;AACZ,cAAM,MAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AACpE,cAAM,YAAY,iBAAiB,GAAG;AACtC,eAAO,EAAE,SAAS,OAAO,OAAO,KAAK,UAAS;MAClD;IACJ;AAgBA,aAAgB,sBAAsB,SAAe;AACjD,UAAI;AACA,YAAI,CAACA,KAAG,WAAW,OAAO,GAAG;AACzB,UAAAA,KAAG,UAAU,SAAS,EAAE,WAAW,KAAI,CAAE;QAC7C;AACA,eAAO,EAAE,SAAS,KAAI;MAC1B,SAAS,OAAO;AACZ,cAAM,MAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AACpE,cAAM,YAAY,iBAAiB,GAAG;AACtC,eAAO,EAAE,SAAS,OAAO,OAAO,KAAK,UAAS;MAClD;IACJ;AA4BA,aAAgB,YACZ,SACA,eAAuB;AAEvB,UAAI;AACA,YAAI,eAAe;AACf,gBAAM,UAAUA,KAAG,YAAY,SAAS,EAAE,eAAe,KAAI,CAAE;AAC/D,iBAAO,EAAE,SAAS,MAAM,MAAM,QAAO;QACzC,OAAO;AACH,gBAAM,UAAUA,KAAG,YAAY,OAAO;AACtC,iBAAO,EAAE,SAAS,MAAM,MAAM,QAAO;QACzC;MACJ,SAAS,OAAO;AACZ,cAAM,MAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AACpE,cAAM,YAAY,iBAAiB,GAAG;AACtC,eAAO,EAAE,SAAS,OAAO,OAAO,KAAK,UAAS;MAClD;IACJ;AAQA,aAAgB,UAAU,UAAgB;AACtC,UAAI;AACA,cAAM,QAAQA,KAAG,SAAS,QAAQ;AAClC
,eAAO,EAAE,SAAS,MAAM,MAAM,MAAK;MACvC,SAAS,OAAO;AACZ,cAAM,MAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AACpE,cAAM,YAAY,iBAAiB,GAAG;AACtC,eAAO,EAAE,SAAS,OAAO,OAAO,KAAK,UAAS;MAClD;IACJ;AAqBA,aAAgB,SAAsB,UAAgB;AAClD,YAAM,aAAa,aAAa,QAAQ;AACxC,UAAI,CAAC,WAAW,SAAS;AACrB,eAAO;UACH,SAAS;UACT,OAAO,WAAW;UAClB,WAAW,WAAW;;MAE9B;AAEA,UAAI;AACA,cAAM,SAASE,MAAK,KAAK,WAAW,IAAK;AACzC,eAAO,EAAE,SAAS,MAAM,MAAM,OAAM;MACxC,SAAS,OAAO;AACZ,cAAM,MAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAEpE,cAAM,YAAY,IAAI,MAAM,uBAAuB,QAAQ,KAAK,IAAI,OAAO,EAAE;AAC7E,eAAO,EAAE,SAAS,OAAO,OAAO,WAAW,WAAW,mBAAkB;MAC5E;IACJ;AAmBA,aAAgB,UACZ,UACA,MACA,UAAuB,CAAA,GAAE;AAEzB,YAAM,EAAE,SAAS,GAAG,YAAY,IAAI,SAAS,KAAI,IAAK;AAEtD,UAAI;AACA,cAAM,cAAcA,MAAK,KAAK,MAAM,EAAE,QAAQ,WAAW,OAAM,CAAE;AACjE,eAAO,cAAc,UAAU,WAAW;MAC9C,SAAS,OAAO;AACZ,cAAM,MAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AACpE,cAAM,YAAY,IAAI,MAAM,6BAA6B,IAAI,OAAO,EAAE;AACtE,eAAO,EAAE,SAAS,OAAO,OAAO,WAAW,WAAW,kBAAiB;MAC3E;IACJ;AAUA,aAAgB,aACZ,SACA,UACA,aAAsB,MAAI;AAE1B,UAAI;AACA,YAAI,YAAY;AACZ,gBAAM,YAAY,sBAAsBD,OAAK,QAAQ,QAAQ,CAAC;AAC9D,cAAI,CAAC,UAAU,SAAS;AACpB,mBAAO;UACX;QACJ;AAEA,QAAAD,KAAG,aAAa,SAAS,QAAQ;AACjC,eAAO,EAAE,SAAS,KAAI;MAC1B,SAAS,OAAO;AACZ,cAAM,MAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AACpE,cAAM,YAAY,iBAAiB,GAAG;AACtC,eAAO,EAAE,SAAS,OAAO,OAAO,KAAK,UAAS;MAClD;IACJ;AASA,aAAgB,WACZ,SACA,SAAe;AAEf,UAAI;AACA,QAAAA,KAAG,WAAW,SAAS,OAAO;AAC9B,eAAO,EAAE,SAAS,KAAI;MAC1B,SAAS,OAAO;AACZ,cAAM,MAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AACpE,cAAM,YAAY,iBAAiB,GAAG;AACtC,eAAO,EAAE,SAAS,OAAO,OAAO,KAAK,UAAS;MAClD;IACJ;AASA,aAAgB,WACZ,YACA,UAAoD,CAAA,GAAE;AAEtD,YAAM,EAAE,YAAY,OAAO,QAAQ,MAAK,IAAK;AAE7C,UAAI;AACA,QAAAA,KAAG,OAAO,YAAY,EAAE,WAAW,MAAK,CAAE;AAC1C,eAAO,EAAE,SAAS,KAAI;MAC1B,SAAS,OAAO;AACZ,cAAM,MAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AACpE,cAAM,YAAY,iBAAiB,GAAG;AACtC,eAAO,EAAE,SAAS,OAAO,OAAO,KAAK,UAAS;MAClD;IACJ;AAQA,aAAS,iBAAiB,OAAY;AAElC,YAAM,YAAY;AAClB,aAAO,UAAU,QAAQ;IAC7B;AASA,aAAgB,oBAAoB,WAAmB,SAAgB;AACnE,YAAM,SAAS,UAAU,GAAG,OAAO,OAAO;AAE1C,cAAQ,WAAW;QACf,KAAK;AACD,iBAAO,GAAG,MAAM;QACpB,KAAK;QACL,KAAK;AACD,iBAAO,GAAG,MAAM;QACpB,KAAK;AACD,iBAAO,GAAG,MAAM;QACpB,KAAK;AACD,iBAAO,GAAG,MAAM;QACpB,KAAK;AACD,iBAAO,GAAG,MAAM;QACpB,KAAK;AACD,iBAAO,GAAG,MAAM;QACpB,KAAK;QACL,KAAK;AACD,iBAAO,GAAG,MAAM;QACpB,KAAK;AACD,iBAAO,GAAG,MAAM;QACpB,KAAK;AACD,iBAAO,GAAG,MAAM;QACpB,KAAK;AACD,iBAAO,GAAG,MAAM;QACpB;AACI,iBAAO,GAAG,MAAM;MACxB;IACJ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACtaA,IAAAG,SAAA,OAAA;AAiDA,IAAAA,SAAA,wBAAA;AAzFA,QAAAC,OAAA,aAAA,QAAA,IAAA,CAAA;AACA,QAAAC,SAAA,aAAA,QAAA,MAAA,CAAA;AACA,QAAA,WAAA;AAQA,aAAS,iBAAiB,UAAkB,WAAiB;AACzD,aAAO,SAAS,YAAW,EAAG,SAAS,UAAU,YAAW,CAAE;IAClE;AAQA,aAAS,iBAAiB,SAAe;AAErC,YAAM,QAAQ,QAAQ,MAAM,2BAA2B;AACvD,UAAI,OAAO;AACP,eAAO,MAAM,CAAC;MAClB;AAEA,YAAM,cAAc,QAAQ,MAAM,sBAAsB;AACxD,UAAI,aAAa;AACb,eAAO,YAAY,CAAC;MACxB;AACA,aAAO;IACX;AAQA,aAAgB,KAAK,SAAiB,SAAe;AACjD,YAAM,UAAoB,CAAA;AAC1B,YAAM,UAAS,GAAA,SAAA,WAAS;AAGxB,YAAM,YAAY,iBAAiB,OAAO;AAE1C,eAAS,QAAQ,KAAW;AACxB,YAAI;AACA,gBAAM,UAAUD,KAAG,YAAY,KAAK,EAAE,eAAe,KAAI,CAAE;AAE3D,qBAAW,SAAS,SAAS;AACzB,kBAAM,YAAYC,OAAK,KAAK,KAAK,MAAM,IAAI;AAE3C,gBAAI,MAAM,YAAW,GAAI;AAErB,kBAAI,MAAM,KAAK,WAAW,GAAG,KAAK,MAAM,SAAS,gBAAgB;AAC7D;cACJ;AACA,sBAAQ,SAAS;YACrB,WAAW,MAAM,OAAM,GAAI;AAEvB,kBAAI,WAAW;AAEX,oBAAI,iBAAiB,MAAM,MAAM,SAAS,GAAG;AACzC,0BAAQ,KAAK,SAAS;gBAC1B;cACJ,OAAO;AAEH,wBAAQ,KAAK,SAAS;cAC1B;YACJ;UACJ;QACJ,SAAS,OAAO;AAEZ,iBAAO,KAAK,SAAA,YAAY,OAAO,yBAAyB,GAAG,KAAK,KAAK,EAAE;QAC3E;MACJ;AAEA,cAAQ,OAAO;AACf,aAAO;IACX;AAQA,aAAgB,sBAAsB,KAAa,WAAiB;AAChE,aAAO,KAAK,OAAO,SAAS,IAAI,GAAG;IA
CvC;;;;;;;;;ACtFA,IAAAC,SAAA,YAAAC;AARA,QAAA,kBAAA,QAAA,eAAA;AAQA,aAAgBA,WACZ,SACA,SAAqB;AAErB,aAAO,IAAI,QAAQ,CAACC,WAAS,WAAU;AACnC,cAAM,iBAA8B;UAChC,SAAS;;UACT,WAAW,KAAK,OAAO;;UACvB,GAAG;;AAGP,SAAA,GAAA,gBAAA,MAAK,SAAS,EAAE,GAAG,gBAAgB,UAAU,QAAO,GAAI,CAAC,OAAO,QAAQ,WAAU;AAC9E,cAAI,OAAO;AACP,mBAAO,KAAK;UAChB,OAAO;AACH,YAAAA,UAAQ,EAAE,QAA0B,OAAwB,CAAE;UAClE;QACJ,CAAC;MACL,CAAC;IACL;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACTA,IAAAC,SAAA,UAAA;AA4DA,IAAAA,SAAA,eAAA;AAoCA,IAAAA,SAAA,cAAA;AAjHA,QAAA,QAAA,aAAA,QAAA,OAAA,CAAA;AACA,QAAAC,QAAA,aAAA,QAAA,MAAA,CAAA;AAgBA,aAAgB,QAAQC,MAAa,SAGpC;AACG,aAAO,IAAI,QAAQ,CAACC,WAAS,WAAU;AACnC,cAAM,SAAS,IAAI,IAAID,IAAG;AAC1B,cAAM,UAAU,OAAO,aAAa;AACpC,cAAM,SAAS,UAAU,QAAQD;AAEjC,cAAM,iBAAuC;UACzC,UAAU,OAAO;UACjB,MAAM,OAAO,SAAS,UAAU,MAAM;UACtC,MAAM,OAAO,WAAW,OAAO;UAC/B,QAAQ;UACR,SAAS;YACL,cAAc;YACd,UAAU;YACV,GAAG,SAAS;;UAEhB,SAAS,SAAS,WAAW;;AAGjC,cAAM,MAAM,OAAO,QAAQ,gBAAgB,CAAC,QAAO;AAC/C,cAAI,OAAO;AAEX,cAAI,YAAY,OAAO;AACvB,cAAI,GAAG,QAAQ,CAAC,UAAS;AACrB,oBAAQ;UACZ,CAAC;AAED,cAAI,GAAG,OAAO,MAAK;AACf,YAAAE,UAAQ;cACJ,YAAY,IAAI,cAAc;cAC9B;cACA,SAAS,IAAI;aAChB;UACL,CAAC;QACL,CAAC;AAED,YAAI,GAAG,SAAS,CAAC,UAAS;AACtB,iBAAO,KAAK;QAChB,CAAC;AAED,YAAI,GAAG,WAAW,MAAK;AACnB,cAAI,QAAO;AACX,iBAAO,IAAI,MAAM,mBAAmB,CAAC;QACzC,CAAC;AAED,YAAI,IAAG;MACX,CAAC;IACL;AAUO,mBAAe,aAAaD,MAAa,SAI/C;AACG,YAAM,eAAe,SAAS,gBAAgB;AAC9C,UAAI,aAAaA;AACjB,UAAI,gBAAgB;AAEpB,aAAO,gBAAgB,cAAc;AACjC,cAAM,WAAW,MAAM,QAAQ,YAAY,OAAO;AAGlD,YAAI,SAAS,cAAc,OAAO,SAAS,aAAa,OAAO,SAAS,QAAQ,UAAU;AACtF,uBAAa,SAAS,QAAQ;AAC9B;AACA;QACJ;AAEA,YAAI,SAAS,cAAc,OAAO,SAAS,aAAa,KAAK;AACzD,iBAAO,SAAS;QACpB;AAEA,cAAM,IAAI,MAAM,QAAQ,SAAS,UAAU,KAAK,SAAS,KAAK,UAAU,GAAG,GAAG,CAAC,EAAE;MACrF;AAEA,YAAM,IAAI,MAAM,4BAA4B,YAAY,GAAG;IAC/D;AASO,mBAAe,YAAyBA,MAAa,SAG3D;AACG,YAAM,WAAW,MAAM,QAAQA,MAAK;QAChC,GAAG;QACH,SAAS;UACL,UAAU;UACV,GAAG,SAAS;;OAEnB;AAED,UAAI,SAAS,cAAc,OAAO,SAAS,aAAa,KAAK;AACzD,eAAO,KAAK,MAAM,SAAS,IAAI;MACnC;AAGA,UAAI;AACA,cAAM,YAAY,KAAK,MAAM,SAAS,IAAI;AAC1C,YAAI,UAAU,SAAS;AACnB,gBAAM,IAAI,MAAM,UAAU,OAAO;QACrC;MACJ,QAAQ;MAER;AAEA,YAAM,IAAI,MAAM,QAAQ,SAAS,UAAU,KAAK,SAAS,KAAK,UAAU,GAAG,GAAG,CAAC,EAAE;IACrF;;;;;;;;;;ACvGA,IAAAE,SAAA,WAAA;AAaA,IAAAA,SAAA,sBAAA;AAuCA,IAAAA,SAAA,sBAAA;AAiBA,IAAAA,SAAA,gBAAA;AAUA,IAAAA,SAAA,iBAAA;AAOA,IAAAA,SAAA,gBAAA;AAeA,IAAAA,SAAA,qBAAA;AA4BA,IAAAA,SAAA,qBAAA;AAsCA,IAAAA,SAAA,aAAA;AAmCA,IAAAA,SAAA,iBAAA;AA+EA,IAAAA,SAAA,iBAAA;AApSa,IAAAA,SAAA,8BAAiD;MAC1D,oBAAoB;MACpB,mBAAmB;MACnB,wBAAwB;MACxB,uBAAuB;;AAO3B,aAAgB,SAAS,MAAY;AACjC,UAAI,OAAO;AACX,eAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AAClC,gBAAS,QAAQ,KAAK,OAAQ,KAAK,WAAW,CAAC;AAC/C,eAAO,OAAO;MAClB;AACA,aAAO,KAAK,IAAI,IAAI,EAAE,SAAS,EAAE;IACrC;AAMA,aAAgB,oBAAoB,MAAc,MAAY;AAC1D,YAAM,IAAI,KAAK;AACf,YAAM,IAAI,KAAK;AAGf,UAAI,UAAU,IAAI,MAAM,IAAI,CAAC;AAC7B,UAAI,UAAU,IAAI,MAAM,IAAI,CAAC;AAG7B,eAAS,IAAI,GAAG,KAAK,GAAG,KAAK;AACzB,gBAAQ,CAAC,IAAI;MACjB;AAEA,eAAS,IAAI,GAAG,KAAK,GAAG,KAAK;AACzB,gBAAQ,CAAC,IAAI;AAEb,iBAAS,IAAI,GAAG,KAAK,GAAG,KAAK;AACzB,cAAI,KAAK,IAAI,CAAC,MAAM,KAAK,IAAI,CAAC,GAAG;AAC7B,oBAAQ,CAAC,IAAI,QAAQ,IAAI,CAAC;UAC9B,OAAO;AACH,oBAAQ,CAAC,IAAI,IAAI,KAAK;cAClB,QAAQ,CAAC;;cACT,QAAQ,IAAI,CAAC;;cACb,QAAQ,IAAI,CAAC;;;UAErB;QACJ;AAGA,SAAC,SAAS,OAAO,IAAI,CAAC,SAAS,OAAO;MAC1C;AAEA,aAAO,QAAQ,CAAC;IACpB;AAMA,aAAgB,oBAAoB,MAAc,MAAY;AAC1D,UAAI,SAAS,MAAM;AACf,eAAO;MACX;AACA,UAAI,KAAK,WAAW,KAAK,KAAK,WAAW,GAAG;AACxC,eAAO;MACX;AAEA,YAAM,WAAW,oBAAoB,MAAM,IAAI;AAC/C,YAAM,YAAY,KAAK,IAAI,KAAK,QAAQ,KAAK,MAAM;AAEnD,aAAO,IAAK,WAAW;IAC3B;AAKA,aAAgB,cAAc,MAAY;AACtC,aAAO,KACF,QAAQ,SAAS,IAAI,EACrB,QAAQ,OAAO,IAAI,EACnB,KAAI;IACb;AAKA,aAAgB,eAAe,SAAe;AAC1C,aAAO
,QAAQ,MAAM,OAAO;IAChC;AAKA,aAAgB,cAAc,OAAiB,MAAc,QAAc;AACvE,UAAI,SAAS;AAEb,eAAS,IAAI,GAAG,IAAI,OAAO,KAAK,IAAI,MAAM,QAAQ,KAAK;AACnD,kBAAU,MAAM,CAAC,EAAE,SAAS;MAChC;AAEA,gBAAU,KAAK,IAAI,SAAS,GAAG,MAAM,OAAO,CAAC,GAAG,UAAU,CAAC;AAE3D,aAAO;IACX;AAKA,aAAgB,mBAAmB,SAAiB,QAAc;AAC9D,YAAM,QAAQ,eAAe,OAAO;AACpC,UAAI,gBAAgB;AAEpB,eAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACnC,cAAM,aAAa,MAAM,CAAC,EAAE,SAAS;AAErC,YAAI,gBAAgB,aAAa,QAAQ;AACrC,iBAAO;YACH,MAAM,IAAI;YACV,QAAQ,SAAS,gBAAgB;;QAEzC;AAEA,yBAAiB;MACrB;AAGA,aAAO;QACH,MAAM,MAAM;QACZ,SAAS,MAAM,MAAM,SAAS,CAAC,GAAG,UAAU,KAAK;;IAEzD;AAMA,aAAgB,mBAAmB,SAAiB,YAAkB;AAClE,YAAM,cAAwB,CAAA;AAC9B,UAAI,CAAC,YAAY;AACb,eAAO;MACX;AAEA,UAAI,aAAa;AACjB,aAAO,MAAM;AACT,cAAM,QAAQ,QAAQ,QAAQ,YAAY,UAAU;AACpD,YAAI,UAAU,IAAI;AACd;QACJ;AACA,oBAAY,KAAK,KAAK;AACtB,qBAAa,QAAQ;MACzB;AAEA,aAAO;IACX;AAqBA,aAAgB,WACZ,SACA,aACA,aACA,QACA,SAA4BA,SAAA,6BAA2B;AAGvD,YAAM,qBAAqB,KAAK,IAAI,GAAG,cAAc,OAAO,kBAAkB;AAC9E,YAAM,sBAAsB,QAAQ,UAAU,oBAAoB,WAAW;AAE7E,YAAM,WAAW,cAAc;AAC/B,YAAM,kBAAkB,KAAK,IAAI,QAAQ,QAAQ,WAAW,OAAO,iBAAiB;AACpF,YAAM,qBAAqB,QAAQ,UAAU,UAAU,eAAe;AAGtE,YAAM,mBAAmB,oBACrB,cAAc,OAAO,aAAa,GAClC,cAAc,mBAAmB,CAAC;AAGtC,YAAM,kBAAkB,oBACpB,cAAc,OAAO,YAAY,GACjC,cAAc,kBAAkB,CAAC;AAKrC,aAAQ,mBAAmB,MAAQ,kBAAkB,MAAO;IAChE;AAMA,aAAgB,eACZ,SACA,YACA,WACA,SAA4BA,SAAA,6BAA2B;AAEvD,YAAM,QAAQ,eAAe,OAAO;AACpC,YAAM,uBAAuB,cAAc,UAAU;AAErD,UAAI,CAAC,sBAAsB;AACvB,eAAO;MACX;AAGA,YAAM,UAAU,KAAK,IAAI,GAAG,YAAY,IAAI,OAAO,qBAAqB;AACxE,YAAM,UAAU,KAAK,IAAI,MAAM,SAAS,GAAG,YAAY,IAAI,OAAO,qBAAqB;AAEvF,UAAI,YAA2D;AAG/D,eAAS,UAAU,SAAS,WAAW,SAAS,WAAW;AAEvD,cAAM,cAAc;AACpB,cAAM,YAAY,KAAK,IAAI,SAAS,UAAU,KAAK,KAAK,qBAAqB,MAAM,IAAI,EAAE,MAAM,IAAI,CAAC;AAEpG,YAAI,aAAa;AACjB,YAAI,eAAe;AAEnB,iBAAS,IAAI,GAAG,IAAI,aAAa,KAAK;AAClC,0BAAgB,MAAM,CAAC,EAAE,SAAS;QACtC;AAEA,iBAAS,IAAI,aAAa,KAAK,WAAW,KAAK;AAC3C,cAAI,IAAI,aAAa;AACjB,0BAAc;UAClB;AACA,wBAAc,MAAM,CAAC;QACzB;AAGA,cAAM,mBAAmB,cAAc,UAAU;AAGjD,cAAM,aAAa,iBAAiB,QAAQ,oBAAoB;AAChE,YAAI,eAAe,IAAI;AAEnB,gBAAM,oBAAoB,WAAW,SAAS,WAAW,UAAS,EAAG;AACrE,gBAAM,eAAe,eAAe,aAAa;AAEjD,iBAAO,EAAE,QAAQ,cAAc,YAAY,EAAG;QAClD;AAGA,YAAI,iBAAiB,UAAU,qBAAqB,SAAS,KAAK;AAE9D,mBAAS,IAAI,GAAG,KAAK,iBAAiB,SAAS,KAAK,MAAM,qBAAqB,SAAS,GAAG,GAAG,KAAK;AAC/F,kBAAM,kBAAkB,KAAK,IAAI,qBAAqB,SAAS,KAAK,iBAAiB,SAAS,CAAC;AAC/F,kBAAM,YAAY,iBAAiB,UAAU,GAAG,IAAI,eAAe;AAEnE,kBAAM,aAAa,oBAAoB,sBAAsB,SAAS;AAEtE,gBAAI,cAAc,OAAO,wBAAwB;AAC7C,kBAAI,CAAC,aAAa,aAAa,UAAU,YAAY;AACjD,4BAAY;kBACR,QAAQ,eAAe;kBACvB;;cAER;YACJ;UACJ;QACJ;MACJ;AAEA,aAAO;IACX;AAKA,aAAgB,eACZ,SACA,aACA,WACA,SAA4BA,SAAA,6BAA2B;AAGvD,YAAM,qBAAqB,KAAK,IAAI,GAAG,cAAc,OAAO,kBAAkB;AAC9E,YAAM,gBAAgB,QAAQ,UAAU,oBAAoB,WAAW;AAGvE,YAAM,kBAAkB,KAAK,IAAI,QAAQ,QAAQ,YAAY,OAAO,iBAAiB;AACrF,YAAM,eAAe,QAAQ,UAAU,WAAW,eAAe;AAEjE,aAAO,EAAE,eAAe,aAAY;IACxC;;;;;;;;;ACxOA,IAAAC,SAAA,cAAAC;AAiEA,IAAAD,SAAA,kBAAA;AAxJA,QAAM,wBAAuC,EAAE,MAAM,KAAK,OAAO,KAAK,MAAM,SAAQ;AAGpF,QAAM,uBAAsC,EAAE,MAAM,KAAK,OAAO,KAAK,MAAM,QAAO;AAKlF,aAAS,YAAY,KAAa,QAAqB;AACnD,UAAI,QAAQ;AACZ,iBAAW,QAAQ,KAAK;AACpB,YAAI,SAAS,OAAO;AAAM;iBACjB,SAAS,OAAO;AAAO;AAChC,YAAI,QAAQ;AAAG,iBAAO;MAC1B;AACA,aAAO,UAAU;IACrB;AAKA,aAAS,wBAAwB,KAAa,QAAqB;AAC/D,YAAM,YAAiD,CAAA;AACvD,UAAI,QAAQ;AACZ,UAAI,QAAQ;AAEZ,eAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK;AACjC,YAAI,IAAI,CAAC,MAAM,OAAO,MAAM;AACxB,cAAI,UAAU;AAAG,oBAAQ;AACzB;QACJ,WAAW,IAAI,CAAC,MAAM,OAAO,OAAO;AAChC;AACA,cAAI,UAAU,KAAK,UAAU,IAAI;AAC7B,sBAAU,KAAK,EAAE,OAAO,KAAK,EAAC,CAAE;AAChC,oBAAQ;UACZ;QACJ;MACJ;AACA,aAAO;IACX;AAQA,aAAS,oBACL,MACA,QACA,sBAAqD;AAErD,YAAM,UAAU,IAAI,OAAO,KAAK,OAAO,IAAI,cAAc,OAAO,KAAK,EAAE;AACvE,YAAM,QAAQ,KAAK,MAAM,OAAO;AAChC,UAAI,OAAO;AACP,cAAM,YAAY,MAAM,CAA
C;AACzB,YAAI;AACA,eAAK,MAAM,SAAS;AACpB,iBAAO;QACX,QAAQ;AAEJ,gBAAM,YAAY,wBAAwB,MAAM,MAAM;AACtD,mBAAS,IAAI,UAAU,SAAS,GAAG,KAAK,GAAG,KAAK;AAC5C,kBAAM,EAAC,OAAO,IAAG,IAAI,UAAU,CAAC;AAChC,kBAAM,eAAe,KAAK,UAAU,OAAO,MAAM,CAAC;AAClD,gBAAI;AACA,mBAAK,MAAM,YAAY;AACvB,qBAAO;YACX,QAAQ;AACJ;YACJ;UACJ;AAEA,cAAI,YAAY,WAAW,MAAM,MAAM,CAAC,wBAAwB,qBAAqB,SAAS,IAAI;AAC9F,mBAAO;UACX;QACJ;MACJ;AACA,aAAO;IACX;AAQA,aAAgBC,aAAY,UAAgB;AACxC,UAAI,CAAC,YAAY,OAAO,aAAa,UAAU;AAC3C,eAAO;MACX;AAEA,iBAAW,SAAS,KAAI;AAGxB,YAAM,oBAAoB;QACtB;QACA;QACA;QACA;;AAGJ,iBAAW,WAAW,mBAAmB;AACrC,cAAM,QAAQ,SAAS,MAAM,OAAO;AACpC,YAAI,OAAO;AACP,gBAAM,YAAY,MAAM,CAAC,EAAE,KAAI;AAC/B,cAAI,cAAc,UAAU,WAAW,GAAG,KAAK,UAAU,WAAW,GAAG,IAAI;AACvE,mBAAO;UACX;QACJ;MACJ;AAGA,YAAM,aAAa,SAAS,QAAQ,GAAG;AACvC,YAAM,eAAe,SAAS,QAAQ,GAAG;AAGzC,YAAM,mBAAmB,MAAM,oBAAoB,UAAU,uBAAuB,OAAK,EAAE,SAAS,GAAG,CAAC;AACxG,YAAM,kBAAkB,MAAM,oBAAoB,UAAU,oBAAoB;AAIhF,UAAI,iBAAiB,OAAO,eAAe,MAAM,eAAe,aAAa;AAEzE,cAAM,cAAc,gBAAe;AACnC,YAAI;AAAa,iBAAO;AACxB,cAAM,eAAe,iBAAgB;AACrC,YAAI;AAAc,iBAAO;MAC7B,WAAW,eAAe,IAAI;AAE1B,cAAM,eAAe,iBAAgB;AACrC,YAAI;AAAc,iBAAO;AACzB,cAAM,cAAc,gBAAe;AACnC,YAAI;AAAa,iBAAO;MAC5B;AAGA,YAAM,UAAU,qBAAqB,QAAQ;AAC7C,UAAI,SAAS;AACT,eAAO;MACX;AAEA,aAAO;IACX;AASA,aAAgB,gBACZ,UACA,cAAsB;AAEtB,YAAM,UAAUA,aAAY,QAAQ;AAEpC,UAAI,CAAC,SAAS;AAEV,cAAM,YAAY,iCAAiC,UAAU,YAAY;AACzE,YAAI,WAAW;AACX,iBAAO;QACX;AACA,cAAM,IAAI,MAAM,8BAA8B;MAClD;AAEA,UAAI;AACJ,UAAI;AACA,iBAAS,KAAK,MAAM,OAAO;MAC/B,QAAQ;AAEJ,cAAM,QAAQ,eAAe,OAAO;AACpC,YAAI,OAAO;AACP,cAAI;AACA,qBAAS,KAAK,MAAM,KAAK;UAC7B,QAAQ;AACJ,kBAAM,IAAI,MAAM,gCAAgC,QAAQ,UAAU,GAAG,GAAG,CAAC,KAAK;UAClF;QACJ,OAAO;AACH,gBAAM,IAAI,MAAM,gCAAgC,QAAQ,UAAU,GAAG,GAAG,CAAC,KAAK;QAClF;MACJ;AAGA,UAAI,MAAM,QAAQ,MAAM,GAAG;AACvB,YAAI,OAAO,WAAW,KAAK,OAAO,OAAO,CAAC,MAAM,YAAY,OAAO,CAAC,MAAM,MAAM;AAC5E,mBAAS,OAAO,CAAC;QACrB,OAAO;AACH,gBAAM,MAA+B,CAAA;AACrC,cAAI,gBAAgB;AACpB,qBAAW,QAAQ,QAAQ;AACvB,gBAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC3C,kBAAI,WAAW,QAAQ,WAAW,MAAM;AACpC,oBAAI,OAAO,KAAK,KAAK,CAAC,IAAI,KAAK;AAC/B,gCAAgB;cACpB,WAAW,SAAS,QAAQ,WAAW,MAAM;AACzC,oBAAI,OAAO,KAAK,GAAG,CAAC,IAAI,KAAK;AAC7B,gCAAgB;cACpB;YACJ;UACJ;AACA,cAAI,eAAe;AACf,qBAAS;UACb,OAAO;AACH,kBAAM,IAAI,MAAM,qCAAqC;UACzD;QACJ;MACJ;AAGA,YAAM,SAAkC,CAAA;AACxC,iBAAW,SAAS,cAAc;AAC9B,YAAI,SAAS,QAAQ;AACjB,iBAAO,KAAK,IAAI,YAAY,OAAO,KAAK,CAAC;QAC7C,OAAO;AACH,gBAAM,aAAa,MAAM,YAAW;AACpC,gBAAM,cAAc,OAAO,KAAK,MAAM,EAAE,KAAK,OAAK,EAAE,YAAW,MAAO,UAAU;AAChF,cAAI,aAAa;AACb,mBAAO,KAAK,IAAI,YAAY,OAAO,WAAW,CAAC;UACnD,OAAO;AACH,mBAAO,KAAK,IAAI;UACpB;QACJ;MACJ;AACA,aAAO;IACX;AAMA,aAAS,qBAAqB,UAAgB;AAC1C,YAAM,QAAQ,SAAS,MAAM,IAAI;AACjC,YAAM,UAAkC,CAAA;AACxC,UAAI,WAAW;AAEf,iBAAW,QAAQ,OAAO;AACtB,cAAM,UAAU,KAAK,MAAM,mDAAmD;AAC9E,YAAI,SAAS;AACT,gBAAM,MAAM,QAAQ,CAAC,EAAE,KAAI;AAC3B,cAAI,QAAQ,QAAQ,CAAC,EAAE,KAAI,EAAG,QAAQ,SAAS,EAAE;AACjD,cAAK,MAAM,WAAW,GAAG,KAAK,MAAM,SAAS,GAAG,KAC3C,MAAM,WAAW,GAAG,KAAK,MAAM,SAAS,GAAG,GAAI;AAChD,oBAAQ,MAAM,MAAM,GAAG,EAAE;UAC7B;AACA,kBAAQ,GAAG,IAAI;AACf,qBAAW;QACf;MACJ;AAEA,aAAO,WAAW,KAAK,UAAU,OAAO,IAAI;IAChD;AAKA,aAAS,eAAe,SAAe;AACnC,UAAI;AACA,YAAI,QAAQ,QAAQ,QAAQ,MAAM,GAAG;AACrC,gBAAQ,MAAM,QAAQ,0CAA0C,SAAS;AACzE,gBAAQ,MAAM,QAAQ,gBAAgB,IAAI;AAC1C,gBAAQ,MAAM,QAAQ,eAAe,OAAO;AAC5C,aAAK,MAAM,KAAK;AAChB,eAAO;MACX,QAAQ;AACJ,eAAO;MACX;IACJ;AAKA,aAAS,YAAY,OAAc;AAC/B,UAAI,UAAU,QAAQ,UAAU;AAAW,eAAO;AAClD,UAAI,OAAO,UAAU;AAAU,eAAO;AAEtC,YAAM,MAAO,MAAiB,YAAW,EAAG,KAAI;AAEhD,UAAI,QAAQ,UAAU,QAAQ;AAAO,eAAO;AAC5C,UAAI,QAAQ,WAAW,QAAQ;AAAM,eAAO;AAC5C,UAAI,QAAQ,UAAU,QAAQ,UAAU,QAAQ,SAAS,QAAQ;AAAI,eAAO;AAE5E,UAAI,gBAAgB,KAAK,GAAG,GAAG;AAC3B,cAAM,MAAM,WAAW,GAAG;AAC1B,YAAI,CAAC,MAAM,GAAG;AAAG,iBAAO;MAC5B;AAEA,aA
AO;IACX;AAMA,aAAS,iCAAiC,UAAkB,cAAsB;AAC9E,YAAM,SAAkC,CAAA;AACxC,UAAI,WAAW;AAEf,iBAAW,SAAS,cAAc;AAI9B,cAAM,WAAW;;UAEb,IAAI,OAAO,GAAG,KAAK,gCAAgC,GAAG;;UAEtD,IAAI,OAAO,SAAS,KAAK,wEAAwE,GAAG;;UAEpG,IAAI,OAAO,GAAG,KAAK,kEAAkE,GAAG;;UAExF,IAAI,OAAO,GAAG,KAAK,kEAAkE,GAAG;;UAExF,IAAI,OAAO,GAAG,KAAK,+DAA+D,GAAG;;AAGzF,mBAAW,WAAW,UAAU;AAC5B,gBAAM,QAAQ,SAAS,MAAM,OAAO;AACpC,cAAI,OAAO;AACP,gBAAI,QAAQ,MAAM,CAAC,EAAE,KAAI;AAEzB,oBAAQ,MAAM,QAAQ,UAAU,EAAE,EAAE,QAAQ,gBAAgB,EAAE;AAC9D,mBAAO,KAAK,IAAI,YAAY,KAAK;AACjC,uBAAW;AACX;UACJ;QACJ;AAEA,YAAI,EAAE,SAAS,SAAS;AACpB,iBAAO,KAAK,IAAI;QACpB;MACJ;AAEA,aAAO,WAAW,SAAS;IAC/B;;;;;;;;;;ACpLA,IAAAC,SAAA,wBAAA;AAUA,IAAAA,SAAA,0BAAA;AA5KA,QAAA,kBAAA,QAAA,eAAA;AASA,QAAa,qBAAb,MAA+B;MAI3B,YACI,UACA,SAAsB;AAEtB,aAAK,WAAW,YAAY,QAAQ;AACpC,aAAK,UAAU,WAAW,gBAAA;MAC9B;;;;MAKA,cAAW;AACP,eAAO,KAAK,aAAa;MAC7B;;;;;;MAOA,oBAAoB,cAA0B;AAC1C,YAAI,KAAK,aAAa,SAAS;AAC3B,iBAAO;QACX;AAIA,eAAO,iBAAiB,SAAS,iBAAiB;MACtD;;;;;;;MAQA,MAAM,aAAa,SAA2B;AAE1C,YAAI,CAAC,KAAK,YAAW,GAAI;AACrB,iBAAO;YACH,SAAS;YACT,OAAO,uCAAuC,KAAK,QAAQ;;QAEnE;AAGA,YAAI,CAAC,QAAQ,KAAK;AACd,iBAAO;YACH,SAAS;YACT,OAAO;;QAEf;AAGA,YAAI,QAAQ,WAAW,YAAY,QAAQ,WAAW,YAAY;AAC9D,iBAAO;YACH,SAAS;YACT,OAAO,qCAAqC,QAAQ,MAAM;;QAElE;AAGA,eAAO,KAAK,iBAAiB,QAAQ,KAAK,QAAQ,YAAY;MAClE;;;;;;;MAQQ,MAAM,iBAAiB,KAAa,cAA0B;AAClE,eAAO,IAAI,QAAQ,CAACC,cAAW;AAG3B,gBAAM,YAAY,qDAAqD,GAAG;AAE1E,gBAAM,eAA6B;YAC/B,OAAO;YACP,OAAO,CAAC,QAAQ,QAAQ,MAAM;;AAGlC,cAAI;AACA,kBAAM,QAAQ,KAAK,QAAQ,cAAc,CAAC,YAAY,SAAS,GAAG,YAAY;AAE9E,gBAAI,SAAS;AACb,gBAAI,SAAS;AAEb,kBAAM,QAAQ,GAAG,QAAQ,CAAC,SAAQ;AAC9B,wBAAU,KAAK,SAAQ;YAC3B,CAAC;AAED,kBAAM,QAAQ,GAAG,QAAQ,CAAC,SAAQ;AAC9B,wBAAU,KAAK,SAAQ;YAC3B,CAAC;AAED,kBAAM,GAAG,SAAS,CAAC,SAAQ;AACvB,kBAAI,SAAS,GAAG;AAEZ,sBAAM,SAAS,OAAO,KAAI,EAAG,YAAW;AACxC,oBAAI,WAAW,QAAQ;AACnB,kBAAAA,UAAQ,EAAE,SAAS,KAAI,CAAE;gBAC7B,OAAO;AAEH,kBAAAA,UAAQ;oBACJ,SAAS;oBACT,OAAO,mBAAmB,GAAG;mBAChC;gBACL;cACJ,OAAO;AACH,gBAAAA,UAAQ;kBACJ,SAAS;kBACT,OAAO,uCAAuC,IAAI,KAAK,MAAM;iBAChE;cACL;YACJ,CAAC;AAED,kBAAM,GAAG,SAAS,CAAC,UAAS;AACxB,cAAAA,UAAQ;gBACJ,SAAS;gBACT,OAAO,+BAA+B,MAAM,OAAO;eACtD;YACL,CAAC;UACL,SAAS,OAAO;AACZ,kBAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAC1E,YAAAA,UAAQ;cACJ,SAAS;cACT,OAAO,2BAA2B,YAAY;aACjD;UACL;QACJ,CAAC;MACL;;;;MAKA,cAAW;AACP,eAAO,KAAK;MAChB;;AA9IJ,IAAAD,SAAA,qBAAA;AAoJA,QAAI;AAKJ,aAAgB,wBAAqB;AACjC,UAAI,CAAC,gBAAgB;AACjB,yBAAiB,IAAI,mBAAkB;MAC3C;AACA,aAAO;IACX;AAKA,aAAgB,0BAAuB;AACnC,uBAAiB;IACrB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACtIA,IAAAE,SAAA,iBAAA;AAiDA,IAAAA,SAAA,wBAAA;AAiBA,IAAAA,SAAA,wBAAA;AAmDA,IAAAA,SAAA,kBAAA;AAjKA,QAAA,KAAA,aAAA,QAAA,IAAA,CAAA;AACA,QAAAC,SAAA,aAAA,QAAA,MAAA,CAAA;AACA,QAAAC,OAAA,aAAA,QAAA,IAAA,CAAA;AAIa,IAAAF,SAAA,0BAA0B;AAgB1B,IAAAA,SAAA,4BAA4B;AAG5B,IAAAA,SAAA,qBAAqB;AAmBlC,aAAgB,eAAe,KAAa,UAA0B;AAClE,YAAMG,cAAa,YAAY,QAAQ,cAAc;AAErD,UAAIA,YAAW;AAQX,cAAM,UAAU,IACX,QAAQ,SAAS,KAAK,EACtB,QAAQ,OAAO,EAAE,EACjB,QAAQ,OAAO,KAAK,EACpB,QAAQ,MAAM,IAAI,EAClB,QAAQ,MAAM,IAAI,EAClB,QAAQ,MAAM,IAAI;AAEvB,eAAO,IAAI,OAAO;MACtB,OAAO;AAQH,cAAM,UAAU,IAAI,QAAQ,MAAM,OAAO;AAGzC,eAAO,IAAI,OAAO;MACtB;IACJ;AAgBA,aAAgB,sBAAsB,QAAc;AAChD,UAAI,OAAO,SAASH,SAAA,yBAAyB;AACzC,eAAO;MACX;AACA,aAAOA,SAAA,0BAA0B,KAAK,MAAM;IAChD;AAYA,aAAgB,sBAAsB,QAAc;AAChD,YAAM,SAAS,GAAG,OAAM;AACxB,YAAM,YAAY,KAAK,IAAG;AAC1B,YAAM,eAAe,KAAK,OAAM,EAAG,SAAS,EAAE,EAAE,MAAM,GAAG,CAAC;AAC1D,YAAM,WAAW,kBAAkB,SAAS,IAAI,YAAY;AAC5D,YAAM,WAAWC,OAAK,KAAK,QAAQ,QAAQ;AAC3C,MAAAC,KAAG,cAAc,UAAU,QAAQ,OAAO;AAC1C,aAAO;IACX;AA2CA,aAAgB,gBACZ,MACA,SAAgC;AAEhC,YAAM,cAAc,SAAS,YAAY,YAAY;AACrD,YAAM,EAAE,QAAQ,OAAO,UAAU,gBAAe,IAAK,WAAW,CAAA;AAChE,YAAM,YAAY,QAAQ,YAAY,KAAK,K
AAK;AAGhD,UAAI,iBAAiB;AACjB,eAAO;UACH,SAAS,GAAG,WAAW,IAAIF,SAAA,kBAAkB,GAAG,SAAS,aAAa,eAAe;UACrF,gBAAgB;;MAExB;AAEA,UAAI,CAAC,QAAQ;AACT,eAAO;UACH,SAAS,GAAG,WAAW,IAAIA,SAAA,kBAAkB,GAAG,SAAS;UACzD,gBAAgB;;MAExB;AAGA,UAAI,sBAAsB,MAAM,GAAG;AAE/B,cAAM,eAAe,sBAAsB,MAAM;AAIjD,cAAM,iBAAiB,8BAA8B,YAAY;AACjE,cAAM,kBAAkB,eAAe,gBAAgB,QAAQ;AAE/D,eAAO;UACH,SAAS,GAAG,WAAW,IAAIA,SAAA,kBAAkB,GAAG,SAAS,OAAO,eAAe;UAC/E,gBAAgB;UAChB;;MAER;AAGA,YAAM,gBAAgB,eAAe,QAAQ,QAAQ;AAErD,aAAO;QACH,SAAS,GAAG,WAAW,IAAIA,SAAA,kBAAkB,GAAG,SAAS,OAAO,aAAa;QAC7E,gBAAgB;;IAExB;;;;;;;;;;AC0IA,IAAAI,SAAA,8BAAA;AAUA,IAAAA,SAAA,gCAAA;AA/VA,QAAA,kBAAA,QAAA,eAAA;AAMA,QAAA,cAAA;AAeA,QAAM,mBAAmE;MACrE,QAAQ;QACJ,SAAS;UACL,cAAc;UACd,gBAAgB,CAAC,KAAa,aAAqB;YAC/C,KAAK;YACL,MAAM;cACF;cAAM;cACN;cAAM;cACN;cAAM;cACN;cAAM,kBAAkB,kBAAkB,GAAG,CAAC,OAAO,kBAAkB,OAAO,CAAC;cAC/E;cAAM;cACN;cAAM;;;;QAIlB,gBAAgB;UACZ,cAAc;UACd,gBAAgB,CAAC,KAAa,aAAqB;YAC/C,KAAK;YACL,MAAM;cACF;cAAM;cACN;cAAM,iBAAiB,kBAAkB,GAAG,CAAC,OAAO,kBAAkB,OAAO,CAAC;cAC9E;cAAM;cACN;cAAM;;;;;MAKtB,OAAO;QACH,oBAAoB;UAChB,cAAc;UACd,gBAAgB,CAAC,KAAa,aAAqB;YAC/C,KAAK;YACL,MAAM,CAAC,MAAM,KAAK,OAAO,MAAM,OAAO;;;QAG9C,cAAc;UACV,cAAc;UACd,gBAAgB,CAAC,KAAa,aAAqB;YAC/C,KAAK;YACL,MAAM,CAAC,WAAW,YAAY,iBAAiB,GAAG,MAAM,OAAO,EAAE;;;QAGzE,OAAO;UACH,cAAc;UACd,gBAAgB,CAAC,KAAa,aAAqB;YAC/C,KAAK;YACL,MAAM,CAAC,MAAM,UAAU,GAAG,QAAQ,OAAO,EAAE;;;;MAIvD,OAAO;QACH,kBAAkB;UACd,cAAc;UACd,gBAAgB,CAAC,KAAa,aAAqB;YAC/C,KAAK;YACL,MAAM,CAAC,uBAAuB,KAAK,MAAM,QAAQ,MAAM,GAAG,OAAO,aAAa;;;QAGtF,WAAW;UACP,cAAc;UACd,gBAAgB,CAAC,KAAa,aAAqB;YAC/C,KAAK;YACL,MAAM,CAAC,aAAa,KAAK,MAAM,QAAQ,MAAM,GAAG,OAAO,aAAa;;;QAG5E,kBAAkB;UACd,cAAc;UACd,gBAAgB,CAAC,KAAa,aAAqB;YAC/C,KAAK;YACL,MAAM,CAAC,uBAAuB,KAAK,MAAM,YAAY,OAAO,cAAc;;;QAGlF,SAAS;UACL,cAAc;UACd,gBAAgB,CAAC,KAAa,aAAqB;YAC/C,KAAK;YACL,MAAM,CAAC,MAAM,OAAO,GAAG,QAAQ,OAAO,eAAe;;;;;AASrE,QAAM,4BAA4D;MAC9D,QAAQ,CAAC,SAAS,cAAc;MAChC,OAAO,CAAC,oBAAoB,cAAc,KAAK;MAC/C,OAAO,CAAC,kBAAkB,WAAW,kBAAkB,OAAO;;AAQlE,aAAS,kBAAkB,KAAW;AAClC,aAAO,IACF,QAAQ,OAAO,MAAM,EACrB,QAAQ,MAAM,KAAK;IAC5B;AASA,QAAa,2BAAb,MAAqC;MAQjC,YACI,UACA,YACA,SAAsB;AATlB,aAAA,gBAA4C,oBAAI,IAAG;AAWvD,aAAK,WAAW,YAAY,QAAQ;AACpC,aAAK,aAAa,cAAc,gBAAA;AAChC,aAAK,UAAU,WAAW,gBAAA;MAC9B;;;;MAKA,cAAW;AACP,eAAO,KAAK;MAChB;;;;MAKA,oBAAoB,cAA0B;AAE1C,cAAM,SAAS,KAAK,cAAc,IAAI,YAAY;AAClD,YAAI,WAAW,QAAW;AACtB,iBAAO;QACX;AAEA,cAAM,cAAc,KAAK,eAAc;AACvC,cAAM,UAAU,iBAAiB,WAAW;AAE5C,YAAI,CAAC,WAAW,CAAC,QAAQ,YAAY,GAAG;AACpC,eAAK,cAAc,IAAI,cAAc,KAAK;AAC1C,iBAAO;QACX;AAEA,cAAM,SAAS,QAAQ,YAAY;AAEnC,YAAI;AACA,eAAK,WAAW,OAAO,cAAc;YACjC,UAAU;YACV,OAAO,CAAC,QAAQ,QAAQ,MAAM;YAC9B,SAAS;WACZ;AACD,eAAK,cAAc,IAAI,cAAc,IAAI;AACzC,iBAAO;QACX,QAAQ;AACJ,eAAK,cAAc,IAAI,cAAc,KAAK;AAC1C,iBAAO;QACX;MACJ;;;;MAKA,iBAAc;AACV,cAAM,cAAc,KAAK,eAAc;AACvC,cAAM,kBAAkB,0BAA0B,WAAW;AAE7D,YAAI,CAAC,iBAAiB;AAClB,iBAAO;QACX;AAEA,mBAAW,YAAY,iBAAiB;AACpC,cAAI,KAAK,oBAAoB,QAAQ,GAAG;AACpC,mBAAO;UACX;QACJ;AAEA,eAAO;MACX;;;;MAKA,wBAAqB;AACjB,cAAM,cAAc,KAAK,eAAc;AACvC,cAAM,kBAAkB,0BAA0B,WAAW;AAE7D,YAAI,CAAC,iBAAiB;AAClB,iBAAO,CAAA;QACX;AAEA,eAAO,gBAAgB,OAAO,cAAY,KAAK,oBAAoB,QAAQ,CAAC;MAChF;;;;MAKA,MAAM,OAAO,SAAsC;AAC/C,cAAM,EAAE,kBAAkB,MAAM,eAAe,mBAAmB,OAAO,gBAAe,IAAK;AAG7F,YAAI;AAEJ,YAAI,qBAAqB,KAAK,oBAAoB,iBAAiB,GAAG;AAClE,yBAAe;QACnB,OAAO;AACH,yBAAe,KAAK,eAAc;QACtC;AAEA,YAAI,iBAAiB,WAAW;AAC5B,iBAAO;YACH,SAAS;YACT,cAAc;YACd,OAAO,6CAA6C,KAAK,QAAQ;;QAEzE;AAGA,cAAM,UAAS,GAAA,YAAA,iBAAgB,MAAM,EAAE,QAAQ,eAAe,OAAO,gBAAe,CAAE;AACtF,cAAM,UAAU,OAAO;AAGvB,cAAM,cAAc,KAAK,eAAc;AACvC,cAAM,SAAS,iBAAiB,WAAW,IAAI,YAAY;AAE3D,YAAI,CAAC,QAAQ;AACT,iBAAO;YACH,SAAS;YACT;YACA,OAAO,yCAAyC,YAAY;;QAEpE;AAEA,YAAI;AA
CA,gBAAM,EAAE,KAAK,KAAI,IAAK,OAAO,eAAe,kBAAkB,OAAO;AAErE,gBAAM,eAA6B;YAC/B,UAAU;YACV,OAAO;;YAEP,OAAO,KAAK,aAAa;;AAG7B,gBAAM,QAAQ,KAAK,QAAQ,KAAK,MAAM,YAAY;AAGlD,gBAAM,MAAK;AAEX,iBAAO;YACH,SAAS;YACT;YACA,KAAK,MAAM;;QAEnB,SAAS,OAAO;AACZ,gBAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAC1E,iBAAO;YACH,SAAS;YACT;YACA,OAAO,8BAA8B,YAAY;;QAEzD;MACJ;;;;MAKQ,iBAAc;AAElB,YAAI,KAAK,aAAa,UAAU;AAC5B,iBAAO;QACX,WAAW,KAAK,aAAa,SAAS;AAClC,iBAAO;QACX,OAAO;AAEH,iBAAO;QACX;MACJ;;;;MAKA,aAAU;AACN,aAAK,cAAc,MAAK;MAC5B;;;;MAKA,OAAO,sBAAsB,UAAyB;AAClD,YAAI;AACJ,YAAI,aAAa,UAAU;AACvB,wBAAc;QAClB,WAAW,aAAa,SAAS;AAC7B,wBAAc;QAClB,OAAO;AACH,wBAAc;QAClB;AAEA,eAAO,0BAA0B,WAAW,KAAK,CAAA;MACrD;;AAtMJ,IAAAA,SAAA,2BAAA;AA4MA,QAAI;AAKJ,aAAgB,8BAA2B;AACvC,UAAI,CAAC,iBAAiB;AAClB,0BAAkB,IAAI,yBAAwB;MAClD;AACA,aAAO;IACX;AAKA,aAAgB,gCAA6B;AACzC,wBAAkB;IACtB;;;;;;;;;;AC5Va,IAAAC,SAAA,wBAAwB,KAAK,KAAK;AAKlC,IAAAA,SAAA,2BAA2B;AAS3B,IAAAA,SAAA,yBAAyB;AAKzB,IAAAA,SAAA,0BAA0B;AAS1B,IAAAA,SAAA,uBAAuB;AAKvB,IAAAA,SAAA,0BAA0B;AAK1B,IAAAA,SAAA,uBAAuB;AAKvB,IAAAA,SAAA,8BAA8B;AAK9B,IAAAA,SAAA,6BAA6B;AAS7B,IAAAA,SAAA,yBAAyB;AAKzB,IAAAA,SAAA,6BAA6B;AAK7B,IAAAA,SAAA,yBAA6D;AAK7D,IAAAA,SAAA,oCAAoC;AASpC,IAAAA,SAAA,wBAAwB;AAKxB,IAAAA,SAAA,oBAAoB;AAKpB,IAAAA,SAAA,yBAAyB;AAKzB,IAAAA,SAAA,+BAA+B;AAK/B,IAAAA,SAAA,0BAA0B;AAS1B,IAAAA,SAAA,yBAAyB;AAKzB,IAAAA,SAAA,yBAAyB;AAKzB,IAAAA,SAAA,+BAA+B;AAK/B,IAAAA,SAAA,mCAAmC;AAKnC,IAAAA,SAAA,2BAA2B;AAK3B,IAAAA,SAAA,6BAA6B;AAS7B,IAAAA,SAAA,2BAA2B;AAS3B,IAAAA,SAAA,gCAAgC;AAKhC,IAAAA,SAAA,wBAAwB;AAKxB,IAAAA,SAAA,yBAAyB;;;;;;;;;;AC0GtC,IAAAC,SAAA,oBAAA;AAUA,IAAAA,SAAA,sBAAA;AA7RA,QAAA,kBAAA,QAAA,eAAA;AACA,QAAA,WAAA;AA2CA,QAAA,aAAA;AAAS,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAwB,EAAA,CAAA;AAGjC,QAAA,aAAA;AAQA,QAAa,iBAAb,MAA2B;MAQvB,YAAY,UAAiC,CAAA,GAAE;AANvC,aAAA,oBAAmD,oBAAI,IAAG;AAI1D,aAAA,aAAsB;AAG1B,aAAK,iBAAiB,QAAQ,kBAAkB,WAAA;AAChD,aAAK,WAAW,QAAQ,YAAY,QAAQ;AAC5C,aAAK,aAAa,QAAQ,cAAc,gBAAA;MAC5C;;;;;;;MAQA,iBAAiB,KAAW;AACxB,YAAI,OAAO,GAAG;AACV,iBAAO,EAAE,WAAW,OAAO,OAAO,cAAa;QACnD;AAEA,YAAI;AACA,cAAI,KAAK,aAAa,SAAS;AAC3B,mBAAO,KAAK,oBAAoB,GAAG;UACvC,OAAO;AACH,mBAAO,KAAK,iBAAiB,GAAG;UACpC;QACJ,SAAS,OAAO;AACZ,iBAAO;YACH,WAAW;YACX,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;;QAEpE;MACJ;;;;MAKQ,oBAAoB,KAAW;AACnC,YAAI;AAGA,gBAAM,SAAS,KAAK,WAAW,wBAAwB,GAAG,SAAS;YAC/D,UAAU;YACV,aAAa;YACb,SAAS;WACZ;AAKD,gBAAM,YAAY,CAAC,OAAO,SAAS,sBAAsB,KACrD,OAAO,SAAS,OAAO,GAAG,CAAC;AAE/B,iBAAO,EAAE,UAAS;QACtB,SAAS,OAAO;AAEZ,iBAAO,EAAE,WAAW,MAAK;QAC7B;MACJ;;;;MAKQ,iBAAiB,KAAW;AAChC,YAAI;AAEA,eAAK,WAAW,SAAS,GAAG,IAAI;YAC5B,UAAU;YACV,SAAS;YACT,OAAO,CAAC,QAAQ,QAAQ,MAAM;WACjC;AAGD,iBAAO,EAAE,WAAW,KAAI;QAC5B,QAAQ;AAEJ,iBAAO,EAAE,WAAW,MAAK;QAC7B;MACJ;;;;;;;;MASA,gBAAgB,WAAmB,KAAa,cAAwB;AACpE,YAAI,KAAK,YAAY;AACjB;QACJ;AAEA,YAAI,OAAO,GAAG;AAEV;QACJ;AAGA,aAAK,kBAAkB,IAAI,WAAW,EAAE,KAAK,aAAY,CAAE;AAG3D,YAAI,CAAC,KAAK,iBAAiB,KAAK,kBAAkB,OAAO,GAAG;AACxD,eAAK,aAAY;QACrB;MACJ;;;;;;MAOA,eAAe,WAAiB;AAC5B,aAAK,kBAAkB,OAAO,SAAS;AAGvC,YAAI,KAAK,kBAAkB,SAAS,KAAK,KAAK,eAAe;AACzD,eAAK,YAAW;QACpB;MACJ;;;;MAKA,2BAAwB;AACpB,eAAO,KAAK,kBAAkB;MAClC;;;;MAKA,aAAa,WAAiB;AAC1B,eAAO,KAAK,kBAAkB,IAAI,SAAS;MAC/C;;;;MAKQ,eAAY;AAChB,YAAI,KAAK,iBAAiB,KAAK,YAAY;AACvC;QACJ;AAEA,aAAK,gBAAgB,YAAY,MAAK;AAClC,eAAK,kBAAiB;QAC1B,GAAG,KAAK,cAAc;MAC1B;;;;MAKQ,cAAW;AACf,YAAI,KAAK,eAAe;AACpB,wBAAc,KAAK,aAAa;AAChC,eAAK,gBAAgB;QACzB;MACJ;;;;MAKQ,oBAAiB;AACrB,cAAM,qBAA+B,CAAA;AAErC,mBAAW,CAAC,WAAW,EAAE,KAAK,aAAY,CAAE,KAAK,KAAK,kBAAkB,QAAO,GAAI;AAC/E,gBAAM,SAAS,KAAK,iBAAiB,GAAG;AAExC,cAAI,CAAC,OAAO,WAAW;AACnB,+BAAmB,KAAK,SAAS;AAGjC,gBAAI;AACA,2BAAY;YAChB,SAAS,OAAO;AAEZ,eAAA,GAA
A,SAAA,WAAS,EAAG,MAAM,SAAA,YAAY,OAAO,6CAA6C,SAAS,IAAI,iBAAiB,QAAQ,QAAQ,MAAS;YAC7I;UACJ;QACJ;AAGA,mBAAW,aAAa,oBAAoB;AACxC,eAAK,kBAAkB,OAAO,SAAS;QAC3C;AAGA,YAAI,KAAK,kBAAkB,SAAS,KAAK,KAAK,eAAe;AACzD,eAAK,YAAW;QACpB;MACJ;;;;MAKA,WAAQ;AACJ,aAAK,kBAAiB;MAC1B;;;;MAKA,UAAO;AACH,aAAK,aAAa;AAClB,aAAK,YAAW;AAChB,aAAK,kBAAkB,MAAK;MAChC;;AAjNJ,IAAAA,SAAA,iBAAA;AAuNA,QAAI;AAKJ,aAAgB,oBAAiB;AAC7B,UAAI,CAAC,gBAAgB;AACjB,yBAAiB,IAAI,eAAc;MACvC;AACA,aAAO;IACX;AAKA,aAAgB,sBAAmB;AAC/B,UAAI,gBAAgB;AAChB,uBAAe,QAAO;AACtB,yBAAiB;MACrB;IACJ;;;;;;;;;;ACnMA,IAAAC,SAAA,sBAAA;AAqEA,IAAAA,SAAA,mBAAA;AAsBA,IAAAA,SAAA,eAAA;AAWA,IAAAA,SAAA,oBAAA;AAYA,IAAAA,SAAA,oBAAA;AAnNA,QAAA,WAAA;AAMa,IAAAA,SAAA,0BAA0B;AAa1B,IAAAA,SAAA,oBAAoB,oBAAI,IAAI;MACrC;MACA;MACA;MACA;MACA;MACA;KACH;AA8BD,QAAa,wBAAb,cAA2C,SAAA,kBAAiB;MAIxD,YACI,SACA,cAAqB;AAErB,cAAM,SAAS;UACX,MAAM,SAAA,UAAU;UAChB,MAAM,eAAe,EAAE,aAAY,IAAK;SAC3C;AACD,aAAK,OAAO;AACZ,aAAK,eAAe;MACxB;;AAdJ,IAAAA,SAAA,wBAAA;AAyCA,aAAgB,oBACZ,UACA,WACA,UAAsC,CAAA,GAAE;AAExC,YAAM,EACF,SAAS,OACT,uBAAuB,SACvB,2BAA2B,KAAI,IAC/B;AAGJ,YAAM,QAAQ,IAAI,OAAOA,SAAA,wBAAwB,QAAQ,GAAG;AAE5D,aAAO,SAAS,QAAQ,OAAO,CAAC,OAAO,iBAAwB;AAE3D,YAAI,4BAA4BA,SAAA,kBAAkB,IAAI,YAAY,GAAG;AACjE,iBAAO;QACX;AAGA,YAAI,gBAAgB,WAAW;AAC3B,gBAAM,QAAQ,UAAU,YAAY;AAEpC,cAAI,UAAU,QAAQ,UAAU,QAAW;AACvC,mBAAO;UACX;AAEA,cAAI,OAAO,UAAU,UAAU;AAC3B,mBAAO,KAAK,UAAU,KAAK;UAC/B;AACA,iBAAO,OAAO,KAAK;QACvB;AAGA,YAAI,QAAQ;AACR,gBAAM,IAAI,sBACN,qBAAqB,YAAY,iBACjC,YAAY;QAEpB;AAGA,YAAI,yBAAyB,YAAY;AACrC,iBAAO;QACX;AACA,eAAO;MACX,CAAC;IACL;AAqBA,aAAgB,iBAAiB,UAAkB,iBAA0B,MAAI;AAC7E,YAAM,YAAY,oBAAI,IAAG;AACzB,YAAM,QAAQ,IAAI,OAAOA,SAAA,wBAAwB,QAAQ,GAAG;AAC5D,YAAM,UAAU,SAAS,SAAS,KAAK;AAEvC,iBAAW,SAAS,SAAS;AACzB,cAAM,UAAU,MAAM,CAAC;AAEvB,YAAI,CAAC,kBAAkB,CAACA,SAAA,kBAAkB,IAAI,OAAO,GAAG;AACpD,oBAAU,IAAI,OAAO;QACzB;MACJ;AAEA,aAAO,MAAM,KAAK,SAAS;IAC/B;AAQA,aAAgB,aAAa,UAAgB;AACzC,aAAOA,SAAA,wBAAwB,KAAK,QAAQ;IAChD;AASA,aAAgB,kBAAkB,UAAkB,eAAuB;AACvE,YAAM,QAAQ,iBAAiB,UAAU,KAAK;AAC9C,aAAO,cAAc,KAAK,UAAQ,MAAM,SAAS,IAAI,CAAC;IAC1D;AASA,aAAgB,kBACZ,UACA,WAAkC;AAElC,YAAM,oBAAoB,iBAAiB,UAAU,IAAI;AACzD,YAAM,mBAAmB,kBAAkB,OAAO,OAAK,EAAE,KAAK,UAAU;AAExE,aAAO;QACH,OAAO,iBAAiB,WAAW;QACnC;;IAER;;;;;;;;;;;AC/NA,QAAA,eAAA;AAKI,WAAA,eAAAC,UAAA,cAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAU,EAAA,CAAA;AACV,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,cAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAU,EAAA,CAAA;AACV,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAW,EAAA,CAAA;AACX,WAAA,eAAAA,UAAA,aAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAS,EAAA,CAAA;AACT,WAAA,eAAAA,UAAA,YAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAQ,EAAA,CAAA;AACR,WAAA,eAAAA,UAAA,aAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAS,EAAA,CAAA;AACT,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA,cAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAU,EAAA,CAAA;AACV,WAAA,eAAAA,UAAA,cAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAU,EAAA,CAAA;AACV,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAmB,EAAA,CAAA;AAIvB,QAAA,eAAA;AACI,WAAA,eAAAA,UAAA,QAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAI,EAAA,CAAA;AACJ,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAqB,EAAA,CAAA;AAIzB,QAAA,eAAA;AACI,WAAA,eAAAA,UAAA,aAAA,EAAA,
YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAS,EAAA,CAAA;AAIb,QAAA,eAAA;AAEI,WAAA,eAAAA,UAAA,WAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAO,EAAA,CAAA;AACP,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAW,EAAA,CAAA;AAIf,QAAA,kBAAA;AAEI,WAAA,eAAAA,UAAA,+BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAA2B,EAAA,CAAA;AAE3B,WAAA,eAAAA,UAAA,YAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAQ,EAAA,CAAA;AACR,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,cAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAU,EAAA,CAAA;AACV,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAc,EAAA,CAAA;AAIlB,QAAA,uBAAA;AACI,WAAA,eAAAA,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,qBAAA;IAAW,EAAA,CAAA;AACX,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,qBAAA;IAAe,EAAA,CAAA;AAcnB,QAAA,yBAAA;AACI,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,uBAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,uBAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,uBAAA;IAAuB,EAAA,CAAA;AAI3B,QAAA,+BAAA;AACI,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,6BAAA;IAAwB,EAAA,CAAA;AACxB,WAAA,eAAAA,UAAA,+BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,6BAAA;IAA2B,EAAA,CAAA;AAC3B,WAAA,eAAAA,UAAA,iCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,6BAAA;IAA6B,EAAA,CAAA;AAIjC,QAAA,oBAAA;AAII,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAwB,EAAA,CAAA;AAI5B,QAAA,oBAAA;AACI,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAiB,EAAA,CAAA;AAEjB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAiB,EAAA,CAAA;;;;;;;;;;AC9GrB,QAAA,aAAA;AAEI,WAAA,eAAAC,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAwB,EAAA,CAAA;AAExB,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAsB,EAAA,CAAA;AACtB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAuB,EAAA,CAAA;AAEvB,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAoB,EAAA,CAAA;AACpB,WAA
A,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAoB,EAAA,CAAA;AACpB,WAAA,eAAAA,UAAA,+BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAA2B,EAAA,CAAA;AAC3B,WAAA,eAAAA,UAAA,8BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAA0B,EAAA,CAAA;AAE1B,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAsB,EAAA,CAAA;AACtB,WAAA,eAAAA,UAAA,8BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAA0B,EAAA,CAAA;AAC1B,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAsB,EAAA,CAAA;AACtB,WAAA,eAAAA,UAAA,qCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAiC,EAAA,CAAA;AAEjC,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAsB,EAAA,CAAA;AACtB,WAAA,eAAAA,UAAA,gCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAA4B,EAAA,CAAA;AAC5B,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAuB,EAAA,CAAA;AAEvB,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAsB,EAAA,CAAA;AACtB,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAsB,EAAA,CAAA;AACtB,WAAA,eAAAA,UAAA,gCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAA4B,EAAA,CAAA;AAC5B,WAAA,eAAAA,UAAA,oCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAgC,EAAA,CAAA;AAChC,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAwB,EAAA,CAAA;AACxB,WAAA,eAAAA,UAAA,8BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAA0B,EAAA,CAAA;AAE1B,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAwB,EAAA,CAAA;AAExB,WAAA,eAAAA,UAAA,iCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAA6B,EAAA,CAAA;AAC7B,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAsB,EAAA,CAAA;;;;;;;;;;ACqF1B,IAAAC,SAAA,gBAAA;AAQA,IAAAA,SAAA,sBAAA;AAQA,IAAAA,SAAA,qBAAA;AAOA,IAAAA,SAAA,eAAA;AAOA,IAAAA,SAAA,kBAAA;AAOA,IAAAA,SAAA,iBAAA;AAOA,IAAAA,SAAA,gBAAA;AAOA,IAAAA,SAAA,kBAAA;AApIA,QAAM,oBAAgD;MAClD;QACI,IAAI;QACJ,OAAO;QACP,aAAa;QACb,MAAM;;MAEV;QACI,IAAI;QACJ,OAAO;QACP,aAAa;QACb,MAAM;;MAEV;QACI,IAAI;QACJ,OAAO;QACP,aAAa;QACb,MAAM;;MAEV;QACI,IAAI;QACJ,OAAO;QACP,aAAa;QACb,MAAM;;MAEV;QACI,IAAI;QACJ,OAAO;QACP,aAAa;QACb,MAAM;;MAEV;QACI,IAAI;QACJ,OAAO;QACP,aAAa;QACb,MAAM;;;AAOD,IAAAA,SAAA,iBAAuD,IAAI,IACpE,kBAAkB,IAAI,OAAK,CAAC,EAAE,IAAI,CAAC,CAAC,CAAC;AAW5B,IAAAA,SAAA,eAAe,kBAAkB,IAAI,OAAK,EAAE,EAAE;AAiB9C,IAAAA,SAAA,mBAA4B,kBAAkB,CAAC,EAAE;AAU9D,aAAgB,cAAc,SAAe;AACzC,aAAOA,SAAA,eAAe,IAAI,OAAO,GAAG,SAAS;IACjD;AAMA,aAAgB,oBAAoB,SAAe;AAC/C,aAAOA,SAAA,eAAe,IAAI,OAAO,GAAG,eAAe;IACvD;AAMA,aAAgB,mBAAmB,SAAe;AAC9C,aAAOA,SAAA,eAAe,IAAI,OAAO;IACrC;AAKA,aAAgB,eAAY;AACxB,aAAO;IACX;AAKA,aAAgB,kBAAe;AAC3B,aAAO,kBAAkB,OAAO,OAAK,CAAC,EAAE,UAAU;IACtD;AAKA,aAAgB,eAAe,IAAU;AACrC,aAAOA,SAAA,eAAe,IAAI,EAAE;IAChC;AAKA,aAAgB,gBAAa;AACzB,aAAO,kBAAkB;IAC7B;AAKA,aAAgB,gBAAgB,MAA6B;AACzD,aAAO,kBAAkB,OAAO,OAAK,EAAE,SAAS,IAAI;IACxD;;;;;;;;;;AClKA,QAAA,mBAAA;AAAS,WAAA,eAAAC,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAY,EAAA,CAAA;AAAW,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAgB,EAAA,CAAA;AAGhD,QAAA,mBAAA;AAEI,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KA
AA,WAAA;AAAA,aAAA,iBAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAe,EAAA,CAAA;AAkBN,IAAAA,SAAA,kBAAkB;MAC3B,SAAS;;;;;;;;;MAST,UAAU;;;;;;;;;;;;MAYV,eAAe;;;;;;;;;;;AC+CnB,IAAAC,SAAA,mBAAA;AAcA,IAAAA,SAAA,oBAAA;AA9HA,QAAA,UAAA;AAuDa,IAAAA,SAAA,sBAAmC;MAC5C;QACI,IAAI;QACJ,OAAO;QACP,MAAM;QACN,QAAQ,QAAA,gBAAgB;QACxB,OAAO;QACP,aAAa;QACb,eAAe;;MAEnB;QACI,IAAI;QACJ,OAAO;QACP,MAAM;QACN,QAAQ,QAAA,gBAAgB;QACxB,OAAO;QACP,aAAa;QACb,eAAe;;MAEnB;QACI,IAAI;QACJ,OAAO;QACP,MAAM;QACN,QAAQ,QAAA,gBAAgB;QACxB,OAAO;QACP,eAAe;QACf,eAAe;;;AA+BvB,aAAgB,iBAAiB,SAAkB;AAC/C,aAAO;QACH,IAAI,QAAQ;QACZ,OAAO,QAAQ;QACf,MAAM,QAAQ;QACd,OAAO,QAAQ;QACf,eAAe,QAAQ;QACvB,QAAQ,QAAQ;;IAExB;AAKA,aAAgB,kBAAkB,UAAqB;AACnD,aAAO,SAAS,IAAI,gBAAgB;IACxC;;;;;;;;;AC1FA,IAAAC,SAAA,4BAAA;AA4BA,IAAAA,SAAA,yBAAA;AAwBA,IAAAA,SAAA,wBAAA;AAQA,IAAAA,SAAA,wBAAA;AAlGA,QAAA,oBAAA;AAsBA,QAAM,wBAAwB,CAAC,aAAa,QAAQ,WAAW,WAAW,UAAU;AAgBpF,aAAgB,0BAA0B,UAAkB,SAAsB;AAE9E,YAAM,YAAoC;QACtC,WAAW,QAAQ;QACnB,MAAM,QAAQ;QACd,SAAS,QAAQ,kBAAkB;QACnC,SAAS,QAAQ,sBAAsB;QACvC,UAAU,QAAQ,UAAU,KAAK,IAAI,KAAK;;AAG9C,cAAO,GAAA,kBAAA,qBAAoB,UAAU,WAAW;QAC5C,QAAQ;QACR,sBAAsB;QACtB,0BAA0B;OAC7B;IACL;AAaA,aAAgB,uBACZ,gBACA,SACA,sBAA+B,OAAK;AAGpC,UAAI,SAAS,0BAA0B,gBAAgB,OAAO;AAI9D,UAAI,CAAC,OAAO,SAAS,IAAI,GAAG;AAExB,YAAI,qBAAqB;AACrB,iBAAO,GAAG,MAAM,MAAM,QAAQ,YAAY,iBAAiB,QAAQ,QAAQ;QAC/E;AACA,eAAO,GAAG,MAAM,KAAK,QAAQ,YAAY,iBAAiB,QAAQ,QAAQ;MAC9E;AAEA,aAAO;IACX;AAKA,aAAgB,sBAAsB,UAAgB;AAClD,YAAM,UAAU,IAAI,OAAO,UAAU,sBAAsB,KAAK,GAAG,CAAC,SAAS;AAC7E,aAAO,QAAQ,KAAK,QAAQ;IAChC;AAKA,aAAgB,wBAAqB;AACjC,aAAO;QACH,EAAE,MAAM,iBAAiB,aAAa,oBAAmB;QACzD,EAAE,MAAM,YAAY,aAAa,+BAA8B;QAC/D,EAAE,MAAM,eAAe,aAAa,0CAAyC;QAC7E,EAAE,MAAM,eAAe,aAAa,kCAAiC;QACrE,EAAE,MAAM,gBAAgB,aAAa,0CAAyC;;IAEtF;;;;;;;;;ACxFA,IAAAC,SAAA,qBAAA;AA0DA,IAAAA,SAAA,0BAAA;AAoBA,IAAAA,SAAA,qBAAA;AAhGA,QAAA,kBAAA,QAAA,eAAA;AACA,QAAA,WAAA;AAGA,QAAM,qBAAqB,oBAAI,IAAG;AAclC,aAAgB,mBACZ,aACA,UAA0B;AAG1B,YAAM,WAAW,GAAG,WAAW,IAAI,YAAY,QAAQ,QAAQ;AAG/D,YAAM,SAAS,mBAAmB,IAAI,QAAQ;AAC9C,UAAI,WAAW,QAAW;AACtB,eAAO;MACX;AAEA,YAAMC,cAAa,YAAY,QAAQ,cAAc;AACrD,YAAM,eAAeA,aAAY,SAAS,WAAW,KAAK,SAAS,WAAW;AAE9E,UAAI;AAEJ,YAAM,UAAS,GAAA,SAAA,WAAS;AAExB,UAAI;AACA,cAAM,UAAS,GAAA,gBAAA,UAAS,cAAc;UAClC,UAAU;UACV,OAAO,CAAC,QAAQ,QAAQ,MAAM;UAC9B,SAAS;;SACZ;AAGD,cAAM,cAAc,OAAO,KAAI,EAAG,MAAM,IAAI,EAAE,CAAC,EAAE,KAAI;AAErD,iBAAS;UACL,QAAQ;UACR,MAAM;;AAGV,eAAO,MAAM,gBAAgB,YAAY,WAAW,eAAe,WAAW,EAAE;MACpF,SAAS,OAAO;AAEZ,cAAM,WAAW,IAAI,WAAW;AAChC,iBAAS;UACL,QAAQ;UACR,OAAO;;AAGX,eAAO,MAAM,gBAAgB,YAAY,WAAW,gBAAgB,QAAQ,EAAE;MAClF;AAGA,yBAAmB,IAAI,UAAU,MAAM;AACvC,aAAO;IACX;AAQA,aAAgB,wBAAwB,aAAoB;AACxD,UAAI,aAAa;AAEb,mBAAW,OAAO,mBAAmB,KAAI,GAAI;AACzC,cAAI,IAAI,WAAW,GAAG,WAAW,GAAG,GAAG;AACnC,+BAAmB,OAAO,GAAG;UACjC;QACJ;MACJ,OAAO;AACH,2BAAmB,MAAK;MAC5B;IACJ;AASA,aAAgB,mBAAmB,QAAc;AAC7C,YAAM,QAAQ,OAAO,MAAM,IAAI;AAC/B,YAAM,cAAwB,CAAA;AAC9B,UAAI,YAAY;AAEhB,iBAAW,QAAQ,OAAO;AAEtB,cAAM,YAAY,KAAK,QAAQ,mBAAmB,EAAE,EAAE,KAAI;AAG1D,YAAI,CAAC,aAAa,cAAc,IAAI;AAChC;QACJ;AAIA,YAAI,UAAU,WAAW,QAAG,KACxB,UAAU,WAAW,QAAG,KACxB,UAAU,WAAW,QAAG,KACxB,UAAU,WAAW,QAAG,GAAG;AAC3B;QACJ;AAGA,YAAI,UAAU,WAAW,iBAAiB,KACtC,UAAU,SAAS,YAAY,KAC/B,UAAU,WAAW,QAAQ,KAC7B,UAAU,WAAW,UAAU,GAAG;AAClC;QACJ;AAGA,YAAI,UAAU,MAAM,mDAAmD,GAAG;AACtE;QACJ;A
AGA,YAAI,UAAU,WAAW,aAAa,KAClC,UAAU,WAAW,gBAAgB,KACrC,UAAU,WAAW,oBAAoB,KACzC,UAAU,WAAW,gBAAgB,GAAG;AACxC;QACJ;AAGA,oBAAY;AACZ,oBAAY,KAAK,SAAS;MAC9B;AAGA,aAAO,YAAY,SAAS,KAAK,YAAY,YAAY,SAAS,CAAC,MAAM,IAAI;AACzE,oBAAY,IAAG;MACnB;AAEA,aAAO,YAAY,KAAK,IAAI,EAAE,KAAI;IACtC;;;;;;;;;ACgIA,IAAAC,SAAA,mBAAA;AA8BA,IAAAA,SAAA,qBAAA;AA9BA,aAAgB,iBAAiBC,UAAkD;AAC/E,aAAO;QACH,IAAIA,SAAQ;QACZ,MAAMA,SAAQ;QACd,eAAeA,SAAQ;QACvB,YAAYA,SAAQ;QACpB,QAAQA,SAAQ;QAChB,WAAWA,SAAQ,UAAU,YAAW;QACxC,SAASA,SAAQ,SAAS,YAAW;QACrC,OAAOA,SAAQ;QACf,QAAQA,SAAQ;QAChB,gBAAgBA,SAAQ;QACxB,mBAAmBA,SAAQ;QAC3B,UAAUA,SAAQ;QAClB,eAAeA,SAAQ;QACvB,oBAAoBA,SAAQ;QAC5B,mBAAmBA,SAAQ;QAC3B,yBAAyBA,SAAQ;QACjC,kBAAkBA,SAAQ;QAC1B,iBAAiBA,SAAQ;;QAEzB,cAAcA,SAAQ;QACtB,SAASA,SAAQ;QACjB,kBAAkBA,SAAQ;;IAElC;AAKA,aAAgB,mBAAmB,YAA+B;AAC9D,aAAO;QACH,IAAI,WAAW;QACf,MAAM,WAAW,QAAQ;QACzB,eAAe,WAAW;QAC1B,YAAY,WAAW;QACvB,QAAQ,WAAW;QACnB,WAAW,IAAI,KAAK,WAAW,SAAS;QACxC,SAAS,WAAW,UAAU,IAAI,KAAK,WAAW,OAAO,IAAI;QAC7D,OAAO,WAAW;QAClB,QAAQ,WAAW;QACnB,gBAAgB,WAAW;QAC3B,mBAAmB,WAAW;QAC9B,UAAU,WAAW;QACrB,eAAe,WAAW;QAC1B,oBAAoB,WAAW;QAC/B,mBAAmB,WAAW;QAC9B,yBAAyB,WAAW;QACpC,kBAAkB,WAAW;QAC7B,iBAAiB,WAAW;;QAE5B,cAAc,WAAW;QACzB,SAAS,WAAW;QACpB,kBAAkB,WAAW;;IAErC;;;;;;;;;;ACxUA,QAAA,WAAA;AAoGA,QAAa,cAAb,MAAa,aAAW;;;;;;;MA4BpB,YAAY,gBAAgC,UAA8B,CAAA,GAAE;AA3B3D,aAAA,WAAuC,oBAAI,IAAG;AAC9C,aAAA,UAA2B,CAAA;AAOpC,aAAA,WAAW;AAoBf,aAAK,iBAAiB;AACtB,aAAK,cAAc,QAAQ,eAAe,aAAY;AACtD,aAAK,gBAAgB,QAAQ,iBAAiB,aAAY;AAC1D,aAAK,cAAc,QAAQ,eAAe,aAAY;AACtD,aAAK,oBAAoB,QAAQ,qBAAqB,aAAY;AAGlE,YAAI,KAAK,cAAc,GAAG;AACtB,gBAAM,IAAI,MAAM,gCAAgC;QACpD;AACA,YAAI,KAAK,cAAc,KAAK,aAAa;AACrC,gBAAM,IAAI,MAAM,uCAAuC;QAC3D;AACA,YAAI,KAAK,gBAAgB,GAAG;AACxB,gBAAM,IAAI,MAAM,kCAAkC;QACtD;AAGA,aAAK,kBAAiB;AAEtB,cAAM,UAAS,GAAA,SAAA,WAAS;AACxB,eAAO,MAAM,SAAA,YAAY,IAAI,yCAAyC,KAAK,WAAW,mBAAmB,KAAK,aAAa,EAAE;MACjI;;;;;;;;;;MAWO,MAAM,QAAQ,YAAoB,aAAY,4BAA0B;AAC3E,YAAI,KAAK,UAAU;AACf,gBAAM,IAAI,MAAM,+BAA+B;QACnD;AAEA,cAAM,UAAS,GAAA,SAAA,WAAS;AACxB,eAAO,MAAM,SAAA,YAAY,IAAI,yCAAyC,KAAK,SAAS,IAAI,WAAW,KAAK,cAAa,CAAE,GAAG;AAG1H,cAAM,cAAc,KAAK,gBAAe;AACxC,YAAI,aAAa;AACb,sBAAY,QAAQ;AACpB,sBAAY,aAAa,KAAK,IAAG;AACjC,iBAAO,MAAM,SAAA,YAAY,IAAI,qCAAqC,YAAY,QAAQ,SAAS,EAAE;AACjG,iBAAO,YAAY;QACvB;AAGA,YAAI,KAAK,SAAS,OAAO,KAAK,aAAa;AACvC,gBAAM,UAAU,MAAM,KAAK,oBAAmB;AAC9C,iBAAO,MAAM,SAAA,YAAY,IAAI,oCAAoC,QAAQ,SAAS,EAAE;AACpF,iBAAO;QACX;AAGA,eAAO,MAAM,SAAA,YAAY,IAAI,8DAA8D;AAC3F,eAAO,KAAK,eAAe,SAAS;MACxC;;;;;;;MAQO,QAAQ,SAAyB;AACpC,YAAI,KAAK,UAAU;AAEf,eAAK,eAAe,OAAO,EAAE,MAAM,MAAK;UAExC,CAAC;AACD;QACJ;AAEA,cAAM,gBAAgB,KAAK,SAAS,IAAI,QAAQ,SAAS;AACzD,YAAI,CAAC,eAAe;AAEhB,gBAAMC,WAAS,GAAA,SAAA,WAAS;AACxB,UAAAA,QAAO,MAAM,SAAA,YAAY,IAAI,wBAAwB,QAAQ,SAAS,0BAA0B;AAChG,eAAK,eAAe,OAAO,EAAE,MAAM,MAAK;UAExC,CAAC;AACD;QACJ;AAEA,cAAM,UAAS,GAAA,SAAA,WAAS;AAGxB,YAAI,KAAK,QAAQ,SAAS,GAAG;AACzB,gBAAM,SAAS,KAAK,QAAQ,MAAK;AACjC,cAAI,OAAO,WAAW;AAClB,yBAAa,OAAO,SAAS;UACjC;AACA,wBAAc,aAAa,KAAK,IAAG;AACnC,iBAAO,MAAM,SAAA,YAAY,IAAI,gCAAgC,QAAQ,SAAS,qBAAqB;AACnG,iBAAO,QAAQ,OAAO;AACtB;QACJ;AAGA,sBAAc,QAAQ;AACtB,sBAAc,aAAa,KAAK,IAAG;AACnC,eAAO,MAAM,SAAA,YAAY,IAAI,iCAAiC,QAAQ,SAAS,eAAe;MAClG;;;;;;;MAQO,MAAM,QAAQ,SAAyB;AAC1C,cAAM,UAAS,GAAA,SAAA,WAAS;AACxB,eAAO,MAAM,SAAA,YAAY,IAAI,mCAAmC,QAAQ,SAAS,EAAE;AAEnF,aAAK,SAAS,OAAO,QAAQ,SAAS;AACtC,cAAM,KAAK,eAAe,OAAO;MACrC;;;;;;MAOO,WAAQ;AACX,cAAM,gBAAgB,KAAK,cAAa;AACxC,eAAO;UACH,eAAe,KAAK,SAAS;UAC7B;UACA,cAAc,KAAK,SAAS,OAAO;UACnC,iBAAiB,KAAK,QAAQ;UAC9B,aAAa,KAAK;UAClB,eAAe,KAAK;;MAE5B;;;;MAKO,aAAU;AACb,eAAO,KAAK;MAChB;;;;;MAMO,MAAM,UAAO;AAChB,YAAI,KAAK,UAAU;AACf;QACJ;AAEA,cAAM,UAAS,GAAA,SAAA,WAAS;AACxB,eAAO,MAAM,SAAA,YAAY,IAA
I,6BAA6B;AAE1D,aAAK,WAAW;AAGhB,YAAI,KAAK,cAAc;AACnB,wBAAc,KAAK,YAAY;AAC/B,eAAK,eAAe;QACxB;AAGA,mBAAW,UAAU,KAAK,SAAS;AAC/B,cAAI,OAAO,WAAW;AAClB,yBAAa,OAAO,SAAS;UACjC;AACA,iBAAO,OAAO,IAAI,MAAM,+BAA+B,CAAC;QAC5D;AACA,aAAK,QAAQ,SAAS;AAGtB,cAAM,kBAAmC,CAAA;AACzC,mBAAW,CAAC,EAAE,aAAa,KAAK,KAAK,UAAU;AAC3C,0BAAgB,KAAK,KAAK,eAAe,cAAc,OAAO,CAAC;QACnE;AACA,aAAK,SAAS,MAAK;AAEnB,cAAM,QAAQ,WAAW,eAAe;AACxC,eAAO,MAAM,SAAA,YAAY,IAAI,uBAAuB;MACxD;;;;;MAMO,MAAM,sBAAmB;AAC5B,YAAI,KAAK,UAAU;AACf,iBAAO;QACX;AAEA,cAAM,UAAS,GAAA,SAAA,WAAS;AACxB,cAAM,MAAM,KAAK,IAAG;AACpB,cAAM,mBAA6B,CAAA;AAGnC,mBAAW,CAAC,WAAW,aAAa,KAAK,KAAK,UAAU;AACpD,cAAI,CAAC,cAAc,OAAO;AACtB,kBAAM,WAAW,MAAM,cAAc;AACrC,gBAAI,WAAW,KAAK,eAAe;AAE/B,oBAAM,YAAY,KAAK,SAAS,OAAO,KAAK,cAAa;AACzD,oBAAM,0BAA0B,YAAY,iBAAiB,SAAS;AACtE,kBAAI,2BAA2B,KAAK,aAAa;AAC7C,iCAAiB,KAAK,SAAS;cACnC;YACJ;UACJ;QACJ;AAGA,mBAAW,aAAa,kBAAkB;AACtC,gBAAM,gBAAgB,KAAK,SAAS,IAAI,SAAS;AACjD,cAAI,eAAe;AACf,iBAAK,SAAS,OAAO,SAAS;AAC9B,kBAAM,KAAK,eAAe,cAAc,OAAO;AAC/C,mBAAO,MAAM,SAAA,YAAY,IAAI,wCAAwC,SAAS,EAAE;UACpF;QACJ;AAEA,YAAI,iBAAiB,SAAS,GAAG;AAC7B,iBAAO,MAAM,SAAA,YAAY,IAAI,2BAA2B,iBAAiB,MAAM,gBAAgB;QACnG;AAEA,eAAO,iBAAiB;MAC5B;;;;;;;MASQ,kBAAe;AACnB,mBAAW,CAAC,EAAE,aAAa,KAAK,KAAK,UAAU;AAC3C,cAAI,CAAC,cAAc,OAAO;AACtB,mBAAO;UACX;QACJ;AACA,eAAO;MACX;;;;MAKQ,gBAAa;AACjB,YAAI,QAAQ;AACZ,mBAAW,CAAC,EAAE,aAAa,KAAK,KAAK,UAAU;AAC3C,cAAI,cAAc,OAAO;AACrB;UACJ;QACJ;AACA,eAAO;MACX;;;;MAKQ,MAAM,sBAAmB;AAC7B,cAAM,UAAU,MAAM,KAAK,eAAc;AACzC,cAAM,MAAM,KAAK,IAAG;AAEpB,aAAK,SAAS,IAAI,QAAQ,WAAW;UACjC;UACA,OAAO;UACP,YAAY;UACZ,WAAW;SACd;AAED,eAAO;MACX;;;;MAKQ,eAAe,WAAiB;AACpC,eAAO,IAAI,QAAQ,CAACC,WAAS,WAAU;AACnC,gBAAM,SAAwB,EAAE,SAAAA,WAAS,OAAM;AAG/C,iBAAO,YAAY,WAAW,MAAK;AAC/B,kBAAM,QAAQ,KAAK,QAAQ,QAAQ,MAAM;AACzC,gBAAI,UAAU,IAAI;AACd,mBAAK,QAAQ,OAAO,OAAO,CAAC;YAChC;AACA,mBAAO,IAAI,MAAM,qCAAqC,SAAS,IAAI,CAAC;UACxE,GAAG,SAAS;AAEZ,eAAK,QAAQ,KAAK,MAAM;QAC5B,CAAC;MACL;;;;MAKQ,MAAM,eAAe,SAAyB;AAClD,YAAI;AACA,gBAAM,QAAQ,QAAO;QACzB,SAAS,OAAO;AACZ,gBAAM,UAAS,GAAA,SAAA,WAAS;AACxB,iBAAO,MAAM,SAAA,YAAY,IAAI,yCAAyC,QAAQ,SAAS,KAAK,KAAK,EAAE;QACvG;MACJ;;;;MAKQ,oBAAiB;AACrB,YAAI,KAAK,oBAAoB,GAAG;AAC5B,eAAK,eAAe,YAAY,MAAK;AACjC,iBAAK,oBAAmB,EAAG,MAAM,MAAK;YAEtC,CAAC;UACL,GAAG,KAAK,iBAAiB;AAGzB,cAAI,KAAK,aAAa,OAAO;AACzB,iBAAK,aAAa,MAAK;UAC3B;QACJ;MACJ;;AAlWJ,IAAAC,SAAA,cAAA;AAY2B,gBAAA,uBAAuB;AAEvB,gBAAA,0BAA0B;AAE1B,gBAAA,uBAAuB;AAEvB,gBAAA,8BAA8B;AAE9B,gBAAA,6BAA6B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACzExD,IAAAC,SAAA,2BAAA;AAYA,IAAAA,SAAA,mBAAA;AAgBA,IAAAA,SAAA,mBAAA;AAWA,IAAAA,SAAA,uBAAA;AAkEA,IAAAA,SAAA,4BAAA;AAYA,IAAAA,SAAA,kBAAA;AA0BA,IAAAA,SAAA,sBAAA;AAWA,IAAAA,SAAA,kBAAA;AAUA,IAAAA,SAAA,qBAAA;AAlNA,QAAA,KAAA,aAAA,QAAA,IAAA,CAAA;AACA,QAAAC,SAAA,aAAA,QAAA,MAAA,CAAA;AACA,QAAAC,OAAA,aAAA,QAAA,IAAA,CAAA;AAEA,QAAA,WAAA;AA2BA,QAAM,aAAa;AACnB,QAAM,cAAc;AAGpB,QAAI,eAA2C;AAG/C,QAAI,wBAAuC;AAQ3C,aAAgB,yBAAyB,KAAkB;AACvD,8BAAwB;AAExB,qBAAe;IACnB;AAQA,aAAgB,mBAAgB;AAE5B,UAAI,0BAA0B,MAAM;AAChC,eAAO;MACX;AAIA,aAAO,GAAG,QAAO;IACrB;AAOA,aAAgB,mBAAgB;AAC5B,aAAOD,OAAK,KAAK,iBAAgB,GAAI,YAAY,WAAW;IAChE;AASA,aAAgB,qBAAqB,cAAc,OAAK;AACpD,YAAM,UAAS,GAAA,SAAA,WAAS;AACxB,YAAM,aAAa,iBAAgB;AAGnC,UAAI,gBAAgB,CAAC,aAAa;AAC9B,eAAO,MAAM,SAAA,YAAY,IAAI,0CAA0C;AACvE,eAAO;MACX;AAEA,aAAO,MAAM,SAAA,YAAY,IAAI,wCAAwC,UAAU,EAAE;AAGjF,UAAI,CAACC,KAAG,WAAW,UAAU,GAAG;AAC5B,eAAO,MAAM,SAAA,YAAY,IAAI,2EAA2E;AACxG,uBAAe;UACX,SAAS;UACT,YAAY,CAAA;UACZ;UACA,YAAY;;AAEhB,eAAO;MACX;AAEA,UAAI;AAEA,cAAM,UAAUA,KAAG,aAAa,YAAY,OAAO;AACnD,cAAM,SAAwB,KAAK,MAAM,OAAO;AAGhD,cAAM,aAAa,OAAO,cAAc,CAAA;AAGxC,cAAM,cAAc,OAAO,KAAK,UAAU,EAAE;AAC5C,eAAO,MAAM,SAAA,YAAY,IAAI,2BA
A2B,WAAW,4BAA4B;AAE/F,uBAAe;UACX,SAAS;UACT;UACA;UACA,YAAY;;AAEhB,eAAO;MAEX,SAAS,OAAO;AACZ,cAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAC1E,eAAO,KAAK,SAAA,YAAY,IAAI,iDAAiD,YAAY,EAAE;AAE3F,uBAAe;UACX,SAAS;UACT,YAAY,CAAA;UACZ;UACA,YAAY;UACZ,OAAO,+BAA+B,YAAY;;AAEtD,eAAO;MACX;IACJ;AASO,mBAAe,0BAA0B,cAAc,OAAK;AAC/D,aAAO,qBAAqB,WAAW;IAC3C;AAUA,aAAgB,gBACZ,eACA,gBAAgD;AAGhD,UAAI,CAAC,gBAAgB;AACjB,eAAO,EAAE,GAAG,cAAa;MAC7B;AAIA,UAAI,OAAO,KAAK,cAAc,EAAE,WAAW,GAAG;AAC1C,eAAO,CAAA;MACX;AAGA,aAAO;QACH,GAAG;QACH,GAAG;;IAEX;AAMA,aAAgB,sBAAmB;AAC/B,YAAM,UAAS,GAAA,SAAA,WAAS;AACxB,aAAO,MAAM,SAAA,YAAY,IAAI,wCAAwC;AACrE,qBAAe;IACnB;AAOA,aAAgB,kBAAe;AAC3B,aAAOA,KAAG,WAAW,iBAAgB,CAAE;IAC3C;AAQA,aAAgB,qBAAkB;AAC9B,aAAO;IACX;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACtMA,IAAAC,SAAA,+BAAA;AAgBA,IAAAA,SAAA,uBAAA;AAiDA,IAAAA,SAAA,kBAAA;AAoBA,IAAAA,SAAA,sBAAA;AAtGA,QAAAC,SAAA,aAAA,QAAA,MAAA,CAAA;AACA,QAAAC,OAAA,aAAA,QAAA,IAAA,CAAA;AACA,QAAA,KAAA,aAAA,QAAA,IAAA,CAAA;AACA,QAAA,WAAA;AAGA,QAAM,aAAa;AAEnB,QAAM,cAAc;AAGpB,QAAI,wBAAuC;AAM3C,aAAgB,6BAA6B,KAAkB;AAC3D,8BAAwB;IAC5B;AAMA,aAAS,eAAY;AACjB,YAAM,OAAO,yBAAyB,GAAG,QAAO;AAChD,aAAO,QAAQ,IAAI,iBAAiB,KAAKD,OAAK,KAAK,MAAM,UAAU;IACvE;AAKA,aAAgB,uBAAoB;AAChC,aAAOA,OAAK,KAAK,aAAY,GAAI,WAAW;IAChD;AAMA,aAAS,WAAW,YAAkB;AAClC,UAAI;AACA,YAAI,CAACC,KAAG,WAAW,UAAU,GAAG;AAC5B,iBAAO,CAAA;QACX;AACA,cAAM,UAAUA,KAAG,aAAa,YAAY,OAAO;AACnD,cAAM,SAAS,KAAK,MAAM,OAAO;AACjC,eAAO,OAAO,WAAW,YAAY,WAAW,OAAO,SAAS,CAAA;MACpE,QAAQ;AACJ,eAAO,CAAA;MACX;IACJ;AAMA,aAAS,YAAY,YAAoB,QAA+B;AACpE,YAAM,MAAMD,OAAK,QAAQ,UAAU;AACnC,UAAI,CAACC,KAAG,WAAW,GAAG,GAAG;AACrB,QAAAA,KAAG,UAAU,KAAK,EAAE,WAAW,KAAI,CAAE;MACzC;AACA,MAAAA,KAAG,cAAc,YAAY,KAAK,UAAU,QAAQ,MAAM,CAAC,IAAI,MAAM,OAAO;IAChF;AAMA,aAAS,oBAAoB,QAAc;AACvC,UAAI,WAAWD,OAAK,QAAQ,MAAM;AAElC,aAAO,SAAS,SAAS,MAAM,SAAS,SAASA,OAAK,GAAG,KAAK,SAAS,SAAS,GAAG,IAAI;AACnF,mBAAW,SAAS,MAAM,GAAG,EAAE;MACnC;AACA,aAAO;IACX;AAKA,aAAgB,gBAAgB,QAAc;AAC1C,YAAM,aAAa,qBAAoB;AACvC,YAAM,SAAS,WAAW,UAAU;AACpC,YAAM,iBAAiB,MAAM,QAAQ,OAAO,iBAAiB,CAAC,IAAI,OAAO,iBAAiB,IAAgB,CAAA;AAC1G,YAAM,aAAa,oBAAoB,MAAM;AAC7C,aAAO,eAAe,KAAK,OAAK,oBAAoB,CAAC,MAAM,UAAU;IACzE;AAcA,aAAgB,oBAAoB,QAAc;AAC9C,YAAM,UAAS,GAAA,SAAA,WAAS;AACxB,YAAM,aAAa,oBAAoB,MAAM;AAC7C,YAAM,aAAa,qBAAoB;AAEvC,UAAI;AACA,cAAM,SAAS,WAAW,UAAU;AACpC,cAAM,iBAAiB,MAAM,QAAQ,OAAO,iBAAiB,CAAC,IAAI,OAAO,iBAAiB,IAAgB,CAAA;AAG1G,YAAI,eAAe,KAAK,OAAK,oBAAoB,CAAC,MAAM,UAAU,GAAG;AACjE,iBAAO,MAAM,SAAA,YAAY,IAAI,mBAAmB,UAAU,sBAAsB;AAChF;QACJ;AAGA,uBAAe,KAAK,UAAU;AAC9B,eAAO,iBAAiB,IAAI;AAC5B,oBAAY,YAAY,MAAM;AAC9B,eAAO,MAAM,SAAA,YAAY,IAAI,yBAAyB,UAAU,sBAAsB;MAC1F,SAAS,OAAO;AAEZ,eAAO,MAAM,SAAA,YAAY,IAAI,2CAA2C,KAAK,EAAE;MACnF;IACJ;;;;;;;;;;ACnIA,QAAA,aAAA;AAAS,WAAA,eAAAE,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAqB,EAAA,CAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC26C9B,IAAAC,SAAA,uBAAAC;AAOA,IAAAD,SAAA,yBAAA;AAx6CA,QAAAE,SAAA,aAAA,QAAA,MAAA,CAAA;AACA,QAAAC,OAAA,aAAA,QAAA,IAAA,CAAA;AAEA,QAAA,WAAA;AAEA,QAAA,iBAAA;AACA,QAAA,sBAAA;AACA,QAAA,mBAAA;AACA,QAAA,aAAA;AAsba,IAAAH,SAAA,8BAA2D;MACpE,aAAa;MACb,eAAe;;AAqBnB,QAAa,oBAAb,MAAa,mBAAiB;MAkB1B,cAAA;AAfQ,aAAA,SAAgC;AAChC,aAAA,YAAgC;AAChC,aAAA,YAA+F;AAC/F,aAAA,wBAA8C;AAC9C,aAAA,oBAAkD;AAClD,aAAA,cAAkC;AAClC,aAAA,oBAAiD,EAAE,GAAGA,SAAA,4BAA2B;AACjF,aAAA,WAAW;AAGX,aAAA,iBAA+C,oBAAI,IAAG;MAO9D;;;;MAKO,OAAO,cAAW;AACrB,YAAI,CAAC,mBAAkB,UAAU;AAC7B,6BAAkB,WAAW,IAAI,mBAAiB;QACtD;AACA,eAAO,mBAAkB;MAC7B;;;;MAKO,OAAO,gBAAa;AACvB,YAAI,mBAAkB,UAAU;AAC5B,6BAAkB,SAAS,QAAO;AAClC,6BAAkB,WAAW;QACjC;MACJ;;;;;;;;MASO,qBAAqB,QAAyB;AACjD,aAAK,oBAAoB;UACrB,aAAa,OAAO,eAAeA,SAAA,4BAA4B;UAC/D,eAAe,OAAO,iBAAiBA,SA
AA,4BAA4B;;AAGvE,cAAM,UAAS,GAAA,SAAA,WAAS;AACxB,eAAO,MACH,SAAA,YAAY,IACZ,+DAA+D,KAAK,kBAAkB,WAAW,mBAAmB,KAAK,kBAAkB,aAAa,EAAE;MAElK;;;;;;;MAQO,MAAM,cAAW;AACpB,YAAI,KAAK,UAAU;AACf,iBAAO,EAAE,WAAW,OAAO,OAAO,4BAA2B;QACjE;AAEA,YAAI,KAAK,mBAAmB;AACxB,iBAAO,KAAK;QAChB;AAEA,cAAM,UAAS,GAAA,SAAA,WAAS;AACxB,eAAO,MAAM,SAAA,YAAY,IAAI,8CAA8C;AAE3E,YAAI;AACA,gBAAM,UAAU,KAAK,YAAW;AAChC,cAAI,CAAC,SAAS;AACV,iBAAK,oBAAoB;cACrB,WAAW;cACX,OAAO;;AAEX,mBAAO,MAAM,SAAA,YAAY,IAAI,kCAAkC;AAC/D,mBAAO,KAAK;UAChB;AAGA,gBAAM,KAAK,cAAc,OAAO;AAEhC,eAAK,oBAAoB;YACrB,WAAW;YACX;;AAEJ,iBAAO,MAAM,SAAA,YAAY,IAAI,wCAAwC,OAAO,EAAE;AAC9E,iBAAO,KAAK;QAEhB,SAAS,OAAO;AACZ,gBAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAC1E,eAAK,oBAAoB;YACrB,WAAW;YACX,OAAO,+BAA+B,YAAY;;AAEtD,iBAAO,MAAM,SAAA,YAAY,IAAI,oDAAoD,iBAAiB,QAAQ,QAAQ,MAAS;AAC3H,iBAAO,KAAK;QAChB;MACJ;;;;;MAMO,yBAAsB;AACzB,aAAK,oBAAoB;MAC7B;;;;;;;;;MAUO,MAAM,aAAa,KAAY;AAClC,YAAI,KAAK,UAAU;AACf,gBAAM,IAAI,MAAM,qCAAqC;QACzD;AAGA,YAAI,KAAK,UAAU,KAAK,cAAc,KAAK;AACvC,iBAAO,KAAK;QAChB;AAGA,YAAI,KAAK,UAAU,KAAK,cAAc,KAAK;AACvC,gBAAMI,WAAS,GAAA,SAAA,WAAS;AACxB,UAAAA,QAAO,MAAM,SAAA,YAAY,IAAI,sDAAsD,KAAK,SAAS,SAAS,GAAG,wBAAwB;AACrI,cAAI;AACA,kBAAM,KAAK,OAAO,KAAI;UAC1B,SAAS,OAAO;AACZ,YAAAA,QAAO,MAAM,SAAA,YAAY,IAAI,0DAA0D,KAAK,EAAE;UAClG;AACA,eAAK,SAAS;AACd,eAAK,YAAY;QACrB;AAGA,YAAI,KAAK,uBAAuB;AAC5B,gBAAM,KAAK;AACX,cAAI,KAAK,UAAU,KAAK,cAAc,KAAK;AACvC,mBAAO,KAAK;UAChB;QACJ;AAEA,cAAM,UAAS,GAAA,SAAA,WAAS;AACxB,eAAO,MAAM,SAAA,YAAY,IAAI,wDAAwD,OAAO,WAAW,EAAE;AAEzG,aAAK,wBAAwB,KAAK,iBAAiB,GAAG;AACtD,cAAM,KAAK;AACX,aAAK,wBAAwB;AAE7B,YAAI,CAAC,KAAK,QAAQ;AACd,gBAAM,IAAI,MAAM,yCAAyC;QAC7D;AAEA,eAAO,KAAK;MAChB;;;;;;;;;MAUO,MAAM,YAAY,SAA2B;AAChD,YAAI,QAAQ,SAAS;AACjB,iBAAO,KAAK,oBAAoB,OAAO;QAC3C;AACA,eAAO,KAAK,kBAAkB,OAAO;MACzC;;;;;;;;MASQ,MAAM,oBAAoB,SAA2B;AACzD,cAAM,UAAS,GAAA,SAAA,WAAS;AACxB,cAAM,YAAY,KAAK,IAAG;AAG1B,cAAM,eAAe,MAAM,KAAK,YAAW;AAC3C,YAAI,CAAC,aAAa,WAAW;AACzB,iBAAO;YACH,SAAS;YACT,OAAO,aAAa,SAAS;;QAErC;AAEA,YAAI,UAAmC;AACvC,YAAI,uBAAuB;AAE3B,YAAI;AACA,gBAAM,OAAO,MAAM,KAAK,kBAAiB;AACzC,gBAAM,YAAY,QAAQ,aAAa,mBAAkB;AAEzD,iBAAO,MAAM,SAAA,YAAY,IAAI,gDAAgD;AAC7E,oBAAU,MAAM,KAAK,QAAQ,SAAS;AACtC,iBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,QAAQ,SAAS,+BAA+B;AAGnG,gBAAM,SAAS,MAAM,KAAK,gBAAgB,SAAS,QAAQ,QAAQ,SAAS;AAE5E,gBAAM,WAAW,QAAQ,MAAM,WAAW;AAC1C,gBAAM,aAAa,KAAK,IAAG,IAAK;AAEhC,iBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,QAAQ,SAAS,kCAAkC,UAAU,IAAI;AAEpH,cAAI,CAAC,UAAU;AACX,mBAAO;cACH,SAAS;cACT,OAAO;cACP,WAAW,QAAQ;;UAE3B;AAEA,iBAAO;YACH,SAAS;YACT;YACA,WAAW,QAAQ;YACnB,aAAa;;QAGrB,SAAS,OAAO;AACZ,gBAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAC1E,gBAAM,aAAa,KAAK,IAAG,IAAK;AAEhC,iBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,SAAS,aAAa,YAAY,kCAAkC,UAAU,MAAM,iBAAiB,QAAQ,QAAQ,MAAS;AAGjL,iCAAuB;AAEvB,iBAAO;YACH,SAAS;YACT,OAAO,sBAAsB,YAAY;YACzC,WAAW,SAAS;;QAG5B;AAEI,cAAI,WAAW,KAAK,aAAa;AAC7B,gBAAI,sBAAsB;AACtB,kBAAI;AACA,sBAAM,KAAK,YAAY,QAAQ,OAAO;AACtC,uBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,QAAQ,SAAS,kCAAkC;cAC1G,SAAS,cAAc;AACnB,uBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,QAAQ,SAAS,yCAAyC,YAAY,EAAE;cAC/H;YACJ,OAAO;AACH,mBAAK,YAAY,QAAQ,OAAO;AAChC,qBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,QAAQ,SAAS,kCAAkC;YAC1G;UACJ;QACJ;MACJ;;;;;;;;MASQ,MAAM,kBAAkB,SAA2B;AACvD,cAAM,UAAS,GAAA,SAAA,WAAS;AACxB,cAAM,YAAY,KAAK,IAAG;AAG1B,cAAM,eAAe,MAAM,KAAK,YAAW;AAC3C,YAAI,CAAC,aAAa,WAAW;AACzB,iBAAO;YACH,SAAS;YACT,OAAO,aAAa,SAAS;;QAErC;AAEA,YAAI,UAAkC;AAEtC,YAAI;AAEA,gBAAM,SAAS,MAAM,KAAK,aAAa,QAAQ,gBAAgB;AAG/D,gBAAM,iBAAkC,CAAA;AACxC,cAAI,QAAQ,OAAO;AACf,2BAAe,QAAQ,QAAQ;UACnC;AACA,cAAI,QAAQ,WAAW;AACnB,2BAAe,YAAY,QAAQ;UACvC;AAGA,cAAI,QAAQ,gBAAgB;AACxB,2BAAe,iBAAiB,QAAQ;UAC5C;AACA,cAAI,QAAQ,eAAe;AAC
vB,2BAAe,gBAAgB,QAAQ;UAC3C;AAIA,gBAAM,uBAAuB,QAAQ,yBAAyB;AAC9D,cAAI,wBAAwB,QAAQ,eAAe,QAAW;AAC1D,gBAAI;AAEJ,gBAAI,sBAAsB;AAEtB,oBAAM,iBAAgB,GAAA,oBAAA,sBAAoB;AAC1C,qBAAO,MAAM,SAAA,YAAY,IAAI,8DAA8D,cAAc,OAAO,gBAAgB,cAAc,UAAU,iBAAiB,OAAO,KAAK,cAAc,UAAU,EAAE,MAAM,EAAE;AACvN,kBAAI,cAAc,OAAO;AACrB,uBAAO,MAAM,SAAA,YAAY,IAAI,gDAAgD,cAAc,KAAK,EAAE;cACtG;AACA,kBAAI,cAAc,WAAW,OAAO,KAAK,cAAc,UAAU,EAAE,SAAS,GAAG;AAC3E,uBAAO,MAAM,SAAA,YAAY,IAAI,6BAA6B,OAAO,KAAK,cAAc,UAAU,EAAE,MAAM,2BAA2B,KAAK,UAAU,cAAc,UAAU,CAAC,EAAE;cAC/K;AAEA,iCAAkB,GAAA,oBAAA,iBAAgB,cAAc,YAAY,QAAQ,UAAU;YAClF,WAAW,QAAQ,eAAe,QAAW;AAEzC,gCAAkB,QAAQ;YAC9B;AAEA,gBAAI,mBAAmB,OAAO,KAAK,eAAe,EAAE,SAAS,GAAG;AAC5D,6BAAe,aAAa;AAC5B,qBAAO,MAAM,SAAA,YAAY,IAAI,4BAA4B,OAAO,KAAK,eAAe,EAAE,MAAM,mBAAmB,OAAO,KAAK,eAAe,EAAE,KAAK,IAAI,CAAC,EAAE;AACxJ,qBAAO,MAAM,SAAA,YAAY,IAAI,0CAA0C,KAAK,UAAU,eAAe,CAAC,EAAE;YAC5G,WAAW,QAAQ,eAAe,UAAa,OAAO,KAAK,QAAQ,UAAU,EAAE,WAAW,GAAG;AAEzF,6BAAe,aAAa,CAAA;AAC5B,qBAAO,MAAM,SAAA,YAAY,IAAI,oDAAoD;YACrF;UACJ;AAGA,cAAI,QAAQ,qBAAqB;AAC7B,2BAAe,sBAAsB,QAAQ;UACjD;AAEA,gBAAM,oBAAoB,OAAO,KAAK,cAAc,EAAE,SAAS,IACzD,KAAK,UAAU,cAAc,IAC7B;AACN,iBAAO,MAAM,SAAA,YAAY,IAAI,6CAA6C,QAAQ,oBAAoB,WAAW,cAAc,iBAAiB,GAAG;AAEnJ,oBAAU,MAAM,OAAO,cAAc,cAAc;AACnD,iBAAO,MAAM,SAAA,YAAY,IAAI,uCAAuC,QAAQ,SAAS,EAAE;AAGvF,eAAK,aAAa,OAAO;AAGzB,gBAAM,YAAY,QAAQ,aAAa,mBAAkB;AAKzD,cAAI;AACJ,cAAI;AACJ,cAAI,YAAY;AAChB,eAAK,QAAQ,aAAa,QAAQ,oBAAoB,YAAY,SAAW,QAAQ,MAAM,QAAQ,MAAM;AACrG,kBAAM,kBAAkB,MAAM,KAAK,kBAAkB,SAAS,QAAQ,QAAQ,WAAW,QAAQ,gBAAgB;AACjH,uBAAW,gBAAgB;AAC3B,yBAAa,gBAAgB;AAC7B,wBAAY,gBAAgB;UAChC,OAAO;AACH,kBAAM,SAAS,MAAM,KAAK,gBAAgB,SAAS,QAAQ,QAAQ,SAAS;AAC5E,uBAAW,QAAQ,MAAM,WAAW;UACxC;AAEA,gBAAM,aAAa,KAAK,IAAG,IAAK;AAEhC,iBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,QAAQ,SAAS,2BAA2B,UAAU,IAAI;AAE7G,cAAI,CAAC,UAAU;AAKX,gBAAI,YAAY,GAAG;AACf,qBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,QAAQ,SAAS,8BAA8B,SAAS,oEAA+D;AAC1K,qBAAO;gBACH,SAAS;gBACT,UAAU;gBACV,WAAW,QAAQ;gBACnB;;YAER;AACA,mBAAO;cACH,SAAS;cACT,OAAO;cACP,WAAW,QAAQ;cACnB;;UAER;AAEA,iBAAO;YACH,SAAS;YACT;YACA,WAAW,QAAQ;YACnB;;QAGR,SAAS,OAAO;AACZ,gBAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAC1E,gBAAM,aAAa,KAAK,IAAG,IAAK;AAEhC,iBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,SAAS,aAAa,YAAY,2BAA2B,UAAU,MAAM,iBAAiB,QAAQ,QAAQ,MAAS;AAE1K,iBAAO;YACH,SAAS;YACT,OAAO,sBAAsB,YAAY;YACzC,WAAW,SAAS;;QAG5B;AAEI,cAAI,SAAS;AAET,iBAAK,eAAe,QAAQ,SAAS;AACrC,gBAAI;AACA,oBAAM,QAAQ,QAAO;AACrB,qBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,QAAQ,SAAS,sBAAsB;YAC9F,SAAS,cAAc;AACnB,qBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,QAAQ,SAAS,yCAAyC,YAAY,EAAE;YAC/H;UACJ;QACJ;MACJ;;;;;;;;MASQ,MAAM,oBAAiB;AAC3B,YAAI,KAAK,UAAU;AACf,gBAAM,IAAI,MAAM,qCAAqC;QACzD;AAEA,YAAI,KAAK,aAAa;AAClB,iBAAO,KAAK;QAChB;AAEA,cAAM,UAAS,GAAA,SAAA,WAAS;AACxB,eAAO,MAAM,SAAA,YAAY,IAAI,0CAA0C;AAGvE,cAAM,SAAS,MAAM,KAAK,aAAY;AAGtC,aAAK,cAAc,IAAI,eAAA,YACnB,YAAW;AACP,gBAAM,UAAU,MAAM,OAAO,cAAa;AAC1C,iBAAO;QACX,GACA;UACI,aAAa,KAAK,kBAAkB;UACpC,eAAe,KAAK,kBAAkB;SACzC;AAGL,eAAO,MAAM,SAAA,YAAY,IAAI,yCAAyC;AACtE,eAAO,KAAK;MAChB;;;;;;;MAQO,eAAY;AACf,eAAO,KAAK,aAAa,SAAQ,KAAM;MAC3C;;;;;;MAOO,gBAAa;AAChB,eAAO,KAAK,gBAAgB,QAAQ,CAAC,KAAK,YAAY,WAAU;MACpE;;;;;;;;;MAUO,MAAM,aAAa,WAAiB;AACvC,cAAM,UAAS,GAAA,SAAA,WAAS;AAExB,cAAM,UAAU,KAAK,eAAe,IAAI,SAAS;AACjD,YAAI,CAAC,SAAS;AACV,iBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,SAAS,gCAAgC;AAC5F,iBAAO;QACX;AAEA,eAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,SAAS,qBAAqB;AAEjF,YAAI;AACA,gBAAM,QAAQ,QAAO;AACrB,eAAK,eAAe,OAAO,SAAS;AACpC,iBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,SAAS,iCAAiC;AAC7F,iBAAO;QACX,SAAS,OAAO;AACZ,iBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,SAAS,6BAA6B,iBAAiB,QAAQ,QAAQ,MAAS;AAEnI,eAAK,eAAe,OAAO,SAAS;AACpC,iBAAO;QACX;MACJ;;;;;;;MAQO,iBAAi
B,WAAiB;AACrC,eAAO,KAAK,eAAe,IAAI,SAAS;MAC5C;;;;;;MAOO,wBAAqB;AACxB,eAAO,KAAK,eAAe;MAC/B;;;;;;;MAQQ,aAAa,SAAwB;AACzC,aAAK,eAAe,IAAI,QAAQ,WAAW,OAAO;MACtD;;;;;;MAOQ,eAAe,WAAiB;AACpC,aAAK,eAAe,OAAO,SAAS;MACxC;;;;MAKO,MAAM,UAAO;AAChB,cAAM,UAAS,GAAA,SAAA,WAAS;AACxB,eAAO,MAAM,SAAA,YAAY,IAAI,4CAA4C;AAGzE,cAAM,gBAAiC,CAAA;AACvC,mBAAW,CAAC,SAAS,KAAK,KAAK,gBAAgB;AAC3C,wBAAc,KAAK,KAAK,aAAa,SAAS,EAAE,KAAK,MAAK;UAAE,CAAC,CAAC;QAClE;AACA,cAAM,QAAQ,WAAW,aAAa;AACtC,aAAK,eAAe,MAAK;AAGzB,YAAI,KAAK,aAAa;AAClB,cAAI;AACA,kBAAM,KAAK,YAAY,QAAO;AAC9B,mBAAO,MAAM,SAAA,YAAY,IAAI,0CAA0C;UAC3E,SAAS,OAAO;AACZ,mBAAO,MAAM,SAAA,YAAY,IAAI,6DAA6D,KAAK,EAAE;UACrG;AACA,eAAK,cAAc;QACvB;AAEA,YAAI,KAAK,QAAQ;AACb,cAAI;AACA,kBAAM,KAAK,OAAO,KAAI;AACtB,mBAAO,MAAM,SAAA,YAAY,IAAI,mCAAmC;UACpE,SAAS,OAAO;AACZ,mBAAO,MAAM,SAAA,YAAY,IAAI,sDAAsD,KAAK,EAAE;UAC9F;AACA,eAAK,SAAS;AACd,eAAK,YAAY;QACrB;AAEA,aAAK,YAAY;AACjB,aAAK,oBAAoB;MAC7B;;;;MAKO,UAAO;AACV,aAAK,WAAW;AAEhB,aAAK,QAAO,EAAG,MAAM,MAAK;QAE1B,CAAC;MACL;;;;;MAMQ,cAAW;AACf,cAAM,gBAAgB;;UAElBF,OAAK,KAAK,WAAW,MAAM,gBAAgB,WAAW,aAAa;;UAEnEA,OAAK,KAAK,WAAW,MAAM,MAAM,MAAM,gBAAgB,WAAW,aAAa;;UAE/EA,OAAK,KAAK,WAAW,gBAAgB,WAAW,aAAa;;UAE7DA,OAAK,KAAK,WAAW,MAAM,MAAM,MAAM,MAAM,gBAAgB,WAAW,aAAa;;AAGzF,mBAAW,YAAY,eAAe;AAClC,gBAAM,YAAYA,OAAK,KAAK,UAAU,QAAQ,UAAU;AACxD,cAAIC,KAAG,WAAW,SAAS,GAAG;AAC1B,mBAAO;UACX;QACJ;AAEA,eAAO;MACX;;;;;MAMQ,MAAM,cAAc,SAAe;AACvC,YAAI,KAAK,WAAW;AAChB;QACJ;AAEA,cAAM,eAAeD,OAAK,KAAK,SAAS,QAAQ,UAAU;AAI1D,cAAM,EAAE,cAAa,IAAK,MAAA,QAAA,QAAA,EAAA,KAAA,MAAA,aAAA,QAAa,KAAK,CAAA,CAAA;AAC5C,cAAM,SAAS,cAAc,YAAY,EAAE;AAK3C,cAAM,gBAAgB,IAAI,SAAS,aAAa,0BAA0B;AAC1E,cAAM,MAAM,MAAM,cAAc,MAAM;AAEtC,YAAI,CAAC,IAAI,eAAe;AACpB,gBAAM,IAAI,MAAM,uCAAuC;QAC3D;AAEA,aAAK,YAAY;MACrB;;;;;;MAOQ,MAAM,iBAAiB,KAAY;AACvC,cAAM,UAAU,KAAK,YAAW;AAChC,YAAI,CAAC,SAAS;AACV,gBAAM,IAAI,MAAM,uBAAuB;QAC3C;AAEA,cAAM,KAAK,cAAc,OAAO;AAEhC,YAAI,CAAC,KAAK,WAAW;AACjB,gBAAM,IAAI,MAAM,uBAAuB;QAC3C;AAGA,cAAM,UAAiC,CAAA;AACvC,YAAI,KAAK;AACL,kBAAQ,MAAM;AAGd,cAAI;AACA,aAAA,GAAA,iBAAA,qBAAoB,GAAG;UAC3B,QAAQ;UAER;QACJ;AAEA,cAAM,UAAS,GAAA,SAAA,WAAS;AACxB,eAAO,MAAM,SAAA,YAAY,IAAI,2DAA2D,KAAK,UAAU,OAAO,CAAC,EAAE;AAEjH,aAAK,SAAS,IAAI,KAAK,UAAU,cAAc,OAAO;AACtD,aAAK,YAAY;MACrB;;;;;;MAOQ,MAAM,gBACV,SACA,QACA,WAAiB;AAIjB,eAAO,QAAQ,YAAY,EAAE,OAAM,GAAI,SAAS;MACpD;;;;;;;;;;;;;;;;;;;;;;;MAwBQ,MAAM,kBACV,SACA,QACA,WACA,kBAA0C;AAE1C,eAAO,IAAI,QAAQ,CAACG,WAAS,WAAU;AACnC,gBAAM,UAAS,GAAA,SAAA,WAAS;AACxB,gBAAM,MAAM,QAAQ;AACpB,cAAI,WAAW;AAKf,cAAI,cAAwB,CAAA;AAC5B,cAAI,UAAU;AACd,cAAI,oBAA0D;AAC9D,cAAI,YAAY;AAGhB,cAAI,mBAAmB;AACvB,cAAI,oBAAoB;AACxB,cAAI,uBAAuB;AAC3B,cAAI,wBAAwB;AAC5B,cAAI;AACJ,cAAI;AACJ,cAAI,iBAAiB;AACrB,cAAI;AACJ,cAAI;AAEJ,gBAAM,UAAU,MAAK;AACjB,gBAAI,aAAa;AACb,0BAAW;YACf;AACA,yBAAa,SAAS;AACtB,gBAAI,mBAAmB;AACnB,2BAAa,iBAAiB;AAC9B,kCAAoB;YACxB;UACJ;AAEA,gBAAM,SAAS,CAAC,UAA4C,UAA0B;AAClF,gBAAI,CAAC,SAAS;AACV,wBAAU;AACV,sBAAO;AACP,uBAAS,KAAK;YAClB;UACJ;AAEA,gBAAM,cAAc,CAAC,UAAgB;AACjC,gBAAI,CAAC,SAAS;AACV,wBAAU;AACV,sBAAO;AACP,qBAAO,KAAK;YAChB;UACJ;AAEA,gBAAM,kBAAkB,MAA6B;AACjD,gBAAI,mBAAmB,GAAG;AACtB,qBAAO;YACX;AACA,mBAAO;cACH,aAAa;cACb,cAAc;cACd,iBAAiB;cACjB,kBAAkB;cAClB,aAAa,mBAAmB;cAChC,MAAM;cACN,UAAU;cACV,WAAW;cACX,YAAY;cACZ,eAAe;;UAEvB;AAEA,gBAAM,mBAAmB,MAAK;AAM1B,kBAAM,iBAAiB,YAAY,SAAS,IACtC,YAAY,OAAO,OAAK,EAAE,KAAI,CAAE,EAAE,KAAK,MAAM,IAC7C;AACN,kBAAM,SAAS,kBAAkB;AACjC,mBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,GAAG,2BAA2B,OAAO,MAAM,WAAW,SAAS,WAAW,YAAY,MAAM,YAAY;AAC3J,mBAAOA,WAAS,EAAE,UAAU,QAAQ,YAAY,gBAAe,GAAI,UAAS,CAAE;UAClF;AAEA,gBAAM,YAAY,WAAW,MAAK;AAC9B,wBAAY,IAAI,MAAM,2BAA2B,SAAS,IAAI,CAAC;UACnE,GAAG,SAAS;AAIZ,gBAAM,cAAc,QAAQ,GAAI,CAAC
,UAAwB;AACrD,kBAAM,YAAY,MAAM;AAExB,gBAAI,cAAc,2BAA2B;AAEzC,oBAAM,QAAQ,MAAM,MAAM,gBAAgB;AAC1C,0BAAY;AAEZ,kBAAI,oBAAoB,OAAO;AAC3B,oBAAI;AACA,mCAAiB,KAAK;gBAC1B,SAAS,SAAS;AACd,yBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,GAAG,uCAAuC,OAAO,EAAE;gBAC1G;cACJ;YACJ,WAAW,cAAc,qBAAqB;AAK1C,oBAAM,iBAAiB,MAAM,MAAM,WAAW;AAC9C,kBAAI,gBAAgB;AAChB,4BAAY,KAAK,cAAc;cACnC;AACA,qBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,GAAG,wBAAwB,YAAY,MAAM,KAAK,eAAe,MAAM,SAAS;AAGnI,kBAAI,oBAAoB,kBAAkB,CAAC,UAAU;AACjD,oBAAI;AACA,mCAAiB,cAAc;gBACnC,SAAS,SAAS;AACd,yBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,GAAG,uCAAuC,OAAO,EAAE;gBAC1G;cACJ;YACJ,WAAW,cAAc,wBAAwB;AAO7C,kBAAI,mBAAmB;AACnB,6BAAa,iBAAiB;AAC9B,oCAAoB;AACpB,uBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,GAAG,4DAAuD;cACjH;YACJ,WAAW,cAAc,sBAAsB;AAW3C;AACA,qBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,GAAG,WAAW,SAAS,WAAW,YAAY,MAAM,mBAAmB;AAI1H,kBAAI,CAAC,WAAW,CAAC,mBAAmB;AAChC,oCAAoB,WAAW,MAAK;AAChC,sCAAoB;AACpB,sBAAI,CAAC,YAAY,YAAY,SAAS,KAAK,WAAW;AAClD,2BAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,GAAG,iDAAiD,SAAS,GAAG;AACnH,qCAAgB;kBACpB;gBACJ,GAAG,GAAI;cACX;YACJ,WAAW,cAAc,gBAAgB;AAErC,qBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,GAAG,yBAAyB,SAAS,QAAQ;AAChG,+BAAgB;YACpB,WAAW,cAAc,iBAAiB;AAEtC,oBAAM,eAAe,MAAM,MAAM,WAAW;AAC5C,qBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,GAAG,qBAAqB,YAAY,EAAE;AACzF,0BAAY,IAAI,MAAM,0BAA0B,YAAY,EAAE,CAAC;YACnE,WAAW,cAAc,mBAAmB;AAExC;AACA,kCAAoB,MAAM,MAAM,eAAe;AAC/C,mCAAqB,MAAM,MAAM,gBAAgB;AACjD,sCAAwB,MAAM,MAAM,mBAAmB;AACvD,uCAAyB,MAAM,MAAM,oBAAoB;AACzD,kBAAI,MAAM,MAAM,QAAQ,MAAM;AAC1B,6BAAa,aAAa,KAAK,MAAM,KAAK;cAC9C;AACA,kBAAI,MAAM,MAAM,YAAY,MAAM;AAC9B,iCAAiB,iBAAiB,KAAK,MAAM,KAAK;cACtD;AACA,qBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,GAAG,iBAAiB,cAAc,QAAQ,MAAM,MAAM,eAAe,CAAC,QAAQ,MAAM,MAAM,gBAAgB,CAAC,EAAE;YACpK,WAAW,cAAc,sBAAsB;AAE3C,kBAAI,MAAM,MAAM,cAAc,MAAM;AAChC,kCAAkB,MAAM,KAAK;cACjC;AACA,kBAAI,MAAM,MAAM,iBAAiB,MAAM;AACnC,qCAAqB,MAAM,KAAK;cACpC;AACA,qBAAO,MAAM,SAAA,YAAY,IAAI,sBAAsB,GAAG,gCAAgC,eAAe,YAAY,kBAAkB,EAAE;YACzI;UACJ,CAAC;AAGD,kBAAQ,KAAM,EAAE,OAAM,CAAE,EAAE,MAAM,WAAQ;AACpC,wBAAY,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;UACzE,CAAC;QACL,CAAC;MACL;;AAj8BJ,IAAAL,SAAA,oBAAA;AACmB,sBAAA,WAAqC;AAe5B,sBAAA,qBAAqB,WAAA;AA47BjD,aAAgBC,wBAAoB;AAChC,aAAO,kBAAkB,YAAW;IACxC;AAKA,aAAgB,yBAAsB;AAClC,wBAAkB,cAAa;IACnC;AAyBO,QAAM,wBAA2C,MAAK;AACzD,aAAO,EAAE,MAAM,WAAU;IAC7B;AAFa,IAAAD,SAAA,wBAAqB;AAgB3B,QAAM,qBAAwC,MAAK;AACtD,aAAO,EAAE,MAAM,kBAAiB;IACpC;AAFa,IAAAA,SAAA,qBAAkB;;;;;;;;;;;AC59C/B,QAAA,UAAA;AAGI,WAAA,eAAAM,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAY,EAAA,CAAA;AAEZ,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAe,EAAA,CAAA;AAEf,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAgB,EAAA,CAAA;AAGhB,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAe,EAAA,CAAA;AAInB,QAAA,kBAAA;AAII,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAmB,EAAA,CAAA;AAGnB,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAA
gB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAiB,EAAA,CAAA;AAIrB,QAAA,mBAAA;AAEI,WAAA,eAAAA,UAAA,6BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAyB,EAAA,CAAA;AACzB,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAsB,EAAA,CAAA;AACtB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAqB,EAAA,CAAA;AAIzB,QAAA,kBAAA;AACI,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAkB,EAAA,CAAA;AAItB,QAAA,kBAAA;AAeI,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAkB,EAAA,CAAA;AAOtB,QAAA,iBAAA;AACI,WAAA,eAAAA,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,eAAA;IAAW,EAAA,CAAA;AAQf,QAAA,cAAA;AACI,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,6BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAAyB,EAAA,CAAA;AACzB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAAe,EAAA,CAAA;AAMnB,QAAA,wBAAA;AACI,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,sBAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,sBAAA;IAAoB,EAAA,CAAA;AACpB,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,sBAAA;IAAsB,EAAA,CAAA;AActB,WAAA,eAAAA,UAAA,+BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,sBAAA;IAA2B,EAAA,CAAA;AAC3B,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,sBAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,sBAAA;IAAkB,EAAA,CAAA;AAItB,QAAA,aAAA;AAAS,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAqB,EAAA,CAAA;AAG9B,QAAA,sBAAA;AAGI,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,oBAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,oBAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,oBAAA;IAAoB,EAAA,CAAA;AACpB,WAAA,eAAAA,UAAA,6BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,oBAAA;IAAyB,EAAA,CAAA;AACzB,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,oBAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,oBAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,oBAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,oBAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,oBAAA;IAAwB,EAAA,CAAA;AAI5B,QAAA,mBAAA;AACI,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAoB,EAAA,CAAA;AACpB,WAAA,eAAAA,UAAA,gCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAA4B,EAAA,CAAA;;;;;;;;;;ACdhC,QAAA,aAAA;AAEa,IAAAC,SAAA,6BAA+C;MACxD,gBAAgB,WAAA;MAChB,YAAY;MACZ,cAAc;MACd,gBAAgB;MAChB,eAAe;MACf,WAAW,WAAA;;;;;;;;;;;ACpIf,QAAA,iBAAA;AAMA,QAAa,oBAAb,cAAuC,eAAA,kBAAwB;MAC3D
,YAAY,UAAU,uBAAqB;AACvC,cAAM,OAAO;AAEb,aAAK,OAAO;MAChB;;AALJ,IAAAC,SAAA,oBAAA;AAYA,QAAa,qBAAb,MAA+B;;;;;MAQ3B,YAA6B,iBAAyB,GAAC;AAA1B,aAAA,iBAAA;AAPrB,aAAA,UAAU;AACV,aAAA,QAA2B,CAAA;AAO/B,YAAI,iBAAiB,GAAG;AACpB,gBAAM,IAAI,MAAM,mCAAmC;QACvD;MACJ;;;;MAKA,IAAI,eAAY;AACZ,eAAO,KAAK;MAChB;;;;MAKA,IAAI,cAAW;AACX,eAAO,KAAK,MAAM;MACtB;;;;MAKA,IAAI,QAAK;AACL,eAAO,KAAK;MAChB;;;;;;;;;MAUA,MAAM,IAAO,IAAsB,aAA2B;AAE1D,YAAI,cAAa,GAAI;AACjB,gBAAM,IAAI,kBAAiB;QAC/B;AAEA,cAAM,KAAK,QAAO;AAGlB,YAAI,cAAa,GAAI;AACjB,eAAK,QAAO;AACZ,gBAAM,IAAI,kBAAiB;QAC/B;AAEA,YAAI;AACA,iBAAO,MAAM,GAAE;QACnB;AACI,eAAK,QAAO;QAChB;MACJ;;;;;;;;;MAUA,MAAM,IAAO,OAAgC,aAA2B;AACpE,eAAO,QAAQ,IAAI,MAAM,IAAI,UAAQ,KAAK,IAAI,MAAM,WAAW,CAAC,CAAC;MACrE;;;;;;;;;MAUA,MAAM,WAAc,OAAgC,aAA2B;AAC3E,eAAO,QAAQ,IACX,MAAM,IAAI,UACN,KAAK,IAAI,MAAM,WAAW,EACrB,KAAK,YAAU,EAAE,QAAQ,aAAsB,MAAK,EAAG,EACvD,MAAM,aAAW,EAAE,QAAQ,YAAqB,OAAM,EAAG,CAAC,CAClE;MAET;;;;;MAMQ,UAAO;AACX,YAAI,KAAK,UAAU,KAAK,gBAAgB;AACpC,eAAK;AACL,iBAAO,QAAQ,QAAO;QAC1B;AAGA,eAAO,IAAI,QAAc,CAAAC,cAAU;AAC/B,eAAK,MAAM,KAAKA,SAAO;QAC3B,CAAC;MACL;;;;;MAMQ,UAAO;AACX,aAAK;AAGL,cAAM,OAAO,KAAK,MAAM,MAAK;AAC7B,YAAI,MAAM;AACN,eAAK;AACL,eAAI;QACR;MACJ;;AA3HJ,IAAAD,SAAA,qBAAA;AA+HA,QAAA,aAAA;AAAS,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAuB,EAAA,CAAA;;;;;;;;;;ACuchC,IAAAE,SAAA,iBAAAC;AAxlBA,QAAA,wBAAA;AACA,QAAA,UAAA;AAkBA,aAAS,sBAAmB;AACxB,aAAO,MAAM,KAAK,IAAG,CAAE,IAAI,KAAK,OAAM,EAAG,SAAS,EAAE,EAAE,UAAU,GAAG,CAAC,CAAC;IACzE;AAYA,QAAa,oBAAb,MAA8B;MAI1B,YAAY,SAAwB;AAChC,aAAK,UAAU;UACX,GAAG,QAAA;UACH,GAAG;;AAEP,aAAK,UAAU,IAAI,sBAAA,mBAAmB,KAAK,QAAQ,cAAc;MACrE;;;;;;;MAQA,MAAM,QACF,KACA,OAAa;AAEb,cAAM,cAAc,oBAAmB;AACvC,cAAM,YAAY,KAAK,IAAG;AAG1B,cAAM,UAA4B;UAC9B,GAAG,KAAK;UACR,GAAG,IAAI;;AAIX,aAAK,eAAe;UAChB,OAAO;UACP,YAAY;UACZ,gBAAgB;UAChB,aAAa;UACb,YAAY;UACZ,SAAS;SACZ;AAGD,YAAI;AACJ,YAAI;AACA,sBAAY,IAAI,SAAS,MAAM,KAAK;QACxC,SAAS,OAAO;AACZ,gBAAM,WAAW,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACtE,iBAAO,KAAK,mBAAmB,WAAW,uBAAuB,QAAQ,EAAE;QAC/E;AAEA,YAAI,UAAU,WAAW,GAAG;AACxB,iBAAO,KAAK,kBAAkB,SAAS;QAC3C;AAGA,YAAI;AACJ,YAAI,KAAK,QAAQ,kBAAkB,UAAU,SAAS,GAAG;AACrD,gBAAM,cAAc,QAAQ,WAAW,IAAI;AAC3C,oBAAU,KAAK,QAAQ,eAAe,cAAc,WAAW;QACnE;AAGA,aAAK,eAAe;UAChB,OAAO;UACP,YAAY,UAAU;UACtB,gBAAgB;UAChB,aAAa;UACb,YAAY;UACZ,SAAS,cAAc,UAAU,MAAM,eAAe,QAAQ,cAAc;SAC/E;AAGD,cAAM,eAAe,KAAK,IAAG;AAC7B,YAAI;AACJ,YAAI;AACA,uBAAa,MAAM,KAAK,gBACpB,KACA,WACA,aACA,SACA,OAAO;QAEf,SAAS,OAAO;AACZ,cAAI,iBAAiB,sBAAA,mBAAmB;AACpC,kBAAMC,kBAAiB,KAAK,IAAG,IAAK;AACpC,mBAAO,KAAK,sBAAsB,WAAWA,iBAAgB,UAAU,QAAQ,QAAQ,cAAc;UACzG;AACA,gBAAM;QACV;AACA,cAAM,iBAAiB,KAAK,IAAG,IAAK;AAGpC,cAAM,iBAAiB,WAAW,OAAO,OAAK,EAAE,OAAO,EAAE;AACzD,cAAM,aAAa,WAAW,OAAO,OAAK,CAAC,EAAE,OAAO,EAAE;AAGtD,aAAK,eAAe;UAChB,OAAO;UACP,YAAY,UAAU;UACtB,gBAAgB;UAChB,aAAa;UACb,YAAY;UACZ,SAAS;SACZ;AAGD,cAAM,kBAAkB,KAAK,IAAG;AAChC,cAAM,gBAA+B;UACjC;UACA;UACA;UACA;UACA,gBAAgB,KAAK,QAAQ;UAC7B,eAAe;;AAGnB,YAAI;AACJ,YAAI;AACA,yBAAe,MAAM,IAAI,QAAQ,OAAO,YAAY,aAAa;QACrE,SAAS,OAAO;AACZ,gBAAM,WAAW,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACtE,gBAAMC,eAAc,KAAK,IAAG,IAAK;AAGjC,iBAAO;YACH,SAAS;YACT;YACA,aAAAA;YACA,gBAAgB;cACZ,YAAY,UAAU;cACtB;cACA;cACA;cACA,mBAAmB,KAAK,IAAG,IAAK;cAChC,gBAAgB,QAAQ;;YAE5B,OAAO,wBAAwB,QAAQ;;QAE/C;AAEA,cAAM,oBAAoB,KAAK,IAAG,IAAK;AACvC,cAAM,cAAc,KAAK,IAAG,IAAK;AAGjC,cAAM,iBAAiC;UACnC,YAAY,UAAU;UACtB;UACA;UACA;UACA;UACA,gBAAgB,QAAQ;;AAI5B,YAAI,KAAK,QAAQ,kBAAkB,SAAS;AACxC,eAAK,QAAQ,eAAe,cACxB,SACA,cAAc,cAAc,IAAI,UAAU,MAAM,oBAChD,cAAc;QAEtB;AAGA,aAAK,eAAe;UAChB,OAAO;UACP,YAAY,UAAU;UACtB,gBAAgB;UAChB,aAAa;UACb,YAAY;UACZ,SAAS,aAAa,cAAc,eAAe,UAAU;SAChE;AAGD,cAAM,iBAAiB,eAAe;AA
CtC,cAAM,SAAqD;UACvD,SAAS;UACT,QAAQ,aAAa;UACrB;UACA,aAAa,aAAa;UAC1B;UACA;;AAIJ,YAAI,CAAC,gBAAgB;AAEjB,gBAAM,gBAAgB,WAAW,OAAO,OAAK,CAAC,EAAE,OAAO;AACvD,cAAI,cAAc,WAAW,GAAG;AAC5B,mBAAO,QAAQ,kBAAkB,cAAc,CAAC,EAAE,SAAS,eAAe;UAC9E,OAAO;AAEH,kBAAM,eAAe,CAAC,GAAG,IAAI,IAAI,cAAc,IAAI,OAAK,EAAE,SAAS,eAAe,CAAC,CAAC;AACpF,gBAAI,aAAa,WAAW,GAAG;AAC3B,qBAAO,QAAQ,GAAG,cAAc,MAAM,kBAAkB,aAAa,CAAC,CAAC;YAC3E,OAAO;AACH,qBAAO,QAAQ,GAAG,cAAc,MAAM,sBAAsB,aAAa,MAAM;YACnF;UACJ;QACJ;AAEA,eAAO;MACX;;;;MAKQ,MAAM,gBACV,KACA,WACA,aACA,SACA,eAAsB;AAEtB,YAAI,iBAAiB;AACrB,YAAI,cAAc;AAClB,YAAI,YAAY;AAGhB,cAAM,QAAQ,UAAU,IAAI,CAAC,MAAM,UAAS;AACxC,iBAAO,MAAK;AAER,gBAAI,aAAa,KAAK,QAAQ,cAAa,GAAI;AAC3C,0BAAY;AAEZ,qBAAO,QAAQ,QAA+B;gBAC1C,YAAY,KAAK;gBACjB,SAAS;gBACT,OAAO;gBACP,iBAAiB;eACpB;YACL;AAEA,mBAAO,KAAK,eACR,KACA,MACA;cACI;cACA,YAAY,UAAU;cACtB,WAAW;cACX;cACA,aAAa,KAAK,QAAQ;eAE9B,OAAO,EACT,KAAK,YAAS;AAEZ,kBAAI,OAAO,SAAS;AAChB;cACJ,OAAO;AACH;cACJ;AAEA,mBAAK,eAAe;gBAChB,OAAO;gBACP,YAAY,UAAU;gBACtB,gBAAgB;gBAChB,aAAa;gBACb,YAAY,KAAK,OAAQ,iBAAiB,eAAe,UAAU,SAAU,EAAE;gBAC/E,SAAS,aAAa,iBAAiB,WAAW,IAAI,UAAU,MAAM;eACzE;AAGD,kBAAI,KAAK,QAAQ,gBAAgB;AAC7B,oBAAI;AACA,uBAAK,QAAQ,eAAe,MAAM,MAAM;gBAC5C,QAAQ;gBAER;cACJ;AAEA,qBAAO;YACX,CAAC;UACL;QACJ,CAAC;AAGD,YAAI;AACA,iBAAO,MAAM,KAAK,QAAQ,IAAI,OAAO,KAAK,QAAQ,WAAW;QACjE,SAAS,OAAO;AACZ,cAAI,iBAAiB,sBAAA,mBAAmB;AAEpC,kBAAM,iBAAiB,iBAAiB;AACxC,kBAAM,mBAA4C,CAAA;AAClD,qBAAS,IAAI,gBAAgB,IAAI,UAAU,QAAQ,KAAK;AACpD,+BAAiB,KAAK;gBAClB,YAAY,UAAU,CAAC,EAAE;gBACzB,SAAS;gBACT,OAAO;gBACP,iBAAiB;eACpB;YACL;AACA,kBAAM;UACV;AACA,gBAAM;QACV;MACJ;;;;MAKQ,MAAM,eACV,KACA,MACA,SACA,SAAyB;AAEzB,cAAM,YAAY,KAAK,IAAG;AAC1B,cAAM,cAAc,QAAQ,kBAAkB,QAAQ,iBAAiB,KAAK,IAAI;AAChF,cAAM,gBAAgB,QAAQ,aAAa,QAAA,2BAA2B;AAGtE,YAAI;AACJ,YAAI,KAAK,QAAQ,gBAAgB;AAC7B,sBAAY,KAAK,QAAQ,eAAe,gBACpC,mBAAmB,QAAQ,YAAY,CAAC,IAAI,QAAQ,UAAU,IAC9D,QAAQ,aAAa;QAE7B;AAEA,iBAAS,UAAU,GAAG,UAAU,aAAa,WAAW;AACpD,cAAI;AAEA,kBAAM,SAAS,MAAM,KAAK,+BACtB,KACA,MACA,SACA,aAAa;AAGjB,kBAAM,kBAAkB,KAAK,IAAG,IAAK;AAGrC,gBAAI,KAAK,QAAQ,kBAAkB,WAAW;AAE1C,kBAAI;AACJ,kBAAI;AACA,mCAAmB,KAAK,UAAU,MAAM;cAC5C,QAAQ;cAER;AACA,mBAAK,QAAQ,eAAe,cAAc,WAAW,aAAa,QAAW,QAAW,gBAAgB;AAIxG,kBAAI,KAAK,QAAQ,eAAe,uBAAuB;AACnD,sBAAM,oBAAoB;AAC1B,oBAAI,mBAAmB,WAAW;AAC9B,uBAAK,QAAQ,eAAe,sBAAsB,WAAW;oBACzD,WAAW,kBAAkB;oBAC7B,SAAS;;mBACZ;gBACL;cACJ;YACJ;AAEA,mBAAO;cACH,YAAY,KAAK;cACjB,SAAS;cACT;cACA;cACA;;UAER,SAAS,OAAO;AACZ,kBAAM,WAAW,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAGtE,gBAAI,YAAY,cAAc,GAAG;AAC7B,oBAAM,kBAAkB,KAAK,IAAG,IAAK;AAGrC,kBAAI,KAAK,QAAQ,kBAAkB,WAAW;AAC1C,qBAAK,QAAQ,eAAe,cAAc,WAAW,UAAU,QAAW,QAAQ;cACtF;AAEA,qBAAO;gBACH,YAAY,KAAK;gBACjB,SAAS;gBACT,OAAO;gBACP;gBACA;;YAER;AAGA,kBAAM,KAAK,MAAM,OAAQ,UAAU,EAAE;UACzC;QACJ;AAGA,eAAO;UACH,YAAY,KAAK;UACjB,SAAS;UACT,OAAO;UACP,iBAAiB,KAAK,IAAG,IAAK;UAC9B;;MAER;;;;;MAMQ,MAAM,+BACV,KACA,MACA,SACA,eAAiC;AAGjC,YAAI;AACA,iBAAO,MAAM,KAAK,0BACd,KACA,MACA,SACA,aAAa;QAErB,SAAS,OAAO;AAEZ,gBAAM,iBAAiB,iBAAiB,SACpC,MAAM,QAAQ,SAAS,iBAAiB;AAG5C,cAAI,CAAC,gBAAgB;AACjB,kBAAM;UACV;AAGA,gBAAM,mBAAmB,gBAAgB,gBAAgB,IAAI;AAG7D,iBAAO,MAAM,KAAK,0BACd,KACA,MACA,SACA,gBAAgB;QAExB;MACJ;;;;MAKQ,MAAM,0BACV,KACA,MACA,SACA,WAA6B;AAE7B,cAAM,aAAa,IAAI,OAAO,IAAI,MAAM,OAAO;AAE/C,YAAI,aAAa,YAAY,GAAG;AAC5B,iBAAO,MAAM,QAAQ,KAAK;YACtB;YACA,KAAK,qBAAiC,SAAS;WAClD;QACL,OAAO;AACH,iBAAO,MAAM;QACjB;MACJ;;;;MAKQ,qBAAwB,WAAiB;AAC7C,eAAO,IAAI,QAAQ,CAAC,GAAG,WAAU;AAC7B,qBAAW,MAAK;AACZ,mBAAO,IAAI,MAAM,6BAA6B,SAAS,IAAI,CAAC;UAChE,GAAG,SAAS;QAChB,CAAC;MACL;;;;MAKQ,MAAM,IAAU;AACpB,eAAO,IAAI,QAAQ,CAAAC,cAAW,WAAWA,WAAS,EAAE,CAAC;MACzD;;;;MAKQ,eAAe,UAAqB;AACxC,YAAI,KAAK,QAAQ,YAAY;AACzB,eAAK,QAAQ,WA
AW,QAAQ;QACpC;MACJ;;;;MAKQ,mBACJ,WACA,OAAa;AAEb,eAAO;UACH,SAAS;UACT,YAAY,CAAA;UACZ,aAAa,KAAK,IAAG,IAAK;UAC1B,gBAAgB;YACZ,YAAY;YACZ,gBAAgB;YAChB,YAAY;YACZ,gBAAgB;YAChB,mBAAmB;YACnB,gBAAgB,KAAK,QAAQ;;UAEjC;;MAER;;;;MAKQ,kBACJ,WAAiB;AAEjB,eAAO;UACH,SAAS;UACT,QAAQ;UACR,YAAY,CAAA;UACZ,aAAa;YACT,YAAY;YACZ,aAAa;YACb,aAAa;YACb,cAAc;YACd,cAAc;;UAElB,aAAa,KAAK,IAAG,IAAK;UAC1B,gBAAgB;YACZ,YAAY;YACZ,gBAAgB;YAChB,YAAY;YACZ,gBAAgB;YAChB,mBAAmB;YACnB,gBAAgB,KAAK,QAAQ;;;MAGzC;;;;MAKQ,sBACJ,WACA,gBACA,YACA,gBAAsB;AAEtB,eAAO;UACH,SAAS;UACT,YAAY,CAAA;UACZ,aAAa,KAAK,IAAG,IAAK;UAC1B,gBAAgB;YACZ;YACA,gBAAgB;YAChB,YAAY;YACZ;YACA,mBAAmB;YACnB;;UAEJ,OAAO;;MAEf;;AAjjBJ,IAAAJ,SAAA,oBAAA;AAujBA,aAAgBC,gBAAe,SAAwB;AACnD,aAAO,IAAI,kBAAkB,OAAO;IACxC;;;;;;;;;;AC5hBA,IAAAI,SAAA,iBAAA;AAyCA,IAAAA,SAAA,iBAAA;AAsBA,IAAAA,SAAA,mBAAA;AAUA,IAAAA,SAAA,mBAAA;AA4BA,IAAAA,SAAA,mBAAA;AAlKA,QAAA,oBAAA;AAKA,QAAA,WAAA;AAQA,QAAa,uBAAb,cAA0C,SAAA,kBAAiB;MAMvD,YACI,cACA,cAAqB;AAErB,cAAM,UAAU,eAAe,iBAAiB,YAAY,MAAM;AAClE,cAAM,8BAA8B,YAAY,IAAI,OAAO,IAAI;UAC3D,MAAM,SAAA,UAAU;UAChB,MAAM;YACF;YACA,GAAI,gBAAgB,EAAE,aAAY;;SAEzC;AACD,aAAK,OAAO;AACZ,aAAK,eAAe;AACpB,aAAK,eAAe;MACxB;;AArBJ,IAAAA,SAAA,uBAAA;AA2BA,QAAa,sBAAb,cAAyC,SAAA,kBAAiB;MACtD,YACI,SACA,OAAa;AAEb,cAAM,SAAS;UACX,MAAM,SAAA,UAAU;UAChB;SACH;AACD,aAAK,OAAO;MAChB;;AAVJ,IAAAA,SAAA,sBAAA;AAqBA,aAAgB,eACZ,UACA,SAA4B;AAE5B,YAAM,EAAE,WAAW,sBAAsB,MAAK,IAAK;AAGnD,iBAAW,YAAY,SAAS,mBAAmB;AAC/C,YAAI,EAAE,YAAY,cAAc,UAAU,QAAQ,MAAM,UAAa,UAAU,QAAQ,MAAM,MAAM;AAC/F,gBAAM,IAAI,qBAAqB,QAAQ;QAC3C;MACJ;AAEA,UAAI;AAGA,YAAI,YAAW,GAAA,kBAAA,qBAAoB,SAAS,UAAU,WAAW;UAC7D,QAAQ;UACR,sBAAsB;UACtB,0BAA0B;;SAC7B;AAGD,YAAI,uBAAuB,SAAS,cAAc;AAC9C,qBAAW,GAAG,SAAS,YAAY;;EAAO,QAAQ;QACtD;AAEA,eAAO;MACX,SAAS,OAAO;AACZ,cAAM,IAAI,oBACN,6BACA,iBAAiB,QAAQ,QAAQ,MAAS;MAElD;IACJ;AAOA,aAAgB,eAAe,QAK9B;AAEG,YAAM,oBAAoB,OAAO,qBAAqB,iBAAiB,OAAO,QAAQ;AAEtF,aAAO;QACH,UAAU,OAAO;QACjB;QACA,cAAc,OAAO;QACrB,gBAAgB,OAAO;;IAE/B;AAOA,aAAgB,iBAAiB,UAAgB;AAE7C,cAAO,GAAA,kBAAA,kBAAyB,UAAU,KAAK;IACnD;AAOA,aAAgB,iBAAiB,UAAwB;AAKrD,YAAM,oBAAoB,iBAAiB,SAAS,QAAQ;AAC5D,YAAM,cAAc,IAAI,IAAI,SAAS,iBAAiB;AACtD,YAAM,cAAc,IAAI,IAAI,iBAAiB;AAG7C,YAAM,oBAAoB,SAAS,kBAAkB,OAAO,OAAK,CAAC,YAAY,IAAI,CAAC,CAAC;AAGpF,YAAM,sBAAsB,kBAAkB,OAAO,OAAK,CAAC,YAAY,IAAI,CAAC,CAAC;AAE7E,aAAO;QACH,OAAO,kBAAkB,WAAW;QACpC;QACA;;IAER;AAQA,aAAgB,iBACZ,WACA,YAAoB,QAAM;AAE1B,YAAM,mBAAmB,UAAU,IAAI,OAAK,EAAE,QAAQ,EAAE,KAAK,SAAS;AACtE,YAAM,mBAAmB,MAAM,KAC3B,IAAI,IAAI,UAAU,QAAQ,OAAK,EAAE,iBAAiB,CAAC,CAAC;AAIxD,YAAM,eAAe,UAAU,KAAK,OAAK,EAAE,YAAY,GAAG;AAE1D,aAAO;QACH,UAAU;QACV,mBAAmB;QACnB;;IAER;AAKa,IAAAA,SAAA,kBAAkB;;;;MAI3B,OAAO,KAAW;AACd,eAAO,IACF,QAAQ,OAAO,MAAM,EACrB,QAAQ,OAAO,KAAK,EACpB,QAAQ,OAAO,KAAK;MAC7B;;;;MAKA,SAAS,KAAa,WAAmB,SAAiB,OAAK;AAC3D,YAAI,IAAI,UAAU,WAAW;AACzB,iBAAO;QACX;AACA,eAAO,IAAI,MAAM,GAAG,YAAY,OAAO,MAAM,IAAI;MACrD;;;;MAKA,OAAO,KAAa,SAAiB,GAAC;AAClC,cAAM,SAAS,IAAI,OAAO,MAAM;AAChC,eAAO,IAAI,MAAM,IAAI,EAAE,IAAI,UAAQ,SAAS,IAAI,EAAE,KAAK,IAAI;MAC/D;;;;MAKA,aAAa,KAA8B,SAAiB,GAAC;AACzD,cAAM,YAAY,IAAI,OAAO,MAAM;AACnC,cAAM,QAAkB,CAAA;AAExB,mBAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,GAAG,GAAG;AAC5C,cAAI,OAAO,UAAU,YAAY,UAAU,QAAQ,CAAC,MAAM,QAAQ,KAAK,GAAG;AACtE,kBAAM,KAAK,GAAG,SAAS,GAAG,GAAG,GAAG;AAChC,kBAAM,KAAK,KAAK,aAAa,OAAkC,SAAS,CAAC,CAAC;UAC9E,WAAW,MAAM,QAAQ,KAAK,GAAG;AAC7B,kBAAM,KAAK,GAAG,SAAS,GAAG,GAAG,GAAG;AAChC,uBAAW,QAAQ,OAAO;AACtB,oBAAM,KAAK,GAAG,SAAS,OAAO,OAAO,IAAI,CAAC,EAAE;YAChD;UACJ,OAAO;AACH,kBAAM,KAAK,GAAG,SAAS,GAAG,GAAG,KAAK,OAAO,KAAK,CAAC,EAAE;UACrD;QACJ;AAEA,eAAO,MAAM,KAAK,IAAI;MAC1B;;AAMS,IAAAA,SAAA,kBAAkB;;;;MAI3B,KAAQ,UAAgB;AAEpB,cAAM,YAAY,SAAS,MAAM,8BAA8B;AAC/D
,YAAI,WAAW;AACX,iBAAO,KAAK,MAAM,UAAU,CAAC,EAAE,KAAI,CAAE;QACzC;AAGA,cAAM,cAAc,SAAS,MAAM,aAAa;AAChD,YAAI,aAAa;AACb,iBAAO,KAAK,MAAM,YAAY,CAAC,CAAC;QACpC;AAEA,cAAM,aAAa,SAAS,MAAM,aAAa;AAC/C,YAAI,YAAY;AACZ,iBAAO,KAAK,MAAM,WAAW,CAAC,CAAC;QACnC;AAEA,cAAM,IAAI,MAAM,2BAA2B;MAC/C;;;;MAKA,KAAK,UAAgB;AACjB,cAAM,QAAQ,SAAS,MAAM,IAAI;AACjC,cAAM,QAAkB,CAAA;AAExB,mBAAW,QAAQ,OAAO;AAEtB,gBAAM,UAAU,KAAK,QAAQ,gBAAgB,EAAE,EAC1C,QAAQ,kBAAkB,EAAE,EAC5B,KAAI;AAET,cAAI,SAAS;AACT,kBAAM,KAAK,OAAO;UACtB;QACJ;AAEA,eAAO;MACX;;;;MAKA,SAAS,UAAgB;AACrB,cAAM,SAAiC,CAAA;AACvC,cAAM,QAAQ,SAAS,MAAM,IAAI;AAEjC,mBAAW,QAAQ,OAAO;AACtB,gBAAM,QAAQ,KAAK,MAAM,yBAAyB;AAClD,cAAI,OAAO;AACP,mBAAO,MAAM,CAAC,EAAE,KAAI,CAAE,IAAI,MAAM,CAAC,EAAE,KAAI;UAC3C;QACJ;AAEA,eAAO;MACX;;;;;;;;;;;ACnSJ,QAAsB,cAAtB,MAAiC;;;;MAYnB,yBAAyB,SAAgC;AAC/D,eAAO,QACF,OAAO,OAAK,EAAE,WAAW,EAAE,WAAW,MAAS,EAC/C,IAAI,OAAK,EAAE,MAAO;MAC3B;;;;MAKU,YACN,YACA,aACA,cACA,cAAqB;AAErB,eAAO;UACH;UACA;UACA,aAAa,aAAa;UAC1B;UACA;;MAER;;;;MAKU,kBAAkB,eAA4B;AACpD,eAAO;UACH,QAAQ;UACR,OAAO;YACH,YAAY;YACZ,aAAa;YACb,aAAa;YACb,cAAc;YACd,cAAc;;;MAG1B;;AAlDJ,IAAAC,SAAA,cAAA;AAwDA,QAAa,kBAAb,cAAwC,YAAmB;MACvD,MAAM,OACF,SACA,SAAsB;AAEtB,cAAM,YAAY,KAAK,IAAG;AAC1B,cAAM,UAAU,KAAK,yBAAyB,OAAO;AACrD,cAAM,eAAe,KAAK,IAAG,IAAK;AAElC,eAAO;UACH,QAAQ;UACR,OAAO,KAAK,YAAY,QAAQ,QAAQ,QAAQ,QAAQ,cAAc,KAAK;;MAEnF;;AAbJ,IAAAA,SAAA,kBAAA;AAmBA,QAAa,iBAAb,cAAuC,YAAqB;MACxD,MAAM,OACF,SACA,SAAsB;AAEtB,cAAM,YAAY,KAAK,IAAG;AAC1B,cAAM,SAAS,KAAK,yBAAyB,OAAO;AACpD,cAAM,YAAY,OAAO,KAAI;AAC7B,cAAM,eAAe,KAAK,IAAG,IAAK;AAElC,eAAO;UACH,QAAQ;UACR,OAAO,KAAK,YACR,OAAO,OAAO,CAAC,KAAK,QAAQ,MAAM,IAAI,QAAQ,CAAC,GAC/C,UAAU,QACV,cACA,KAAK;;MAGjB;;AAnBJ,IAAAA,SAAA,iBAAA;AAyBA,QAAa,qBAAb,cAAmE,YAAsC;MACrG,YACY,YACA,eAA4B;AAEpC,cAAK;AAHG,aAAA,aAAA;AACA,aAAA,gBAAA;MAGZ;MAEA,MAAM,OACF,SACA,SAAsB;AAEtB,cAAM,YAAY,KAAK,IAAG;AAC1B,cAAM,UAAU,KAAK,yBAAyB,OAAO;AAErD,YAAI,QAAQ,WAAW,GAAG;AACtB,iBAAO,KAAK,kBAAkB,KAAK,aAAa;QACpD;AAEA,cAAM,aAAa,KAAK,WAAW,OAAO;AAC1C,cAAM,eAAe,KAAK,IAAG,IAAK;AAElC,eAAO;UACH,QAAQ;UACR,OAAO,KAAK,YAAY,QAAQ,QAAQ,GAAG,cAAc,KAAK;;MAEtE;;AA1BJ,IAAAA,SAAA,qBAAA;;;;;;;;;;AC6BA,IAAAC,SAAA,6BAAA;AAvIA,QAAA,YAAA;AAuDA,QAAa,uBAAb,cAAoE,UAAA,YAA8C;MAC9G,YAAoB,SAAuC;AACvD,cAAK;AADW,aAAA,UAAA;MAEpB;;;;MAKA,MAAM,OACF,SACA,SAAsB;AAEtB,cAAM,YAAY,KAAK,IAAG;AAG1B,cAAM,WAAgB,CAAA;AACtB,mBAAW,UAAU,SAAS;AAC1B,cAAI,OAAO,WAAW,OAAO,QAAQ;AACjC,qBAAS,KAAK,GAAG,OAAO,MAAM;UAClC;QACJ;AAEA,cAAM,gBAAgB,SAAS;AAG/B,cAAM,eAAe,KAAK,iBAAiB,QAAQ;AAGnD,YAAI,KAAK,QAAQ,MAAM;AACnB,uBAAa,KAAK,KAAK,QAAQ,IAAI;QACvC;AAGA,cAAM,UAAU,KAAK,QAAQ,YACvB,KAAK,QAAQ,UAAU,YAAY,IACnC;AAEN,cAAM,eAAe,KAAK,IAAG,IAAK;AAElC,eAAO;UACH,QAAQ;YACJ,OAAO;YACP;;UAEJ,OAAO;YACH,YAAY;YACZ,aAAa,aAAa;YAC1B,aAAa,gBAAgB,aAAa;YAC1C;YACA,cAAc;;;MAG1B;;;;MAKQ,iBAAiB,OAAU;AAC/B,cAAM,OAAO,oBAAI,IAAG;AAEpB,mBAAW,QAAQ,OAAO;AACtB,gBAAM,MAAM,KAAK,QAAQ,OAAO,IAAI;AAEpC,cAAI,KAAK,IAAI,GAAG,GAAG;AAEf,kBAAM,WAAW,KAAK,IAAI,GAAG;AAC7B,kBAAM,SAAS,KAAK,QAAQ,MAAM,UAAU,IAAI;AAChD,iBAAK,IAAI,KAAK,MAAM;UACxB,OAAO;AACH,iBAAK,IAAI,KAAK,IAAI;UACtB;QACJ;AAEA,eAAO,MAAM,KAAK,KAAK,OAAM,CAAE;MACnC;;AA1EJ,IAAAA,SAAA,uBAAA;AAgFA,aAAgB,2BACZ,SAAuC;AAEvC,aAAO,IAAI,qBAAqB,OAAO;IAC3C;AAMA,QAAa,6BAAb,cAAgD,UAAA,YAAyD;MAGrG,YAAY,gBAAyB,MAAI;AACrC,cAAK;AACL,aAAK,gBAAgB;MACzB;MAEA,MAAM,OACF,SACA,SAAsB;AAEtB,cAAM,YAAY,KAAK,IAAG;AAE1B,cAAM,aAAuB,CAAA;AAC7B,mBAAW,UAAU,SAAS;AAC1B,cAAI,OAAO,WAAW,OAAO,QAAQ;AACjC,uBAAW,KAAK,GAAG,OAAO,MAAM;UACpC;QACJ;AAEA,cAAM,OAAO,oBAAI,IAAG;AACpB,cAAM,SAAmB,CAAA;AAEzB,mBAAW,OAAO,YAAY;AAC1B,gBAAM,MAAM,KAAK,gBAAgB,MAAM,IAAI,YAAW;AACtD,cAAI,CAAC,KAAK,IAAI,GAAG,GAAG;AAChB,iBAAK,IAAI,GAAG;AACZ,mBAAO,KAAK,GAAG;UACn
B;QACJ;AAEA,cAAM,eAAe,KAAK,IAAG,IAAK;AAElC,eAAO;UACH,QAAQ;YACJ,OAAO;YACP,OAAO,OAAO;;UAElB,OAAO;YACH,YAAY,WAAW;YACvB,aAAa,OAAO;YACpB,aAAa,WAAW,SAAS,OAAO;YACxC;YACA,cAAc;;;MAG1B;;AA/CJ,IAAAA,SAAA,6BAAA;AAsDA,QAAa,4BAAb,cAA+C,UAAA,YAM7C;MACE,MAAM,OACF,SACA,SAAsB;AAQtB,cAAM,YAAY,KAAK,IAAG;AAE1B,cAAM,aAAuB,CAAA;AAC7B,mBAAW,UAAU,SAAS;AAC1B,cAAI,OAAO,WAAW,OAAO,QAAQ;AACjC,uBAAW,KAAK,GAAG,OAAO,MAAM;UACpC;QACJ;AAEA,YAAI,WAAW,WAAW,GAAG;AACzB,iBAAO;YACH,QAAQ;cACJ,KAAK;cACL,SAAS;cACT,KAAK;cACL,KAAK;cACL,OAAO;;YAEX,OAAO;cACH,YAAY;cACZ,aAAa;cACb,aAAa;cACb,cAAc,KAAK,IAAG,IAAK;cAC3B,cAAc;;;QAG1B;AAEA,cAAM,MAAM,WAAW,OAAO,CAAC,GAAG,MAAM,IAAI,GAAG,CAAC;AAChD,cAAM,UAAU,MAAM,WAAW;AACjC,cAAM,MAAM,KAAK,IAAI,GAAG,UAAU;AAClC,cAAM,MAAM,KAAK,IAAI,GAAG,UAAU;AAElC,cAAM,eAAe,KAAK,IAAG,IAAK;AAElC,eAAO;UACH,QAAQ;YACJ;YACA;YACA;YACA;YACA,OAAO,WAAW;;UAEtB,OAAO;YACH,YAAY,WAAW;YACvB,aAAa;;YACb,aAAa,WAAW,SAAS;YACjC;YACA,cAAc;;;MAG1B;;AApEJ,IAAAA,SAAA,4BAAA;;;;;;;;;;ACvEA,IAAAC,SAAA,kBAAA;AAkCA,IAAAA,SAAA,6BAAA;AAjKA,QAAA,YAAA;AACA,QAAA,oBAAA;AACA,QAAA,WAAA;AAoCA,QAAa,YAAb,cAA0D,UAAA,YAAsC;MAC5F,YAAoB,SAAoD;AACpE,cAAK;AADW,aAAA,UAAA;MAEpB;;;;MAKA,MAAM,OACF,SACA,SAAsB;AAEtB,cAAM,YAAY,KAAK,IAAG;AAC1B,cAAM,UAAU,KAAK,yBAAyB,OAAO;AAGrD,YAAI,QAAQ,WAAW,GAAG;AACtB,gBAAM,iBAAiB,MAAM,KAAK,QAAQ,gBAAgB,OAAO,SAAS,OAAO;AACjF,iBAAO;YACH,GAAG;YACH,OAAO;cACH,GAAG,eAAe;cAClB,cAAc;;;QAG1B;AAGA,cAAM,SAAS,KAAK,QAAQ,YAAY,SAAS,OAAO;AAExD,YAAI;AAEA,gBAAM,WAAW,MAAM,KAAK,QAAQ,UAAU,QAAQ;YAClD,OAAO,KAAK,QAAQ;WACvB;AAED,cAAI,SAAS,WAAW,SAAS,UAAU;AAEvC,kBAAM,SAAS,KAAK,QAAQ,cAAc,SAAS,UAAU,OAAO;AACpE,kBAAM,eAAe,KAAK,IAAG,IAAK;AAElC,mBAAO;cACH;cACA,OAAO;gBACH,YAAY,QAAQ;gBACpB,aAAa;gBACb,aAAa,QAAQ,SAAS;gBAC9B;gBACA,cAAc;;;UAG1B;AAGA,WAAA,GAAA,SAAA,WAAS,EAAG,KAAK,SAAA,YAAY,YAAY,oDAAoD,SAAS,KAAK,EAAE;AAC7G,iBAAO,KAAK,kBAAkB,SAAS,SAAS,SAAS;QAE7D,SAAS,OAAO;AAEZ,WAAA,GAAA,SAAA,WAAS,EAAG,KAAK,SAAA,YAAY,YAAY,mDAAmD,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,EAAE;AACpJ,iBAAO,KAAK,kBAAkB,SAAS,SAAS,SAAS;QAC7D;MACJ;;;;MAKQ,MAAM,kBACV,SACA,SACA,WAAiB;AAEjB,cAAM,iBAAiB,MAAM,KAAK,QAAQ,gBAAgB,OAAO,SAAS,OAAO;AACjF,cAAM,eAAe,KAAK,IAAG,IAAK;AAElC,eAAO;UACH,GAAG;UACH,OAAO;YACH,GAAG,eAAe;YAClB;YACA,cAAc;;;MAG1B;;AAnFJ,IAAAA,SAAA,YAAA;AAyFA,aAAgB,gBACZ,SAAoD;AAEpD,aAAO,IAAI,UAAU,OAAO;IAChC;AA8BA,aAAgB,2BACZ,SAA6B;AAG7B,YAAM,kBAAkB,IAAI,cAAc,UAAA,YAAwC;QAC9E,MAAM,OACF,SACA,SAAsB;AAEtB,gBAAM,UAAU,KAAK,yBAAyB,OAAO;AACrD,iBAAO;YACH,QAAQ;cACJ,SAAS,QAAQ,KAAK,aAAa;cACnC,WAAW,QAAQ,MAAM,GAAG,CAAC;cAC7B,eAAe,QAAQ;;YAE3B,OAAO,KAAK,YAAY,QAAQ,QAAQ,GAAG,GAAG,KAAK;;QAE3D;QACH;AAED,aAAO,gBAA6C;QAChD,WAAW,QAAQ;QACnB,OAAO,QAAQ;QACf;QAEA,aAAa,CAAC,SAAS,YAAW;AAC9B,gBAAM,SAAS,QAAQ,gBAAgB;AACvC,gBAAM,kBAAkB,QAAQ,IAAI,CAAC,GAAG,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC,EAAE,EAAE,KAAK,MAAM;AAE5E,iBAAO,GAAG,MAAM;;EAE1B,eAAe;;;;;;;;;;;QAWT;QAEA,eAAe,CAAC,UAAU,oBAAmB;AACzC,cAAI;AACA,kBAAM,SAAS,kBAAA,gBAAgB,KAA+C,QAAQ;AACtF,mBAAO;cACH,SAAS,OAAO,WAAW;cAC3B,WAAW,OAAO,aAAa,CAAA;cAC/B,eAAe,gBAAgB;;UAEvC,QAAQ;AAEJ,mBAAO;cACH,SAAS;cACT,WAAW,CAAA;cACX,eAAe,gBAAgB;;UAEvC;QACJ;OACH;IACL;;;;;;;;;;AC1EA,IAAAC,SAAA,sBAAA;AAyBA,IAAAA,SAAA,4BAAA;AA/KA,QAAA,YAAA;AACA,QAAA,kBAAA;AACA,QAAA,WAAA;AAkDA,QAAa,gBAAb,cAAwE,UAAA,YAA2B;MAG/F,YAAoB,SAA2C;AAC3D,cAAK;AADW,aAAA,UAAA;AAEhB,aAAK,uBAAuB,IAAI,gBAAA,qBAAqB,QAAQ,oBAAoB;MACrF;;;;MAKA,MAAM,OACF,SACA,SAAsB;AAEtB,cAAM,YAAY,KAAK,IAAG;AAG1B,cAAM,sBAAsB,MAAM,KAAK,qBAAqB,OAAO,SAAS,OAAO;AACnF,cAAM,sBAAsB,oBAAoB;AAChD,cAAM,sBAAsB,oBAAoB,MAAM;AAGtD,cAAM,oBAAoB,KAAK,QAAQ,qBAAqB;AAC5D,YAAI,qBAAqB,oBAAoB,MAAM,WAAW,GAAG;AAC7D,gBAAM,eAAe,KAAK,IAAG,IAAK;AAClC,iBAAO;YACH,QAAQ,KAAK,QAAQ,qBAAqB,mBAAmB;YAC7D,OAAO;cAC
H,GAAG,oBAAoB;cACvB;cACA,cAAc;;;QAG1B;AAGA,cAAM,kBAAkB,KAAK,IAAG;AAChC,cAAM,SAAS,KAAK,QAAQ,kBAAkB,qBAAqB,OAAO;AAE1E,YAAI;AACA,gBAAM,WAAW,MAAM,KAAK,QAAQ,UAAU,QAAQ;YAClD,OAAO,KAAK,QAAQ;WACvB;AAED,cAAI,SAAS,WAAW,SAAS,UAAU;AACvC,kBAAM,iBAAiB,KAAK,QAAQ,sBAChC,SAAS,UACT,mBAAmB;AAEvB,kBAAM,eAAe,KAAK,IAAG,IAAK;AAElC,mBAAO;cACH,QAAQ;cACR,OAAO;gBACH,YAAY,oBAAoB,MAAM;gBACtC,aAAa,oBAAoB,MAAM;gBACvC,aAAa,oBAAoB,MAAM;gBACvC;gBACA,cAAc;;;UAG1B;AAGA,WAAA,GAAA,SAAA,WAAS,EAAG,KAAK,SAAA,YAAY,YAAY,oDAAoD,SAAS,KAAK,EAAE;AAC7G,iBAAO,KAAK,qBAAqB,qBAAqB,oBAAoB,OAAO,SAAS;QAE9F,SAAS,OAAO;AACZ,WAAA,GAAA,SAAA,WAAS,EAAG,KAAK,SAAA,YAAY,YAAY,mDAAmD,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,EAAE;AACpJ,iBAAO,KAAK,qBAAqB,qBAAqB,oBAAoB,OAAO,SAAS;QAC9F;MACJ;;;;MAKQ,qBACJ,qBACA,oBACA,WAAiB;AAEjB,cAAM,eAAe,KAAK,IAAG,IAAK;AAElC,eAAO;UACH,QAAQ,KAAK,QAAQ,qBAAqB,mBAAmB;UAC7D,OAAO;YACH,GAAG;YACH;YACA,cAAc;;;MAG1B;;AA5FJ,IAAAA,SAAA,gBAAA;AAkGA,aAAgB,oBACZ,SAA2C;AAE3C,aAAO,IAAI,cAAc,OAAO;IACpC;AAqBA,aAAgB,0BACZ,sBACA,WACA,iBACA,OAAc;AAEd,aAAO,oBAAgD;QACnD;QACA;QACA;QAEA,mBAAmB,CAAC,qBAAqB,YAAW;AAChD,gBAAM,YAAY,gBAAgB,oBAAoB,KAAK;AAC3D,iBAAO,sCAAsC,oBAAoB,MAAM,MAAM;;EAEvF,SAAS;;;QAGH;QAEA,uBAAuB,CAAC,UAAU,wBAAuB;AACrD,iBAAO;YACH,OAAO,oBAAoB;YAC3B,SAAS,SAAS,KAAI;YACtB,OAAO;cACH,eAAe,oBAAoB,MAAM;cACzC,gBAAgB,oBAAoB,MAAM;cAC1C,cAAc;;;QAG1B;QAEA,sBAAsB,CAAC,wBAAuB;AAC1C,iBAAO;YACH,OAAO,oBAAoB;YAC3B,SAAS,SAAS,oBAAoB,MAAM,MAAM;YAClD,OAAO;cACH,eAAe,oBAAoB,MAAM;cACzC,gBAAgB,oBAAoB,MAAM;cAC1C,cAAc;;;QAG1B;OACH;IACL;;;;;;;;;;ACpOA,QAAA,YAAA;AACI,WAAA,eAAAC,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAW,EAAA,CAAA;AACX,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAkB,EAAA,CAAA;AAItB,QAAA,kBAAA;AACI,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAoB,EAAA,CAAA;AACpB,WAAA,eAAAA,UAAA,8BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAA0B,EAAA,CAAA;AAC1B,WAAA,eAAAA,UAAA,8BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAA0B,EAAA,CAAA;AAC1B,WAAA,eAAAA,UAAA,6BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAyB,EAAA,CAAA;AAS7B,QAAA,eAAA;AACI,WAAA,eAAAA,UAAA,aAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAS,EAAA,CAAA;AACT,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,8BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAA0B,EAAA,CAAA;AAS9B,QAAA,mBAAA;AACI,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,6BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAyB,EAAA,CAAA;;;;;;;;;;AC6G7B,IAAAC,SAAA,qBAAA;AAOA,IAAAA,SAAA,kCAAA;AAwFA,IAAAA,SAAA,4BAAA;AAnLA,QAAa,eAAb,MAAyB;MACrB,YAAoB,UAA+B,CAAA,GAAE;AAAjC,aAAA,UAAA;MAAoC;MAExD,MAAM,OAAgB;AAClB,cAAM,EAAE,OAAO,QAAO,IAAK;AAC3B,cAAM,EAAE,YAAY,QAAQ,YAAY,EAAC,IAAK,KAAK;AAGnD,cAAM,gBAAgB,SAChB,MAAM,OAAO,MAAM,IACnB;AAGN,cAAM,YAA0C,CAAA;AAEhD,YAAI,cAAc,GAAG;AAEjB,mBAAS,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;AAC3C,kBAAM,OAAO,cAAc,CAAC;AAC5B,kBAAM,KAAK,aACL,WAAW,MAAM,CAAC,IAClB,QAAQ,CAAC,IAAI,KAAK,mBAAmB,KAAK,IAAI,CAAC;AAErD,sBAAU,KAAK;cACX;cACA,MAAM;gBACF;gBACA;;cAEJ,UAAU;gBACN,OAAO;gBACP,YAAY,cAAc;;aAEjC;UACL;QACJ,OAAO;AAEH,mBAAS,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK,WAAW;AACtD,kBAAM,QAAQ,cAAc,MAAM,GAAG,IAAI,SAAS;AAGlD,qBAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACnC,oBAAM,OAAO,MAAM,CAAC;AACpB,oBAAM,cAAc,IAAI;AACxB,oBAAM,KAAK,aACL,WAAW,MAAM,WAAW,IAC5B,QAAQ,WAAW,IAAI,KAAK,mBA
AmB,KAAK,IAAI,CAAC;AAE/D,wBAAU,KAAK;gBACX;gBACA,MAAM;kBACF;kBACA;;gBAEJ,UAAU;kBACN,OAAO;kBACP,YAAY,KAAK,MAAM,IAAI,SAAS;kBACpC,YAAY,cAAc;;eAEjC;YACL;UACJ;QACJ;AAEA,eAAO;MACX;;;;;MAMQ,mBAAmBC,QAAY;AAEnC,eAAOA,OACF,QAAQ,UAAU,GAAG,EACrB,QAAQ,oBAAoB,EAAE,EAC9B,YAAW,EACX,MAAM,GAAG,EAAE;MACpB;;AA9EJ,IAAAD,SAAA,eAAA;AAoFA,aAAgB,mBAAmB,SAA6B;AAC5D,aAAO,IAAI,aAAa,OAAO;IACnC;AAKA,aAAgB,gCACZ,YACA,SAA6C;AAE7C,YAAM,uBAAuB,WAAW,IAAI,SACxC,IAAI,WAAW,GAAG,IAAI,IAAI,YAAW,IAAK,IAAI,IAAI,YAAW,CAAE,EAAE;AAGrE,aAAO,IAAI,aAAa;QACpB,GAAG;QACH,QAAQ,CAAC,SAAQ;AACb,gBAAM,MAAM,iBAAiB,KAAK,IAAI,EAAE,YAAW;AACnD,iBAAO,qBAAqB,SAAS,GAAG;QAC5C;OACH;IACL;AAKA,aAAS,iBAAiBC,QAAY;AAClC,YAAM,UAAUA,OAAK,YAAY,GAAG;AACpC,UAAI,YAAY,MAAM,YAAYA,OAAK,SAAS,GAAG;AAC/C,eAAO;MACX;AACA,aAAOA,OAAK,MAAM,OAAO;IAC7B;AAiBA,QAAa,sBAAb,MAAgC;MAC5B,YACY,YAAoB,GACpB,UAAkD,CAAA,GAAE;AADpD,aAAA,YAAA;AACA,aAAA,UAAA;MACT;MAEH,MAAM,OAAgB;AAClB,cAAM,EAAE,OAAO,QAAO,IAAK;AAC3B,cAAM,EAAE,OAAM,IAAK,KAAK;AAGxB,cAAM,gBAAgB,SAChB,MAAM,OAAO,MAAM,IACnB;AAEN,cAAM,YAAiD,CAAA;AACvD,cAAM,eAAe,KAAK,KAAK,cAAc,SAAS,KAAK,SAAS;AAEpE,iBAAS,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK,KAAK,WAAW;AAC3D,gBAAM,QAAQ,cAAc,MAAM,GAAG,IAAI,KAAK,SAAS;AACvD,gBAAM,aAAa,KAAK,MAAM,IAAI,KAAK,SAAS;AAEhD,oBAAU,KAAK;YACX,IAAI,SAAS,UAAU;YACvB,MAAM;cACF,OAAO;cACP;cACA;;YAEJ,UAAU;cACN;cACA;cACA,cAAc,MAAM;cACpB,YAAY,cAAc;;WAEjC;QACL;AAEA,eAAO;MACX;;AAvCJ,IAAAD,SAAA,sBAAA;AA6CA,aAAgB,0BACZ,WACA,SAAgD;AAEhD,aAAO,IAAI,oBAAoB,WAAW,OAAO;IACrD;;;;;;;;;;AC0IA,IAAAE,SAAA,sBAAA;AAOA,IAAAA,SAAA,0BAAA;AAeA,IAAAA,SAAA,+BAAA;AAlZA,QAAA,aAAA;AA6EA,QAAM,wBAA8C;MAChD,cAAc,WAAA;MACd,aAAa,WAAA;MACb,UAAU,WAAA;MACV,oBAAoB,WAAA;;AAMxB,QAAa,gBAAb,MAA0B;MAGtB,YAAY,UAAyC,CAAA,GAAE;AACnD,aAAK,UAAU,EAAE,GAAG,uBAAuB,GAAG,QAAO;MACzD;MAEA,MAAM,OAAiB;AACnB,cAAM,EAAE,SAAS,QAAQ,QAAO,IAAK;AAErC,YAAI,CAAC,WAAW,QAAQ,WAAW,GAAG;AAClC,iBAAO,CAAA;QACX;AAEA,cAAM,SAAS,KAAK,aAAa,OAAO;AAExC,eAAO,OAAO,IAAI,CAAC,OAAO,WAAW;UACjC,IAAI,SAAS,KAAK,IAAI,UAAU,SAAS;UACzC,MAAM;YACF,SAAS,MAAM;YACf,YAAY;YACZ,aAAa,OAAO;YACpB;YACA;YACA,aAAa,MAAM;YACnB,WAAW,MAAM;;UAErB,UAAU;YACN,YAAY;YACZ,aAAa,OAAO;YACpB,WAAW,MAAM,QAAQ;;UAE/B;MACN;;;;MAKQ,aAAa,SAAe;AAKhC,gBAAQ,KAAK,QAAQ,UAAU;UAC3B,KAAK;AACD,mBAAO,KAAK,aAAa,OAAO;UACpC,KAAK;AACD,mBAAO,KAAK,kBAAkB,OAAO;UACzC,KAAK;AACD,mBAAO,KAAK,iBAAiB,OAAO;UACxC,KAAK;UACL;AACI,mBAAO,KAAK,kBAAkB,OAAO;QAC7C;MACJ;;;;MAKQ,kBAAkB,SAAe;AAKrC,cAAM,EAAE,cAAc,aAAa,mBAAkB,IAAK,KAAK;AAC/D,cAAM,SAA6E,CAAA;AAGnF,cAAM,mBAAmB,KAAK,IAAI,aAAa,KAAK,MAAM,eAAe,CAAC,CAAC;AAE3E,YAAI,cAAc;AAElB,eAAO,cAAc,QAAQ,QAAQ;AACjC,cAAI,YAAY,KAAK,IAAI,cAAc,cAAc,QAAQ,MAAM;AAGnE,cAAI,sBAAsB,YAAY,QAAQ,QAAQ;AAElD,kBAAM,cAAc,CAAC,QAAQ,MAAM,MAAM,GAAG;AAC5C,kBAAM,cAAc,KAAK,IAAI,cAAc,GAAG,YAAY,GAAG;AAC7D,uBAAW,cAAc,aAAa;AAClC,oBAAM,YAAY,QAAQ,YAAY,YAAY,SAAS;AAC3D,kBAAI,aAAa,aAAa;AAC1B,4BAAY,YAAY,WAAW;AACnC;cACJ;YACJ;UACJ;AAGA,cAAI,aAAa,aAAa;AAC1B,wBAAY,KAAK,IAAI,cAAc,cAAc,QAAQ,MAAM;UACnE;AAEA,iBAAO,KAAK;YACR,SAAS,QAAQ,MAAM,aAAa,SAAS;YAC7C;YACA;WACH;AAGD,cAAI,aAAa,QAAQ,QAAQ;AAC7B;UACJ;AAGA,gBAAM,YAAY,YAAY;AAC9B,wBAAc,KAAK,IAAI,WAAW,cAAc,CAAC;QACrD;AAEA,eAAO;MACX;;;;MAKQ,aAAa,SAAe;AAKhC,cAAM,EAAE,cAAc,YAAW,IAAK,KAAK;AAC3C,cAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,cAAM,SAA6E,CAAA;AAEnF,YAAI,eAAyB,CAAA;AAC7B,YAAI,cAAc;AAClB,YAAI,cAAc;AAClB,YAAI,kBAAkB;AAEtB,iBAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACnC,gBAAM,OAAO,MAAM,CAAC;AACpB,gBAAM,WAAW,KAAK,SAAS;AAE/B,cAAI,cAAc,WAAW,gBAAgB,aAAa,SAAS,GAAG;AAElE,kBAAM,eAAe,aAAa,KAAK,IAAI;AAC3C,mBAAO,KAAK;cACR,SAAS;cACT;cACA,WAAW,cAAc,aAAa;aACzC;AAGD,kBAAM,eAAe,KAAK,IAAI,GAAG,KAAK,MAAM,cAAc,EAAE,CAAC;AAC7D,kBAAM,eAAe,KAAK,IAAI,GAAG,aAAa,SAAS,YAAY;AACnE,2BAAe,aAAa,
MAAM,YAAY;AAC9C,0BAAc,aAAa,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,SAAS,GAAG,CAAC;AACnE,0BAAc,kBAAkB;UACpC;AAEA,uBAAa,KAAK,IAAI;AACtB,yBAAe;AACf,6BAAmB;QACvB;AAGA,YAAI,aAAa,SAAS,GAAG;AACzB,gBAAM,eAAe,aAAa,KAAK,IAAI;AAC3C,iBAAO,KAAK;YACR,SAAS;YACT;YACA,WAAW,cAAc,aAAa;WACzC;QACL;AAEA,eAAO;MACX;;;;MAKQ,kBAAkB,SAAe;AAKrC,cAAM,EAAE,aAAY,IAAK,KAAK;AAC9B,cAAM,aAAa,QAAQ,MAAM,OAAO;AACxC,cAAM,SAA6E,CAAA;AAEnF,YAAI,eAAyB,CAAA;AAC7B,YAAI,cAAc;AAClB,YAAI,cAAc;AAClB,YAAI,uBAAuB;AAE3B,mBAAW,aAAa,YAAY;AAChC,gBAAM,gBAAgB,UAAU,SAAS;AAEzC,cAAI,cAAc,gBAAgB,gBAAgB,aAAa,SAAS,GAAG;AAEvE,kBAAM,eAAe,aAAa,KAAK,MAAM;AAC7C,mBAAO,KAAK;cACR,SAAS;cACT;cACA,WAAW,cAAc,aAAa;aACzC;AAGD,2BAAe,CAAA;AACf,0BAAc;AACd,0BAAc;UAClB;AAEA,uBAAa,KAAK,SAAS;AAC3B,yBAAe;AACf,kCAAwB;QAC5B;AAGA,YAAI,aAAa,SAAS,GAAG;AACzB,gBAAM,eAAe,aAAa,KAAK,MAAM;AAC7C,iBAAO,KAAK;YACR,SAAS;YACT;YACA,WAAW,cAAc,aAAa;WACzC;QACL;AAEA,eAAO;MACX;;;;MAKQ,iBAAiB,SAAe;AAKpC,cAAM,EAAE,cAAc,YAAW,IAAK,KAAK;AAE3C,cAAM,kBAAkB;AACxB,cAAM,YAAsB,CAAA;AAC5B,YAAI,YAAY;AAChB,YAAI;AAEJ,gBAAQ,QAAQ,gBAAgB,KAAK,OAAO,OAAO,MAAM;AACrD,oBAAU,KAAK,QAAQ,MAAM,WAAW,MAAM,QAAQ,MAAM,CAAC,EAAE,SAAS,CAAC,CAAC;AAC1E,sBAAY,MAAM,QAAQ,MAAM,CAAC,EAAE,SAAS;QAChD;AACA,YAAI,YAAY,QAAQ,QAAQ;AAC5B,oBAAU,KAAK,QAAQ,MAAM,SAAS,CAAC;QAC3C;AAEA,cAAM,SAA6E,CAAA;AACnF,YAAI,eAAyB,CAAA;AAC7B,YAAI,cAAc;AAClB,YAAI,cAAc;AAClB,YAAI,sBAAsB;AAE1B,mBAAW,YAAY,WAAW;AAC9B,gBAAM,eAAe,SAAS;AAE9B,cAAI,cAAc,eAAe,gBAAgB,aAAa,SAAS,GAAG;AAEtE,kBAAM,eAAe,aAAa,KAAK,GAAG;AAC1C,mBAAO,KAAK;cACR,SAAS;cACT;cACA,WAAW,cAAc,aAAa;aACzC;AAGD,kBAAM,mBAAmB,KAAK,IAAI,GAAG,KAAK,MAAM,cAAc,GAAG,CAAC;AAClE,kBAAM,eAAe,KAAK,IAAI,GAAG,aAAa,SAAS,gBAAgB;AACvE,2BAAe,aAAa,MAAM,YAAY;AAC9C,0BAAc,aAAa,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,SAAS,GAAG,CAAC;AACnE,0BAAc,sBAAsB;UACxC;AAEA,uBAAa,KAAK,QAAQ;AAC1B,yBAAe,eAAe;AAC9B,iCAAuB,eAAe;QAC1C;AAGA,YAAI,aAAa,SAAS,GAAG;AACzB,gBAAM,eAAe,aAAa,KAAK,GAAG;AAC1C,iBAAO,KAAK;YACR,SAAS;YACT;YACA,WAAW,cAAc,aAAa;WACzC;QACL;AAEA,eAAO;MACX;;AA/RJ,IAAAA,SAAA,gBAAA;AAqSA,aAAgB,oBAAoB,SAAuC;AACvE,aAAO,IAAI,cAAc,OAAO;IACpC;AAKA,aAAgB,wBACZ,eAAuB,KACvB,cAAsB,KAAG;AAEzB,aAAO,IAAI,cAAc;QACrB;QACA;QACA,UAAU;QACV,oBAAoB;OACvB;IACL;AAKA,aAAgB,6BAA6B,eAAuB,KAAI;AACpE,aAAO,IAAI,cAAc;QACrB;QACA,aAAa;;QACb,UAAU;QACV,oBAAoB;OACvB;IACL;;;;;;;;;;ACrRA,IAAAC,SAAA,qBAAA;AAOA,IAAAA,SAAA,+BAAA;AAWA,IAAAA,SAAA,6BAAA;AAcA,IAAAA,SAAA,oCAAA;AA2GA,IAAAA,SAAA,4BAAA;AAxMA,QAAa,eAAb,MAAyB;MACrB,YAAoB,UAA+B,CAAA,GAAE;AAAjC,aAAA,UAAA;MAAoC;MAExD,MAAM,OAAgB;AAClB,cAAM,EAAE,OAAO,eAAe,QAAO,IAAK;AAC1C,cAAM,EAAE,YAAY,QAAQ,UAAU,KAAI,IAAK,KAAK;AAGpD,YAAI,iBAAiB,SACf,MAAM,OAAO,MAAM,IACnB,CAAC,GAAG,KAAK;AAGf,YAAI,UAAU;AACV,2BAAiB,eAAe,OAAO,QAAQ;QACnD;AAGA,YAAI,MAAM;AACN,yBAAe,KAAK,IAAI;QAC5B;AAGA,eAAO,eAAe,IAAI,CAAC,MAAM,UAAS;AACtC,gBAAM,KAAK,aACL,WAAW,MAAM,KAAK,IACtB,QAAQ,KAAK,MAAM,KAAK,iBAAiB,KAAK,QAAQ,CAAC;AAE7D,iBAAO;YACH;YACA,MAAM;cACF;cACA;cACA;;YAEJ,UAAU;cACN,QAAQ,KAAK;cACb,cAAc,KAAK;cACnB,UAAU,KAAK;cACf;cACA,YAAY,eAAe;cAC3B,aAAa,KAAK;;;QAG9B,CAAC;MACL;;;;MAKQ,iBAAiB,UAAgB;AACrC,eAAO,SACF,QAAQ,aAAa,EAAE,EACvB,QAAQ,mBAAmB,GAAG,EAC9B,YAAW;MACpB;;AAvDJ,IAAAA,SAAA,eAAA;AA6DA,aAAgB,mBAAmB,SAA6B;AAC5D,aAAO,IAAI,aAAa,OAAO;IACnC;AAKA,aAAgB,6BAA6B,SAA2C;AACpF,aAAO,IAAI,aAAa;QACpB,GAAG;QACH,MAAM,CAAC,GAAG,MAAM,EAAE,SAAS,cAAc,EAAE,QAAQ;OACtD;IACL;AAMA,aAAgB,2BACZ,aACA,SAA2C;AAE3C,aAAO,IAAI,aAAa;QACpB,GAAG;QACH,MAAM,CAAC,GAAG,MAAM,YAAY,CAAC,IAAI,YAAY,CAAC;OACjD;IACL;AAMA,aAAgB,kCACZ,gBACA,SAA6C;AAE7C,YAAM,uBAAuB,IAAI,IAC7B,eAAe,IAAI,SACf,IAAI,WAAW,GAAG,IAAI,IAAI,YAAW,IAAK,IAAI,IAAI,YAAW,CAAE,EAAE,CACpE;AAGL,aAAO,IAAI,aAAa;QACpB,GAAG;QACH,QAAQ,CAAC,SAAQ;AAEb,gBAAM,YAAY,KAAK,cAAc,YAAY;AACjD,c
AAI,CAAC,aAAa,CAAC,MAAM,QAAQ,SAAS,GAAG;AACzC,mBAAO;UACX;AAGA,qBAAW,WAAW,WAAW;AAE7B,gBAAI,QAAQ,WAAW,IAAI,GAAG;AAC1B,oBAAM,MAAM,QAAQ,MAAM,CAAC,EAAE,YAAW;AACxC,kBAAI,qBAAqB,IAAI,GAAG,GAAG;AAC/B,uBAAO;cACX;YACJ;UACJ;AAEA,iBAAO;QACX;OACH;IACL;AAmBA,QAAa,sBAAb,MAAgC;MAC5B,YACY,YAAoB,GACpB,UAAmD,CAAA,GAAE;AADrD,aAAA,YAAA;AACA,aAAA,UAAA;MACT;MAEH,MAAM,OAAgB;AAClB,cAAM,EAAE,OAAO,eAAe,QAAO,IAAK;AAC1C,cAAM,EAAE,QAAQ,UAAU,KAAI,IAAK,KAAK;AAGxC,YAAI,iBAAiB,SACf,MAAM,OAAO,MAAM,IACnB,CAAC,GAAG,KAAK;AAEf,YAAI,UAAU;AACV,2BAAiB,eAAe,OAAO,QAAQ;QACnD;AAEA,YAAI,MAAM;AACN,yBAAe,KAAK,IAAI;QAC5B;AAEA,cAAM,YAAiD,CAAA;AACvD,cAAM,eAAe,KAAK,KAAK,eAAe,SAAS,KAAK,SAAS;AAErE,iBAAS,IAAI,GAAG,IAAI,eAAe,QAAQ,KAAK,KAAK,WAAW;AAC5D,gBAAM,QAAQ,eAAe,MAAM,GAAG,IAAI,KAAK,SAAS;AACxD,gBAAM,aAAa,KAAK,MAAM,IAAI,KAAK,SAAS;AAEhD,oBAAU,KAAK;YACX,IAAI,cAAc,UAAU;YAC5B,MAAM;cACF,OAAO;cACP;cACA;cACA;;YAEJ,UAAU;cACN;cACA;cACA,cAAc,MAAM;cACpB,YAAY,eAAe;cAC3B,eAAe,MAAM,IAAI,OAAK,EAAE,QAAQ;;WAE/C;QACL;AAEA,eAAO;MACX;;AAjDJ,IAAAA,SAAA,sBAAA;AAuDA,aAAgB,0BACZ,WACA,SAAiD;AAEjD,aAAO,IAAI,oBAAoB,WAAW,OAAO;IACrD;;;;;;;;;;ACvRA,QAAA,kBAAA;AACI,WAAA,eAAAC,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,mCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAA+B,EAAA,CAAA;AAC/B,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,6BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAyB,EAAA,CAAA;AAW7B,QAAA,mBAAA;AACI,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,gCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAA4B,EAAA,CAAA;AAShC,QAAA,kBAAA;AACI,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,gCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAA4B,EAAA,CAAA;AAC5B,WAAA,eAAAA,UAAA,8BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAA0B,EAAA,CAAA;AAC1B,WAAA,eAAAA,UAAA,qCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAiC,EAAA,CAAA;AACjC,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,6BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,gBAAA;IAAyB,EAAA,CAAA;;;;;;;;;AC4c7B,IAAAC,SAAA,sBAAA;AApeA,QAAA,cAAA;AACA,QAAA,aAAA;AAqHA,QAAM,iCAAiC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAuCvC,QAAM,mBAAN,MAAsB;MAClB,YACY,WACA,gBACA,gBAAkE;AAFlE,aAAA,YAAA;AACA,aAAA,iBAAA;AACA,aAAA,iBAAA;MACT;MAEH,MAAM,IACF,MACA,SAAmB;AAEnB,cAAM,EAAE,MAAM,cAAa,IAAK,KAAK;AAGrC,cAAM,SAAS,KAAK,YAAY,MAAM,aAAa;AAEnD,YAAI;AAEA,gBAAM,QAAQ,KAAK,aAAa;AAGhC,gBAAM,SAAS,MAAM,KAAK,UAAU,QAAQ,EAAE,MAAK,CAAE;AAErD,cAAI,OAAO,WAAW,OAAO,UAAU;AACnC,kBAAM,WAAW,KAAK,cAAc,OAAO,UAAU,IAAI;AACzD,kBAAM,aAAa,KAAK,oBAAoB,QAAQ;AAEpD,mBAAO;cACH;cACA,SAAS;cACT;cACA,aAAa,OAAO;cACpB;;UAER;AAEA,iBAAO;YACH;YACA,SAAS;YACT,OAAO,OAAO,SAAS;YACvB,UAAU,CAAA;;QAElB,SAAS,OAAO;AACZ,iBAAO;YACH;YACA,SAAS;YACT,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;YAC5D,UAAU,CAAA;;QAElB;MACJ;MAEQ,YAAY,MAAY,MAAY;AACxC,eAAO,KAAK,eACP,QAAQ,qBAAqB,KAAK,QAAQ,EAC1C,QAAQ,wBAAwB,KAAK,OAAO,EAC5C,QAAQ,iBAAiB,IAAI;MACtC;MAEQ,cAAc,UAAkB,MAAU;AAC9C,YAAI,KAAK,gBAAgB;AACrB,iBAAO,KAAK,eAAe,UAAU,IAAI;QAC7C;AAEA,eAAO,KAAK,qBAAqB,UAAU,IAAI;MACnD;MAEQ,qBAAqB,UAAkB,MAAU;AACrD,YAAI;AAEA,gBAAM,YAAY,SAAS,MAAM,aAAa;AAC9C,cAAI,CAAC,WAAW;AAC
Z,mBAAO,CAAA;UACX;AAEA,gBAAM,SAAS,KAAK,MAAM,UAAU,CAAC,CAAC;AACtC,gBAAM,WAA4B,CAAA;AAElC,cAAI,OAAO,YAAY,MAAM,QAAQ,OAAO,QAAQ,GAAG;AACnD,qBAAS,IAAI,GAAG,IAAI,OAAO,SAAS,QAAQ,KAAK;AAC7C,oBAAM,IAAI,OAAO,SAAS,CAAC;AAC3B,uBAAS,KAAK;gBACV,IAAI,GAAG,KAAK,QAAQ,IAAI,CAAC;gBACzB,UAAU,KAAK,YAAY,EAAE,QAAQ;gBACrC,MAAM,KAAK;gBACX,UAAU,KAAK;gBACf,MAAM,EAAE;gBACR,MAAM,EAAE;gBACR,aAAa,EAAE,eAAe,EAAE,SAAS;gBACzC,aAAa,EAAE,QAAQ,EAAE;gBACzB,YAAY,EAAE;gBACd,aAAa,EAAE;eAClB;YACL;UACJ;AAEA,iBAAO;QACX,QAAQ;AACJ,iBAAO,CAAA;QACX;MACJ;MAEQ,YAAY,UAAgB;AAChC,cAAM,SAAS,YAAY,IAAI,YAAW;AAC1C,YAAI,UAAU,WAAW,UAAU,YAAY;AAC3C,iBAAO;QACX;AACA,YAAI,UAAU,aAAa,UAAU,SAAS;AAC1C,iBAAO;QACX;AACA,YAAI,UAAU,UAAU,UAAU,SAAS;AACvC,iBAAO;QACX;AACA,eAAO;MACX;MAEQ,oBAAoB,UAAyB;AACjD,YAAI,SAAS,KAAK,OAAK,EAAE,aAAa,OAAO,GAAG;AAC5C,iBAAO;QACX;AACA,YAAI,SAAS,KAAK,OAAK,EAAE,aAAa,SAAS,GAAG;AAC9C,iBAAO;QACX;AACA,eAAO;MACX;;AAMJ,QAAM,oBAAN,cAAgC,WAAA,YAA+C;MAG3E,cAAA;AACI,cAAK;AAEL,cAAM,UAAsD;UACxD,QAAQ,CAAC,YAAW;AAChB,kBAAM,OAAO,QAAQ,QAAQ;AAC7B,kBAAM,OAAO,QAAQ,QAAQ;AAC7B,kBAAM,kBAAkB,QAAQ,eAAe,IAC1C,YAAW,EACX,QAAQ,QAAQ,GAAG,EACnB,KAAI,EACJ,UAAU,GAAG,GAAG;AACrB,mBAAO,GAAG,IAAI,IAAI,IAAI,IAAI,cAAc;UAC5C;UAEA,OAAO,CAAC,UAAU,eAAc;AAC5B,kBAAM,eAA+C;cACjD,SAAS;cACT,WAAW;cACX,QAAQ;cACR,cAAc;;AAGlB,kBAAM,UAAU,aAAa,WAAW,QAAQ,IAAI,aAAa,SAAS,QAAQ;AAClF,kBAAM,OAAO,UAAU,aAAa;AACpC,kBAAM,QAAQ,UAAU,WAAW;AAEnC,mBAAO;cACH,GAAG;cACH,MAAM,KAAK,SAAS,MAAM,OAAO,KAAK,OAAO,GAAG,KAAK,IAAI,KAAK,MAAM,IAAI;cACxE,aAAa,KAAK,YAAY,UAAU,OAAO,MAAM,YAAY,UAAU,KACrE,KAAK,aACL,MAAM;cACZ,cAAc,KAAK,aAAa,UAAU,OAAO,MAAM,aAAa,UAAU,KACxE,KAAK,cACL,MAAM;;UAEpB;UAEA,MAAM,CAAC,GAAG,MAAK;AACX,kBAAM,gBAAgD;cAClD,SAAS;cACT,WAAW;cACX,QAAQ;cACR,cAAc;;AAGlB,kBAAM,eAAe,cAAc,EAAE,QAAQ,IAAI,cAAc,EAAE,QAAQ;AACzE,gBAAI,iBAAiB,GAAG;AACpB,qBAAO;YACX;AAEA,kBAAM,QAAQ,EAAE,QAAQ;AACxB,kBAAM,QAAQ,EAAE,QAAQ;AACxB,kBAAM,WAAW,MAAM,cAAc,KAAK;AAC1C,gBAAI,aAAa,GAAG;AAChB,qBAAO;YACX;AAEA,oBAAQ,EAAE,QAAQ,MAAM,EAAE,QAAQ;UACtC;UAEA,WAAW,CAAC,UAAS;AACjB,kBAAM,aAAa,EAAE,OAAO,GAAG,SAAS,GAAG,MAAM,GAAG,YAAY,EAAC;AACjE,kBAAM,SAAiC,CAAA;AAEvC,uBAAW,WAAW,OAAO;AACzB,yBAAW,QAAQ,QAAQ;AAC3B,qBAAO,QAAQ,IAAI,KAAK,OAAO,QAAQ,IAAI,KAAK,KAAK;YACzD;AAEA,mBAAO,EAAE,YAAY,OAAM;UAC/B;;AAGJ,aAAK,uBAAuB,IAAI,WAAA,qBAAqB,OAAO;MAChE;MAEA,MAAM,OACF,SACA,SAAsB;AAEtB,cAAM,YAAY,KAAK,IAAG;AAG1B,cAAM,cAAiC,CAAA;AACvC,cAAM,cAAkC,CAAA;AAExC,mBAAW,UAAU,SAAS;AAC1B,cAAI,OAAO,WAAW,OAAO,QAAQ;AACjC,kBAAM,aAAa,OAAO;AAC1B,wBAAY,KAAK,UAAU;AAE3B,gBAAI,WAAW,WAAW,WAAW,UAAU;AAE3C,oBAAM,iBAAiB,WAAW,SAAS,IAAI,QAAM;gBACjD,GAAG;gBACH,UAAU,WAAW,KAAK;gBAC1B,MAAM,EAAE,QAAQ,WAAW,KAAK;gBAClC;AACF,0BAAY,KAAK,cAAc;YACnC;UACJ;QACJ;AAGA,cAAM,iBAA+C,YAAY,IAAI,CAAC,UAAU,OAAO;UACnF,YAAY,YAAY,CAAC;UACzB,SAAS;UACT,QAAQ;UACR,iBAAiB;UACnB;AAEF,cAAM,sBAAsB,MAAM,KAAK,qBAAqB,OAAO,gBAAgB,OAAO;AAC1F,cAAM,kBAAkB,oBAAoB,OAAO;AAGnD,cAAM,UAAU,KAAK,cAAc,iBAAiB,WAAW;AAE/D,cAAM,eAAe,KAAK,IAAG,IAAK;AAElC,eAAO;UACH,QAAQ;YACJ,UAAU;YACV;;UAEJ,OAAO;YACH,YAAY,YAAY,OAAO,CAAC,KAAK,QAAQ,MAAM,IAAI,QAAQ,CAAC;YAChE,aAAa,gBAAgB;YAC7B,aAAa,oBAAoB,MAAM;YACvC;YACA,cAAc;;;MAG1B;MAEQ,cAAc,UAA2B,aAA+B;AAC5E,cAAM,aAAa,EAAE,OAAO,GAAG,SAAS,GAAG,MAAM,GAAG,YAAY,EAAC;AACjE,cAAM,SAAiC,CAAA;AAEvC,mBAAW,WAAW,UAAU;AAC5B,qBAAW,QAAQ,QAAQ;AAC3B,iBAAO,QAAQ,IAAI,KAAK,OAAO,QAAQ,IAAI,KAAK,KAAK;QACzD;AAGA,YAAI,oBAAyD;AAE7D,YAAI,WAAW,QAAQ,GAAG;AACtB,8BAAoB;QACxB,WAAW,WAAW,UAAU,GAAG;AAC/B,8BAAoB;QACxB;AAGA,mBAAW,UAAU,aAAa;AAC9B,cAAI,OAAO,eAAe,QAAQ;AAC9B,gCAAoB;AACpB;UACJ;AACA,cAAI,OAAO,eAAe,qBAAqB,sBAAsB,QAAQ;AACzE,gCAAoB;UACxB;QACJ;AAGA,cAAM,cAAc,YAAY,OAAO,OAAK,CAAC,EAAE,OAAO,EAAE;AACxD,YAAI;AAEJ,YAAI,cAAc,GAAG;AACjB,wBAAc,oBAAoB,YAAY,MAAM,W
AAW,WAAW;QAC9E,OAAO;AACH,wBAAc,oBAAoB,YAAY,MAAM;QACxD;AAEA,YAAI,SAAS,WAAW,GAAG;AACvB,yBAAe;QACnB,OAAO;AACH,yBAAe,SAAS,SAAS,MAAM,cAAc,WAAW,KAAK,cAAc,WAAW,OAAO,gBAAgB,WAAW,IAAI,UAAU,WAAW,UAAU;QACvK;AAEA,eAAO;UACH,eAAe,SAAS;UACxB;UACA;UACA;UACA;;MAER;;AAMJ,aAAgB,oBACZ,SAA6B;AAE7B,YAAM,iBAAiB,QAAQ,kBAAkB;AAGjD,YAAM,eAAe,IAAI,YAAA,aAAY;AAGrC,YAAM,WAAW;QACb,OAAO,CAAC,UAA0B;AAC9B,gBAAM,YAAuB;YACzB,OAAO,MAAM;YACb,eAAe,MAAM;YACrB,SAAS,MAAM;;AAEnB,iBAAO,aAAa,MAAM,SAAS;QACvC;;AAGJ,aAAO;QACH,IAAI;QACJ,MAAM;QACN;QACA,QAAQ,IAAI,iBAAiB,QAAQ,WAAW,gBAAgB,QAAQ,cAAc;QACtF,SAAS,IAAI,kBAAiB;QAC9B,SAAS;UACL,gBAAgB;UAChB,YAAY,QAAQ,cAAc,OAAO;UACzC,cAAc;UACd,gBAAgB;;;IAG5B;;;;;;;;;AClTA,IAAAC,SAAA,oBAAA;AA2BA,IAAAA,SAAA,0BAAA;AAoBA,IAAAA,SAAA,wBAAA;AAyCA,IAAAA,SAAA,0BAAA;AA1SA,QAAA,oBAAA;AAEA,QAAA,aAAA;AAyEA,QAAM,mBAAN,MAAsB;MAClB,MAAM,OAAuB;AACzB,eAAO,MAAM,MAAM,IAAI,CAAC,MAAM,WAAW;UACrC,IAAI,KAAK,MAAM,QAAQ,KAAK;UAC5B,MAAM;YACF;YACA,iBAAiB,MAAM;;UAE3B,UAAU;YACN;YACA,YAAY,MAAM,MAAM;YACxB,GAAG,KAAK;;UAEd;MACN;;AAMJ,QAAM,iBAAN,MAAoB;MAGhB,YACY,WACA,SAMP;AAPO,aAAA,YAAA;AACA,aAAA,UAAA;AAQR,aAAK,kBAAiB,GAAA,kBAAA,gBAAe;UACjC,UAAU,QAAQ;UAClB,mBAAmB,QAAQ;UAC3B,cAAc,QAAQ;SACzB;MACL;MAEA,MAAM,IACF,UACA,SAAmB;AAEnB,cAAM,EAAE,MAAM,gBAAe,IAAK,SAAS;AAC3C,cAAM,SAAS,KAAK,MAAM,SAAS;AAGnC,cAAM,YAAY;UACd,GAAG;UACH,GAAG,KAAK;;AAGZ,YAAI;AAEA,gBAAM,UAAS,GAAA,kBAAA,gBAAe,KAAK,gBAAgB;YAC/C;YACA,qBAAqB,CAAC,CAAC,KAAK,QAAQ;WACvC;AAGD,gBAAM,SAAS,MAAM,KAAK,UAAU,QAAQ;YACxC,OAAO,KAAK,QAAQ;WACvB;AAED,cAAI,OAAO,WAAW,OAAO,UAAU;AAEnC,kBAAM,SAAS,KAAK,QAAQ,iBACtB,KAAK,QAAQ,eAAe,OAAO,QAAQ,IAC3C,OAAO;AAEb,mBAAO;cACH;cACA,SAAS;cACT;cACA,aAAa,OAAO;;UAE5B;AAEA,iBAAO;YACH;YACA,SAAS;YACT,OAAO,OAAO,SAAS;YACvB,aAAa,OAAO;;QAE5B,SAAS,OAAO;AACZ,cAAI,iBAAiB,kBAAA,sBAAsB;AACvC,mBAAO;cACH;cACA,SAAS;cACT,OAAO,qBAAqB,MAAM,YAAY;;UAEtD;AAEA,iBAAO;YACH;YACA,SAAS;YACT,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;;QAEpE;MACJ;;AAMJ,QAAM,yBAAN,cAA8C,WAAA,YAAuE;MACjH,MAAM,OACF,SACA,SAAsB;AAEtB,cAAM,YAAY,KAAK,IAAG;AAE1B,cAAM,UAAU,QACX,OAAO,OAAK,EAAE,WAAW,EAAE,MAAM,EACjC,IAAI,OAAK,EAAE,MAAO;AAEvB,cAAM,eAAe,KAAK,IAAG,IAAK;AAElC,eAAO;UACH,QAAQ;UACR,OAAO;YACH,YAAY,QAAQ;YACpB,aAAa,QAAQ;YACrB,aAAa;YACb;YACA,cAAc;;;MAG1B;;AAMJ,aAAgB,kBACZ,SAAoC;AAEpC,aAAO;QACH,IAAI;QACJ,MAAM;QACN,UAAU,IAAI,iBAAgB;QAC9B,QAAQ,IAAI,eAAwB,QAAQ,WAAW;UACnD,UAAU,QAAQ;UAClB,mBAAmB,QAAQ;UAC3B,cAAc,QAAQ;UACtB,gBAAgB,QAAQ;UACxB,OAAO,QAAQ;SAClB;QACD,SAAS,IAAI,uBAAsB;QACnC,SAAS;UACL,gBAAgB;UAChB,YAAY;UACZ,cAAc;UACd,gBAAgB;;;IAG5B;AAKA,aAAgB,wBACZ,WACA,UACA,SAIC;AAED,aAAO,kBAAkB;QACrB;QACA;QACA,cAAc,SAAS;QACvB,OAAO,SAAS;OACnB;IACL;AAKA,aAAgB,sBACZ,WACA,UACA,SAIC;AAED,YAAM,iBAAiB,CAAC,aAA6B;AAEjD,cAAM,YAAY,SAAS,MAAM,8BAA8B;AAC/D,cAAM,UAAU,YAAY,UAAU,CAAC,EAAE,KAAI,IAAK;AAGlD,cAAM,cAAc,QAAQ,MAAM,aAAa;AAC/C,cAAM,aAAa,QAAQ,MAAM,aAAa;AAC9C,cAAM,UAAU,cAAc,CAAC,KAAK,aAAa,CAAC,KAAK;AAEvD,cAAM,SAAS,KAAK,MAAM,OAAO;AAEjC,YAAI,SAAS,aAAa,CAAC,QAAQ,UAAU,MAAM,GAAG;AAClD,gBAAM,IAAI,MAAM,4BAA4B;QAChD;AAEA,eAAO;MACX;AAEA,aAAO,kBAA2B;QAC9B;QACA;QACA,cAAc,SAAS;QACvB,OAAO,SAAS;QAChB;OACH;IACL;AAMA,aAAgB,wBACZ,WACA,QAWC;AAKD,YAAM,MAAM,kBAA2B;QACnC;QACA,UAAU,OAAO;QACjB,cAAc,OAAO;QACrB,gBAAgB,OAAO;QACvB,OAAO,OAAO;OACjB;AAED,YAAM,cAAc,CAAC,OAAiB,qBAAmF;QACrH,OAAO,MAAM,IAAI,CAAC,MAAM,WAAW;UAC/B,IAAI,QAAQ,KAAK;UACjB,WAAW,OAAO,gBAAgB,MAAM,KAAK;UAC/C;QACF;;AAGJ,aAAO,EAAE,aAAa,IAAG;IAC7B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC7TA,IAAAC,SAAA,gBAAA;AAmBA,IAAAA,SAAA,uBAAA;AAcA,IAAAA,SAAA,gBAAA;AAgCA,IAAAA,SAAA,kBAAA;AAiBA,IAAAA,SAAA,sBAAA;AA+BA,IAAAA,SAAA,eAAA;AAcA,IAAAA,SAAA,iBAAA;AASA,IAAAA,SAAA,iBAAA;AA9JA,QAAAC,OAAA,aAAA,QAAA,IAAA,CAAA;AACA,Q
AAA,KAAA,aAAA,QAAA,IAAA,CAAA;AACA,QAAAC,SAAA,aAAA,QAAA,MAAA,CAAA;AACA,QAAA,WAAA;AAGA,QAAM,gBAAgB;AAgBtB,aAAgB,gBAAa;AACzB,YAAM,UAAUA,OAAK,KAAK,GAAG,OAAM,GAAI,aAAa;AACpD,UAAI;AACA,YAAI,CAACD,KAAG,WAAW,OAAO,GAAG;AACzB,UAAAA,KAAG,UAAU,SAAS,EAAE,WAAW,KAAI,CAAE;QAC7C;AACA,eAAO;MACX,SAAS,OAAO;AACZ,SAAA,GAAA,SAAA,WAAS,EAAG,MAAM,SAAA,YAAY,OAAO,mCAAmC,iBAAiB,QAAQ,QAAQ,MAAS;AAClH,eAAO;MACX;IACJ;AAQA,aAAgB,qBAAqB,SAAiB,WAAW,YAAoB,SAAO;AACxF,YAAM,YAAY,KAAK,IAAG;AAC1B,YAAM,SAAS,KAAK,OAAM,EAAG,SAAS,EAAE,EAAE,UAAU,GAAG,CAAC;AACxD,aAAO,GAAG,MAAM,IAAI,SAAS,IAAI,MAAM,GAAG,SAAS;IACvD;AAUA,aAAgB,cACZ,SACA,SAAiB,WACjB,YAAoB,SAAO;AAE3B,YAAM,UAAU,cAAa;AAC7B,UAAI,CAAC,SAAS;AACV,eAAO;MACX;AAEA,YAAM,WAAW,qBAAqB,QAAQ,SAAS;AACvD,YAAM,WAAWC,OAAK,KAAK,SAAS,QAAQ;AAE5C,UAAI;AAEA,QAAAD,KAAG,cAAc,UAAU,SAAS,EAAE,UAAU,OAAM,CAAE;AAExD,eAAO;UACH;UACA,SAAS,MAAM,gBAAgB,QAAQ;;MAE/C,SAAS,OAAO;AACZ,SAAA,GAAA,SAAA,WAAS,EAAG,MAAM,SAAA,YAAY,OAAO,6BAA6B,iBAAiB,QAAQ,QAAQ,MAAS;AAC5G,eAAO;MACX;IACJ;AAOA,aAAgB,gBAAgB,UAAgB;AAC5C,UAAI;AACA,YAAIA,KAAG,WAAW,QAAQ,GAAG;AACzB,UAAAA,KAAG,WAAW,QAAQ;QAC1B;AACA,eAAO;MACX,SAAS,OAAO;AACZ,SAAA,GAAA,SAAA,WAAS,EAAG,MAAM,SAAA,YAAY,OAAO,+BAA+B,iBAAiB,QAAQ,QAAQ,MAAS;AAC9G,eAAO;MACX;IACJ;AAOA,aAAgB,sBAAmB;AAC/B,YAAM,UAAUC,OAAK,KAAK,GAAG,OAAM,GAAI,aAAa;AACpD,UAAI,QAAQ;AAEZ,UAAI;AACA,YAAI,CAACD,KAAG,WAAW,OAAO,GAAG;AACzB,iBAAO;QACX;AAEA,cAAM,QAAQA,KAAG,YAAY,OAAO;AACpC,mBAAW,QAAQ,OAAO;AACtB,gBAAM,WAAWC,OAAK,KAAK,SAAS,IAAI;AACxC,cAAI;AACA,YAAAD,KAAG,WAAW,QAAQ;AACtB;UACJ,QAAQ;UAER;QACJ;MACJ,SAAS,OAAO;AACZ,SAAA,GAAA,SAAA,WAAS,EAAG,MAAM,SAAA,YAAY,OAAO,oCAAoC,iBAAiB,QAAQ,QAAQ,MAAS;MACvH;AAEA,aAAO;IACX;AAOA,aAAgB,aAAa,UAAgB;AACzC,UAAI;AACA,eAAOA,KAAG,aAAa,UAAU,EAAE,UAAU,OAAM,CAAE;MACzD,SAAS,OAAO;AACZ,SAAA,GAAA,SAAA,WAAS,EAAG,MAAM,SAAA,YAAY,OAAO,4BAA4B,iBAAiB,QAAQ,QAAQ,MAAS;AAC3G,eAAO;MACX;IACJ;AAOA,aAAgB,eAAe,UAAgB;AAC3C,YAAM,UAAUC,OAAK,KAAK,GAAG,OAAM,GAAI,aAAa;AACpD,aAAO,SAAS,WAAW,OAAO;IACtC;AAMA,aAAgB,iBAAc;AAC1B,aAAOA,OAAK,KAAK,GAAG,OAAM,GAAI,aAAa;IAC/C;;;;;;;;;ACkfA,IAAAC,SAAA,qBAAAC;AA6BA,IAAAD,SAAA,uBAAAE;AAtqBA,QAAA,aAAA;AACA,QAAA,uBAAA;AAIA,QAAA,oBAAA;AACA,QAAA,WAAA;AA0HA,QAAM,0BAA0B;AAahC,aAAS,mBAAmB,UAAkB,MAAkB,UAAuB;AACnF,aAAO,SAAS,QAAQ,yBAAyB,CAAC,GAAG,iBAAgB;AAEjE,YAAI,iBAAiB,WAAW,UAAU;AACtC,iBAAO,KAAK,UAAU,UAAU,MAAM,CAAC;QAC3C;AACA,eAAO,gBAAgB,OAAO,KAAK,YAAY,IAAI;MACvD,CAAC;IACL;AAEA,aAAS,gBAAgB,YAAoB,cAAsB;AAC/D,UAAI,aAAa,WAAW,GAAG;AAC3B,eAAO;MACX;AACA,aAAO,GAAG,UAAU;;iCAES,aAAa,KAAK,IAAI,CAAC;IACxD;AAYA,aAAS,gBAAgB,UAAkB,cAAsB;AAC7D,cAAO,GAAA,qBAAA,iBAAsB,UAAU,YAAY;IACvD;AAMA,QAAM,oBAAN,MAAuB;MACnB,MAAM,OAAqB;AACvB,eAAO,MAAM,MAAM,IAAI,CAAC,MAAM,WAAW;UACrC,IAAI,QAAQ,KAAK;UACjB,MAAM;YACF;YACA,gBAAgB,MAAM;YACtB,cAAc,MAAM;YACpB;YACA,UAAU,MAAM;;UAEpB,UAAU,EAAE,OAAO,YAAY,MAAM,MAAM,OAAM;UACnD;MACN;;AAOJ,QAAM,kBAAN,MAAqB;MACjB,YACY,WACA,eAAsB;AADtB,aAAA,YAAA;AACA,aAAA,gBAAA;MACT;MAEH,MAAM,IACF,UACA,UAAoB;AAEpB,cAAM,EAAE,MAAM,gBAAgB,cAAc,SAAQ,IAAK,SAAS;AAClE,cAAM,aAAa,CAAC,gBAAgB,aAAa,WAAW;AAE5D,YAAI;AACA,gBAAM,cAAc,mBAAmB,gBAAgB,MAAM,QAAQ;AACrE,gBAAM,SAAS,gBAAgB,aAAa,YAAY;AAIxD,cAAI;AACJ,cAAI,KAAK,iBAAiB,OAAO,KAAK,kBAAkB,UAAU;AAC9D,kBAAM,mBAAmB,mBAAmB,KAAK,eAAe,MAAM,QAAQ;AAC9E,oBAAQ,oBAAoB;UAChC;AAEA,gBAAM,SAAS,MAAM,KAAK,UAAU,QAAQ,EAAE,MAAK,CAAE;AAErD,cAAI,OAAO,WAAW,OAAO,UAAU;AAEnC,gBAAI,YAAY;AACZ,qBAAO;gBACH;gBACA,QAAQ,CAAA;gBACR,SAAS,OAAO;gBAChB,SAAS;gBACT,aAAa,OAAO;gBACpB,WAAW,OAAO;gBAClB,YAAY,OAAO;;YAE3B;AAGA,gBAAI;AACA,oBAAM,SAAS,gBAAgB,OAAO,UAAU,YAAY;AAC5D,qBAAO,EAAE,MAAM,QAAQ,SAAS,MAAM,aAAa,OAAO,UAAU,WAAW,OAAO,WAAW,YAAY,OAAO,WAAU;YAClI,SAAS,YAAY;AACjB,oBAAM,UAAS,GAAA,SAAA,WAAS;AACxB,qBAAO,MAAM,SAAA,YAAY,
YAAY,yDAAyD,SAAS,EAAE,eAAe,OAAO,SAAS,MAAM,YAAY,OAAO,SAAS,UAAU,GAAG,GAAG,CAAC,EAAE;AAC7L,qBAAO;gBACH;gBACA,QAAQ,KAAK,YAAY,YAAY;gBACrC,SAAS;gBACT,OAAO,gCAAgC,sBAAsB,QAAQ,WAAW,UAAU,OAAO,UAAU,CAAC;gBAC5G,aAAa,OAAO;gBACpB,WAAW,OAAO;gBAClB,YAAY,OAAO;;YAE3B;UACJ;AAEA,iBAAO;YACH;YACA,QAAQ,aAAa,CAAA,IAAK,KAAK,YAAY,YAAY;YACvD,SAAS;YACT,OAAO,OAAO,SAAS;YACvB,aAAa,OAAO;YACpB,WAAW,OAAO;YAClB,YAAY,OAAO;;QAE3B,SAAS,OAAO;AACZ,iBAAO;YACH;YACA,QAAQ,aAAa,CAAA,IAAK,KAAK,YAAY,SAAS,KAAK,YAAY;YACrE,SAAS;YACT,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;YAC5D,aAAa;;;QAErB;MACJ;MAEQ,YAAY,QAAgB;AAChC,cAAM,SAAkC,CAAA;AACxC,mBAAW,SAAS;AAAQ,iBAAO,KAAK,IAAI;AAC5C,eAAO;MACX;;AAOJ,aAAS,YAAY,OAAc;AAC/B,UAAI,UAAU,QAAQ,UAAU;AAAW,eAAO;AAClD,UAAI,OAAO,UAAU;AAAU,eAAO,MAAM,SAAS,KAAK,MAAM,UAAU,GAAG,EAAE,IAAI,QAAQ;AAC3F,UAAI,OAAO,UAAU;AAAW,eAAO,QAAQ,SAAS;AACxD,UAAI,OAAO,UAAU;AAAU,eAAO,OAAO,KAAK;AAClD,UAAI,MAAM,QAAQ,KAAK;AAAG,eAAO,IAAI,MAAM,MAAM;AACjD,UAAI,OAAO,UAAU;AAAU,eAAO,KAAK,UAAU,KAAK;AAC1D,aAAO,OAAO,KAAK;IACvB;AAEA,aAAS,SAAS,OAAe,MAAc,IAAE;AAC7C,aAAO,MAAM,UAAU,MAAM,QAAQ,MAAM,UAAU,GAAG,MAAM,CAAC,IAAI;IACvE;AAEA,aAAS,aAAa,SAA4B,SAAyB;AACvE,YAAM,QAAkB,CAAC,eAAe,QAAQ,UAAU,WAAW,EAAE;AACvE,UAAI,QAAQ,cAAc;AAAG,cAAM,KAAK,cAAc,QAAQ,WAAW,mBAAmB,EAAE;AAE9F,cAAQ,QAAQ,CAAC,GAAG,MAAK;AACrB,cAAM,KAAK,YAAY,IAAI,CAAC,EAAE;AAC9B,cAAM,KAAK,cAAc,OAAO,QAAQ,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,SAAS,CAAC,CAAC,EAAE,EAAE,KAAK,IAAI,CAAC,EAAE;AACnG,YAAI,EAAE,SAAS;AACX,gBAAM,KAAK,eAAe,OAAO,QAAQ,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,YAAY,CAAC,CAAC,EAAE,EAAE,KAAK,IAAI,CAAC,EAAE;QAC7G,OAAO;AACH,gBAAM,KAAK,cAAc,EAAE,SAAS,eAAe,EAAE;QACzD;AACA,cAAM,KAAK,EAAE;MACjB,CAAC;AAED,YAAM,KAAK,OAAO,cAAc,QAAQ,eAAe,eAAe,QAAQ,WAAW,SAAS;AAClG,aAAO,MAAM,KAAK,IAAI;IAC1B;AAEA,aAAS,cAAc,SAA0B;AAC7C,UAAI,QAAQ,WAAW;AAAG,eAAO;AAEjC,YAAM,SAAS,CAAC,GAAG,IAAI,IAAI,QAAQ,QAAQ,OAAK,OAAO,KAAK,EAAE,IAAI,CAAC,CAAC,CAAC;AACrE,YAAM,UAAU,CAAC,GAAG,IAAI,IAAI,QAAQ,QAAQ,OAAK,OAAO,KAAK,EAAE,MAAM,CAAC,CAAC,CAAC;AACxE,YAAM,UAAU,CAAC,KAAK,GAAG,OAAO,IAAI,OAAK,QAAQ,CAAC,EAAE,GAAG,GAAG,QAAQ,IAAI,OAAK,SAAS,CAAC,EAAE,GAAG,QAAQ;AAElG,YAAM,QAAQ;QACV,OAAO,QAAQ,KAAK,KAAK,IAAI;QAC7B,OAAO,QAAQ,IAAI,MAAM,KAAK,EAAE,KAAK,KAAK,IAAI;;AAGlD,cAAQ,QAAQ,CAAC,GAAG,MAAK;AACrB,cAAM,QAAQ;UACV,OAAO,IAAI,CAAC;UACZ,GAAG,OAAO,IAAI,OAAK,SAAS,EAAE,KAAK,CAAC,KAAK,IAAI,EAAE,CAAC;UAChD,GAAG,QAAQ,IAAI,OAAK,YAAY,EAAE,OAAO,CAAC,CAAC,CAAC;UAC5C,EAAE,UAAU,OAAO;;AAEvB,cAAM,KAAK,OAAO,MAAM,KAAK,KAAK,IAAI,IAAI;MAC9C,CAAC;AAED,aAAO,MAAM,KAAK,IAAI;IAC1B;AAEA,aAAS,aAAa,SAA0B;AAC5C,aAAO,KAAK,UAAU,QAAQ,IAAI,QAAM;QACpC,OAAO,EAAE;QACT,QAAQ,EAAE;QACV,SAAS,EAAE;QACX,GAAI,EAAE,SAAS,EAAE,OAAO,EAAE,MAAK;QACjC,GAAG,MAAM,CAAC;IAChB;AAEA,aAAS,UAAU,OAAa;AAC5B,aAAQ,MAAM,SAAS,GAAG,KAAK,MAAM,SAAS,GAAG,KAAK,MAAM,SAAS,IAAI,IACnE,IAAI,MAAM,QAAQ,MAAM,IAAI,CAAC,MAC7B;IACV;AAEA,aAAS,YAAY,SAA0B;AAC3C,UAAI,QAAQ,WAAW;AAAG,eAAO;AAEjC,YAAM,SAAS,CAAC,GAAG,IAAI,IAAI,QAAQ,QAAQ,OAAK,OAAO,KAAK,EAAE,IAAI,CAAC,CAAC,CAAC;AACrE,YAAM,UAAU,CAAC,GAAG,IAAI,IAAI,QAAQ,QAAQ,OAAK,OAAO,KAAK,EAAE,MAAM,CAAC,CAAC,CAAC;AACxE,YAAM,UAAU,CAAC,GAAG,QAAQ,GAAG,QAAQ,IAAI,OAAK,OAAO,CAAC,EAAE,GAAG,SAAS;AAEtE,YAAM,QAAQ,CAAC,QAAQ,KAAK,GAAG,CAAC;AAChC,iBAAW,KAAK,SAAS;AACrB,cAAM,SAAS;UACX,GAAG,OAAO,IAAI,OAAK,UAAU,EAAE,KAAK,CAAC,KAAK,EAAE,CAAC;UAC7C,GAAG,QAAQ,IAAI,OAAK,UAAU,YAAY,EAAE,OAAO,CAAC,CAAC,CAAC,CAAC;UACvD,EAAE,UAAU,SAAS;;AAEzB,cAAM,KAAK,OAAO,KAAK,GAAG,CAAC;MAC/B;AACA,aAAO,MAAM,KAAK,IAAI;IAC1B;AAMA,aAAS,aAAa,SAA0B;AAC5C,YAAM,oBAAoB,QAAQ,OAAO,OAAK,EAAE,OAAO;AACvD,UAAI,kBAAkB,WAAW,GAAG;AAChC,eAAO;MACX;AAGA,UAAI,kBAAkB,WAAW,GAAG;AAChC,cAAM,IAAI,kBAAkB,CAAC;AAC7B,
eAAO,EAAE,WAAW,KAAK,UAAU,EAAE,QAAQ,MAAM,CAAC;MACxD;AAGA,aAAO,kBACF,IAAI,CAAC,GAAG,MAAK;AACV,cAAM,OAAO,EAAE,WAAW,KAAK,UAAU,EAAE,QAAQ,MAAM,CAAC;AAC1D,eAAO,YAAY,IAAI,CAAC;EAAS,IAAI;MACzC,CAAC,EACA,KAAK,MAAM;IACpB;AAMA,QAAM,mBAAN,cAA+B,WAAA,YAA6C;MACxE,YACY,eAA6B,QAC7B,eAAyB,CAAA,GACzB,WACA,gBACA,gBACA,eACA,oBAA2C;AAEnD,cAAK;AARG,aAAA,eAAA;AACA,aAAA,eAAA;AACA,aAAA,YAAA;AACA,aAAA,iBAAA;AACA,aAAA,iBAAA;AACA,aAAA,gBAAA;AACA,aAAA,qBAAA;MAGZ;MAEA,MAAM,OACF,SACA,SAAsB;AAEtB,cAAM,YAAY,KAAK,IAAG;AAE1B,cAAM,cAAc,QAAQ,OAAO,OAAK,EAAE,MAAM,EAAE,IAAI,OAAK,EAAE,MAAO;AACpE,cAAM,kBAAkB,YAAY,OAAO,OAAK,EAAE,OAAO,EAAE;AAC3D,cAAM,cAAc,YAAY,OAAO,OAAK,CAAC,EAAE,OAAO,EAAE;AAExD,cAAM,UAA4B;UAC9B,YAAY,YAAY;UACxB;UACA;UACA,cAAc,KAAK;;AAIvB,YAAI,KAAK,iBAAiB,MAAM;AAC5B,iBAAO,MAAM,KAAK,gBAAgB,aAAa,SAAS,QAAQ,QAAQ,WAAW,OAAO;QAC9F;AAGA,YAAI;AACJ,gBAAQ,KAAK,cAAc;UACvB,KAAK;AAAS,8BAAkB,cAAc,WAAW;AAAG;UAC5D,KAAK;AAAQ,8BAAkB,aAAa,WAAW;AAAG;UAC1D,KAAK;AAAO,8BAAkB,YAAY,WAAW;AAAG;UACxD,KAAK;AAAQ,8BAAkB,aAAa,WAAW;AAAG;UAC1D;AAAS,8BAAkB,aAAa,aAAa,OAAO;QAChE;AAEA,eAAO;UACH,QAAQ,EAAE,SAAS,aAAa,iBAAiB,QAAO;UACxD,OAAO;YACH,YAAY,QAAQ;YACpB,aAAa,YAAY;YACzB,aAAa;YACb,cAAc,KAAK,IAAG,IAAK;YAC3B,cAAc;;;MAG1B;MAEQ,MAAM,gBACV,aACA,SACA,YACA,WACA,SAAsB;AAEtB,YAAI,CAAC,KAAK,aAAa,CAAC,KAAK,gBAAgB;AACzC,gBAAM,IAAI,MAAM,iDAAiD;QACrE;AAEA,cAAM,aAAa,CAAC,KAAK,kBAAkB,KAAK,eAAe,WAAW;AAI1E,YAAI;AACJ,YAAI,QAAQ,gBAAgB;AACxB,4BAAkB,QAAQ,eAAe,gBACrC,mCACA,QAAQ,aAAa;QAE7B;AAGA,cAAM,oBAAoB,YAAY,OAAO,OAAK,EAAE,OAAO;AAG3D,cAAM,mBAAmB,kBAAkB,IAAI,OAAI;AAC/C,cAAI,EAAE,YAAY,QAAW;AACzB,mBAAO,EAAE;UACb;AACA,iBAAO,EAAE;QACb,CAAC;AACD,cAAM,gBAAgB,KAAK,UAAU,kBAAkB,MAAM,CAAC;AAK9D,YAAI;AACJ,YAAI,SAAS,KAAK;AAElB,YAAI,OAAO,SAAS,kBAAkB,GAAG;AACrC,4BAAiB,GAAA,kBAAA,eAAc,eAAe,qBAAqB,OAAO;AAC1E,cAAI,gBAAgB;AAChB,qBAAS,OAAO,QAAQ,yBAAyB,eAAe,QAAQ;UAC5E,OAAO;AAEH,aAAA,GAAA,SAAA,WAAS,EAAG,KAAK,SAAA,YAAY,YAAY,6EAA6E;AACtH,qBAAS,OAAO,QAAQ,yBAAyB,aAAa;UAClE;QACJ;AAGA,iBAAS,OACJ,QAAQ,oBAAoB,aAAa,EACzC,QAAQ,kBAAkB,OAAO,QAAQ,UAAU,CAAC,EACpD,QAAQ,0BAA0B,OAAO,QAAQ,eAAe,CAAC,EACjE,QAAQ,0BAA0B,OAAO,QAAQ,WAAW,CAAC;AAGlE,YAAI,KAAK,oBAAoB;AACzB,qBAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,KAAK,kBAAkB,GAAG;AAChE,qBAAS,OAAO,QAAQ,IAAI,OAAO,SAAS,GAAG,UAAU,GAAG,GAAG,KAAK;UACxE;QACJ;AAGA,cAAM,aAAa,aAAa,SAAS,gBAAgB,QAAQ,KAAK,cAAe;AAGrF,YAAI;AACJ,YAAI;AACA,qBAAW,MAAM,KAAK,UAAU,YAAY,EAAE,OAAO,KAAK,cAAa,CAAE;QAC7E;AAEI,cAAI,gBAAgB;AAChB,2BAAe,QAAO;UAC1B;QACJ;AAEA,YAAI,CAAC,SAAS,WAAW,CAAC,SAAS,UAAU;AAEzC,cAAI,QAAQ,kBAAkB,iBAAiB;AAC3C,oBAAQ,eAAe,cACnB,iBACA,UACA,QACA,SAAS,SAAS,eAAe;UAEzC;AACA,gBAAM,IAAI,MAAM,qBAAqB,SAAS,SAAS,eAAe,EAAE;QAC5E;AAGA,YAAI,YAAY;AAEZ,cAAI,QAAQ,kBAAkB,iBAAiB;AAC3C,oBAAQ,eAAe,cACnB,iBACA,aACA,SAAS,UACT,QACA,KAAK,UAAU,EAAE,MAAM,QAAQ,cAAc,SAAS,SAAS,OAAM,CAAE,CAAC;UAEhF;AACA,iBAAO;YACH,QAAQ;cACJ,SAAS;cACT,iBAAiB,SAAS;cAC1B,SAAS;gBACL,GAAG;gBACH,cAAc,CAAA;;;YAGtB,OAAO;cACH;cACA,aAAa;cACb,aAAa,QAAQ;cACrB,cAAc,KAAK,IAAG,IAAK;cAC3B,cAAc;;;QAG1B;AAGA,YAAI;AACJ,YAAI;AACA,qBAAW,gBAAgB,SAAS,UAAU,KAAK,cAAe;QACtE,SAAS,YAAY;AAEjB,cAAI,QAAQ,kBAAkB,iBAAiB;AAC3C,oBAAQ,eAAe,cACnB,iBACA,UACA,QACA,sBAAsB,QAAQ,WAAW,UAAU,OAAO,UAAU,CAAC;UAE7E;AACA,gBAAM,IAAI,MAAM,uCAAuC,sBAAsB,QAAQ,WAAW,UAAU,OAAO,UAAU,CAAC,EAAE;QAClI;AAGA,cAAM,kBAAkB,KAAK,UAAU,UAAU,MAAM,CAAC;AAGxD,YAAI,QAAQ,kBAAkB,iBAAiB;AAC3C,kBAAQ,eAAe,cACnB,iBACA,aACA,iBACA,QACA,KAAK,UAAU,QAAQ,CAAC;QAEhC;AAEA,eAAO;UACH,QAAQ;YACJ,SAAS;YACT;YACA,SAAS;cACL,GAAG;cACH,cAAc,KAAK;;;UAG3B,OAAO;YACH;YACA,aAAa;;YACb,aAAa,QAAQ;YACrB,cAAc,KAAK,IAAG,IAAK;YAC3B,cAAc;;;MAG1B;;AAUJ,aAAgBD,oBACZ,SAA4B;AAE5B,aAAO;QACH,IAAI;QACJ,MAAM;QACN,UAAU,IAAI,kBAAiB;QAC/B,QAAQ,IAAI,gBAAgB,Q
AAQ,WAAW,QAAQ,KAAK;QAC5D,SAAS,IAAI,iBACT,QAAQ,gBAAgB,QACxB,CAAA,GACA,QAAQ,WACR,QAAQ,gBACR,QAAQ,gBACR,QAAQ,eACR,QAAQ,kBAAkB;QAE9B,SAAS;UACL,gBAAgB,QAAQ,kBAAkB;UAC1C,YAAY;UACZ,cAAc;UACd,gBAAgB;;;IAG5B;AAKA,aAAgBC,sBACZ,OACA,gBACA,cAAsB;AAEtB,aAAO,EAAE,OAAO,gBAAgB,aAAY;IAChD;;;;;;;;;;AC5rBA,QAAA,oBAAA;AACI,WAAA,eAAAC,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAmB,EAAA,CAAA;AAavB,QAAA,iBAAA;AACI,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,eAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,eAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,eAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,eAAA;IAAuB,EAAA,CAAA;AAW3B,QAAA,mBAAA;AACI,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAoB,EAAA,CAAA;;;;;;;;;;;ACAxB,QAAA,UAAA;AAAS,WAAA,eAAAC,UAAA,8BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAA0B,EAAA,CAAA;AAGnC,QAAA,aAAA;AAAS,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAiB,EAAA,CAAA;AAAE,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAc,EAAA,CAAA;AAG1C,QAAA,wBAAA;AAAS,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,sBAAA;IAAkB,EAAA,CAAA;AAAE,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,sBAAA;IAAiB,EAAA,CAAA;AAAE,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,sBAAA;IAAuB,EAAA,CAAA;AAGvE,QAAA,oBAAA;AACI,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAoB,EAAA,CAAA;AACpB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAmB,EAAA,CAAA;AAIvB,QAAA,aAAA;AAEI,WAAA,eAAAA,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAW,EAAA,CAAA;AACX,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAkB,EAAA,CAAA;AAElB,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAoB,EAAA,CAAA;AACpB,WAAA,eAAAA,UAAA,8BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAA0B,EAAA,CAAA;AAC1B,WAAA,eAAAA,UAAA,8BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAA0B,EAAA,CAAA;AAC1B,WAAA,eAAAA,UAAA,6BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAyB,EAAA,CAAA;AAEzB,WAAA,eAAAA,UAAA,aAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAS,EAAA,CAAA;AACT,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,8BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAA0B,EAAA,CAAA;AAE1B,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,6BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAyB,EAAA,CAAA;AAc7B,QAAA,cAAA;AAEI,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA,sBAAA
,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,mCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAA+B,EAAA,CAAA;AAC/B,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,6BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAAyB,EAAA,CAAA;AAEzB,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,gCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAA4B,EAAA,CAAA;AAE5B,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,gCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAA4B,EAAA,CAAA;AAC5B,WAAA,eAAAA,UAAA,8BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAA0B,EAAA,CAAA;AAC1B,WAAA,eAAAA,UAAA,qCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAAiC,EAAA,CAAA;AACjC,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,6BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,YAAA;IAAyB,EAAA,CAAA;AAmB7B,QAAA,SAAA;AACI,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,OAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,OAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,OAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,OAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,OAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,OAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,OAAA;IAAoB,EAAA,CAAA;AA0BxB,QAAA,oBAAA;AACI,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAoB,EAAA,CAAA;AACpB,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAc,EAAA,CAAA;;;;;;;;;AClGlB,IAAAC,SAAA,cAAA;AAgDA,IAAAA,SAAA,mBAAA;AAhDA,aAAgB,YAAY,OAAc;AACtC,aACI,OAAO,UAAU,YACjB,UAAU,QACV,UAAU,SACT,MAAoB,SAAS,SAC9B,UAAU,SACV,OAAQ,MAAoB,SAAS;IAE7C;AAuCA,aAAgB,iBAAiB,OAAc;AAC3C,aACI,OAAO,UAAU,YACjB,UAAU,QACV,YAAY,SACZ,OAAQ,MAA8B,WAAW,YACjD,YAAY,SACZ,MAAM,QAAS,MAA8B,MAAM;IAE3D;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACtEA,IAAAC,SAAA,kBAAA;AAsIA,IAAAA,SAAA,cAAA;AA4BA,IAAAA,SAAA,kBAAA;AAqCA,IAAAA,SAAA,iBAAA;AAaA,IAAAA,SAAA,qBAAA;AAmBA,IAAAA,SAAA,gBAAA;AA1RA,QAAAC,OAAA,aAAA,QAAA,IAAA,CAAA;AACA,QAAAC,SAAA,aAAA,QAAA,MAAA,CAAA;AAEA,QAAA,aAAA;AAIA,QAAA,WAAA;AAKa,IAAAF,SAAA,sBAAiD;MAC1D,WAAW,WAAA;MACX,YAAY,WAAA;MACZ,UAAU;;AAMd,QAAa,gBAAb,cAAmC,SAAA,kBAAiB;MAMhD,YACI,SACA,YACA,aAAoB;AAEpB,cAAM,SAAS;UACX,MAAM,SAAA,UAAU;UAChB,MAAM;YACF,GAAI,eAAe,UAAa,EAAE,WAAU;YAC5C,GAAI,gBAAgB,UAAa,EAAE,YAAW;;SAErD;AACD,aAAK,OAAO;AACZ,aAAK,aAAa;AAClB,aAAK,cAAc;MACvB;;AArBJ,IAAAA,SAAA,gBAAA;AA8BA,aAAgB,gBAAgB,SAAiB,SAAyB;AAEtE,YAAM,kBAAkB,UAAU,OAAO,YACrC,OAAO,QAAQ,OAAO,EAAE,OAAO,CAAC,CAAC,EAAE,CAAC,MAAM,MAAM,MAAS,CAAC,
IACvC;AAEvB,YAAM,OAAO,EAAE,GAAGA,SAAA,qBAAqB,GAAG,gBAAe;AAGzD,YAAM,oBAAoB,QAAQ,QAAQ,SAAS,IAAI,EAAE,QAAQ,OAAO,IAAI;AAG5E,YAAM,OAAO,aAAa,mBAAmB,KAAK,SAAS;AAE3D,UAAI,KAAK,WAAW,GAAG;AACnB,eAAO;UACH,OAAO,CAAA;UACP,SAAS,CAAA;UACT,UAAU;;MAElB;AAEA,UAAI;AACJ,UAAI;AAEJ,UAAI,KAAK,YAAY;AACjB,kBAAU,KAAK,CAAC,EAAE,IAAI,OAAK,EAAE,KAAI,CAAE;AACnC,mBAAW,KAAK,MAAM,CAAC;MAC3B,OAAO;AAEH,cAAM,UAAU,KAAK,CAAC,EAAE;AACxB,kBAAU,MAAM,KAAK,EAAE,QAAQ,QAAO,GAAI,CAAC,GAAG,MAAM,MAAM,CAAC,EAAE;AAC7D,mBAAW;MACf;AAGA,YAAM,YAAY,oBAAI,IAAG;AACzB,iBAAW,UAAU,SAAS;AAC1B,YAAI,UAAU,IAAI,MAAM,GAAG;AACvB,gBAAM,IAAI,cAAc,sBAAsB,MAAM,GAAG;QAC3D;AACA,kBAAU,IAAI,MAAM;MACxB;AAGA,YAAM,QAAsB,SAAS,IAAI,CAAC,KAAK,aAAY;AACvD,cAAM,OAAmB,CAAA;AACzB,iBAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACrC,eAAK,QAAQ,CAAC,CAAC,IAAI,IAAI,CAAC,MAAM,SAAY,IAAI,CAAC,IAAI;QACvD;AACA,eAAO;MACX,CAAC;AAED,aAAO;QACH;QACA;QACA,UAAU,MAAM;;IAExB;AAMA,aAAS,aAAa,SAAiB,WAAiB;AACpD,YAAM,OAAmB,CAAA;AACzB,UAAI,aAAuB,CAAA;AAC3B,UAAI,cAAc;AAClB,UAAI,WAAW;AACf,UAAI,IAAI;AAER,aAAO,IAAI,QAAQ,QAAQ;AACvB,cAAM,OAAO,QAAQ,CAAC;AACtB,cAAM,WAAW,QAAQ,IAAI,CAAC;AAE9B,YAAI,UAAU;AACV,cAAI,SAAS,KAAK;AACd,gBAAI,aAAa,KAAK;AAElB,6BAAe;AACf,mBAAK;YACT,OAAO;AAEH,yBAAW;AACX;YACJ;UACJ,OAAO;AAEH,2BAAe;AACf;UACJ;QACJ,OAAO;AACH,cAAI,SAAS,KAAK;AAEd,uBAAW;AACX;UACJ,WAAW,SAAS,WAAW;AAE3B,uBAAW,KAAK,WAAW;AAC3B,0BAAc;AACd;UACJ,WAAW,SAAS,MAAM;AAEtB,uBAAW,KAAK,WAAW;AAC3B,gBAAI,WAAW,SAAS,KAAK,WAAW,KAAK,OAAK,EAAE,SAAS,CAAC,GAAG;AAC7D,mBAAK,KAAK,UAAU;YACxB;AACA,yBAAa,CAAA;AACb,0BAAc;AACd;UACJ,OAAO;AACH,2BAAe;AACf;UACJ;QACJ;MACJ;AAGA,UAAI,YAAY,SAAS,KAAK,WAAW,SAAS,GAAG;AACjD,mBAAW,KAAK,WAAW;AAC3B,YAAI,WAAW,SAAS,GAAG;AACvB,eAAK,KAAK,UAAU;QACxB;MACJ;AAEA,aAAO;IACX;AAQO,mBAAe,YAClB,UACA,SAAyB;AAGzB,YAAM,kBAAkB,UAAU,OAAO,YACrC,OAAO,QAAQ,OAAO,EAAE,OAAO,CAAC,CAAC,EAAE,CAAC,MAAM,MAAM,MAAS,CAAC,IACvC;AAEvB,YAAM,OAAO,EAAE,GAAGA,SAAA,qBAAqB,GAAG,gBAAe;AAEzD,UAAI;AACA,cAAM,UAAU,MAAMC,KAAG,SAAS,SAAS,UAAU,EAAE,UAAU,KAAK,SAAQ,CAAE;AAChF,eAAO,gBAAgB,SAAS,IAAI;MACxC,SAAS,OAAO;AACZ,YAAK,MAAgC,SAAS,UAAU;AACpD,gBAAM,IAAI,cAAc,uBAAuB,QAAQ,EAAE;QAC7D;AACA,cAAM;MACV;IACJ;AAQA,aAAgB,gBACZ,UACA,SAAyB;AAGzB,YAAM,kBAAkB,UAAU,OAAO,YACrC,OAAO,QAAQ,OAAO,EAAE,OAAO,CAAC,CAAC,EAAE,CAAC,MAAM,MAAM,MAAS,CAAC,IACvC;AAEvB,YAAM,OAAO,EAAE,GAAGD,SAAA,qBAAqB,GAAG,gBAAe;AAEzD,UAAI;AACA,cAAM,UAAUC,KAAG,aAAa,UAAU,EAAE,UAAU,KAAK,SAAQ,CAAE;AACrE,eAAO,gBAAgB,SAAS,IAAI;MACxC,SAAS,OAAO;AACZ,YAAK,MAAgC,SAAS,UAAU;AACpD,gBAAM,IAAI,cAAc,uBAAuB,QAAQ,EAAE;QAC7D;AACA,cAAM;MACV;IACJ;AAiBA,aAAgB,eAAe,SAAiB,eAAqB;AACjE,UAAIC,OAAK,WAAW,OAAO,GAAG;AAC1B,eAAO;MACX;AACA,aAAOA,OAAK,QAAQ,eAAe,OAAO;IAC9C;AAQA,aAAgB,mBACZ,SACA,iBAAyB;AAEzB,YAAM,YAAY,IAAI,IAAI,OAAO;AACjC,YAAM,iBAAiB,gBAAgB,OAAO,SAAO,CAAC,UAAU,IAAI,GAAG,CAAC;AAExE,aAAO;QACH,OAAO,eAAe,WAAW;QACjC;;IAER;AAQA,aAAgB,cACZ,QACA,UAAkB,GAAC;AAEnB,aAAO,OAAO,MAAM,MAAM,GAAG,OAAO;IACxC;;;;;;;;;;ACxOA,IAAAC,SAAA,qBAAA;AAkDA,IAAAA,SAAA,mBAAA;AAUA,IAAAA,SAAA,0BAAA;AAaA,IAAAA,SAAA,kBAAA;AAoBA,IAAAA,SAAA,0BAAA;AAiBA,IAAAA,SAAA,kBAAA;AAiBA,IAAAA,SAAA,cAAAC;AASA,IAAAD,SAAA,sBAAA;AAeA,IAAAA,SAAA,kBAAA;AA7MA,QAAA,uBAAA;AAIA,QAAA,oBAAA;AAOA,QAAA,WAAA;AAKA,QAAa,gBAAb,cAAmC,SAAA,kBAAiB;MAIhD,YACI,SACA,cAAqB;AAErB,cAAM,SAAS;UACX,MAAM,SAAA,UAAU;UAChB,MAAM,eAAe,EAAE,aAAY,IAAK;SAC3C;AACD,aAAK,OAAO;AACZ,aAAK,eAAe;MACxB;;AAdJ,IAAAA,SAAA,gBAAA;AAsCA,aAAgB,mBACZ,UACA,MACA,kBAAuD,OAAK;AAG5D,YAAM,UAAqC,OAAO,oBAAoB,YAChE,EAAE,QAAQ,gBAAe,IACzB;AAEN,YAAM,EAAE,SAAS,OAAO,SAAQ,IAAK;AAGrC,YAAM,QAAQ,IAAI,OAAO,kBAAA,wBAAwB,QAAQ,GAAG;AAE5D,aAAO,SAAS,QAAQ,OAAO,CAAC,OAAO,iBAAgB;AAEnD,YAAI,iBAAiB,WAAW,UAAU;AACtC,iBAAO,KAAK,UAAU,UAAU,MAAM,CAAC;QAC3C;AAGA,YAAI,kBAAA,kBA
AkB,IAAI,YAAY,GAAG;AAGrC,iBAAO;QACX;AAEA,YAAI,gBAAgB,MAAM;AACtB,iBAAO,KAAK,YAAY;QAC5B;AAEA,YAAI,QAAQ;AACR,gBAAM,IAAI,cACN,qBAAqB,YAAY,iBACjC,YAAY;QAEpB;AAGA,eAAO;MACX,CAAC;IACL;AAQA,aAAgB,iBAAiB,UAAkB,iBAA0B,MAAI;AAC7E,cAAO,GAAA,kBAAA,kBAAyB,UAAU,cAAc;IAC5D;AAQA,aAAgB,wBACZ,UACA,MAAgB;AAEhB,cAAO,GAAA,kBAAA,mBAAkB,UAAU,IAAI;IAC3C;AAQA,aAAgB,gBAAgB,YAAoB,cAAsB;AACtE,UAAI,aAAa,WAAW,GAAG;AAC3B,eAAO;MACX;AAEA,YAAM,YAAY,aAAa,KAAK,IAAI;AACxC,aAAO,GAAG,UAAU;;iCAES,SAAS;IAC1C;AAWA,aAAgB,wBACZ,UACA,MACA,cACA,SAAkB,OAAK;AAEvB,YAAM,cAAc,mBAAmB,UAAU,MAAM,MAAM;AAC7D,aAAO,gBAAgB,aAAa,YAAY;IACpD;AASA,aAAgB,gBACZ,UACA,cAAsB;AAEtB,UAAI;AACA,gBAAO,GAAA,qBAAA,iBAAsB,UAAU,YAAY;MACvD,SAAS,OAAO;AACZ,cAAM,IAAI,cAAc,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;MAClF;IACJ;AAQA,aAAgBC,aAAY,UAAgB;AACxC,cAAO,GAAA,qBAAA,aAAkB,QAAQ;IACrC;AAOA,aAAgB,oBAAoB,OAAa;AAC7C,aAAO,MACF,QAAQ,OAAO,MAAM,EACrB,QAAQ,OAAO,KAAK,EACpB,QAAQ,OAAO,KAAK;IAC7B;AAUA,aAAgB,gBACZ,UACA,MACA,YAAoB,KACpB,UAAuB;AAEvB,UAAI;AACA,cAAM,SAAS,mBAAmB,UAAU,MAAM,EAAE,QAAQ,OAAO,SAAQ,CAAE;AAC7E,YAAI,OAAO,SAAS,WAAW;AAC3B,iBAAO,OAAO,UAAU,GAAG,SAAS,IAAI;QAC5C;AACA,eAAO;MACX,QAAQ;AACJ,eAAO;MACX;IACJ;;;;;;;;;;AClKA,IAAAC,SAAA,sBAAA;AAkCA,IAAAA,SAAA,wBAAA;AAsEA,IAAAA,SAAA,qBAAA;AAiDA,IAAAA,SAAA,mBAAA;AAaA,IAAAA,SAAA,mBAAA;AAUA,IAAAA,SAAA,kBAAA;AAcA,IAAAA,SAAA,yBAAA;AAxPA,QAAA,uBAAA;AACA,QAAA,WAAA;AAKA,QAAa,uBAAb,cAA0C,SAAA,kBAAiB;MACvD,YACI,SACA,OAAa;AAEb,cAAM,SAAS;UACX,MAAM,SAAA,UAAU;UAChB;SACH;AACD,aAAK,OAAO;MAChB;;AAVJ,IAAAA,SAAA,uBAAA;AAoDA,aAAgB,oBAAoB,QAA2B;AAC3D,YAAM,EAAE,QAAQ,OAAM,IAAK;AAG3B,YAAM,aAAa,OAAO,KAAK,IAAI;AAGnC,YAAM,aAAqC,CAAA;AAC3C,iBAAW,SAAS,QAAQ;AACxB,mBAAW,KAAK,IAAI;MACxB;AACA,YAAM,cAAc,KAAK,UAAU,YAAY,MAAM,CAAC;AAEtD,aAAO,GAAG,MAAM;;0DAEsC,UAAU;;;;IAIhE,WAAW;;;;;IAKf;AAUA,aAAgB,sBACZ,UACA,QAAgB;AAGhB,YAAM,WAAU,GAAA,qBAAA,aAAY,QAAQ;AAEpC,UAAI,CAAC,SAAS;AACV,cAAM,IAAI,qBACN,mEACA,MAAS;MAEjB;AAEA,UAAI;AACJ,UAAI;AACA,iBAAS,KAAK,MAAM,OAAO;MAC/B,SAAS,GAAG;AACR,cAAM,IAAI,qBACN,0CAA0C,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC,IACpF,aAAa,QAAQ,IAAI,MAAS;MAE1C;AAGA,UAAI,CAAC,MAAM,QAAQ,MAAM,GAAG;AACxB,cAAM,IAAI,qBACN,qCAAqC,OAAO,MAAM,EAAE;MAE5D;AAGA,YAAM,QAAsB,CAAA;AAC5B,eAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACpC,cAAM,UAAU,OAAO,CAAC;AAExB,YAAI,OAAO,YAAY,YAAY,YAAY,MAAM;AACjD,gBAAM,IAAI,qBACN,iBAAiB,CAAC,2BAA2B,OAAO,OAAO,EAAE;QAErE;AAGA,cAAM,OAAmB,CAAA;AACzB,mBAAW,SAAS,QAAQ;AACxB,cAAI,SAAS,SAAS;AAElB,kBAAM,QAAS,QAAoC,KAAK;AACxD,iBAAK,KAAK,IAAI,UAAU,QAAQ,UAAU,SACpC,KACA,OAAO,KAAK;UACtB,OAAO;AAEH,iBAAK,KAAK,IAAI;UAClB;QACJ;AAEA,cAAM,KAAK,IAAI;MACnB;AAEA,aAAO;IACX;AASO,mBAAe,mBAClB,QACA,WAAoB;AAGpB,YAAM,SAAS,oBAAoB,MAAM;AAGzC,YAAM,WAAW,MAAM,UAAU,QAAQ,OAAO,QAAQ,EAAE,OAAO,OAAO,MAAK,IAAK,MAAS;AAE3F,UAAI,CAAC,SAAS,SAAS;AACnB,eAAO;UACH,SAAS;UACT,OAAO,SAAS,SAAS;UACzB,aAAa,SAAS;;MAE9B;AAEA,UAAI,CAAC,SAAS,UAAU;AACpB,eAAO;UACH,SAAS;UACT,OAAO;;MAEf;AAGA,UAAI;AACA,cAAM,QAAQ,sBAAsB,SAAS,UAAU,OAAO,MAAM;AACpE,eAAO;UACH,SAAS;UACT;UACA,aAAa,SAAS;;MAE9B,SAAS,GAAG;AACR,eAAO;UACH,SAAS;UACT,OAAO,aAAa,uBAAuB,EAAE,UAAU,OAAO,CAAC;UAC/D,aAAa,SAAS;;MAE9B;IACJ;AASA,aAAgB,iBAAiB,OAAmB;AAChD,aAAO,MAAM,IAAI,WAAS;QACtB;QACA,UAAU;QACZ;IACN;AAQA,aAAgB,iBAAiB,OAAsB;AACnD,aAAO,MAAM,OAAO,UAAQ,KAAK,QAAQ,EAAE,IAAI,UAAQ,KAAK,IAAI;IACpE;AAQA,aAAgB,gBAAgB,QAAgB;AAC5C,YAAM,OAAmB,CAAA;AACzB,iBAAW,SAAS,QAAQ;AACxB,aAAK,KAAK,IAAI;MAClB;AACA,aAAO;IACX;AAQA,aAAgB,uBACZ,QAA2B;AAE3B,YAAM,SAAmB,CAAA;AAEzB,UAAI,CAAC,OAAO,UAAU,OAAO,OAAO,WAAW,UAAU;AACrD,eAAO,KAAK,4CAA4C;MAC5D,WAAW,OAAO,OAAO,KAAI,EAAG,WAAW,GAAG;AAC1C,eAAO,KAAK,0CAA0C;MAC1D;AAEA,UAAI,CAAC,OAAO,UAAU,CAAC,MAAM,QAAQ,OAAO,MAAM,GAAG;AACjD,eAAO,KAAK,2CAA2C;MAC3D,WAAW,OAA
O,OAAO,WAAW,GAAG;AACnC,eAAO,KAAK,uDAAuD;MACvE,OAAO;AAEH,iBAAS,IAAI,GAAG,IAAI,OAAO,OAAO,QAAQ,KAAK;AAC3C,gBAAM,QAAQ,OAAO,OAAO,CAAC;AAC7B,cAAI,OAAO,UAAU,UAAU;AAC3B,mBAAO,KAAK,yBAAyB,CAAC,mBAAmB;UAC7D,WAAW,MAAM,KAAI,EAAG,WAAW,GAAG;AAClC,mBAAO,KAAK,yBAAyB,CAAC,kBAAkB;UAC5D,WAAW,CAAC,2BAA2B,KAAK,KAAK,GAAG;AAChD,mBAAO,KAAK,iBAAiB,KAAK,uFAAuF;UAC7H;QACJ;AAGA,cAAM,OAAO,oBAAI,IAAG;AACpB,mBAAW,SAAS,OAAO,QAAQ;AAC/B,cAAI,KAAK,IAAI,KAAK,GAAG;AACjB,mBAAO,KAAK,4BAA4B,KAAK,GAAG;UACpD;AACA,eAAK,IAAI,KAAK;QAClB;MACJ;AAEA,aAAO;QACH,OAAO,OAAO,WAAW;QACzB;;IAER;;;;;;;;;AC/OA,IAAAC,SAAA,gBAAA;AAsCA,IAAAA,SAAA,oBAAA;AAoDA,IAAAA,SAAA,kBAAA;AA4DA,IAAAA,SAAA,sBAAA;AA/LA,QAAA,aAAA;AACA,QAAA,WAAA;AAwCO,mBAAe,cAClB,OACA,cACA,SAA6B;AAE7B,cAAQ,aAAa,MAAM;QACvB,KAAK;AACD,cAAI,CAAC,aAAa,MAAM;AACpB,kBAAM,IAAI,MAAM,2CAA2C;UAC/D;AACA,iBAAO,kBAAkB,OAAO,aAAa,MAAM,OAAO;QAE9D,KAAK;AACD,cAAI,CAAC,aAAa,IAAI;AAClB,kBAAM,IAAI,MAAM,uCAAuC;UAC3D;AACA,cAAI,CAAC,QAAQ,WAAW;AACpB,kBAAM,IAAI,MAAM,yCAAyC;UAC7D;AACA,iBAAO,gBAAgB,OAAO,aAAa,IAAI,OAAO;QAE1D,KAAK;AACD,cAAI,CAAC,aAAa,QAAQ,CAAC,aAAa,IAAI;AACxC,kBAAM,IAAI,MAAM,2DAA2D;UAC/E;AACA,cAAI,CAAC,QAAQ,WAAW;AACpB,kBAAM,IAAI,MAAM,6CAA6C;UACjE;AACA,iBAAO,oBAAoB,OAAO,cAAc,OAAO;QAE3D;AACI,gBAAM,IAAI,MAAM,wBAAyB,aAAqB,IAAI,EAAE;MAC5E;IACJ;AAKO,mBAAe,kBAClB,OACA,QACA,SAA6B;AAE7B,YAAM,YAAY,KAAK,IAAG;AAC1B,YAAM,WAAyB,CAAA;AAC/B,YAAM,WAAyB,CAAA;AAE/B,eAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AAEnC,YAAI,QAAQ,cAAa,GAAI;AACzB,gBAAM,IAAI,MAAM,4BAA4B;QAChD;AAEA,cAAM,OAAO,MAAM,CAAC;AACpB,cAAM,SAAS,iBAAiB,MAAM,MAAM;AAE5C,YAAI,QAAQ;AACR,mBAAS,KAAK,IAAI;QACtB,OAAO;AACH,mBAAS,KAAK,IAAI;QACtB;AAGA,YAAI,QAAQ,eAAe,IAAI,QAAQ,KAAK,MAAM,MAAM,SAAS,IAAI;AACjE,kBAAQ,WAAW;YACf,OAAO;YACP,WAAW,IAAI;YACf,OAAO,MAAM;YACb,UAAU,SAAS;YACnB,UAAU,SAAS;WACtB;QACL;MACJ;AAEA,aAAO;QACH;QACA;QACA,OAAO;UACH,YAAY,MAAM;UAClB,eAAe,SAAS;UACxB,eAAe,SAAS;UACxB,iBAAiB,KAAK,IAAG,IAAK;UAC9B,YAAY;;;IAGxB;AAKO,mBAAe,gBAClB,OACA,QACA,SAA6B;AAE7B,YAAM,YAAY,KAAK,IAAG;AAC1B,YAAM,WAAyB,CAAA;AAC/B,YAAM,WAAyB,CAAA;AAC/B,YAAM,gBAAgB,OAAO,YAAY;AACzC,YAAM,YAAY,OAAO,aAAa;AAGtC,eAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,eAAe;AAElD,YAAI,QAAQ,cAAa,GAAI;AACzB,gBAAM,IAAI,MAAM,4BAA4B;QAChD;AAEA,cAAM,QAAQ,MAAM,MAAM,GAAG,KAAK,IAAI,IAAI,eAAe,MAAM,MAAM,CAAC;AACtE,cAAM,UAAU,MAAM,QAAQ,IAC1B,MAAM,IAAI,UAAQ,eAAe,MAAM,QAAQ,QAAQ,WAAY,SAAS,CAAC,CAAC;AAIlF,iBAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACnC,cAAI,QAAQ,CAAC,GAAG;AACZ,qBAAS,KAAK,MAAM,CAAC,CAAC;UAC1B,OAAO;AACH,qBAAS,KAAK,MAAM,CAAC,CAAC;UAC1B;QACJ;AAGA,YAAI,QAAQ,YAAY;AACpB,kBAAQ,WAAW;YACf,OAAO;YACP,WAAW,KAAK,IAAI,IAAI,eAAe,MAAM,MAAM;YACnD,OAAO,MAAM;YACb,UAAU,SAAS;YACnB,UAAU,SAAS;WACtB;QACL;MACJ;AAEA,aAAO;QACH;QACA;QACA,OAAO;UACH,YAAY,MAAM;UAClB,eAAe,SAAS;UACxB,eAAe,SAAS;UACxB,iBAAiB,KAAK,IAAG,IAAK;UAC9B,YAAY;;;IAGxB;AAKO,mBAAe,oBAClB,OACA,QACA,SAA6B;AAE7B,YAAM,YAAY,KAAK,IAAG;AAC1B,YAAM,cAAc,OAAO,eAAe;AAG1C,YAAM,aAAa,MAAM,kBAAkB,OAAO,OAAO,MAAO,OAAO;AAEvE,UAAI,gBAAgB,MAAM;AAEtB,cAAM,WAAW,MAAM,gBAAgB,WAAW,UAAU,OAAO,IAAK,OAAO;AAE/E,eAAO;UACH,UAAU,CAAC,GAAG,WAAW,UAAU,GAAG,SAAS,QAAQ;UACvD,UAAU,SAAS;UACnB,OAAO;YACH,YAAY,MAAM;YAClB,eAAe,WAAW,SAAS,SAAS,SAAS,SAAS;YAC9D,eAAe,SAAS,SAAS;YACjC,iBAAiB,KAAK,IAAG,IAAK;YAC9B,YAAY;;;MAGxB,OAAO;AAEH,cAAM,WAAW,MAAM,gBAAgB,WAAW,UAAU,OAAO,IAAK,OAAO;AAE/E,eAAO;UACH,UAAU,SAAS;UACnB,UAAU,CAAC,GAAG,WAAW,UAAU,GAAG,SAAS,QAAQ;UACvD,OAAO;YACH,YAAY,MAAM;YAClB,eAAe,SAAS,SAAS;YACjC,eAAe,WAAW,SAAS,SAAS,SAAS,SAAS;YAC9D,iBAAiB,KAAK,IAAG,IAAK;YAC9B,YAAY;;;MAGxB;IACJ;AAKA,aAAS,iBAAiB,MAAkB,QAAwB;AAChE,YAAM,OAAO,OAAO,QAAQ;AAE5B,UAAI,SAAS,OAAO;AAEhB,eAAO,OAAO,MAAM,MAAM,UAAQ,aAAa,MAAM,IAAI,CAAC;MAC9D,OAAO;AAEH,eAAO,OAAO,MAAM,KAAK,UA
AQ,aAAa,MAAM,IAAI,CAAC;MAC7D;IACJ;AAKA,aAAS,aAAa,MAAkB,MAAgB;AACpD,YAAM,aAAa,eAAe,MAAM,KAAK,KAAK;AAGlD,UAAI,eAAe,UAAa,eAAe,MAAM;AACjD,eAAO;MACX;AAEA,cAAQ,KAAK,UAAU;QACnB,KAAK;AACD,iBAAO,eAAe,KAAK;QAE/B,KAAK;AACD,iBAAO,eAAe,KAAK;QAE/B,KAAK;AACD,iBAAO,KAAK,QAAQ,SAAS,UAAU,KAAK;QAEhD,KAAK;AACD,iBAAO,CAAC,KAAK,QAAQ,SAAS,UAAU;QAE5C,KAAK;AACD,iBAAO,OAAO,UAAU,EAAE,YAAW,EAChC,SAAS,OAAO,KAAK,KAAK,EAAE,YAAW,CAAE;QAElD,KAAK;AACD,iBAAO,CAAC,OAAO,UAAU,EAAE,YAAW,EACjC,SAAS,OAAO,KAAK,KAAK,EAAE,YAAW,CAAE;QAElD,KAAK;AACD,iBAAO,OAAO,UAAU,IAAI,OAAO,KAAK,KAAK;QAEjD,KAAK;AACD,iBAAO,OAAO,UAAU,IAAI,OAAO,KAAK,KAAK;QAEjD,KAAK;AACD,iBAAO,OAAO,UAAU,KAAK,OAAO,KAAK,KAAK;QAElD,KAAK;AACD,iBAAO,OAAO,UAAU,KAAK,OAAO,KAAK,KAAK;QAElD,KAAK;AACD,cAAI,CAAC,KAAK,SAAS;AACf,kBAAM,IAAI,MAAM,mCAAmC;UACvD;AACA,gBAAM,QAAQ,IAAI,OAAO,KAAK,OAAO;AACrC,iBAAO,MAAM,KAAK,OAAO,UAAU,CAAC;QAExC;AACI,gBAAM,IAAI,MAAM,qBAAqB,KAAK,QAAQ,EAAE;MAC5D;IACJ;AAKA,aAAS,eAAe,MAAWC,QAAY;AAC3C,aAAOA,OAAK,MAAM,GAAG,EAAE,OAAO,CAAC,KAAK,QAAQ,MAAM,GAAG,GAAG,IAAI;IAChE;AAMA,mBAAe,eACX,MACA,QACA,WACA,WAAiB;AAEjB,UAAI;AAEA,cAAM,UAAS,GAAA,WAAA,oBAAmB,OAAO,QAAQ,IAAI;AAGrD,cAAM,SAAS,MAAM,UAAU,QAAQ;UACnC,OAAO,OAAO;UACd;SACH;AAED,YAAI,CAAC,OAAO,SAAS;AAEjB,WAAA,GAAA,SAAA,WAAS,EAAG,MAAM,SAAA,YAAY,UAAU,6BAA6B,OAAO,KAAK,EAAE;AACnF,iBAAO;QACX;AAGA,YAAI;AACJ,YAAI,OAAO,UAAU,OAAO,OAAO,SAAS,GAAG;AAE3C,cAAI;AACA,uBAAW,KAAK,MAAM,OAAO,YAAY,EAAE;UAC/C,QAAQ;AAEJ,aAAA,GAAA,SAAA,WAAS,EAAG,MAAM,SAAA,YAAY,UAAU,+CAA+C,OAAO,QAAQ,EAAE;AACxG,mBAAO;UACX;QACJ,OAAO;AAEH,qBAAW,EAAE,SAAS,+BAA+B,KAAK,OAAO,YAAY,EAAE,EAAC;QACpF;AAGA,YAAI,OAAO,SAAS,YAAY,WAAW;AACvC,iBAAO,SAAS;QACpB;AAGA,SAAA,GAAA,SAAA,WAAS,EAAG,MAAM,SAAA,YAAY,UAAU,+CAA+C,KAAK,UAAU,QAAQ,CAAC,EAAE;AACjH,eAAO;MAEX,SAAS,OAAO;AACZ,SAAA,GAAA,SAAA,WAAS,EAAG,MAAM,SAAA,YAAY,UAAU,wBAAwB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,EAAE;AACxH,eAAO;MACX;IACJ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACtTA,IAAAC,SAAA,iBAAA;AA8BA,IAAAA,SAAA,oBAAA;AAsDA,IAAAA,SAAA,uBAAA;AAqCA,IAAAA,SAAA,oBAAA;AAqCA,IAAAA,SAAA,wBAAA;AAqCA,IAAAA,SAAA,+BAAA;AAwCA,IAAAA,SAAA,mBAAA;AAgBA,IAAAA,SAAA,qBAAA;AA3TA,QAAAC,OAAA,aAAA,QAAA,IAAA,CAAA;AACA,QAAAC,SAAA,aAAA,QAAA,MAAA,CAAA;AACA,QAAA,WAAA;AAKA,QAAa,sBAAb,cAAyC,SAAA,kBAAiB;MAItD,YACI,SACA,eAAwB;AAExB,cAAM,SAAS;UACX,MAAM,SAAA,UAAU;UAChB,MAAM,gBAAgB,EAAE,cAAa,IAAK;SAC7C;AACD,aAAK,OAAO;AACZ,aAAK,gBAAgB;MACzB;;AAdJ,IAAAF,SAAA,sBAAA;AAoCA,QAAM,oBAAoB;AAK1B,aAAS,kBAAkB,UAAgB;AACvC,aAAO,SAAS,SAAS,GAAG,KAAK,SAAS,SAAS,IAAI;IAC3D;AAcA,aAAgB,eAAe,UAAkB,mBAAyB;AACtE,YAAM,QAAkB,CAAA;AAGxB,YAAM,KAAKE,OAAK,KAAK,mBAAmB,QAAQ,CAAC;AAGjD,YAAM,KAAKA,OAAK,KAAK,mBAAmB,WAAW,QAAQ,CAAC;AAI5D,YAAM,gBAAgBA,OAAK,QAAQ,iBAAiB;AACpD,YAAM,KAAKA,OAAK,KAAK,eAAe,WAAW,QAAQ,CAAC;AAExD,aAAO;IACX;AAeA,aAAgB,kBAAkB,YAAoB,mBAAyB;AAE3E,UAAIA,OAAK,WAAW,UAAU,GAAG;AAC7B,YAAI,CAACD,KAAG,WAAW,UAAU,GAAG;AAC5B,gBAAM,IAAI,oBACN,0BAA0B,UAAU,IACpC,CAAC,UAAU,CAAC;QAEpB;AACA,eAAO;MACX;AAGA,UAAI,kBAAkB,UAAU,GAAG;AAC/B,cAAM,eAAeC,OAAK,QAAQ,mBAAmB,UAAU;AAC/D,YAAI,CAACD,KAAG,WAAW,YAAY,GAAG;AAC9B,gBAAM,IAAI,oBACN,0BAA0B,UAAU,IACpC,CAAC,YAAY,CAAC;QAEtB;AACA,eAAO;MACX;AAGA,YAAM,cAAc,eAAe,YAAY,iBAAiB;AAChE,iBAAW,cAAc,aAAa;AAClC,YAAIA,KAAG,WAAW,UAAU,GAAG;AAC3B,iBAAO;QACX;MACJ;AAGA,YAAM,IAAI,oBACN,gBAAgB,UAAU;MAAqC,YAAY,KAAK,QAAQ,CAAC,IACzF,WAAW;IAEnB;AAiBA,aAAgB,qBAAqB,aAAmB;AACpD,YAAM,QAAQ,YAAY,MAAM,iBAAiB;AAEjD,UAAI,OAAO;AACP,cAAM,UAAU,YAAY,MAAM,MAAM,CAAC,EAAE,MAAM,EAAE,KAAI;AACvD,eAAO;UACH;UACA,gBAAgB;;MAExB;AAEA,aAAO;QACH,SAAS,YAAY,KAAI;QACzB,gBAAgB;;IAExB;AAsBO,mBAAe,kBAClB,YACA,mBAAyB;AAEzB,YAAM,eAAe,kBAAkB,YAAY,iBAAiB;AAEpE,UAAI;AACA,cAAM,cAAc,MAAMA,KAAG,SAAS,SAAS,cAAc,OAAO;AACpE,cA
AM,EAAE,QAAO,IAAK,qBAAqB,WAAW;AAEpD,YAAI,CAAC,SAAS;AACV,gBAAM,IAAI,oBACN,qDAAqD,UAAU,IAC/D,CAAC,YAAY,CAAC;QAEtB;AAEA,eAAO;MACX,SAAS,OAAO;AACZ,YAAI,iBAAiB,qBAAqB;AACtC,gBAAM;QACV;AACA,cAAM,IAAI,oBACN,+BAA+B,UAAU,MAAM,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,IACrG,CAAC,YAAY,CAAC;MAEtB;IACJ;AAUA,aAAgB,sBACZ,YACA,mBAAyB;AAEzB,YAAM,eAAe,kBAAkB,YAAY,iBAAiB;AAEpE,UAAI;AACA,cAAM,cAAcA,KAAG,aAAa,cAAc,OAAO;AACzD,cAAM,EAAE,QAAO,IAAK,qBAAqB,WAAW;AAEpD,YAAI,CAAC,SAAS;AACV,gBAAM,IAAI,oBACN,qDAAqD,UAAU,IAC/D,CAAC,YAAY,CAAC;QAEtB;AAEA,eAAO;MACX,SAAS,OAAO;AACZ,YAAI,iBAAiB,qBAAqB;AACtC,gBAAM;QACV;AACA,cAAM,IAAI,oBACN,+BAA+B,UAAU,MAAM,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,IACrG,CAAC,YAAY,CAAC;MAEtB;IACJ;AAUO,mBAAe,6BAClB,YACA,mBAAyB;AAEzB,YAAM,eAAe,kBAAkB,YAAY,iBAAiB;AAEpE,UAAI;AACA,cAAM,cAAc,MAAMA,KAAG,SAAS,SAAS,cAAc,OAAO;AACpE,cAAM,EAAE,SAAS,eAAc,IAAK,qBAAqB,WAAW;AAEpE,YAAI,CAAC,SAAS;AACV,gBAAM,IAAI,oBACN,qDAAqD,UAAU,IAC/D,CAAC,YAAY,CAAC;QAEtB;AAEA,eAAO;UACH;UACA;UACA;;MAER,SAAS,OAAO;AACZ,YAAI,iBAAiB,qBAAqB;AACtC,gBAAM;QACV;AACA,cAAM,IAAI,oBACN,+BAA+B,UAAU,MAAM,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,IACrG,CAAC,YAAY,CAAC;MAEtB;IACJ;AASA,aAAgB,iBAAiB,YAAoB,mBAAyB;AAC1E,UAAI;AACA,0BAAkB,YAAY,iBAAiB;AAC/C,eAAO;MACX,QAAQ;AACJ,eAAO;MACX;IACJ;AASA,aAAgB,mBACZ,YACA,mBAAyB;AAEzB,UAAI;AACA,0BAAkB,YAAY,iBAAiB;AAC/C,eAAO,EAAE,OAAO,KAAI;MACxB,SAAS,OAAO;AACZ,YAAI,iBAAiB,qBAAqB;AACtC,iBAAO;YACH,OAAO;YACP,OAAO,MAAM;YACb,eAAe,MAAM;;QAE7B;AACA,eAAO;UACH,OAAO;UACP,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;;MAEpE;IACJ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACzPA,IAAAE,SAAA,qBAAA;AAkBA,IAAAA,SAAA,oBAAA;AAiBA,IAAAA,SAAA,qBAAA;AAiBA,IAAAA,SAAA,cAAA;AAgBA,IAAAA,SAAA,aAAA;AA2GA,IAAAA,SAAA,eAAA;AAkBA,IAAAA,SAAA,mBAAA;AAkBA,IAAAA,SAAA,0BAAA;AA8EA,IAAAA,SAAA,8BAAA;AAmFA,IAAAA,SAAA,gBAAA;AAtcA,QAAAC,OAAA,aAAA,QAAA,IAAA,CAAA;AACA,QAAAC,SAAA,aAAA,QAAA,MAAA,CAAA;AACA,QAAA,oBAAA;AACA,QAAA,aAAA;AACA,QAAA,WAAA;AAGa,IAAAF,SAAA,2BAA2B,WAAA;AAK3B,IAAAA,SAAA,wBAAwB;AAKrC,QAAa,qBAAb,cAAwC,SAAA,kBAAiB;MAMrD,YACI,SACA,WACA,cAAqB;AAErB,cAAM,SAAS;UACX,MAAM,SAAA,UAAU;UAChB,MAAM;YACF;YACA,GAAI,gBAAgB,EAAE,aAAY;;SAEzC;AACD,aAAK,OAAO;AACZ,aAAK,YAAY;AACjB,aAAK,eAAe;MACxB;;AArBJ,IAAAA,SAAA,qBAAA;AAiEA,aAAgB,mBAAmB,eAAuB,YAAmB;AACzE,UAAI,YAAY;AACZ,YAAIE,OAAK,WAAW,UAAU,GAAG;AAC7B,iBAAO;QACX;AACA,eAAOA,OAAK,QAAQ,eAAe,UAAU;MACjD;AACA,aAAOA,OAAK,QAAQ,eAAeF,SAAA,wBAAwB;IAC/D;AAUA,aAAgB,kBACZ,WACA,eACA,kBAAyB;AAEzB,YAAM,YAAY,mBAAmB,eAAe,gBAAgB;AACpE,aAAOE,OAAK,KAAK,WAAW,SAAS;IACzC;AAUA,aAAgB,mBACZ,WACA,eACA,kBAAyB;AAEzB,YAAM,WAAW,kBAAkB,WAAW,eAAe,gBAAgB;AAC7E,aAAOA,OAAK,KAAK,UAAUF,SAAA,qBAAqB;IACpD;AAUA,aAAgB,YACZ,WACA,eACA,kBAAyB;AAEzB,YAAM,aAAa,mBAAmB,WAAW,eAAe,gBAAgB;AAChF,aAAOC,KAAG,WAAW,UAAU;IACnC;AASA,aAAgB,WACZ,eACA,kBAAyB;AAEzB,YAAM,YAAY,mBAAmB,eAAe,gBAAgB;AAEpE,UAAI,CAACA,KAAG,WAAW,SAAS,GAAG;AAC3B,eAAO,CAAA;MACX;AAEA,UAAI;AACA,cAAM,UAAUA,KAAG,YAAY,WAAW,EAAE,eAAe,KAAI,CAAE;AACjE,eAAO,QACF,OAAO,WAAQ;AACZ,cAAI,CAAC,MAAM,YAAW,GAAI;AACtB,mBAAO;UACX;AAEA,gBAAM,aAAaC,OAAK,KAAK,WAAW,MAAM,MAAMF,SAAA,qBAAqB;AACzE,iBAAOC,KAAG,WAAW,UAAU;QACnC,CAAC,EACA,IAAI,WAAS,MAAM,IAAI,EACvB,KAAI;MACb,QAAQ;AACJ,eAAO,CAAA;MACX;IACJ;AAQA,aAAS,mBAAmB,SAAe;AACvC,YAAM,WAA0B,EAAE,KAAK,QAAO;AAG9C,YAAM,mBAAmB,QAAQ,MAAM,6BAA6B;AACpE,UAAI,kBAAkB;AAClB,cAAM,cAAc,iBAAiB,CAAC;AAGtC,cAAM,YAAY,YAAY,MAAM,+BAA+B;AACnE,YAAI;AAAW,mBAAS,OAAO,UAAU,CAAC;AAE1C,cAAM,YAAY,YAAY,MAAM,sCAAsC;AAC1E,YAAI;AAAW,mBAAS,cAAc,UAAU,CAAC;AAEjD,cAAM,eAAe,YAAY,MAAM,kCAAkC;AACzE,YAAI;AAAc,mBAAS,UAAU,aAAa,CAAC;AAGnD,cAAM,iBAAiB,YAAY,MAAM,6BAA6B;AACtE,YAAI,gBAAgB;AAChB,mBAAS,YAAY,eAAe,CAAC,EAChC,MAAM,GAAG,EACT,IAAI,OAAK,EAAE,K
AAI,EAAG,QAAQ,SAAS,EAAE,CAAC,EACtC,OAAO,OAAK,EAAE,SAAS,CAAC;QACjC;AAGA,cAAM,cAAc,YAAY,MAAM,0BAA0B;AAChE,YAAI,aAAa;AACb,mBAAS,SAAS,YAAY,CAAC,EAC1B,MAAM,GAAG,EACT,IAAI,OAAK,EAAE,KAAI,EAAG,QAAQ,SAAS,EAAE,CAAC,EACtC,OAAO,OAAK,EAAE,SAAS,CAAC;QACjC;MACJ;AAEA,aAAO;IACX;AAQA,aAAS,6BAA6B,aAAmB;AACrD,UAAI,CAAC,aAAa;AACd,eAAO;MACX;AAEA,UAAI;AACA,eAAO,mBAAmB,WAAW;MACzC,QAAQ;AACJ,eAAO;MACX;IACJ;AAiBO,mBAAe,aAClB,WACA,eACA,kBAAyB;AAEzB,YAAM,SAAS,MAAM,wBAAwB,WAAW,eAAe,gBAAgB;AACvF,aAAO,OAAO;IAClB;AAWA,aAAgB,iBACZ,WACA,eACA,kBAAyB;AAEzB,YAAM,SAAS,4BAA4B,WAAW,eAAe,gBAAgB;AACrF,aAAO,OAAO;IAClB;AAWO,mBAAe,wBAClB,WACA,eACA,kBAAyB;AAGzB,UAAI,CAAC,aAAa,OAAO,cAAc,UAAU;AAC7C,cAAM,IAAI,mBAAmB,yCAAyC,aAAa,EAAE;MACzF;AAGA,UAAI,UAAU,SAAS,GAAG,KAAK,UAAU,SAAS,IAAI,KAAK,UAAU,SAAS,IAAI,GAAG;AACjF,cAAM,IAAI,mBACN,uBAAuB,SAAS,yDAChC,SAAS;MAEjB;AAEA,YAAM,iBAAiB,kBAAkB,WAAW,eAAe,gBAAgB;AACnF,YAAM,aAAaC,OAAK,KAAK,gBAAgBF,SAAA,qBAAqB;AAGlE,UAAI,CAACC,KAAG,WAAW,cAAc,GAAG;AAChC,cAAM,YAAY,mBAAmB,eAAe,gBAAgB;AACpE,cAAM,IAAI,mBACN,UAAU,SAAS,oCAAoC,cAAc;+BACrC,SAAS,IACzC,WACA,cAAc;MAEtB;AAGA,UAAI,CAACA,KAAG,WAAW,UAAU,GAAG;AAC5B,cAAM,IAAI,mBACN,UAAU,SAAS,oCAAoC,UAAU,IACjE,WACA,UAAU;MAElB;AAEA,UAAI;AACA,cAAM,cAAc,MAAMA,KAAG,SAAS,SAAS,YAAY,OAAO;AAClE,cAAM,EAAE,SAAS,eAAc,KAAK,GAAA,kBAAA,sBAAqB,WAAW;AAEpE,YAAI,CAAC,SAAS;AACV,gBAAM,IAAI,mBACN,UAAU,SAAS,oDACnB,WACA,UAAU;QAElB;AAGA,cAAM,WAAW,6BAA6B,WAAW;AAEzD,eAAO;UACH;UACA,cAAc;UACd;UACA;UACA;;MAER,SAAS,OAAO;AACZ,YAAI,iBAAiB,oBAAoB;AACrC,gBAAM;QACV;AACA,cAAM,IAAI,mBACN,yBAAyB,SAAS,MAAM,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,IAC9F,WACA,UAAU;MAElB;IACJ;AAKA,aAAgB,4BACZ,WACA,eACA,kBAAyB;AAGzB,UAAI,CAAC,aAAa,OAAO,cAAc,UAAU;AAC7C,cAAM,IAAI,mBAAmB,yCAAyC,aAAa,EAAE;MACzF;AAGA,UAAI,UAAU,SAAS,GAAG,KAAK,UAAU,SAAS,IAAI,KAAK,UAAU,SAAS,IAAI,GAAG;AACjF,cAAM,IAAI,mBACN,uBAAuB,SAAS,yDAChC,SAAS;MAEjB;AAEA,YAAM,iBAAiB,kBAAkB,WAAW,eAAe,gBAAgB;AACnF,YAAM,aAAaC,OAAK,KAAK,gBAAgBF,SAAA,qBAAqB;AAGlE,UAAI,CAACC,KAAG,WAAW,cAAc,GAAG;AAChC,cAAM,YAAY,mBAAmB,eAAe,gBAAgB;AACpE,cAAM,IAAI,mBACN,UAAU,SAAS,oCAAoC,cAAc;+BACrC,SAAS,IACzC,WACA,cAAc;MAEtB;AAGA,UAAI,CAACA,KAAG,WAAW,UAAU,GAAG;AAC5B,cAAM,IAAI,mBACN,UAAU,SAAS,oCAAoC,UAAU,IACjE,WACA,UAAU;MAElB;AAEA,UAAI;AACA,cAAM,cAAcA,KAAG,aAAa,YAAY,OAAO;AACvD,cAAM,EAAE,SAAS,eAAc,KAAK,GAAA,kBAAA,sBAAqB,WAAW;AAEpE,YAAI,CAAC,SAAS;AACV,gBAAM,IAAI,mBACN,UAAU,SAAS,oDACnB,WACA,UAAU;QAElB;AAGA,cAAM,WAAW,6BAA6B,WAAW;AAEzD,eAAO;UACH;UACA,cAAc;UACd;UACA;UACA;;MAER,SAAS,OAAO;AACZ,YAAI,iBAAiB,oBAAoB;AACrC,gBAAM;QACV;AACA,cAAM,IAAI,mBACN,yBAAyB,SAAS,MAAM,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,IAC9F,WACA,UAAU;MAElB;IACJ;AAUA,aAAgB,cACZ,WACA,eACA,kBAAyB;AAEzB,UAAI;AACA,cAAM,aAAa,mBAAmB,WAAW,eAAe,gBAAgB;AAGhF,YAAI,CAAC,aAAa,OAAO,cAAc,UAAU;AAC7C,iBAAO,EAAE,OAAO,OAAO,OAAO,wCAAuC;QACzE;AAGA,YAAI,UAAU,SAAS,GAAG,KAAK,UAAU,SAAS,IAAI,KAAK,UAAU,SAAS,IAAI,GAAG;AACjF,iBAAO;YACH,OAAO;YACP,OAAO,uBAAuB,SAAS;;QAE/C;AAEA,YAAI,CAACA,KAAG,WAAW,UAAU,GAAG;AAC5B,gBAAM,YAAY,mBAAmB,eAAe,gBAAgB;AACpE,iBAAO;YACH,OAAO;YACP,OAAO,UAAU,SAAS,kBAAkB,UAAU,yBAAyB,SAAS;YACxF,WAAW;;QAEnB;AAEA,eAAO,EAAE,OAAO,MAAM,WAAW,WAAU;MAC/C,SAAS,OAAO;AACZ,eAAO;UACH,OAAO;UACP,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;;MAEpE;IACJ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC5YA,IAAAE,SAAA,kBAAA;AA+BA,IAAAA,SAAA,2BAAA;AAq1CA,IAAAA,SAAA,oBAAA;AAUA,IAAAA,SAAA,wBAAA;AAt+CA,QAAA,eAAA;AAYA,QAAA,aAAA;AACA,QAAA,eAAA;AACA,QAAA,aAAA;AACA,QAAA,oBAAA;AACA,QAAA,UAAA;AAUA,QAAA,oBAAA;AACA,QAAA,oBAAA;AACA,QAAA,oBAAA;AACA,QAAA,mBAAA;AACA,QAAA,WAAA;AACA,QAAA,WAAA;AAGA,QAAA,aAAA;AAAS,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAsB,EAAA,CAAA;AAK/B,QAAa
,yBAAb,cAA4C,SAAA,kBAAiB;MAIzD,YACI,SACA,OAA6C;AAE7C,cAAM,SAAS;UACX,MAAM,SAAA,UAAU;UAChB,MAAM,QAAQ,EAAE,MAAK,IAAK;SAC7B;AACD,aAAK,OAAO;AACZ,aAAK,QAAQ;MACjB;;AAdJ,IAAAA,SAAA,yBAAA;AAiEO,mBAAe,gBAClB,QACA,SAA+B;AAG/B,6BAAuB,MAAM;AAG7B,YAAM,UAAU,MAAM,eAAe,QAAQ,QAAQ,mBAAmB,QAAQ,aAAa;AAG7F,UAAI,QAAQ,MAAM,eAAe,QAAQ,QAAQ,iBAAiB;AAGlE,cAAQ,aAAa,OAAO,QAAQ,QAAQ,SAAS;AAGrD,aAAO,iBAAiB,QAAQ,OAAO,SAAS,OAAO;IAC3D;AAaO,mBAAe,yBAClB,QACA,OACA,SAA+B;AAG/B,yCAAmC,MAAM;AAGzC,YAAM,UAAU,MAAM,eAAe,QAAQ,QAAQ,mBAAmB,QAAQ,aAAa;AAG7F,YAAM,eAAe,aAAa,OAAO,QAAQ,QAAQ,SAAS;AAGlE,aAAO,iBAAiB,QAAQ,cAAc,SAAS,OAAO;IAClE;AAMA,aAAS,mCAAmC,QAAsB;AAC9D,UAAI,CAAC,OAAO,MAAM;AACd,cAAM,IAAI,uBAAuB,gCAAgC;MACrE;AAEA,wBAAkB,MAAM;AACxB,2BAAqB,MAAM;IAC/B;AAUA,aAAS,oBAAoB,mBAA2B,uBAA8B;AAClF,UAAI,uBAAuB;AACvB,eAAO;MACX;AAGA,YAAMC,SAAO,QAAQ,MAAM;AAC3B,aAAOA,OAAK,QAAQ,mBAAmB,MAAM,MAAM,IAAI;IAC3D;AAcA,aAAS,qBAAqB,YAAoB,cAAuB,WAAkB;AACvF,UAAI,CAAC,gBAAgB,CAAC,WAAW;AAC7B,eAAO;MACX;AAEA,aAAO,oBAAoB,SAAS;EACtC,YAAY;;;EAGZ,UAAU;IACZ;AAKA,mBAAe,eACX,QACA,mBACA,eAAsB;AAEtB,YAAM,yBAAyB,oBAAoB,mBAAmB,aAAa;AAEnF,UAAI;AACJ,UAAI;AAEA,YAAI;AACJ,YAAI,OAAO,IAAI,QAAQ;AACnB,0BAAgB,OAAO,IAAI;QAC/B,WAAW,OAAO,IAAI,YAAY;AAC9B,0BAAgB,OAAM,GAAA,kBAAA,mBAAkB,OAAO,IAAI,YAAY,iBAAiB;QACpF,OAAO;AACH,gBAAM,IAAI,uBAAuB,uDAAuD,KAAK;QACjG;AAGA,YAAI;AACJ,YAAI,OAAO,IAAI,OAAO;AAClB,cAAI;AACA,2BAAe,OAAM,GAAA,iBAAA,cAAa,OAAO,IAAI,OAAO,sBAAsB;UAC9E,SAAS,OAAO;AACZ,kBAAM,IAAI,uBACN,gCAAgC,OAAO,IAAI,KAAK,MAAM,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,IAC5G,KAAK;UAEb;QACJ;AAEA,oBAAY,qBAAqB,eAAe,cAAc,OAAO,IAAI,KAAK;MAClF,SAAS,OAAO;AACZ,YAAI,iBAAiB,wBAAwB;AACzC,gBAAM;QACV;AACA,cAAM,IAAI,uBACN,iCAAiC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,IACvF,KAAK;MAEb;AAEA,UAAI;AACJ,UAAI,OAAO,OAAO,SAAS,MAAM;AAC7B,YAAI;AAEA,cAAI;AACJ,cAAI,OAAO,OAAO,QAAQ;AACtB,+BAAmB,OAAO,OAAO;UACrC,WAAW,OAAO,OAAO,YAAY;AACjC,+BAAmB,OAAM,GAAA,kBAAA,mBAAkB,OAAO,OAAO,YAAY,iBAAiB;UAC1F,OAAO;AACH,kBAAM,IAAI,uBAAuB,uDAAuD,QAAQ;UACpG;AAGA,cAAI;AACJ,cAAI,OAAO,OAAO,OAAO;AACrB,gBAAI;AACA,6BAAe,OAAM,GAAA,iBAAA,cAAa,OAAO,OAAO,OAAO,sBAAsB;YACjF,SAAS,OAAO;AACZ,oBAAM,IAAI,uBACN,mCAAmC,OAAO,OAAO,KAAK,MAAM,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,IAClH,QAAQ;YAEhB;UACJ;AAEA,yBAAe,qBAAqB,kBAAkB,cAAc,OAAO,OAAO,KAAK;QAC3F,SAAS,OAAO;AACZ,cAAI,iBAAiB,wBAAwB;AACzC,kBAAM;UACV;AACA,gBAAM,IAAI,uBACN,oCAAoC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,IAC1F,QAAQ;QAEhB;MACJ;AAEA,aAAO,EAAE,WAAW,aAAY;IACpC;AAKA,mBAAe,eAAe,QAAwB,mBAAyB;AAC3E,UAAI;AACA,YAAI,OAAO,MAAM,OAAO;AACpB,iBAAO,OAAO,MAAM;QACxB;AAEA,YAAI,OAAO,MAAM,MAAM;AACnB,eAAI,GAAA,QAAA,aAAY,OAAO,MAAM,IAAI,GAAG;AAChC,kBAAM,WAAU,GAAA,aAAA,gBAAe,OAAO,MAAM,KAAK,MAAM,iBAAiB;AACxE,kBAAM,SAAS,OAAM,GAAA,aAAA,aAAY,SAAS;cACtC,WAAW,OAAO,MAAM,KAAK;aAChC;AACD,mBAAO,OAAO;UAClB;AAEA,cAAI,MAAM,QAAQ,OAAO,MAAM,IAAI,GAAG;AAClC,mBAAO,OAAO,MAAM;UACxB;AAEA,gBAAM,IAAI,uBAAuB,gCAAgC,OAAO;QAC5E;AAEA,cAAM,IAAI,uBAAuB,4CAA4C,OAAO;MACxF,SAAS,OAAO;AACZ,YAAI,iBAAiB,wBAAwB;AACzC,gBAAM;QACV;AACA,cAAM,IAAI,uBACN,yBAAyB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,IAC/E,OAAO;MAEf;IACJ;AAKA,aAAS,aAAa,OAAqB,QAAwB,WAAiB;AAEhF,YAAM,QAAQ,OAAO,MAAM,SAAS,MAAM;AAC1C,UAAI,SAAS,MAAM,MAAM,GAAG,KAAK;AAGjC,UAAI,OAAO,MAAM,cAAc,OAAO,MAAM,WAAW,SAAS,GAAG;AAC/D,cAAM,cAAc,0BAA0B,OAAO,MAAM,UAAU;AACrE,iBAAS,OAAO,IAAI,WAAS,EAAE,GAAG,aAAa,GAAG,KAAI,EAAG;MAC7D;AAGA,UAAI,OAAO,SAAS,GAAG;AACnB,cAAM,gBAAe,GAAA,WAAA,kBAAiB,SAAS;AAC/C,cAAM,YAAY,OAAO,CAAC;AAC1B,cAAM,cAAc,aAAa,OAAO,OAAK,EAAE,KAAK,UAAU;AAC9D,YAAI,YAAY,SAAS,GAAG;AACxB,gBAAM,IAAI,uBACN,kCAAkC,YAAY,KAAK,IAAI,CAAC,IACxD,OAAO;QAEf;MACJ;AAEA,aAAO;IACX;AAMA,mBAAe,iBACX,QACA,OACA,SACA,SAA+B;AAE/B,UAAI,eAAe;AAGnB,UAAI;AACJ,U
AAI,OAAO,QAAQ;AACf,YAAI;AACA,yBAAe,OAAM,GAAA,kBAAA,eAAc,cAAc,OAAO,QAAQ;YAC5D,WAAW,QAAQ;YACnB,gBAAgB,QAAQ;YACxB,YAAY,CAAC,aAAY;AACrB,sBAAQ,aAAa;gBACjB,OAAO;gBACP,YAAY,SAAS;gBACrB,gBAAgB,SAAS;gBACzB,aAAa;gBACb,YAAY,KAAK,MAAO,SAAS,YAAY,SAAS,QAAS,GAAG;eACrE;YACL;YACA,aAAa,QAAQ;WACxB;AAED,yBAAe,aAAa;AAE5B,WAAA,GAAA,SAAA,WAAS,EAAG,KACR,SAAA,YAAY,UACZ,WAAW,aAAa,MAAM,aAAa,IAAI,aAAa,MAAM,UAAU,kBACxE,aAAa,MAAM,aAAa,cAAc,aAAa,MAAM,eAAe,KAAK;AAG7F,cAAI,aAAa,WAAW,GAAG;AAC3B,aAAA,GAAA,SAAA,WAAS,EAAG,KAAK,SAAA,YAAY,UAAU,yDAAyD;UACpG;QACJ,SAAS,OAAO;AACZ,cAAI,iBAAiB,wBAAwB;AACzC,kBAAM;UACV;AACA,gBAAM,IAAI,uBACN,6BAA6B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,IACnF,QAAQ;QAEhB;MACJ;AAGA,YAAM,YAAY,OAAO,IAAI,aAAa;AAE1C,UAAI,YAAY,GAAG;AAEf,eAAO,iBAAiB,QAAQ,cAAc,SAAS,SAAS,YAAY;MAChF;AAGA,aAAO,oBAAoB,QAAQ,cAAc,SAAS,SAAS,YAAY;IACnF;AAKA,mBAAe,oBACX,QACA,cACA,SACA,SACA,cAA2B;AAE3B,YAAM,gBAAgB,OAAO,IAAI,YAAY,WAAA;AAC7C,YAAM,YAAY,OAAO,IAAI,aAAa,WAAA;AAE1C,YAAM,kBAAmC;QACrC,WAAW,QAAQ;QACnB,gBAAgB;QAChB,YAAY;QACZ,cAAc;QACd,gBAAgB;QAChB,gBAAgB,QAAQ;QACxB,YAAY,QAAQ;QACpB,SAAS,OAAO;QAChB;QACA,aAAa,QAAQ;;AAGzB,YAAM,YAAW,GAAA,aAAA,gBAAe,eAAe;AAE/C,YAAM,mBAAmB,OAAO,MAAM,aAChC,0BAA0B,OAAO,MAAM,UAAU,IACjD;AAEN,YAAM,OAAM,GAAA,aAAA,oBAAmB;QAC3B,WAAW,QAAQ;QACnB,cAAc,OAAO,OAAO;QAC5B,OAAO,OAAO,IAAI;QAClB,gBAAgB;QAChB,GAAI,OAAO,OAAO,SAAS,QAAQ;UAC/B,gBAAgB,QAAQ;UACxB,gBAAgB,OAAO,OAAO;UAC9B,eAAe,OAAO,OAAO;UAC7B,oBAAoB;;OAE3B;AAED,YAAM,YAAW,GAAA,aAAA,sBACb,cACA,QAAQ,WACR,OAAO,IAAI,UAAU,CAAA,CAAE;AAG3B,UAAI;AACA,cAAM,SAAS,MAAM,SAAS,QAAQ,KAAK,QAAQ;AACnD,eAAO,EAAE,GAAG,QAAQ,aAAY;MACpC,SAAS,OAAO;AACZ,cAAM,IAAI,uBACN,8BAA8B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,IACpF,KAAK;MAEb;IACJ;AAKA,aAAS,iBAAiB,OAAqB,WAAiB;AAC5D,YAAM,UAA0B,CAAA;AAChC,eAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,WAAW;AAC9C,gBAAQ,KAAK,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC;MAC9C;AACA,aAAO;IACX;AAUA,aAAS,wBACL,eACA,MAA6B;AAE7B,UAAI,CAAC,iBAAiB,OAAO,kBAAkB,UAAU;AACrD,eAAO;MACX;AACA,YAAM,eAAc,GAAA,kBAAA,qBAAoB,eAAe,MAAM;QACzD,QAAQ;QACR,sBAAsB;QACtB,0BAA0B;OAC7B;AACD,aAAO,eAAe;IAC1B;AAWA,mBAAe,iBACX,QACA,cACA,SACA,SACA,cAA2B;AAE3B,YAAM,YAAY,OAAO,IAAI,aAAa;AAC1C,YAAM,gBAAgB,OAAO,IAAI,YAAY,WAAA;AAC7C,YAAM,YAAY,OAAO,IAAI,aAAa,WAAA;AAC1C,YAAM,eAAe,OAAO,IAAI,UAAU,CAAA;AAC1C,YAAM,aAAa,aAAa,WAAW;AAG3C,YAAM,UAAU,iBAAiB,cAAc,SAAS;AACxD,YAAM,eAAe,QAAQ;AAG7B,UAAI;AACJ,UAAI,QAAQ,kBAAkB,eAAe,GAAG;AAC5C,kBAAU,QAAQ,eAAe,cAAc,GAAG,OAAO,IAAI,KAAK,YAAY,WAAW;MAC7F;AAGA,cAAQ,aAAa;QACjB,OAAO;QACP,YAAY;QACZ,gBAAgB;QAChB,aAAa;QACb,YAAY;QACZ,SAAS,cAAc,YAAY,aAAa,aAAa,MAAM,sBAAsB,SAAS;OACrG;AAGD,YAAM,YAAY,KAAK,IAAG;AAC1B,YAAM,aAAgC,CAAA;AACtC,UAAI,mBAAmB;AACvB,UAAI,gBAAgB;AAGpB,YAAM,eAAe,OAAO,OAAqB,eAAkD;AAE/F,YAAI,QAAQ,cAAa,GAAI;AACzB,iBAAO,MAAM,IAAI,WAAS;YACtB;YACA,QAAQ,aAAa,CAAA,IAAK,kBAAkB,YAAY;YACxD,SAAS;YACT,OAAO;YACT;QACN;AAGA,YAAI;AACJ,YAAI,QAAQ,gBAAgB;AACxB,sBAAY,QAAQ,eAAe,gBAC/B,oBAAoB,aAAa,CAAC,IAAI,YAAY,KAAK,MAAM,MAAM,WACnE,OAAO;QAEf;AAEA,YAAI;AAEA,gBAAM,cAAc,iBAAiB,QAAQ,WAAW,OAAO,YAAY;AAG3E,gBAAM,QAAQ,wBAAwB,OAAO,IAAI,OAAO,MAAM,CAAC,CAAC;AAGhE,gBAAM,WAAW,MAAM,QAAQ,KAAK;YAChC,QAAQ,UAAU,aAAa,EAAE,MAAK,CAAE;YACxC,qBAAqB,WAAW,YAAY,YAAY;WAC3D;AAED,cAAI,CAAC,SAAS,WAAW,CAAC,SAAS,UAAU;AAEzC,gBAAI,QAAQ,kBAAkB,WAAW;AACrC,sBAAQ,eAAe,cAAc,WAAW,UAAU,QAAW,SAAS,SAAS,sBAAsB;YACjH;AACA,mBAAO,MAAM,IAAI,WAAS;cACtB;cACA,QAAQ,aAAa,CAAA,IAAK,kBAAkB,YAAY;cACxD,SAAS;cACT,OAAO,SAAS,SAAS;cACzB,aAAa,SAAS;cACtB,WAAW,SAAS;cACtB;UACN;AAGA,gBAAM,eAAe,mBACjB,SAAS,UACT,OACA,cACA,YACA,SAAS,SAAS;AAItB,cAAI,QAAQ,kBAAkB,WAAW;AACrC,kBAAM,eAAe,aAAa,OAAO,OAAK,EAAE,OAAO,EAAE;AACzD,oBAAQ,eAAe,cACnB,WACA,aACA,GAAG,YAAY,IAAI,MAAM,MAAM,oBAC/B,QACA,KAAK,UAAU
,aAAa,IAAI,OAAK,EAAE,MAAM,CAAC,CAAC;UAEvD;AAEA,iBAAO;QACX,SAAS,OAAO;AACZ,gBAAM,WAAW,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAGtE,cAAI,SAAS,SAAS,WAAW,GAAG;AAChC,gBAAI;AACA,oBAAM,cAAc,iBAAiB,QAAQ,WAAW,OAAO,YAAY;AAC3E,oBAAM,QAAQ,wBAAwB,OAAO,IAAI,OAAO,MAAM,CAAC,CAAC;AAEhE,oBAAM,WAAW,MAAM,QAAQ,KAAK;gBAChC,QAAQ,UAAU,aAAa,EAAE,MAAK,CAAE;gBACxC,qBAAqB,YAAY,GAAG,YAAY,YAAY;eAC/D;AAED,kBAAI,SAAS,WAAW,SAAS,UAAU;AACvC,sBAAM,eAAe,mBACjB,SAAS,UACT,OACA,cACA,YACA,SAAS,SAAS;AAGtB,oBAAI,QAAQ,kBAAkB,WAAW;AACrC,wBAAM,eAAe,aAAa,OAAO,OAAK,EAAE,OAAO,EAAE;AACzD,0BAAQ,eAAe,cACnB,WACA,aACA,GAAG,YAAY,IAAI,MAAM,MAAM,kCAC/B,QACA,KAAK,UAAU,aAAa,IAAI,OAAK,EAAE,MAAM,CAAC,CAAC;gBAEvD;AAEA,uBAAO;cACX;YACJ,SAAS,YAAY;YAErB;UACJ;AAGA,cAAI,QAAQ,kBAAkB,WAAW;AACrC,oBAAQ,eAAe,cAAc,WAAW,UAAU,QAAW,QAAQ;UACjF;AACA,iBAAO,MAAM,IAAI,WAAS;YACtB;YACA,QAAQ,aAAa,CAAA,IAAK,kBAAkB,YAAY;YACxD,SAAS;YACT,OAAO;YACT;QACN;MACJ;AAGA,YAAM,gBAA8C,CAAA;AACpD,YAAM,gBAAiC,CAAA;AAEvC,eAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACrC,cAAM,QAAQ,QAAQ,CAAC;AAGvB,YAAI,cAAc,UAAU,eAAe;AACvC,gBAAM,QAAQ,KAAK,aAAa;QACpC;AAEA,cAAM,eAAe,aAAa,OAAO,CAAC,EAAE,KAAK,aAAU;AACvD,qBAAW,KAAK,GAAG,OAAO;AAG1B,gBAAM,cAAc,QAAQ,KAAK,OAAK,CAAC,EAAE,OAAO;AAChD,cAAI,aAAa;AACb;UACJ,OAAO;AACH;UACJ;AAEA,kBAAQ,aAAa;YACjB,OAAO;YACP,YAAY;YACZ,gBAAgB;YAChB,aAAa;YACb,YAAY,KAAK,OAAQ,mBAAmB,iBAAiB,eAAgB,EAAE;YAC/E,SAAS,oBAAoB,mBAAmB,aAAa,IAAI,YAAY;WAChF;AAED,iBAAO;QACX,CAAC;AAED,sBAAc,KAAK,YAAY;AAG/B,cAAM,gBAAgB,aAAa,KAAK,MAAK;AACzC,gBAAM,QAAQ,cAAc,QAAQ,aAAa;AACjD,cAAI,QAAQ,IAAI;AACZ,0BAAc,OAAO,OAAO,CAAC;UACjC;QACJ,CAAC;AACD,sBAAc,KAAK,aAAa;MACpC;AAGA,YAAM,QAAQ,IAAI,aAAa;AAE/B,YAAM,iBAAiB,KAAK,IAAG,IAAK;AAGpC,YAAM,iBAAiB,WAAW,OAAO,OAAK,EAAE,OAAO,EAAE;AACzD,YAAM,aAAa,WAAW,OAAO,OAAK,CAAC,EAAE,OAAO,EAAE;AAGtD,cAAQ,aAAa;QACjB,OAAO;QACP,YAAY,aAAa;QACzB,gBAAgB;QAChB,aAAa;QACb,YAAY;QACZ,SAAS;OACZ;AAGD,YAAM,kBAAkB,KAAK,IAAG;AAChC,YAAM,mBAAmB,OAAO,MAAM,aAChC,0BAA0B,OAAO,MAAM,UAAU,IACjD;AAEN,YAAM,eAAe,MAAM,mBACvB,YACA,QACA,SACA,SACA,kBACA,OAAO;AAGX,YAAM,oBAAoB,KAAK,IAAG,IAAK;AACvC,YAAM,cAAc,KAAK,IAAG,IAAK;AAGjC,YAAM,iBAAiB;QACnB,YAAY,aAAa;QACzB;QACA;QACA;QACA;QACA,gBAAgB;;AAIpB,UAAI,QAAQ,kBAAkB,SAAS;AACnC,gBAAQ,eAAe,cACnB,SACA,cAAc,cAAc,IAAI,aAAa,MAAM,uBAAuB,YAAY,YACtF,cAAc;MAEtB;AAGA,cAAQ,aAAa;QACjB,OAAO;QACP,YAAY,aAAa;QACzB,gBAAgB;QAChB,aAAa;QACb,YAAY;QACZ,SAAS,aAAa,cAAc,eAAe,UAAU,YAAY,YAAY;OACxF;AAGD,YAAM,aAAa,WAAW,IAAI,QAAM;QACpC,YAAY,QAAQ,WAAW,QAAQ,CAAC,CAAC;QACzC,SAAS,EAAE;QACX,QAAQ;QACR,OAAO,EAAE;QACT,iBAAiB;;QACnB;AAEF,YAAM,iBAAiB,eAAe;AACtC,YAAM,SAAkC;QACpC,SAAS;QACT,QAAQ;QACR;QACA,aAAa;UACT,YAAY,WAAW;UACvB,aAAa,eAAe,IAAI;UAChC,aAAa;UACb,cAAc;UACd,cAAc,OAAO,OAAO,SAAS;;QAEzC;QACA;QACA;;AAGJ,UAAI,CAAC,gBAAgB;AACjB,cAAM,gBAAgB,WAAW,OAAO,OAAK,CAAC,EAAE,OAAO;AACvD,YAAI,cAAc,WAAW,GAAG;AAC5B,iBAAO,QAAQ,kBAAkB,cAAc,CAAC,EAAE,SAAS,eAAe;QAC9E,OAAO;AACH,gBAAM,eAAe,CAAC,GAAG,IAAI,IAAI,cAAc,IAAI,OAAK,EAAE,SAAS,eAAe,CAAC,CAAC;AACpF,cAAI,aAAa,WAAW,GAAG;AAC3B,mBAAO,QAAQ,GAAG,cAAc,MAAM,kBAAkB,aAAa,CAAC,CAAC;UAC3E,OAAO;AACH,mBAAO,QAAQ,GAAG,cAAc,MAAM,sBAAsB,aAAa,MAAM;UACnF;QACJ;MACJ;AAEA,aAAO;IACX;AAMA,aAAS,iBAAiB,gBAAwB,OAAqB,cAAsB;AAEzF,YAAM,YAAY,KAAK,UAAU,OAAO,MAAM,CAAC;AAC/C,UAAI,SAAS,eAAe,QAAQ,kBAAkB,SAAS;AAI/D,UAAI,MAAM,SAAS,GAAG;AAClB,cAAM,YAAY,MAAM,CAAC;AACzB,iBAAS,OAAO,QAAQ,kBAAkB,CAAC,OAAO,YAAW;AAEzD,cAAI,CAAC,SAAS,WAAW,gBAAgB,SAAS,iBAAiB,eAAe,EAAE,SAAS,OAAO,GAAG;AACnG,mBAAO;UACX;AACA,iBAAO,WAAW,YAAY,UAAU,OAAO,IAAI;QACvD,CAAC;MACL;AAGA,UAAI,aAAa,SAAS,GAAG;AACzB,kBAAU;;2BAAgC,MAAM,MAAM,0EAA0E,aAAa,KAAK,IAAI,CAAC;MAC3J;AAEA,aAAO;IACX;AAKA,aAAS,kBAAkB,QAAgB;AACvC,YAAM,SAAkC,CAAA;AACxC,iBAAW,SAAS,QAAQ;AACxB,eAAO,KAAK,IAAI;
MACpB;AACA,aAAO;IACX;AAKA,aAAS,qBAAqB,WAAmB,YAAoB,cAAoB;AACrF,aAAO,IAAI,QAAQ,CAAC,GAAG,WAAU;AAC7B,mBAAW,MAAK;AACZ,iBAAO,IAAI,MAAM,SAAS,aAAa,CAAC,IAAI,YAAY,oBAAoB,SAAS,IAAI,CAAC;QAC9F,GAAG,SAAS;MAChB,CAAC;IACL;AAWA,aAAS,mBACL,UACA,OACA,cACA,YACA,WAAkB;AAGlB,UAAI,YAAY;AACZ,eAAO,MAAM,IAAI,WAAS;UACtB;UACA,QAAQ,CAAA;UACR,SAAS;UACT,SAAS;UACT,aAAa;UACb;UACF;MACN;AAEA,UAAI;AAEA,cAAM,YAAY,SAAS,MAAM,aAAa;AAC9C,YAAI,CAAC,WAAW;AACZ,gBAAM,IAAI,MAAM,wCAAwC;QAC5D;AAEA,cAAM,SAAS,KAAK,MAAM,UAAU,CAAC,CAAC;AAEtC,YAAI,CAAC,MAAM,QAAQ,MAAM,GAAG;AACxB,gBAAM,IAAI,MAAM,iCAAiC;QACrD;AAGA,YAAI,OAAO,WAAW,MAAM,QAAQ;AAChC,gBAAM,WAAW,eAAe,OAAO,MAAM,0BAA0B,MAAM,MAAM;AACnF,iBAAO,MAAM,IAAI,WAAS;YACtB;YACA,QAAQ,kBAAkB,YAAY;YACtC,SAAS;YACT,OAAO;YACP,aAAa;YACb;YACF;QACN;AAGA,eAAO,MAAM,IAAI,CAAC,MAAM,UAAS;AAC7B,gBAAM,YAAY,OAAO,KAAK;AAE9B,cAAI,OAAO,cAAc,YAAY,cAAc,MAAM;AACrD,mBAAO;cACH;cACA,QAAQ,kBAAkB,YAAY;cACtC,SAAS;cACT,OAAO,mBAAmB,KAAK;cAC/B,aAAa;cACb;;UAER;AAGA,gBAAM,SAAkC,CAAA;AACxC,qBAAW,SAAS,cAAc;AAC9B,mBAAO,KAAK,IAAI,SAAS,YAAY,UAAU,KAAK,IAAI;UAC5D;AAEA,iBAAO;YACH;YACA;YACA,SAAS;YACT,aAAa;YACb;;QAER,CAAC;MACL,SAAS,OAAO;AACZ,cAAM,WAAW,mCAAmC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAC1G,eAAO,MAAM,IAAI,WAAS;UACtB;UACA,QAAQ,kBAAkB,YAAY;UACtC,SAAS;UACT,OAAO;UACP,aAAa;UACb;UACF;MACN;IACJ;AAKA,mBAAe,mBACX,SACA,QACA,SACA,SACA,kBACA,eAAsB;AAEtB,YAAM,eAAe,OAAO,IAAI,UAAU,CAAA;AAC1C,YAAM,kBAAkB,QAAQ,OAAO,OAAK,EAAE,OAAO,EAAE;AACvD,YAAM,cAAc,QAAQ,OAAO,OAAK,CAAC,EAAE,OAAO,EAAE;AAEpD,YAAM,UAA4B;QAC9B,YAAY,QAAQ;QACpB;QACA;QACA;;AAIJ,UAAI,OAAO,OAAO,SAAS,QAAQ,QAAQ,cAAc;AACrD,eAAO,MAAM,gBACT,SACA,SACA,QAAQ,cACR,OAAO,OAAO,QACd,OAAO,OAAO,OACd,kBACA,SACA,aAAa;MAErB;AAGA,YAAM,kBAAkB,cAAc,SAAS,SAAS,OAAO,OAAO,IAAI;AAE1E,aAAO;QACH;QACA;QACA;;IAER;AAKA,mBAAe,gBACX,SACA,SACA,cACA,cACA,aACA,kBACA,SACA,eAAsB;AAEtB,YAAM,aAAa,CAAC,gBAAgB,aAAa,WAAW;AAG5D,UAAI;AACJ,UAAI,SAAS,gBAAgB;AACzB,0BAAkB,QAAQ,eAAe,gBACrC,mCACA,aAAa;MAErB;AAGA,YAAM,oBAAoB,QAAQ,OAAO,OAAK,EAAE,OAAO;AACvD,YAAM,mBAAmB,kBAAkB,IAAI,OAAK,EAAE,YAAY,SAAY,EAAE,UAAU,EAAE,MAAM;AAClG,YAAM,gBAAgB,KAAK,UAAU,kBAAkB,MAAM,CAAC;AAE9D,UAAI,SAAS,aACR,QAAQ,oBAAoB,aAAa,EACzC,QAAQ,kBAAkB,OAAO,QAAQ,UAAU,CAAC,EACpD,QAAQ,0BAA0B,OAAO,QAAQ,eAAe,CAAC,EACjE,QAAQ,0BAA0B,OAAO,QAAQ,WAAW,CAAC;AAGlE,UAAI,kBAAkB;AAClB,mBAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,gBAAgB,GAAG;AACzD,mBAAS,OAAO,QAAQ,IAAI,OAAO,SAAS,GAAG,UAAU,GAAG,GAAG,KAAK;QACxE;MACJ;AAGA,UAAI,CAAC,YAAY;AACb,kBAAU;;iCAAsC,aAAc,KAAK,IAAI,CAAC;MAC5E;AAGA,YAAM,WAAW,MAAM,SAAS,UAAU,QAAQ,EAAE,OAAO,YAAW,CAAE;AAExE,UAAI,CAAC,UAAU,WAAW,CAAC,SAAS,UAAU;AAC1C,YAAI,SAAS,kBAAkB,iBAAiB;AAC5C,kBAAQ,eAAe,cACnB,iBACA,UACA,QACA,UAAU,SAAS,eAAe;QAE1C;AACA,cAAM,IAAI,uBACN,qBAAqB,UAAU,SAAS,eAAe,IACvD,QAAQ;MAEhB;AAGA,UAAI,YAAY;AACZ,YAAI,SAAS,kBAAkB,iBAAiB;AAC5C,kBAAQ,eAAe,cACnB,iBACA,aACA,SAAS,QAAQ;QAEzB;AACA,eAAO;UACH;UACA,iBAAiB,SAAS;UAC1B,SAAS,EAAE,GAAG,SAAS,cAAc,CAAA,EAAE;;MAE/C;AAGA,UAAI;AACA,cAAM,YAAY,SAAS,SAAS,MAAM,aAAa;AACvD,YAAI,CAAC,WAAW;AACZ,gBAAM,IAAI,MAAM,uCAAuC;QAC3D;AACA,cAAM,SAAS,KAAK,MAAM,UAAU,CAAC,CAAC;AACtC,cAAM,kBAAkB,KAAK,UAAU,QAAQ,MAAM,CAAC;AAEtD,YAAI,SAAS,kBAAkB,iBAAiB;AAC5C,kBAAQ,eAAe,cACnB,iBACA,aACA,iBACA,QACA,KAAK,UAAU,MAAM,CAAC;QAE9B;AAEA,eAAO;UACH;UACA;UACA,SAAS,EAAE,GAAG,SAAS,cAAc,aAAa;;MAE1D,SAAS,OAAO;AACZ,YAAI,SAAS,kBAAkB,iBAAiB;AAC5C,kBAAQ,eAAe,cACnB,iBACA,UACA,QACA,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;QAE9D;AACA,cAAM,IAAI,uBACN,uCAAuC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,IAC7F,QAAQ;MAEhB;IACJ;AAKA,aAAS,cACL,SACA,SACA,YAAkB;AAElB,cAAQ,YAAY;QAChB,KAAK;AACD,iBAAO,cAAc,OAAO;QAChC,KAAK;AACD,iBAAO,aAAa,OAAO;QAC/B,KAAK;AACD,iBAAO,YAAY,OAAO;QAC9B,KAAK;AACD,iBAAO,aAAa
,OAAO;QAC/B;AACI,iBAAO,aAAa,SAAS,OAAO;MAC5C;IACJ;AAGA,aAAS,aAAa,SAA4B,SAAyB;AACvE,YAAM,QAAkB,CAAC,eAAe,QAAQ,UAAU,WAAW,EAAE;AACvE,UAAI,QAAQ,cAAc,GAAG;AACzB,cAAM,KAAK,cAAc,QAAQ,WAAW,mBAAmB,EAAE;MACrE;AAEA,cAAQ,QAAQ,CAAC,GAAG,MAAK;AACrB,cAAM,KAAK,YAAY,IAAI,CAAC,EAAE;AAC9B,cAAM,WAAW,OAAO,QAAQ,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,SAAS,GAAG,EAAE,CAAC,EAAE,EAAE,KAAK,IAAI;AAC5F,cAAM,KAAK,cAAc,QAAQ,EAAE;AACnC,YAAI,EAAE,SAAS;AACX,gBAAM,YAAY,OAAO,QAAQ,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,YAAY,CAAC,CAAC,EAAE,EAAE,KAAK,IAAI;AAC9F,gBAAM,KAAK,eAAe,SAAS,EAAE;QACzC,OAAO;AACH,gBAAM,KAAK,cAAc,EAAE,SAAS,eAAe,EAAE;QACzD;AACA,cAAM,KAAK,EAAE;MACjB,CAAC;AAED,YAAM,KAAK,OAAO,cAAc,QAAQ,eAAe,eAAe,QAAQ,WAAW,SAAS;AAClG,aAAO,MAAM,KAAK,IAAI;IAC1B;AAEA,aAAS,cAAc,SAA0B;AAC7C,UAAI,QAAQ,WAAW;AAAG,eAAO;AAEjC,YAAM,SAAS,CAAC,GAAG,IAAI,IAAI,QAAQ,QAAQ,OAAK,OAAO,KAAK,EAAE,IAAI,CAAC,CAAC,CAAC;AACrE,YAAM,UAAU,CAAC,GAAG,IAAI,IAAI,QAAQ,QAAQ,OAAK,OAAO,KAAK,EAAE,MAAM,CAAC,CAAC,CAAC;AACxE,YAAM,UAAU,CAAC,KAAK,GAAG,OAAO,IAAI,OAAK,QAAQ,CAAC,EAAE,GAAG,GAAG,QAAQ,IAAI,OAAK,SAAS,CAAC,EAAE,GAAG,QAAQ;AAElG,YAAM,QAAQ;QACV,OAAO,QAAQ,KAAK,KAAK,IAAI;QAC7B,OAAO,QAAQ,IAAI,MAAM,KAAK,EAAE,KAAK,KAAK,IAAI;;AAGlD,cAAQ,QAAQ,CAAC,GAAG,MAAK;AACrB,cAAM,QAAQ;UACV,OAAO,IAAI,CAAC;UACZ,GAAG,OAAO,IAAI,OAAK,SAAS,EAAE,KAAK,CAAC,KAAK,IAAI,EAAE,CAAC;UAChD,GAAG,QAAQ,IAAI,OAAK,YAAY,EAAE,OAAO,CAAC,CAAC,CAAC;UAC5C,EAAE,UAAU,OAAO;;AAEvB,cAAM,KAAK,OAAO,MAAM,KAAK,KAAK,IAAI,IAAI;MAC9C,CAAC;AAED,aAAO,MAAM,KAAK,IAAI;IAC1B;AAEA,aAAS,aAAa,SAA0B;AAC5C,aAAO,KAAK,UAAU,QAAQ,IAAI,QAAM;QACpC,OAAO,EAAE;QACT,QAAQ,EAAE;QACV,SAAS,EAAE;QACX,GAAI,EAAE,SAAS,EAAE,OAAO,EAAE,MAAK;QACjC,GAAG,MAAM,CAAC;IAChB;AAEA,aAAS,YAAY,SAA0B;AAC3C,UAAI,QAAQ,WAAW;AAAG,eAAO;AAEjC,YAAM,SAAS,CAAC,GAAG,IAAI,IAAI,QAAQ,QAAQ,OAAK,OAAO,KAAK,EAAE,IAAI,CAAC,CAAC,CAAC;AACrE,YAAM,UAAU,CAAC,GAAG,IAAI,IAAI,QAAQ,QAAQ,OAAK,OAAO,KAAK,EAAE,MAAM,CAAC,CAAC,CAAC;AACxE,YAAM,UAAU,CAAC,GAAG,QAAQ,GAAG,QAAQ,IAAI,OAAK,OAAO,CAAC,EAAE,GAAG,SAAS;AAEtE,YAAM,QAAQ,CAAC,QAAQ,KAAK,GAAG,CAAC;AAChC,iBAAW,KAAK,SAAS;AACrB,cAAM,SAAS;UACX,GAAG,OAAO,IAAI,OAAK,UAAU,EAAE,KAAK,CAAC,KAAK,EAAE,CAAC;UAC7C,GAAG,QAAQ,IAAI,OAAK,UAAU,YAAY,EAAE,OAAO,CAAC,CAAC,CAAC,CAAC;UACvD,EAAE,UAAU,SAAS;;AAEzB,cAAM,KAAK,OAAO,KAAK,GAAG,CAAC;MAC/B;AACA,aAAO,MAAM,KAAK,IAAI;IAC1B;AAEA,aAAS,aAAa,SAA0B;AAC5C,YAAM,oBAAoB,QAAQ,OAAO,OAAK,EAAE,OAAO;AACvD,UAAI,kBAAkB,WAAW,GAAG;AAChC,eAAO;MACX;AAEA,UAAI,kBAAkB,WAAW,GAAG;AAChC,cAAM,IAAI,kBAAkB,CAAC;AAC7B,eAAO,EAAE,WAAW,KAAK,UAAU,EAAE,QAAQ,MAAM,CAAC;MACxD;AAEA,aAAO,kBACF,IAAI,CAAC,GAAG,MAAK;AACV,cAAM,OAAO,EAAE,WAAW,KAAK,UAAU,EAAE,QAAQ,MAAM,CAAC;AAC1D,eAAO,YAAY,IAAI,CAAC;EAAS,IAAI;MACzC,CAAC,EACA,KAAK,MAAM;IACpB;AAEA,aAAS,YAAY,OAAc;AAC/B,UAAI,UAAU,QAAQ,UAAU;AAAW,eAAO;AAClD,UAAI,OAAO,UAAU;AAAU,eAAO,MAAM,SAAS,KAAK,MAAM,UAAU,GAAG,EAAE,IAAI,QAAQ;AAC3F,UAAI,OAAO,UAAU;AAAW,eAAO,QAAQ,SAAS;AACxD,UAAI,OAAO,UAAU;AAAU,eAAO,OAAO,KAAK;AAClD,UAAI,MAAM,QAAQ,KAAK;AAAG,eAAO,IAAI,MAAM,MAAM;AACjD,UAAI,OAAO,UAAU;AAAU,eAAO,KAAK,UAAU,KAAK;AAC1D,aAAO,OAAO,KAAK;IACvB;AAEA,aAAS,SAAS,OAAe,MAAc,IAAE;AAC7C,aAAO,MAAM,UAAU,MAAM,QAAQ,MAAM,UAAU,GAAG,MAAM,CAAC,IAAI;IACvE;AAEA,aAAS,UAAU,OAAa;AAC5B,aAAQ,MAAM,SAAS,GAAG,KAAK,MAAM,SAAS,GAAG,KAAK,MAAM,SAAS,IAAI,IACnE,IAAI,MAAM,QAAQ,MAAM,IAAI,CAAC,MAC7B;IACV;AAKA,aAAS,0BAA0B,YAA+B;AAC9D,YAAM,SAAiC,CAAA;AACvC,iBAAW,SAAS,YAAY;AAC5B,eAAO,MAAM,IAAI,IAAI,MAAM;MAC/B;AACA,aAAO;IACX;AASA,aAAS,kBAAkB,QAAsB;AAC7C,UAAI,CAAC,OAAO,KAAK;AACb,cAAM,IAAI,uBAAuB,+BAA+B;MACpE;AAGA,YAAM,YAAY,CAAC,CAAC,OAAO,IAAI;AAC/B,YAAM,gBAAgB,CAAC,CAAC,OAAO,IAAI;AAEnC,UAAI,CAAC,aAAa,CAAC,eAAe;AAC9B,cAAM,IAAI,uBAAuB,mEAAmE
;MACxG;AACA,UAAI,aAAa,eAAe;AAC5B,cAAM,IAAI,uBAAuB,oEAAoE;MACzG;AAGA,UAAI,OAAO,IAAI,UAAU,UAAa,OAAO,OAAO,IAAI,UAAU,UAAU;AACxE,cAAM,IAAI,uBAAuB,8CAA8C;MACnF;AAGA,UAAI,OAAO,IAAI,WAAW,UAAa,CAAC,MAAM,QAAQ,OAAO,IAAI,MAAM,GAAG;AACtE,cAAM,IAAI,uBAAuB,2DAA2D;MAChG;AAGA,UAAI,OAAO,IAAI,cAAc,QAAW;AACpC,YAAI,OAAO,OAAO,IAAI,cAAc,YAAY,CAAC,OAAO,UAAU,OAAO,IAAI,SAAS,GAAG;AACrF,gBAAM,IAAI,uBAAuB,4DAA4D;QACjG;AACA,YAAI,OAAO,IAAI,YAAY,GAAG;AAC1B,gBAAM,IAAI,uBAAuB,oDAAoD;QACzF;AAEA,YAAI,OAAO,IAAI,YAAY,GAAG;AAC1B,gBAAM,SAAS,OAAO,IAAI,UAAU;AACpC,cAAI,CAAC,OAAO,SAAS,WAAW,GAAG;AAC/B,aAAA,GAAA,SAAA,WAAS,EAAG,KAAK,SAAA,YAAY,UAAU,+GAA+G;UAC1J;QACJ;MACJ;IACJ;AAKA,aAAS,qBAAqB,QAAsB;AAChD,UAAI,CAAC,OAAO,QAAQ;AAChB,cAAM,IAAI,uBAAuB,kCAAkC;MACvE;AAEA,YAAM,mBAAmB,CAAC,QAAQ,SAAS,QAAQ,OAAO,MAAM,MAAM;AACtE,UAAI,CAAC,iBAAiB,SAAS,OAAO,OAAO,IAAI,GAAG;AAChD,cAAM,IAAI,uBACN,4BAA4B,OAAO,OAAO,IAAI,sBAAsB,iBAAiB,KAAK,IAAI,CAAC,EAAE;MAEzG;AAGA,UAAI,OAAO,OAAO,SAAS,MAAM;AAC7B,cAAM,YAAY,CAAC,CAAC,OAAO,OAAO;AAClC,cAAM,gBAAgB,CAAC,CAAC,OAAO,OAAO;AAEtC,YAAI,CAAC,aAAa,CAAC,eAAe;AAC9B,gBAAM,IAAI,uBACN,kGAAkG;QAE1G;AACA,YAAI,aAAa,eAAe;AAC5B,gBAAM,IAAI,uBAAuB,0EAA0E;QAC/G;AAGA,YAAI,OAAO,OAAO,UAAU,UAAa,OAAO,OAAO,OAAO,UAAU,UAAU;AAC9E,gBAAM,IAAI,uBAAuB,iDAAiD;QACtF;AAEA,YAAI,OAAO,OAAO,WAAW,UAAa,CAAC,MAAM,QAAQ,OAAO,OAAO,MAAM,GAAG;AAC5E,gBAAM,IAAI,uBAAuB,8DAA8D;QACnG;MACJ;IACJ;AAKA,aAAS,oBAAoB,QAAsB;AAC/C,UAAI,CAAC,OAAO,OAAO;AACf,cAAM,IAAI,uBAAuB,iCAAiC;MACtE;AAGA,YAAM,WAAW,CAAC,CAAC,OAAO,MAAM;AAChC,YAAM,UAAU,CAAC,CAAC,OAAO,MAAM;AAC/B,YAAM,cAAc,CAAC,CAAC,OAAO,MAAM;AACnC,YAAM,cAAc,CAAC,UAAU,SAAS,WAAW,EAAE,OAAO,OAAO,EAAE;AAErE,UAAI,gBAAgB,GAAG;AACnB,cAAM,IAAI,uBAAuB,uDAAuD;MAC5F;AACA,UAAI,cAAc,GAAG;AACjB,cAAM,IAAI,uBAAuB,2DAA2D;MAChG;AAGA,UAAI,aAAa;AACb,YAAI,EAAC,GAAA,QAAA,kBAAiB,OAAO,MAAM,QAAQ,GAAG;AAC1C,gBAAM,IAAI,uBAAuB,gCAAgC;QACrE;AACA,cAAM,cAAa,GAAA,kBAAA,wBAAuB,OAAO,MAAM,QAAQ;AAC/D,YAAI,CAAC,WAAW,OAAO;AACnB,gBAAM,IAAI,uBACN,mCAAmC,WAAW,OAAO,KAAK,IAAI,CAAC,EAAE;QAEzE;AACA,cAAM,IAAI,uBACN,+HACA,OAAO;MAEf;AAGA,UAAI,OAAO,MAAM,MAAM;AACnB,YAAI,CAAC,MAAM,QAAQ,OAAO,MAAM,IAAI,KAAK,EAAC,GAAA,QAAA,aAAY,OAAO,MAAM,IAAI,GAAG;AACtE,gBAAM,UAAU,OAAO,MAAM;AAC7B,cAAI,QAAQ,QAAQ,QAAQ,SAAS,OAAO;AACxC,kBAAM,IAAI,uBACN,4BAA4B,QAAQ,IAAI,4BAA4B;UAE5E;AACA,gBAAM,IAAI,uBACN,0GAA0G;QAElH;AACA,aAAI,GAAA,QAAA,aAAY,OAAO,MAAM,IAAI,KAAK,CAAC,OAAO,MAAM,KAAK,MAAM;AAC3D,gBAAM,IAAI,uBAAuB,2CAA2C;QAChF;MACJ;AAGA,UAAI,OAAO,MAAM,SAAS,CAAC,MAAM,QAAQ,OAAO,MAAM,KAAK,GAAG;AAC1D,cAAM,IAAI,uBAAuB,gDAAgD;MACrF;AAGA,UAAI,OAAO,MAAM,YAAY;AACzB,YAAI,CAAC,MAAM,QAAQ,OAAO,MAAM,UAAU,GAAG;AACzC,gBAAM,IAAI,uBAAuB,qDAAqD;QAC1F;AACA,mBAAW,SAAS,OAAO,MAAM,YAAY;AACzC,cAAI,CAAC,MAAM,QAAQ,OAAO,MAAM,SAAS,UAAU;AAC/C,kBAAM,IAAI,uBAAuB,0CAA0C;UAC/E;AACA,cAAI,MAAM,UAAU,UAAa,MAAM,UAAU,MAAM;AACnD,kBAAM,IAAI,uBAAuB,cAAc,MAAM,IAAI,uBAAuB;UACpF;QACJ;MACJ;IACJ;AAKA,aAAS,uBAAuB,QAAsB;AAClD,UAAI,CAAC,OAAO,MAAM;AACd,cAAM,IAAI,uBAAuB,gCAAgC;MACrE;AAEA,0BAAoB,MAAM;AAC1B,wBAAkB,MAAM;AACxB,2BAAqB,MAAM;IAC/B;AAKO,mBAAe,kBAAkB,aAAmB;AACvD,YAAMC,QAAO,MAAA,QAAA,QAAA,EAAA,KAAA,MAAA,aAAA,QAAa,SAAS,CAAA,CAAA;AACnC,YAAM,SAASA,MAAK,KAAK,WAAW;AACpC,6BAAuB,MAAM;AAC7B,aAAO;IACX;AAKA,aAAgB,sBAAsB,aAAmB;AAErD,YAAMA,QAAO,QAAQ,SAAS;AAC9B,YAAM,SAASA,MAAK,KAAK,WAAW;AACpC,6BAAuB,MAAM;AAC7B,aAAO;IACX;;;;;;;;;;;AC38CA,QAAA,UAAA;AAAS,WAAA,eAAAC,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAW,EAAA,CAAA;AAAE,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAgB,EAAA,CAAA;AAGtC,QAAA,aAAA;AACI,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WA
AA;IAAwB,EAAA,CAAA;AACxB,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAsB,EAAA,CAAA;AACtB,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAsB,EAAA,CAAA;AAK1B,QAAA,eAAA;AACI,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAW,EAAA,CAAA;AACX,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAmB,EAAA,CAAA;AAIvB,QAAA,aAAA;AACI,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAW,EAAA,CAAA;AACX,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAa,EAAA,CAAA;AAKjB,QAAA,oBAAA;AACI,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAmB,EAAA,CAAA;AAKvB,QAAA,oBAAA;AACI,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,gCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAA4B,EAAA,CAAA;AAC5B,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAoB,EAAA,CAAA;AACpB,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAmB,EAAA,CAAA;AAKvB,QAAA,mBAAA;AACI,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,+BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAA2B,EAAA,CAAA;AAC3B,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,sBAA
A,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAW,EAAA,CAAA;AACX,WAAA,eAAAA,UAAA,cAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAU,EAAA,CAAA;AACV,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAwB,EAAA,CAAA;AACxB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAqB,EAAA,CAAA;AAKzB,QAAA,oBAAA;AACI,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAsB,EAAA,CAAA;AACtB,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,kBAAA;IAAoB,EAAA,CAAA;;;;;;;;;;AC0QxB,IAAAC,SAAA,kBAAA;AAgBA,IAAAA,SAAA,wBAAA;AAOA,IAAAA,SAAA,2BAAA;AAOA,IAAAA,SAAA,sBAAA;AAOA,IAAAA,SAAA,2BAAA;AAOA,IAAAA,SAAA,sBAAA;AAOA,IAAAA,SAAA,iBAAA;AAnSA,QAAA,aAAA;AAMa,IAAAA,SAAA,sBAA2C;MACpD,WAAW,WAAA;MACX,gBAAgB;MAChB,eAAe,WAAA;MACf,cAAc,WAAA;;AAkKL,IAAAA,SAAA,2BAA2D;MACpE,gBAAgB;MAChB,WAAW;;AAsBF,IAAAA,SAAA,gCAAmE;MAC5E,cAAc;MACd,aAAa;MACb,gBAAgB;;AA+BP,IAAAA,SAAA,kBAAgD;MACzD,MAAM;MACN,QAAQ;MACR,KAAK;;AAOT,aAAgB,gBAAgB,GAAe,GAAa;AACxD,YAAM,eAAeA,SAAA,gBAAgB,EAAE,QAAQ,IAAIA,SAAA,gBAAgB,EAAE,QAAQ;AAC7E,UAAI,iBAAiB,GAAG;AACpB,eAAO;MACX;AAEA,aAAO,EAAE,YAAY,EAAE;IAC3B;AASA,aAAgB,sBAAsB,SAAoB;AACtD,aAAO,oBAAoB;IAC/B;AAKA,aAAgB,yBAAyB,SAAoB;AACzD,aAAO,iBAAiB,WAAW,gBAAgB;IACvD;AAKA,aAAgB,oBAAoB,SAAoB;AACpD,aAAO,cAAc,WAAW,iBAAiB;IACrD;AAKA,aAAgB,yBAAyB,SAAoB;AACzD,aAAO,YAAY,WAAW,EAAE,UAAU;IAC9C;AAKA,aAAgB,oBAAoB,SAAoB;AACpD,aAAO,UAAU;IACrB;AAKA,aAAgB,iBAAc;AAC1B,aAAO,SAAS,KAAK,IAAG,CAAE,IAAI,KAAK,OAAM,EAAG,SAAS,EAAE,EAAE,UAAU,GAAG,CAAC,CAAC;IAC5E;;;;;;;;;;ACyIA,IAAAC,SAAA,yBAAA;AAhkBA,QAAA,WAAA,QAAA,QAAA;AACA,QAAA,UAAA;AAiBA,QAAa,mBAAb,cAAsC,SAAA,aAAY;MAY9C,YAAY,UAAmC,CAAA,GAAE;AAC7C,cAAK;AAXD,aAAA,QAAsB,CAAA;AAEtB,aAAA,UAAmC,oBAAI,IAAG;AAE1C,aAAA,UAAwB,CAAA;AAExB,aAAA,SAAS;AAMb,aAAK,UAAU,EAAE,GAAG,QAAA,+BAA+B,GAAG,QAAO;MACjE;;;;;;;;;MAWA,QACI,OAAgC;AAGhC,YAAI,KAAK,QAAQ,eAAe,KAAK,KAAK,MAAM,UAAU,KAAK,QAAQ,cAAc;AACjF,gBAAM,IAAI,MAAM,4BAA4B,KAAK,QAAQ,YAAY,GAAG;QAC5E;AAEA,cAAM,OAA6B;UAC/B,GAAG;UACH,KAAI,GAAA,QAAA,gBAAc;UAClB,QAAQ;UACR,WAAW,KAAK,IAAG;UACnB,YAAY;;AAIhB,aAAK,iBAAiB,IAAI;AAG1B,aAAK,WAAW,SAAS,IAAI;AAC7B,aAAK,KAAK,aAAa,IAAI;AAE3B,eAAO,KAAK;MAChB;;;;;MAMA,UAAO;AACH,YAAI,KAAK,MAAM,WAAW,GAAG;AACzB,iBAAO;QACX;AAEA,cAAM,OAAO,KAAK,MAAM,MAAK;AAC7B,eAAO;MACX;;;;;MAMA,OAAI;AACA,eAAO,KAAK,MAAM,CAAC;MACvB;;;;;;;MASA,SAAM;AACF,eAAO,CAAC,GAAG,KAAK,OAAO,GAAG,MAAM,KAAK,KAAK,QAAQ,OAAM,CAAE,GAAG,GAAG,KAAK,OAAO;MAChF;;;;MAKA,YAAS;AACL,eAAO,CAAC,GAAG,KAAK,KAAK;MACzB;;;;MAKA,aAAU;AACN,eAAO,MAAM,KAAK,KAAK,QAAQ,OAAM,CAAE;MAC3C;;;;MAKA,eAAY;AACR,eAAO,KAAK,QAAQ,OAAO,OAAK,EAAE,WAAW,WAAW;MAC5D;;;;MAKA,YAAS;AACL,eAAO,KAAK,QAAQ,OAAO,OAAK,EAAE,WAAW,QAAQ;MACzD;;;;MAKA,eAAY;AACR,eAAO,KAAK,QAAQ,OAAO,OAAK,EAAE,WAAW,WAAW;MAC5D;;;;MAKA,aAAU;AACN,eAAO,CAAC,GAAG,KAAK,OAAO;MAC3B;;;;MAKA,OAAI;AACA,eAAO,KAAK,MAAM;MACtB;;;;MAKA,WAAQ;AACJ,eAAO;UACH,QAAQ,KAAK,MAAM;UACnB,SAAS,KAAK,Q
AAQ;UACtB,WAAW,KAAK,QAAQ,OAAO,OAAK,EAAE,WAAW,WAAW,EAAE;UAC9D,QAAQ,KAAK,QAAQ,OAAO,OAAK,EAAE,WAAW,QAAQ,EAAE;UACxD,WAAW,KAAK,QAAQ,OAAO,OAAK,EAAE,WAAW,WAAW,EAAE;UAC9D,OAAO,KAAK,MAAM,SAAS,KAAK,QAAQ,OAAO,KAAK,QAAQ;UAC5D,UAAU,KAAK;;MAEvB;;;;;;;MASA,QAAQ,IAAU;AAEd,cAAM,SAAS,KAAK,MAAM,KAAK,OAAK,EAAE,OAAO,EAAE;AAC/C,YAAI;AAAQ,iBAAO;AAGnB,cAAM,UAAU,KAAK,QAAQ,IAAI,EAAE;AACnC,YAAI;AAAS,iBAAO;AAGpB,eAAO,KAAK,QAAQ,KAAK,OAAK,EAAE,OAAO,EAAE;MAC7C;;;;;;;MAQA,WAAW,IAAY,SAAmB;AAEtC,cAAM,aAAa,KAAK,MAAM,UAAU,OAAK,EAAE,OAAO,EAAE;AACxD,YAAI,eAAe,IAAI;AACnB,gBAAM,OAAO,KAAK,MAAM,UAAU;AAClC,iBAAO,OAAO,MAAM,OAAO;AAG3B,cAAI,QAAQ,aAAa,QAAW;AAChC,iBAAK,MAAM,OAAO,YAAY,CAAC;AAC/B,iBAAK,iBAAiB,IAAI;UAC9B;AAEA,eAAK,WAAW,WAAW,IAAI;AAC/B,eAAK,KAAK,eAAe,MAAM,OAAO;AACtC,iBAAO;QACX;AAGA,cAAM,UAAU,KAAK,QAAQ,IAAI,EAAE;AACnC,YAAI,SAAS;AACT,iBAAO,OAAO,SAAS,OAAO;AAC9B,eAAK,WAAW,WAAW,OAAO;AAClC,eAAK,KAAK,eAAe,SAAS,OAAO;AACzC,iBAAO;QACX;AAGA,cAAM,eAAe,KAAK,QAAQ,UAAU,OAAK,EAAE,OAAO,EAAE;AAC5D,YAAI,iBAAiB,IAAI;AACrB,gBAAM,OAAO,KAAK,QAAQ,YAAY;AACtC,iBAAO,OAAO,MAAM,OAAO;AAC3B,eAAK,WAAW,WAAW,IAAI;AAC/B,eAAK,KAAK,eAAe,MAAM,OAAO;AACtC,iBAAO;QACX;AAEA,eAAO;MACX;;;;;;MAOA,WAAW,IAAU;AACjB,cAAM,QAAQ,KAAK,MAAM,UAAU,OAAK,EAAE,OAAO,EAAE;AACnD,YAAI,UAAU,IAAI;AACd,iBAAO;QACX;AAEA,cAAM,CAAC,IAAI,IAAI,KAAK,MAAM,OAAO,OAAO,CAAC;AACzC,aAAK,WAAW,WAAW,IAAI;AAC/B,aAAK,KAAK,eAAe,IAAI;AAC7B,eAAO;MACX;;;;;;MAOA,WAAW,IAAU;AAEjB,cAAM,aAAa,KAAK,MAAM,UAAU,OAAK,EAAE,OAAO,EAAE;AACxD,YAAI,eAAe,IAAI;AACnB,gBAAM,CAAC,IAAI,IAAI,KAAK,MAAM,OAAO,YAAY,CAAC;AAC9C,eAAK,SAAS;AACd,eAAK,cAAc,KAAK,IAAG;AAC3B,eAAK,aAAa,IAAI;AACtB,eAAK,WAAW,WAAW,IAAI;AAC/B,eAAK,KAAK,iBAAiB,IAAI;AAC/B,iBAAO;QACX;AAGA,cAAM,UAAU,KAAK,QAAQ,IAAI,EAAE;AACnC,YAAI,SAAS;AACT,kBAAQ,SAAS;AACjB,kBAAQ,cAAc,KAAK,IAAG;AAC9B,eAAK,QAAQ,OAAO,EAAE;AACtB,eAAK,aAAa,OAAO;AACzB,eAAK,WAAW,WAAW,OAAO;AAClC,eAAK,KAAK,iBAAiB,OAAO;AAClC,iBAAO;QACX;AAEA,eAAO;MACX;;;;;;;;;MAWA,YAAY,IAAU;AAClB,cAAM,QAAQ,KAAK,MAAM,UAAU,OAAK,EAAE,OAAO,EAAE;AACnD,YAAI,UAAU,IAAI;AACd,iBAAO;QACX;AAEA,cAAM,CAAC,IAAI,IAAI,KAAK,MAAM,OAAO,OAAO,CAAC;AACzC,aAAK,SAAS;AACd,aAAK,YAAY,KAAK,IAAG;AACzB,aAAK,QAAQ,IAAI,IAAI,IAAI;AAEzB,aAAK,WAAW,WAAW,IAAI;AAC/B,aAAK,KAAK,eAAe,IAAI;AAC7B,eAAO;MACX;;;;;;;MAQA,cAAc,IAAY,QAAgB;AACtC,cAAM,OAAO,KAAK,QAAQ,IAAI,EAAE;AAChC,YAAI,CAAC,MAAM;AACP,iBAAO;QACX;AAEA,aAAK,SAAS;AACd,aAAK,cAAc,KAAK,IAAG;AAC3B,aAAK,SAAS;AACd,aAAK,QAAQ,OAAO,EAAE;AACtB,aAAK,aAAa,IAAI;AAEtB,aAAK,WAAW,WAAW,IAAI;AAC/B,aAAK,KAAK,iBAAiB,MAAM,MAAM;AACvC,eAAO;MACX;;;;;;;MAQA,WAAW,IAAY,OAAqB;AACxC,cAAM,OAAO,KAAK,QAAQ,IAAI,EAAE;AAChC,YAAI,CAAC,MAAM;AACP,iBAAO;QACX;AAEA,aAAK,SAAS;AACd,aAAK,cAAc,KAAK,IAAG;AAC3B,aAAK,QAAQ,OAAO,UAAU,WAAW,QAAQ,MAAM;AACvD,aAAK,QAAQ,OAAO,EAAE;AACtB,aAAK,aAAa,IAAI;AAEtB,aAAK,WAAW,WAAW,IAAI;AAC/B,aAAK,KAAK,cAAc,MAAM,OAAO,UAAU,WAAW,IAAI,MAAM,KAAK,IAAI,KAAK;AAClF,eAAO;MACX;;;;;;;MAQA,UAAU,IAAY,UAAmB,MAAI;AACzC,cAAM,OAAO,KAAK,QAAQ,IAAI,EAAE;AAChC,YAAI,CAAC,MAAM;AACP,iBAAO;QACX;AAEA,aAAK,cAAc,KAAK,cAAc,KAAK;AAE3C,YAAI,SAAS;AACT,eAAK,SAAS;AACd,eAAK,YAAY;AACjB,eAAK,QAAQ,OAAO,EAAE;AACtB,eAAK,iBAAiB,IAAI;AAC1B,eAAK,WAAW,aAAa,IAAI;QACrC;AAEA,eAAO;MACX;;;;;;;;;MAWA,UAAU,IAAU;AAChB,cAAM,QAAQ,KAAK,MAAM,UAAU,OAAK,EAAE,OAAO,EAAE;AACnD,YAAI,UAAU,MAAM,UAAU,GAAG;AAC7B,iBAAO,UAAU;QACrB;AAEA,cAAM,CAAC,IAAI,IAAI,KAAK,MAAM,OAAO,OAAO,CAAC;AAEzC,aAAK,WAAW;AAChB,aAAK,YAAY,KAAK,MAAM,SAAS,IAAI,KAAK,MAAM,CAAC,EAAE,YAAY,IAAI,KAAK,IAAG;AAC/E,aAAK,MAAM,QAAQ,IAAI;AAEvB,aAAK,WAAW,aAAa,IAAI;AACjC,eAAO;MACX;;;;;;MAOA,OAAO,IAAU;AACb,cAAM,QAAQ,KAAK,MAAM,UAAU,OAAK,EAAE,OAAO,EAAE;AACnD,YAAI,SAAS,GAAG;AACZ,iBAAO;QACX;AAGA,SAAC,KAAK,MAAM,QAAQ,CAAC,GAAG,KAAK,MAAM,KAAK,CAAC,IAAI,CAAC
,KAAK,MAAM,KAAK,GAAG,KAAK,MAAM,QAAQ,CAAC,CAAC;AAEtF,aAAK,WAAW,aAAa,KAAK,MAAM,QAAQ,CAAC,CAAC;AAClD,eAAO;MACX;;;;;;MAOA,SAAS,IAAU;AACf,cAAM,QAAQ,KAAK,MAAM,UAAU,OAAK,EAAE,OAAO,EAAE;AACnD,YAAI,UAAU,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG;AAChD,iBAAO;QACX;AAGA,SAAC,KAAK,MAAM,KAAK,GAAG,KAAK,MAAM,QAAQ,CAAC,CAAC,IAAI,CAAC,KAAK,MAAM,QAAQ,CAAC,GAAG,KAAK,MAAM,KAAK,CAAC;AAEtF,aAAK,WAAW,aAAa,KAAK,MAAM,QAAQ,CAAC,CAAC;AAClD,eAAO;MACX;;;;;;MAOA,YAAY,IAAU;AAClB,cAAM,QAAQ,KAAK,MAAM,UAAU,OAAK,EAAE,OAAO,EAAE;AACnD,eAAO,UAAU,KAAK,KAAK,QAAQ;MACvC;;;;;;;;MAUA,QAAK;AACD,YAAI,CAAC,KAAK,QAAQ;AACd,eAAK,SAAS;AACd,eAAK,WAAW,QAAQ;AACxB,eAAK,KAAK,QAAQ;QACtB;MACJ;;;;MAKA,SAAM;AACF,YAAI,KAAK,QAAQ;AACb,eAAK,SAAS;AACd,eAAK,WAAW,SAAS;AACzB,eAAK,KAAK,SAAS;QACvB;MACJ;;;;MAKA,WAAQ;AACJ,eAAO,KAAK;MAChB;;;;MAKA,QAAK;AACD,cAAM,eAAe,CAAC,GAAG,KAAK,KAAK;AACnC,aAAK,QAAQ,CAAA;AAGb,mBAAW,QAAQ,cAAc;AAC7B,eAAK,SAAS;AACd,eAAK,cAAc,KAAK,IAAG;AAC3B,eAAK,aAAa,IAAI;QAC1B;AAEA,aAAK,WAAW,SAAS;MAC7B;;;;MAKA,eAAY;AACR,aAAK,UAAU,CAAA;AACf,aAAK,WAAW,SAAS;MAC7B;;;;MAKA,QAAK;AACD,aAAK,QAAQ,CAAA;AACb,aAAK,QAAQ,MAAK;AAClB,aAAK,UAAU,CAAA;AACf,aAAK,SAAS;AACd,aAAK,WAAW,SAAS;MAC7B;;;;;;;MASQ,iBAAiB,MAAgB;AAErC,YAAI,MAAM;AACV,YAAI,OAAO,KAAK,MAAM;AAEtB,eAAO,MAAM,MAAM;AACf,gBAAM,MAAM,KAAK,OAAO,MAAM,QAAQ,CAAC;AACvC,eAAI,GAAA,QAAA,iBAAgB,MAAM,KAAK,MAAM,GAAG,CAAC,IAAI,GAAG;AAC5C,mBAAO;UACX,OAAO;AACH,kBAAM,MAAM;UAChB;QACJ;AAEA,aAAK,MAAM,OAAO,KAAK,GAAG,IAAI;MAClC;;;;MAKQ,aAAa,MAAgB;AACjC,YAAI,CAAC,KAAK,QAAQ,aAAa;AAC3B;QACJ;AAEA,aAAK,QAAQ,QAAQ,IAAI;AAGzB,YAAI,KAAK,QAAQ,SAAS,KAAK,QAAQ,gBAAgB;AACnD,eAAK,UAAU,KAAK,QAAQ,MAAM,GAAG,KAAK,QAAQ,cAAc;QACpE;MACJ;;;;MAKQ,WAAW,MAAuB,MAAiB;AACvD,cAAM,QAA0B;UAC5B;UACA,QAAQ,MAAM;UACd;UACA,WAAW,KAAK,IAAG;;AAEvB,aAAK,KAAK,UAAU,KAAK;MAC7B;;AAxiBJ,IAAAA,SAAA,mBAAA;AA8iBA,aAAgB,uBACZ,SAAiC;AAEjC,aAAO,IAAI,iBAAiB,OAAO;IACvC;;;;;;;;;;AChPA,IAAAC,SAAA,sBAAA;AAuDA,IAAAA,SAAA,2BAAA;AA9YA,QAAA,WAAA,QAAA,QAAA;AACA,QAAA,wBAAA;AAEA,QAAA,UAAA;AAYA,QAAa,gBAAb,cAAmC,SAAA,aAAY;MAkB3C,YACI,cACA,cACA,UAAgC,CAAA,GAAE;AAElC,cAAK;AAfD,aAAA,UAAU;AAEV,aAAA,gBAAgB;AAEhB,aAAA,iBAA8B,oBAAI,IAAG;AAIrC,aAAA,oBAA0C;AAQ9C,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,UAAU,EAAE,GAAG,QAAA,0BAA0B,GAAG,QAAO;AACxD,aAAK,UAAU,IAAI,sBAAA,mBAAmB,KAAK,QAAQ,cAAc;AAGjE,aAAK,oBAAmB;AAGxB,YAAI,KAAK,QAAQ,WAAW;AACxB,eAAK,MAAK;QACd;MACJ;;;;;;;MASA,QAAK;AACD,YAAI,KAAK,SAAS;AACd;QACJ;AAEA,aAAK,UAAU;AACf,aAAK,gBAAgB;AACrB,aAAK,KAAK,SAAS;AAGnB,aAAK,oBAAoB,KAAK,YAAW;MAC7C;;;;MAKA,OAAI;AACA,YAAI,CAAC,KAAK,SAAS;AACf;QACJ;AAEA,aAAK,gBAAgB;AACrB,aAAK,UAAU;AACf,aAAK,KAAK,SAAS;MACvB;;;;MAKA,YAAS;AACL,eAAO,KAAK;MAChB;;;;MAKA,MAAM,oBAAiB;AACnB,YAAI,KAAK,mBAAmB;AACxB,gBAAM,KAAK;QACf;MACJ;;;;;;;;MAUA,kBAAkB,GAAS;AACvB,YAAI,IAAI,GAAG;AACP,gBAAM,IAAI,MAAM,mCAAmC;QACvD;AACA,aAAK,QAAQ,iBAAiB;AAC9B,aAAK,UAAU,IAAI,sBAAA,mBAAmB,CAAC;MAC3C;;;;MAKA,oBAAiB;AACb,eAAO,KAAK,QAAQ;MACxB;;;;;;;;MAUA,WAAW,QAAc;AACrB,aAAK,eAAe,IAAI,MAAM;AAG9B,aAAK,aAAa,WAAW,MAAM;AAGnC,YAAI,KAAK,aAAa,QAAQ;AAC1B,eAAK,aAAa,OAAO,MAAM;QACnC;MACJ;;;;MAKA,gBAAgB,QAAc;AAC1B,eAAO,KAAK,eAAe,IAAI,MAAM;MACzC;;;;;;;MASQ,MAAM,cAAW;AACrB,eAAO,KAAK,WAAW,CAAC,KAAK,eAAe;AAExC,cAAI,KAAK,aAAa,SAAQ,GAAI;AAC9B,kBAAM,KAAK,MAAM,GAAG;AACpB;UACJ;AAGA,cAAI,KAAK,QAAQ,gBAAgB,KAAK,QAAQ,OAAO;AACjD,kBAAM,KAAK,MAAM,EAAE;AACnB;UACJ;AAGA,gBAAM,OAAO,KAAK,aAAa,KAAI;AACnC,cAAI,CAAC,MAAM;AAEP,kBAAM,KAAK,MAAM,GAAG;AACpB;UACJ;AAGA,eAAK,YAAY,IAAI,EAAE,MAAM,WAAQ;AAEjC,iBAAK,KAAK,SAAS,KAAK;UAC5B,CAAC;AAGD,gBAAM,KAAK,MAAM,EAAE;QACvB;MACJ;;;;MAKQ,MAAM,YAAY,MAAgB;AACtC,cAAM,SAAS,KAAK;AAGpB,YAAI,KAAK,eAAe,IAAI,MAAM,GAAG;AACjC;QACJ;AAGA,cAAM,cAAc,KAAK,aAAa,YAAY,MAAM;AACxD,YAAI,CAAC,aAAa;AAEd;QAC
J;AAEA,aAAK,KAAK,eAAe,WAAW;AAGpC,cAAM,cAAc,MAAM,KAAK,eAAe,IAAI,MAAM;AAExD,YAAI;AAEA,gBAAM,SAAS,MAAM,KAAK,QAAQ,IAC9B,MAAM,KAAK,mBAAmB,WAAW,GACzC,WAAW;AAIf,cAAI,YAAW,GAAI;AAEf,iBAAK,eAAe,OAAO,MAAM;AACjC;UACJ;AAEA,cAAI,OAAO,SAAS;AAChB,iBAAK,aAAa,cAAc,QAAQ,OAAO,MAAM;AACrD,iBAAK,KAAK,iBAAiB,aAAa,OAAO,MAAM;UACzD,OAAO;AACH,kBAAM,KAAK,kBAAkB,aAAa,OAAO,SAAS,IAAI,MAAM,eAAe,CAAC;UACxF;QACJ,SAAS,OAAO;AACZ,cAAI,iBAAiB,sBAAA,mBAAmB;AAEpC,iBAAK,eAAe,OAAO,MAAM;AACjC,iBAAK,KAAK,iBAAiB,WAAW;UAC1C,OAAO;AACH,kBAAM,KAAK,kBAAkB,aAAa,KAAc;UAC5D;QACJ;MACJ;;;;MAKQ,MAAM,mBAAmB,MAAgB;AAC7C,cAAM,YAAY,KAAK,OAAO,aAAa,QAAA,oBAAoB;AAE/D,cAAM,YAAY,KAAK,IAAG;AAG1B,cAAM,iBAAiB,IAAI,QAA6B,CAAC,GAAG,WAAU;AAClE,qBAAW,MAAK;AACZ,mBAAO,IAAI,MAAM,wBAAwB,SAAS,IAAI,CAAC;UAC3D,GAAG,SAAS;QAChB,CAAC;AAGD,YAAI;AACA,gBAAM,SAAS,MAAM,QAAQ,KAAK;YAC9B,KAAK,aAAa,QAAQ,IAAI;YAC9B;WACH;AAED,iBAAO;YACH,GAAG;YACH,YAAY,KAAK,IAAG,IAAK;;QAEjC,SAAS,OAAO;AACZ,iBAAO;YACH,SAAS;YACT;YACA,YAAY,KAAK,IAAG,IAAK;;QAEjC;MACJ;;;;MAKQ,MAAM,kBAAkB,MAAkB,OAAY;AAC1D,cAAM,SAAS,KAAK;AACpB,cAAM,aAAa,KAAK,cAAc;AACtC,cAAM,aAAa,OAAO,iBAAiB,QAAA,oBAAoB;AAE/D,YAAI,OAAO,kBAAkB,aAAa,YAAY;AAElD,gBAAM,aAAa,OAAO,gBAAgB,QAAA,oBAAoB;AAC9D,gBAAM,KAAK,MAAM,UAAU;AAE3B,eAAK,aAAa,UAAU,KAAK,IAAI,IAAI;AACzC,eAAK,KAAK,aAAa,MAAM,aAAa,CAAC;QAC/C,OAAO;AAEH,eAAK,aAAa,WAAW,KAAK,IAAI,KAAK;AAC3C,eAAK,KAAK,cAAc,MAAM,KAAK;QACvC;MACJ;;;;;;;MASQ,sBAAmB;AAEvB,aAAK,aAAa,GAAG,WAAW,MAAK;AACjC,cAAI,KAAK,WAAW,CAAC,KAAK,mBAAmB;AACzC,iBAAK,oBAAoB,KAAK,YAAW;UAC7C;QACJ,CAAC;MACL;;;;;;;MASQ,MAAM,IAAU;AACpB,eAAO,IAAI,QAAQ,CAAAC,cAAW,WAAWA,WAAS,EAAE,CAAC;MACzD;;;;MAKA,UAAO;AACH,aAAK,KAAI;AACT,aAAK,eAAe,MAAK;AACzB,aAAK,mBAAkB;MAC3B;;AAlUJ,IAAAD,SAAA,gBAAA;AAwUA,aAAgB,oBACZ,cACA,cACA,SAA8B;AAE9B,aAAO,IAAI,cAAc,cAAc,cAAc,OAAO;IAChE;AAMA,QAAa,qBAAb,MAA+B;MAI3B,YAAY,WAAiD;AAF5C,aAAA,iBAA8B,oBAAI,IAAG;AAGlD,aAAK,YAAY;MACrB;MAEA,MAAM,QAAQ,MAAgB;AAC1B,YAAI,KAAK,eAAe,IAAI,KAAK,EAAE,GAAG;AAClC,iBAAO;YACH,SAAS;YACT,OAAO,IAAI,sBAAA,kBAAiB;YAC5B,YAAY;;QAEpB;AAEA,cAAM,YAAY,KAAK,IAAG;AAE1B,YAAI;AACA,gBAAM,SAAS,MAAM,KAAK,UAAU,IAAI;AACxC,iBAAO;YACH,SAAS;YACT;YACA,YAAY,KAAK,IAAG,IAAK;;QAEjC,SAAS,OAAO;AACZ,iBAAO;YACH,SAAS;YACT;YACA,YAAY,KAAK,IAAG,IAAK;;QAEjC;MACJ;MAEA,OAAO,QAAc;AACjB,aAAK,eAAe,IAAI,MAAM;MAClC;;AArCJ,IAAAA,SAAA,qBAAA;AA2CA,aAAgB,yBACZ,WAAiD;AAEjD,aAAO,IAAI,mBAAmB,SAAS;IAC3C;;;;;;;;;;ACnWA,QAAA,UAAA;AAgBI,WAAA,eAAAE,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAmB,EAAA,CAAA;AAgBnB,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAwB,EAAA,CAAA;AAIxB,WAAA,eAAAA,UAAA,iCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAA6B,EAAA,CAAA;AAI7B,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAe,EAAA,CAAA;AAGf,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAwB,EAAA,CAAA;AACxB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAwB,EAAA,CAAA;AACxB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAmB,EAAA,CAAA;AAGnB,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAc,EAAA,CAAA;AAOlB,QAAA,uBAAA;AACI,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,qBAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,qBAAA;IAAsB,EAAA,CAAA;AAO1B,QAAA,mBAAA;AACI,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAA
A,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,iBAAA;IAAwB,EAAA,CAAA;;;;;;;;;;;;;;;AC7F5B,QAAA,WAAA;AAEI,WAAA,eAAAC,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAW,EAAA,CAAA;AACX,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,cAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAU,EAAA,CAAA;AACV,WAAA,eAAAA,UAAA,aAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAS,EAAA,CAAA;AACT,WAAA,eAAAA,UAAA,aAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAS,EAAA,CAAA;AACT,WAAA,eAAAA,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAW,EAAA,CAAA;AAOf,QAAA,WAAA;AAEI,WAAA,eAAAA,UAAA,aAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAS,EAAA,CAAA;AAET,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAkB,EAAA,CAAA;AAElB,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAiB,EAAA,CAAA;AAEjB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,aAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAS,EAAA,CAAA;AACT,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAoB,EAAA,CAAA;AACpB,WAAA,eAAAA,UAAA,YAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAQ,EAAA,CAAA;AAOZ,QAAA,YAAA;AAEI,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAiB,EAAA,CAAA;AAEjB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAuB,EAAA,CAAA;AAEvB,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAY,EAAA,CAAA;AAEZ,WAAA,eAAAA,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAW,EAAA,CAAA;AACX,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAoB,EAAA,CAAA;AAEpB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAmB,EAAA,CAAA;AAKnB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,aAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAS,EAAA,CAAA;AACT,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAqB,EAAA,CAAA;AAGrB,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAsB,EAAA,CAAA;AACtB,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,UAAA;IAAkB,EAAA,CAAA;AAOtB,QAAA,UAAA;AAMI,WAAA,eAAAA,UAAA,cAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAU,EAAA,CAAA;AACV,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,cAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAU,EAAA,CAAA;AACV,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAW,EAAA,CAAA;AACX,WAAA,eAAAA,UAAA,aAAA,EAAA,YAAA,MAAA,KA
AA,WAAA;AAAA,aAAA,QAAA;IAAS,EAAA,CAAA;AACT,WAAA,eAAAA,UAAA,YAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAQ,EAAA,CAAA;AACR,WAAA,eAAAA,UAAA,aAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAS,EAAA,CAAA;AACT,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA,cAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAU,EAAA,CAAA;AACV,WAAA,eAAAA,UAAA,cAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAU,EAAA,CAAA;AACV,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAmB,EAAA,CAAA;AAEnB,WAAA,eAAAA,UAAA,QAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAI,EAAA,CAAA;AACJ,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAqB,EAAA,CAAA;AAErB,WAAA,eAAAA,UAAA,aAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAS,EAAA,CAAA;AAGT,WAAA,eAAAA,UAAA,WAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAO,EAAA,CAAA;AACP,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAW,EAAA,CAAA;AAGX,WAAA,eAAAA,UAAA,+BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAA2B,EAAA,CAAA;AAE3B,WAAA,eAAAA,UAAA,YAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAQ,EAAA,CAAA;AACR,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,cAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAU,EAAA,CAAA;AACV,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAc,EAAA,CAAA;AAEd,WAAA,eAAAA,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAW,EAAA,CAAA;AACX,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAe,EAAA,CAAA;AASf,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAuB,EAAA,CAAA;AAEvB,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAwB,EAAA,CAAA;AACxB,WAAA,eAAAA,UAAA,+BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAA2B,EAAA,CAAA;AAC3B,WAAA,eAAAA,UAAA,iCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAA6B,EAAA,CAAA;AAK7B,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAwB,EAAA,CAAA;AAExB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAiB,EAAA,CAAA;AAEjB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAwB,EAAA,CAAA;AACxB,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,
KAAA,WAAA;AAAA,aAAA,QAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAiB,EAAA,CAAA;AAOrB,QAAA,WAAA;AAEI,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAqB,EAAA,CAAA;AAErB,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAsB,EAAA,CAAA;AACtB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAuB,EAAA,CAAA;AAEvB,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAoB,EAAA,CAAA;AACpB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAoB,EAAA,CAAA;AACpB,WAAA,eAAAA,UAAA,+BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAA2B,EAAA,CAAA;AAC3B,WAAA,eAAAA,UAAA,8BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAA0B,EAAA,CAAA;AAE1B,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAsB,EAAA,CAAA;AACtB,WAAA,eAAAA,UAAA,8BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAA0B,EAAA,CAAA;AAC1B,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAsB,EAAA,CAAA;AACtB,WAAA,eAAAA,UAAA,qCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAiC,EAAA,CAAA;AAEjC,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAsB,EAAA,CAAA;AACtB,WAAA,eAAAA,UAAA,gCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAA4B,EAAA,CAAA;AAC5B,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAuB,EAAA,CAAA;AAEvB,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAsB,EAAA,CAAA;AACtB,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAsB,EAAA,CAAA;AACtB,WAAA,eAAAA,UAAA,gCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAA4B,EAAA,CAAA;AAC5B,WAAA,eAAAA,UAAA,oCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAgC,EAAA,CAAA;AAChC,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAwB,EAAA,CAAA;AACxB,WAAA,eAAAA,UAAA,8BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAA0B,EAAA,CAAA;AAE1B,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAwB,EAAA,CAAA;AAExB,WAAA,eAAAA,UAAA,iCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAA6B,EAAA,CAAA;AAC7B,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,SAAA;IAAsB,EAAA,CAAA;AAO1B,QAAA,OAAA;AAII,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAY,EAAA,CAAA;AAEZ,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAe,EAAA,CAAA;AAEf,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAgB,EAAA,CAAA;AAGhB,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAe,EAAA,CAAA;AAKf,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAmB,EAAA,CAAA;AAGnB,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,
aAAA,KAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAiB,EAAA,CAAA;AAGjB,WAAA,eAAAA,UAAA,6BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAyB,EAAA,CAAA;AACzB,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAsB,EAAA,CAAA;AACtB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAqB,EAAA,CAAA;AAErB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAkB,EAAA,CAAA;AAgBlB,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAkB,EAAA,CAAA;AAKlB,WAAA,eAAAA,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAW,EAAA,CAAA;AAMX,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,6BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAyB,EAAA,CAAA;AACzB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAe,EAAA,CAAA;AAIf,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAoB,EAAA,CAAA;AACpB,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAsB,EAAA,CAAA;AActB,WAAA,eAAAA,UAAA,+BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAA2B,EAAA,CAAA;AAC3B,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAkB,EAAA,CAAA;AAKlB,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAoB,EAAA,CAAA;AACpB,WAAA,eAAAA,UAAA,6BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAyB,EAAA,CAAA;AACzB,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,KAAA;IAAwB,EAAA,CAAA;AAO5B,QAAA,eAAA;AA2BI,WAAA,eAAAA,UAAA,8BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAA0B,EAAA,CAAA;AAE1B,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAc,EAAA,CAAA;AAEd,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAkB,EAAA,CAAA;AAIlB,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAgB,EAAA,CAAA;A
AChB,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAoB,EAAA,CAAA;AACpB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAmB,EAAA,CAAA;AAEnB,WAAA,eAAAA,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAW,EAAA,CAAA;AACX,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAoB,EAAA,CAAA;AACpB,WAAA,eAAAA,UAAA,8BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAA0B,EAAA,CAAA;AAC1B,WAAA,eAAAA,UAAA,8BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAA0B,EAAA,CAAA;AAC1B,WAAA,eAAAA,UAAA,6BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAyB,EAAA,CAAA;AACzB,WAAA,eAAAA,UAAA,aAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAS,EAAA,CAAA;AACT,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,8BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAA0B,EAAA,CAAA;AAC1B,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,6BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAyB,EAAA,CAAA;AAUzB,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,mCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAA+B,EAAA,CAAA;AAC/B,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,6BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAyB,EAAA,CAAA;AACzB,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,gCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAA4B,EAAA,CAAA;AAC5B,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,gCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAA4B,EAAA,CAAA;AAC5B,WAAA,eAAAA,UAAA,8BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAA0B,EAAA,CAAA;AAC1B,WAAA,eAAAA,UAAA,qCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAiC,EAAA,CAAA;AACjC,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,6BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAyB,EAAA,CAAA;AAezB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAoB,EAAA,CAAA;AAsBpB,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;
AAAA,aAAA,aAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAoB,EAAA,CAAA;AACpB,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,aAAA;IAAc,EAAA,CAAA;AAQlB,QAAA,aAAA;AAkBI,WAAA,eAAAA,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAW,EAAA,CAAA;AACX,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAgB,EAAA,CAAA;AAEhB,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAwB,EAAA,CAAA;AACxB,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAsB,EAAA,CAAA;AAKtB,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAW,EAAA,CAAA;AACX,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAmB,EAAA,CAAA;AAEnB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAa,EAAA,CAAA;AAGb,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAmB,EAAA,CAAA;AAInB,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,gCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAA4B,EAAA,CAAA;AAC5B,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAc,EAAA,CAAA;AACd,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAoB,EAAA,CAAA;AACpB,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAmB,EAAA,CAAA;AAGnB,WAAA,eAAAA,UAAA,gBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAY,EAAA,CAAA;AACZ,WAAA,eAAAA,UAAA
,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,2BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAuB,EAAA,CAAA;AACvB,WAAA,eAAAA,UAAA,+BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAA2B,EAAA,CAAA;AAC3B,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,qBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAiB,EAAA,CAAA;AACjB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,eAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAW,EAAA,CAAA;AACX,WAAA,eAAAA,UAAA,cAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAU,EAAA,CAAA;AACV,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAkB,EAAA,CAAA;AAElB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAqB,EAAA,CAAA;AAIrB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAsB,EAAA,CAAA;AACtB,WAAA,eAAAA,UAAA,wBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,WAAA;IAAoB,EAAA,CAAA;AAUxB,QAAA,UAAA;AAgBI,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAmB,EAAA,CAAA;AAgBnB,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAwB,EAAA,CAAA;AAIxB,WAAA,eAAAA,UAAA,iCAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAA6B,EAAA,CAAA;AAI7B,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAe,EAAA,CAAA;AACf,WAAA,eAAAA,UAAA,mBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAe,EAAA,CAAA;AAGf,WAAA,eAAAA,UAAA,yBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAqB,EAAA,CAAA;AACrB,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAwB,EAAA,CAAA;AACxB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAwB,EAAA,CAAA;AACxB,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAmB,EAAA,CAAA;AAGnB,WAAA,eAAAA,UAAA,kBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAc,EAAA,CAAA;AAGd,WAAA,eAAAA,UAAA,oBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAgB,EAAA,CAAA;AAChB,WAAA,eAAAA,UAAA,0BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAsB,EAAA,CAAA;AAGtB,WAAA,eAAAA,UAAA,iBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAa,EAAA,CAAA;AACb,WAAA,eAAAA,UAAA,uBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAmB,EAAA,CAAA;AACnB,WAAA,eAAAA,UAAA,sBAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAkB,EAAA,CAAA;AAClB,WAAA,eAAAA,UAAA,4BAAA,EAAA,YAAA,MAAA,KAAA,WAAA;AAAA,aAAA,QAAA;IAAwB,EAAA,CAAA;;;;;ACpnBrB,SAAS,iBAAiB,UAAkB,WAA2B;AAC1E,SAAO;AAAA,6EACkE,QAAQ;AAAA;AAAA;AAAA;AAAA,yCAI5C,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBA0D/B,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAS5B;AA7FA;AAAA;AAAA;AAAA;AAAA;;;AC4JO,SAAS,sBAAsB,SAA0B;AAC5D,MAAI,CAAC,WAAW,OAAO,YAAY,UAAU;AACzC,WAAO;AAAA,EACX
;AACA,QAAM,UAAU,QAAQ,KAAK;AAC7B,SAAO,uBAAuB;AAAA,IAAK,aAC/B,QAAQ,WAAW,OAAO,KAAK,QAAQ,WAAW,GAAG,OAAO,GAAG;AAAA,EACnE;AACJ;AA6BO,SAAS,gBAAgB,IAAqB;AACjD,SAAO,gCAAgC,KAAK,EAAE;AAClD;AAKO,SAAS,kBAAkB,OAAuB;AACrD,SAAO,MACF,YAAY,EACZ,QAAQ,eAAe,GAAG,EAC1B,QAAQ,YAAY,EAAE,EACtB,QAAQ,UAAU,GAAG,KAAK;AACnC;AA9MA,IAiBa,qBAqCA,wBAyBA,wBAoEA,wBA0BA,8BAKA,8BAKA,6BAKA;AA5Lb;AAAA;AAAA;AAiBO,IAAM,sBAAsB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAqC5B,IAAM,yBAAyB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAyB/B,IAAM,yBAAyB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAoE/B,IAAM,yBAAyB;AAAA,MAClC;AAAA,MAAS;AAAA,MAAa;AAAA,MAAmB;AAAA,MAAgB;AAAA,MACzD;AAAA,MAAa;AAAA,MAAS;AAAA,MAAO;AAAA,MAAY;AAAA,MAAW;AAAA,MACpD;AAAA,MAAY;AAAA,MAAiB;AAAA,MAAU;AAAA,MAAW;AAAA,IACtD;AAsBO,IAAM,+BAA+B,CAAC,WAAW,WAAW,YAAY;AAKxE,IAAM,+BAA+B,CAAC,QAAQ,UAAU;AAKxD,IAAM,8BAA8B,CAAC,MAAM,QAAQ,MAAM;AAKzD,IAAM,0BAA0B,CAAC,OAAO,UAAU,MAAM;AAAA;AAAA;;;ACrKxD,SAAS,oBAAoB,UAAqC,SAAgD;AACrH,QAAM,EAAE,SAAS,SAAS,MAAM,IAAI;AAEpC,MAAI,CAAC,YAAY,OAAO,aAAa,UAAU;AAC3C,UAAM,IAAI,MAAM,sCAAsC,OAAO,GAAG;AAAA,EACpE;AAEA,QAAM,cAAU,kCAAY,QAAQ;AACpC,MAAI,CAAC,SAAS;AACV,UAAM,IAAI,MAAM,iCAAiC,OAAO,oDAAoD;AAAA,EAChH;AAEA,MAAI;AACJ,MAAI;AACA,aAAS,KAAK,MAAM,OAAO;AAAA,EAC/B,SAAS,YAAY;AACjB,QAAI,QAAQ;AACR,YAAM,QAAQ,kBAAkB,OAAO;AACvC,UAAI,OAAO;AACP,YAAI;AACA,mBAAS,KAAK,MAAM,KAAK;AAAA,QAC7B,QAAQ;AACJ,gBAAM,IAAI,MAAM,mBAAmB,OAAO,cAAc,gBAAgB,UAAU,CAAC,EAAE;AAAA,QACzF;AAAA,MACJ,OAAO;AACH,cAAM,IAAI,MAAM,mBAAmB,OAAO,cAAc,gBAAgB,UAAU,CAAC,EAAE;AAAA,MACzF;AAAA,IACJ,OAAO;AACH,YAAM,IAAI,MAAM,mBAAmB,OAAO,cAAc,gBAAgB,UAAU,CAAC,EAAE;AAAA,IACzF;AAAA,EACJ;AAEA,MAAI,OAAO,WAAW,YAAY,WAAW,QAAQ,MAAM,QAAQ,MAAM,GAAG;AACxE,UAAM,IAAI,MAAM,GAAG,OAAO,gCAAgC;AAAA,EAC9D;AAEA,SAAO;AACX;AAKO,SAAS,kBAAkB,SAAgC;AAC9D,MAAI;AACA,QAAI,QAAQ;AAEZ,YAAQ,MAAM,QAAQ,MAAM,GAAG;AAE/B,YAAQ,MAAM,QAAQ,0CAA0C,SAAS;AAEzE,YAAQ,MAAM,QAAQ,gBAAgB,IAAI;AAE1C,YAAQ,MAAM,QAAQ,eAAe,OAAO;AAC5C,SAAK,MAAM,KAAK;AAChB,WAAO;AAAA,EACX,QAAQ;AACJ,WAAO;AAAA,EACX;AACJ;AAjFA,IASA;AATA;AAAA;AAAA;AASA,2BAA4B;AAC5B;AAAA;AAAA;;;ACuBO,SAAS,mBAAmB,UAA+B;AAC9D,QAAM,MAAM,oBAAoB,UAAU,EAAE,SAAS,SAAS,QAAQ,KAAK,CAAC;AAC5E,MAAI,EAAE,YAAY,MAAM;AACpB,UAAM,IAAI,MAAM,uCAAuC;AAAA,EAC3D;AAEA,SAAO,iBAAiB,IAAI,MAAM;AACtC;AASA,SAAS,iBAAiB,KAA2B;AACjD,MAAI,CAAC,MAAM,QAAQ,GAAG,GAAG;AACrB,UAAM,IAAI,MAAM,iCAAiC;AAAA,EACrD;AAEA,QAAM,SAAsB,CAAC;AAC7B,QAAM,WAAqB,CAAC;AAE5B,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK;AACjC,UAAM,OAAO,IAAI,CAAC;AAClB,QAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC3C,eAAS,KAAK,mCAAmC,CAAC,iBAAiB;AACnE;AAAA,IACJ;AAEA,UAAM,MAAM;AAGZ,QAAI,OAAO,IAAI,UAAU,YAAY,CAAC,IAAI,OAAO;AAC7C,eAAS,KAAK,2BAA2B,CAAC,oCAAoC;AAC9E;AAAA,IACJ;AAEA,QAAI,OAAO,IAAI,gBAAgB,YAAY,CAAC,IAAI,aAAa;AACzD,eAAS,KAAK,2BAA2B,CAAC,0CAA0C;AACpF;AAAA,IACJ;AAGA,UAAM,UAAU,kBAAkB,OAAO,IAAI,KAAK,CAAC;AAGnD,QAAI,QAAkB,CAAC;AACvB,QAAI,MAAM,QAAQ,IAAI,KAAK,GAAG;AAC1B,cAAQ,IAAI,MACP,OAAO,OAAK,OAAO,MAAM,QAAQ,EACjC,IAAI,OAAK,OAAO,CAAC,EAAE,KAAK,CAAC,EACzB,OAAO,OAAK,EAAE,SAAS,CAAC;AAAA,IACjC,WAAW,OAAO,IAAI,UAAU,UAAU;AAEtC,cAAQ,IAAI,MACP,MAAM,GAAG,EACT,IAAI,OAAK,EAAE,KAAK,CAAC,EACjB,OAAO,OAAK,EAAE,SAAS,CAAC;AAAA,IACjC,OAAO;AAEH,cAAQ,CAAC,OAAO;AAAA,IACpB;AAGA,QAAI,MAAM,WAAW,GAAG;AACpB,cAAQ,CAAC,OAAO;AAAA,IACpB;AAEA,WAAO,KAAK;AAAA,MACR,OAAO;AAAA,MACP,aAAa,OAAO,IAAI,WAAW,EAAE,KAAK;AAAA,MAC1C;AAAA,IACJ,CAAC;AAAA
,EACL;AAGA,MAAI,SAAS,SAAS,GAAG;AACrB,eAAW,KAAK,UAAU;AACtB,cAAQ,OAAO,MAAM,UAAU,CAAC;AAAA,CAAI;AAAA,IACxC;AAAA,EACJ;AAGA,QAAM,UAAU,oBAAI,IAAY;AAChC,QAAM,eAA4B,CAAC;AACnC,aAAW,SAAS,QAAQ;AACxB,QAAI,QAAQ,IAAI,MAAM,KAAK,GAAG;AAC1B,eAAS,KAAK,uBAAuB,MAAM,KAAK,6BAA6B;AAC7E;AAAA,IACJ;AACA,YAAQ,IAAI,MAAM,KAAK;AACvB,iBAAa,KAAK,KAAK;AAAA,EAC3B;AAEA,SAAO;AACX;AAlIA;AAAA;AAAA;AAWA;AACA;AAAA;AAAA;;;AC6DO,SAAS,uBAAuB,UAA+B;AAClE,QAAM,QAAqB,CAAC;AAE5B,MAAI;AACA,UAAM,UAAa,gBAAY,UAAU,EAAE,eAAe,KAAK,CAAC;AAEhE,eAAW,SAAS,SAAS;AAEzB,UAAI,CAAC,MAAM,YAAY,GAAG;AACtB;AAAA,MACJ;AAEA,YAAM,UAAU,MAAM;AAGtB,UAAI,cAAc,IAAI,OAAO,GAAG;AAC5B;AAAA,MACJ;AAGA,UAAI,QAAQ,WAAW,GAAG,KAAK,CAAC,cAAc,IAAI,OAAO,GAAG;AAGxD;AAAA,MACJ;AAGA,YAAM,UAAU,kBAAkB,OAAO;AAGzC,UAAI,CAAC,WAAW,YAAY,WAAW;AACnC;AAAA,MACJ;AAGA,YAAM,KAAK;AAAA,QACP,OAAO;AAAA,QACP,aAAa,mBAAmB,OAAO;AAAA,QACvC,OAAO,CAAC,SAAS,OAAO;AAAA,MAC5B,CAAC;AAAA,IACL;AAAA,EACJ,SAAS,OAAO;AAGZ,QAAK,MAAgC,SAAS,UAAU;AACpD,aAAO,CAAC;AAAA,IACZ;AACA,UAAM;AAAA,EACV;AAEA,SAAO;AACX;AA5HA,IASAC,KAaM;AAtBN;AAAA;AAAA;AASA,IAAAA,MAAoB;AAGpB;AAUA,IAAM,gBAAgB,oBAAI,IAAI;AAAA,MAC1B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACJ,CAAC;AAAA;AAAA;;;ACjBD,SAAS,oBAAoB,SAAqD;AAC9E,MAAI,QAAQ,SAAS,QAAQ;AACzB,WAAO,EAAE,MAAM,WAAW;AAAA,EAC9B;AACA,SAAO,EAAE,MAAM,kBAAkB;AACrC;AAkBA,eAAsB,gBAClB,UACA,SACoB;AACpB,QAAM,cAAU,4CAAqB;AAGrC,YAAU,sCAAsC;AAChD,QAAM,eAAe,MAAM,QAAQ,YAAY;AAC/C,MAAI,CAAC,cAAc;AACf,UAAM,IAAI;AAAA,MACN;AAAA,MACA;AAAA,IACJ;AAAA,EACJ;AAGA,YAAU,yBAAyB,KAAK,gBAAgB,QAAQ,SAAS,GAAG,CAAC,EAAE;AAC/E,QAAM,SAAS,iBAAiB,UAAU,QAAQ,SAAS;AAG3D,QAAM,cAAkC;AAAA,IACpC;AAAA,IACA,kBAAkB;AAAA,IAClB,gBAAgB;AAAA,IAChB,qBAAqB;AAAA,IACrB,SAAS;AAAA;AAAA,IACT,WAAW;AAAA,EACf;AAGA,MAAI,QAAQ,OAAO;AACf,gBAAY,QAAQ,QAAQ;AAAA,EAChC;AAGA,YAAU,qEAAgE;AAC1E,QAAM,SAAS,MAAM,QAAQ,YAAY,WAAW;AAEpD,MAAI,CAAC,OAAO,SAAS;AACjB,UAAM,WAAW,OAAO,SAAS;AACjC,QAAI,SAAS,YAAY,EAAE,SAAS,SAAS,GAAG;AAC5C,YAAM,IAAI;AAAA,QACN,oCAAoC,2BAA2B,GAAI;AAAA,QAEnE;AAAA,MACJ;AAAA,IACJ;AACA,UAAM,IAAI,WAAW,+BAA+B,QAAQ,IAAI,UAAU;AAAA,EAC9E;AAEA,MAAI,CAAC,OAAO,UAAU;AAClB,UAAM,IAAI,WAAW,8BAA8B,gBAAgB;AAAA,EACvE;AAGA,YAAU,yCAAyC;AACnD,MAAI;AACJ,MAAI;AACA,YAAQ,mBAAmB,OAAO,QAAQ;AAAA,EAC9C,SAAS,YAAY;AAEjB,QAAI,QAAQ,SAAS;AACjB,cAAQ,OAAO;AAAA,QACX,uCAAuC,gBAAgB,UAAU,CAAC;AAAA;AAAA,MACtE;AAAA,IACJ;AACA,WAAO,uBAAuB,QAAQ;AAAA,EAC1C;AAGA,MAAI,MAAM,SAAS,QAAQ,WAAW;AAClC,QAAI,QAAQ,SAAS;AACjB,cAAQ,OAAO;AAAA,QACX,uBAAuB,MAAM,MAAM,qBAAqB,QAAQ,SAAS,oBAAoB,QAAQ,SAAS;AAAA;AAAA,MAClH;AAAA,IACJ;AACA,WAAO,MAAM,MAAM,GAAG,QAAQ,SAAS;AAAA,EAC3C;AAEA,SAAO;AACX;AAjJA,IAUAC,uBAkBM,0BAGA,aA2HO;AA1Jb;AAAA;AAAA;AAUA,IAAAA,wBAKO;AAEP;AACA;AACA;AACA;AACA;AAOA,IAAM,2BAA2B;AAGjC,IAAM,cAAc,CAAC,QAAQ,QAAQ,MAAM;AA2HpC,IAAM,aAAN,cAAyB,MAAM;AAAA,MAClC,YACI,SACgB,MAClB;AACE,cAAM,OAAO;AAFG;AAGhB,aAAK,OAAO;AAAA,MAChB;AAAA,IACJ;AAAA;AAAA;;;ACxIO,SAAS,cAAc,UAA+B;AACzD,QAAM,eAAoB,cAAQ,QAAQ;AAG1C,MAAI,CAAI,eAAW,YAAY,GAAG;AAC9B,UAAM,IAAI,MAAM,6BAA6B,YAAY,EAAE;AAAA,EAC/D;AAEA,QAAM,QAAW,aAAS,YAAY;AACtC,MAAI,CAAC,MAAM,OAAO,GAAG;AACjB,UAAM,IAAI,MAAM,iCAAiC,YAAY,EAAE;AAAA,EACnE;AAGA,QAAM,UAAa,iBAAa,cAAc,OAAO,EAAE,KAAK;AAC5D,MAAI,CAAC,SAAS;AACV,UAAM,IAAI,MAAM,uBAAuB,YAAY,EAAE;AAAA,EACzD;AAGA,QAAM,MAAW,cAAQ,YAAY,EAAE,YAAY;AACnD,MAAI,QAAQ,WAAW,QAAQ,KAAK,EAAE,WAAW,GAAG,GAAG;AACnD,WAAO,kBAAkB,SAAS,YAAY;AAAA,EAClD,WAAW,QAAQ,UAAU,QAAQ,S
AAS,GAAG,GAAG;AAChD,WAAO,iBAAiB,SAAS,YAAY;AAAA,EACjD,OAAO;AAEH,QAAI;AACA,aAAO,kBAAkB,SAAS,YAAY;AAAA,IAClD,QAAQ;AACJ,aAAO,iBAAiB,SAAS,YAAY;AAAA,IACjD;AAAA,EACJ;AACJ;AASA,SAAS,kBAAkB,SAAiB,UAA+B;AACvE,MAAI;AACJ,MAAI;AACA,aAAS,KAAK,MAAM,OAAO;AAAA,EAC/B,SAAS,OAAO;AACZ,UAAM,IAAI,MAAM,6BAA6B,QAAQ,KAAK,gBAAgB,KAAK,CAAC,EAAE;AAAA,EACtF;AAEA,MAAI,OAAO,WAAW,YAAY,WAAW,MAAM;AAC/C,UAAM,IAAI,MAAM,aAAa,QAAQ,iCAAiC;AAAA,EAC1E;AAEA,QAAM,MAAM;AAGZ,MAAI,YAAY,OAAO,MAAM,QAAQ,IAAI,MAAM,GAAG;AAC9C,WAAOC,kBAAiB,IAAI,QAAQ,QAAQ;AAAA,EAChD;AAGA,MAAI,MAAM,QAAQ,MAAM,GAAG;AACvB,WAAOA,kBAAiB,QAAQ,QAAQ;AAAA,EAC5C;AAEA,QAAM,IAAI,MAAM,aAAa,QAAQ,yDAAyD;AAClG;AAKA,SAASA,kBAAiB,KAAgB,UAA+B;AACrE,QAAM,QAAqB,CAAC;AAE5B,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK;AACjC,UAAM,OAAO,IAAI,CAAC;AAClB,QAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC3C,YAAM,IAAI,MAAM,0BAA0B,CAAC,OAAO,QAAQ,iBAAiB;AAAA,IAC/E;AAEA,UAAM,MAAM;AAGZ,QAAI,OAAO,IAAI,UAAU,YAAY,CAAC,IAAI,OAAO;AAC7C,YAAM,IAAI,MAAM,0BAA0B,CAAC,OAAO,QAAQ,oCAAoC;AAAA,IAClG;AAEA,QAAI,OAAO,IAAI,gBAAgB,YAAY,CAAC,IAAI,aAAa;AACzD,YAAM,IAAI,MAAM,0BAA0B,CAAC,OAAO,QAAQ,0CAA0C;AAAA,IACxG;AAGA,QAAI,QAAkB,CAAC;AACvB,QAAI,MAAM,QAAQ,IAAI,KAAK,GAAG;AAC1B,cAAQ,IAAI,MACP,OAAO,OAAK,OAAO,MAAM,QAAQ,EACjC,IAAI,OAAK,OAAO,CAAC,EAAE,KAAK,CAAC,EACzB,OAAO,OAAK,EAAE,SAAS,CAAC;AAAA,IACjC,WAAW,OAAO,IAAI,UAAU,UAAU;AACtC,cAAQ,IAAI,MACP,MAAM,GAAG,EACT,IAAI,OAAK,EAAE,KAAK,CAAC,EACjB,OAAO,OAAK,EAAE,SAAS,CAAC;AAAA,IACjC,OAAO;AACH,cAAQ,CAAC,kBAAkB,OAAO,IAAI,KAAK,CAAC,CAAC;AAAA,IACjD;AAEA,QAAI,MAAM,WAAW,GAAG;AACpB,cAAQ,CAAC,kBAAkB,OAAO,IAAI,KAAK,CAAC,CAAC;AAAA,IACjD;AAEA,UAAM,KAAK;AAAA,MACP,OAAO,kBAAkB,OAAO,IAAI,KAAK,CAAC;AAAA,MAC1C,aAAa,OAAO,IAAI,WAAW,EAAE,KAAK;AAAA,MAC1C;AAAA,IACJ,CAAC;AAAA,EACL;AAEA,SAAO;AACX;AASA,SAAS,iBAAiB,SAAiB,UAA+B;AACtE,QAAM,QAAQ,QAAQ,MAAM,IAAI,EAAE,IAAI,UAAQ,KAAK,KAAK,CAAC,EAAE,OAAO,UAAQ,KAAK,SAAS,CAAC;AAEzF,MAAI,MAAM,WAAW,GAAG;AACpB,UAAM,IAAI,MAAM,iBAAiB,QAAQ,WAAW;AAAA,EACxD;AAGA,QAAM,aAAa,MAAM,CAAC;AAC1B,QAAM,UAAU,aAAa,UAAU;AAGvC,QAAM,WAAW,QAAQ,UAAU,OAAK,EAAE,YAAY,MAAM,OAAO;AACnE,QAAM,UAAU,QAAQ,UAAU,OAAK,EAAE,YAAY,MAAM,iBAAiB,EAAE,YAAY,MAAM,MAAM;AACtG,QAAM,WAAW,QAAQ,UAAU,OAAK,EAAE,YAAY,MAAM,WAAW,EAAE,YAAY,MAAM,MAAM;AAEjG,MAAI,aAAa,IAAI;AACjB,UAAM,IAAI,MAAM,iBAAiB,QAAQ,yBAAyB;AAAA,EACtE;AACA,MAAI,YAAY,IAAI;AAChB,UAAM,IAAI,MAAM,iBAAiB,QAAQ,+BAA+B;AAAA,EAC5E;AAGA,QAAM,QAAqB,CAAC;AAC5B,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACnC,UAAM,MAAM,aAAa,MAAM,CAAC,CAAC;AAEjC,QAAI,IAAI,UAAU,KAAK,IAAI,UAAU,OAAO,GAAG;AAC3C,YAAM,IAAI,MAAM,iBAAiB,QAAQ,QAAQ,IAAI,CAAC,2BAA2B;AAAA,IACrF;AAEA,UAAM,QAAQ,kBAAkB,IAAI,QAAQ,EAAE,KAAK,CAAC;AACpD,UAAM,cAAc,IAAI,OAAO,EAAE,KAAK;AAEtC,QAAI,CAAC,SAAS,CAAC,aAAa;AACxB,YAAM,IAAI,MAAM,iBAAiB,QAAQ,QAAQ,IAAI,CAAC,iCAAiC;AAAA,IAC3F;AAGA,QAAI,QAAkB,CAAC;AACvB,QAAI,aAAa,MAAM,IAAI,QAAQ,GAAG;AAClC,cAAQ,IAAI,QAAQ,EACf,MAAM,GAAG,EACT,IAAI,OAAK,EAAE,KAAK,CAAC,EACjB,OAAO,OAAK,EAAE,SAAS,CAAC;AAAA,IACjC;AAEA,QAAI,MAAM,WAAW,GAAG;AACpB,cAAQ,CAAC,KAAK;AAAA,IAClB;AAEA,UAAM,KAAK;AAAA,MACP;AAAA,MACA;AAAA,MACA;AAAA,IACJ,CAAC;AAAA,EACL;AAEA,SAAO;AACX;AAKA,SAAS,aAAa,MAAwB;AAC1C,QAAM,SAAmB,CAAC;AAC1B,MAAI,UAAU;AACd,MAAI,WAAW;AAEf,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AAClC,UAAM,OAAO,KAAK,CAAC;AACnB,UAAM,WAAW,KAAK,IAAI,CAAC;AAE3B,QAAI,SAAS,KAAK;AACd,UAAI,YAAY,aAAa,KAAK;AAE9B,mBAAW;AACX;AAAA,MACJ,OAAO;AAEH,mBAAW,CAAC;AAAA,MAChB;AAAA,IACJ,WAAW,SAAS,OAAO,CAAC,UAAU;AAElC,aAAO,KAAK,OAAO;AACnB,gBAAU;AAAA,IACd,OAAO;AACH,iBAAW;AAAA,IACf;AAAA,EACJ;AAGA,SAAO,KAAK,OAAO;AAEnB,SAAO;AACX;AA3PA,IASAC,KACAC;AAVA;AAAA;AAAA;AASA,IAAAD,MAAoB;AACpB,IAAAC,QAAsB;AAEtB;AACA;AAAA;AAAA;;;ACbA;AAAA;AAAA;AAQA;AACA;AAEA;AAAA;AAAA;;;ACXA;AAAA;AAAA;AAAA;AA8CA,eA
AsB,aAClB,UACA,SACe;AAEf,QAAM,mBAAwB,cAAQ,QAAQ;AAG9C,MAAI,CAAI,eAAW,gBAAgB,GAAG;AAClC,eAAW,mCAAmC,gBAAgB,EAAE;AAChE,WAAO,WAAW;AAAA,EACtB;AAEA,MAAI,CAAI,aAAS,gBAAgB,EAAE,YAAY,GAAG;AAC9C,eAAW,uCAAuC,gBAAgB,EAAE;AACpE,WAAO,WAAW;AAAA,EACtB;AAGA,cAAY,6CAAwC;AACpD,gBAAc,cAAc,gBAAgB;AAC5C,gBAAc,eAAe,QAAQ,MAAM;AAC3C,gBAAc,cAAc,OAAO,QAAQ,SAAS,CAAC;AACrD,MAAI,QAAQ,OAAO;AACf,kBAAc,SAAS,QAAQ,KAAK;AAAA,EACxC;AACA,UAAQ,OAAO,MAAM,IAAI;AAGzB,QAAM,UAAU,IAAI,QAAQ;AAC5B,UAAQ,MAAM,2BAA2B;AAEzC,MAAI;AACA,UAAM,QAAQ,MAAM,gBAAmB,kBAAkB;AAAA,MACrD,WAAW,QAAQ;AAAA,MACnB,OAAO,QAAQ;AAAA,MACf,SAAS,QAAQ;AAAA,IACrB,CAAC;AAED,YAAQ,QAAQ,2BAA2B;AAG3C,YAAQ,OAAO,MAAM,IAAI;AACzB,gBAAY,eAAe;AAC3B,kBAAc,gBAAgB,OAAO,MAAM,MAAM,CAAC;AAElD,QAAI,QAAQ,SAAS;AACjB,cAAQ,OAAO,MAAM,IAAI;AACzB,gBAAU,SAAS;AACnB,iBAAW,QAAQ,OAAO;AACtB,gBAAQ,OAAO;AAAA,UACX,KAAK,KAAK,KAAK,KAAK,CAAC,IAAI,KAAK,QAAG,CAAC,IAAI,KAAK,WAAW;AAAA;AAAA,QAC1D;AAAA,MACJ;AAAA,IACJ,OAAO;AAEH,cAAQ,OAAO,MAAM,IAAI;AACzB,gBAAU,SAAS;AACnB,iBAAW,QAAQ,OAAO;AACtB,gBAAQ,OAAO,MAAM,KAAK,KAAK,KAAK,KAAK,CAAC;AAAA,CAAI;AAAA,MAClD;AAAA,IACJ;AAGA,UAAM,SAAsB;AAAA,MACxB,SAAS;AAAA,MACT,WAAW,KAAK,IAAI;AAAA,MACpB,UAAU;AAAA,MACV,QAAQ;AAAA,IACZ;AAGA,UAAM,aAAkB,cAAQ,QAAQ,MAAM;AAC9C,UAAM,YAAiB,cAAQ,UAAU;AAEzC,QAAI;AACA,MAAG,cAAU,WAAW,EAAE,WAAW,KAAK,CAAC;AAC3C,MAAG,kBAAc,YAAY,KAAK,UAAU,QAAQ,MAAM,CAAC,GAAG,OAAO;AACrE,cAAQ,OAAO,MAAM,IAAI;AACzB,mBAAa,oBAAoB,KAAK,UAAU,CAAC,EAAE;AAAA,IACvD,SAAS,YAAY;AACjB,mBAAa,4BAA4B,gBAAgB,UAAU,CAAC,EAAE;AACtE,gBAAU,8BAA8B;AAExC,cAAQ,OAAO,MAAM,KAAK,UAAU,QAAQ,MAAM,CAAC,IAAI,IAAI;AAAA,IAC/D;AAEA,WAAO,WAAW;AAAA,EAEtB,SAAS,OAAO;AACZ,YAAQ,KAAK,yBAAyB;AAEtC,QAAI,iBAAiB,YAAY;AAC7B,cAAQ,MAAM,MAAM;AAAA,QAChB,KAAK;AACD,qBAAW,MAAM,OAAO;AACxB,oBAAU,qBAAqB;AAC/B,oBAAU,uCAAuC;AACjD,oBAAU,uCAAuC;AACjD,oBAAU,oCAAoC;AAC9C,iBAAO,WAAW;AAAA,QAEtB,KAAK;AACD,qBAAW,MAAM,OAAO;AACxB,iBAAO,WAAW;AAAA,QAEtB;AACI,qBAAW,MAAM,OAAO;AACxB,iBAAO,WAAW;AAAA,MAC1B;AAAA,IACJ;AAEA,eAAW,gBAAgB,KAAK,CAAC;AACjC,QAAI,QAAQ,WAAW,iBAAiB,SAAS,MAAM,OAAO;AAC1D,cAAQ,OAAO,MAAM,GAAG,KAAK,MAAM,KAAK,CAAC;AAAA,CAAI;AAAA,IACjD;AACA,WAAO,WAAW;AAAA,EACtB;AACJ;AApKA,IASAC,OACAC,KAuBM;AAjCN,IAAAC,cAAA;AAAA;AAAA;AASA,IAAAF,QAAsB;AACtB,IAAAC,MAAoB;AAEpB;AACA;AAYA;AACA;AAOA,IAAM,oBAAoB;AAAA;AAAA;;;ACXnB,SAAS,qBAAqB,UAAkB,OAAwB;AAC3E,QAAM,eAAe,QACf;AAAA;AAAA;AAAA,sCAA0D,KAAK;AAAA;AAAA;AAAA,IAC/D;AAEN,SAAO;AAAA,6EACkE,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA6BnF,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAqBZ,mBAAmB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAcrB;AAaO,SAAS,0BAA0B,UAA0B;AAChE,SAAO;AAAA,6EACkE,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA2BnF,sBAAsB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AASxB;AAgBO,SAAS,4BACZ,UACA,UACA,iBACA,aACM;AACN,SAAO,wEAAwE,WAAW;AAAA,6EACjB,QAAQ;AAAA;AAAA;AAAA;AAAA,eAItE,QAAQ,qDAAqD,eAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oDAMvC,QAAQ;AAAA,eAC7C,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBrB,mBAAmB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAYrB;AA7MA,IAAAE,gBAAA;AAAA;AAAA;AASA;AAAA;AAAA;;;ACgCO,SAAS,yBAAyB,UAA+B;AACpE,QAAM,SAAS,oBAAoB,UAAU,EAAE,SAAS,aAAa,QAAQ,KAAK,CAAC;AAGnF,SAAO,0BAA0B,MAAM;AAC3C;AAaO,SAAS,4BAA4B,UAAwC;AAChF,MAAI,CAAC,YAAY,OAAO,aAAa,UAAU;AAC3C,UAAM,IAAI,MAAM,mCAAmC;AAAA,EACvD;AAEA,QAAM,cAAU,mCAAY,QAAQ;AACpC,MAAI,CAAC,SAAS;AACV,UAAM,
IAAI,MAAM,2CAA2C;AAAA,EAC/D;AAEA,MAAI;AACJ,MAAI;AACA,aAAS,KAAK,MAAM,OAAO;AAAA,EAC/B,QAAQ;AACJ,UAAM,QAAQ,kBAAkB,OAAO;AACvC,QAAI,OAAO;AACP,eAAS,KAAK,MAAM,KAAK;AAAA,IAC7B,OAAO;AACH,YAAM,IAAI,MAAM,0CAA0C;AAAA,IAC9D;AAAA,EACJ;AAEA,MAAI,OAAO,WAAW,YAAY,WAAW,MAAM;AAC/C,UAAM,IAAI,MAAM,+CAA+C;AAAA,EACnE;AAEA,QAAM,MAAM;AAEZ,SAAO;AAAA,IACH,WAAW,OAAO,IAAI,cAAc,WAAW,IAAI,YAAY;AAAA,IAC/D,OAAO,WAAW,IAAI,KAAK;AAAA,IAC3B,aAAa,wBAAwB,IAAI,WAAW;AAAA,EACxD;AACJ;AASA,SAAS,0BAA0B,KAA2C;AAC1E,QAAM,WAAqB,CAAC;AAG5B,aAAW,SAAS,8BAA8B;AAC9C,QAAI,EAAE,SAAS,MAAM;AACjB,UAAI,UAAU,cAAc;AACxB,YAAI,aAAa,CAAC;AAClB,iBAAS,KAAK,YAAY,KAAK,8BAA8B;AAAA,MACjE,OAAO;AACH,cAAM,IAAI,MAAM,2BAA2B,KAAK,mBAAmB;AAAA,MACvE;AAAA,IACJ;AAAA,EACJ;AAGA,QAAM,UAAU,iBAAiB,IAAI,OAAO;AAG5C,QAAM,UAAU,aAAa,IAAI,SAAS,QAAQ;AAGlD,QAAM,aAAa,gBAAgB,IAAI,YAAY,QAAQ;AAG3D,QAAM,oBAAoB,OAAO,IAAI,sBAAsB,WACrD,IAAI,oBACJ;AAGN,QAAM,gBAAgB,IAAI,IAAI,WAAW,IAAI,OAAK,EAAE,IAAI,CAAC;AACzD,aAAW,OAAO,SAAS;AACvB,QAAI,IAAI,YAAY,CAAC,cAAc,IAAI,IAAI,QAAQ,GAAG;AAElD,iBAAW,KAAK,EAAE,MAAM,IAAI,UAAU,aAAa,+BAA+B,IAAI,QAAQ,GAAG,CAAC;AAClG,oBAAc,IAAI,IAAI,QAAQ;AAC9B,eAAS,KAAK,gCAAgC,IAAI,QAAQ,GAAG;AAAA,IACjE;AAAA,EACJ;AAGA,QAAM,YAAY,IAAI,IAAI,QAAQ,IAAI,OAAK,EAAE,EAAE,CAAC;AAChD,aAAW,OAAO,SAAS;AACvB,QAAI,eAAe,IAAI,aAAa,OAAO,SAAO;AAC9C,UAAI,CAAC,UAAU,IAAI,GAAG,GAAG;AACrB,iBAAS,KAAK,WAAW,IAAI,EAAE,oCAAoC,GAAG,aAAa;AACnF,eAAO;AAAA,MACX;AACA,aAAO;AAAA,IACX,CAAC;AACD,QAAI,aAAa,IAAI,WAAW,OAAO,SAAO;AAC1C,UAAI,CAAC,UAAU,IAAI,GAAG,GAAG;AACrB,iBAAS,KAAK,WAAW,IAAI,EAAE,mCAAmC,GAAG,aAAa;AAClF,eAAO;AAAA,MACX;AACA,aAAO;AAAA,IACX,CAAC;AAAA,EACL;AAGA,QAAM,UAAU,oBAAI,IAAY;AAChC,QAAM,sBAAoC,CAAC;AAC3C,aAAW,OAAO,SAAS;AACvB,QAAI,QAAQ,IAAI,IAAI,EAAE,GAAG;AACrB,eAAS,KAAK,wBAAwB,IAAI,EAAE,6BAA6B;AACzE;AAAA,IACJ;AACA,YAAQ,IAAI,IAAI,EAAE;AAClB,wBAAoB,KAAK,GAAG;AAAA,EAChC;AAEA,MAAI,SAAS,SAAS,GAAG;AAErB,eAAW,KAAK,UAAU;AACtB,cAAQ,OAAO,MAAM,UAAU,CAAC;AAAA,CAAI;AAAA,IACxC;AAAA,EACJ;AAEA,SAAO;AAAA,IACH;AAAA,IACA,SAAS;AAAA,IACT;AAAA,IACA;AAAA,EACJ;AACJ;AAKA,SAAS,iBAAiB,KAA2B;AACjD,MAAI,OAAO,QAAQ,YAAY,QAAQ,MAAM;AACzC,UAAM,IAAI,MAAM,oDAAoD;AAAA,EACxE;AAEA,QAAM,MAAM;AAGZ,aAAW,SAAS,8BAA8B;AAC9C,QAAI,EAAE,SAAS,QAAQ,OAAO,IAAI,KAAK,MAAM,UAAU;AAAA,IAEvD;AAAA,EACJ;AAEA,SAAO;AAAA,IACH,MAAM,OAAO,IAAI,QAAQ,SAAS;AAAA,IAClC,aAAa,OAAO,IAAI,eAAe,EAAE;AAAA,IACzC,UAAU,OAAO,IAAI,YAAY,SAAS;AAAA,IAC1C,aAAa,OAAO,IAAI,eAAe,SAAS;AAAA,IAChD,aAAa,iBAAiB,IAAI,WAAW;AAAA,EACjD;AACJ;AAKA,SAAS,aAAa,KAAc,UAAkC;AAClE,MAAI,CAAC,MAAM,QAAQ,GAAG,GAAG;AACrB,UAAM,IAAI,MAAM,kCAAkC;AAAA,EACtD;AAEA,QAAM,UAAwB,CAAC;AAE/B,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK;AACjC,UAAM,OAAO,IAAI,CAAC;AAClB,QAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC3C,eAAS,KAAK,oCAAoC,CAAC,EAAE;AACrD;AAAA,IACJ;AAEA,UAAM,MAAM;AAGZ,QAAI,cAAc;AAClB,eAAW,SAAS,6BAA6B;AAC7C,UAAI,EAAE,SAAS,QAAQ,OAAO,IAAI,KAAK,MAAM,UAAU;AACnD,iBAAS,KAAK,mBAAmB,CAAC,4BAA4B,KAAK,aAAa;AAChF,sBAAc;AACd;AAAA,MACJ;AAAA,IACJ;AACA,QAAI,CAAC,aAAa;AAAE;AAAA,IAAU;AAG9B,QAAI,KAAK,OAAO,IAAI,EAAE;AACtB,QAAI,CAAC,gBAAgB,EAAE,GAAG;AACtB,YAAM,aAAa,kBAAkB,EAAE;AACvC,eAAS,KAAK,yBAAyB,EAAE,aAAQ,UAAU,GAAG;AAC9D,WAAK;AAAA,IACT;AAGA,QAAI,aAAa,OAAO,IAAI,QAAQ,EAAE;AACtC,iBAAa,cAAc,UAAU;AAGrC,QAAI,aAAa,OAAO,IAAI,cAAc,QAAQ,EAAE,YAAY;AAChE,QAAI,CAAC,wBAAwB,SAAS,UAAoD,GAAG;AACzF,eAAS,KAAK,WAAW,EAAE,6BAA6B,UAAU,2BAA2B;AAC7F,mBAAa;AAAA,IACjB;AAEA,YAAQ,KAAK;AAAA,MACT;AAAA,MACA,MAAM,OAAO,IAAI,IAAI;AAAA,MACrB,MAAM;AAAA,MACN,SAAS,OAAO,IAAI,WAAW,EAAE;AAAA,MACjC,UAAU,iBAAiB,IAAI,QAAQ,EAAE,IAAI,aAAa;AAAA,MAC1D,cAAc,iBAAiB,IAAI,YAAY;AAAA,MAC/C,YAAY,iBAAiB,IAAI,UAAU;AAAA,MAC3C;AAAA,MACA,UAAU,OAAO,IAAI,YAAY,SAAS;AAAA,IAC9C,CAAC;AAAA,EACL;AAEA,SAAO;AACX;AAKA,SAAS,gBAAgB
,KAAc,UAAoC;AACvE,MAAI,CAAC,MAAM,QAAQ,GAAG,GAAG;AACrB,aAAS,KAAK,mDAAmD;AACjE,WAAO,CAAC;AAAA,EACZ;AAEA,QAAM,aAA6B,CAAC;AAEpC,aAAW,QAAQ,KAAK;AACpB,QAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAAE;AAAA,IAAU;AAC3D,UAAM,MAAM;AAEZ,QAAI,OAAO,IAAI,SAAS,YAAY,CAAC,IAAI,MAAM;AAAE;AAAA,IAAU;AAE3D,eAAW,KAAK;AAAA,MACZ,MAAM,OAAO,IAAI,IAAI;AAAA,MACrB,aAAa,OAAO,IAAI,eAAe,EAAE;AAAA,IAC7C,CAAC;AAAA,EACL;AAEA,SAAO;AACX;AAKA,SAAS,WAAW,KAA8B;AAC9C,MAAI,CAAC,MAAM,QAAQ,GAAG,GAAG;AAAE,WAAO,CAAC;AAAA,EAAG;AAEtC,QAAM,QAAwB,CAAC;AAC/B,aAAW,QAAQ,KAAK;AACpB,QAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAAE;AAAA,IAAU;AAC3D,UAAM,MAAM;AAEZ,UAAM,KAAK;AAAA,MACP,MAAM,OAAO,IAAI,QAAQ,EAAE;AAAA,MAC3B,MAAM,cAAc,OAAO,IAAI,QAAQ,EAAE,CAAC;AAAA,MAC1C,aAAa,OAAO,IAAI,eAAe,EAAE;AAAA,IAC7C,CAAC;AAAA,EACL;AAEA,SAAO;AACX;AAKA,SAAS,wBAAwB,KAAoC;AACjE,MAAI,OAAO,QAAQ,YAAY,QAAQ,MAAM;AAAE,WAAO,CAAC;AAAA,EAAG;AAC1D,QAAM,MAAM;AAEZ,QAAM,SAA+B,CAAC;AACtC,MAAI,OAAO,IAAI,SAAS,UAAU;AAAE,WAAO,OAAO,IAAI;AAAA,EAAM;AAC5D,MAAI,OAAO,IAAI,gBAAgB,UAAU;AAAE,WAAO,cAAc,IAAI;AAAA,EAAa;AACjF,MAAI,OAAO,IAAI,aAAa,UAAU;AAAE,WAAO,WAAW,IAAI;AAAA,EAAU;AACxE,MAAI,OAAO,IAAI,gBAAgB,UAAU;AAAE,WAAO,cAAc,IAAI;AAAA,EAAa;AACjF,MAAI,MAAM,QAAQ,IAAI,WAAW,GAAG;AAAE,WAAO,cAAc,iBAAiB,IAAI,WAAW;AAAA,EAAG;AAE9F,SAAO;AACX;AASA,SAAS,iBAAiB,KAAwB;AAC9C,MAAI,CAAC,MAAM,QAAQ,GAAG,GAAG;AAAE,WAAO,CAAC;AAAA,EAAG;AACtC,SAAO,IACF,OAAO,UAAQ,OAAO,SAAS,QAAQ,EACvC,IAAI,UAAQ,OAAO,IAAI,CAAC;AACjC;AAKO,SAAS,cAAc,GAAmB;AAC7C,SAAO,EACF,QAAQ,OAAO,GAAG,EAClB,QAAQ,SAAS,EAAE,EACnB,QAAQ,QAAQ,GAAG;AAC5B;AA5WA,IAUAC;AAVA,IAAAC,wBAAA;AAAA;AAAA;AAUA,IAAAD,wBAA4B;AAE5B;AAQA;AAAA;AAAA;;;ACqBA,SAASE,qBAAoB,SAAqD;AAC9E,MAAI,QAAQ,SAAS,QAAQ;AACzB,WAAO,EAAE,MAAM,WAAW;AAAA,EAC9B;AACA,SAAO,EAAE,MAAM,kBAAkB;AACrC;AA8BA,eAAsB,oBAAoB,SAA4D;AAClG,QAAM,cAAU,4CAAqB;AAGrC,YAAU,sCAAsC;AAChD,QAAM,eAAe,MAAM,QAAQ,YAAY;AAC/C,MAAI,CAAC,cAAc;AACf,UAAM,IAAI;AAAA,MACN;AAAA,MACA;AAAA,IACJ;AAAA,EACJ;AAGA,YAAU,6BAA6B,QAAQ,QAAQ,eAAe,QAAQ,KAAK,KAAK,qBAAqB,KAAK;AAClH,QAAM,SAAS,qBAAqB,QAAQ,UAAU,QAAQ,KAAK;AAGnE,QAAM,YAAY,QAAQ,WAAW;AACrC,QAAM,cAAkC;AAAA,IACpC;AAAA,IACA,kBAAkB,QAAQ;AAAA,IAC1B,gBAAgB;AAAA,IAChB,qBAAqBA;AAAA,IACrB,SAAS;AAAA;AAAA,IACT;AAAA,EACJ;AAGA,MAAI,QAAQ,OAAO;AACf,gBAAY,QAAQ,QAAQ;AAAA,EAChC;AAGA,YAAU,kCAAkC,KAAK,aAAa,YAAY,GAAI,aAAa,gBAAgB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE;AAC3H,QAAM,SAAS,MAAM,QAAQ,YAAY,WAAW;AAEpD,MAAI,CAAC,OAAO,SAAS;AACjB,UAAM,WAAW,OAAO,SAAS;AACjC,QAAI,SAAS,YAAY,EAAE,SAAS,SAAS,GAAG;AAC5C,YAAM,IAAI;AAAA,QACN,6BAA6B,YAAY,GAAI;AAAA,QAE7C;AAAA,MACJ;AAAA,IACJ;AACA,UAAM,IAAI,eAAe,wBAAwB,QAAQ,IAAI,UAAU;AAAA,EAC3E;AAEA,MAAI,CAAC,OAAO,UAAU;AAClB,UAAM,IAAI,eAAe,8BAA8B,gBAAgB;AAAA,EAC3E;AAGA,YAAU,0CAA0C;AACpD,MAAI;AACA,UAAM,QAAQ,yBAAyB,OAAO,QAAQ;AACtD,cAAU,UAAU,MAAM,QAAQ,MAAM,mBAAmB,MAAM,WAAW,MAAM,aAAa;AAC/F,WAAO,EAAE,OAAO,YAAY,OAAO,WAAW;AAAA,EAClD,SAAS,YAAY;AAEjB,iBAAa,6BAA6B,gBAAgB,UAAU,CAAC,oCAAoC;AACzG,UAAM,cAAc,SAAS;AAE7B,UAAM,eAAmC;AAAA,MACrC,GAAG;AAAA,MACH,QAAQ;AAAA,IACZ;AAEA,UAAM,cAAc,MAAM,QAAQ,YAAY,YAAY;AAE1D,QAAI,CAAC,YAAY,WAAW,CAAC,YAAY,UAAU;AAC/C,YAAM,IAAI;AAAA,QACN,gCAAgC,gBAAgB,UAAU,CAAC;AAAA,QAC3D;AAAA,MACJ;AAAA,IACJ;AAEA,UAAM,QAAQ,yBAAyB,YAAY,QAAQ;AAC3D,cAAU,iCAA4B,MAAM,QAAQ,MAAM,UAAU;AAEpE,UAAM,cAAc,gBAAgB,OAAO,YAAY,YAAY,UAAU;AAC7E,WAAO,EAAE,OAAO,YAAY,YAAY;AAAA,EAC5C;AACJ;AASA,SAAS,gBAAgB,GAAgB,GAAwC;AAC7E,MAAI,CAAC,KAAK,CAAC,GAAG;AAAE,WAAO;AAAA,EAAW;AAClC,MAAI,CAAC,GAAG;AAAE,WAAO;AAAA,EAAG;AACpB,MAAI,CAAC,GAAG;AAAE,WAAO;AAAA,EAAG;AACpB,SAAO;AAAA,IACH,aAAa,EAAE,cAAc,EAAE;AAAA,IAC/B,cAAc,EAAE,eAAe,EAAE;AAAA,IACjC,iBAAiB,EAAE,kBAAkB,EAAE;AAAA,IACvC,kBAAkB,EAAE,mBAAmB,EAAE;AAAA,IACzC,aAAa,EAAE,cAAc,EAAE;AAAA,IAC/B,OAAO,EAAE,QAAQ,MAAM,EAAE,QAA
Q,MAAM;AAAA,IACvC,WAAW,EAAE,YAAY,EAAE;AAAA,EAC/B;AACJ;AAtLA,IAUAC,uBAkBM,8BAGA,iBAgKO;AA/Lb;AAAA;AAAA;AAUA,IAAAA,wBAMO;AAEP,IAAAC;AACA,IAAAC;AACA;AACA;AAOA,IAAM,+BAA+B;AAGrC,IAAM,kBAAkB,CAAC,QAAQ,QAAQ,MAAM;AAgKxC,IAAM,iBAAN,cAA6B,MAAM;AAAA,MACtC,YACI,SACgB,MAClB;AACE,cAAM,OAAO;AAFG;AAGhB,aAAK,OAAO;AAAA,MAChB;AAAA,IACJ;AAAA;AAAA;;;AC/KA,eAAsB,WAAW,UAA0C;AACvE,MAAI;AACA,UAAM,SAAS,UAAM,iCAAU,iCAAiC,EAAE,KAAK,SAAS,CAAC;AACjF,UAAM,OAAO,OAAO,OAAO,KAAK;AAChC,QAAI,KAAK,SAAS,GAAG;AACjB,aAAO;AAAA,IACX;AACA,WAAO;AAAA,EACX,QAAQ;AACJ,WAAO;AAAA,EACX;AACJ;AAYA,eAAsB,gBAAgB,UAA0C;AAC5E,MAAI;AACA,UAAM,SAAS,UAAM,iCAAU,sBAAsB,EAAE,KAAK,SAAS,CAAC;AACtE,UAAM,OAAO,OAAO,OAAO,KAAK;AAEhC,QAAI,iBAAiB,KAAK,IAAI,GAAG;AAC7B,aAAO;AAAA,IACX;AACA,WAAO;AAAA,EACX,QAAQ;AACJ,WAAO;AAAA,EACX;AACJ;AAgBA,eAAsB,kBAAkB,UAA0C;AAC9E,MAAI;AACA,UAAM,UAAU,MAAM,WAAW,QAAQ;AACzC,QAAI,CAAC,SAAS;AACV,aAAO;AAAA,IACX;AAEA,UAAM,eAAoB,cAAQ,QAAQ;AAC1C,UAAM,eAAoB,cAAQ,OAAO;AAGzC,QAAI,iBAAiB,cAAc;AAC/B,aAAO,gBAAgB,QAAQ;AAAA,IACnC;AAIA,UAAM,eAAoB,eAAS,cAAc,YAAY,EAAE,QAAQ,OAAO,GAAG;AACjF,UAAM,SAAS,UAAM;AAAA,MACjB,8BAA8B,YAAY;AAAA,MAC1C,EAAE,KAAK,aAAa;AAAA,IACxB;AACA,UAAM,OAAO,OAAO,OAAO,KAAK;AAGhC,QAAI,iBAAiB,KAAK,IAAI,GAAG;AAC7B,aAAO;AAAA,IACX;AAGA,WAAO,gBAAgB,QAAQ;AAAA,EACnC,QAAQ;AACJ,WAAO;AAAA,EACX;AACJ;AAsBA,eAAsB,gBAClB,UACA,WACA,WACwB;AACxB,MAAI;AACA,UAAM,SAAS,UAAM,iCAAU,wBAAwB,SAAS,SAAS,EAAE,KAAK,SAAS,CAAC;AAC1F,QAAI,QAAQ,OAAO,OACd,KAAK,EACL,MAAM,IAAI,EACV,OAAO,UAAQ,KAAK,SAAS,CAAC;AAGnC,QAAI,WAAW;AACX,YAAM,UAAU,MAAM,WAAW,QAAQ;AACzC,UAAI,SAAS;AACT,cAAM,gBAAqB,cAAQ,SAAS;AAC5C,cAAM,eAAoB,cAAQ,OAAO;AAEzC,cAAM,gBAAqB,eAAS,cAAc,aAAa,EAAE,QAAQ,OAAO,GAAG;AAEnF,YAAI,iBAAiB,kBAAkB,KAAK;AACxC,gBAAM,SAAS,gBAAgB;AAC/B,kBAAQ,MACH,OAAO,OAAK;AACT,kBAAM,aAAa,EAAE,QAAQ,OAAO,GAAG;AACvC,mBAAO,WAAW,WAAW,MAAM,KAAK,eAAe;AAAA,UAC3D,CAAC,EACA,IAAI,OAAK;AACN,kBAAM,aAAa,EAAE,QAAQ,OAAO,GAAG;AACvC,mBAAO,WAAW,WAAW,MAAM,IAAI,WAAW,MAAM,OAAO,MAAM,IAAI;AAAA,UAC7E,CAAC;AAAA,QACT;AAAA,MAEJ;AAAA,IACJ;AAEA,WAAO;AAAA,EACX,QAAQ;AACJ,WAAO;AAAA,EACX;AACJ;AA5KA,IAWAC,OACAC;AAZA;AAAA;AAAA;AAWA,IAAAD,QAAsB;AACtB,IAAAC,wBAA0B;AAAA;AAAA;;;ACkBnB,SAAS,cAAiB,WAA6B;AAC1D,MAAI;AACA,QAAI,CAAI,eAAW,SAAS,GAAG;AAC3B,aAAO;AAAA,IACX;AACA,UAAM,UAAa,iBAAa,WAAW,OAAO;AAClD,WAAO,KAAK,MAAM,OAAO;AAAA,EAC7B,QAAQ;AACJ,WAAO;AAAA,EACX;AACJ;AAUO,SAAS,gBACZ,WACA,UACQ;AACR,QAAM,OAAO,cAAiB,SAAS;AACvC,MAAI,SAAS,MAAM;AACf,WAAO;AAAA,EACX;AACA,SAAO,SAAS,IAAI,IAAI,OAAO;AACnC;AAeO,SAAS,eAAkB,WAAmB,MAAe;AAChE,QAAM,MAAW,cAAQ,SAAS;AAClC,EAAG,cAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAErC,QAAM,WAAW,YAAY;AAC7B,EAAG,kBAAc,UAAU,KAAK,UAAU,MAAM,MAAM,CAAC,GAAG,OAAO;AACjE,EAAG,eAAW,UAAU,SAAS;AACrC;AA8BO,SAAS,cAAc,SAA0B;AACpD,MAAI,CAAI,eAAW,OAAO,GAAG;AACzB,WAAO;AAAA,EACX;AACA,MAAI;AACA,IAAG,WAAO,SAAS,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AACnD,WAAO;AAAA,EACX,QAAQ;AACJ,WAAO;AAAA,EACX;AACJ;AAoBO,SAAS,eACZ,KACA,cACA,WACA,WACuC;AACvC,QAAM,QAAmB,CAAC;AAC1B,QAAM,UAAoB,CAAC;AAE3B,aAAW,MAAM,KAAK;AAClB,UAAM,YAAY,aAAa,EAAE;AACjC,QAAI,CAAC,WAAW;AACZ,cAAQ,KAAK,EAAE;AACf;AAAA,IACJ;AAEA,UAAM,SAAS,cAAsB,SAAS;AAC9C,QAAI,UAAU,UAAU,MAAM,GAAG;AAC7B,YAAM,KAAK,UAAU,MAAM,CAAC;AAAA,IAChC,OAAO;AACH,cAAQ,KAAK,EAAE;AAAA,IACnB;AAAA,EACJ;AAEA,SAAO,EAAE,OAAO,QAAQ;AAC5B;AAcO,SAAS,kBACZ,KACA,cACA,WACA,WACkD;AAClD,QAAM,QAAQ,oBAAI,IAAqB;AACvC,QAAM,UAAoB,CAAC;AAE3B,aAAW,MAAM,KAAK;AAClB,UAAM,YAAY,aAAa,EAAE;AACjC,QAAI,CAAC,WAAW;AACZ,cAAQ,KAAK,EAAE;AACf;AAAA,IACJ;AAEA,UAAM,SAAS,cAAsB,SAAS;AAC9C,QAAI,UAAU,UAAU,MAAM,GAAG;AAC7B,YAAM,IAAI,IAAI,UAAU,MAAM,CAAC;AAAA,IACnC,OAAO;AACH,cAAQ,KAAK,EAAE;AAAA,IACnB;AAAA,EACJ;AAEA,SAAO,EAAE,OAAO,QAAQ;AAC5B;AA7MA,IAgBAC,KACAC;AAjBA;AAAA;AAAA;AAgBA,IAAAD,
MAAoB;AACpB,IAAAC,QAAsB;AAAA;AAAA;;;ACyBf,SAAS,YAAY,WAA2B;AACnD,SAAY,WAAU,cAAQ,SAAS,GAAG,cAAc;AAC5D;AA5CA,IAOAC,OAGa,gBAGA,kBAGA,cAGA,cAGA,yBAGA,wBAGA,sBAGA,uBAGA;AAlCb;AAAA;AAAA;AAOA,IAAAA,QAAsB;AAGf,IAAM,iBAAiB;AAGvB,IAAM,mBAAmB;AAGzB,IAAM,eAAe;AAGrB,IAAM,eAAe;AAGrB,IAAM,0BAA0B;AAGhC,IAAM,yBAAyB;AAG/B,IAAM,uBAAuB;AAG7B,IAAM,wBAAwB;AAG9B,IAAM,gBAAgB;AAAA;AAAA;;;AC+CtB,SAAS,qBAAqB,WAA2B;AAC5D,SAAY,WAAU,cAAQ,SAAS,GAAGC,iBAAgB,aAAa;AAC3E;AAKA,SAAS,kBAAkB,WAA2B;AAClD,SAAY,WAAK,qBAAqB,SAAS,GAAG,UAAU;AAChE;AAKA,SAAS,iBAAiB,WAA2B;AACjD,SAAY,WAAK,qBAAqB,SAAS,GAAG,SAAS;AAC/D;AAKA,SAAS,kBAAkB,WAAmB,OAAuB;AACjE,QAAM,OAAO,kBAAkB,KAAK;AACpC,SAAY,WAAK,kBAAkB,SAAS,GAAG,GAAG,IAAI,OAAO;AACjE;AAKA,SAAS,iBAAiB,WAAmB,QAAwB;AACjE,QAAM,OAAO,kBAAkB,MAAM;AACrC,SAAY,WAAK,iBAAiB,SAAS,GAAG,GAAG,IAAI,OAAO;AAChE;AAgBO,SAAS,eACZ,OACA,WACA,SACI;AACJ,iBAAiC,WAAK,qBAAqB,SAAS,GAAG,UAAU,GAAG;AAAA,IAChF;AAAA,IACA;AAAA,IACA,WAAW,KAAK,IAAI;AAAA,EACxB,CAAC;AACL;AASO,SAAS,eACZ,WACA,SACkB;AAClB,QAAM,SAAS;AAAA,IACN,WAAK,qBAAqB,SAAS,GAAG,UAAU;AAAA,IACrD,CAAC,MAAM,CAAC,CAAC,EAAE,SAAS,EAAE,YAAY;AAAA,EACtC;AACA,SAAO,QAAQ,SAAS;AAC5B;AAQO,SAAS,kBACZ,WACkB;AAClB,QAAM,SAAS;AAAA,IACN,WAAK,qBAAqB,SAAS,GAAG,UAAU;AAAA,IACrD,CAAC,MAAM,CAAC,CAAC,EAAE;AAAA,EACf;AACA,SAAO,QAAQ,SAAS;AAC5B;AAcO,SAAS,gBACZ,OACA,QACA,WACA,SACI;AACJ,iBAAkC,kBAAkB,WAAW,KAAK,GAAG;AAAA,IACnE,aAAa;AAAA,IACb;AAAA,IACA,WAAW,KAAK,IAAI;AAAA,EACxB,CAAC;AACL;AA8BO,SAAS,iBACZ,QACA,WACA,SAC2D;AAC3D,SAAO;AAAA,IACH;AAAA,IACA,CAAC,UAAU,kBAAkB,WAAW,KAAK;AAAA,IAC7C,CAAC,WAAW,CAAC,CAAC,OAAO,eAAe,OAAO,YAAY;AAAA,IACvD,CAAC,WAAW,OAAO;AAAA,EACvB;AACJ;AAKO,SAAS,oBACZ,QACA,WAC2D;AAC3D,SAAO;AAAA,IACH;AAAA,IACA,CAAC,UAAU,kBAAkB,WAAW,KAAK;AAAA,IAC7C,CAAC,WAAW,CAAC,CAAC,OAAO;AAAA,IACrB,CAAC,WAAW,OAAO;AAAA,EACvB;AACJ;AAaO,SAAS,mBACZ,MACA,WACA,SACI;AACJ,iBAA0C,WAAK,qBAAqB,SAAS,GAAG,oBAAoB,GAAG;AAAA,IACnG,YAAY;AAAA,IACZ;AAAA,IACA,WAAW,KAAK,IAAI;AAAA,EACxB,CAAC;AACL;AASO,SAAS,wBACZ,WACA,SAC2B;AAC3B,QAAM,SAAS;AAAA,IACN,WAAK,qBAAqB,SAAS,GAAG,oBAAoB;AAAA,IAC/D,CAAC,MAAM,CAAC,CAAC,EAAE,cAAc,EAAE,YAAY;AAAA,EAC3C;AACA,SAAO,QAAQ,cAAc;AACjC;AAKO,SAAS,2BACZ,WAC2B;AAC3B,QAAM,SAAS;AAAA,IACN,WAAK,qBAAqB,SAAS,GAAG,oBAAoB;AAAA,IAC/D,CAAC,MAAM,CAAC,CAAC,EAAE;AAAA,EACf;AACA,SAAO,QAAQ,cAAc;AACjC;AAcO,SAAS,iBACZ,QACA,OACA,WACA,SACI;AACJ,iBAAgC,iBAAiB,WAAW,MAAM,GAAG;AAAA,IACjE;AAAA,IACA;AAAA,IACA,WAAW,KAAK,IAAI;AAAA,EACxB,CAAC;AACL;AAUO,SAAS,sBACZ,QACA,WACA,SACkB;AAClB,QAAM,SAAS;AAAA,IACX,iBAAiB,WAAW,MAAM;AAAA,IAClC,CAAC,MAAM,CAAC,CAAC,EAAE,SAAS,EAAE,YAAY;AAAA,EACtC;AACA,SAAO,QAAQ,SAAS;AAC5B;AAgDO,SAAS,sBACZ,UACA,WACI;AACJ,iBAAoB,WAAK,qBAAqB,SAAS,GAAG,aAAa,GAAG,QAAQ;AACtF;AAQO,SAAS,qBACZ,WACgC;AAChC,SAAO,cAA8C,WAAK,qBAAqB,SAAS,GAAG,aAAa,CAAC;AAC7G;AAYO,SAAS,oBAAoB,WAA4B;AAC5D,SAAO,cAAc,qBAAqB,SAAS,CAAC;AACxD;AAhbA,IA2BAC,OAwBMD,iBAGA,eAGA,YAGA,WAGA,eAGA,YAGA;AArEN;AAAA;AAAA;AA2BA,IAAAC,QAAsB;AAgBtB;AACA;AAOA,IAAMD,kBAAiB;AAGvB,IAAM,gBAAgB;AAGtB,IAAM,aAAa;AAGnB,IAAM,YAAY;AAGlB,IAAM,gBAAgB;AAGtB,IAAM,aAAa;AAGnB,IAAM,uBAAuB;AAAA;AAAA;;;ACxCtB,SAAS,kBAAkB,WAA2B;AACzD,SAAY,WAAK,YAAY,SAAS,GAAG,gBAAgB;AAC7D;AAaA,eAAsB,eAAe,UAAkB,WAAgD;AACnG,QAAM,SAAS;AAAA,IACX,kBAAkB,SAAS;AAAA,IAC3B,CAAC,MAAM,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,EAAE;AAAA,EAC/B;AACA,MAAI,CAAC,QAAQ;AACT,WAAO;AAAA,EACX;AAEA,MAAI;AACA,UAAM,cAAc,MAAM,kBAAkB,QAAQ;AACpD,QAAI,CAAC,eAAe,gBAAgB,OAAO,SAAS,SAAS;AACzD,aAAO;AAAA,IACX;AAAA,EACJ,QAAQ;AACJ,WAAO;AAAA,EACX;AAEA,SAAO;AACX;AAQO,SAAS,kBAAkB,WAAuC;AACrE,SAAO;AAAA,IACH,kBAAkB,SAAS;AAAA,IAC3B,CAAC,MAAM,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,EAAE;AAAA,EAC/B;AACJ;AAcA,eAAsB,UAClB,UACA,OACA,WACA,OACa;AACb,QAAM,cAAc,MAAM,kBAAkB,QAAQ;A
ACpD,MAAI,CAAC,aAAa;AAEd;AAAA,EACJ;AAEA,QAAM,WAA0B;AAAA,IAC5B,SAAS;AAAA,IACT,WAAW,KAAK,IAAI;AAAA,IACpB,SAAS;AAAA,IACT;AAAA,EACJ;AAEA,iBAA4B,kBAAkB,SAAS,GAAG,EAAE,UAAU,MAAM,CAAC;AACjF;AA9GA,IAOAE;AAPA;AAAA;AAAA;AAOA,IAAAA,QAAsB;AAQtB;AACA;AACA;AAAA;AAAA;;;ACWO,SAAS,8BAA8B,WAA2B;AACrE,SAAY,WAAK,YAAY,SAAS,GAAG,uBAAuB;AACpE;AAkBA,eAAsB,uBAClB,UACA,WACA,kBACmC;AACnC,QAAM,SAAS;AAAA,IACX,8BAA8B,SAAS;AAAA,IACvC,CAAC,MAAM,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,EAAE,oBAAoB,EAAE,qBAAqB;AAAA,EACtF;AACA,MAAI,CAAC,QAAQ;AACT,WAAO;AAAA,EACX;AAEA,MAAI;AACA,UAAM,cAAc,MAAM,kBAAkB,QAAQ;AACpD,QAAI,CAAC,eAAe,gBAAgB,OAAO,SAAS;AAChD,aAAO;AAAA,IACX;AAAA,EACJ,QAAQ;AACJ,WAAO;AAAA,EACX;AAEA,SAAO;AACX;AAYO,SAAS,0BACZ,WACA,kBAC0B;AAC1B,SAAO;AAAA,IACH,8BAA8B,SAAS;AAAA,IACvC,CAAC,MAAM,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,EAAE,oBAAoB,EAAE,qBAAqB;AAAA,EACtF;AACJ;AAcA,eAAsB,kBAClB,UACA,OACA,WACA,kBACa;AACb,QAAM,cAAc,MAAM,kBAAkB,QAAQ;AACpD,MAAI,CAAC,aAAa;AACd;AAAA,EACJ;AAEA,iBAAoC,8BAA8B,SAAS,GAAG;AAAA,IAC1E;AAAA,IACA,SAAS;AAAA,IACT;AAAA,IACA,WAAW,KAAK,IAAI;AAAA,EACxB,CAAC;AACL;AA1HA,IAOAC;AAPA;AAAA;AAAA;AAOA,IAAAA,QAAsB;AAOtB;AACA;AACA;AAAA;AAAA;;;ACYO,SAAS,oBAAoB,WAA2B;AAC3D,SAAY,YAAK,YAAY,SAAS,GAAG,YAAY;AACzD;AAKO,SAAS,qBAAqB,WAAmB,UAA0B;AAC9E,SAAY,YAAK,oBAAoB,SAAS,GAAG,GAAG,QAAQ,OAAO;AACvE;AAKO,SAAS,wBAAwB,WAA2B;AAC/D,SAAY,YAAK,oBAAoB,SAAS,GAAG,sBAAsB;AAC3E;AAaO,SAAS,kBAAkB,UAAkB,WAA0C;AAC1F,QAAM,SAAS;AAAA,IACX,qBAAqB,WAAW,QAAQ;AAAA,IACxC,CAAC,MAAM,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,EAAE,SAAS;AAAA,EACxC;AACA,SAAO,QAAQ,YAAY;AAC/B;AAQO,SAAS,kBAAkB,WAA4C;AAC1E,QAAM,WAAW;AAAA,IACb,wBAAwB,SAAS;AAAA,IACjC,CAAC,MAAM,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,EAAE;AAAA,EAC9B;AACA,MAAI,CAAC,UAAU;AACX,WAAO;AAAA,EACX;AAGA,QAAM,cAAc,oBAAoB,SAAS;AACjD,QAAM,WAA6B,CAAC;AAEpC,MAAI;AACA,UAAM,QAAW,gBAAY,WAAW;AACxC,eAAW,QAAQ,OAAO;AACtB,UAAI,SAAS,0BAA0B,CAAC,KAAK,SAAS,OAAO,GAAG;AAC5D;AAAA,MACJ;AAEA,YAAM,SAAS;AAAA,QACN,YAAK,aAAa,IAAI;AAAA,QAC3B,CAAC,MAAM,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,EAAE,SAAS;AAAA,MACxC;AACA,UAAI,QAAQ;AACR,iBAAS,KAAK,OAAO,QAAQ;AAAA,MACjC;AAAA,IACJ;AAAA,EACJ,QAAQ;AACJ,WAAO;AAAA,EACX;AAEA,SAAO,SAAS,SAAS,IAAI,WAAW;AAC5C;AAKO,SAAS,yBAAyB,WAAiD;AACtF,SAAO,cAAqC,wBAAwB,SAAS,CAAC;AAClF;AAcO,SAAS,aACZ,UACA,UACA,WACA,SACI;AACJ,iBAA+B,qBAAqB,WAAW,QAAQ,GAAG;AAAA,IACtE;AAAA,IACA;AAAA,IACA,WAAW,KAAK,IAAI;AAAA,EACxB,CAAC;AACL;AASA,eAAsB,gBAClB,UACA,WACA,UACa;AACb,QAAM,cAAc,MAAM,kBAAkB,QAAQ;AACpD,MAAI,CAAC,aAAa;AACd;AAAA,EACJ;AAGA,aAAW,YAAY,UAAU;AAC7B,iBAAa,SAAS,UAAU,UAAU,WAAW,WAAW;AAAA,EACpE;AAGA,iBAAsC,wBAAwB,SAAS,GAAG;AAAA,IACtE,SAAS;AAAA,IACT,WAAW,KAAK,IAAI;AAAA,IACpB,SAAS;AAAA,IACT,aAAa,SAAS;AAAA,EAC1B,CAAC;AACL;AAmBO,SAAS,4BACZ,WACA,WACA,gBAC8C;AAC9C,SAAO;AAAA,IACH;AAAA,IACA,CAAC,OAAO,qBAAqB,WAAW,EAAE;AAAA,IAC1C,CAAC,WAAW,CAAC,CAAC,OAAO,YAAY,CAAC,CAAC,OAAO,SAAS,YAAY,OAAO,YAAY;AAAA,IAClF,CAAC,WAAW,OAAO;AAAA,EACvB;AACJ;AASO,SAAS,+BACZ,WACA,WAC8C;AAC9C,SAAO;AAAA,IACH;AAAA,IACA,CAAC,OAAO,qBAAqB,WAAW,EAAE;AAAA,IAC1C,CAAC,WAAW,CAAC,CAAC,OAAO,YAAY,CAAC,CAAC,OAAO,SAAS;AAAA,IACnD,CAAC,WAAW,OAAO;AAAA,EACvB;AACJ;AAxNA,IAQAC,KACAC;AATA;AAAA;AAAA;AAQA,IAAAD,MAAoB;AACpB,IAAAC,SAAsB;AAQtB;AACA;AACA;AAAA;AAAA;;;ACSO,SAAS,oBAAoB,WAA2B;AAC3D,SAAY,YAAK,YAAY,SAAS,GAAG,YAAY;AACzD;AAOO,SAAS,oBAAoB,WAAmB,UAAkB,QAAyB;AAC9F,MAAI,QAAQ;AACR,WAAY,YAAK,oBAAoB,SAAS,GAAG,QAAQ,GAAG,QAAQ,OAAO;AAAA,EAC/E;AACA,SAAY,YAAK,oBAAoB,SAAS,GAAG,GAAG,QAAQ,OAAO;AACvE;AAKO,SAAS,wBAAwB,WAA2B;AAC/D,SAAY,YAAK,oBAAoB,SAAS,GAAG,sBAAsB;AAC3E;AASO,SAAS,sBAAsB,WAA2B;AAC7D,SAAY,YAAK,oBAAoB,SAAS,GAAG,oBAAoB;AACzE;AAiBO,SAAS,0BACZ,WACA,aACA,QACM;AACN,QAAM,WAAW,SACX,GAAG,qBAAqB,QAAQ,M
AAM,IAAI,WAAW,UACrD,GAAG,qBAAqB,GAAG,WAAW;AAC5C,SAAY,YAAK,oBAAoB,SAAS,GAAG,QAAQ;AAC7D;AA2GO,SAAS,uBAAuB,WAAiD;AACpF,SAAO,cAAqC,sBAAsB,SAAS,CAAC;AAChF;AAaO,SAAS,wBACZ,WACA,SACyB;AAEzB,QAAM,WAAW,uBAAuB,SAAS;AACjD,MAAI,CAAC,UAAU;AACX,WAAO;AAAA,EACX;AAGA,MAAI,WAAW,SAAS,YAAY,SAAS;AACzC,WAAO;AAAA,EACX;AAGA,QAAM,cAAc,oBAAoB,SAAS;AACjD,MAAI,CAAI,eAAW,WAAW,GAAG;AAC7B,WAAO;AAAA,EACX;AAEA,QAAM,WAA+B,CAAC;AAEtC,MAAI;AACA,UAAM,QAAW,gBAAY,WAAW;AACxC,eAAW,QAAQ,OAAO;AACtB,UACI,CAAC,KAAK,WAAW,qBAAqB,KACtC,SAAS,wBACT,CAAC,KAAK,SAAS,OAAO,GACxB;AACE;AAAA,MACJ;AAEA,YAAM,SAAS;AAAA,QACN,YAAK,aAAa,IAAI;AAAA,QAC3B,CAAC,MAAM,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,EAAE,QAAQ;AAAA,MACtC;AACA,UAAI,QAAQ;AACR,iBAAS,KAAK,OAAO,OAAO;AAAA,MAChC;AAAA,IACJ;AAAA,EACJ,QAAQ;AACJ,WAAO;AAAA,EACX;AAEA,SAAO,SAAS,SAAS,IAAI,WAAW;AAC5C;AAeO,SAAS,YACZ,UACA,SACA,WACA,SACI;AACJ,iBAA8B,oBAAoB,WAAW,UAAU,QAAQ,MAAM,GAAG;AAAA,IACpF;AAAA,IACA;AAAA,IACA,WAAW,KAAK,IAAI;AAAA,EACxB,CAAC;AACL;AASA,eAAsB,gBAClB,UACA,WACA,UACa;AACb,QAAM,cAAc,MAAM,kBAAkB,QAAQ;AACpD,MAAI,CAAC,aAAa;AACd;AAAA,EACJ;AAGA,QAAM,iBAAiB,SAAS,OAAO,OAAK,EAAE,SAAS,YAAY,EAAE,QAAQ;AAG7E,aAAW,WAAW,gBAAgB;AAClC,gBAAY,QAAQ,UAAW,SAAS,WAAW,WAAW;AAAA,EAClE;AAGA,iBAAsC,wBAAwB,SAAS,GAAG;AAAA,IACtE,SAAS;AAAA,IACT,WAAW,KAAK,IAAI;AAAA,IACpB,SAAS;AAAA,IACT,aAAa,eAAe;AAAA,EAChC,CAAC;AACL;AAiBO,SAAS,mBACZ,UACA,WACA,SACI;AAEJ,QAAM,iBAAiB,SAAS,OAAO,OAAK,EAAE,SAAS,QAAQ;AAC/D,MAAI,eAAe,WAAW,GAAG;AAC7B;AAAA,EACJ;AAGA,aAAW,WAAW,gBAAgB;AAClC,mBAA8B,0BAA0B,WAAW,QAAQ,MAAM,QAAQ,MAAM,GAAG;AAAA,MAC9F;AAAA,MACA;AAAA,MACA,WAAW,KAAK,IAAI;AAAA,IACxB,CAAC;AAAA,EACL;AAGA,iBAAsC,sBAAsB,SAAS,GAAG;AAAA,IACpE;AAAA,IACA,WAAW,KAAK,IAAI;AAAA,IACpB,SAAS;AAAA,IACT,aAAa,eAAe;AAAA,EAChC,CAAC;AACL;AAUA,SAAS,qBAAqB,WAAmB,UAAiC;AAE9E,QAAM,WAAW,oBAAoB,WAAW,QAAQ;AACxD,MAAO,eAAW,QAAQ,GAAG;AACzB,WAAO;AAAA,EACX;AAGA,QAAM,cAAc,oBAAoB,SAAS;AACjD,MAAO,eAAW,WAAW,GAAG;AAC5B,QAAI;AACA,YAAM,UAAa,gBAAY,aAAa,EAAE,eAAe,KAAK,CAAC;AACnE,iBAAW,SAAS,SAAS;AACzB,YAAI,MAAM,YAAY,KAAK,MAAM,SAAS,kBAAkB;AACxD,gBAAM,WAAgB,YAAK,aAAa,MAAM,MAAM,GAAG,QAAQ,OAAO;AACtE,cAAO,eAAW,QAAQ,GAAG;AACzB,mBAAO;AAAA,UACX;AAAA,QACJ;AAAA,MACJ;AAAA,IACJ,QAAQ;AAAA,IAER;AAAA,EACJ;AAEA,SAAO;AACX;AAkBO,SAAS,4BACZ,WACA,WACA,gBACgD;AAChD,SAAO;AAAA,IACH;AAAA,IACA,CAAC,OAAO,qBAAqB,WAAW,EAAE;AAAA,IAC1C,CAAC,WAAW,CAAC,CAAC,OAAO,WAAW,CAAC,CAAC,OAAO,QAAQ,QAAQ,OAAO,YAAY;AAAA,IAC5E,CAAC,WAAW,OAAO;AAAA,EACvB;AACJ;AAWO,SAAS,+BACZ,WACA,WACgD;AAChD,SAAO;AAAA,IACH;AAAA,IACA,CAAC,OAAO,qBAAqB,WAAW,EAAE;AAAA,IAC1C,CAAC,WAAW,CAAC,CAAC,OAAO,WAAW,CAAC,CAAC,OAAO,QAAQ;AAAA,IACjD,CAAC,WAAW,OAAO;AAAA,EACvB;AACJ;AAmBO,SAAS,gBACZ,WACA,WACA,YACM;AACN,MAAI,YAAY;AAEhB,aAAW,YAAY,WAAW;AAC9B,UAAM,YAAY,qBAAqB,WAAW,QAAQ;AAC1D,QAAI,CAAC,WAAW;AACZ;AAAA,IACJ;AAEA,UAAM,SAAS,cAA6B,SAAS;AACrD,QAAI,CAAC,UAAU,CAAC,OAAO,WAAW,CAAC,OAAO,QAAQ,MAAM;AACpD;AAAA,IACJ;AAGA,QAAI,OAAO,YAAY,YAAY;AAC/B;AACA;AAAA,IACJ;AAGA,mBAA8B,WAAW;AAAA,MACrC,SAAS,OAAO;AAAA,MAChB,SAAS;AAAA,MACT,WAAW,KAAK,IAAI;AAAA,IACxB,CAAC;AACD;AAAA,EACJ;AAEA,SAAO;AACX;AAlfA,IAQAC,KACAC;AATA;AAAA;AAAA;AAQA,IAAAD,MAAoB;AACpB,IAAAC,SAAsB;AAQtB;AACA;AACA;AAAA;AAAA;;;AC2DA,eAAsB,4BAClB,OACA,WACA,UACwB;AAExB,QAAM,WAAW,yBAAyB,SAAS;AACnD,MAAI,CAAC,YAAY,CAAC,SAAS,SAAS;AAEhC,WAAO;AAAA,EACX;AAGA,QAAM,cAAc,MAAM,kBAAkB,QAAQ;AACpD,MAAI,CAAC,aAAa;AAEd,WAAO;AAAA,EACX;AAGA,MAAI,SAAS,YAAY,aAAa;AAClC,WAAO,CAAC;AAAA,EACZ;AAGA,QAAM,eAAe,MAAM,gBAAgB,UAAU,SAAS,SAAS,QAAQ;AAC/E,MAAI,iBAAiB,MAAM;AAEvB,WAAO;AAAA,EACX;AAEA,MAAI,aAAa,WAAW,GAAG;AAC3B,WAAO,CAAC;AAAA,EACZ;AAGA,QAAM,oBAAoB,aAAa,IAAI,OAAK,EAAE,QAAQ,OAAO,GAAG,CAAC;AAGrE,QAAM,kBAA4B,CAAC;AACnC,aAAWC,WAAU,MAAM,SAAS;AAChC,UAAM,aAAaA,QAAO,KAAK,
QAAQ,OAAO,GAAG,EAAE,QAAQ,OAAO,EAAE;AACpE,UAAM,WAAWA,QAAO,SAAS,IAAI,OAAK,EAAE,QAAQ,OAAO,GAAG,CAAC;AAE/D,UAAM,aAAa,kBAAkB,KAAK,iBAAe;AAErD,UAAI,YAAY,WAAW,aAAa,GAAG,KAAK,gBAAgB,YAAY;AACxE,eAAO;AAAA,MACX;AAGA,UAAI,SAAS,KAAK,QAAM,gBAAgB,MAAM,YAAY,SAAS,MAAM,EAAE,CAAC,GAAG;AAC3E,eAAO;AAAA,MACX;AAEA,aAAO;AAAA,IACX,CAAC;AAED,QAAI,YAAY;AACZ,sBAAgB,KAAKA,QAAO,EAAE;AAAA,IAClC;AAAA,EACJ;AAEA,SAAO;AACX;AA9IA;AAAA;AAAA;AAWA;AACA;AAGA;AAGA;AAGA;AAGA;AAsBA;AAGA;AAGA;AAGA;AAOA;AACA;AAAA;AAAA;;;ACHA,SAASC,qBAAoB,SAAqD;AAC9E,MAAI,QAAQ,SAAS,QAAQ;AACzB,WAAO,EAAE,MAAM,WAAW;AAAA,EAC9B;AACA,SAAO,EAAE,MAAM,kBAAkB;AACrC;AAaA,eAAsB,kBAAkB,UAAmC;AACvE,QAAM,cAAU,4CAAqB;AAErC,YAAU,qCAAqC;AAC/C,QAAM,SAAS,MAAM,QAAQ,YAAY;AAAA,IACrC,QAAQ;AAAA,IACR,kBAAkB;AAAA,IAClB,gBAAgB,CAAC,MAAM;AAAA,IACvB,qBAAqBA;AAAA,IACrB,SAAS;AAAA,IACT,WAAW;AAAA,EACf,CAAC;AAED,MAAI,CAAC,OAAO,WAAW,CAAC,OAAO,UAAU;AACrC,iBAAa,+BAA+B;AAC5C,WAAO;AAAA,EACX;AAGA,QAAM,QAAQ,OAAO,SAAS,KAAK,EAAE,MAAM,OAAO;AAClD,QAAM,QAAQ,QAAQ,SAAS,MAAM,CAAC,GAAG,EAAE,IAAI;AAC/C,MAAI,QAAQ,GAAG;AACX,cAAU,wBAAwB,KAAK,UAAU,KAAK,eAAe,oBAAoB,GAAG,CAAC,EAAE;AAAA,EACnG;AACA,SAAO;AACX;AAQA,eAAsB,YAAY,UAAoC;AAClE,QAAM,QAAQ,MAAM,kBAAkB,QAAQ;AAC9C,SAAO,QAAQ;AACnB;AAgBA,eAAsB,kBAAkB,SAAiD;AACrF,QAAM,eAAe,CAAC,CAAC,QAAQ;AAC/B,QAAM,UAAU,QAAQ;AACxB,QAAM,WAAW,QAAQ,YAAY;AAGrC,YAAU,wDAAmD;AAC7D,YAAU,iEAAiE;AAE3E,MAAI,aAA0C;AAE9C,MAAI,cAAc;AACd,iBAAc,YAAY,CAAC,UACrB,2BAA2B,QAAQ,SAAU,IAC7C,wBAAwB,QAAQ,WAAY,OAAQ;AAE1D,QAAI,YAAY;AACZ,gBAAU,iCAAiC,WAAW,MAAM,MAAM,SAAS;AAAA,IAC/E;AAAA,EACJ;AAEA,MAAI,CAAC,YAAY;AACb,iBAAa,MAAM,sBAAsB,OAAO;AAGhD,QAAI,gBAAgB,SAAS;AACzB,UAAI;AACA,2BAAmB,YAAY,QAAQ,WAAY,OAAO;AAAA,MAC9D,QAAQ;AAAA,MAER;AAAA,IACJ;AAAA,EACJ;AAEA,MAAI,WAAW,MAAM,WAAW,GAAG;AAC/B,UAAM,IAAI,MAAM,wFAAwF;AAAA,EAC5G;AAEA,YAAU,yBAAyB,WAAW,MAAM,MAAM,WAAW,WAAW,MAAM,IAAI,OAAK,KAAK,EAAE,IAAI,CAAC,EAAE,KAAK,IAAI,CAAC,EAAE;AAGzH,YAAU,iCAAiC;AAC3C,QAAM,YAA2B,CAAC;AAClC,QAAM,cAAc,WAAW,YAAY,QAAQ;AAEnD,WAAS,IAAI,GAAG,IAAI,WAAW,MAAM,QAAQ,KAAK;AAC9C,UAAM,OAAO,WAAW,MAAM,CAAC;AAC/B,UAAM,WAAW,kBAAkB,KAAK,IAAI;AAG5C,QAAI,aAAiC;AACrC,QAAI,gBAAgB,SAAS;AACzB,mBAAa,sBAAsB,UAAU,QAAQ,WAAY,OAAO;AAAA,IAC5E;AAEA,QAAI,YAAY;AACZ,gBAAU,WAAW,KAAK,IAAI,wBAAwB,WAAW,QAAQ,MAAM,WAAW;AAC1F,gBAAU,KAAK,UAAU;AACzB;AAAA,IACJ;AAEA,cAAU,sBAAsB,IAAI,CAAC,IAAI,WAAW,MAAM,MAAM,KAAK,KAAK,KAAK,IAAI,CAAC,IAAI,KAAK,IAAI,KAAK,IAAI,GAAG,CAAC,EAAE;AAChH,QAAI;AACA,YAAM,WAAW,MAAM,aAAa,SAAS,MAAM,WAAW;AAC9D,gBAAU,aAAa,SAAS,QAAQ,MAAM,UAAU;AACxD,gBAAU,KAAK,QAAQ;AAGvB,UAAI,gBAAgB,SAAS;AACzB,YAAI;AACA,2BAAiB,UAAU,UAAU,QAAQ,WAAY,OAAO;AAAA,QACpE,QAAQ;AAAA,QAER;AAAA,MACJ;AAAA,IACJ,SAAS,OAAO;AAEZ,mBAAa,4BAA4B,KAAK,IAAI,MAAM,gBAAgB,KAAK,CAAC,EAAE;AAAA,IACpF;AAAA,EACJ;AAEA,MAAI,UAAU,WAAW,GAAG;AACxB,UAAM,IAAI,MAAM,6DAA6D;AAAA,EACjF;AAGA,YAAU,WAAW,UAAU,MAAM,qBAAqB;AAC1D,QAAM,SAAS,eAAe,WAAW,UAAU;AACnD,YAAU,kBAAkB,OAAO,QAAQ,MAAM,aAAa,OAAO,WAAW,MAAM,aAAa;AACnG,SAAO;AACX;AASA,eAAe,sBAAsB,SAA0D;AAC3F,QAAM,cAAU,4CAAqB;AACrC,QAAM,SAAS,0BAA0B,QAAQ,QAAQ;AAEzD,QAAM,cAAkC;AAAA,IACpC;AAAA,IACA,kBAAkB,QAAQ;AAAA,IAC1B,gBAAgBC;AAAA,IAChB,qBAAqBD;AAAA,IACrB,SAAS;AAAA,IACT,WAAW;AAAA,EACf;AAEA,MAAI,QAAQ,OAAO;AACf,gBAAY,QAAQ,QAAQ;AAAA,EAChC;AAEA,QAAM,SAAS,MAAM,QAAQ,YAAY,WAAW;AAEpD,MAAI,CAAC,OAAO,WAAW,CAAC,OAAO,UAAU;AACrC,UAAM,IAAI,MAAM,2BAA2B,OAAO,SAAS,gBAAgB,EAAE;AAAA,EACjF;AAEA,SAAO,4BAA4B,OAAO,QAAQ;AACtD;AASA,eAAe,aACX,SACA,MACA,aACoB;AACpB,QAAM,cAAU,4CAAqB;AACrC,QAAM,SAAS;AAAA,IACX,QAAQ;AAAA,IACR,KAAK;AAAA,IACL,KAAK;AAAA,IACL;AAAA,EACJ;AAEA,QAAM,cAAkC;AAAA,IACpC;AAAA,IACA,kBAAkB,QAAQ;AAAA,IAC1B,gBAAgBC;AAAA,IAChB,qBAAqBD;AAAA,IACrB,SAAS;AAAA,IACT,WAAW;AAAA,EACf;AAEA,MAAI,QAAQ,OAAO;AACf,gB
AAY,QAAQ,QAAQ;AAAA,EAChC;AAEA,QAAM,SAAS,MAAM,QAAQ,YAAY,WAAW;AAEpD,MAAI,CAAC,OAAO,WAAW,CAAC,OAAO,UAAU;AACrC,UAAM,IAAI,MAAM,8BAA8B,KAAK,IAAI,MAAM,OAAO,SAAS,gBAAgB,EAAE;AAAA,EACnG;AAEA,SAAO,yBAAyB,OAAO,QAAQ;AACnD;AAgBO,SAAS,eACZ,WACA,YACW;AAEX,QAAM,eAAe,UAAU,CAAC,EAAE;AAClC,QAAM,UAAU;AAAA,IACZ,MAAM,WAAW,YAAY,QAAQ,aAAa;AAAA,IAClD,aAAa,WAAW,YAAY,eAAe,aAAa;AAAA,IAChE,UAAU,WAAW,YAAY,YAAY,aAAa;AAAA,IAC1D,aAAa,WAAW,YAAY,eAAe,aAAa;AAAA,IAChE,aAAa,aAAa;AAAA,EAC9B;AAIA,QAAM,gBAAgB,oBAAI,IAAsB;AAGhD,QAAM,YAAY,oBAAI,IAAwB;AAC9C,WAAS,IAAI,GAAG,IAAI,UAAU,QAAQ,KAAK;AACvC,UAAM,QAAQ,UAAU,CAAC;AACzB,UAAM,OAAO,WAAW,MAAM,CAAC;AAC/B,UAAM,WAAW,OAAO,kBAAkB,KAAK,IAAI,IAAI;AAEvD,eAAW,OAAO,MAAM,SAAS;AAC7B,UAAI,CAAC,UAAU,IAAI,IAAI,EAAE,GAAG;AAExB,cAAM,YAAY,WAAW,EAAE,GAAG,KAAK,MAAM,SAAS,IAAI;AAC1D,kBAAU,IAAI,IAAI,IAAI,SAAS;AAG/B,YAAI,UAAU;AACV,cAAI,CAAC,cAAc,IAAI,QAAQ,GAAG;AAC9B,0BAAc,IAAI,UAAU,CAAC,CAAC;AAAA,UAClC;AACA,wBAAc,IAAI,QAAQ,EAAG,KAAK,IAAI,EAAE;AAAA,QAC5C;AAAA,MACJ;AAAA,IACJ;AAAA,EACJ;AACA,QAAM,UAAU,MAAM,KAAK,UAAU,OAAO,CAAC;AAG7C,QAAM,cAAc,oBAAI,IAA0B;AAClD,aAAW,SAAS,WAAW;AAC3B,eAAW,OAAO,MAAM,YAAY;AAChC,UAAI,CAAC,YAAY,IAAI,IAAI,IAAI,GAAG;AAC5B,oBAAY,IAAI,IAAI,MAAM,GAAG;AAAA,MACjC;AAAA,IACJ;AAAA,EACJ;AACA,QAAM,aAAa,MAAM,KAAK,YAAY,OAAO,CAAC;AAGlD,QAAM,YAAY,IAAI,IAAI,QAAQ,IAAI,OAAK,EAAE,EAAE,CAAC;AAChD,aAAW,OAAO,SAAS;AACvB,QAAI,eAAe,IAAI,aAAa,OAAO,SAAO,UAAU,IAAI,GAAG,CAAC;AACpE,QAAI,aAAa,IAAI,WAAW,OAAO,SAAO,UAAU,IAAI,GAAG,CAAC;AAAA,EACpE;AAGA,QAAM,oBAAoB,UACrB,IAAI,OAAK,EAAE,iBAAiB,EAC5B,OAAO,OAAO,EACd,KAAK,MAAM;AAGhB,QAAM,QAAgC,WAAW,MAAM,SAAS,IAC1D,WAAW,MAAM,IAAI,kBAAgB;AACnC,UAAM,WAAW,kBAAkB,aAAa,IAAI;AACpD,WAAO;AAAA,MACH,IAAI;AAAA,MACJ,MAAM,aAAa;AAAA,MACnB,MAAM,aAAa;AAAA,MACnB,aAAa,aAAa;AAAA,MAC1B,SAAS,cAAc,IAAI,QAAQ,KAAK,CAAC;AAAA,IAC7C;AAAA,EACJ,CAAC,IACC;AAEN,SAAO;AAAA,IACH;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,GAAI,QAAQ,EAAE,MAAM,IAAI,CAAC;AAAA,EAC7B;AACJ;AA/YA,IASAE,uBAiCa,sBAGP,4BAGA,qBAGAD;AAnDN;AAAA;AAAA;AASA,IAAAC,wBAKO;AAUP;AACA;AACA,IAAAC;AACA,IAAAC;AACA;AACA;AAaO,IAAM,uBAAuB;AAGpC,IAAM,6BAA6B;AAGnC,IAAM,sBAAsB;AAG5B,IAAMH,mBAAkB,CAAC,QAAQ,QAAQ,MAAM;AAAA;AAAA;;;ACAxC,SAAS,iBACZ,UACA,OACA,OACM;AACN,QAAM,eAAe,QACf;AAAA;AAAA;AAAA,sCAA0D,KAAK;AAAA;AAAA,IAC/D;AAEN,QAAM,YAAY,MAAM,MAAM,SAAS,IACjC,MAAM,MAAM,IAAI,OAAK,KAAK,CAAC,EAAE,EAAE,KAAK,IAAI,IACxC,KAAK,MAAM,KAAK;AAEtB,SAAO,oCAAoC,MAAM,KAAK;AAAA,6EACmB,QAAQ;AAAA;AAAA;AAAA;AAAA,EAInF,MAAM,WAAW;AAAA;AAAA;AAAA;AAAA;AAAA,EAKjB,SAAS;AAAA,EACT,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8BZ,yBAAyB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAY3B;AArHA,IAkBM;AAlBN;AAAA;AAAA;AAkBA,IAAM,4BAA4B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACO3B,SAAS,mBAAmB,UAAkB,OAAiC;AAClF,QAAM,MAAM,oBAAoB,UAAU,EAAE,SAAS,QAAQ,CAAC;AAG9D,MAAI,OAAO,IAAI,UAAU,UAAU;AAC/B,UAAM,IAAI,MAAM,oDAAoD;AAAA,EACxE;AAEA,MAAI,CAAC,MAAM,QAAQ,IAAI,YAAY,GAAG;AAClC,UAAM,IAAI,MAAM,2DAA2D;AAAA,EAC/E;AAGA,QAAM,eAAmC,CAAC;AAC1C,WAAS,IAAI,GAAG,IAAI,IAAI,aAAa,QAAQ,KAAK;AAC9C,UAAM,OAAO,IAAI,aAAa,CAAC;AAC/B,QAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC3C;AAAA,IACJ;AAEA,UAAM,MAAM;AAGZ,QAAI,OAAO,IAAI,OAAO,YAAY,OAAO,IAAI,SAAS,YAAY,OAAO,IAAI,SAAS,UAAU;AAC5F;AAAA,IACJ;AAGA,UAAM,KAAK,kBAAkB,OAAO,IAAI,EAAE,CAAC;AAG3C,QAAI;AACJ,QAAI,MAAM,QAAQ,IAAI,UAAU,GAAG;AAC/B,YAAM,SAA6B,CAAC;AACpC,iBAAW,SAAS,IAAI,YAAY;AAChC,YAAI,MAAM,QAAQ,KAAK,KAAK,MAAM,WAAW,KACzC,OAAO,MAAM,CAAC,MAAM,YAAY,OAAO,MAAM,CAAC,MAAM,UAAU;AAC9D,iBAAO,KAAK,CAA
C,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AAAA,QACpC;AAAA,MACJ;AACA,UAAI,OAAO,SAAS,GAAG;AACnB,qBAAa;AAAA,MACjB;AAAA,IACJ;AAEA,iBAAa,KAAK;AAAA,MACd;AAAA,MACA,MAAM,OAAO,IAAI,IAAI;AAAA,MACrB,MAAM,OAAO,IAAI,IAAI;AAAA,MACrB,SAAS,OAAO,IAAI,WAAW,EAAE;AAAA,MACjC,UAAUI,kBAAiB,IAAI,QAAQ;AAAA,MACvC,UAAU,OAAO,IAAI,YAAY,EAAE;AAAA,MACnC;AAAA,IACJ,CAAC;AAAA,EACL;AAGA,QAAM,mBAAsC,CAAC;AAC7C,MAAI,MAAM,QAAQ,IAAI,gBAAgB,GAAG;AACrC,eAAW,QAAQ,IAAI,kBAAkB;AACrC,UAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC3C;AAAA,MACJ;AACA,YAAM,KAAK;AACX,UAAI,OAAO,GAAG,UAAU,YAAY,OAAO,GAAG,gBAAgB,UAAU;AACpE,yBAAiB,KAAK;AAAA,UAClB,OAAO,kBAAkB,OAAO,GAAG,KAAK,CAAC;AAAA,UACzC,aAAa,OAAO,GAAG,WAAW;AAAA,UAClC,OAAOA,kBAAiB,GAAG,KAAK;AAAA,UAChC,QAAQ,OAAO,GAAG,UAAU,EAAE;AAAA,QAClC,CAAC;AAAA,MACL;AAAA,IACJ;AAAA,EACJ;AAGA,QAAM,eAAyBA,kBAAiB,IAAI,YAAY;AAGhE,MAAI,aAAa;AACjB,MAAI,OAAO,IAAI,eAAe,UAAU;AACpC,iBAAa,KAAK,IAAI,GAAG,KAAK,IAAI,GAAG,IAAI,UAAU,CAAC;AAAA,EACxD;AAEA,SAAO;AAAA,IACH,OAAO,OAAO,IAAI,KAAK;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACJ;AACJ;AASA,SAASA,kBAAiB,KAAwB;AAC9C,MAAI,CAAC,MAAM,QAAQ,GAAG,GAAG;AACrB,WAAO,CAAC;AAAA,EACZ;AACA,SAAO,IACF,OAAO,UAAQ,OAAO,SAAS,QAAQ,EACvC,IAAI,UAAQ,OAAO,IAAI,CAAC;AACjC;AApIA;AAAA;AAAA;AAUA;AACA;AAAA;AAAA;;;AC6BA,SAASC,qBAAoB,SAAqD;AAC9E,MAAI,QAAQ,SAAS,QAAQ;AACzB,WAAO,EAAE,MAAM,WAAW;AAAA,EAC9B;AACA,SAAO,EAAE,MAAM,kBAAkB;AACrC;AAcA,eAAsB,cAClB,UACA,OACA,UAII,CAAC,GACoB;AACzB,QAAM,cAAU,4CAAqB;AAGrC,QAAM,eAAe,MAAM,QAAQ,YAAY;AAC/C,MAAI,CAAC,cAAc;AAEf,WAAO;AAAA,MACH,OAAO,MAAM;AAAA,MACb,cAAc,CAAC;AAAA,MACf,kBAAkB,CAAC;AAAA,MACnB,cAAc,CAAC;AAAA,MACf,YAAY;AAAA,IAChB;AAAA,EACJ;AAGA,QAAM,SAAS,iBAAiB,UAAU,OAAO,QAAQ,KAAK;AAG9D,QAAM,cAAkC;AAAA,IACpC;AAAA,IACA,kBAAkB;AAAA,IAClB,gBAAgB;AAAA,IAChB,qBAAqBA;AAAA,IACrB,SAAS;AAAA;AAAA,IACT,WAAW,QAAQ,WAAW;AAAA,EAClC;AAGA,MAAI,QAAQ,OAAO;AACf,gBAAY,QAAQ,QAAQ;AAAA,EAChC;AAEA,MAAI;AAEA,cAAU,sBAAsB,MAAM,KAAK,IAAI,KAAK,cAAc,QAAQ,WAAW,4BAA4B,GAAI,IAAI,CAAC,EAAE;AAC5H,UAAM,SAAS,MAAM,QAAQ,YAAY,WAAW;AAEpD,QAAI,CAAC,OAAO,WAAW,CAAC,OAAO,UAAU;AAErC,mBAAa,yBAAyB,MAAM,KAAK,MAAM,OAAO,SAAS,gBAAgB,EAAE;AACzF,aAAO;AAAA,QACH,OAAO,MAAM;AAAA,QACb,cAAc,CAAC;AAAA,QACf,kBAAkB,CAAC;AAAA,QACnB,cAAc,CAAC;AAAA,QACf,YAAY;AAAA,MAChB;AAAA,IACJ;AAGA,UAAM,SAAS,mBAAmB,OAAO,UAAU,MAAM,KAAK;AAC9D,cAAU,cAAc,MAAM,KAAK,WAAW,OAAO,aAAa,MAAM,YAAY,KAAK,gBAAgB,OAAO,UAAU,GAAG,CAAC,EAAE;AAChI,WAAO;AAAA,EACX,SAAS,OAAO;AAEZ,iBAAa,wBAAwB,MAAM,KAAK,MAAM,gBAAgB,KAAK,CAAC,EAAE;AAC9E,WAAO;AAAA,MACH,OAAO,MAAM;AAAA,MACb,cAAc,CAAC;AAAA,MACf,kBAAkB,CAAC;AAAA,MACnB,cAAc,CAAC;AAAA,MACf,YAAY;AAAA,IAChB;AAAA,EACJ;AACJ;AArIA,IASAC,uBAkBM,0BAGA;AA9BN;AAAA;AAAA;AASA,IAAAA,wBAKO;AAGP;AACA;AACA;AACA;AAOA,IAAM,2BAA2B;AAGjC,IAAM,cAAc,CAAC,QAAQ,QAAQ,MAAM;AAAA;AAAA;;;ACYpC,SAAS,iBACZ,UACA,cACA,eACM;AACN,QAAM,mBAAmB,KAAK,UAAU,cAAc,MAAM,CAAC;AAC7D,QAAM,oBAAoB,gBAAgB,KAAK,UAAU,eAAe,MAAM,CAAC,IAAI;AAEnF,QAAM,uBAAuB,gBACvB;AAAA;AAAA;AAAA,EAA8C,iBAAiB;AAAA;AAAA,gDAC/D;AAEN,SAAO,sFAAsF,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAKvG,gBAAgB;AAAA,EAChB,oBAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA6BpB,mBAAmB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAWrB;AApGA,IAoBM;AApBN;AAAA;AAAA;AAWA;AASA,IAAM,sBAAsB;AAAA,aACf,oBAAoB,QAAQ,OAAO,MAAM,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACMhD,SAAS,mBAAmB,UAA+B;AAC9D,QAAM,MAAM,oBAAoB,UAAU,EAAE,SAAS,QAAQ,CAAC;AAG9D,MAAI,OAAO,IAAI,UAAU,YAAY,IAAI,UAAU,MAAM;AACrD,UAAM,IAAI,MAAM,oDAAoD;AAAA,EACxE;AAGA,MAAI;AACJ,MAAI;AACA,YAAQ,yBAAyB,KAAK,UAAU,IAAI,KAAK,CAAC;AAAA,EAC9D,SAAS,YAAY;AACjB,UAAM,IAAI,
MAAM,oCAAoC,gBAAgB,UAAU,CAAC,EAAE;AAAA,EACrF;AAGA,QAAM,YAAyB,CAAC;AAChC,MAAI,MAAM,QAAQ,IAAI,SAAS,GAAG;AAC9B,eAAW,QAAQ,IAAI,WAAW;AAC9B,UAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC3C;AAAA,MACJ;AACA,YAAM,QAAQ;AACd,UAAI,OAAO,MAAM,UAAU,YAAY,OAAO,MAAM,gBAAgB,UAAU;AAC1E,kBAAU,KAAK;AAAA,UACX,OAAO,kBAAkB,OAAO,MAAM,KAAK,CAAC;AAAA,UAC5C,aAAa,OAAO,MAAM,WAAW;AAAA,UACrC,OAAOC,kBAAiB,MAAM,KAAK;AAAA,QACvC,CAAC;AAAA,MACL;AAAA,IACJ;AAAA,EACJ;AAGA,QAAM,YAAY,OAAO,IAAI,cAAc,YAAY,IAAI,YAAY;AAGvE,MAAI,WAAW;AACf,MAAI,OAAO,IAAI,aAAa,UAAU;AAClC,eAAW,KAAK,IAAI,GAAG,KAAK,IAAI,GAAG,IAAI,QAAQ,CAAC;AAAA,EACpD;AAGA,QAAM,SAAS,OAAO,IAAI,WAAW,WAAW,OAAO,IAAI,MAAM,IAAI;AAErE,SAAO;AAAA,IACH;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACJ;AACJ;AASA,SAASA,kBAAiB,KAAwB;AAC9C,MAAI,CAAC,MAAM,QAAQ,GAAG,GAAG;AACrB,WAAO,CAAC;AAAA,EACZ;AACA,SAAO,IACF,OAAO,UAAQ,OAAO,SAAS,QAAQ,EACvC,IAAI,UAAQ,OAAO,IAAI,CAAC;AACjC;AAhGA;AAAA;AAAA;AAWA,IAAAC;AACA;AACA;AACA;AAAA;AAAA;;;AC2BA,SAASC,qBAAoB,SAAqD;AAC9E,MAAI,QAAQ,SAAS,QAAQ;AACzB,WAAO,EAAE,MAAM,WAAW;AAAA,EAC9B;AACA,SAAO,EAAE,MAAM,kBAAkB;AACrC;AAeA,eAAsB,kBAClB,UACA,cACA,eACA,UAGI,CAAC,GACe;AACpB,QAAM,cAAU,4CAAqB;AAGrC,QAAM,eAAe,MAAM,QAAQ,YAAY;AAC/C,MAAI,CAAC,cAAc;AACf,iBAAa,mDAA8C;AAC3D,WAAO,sBAAsB,cAAc,eAAe,iBAAiB;AAAA,EAC/E;AAGA,QAAM,SAAS,iBAAiB,UAAU,cAAc,aAAa;AAGrE,QAAM,cAAkC;AAAA,IACpC;AAAA,IACA,kBAAkB;AAAA,IAClB,gBAAgB;AAAA,IAChB,qBAAqBA;AAAA,IACrB,SAAS;AAAA;AAAA,IACT,WAAW,QAAQ,WAAW;AAAA,EAClC;AAGA,MAAI,QAAQ,OAAO;AACf,gBAAY,QAAQ,QAAQ;AAAA,EAChC;AAEA,MAAI;AAEA,UAAM,cAAc,aAAa,OAAO,OAAK,KAAK,EAAE,aAAa,SAAS,CAAC,EAAE;AAC7E,cAAU,0BAA0B,KAAK,IAAI,WAAW,kBAAkB,gBAAgB,cAAc,QAAQ,SAAS,sBAAsB,gBAAgB,GAAG,CAAC,EAAE;AACrK,UAAM,SAAS,MAAM,QAAQ,YAAY,WAAW;AAEpD,QAAI,CAAC,OAAO,WAAW,CAAC,OAAO,UAAU;AACrC,mBAAa,yBAAyB,OAAO,SAAS,gBAAgB,oCAA+B;AACrG,aAAO,sBAAsB,cAAc,eAAe,sBAAsB;AAAA,IACpF;AAGA,UAAM,cAAc,mBAAmB,OAAO,QAAQ;AAGtD,UAAM,mBAAmB,aAAa,OAAO,CAAC,KAAK,MAAM,OAAO,GAAG,cAAc,UAAU,IAAI,CAAC;AAChG,QAAI,YAAY,MAAM,QAAQ,WAAW,KAAK,mBAAmB,GAAG;AAChE,mBAAa,gDAAgD,gBAAgB,oCAA+B;AAC5G,aAAO,sBAAsB,cAAc,eAAe,+BAA+B;AAAA,IAC7F;AAEA,WAAO;AAAA,EACX,SAAS,OAAO;AACZ,iBAAa,wBAAwB,gBAAgB,KAAK,CAAC,oCAA+B;AAC1F,WAAO,sBAAsB,cAAc,eAAe,wBAAwB,gBAAgB,KAAK,CAAC,EAAE;AAAA,EAC9G;AACJ;AAUA,SAAS,sBACL,cACA,eACA,QACW;AACX,QAAM,YAAY,oBAAI,IAAwB;AAC9C,QAAM,cAAc,oBAAI,IAAY;AAGpC,MAAI,eAAe;AACf,eAAW,OAAO,cAAc,SAAS;AACrC,gBAAU,IAAI,IAAI,IAAI,GAAG;AACzB,UAAI,IAAI,UAAU;AACd,oBAAY,IAAI,IAAI,QAAQ;AAAA,MAChC;AAAA,IACJ;AAAA,EACJ;AAGA,aAAW,SAAS,cAAc;AAC9B,QAAI,CAAC,SAAS,CAAC,MAAM,cAAc;AAAE;AAAA,IAAU;AAE/C,eAAW,SAAS,MAAM,cAAc;AACpC,UAAI,KAAK,MAAM;AACf,UAAI,CAAC,gBAAgB,EAAE,GAAG;AACtB,aAAK,kBAAkB,EAAE;AAAA,MAC7B;AAEA,UAAI,UAAU,IAAI,EAAE,GAAG;AAAE;AAAA,MAAU;AAEnC,YAAM,WAAW,MAAM,SAAS;AAChC,kBAAY,IAAI,QAAQ;AAExB,gBAAU,IAAI,IAAI;AAAA,QACd;AAAA,QACA,MAAM,MAAM;AAAA,QACZ,MAAM,MAAM;AAAA,QACZ,SAAS,MAAM;AAAA,QACf,UAAU,MAAM,YAAY,CAAC;AAAA,QAC7B,cAAc,CAAC;AAAA,QACf,YAAY,CAAC;AAAA,QACb,YAAY;AAAA,QACZ;AAAA,MACJ,CAAC;AAAA,IACL;AAAA,EACJ;AAEA,QAAM,UAAU,MAAM,KAAK,UAAU,OAAO,CAAC;AAC7C,QAAM,aAA6B,MAAM,KAAK,WAAW,EAAE,IAAI,WAAS;AAAA,IACpE;AAAA,IACA,aAAa;AAAA,EACjB,EAAE;AAGF,QAAM,aAAa,IAAI,IAAI,aAAa,IAAI,OAAK,GAAG,KAAK,EAAE,OAAO,OAAO,CAAC;AAC1E,QAAM,YAAuE,CAAC;AAC9E,aAAW,SAAS,cAAc;AAC9B,QAAI,CAAC,OAAO,kBAAkB;AAAE;AAAA,IAAU;AAC1C,eAAW,MAAM,MAAM,kBAAkB;AACrC,UAAI,CAAC,WAAW,IAAI,GAAG,KAAK,GAAG;AAC3B,mBAAW,IAAI,GAAG,KAAK;AACvB,kBAAU,KAAK;AAAA,UACX,OAAO,kBAAkB,GAAG,KAAK;AAAA,UACjC,aAAa,GAAG;AAAA,UAChB,OAAO,GAAG,SAAS,CAAC;AAAA,QACxB,CAAC;AAAA,MACL;AAAA,IACJ;AAAA,EACJ;AAEA,QAAM,UAAU,eAAe,WAAW;AAAA,IACtC,MAAM;AAAA,IACN,aAAa;AAAA,IACb,UAAU;AAAA,IACV,aAAa;AAAA,IACb,aAAa,CAAC;AAAA,EAClB;AAEA,YAAU,k
BAAkB,QAAQ,MAAM,aAAa,WAAW,MAAM,aAAa;AAErF,SAAO;AAAA,IACH,OAAO;AAAA,MACH;AAAA,MACA;AAAA,MACA;AAAA,MACA,mBAAmB,eAAe,qBAAqB;AAAA,IAC3D;AAAA,IACA;AAAA,IACA,WAAW,UAAU,WAAW;AAAA,IAChC,UAAU;AAAA,IACV,QAAQ,yBAAyB,MAAM;AAAA,EAC3C;AACJ;AAjOA,IASAC,uBAmBM,0BAGA;AA/BN;AAAA;AAAA;AASA,IAAAA,wBAKO;AAGP;AACA;AACA;AACA;AACA;AAOA,IAAM,2BAA2B;AAGjC,IAAM,cAAc,CAAC,QAAQ,QAAQ,MAAM;AAAA;AAAA;;;ACG3C,eAAe,YACX,OACA,aACA,IACY;AACZ,QAAM,UAAe,IAAI,MAAM,MAAM,MAAM;AAC3C,QAAM,YAA6B,CAAC;AAEpC,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACnC,UAAM,OAAO,MAAM,CAAC;AACpB,UAAM,QAAQ;AACd,UAAM,UAAU,GAAG,IAAI,EAClB,KAAK,YAAU;AACZ,cAAQ,KAAK,IAAI;AAAA,IACrB,CAAC,EACA,MAAM,MAAM;AAAA,IAEb,CAAC,EACA,QAAQ,MAAM;AAEX,YAAM,MAAM,UAAU,QAAQ,OAAO;AACrC,UAAI,QAAQ,IAAI;AACZ,kBAAU,OAAO,KAAK,CAAC;AAAA,MAC3B;AAAA,IACJ,CAAC;AAEL,cAAU,KAAK,OAAO;AAEtB,QAAI,UAAU,UAAU,aAAa;AACjC,YAAM,QAAQ,KAAK,SAAS;AAAA,IAChC;AAAA,EACJ;AAGA,QAAM,QAAQ,IAAI,SAAS;AAE3B,SAAO;AACX;AAmBA,eAAsB,sBAClB,SACoB;AACpB,QAAM,YAAY,QAAQ,aAAa;AACvC,QAAM,cAAc,QAAQ,eAAe;AAC3C,QAAM,oBAAoB,QAAQ,qBAAqB;AAEvD,MAAI,gBAA6B,CAAC,GAAG,QAAQ,KAAK;AAClD,MAAI,eAAmC;AACvC,MAAI,QAAQ;AAGZ,MAAI,cAAc,WAAW,GAAG;AAC5B,WAAO;AAAA,MACH,SAAS;AAAA,QACL,MAAM;AAAA,QACN,aAAa;AAAA,QACb,UAAU;AAAA,QACV,aAAa;AAAA,QACb,aAAa,CAAC;AAAA,MAClB;AAAA,MACA,SAAS,CAAC;AAAA,MACV,YAAY,CAAC;AAAA,MACb,mBAAmB;AAAA,IACvB;AAAA,EACJ;AAGA,QAAM,eAAe,CAAC,CAAC,QAAQ;AAC/B,QAAM,UAAU,QAAQ;AACxB,QAAM,WAAW,QAAQ,YAAY;AAGrC,MAAI,cAAc;AACd,UAAM,WAAW,qBAAqB,QAAQ,SAAU;AACxD,QAAI,YAAY,SAAS,YAAY,WAAW,SAAS,eAAe,GAAG;AACvE,cAAQ,SAAS,eAAe;AAChC,gBAAU,uBAAuB,SAAS,YAAY,KAAK,SAAS,gBAAgB,MAAM,oBAAoB;AAAA,IAClH;AAAA,EACJ;AAEA,SAAO,QAAQ,aAAa,cAAc,SAAS,GAAG;AAClD;AAEA,cAAU,SAAS,KAAK,IAAI,SAAS,aAAa,cAAc,MAAM,WAAW,KAAK,iBAAiB,WAAW,GAAG,CAAC,EAAE;AACxH,QAAI,cAAc,UAAU,IAAI;AAC5B,gBAAU,aAAa,cAAc,IAAI,OAAK,KAAK,EAAE,KAAK,CAAC,EAAE,KAAK,IAAI,CAAC,EAAE;AAAA,IAC7E;AAGA,QAAI,eAAe,oBAAI,IAA8B;AACrD,QAAI,gBAAgB;AAEpB,QAAI,cAAc;AACd,YAAM,aAAa,cAAc,IAAI,OAAK,EAAE,KAAK;AACjD,YAAM,aAAc,YAAY,CAAC,UAC3B,oBAAoB,YAAY,QAAQ,SAAU,IAClD,iBAAiB,YAAY,QAAQ,WAAY,OAAQ;AAE/D,qBAAe,WAAW;AAC1B,sBAAgB,cAAc,OAAO,OAAK,WAAW,QAAQ,SAAS,EAAE,KAAK,CAAC;AAE9E,UAAI,aAAa,OAAO,GAAG;AACvB,kBAAU,YAAY,aAAa,IAAI,uBAAuB,cAAc,MAAM,YAAY;AAAA,MAClG;AAAA,IACJ;AAGA,QAAI,oBAAwC,CAAC;AAC7C,QAAI,cAAc,SAAS,GAAG;AAC1B,0BAAoB,MAAM;AAAA,QACtB;AAAA,QACA;AAAA,QACA,OAAO,UAAU;AACb,gBAAM,SAAS,MAAM,cAAc,QAAQ,UAAU,OAAO;AAAA,YACxD,OAAO,QAAQ;AAAA,YACf,SAAS,QAAQ;AAAA,YACjB,OAAO,QAAQ;AAAA,UACnB,CAAC;AAED,cAAI,gBAAgB,WAAW,QAAQ;AACnC,gBAAI;AACA,8BAAgB,MAAM,OAAO,QAAQ,QAAQ,WAAY,OAAO;AAAA,YACpE,QAAQ;AAAA,YAER;AAAA,UACJ;AACA,iBAAO;AAAA,QACX;AAAA,MACJ;AAAA,IACJ;AAGA,UAAM,kBAAsC,cAAc,IAAI,OAAK;AAC/D,YAAM,SAAS,aAAa,IAAI,EAAE,KAAK;AACvC,UAAI,QAAQ;AACR,eAAO;AAAA,MACX;AACA,YAAM,QAAQ,kBAAkB,KAAK,OAAK,GAAG,UAAU,EAAE,KAAK;AAC9D,aAAO,SAAS;AAAA,QACZ,OAAO,EAAE;AAAA,QACT,cAAc,CAAC;AAAA,QACf,kBAAkB,CAAC;AAAA,QACnB,cAAc,CAAC;AAAA,QACf,YAAY;AAAA,MAChB;AAAA,IACJ,CAAC;AAGD,UAAM,mBAAmB,gBAAgB,OAAO,OAAK,KAAK,EAAE,aAAa,SAAS,CAAC,EAAE;AACrF,UAAM,oBAAoB,gBAAgB,OAAO,CAAC,KAAK,MAAM,OAAO,GAAG,cAAc,UAAU,IAAI,CAAC;AACpG,cAAU,uBAAuB,gBAAgB,IAAI,cAAc,MAAM,gBAAgB,iBAAiB,gBAAgB;AAG1H,cAAU,4BAA4B;AACtC,UAAM,cAAc,MAAM;AAAA,MACtB,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,QACI,OAAO,QAAQ;AAAA,QACf,SAAS,QAAQ;AAAA,MACrB;AAAA,IACJ;AAEA,mBAAe,YAAY;AAC3B,cAAU,mBAAmB,aAAa,QAAQ,MAAM,wBAAwB,YAAY,WAAW,KAAK,QAAQ,CAAC,CAAC,GAAG;AAGzH,QAAI,gBAAgB,SAAS;AACzB,UAAI;AACA,8BAAsB;AAAA,UAClB;AAAA,UACA,WAAW,KAAK,IAAI;AAAA,UACpB,MAAM;AAAA,UACN,cAAc;AAAA,UACd;AAAA,UACA,iBAAiB,cAAc,IAAI,OAAK,EAAE,KAAK;AAAA,UAC/C,eAAe,YAAY,UAAU,IAAI,OAAK,EAAE,KAAK;AAAA,UACrD,WAAW,YAAY;AAAA,UACvB,UAAU,YAAY;AAAA,QAC1B,GAAG,
QAAQ,SAAU;AAAA,MACzB,QAAQ;AAAA,MAER;AAAA,IACJ;AAGA,QAAI,YAAY,WAAW;AACvB,gBAAU,cAAc,YAAY,SAAS,WAAM,YAAY,MAAM,KAAK,EAAE,EAAE;AAC9E;AAAA,IACJ;AAGA,QAAI,YAAY,YAAY,qBAAqB,YAAY,UAAU,WAAW,GAAG;AACjF,gBAAU,kCAAkC,YAAY,WAAW,KAAK,QAAQ,CAAC,CAAC,SAAS,oBAAoB,KAAK,QAAQ,CAAC,CAAC,IAAI;AAClI;AAAA,IACJ;AAGA,QAAI,YAAY,UAAU,SAAS,GAAG;AAClC,gBAAU,gBAAgB,YAAY,UAAU,MAAM,4BAA4B;AAAA,IACtF;AACA,oBAAgB,YAAY,UAAU,IAAI,QAAM;AAAA,MAC5C,OAAO,EAAE;AAAA,MACT,aAAa,EAAE;AAAA,MACf,OAAO,EAAE;AAAA,IACb,EAAE;AAAA,EACN;AAGA,SAAO;AACX;AAtQA;AAAA;AAAA;AAWA;AACA;AACA;AACA;AAAA;AAAA;;;ACqBA,eAAsB,oBAAoB,SAAqD;AAC3F,QAAM,YAAY,KAAK,IAAI;AAE3B,MAAI;AAGJ,QAAM,QAAQ,MAAM,YAAY,QAAQ,QAAQ;AAEhD,MAAI,OAAO;AACP,YAAQ,MAAM,kBAAkB,OAAO;AAAA,EAC3C,OAAO;AACH,cAAU,yDAAoD;AAC9D,UAAM,gBAAgB,MAAM,oBAAoB,OAAO;AACvD,YAAQ,cAAc;AAEtB,UAAMC,YAAW,KAAK,IAAI,IAAI;AAC9B,WAAO;AAAA,MACH;AAAA,MACA,UAAAA;AAAA,MACA,YAAY,cAAc;AAAA,IAC9B;AAAA,EACJ;AAEA,QAAM,WAAW,KAAK,IAAI,IAAI;AAE9B,SAAO;AAAA,IACH;AAAA,IACA;AAAA,EACJ;AACJ;AAhEA;AAAA;AAAA;AAWA;AACA;AACA;AAGA;AAEA;AACA,IAAAC;AACA,IAAAC;AACA;AAAA;AAAA;;;ACrBA;AAAA;AAAA;AAAA;AAmDA,eAAsB,gBAClB,UACA,SACe;AAEf,QAAM,mBAAwB,eAAQ,QAAQ;AAG9C,MAAI,CAAI,eAAW,gBAAgB,GAAG;AAClC,eAAW,mCAAmC,gBAAgB,EAAE;AAChE,WAAO,WAAW;AAAA,EACtB;AAEA,MAAI,CAAI,aAAS,gBAAgB,EAAE,YAAY,GAAG;AAC9C,eAAW,uCAAuC,gBAAgB,EAAE;AACpE,WAAO,WAAW;AAAA,EACtB;AAGA,cAAY,kCAA6B;AACzC,gBAAc,cAAc,gBAAgB;AAC5C,MAAI,QAAQ,OAAO;AACf,kBAAc,SAAS,QAAQ,KAAK;AAAA,EACxC;AACA,MAAI,QAAQ,OAAO;AACf,kBAAc,SAAS,QAAQ,KAAK;AAAA,EACxC;AACA,UAAQ,OAAO,MAAM,IAAI;AAGzB,MAAI,iBAAgC;AACpC,MAAI;AACA,qBAAiB,MAAM,kBAAkB,gBAAgB;AAAA,EAC7D,QAAQ;AAAA,EAER;AAGA,MAAI,QAAQ,OAAO;AACf,wBAAoB,QAAQ,MAAM;AAAA,EACtC;AAGA,MAAI,CAAC,QAAQ,OAAO;AAChB,QAAI;AACA,YAAM,SAAS,QAAQ,WACjB,kBAAkB,QAAQ,MAAM,IAChC,MAAM,eAAe,kBAAkB,QAAQ,MAAM;AAC3D,UAAI,QAAQ;AACR,qBAAa,8CAA8C;AAC3D,sBAAc,WAAW,OAAO,OAAO,MAAM,QAAQ,MAAM,CAAC;AAC5D,sBAAc,cAAc,OAAO,OAAO,MAAM,WAAW,MAAM,CAAC;AAGlE,cAAM,aAAa,KAAK,UAAU,OAAO,OAAO,MAAM,CAAC;AACvD,gBAAQ,OAAO,MAAM,aAAa,IAAI;AAEtC,eAAO,WAAW;AAAA,MACtB;AAAA,IACJ,QAAQ;AAAA,IAER;AAAA,EACJ;AAGA,QAAM,UAAU,IAAI,QAAQ;AAC5B,UAAQ,MAAM,6BAA6B;AAE3C,MAAI;AACA,QAAI;AAGJ,QAAI,QAAQ,OAAO;AAEf,UAAI;AACJ,UAAI,QAAQ,UAAU,QAAQ;AAE1B,YAAI,CAAC,QAAQ,SAAS,gBAAgB;AAClC,gBAAM,cAAc,QAAQ,WACtB,kBAAkB,QAAQ,MAAM,IAChC,eAAe,QAAQ,QAAQ,cAAc;AACnD,cAAI,eAAe,YAAY,SAAS,GAAG;AACvC,oBAAQ;AACR,sBAAU,SAAS,MAAM,MAAM,eAAe;AAAA,UAClD;AAAA,QACJ;AAEA,YAAI,CAAC,OAAO;AACR,kBAAQ,OAAO,2BAA2B;AAC1C,kBAAQ,MAAM,gBAAmB,kBAAkB;AAAA,YAC/C,WAAW;AAAA,YACX,OAAO,QAAQ;AAAA,YACf,SAAS,QAAQ;AAAA,UACrB,CAAC;AAGD,cAAI,gBAAgB;AAChB,gBAAI;AACA,6BAAe,OAAO,QAAQ,QAAQ,cAAc;AAAA,YACxD,QAAQ;AAAA,YAER;AAAA,UACJ;AAAA,QACJ;AAEA,gBAAQ,QAAQ,aAAa,MAAM,MAAM,cAAc;AACvD,gBAAQ,MAAM,gCAAgC;AAAA,MAClD,OAAO;AAEH,gBAAQ,cAAc,QAAQ,KAAK;AACnC,kBAAU,UAAU,MAAM,MAAM,eAAe,QAAQ,KAAK,EAAE;AAC9D,gBAAQ,OAAO,gCAAgC;AAAA,MACnD;AAGA,YAAMC,SAAQ,MAAM,sBAAsB;AAAA,QACtC,UAAU;AAAA,QACV;AAAA,QACA,OAAO,QAAQ;AAAA,QACf,cAAc,QAAQ,UAAU,QAAQ,UAAU,MAAO;AAAA,QACzD,cAAc,QAAQ,UAAU,QAAQ,UAAU,MAAO,MAAM;AAAA;AAAA,QAC/D,aAAa;AAAA,QACb,WAAW;AAAA,QACX,mBAAmB;AAAA,QACnB,OAAO,QAAQ;AAAA,QACf,WAAW,QAAQ;AAAA,QACnB,SAAS,kBAAkB;AAAA,QAC3B,UAAU,QAAQ;AAAA,MACtB,CAAC;AAED,eAAS;AAAA,QACL,OAAAA;AAAA,QACA,UAAU;AAAA;AAAA,MACd;AAAA,IACJ,OAAO;AAEH,eAAS,MAAM,oBAAoB;AAAA,QAC/B,UAAU;AAAA,QACV,OAAO,QAAQ;AAAA,QACf,SAAS,QAAQ,UAAU,QAAQ,UAAU,MAAO;AAAA,QACpD,OAAO,QAAQ;AAAA,QACf,WAAW,QAAQ;AAAA,QACnB,SAAS,kBAAkB;AAAA,QAC3B,UAAU,QAAQ;AAAA,MACtB,CAAC;AAAA,IACL;AAEA,YAAQ,QAAQ,oBAAoB;AAGpC,UAAM,EAAE,OAAO,SAAS,IAAI;AAC5B,YAAQ,OAAO,MAAM,IAAI;AACzB,gBAAY,mBAAmB;AAC/B,kBAAc,WAAW,MAAM,QAAQ,IAAI;AAC3C,kBAAc,YAAY,MAAM,QAAQ,QAAQ;AAChD,kBAAc,gBAAgB,MAAM,QAAQ,WAAW
;AACvD,kBAAc,WAAW,OAAO,MAAM,QAAQ,MAAM,CAAC;AACrD,kBAAc,cAAc,OAAO,MAAM,WAAW,MAAM,CAAC;AAC3D,kBAAc,YAAY,eAAe,QAAQ,CAAC;AAElD,QAAI,QAAQ,SAAS;AACjB,cAAQ,OAAO,MAAM,IAAI;AACzB,gBAAU,UAAU;AACpB,iBAAW,OAAO,MAAM,SAAS;AAC7B,gBAAQ,OAAO;AAAA,UACX,KAAK,KAAK,IAAI,EAAE,CAAC,IAAI,KAAK,QAAG,CAAC,IAAI,IAAI,OAAO,IAAI,KAAK,IAAI,IAAI,UAAU,GAAG,CAAC;AAAA;AAAA,QAChF;AAAA,MACJ;AAAA,IACJ;AAGA,QAAI;AACA,YAAM,UAAU,kBAAkB,OAAO,QAAQ,QAAQ,QAAQ,KAAK;AACtE,UAAI,QAAQ,SAAS;AACjB,kBAAU,oCAAoC;AAAA,MAClD;AAAA,IACJ,QAAQ;AACJ,UAAI,QAAQ,SAAS;AACjB,qBAAa,0CAA0C;AAAA,MAC3D;AAAA,IACJ;AAGA,UAAM,aAAa,KAAK,UAAU,OAAO,MAAM,CAAC;AAChD,UAAM,YAAiB,eAAQ,QAAQ,MAAM;AAC7C,UAAM,aAAkB,YAAK,WAAW,mBAAmB;AAE3D,QAAI;AACA,MAAG,cAAU,WAAW,EAAE,WAAW,KAAK,CAAC;AAC3C,MAAG,kBAAc,YAAY,YAAY,OAAO;AAChD,cAAQ,OAAO,MAAM,IAAI;AACzB,mBAAa,2BAA2B,KAAK,UAAU,CAAC,EAAE;AAAA,IAC9D,SAAS,YAAY;AACjB,mBAAa,4BAA4B,gBAAgB,UAAU,CAAC,EAAE;AACtE,gBAAU,8BAA8B;AAAA,IAC5C;AAGA,YAAQ,OAAO,MAAM,aAAa,IAAI;AAEtC,WAAO,WAAW;AAAA,EAEtB,SAAS,OAAO;AACZ,YAAQ,KAAK,kBAAkB;AAE/B,QAAI,iBAAiB,gBAAgB;AACjC,cAAQ,MAAM,MAAM;AAAA,QAChB,KAAK;AACD,qBAAW,MAAM,OAAO;AACxB,oBAAU,qBAAqB;AAC/B,oBAAU,uCAAuC;AACjD,oBAAU,uCAAuC;AACjD,oBAAU,oCAAoC;AAC9C,iBAAO,WAAW;AAAA,QAEtB,KAAK;AACD,qBAAW,MAAM,OAAO;AACxB,iBAAO,WAAW;AAAA,QAEtB;AACI,qBAAW,MAAM,OAAO;AACxB,iBAAO,WAAW;AAAA,MAC1B;AAAA,IACJ;AAEA,eAAW,gBAAgB,KAAK,CAAC;AACjC,QAAI,QAAQ,WAAW,iBAAiB,SAAS,MAAM,OAAO;AAC1D,cAAQ,OAAO,MAAM,GAAG,KAAK,MAAM,KAAK,CAAC;AAAA,CAAI;AAAA,IACjD;AACA,WAAO,WAAW;AAAA,EACtB;AACJ;AASA,SAAS,eAAe,IAAoB;AACxC,MAAI,KAAK,KAAM;AACX,WAAO,GAAG,EAAE;AAAA,EAChB;AACA,QAAM,UAAU,KAAK,MAAM,KAAK,GAAI;AACpC,MAAI,UAAU,IAAI;AACd,WAAO,GAAG,OAAO;AAAA,EACrB;AACA,QAAM,UAAU,KAAK,MAAM,UAAU,EAAE;AACvC,QAAM,mBAAmB,UAAU;AACnC,SAAO,GAAG,OAAO,KAAK,gBAAgB;AAC1C;AA9SA,IASAC,QACAC;AAVA;AAAA;AAAA;AASA,IAAAD,SAAsB;AACtB,IAAAC,MAAoB;AAEpB;AACA;AAUA;AACA;AAaA;AACA;AAAA;AAAA;;;ACsCA,eAAsB,sBAAqD;AACvE,MAAI;AACA,UAAM,cAAU,4CAAqB;AACrC,UAAM,SAAS,MAAM,QAAQ,YAAY;AACzC,WAAO;AAAA,MACH,WAAW,OAAO;AAAA,MAClB,QAAQ,OAAO;AAAA,IACnB;AAAA,EACJ,SAAS,OAAO;AACZ,WAAO;AAAA,MACH,WAAW;AAAA,MACX,QAAQ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,IACjE;AAAA,EACJ;AACJ;AAaO,SAAS,sBAAsB,SAA4C;AAC9E,QAAM,cAAU,4CAAqB;AAErC,SAAO,OAAO,QAAgB,mBAAgE;AAC1F,QAAI;AACA,YAAM,QAAQ,gBAAgB,SAAS,QAAQ;AAC/C,YAAM,YAAY,gBAAgB,aAAa,QAAQ,aAAa;AAEpE,YAAM,cAAkC;AAAA,QACpC;AAAA,QACA;AAAA,QACA,kBAAkB,QAAQ;AAAA,QAC1B;AAAA,QACA,SAAS;AAAA;AAAA,QACT,gBAAgB;AAAA,QAChB,qBAAqB,CAAC,QAClB,IAAI,SAAS,SAAS,EAAE,MAAM,WAAW,IAAI,EAAE,MAAM,kBAAkB;AAAA,QAC3E,sBAAsB;AAAA;AAAA,MAC1B;AAEA,YAAM,SAAS,MAAM,QAAQ,YAAY,WAAW;AAEpD,aAAO;AAAA,QACH,SAAS,OAAO;AAAA,QAChB,UAAU,OAAO,YAAY;AAAA,QAC7B,OAAO,OAAO;AAAA,QACd,YAAY,OAAO;AAAA,MACvB;AAAA,IACJ,SAAS,OAAO;AACZ,aAAO;AAAA,QACH,SAAS;AAAA,QACT,UAAU;AAAA,QACV,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAChE;AAAA,IACJ;AAAA,EACJ;AACJ;AAaO,SAAS,qBAAqB,SAA2C;AAC5E,QAAM,cAAU,4CAAqB;AAErC,SAAO,OAAO,QAAgB,mBAAgE;AAC1F,QAAI;AACA,YAAM,QAAQ,gBAAgB,SAAS,QAAQ;AAC/C,YAAM,YAAY,gBAAgB,aAAa,QAAQ,aAAa;AAEpE,YAAM,cAAkC;AAAA,QACpC;AAAA,QACA;AAAA,QACA;AAAA,QACA,SAAS;AAAA;AAAA,QACT,sBAAsB;AAAA;AAAA,MAC1B;AAEA,YAAM,SAAS,MAAM,QAAQ,YAAY,WAAW;AAEpD,aAAO;AAAA,QACH,SAAS,OAAO;AAAA,QAChB,UAAU,OAAO,YAAY;AAAA,QAC7B,OAAO,OAAO;AAAA,QACd,YAAY,OAAO;AAAA,MACvB;AAAA,IACJ,SAAS,OAAO;AACZ,aAAO;AAAA,QACH,SAAS;AAAA,QACT,UAAU;AAAA,QACV,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAChE;AAAA,IACJ;AAAA,EACJ;AACJ;AA4BO,SAAS,2BAA2B,SAAiD;AACxF,QAAM,cAAU,4CAAqB;AAErC,SAAO,OAAO,QAAgB,mBAAgE;AAC1F,QAAI;AACA,YAAM,QAAQ,gBAAgB,SAAS,QAAQ;AAC/C,YAAM,YAAY,gBAAgB,aAAa,QAAQ,aAAa;AAEpE,YAAM,cAAkC;AAAA,QACpC;AAAA,QACA;AAAA,QACA;AAAA,QACA,kBAAkB,QAAQ;AAAA,QAC1B,SAAS;AAAA,QACT,sBAAsB;A
AAA,MAC1B;AAEA,YAAM,SAAS,MAAM,QAAQ,YAAY,WAAW;AAEpD,aAAO;AAAA,QACH,SAAS,OAAO;AAAA,QAChB,UAAU,OAAO,YAAY;AAAA,QAC7B,OAAO,OAAO;AAAA,QACd,YAAY,OAAO;AAAA,MACvB;AAAA,IACJ,SAAS,OAAO;AACZ,aAAO;AAAA,QACH,SAAS;AAAA,QACT,UAAU;AAAA,QACV,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAChE;AAAA,IACJ;AAAA,EACJ;AACJ;AArPA,IAUAC,uBAmDM,6BAGA,4BAGA,gBAyIA;AA5MN;AAAA;AAAA;AAUA,IAAAA,wBAEO;AAiDP,IAAM,8BAA8B;AAGpC,IAAM,6BAA6B;AAGnC,IAAM,iBAAiB,CAAC,QAAQ,QAAQ,MAAM;AAyI9C,IAAM,mCAAmC;AAAA;AAAA;;;AC5MzC,IAqDa;AArDb;AAAA;AAAA;AAqDO,IAAM,eAAN,MAAmB;AAAA,MAAnB;AACH,aAAQ,SAAwC,oBAAI,IAAI;AAAA;AAAA;AAAA;AAAA;AAAA,MAKxD,SAAS,OAAqB,OAA0B;AACpD,cAAM,UAAU,KAAK,iBAAiB,KAAK;AAC3C,gBAAQ,SAAS;AACjB,YAAI,OAAO;AACP,kBAAQ,eAAe,MAAM;AAC7B,kBAAQ,gBAAgB,MAAM;AAC9B,kBAAQ,mBAAmB,MAAM;AACjC,kBAAQ,oBAAoB,MAAM;AAClC,kBAAQ,eAAe,MAAM;AAC7B,cAAI,MAAM,QAAQ,MAAM;AACpB,oBAAQ,QAAQ,QAAQ,QAAQ,KAAK,MAAM;AAAA,UAC/C;AAAA,QACJ;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA,MAKA,WAAW,OAA2B;AAClC,cAAM,UAAU,KAAK,iBAAiB,KAAK;AAC3C,gBAAQ,SAAS;AAAA,MACrB;AAAA;AAAA;AAAA;AAAA,MAKA,cAAc,OAAiC;AAC3C,eAAO,KAAK,iBAAiB,KAAK;AAAA,MACtC;AAAA;AAAA;AAAA;AAAA,MAKA,WAAiC;AAC7B,YAAI,cAAc;AAClB,YAAI,eAAe;AACnB,YAAI,kBAAkB;AACtB,YAAI,mBAAmB;AACvB,YAAI,cAAc;AAClB,YAAI,OAAsB;AAC1B,YAAI,QAAQ;AAEZ,mBAAW,SAAS,KAAK,OAAO,OAAO,GAAG;AACtC,yBAAe,MAAM;AACrB,0BAAgB,MAAM;AACtB,6BAAmB,MAAM;AACzB,8BAAoB,MAAM;AAC1B,yBAAe,MAAM;AACrB,cAAI,MAAM,QAAQ,MAAM;AACpB,oBAAQ,QAAQ,KAAK,MAAM;AAAA,UAC/B;AACA,mBAAS,MAAM;AAAA,QACnB;AAEA,eAAO,EAAE,aAAa,cAAc,iBAAiB,kBAAkB,aAAa,MAAM,MAAM;AAAA,MACpG;AAAA;AAAA;AAAA;AAAA,MAKA,WAAoB;AAChB,cAAM,QAAQ,KAAK,SAAS;AAC5B,eAAO,MAAM,QAAQ,KAAK,MAAM,cAAc;AAAA,MAClD;AAAA;AAAA;AAAA;AAAA,MAKA,SAAS,OAA6B;AAClC,cAAM,YAA4B,CAAC,aAAa,iBAAiB,YAAY,SAAS;AACtF,cAAM,SAAS,CAAC;AAChB,mBAAW,SAAS,WAAW;AAC3B,iBAAO,KAAK,IAAI,KAAK,iBAAiB,KAAK;AAAA,QAC/C;AAEA,eAAO;AAAA,UACH,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,UAClC;AAAA,UACA;AAAA,UACA,OAAO,KAAK,SAAS;AAAA,QACzB;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA,MAKA,OAAO,aAAa,GAAmB;AACnC,eAAO,EAAE,eAAe,OAAO;AAAA,MACnC;AAAA;AAAA;AAAA;AAAA,MAKA,OAAO,WAAW,MAA6B;AAC3C,YAAI,QAAQ,MAAM;AAAE,iBAAO;AAAA,QAAO;AAClC,eAAO,IAAI,KAAK,QAAQ,CAAC,CAAC;AAAA,MAC9B;AAAA;AAAA;AAAA;AAAA,MAMQ,iBAAiB,OAAiC;AACtD,YAAI,WAAW,KAAK,OAAO,IAAI,KAAK;AACpC,YAAI,CAAC,UAAU;AACX,qBAAW;AAAA,YACP,aAAa;AAAA,YACb,cAAc;AAAA,YACd,iBAAiB;AAAA,YACjB,kBAAkB;AAAA,YAClB,aAAa;AAAA,YACb,MAAM;AAAA,YACN,OAAO;AAAA,YACP,QAAQ;AAAA,UACZ;AACA,eAAK,OAAO,IAAI,OAAO,QAAQ;AAAA,QACnC;AACA,eAAO;AAAA,MACX;AAAA,IACJ;AAAA;AAAA;;;ACjIA,eAAsB,UAClB,UACA,SACA,aACqB;AACrB,QAAM,YAAY,KAAK,IAAI;AAE3B,UAAQ,OAAO,MAAM,IAAI;AACzB,cAAY,oBAAoB;AAGhC,MAAI,iBAAgC;AACpC,MAAI;AACA,qBAAiB,MAAM,kBAAkB,QAAQ;AAAA,EACrD,QAAQ;AAAA,EAER;AAGA,MAAI,QAAQ,OAAO;AACf,wBAAoB,QAAQ,MAAM;AAAA,EACtC;AAGA,MAAI,CAAC,QAAQ,OAAO;AAChB,QAAI;AACA,YAAM,SAAS,QAAQ,WACjB,kBAAkB,QAAQ,MAAM,IAChC,MAAM,eAAe,UAAU,QAAQ,MAAM;AACnD,UAAI,QAAQ;AACR,cAAM,WAAW,KAAK,IAAI,IAAI;AAC9B,qBAAa,8BAA8B,OAAO,MAAM,QAAQ,MAAM,WAAW;AACjF,eAAO,EAAE,OAAO,OAAO,OAAO,SAAS;AAAA,MAC3C;AAAA,IACJ,QAAQ;AAAA,IAER;AAAA,EACJ;AAEA,QAAM,UAAU,IAAI,QAAQ;AAC5B,UAAQ,MAAM,6BAA6B;AAE3C,MAAI;AACA,QAAI;AAGJ,UAAM,iBAAiB,kBAAkB,SAAS,WAAW;AAC7D,UAAM,mBAAmB,oBAAoB,SAAS,WAAW;AACjE,UAAM,uBAAuB,wBAAwB,SAAS,WAAW;AAGzE,QAAI,QAAQ,OAAO;AAEf,UAAI;AACJ,UAAI,QAAQ,UAAU,QAAQ;AAE1B,YAAI,CAAC,QAAQ,SAAS,gBAAgB;AAClC,gBAAM,cAAc,QAAQ,WACtB,kBAAkB,QAAQ,MAAM,IAChC,eAAe,QAAQ,QAAQ,cAAc;AACnD,cAAI,eAAe,YAAY,SAAS,GAAG;AACvC,oBAAQ;AACR,sBAAU,SAAS,MAAM,MAAM,eAAe;AAAA,UAClD;AAAA,QACJ;AAEA,YAAI,CAAC,OAAO;AACR,kBAAQ,OAAO,2BAA2B;AAC1C,kBAAQ,MAAM,gBAAmB,UAAU;AAAA,YACvC,WAAW;AAAA,YACX,OAAO;AAAA,YACP,SAAS,QAAQ;AAAA,UACrB,CAAC;AAGD,cAAI,gBAAgB;AAChB,gBAAI;AACA,6BAAe,OAAO,QAAQ,QAAQ,cA
Ac;AAAA,YACxD,QAAQ;AAAA,YAER;AAAA,UACJ;AAAA,QACJ;AAEA,gBAAQ,QAAQ,aAAa,MAAM,MAAM,cAAc;AACvD,gBAAQ,MAAM,gCAAgC;AAAA,MAClD,OAAO;AAEH,gBAAQ,cAAc,QAAQ,KAAK;AACnC,kBAAU,UAAU,MAAM,MAAM,eAAe,QAAQ,KAAK,EAAE;AAC9D,gBAAQ,OAAO,gCAAgC;AAAA,MACnD;AAGA,YAAM,QAAQ,MAAM,sBAAsB;AAAA,QACtC;AAAA,QACA;AAAA,QACA,OAAO;AAAA,QACP,cAAc,mBAAmB,mBAAmB,MAAO;AAAA,QAC3D,cAAc,mBAAmB,mBAAmB,MAAO,MAAM;AAAA;AAAA,QACjE,aAAa,wBAAwB;AAAA,QACrC,WAAW;AAAA,QACX,mBAAmB;AAAA,QACnB,OAAO,QAAQ;AAAA,QACf,WAAW,QAAQ;AAAA,QACnB,SAAS,kBAAkB;AAAA,QAC3B,UAAU,QAAQ;AAAA,MACtB,CAAC;AAED,eAAS;AAAA,QACL;AAAA,QACA,UAAU;AAAA;AAAA,MACd;AAAA,IACJ,OAAO;AAEH,eAAS,MAAM,oBAAoB;AAAA,QAC/B;AAAA,QACA,OAAO;AAAA,QACP,SAAS,mBAAmB,mBAAmB,MAAO;AAAA,QACtD,OAAO,QAAQ;AAAA,QACf,WAAW,QAAQ;AAAA,QACnB,SAAS,kBAAkB;AAAA,QAC3B,UAAU,QAAQ;AAAA,MACtB,CAAC;AAAA,IACL;AAEA,YAAQ,QAAQ,6BAAwB,OAAO,MAAM,QAAQ,MAAM,gBAAgB;AAGnF,QAAI;AACA,YAAM,UAAU,UAAU,OAAO,OAAO,QAAQ,QAAQ,QAAQ,KAAK;AAAA,IACzE,QAAQ;AACJ,UAAI,QAAQ,SAAS;AACjB,qBAAa,0CAA0C;AAAA,MAC3D;AAAA,IACJ;AAGA,UAAM,YAAiB,eAAQ,QAAQ,MAAM;AAC7C,UAAM,aAAkB,YAAK,WAAW,mBAAmB;AAC3D,QAAI;AACA,MAAG,cAAU,WAAW,EAAE,WAAW,KAAK,CAAC;AAC3C,MAAG,kBAAc,YAAY,KAAK,UAAU,OAAO,OAAO,MAAM,CAAC,GAAG,OAAO;AAAA,IAC/E,QAAQ;AAAA,IAER;AAEA,WAAO,EAAE,OAAO,OAAO,OAAO,UAAU,KAAK,IAAI,IAAI,WAAW,YAAY,OAAO,WAAW;AAAA,EAClG,SAAS,OAAO;AACZ,YAAQ,KAAK,kBAAkB;AAC/B,eAAW,gBAAgB,KAAK,CAAC;AACjC,WAAO,EAAE,UAAU,KAAK,IAAI,IAAI,WAAW,UAAU,WAAW,gBAAgB;AAAA,EACpF;AACJ;AA1MA,IAMAC,QACAC;AAPA;AAAA;AAAA;AAMA,IAAAD,SAAsB;AACtB,IAAAC,MAAoB;AAGpB;AACA;AACA;AACA;AAUA;AAQA;AACA;AAAA;AAAA;;;ACtBO,SAAS,qBAAqB,SAA8D;AAC/F,MAAI,MAAM;AACV,aAAW,KAAK,SAAS;AACrB,UAAM,QAAQ,kBAAkB,EAAE,UAAU,KAAK;AACjD,QAAI,QAAQ,KAAK;AAAE,YAAM;AAAA,IAAO;AAAA,EACpC;AACA,SAAQ,iBAAiB,GAAG,KAAK;AACrC;AAjBA,IAIa,mBACA;AALb;AAAA;AAAA;AAIO,IAAM,oBAA4C,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,EAAE;AAC/E,IAAM,mBAA2C,EAAE,GAAG,QAAQ,GAAG,UAAU,GAAG,MAAM;AAAA;AAAA;;;AC2CpF,SAAS,uBAAuB,OAAiC;AACpE,QAAM,UAAU,MAAM;AAEtB,MAAI,QAAQ,WAAW,GAAG;AACtB,WAAO;AAAA,EACX;AAGA,QAAM,SAAS,wBAAwB,OAAO;AAG9C,QAAM,gBAA8B,CAAC;AACrC,QAAM,YAAY,oBAAI,IAAoB;AAE1C,aAAW,SAAS,QAAQ;AACxB,QAAI,MAAM,QAAQ,WAAW,GAAG;AAE5B,YAAM,MAAM,MAAM,QAAQ,CAAC;AAC3B,gBAAU,IAAI,IAAI,IAAI,IAAI,EAAE;AAC5B,oBAAc,KAAK,GAAG;AAAA,IAC1B,OAAO;AAEH,YAAM,SAAS,iBAAiB,KAAK;AACrC,iBAAW,OAAO,MAAM,SAAS;AAC7B,kBAAU,IAAI,IAAI,IAAI,OAAO,EAAE;AAAA,MACnC;AACA,oBAAc,KAAK,MAAM;AAAA,IAC7B;AAAA,EACJ;AAGA,QAAM,eAAe,wBAAwB,eAAe,SAAS;AAGrE,QAAM,aAAa,iBAAiB,YAAY;AAEhD,SAAO;AAAA,IACH,GAAG;AAAA,IACH,SAAS;AAAA,IACT;AAAA,EACJ;AACJ;AAUO,SAAS,mBAAmB,YAA4B;AAE3D,QAAM,aAAa,WAAW,QAAQ,OAAO,GAAG;AAGhD,QAAM,UAAU,WAAW,QAAQ,OAAO,EAAE;AAG5C,QAAM,cAAc,QAAQ,MAAM,GAAG,EAAE,IAAI,KAAK;AAChD,MAAI,YAAY,SAAS,GAAG,GAAG;AAC3B,WAAY,aAAM,QAAQ,OAAO;AAAA,EACrC;AAGA,SAAO;AACX;AAKA,SAAS,wBAAwB,SAAyC;AACtE,QAAM,SAAS,oBAAI,IAA0B;AAE7C,aAAW,OAAO,SAAS;AACvB,UAAM,MAAM,mBAAmB,IAAI,IAAI;AACvC,QAAI,CAAC,OAAO,IAAI,GAAG,GAAG;AAClB,aAAO,IAAI,KAAK,CAAC,CAAC;AAAA,IACtB;AACA,WAAO,IAAI,GAAG,EAAG,KAAK,GAAG;AAAA,EAC7B;AAEA,SAAO,MAAM,KAAK,OAAO,QAAQ,CAAC,EAAE,IAAI,CAAC,CAAC,SAAS,IAAI,OAAO;AAAA,IAC1D;AAAA,IACA,SAAS;AAAA,EACb,EAAE;AACN;AAKA,SAAS,iBAAiB,OAAmC;AACzD,QAAM,EAAE,SAAS,QAAQ,IAAI;AAG7B,QAAM,UAAU,QAAQ,MAAM,GAAG,EAAE,IAAI,KAAK;AAC5C,QAAM,KAAK,kBAAkB,OAAO;AACpC,QAAM,OAAO,QACR,MAAM,GAAG,EACT,IAAI,OAAK,EAAE,OAAO,CAAC,EAAE,YAAY,IAAI,EAAE,MAAM,CAAC,CAAC,EAC/C,KAAK,GAAG;AAGb,QAAM,WAAW;AAAA,IACb,QAAQ,QAAQ,OAAK,EAAE,QAAQ;AAAA,EACnC;AAGA,QAAM,UAAU;AAAA,IACZ,QAAQ,QAAQ,OAAK,EAAE,YAAY;AAAA,EACvC;AAEA,QAAM,UAAU,IAAI,IAAI,QAAQ,IAAI,OAAK,EAAE,EAAE,CAAC;AAC9C,QAAM,eAAe,QAAQ,OAAO,OAAK,CAAC,QAAQ,IAAI,CAAC,CAAC;AAGxD,QAAM,gBAAgB;AAAA,IAClB,QAAQ,QAAQ,OAAK,EAAE,UAAU;AAAA,EACrC;AACA,Q
AAM,aAAa,cAAc,OAAO,OAAK,CAAC,QAAQ,IAAI,CAAC,CAAC;AAG5D,QAAM,aAAa,sBAAsB,OAAO;AAGhD,QAAM,WAAW,uBAAuB,OAAO;AAG/C,QAAM,UAAU,gBAAgB,OAAO;AAGvC,QAAM,aAAa,QAAQ,IAAI,OAAK,EAAE,EAAE;AAGxC,QAAM,QAAQ,IAAI,IAAI,QAAQ,IAAI,OAAK,EAAE,IAAI,EAAE,OAAO,OAAO,CAAC;AAC9D,QAAM,OAAO,MAAM,SAAS,IAAI,CAAC,GAAG,KAAK,EAAE,CAAC,IAAI;AAEhD,SAAO;AAAA,IACH;AAAA,IACA;AAAA,IACA,MAAM,QAAQ,SAAS,GAAG,IAAI,UAAU,UAAU;AAAA,IAClD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACJ;AACJ;AAMA,SAAS,wBACL,SACA,WACY;AACZ,QAAM,YAAY,IAAI,IAAI,QAAQ,IAAI,OAAK,EAAE,EAAE,CAAC;AAEhD,SAAO,QAAQ,IAAI,UAAQ;AAAA,IACvB,GAAG;AAAA,IACH,cAAc;AAAA,MACV,IAAI,aACC,IAAI,OAAK,UAAU,IAAI,CAAC,KAAK,CAAC,EAC9B,OAAO,OAAK,MAAM,IAAI,MAAM,UAAU,IAAI,CAAC,CAAC;AAAA,IACrD;AAAA,IACA,YAAY;AAAA,MACR,IAAI,WACC,IAAI,OAAK,UAAU,IAAI,CAAC,KAAK,CAAC,EAC9B,OAAO,OAAK,MAAM,IAAI,MAAM,UAAU,IAAI,CAAC,CAAC;AAAA,IACrD;AAAA,EACJ,EAAE;AACN;AAKA,SAAS,iBAAiB,SAAuC;AAC7D,QAAM,cAAc,oBAAI,IAAyB;AAEjD,aAAW,OAAO,SAAS;AACvB,QAAI,CAAC,YAAY,IAAI,IAAI,QAAQ,GAAG;AAChC,kBAAY,IAAI,IAAI,UAAU,oBAAI,IAAI,CAAC;AAAA,IAC3C;AACA,gBAAY,IAAI,IAAI,QAAQ,EAAG,IAAI,IAAI,EAAE;AAAA,EAC7C;AAEA,SAAO,MAAM,KAAK,YAAY,QAAQ,CAAC,EAAE,IAAI,CAAC,CAAC,MAAM,SAAS,OAAO;AAAA,IACjE;AAAA,IACA,aAAa,YAAY,UAAU,IAAI;AAAA,EAC3C,EAAE;AACN;AAMA,SAAS,mBAAmB,KAAyB;AACjD,SAAO,CAAC,GAAG,IAAI,IAAI,GAAG,CAAC;AAC3B;AAEA,SAAS,sBAAsB,SAAkD;AAC7E,SAAO,qBAAqB,OAAO;AACvC;AAEA,SAAS,uBAAuB,SAA+B;AAC3D,QAAM,SAAS,oBAAI,IAAoB;AACvC,aAAW,KAAK,SAAS;AACrB,WAAO,IAAI,EAAE,WAAW,OAAO,IAAI,EAAE,QAAQ,KAAK,KAAK,CAAC;AAAA,EAC5D;AACA,MAAI,OAAO,QAAQ,CAAC,EAAE;AACtB,MAAI,YAAY;AAChB,aAAW,CAAC,KAAK,KAAK,KAAK,QAAQ;AAC/B,QAAI,QAAQ,WAAW;AACnB,aAAO;AACP,kBAAY;AAAA,IAChB;AAAA,EACJ;AACA,SAAO;AACX;AAEA,SAAS,gBAAgB,SAA+B;AACpD,MAAI,QAAQ,WAAW,GAAG;AACtB,WAAO,QAAQ,CAAC,EAAE;AAAA,EACtB;AAEA,QAAM,SAAS,mBAAmB,QAAQ,IAAI,OAAK,EAAE,OAAO,CAAC;AAC7D,MAAI,OAAO,WAAW,GAAG;AACrB,WAAO,OAAO,CAAC;AAAA,EACnB;AAEA,QAAM,QAAQ,OAAO,MAAM,GAAG,CAAC;AAC/B,QAAM,YAAY,OAAO,SAAS,MAAM;AACxC,QAAM,WAAW,MAAM,KAAK,IAAI;AAChC,SAAO,YAAY,IACb,GAAG,QAAQ,MAAM,SAAS,WAC1B;AACV;AAhSA,IAeAC;AAfA;AAAA;AAAA;AAeA,IAAAA,SAAsB;AAEtB;AACA;AAAA;AAAA;;;ACwCA,eAAsB,cAClB,OACA,SACoB;AACpB,QAAM,EAAE,WAAW,MAAM,IAAI;AAC7B,QAAM,cAAc,QAAQ,eAAe;AAC3C,QAAM,YAAY,QAAQ,aAAa;AAEvC,QAAM,UAAU,MAAM;AAGtB,MAAI,QAAQ,UAAU,aAAa;AAC/B,WAAO;AAAA,EACX;AAGA,QAAM,SAAS,sBAAsB,SAAS,MAAM,QAAQ,MAAM,WAAW;AAG7E,QAAM,SAAS,MAAM,UAAU,QAAQ,EAAE,OAAO,UAAU,CAAC;AAE3D,MAAI,CAAC,OAAO,WAAW,CAAC,OAAO,UAAU;AAErC,WAAO;AAAA,EACX;AAGA,QAAM,WAAW,qBAAqB,OAAO,UAAU,OAAO;AAE9D,MAAI,SAAS,WAAW,GAAG;AAEvB,WAAO;AAAA,EACX;AAGA,SAAO,kBAAkB,OAAO,QAAQ;AAC5C;AAUO,SAAS,sBACZ,SACA,aACA,aACM;AAEN,QAAM,aAAa,QACd,IAAI,OAAK,KAAK,EAAE,EAAE,KAAK,EAAE,IAAI,WAAM,EAAE,OAAO,EAAE,EAC9C,KAAK,IAAI;AAEd,SAAO,sCAAsC,WAAW,eAAe,QAAQ,MAAM;AAAA,4DAC7B,WAAW;AAAA;AAAA;AAAA;AAAA,EAIrE,UAAU;AAAA;AAAA;AAAA;AAAA,yCAI6B,WAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA6BpD;AAUO,SAAS,qBACZ,UACA,SACc;AACd,QAAM,WAAO,oCAAY,QAAQ;AACjC,MAAI,CAAC,MAAM;AACP,WAAO,CAAC;AAAA,EACZ;AAEA,MAAI;AACJ,MAAI;AACA,aAAS,OAAO,SAAS,WAAW,KAAK,MAAM,IAAI,IAAI;AAAA,EAC3D,QAAQ;AACJ,WAAO,CAAC;AAAA,EACZ;AAEA,QAAM,OAAO;AACb,QAAM,cAAc,MAAM;AAC1B,MAAI,CAAC,MAAM,QAAQ,WAAW,GAAG;AAC7B,WAAO,CAAC;AAAA,EACZ;AAEA,QAAM,iBAAiB,IAAI,IAAI,QAAQ,IAAI,OAAK,EAAE,EAAE,CAAC;AACrD,QAAM,cAAc,oBAAI,IAAY;AACpC,QAAM,WAA2B,CAAC;AAElC,aAAW,OAAO,aAAa;AAC3B,QAAI,CAAC,OAAO,OAAO,QAAQ,UAAU;AAAE;AAAA,IAAU;AACjD,UAAM,IAAI;AAEV,UAAM,KAAK,OAAO,EAAE,OAAO,WAAW,kBAAkB,EAAE,EAAE,IAAI;AAChE,UAAM,OAAO,OAAO,EAAE,SAAS,WAAW,EAAE,OAAO;AACnD,UAAM,UAAU,OAAO,EAAE,YAA
Y,WAAW,EAAE,UAAU;AAC5D,UAAM,YAAY,MAAM,QAAQ,EAAE,SAAS,IACpC,EAAE,UACA,OAAO,CAAC,QAAuB,OAAO,QAAQ,YAAY,eAAe,IAAI,GAAG,CAAC,EACjF,OAAO,SAAO,CAAC,YAAY,IAAI,GAAG,CAAC,IACtC,CAAC;AAEP,QAAI,MAAM,UAAU,SAAS,GAAG;AAC5B,iBAAW,OAAO,WAAW;AACzB,oBAAY,IAAI,GAAG;AAAA,MACvB;AACA,eAAS,KAAK,EAAE,IAAI,MAAM,QAAQ,IAAI,WAAW,QAAQ,CAAC;AAAA,IAC9D;AAAA,EACJ;AAGA,aAAW,OAAO,SAAS;AACvB,QAAI,CAAC,YAAY,IAAI,IAAI,EAAE,GAAG;AAC1B,eAAS,KAAK;AAAA,QACV,IAAI,IAAI;AAAA,QACR,MAAM,IAAI;AAAA,QACV,WAAW,CAAC,IAAI,EAAE;AAAA,QAClB,SAAS,IAAI;AAAA,MACjB,CAAC;AAAA,IACL;AAAA,EACJ;AAEA,SAAO;AACX;AASO,SAAS,kBACZ,OACA,UACW;AACX,QAAM,YAAY,IAAI,IAAI,MAAM,QAAQ,IAAI,OAAK,CAAC,EAAE,IAAI,CAAC,CAAC,CAAC;AAC3D,QAAM,YAAY,oBAAI,IAAoB;AAC1C,QAAM,gBAA8B,CAAC;AAErC,aAAW,WAAW,UAAU;AAC5B,UAAM,UAAU,QAAQ,UACnB,IAAI,QAAM,UAAU,IAAI,EAAE,CAAC,EAC3B,OAAO,CAAC,MAAuB,MAAM,MAAS;AAEnD,QAAI,QAAQ,WAAW,GAAG;AAAE;AAAA,IAAU;AAEtC,QAAI,QAAQ,WAAW,GAAG;AAEtB,YAAM,MAAM,QAAQ,CAAC;AACrB,gBAAU,IAAI,IAAI,IAAI,IAAI,EAAE;AAC5B,oBAAc,KAAK,GAAG;AAAA,IAC1B,OAAO;AAEH,YAAM,SAAS,oBAAoB,SAAS,OAAO;AACnD,iBAAW,OAAO,SAAS;AACvB,kBAAU,IAAI,IAAI,IAAI,QAAQ,EAAE;AAAA,MACpC;AACA,oBAAc,KAAK,MAAM;AAAA,IAC7B;AAAA,EACJ;AAGA,QAAM,YAAY,IAAI,IAAI,cAAc,IAAI,OAAK,EAAE,EAAE,CAAC;AACtD,QAAM,eAAe,cAAc,IAAI,UAAQ;AAAA,IAC3C,GAAG;AAAA,IACH,cAAc;AAAA,MACV,IAAI,aACC,IAAI,OAAK,UAAU,IAAI,CAAC,KAAK,CAAC,EAC9B,OAAO,OAAK,MAAM,IAAI,MAAM,UAAU,IAAI,CAAC,CAAC;AAAA,IACrD;AAAA,IACA,YAAY;AAAA,MACR,IAAI,WACC,IAAI,OAAK,UAAU,IAAI,CAAC,KAAK,CAAC,EAC9B,OAAO,OAAK,MAAM,IAAI,MAAM,UAAU,IAAI,CAAC,CAAC;AAAA,IACrD;AAAA,EACJ,EAAE;AAGF,QAAM,aAAa,sBAAsB,YAAY;AAErD,SAAO;AAAA,IACH,GAAG;AAAA,IACH,SAAS;AAAA,IACT;AAAA,EACJ;AACJ;AAMA,SAAS,oBAAoB,SAAuB,SAAmC;AACnF,QAAM,UAAU,IAAI,IAAI,QAAQ,IAAI,OAAK,EAAE,EAAE,CAAC;AAE9C,QAAM,WAAW,MAAM,QAAQ,QAAQ,OAAK,EAAE,QAAQ,CAAC;AACvD,QAAM,eAAe;AAAA,IACjB,QAAQ,QAAQ,OAAK,EAAE,YAAY,EAAE,OAAO,OAAK,CAAC,QAAQ,IAAI,CAAC,CAAC;AAAA,EACpE;AACA,QAAM,aAAa;AAAA,IACf,QAAQ,QAAQ,OAAK,EAAE,UAAU,EAAE,OAAO,OAAK,CAAC,QAAQ,IAAI,CAAC,CAAC;AAAA,EAClE;AAGA,QAAM,aAAa,qBAAqB,OAAO;AAG/C,QAAM,YAAY,oBAAI,IAAoB;AAC1C,aAAW,KAAK,SAAS;AACrB,cAAU,IAAI,EAAE,WAAW,UAAU,IAAI,EAAE,QAAQ,KAAK,KAAK,CAAC;AAAA,EAClE;AACA,MAAI,WAAW,QAAQ,CAAC,EAAE;AAC1B,MAAI,YAAY;AAChB,aAAW,CAAC,KAAK,KAAK,KAAK,WAAW;AAClC,QAAI,QAAQ,WAAW;AAAE,iBAAW;AAAK,kBAAY;AAAA,IAAO;AAAA,EAChE;AAGA,QAAM,eAAe,QAChB,IAAI,OAAK,EAAE,IAAI,EACf,KAAK,CAAC,GAAG,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,CAAC;AAG1C,QAAM,aAAa;AAAA,IACf,QAAQ,QAAQ,OAAK,EAAE,cAAc,CAAC,EAAE,EAAE,CAAC;AAAA,EAC/C;AAGA,QAAM,QAAQ,IAAI,IAAI,QAAQ,IAAI,OAAK,EAAE,IAAI,EAAE,OAAO,OAAO,CAAC;AAC9D,QAAM,OAAO,MAAM,SAAS,IAAI,CAAC,GAAG,KAAK,EAAE,CAAC,IAAI;AAEhD,SAAO;AAAA,IACH,IAAI,QAAQ;AAAA,IACZ,MAAM,QAAQ;AAAA,IACd,MAAM;AAAA,IACN,SAAS,QAAQ,WAAW,QAAQ,IAAI,OAAK,EAAE,OAAO,EAAE,MAAM,GAAG,CAAC,EAAE,KAAK,IAAI;AAAA,IAC7E;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACJ;AACJ;AAEA,SAAS,MAAM,KAAyB;AACpC,SAAO,CAAC,GAAG,IAAI,IAAI,GAAG,CAAC;AAC3B;AAEA,SAAS,sBAAsB,SAAuC;AAClE,QAAM,cAAc,oBAAI,IAAoB;AAC5C,aAAW,OAAO,SAAS;AACvB,gBAAY,IAAI,IAAI,WAAW,YAAY,IAAI,IAAI,QAAQ,KAAK,KAAK,CAAC;AAAA,EAC1E;AACA,SAAO,MAAM,KAAK,YAAY,QAAQ,CAAC,EAAE,IAAI,CAAC,CAAC,MAAM,KAAK,OAAO;AAAA,IAC7D;AAAA,IACA,aAAa,YAAY,KAAK;AAAA,EAClC,EAAE;AACN;AAtWA,IAcAC,wBAWM,sBAGA;AA5BN;AAAA;AAAA;AAcA,IAAAA,yBAA4B;AAG5B;AACA;AAOA,IAAM,uBAAuB;AAG7B,IAAM,gCAAgC;AAAA;AAAA;;;ACWtC,eAAsB,mBAClB,OACA,WACA,UAAgC,CAAC,GACL;AAC5B,QAAM,YAAY,KAAK,IAAI;AAC3B,QAAM,gBAAgB,MAAM,QAAQ;AACpC,QAAM,cAAc,QAAQ,qBAAqB;AAGjD,QAAM,iBAAiB,uBAAuB,KAAK;AACnD,QAAM,sBAAsB,eAAe,QAAQ;AAGnD,MAAI,aAAa;AAEjB,MAAI,CAAC,QAAQ,UAAU,aAAa,sBAAsB,aAAa;AACnE,QAAI;AACA,mBAAa,MAAM,cAAc,gBAAgB;AAAA,QAC7C;AAAA,QACA;AAAA,QACA,OAAO,
QAAQ;AAAA,QACf,WAAW,QAAQ;AAAA,MACvB,CAAC;AAAA,IACL,QAAQ;AAEJ,mBAAa;AAAA,IACjB;AAAA,EACJ;AAEA,SAAO;AAAA,IACH,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA,YAAY,WAAW,QAAQ;AAAA,IAC/B,UAAU,KAAK,IAAI,IAAI;AAAA,EAC3B;AACJ;AA5EA,IAsBM;AAtBN;AAAA;AAAA;AAcA;AACA;AAOA,IAAM,8BAA8B;AAAA;AAAA;;;ACtBpC;AAAA;AAAA;AAWA;AACA;AACA;AAAA;AAAA;;;AC4BA,eAAsB,uBAClB,UACA,OACA,SACA,cACkC;AAClC,QAAM,YAAY,KAAK,IAAI;AAE3B,UAAQ,OAAO,MAAM,IAAI;AACzB,cAAY,wBAAwB;AACpC,YAAU,UAAU,MAAM,QAAQ,MAAM,UAAU;AAElD,QAAM,YAAiB,eAAQ,QAAQ,MAAM;AAC7C,QAAM,mBAAmB,MAAM,QAAQ;AAGvC,MAAI,CAAC,QAAQ,OAAO;AAChB,UAAM,SAAS,QAAQ,WACjB,0BAA0B,WAAW,gBAAgB,IACrD,MAAM,uBAAuB,UAAU,WAAW,gBAAgB;AAExE,QAAI,QAAQ;AACR;AAAA,QACI,+BAA+B,gBAAgB,WAAM,OAAO,MAAM,QAAQ,MAAM;AAAA,MACpF;AACA,oBAAc,WAAW,eAAe;AACxC,aAAO,EAAE,OAAO,OAAO,OAAO,UAAU,KAAK,IAAI,IAAI,UAAU;AAAA,IACnE;AAAA,EACJ;AAEA,QAAM,UAAU,IAAI,QAAQ;AAC5B,UAAQ,MAAM,0BAA0B;AAExC,MAAI;AAEA,UAAM,qBAAqB,kBAAkB,SAAS,eAAe;AACrE,UAAM,uBAAuB,oBAAoB,SAAS,eAAe;AACzE,UAAM,sBAAsB,QAAQ,QAAQ,eAAe;AAG3D,IAAG,eAAU,WAAW,EAAE,WAAW,KAAK,CAAC;AAC3C,UAAM,cAAc,2BAA2B;AAAA,MAC3C,kBAAkB;AAAA,MAClB,OAAO;AAAA,MACP,WAAW,uBAAuB,uBAAuB,MAAO;AAAA,IACpE,CAAC;AAGD,UAAM,YAAuB,OAAO,QAAQ,SAAS;AACjD,YAAMC,UAAS,MAAM,YAAY,QAAQ,IAAI;AAC7C,oBAAc,SAAS,iBAAiBA,QAAO,UAAU;AACzD,aAAOA;AAAA,IACX;AAEA,UAAM,SAAS,MAAM,mBAAmB,OAAO,WAAW;AAAA,MACtD,OAAO;AAAA,MACP,WAAW,uBAAuB,uBAAuB,MAAO;AAAA,MAChE,QAAQ;AAAA,IACZ,CAAC;AAED,YAAQ;AAAA,MACJ,2BAA2B,OAAO,aAAa,WAAM,OAAO,mBAAmB,wBAAmB,OAAO,UAAU;AAAA,IACvH;AAGA,UAAM,kBAAkB,UAAU,OAAO,OAAO,WAAW,gBAAgB;AAE3E,WAAO,EAAE,OAAO,OAAO,OAAO,UAAU,KAAK,IAAI,IAAI,UAAU;AAAA,EACnE,SAAS,OAAO;AACZ,YAAQ,KAAK,oDAA+C;AAC5D,QAAI,QAAQ,SAAS;AACjB,mBAAa,gBAAgB,KAAK,CAAC;AAAA,IACvC;AACA,WAAO,EAAE,OAAO,UAAU,KAAK,IAAI,IAAI,UAAU;AAAA,EACrD;AACJ;AApHA,IAMAC,QACAC;AAPA;AAAA;AAAA;AAMA,IAAAD,SAAsB;AACtB,IAAAC,OAAoB;AAGpB;AACA;AACA;AAEA;AAKA;AAOA;AAAA;AAAA;;;ACiDO,SAAS,sBAAsB,OAA8C;AAChF,UAAQ,OAAO;AAAA,IACX,KAAK;AAAW,aAAO;AAAA,IACvB,KAAK;AAAQ,aAAO;AAAA,IACpB;AAAS,aAAO;AAAA,EACpB;AACJ;AAaO,SAAS,4BAA4B,OAA8C;AACtF,QAAM,QAAQ,sBAAsB,KAAK;AAEzC,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaT,KAAK;AAAA;AAAA;AAAA;AAAA,EAIL,sBAAsB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAUxB;AAMO,SAAS,0BAAoC;AAChD,SAAO;AAAA,IACH;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACJ;AACJ;AAjJA,IAoBM,6BAcA,4BAiBA;AAnDN,IAAAC,gBAAA;AAAA;AAAA;AAUA;AAUA,IAAM,8BAA8B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAcpC,IAAM,6BAA6B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBnC,IAAM,2BAA2B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACjB1B,SAASC,aAAY,UAAkC;AAC1D,MAAI,CAAC,YAAY,OAAO,aAAa,UAAU;AAC3C,WAAO;AAAA,EACX;AAEA,QAAM,UAAU,SAAS,KAAK;AAG9B,MAAI;AACA,WAAO,KAAK,MAAM,OAAO;AAAA,EAC7B,QAAQ;AAAA,EAER;AAIA,QAAM,iBAAiB,QAAQ,MAAM,4BAA4B;AACjE,MAAI,gBAAgB;AAChB,UAAM,UAAU,eAAe,CAAC,EAAE,KAAK;AACvC,QAAI,SAAS;AACT,UAAI;AACA,eAAO,KAAK,MAAM,OAAO;AAAA,MAC7B,QAAQ;AAAA,MAER;AAAA,IACJ;AAAA,EACJ;AAGA,QAAM,iBAAiB,QAAQ,MAAM,wBAAwB;AAC7D,MAAI,gBAAgB;AAChB,UAAM,UAAU,eAAe,CAAC,EAAE,KAAK;AACvC,QAAI,SAAS;AACT,UAAI;AACA,eAAO,KAAK,MAAM,OAAO;AAAA,MAC7B,QAAQ;AAAA,MAER;AAAA,IACJ;AAAA,EACJ;AAIA,QAAM,gBAAgB,CAAC,GAAG,QAAQ,SAAS,6BAA6B,CAAC;AACzE,WAAS,IAAI,cAAc,SAAS,GAAG,KAAK,GAAG,KAAK;AAChD,UAAM,UAAU,cAAc,CAAC,EAAE,CAAC,EAAE,KAAK;AACzC,QAAI,SAAS;AACT,UAAI;AACA,eAAO,KAAK,MAAM,OAAO;AAAA,MAC7B,QAAQ;AACJ;AAAA,MACJ;AAAA,IACJ;AAAA,EACJ;AAGA,QAAM,aAAa,QAAQ,QAAQ,GAAG;AACtC,QAAM,YAAY,QAAQ,YAAY,GAAG;AACzC,MAAI,eAAe,MAAM,YAAY,YAAY;AAC7C,QAAI;AACA,aAAO,KAAK,MAAM,QAAQ,UAAU,YAAY,YAAY,CAAC,CAA
C;AAAA,IAClE,QAAQ;AAAA,IAER;AAAA,EACJ;AAEA,SAAO;AACX;AASA,SAAS,aAAa,OAAgB,eAAuB,IAAY;AACrE,MAAI,OAAO,UAAU,UAAU;AAC3B,WAAO;AAAA,EACX;AACA,SAAO;AACX;AAKA,SAAS,YAAe,OAAqB;AACzC,MAAI,MAAM,QAAQ,KAAK,GAAG;AACtB,WAAO;AAAA,EACX;AACA,SAAO,CAAC;AACZ;AAKA,SAAS,oBAAoB,KAAiC;AAC1D,MAAI,CAAC,OAAO,OAAO,QAAQ,UAAU;AAAE,WAAO;AAAA,EAAM;AACpD,QAAM,MAAM;AACZ,MAAI,CAAC,IAAI,QAAQ,OAAO,IAAI,SAAS,UAAU;AAAE,WAAO;AAAA,EAAM;AAE9D,SAAO;AAAA,IACH,MAAM,IAAI;AAAA,IACV,aAAa,aAAa,IAAI,WAAW;AAAA,IACzC,SAAS,OAAO,IAAI,YAAY,WAAW,IAAI,UAAU;AAAA,EAC7D;AACJ;AAKA,SAAS,wBAAwB,KAAqC;AAClE,MAAI,CAAC,OAAO,OAAO,QAAQ,UAAU;AAAE,WAAO;AAAA,EAAM;AACpD,QAAM,MAAM;AACZ,MAAI,CAAC,IAAI,QAAQ,OAAO,IAAI,SAAS,UAAU;AAAE,WAAO;AAAA,EAAM;AAE9D,SAAO;AAAA,IACH,MAAM,IAAI;AAAA,IACV,WAAW,aAAa,IAAI,SAAS;AAAA,IACrC,aAAa,aAAa,IAAI,WAAW;AAAA,EAC7C;AACJ;AAKA,SAAS,qBAAqB,KAAkC;AAC5D,MAAI,CAAC,OAAO,OAAO,QAAQ,UAAU;AAAE,WAAO;AAAA,EAAM;AACpD,QAAM,MAAM;AACZ,MAAI,CAAC,IAAI,SAAS,OAAO,IAAI,UAAU,UAAU;AAAE,WAAO;AAAA,EAAM;AAEhE,QAAM,UAAuB;AAAA,IACzB,OAAO,IAAI;AAAA,IACX,MAAM,aAAa,IAAI,IAAI;AAAA,EAC/B;AAEA,MAAI,OAAO,IAAI,SAAS,YAAY,IAAI,KAAK,SAAS,GAAG;AACrD,YAAQ,OAAO,kBAAkB,IAAI,IAAI;AAAA,EAC7C;AAEA,MAAI,MAAM,QAAQ,IAAI,KAAK,KAAK,IAAI,MAAM,WAAW,GAAG;AACpD,UAAM,QAAQ,OAAO,IAAI,MAAM,CAAC,CAAC;AACjC,UAAM,MAAM,OAAO,IAAI,MAAM,CAAC,CAAC;AAC/B,QAAI,CAAC,MAAM,KAAK,KAAK,CAAC,MAAM,GAAG,KAAK,SAAS,KAAK,OAAO,OAAO;AAC5D,cAAQ,QAAQ,CAAC,OAAO,GAAG;AAAA,IAC/B;AAAA,EACJ;AAEA,SAAO;AACX;AAKA,SAAS,4BAA4B,KAAyC;AAC1E,MAAI,CAAC,OAAO,OAAO,QAAQ,UAAU;AAAE,WAAO;AAAA,EAAM;AACpD,QAAM,MAAM;AACZ,MAAI,CAAC,IAAI,UAAU,OAAO,IAAI,WAAW,UAAU;AAAE,WAAO;AAAA,EAAM;AAElE,SAAO;AAAA,IACH,QAAQ,IAAI;AAAA,IACZ,OAAO,aAAa,IAAI,KAAK;AAAA,EACjC;AACJ;AAKA,SAAS,4BAA4B,KAAyC;AAC1E,MAAI,CAAC,OAAO,OAAO,QAAQ,UAAU;AAAE,WAAO;AAAA,EAAM;AACpD,QAAM,MAAM;AACZ,MAAI,CAAC,IAAI,WAAW,OAAO,IAAI,YAAY,UAAU;AAAE,WAAO;AAAA,EAAM;AAEpE,SAAO;AAAA,IACH,SAAS,IAAI;AAAA,IACb,OAAO,aAAa,IAAI,KAAK;AAAA,EACjC;AACJ;AAMA,SAAS,kBAAkB,UAA0B;AAEjD,MAAI,aAAa,SAAS,QAAQ,SAAS,EAAE,EAAE,QAAQ,OAAO,EAAE;AAEhE,eAAa,WAAW,QAAQ,OAAO,GAAG;AAC1C,SAAO;AACX;AAMA,SAAS,wBAAwB,OAAwB;AACrD,MAAI,OAAO,UAAU,YAAY,CAAC,MAAM,KAAK,GAAG;AAC5C,WAAO;AAAA,EACX;AAGA,MAAI,UAAU,MAAM,KAAK;AACzB,QAAM,oBAAoB,QAAQ,MAAM,wCAAwC;AAChF,MAAI,mBAAmB;AACnB,cAAU,kBAAkB,CAAC,EAAE,KAAK;AAAA,EACxC;AAGA,MAAI,CAAC,sBAAsB,OAAO,GAAG;AACjC,WAAO;AAAA,EACX;AAEA,SAAO;AACX;AAcO,SAAS,sBAAsB,UAAkB,kBAA0C;AAC9F,QAAM,SAASA,aAAY,QAAQ;AACnC,MAAI,CAAC,UAAU,OAAO,WAAW,UAAU;AACvC,UAAM,IAAI,MAAM,+CAA+C;AAAA,EACnE;AAEA,QAAM,MAAM;AAGZ,QAAM,WAAW,OAAO,IAAI,aAAa,WAAW,IAAI,WAAW;AAGnE,QAAM,WAA2B;AAAA,IAC7B;AAAA,IACA,UAAU,aAAa,IAAI,UAAU,wBAAwB;AAAA,IAC7D,aAAa,YAAY,IAAI,WAAW,EACnC,IAAI,mBAAmB,EACvB,OAAO,CAAC,MAAuB,MAAM,IAAI;AAAA,IAC9C,WAAW,YAAY,IAAI,SAAS,EAC/B,IAAI,uBAAuB,EAC3B,OAAO,CAAC,MAA2B,MAAM,IAAI;AAAA,IAClD,sBAAsB,aAAa,IAAI,oBAAoB;AAAA,IAC3D,UAAU,aAAa,IAAI,QAAQ;AAAA,IACnC,UAAU,YAAoB,IAAI,QAAQ,EAAE;AAAA,MACxC,CAAC,MAAmB,OAAO,MAAM,YAAY,EAAE,SAAS;AAAA,IAC5D;AAAA,IACA,eAAe,aAAa,IAAI,aAAa;AAAA,IAC7C,cAAc,YAAY,IAAI,YAAY,EACrC,IAAI,oBAAoB,EACxB,OAAO,CAAC,MAAwB,MAAM,IAAI;AAAA,IAC/C,cAAc;AAAA,MACV,UAAU,CAAC;AAAA,MACX,UAAU,CAAC;AAAA,IACf;AAAA,IACA,kBAAkB,wBAAwB,IAAI,gBAAgB;AAAA,EAClE;AAGA,MAAI,IAAI,gBAAgB,OAAO,IAAI,iBAAiB,UAAU;AAC1D,UAAM,OAAO,IAAI;AACjB,aAAS,aAAa,WAAW,YAAY,KAAK,QAAQ,EACrD,IAAI,2BAA2B,EAC/B,OAAO,CAAC,MAA+B,MAAM,IAAI;AACtD,aAAS,aAAa,WAAW,YAAY,KAAK,QAAQ,EACrD,IAAI,2BAA2B,EAC/B,OAAO,CAAC,MAA+B,MAAM,IAAI;AAAA,EAC1D;AAEA,SAAO;AACX;AAtTA,IAAAC,wBAAA;AAAA;AAAA;AAoBA;AAAA;AAAA;;;AC+DO,SAAS,mBAAmBC,SAAoB,OAAgC;AACnF,SAAO;AAAA,IACH,UAAUA,QAAO;AAAA,IACjB,YAAYA,QAAO;AAAA,IACnB,YAAYA,QAAO;AAAA,IACnB,SAASA,QAAO;AAAA,IAChB,UAAUA,QAAO,SAAS,KAAK,IA
AI;AAAA,IACnC,cAAcA,QAAO,aAAa,KAAK,IAAI,KAAK;AAAA,IAChD,YAAYA,QAAO,WAAW,KAAK,IAAI,KAAK;AAAA,IAC5C,YAAYA,QAAO;AAAA,IACnB,UAAUA,QAAO;AAAA,IACjB,aAAa,MAAM,QAAQ;AAAA,IAC3B,mBAAmB,MAAM,qBAAqB;AAAA,EAClD;AACJ;AAiBA,eAAsB,oBAClB,SAC+B;AAC/B,QAAM,YAAY,KAAK,IAAI;AAC3B,QAAM;AAAA,IACF;AAAA,IACA;AAAA,IACA;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA,gBAAgB;AAAA,IAChB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACJ,IAAI;AAEJ,QAAM,UAAU,MAAM;AACtB,MAAI,QAAQ,WAAW,GAAG;AACtB,WAAO,EAAE,UAAU,CAAC,GAAG,iBAAiB,CAAC,GAAG,UAAU,EAAE;AAAA,EAC5D;AAGA,QAAM,iBAAiB,4BAA4B,KAAK;AACxD,QAAM,eAAe,wBAAwB;AAG7C,QAAM,EAAE,UAAU,gBAAgB,IAAI,MAAM,qBAAqB;AAAA,IAC7D;AAAA,IAAS;AAAA,IAAO;AAAA,IAAW;AAAA,IAAgB;AAAA,IAC3C;AAAA,IAAa;AAAA,IAAW;AAAA,IAAO;AAAA,IAAY;AAAA,IAAa;AAAA,EAC5D,CAAC;AAGD,MAAI,gBAAgB,SAAS,KAAK,gBAAgB,GAAG;AACjD,UAAM,aAAS,kCAAU;AACzB,QAAI,kBAAkB,CAAC,GAAG,eAAe;AAEzC,aAAS,UAAU,GAAG,UAAU,iBAAiB,gBAAgB,SAAS,GAAG,WAAW;AACpF,UAAI,cAAc,EAAG;AAErB,aAAO,MAAM,mCAAY,YAAY,YAAY,gBAAgB,MAAM,8BAA8B,UAAU,CAAC,IAAI,aAAa,GAAG;AAGpI,YAAM,eAAe,QAAQ,OAAO,OAAK,gBAAgB,SAAS,EAAE,EAAE,CAAC;AAEvE,YAAM,cAAc,MAAM,qBAAqB;AAAA,QAC3C,SAAS;AAAA,QAAc;AAAA,QAAO;AAAA,QAAW;AAAA,QAAgB;AAAA,QACzD;AAAA,QAAa;AAAA,QAAW;AAAA,QAAO;AAAA,QAAY;AAAA,QAAa;AAAA,MAC5D,CAAC;AAGD,eAAS,KAAK,GAAG,YAAY,QAAQ;AAGrC,wBAAkB,YAAY;AAAA,IAClC;AAGA,oBAAgB,SAAS;AACzB,oBAAgB,KAAK,GAAG,eAAe;AAAA,EAC3C;AAEA,SAAO;AAAA,IACH;AAAA,IACA;AAAA,IACA,UAAU,KAAK,IAAI,IAAI;AAAA,EAC3B;AACJ;AAuBA,eAAe,qBACX,SACkE;AAClE,QAAM,EAAE,SAAS,OAAO,WAAW,gBAAgB,cAAc,aAAa,WAAW,OAAO,YAAY,aAAa,eAAe,IAAI;AAE5I,QAAM,QAAsB,QAAQ,IAAI,OAAK,mBAAmB,GAAG,KAAK,CAAC;AAGzE,QAAM,YAAQ,6CAAqB,OAAO,gBAAgB,YAAY;AAGtE,QAAM,UAAM,2CAAmB;AAAA,IAC3B;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA,gBAAgB;AAAA,EACpB,CAAC;AAGD,QAAM,eAAW,uCAAe;AAAA,IAC5B;AAAA,IACA,gBAAgB;AAAA,IAChB,YAAY;AAAA,IACZ,cAAc;AAAA,IACd,gBAAgB;AAAA,IAChB;AAAA,IACA,SAAS;AAAA,IACT;AAAA,IACA;AAAA,IACA;AAAA,EACJ,CAAC;AAGD,QAAM,SAAS,MAAM,SAAS,QAAQ,KAAK,KAAK;AAGhD,QAAM,WAA6B,CAAC;AACpC,QAAM,kBAA4B,CAAC;AAEnC,MAAI,OAAO,QAAQ;AACf,UAAM,aAAS,kCAAU;AACzB,UAAM,SAAS,OAAO;AACtB,eAAW,aAAa,OAAO,SAAS;AACpC,YAAM,WAAW,UAAU,KAAK;AAEhC,UAAI,UAAU,WAAW,UAAU,aAAa;AAC5C,YAAI;AACA,gBAAM,WAAW,sBAAsB,UAAU,aAAa,QAAQ;AACtE,mBAAS,KAAK,QAAQ;AAAA,QAC1B,SAAS,WAAW;AAEhB,cAAI;AACA,kBAAM,WAAW,sBAAsB,UAAU,QAAQ,QAAQ;AACjE,qBAAS,KAAK,QAAQ;AAAA,UAC1B,SAAS,WAAW;AAChB,mBAAO,MAAM,mCAAY,YAAY,qCAAqC,QAAQ,mBAAmB,UAAU,YAAY,MAAM,YAAY,UAAU,YAAY,UAAU,GAAG,GAAG,CAAC,EAAE;AACtL,mBAAO,MAAM,mCAAY,YAAY,oBAAoB,qBAAqB,QAAQ,UAAU,UAAU,OAAO,SAAS,CAAC,EAAE;AAC7H,mBAAO,MAAM,mCAAY,YAAY,oBAAoB,qBAAqB,QAAQ,UAAU,UAAU,OAAO,SAAS,CAAC,EAAE;AAC7H,4BAAgB,KAAK,QAAQ;AAAA,UACjC;AAAA,QACJ;AAAA,MACJ,WAAW,UAAU,WAAW,UAAU,QAAQ;AAE9C,YAAI;AACA,gBAAM,WAAW,sBAAsB,UAAU,QAAQ,QAAQ;AACjE,mBAAS,KAAK,QAAQ;AAAA,QAC1B,QAAQ;AACJ,0BAAgB,KAAK,QAAQ;AAAA,QACjC;AAAA,MACJ,WAAW,CAAC,UAAU,WAAW,UAAU,aAAa;AAGpD,YAAI;AACA,gBAAM,WAAW,sBAAsB,UAAU,aAAa,QAAQ;AACtE,mBAAS,KAAK,QAAQ;AACtB,iBAAO,MAAM,mCAAY,YAAY,kCAAkC,QAAQ,uDAAuD,UAAU,KAAK,GAAG;AAAA,QAC5J,SAAS,aAAa;AAElB,cAAI;AACA,kBAAM,WAAW,sBAAsB,UAAU,QAAQ,QAAQ;AACjE,qBAAS,KAAK,QAAQ;AAAA,UAC1B,QAAQ;AACJ,mBAAO,MAAM,mCAAY,YAAY,+BAA+B,QAAQ,cAAc,UAAU,OAAO,WAAW,UAAU,SAAS,MAAM,iBAAiB,UAAU,YAAY,MAAM,WAAW,UAAU,YAAY,UAAU,GAAG,GAAG,CAAC,EAAE;AAChP,mBAAO,MAAM,mCAAY,YAAY,2BAA2B,uBAAuB,QAAQ,YAAY,UAAU,OAAO,WAAW,CAAC,EAAE;AAC1I,4BAAgB,KAAK,QAAQ;AAAA,UACjC;AAAA,QACJ;AAAA,MACJ,OAAO;AACH,eAAO,MAAM,mCAAY,YAAY,+BAA+B,QAAQ,cAAc,UAAU,OAAO,WAAW,UAAU,SAAS,MAAM,iBAAiB,UAAU,cAAc,GAAG,UAAU,YAAY,MAAM,WAAW,UAAU,YAAY,UAAU,GAAG,GAAG,CAAC,KAAK,MAAM,EAAE;AACtR,wBAAgB,KAAK,QAAQ;AAAA,MACjC;AAAA,IACJ;AAAA,EACJ,OAAO;AAEH,eAAWA,WAAU,SAAS;AAC1B,sBAAgB,KAAKA,QAAO,EAAE;AAAA,IAClC;AAAA,E
ACJ;AAEA,SAAO,EAAE,UAAU,gBAAgB;AACvC;AAUA,SAAS,sBACL,QACA,kBACc;AAEd,QAAM,UAAU,KAAK,UAAU,MAAM;AACrC,SAAO,sBAAsB,SAAS,gBAAgB;AAC1D;AAjUA,IAWAC;AAXA;AAAA;AAAA;AAWA,IAAAA,yBAMO;AAUP,IAAAC;AACA,IAAAC;AAAA;AAAA;;;ACMA,eAAsB,eAClB,SACA,WACA,YACA,aACA,gBACuB;AACvB,QAAM,YAAY,KAAK,IAAI;AAE3B,QAAM,SAAS,MAAM,oBAAoB;AAAA,IACrC;AAAA,IACA,OAAO,QAAQ;AAAA,IACf,OAAO,QAAQ,SAAS;AAAA,IACxB,aAAa,QAAQ,eAAe;AAAA,IACpC,WAAW,QAAQ,WAAW;AAAA,IAC9B,OAAO,QAAQ;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,EACJ,CAAC;AAED,SAAO;AAAA,IACH,UAAU,OAAO;AAAA,IACjB,UAAU,KAAK,IAAI,IAAI;AAAA,EAC3B;AACJ;AA3DA;AAAA;AAAA;AAYA;AAGA,IAAAC;AACA,IAAAC;AACA;AAAA;AAAA;;;ACiCA,eAAsB,kBAClB,UACA,OACA,SACA,aACA,cAC6B;AAC7B,QAAM,YAAY,KAAK,IAAI;AAE3B,UAAQ,OAAO,MAAM,IAAI;AACzB,cAAY,wBAAwB;AAGpC,QAAM,gBAAgB,kBAAkB,SAAS,UAAU;AAC3D,QAAM,kBAAkB,oBAAoB,SAAS,UAAU;AAC/D,QAAM,sBAAsB,wBAAwB,SAAS,UAAU,KAAK;AAC5E,QAAM,gBAAgB,kBAAkB,SAAS,UAAU;AAC3D,QAAM,cAAc;AAGpB,MAAI,mBAAmB,MAAM;AAC7B,MAAI,iBAAmC,CAAC;AAExC,MAAI,CAAC,QAAQ,OAAO;AAChB,QAAI,QAAQ,UAAU;AAElB,YAAM,eAAe,MAAM,QAAQ,IAAI,OAAK,EAAE,EAAE;AAChD,YAAM,EAAE,OAAO,QAAQ,IAAI;AAAA,QACvB;AAAA,QAAc,QAAQ;AAAA,MAC1B;AAEA,UAAI,MAAM,SAAS,GAAG;AAClB,yBAAiB;AACjB,2BAAmB,MAAM,QAAQ;AAAA,UAC7B,OAAK,QAAQ,SAAS,EAAE,EAAE;AAAA,QAC9B;AAEA,YAAI,QAAQ,WAAW,GAAG;AACtB,uBAAa,OAAO,MAAM,MAAM,oCAAoC;AAAA,QACxE,OAAO;AACH,oBAAU,UAAU,MAAM,MAAM,qBAAqB,QAAQ,MAAM,YAAY;AAAA,QACnF;AAAA,MACJ;AAAA,IACJ,OAAO;AAEP,YAAM,oBAAoB,MAAM;AAAA,QAC5B;AAAA,QAAO,QAAQ;AAAA,QAAQ;AAAA,MAC3B;AAEA,UAAI,sBAAsB,MAAM;AAC5B,YAAI,kBAAkB,WAAW,GAAG;AAEhC,gBAAM,YAAY,kBAAkB,QAAQ,MAAM;AAClD,cAAI,aAAa,UAAU,SAAS,GAAG;AACnC,yBAAa,OAAO,UAAU,MAAM,0CAA0C;AAC9E,0BAAc,WAAW,UAAU;AACnC,mBAAO,EAAE,UAAU,WAAW,UAAU,KAAK,IAAI,IAAI,WAAW,qBAAqB,CAAC,EAAE;AAAA,UAC5F;AAAA,QACJ,OAAO;AAEH,oBAAU,GAAG,kBAAkB,MAAM,qBAAqB,MAAM,QAAQ,SAAS,kBAAkB,MAAM,SAAS;AAGlH,qBAAWC,WAAU,MAAM,SAAS;AAChC,gBAAI,CAAC,kBAAkB,SAASA,QAAO,EAAE,GAAG;AACxC,oBAAM,SAAS,kBAAkBA,QAAO,IAAI,QAAQ,MAAM;AAC1D,kBAAI,QAAQ;AACR,+BAAe,KAAK,MAAM;AAAA,cAC9B,OAAO;AAEH,kCAAkB,KAAKA,QAAO,EAAE;AAAA,cACpC;AAAA,YACJ;AAAA,UACJ;AAGA,6BAAmB,MAAM,QAAQ;AAAA,YAC7B,OAAK,kBAAkB,SAAS,EAAE,EAAE;AAAA,UACxC;AAAA,QACJ;AAAA,MACJ,OAAO;AAGH,cAAM,cAAc,MAAM,kBAAkB,QAAQ;AACpD,YAAI,aAAa;AACb,gBAAM,eAAe,MAAM,QAAQ,IAAI,OAAK,EAAE,EAAE;AAChD,gBAAM,EAAE,OAAO,QAAQ,IAAI;AAAA,YACvB;AAAA,YAAc,QAAQ;AAAA,YAAQ;AAAA,UAClC;AAEA,cAAI,MAAM,SAAS,GAAG;AAClB,sBAAU,aAAa,MAAM,MAAM,wCAAwC,QAAQ,MAAM,YAAY;AACrG,6BAAiB;AACjB,+BAAmB,MAAM,QAAQ;AAAA,cAC7B,OAAK,QAAQ,SAAS,EAAE,EAAE;AAAA,YAC9B;AAAA,UACJ;AAAA,QACJ;AAAA,MACJ;AAAA,IACA;AAAA,EACJ;AAEA,MAAI,iBAAiB,WAAW,KAAK,eAAe,SAAS,GAAG;AAC5D,iBAAa,mCAAmC,eAAe,MAAM,WAAW;AAChF,kBAAc,WAAW,UAAU;AACnC,WAAO,EAAE,UAAU,gBAAgB,UAAU,KAAK,IAAI,IAAI,WAAW,qBAAqB,CAAC,EAAE;AAAA,EACjG;AAGA,QAAM,sBAAsB,sBAAsB;AAAA,IAC9C;AAAA,IACA,OAAO;AAAA,IACP,WAAW,kBAAkB,kBAAkB,MAAO;AAAA,EAC1D,CAAC;AAGD,QAAM,kBAA6B,OAAO,QAAQ,SAAS;AACvD,UAAM,SAAS,MAAM,oBAAoB,QAAQ,IAAI;AACrD,kBAAc,SAAS,YAAY,OAAO,UAAU;AACpD,WAAO;AAAA,EACX;AAGA,MAAI,UAAyB;AAC7B,MAAI;AACA,cAAU,MAAM,kBAAkB,QAAQ;AAAA,EAC9C,QAAQ;AAAA,EAER;AAEA,QAAM,UAAU,IAAI,QAAQ;AAC5B,UAAQ,MAAM,aAAa,iBAAiB,MAAM,aAAa,WAAW,eAAe;AAEzF,MAAI;AAEA,UAAM,WAAW;AAAA,MACb,GAAG;AAAA,MACH,SAAS;AAAA,IACb;AAEA,UAAM,SAAS,MAAM;AAAA,MACjB;AAAA,QACI,OAAO;AAAA,QACP,OAAO;AAAA,QACP,SAAS,kBAAkB,kBAAkB,MAAO;AAAA,QACpD;AAAA,QACA,OAAO;AAAA,QACP;AAAA,MACJ;AAAA,MACA;AAAA,MACA,CAAC,aAAa;AACV,YAAI,SAAS,UAAU,WAAW;AAC9B,kBAAQ;AAAA,YACJ,sBAAsB,SAAS,cAAc,IAAI,SAAS,UAAU,KAChE,SAAS,WAAW;AAAA,UAC5B;AAAA,QACJ;AAAA,MACJ;AAAA,MACA;AAAA;AAAA,MAEA,CAAC,MAAM,cAAc;AACjB,YAAI,CAAC,WAAW,CAAC,UAAU,WAAW,CAAC,UAAU,QAAQ;AACrD;AAAA,QACJ;AACA,YAAI;AAEA,gBAAM,SAAS,UAAU;AACzB,gBAAM,WAAW,QAAQ,M
AAM;AAC/B,gBAAM,cAAc,QAAQ;AAC5B,cAAI,YAAY,aAAa;AACzB,kBAAM,WAAW,sBAAsB,aAAa,QAAQ;AAC5D,yBAAa,UAAU,UAAU,QAAQ,QAAQ,OAAO;AAAA,UAC5D;AAAA,QACJ,QAAQ;AAAA,QAER;AAAA,MACJ;AAAA,IACJ;AAGA,UAAM,cAAc,CAAC,GAAG,gBAAgB,GAAG,OAAO,QAAQ;AAE1D,QAAI,OAAO,SAAS,WAAW,KAAK,iBAAiB,SAAS,GAAG;AAC7D,cAAQ,KAAK,4BAA4B;AACzC,iBAAW,2FAA2F;AACtG,aAAO,EAAE,UAAU,KAAK,IAAI,IAAI,WAAW,UAAU,WAAW,gBAAgB;AAAA,IACpF;AAEA,UAAM,cAAc,iBAAiB,SAAS,OAAO,SAAS;AAC9D,QAAI,cAAc,GAAG;AACjB,cAAQ,KAAK,4BAAuB,OAAO,SAAS,MAAM,eAAe,WAAW,SAAS;AAG7F,UAAI,QAAQ,WAAW,OAAO;AAE1B,cAAM,eAAe,IAAI,IAAI,OAAO,SAAS,IAAI,OAAK,EAAE,QAAQ,CAAC;AACjE,cAAM,kBAAkB,iBACnB,OAAO,OAAK,CAAC,aAAa,IAAI,EAAE,EAAE,CAAC,EACnC,IAAI,OAAK,EAAE,EAAE;AAClB;AAAA,UACI,gBAAgB,WAAW,+BAA+B,gBAAgB,KAAK,IAAI,CAAC;AAAA,QAExF;AACA,eAAO,EAAE,UAAU,KAAK,IAAI,IAAI,WAAW,UAAU,WAAW,gBAAgB;AAAA,MACpF;AAAA,IACJ,OAAO;AACH,cAAQ,QAAQ,4BAAuB,OAAO,SAAS,MAAM,mBAAmB;AAAA,IACpF;AAGA,QAAI;AACA,YAAM,gBAAgB,aAAa,QAAQ,QAAQ,QAAQ;AAAA,IAC/D,QAAQ;AACJ,UAAI,QAAQ,SAAS;AACjB,qBAAa,sCAAsC;AAAA,MACvD;AAAA,IACJ;AAEA,WAAO;AAAA,MACH,UAAU;AAAA,MACV,UAAU,KAAK,IAAI,IAAI;AAAA,MACvB,qBAAqB,iBAAiB,IAAI,OAAK,EAAE,EAAE;AAAA,IACvD;AAAA,EACJ,SAAS,OAAO;AACZ,YAAQ,KAAK,iBAAiB;AAC9B,eAAW,gBAAgB,KAAK,CAAC;AACjC,WAAO,EAAE,UAAU,KAAK,IAAI,IAAI,WAAW,UAAU,WAAW,gBAAgB;AAAA,EACpF;AACJ;AAtRA;AAAA;AAAA;AAQA;AACA;AACA;AAEA;AAUA;AAQA;AACA;AAAA;AAAA;;;ACXO,SAAS,qBAAqB,OAA4B;AAC7D,QAAM,aAAa,MAAM,QAAQ,IAAI,QAAM;AAAA,IACvC,IAAI,EAAE;AAAA,IACN,MAAM,EAAE;AAAA,IACR,MAAM,EAAE;AAAA,IACR,UAAU,EAAE;AAAA,EAChB,EAAE;AACF,SAAO,KAAK,UAAU,YAAY,MAAM,CAAC;AAC7C;AAwDO,SAAS,qBAAqB,OAA8C;AAC/E,UAAQ,OAAO;AAAA,IACX,KAAK;AAAW,aAAO;AAAA,IACvB,KAAK;AAAQ,aAAO;AAAA,IACpB;AAAS,aAAO;AAAA,EACpB;AACJ;AA4EO,SAAS,oBAAoB,QAAyB;AACzD,MAAI,CAAC,QAAQ;AAET,WAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMX;AAGA,SAAO;AAAA;AAAA,sCAE2B,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAO5C;AAUO,SAAS,iCAAiC,OAAsC,QAAyB;AAC5G,QAAM,aAAa,qBAAqB,KAAK;AAC7C,QAAM,iBAAiB,oBAAoB,MAAM;AAEjD,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmBT,UAAU;AAAA;AAAA,EAEV,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAuBhB;AArPA,IAkCM,eAWA,cAaA;AA1DN,IAAAC,gBAAA;AAAA;AAAA;AAkCA,IAAM,gBAAgB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAWtB,IAAM,eAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAarB,IAAM,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;AC/BZ,SAAS,4BAAoC;AAChD,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA4EX;AAKO,SAAS,wBAAkC;AAC9C,SAAO,CAAC,SAAS,gBAAgB,gBAAgB;AACrD;AAMO,SAAS,4BACZ,UACA,YACA,UACA,UACM;AACN,SAAO,KAAK,UAAU;AAAA,IAClB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN;AAAA,IACA,UAAU,SAAS,UAAU,GAAG,GAAG;AAAA;AAAA,EACvC,CAAC;AACL;AAsBO,SAAS,gCAAwC;AACpD,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA6DX;AAKO,SAAS,4BAAsC;AAClD,SAAO,CAAC,SAAS,cAAc;AACnC;AAsBO,SAAS,wCAAgD;AAC5D,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA
;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA6EX;AAhUA;AAAA;AAAA;AAAA;AAAA;;;AC6FO,SAAS,qBACZ,UACA,OACU;AACV,QAAM,aAAa,MAAM,QAAQ,KAAK,OAAK,EAAE,OAAO,SAAS,QAAQ;AACrE,QAAM,aAAa,YAAY,QAAQ,SAAS;AAEhD,SAAO;AAAA,IACH,UAAU,SAAS;AAAA,IACnB;AAAA,IACA,UAAU,KAAK,UAAU,UAAU,MAAM,CAAC;AAAA,IAC1C,aAAa,qBAAqB,KAAK;AAAA,EAC3C;AACJ;AAeA,eAAsB,mBAClB,SAC8B;AAC9B,QAAM,EAAE,MAAM,IAAI;AAGlB,MAAI,MAAM,SAAS,MAAM,MAAM,SAAS,GAAG;AACvC,WAAO,+BAA+B,OAAO;AAAA,EACjD;AAEA,SAAO,uBAAuB,OAAO;AACzC;AAKA,eAAe,uBACX,SAC8B;AAC9B,QAAM,YAAY,KAAK,IAAI;AAC3B,QAAM;AAAA,IACF;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACJ,IAAI;AAEJ,MAAI,SAAS,WAAW,GAAG;AACvB,WAAO,EAAE,UAAU,CAAC,GAAG,iBAAiB,CAAC,GAAG,UAAU,EAAE;AAAA,EAC5D;AAGA,QAAM,QAAsB,SAAS,IAAI,OAAK,qBAAqB,GAAG,KAAK,CAAC;AAG5E,QAAM,iBAAiB,iCAAiC,KAAK;AAG7D,QAAM,YAAQ,6CAAqB,OAAO,gBAAgB,CAAC,CAAC;AAI5D,QAAM,UAAM,2CAAmB;AAAA,IAC3B;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA,gBAAgB;AAAA,EACpB,CAAC;AAGD,QAAM,eAAW,uCAAe;AAAA,IAC5B;AAAA,IACA,gBAAgB;AAAA,IAChB,YAAY;AAAA,IACZ,cAAc;AAAA,IACd,gBAAgB;AAAA,IAChB;AAAA,IACA,SAAS;AAAA,IACT;AAAA,IACA;AAAA,IACA;AAAA,EACJ,CAAC;AAGD,QAAM,SAAS,MAAM,SAAS,QAAQ,KAAK,KAAK;AAGhD,QAAM,WAA+B,CAAC;AACtC,QAAM,kBAA4B,CAAC;AAEnC,MAAI,OAAO,QAAQ;AACf,UAAM,SAAS,OAAO;AACtB,eAAW,aAAa,OAAO,SAAS;AACpC,YAAM,WAAW,UAAU,KAAK;AAChC,YAAM,aAAa,MAAM,QAAQ,KAAK,OAAK,EAAE,OAAO,QAAQ;AAC5D,YAAM,aAAa,YAAY,QAAQ;AAEvC,UAAI,UAAU,YAAY,UAAU,WAAW,UAAU,cAAc;AACnE,cAAM,UAAU,UAAU,WAAW,UAAU,eAAe;AAC9D,iBAAS,KAAK;AAAA,UACV,MAAM;AAAA,UACN,MAAM,kBAAkB,QAAQ;AAAA,UAChC,OAAO;AAAA,UACP;AAAA,UACA;AAAA,QACJ,CAAC;AAAA,MACL,OAAO;AACH,wBAAgB,KAAK,QAAQ;AAAA,MACjC;AAAA,IACJ;AAAA,EACJ;AAIA,QAAM,kBAAkB,SAAS,IAAI,OAAK;AACtC,UAAM,MAAM,MAAM,QAAQ,KAAK,OAAK,EAAE,OAAO,EAAE,QAAQ;AACvD,WAAO;AAAA,MACH,EAAE;AAAA,MACF,KAAK,QAAQ,EAAE;AAAA,MACf,KAAK,YAAY;AAAA,MACjB,EAAE;AAAA,IACN;AAAA,EACJ,CAAC;AAED,QAAM,kBAAc;AAAA,IAChB,gBAAgB,IAAI,CAAC,SAAS,OAAO;AAAA,MACjC;AAAA,MACA,UAAU,SAAS,CAAC,EAAE;AAAA,IAC1B,EAAE;AAAA,IACF;AAAA,IACA,CAAC;AAAA,EACL;AAEA,QAAM,gBAAY,2CAAmB;AAAA,IACjC;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA,gBAAgB;AAAA,IAChB,gBAAgB,0BAA0B;AAAA,IAC1C,gBAAgB,sBAAsB;AAAA,IACtC,eAAe;AAAA,IACf,oBAAoB;AAAA,MAChB,aAAa,MAAM,QAAQ;AAAA,MAC3B,oBAAoB,MAAM,QAAQ,eAAe;AAAA,MACjD,aAAa,MAAM,QAAQ,eAAe;AAAA,MAC1C,UAAU,MAAM,QAAQ,YAAY;AAAA,IACxC;AAAA,EACJ,CAAC;AAED,QAAM,qBAAiB,uCAAe;AAAA,IAClC;AAAA,IACA,gBAAgB;AAAA,IAChB,YAAY;AAAA,IACZ,cAAc;AAAA,IACd,gBAAgB;AAAA,IAChB;AAAA,IACA,SAAS;AAAA,IACT;AAAA,IACA;AAAA,EACJ,CAAC;AAED,MAAI;AACA,UAAM,eAAe,MAAM,eAAe,QAAQ,WAAW,WAAW;AACxE,UAAM,eAAe,aAAa;AAClC,UAAM,kBAAkB,cAAc;AAEtC,QAAI,iBAAiB;AACjB,YAAM,SAAS,KAAK,MAAM,eAAe;AAEzC,UAAI,OAAO,OAAO;AACd,iBAAS,KAAK;AAAA,UACV,MAAM;AAAA,UACN,MAAM;AAAA,UACN,OAAO,GAAG,MAAM,QAAQ,IAAI;AAAA,UAC5B,SAAS,OAAO;AAAA,QACpB,CAAC;AAAA,MACL;AAEA,UAAI,OAAO,cAAc;AACrB,iBAAS,KAAK;AAAA,UACV,MAAM;AAAA,UACN,MAAM;AAAA,UACN,OAAO;AAAA,UACP,SAAS,OAAO;AAAA,QACpB,CAAC;AAAA,MACL;AAEA,UAAI,OAAO,gBAAgB;AACvB,iBAAS,KAAK;AAAA,UACV,MAAM;AAAA,UACN,MAAM;AAAA,UACN,OAAO;AAAA,UACP,SAAS,OAAO;AAAA,QACpB,CAAC;AAAA,MACL;AAAA,IACJ,OAAO;AACH,eAAS,KAAK,GAAG,yBAAyB,OAAO,QAAQ,CAAC;AAAA,IAC9D;AAAA,EACJ,QAAQ;AACJ,aAAS,KAAK,GAAG,yBAAyB,OAAO,QAAQ,CAAC;AAAA,EAC9D;AAEA,SAAO;AAAA,IACH;AAAA,IACA;AAAA,IACA,UAAU,KAAK,IAAI,IAAI;AAAA,EAC3B;AACJ;AA6BO,SAAS,oBACZ,UACA,OACY;AACZ,QAAM,gBAAgB,oBAAI,IAAoB;AAC9C,aAAW,QAAQ,OAAO;AACtB,
eAAW,YAAY,KAAK,SAAS;AACjC,oBAAc,IAAI,UAAU,KAAK,EAAE;AAAA,IACvC;AAAA,EACJ;AAEA,QAAM,iBAAiB,oBAAI,IAA8B;AACzD,QAAM,qBAAuC,CAAC;AAC9C,aAAW,YAAY,UAAU;AAC7B,UAAM,SAAS,cAAc,IAAI,SAAS,QAAQ;AAClD,QAAI,QAAQ;AACR,UAAI,CAAC,eAAe,IAAI,MAAM,GAAG;AAC7B,uBAAe,IAAI,QAAQ,CAAC,CAAC;AAAA,MACjC;AACA,qBAAe,IAAI,MAAM,EAAG,KAAK,QAAQ;AAAA,IAC7C,OAAO;AACH,yBAAmB,KAAK,QAAQ;AAAA,IACpC;AAAA,EACJ;AAEA,SAAO,EAAE,eAAe,gBAAgB,mBAAmB;AAC/D;AAKA,eAAe,kBACX,SACA,UACA,OACA,eACwB;AACxB,QAAM;AAAA,IACF;AAAA,IACA;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACJ,IAAI;AAEJ,QAAM,WAAyB,SAAS,IAAI,OAAK,qBAAqB,GAAG,KAAK,CAAC;AAC/E,QAAM,wBAAwB,iCAAiC,KAAK;AACpE,QAAM,YAAQ,6CAAqB,UAAU,uBAAuB,CAAC,CAAC;AAEtE,QAAM,UAAM,2CAAmB;AAAA,IAC3B;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA,gBAAgB;AAAA,EACpB,CAAC;AAED,QAAM,eAAW,uCAAe;AAAA,IAC5B;AAAA,IACA,gBAAgB;AAAA,IAChB,YAAY;AAAA,IACZ,cAAc;AAAA,IACd,gBAAgB;AAAA,IAChB;AAAA,IACA,SAAS;AAAA,IACT;AAAA,IACA;AAAA,IACA;AAAA,EACJ,CAAC;AAED,QAAM,YAAY,MAAM,SAAS,QAAQ,KAAK,KAAK;AAEnD,QAAM,WAA+B,CAAC;AACtC,QAAM,YAAY,oBAAI,IAAY;AAElC,MAAI,UAAU,QAAQ;AAClB,UAAM,SAAS,UAAU;AACzB,eAAW,UAAU,OAAO,SAAS;AACjC,YAAM,WAAW,OAAO,KAAK;AAC7B,YAAM,aAAa,MAAM,QAAQ,KAAK,OAAK,EAAE,OAAO,QAAQ;AAC5D,YAAM,aAAa,YAAY,QAAQ;AACvC,YAAM,SAAS,cAAc,IAAI,QAAQ;AAEzC,UAAI,OAAO,YAAY,OAAO,WAAW,OAAO,cAAc;AAC1D,cAAM,UAAU,OAAO,WAAW,OAAO,eAAe;AACxD,iBAAS,KAAK;AAAA,UACV,MAAM;AAAA,UACN,MAAM,kBAAkB,QAAQ;AAAA,UAChC,OAAO;AAAA,UACP;AAAA,UACA;AAAA,UACA;AAAA,QACJ,CAAC;AAAA,MACL,OAAO;AACH,kBAAU,IAAI,QAAQ;AAAA,MAC1B;AAAA,IACJ;AAAA,EACJ;AAEA,SAAO,EAAE,UAAU,UAAU;AACjC;AAMA,eAAe,mBACX,MACA,cACA,OACA,SACyB;AACzB,QAAM,EAAE,WAAW,WAAW,OAAO,YAAY,IAAI;AAErD,QAAM,sBAAsB,aAAa,IAAI,OAAK;AAC9C,UAAM,MAAM,MAAM,QAAQ,KAAK,OAAK,EAAE,OAAO,EAAE,QAAQ;AACvD,WAAO;AAAA,MACH,EAAE;AAAA,MACF,KAAK,QAAQ,EAAE;AAAA,MACf,KAAK,YAAY;AAAA,MACjB,EAAE;AAAA,IACN;AAAA,EACJ,CAAC;AAED,QAAM,sBAAkB;AAAA,IACpB,oBAAoB,IAAI,CAAC,SAAS,OAAO;AAAA,MACrC;AAAA,MACA,UAAU,aAAa,CAAC,EAAE;AAAA,IAC9B,EAAE;AAAA,IACF;AAAA,IACA,CAAC;AAAA,EACL;AAEA,QAAM,oBAAgB,2CAAmB;AAAA,IACrC;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA,gBAAgB;AAAA,IAChB,gBAAgB,8BAA8B;AAAA,IAC9C,gBAAgB,0BAA0B;AAAA,IAC1C,eAAe;AAAA,IACf,oBAAoB;AAAA,MAChB,UAAU,KAAK;AAAA,MACf,iBAAiB,KAAK;AAAA,MACtB,UAAU,KAAK;AAAA,MACf,aAAa,MAAM,QAAQ;AAAA,IAC/B;AAAA,EACJ,CAAC;AAED,QAAM,yBAAqB,uCAAe;AAAA,IACtC;AAAA,IACA,gBAAgB;AAAA,IAChB,YAAY;AAAA,IACZ,cAAc;AAAA,IACd,gBAAgB;AAAA,IAChB;AAAA,IACA,SAAS,gBAAgB,KAAK,IAAI;AAAA,IAClC;AAAA,EACJ,CAAC;AAED,QAAM,kBAAkB;AAAA,IACpB,QAAQ,KAAK;AAAA,IACb,MAAM,KAAK;AAAA,IACX,aAAa,KAAK;AAAA,IAClB,SAAS,KAAK;AAAA,IACd,aAAa,aAAa;AAAA,EAC9B;AAEA,MAAI;AACA,UAAM,aAAa,MAAM,mBAAmB,QAAQ,eAAe,eAAe;AAClF,UAAM,aAAa,WAAW;AAC9B,UAAM,kBAAkB,YAAY;AAEpC,QAAI,iBAAiB;AACjB,YAAM,SAAS,KAAK,MAAM,eAAe;AACzC,YAAM,WAA+B,CAAC;AAEtC,UAAI,cAAc;AAClB,UAAI,OAAO,OAAO;AACd,iBAAS,KAAK;AAAA,UACV,MAAM;AAAA,UACN,MAAM;AAAA,UACN,OAAO,GAAG,KAAK,IAAI;AAAA,UACnB,SAAS,OAAO;AAAA,UAChB,QAAQ,KAAK;AAAA,QACjB,CAAC;AACD,sBAAc;AAAA,UACV,QAAQ,KAAK;AAAA,UACb,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,SAAS,OAAO,MAAM,UAAU,GAAG,GAAI;AAAA,UACvC,aAAa,aAAa;AAAA,QAC9B;AAAA,MACJ;AAEA,UAAI,OAAO,cAAc;AACrB,iBAAS,KAAK;AAAA,UACV,MAAM;AAAA,UACN,MAAM;AAAA,UACN,OAAO,GAAG,KAAK,IAAI;AAAA,UACnB,SAAS,OAAO;AAAA,UAChB,QAAQ,KAAK;AAAA,QACjB,CAAC;AAAA,MACL;AAEA,aAAO,EAAE,UAAU,YAAY;AAAA,IACnC,OAAO;AACH,aAAO;AAAA,QACH,UAAU,wBAAwB,MAAM,cAAc,KAAK;AAAA,QAC3D,aAAa;AAAA,MACjB;AAAA,IACJ;AAAA,EACJ,QAAQ;AACJ,WAAO;AAAA,MACH,UAAU,wBAAwB,MAAM,cAAc,KAAK;AAAA,MAC3D,aAAa;AAAA,IACjB;AAAA,EACJ;AACJ;AAOA,eAAe,sBACX,eACA,OACA,OACA,SAC2B;AAC3B,QAAM,EAAE,WAAW,WAAW,OAAO,YAAY,IAAI;AAErD,QAAM,qBAAqB,cAAc,IAAI,QAAM;AAAA,IAC/C,QAAQ,EAAE;AAAA,
IACV,UAAU,EAAE;AAAA,IACZ,SAAS,KAAK,UAAU,CAAC;AAAA,EAC7B,EAAE;AAEF,QAAM,yBAAqB;AAAA,IACvB;AAAA,IACA;AAAA,IACA,CAAC;AAAA,EACL;AAEA,QAAM,uBAAmB,2CAAmB;AAAA,IACxC;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA,gBAAgB;AAAA,IAChB,gBAAgB,sCAAsC;AAAA,IACtD,gBAAgB,sBAAsB;AAAA,IACtC,eAAe;AAAA,IACf,oBAAoB;AAAA,MAChB,aAAa,MAAM,QAAQ;AAAA,MAC3B,oBAAoB,MAAM,QAAQ,eAAe;AAAA,MACjD,aAAa,MAAM,QAAQ,eAAe;AAAA,MAC1C,UAAU,MAAM,QAAQ,YAAY;AAAA,IACxC;AAAA,EACJ,CAAC;AAED,QAAM,4BAAwB,uCAAe;AAAA,IACzC;AAAA,IACA,gBAAgB;AAAA,IAChB,YAAY;AAAA,IACZ,cAAc;AAAA,IACd,gBAAgB;AAAA,IAChB;AAAA,IACA,SAAS;AAAA,IACT;AAAA,EACJ,CAAC;AAED,MAAI;AACA,UAAM,gBAAgB,MAAM,sBAAsB,QAAQ,kBAAkB,kBAAkB;AAC9F,UAAM,gBAAgB,cAAc;AACpC,UAAM,kBAAkB,eAAe;AAEvC,QAAI,iBAAiB;AACjB,YAAM,SAAS,KAAK,MAAM,eAAe;AACzC,YAAM,WAA+B,CAAC;AAEtC,UAAI,OAAO,OAAO;AACd,iBAAS,KAAK;AAAA,UACV,MAAM;AAAA,UACN,MAAM;AAAA,UACN,OAAO,GAAG,MAAM,QAAQ,IAAI;AAAA,UAC5B,SAAS,OAAO;AAAA,QACpB,CAAC;AAAA,MACL;AAEA,UAAI,OAAO,cAAc;AACrB,iBAAS,KAAK;AAAA,UACV,MAAM;AAAA,UACN,MAAM;AAAA,UACN,OAAO;AAAA,UACP,SAAS,OAAO;AAAA,QACpB,CAAC;AAAA,MACL;AAEA,UAAI,OAAO,gBAAgB;AACvB,iBAAS,KAAK;AAAA,UACV,MAAM;AAAA,UACN,MAAM;AAAA,UACN,OAAO;AAAA,UACP,SAAS,OAAO;AAAA,QACpB,CAAC;AAAA,MACL;AAEA,aAAO;AAAA,IACX,OAAO;AACH,aAAO,qCAAqC,OAAO,OAAO,aAAa;AAAA,IAC3E;AAAA,EACJ,QAAQ;AACJ,WAAO,qCAAqC,OAAO,OAAO,aAAa;AAAA,EAC3E;AACJ;AASA,eAAe,+BACX,SAC8B;AAC9B,QAAM,YAAY,KAAK,IAAI;AAC3B,QAAM,EAAE,OAAO,SAAS,IAAI;AAE5B,MAAI,SAAS,WAAW,GAAG;AACvB,WAAO,EAAE,UAAU,CAAC,GAAG,iBAAiB,CAAC,GAAG,UAAU,EAAE;AAAA,EAC5D;AAEA,QAAM,QAAQ,MAAM;AAGpB,QAAM,EAAE,eAAe,eAAe,IAAI,oBAAoB,UAAU,KAAK;AAG7E,QAAM,YAAY,MAAM,kBAAkB,SAAS,UAAU,OAAO,aAAa;AAGjF,QAAM,gBAAoH,CAAC;AAC3H,aAAW,QAAQ,OAAO;AACtB,UAAM,eAAe,eAAe,IAAI,KAAK,EAAE,KAAK,CAAC;AACrD,QAAI,aAAa,WAAW,GAAG;AAAE;AAAA,IAAU;AAE3C,UAAM,SAAS,MAAM,mBAAmB,MAAM,cAAc,OAAO,OAAO;AAC1E,cAAU,SAAS,KAAK,GAAG,OAAO,QAAQ;AAC1C,kBAAc,KAAK,OAAO,WAAW;AAAA,EACzC;AAGA,QAAM,kBAAkB,MAAM,sBAAsB,eAAe,OAAO,OAAO,OAAO;AACxF,YAAU,SAAS,KAAK,GAAG,eAAe;AAE1C,SAAO;AAAA,IACH,UAAU,UAAU;AAAA,IACpB,iBAAiB,CAAC,GAAG,UAAU,SAAS;AAAA,IACxC,UAAU,KAAK,IAAI,IAAI;AAAA,EAC3B;AACJ;AASO,SAAS,wBACZ,MACA,UACA,OACkB;AAClB,QAAM,WAA+B,CAAC;AAGtC,QAAM,aAAuB;AAAA,IACzB,KAAK,KAAK,IAAI;AAAA,IACd;AAAA,IACA,KAAK,eAAe;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,EACJ;AAEA,aAAW,KAAK,UAAU;AACtB,UAAM,MAAM,MAAM,QAAQ,KAAK,OAAK,EAAE,OAAO,EAAE,QAAQ;AACvD,UAAM,OAAO,KAAK,QAAQ,EAAE;AAC5B,UAAM,OAAO,kBAAkB,EAAE,QAAQ;AACzC,eAAW,KAAK,MAAM,IAAI,eAAe,IAAI,eAAU,EAAE,SAAS,UAAU,GAAG,GAAG,CAAC,EAAE;AAAA,EACzF;AAEA,WAAS,KAAK;AAAA,IACV,MAAM;AAAA,IACN,MAAM;AAAA,IACN,OAAO,GAAG,KAAK,IAAI;AAAA,IACnB,SAAS,WAAW,KAAK,IAAI;AAAA,IAC7B,QAAQ,KAAK;AAAA,EACjB,CAAC;AAGD,WAAS,KAAK;AAAA,IACV,MAAM;AAAA,IACN,MAAM;AAAA,IACN,OAAO,GAAG,KAAK,IAAI;AAAA,IACnB,SAAS;AAAA,MACL,KAAK,KAAK,IAAI;AAAA,MACd;AAAA,MACA,KAAK,eAAe;AAAA,IACxB,EAAE,KAAK,IAAI;AAAA,IACX,QAAQ,KAAK;AAAA,EACjB,CAAC;AAED,SAAO;AACX;AAKO,SAAS,qCACZ,OACA,OACA,eACkB;AAClB,QAAM,WAA+B,CAAC;AAGtC,QAAM,aAAuB;AAAA,IACzB,KAAK,MAAM,QAAQ,IAAI;AAAA,IACvB;AAAA,IACA,MAAM,QAAQ,eAAe;AAAA,IAC7B;AAAA,IACA;AAAA,IACA;AAAA,EACJ;AAEA,aAAW,WAAW,eAAe;AACjC,eAAW,KAAK,MAAM,QAAQ,IAAI,aAAa,QAAQ,MAAM,qBAAgB,QAAQ,WAAW,KAAK,QAAQ,WAAW,WAAW;AAAA,EACvI;AAEA,WAAS,KAAK;AAAA,IACV,MAAM;AAAA,IACN,MAAM;AAAA,IACN,OAAO,GAAG,MAAM,QAAQ,IAAI;AAAA,IAC5B,SAAS,WAAW,KAAK,IAAI;AAAA,EACjC,CAAC;AAGD,WAAS,KAAK;AAAA,IACV,MAAM;AAAA,IACN,MAAM;AAAA,IACN,OAAO;AAAA,IACP,SAAS;AAAA,MACL;AAAA,MACA;AAAA,MACA,GAAG,MAAM,QAAQ,IAAI,kBAAkB,MAAM,QAAQ,QAAQ,UAAU,MAAM,QAAQ,WAAW;AAAA,MAChG;AAAA,MACA,MAAM,qBAAqB;AAAA,IAC/B,EAAE,KAAK,IAAI;AAAA,EACf,CAAC;AAED,SAAO;AACX;AAMO,SAAS,yBACZ,OACA,UACkB;AAClB,QAAM,WAA+B,CAAC;AAGtC,QAAM,aAAuB;AAAA,IACzB,KAAK,MAA
M,QAAQ,IAAI;AAAA,IACvB;AAAA,IACA,MAAM,QAAQ,eAAe;AAAA,IAC7B;AAAA,IACA;AAAA,IACA;AAAA,EACJ;AAGA,QAAM,aAAa,oBAAI,IAA8B;AACrD,aAAW,KAAK,UAAU;AACtB,UAAM,MAAM,MAAM,QAAQ,KAAK,OAAK,EAAE,OAAO,EAAE,QAAQ;AACvD,UAAM,WAAW,KAAK,YAAY;AAClC,QAAI,CAAC,WAAW,IAAI,QAAQ,GAAG;AAC3B,iBAAW,IAAI,UAAU,CAAC,CAAC;AAAA,IAC/B;AACA,eAAW,IAAI,QAAQ,EAAG,KAAK,CAAC;AAAA,EACpC;AAEA,aAAW,CAAC,UAAU,IAAI,KAAK,YAAY;AACvC,eAAW,KAAK,OAAO,QAAQ,IAAI,EAAE;AACrC,eAAW,KAAK,MAAM;AAClB,YAAM,MAAM,MAAM,QAAQ,KAAK,OAAK,EAAE,OAAO,EAAE,QAAQ;AACvD,YAAM,OAAO,KAAK,QAAQ,EAAE;AAC5B,YAAM,OAAO,kBAAkB,EAAE,QAAQ;AACzC,iBAAW,KAAK,MAAM,IAAI,eAAe,IAAI,eAAU,EAAE,SAAS,UAAU,GAAG,GAAG,CAAC,EAAE;AAAA,IACzF;AACA,eAAW,KAAK,EAAE;AAAA,EACtB;AAEA,WAAS,KAAK;AAAA,IACV,MAAM;AAAA,IACN,MAAM;AAAA,IACN,OAAO,GAAG,MAAM,QAAQ,IAAI;AAAA,IAC5B,SAAS,WAAW,KAAK,IAAI;AAAA,EACjC,CAAC;AAGD,WAAS,KAAK;AAAA,IACV,MAAM;AAAA,IACN,MAAM;AAAA,IACN,OAAO;AAAA,IACP,SAAS;AAAA,MACL;AAAA,MACA;AAAA,MACA,GAAG,MAAM,QAAQ,IAAI,kBAAkB,MAAM,QAAQ,QAAQ,UAAU,MAAM,QAAQ,WAAW;AAAA,MAChG;AAAA,MACA,MAAM,qBAAqB;AAAA,IAC/B,EAAE,KAAK,IAAI;AAAA,EACf,CAAC;AAED,SAAO;AACX;AA52BA,IAWAC;AAXA;AAAA;AAAA;AAWA,IAAAA,yBAIO;AAaP,IAAAC;AACA;AAQA;AAAA;AAAA;;;ACYO,SAAS,gBAAgB,QAAoB,WAA6B;AAC7E,QAAM,cAAmB,eAAQ,SAAS;AAC1C,QAAM,aAAkB,YAAK,aAAa,WAAW;AACrD,QAAM,eAAyB,CAAC;AAGhC,EAAG,eAAU,aAAa,EAAE,WAAW,KAAK,CAAC;AAC7C,EAAG,eAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAG5C,QAAM,UAAU,oBAAI,IAAY;AAChC,aAAW,WAAW,OAAO,UAAU;AACnC,QAAI,QAAQ,QAAQ;AAChB,cAAQ,IAAI,QAAQ,MAAM;AAAA,IAC9B;AAAA,EACJ;AAGA,aAAW,UAAU,SAAS;AAC1B,UAAM,iBAAsB,YAAK,aAAaC,YAAW,QAAQ,WAAW;AAC5E,IAAG,eAAU,gBAAgB,EAAE,WAAW,KAAK,CAAC;AAAA,EACpD;AAEA,aAAW,WAAW,OAAO,UAAU;AACnC,UAAM,WAAW,mBAAmB,SAAS,WAAW;AAGxD,IAAG,eAAe,eAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AAGxD,UAAM,UAAU,qBAAqB,QAAQ,OAAO;AAGpD,IAAG,mBAAc,UAAU,SAAS,OAAO;AAC3C,iBAAa,KAAK,QAAQ;AAAA,EAC9B;AAEA,SAAO;AACX;AAoBO,SAAS,mBAAmB,SAA2B,WAA2B;AACrF,QAAM,OAAO,QAAQ,QAAQ,IAAI;AAEjC,UAAQ,QAAQ,MAAM;AAAA,IAClB,KAAK;AACD,UAAI,QAAQ,QAAQ;AAChB,eAAY,YAAK,WAAWA,YAAW,QAAQ,QAAQ,aAAa,GAAG,IAAI,KAAK;AAAA,MACpF;AACA,aAAY,YAAK,WAAW,aAAa,GAAG,IAAI,KAAK;AAAA,IACzD,KAAK;AACD,aAAY,YAAK,WAAWA,YAAW,QAAQ,QAAS,UAAU;AAAA,IACtE,KAAK;AACD,aAAY,YAAK,WAAWA,YAAW,QAAQ,QAAS,iBAAiB;AAAA,IAC7E,KAAK;AACD,aAAY,YAAK,WAAW,UAAU;AAAA,IAC1C,KAAK;AACD,aAAY,YAAK,WAAW,iBAAiB;AAAA,IACjD,KAAK;AACD,aAAY,YAAK,WAAW,oBAAoB;AAAA,IACpD;AACI,aAAY,YAAK,WAAW,GAAG,IAAI,KAAK;AAAA,EAChD;AACJ;AAOO,SAAS,QAAQ,OAAuB;AAC3C,SAAO,MACF,YAAY,EACZ,QAAQ,eAAe,GAAG,EAC1B,QAAQ,YAAY,EAAE,EACtB,QAAQ,UAAU,GAAG,KAAK;AACnC;AAKO,SAAS,qBAAqB,SAAyB;AAC1D,SAAO,QAAQ,QAAQ,SAAS,IAAI,EAAE,QAAQ,OAAO,IAAI;AAC7D;AArJA,IAgBAC,MACAC,QAQM,aAGAF;AA5BN;AAAA;AAAA;AAgBA,IAAAC,OAAoB;AACpB,IAAAC,SAAsB;AAQtB,IAAM,cAAc;AAGpB,IAAMF,aAAY;AAAA;AAAA;;;ACGX,SAAS,uBAA+B;AAC3C,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA8FX;AA8CO,SAAS,uBAA+B;AAC3C,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;
AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiGX;AA9QA;AAAA;AAAA;AAAA;AAAA;;;ACmBO,SAAS,YAAoB;AAChC,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8VT,qBAAqB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAaxB;AA/XA;AAAA;AAAA;AASA;AAAA;AAAA;;;ACaO,SAAS,UAAU,cAAuB,cAAoC;AACjF,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,8BAMmB,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAoIxC,eAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAgCE,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA
AA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA2bnB,qBAAqB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAcxB;AA1oBA;AAAA;AAAA;AAUA;AAAA;AAAA;;;ACYO,SAAS,gBAAgB,SAA8B;AAC1D,QAAM,YAAiB,YAAK,SAAS,mBAAmB;AACxD,MAAI,CAAI,gBAAW,SAAS,GAAG;AAC3B,UAAM,IAAI,MAAM,kCAAkC,OAAO,EAAE;AAAA,EAC/D;AAEA,QAAM,UAAa,kBAAa,WAAW,OAAO;AAClD,SAAO,KAAK,MAAM,OAAO;AAC7B;AAoBO,SAAS,kBACZ,SACA,aACsB;AACtB,QAAM,OAA+B,CAAC;AAGtC,QAAM,gBAAgB,CAAC,YAAY,mBAAmB,oBAAoB;AAC1E,aAAW,QAAQ,eAAe;AAC9B,UAAM,WAAgB,YAAK,SAAS,IAAI;AACxC,QAAO,gBAAW,QAAQ,GAAG;AACzB,YAAM,MAAW,gBAAS,MAAM,KAAK;AACrC,WAAK,KAAK,GAAG,EAAE,IAAO,kBAAa,UAAU,OAAO;AAAA,IACxD;AAAA,EACJ;AAGA,QAAM,aAAkB,YAAK,SAAS,SAAS;AAC/C,MAAO,gBAAW,UAAU,KAAQ,cAAS,UAAU,EAAE,YAAY,GAAG;AACpE,UAAM,QAAW,iBAAY,UAAU,EAAE,OAAO,OAAK,EAAE,SAAS,KAAK,CAAC;AACtE,eAAW,QAAQ,OAAO;AACtB,YAAM,OAAY,gBAAS,MAAM,KAAK;AACtC,YAAM,WAAW,mBAAmB,MAAM,WAAW;AACrD,YAAM,MAAM,YAAY;AACxB,WAAK,GAAG,IAAO,kBAAkB,YAAK,YAAY,IAAI,GAAG,OAAO;AAAA,IACpE;AAAA,EACJ;AAGA,QAAM,WAAgB,YAAK,SAAS,OAAO;AAC3C,MAAO,gBAAW,QAAQ,KAAQ,cAAS,QAAQ,EAAE,YAAY,GAAG;AAChE,UAAM,WAAc,iBAAY,QAAQ,EAAE;AAAA,MAAO,OAC1C,cAAc,YAAK,UAAU,CAAC,CAAC,EAAE,YAAY;AAAA,IACpD;AAEA,eAAW,UAAU,UAAU;AAC3B,YAAM,UAAe,YAAK,UAAU,MAAM;AAG1C,iBAAW,QAAQ,CAAC,YAAY,iBAAiB,GAAG;AAChD,cAAM,WAAgB,YAAK,SAAS,IAAI;AACxC,YAAO,gBAAW,QAAQ,GAAG;AACzB,gBAAM,MAAW,gBAAS,MAAM,KAAK;AACrC,eAAK,UAAU,MAAM,IAAI,GAAG,EAAE,IAAO,kBAAa,UAAU,OAAO;AAAA,QACvE;AAAA,MACJ;AAGA,YAAM,iBAAsB,YAAK,SAAS,SAAS;AACnD,UAAO,gBAAW,cAAc,KAAQ,cAAS,cAAc,EAAE,YAAY,GAAG;AAC5E,cAAM,QAAW,iBAAY,cAAc,EAAE,OAAO,OAAK,EAAE,SAAS,KAAK,CAAC;AAC1E,mBAAW,QAAQ,OAAO;AACtB,gBAAM,OAAY,gBAAS,MAAM,KAAK;AACtC,gBAAM,WAAW,mBAAmB,MAAM,WAAW;AACrD,gBAAM,MAAM,YAAY;AACxB,eAAK,GAAG,IAAO,kBAAkB,YAAK,gBAAgB,IAAI,GAAG,OAAO;AAAA,QACxE;AAAA,MACJ;AAAA,IACJ;AAAA,EACJ;AAEA,SAAO;AACX;AAmBO,SAAS,qBACZ,aACA,cACM;AAEN,QAAM,cAAc,gBAAgB,WAAW;AAC/C,QAAM,iBAAiB,gBAAgB,YAAY;AAEnD,SAAO;AAAA,uBAA+E,WAAW;AAAA,wBAA4B,cAAc;AAAA;AAC/I;AASO,SAAS,gBAAgB,OAAwB;AACpD,SAAO,KAAK,UAAU,OAAO,gBAAgB,CAAC;AAClD;AAKA,SAAS,eA
Ae,MAAc,OAAyB;AAC3D,MAAI,UAAU,QAAQ,OAAO,UAAU,YAAY,CAAC,MAAM,QAAQ,KAAK,GAAG;AACtE,UAAM,SAAkC,CAAC;AACzC,eAAW,KAAK,OAAO,KAAK,KAAgC,EAAE,KAAK,GAAG;AAClE,aAAO,CAAC,IAAK,MAAkC,CAAC;AAAA,IACpD;AACA,WAAO;AAAA,EACX;AACA,SAAO;AACX;AAUA,SAAS,mBAAmB,MAAc,aAAyC;AAC/E,QAAM,aAAa,KAAK,YAAY,EAAE,QAAQ,eAAe,GAAG,EAAE,QAAQ,YAAY,EAAE;AACxF,aAAW,OAAO,YAAY,SAAS;AACnC,UAAM,UAAU,IAAI,GAAG,YAAY,EAAE,QAAQ,eAAe,GAAG,EAAE,QAAQ,YAAY,EAAE;AACvF,QAAI,YAAY,YAAY;AACxB,aAAO,IAAI;AAAA,IACf;AAAA,EACJ;AACA,SAAO;AACX;AAKO,SAAS,WAAW,KAAqB;AAC5C,SAAO,IACF,QAAQ,MAAM,OAAO,EACrB,QAAQ,MAAM,MAAM,EACpB,QAAQ,MAAM,MAAM,EACpB,QAAQ,MAAM,QAAQ;AAC/B;AAnMA,IASAG,MACAC;AAVA;AAAA;AAAA;AASA,IAAAD,OAAoB;AACpB,IAAAC,SAAsB;AAAA;AAAA;;;AC4Cf,SAAS,gBAAgB,SAAiB,SAAoC;AACjF,QAAM,cAAmB,eAAQ,OAAO;AAGxC,QAAM,cAAc,gBAAgB,WAAW;AAG/C,QAAM,eAAe,kBAAkB,aAAa,WAAW;AAG/D,QAAM,QAAQ,SAAS,SAAS;AAChC,QAAM,QAAQ,SAAS,SAAS,YAAY,QAAQ;AACpD,QAAM,eAAe,CAAC,SAAS;AAG/B,QAAM,sBAAsB,qBAAqB,aAAa,YAAY;AAC1E,QAAM,mBAAwB,YAAK,aAAa,sBAAsB;AACtE,EAAG,mBAAc,kBAAkB,qBAAqB,OAAO;AAG/D,MAAI;AACJ,MAAI,SAAS,gBAAgB;AACzB,UAAM,eAAoB,eAAQ,QAAQ,cAAc;AACxD,QAAI,CAAI,gBAAW,YAAY,GAAG;AAC9B,YAAM,IAAI,MAAM,8BAA8B,YAAY,EAAE;AAAA,IAChE;AACA,kBAAiB,kBAAa,cAAc,OAAO;AAAA,EACvD,OAAO;AACH,kBAAc,qBAAqB,EAAE,OAAO,OAAO,aAAa,CAAC;AAAA,EACrE;AAEA,QAAM,WAAgB,YAAK,aAAa,mBAAmB;AAC3D,EAAG,mBAAc,UAAU,aAAa,OAAO;AAE/C,SAAO,CAAC,UAAU,gBAAgB;AACtC;AAkBO,SAAS,qBAAqB,SAAkC;AACnE,QAAM,EAAE,OAAO,OAAO,aAAa,IAAI;AAEvC,QAAM,aAAa,UAAU,SAAS,KAAK,UAAU,KAAK;AAC1D,QAAM,eAAe,UAAU,SACzB,oDACA;AAEN,SAAO;AAAA,kBACO,UAAU,gBAAgB,KAAK;AAAA;AAAA;AAAA;AAAA,MAI3C,YAAY;AAAA,aACL,WAAW,KAAK,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAc5B,UAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAMuB,WAAW,KAAK,CAAC;AAAA;AAAA;AAAA,EAGnD,eAAe;AAAA;AAAA,kBAEC,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAsBlB,UAAU,cAAc,KAAK,CAAC;AAAA;AAAA;AAAA;AAIhC;AA5KA,IAiBAC,MACAC,QAcM,eAGA,qBAGA;AAtCN;AAAA;AAAA;AAiBA,IAAAD,OAAoB;AACpB,IAAAC,SAAsB;AAEtB;AACA;AACA;AAGA;AAOA,IAAM,gBAA8B;AAGpC,IAAM,sBAAsB;AAG5B,IAAM,yBAAyB;AAAA;AAAA;;;ACC/B,eAAsB,iBAClB,SACA,WACA,YACA,aACA,gBACmB;AACnB,QAAM,YAAY,KAAK,IAAI;AAE3B,QAAM,SAAS,MAAM,mBAAmB;AAAA,IACpC;AAAA,IACA,OAAO,QAAQ;AAAA,IACf,UAAU,QAAQ;AAAA,IAClB,OAAO,QAAQ,SAAS;AAAA,IACxB,aAAa,QAAQ,eAAe;AAAA,IACpC,WAAW,QAAQ,WAAW;AAAA,IAC9B,OAAO,QAAQ;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,EACJ,CAAC;AAED,SAAO;AAAA,IACH,UAAU,OAAO;AAAA,IACjB,UAAU,KAAK,IAAI,IAAI;AAAA,IACvB,iBAAiB,OAAO,gBAAgB,SAAS,IAAI,OAAO,kBAAkB;AAAA,EAClF;AACJ;AAlEA;AAAA;AAAA;AAWA;AAGA,IAAAC;AACA;AACA;AACA;AACA;AAAA;AAAA;;;ACuCA,eAAsB,iBAClB,UACA,OACA,UACA,SACA,aACA,cACA,qBAC4B;AAC5B,QAAM,YAAY,KAAK,IAAI;AAE3B,UAAQ,OAAO,MAAM,IAAI;AACzB,cAAY,6BAA6B;AAGzC,QAAM,eAAe,kBAAkB,SAAS,SAAS;AACzD,QAAM,iBAAiB,oBAAoB,SAAS,SAAS;AAC7D,QAAM,qBAAqB,wBAAwB,SAAS,SAAS;AACrE,QAAM,eAAe,kBAAkB,SAAS,SAAS;AACzD,QAAM,cAAc,qBAAqB,KAAK,IAAI,qBAAqB,GAAG,EAAE,IAAI;AAGhF,MAAI,UAAyB;AAC7B,MAAI;AACA,cAAU,MAAM,kBAAkB,QAAQ;AAAA,EAC9C,QAAQ;AAAA,EAER;AAGA,MAAI,qBAAqB;AACzB,MAAI,iBAAqC,CAAC;AAE1C,MAAI,CAAC,QAAQ,OAAO;AAChB,UAAM,YAAY,SACb,IAAI,OAAK,EAAE,QAAQ,EACnB,OAAO,QAAM,CAAC,CAAC,EAAE;AAYtB,QAAI,WAAW,wBAAwB,UAAa,oBAAoB,SAAS,KAAK,CAAC,QAAQ,UAAU;AACrG,YAAM,qBAAqB,UAAU;AAAA,QACjC,QAAM,CAAC,oBAAoB,SAAS,EAAE;AAAA,MAC1C;AACA,UAAI,mBAAmB,SAAS,GAAG;AAC/B,cAAM,YAAY,gBAAgB,oBAAoB,QAAQ,QAAQ,OAAO;AAC7E,YAAI,YAAY,KAAK,QAAQ,SAAS;AAClC,oBAAU,cAAc,SAAS,kDAAkD;AAAA,QACvF;AAAA,MACJ;AAAA,IACJ;AAGA,UAAM,EAAE,OAAO,QAAQ,IAAI,QAAQ,WAC7B,+BAA+B,WAAW,QAAQ,MAAM,IACxD,UACI,4BAA4B,WAAW,QAAQ,QAAQ,OAAO,IAC9D,EAAE,OAAO,CAAC,GAAyB,SAAS,CAAC,GAAG,SAAS,EAA
E;AAErE,QAAI,MAAM,SAAS,GAAG;AAClB,uBAAiB;AAEjB,UAAI,QAAQ,WAAW,GAAG;AAEtB,qBAAa,OAAO,MAAM,MAAM,oCAAoC;AAAA,MACxE,OAAO;AACH,kBAAU,aAAa,MAAM,MAAM,qBAAqB,QAAQ,MAAM,YAAY;AAAA,MACtF;AAGA,2BAAqB,SAAS;AAAA,QAC1B,OAAK,QAAQ,SAAS,EAAE,QAAQ;AAAA,MACpC;AAAA,IACJ;AAAA,EACJ;AAGA,QAAM,qBAAqB,qBAAqB;AAAA,IAC5C,OAAO;AAAA,IACP,WAAW,iBAAiB,iBAAiB,MAAO;AAAA,EACxD,CAAC;AAGD,QAAM,iBAA4B,OAAO,QAAQ,SAAS;AACtD,UAAM,SAAS,MAAM,mBAAmB,QAAQ,IAAI;AACpD,kBAAc,SAAS,WAAW,OAAO,UAAU;AACnD,WAAO;AAAA,EACX;AAEA,QAAM,UAAU,IAAI,QAAQ;AAE5B,MAAI;AACA,QAAI,gBAAoC,CAAC;AAEzC,QAAI,mBAAmB,SAAS,GAAG;AAE/B,cAAQ,MAAM,2BAA2B,mBAAmB,MAAM,aAAa;AAE/E,YAAM,aAAa,MAAM;AAAA,QACrB;AAAA,UACI;AAAA,UACA,UAAU;AAAA,UACV,OAAO;AAAA,UACP;AAAA,UACA,SAAS,iBAAiB,iBAAiB,MAAO;AAAA,UAClD,OAAO;AAAA,QACX;AAAA,QACA;AAAA,QACA,CAAC,aAAa;AACV,cAAI,SAAS,UAAU,WAAW;AAC9B,oBAAQ;AAAA,cACJ,wBAAwB,SAAS,cAAc,IAAI,SAAS,UAAU;AAAA,YAC1E;AAAA,UACJ,WAAW,SAAS,UAAU,YAAY;AACtC,oBAAQ,OAAO,wCAAwC;AAAA,UAC3D;AAAA,QACJ;AAAA,QACA;AAAA;AAAA,QAEA,CAAC,MAAM,cAAc;AACjB,cAAI,CAAC,WAAW,CAAC,UAAU,SAAS;AAChC;AAAA,UACJ;AACA,cAAI;AACA,kBAAM,SAAS,UAAU;AACzB,kBAAM,WAAW,QAAQ,MAAM;AAC/B,kBAAM,UAAU,QAAQ,WAAW,QAAQ;AAC3C,gBAAI,YAAY,SAAS;AACrB,oBAAM,aAAa,MAAM,QAAQ,KAAK,OAAK,EAAE,OAAO,QAAQ;AAC5D,oBAAM,UAA4B;AAAA,gBAC9B,MAAM;AAAA,gBACN,MAAM,kBAAkB,QAAQ;AAAA,gBAChC,OAAO,YAAY,QAAQ;AAAA,gBAC3B;AAAA,gBACA;AAAA,gBACA,QAAQ,YAAY;AAAA,cACxB;AACA,0BAAY,UAAU,SAAS,QAAQ,QAAQ,OAAO;AAAA,YAC1D;AAAA,UACJ,QAAQ;AAAA,UAER;AAAA,QACJ;AAAA,MACJ;AAGA,sBAAgB,WAAW;AAG3B,YAAM,yBAAyB,WAAW,mBAAmB,CAAC;AAC9D,UAAI,uBAAuB,SAAS,GAAG;AACnC,gBAAQ;AAAA,UACJ,uBAAuB,cAAc,MAAM,eAAe,uBAAuB,MAAM;AAAA,QAC3F;AAGA,YAAI,QAAQ,WAAW,OAAO;AAC1B;AAAA,YACI,gBAAgB,uBAAuB,MAAM,yCAC1C,uBAAuB,KAAK,IAAI,CAAC;AAAA,UACxC;AACA,iBAAO;AAAA,YACH,iBAAiB;AAAA,YACjB,UAAU,KAAK,IAAI,IAAI;AAAA,YACvB,UAAU,WAAW;AAAA,UACzB;AAAA,QACJ;AAAA,MACJ,OAAO;AACH,gBAAQ,QAAQ,aAAa,cAAc,MAAM,WAAW;AAAA,MAChE;AAAA,IACJ;AAIA,UAAM,cAAc,oBAAI,IAAI,CAAC,QAAQ,CAAC;AACtC,UAAM,sBAAsB,cAAc,OAAO,OAAK,YAAY,IAAI,EAAE,IAAI,CAAC;AAC7E,UAAM,iBAAiB,cAAc,OAAO,OAAK,CAAC,YAAY,IAAI,EAAE,IAAI,CAAC;AACzE,UAAM,oBAAoB,CAAC,GAAG,gBAAgB,GAAG,mBAAmB;AAUpE,UAAM,iBAAiB,wBAAwB,SACzC,oBAAoB,WAAW,IAC/B,mBAAmB,WAAW;AAEpC,QAAI,kBAAkB,eAAe,SAAS,GAAG;AAE7C,UAAI,uBAAkD;AACtD,UAAI,CAAC,QAAQ,OAAO;AAChB,+BAAuB,QAAQ,WACzB,wBAAwB,QAAQ,MAAM,IACrC,UAAU,wBAAwB,QAAQ,QAAQ,OAAO,IAAI;AAAA,MACxE;AAEA,UAAI,wBAAwB,qBAAqB,SAAS,GAAG;AAEzD,uBAAe,KAAK,GAAG,oBAAoB;AAC3C;AAAA,UACI,OAAO,eAAe,MAAM,sBAAsB,qBAAqB,MAAM;AAAA,QACjF;AAAA,MACJ,OAAO;AAEH,gBAAQ,MAAM,wCAAwC;AAEtD,cAAM,aAAa,MAAM;AAAA,UACrB;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA,iBAAiB,iBAAiB,MAAO;AAAA,QAC7C;AACA,uBAAe,KAAK,GAAG,UAAU;AAGjC,YAAI,WAAW,WAAW,SAAS,GAAG;AAClC,cAAI;AACA,+BAAmB,YAAY,QAAQ,QAAQ,OAAO;AAAA,UAC1D,QAAQ;AACJ,gBAAI,QAAQ,SAAS;AACjB,2BAAa,6CAA6C;AAAA,YAC9D;AAAA,UACJ;AAAA,QACJ;AAEA,gBAAQ,QAAQ,oCAAoC;AAAA,MACxD;AAAA,IACJ,WAAW,mBAAmB,WAAW,KAAK,eAAe,SAAS,KAAK,CAAC,gBAAgB;AAGxF,cAAQ,MAAM,mEAAmE;AAEjF,YAAM,aAAa,MAAM;AAAA,QACrB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,iBAAiB,iBAAiB,MAAO;AAAA,MAC7C;AACA,qBAAe,KAAK,GAAG,UAAU;AAGjC,UAAI,WAAW,WAAW,SAAS,GAAG;AAClC,YAAI;AACA,6BAAmB,YAAY,QAAQ,QAAQ,OAAO;AAAA,QAC1D,QAAQ;AACJ,cAAI,QAAQ,SAAS;AACjB,yBAAa,6CAA6C;AAAA,UAC9D;AAAA,QACJ;AAAA,MACJ;AAEA,cAAQ,QAAQ,sCAAsC;AAAA,IAC1D;AAGA,QAAI,eAAe,SAAS,KAAK,mBAAmB,SAAS,KAAK,SAAS;AACvE,UAAI;AACA,2BAAmB,gBAAgB,QAAQ,QAAQ,OAAO;AAAA,MAC9D,QAAQ;AACJ,YAAI,QAAQ,SAAS;AACjB,uBAAa,6CAA6C;AAAA,QAC9D;AAAA,MACJ;AAAA,IACJ;AAGA,UAAM,cAAc,CAAC,GAAG,mBAAmB,GAAG,cAAc;AAG5D,UAAM,YAAiB,eAAQ,QAAQ,MAAM;AAC7C,QAAI;AACA,YAAM,aAAa,EAAE,UAAU,aAAa,UAAU,KAAK,IAAI,IAAI,UAAU;AAC7E,YAAM,eAAe,gBAAgB,YAAY,SAAS;AAC1D,mBAAa,SAAS,aAAa,MAAM,aAA
a,KAAK,SAAS,CAAC,EAAE;AAEvE,UAAI,QAAQ,SAAS;AACjB,mBAAW,KAAK,cAAc;AAC1B,oBAAU,KAAK,KAAU,gBAAS,WAAW,CAAC,CAAC,CAAC,EAAE;AAAA,QACtD;AAAA,MACJ;AAAA,IACJ,SAAS,YAAY;AACjB,iBAAW,0BAA0B,gBAAgB,UAAU,CAAC,EAAE;AAClE,aAAO;AAAA,QACH,iBAAiB;AAAA,QACjB,UAAU,KAAK,IAAI,IAAI;AAAA,QACvB,UAAU,WAAW;AAAA,MACzB;AAAA,IACJ;AAGA,QAAI;AACA,YAAM,gBAAgB,mBAAmB,QAAQ,QAAQ,QAAQ;AAAA,IACrE,QAAQ;AACJ,UAAI,QAAQ,SAAS;AACjB,qBAAa,sCAAsC;AAAA,MACvD;AAAA,IACJ;AAEA,WAAO;AAAA,MACH,iBAAiB,YAAY;AAAA,MAC7B,UAAU,KAAK,IAAI,IAAI;AAAA,IAC3B;AAAA,EACJ,SAAS,OAAO;AACZ,YAAQ,KAAK,2BAA2B;AACxC,eAAW,gBAAgB,KAAK,CAAC;AACjC,WAAO;AAAA,MACH,iBAAiB;AAAA,MACjB,UAAU,KAAK,IAAI,IAAI;AAAA,MACvB,UAAU,WAAW;AAAA,IACzB;AAAA,EACJ;AACJ;AASA,eAAsB,2BAClB,OACA,UACA,gBACA,OACA,WAC2B;AAC3B,MAAI,SAAS,WAAW,GAAG;AACvB,WAAO,CAAC;AAAA,EACZ;AAGA,QAAM,mBAAmB,SAAS,IAAI,OAAK;AACvC,UAAM,MAAM,MAAM,QAAQ,KAAK,OAAK,EAAE,OAAO,EAAE,QAAQ;AACvD,WAAO;AAAA,MACH,IAAI,EAAE;AAAA,MACN,MAAM,KAAK,QAAQ,EAAE;AAAA,MACrB,UAAU,KAAK,YAAY;AAAA,MAC3B,WAAW,EAAE,YAAY,IAAI,UAAU,GAAG,GAAG;AAAA,IACjD;AAAA,EACJ,CAAC;AAED,QAAM,gBAAgB,KAAK,UAAU,kBAAkB,MAAM,CAAC;AAE9D,MAAI,SAAS,0BAA0B;AAEvC,WAAS,OACJ,QAAQ,oBAAoB,aAAa,EACzC,QAAQ,kBAAkB,OAAO,iBAAiB,MAAM,CAAC,EACzD,QAAQ,0BAA0B,OAAO,iBAAiB,MAAM,CAAC,EACjE,QAAQ,0BAA0B,GAAG;AAE1C,QAAM,mBAA2C;AAAA,IAC7C,aAAa,MAAM,QAAQ;AAAA,IAC3B,oBAAoB,MAAM,QAAQ,eAAe;AAAA,IACjD,aAAa,MAAM,QAAQ,eAAe;AAAA,IAC1C,UAAU,MAAM,QAAQ,YAAY;AAAA,EACxC;AAEA,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,gBAAgB,GAAG;AACzD,aAAS,OAAO,QAAQ,IAAI,OAAO,SAAS,GAAG,UAAU,GAAG,GAAG,KAAK;AAAA,EACxE;AAEA,QAAM,WAAW,MAAM,eAAe,QAAQ,EAAE,OAAO,UAAU,CAAC;AAClE,MAAI,CAAC,SAAS,WAAW,CAAC,SAAS,UAAU;AACzC,WAAO,yBAAyB,OAAO,QAAQ;AAAA,EACnD;AAEA,QAAM,cAAU,oCAAY,SAAS,QAAQ;AAC7C,MAAI,CAAC,SAAS;AACV,WAAO,yBAAyB,OAAO,QAAQ;AAAA,EACnD;AAEA,MAAI;AACA,UAAM,SAAS,KAAK,MAAM,OAAO;AACjC,UAAM,WAA+B,CAAC;AAEtC,QAAI,OAAO,OAAO;AACd,eAAS,KAAK;AAAA,QACV,MAAM;AAAA,QACN,MAAM;AAAA,QACN,OAAO,GAAG,MAAM,QAAQ,IAAI;AAAA,QAC5B,SAAS,OAAO;AAAA,MACpB,CAAC;AAAA,IACL;AAEA,QAAI,OAAO,cAAc;AACrB,eAAS,KAAK;AAAA,QACV,MAAM;AAAA,QACN,MAAM;AAAA,QACN,OAAO;AAAA,QACP,SAAS,OAAO;AAAA,MACpB,CAAC;AAAA,IACL;AAEA,QAAI,OAAO,gBAAgB;AACvB,eAAS,KAAK;AAAA,QACV,MAAM;AAAA,QACN,MAAM;AAAA,QACN,OAAO;AAAA,QACP,SAAS,OAAO;AAAA,MACpB,CAAC;AAAA,IACL;AAEA,WAAO,SAAS,SAAS,IAAI,WAAW,yBAAyB,OAAO,QAAQ;AAAA,EACpF,QAAQ;AACJ,WAAO,yBAAyB,OAAO,QAAQ;AAAA,EACnD;AACJ;AA/dA,IAMAC,QAGAC;AATA;AAAA;AAAA;AAMA,IAAAD,SAAsB;AAGtB,IAAAC,yBAA4C;AAC5C;AACA;AAMA;AACA;AAEA;AAUA;AAUA;AACA;AAAA;AAAA;;;ACZO,SAAS,iBAAiB,SAAsD;AACnF,QAAM,YAAY,KAAK,IAAI;AAE3B,UAAQ,OAAO,MAAM,IAAI;AACzB,cAAY,6BAA6B;AAEzC,QAAM,UAAU,IAAI,QAAQ;AAC5B,UAAQ,MAAM,uBAAuB;AAErC,MAAI;AACA,UAAM,YAAiB,eAAQ,QAAQ,MAAM;AAC7C,UAAM,QAAQ,gBAAgB,WAAW;AAAA,MACrC,OAAO,QAAQ;AAAA,MACf,OAAO,QAAQ;AAAA,IACnB,CAAC;AAED,YAAQ,QAAQ,sBAAsB,MAAM,MAAM,SAAS;AAC3D,WAAO,EAAE,SAAS,MAAM,UAAU,KAAK,IAAI,IAAI,UAAU;AAAA,EAC7D,SAAS,OAAO;AACZ,YAAQ,KAAK,2BAA2B;AACxC,iBAAa,8BAA8B,gBAAgB,KAAK,CAAC,EAAE;AACnE,iBAAa,sDAAsD;AACnE,WAAO,EAAE,SAAS,OAAO,UAAU,KAAK,IAAI,IAAI,UAAU;AAAA,EAC9D;AACJ;AArDA,IAMAC;AANA;AAAA;AAAA;AAMA,IAAAA,SAAsB;AAEtB;AACA;AAKA;AAAA;AAAA;;;ACdA;AAAA;AAAA;AAMA;AAGA;AAGA;AAGA;AAGA;AAAA;AAAA;;;AClBA;AAAA;AAAA;AAAA;AAwDA,eAAsB,gBAClB,UACA,SACe;AACf,QAAM,YAAY,KAAK,IAAI;AAG3B,QAAM,mBAAwB,eAAQ,QAAQ;AAG9C,MAAI,CAAI,gBAAW,gBAAgB,GAAG;AAClC,eAAW,mCAAmC,gBAAgB,EAAE;AAChE,WAAO,WAAW;AAAA,EACtB;AAEA,MAAI,CAAI,cAAS,gBAAgB,EAAE,YAAY,GAAG;AAC9C,eAAW,uCAAuC,gBAAgB,EAAE;AACpE,WAAO,WAAW;AAAA,EACtB;AAGA,QAAM,aAAa,QAAQ,SAAS;AACpC,MAAI,aAAa,KAAK,aAAa,GAAG;AAClC,eAAW,0BAA0B,UAAU,0BAA0B;AACzE,WAAO,WAAW;AAAA,EACtB;AAGA,cAAY,kCAAkC;AAC9C,gBAAc,cAAc,gBAAgB;AAC5C,gBAAc,UAAe,eAAQ,QAAQ,MAAM,CAAC;AACpD,gBA
Ac,SAAS,QAAQ,KAAK;AACpC,MAAI,QAAQ,OAAO;AAAE,kBAAc,SAAS,QAAQ,KAAK;AAAA,EAAG;AAC5D,MAAI,QAAQ,OAAO;AAAE,kBAAc,SAAS,QAAQ,KAAK;AAAA,EAAG;AAC5D,MAAI,QAAQ,aAAa;AAAE,kBAAc,eAAe,OAAO,QAAQ,WAAW,CAAC;AAAA,EAAG;AACtF,MAAI,aAAa,GAAG;AAAE,kBAAc,kBAAkB,OAAO,UAAU,CAAC;AAAA,EAAG;AAC3E,MAAI,QAAQ,OAAO;AAAE,kBAAc,SAAS,2BAA2B;AAAA,EAAG;AAC1E,MAAI,QAAQ,UAAU;AAAE,kBAAc,aAAa,yBAAyB;AAAA,EAAG;AAC/E,MAAI,QAAQ,WAAW,OAAO;AAAE,kBAAc,UAAU,+BAA+B;AAAA,EAAG;AAC1F,MAAI,QAAQ,QAAQ;AAAE,kBAAc,UAAU,QAAQ,MAAM;AAAA,EAAG;AAG/D,MAAI,QAAQ,QAAQ;AAChB,UAAM,aAA0E;AAAA,MAC5E,EAAE,KAAK,aAAa,OAAO,sBAAsB;AAAA,MACjD,EAAE,KAAK,iBAAiB,OAAO,0BAA0B;AAAA,MACzD,EAAE,KAAK,YAAY,OAAO,qBAAqB;AAAA,MAC/C,EAAE,KAAK,WAAW,OAAO,oBAAoB;AAAA,IACjD;AACA,eAAW,EAAE,KAAK,MAAM,KAAK,YAAY;AACrC,YAAM,cAAc,QAAQ,OAAO,GAAG;AACtC,UAAI,aAAa;AACb,cAAM,QAAkB,CAAC;AACzB,YAAI,YAAY,OAAO;AAAE,gBAAM,KAAK,SAAS,YAAY,KAAK,EAAE;AAAA,QAAG;AACnE,YAAI,YAAY,SAAS;AAAE,gBAAM,KAAK,WAAW,YAAY,OAAO,GAAG;AAAA,QAAG;AAC1E,YAAI,YAAY,aAAa;AAAE,gBAAM,KAAK,eAAe,YAAY,WAAW,EAAE;AAAA,QAAG;AACrF,YAAI,YAAY,OAAO;AAAE,gBAAM,KAAK,SAAS,YAAY,KAAK,EAAE;AAAA,QAAG;AACnE,YAAI,YAAY,QAAQ;AAAE,gBAAM,KAAK,QAAQ;AAAA,QAAG;AAChD,YAAI,MAAM,SAAS,GAAG;AAClB,wBAAc,OAAO,MAAM,KAAK,IAAI,CAAC;AAAA,QACzC;AAAA,MACJ;AAAA,IACJ;AAAA,EACJ;AAEA,UAAQ,OAAO,MAAM,IAAI;AAGzB,QAAM,eAAe,MAAM,oBAAoB;AAC/C,MAAI,CAAC,aAAa,WAAW;AACzB,eAAW,iCAAiC,aAAa,UAAU,gBAAgB,EAAE;AACrF,cAAU,qBAAqB;AAC/B,cAAU,uCAAuC;AACjD,cAAU,uCAAuC;AACjD,cAAU,oCAAoC;AAC9C,WAAO,WAAW;AAAA,EACtB;AAGA,MAAI,YAAY;AAChB,QAAM,cAAc,MAAM;AAC1B,QAAM,gBAAgB,MAAM;AACxB,QAAI,WAAW;AACX,cAAQ,KAAK,WAAW,SAAS;AAAA,IACrC;AACA,gBAAY;AACZ,iBAAa,+DAA0D;AAAA,EAC3E;AACA,UAAQ,GAAG,UAAU,aAAa;AAElC,MAAI;AAEA,UAAM,eAAe,IAAI,aAAa;AAKtC,QAAI;AACJ,QAAI,iBAAiB;AAErB,QAAI,cAAc,GAAG;AACjB,YAAM,eAAe,MAAM,UAAU,kBAAkB,SAAS,WAAW;AAC3E,UAAI,aAAa,aAAa,QAAW;AACrC,eAAO,aAAa;AAAA,MACxB;AACA,cAAQ,aAAa;AACrB,uBAAiB,aAAa;AAC9B,UAAI,aAAa,YAAY;AACzB,qBAAa,SAAS,aAAa,aAAa,UAAU;AAAA,MAC9D;AAAA,IACJ,OAAO;AAEH,YAAM,SAAS,QAAQ,WACjB,kBAAkB,QAAQ,MAAM,IAChC,MAAM,eAAe,kBAAkB,QAAQ,MAAM;AAC3D,UAAI,CAAC,QAAQ;AACT,mBAAW,yEAAyE;AACpF,eAAO,WAAW;AAAA,MACtB;AACA,cAAQ,OAAO;AACf,mBAAa,+BAA+B,MAAM,QAAQ,MAAM,WAAW;AAC3E,mBAAa,WAAW,WAAW;AAAA,IACvC;AAEA,QAAI,YAAY,GAAG;AACf,aAAO,WAAW;AAAA,IACtB;AAKA,QAAI,iBAAiB;AAErB,QAAI,CAAC,QAAQ,aAAa,MAAM,QAAQ,SAAS,KAAK,cAAc,GAAG;AACnE,YAAM,eAAe,MAAM,uBAAuB,kBAAkB,OAAO,SAAS,YAAY;AAChG,cAAQ,aAAa;AACrB,uBAAiB,aAAa;AAAA,IAClC;AAEA,QAAI,YAAY,GAAG;AACf,aAAO,WAAW;AAAA,IACtB;AAKA,QAAI;AACJ,QAAI,iBAAiB;AAErB,QAAI;AAEJ,QAAI,cAAc,GAAG;AACjB,YAAM,eAAe,MAAM;AAAA,QACvB;AAAA,QAAkB;AAAA,QAAO;AAAA,QAAS;AAAA,QAAa;AAAA,MACnD;AACA,UAAI,aAAa,aAAa,QAAW;AACrC,eAAO,aAAa;AAAA,MACxB;AACA,iBAAW,aAAa;AACxB,uBAAiB,aAAa;AAC9B,4BAAsB,aAAa;AAAA,IACvC,OAAO;AAEH,YAAM,SAAS,kBAAkB,QAAQ,MAAM;AAC/C,UAAI,CAAC,UAAU,OAAO,WAAW,GAAG;AAChC,mBAAW,0EAA0E;AACrF,eAAO,WAAW;AAAA,MACtB;AACA,iBAAW;AACX,mBAAa,UAAU,SAAS,MAAM,yBAAyB;AAC/D,mBAAa,WAAW,UAAU;AAAA,IACtC;AAEA,QAAI,YAAY,GAAG;AACf,aAAO,WAAW;AAAA,IACtB;AAKA,UAAM,eAAe,MAAM;AAAA,MACvB;AAAA,MAAkB;AAAA,MAAO;AAAA,MAAU;AAAA,MAAS;AAAA,MAAa;AAAA,MAAc;AAAA,IAC3E;AACA,QAAI,aAAa,aAAa,QAAW;AACrC,aAAO,aAAa;AAAA,IACxB;AAKA,QAAI,mBAAmB;AACvB,QAAI,iBAAiB;AAErB,QAAI,CAAC,QAAQ,aAAa;AACtB,YAAM,eAAe,iBAAiB,OAAO;AAC7C,yBAAmB,aAAa;AAChC,uBAAiB,aAAa;AAAA,IAClC;AAKA,UAAM,gBAAgB,KAAK,IAAI,IAAI;AACnC,YAAQ,OAAO,MAAM,IAAI;AACzB,gBAAY,oBAAoB;AAChC,kBAAc,sBAAsB,OAAO,MAAM,QAAQ,MAAM,CAAC;AAChE,QAAI,MAAM,SAAS,MAAM,MAAM,SAAS,GAAG;AACvC,oBAAc,SAAS,OAAO,MAAM,MAAM,MAAM,CAAC;AACjD,oBAAc,UAAU,wBAAwB;AAAA,IACpD;AACA,kBAAc,oBAAoB,OAAO,SAAS,MAAM,CAAC;AACzD,kBAAc,oBAAoB,OAAO,aAAa,eAAe,CAAC;AACtE,QAAI,kBAAkB;AAClB,oBAAc,WAAW,WAAW;AAAA,IACxC;AACA,Q
AAI,iBAAiB,GAAG;AAAE,oBAAc,oBAAoBC,gBAAe,cAAc,CAAC;AAAA,IAAG;AAC7F,QAAI,iBAAiB,GAAG;AAAE,oBAAc,oBAAoBA,gBAAe,cAAc,CAAC;AAAA,IAAG;AAC7F,QAAI,iBAAiB,GAAG;AAAE,oBAAc,oBAAoBA,gBAAe,cAAc,CAAC;AAAA,IAAG;AAC7F,kBAAc,oBAAoBA,gBAAe,aAAa,QAAQ,CAAC;AACvE,QAAI,iBAAiB,GAAG;AAAE,oBAAc,oBAAoBA,gBAAe,cAAc,CAAC;AAAA,IAAG;AAC7F,kBAAc,kBAAkBA,gBAAe,aAAa,CAAC;AAG7D,QAAI,aAAa,SAAS,GAAG;AACzB,cAAQ,OAAO,MAAM,IAAI;AACzB,6BAAuB,YAAY;AAGnC,UAAI;AACA,cAAM,WAAgB,YAAU,eAAQ,QAAQ,MAAM,GAAG,aAAa;AACtE,QAAG,eAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AAC1C,cAAM,SAAS,aAAa,SAAS,QAAQ,KAAK;AAClD,QAAG;AAAA,UACM,YAAK,UAAU,mBAAmB;AAAA,UACvC,KAAK,UAAU,QAAQ,MAAM,CAAC;AAAA,UAC9B;AAAA,QACJ;AAAA,MACJ,QAAQ;AAAA,MAER;AAAA,IACJ;AAEA,YAAQ,OAAO,MAAM,IAAI;AACzB,iBAAa,qBAAqB,KAAU,eAAQ,QAAQ,MAAM,CAAC,CAAC,EAAE;AACtE,QAAI,kBAAkB;AAClB,mBAAa,YAAY,KAAU,YAAU,eAAQ,QAAQ,MAAM,GAAG,YAAY,CAAC,CAAC,EAAE;AAAA,IAC1F;AAEA,WAAO,WAAW;AAAA,EAEtB,UAAE;AACE,YAAQ,eAAe,UAAU,aAAa;AAAA,EAClD;AACJ;AASA,SAASA,gBAAe,IAAoB;AACxC,MAAI,KAAK,KAAM;AACX,WAAO,GAAG,EAAE;AAAA,EAChB;AACA,QAAM,UAAU,KAAK,MAAM,KAAK,GAAI;AACpC,MAAI,UAAU,IAAI;AACd,WAAO,GAAG,OAAO;AAAA,EACrB;AACA,QAAM,UAAU,KAAK,MAAM,UAAU,EAAE;AACvC,QAAM,mBAAmB,UAAU;AACnC,SAAO,GAAG,OAAO,KAAK,gBAAgB;AAC1C;AAKA,SAAS,uBAAuB,SAA6B;AACzD,QAAM,MAAM,aAAa;AAEzB,QAAM,SAAwD;AAAA,IAC1D,EAAE,OAAO,uBAAuB,OAAO,YAAY;AAAA,IACnD,EAAE,OAAO,2BAA2B,OAAO,gBAAgB;AAAA,IAC3D,EAAE,OAAO,sBAAsB,OAAO,WAAW;AAAA,IACjD,EAAE,OAAO,qBAAqB,OAAO,UAAU;AAAA,EACnD;AAEA,YAAU,uRAA2D;AAErE,aAAW,EAAE,OAAO,MAAM,KAAK,QAAQ;AACnC,UAAM,IAAI,QAAQ,cAAc,KAAK;AACrC,QAAI,EAAE,UAAU,EAAE,UAAU,GAAG;AAC3B,oBAAc,OAAO,KAAK,QAAQ,CAAC;AAAA,IACvC,WAAW,EAAE,QAAQ,GAAG;AACpB;AAAA,QACI;AAAA,QACA,GAAG,IAAI,EAAE,WAAW,CAAC,SAAS,IAAI,EAAE,YAAY,CAAC,UAAU,IAAI,EAAE,WAAW,CAAC;AAAA,MACjF;AAAA,IACJ;AAAA,EACJ;AAEA,QAAM,QAAQ,QAAQ,SAAS;AAC/B,YAAU,wVAA2D;AACrE;AAAA,IACI;AAAA,IACA,GAAG,IAAI,MAAM,WAAW,CAAC,SAAS,IAAI,MAAM,YAAY,CAAC,UAAU,IAAI,MAAM,WAAW,CAAC;AAAA,EAC7F;AACA,MAAI,MAAM,QAAQ,MAAM;AACpB,kBAAc,cAAc,aAAa,WAAW,MAAM,IAAI,CAAC;AAAA,EACnE;AACA,gBAAc,YAAY,OAAO,MAAM,KAAK,CAAC;AACjD;AAhXA,IAeAC,QACAC;AAhBA;AAAA;AAAA;AAeA,IAAAD,SAAsB;AACtB,IAAAC,OAAoB;AAEpB;AACA;AAEA;AAKA;AAUA;AACA;AAAA;AAAA;;;ACrCA,IASAC,MACAC,QA8Ca;AAxDb;AAAA;AAAA;AASA,IAAAD,OAAoB;AACpB,IAAAC,SAAsB;AA8Cf,IAAM,WAAN,MAAe;AAAA,MAMlB,YAAY,SAAiB;AAJ7B,aAAQ,SAA6B;AACrC,aAAQ,YAAoC,CAAC;AAC7C,aAAQ,YAAyC,oBAAI,IAAI;AAGrD,aAAK,UAAe,eAAQ,OAAO;AAAA,MACvC;AAAA;AAAA;AAAA;AAAA,MAKA,OAAa;AACT,aAAK,SAAS,KAAK,gBAAgB;AACnC,aAAK,YAAY,KAAK,kBAAkB;AACxC,aAAK,YAAY,KAAK,aAAa;AAAA,MACvC;AAAA;AAAA;AAAA;AAAA,MAKA,SAAe;AACX,aAAK,KAAK;AAAA,MACd;AAAA;AAAA;AAAA;AAAA,MAKA,IAAI,QAAqB;AACrB,YAAI,CAAC,KAAK,QAAQ;AACd,gBAAM,IAAI,MAAM,0CAA0C;AAAA,QAC9D;AACA,eAAO,KAAK;AAAA,MAChB;AAAA;AAAA;AAAA;AAAA,MAKA,IAAI,MAAc;AACd,eAAO,KAAK;AAAA,MAChB;AAAA;AAAA;AAAA;AAAA,MAKA,qBAAsC;AAClC,eAAO,KAAK,MAAM,QAAQ,IAAI,UAAQ;AAAA,UAClC,IAAI,IAAI;AAAA,UACR,MAAM,IAAI;AAAA,UACV,UAAU,IAAI;AAAA,UACd,YAAY,IAAI;AAAA,UAChB,MAAM,IAAI;AAAA,UACV,SAAS,IAAI;AAAA,QACjB,EAAE;AAAA,MACN;AAAA;AAAA;AAAA;AAAA,MAKA,gBAAgB,UAAuC;AACnD,cAAM,MAAM,KAAK,MAAM,QAAQ,KAAK,OAAK,EAAE,OAAO,QAAQ;AAC1D,YAAI,CAAC,KAAK;AACN,iBAAO;AAAA,QACX;AAEA,eAAO;AAAA,UACH,QAAQ;AAAA,UACR,UAAU,KAAK,UAAU,QAAQ,KAAK;AAAA,UACtC,UAAU,KAAK,UAAU,IAAI,QAAQ;AAAA,QACzC;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA,MAKA,eAAe,KAAiC;AAC5C,cAAM,SAAiC;AAAA,UACnC,SAAS;AAAA,UACT,gBAAgB;AAAA,UAChB,mBAAmB;AAAA,QACvB;AAEA,cAAM,cAAc,KAAK,GAAG;AAC5B,cAAM,WAAW,KAAK,UAAU,WAAW;AAC3C,YAAI,CAAC,UAAU;AACX,iBAAO;AAAA,QACX;AAEA,eAAO;AAAA,UACH;AAAA,UACA,OAAO,OAAO,GAAG,KAAK;AAAA,UACtB;AAAA,QACJ;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA,MAKA,kBAA0C;AACtC,eAAO,EAAE,GAAG,KAAK,UAAU;AAAA,MAC/B;AAAA;AAAA;AAAA;AAAA,M
AKA,IAAI,WAAoB;AACpB,eAAO,KAAK,WAAW;AAAA,MAC3B;AAAA;AAAA;AAAA;AAAA,MAMQ,kBAA+B;AACnC,cAAM,YAAiB,YAAK,KAAK,SAAS,mBAAmB;AAC7D,YAAI,CAAI,gBAAW,SAAS,GAAG;AAC3B,gBAAM,IAAI,MAAM,kCAAkC,KAAK,OAAO,EAAE;AAAA,QACpE;AACA,cAAM,UAAa,kBAAa,WAAW,OAAO;AAClD,eAAO,KAAK,MAAM,OAAO;AAAA,MAC7B;AAAA,MAEQ,oBAA4C;AAChD,cAAM,OAA+B,CAAC;AAGtC,cAAM,gBAAgB,CAAC,YAAY,mBAAmB,oBAAoB;AAC1E,mBAAW,QAAQ,eAAe;AAC9B,gBAAM,WAAgB,YAAK,KAAK,SAAS,IAAI;AAC7C,cAAO,gBAAW,QAAQ,GAAG;AACzB,kBAAM,MAAW,gBAAS,MAAM,KAAK;AACrC,iBAAK,KAAK,GAAG,EAAE,IAAO,kBAAa,UAAU,OAAO;AAAA,UACxD;AAAA,QACJ;AAGA,cAAM,aAAkB,YAAK,KAAK,SAAS,SAAS;AACpD,YAAO,gBAAW,UAAU,KAAQ,cAAS,UAAU,EAAE,YAAY,GAAG;AACpE,gBAAM,QAAW,iBAAY,UAAU,EAAE,OAAO,OAAK,EAAE,SAAS,KAAK,CAAC;AACtE,qBAAW,QAAQ,OAAO;AACtB,kBAAM,OAAY,gBAAS,MAAM,KAAK;AACtC,kBAAM,WAAW,KAAK,mBAAmB,IAAI;AAC7C,iBAAK,YAAY,IAAI,IAAO,kBAAkB,YAAK,YAAY,IAAI,GAAG,OAAO;AAAA,UACjF;AAAA,QACJ;AAGA,cAAM,WAAgB,YAAK,KAAK,SAAS,OAAO;AAChD,YAAO,gBAAW,QAAQ,KAAQ,cAAS,QAAQ,EAAE,YAAY,GAAG;AAChE,gBAAM,WAAc,iBAAY,QAAQ,EAAE;AAAA,YAAO,OAC1C,cAAc,YAAK,UAAU,CAAC,CAAC,EAAE,YAAY;AAAA,UACpD;AAEA,qBAAW,UAAU,UAAU;AAC3B,kBAAM,UAAe,YAAK,UAAU,MAAM;AAG1C,uBAAW,QAAQ,CAAC,YAAY,iBAAiB,GAAG;AAChD,oBAAM,WAAgB,YAAK,SAAS,IAAI;AACxC,kBAAO,gBAAW,QAAQ,GAAG;AACzB,sBAAM,MAAW,gBAAS,MAAM,KAAK;AACrC,qBAAK,UAAU,MAAM,IAAI,GAAG,EAAE,IAAO,kBAAa,UAAU,OAAO;AAAA,cACvE;AAAA,YACJ;AAGA,kBAAM,iBAAsB,YAAK,SAAS,SAAS;AACnD,gBAAO,gBAAW,cAAc,KAAQ,cAAS,cAAc,EAAE,YAAY,GAAG;AAC5E,oBAAM,QAAW,iBAAY,cAAc,EAAE,OAAO,OAAK,EAAE,SAAS,KAAK,CAAC;AAC1E,yBAAW,QAAQ,OAAO;AACtB,sBAAM,OAAY,gBAAS,MAAM,KAAK;AACtC,sBAAM,WAAW,KAAK,mBAAmB,IAAI;AAC7C,qBAAK,YAAY,IAAI,IAAO,kBAAkB,YAAK,gBAAgB,IAAI,GAAG,OAAO;AAAA,cACrF;AAAA,YACJ;AAAA,UACJ;AAAA,QACJ;AAEA,eAAO;AAAA,MACX;AAAA,MAEQ,eAA4C;AAChD,cAAM,WAAW,oBAAI,IAA4B;AAGjD,cAAM,WAAgB,YAAK,KAAK,SAAS,eAAe,UAAU;AAClE,YAAI,CAAI,gBAAW,QAAQ,KAAK,CAAI,cAAS,QAAQ,EAAE,YAAY,GAAG;AAClE,iBAAO;AAAA,QACX;AAEA,cAAM,QAAW,iBAAY,QAAQ,EAAE,OAAO,OAAK,EAAE,SAAS,OAAO,CAAC;AACtE,mBAAW,QAAQ,OAAO;AACtB,cAAI;AACA,kBAAM,UAAa,kBAAkB,YAAK,UAAU,IAAI,GAAG,OAAO;AAClE,kBAAM,SAAS,KAAK,MAAM,OAAO;AAEjC,kBAAM,WAA2B,OAAO,YAAY;AACpD,gBAAI,SAAS,UAAU;AACnB,uBAAS,IAAI,SAAS,UAAU,QAAQ;AAAA,YAC5C;AAAA,UACJ,QAAQ;AAAA,UAER;AAAA,QACJ;AAEA,eAAO;AAAA,MACX;AAAA,MAEQ,mBAAmB,MAA6B;AACpD,YAAI,CAAC,KAAK,QAAQ;AAAE,iBAAO;AAAA,QAAM;AACjC,cAAM,aAAa,KAAK,YAAY,EAAE,QAAQ,eAAe,GAAG,EAAE,QAAQ,YAAY,EAAE;AACxF,mBAAW,OAAO,KAAK,OAAO,SAAS;AACnC,gBAAM,UAAU,IAAI,GAAG,YAAY,EAAE,QAAQ,eAAe,GAAG,EAAE,QAAQ,YAAY,EAAE;AACvF,cAAI,YAAY,YAAY;AACxB,mBAAO,IAAI;AAAA,UACf;AAAA,QACJ;AACA,eAAO;AAAA,MACX;AAAA,IACJ;AAAA;AAAA;;;AC1MA,eAAsB,iBAClB,KACA,KACA,SACa;AAEb,QAAM,OAAO,MAAM,SAAS,GAAG;AAC/B,MAAI;AACJ,MAAI;AACA,aAAS,KAAK,MAAM,IAAI;AAAA,EAC5B,QAAQ;AACJ,QAAI,UAAU,KAAK,EAAE,gBAAgB,mBAAmB,CAAC;AACzD,QAAI,IAAI,KAAK,UAAU,EAAE,OAAO,oBAAoB,CAAC,CAAC;AACtD;AAAA,EACJ;AAEA,MAAI,CAAC,OAAO,YAAY,OAAO,OAAO,aAAa,UAAU;AACzD,QAAI,UAAU,KAAK,EAAE,gBAAgB,mBAAmB,CAAC;AACzD,QAAI,IAAI,KAAK,UAAU,EAAE,OAAO,sCAAsC,CAAC,CAAC;AACxE;AAAA,EACJ;AAGA,MAAI,UAAU,KAAK;AAAA,IACf,gBAAgB;AAAA,IAChB,iBAAiB;AAAA,IACjB,cAAc;AAAA,IACd,+BAA+B;AAAA,EACnC,CAAC;AAED,MAAI;AAEA,UAAM,UAAU,QAAQ,eAAe,SAAS,OAAO,QAAQ;AAG/D,YAAQ,KAAK;AAAA,MACT,MAAM;AAAA,MACN,WAAW,QAAQ;AAAA,IACvB,CAAC;AAGD,UAAM,iBAAiB,QAAQ;AAC/B,QAAI,YAAY,OAAO;AACvB,QAAI,gBAAgB;AAEpB,QAAI,gBAAgB;AAChB,UAAI,WAAW;AAEX,cAAM,WAAW,eAAe,IAAI,SAAS;AAC7C,YAAI,UAAU;AACV,0BAAgB;AAAA,QACpB,OAAO;AAEH,sBAAY;AAAA,QAChB;AAAA,MACJ;AAEA,UAAI,CAAC,WAAW;AAEZ,cAAM,aAAa,eAAe,OAAO;AACzC,YAAI,YAAY;AACZ,sBAAY,WAAW;AACvB,0BAAgB;AAAA,QACpB;AAAA,MAEJ;AAAA,IACJ;AAEA,QAAI;AAEJ,QAAI,iBAAiB,kBAAkB,WAAW;AAE9C,YAAM,SAAS;AAAA,QACX,OAAO;AAAA,QACP,QAAQ;AAAA,QACR,QAAQ;AAAA,QACR;A
AAA;AAAA,MACJ;AAEA,YAAM,SAAS,MAAM,eAAe,KAAK,WAAW,QAAQ;AAAA,QACxD,OAAO,QAAQ;AAAA,QACf,kBAAkB,QAAQ;AAAA,QAC1B,kBAAkB,CAAC,UAAU;AACzB,kBAAQ,KAAK,EAAE,MAAM,SAAS,SAAS,MAAM,CAAC;AAAA,QAClD;AAAA,MACJ,CAAC;AAED,qBAAe,OAAO;AAAA,IAC1B,OAAO;AAEH,YAAM,SAAS;AAAA,QACX,OAAO;AAAA,QACP,QAAQ;AAAA,QACR,QAAQ;AAAA,QACR,OAAO;AAAA,MACX;AAEA,qBAAe,MAAM,QAAQ,YAAY,QAAQ;AAAA,QAC7C,OAAO,QAAQ;AAAA,QACf,kBAAkB,QAAQ;AAAA,QAC1B,kBAAkB,CAAC,UAAU;AACzB,kBAAQ,KAAK,EAAE,MAAM,SAAS,SAAS,MAAM,CAAC;AAAA,QAClD;AAAA,MACJ,CAAC;AAAA,IACL;AAGA,YAAQ,KAAK;AAAA,MACT,MAAM;AAAA,MACN;AAAA,MACA,GAAI,YAAY,EAAE,UAAU,IAAI,CAAC;AAAA,IACrC,CAAC;AAAA,EAEL,SAAS,KAAK;AACV,UAAM,UAAU,eAAe,QAAQ,IAAI,UAAU;AACrD,YAAQ,KAAK,EAAE,MAAM,SAAS,QAAQ,CAAC;AAAA,EAC3C;AAEA,MAAI,IAAI;AACZ;AASO,SAAS,eACZ,UACA,aACA,cACA,qBACM;AACN,QAAM,QAAkB,CAAC;AAEzB,QAAM,KAAK,gEAAgE;AAC3E,QAAM,KAAK,iGAAkG;AAC7G,QAAM,KAAK,oFAAqF;AAChG,QAAM,KAAK,6FAA6F;AACxG,QAAM,KAAK,EAAE;AAGb,QAAM,KAAK,0BAA0B;AACrC,QAAM,KAAK,EAAE;AACb,QAAM,KAAK,YAAY;AACvB,QAAM,KAAK,EAAE;AAGb,MAAI,aAAa;AACb,UAAM,KAAK,kCAAkC;AAC7C,UAAM,KAAK,EAAE;AACb,UAAM,KAAK,WAAW;AACtB,UAAM,KAAK,EAAE;AAAA,EACjB;AAGA,MAAI,uBAAuB,oBAAoB,SAAS,GAAG;AACvD,UAAM,KAAK,yBAAyB;AACpC,UAAM,KAAK,EAAE;AACb,eAAW,OAAO,qBAAqB;AACnC,YAAM,OAAO,IAAI,SAAS,SAAS,SAAS;AAC5C,YAAM,KAAK,KAAK,IAAI,OAAO,IAAI,OAAO,EAAE;AACxC,YAAM,KAAK,EAAE;AAAA,IACjB;AAAA,EACJ;AAGA,QAAM,KAAK,qBAAqB;AAChC,QAAM,KAAK,EAAE;AACb,QAAM,KAAK,QAAQ;AAEnB,SAAO,MAAM,KAAK,IAAI;AAC1B;AASO,SAAS,QAAQ,KAAqB,MAAqC;AAC9E,MAAI,MAAM,SAAS,KAAK,UAAU,IAAI,CAAC;AAAA;AAAA,CAAM;AACjD;AAkBA,SAAS,SAAS,KAAuC;AACrD,SAAO,IAAI,QAAQ,CAACC,WAAS,WAAW;AACpC,QAAI,OAAO;AACX,QAAI,GAAG,QAAQ,CAAC,UAAkB;AAC9B,cAAQ,MAAM,SAAS;AAAA,IAC3B,CAAC;AACD,QAAI,GAAG,OAAO,MAAMA,UAAQ,IAAI,CAAC;AACjC,QAAI,GAAG,SAAS,MAAM;AAAA,EAC1B,CAAC;AACL;AA7RA;AAAA;AAAA;AAAA;AAAA;;;AC4CA,eAAsB,qBAClB,KACA,KACA,UACA,SACa;AAEb,QAAM,OAAO,MAAMC,UAAS,GAAG;AAC/B,MAAI,aAA6B,CAAC;AAClC,MAAI,KAAK,KAAK,GAAG;AACb,QAAI;AACA,mBAAa,KAAK,MAAM,IAAI;AAAA,IAChC,QAAQ;AACJ,UAAI,UAAU,KAAK,EAAE,gBAAgB,mBAAmB,CAAC;AACzD,UAAI,IAAI,KAAK,UAAU,EAAE,OAAO,oBAAoB,CAAC,CAAC;AACtD;AAAA,IACJ;AAAA,EACJ;AAGA,QAAM,QAAQ,QAAQ,SAAS;AAC/B,QAAM,MAAM,MAAM,QAAQ,KAAK,OAAK,EAAE,OAAO,QAAQ;AACrD,MAAI,CAAC,KAAK;AACN,QAAI,UAAU,KAAK,EAAE,gBAAgB,mBAAmB,CAAC;AACzD,QAAI,IAAI,KAAK,UAAU,EAAE,OAAO,qBAAqB,QAAQ,GAAG,CAAC,CAAC;AAClE;AAAA,EACJ;AAGA,MAAI,UAAU,KAAK;AAAA,IACf,gBAAgB;AAAA,IAChB,iBAAiB;AAAA,IACjB,cAAc;AAAA,IACd,+BAA+B;AAAA,EACnC,CAAC;AAED,MAAI;AAEA,YAAQ,KAAK,EAAE,MAAM,UAAU,SAAS,aAAa,IAAI,IAAI,aAAa,CAAC;AAG3E,UAAM,SAAS,QAAQ,SAAS,gBAAgB,QAAQ;AACxD,UAAM,mBAAmB,QAAQ,YAAY;AAG7C,UAAM,SAAS,mBAAmB,KAAK,kBAAkB,OAAO,UAAU;AAG1E,UAAM,eAAe,MAAM,QAAQ,YAAY,QAAQ;AAAA,MACnD,OAAO,QAAQ;AAAA,MACf,kBAAkB,QAAQ;AAAA,MAC1B,kBAAkB,CAAC,UAAU;AACzB,gBAAQ,KAAK,EAAE,MAAM,SAAS,MAAM,MAAM,CAAC;AAAA,MAC/C;AAAA,IACJ,CAAC;AAGD,YAAQ,KAAK,EAAE,MAAM,QAAQ,aAAa,CAAC;AAAA,EAE/C,SAAS,KAAK;AACV,UAAM,UAAU,eAAe,QAAQ,IAAI,UAAU;AACrD,YAAQ,KAAK,EAAE,MAAM,SAAS,QAAQ,CAAC;AAAA,EAC3C;AAEA,MAAI,IAAI;AACZ;AASO,SAAS,mBACZ,KACA,kBACA,OACA,SACM;AACN,QAAM,QAAkB,CAAC;AAEzB,QAAM,QAAQ,QAAQ,SAAS;AAC/B,QAAM,SAAS,UAAU;AAEzB,QAAM,KAAK,wBAAwB,SAAS,SAAS,SAAS,qBAAqB,IAAI,IAAI,WAAW;AACtG,QAAM,KAAK,gEAAgE;AAC3E,QAAM,KAAK,+DAA+D;AAC1E,QAAM,KAAK,EAAE;AAGb,QAAM,KAAK,uBAAuB;AAClC,QAAM,KAAK,EAAE;AACb,QAAM,KAAK,eAAe,IAAI,IAAI,EAAE;AACpC,QAAM,KAAK,aAAa,IAAI,EAAE,EAAE;AAChC,QAAM,KAAK,mBAAmB,IAAI,QAAQ,EAAE;AAC5C,QAAM,KAAK,eAAe,IAAI,IAAI,EAAE;AACpC,QAAM,KAAK,kBAAkB,IAAI,OAAO,EAAE;AAC1C,QAAM,KAAK,oBAAoB,IAAI,SAAS,KAAK,IAAI,CAAC,EAAE;AACxD,QAAM,KAAK,uBAAuB,IAAI,aAAa,SAAS,IAAI,IAAI,aAAa,KAAK,IAAI,IAAI,MAAM,EAAE;AACtG,QAAM,KAAK,qBAAqB,IAAI,WAAW,SAAS,IAAI,IAAI,WAAW,KAAK,IAAI,IAA
I,MAAM,EAAE;AAChG,QAAM,KAAK,EAAE;AAGb,MAAI,kBAAkB;AAClB,UAAM,KAAK,sBAAsB;AACjC,UAAM,KAAK,EAAE;AACb,UAAM,KAAK,gBAAgB;AAC3B,UAAM,KAAK,EAAE;AAAA,EACjB;AAGA,QAAM,KAAK,yBAAyB;AACpC,QAAM,KAAK,EAAE;AACb,QAAM,KAAK,YAAY,MAAM,QAAQ,IAAI,KAAK,MAAM,QAAQ,QAAQ,GAAG;AACvE,aAAW,KAAK,MAAM,SAAS;AAC3B,UAAM,OAAO,EAAE,aAAa,SAAS,IAAI,WAAM,EAAE,aAAa,KAAK,IAAI,CAAC,KAAK;AAC7E,UAAM,KAAK,OAAO,EAAE,IAAI,KAAK,EAAE,OAAO,GAAG,IAAI,EAAE;AAAA,EACnD;AACA,QAAM,KAAK,EAAE;AAGb,MAAI,QAAQ,UAAU;AAClB,UAAM,KAAK,kBAAkB;AAC7B,UAAM,KAAK,EAAE;AACb,UAAM,KAAK,QAAQ,QAAQ;AAAA,EAC/B,WAAW,QAAQ;AACf,UAAM,KAAK,uBAAuB;AAClC,UAAM,KAAK,EAAE;AACb,UAAM,KAAK,sDAAsD;AACjE,UAAM,KAAK,8CAA8C;AACzD,UAAM,KAAK,uCAAuC;AAClD,UAAM,KAAK,8BAA8B;AACzC,UAAM,KAAK,0DAA0D;AACrE,UAAM,KAAK,0CAA0C;AACrD,UAAM,KAAK,8CAA8C;AAAA,EAC7D,OAAO;AACH,UAAM,KAAK,kBAAkB;AAC7B,UAAM,KAAK,EAAE;AACb,UAAM,KAAK,gFAAgF;AAC3F,UAAM,KAAK,0FAA0F;AAAA,EACzG;AAEA,SAAO,MAAM,KAAK,IAAI;AAC1B;AAMA,SAASA,UAAS,KAAuC;AACrD,SAAO,IAAI,QAAQ,CAACC,WAAS,WAAW;AACpC,QAAI,OAAO;AACX,QAAI,GAAG,QAAQ,CAAC,UAAkB;AAAE,cAAQ,MAAM,SAAS;AAAA,IAAG,CAAC;AAC/D,QAAI,GAAG,OAAO,MAAMA,UAAQ,IAAI,CAAC;AACjC,QAAI,GAAG,SAAS,MAAM;AAAA,EAC1B,CAAC;AACL;AAzMA;AAAA;AAAA;AAgBA;AAAA;AAAA;;;AC8BO,SAAS,iBACZ,KACA,KACA,UACA,QACA,SACI;AACJ,QAAM,EAAE,SAAS,IAAI;AAGrB,MAAI,WAAW,SAAS,aAAa,cAAc;AAC/C,mBAAe,KAAK,QAAQ;AAC5B;AAAA,EACJ;AAGA,MAAI,WAAW,SAAS,aAAa,gBAAgB;AACjD,qBAAiB,KAAK,QAAQ;AAC9B;AAAA,EACJ;AAGA,QAAM,cAAc,SAAS,MAAM,wBAAwB;AAC3D,MAAI,WAAW,SAAS,aAAa;AACjC,UAAM,WAAW,mBAAmB,YAAY,CAAC,CAAC;AAClD,wBAAoB,KAAK,UAAU,QAAQ;AAC3C;AAAA,EACJ;AAGA,QAAM,YAAY,SAAS,MAAM,sBAAsB;AACvD,MAAI,WAAW,SAAS,WAAW;AAC/B,UAAM,MAAM,mBAAmB,UAAU,CAAC,CAAC;AAC3C,kBAAc,KAAK,UAAU,GAAG;AAChC;AAAA,EACJ;AAGA,MAAI,WAAW,UAAU,aAAa,YAAY;AAC9C,QAAI,CAAC,QAAQ,WAAW;AACpB,cAAQ,KAAK,+DAA+D;AAC5E;AAAA,IACJ;AACA,QAAI,CAAC,QAAQ,kBAAkB,CAAC,QAAQ,eAAe;AACnD,cAAQ,KAAK,+BAA+B;AAC5C;AAAA,IACJ;AACA,qBAAiB,KAAK,KAAK;AAAA,MACvB,gBAAgB,QAAQ;AAAA,MACxB,aAAa,QAAQ;AAAA,MACrB,OAAO,QAAQ;AAAA,MACf,kBAAkB,QAAQ;AAAA,MAC1B,gBAAgB,QAAQ;AAAA,IAC5B,CAAC,EAAE,MAAM,MAAM;AACX,UAAI,CAAC,IAAI,aAAa;AAClB,iBAAS,KAAK,EAAE,OAAO,wBAAwB,GAAG,GAAG;AAAA,MACzD;AAAA,IACJ,CAAC;AACD;AAAA,EACJ;AAGA,QAAM,qBAAqB,SAAS,MAAM,6BAA6B;AACvE,MAAI,WAAW,YAAY,oBAAoB;AAC3C,QAAI,CAAC,QAAQ,gBAAgB;AACzB,cAAQ,KAAK,oCAAoC;AACjD;AAAA,IACJ;AACA,UAAM,YAAY,mBAAmB,mBAAmB,CAAC,CAAC;AAC1D,UAAM,YAAY,QAAQ,eAAe,QAAQ,SAAS;AAC1D,aAAS,KAAK,EAAE,WAAW,UAAU,CAAC;AACtC;AAAA,EACJ;AAGA,QAAM,eAAe,SAAS,MAAM,wBAAwB;AAC5D,MAAI,WAAW,UAAU,cAAc;AACnC,QAAI,CAAC,QAAQ,WAAW;AACpB,cAAQ,KAAK,+DAA+D;AAC5E;AAAA,IACJ;AACA,QAAI,CAAC,QAAQ,eAAe;AACxB,cAAQ,KAAK,+BAA+B;AAC5C;AAAA,IACJ;AACA,UAAM,kBAAkB,mBAAmB,aAAa,CAAC,CAAC;AAC1D,yBAAqB,KAAK,KAAK,iBAAiB;AAAA,MAC5C,UAAU,QAAQ;AAAA,MAClB,aAAa,QAAQ;AAAA,MACrB,OAAO,QAAQ;AAAA,MACf,kBAAkB,QAAQ;AAAA,IAC9B,CAAC,EAAE,MAAM,MAAM;AACX,UAAI,CAAC,IAAI,aAAa;AAClB,iBAAS,KAAK,EAAE,OAAO,wBAAwB,GAAG,GAAG;AAAA,MACzD;AAAA,IACJ,CAAC;AACD;AAAA,EACJ;AAGA,UAAQ,KAAK,yBAAyB,MAAM,IAAI,QAAQ,EAAE;AAC9D;AASA,SAAS,eAAe,KAA0B,UAA0B;AACxE,MAAI;AACA,aAAS,KAAK,SAAS,KAAK;AAAA,EAChC,SAAS,OAAO;AACZ,aAAS,KAAK,EAAE,OAAO,gBAAgB,KAAK,EAAE,GAAG,GAAG;AAAA,EACxD;AACJ;AAKA,SAAS,iBAAiB,KAA0B,UAA0B;AAC1E,MAAI;AACA,UAAM,YAAY,SAAS,mBAAmB;AAC9C,aAAS,KAAK,SAAS;AAAA,EAC3B,SAAS,OAAO;AACZ,aAAS,KAAK,EAAE,OAAO,gBAAgB,KAAK,EAAE,GAAG,GAAG;AAAA,EACxD;AACJ;AAKA,SAAS,oBACL,KACA,UACA,UACI;AACJ,MAAI;AACA,UAAM,SAAS,SAAS,gBAAgB,QAAQ;AAChD,QAAI,CAAC,QAAQ;AACT,cAAQ,KAAK,qBAAqB,QAAQ,EAAE;AAC5C;AAAA,IACJ;AACA,aAAS,KAAK,MAAM;AAAA,EACxB,SAAS,OAAO;AACZ,aAAS,KAAK,EAAE,OAAO,gBAAgB,KAAK,EAAE,GAAG,GAAG;AAAA,EACxD;AACJ;AAKA,SAAS,cACL,KACA,UACA,KACI;AACJ,MAAI;AACA,UAAM,OAAO,SAAS,eAAe,GAAG;AACxC,QAAI,CAAC,MAAM;
AACP,cAAQ,KAAK,mBAAmB,GAAG,EAAE;AACrC;AAAA,IACJ;AACA,aAAS,KAAK,IAAI;AAAA,EACtB,SAAS,OAAO;AACZ,aAAS,KAAK,EAAE,OAAO,gBAAgB,KAAK,EAAE,GAAG,GAAG;AAAA,EACxD;AACJ;AAtNA;AAAA;AAAA;AAWA;AAGA;AACA;AACA;AAAA;AAAA;;;ACkEO,SAAS,qBACZ,SAC6D;AAC7D,QAAM,EAAE,UAAU,SAAS,WAAW,UAAU,gBAAgB,eAAe,SAAS,oBAAoB,eAAe,IAAI;AAE/H,SAAO,CAAC,KAA2B,QAA6B;AAC5D,UAAM,YAAgB,UAAM,IAAI,OAAO,KAAK,IAAI;AAChD,UAAM,WAAW,mBAAmB,UAAU,YAAY,GAAG;AAC7D,UAAM,SAAS,IAAI,QAAQ,YAAY,KAAK;AAG5C,QAAI,UAAU,+BAA+B,GAAG;AAChD,QAAI,UAAU,gCAAgC,4BAA4B;AAC1E,QAAI,UAAU,gCAAgC,cAAc;AAG5D,QAAI,WAAW,WAAW;AACtB,UAAI,UAAU,GAAG;AACjB,UAAI,IAAI;AACR;AAAA,IACJ;AAGA,QAAI,SAAS,WAAW,OAAO,GAAG;AAC9B,uBAAiB,KAAK,KAAK,UAAU,QAAQ;AAAA,QACzC;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACJ,CAAC;AACD;AAAA,IACJ;AAGA,QAAI,aAAa,OAAO,aAAa,eAAe;AAChD,YAAM,WAAgB,YAAK,SAAS,KAAK,QAAQ;AACjD,UAAI,gBAAgB,UAAU,GAAG,GAAG;AAChC;AAAA,MACJ;AAAA,IACJ;AAGA,QAAI,UAAU,KAAK,EAAE,gBAAgB,2BAA2B,CAAC;AACjE,QAAI,IAAI,OAAO;AAAA,EACnB;AACJ;AAUA,SAAS,gBAAgB,UAAkB,KAAmC;AAE1E,QAAM,iBAAsB,iBAAU,QAAQ;AAE9C,MAAI,CAAI,gBAAW,cAAc,GAAG;AAChC,WAAO;AAAA,EACX;AAEA,MAAI;AACA,UAAM,OAAU,cAAS,cAAc;AACvC,QAAI,CAAC,KAAK,OAAO,GAAG;AAChB,aAAO;AAAA,IACX;AAEA,UAAM,MAAW,eAAQ,cAAc,EAAE,YAAY;AACrD,UAAM,cAAc,WAAW,GAAG,KAAK;AAEvC,QAAI,UAAU,KAAK;AAAA,MACf,gBAAgB;AAAA,MAChB,kBAAkB,KAAK;AAAA,MACvB,iBAAiB;AAAA,IACrB,CAAC;AAED,UAAM,SAAY,sBAAiB,cAAc;AACjD,WAAO,KAAK,GAAG;AACf,WAAO,GAAG,SAAS,MAAM;AACrB,UAAI,CAAC,IAAI,aAAa;AAClB,YAAI,UAAU,GAAG;AAAA,MACrB;AACA,UAAI,IAAI;AAAA,IACZ,CAAC;AAED,WAAO;AAAA,EACX,QAAQ;AACJ,WAAO;AAAA,EACX;AACJ;AASO,SAAS,SAAS,KAA0B,MAAe,aAAa,KAAW;AACtF,QAAM,OAAO,KAAK,UAAU,IAAI;AAChC,MAAI,UAAU,YAAY;AAAA,IACtB,gBAAgB;AAAA,IAChB,kBAAkB,OAAO,WAAW,IAAI;AAAA,EAC5C,CAAC;AACD,MAAI,IAAI,IAAI;AAChB;AAKO,SAAS,QAAQ,KAA0B,UAAU,aAAmB;AAC3E,WAAS,KAAK,EAAE,OAAO,QAAQ,GAAG,GAAG;AACzC;AAKO,SAAS,QAAQ,KAA0B,UAAU,eAAqB;AAC7E,WAAS,KAAK,EAAE,OAAO,QAAQ,GAAG,GAAG;AACzC;AA/MA,IAWAC,MACAC,QACA,KAYM,YAkBA;AA3CN;AAAA;AAAA;AAWA,IAAAD,OAAoB;AACpB,IAAAC,SAAsB;AACtB,UAAqB;AAErB;AAUA,IAAM,aAAqC;AAAA,MACvC,SAAS;AAAA,MACT,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,QAAQ;AAAA,MACR,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,UAAU;AAAA,MACV,QAAQ;AAAA,MACR,OAAO;AAAA,IACX;AAGA,IAAM,eAAe;AAAA;AAAA;;;ACAd,SAAS,gBAAgB,SAAqC;AACjE,QAAM,EAAE,OAAO,OAAO,cAAc,UAAU,aAAa,cAAc,MAAM,IAAI;AAEnF,QAAM,aAAa,UAAU,SAAS,KAAK,UAAU,KAAK;AAC1D,QAAM,eAAe,UAAU,SACzB,oDACA;AAEN,SAAO;AAAA,kBACO,UAAU,gBAAgB,KAAK;AAAA;AAAA;AAAA;AAAA,MAI3C,YAAY;AAAA,aACLC,YAAW,KAAK,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAa5B,cAAc;AAAA,gFACgE,EAAE;AAAA;AAAA;AAAA,EAGhF,aAAa,QAAQ,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iEAQyCA,YAAW,KAAK,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWhF,eAAe;AAAA;AAAA,sBAEK,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA0BtB,WAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,6GAWgGA,YAAW,KAAK,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,cAMhH,EAAE;AAAA;AAAA,EAEd,cAAc,iEAAiE,EAAE;AAAA;AAAA;AAAA,EAGjF,aAAa,EAAE,cAAc,UAAU,aAAa,aAAa,cAAc,MAAM,CAAC,CAAC;AAAA;AAAA;AAAA;AAIzF;AAMA,SAAS,aAAa,UAA2B;AAC7C,MAAI,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAA
A;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkmBf,qBAAqB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiJpB,MAAI,UAAU;AACV,cAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA
AA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiLd;AAEA,SAAO;AACX;AAcA,SAAS,aAAa,MAA6B;AAC/C,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,8BAMmB,KAAK,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAsH7C,KAAK,cAAc;AAAA,iFAC4D,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWjF,KAAK,eAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBA6BH,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA0JnB,KAAK,WAAW,4DAA4D,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiG9E,KAAK,WAAW,4CAA4C,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAsD9D,KAAK,WAAW,2CAA2C,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAA
A;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyM7D,qBAAqB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA0EtB,KAAK,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,aA2NR,EAAE;AAAA,EACb,KAAK,WAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;A
AAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,eAsYH,EAAE;AAAA,EACf,KAAK,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+BA6DU,EAAE;AAEjC;AAMA,SAASA,YAAW,KAAqB;AACrC,SAAO,IACF,QAAQ,MAAM,OAAO,EACrB,QAAQ,MAAM,MAAM,EACpB,QAAQ,MAAM,MAAM,EACpB,QAAQ,MAAM,QAAQ;AAC/B;AAx+EA;AAAA;AAAA;AAeA;AAAA;AAAA;;;ACwPO,SAAS,SAAS,MAAwB;AAC7C,SAAO,KACF,YAAY,EACZ,QAAQ,kBAAkB,GAAG,EAC7B,MAAM,KAAK,EACX;AAAA,IAAO,UACJ,KAAK,UAAU,KACf,CAAC,WAAW,IAAI,IAAI;AAAA,EACxB;AACR;AAhRA,IAmDM,kBAEA,YAoBO;AAzEb;AAAA;AAAA;AAmDA,IAAM,mBAAmB;AAEzB,IAAM,aAAa,oBAAI,IAAI;AAAA,MACvB;AAAA,MAAO;AAAA,MAAK;AAAA,MAAM;AAAA,MAAO;AAAA,MAAM;AAAA,MAAO;AAAA,MAAM;AAAA,MAAM;AAAA,MAAM;AAAA,MAAM;AAAA,MAC9D;AAAA,MAAM;AAAA,MAAQ;AAAA,MAAM;AAAA,MAAQ;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAQ;AAAA,MAAM;AAAA,MAC9D;AAAA,MAAS;AAAA,MAAQ;AAAA,MAAO;AAAA,MAAO;AAAA,MAAM;AAAA,MAAQ;AAAA,MAAO;AAAA,MAAQ;AAAA,MAC5D;AAAA,MAAS;AAAA,MAAU;AAAA,MAAO;AAAA,MAAS;AAAA,MAAS;AAAA,MAAO;AAAA,MAAQ;AAAA,MAC3D;AAAA,MAAS;AAAA,MAAS;AAAA,MAAM;AAAA,MAAO;AAAA,MAAK;AAAA,MAAM;AAAA,MAAO;AAAA,MAAM;AAAA,MAAO;AAAA,MAC9D;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAQ;AAAA,MAAM;AAAA,MAAO;AAAA,MAAQ;AAAA,MAAO;AAAA,MAC9D;AAAA,MAAQ;AAAA,MAAS;AAAA,MAAO;AAAA,MAAQ;AAAA,MAAO;AAAA,MAAQ;AAAA,MAAS;AAAA,MAAO;AAAA,MAC/D;AAAA,MAAM;AAAA,MAAM;AAAA,MAAQ;AAAA,MAAQ;AAAA,MAAM;AAAA,MAAM;AAAA,MAAQ;AAAA,MAAQ;AAAA,MACxD;AAAA,MAAQ;AAAA,MAAO;AAAA,MAAQ;AAAA,MAAQ;AAAA,MAAQ;AAAA,MAAS;AAAA,MAAO;AAAA,MAAO;AAAA,MAC9D;AAAA,MAAS;AAAA,MAAM;AAAA,MAAO;AAAA,MAAQ;AAAA,MAAQ;AAAA,MAAS;AAAA,MAAU;AAAA,IAC7D,CAAC;AASM,IAAM,iBAAN,MAAqB;AAAA,MAMxB,YAAY,OAAoB,cAAsC;AALtE,aAAQ,YAA+B,CAAC;AACxC,aAAQ,iBAAsC,oBAAI,IAAI;AAKlD,aAAK,QAAQ;AACb,aAAK,eAAe;AACpB,aAAK,WAAW;AAAA,MACpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MASA,SAAS,UAAkB,aAAa,GAAqB;AACzD,cAAM,aAAa,SAAS,QAAQ;AAGpC,cAAM,SAAqD,CAAC;AAC5D,mBAAW,OAAO,KAAK,WAAW;AAC9B,cAAI,QAAQ;AACZ,qBAAW,QAAQ,YAAY;AAC3B,kBAAM,KAAK,IAAI,gBAAgB,IAAI,IAAI,KAAK;AAC5C,kBAAM,MAAM,KAAK,eAAe,IAAI,IAAI,KAAK;AAC7C,qBAAS,KAAK;AAAA,UAClB;AAGA,gBAAM,YAAY,IAAI,KAAK,YAAY;AACvC,qBAAW,QAAQ,YAAY;AAC3B,gBAAI,UAAU,SAAS,IAAI,GAAG;AAC1B,uBAAS;AAAA,YACb;AAAA,UACJ;AAEA,cAAI,QAAQ,GAAG;AACX,mBAAO,KAAK,EAAE,UAAU,IAAI,UAAU,MAAM,CAAC;AAAA,UACjD;AAAA,QACJ;AAGA,eAAO,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAGvC,cAAM,aAAa,OAAO,MAAM,GAAG,UAAU;AAC7C,cAAM,cAAc,WAAW,IAAI,OAAK,EAAE,QAAQ;AAGlD,cAAM,cAAc,IAAI,IAAI,WAAW;AACvC,YAAI,YAAY,SAAS,YAAY;AACjC,qBAAW,YAAY,aAAa;AAChC,kBAAM,MAAM,KAAK,MAAM,QAAQ,KAAK,OAAK,EAAE,OAAO,QAAQ;AAC1D,gBAAI,KAAK;AACL,yBAAW,OAAO,IAAI,cAAc;AAChC,oBAAI,YAAY,QAAQ,WAAY;AACpC,4BAAY,IAAI,GAAG;AAAA,cACvB;AACA,yBAAW,OAAO,IAAI,YAAY;AAC9B,oBAAI,YAAY,QAAQ,WAAY;AACpC,4BAAY,IAAI,GAAG;AAAA,cACvB;AAAA,YACJ;
AAAA,UACJ;AAAA,QACJ;AAEA,cAAM,WAAW,MAAM,KAAK,WAAW;AAGvC,cAAM,eAAyB,CAAC;AAChC,mBAAW,YAAY,UAAU;AAC7B,gBAAM,WAAW,KAAK,aAAa,QAAQ;AAC3C,cAAI,UAAU;AACV,yBAAa,KAAK,cAAc,QAAQ;AAAA;AAAA,EAAO,QAAQ,EAAE;AAAA,UAC7D;AAAA,QACJ;AAGA,cAAM,eAAe,KAAK,kBAAkB;AAE5C,eAAO;AAAA,UACH,WAAW;AAAA,UACX,aAAa,aAAa,KAAK,aAAa;AAAA,UAC5C;AAAA,QACJ;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA,MAKA,IAAI,gBAAwB;AACxB,eAAO,KAAK,UAAU;AAAA,MAC1B;AAAA;AAAA;AAAA;AAAA,MAKA,IAAI,iBAAyB;AACzB,eAAO,KAAK,eAAe;AAAA,MAC/B;AAAA;AAAA;AAAA;AAAA,MAMQ,aAAmB;AAEvB,mBAAW,OAAO,KAAK,MAAM,SAAS;AAClC,gBAAM,WAAW,KAAK,aAAa,IAAI,EAAE,KAAK;AAE9C,gBAAM,OAAO;AAAA,YACT,IAAI;AAAA,YACJ,IAAI;AAAA,YACJ,IAAI;AAAA,YACJ,IAAI;AAAA,YACJ,IAAI,SAAS,KAAK,GAAG;AAAA,YACrB;AAAA,UACJ,EAAE,KAAK,GAAG;AAEV,gBAAM,QAAQ,SAAS,IAAI;AAC3B,gBAAM,kBAAkB,oBAAI,IAAoB;AAEhD,qBAAW,QAAQ,OAAO;AACtB,4BAAgB,IAAI,OAAO,gBAAgB,IAAI,IAAI,KAAK,KAAK,CAAC;AAAA,UAClE;AAGA,gBAAM,YAAY,MAAM;AACxB,cAAI,YAAY,GAAG;AACf,uBAAW,CAAC,MAAM,KAAK,KAAK,iBAAiB;AACzC,8BAAgB,IAAI,MAAM,QAAQ,SAAS;AAAA,YAC/C;AAAA,UACJ;AAEA,eAAK,UAAU,KAAK;AAAA,YAChB,UAAU,IAAI;AAAA,YACd,MAAM,IAAI;AAAA,YACV,UAAU,IAAI;AAAA,YACd;AAAA,YACA;AAAA,UACJ,CAAC;AAAA,QACL;AAGA,cAAM,IAAI,KAAK,UAAU;AACzB,cAAM,UAAU,oBAAI,IAAoB;AAExC,mBAAW,OAAO,KAAK,WAAW;AAC9B,qBAAW,QAAQ,IAAI,gBAAgB,KAAK,GAAG;AAC3C,oBAAQ,IAAI,OAAO,QAAQ,IAAI,IAAI,KAAK,KAAK,CAAC;AAAA,UAClD;AAAA,QACJ;AAEA,mBAAW,CAAC,MAAM,EAAE,KAAK,SAAS;AAE9B,eAAK,eAAe,IAAI,MAAM,KAAK,IAAI,IAAI,EAAE,IAAI,CAAC;AAAA,QACtD;AAAA,MACJ;AAAA,MAEQ,oBAA4B;AAChC,cAAM,QAAkB,CAAC;AACzB,cAAM,KAAK,YAAY,KAAK,MAAM,QAAQ,IAAI,EAAE;AAChD,cAAM,KAAK,gBAAgB,KAAK,MAAM,QAAQ,WAAW,EAAE;AAC3D,cAAM,KAAK,aAAa,KAAK,MAAM,QAAQ,QAAQ,EAAE;AACrD,cAAM,KAAK,YAAY,KAAK,MAAM,QAAQ,MAAM,EAAE;AAClD,cAAM,KAAK,EAAE;AACb,cAAM,KAAK,eAAe;AAE1B,mBAAW,OAAO,KAAK,MAAM,SAAS;AAClC,gBAAM,OAAO,IAAI,aAAa,SAAS,IACjC,uBAAkB,IAAI,aAAa,KAAK,IAAI,CAAC,KAC7C;AACN,gBAAM,KAAK,OAAO,IAAI,IAAI,KAAK,IAAI,EAAE,MAAM,IAAI,OAAO,GAAG,IAAI,EAAE;AAAA,QACnE;AAEA,eAAO,MAAM,KAAK,IAAI;AAAA,MAC1B;AAAA,IACJ;AAAA;AAAA;;;AC1FA,SAAS,UAAU,QAAgB,MAAoB;AACnD,QAAM,UAAU,OAAO,KAAK,MAAM,OAAO;AACzC,QAAM,SAAS,QAAQ;AAEvB,MAAI;AAEJ,MAAI,SAAS,KAAK;AACd,aAAS,OAAO,MAAM,CAAC;AACvB,WAAO,CAAC,IAAI;AACZ,WAAO,CAAC,IAAI;AAAA,EAChB,WAAW,SAAS,OAAO;AACvB,aAAS,OAAO,MAAM,CAAC;AACvB,WAAO,CAAC,IAAI;AACZ,WAAO,CAAC,IAAI;AACZ,WAAO,cAAc,QAAQ,CAAC;AAAA,EAClC,OAAO;AACH,aAAS,OAAO,MAAM,EAAE;AACxB,WAAO,CAAC,IAAI;AACZ,WAAO,CAAC,IAAI;AAEZ,WAAO,cAAc,GAAG,CAAC;AACzB,WAAO,cAAc,QAAQ,CAAC;AAAA,EAClC;AAEA,SAAO,MAAM,OAAO,OAAO,CAAC,QAAQ,OAAO,CAAC,CAAC;AACjD;AAMA,SAAS,YAAY,KAA4B;AAC7C,MAAI,IAAI,SAAS,EAAG,QAAO;AAE3B,QAAM,SAAS,IAAI,CAAC,IAAI;AAGxB,MAAI,WAAW,EAAG,QAAO;AAEzB,QAAM,UAAU,IAAI,CAAC,IAAI,SAAU;AACnC,MAAI,gBAAgB,IAAI,CAAC,IAAI;AAC7B,MAAI,SAAS;AAEb,MAAI,kBAAkB,KAAK;AACvB,QAAI,IAAI,SAAS,EAAG,QAAO;AAC3B,oBAAgB,IAAI,aAAa,CAAC;AAClC,aAAS;AAAA,EACb,WAAW,kBAAkB,KAAK;AAC9B,QAAI,IAAI,SAAS,GAAI,QAAO;AAE5B,oBAAgB,IAAI,aAAa,CAAC;AAClC,aAAS;AAAA,EACb;AAEA,MAAI,QAAQ;AACR,QAAI,IAAI,SAAS,SAAS,IAAI,cAAe,QAAO;AACpD,UAAM,UAAU,IAAI,MAAM,QAAQ,SAAS,CAAC;AAC5C,cAAU;AACV,UAAM,UAAU,IAAI,MAAM,QAAQ,SAAS,aAAa;AACxD,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACrC,cAAQ,CAAC,KAAK,QAAQ,IAAI,CAAC;AAAA,IAC/B;AACA,WAAO,QAAQ,SAAS,OAAO;AAAA,EACnC;AAEA,MAAI,IAAI,SAAS,SAAS,cAAe,QAAO;AAChD,SAAO,IAAI,MAAM,QAAQ,SAAS,aAAa,EAAE,SAAS,OAAO;AACrE;AAvOA,IAmBA,QA4Ba;AA/Cb;AAAA;AAAA;AAmBA,aAAwB;AA4BjB,IAAM,kBAAN,MAAsB;AAAA,MAAtB;AACH,aAAQ,UAAyB,oBAAI,IAAI;AAAA;AAAA,MAGzC,IAAI,cAAsB;AACtB,eAAO,KAAK,QAAQ;AAAA,MACxB;AAAA;AAAA;AAAA;AAAA;AAAA,MAMA,OAAO,QAA2B;AAC9B,eAAO,GAAG,WAAW,CAAC,KAA2B,QAAgB,SAAiB;AAC9E,cAAI,IAAI,QAAQ,OAAO;AACnB,mBAAO,QAAQ;AACf;AAAA,UACJ;AAEA,gBAAM,MAAM,IAAI,QAAQ,mBAAmB;AAC3C,cAAI,CA
AC,KAAK;AACN,mBAAO,QAAQ;AACf;AAAA,UACJ;AAGA,gBAAM,YACD,kBAAW,MAAM,EACjB,OAAO,MAAM,sCAAsC,EACnD,OAAO,QAAQ;AAEpB,iBAAO;AAAA,YACH;AAAA;AAAA;AAAA,wBAGyB,SAAS;AAAA;AAAA;AAAA,UAEtC;AAEA,gBAAM,SAAmB;AAAA,YACrB;AAAA,YACA,MAAM,CAAC,SAAiB;AACpB,kBAAI;AACA,0BAAU,QAAQ,IAAI;AAAA,cAC1B,QAAQ;AAAA,cAER;AAAA,YACJ;AAAA,YACA,OAAO,MAAM;AACT,kBAAI;AACA,uBAAO,IAAI;AAAA,cACf,QAAQ;AAAA,cAER;AACA,mBAAK,QAAQ,OAAO,MAAM;AAAA,YAC9B;AAAA,UACJ;AAEA,eAAK,QAAQ,IAAI,MAAM;AAEvB,iBAAO,GAAG,QAAQ,CAAC,QAAgB;AAC/B,gBAAI;AACA,oBAAM,UAAU,YAAY,GAAG;AAC/B,kBAAI,YAAY,QAAQ,KAAK,gBAAgB;AACzC,sBAAM,SAAS,KAAK,MAAM,OAAO;AACjC,qBAAK,eAAe,QAAQ,MAAM;AAAA,cACtC;AAAA,YACJ,QAAQ;AAAA,YAER;AAAA,UACJ,CAAC;AAED,gBAAM,eAAe,MAAM;AACvB,iBAAK,QAAQ,OAAO,MAAM;AAAA,UAC9B;AAEA,iBAAO,GAAG,SAAS,YAAY;AAC/B,iBAAO,GAAG,OAAO,YAAY;AAC7B,iBAAO,GAAG,SAAS,YAAY;AAAA,QACnC,CAAC;AAAA,MACL;AAAA;AAAA;AAAA;AAAA,MAKA,UAAU,SAAiC;AACvC,aAAK,iBAAiB;AAAA,MAC1B;AAAA;AAAA;AAAA;AAAA,MAKA,UAAU,SAA0B;AAChC,cAAM,OAAO,KAAK,UAAU,OAAO;AACnC,mBAAW,UAAU,KAAK,SAAS;AAC/B,iBAAO,KAAK,IAAI;AAAA,QACpB;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA,MAKA,WAAiB;AACb,mBAAW,UAAU,KAAK,SAAS;AAC/B,iBAAO,MAAM;AAAA,QACjB;AACA,aAAK,QAAQ,MAAM;AAAA,MACvB;AAAA,IACJ;AAAA;AAAA;;;ACsCA,SAAS,aAAa,UAA2B;AAC7C,QAAM,aAAa,SAAS,QAAQ,OAAO,GAAG;AAC9C,QAAM,QAAQ,WAAW,MAAM,GAAG;AAElC,aAAW,QAAQ,OAAO;AACtB,QAAI,gBAAgB,SAAS,IAAI,EAAG,QAAO;AAAA,EAC/C;AAGA,MAAI,WAAW,SAAS,MAAM,KAC1B,WAAW,SAAS,OAAO,KAC3B,WAAW,SAAS,MAAM,GAAG;AAC7B,WAAO;AAAA,EACX;AAEA,SAAO;AACX;AAjNA,IAaAC,MAwBM,qBAMO,aAoIP;AA/KN;AAAA;AAAA;AAaA,IAAAA,OAAoB;AAwBpB,IAAM,sBAAsB;AAMrB,IAAM,cAAN,MAAkB;AAAA,MAOrB,YAAY,SAA6B;AANzC,aAAQ,UAA+B;AACvC,aAAQ,gBAAsD;AAC9D,aAAQ,eAA4B,oBAAI,IAAI;AAE5C,aAAQ,cAAc;AAGlB,aAAK,UAAU;AAAA,MACnB;AAAA;AAAA;AAAA;AAAA,MAKA,QAAc;AACV,YAAI,KAAK,YAAa;AAEtB,cAAM,EAAE,UAAU,aAAa,oBAAoB,IAAI,KAAK;AAE5D,YAAI;AACA,eAAK,UAAa,WAAM,UAAU,EAAE,WAAW,KAAK,GAAG,CAAC,WAAW,aAAa;AAC5E,gBAAI,CAAC,SAAU;AAGf,gBAAI,aAAa,QAAQ,EAAG;AAE5B,iBAAK,aAAa,IAAI,QAAQ;AAG9B,gBAAI,KAAK,eAAe;AACpB,2BAAa,KAAK,aAAa;AAAA,YACnC;AACA,iBAAK,gBAAgB,WAAW,MAAM;AAClC,mBAAK,eAAe;AAAA,YACxB,GAAG,UAAU;AAAA,UACjB,CAAC;AAED,eAAK,QAAQ,GAAG,SAAS,CAAC,QAAQ;AAC9B,gBAAI,KAAK,QAAQ,SAAS;AACtB,mBAAK,QAAQ,QAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AAAA,YAC5E;AAAA,UACJ,CAAC;AAED,eAAK,cAAc;AAAA,QACvB,SAAS,KAAK;AACV,cAAI,KAAK,QAAQ,SAAS;AACtB,iBAAK,QAAQ,QAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AAAA,UAC5E;AAAA,QACJ;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA,MAKA,OAAa;AACT,YAAI,KAAK,eAAe;AACpB,uBAAa,KAAK,aAAa;AAC/B,eAAK,gBAAgB;AAAA,QACzB;AACA,YAAI,KAAK,SAAS;AACd,eAAK,QAAQ,MAAM;AACnB,eAAK,UAAU;AAAA,QACnB;AACA,aAAK,aAAa,MAAM;AACxB,aAAK,cAAc;AAAA,MACvB;AAAA;AAAA;AAAA;AAAA,MAKA,IAAI,aAAsB;AACtB,eAAO,KAAK;AAAA,MAChB;AAAA;AAAA;AAAA;AAAA,MAMQ,iBAAuB;AAC3B,cAAM,QAAQ,MAAM,KAAK,KAAK,YAAY;AAC1C,aAAK,aAAa,MAAM;AAGxB,cAAM,cAAc,KAAK,oBAAoB,KAAK;AAElD,YAAI,YAAY,SAAS,GAAG;AACxB,eAAK,QAAQ,SAAS,WAAW;AAAA,QACrC;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOQ,oBAAoB,cAAkC;AAC1D,cAAM,WAAW,oBAAI,IAAY;AAEjC,mBAAW,QAAQ,cAAc;AAC7B,gBAAM,iBAAiB,KAAK,QAAQ,OAAO,GAAG;AAE9C,qBAAW,OAAO,KAAK,QAAQ,YAAY,SAAS;AAChD,kBAAM,aAAa,IAAI,KAAK,QAAQ,OAAO,GAAG;AAG9C,gBAAI,eAAe,WAAW,aAAa,GAAG,KAAK,mBAAmB,YAAY;AAC9E,uBAAS,IAAI,IAAI,EAAE;AACnB;AAAA,YACJ;AAGA,uBAAW,WAAW,IAAI,UAAU;AAChC,oBAAM,oBAAoB,QAAQ,QAAQ,OAAO,GAAG;AACpD,kBAAI,mBAAmB,qBAAqB,eAAe,SAAS,MAAM,iBAAiB,GAAG;AAC1F,yBAAS,IAAI,IAAI,EAAE;AACnB;AAAA,cACJ;AAAA,YACJ;AAAA,UACJ;AAAA,QACJ;AAEA,eAAO,MAAM,KAAK,QAAQ;AAAA,MAC9B;AAAA,IACJ;AASA,IAAM,kBAAkB;AAAA,MACpB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACJ;AAAA;AAAA
;;;ACkDA,SAAS,oBAA4B;AACjC,QAAM,QAAQ;AACd,MAAI,KAAK;AACT,WAAS,IAAI,GAAG,IAAI,IAAI,KAAK;AACzB,UAAM,MAAM,KAAK,MAAM,KAAK,OAAO,IAAI,MAAM,MAAM,CAAC;AAAA,EACxD;AACA,SAAO;AACX;AAxPA,IA2DM,yBACA,sBACA,6BAaO;AA1Eb;AAAA;AAAA;AA2DA,IAAM,0BAA0B,KAAK,KAAK;AAC1C,IAAM,uBAAuB;AAC7B,IAAM,8BAA8B,KAAK;AAalC,IAAM,6BAAN,MAAiC;AAAA,MAOpC,YAAY,SAA4C;AANxD,aAAiB,WAAW,oBAAI,IAAiC;AAIjE,aAAQ,eAAsD;AAG1D,aAAK,cAAc,QAAQ;AAC3B,aAAK,gBAAgB,QAAQ,iBAAiB;AAC9C,aAAK,cAAc,QAAQ,eAAe;AAE1C,cAAM,oBAAoB,QAAQ,qBAAqB;AACvD,aAAK,eAAe,YAAY,MAAM,KAAK,oBAAoB,GAAG,iBAAiB;AAEnF,YAAI,KAAK,aAAa,OAAO;AACzB,eAAK,aAAa,MAAM;AAAA,QAC5B;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA;AAAA,MAMA,SAAqC;AACjC,YAAI,KAAK,SAAS,QAAQ,KAAK,aAAa;AAExC,gBAAM,UAAU,KAAK,gBAAgB;AACrC,cAAI,CAAC,SAAS;AACV,mBAAO;AAAA,UACX;AAAA,QACJ;AAEA,cAAM,YAAY,kBAAkB;AACpC,cAAM,UAA+B;AAAA,UACjC;AAAA,UACA,WAAW;AAAA,UACX,YAAY,KAAK,IAAI;AAAA,UACrB,WAAW,KAAK,IAAI;AAAA,UACpB,MAAM;AAAA,QACV;AAEA,aAAK,SAAS,IAAI,WAAW,OAAO;AACpC,eAAO;AAAA,MACX;AAAA;AAAA;AAAA;AAAA;AAAA,MAMA,IAAI,WAAoD;AACpD,eAAO,KAAK,SAAS,IAAI,SAAS;AAAA,MACtC;AAAA;AAAA;AAAA;AAAA;AAAA,MAMA,MAAM,KACF,WACA,QACA,SAK0B;AAC1B,cAAM,UAAU,KAAK,SAAS,IAAI,SAAS;AAC3C,YAAI,CAAC,SAAS;AACV,gBAAM,IAAI,MAAM,sBAAsB,SAAS,EAAE;AAAA,QACrD;AAEA,YAAI,QAAQ,MAAM;AACd,gBAAM,IAAI,MAAM,oBAAoB,SAAS,EAAE;AAAA,QACnD;AAEA,gBAAQ,OAAO;AACf,YAAI;AACA,gBAAM,WAAW,MAAM,KAAK,YAAY,QAAQ;AAAA,YAC5C,OAAO,SAAS;AAAA,YAChB,kBAAkB,SAAS;AAAA,YAC3B,kBAAkB,SAAS;AAAA,UAC/B,CAAC;AAED,kBAAQ;AACR,kBAAQ,aAAa,KAAK,IAAI;AAE9B,iBAAO,EAAE,UAAU,UAAU;AAAA,QACjC,UAAE;AACE,kBAAQ,OAAO;AAAA,QACnB;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA,MAKA,QAAQ,WAA4B;AAChC,eAAO,KAAK,SAAS,OAAO,SAAS;AAAA,MACzC;AAAA;AAAA;AAAA;AAAA,MAKA,aAAmB;AACf,aAAK,SAAS,MAAM;AACpB,YAAI,KAAK,cAAc;AACnB,wBAAc,KAAK,YAAY;AAC/B,eAAK,eAAe;AAAA,QACxB;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA,MAKA,IAAI,OAAe;AACf,eAAO,KAAK,SAAS;AAAA,MACzB;AAAA;AAAA;AAAA;AAAA,MAKA,IAAI,aAAuB;AACvB,eAAO,MAAM,KAAK,KAAK,SAAS,KAAK,CAAC;AAAA,MAC1C;AAAA;AAAA;AAAA;AAAA,MAKQ,sBAA4B;AAChC,cAAM,MAAM,KAAK,IAAI;AACrB,mBAAW,CAAC,IAAI,OAAO,KAAK,KAAK,UAAU;AACvC,cAAI,CAAC,QAAQ,QAAS,MAAM,QAAQ,aAAc,KAAK,eAAe;AAClE,iBAAK,SAAS,OAAO,EAAE;AAAA,UAC3B;AAAA,QACJ;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA;AAAA,MAMQ,kBAA2B;AAC/B,YAAI,WAA0B;AAC9B,YAAI,aAAa;AAEjB,mBAAW,CAAC,IAAI,OAAO,KAAK,KAAK,UAAU;AACvC,cAAI,CAAC,QAAQ,QAAQ,QAAQ,aAAa,YAAY;AAClD,yBAAa,QAAQ;AACrB,uBAAW;AAAA,UACf;AAAA,QACJ;AAEA,YAAI,UAAU;AACV,eAAK,SAAS,OAAO,QAAQ;AAC7B,iBAAO;AAAA,QACX;AACA,eAAO;AAAA,MACX;AAAA,IACJ;AAAA;AAAA;;;AC7IA,eAAsBC,cAAa,SAAiD;AAChF,QAAM,OAAO,QAAQ,SAAS,SAAY,QAAQ,OAAO;AACzD,QAAM,OAAO,QAAQ,QAAQ;AAC7B,QAAM,YAAY,QAAQ,aAAa;AACvC,QAAM,QAAQ,QAAQ,SAAS;AAG/B,QAAM,WAAW,IAAI,SAAS,QAAQ,OAAO;AAC7C,WAAS,KAAK;AAGd,QAAM,QAAQ,QAAQ,SAAS,SAAS,MAAM,QAAQ;AAGtD,MAAI;AACJ,MAAI;AACJ,MAAI,WAAW;AACX,UAAM,eAAe,SAAS,gBAAgB;AAC9C,qBAAiB,IAAI,eAAe,SAAS,OAAO,YAAY;AAEhE,QAAI,QAAQ,eAAe;AACvB,uBAAiB,IAAI,2BAA2B;AAAA,QAC5C,aAAa,QAAQ;AAAA,MACzB,CAAC;AAAA,IACL;AAAA,EACJ;AAGA,QAAM,UAAU,gBAAgB;AAAA,IAC5B;AAAA,IACA;AAAA,IACA,cAAc;AAAA,IACd,UAAU;AAAA,IACV,aAAa;AAAA,IACb,aAAa,CAAC,EAAE,QAAQ,SAAS,QAAQ;AAAA,EAC7C,CAAC;AAGD,QAAM,UAAU,qBAAqB;AAAA,IACjC;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,QAAQ;AAAA,IAClB;AAAA,IACA,eAAe,QAAQ;AAAA,IACvB,SAAS,QAAQ;AAAA,IACjB,oBAAoB,QAAQ;AAAA,IAC5B;AAAA,EACJ,CAAC;AAED,QAAM,SAAc,kBAAa,OAAO;AAGxC,MAAI;AACJ,MAAI;AAEJ,MAAI,QAAQ,SAAS,QAAQ,UAAU;AACnC,eAAW,IAAI,gBAAgB;AAC/B,aAAS,OAAO,MAAM;AAGtB,aAAS,UAAU,CAAC,QAAQ,QAAQ;AAChC,UAAI,IAAI,SAAS,QAAQ;AACrB,eAAO,KAAK,KAAK,UAAU,EAAE,MAAM,OAAO,CAAC,CAAC;AAAA,MAChD;AAAA,IACJ,CAAC;AAGD,kBAAc,IAAI,YAAY;AAAA,MAC1B,UAAU,QAAQ;AAAA,MAClB,SAAS,QAAQ;AAAA,MACjB,aAAa,SAAS;AAAA,MACtB,YAAY,QAAQ;AAAA,MACpB,UAAU,CAAC,sBAAsB;AAE7B,iBAA
U,UAAU,EAAE,MAAM,cAAc,SAAS,kBAAkB,CAAC;AAGtE,YAAI;AACA,mBAAS,OAAO;AAGhB,cAAI,aAAa,gBAAgB;AAC7B,kBAAM,eAAe,SAAS,gBAAgB;AAC9C,kBAAM,aAAa,IAAI,eAAe,SAAS,OAAO,YAAY;AAAA,UAGtE;AAEA,mBAAU,UAAU,EAAE,MAAM,UAAU,SAAS,kBAAkB,CAAC;AAAA,QACtE,SAAS,KAAK;AACV,gBAAM,MAAM,eAAe,QAAQ,IAAI,UAAU;AACjD,mBAAU,UAAU,EAAE,MAAM,SAAS,SAAS,IAAI,CAAC;AAAA,QACvD;AAAA,MACJ;AAAA,MACA,SAAS,CAAC,QAAQ;AACd,iBAAU,UAAU,EAAE,MAAM,SAAS,SAAS,IAAI,QAAQ,CAAC;AAAA,MAC/D;AAAA,IACJ,CAAC;AAED,gBAAY,MAAM;AAAA,EACtB;AAGA,QAAM,IAAI,QAAc,CAACC,WAAS,WAAW;AACzC,WAAO,GAAG,SAAS,MAAM;AACzB,WAAO,OAAO,MAAM,MAAM,MAAMA,UAAQ,CAAC;AAAA,EAC7C,CAAC;AAGD,QAAM,UAAU,OAAO,QAAQ;AAC/B,QAAM,aAAa,OAAO,YAAY,YAAY,UAAU,QAAQ,OAAO;AAC3E,QAAMC,OAAM,UAAU,IAAI,IAAI,UAAU;AAExC,SAAO;AAAA,IACH;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM;AAAA,IACN;AAAA,IACA,KAAAA;AAAA,IACA,OAAO,YAAY;AACf,UAAI,gBAAgB;AAChB,uBAAe,WAAW;AAAA,MAC9B;AACA,UAAI,aAAa;AACb,oBAAY,KAAK;AAAA,MACrB;AACA,UAAI,UAAU;AACV,iBAAS,SAAS;AAAA,MACtB;AACA,YAAM,IAAI,QAAc,CAACD,WAAS,WAAW;AACzC,eAAO,MAAM,CAAC,QAAQ;AAClB,cAAI,KAAK;AAAE,mBAAO,GAAG;AAAA,UAAG,OACnB;AAAE,YAAAA,UAAQ;AAAA,UAAG;AAAA,QACtB,CAAC;AAAA,MACL,CAAC;AAAA,IACL;AAAA,EACJ;AACJ;AAzOA,IAYA;AAZA;AAAA;AAAA;AAYA,WAAsB;AACtB;AACA;AACA;AACA;AACA;AACA;AAEA;AAwNA;AACA;AACA;AACA;AACA;AAGA;AAAA;AAAA;;;ACnPA;AAAA;AAAA;AAAA;AA8CA,eAAsB,aAClB,SACA,SACe;AACf,QAAM,kBAAuB,eAAQ,OAAO;AAK5C,MAAI,QAAQ,UAAU;AAClB,UAAM,WAAgB,eAAQ,QAAQ,QAAQ;AAE9C,QAAI,CAAI,gBAAW,QAAQ,GAAG;AAC1B,iBAAW,mCAAmC,QAAQ,EAAE;AACxD,aAAO,WAAW;AAAA,IACtB;AAEA,gBAAY,mCAAmC;AAE/C,QAAI;AACA,YAAM,EAAE,iBAAAE,iBAAgB,IAAI,MAAM;AAClC,YAAM,WAAW,MAAMA,iBAAgB,UAAU;AAAA,QAC7C,QAAQ;AAAA,QACR,OAAO,QAAQ;AAAA,QACf,OAAO;AAAA,QACP,OAAO;AAAA,QACP,UAAU;AAAA,QACV,SAAS;AAAA,MACb,CAAC;AAED,UAAI,aAAa,WAAW,SAAS;AACjC,mBAAW,uCAAuC;AAClD,eAAO;AAAA,MACX;AAAA,IACJ,SAAS,OAAO;AACZ,iBAAW,2BAA2B,gBAAgB,KAAK,CAAC,EAAE;AAC9D,aAAO,WAAW;AAAA,IACtB;AAAA,EACJ;AAKA,MAAI,CAAI,gBAAW,eAAe,GAAG;AACjC,eAAW,kCAAkC,eAAe,EAAE;AAC9D,cAAU,8EAA8E;AACxF,WAAO,WAAW;AAAA,EACtB;AAEA,QAAM,YAAiB,YAAK,iBAAiB,mBAAmB;AAChE,MAAI,CAAI,gBAAW,SAAS,GAAG;AAC3B,eAAW,kCAAkC,eAAe,EAAE;AAC9D,cAAU,0DAA0D;AACpE,cAAU,8EAA8E;AACxF,WAAO,WAAW;AAAA,EACtB;AAKA,MAAI,QAAQ,SAAS,CAAC,QAAQ,UAAU;AACpC,iBAAa,4DAA4D;AAAA,EAC7E;AAKA,QAAM,YAAY,QAAQ,OAAO;AACjC,MAAI;AAEJ,MAAI,WAAW;AACX,QAAI;AACA,sBAAgB,MAAM,qBAAqB,QAAQ,OAAO,eAAe;AACzE,gBAAU,sCAAsC;AAAA,IACpD,SAAS,OAAO;AACZ,YAAM,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACpE,mBAAa,2BAA2B,MAAM,EAAE;AAChD,mBAAa,wCAAwC;AAAA,IACzD;AAAA,EACJ;AAKA,cAAY,qCAAgC;AAC5C,gBAAc,kBAAkB,eAAe;AAC/C,gBAAc,QAAQ,OAAO,QAAQ,QAAQ,GAAI,CAAC;AAClD,gBAAc,QAAQ,QAAQ,QAAQ,WAAW;AACjD,gBAAc,eAAe,gBAAgB,YAAY,YAAY,gBAAgB,UAAU;AAC/F,MAAI,QAAQ,SAAS,QAAQ,UAAU;AAAE,kBAAc,cAAc,SAAS;AAAA,EAAG;AACjF,UAAQ,OAAO,MAAM,IAAI;AAEzB,MAAI;AACA,UAAM,OAAO,MAAMC,cAAa;AAAA,MAC5B,SAAS;AAAA,MACT,MAAM,QAAQ,QAAQ;AAAA,MACtB,MAAM,QAAQ,QAAQ;AAAA,MACtB,WAAW,CAAC,CAAC;AAAA,MACb;AAAA,MACA,SAAS,QAAQ;AAAA,MACjB,UAAU,QAAQ,WAAgB,eAAQ,QAAQ,QAAQ,IAAI;AAAA,MAC9D,OAAQ,QAAQ,SAAuC;AAAA,MACvD,OAAO,QAAQ;AAAA,IACnB,CAAC;AAED,iBAAa,qBAAqB,KAAK,KAAK,GAAG,CAAC,EAAE;AAClD,cAAU,uBAAuB;AAGjC,QAAI,QAAQ,MAAM;AACd,kBAAY,KAAK,GAAG;AAAA,IACxB;AAGA,UAAM,IAAI,QAAc,CAACC,cAAY;AACjC,YAAM,WAAW,YAAY;AACzB,gBAAQ,OAAO,MAAM,IAAI;AACzB,kBAAU,yBAAyB;AACnC,cAAM,KAAK,MAAM;AACjB,qBAAa,iBAAiB;AAC9B,QAAAA,UAAQ;AAAA,MACZ;AAEA,cAAQ,GAAG,UAAU,MAAM,KAAK,SAAS,CAAC;AAC1C,cAAQ,GAAG,WAAW,MAAM,KAAK,SAAS,CAAC;AAAA,IAC/C,CAAC;AAED,WAAO,WAAW;AAAA,EAEtB,SAAS,OAAO;AACZ,UAAM,SAAS,gBAAgB,KAAK;AACpC,QAAI,OAAO,SAAS,YAAY,GAAG;AAC/B,iBAAW,QAAQ,QAAQ,QAAQ,GAAI,uDAAuD;AAAA,IAClG,OAAO;AACH,iBAAW,2BAA2B,MAAM,EAAE;AAAA,IAClD;AACA,WAAO,WAAW;AAAA,EACtB;
AACJ;AAgBA,eAAe,qBACX,cACA,yBACsB;AACtB,QAAM,EAAE,sBAAAC,sBAAqB,IAAI,MAAM;AACvC,QAAM,UAAUA,sBAAqB;AAGrC,QAAM,eAAe,MAAM,QAAQ,YAAY;AAC/C,MAAI,CAAC,aAAa,WAAW;AACzB,UAAM,IAAI,MAAM,aAAa,SAAS,8BAA8B;AAAA,EACxE;AAEA,SAAO,OAAO,QAAgB,YAAyH;AACnJ,UAAM,SAAS,MAAM,QAAQ,YAAY;AAAA,MACrC;AAAA,MACA,OAAO,SAAS,SAAS;AAAA,MACzB,kBAAkB,SAAS,oBAAoB;AAAA,MAC/C,SAAS;AAAA,MACT,sBAAsB;AAAA,MACtB,kBAAkB,SAAS;AAAA,IAC/B,CAAC;AAED,QAAI,CAAC,OAAO,SAAS;AACjB,YAAM,IAAI,MAAM,OAAO,SAAS,mBAAmB;AAAA,IACvD;AAEA,WAAO,OAAO,YAAY;AAAA,EAC9B;AACJ;AAUA,SAAS,YAAYC,MAAmB;AACpC,QAAM,EAAE,KAAK,IAAI,QAAQ,eAAe;AAExC,QAAM,WAAW,QAAQ;AACzB,MAAI;AAEJ,MAAI,aAAa,UAAU;AACvB,cAAU,SAASA,IAAG;AAAA,EAC1B,WAAW,aAAa,SAAS;AAC7B,cAAU,aAAaA,IAAG;AAAA,EAC9B,OAAO;AACH,cAAU,aAAaA,IAAG;AAAA,EAC9B;AAEA,OAAK,SAAS,CAAC,UAAwB;AACnC,QAAI,OAAO;AACP,mBAAa,8CAA8CA,IAAG,YAAY;AAAA,IAC9E;AAAA,EACJ,CAAC;AACL;AAlQA,IAkBAC,QACAC;AAnBA;AAAA;AAAA;AAkBA,IAAAD,SAAsB;AACtB,IAAAC,OAAoB;AACpB;AAEA;AACA;AAUA;AAAA;AAAA;;;ACAO,SAAS,gBAAyB;AACrC,QAAM,UAAU,IAAI,yBAAQ;AAE5B,UACK,KAAK,WAAW,EAChB,YAAY,oDAAoD,EAChE,QAAQ,OAAO;AAMpB,UACK,QAAQ,OAAO,EACf,YAAY,kDAAkD,EAC9D,SAAS,eAAe,kCAAkC,EAC1D,OAAO,uBAAuB,oBAAoB,YAAY,EAC9D,OAAO,oBAAoB,wCAAwC,CAAC,MAAc,SAAS,GAAG,EAAE,GAAG,EAAE,EACrG,OAAO,uBAAuB,iBAAiB,EAC/C,OAAO,iBAAiB,mBAAmB,KAAK,EAChD,OAAO,cAAc,wBAAwB,EAC7C,OAAO,OAAO,UAAkB,SAAkC;AAC/D,uBAAmB,IAAI;AACvB,UAAM,EAAE,cAAAC,cAAa,IAAI,MAAM;AAC/B,UAAM,WAAW,MAAMA,cAAa,UAAU;AAAA,MAC1C,QAAQ,KAAK;AAAA,MACb,WAAY,KAAK,aAAwB;AAAA,MACzC,OAAO,KAAK;AAAA,MACZ,SAAS,QAAQ,KAAK,OAAO;AAAA,IACjC,CAAC;AACD,YAAQ,KAAK,QAAQ;AAAA,EACzB,CAAC;AAML,UACK,QAAQ,UAAU,EAClB,YAAY,wCAAwC,EACpD,SAAS,eAAe,kCAAkC,EAC1D,OAAO,uBAAuB,gCAAgC,QAAQ,EACtE,OAAO,uBAAuB,iBAAiB,EAC/C,OAAO,2BAA2B,oCAAoC,CAAC,MAAc,SAAS,GAAG,EAAE,CAAC,EACpG,OAAO,kBAAkB,uCAAuC,EAChE,OAAO,kBAAkB,uEAAuE,EAChG,OAAO,WAAW,sCAAsC,KAAK,EAC7D,OAAO,eAAe,6CAA6C,KAAK,EACxE,OAAO,iBAAiB,mBAAmB,KAAK,EAChD,OAAO,cAAc,wBAAwB,EAC7C,OAAO,OAAO,UAAkB,SAAkC;AAC/D,uBAAmB,IAAI;AAGvB,UAAM,EAAE,iBAAAC,iBAAgB,IAAI,MAAM;AAClC,UAAM,WAAW,MAAMA,iBAAgB,UAAU;AAAA,MAC7C,QAAQ,KAAK;AAAA,MACb,OAAO,KAAK;AAAA,MACZ,SAAS,KAAK;AAAA,MACd,OAAO,KAAK;AAAA,MACZ,OAAO,KAAK;AAAA,MACZ,OAAO,QAAQ,KAAK,KAAK;AAAA,MACzB,UAAU,QAAQ,KAAK,QAAQ;AAAA,MAC/B,SAAS,QAAQ,KAAK,OAAO;AAAA,IACjC,CAAC;AACD,YAAQ,KAAK,QAAQ;AAAA,EACzB,CAAC;AAML,UACK,QAAQ,UAAU,EAClB,YAAY,gGAAiF,EAC7F,SAAS,eAAe,kCAAkC,EAC1D,OAAO,uBAAuB,6BAA6B,QAAQ,EACnE,OAAO,uBAAuB,iBAAiB,EAC/C,OAAO,8BAA8B,kCAAkC,CAAC,MAAc,SAAS,GAAG,EAAE,CAAC,EACrG,OAAO,2BAA2B,gCAAgC,CAAC,MAAc,SAAS,GAAG,EAAE,CAAC,EAChG,OAAO,kBAAkB,6BAA6B,EACtD,OAAO,kBAAkB,uEAAuE,EAChG,OAAO,mBAAmB,+CAA+C,QAAQ,EACjF,OAAO,WAAW,uCAAuC,KAAK,EAC9D,OAAO,eAAe,6CAA6C,KAAK,EACxE,OAAO,oBAAoB,iDAAiD,CAAC,MAAc,SAAS,GAAG,EAAE,CAAC,EAC1G,OAAO,kBAAkB,qCAAqC,KAAK,EACnE,OAAO,gBAAgB,uDAAuD,EAC9E,OAAO,eAAe,8DAA8D,EACpF,OAAO,mBAAmB,oCAAoC,MAAM,EACpE,OAAO,mBAAmB,wCAAwC,EAClE,OAAO,mBAAmB,yDAAyD,EACnF,OAAO,iBAAiB,mBAAmB,KAAK,EAChD,OAAO,cAAc,wBAAwB,EAC7C,OAAO,OAAO,UAAkB,MAA+B,QAAiB;AAC7E,uBAAmB,IAAI;AAGvB,QAAI,aAAuD;AAAA,MACvD,QAAQ,KAAK;AAAA,MACb,OAAO,KAAK;AAAA,MACZ,aAAa,KAAK;AAAA,MAClB,SAAS,KAAK;AAAA,MACd,OAAO,KAAK;AAAA,MACZ,OAAO,KAAK;AAAA,MACZ,OAAQ,KAAK,SAA2C;AAAA,MACxD,OAAO,QAAQ,KAAK,KAAK;AAAA,MACzB,UAAU,QAAQ,KAAK,QAAQ;AAAA,MAC/B,OAAO,KAAK;AAAA,MACZ,aAAa,QAAQ,KAAK,WAAW;AAAA,MACrC,WAAW,KAAK,YAAY;AAAA,MAC5B,QAAQ,KAAK,WAAW;AAAA,MACxB,OAAQ,KAAK,SAAuC;AAAA,MACpD,OAAO,KAAK;AAAA,MACZ,SAAS,QAAQ,KAAK,OAAO;AAAA,MAC7B,QAAQ,KAAK;AAAA,IACjB;AAGA,UAAM,aAAa,WAAW,UAAU,mBAAmB,QAAQ;AACnE,QAAI,YAAY;AACZ,UAAI;AACA,cAAM,SAAS,WAAW,UAAU;AAEpC,cAAM,iBAAiB,kBAAkB,KAAK,IAAI;AAClD,qBAAa,mBAAmB,QAAQ,YAAY,cAAc;AAClE,YAAI,WAAW,SAAS;AACpB,oBAAU,sBAAsB,UAAU,E
AAE;AAAA,QAChD;AAAA,MACJ,SAAS,GAAG;AAER,YAAI,KAAK,QAAQ;AACb,kBAAQ,OAAO,MAAM,UAAU,gBAAgB,CAAC,CAAC;AAAA,CAAI;AACrD,kBAAQ,KAAK,WAAW,YAAY;AAAA,QACxC;AAEA,YAAI,WAAW,SAAS;AACpB,kBAAQ,OAAO,MAAM,kCAAkC,gBAAgB,CAAC,CAAC;AAAA,CAAI;AAAA,QACjF;AAAA,MACJ;AAAA,IACJ;AAEA,UAAM,EAAE,iBAAAC,iBAAgB,IAAI,MAAM;AAClC,UAAM,WAAW,MAAMA,iBAAgB,UAAU,UAAU;AAC3D,YAAQ,KAAK,QAAQ;AAAA,EACzB,CAAC;AAML,UACK,QAAQ,OAAO,EACf,YAAY,iDAAiD,EAC7D,SAAS,cAAc,mCAAmC,EAC1D,OAAO,uBAAuB,qBAAqB,CAAC,MAAc,SAAS,GAAG,EAAE,GAAG,GAAI,EACvF,OAAO,wBAAwB,gBAAgB,WAAW,EAC1D,OAAO,8BAA8B,8BAA8B,EACnE,OAAO,eAAe,gDAAgD,KAAK,EAC3E,OAAO,WAAW,uCAAuC,EACzD,OAAO,uBAAuB,2BAA2B,EACzD,OAAO,UAAU,8BAA8B,KAAK,EACpD,OAAO,mBAAmB,oCAAoC,MAAM,EACpE,OAAO,mBAAmB,wCAAwC,EAClE,OAAO,iBAAiB,mBAAmB,KAAK,EAChD,OAAO,cAAc,wBAAwB,EAC7C,OAAO,OAAO,SAAiB,SAAkC;AAC9D,uBAAmB,IAAI;AAEvB,UAAM,EAAE,cAAAC,cAAa,IAAI,MAAM;AAC/B,UAAM,WAAW,MAAMA,cAAa,SAAS;AAAA,MACzC,MAAM,KAAK;AAAA,MACX,MAAM,KAAK;AAAA,MACX,UAAU,KAAK;AAAA,MACf,OAAO,QAAQ,KAAK,KAAK;AAAA,MACzB,IAAI,QAAQ,KAAK,EAAE;AAAA,MACnB,OAAO,KAAK;AAAA,MACZ,MAAM,QAAQ,KAAK,IAAI;AAAA,MACvB,OAAO,KAAK;AAAA,MACZ,OAAO,KAAK;AAAA,MACZ,SAAS,QAAQ,KAAK,OAAO;AAAA,IACjC,CAAC;AACD,YAAQ,KAAK,QAAQ;AAAA,EACzB,CAAC;AAEL,SAAO;AACX;AASA,SAAS,mBAAmB,MAAqC;AAE7D,MAAI,KAAK,UAAU,OAAO;AACtB,oBAAgB,KAAK;AAAA,EACzB;AAGA,MAAI,QAAQ,IAAI,aAAa,QAAW;AACpC,oBAAgB,KAAK;AAAA,EACzB;AAGA,MAAI,KAAK,SAAS;AACd,iBAAa,SAAS;AAAA,EAC1B;AACJ;AAUA,SAAS,kBAAkB,KAAc,MAA4C;AACjF,QAAM,WAAW,oBAAI,IAAY;AAIjC,aAAW,UAAU,IAAI,SAAS;AAC9B,UAAM,MAAM,OAAO,cAAc;AACjC,UAAM,SAAS,IAAI,qBAAqB,GAAG;AAC3C,QAAI,WAAW,OAAO;AAElB,eAAS,IAAI,GAAG;AAAA,IACpB;AAAA,EACJ;AAEA,SAAO;AACX;AA5QA,IASA,kBASa;AAlBb;AAAA;AAAA;AASA,uBAAwB;AACxB;AACA;AACA;AAMO,IAAM,aAAa;AAAA,MACtB,SAAS;AAAA,MACT,iBAAiB;AAAA,MACjB,cAAc;AAAA,MACd,gBAAgB;AAAA,MAChB,WAAW;AAAA,IACf;AAAA;AAAA;;;ACVA;AACA;AAEA,eAAe,OAAsB;AACjC,MAAI;AACA,UAAM,UAAU,cAAc;AAC9B,UAAM,QAAQ,WAAW,QAAQ,IAAI;AAAA,EACzC,SAAS,OAAO;AACZ,QAAI,iBAAiB,OAAO;AACxB,iBAAW,MAAM,OAAO;AAAA,IAC5B,OAAO;AACH,iBAAW,OAAO,KAAK,CAAC;AAAA,IAC5B;AACA,YAAQ,KAAK,WAAW,eAAe;AAAA,EAC3C;AACJ;AAEA,KAAK;",
6
+ "names": ["resolve", "exports", "getLogger", "LogCategory", "exports", "exports", "exports", "exports", "exports", "resolve", "exports", "resolve", "exports", "exports", "exports", "fs", "path", "yaml", "exports", "fs", "path", "exports", "execAsync", "resolve", "exports", "http", "url", "resolve", "exports", "exports", "extractJSON", "exports", "resolve", "exports", "path", "fs", "isWindows", "exports", "exports", "exports", "exports", "exports", "exports", "exports", "exports", "exports", "exports", "exports", "isWindows", "exports", "process", "logger", "resolve", "exports", "exports", "path", "fs", "exports", "path", "fs", "exports", "exports", "getCopilotSDKService", "path", "fs", "logger", "resolve", "exports", "exports", "exports", "resolve", "exports", "createExecutor", "mapPhaseTimeMs", "totalTimeMs", "resolve", "exports", "exports", "exports", "exports", "exports", "exports", "exports", "path", "exports", "exports", "exports", "exports", "exports", "exports", "fs", "path", "exports", "createPromptMapJob", "createPromptMapInput", "exports", "exports", "exports", "exports", "fs", "path", "exports", "extractJSON", "exports", "exports", "path", "exports", "fs", "path", "exports", "fs", "path", "exports", "path", "yaml", "exports", "exports", "exports", "exports", "resolve", "exports", "exports", "fs", "import_pipeline_core", "parseTopicsArray", "fs", "path", "path", "fs", "init_seeds", "init_prompts", "import_pipeline_core", "init_response_parser", "readOnlyPermissions", "import_pipeline_core", "init_prompts", "init_response_parser", "path", "import_pipeline_core", "fs", "path", "path", "CACHE_DIR_NAME", "path", "path", "path", "fs", "path", "fs", "path", "module", "readOnlyPermissions", "DISCOVERY_TOOLS", "import_pipeline_core", "init_prompts", "init_response_parser", "parseStringArray", "readOnlyPermissions", "import_pipeline_core", "parseStringArray", "init_response_parser", "readOnlyPermissions", "import_pipeline_core", "duration", "init_response_parser", "init_prompts", "graph", "path", "fs", "import_pipeline_core", "path", "fs", "path", "import_pipeline_core", "result", "path", "fs", "init_prompts", "extractJSON", "init_response_parser", "module", "import_pipeline_core", "init_prompts", "init_response_parser", "init_response_parser", "init_prompts", "module", "init_prompts", "import_pipeline_core", "init_prompts", "AREAS_DIR", "fs", "path", "fs", "path", "fs", "path", "init_prompts", "path", "import_pipeline_core", "path", "formatDuration", "path", "fs", "fs", "path", "resolve", "readBody", "resolve", "fs", "path", "escapeHtml", "fs", "createServer", "resolve", "url", "executeGenerate", "createServer", "resolve", "getCopilotSDKService", "url", "path", "fs", "executeSeeds", "executeDiscover", "executeGenerate", "executeServe"]
7
+ }