bluera-knowledge 0.19.6 → 0.19.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
- {"version":3,"sources":["../src/analysis/adapter-registry.ts","../src/logging/logger.ts","../src/services/project-root.service.ts","../src/logging/payload.ts","../src/services/job.service.ts","../src/types/job.ts","../src/types/result.ts","../src/utils/atomic-write.ts","../src/services/code-graph.service.ts","../src/analysis/ast-parser.ts","../src/analysis/code-graph.ts","../src/analysis/tree-sitter-parser.ts","../src/analysis/go-ast-parser.ts","../src/analysis/parser-factory.ts","../src/analysis/python-ast-parser.ts","../src/analysis/rust-ast-parser.ts","../src/services/config.service.ts","../src/types/config.ts","../src/utils/deep-merge.ts","../src/services/gitignore.service.ts","../src/services/index.service.ts","../src/services/chunking.service.ts","../src/services/drift.service.ts","../src/services/manifest.service.ts","../src/types/manifest.ts","../src/services/code-unit.service.ts","../src/services/search.service.ts","../src/services/store-definition.service.ts","../src/types/store-definition.ts","../src/services/store.service.ts","../src/plugin/git-clone.ts","../src/crawl/bridge.ts","../src/crawl/schemas.ts","../src/db/embeddings.ts","../src/db/lance.ts","../src/types/document.ts","../src/services/index.ts"],"sourcesContent":["/**\n * Adapter Registry\n *\n * Singleton registry for language adapters. Provides lookup by extension\n * or language ID.\n *\n * @example\n * ```typescript\n * // Register an adapter\n * const registry = AdapterRegistry.getInstance();\n * registry.register(zilAdapter);\n *\n * // Look up by extension\n * const adapter = registry.getByExtension('.zil');\n * if (adapter) {\n * const nodes = adapter.parse(content, filePath);\n * }\n * ```\n */\n\nimport type { LanguageAdapter } from './language-adapter.js';\n\nexport class AdapterRegistry {\n private static instance: AdapterRegistry | undefined;\n\n /** Map from languageId to adapter */\n private readonly adaptersByLanguageId = new Map<string, LanguageAdapter>();\n\n /** Map from extension to adapter */\n private readonly adaptersByExtension = new Map<string, LanguageAdapter>();\n\n private constructor() {\n // Private constructor for singleton\n }\n\n /**\n * Get the singleton instance of the registry.\n */\n static getInstance(): AdapterRegistry {\n AdapterRegistry.instance ??= new AdapterRegistry();\n return AdapterRegistry.instance;\n }\n\n /**\n * Reset the singleton instance (for testing).\n */\n static resetInstance(): void {\n AdapterRegistry.instance = undefined;\n }\n\n /**\n * Register a language adapter.\n *\n * @param adapter - The adapter to register\n * @throws If a different adapter with the same extension is already registered\n */\n register(adapter: LanguageAdapter): void {\n // Skip if already registered with same languageId (idempotent)\n if (this.adaptersByLanguageId.has(adapter.languageId)) {\n return;\n }\n\n // Check for extension conflicts with other adapters\n for (const ext of adapter.extensions) {\n const normalizedExt = this.normalizeExtension(ext);\n const existingAdapter = this.adaptersByExtension.get(normalizedExt);\n if (existingAdapter !== undefined) {\n throw new Error(\n `Extension \"${normalizedExt}\" is already registered by adapter \"${existingAdapter.languageId}\"`\n );\n }\n }\n\n // Register by languageId\n this.adaptersByLanguageId.set(adapter.languageId, adapter);\n\n // Register by each extension\n for (const ext of adapter.extensions) {\n const normalizedExt = this.normalizeExtension(ext);\n this.adaptersByExtension.set(normalizedExt, adapter);\n }\n }\n\n 
/**\n * Unregister a language adapter by its language ID.\n *\n * @param languageId - The language ID to unregister\n * @returns true if the adapter was found and removed, false otherwise\n */\n unregister(languageId: string): boolean {\n const adapter = this.adaptersByLanguageId.get(languageId);\n if (adapter === undefined) {\n return false;\n }\n\n // Remove from languageId map\n this.adaptersByLanguageId.delete(languageId);\n\n // Remove from extension map\n for (const ext of adapter.extensions) {\n const normalizedExt = this.normalizeExtension(ext);\n this.adaptersByExtension.delete(normalizedExt);\n }\n\n return true;\n }\n\n /**\n * Get an adapter by file extension.\n *\n * @param ext - File extension (with or without leading dot)\n * @returns The adapter if found, undefined otherwise\n */\n getByExtension(ext: string): LanguageAdapter | undefined {\n const normalizedExt = this.normalizeExtension(ext);\n return this.adaptersByExtension.get(normalizedExt);\n }\n\n /**\n * Get an adapter by language ID.\n *\n * @param languageId - The unique language identifier\n * @returns The adapter if found, undefined otherwise\n */\n getByLanguageId(languageId: string): LanguageAdapter | undefined {\n return this.adaptersByLanguageId.get(languageId);\n }\n\n /**\n * Get all registered adapters.\n *\n * @returns Array of all registered adapters\n */\n getAllAdapters(): LanguageAdapter[] {\n return Array.from(this.adaptersByLanguageId.values());\n }\n\n /**\n * Check if an extension is registered.\n *\n * @param ext - File extension (with or without leading dot)\n * @returns true if the extension is registered\n */\n hasExtension(ext: string): boolean {\n const normalizedExt = this.normalizeExtension(ext);\n return this.adaptersByExtension.has(normalizedExt);\n }\n\n /**\n * Normalize extension to always have a leading dot.\n */\n private normalizeExtension(ext: string): string {\n return ext.startsWith('.') ? 
ext : `.${ext}`;\n }\n}\n","/**\n * Core logger factory using pino with file-based rolling logs\n *\n * Features:\n * - File-only output (no console pollution for Claude Code)\n * - Size-based rotation (10MB, keeps 5 files)\n * - LOG_LEVEL env var control (trace/debug/info/warn/error/fatal)\n * - Child loggers per module for context\n */\n\nimport { mkdirSync, existsSync } from 'node:fs';\nimport { join } from 'node:path';\nimport pino, { type Logger, type LoggerOptions } from 'pino';\nimport { ProjectRootService } from '../services/project-root.service.js';\n\n/** Valid log levels */\nexport type LogLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error' | 'fatal';\n\nconst VALID_LEVELS: readonly LogLevel[] = ['trace', 'debug', 'info', 'warn', 'error', 'fatal'];\nconst VALID_LEVELS_SET: ReadonlySet<string> = new Set(VALID_LEVELS);\n\n/** Default log directory under project root (per-repo) */\nfunction getLogDir(): string {\n const projectRoot = ProjectRootService.resolve();\n return join(projectRoot, '.bluera', 'bluera-knowledge', 'logs');\n}\n\n/** Resolve and create log directory - fails fast if cannot create */\nfunction ensureLogDir(): string {\n const logDir = getLogDir();\n if (!existsSync(logDir)) {\n mkdirSync(logDir, { recursive: true });\n }\n return logDir;\n}\n\n/** Check if a string is a valid log level */\nfunction isValidLogLevel(level: string): level is LogLevel {\n return VALID_LEVELS_SET.has(level);\n}\n\n/** Get log level from environment - fails fast on invalid value */\nfunction getLogLevel(): LogLevel {\n const level = process.env['LOG_LEVEL']?.toLowerCase();\n\n if (level === undefined || level === '') {\n return 'info';\n }\n\n if (!isValidLogLevel(level)) {\n throw new Error(`Invalid LOG_LEVEL: \"${level}\". Valid values: ${VALID_LEVELS.join(', ')}`);\n }\n\n return level;\n}\n\n/** Root logger instance - lazily initialized */\nlet rootLogger: Logger | null = null;\n\n/** Initialize the root logger with pino-roll transport */\nfunction initializeLogger(): Logger {\n if (rootLogger !== null) {\n return rootLogger;\n }\n\n const logDir = ensureLogDir();\n const logFile = join(logDir, 'app.log');\n const level = getLogLevel();\n\n const options: LoggerOptions = {\n level,\n timestamp: pino.stdTimeFunctions.isoTime,\n formatters: {\n level: (label) => ({ level: label }),\n },\n transport: {\n target: 'pino-roll',\n options: {\n file: logFile,\n size: '10m', // 10MB rotation\n limit: { count: 5 }, // Keep 5 rotated files\n mkdir: true,\n },\n },\n };\n\n rootLogger = pino(options);\n return rootLogger;\n}\n\n/**\n * Create a named child logger for a specific module\n *\n * @param module - Module name (e.g., 'crawler', 'mcp-server', 'search-service')\n * @returns Logger instance with module context\n *\n * @example\n * const logger = createLogger('crawler');\n * logger.info({ url }, 'Fetching page');\n */\nexport function createLogger(module: string): Logger {\n const root = initializeLogger();\n return root.child({ module });\n}\n\n/**\n * Get the current log level\n */\nexport function getCurrentLogLevel(): LogLevel {\n return getLogLevel();\n}\n\n/**\n * Check if a specific log level is enabled\n */\nexport function isLevelEnabled(level: LogLevel): boolean {\n const currentLevel = getLogLevel();\n const currentIndex = VALID_LEVELS.indexOf(currentLevel);\n const checkIndex = VALID_LEVELS.indexOf(level);\n return checkIndex >= currentIndex;\n}\n\n/**\n * Get the log directory path\n */\nexport function getLogDirectory(): string {\n return getLogDir();\n}\n\n/**\n * Flush 
and shutdown the logger - call before process exit\n */\nexport function shutdownLogger(): Promise<void> {\n return new Promise((resolve) => {\n if (rootLogger !== null) {\n rootLogger.flush();\n // Give time for async transport to flush\n setTimeout(() => {\n rootLogger = null;\n resolve();\n }, 100);\n } else {\n resolve();\n }\n });\n}\n","import { existsSync, statSync, realpathSync } from 'node:fs';\nimport { dirname, join, normalize, sep } from 'node:path';\n\nexport interface ProjectRootOptions {\n readonly projectRoot?: string | undefined;\n}\n\n/**\n * Service for resolving the project root directory using a hierarchical detection strategy.\n *\n * Resolution hierarchy:\n * 1. Explicit projectRoot option (highest priority)\n * 2. PROJECT_ROOT environment variable (set by plugin commands)\n * 3. Git root detection (walk up to find .git directory)\n * 4. PWD environment variable (set by MCP server and shells)\n * 5. process.cwd() (fallback)\n */\n// eslint-disable-next-line @typescript-eslint/no-extraneous-class\nexport class ProjectRootService {\n /**\n * Resolve project root directory using hierarchical detection.\n */\n static resolve(options?: ProjectRootOptions): string {\n // 1. Check explicit option first\n if (options?.projectRoot !== undefined && options.projectRoot !== '') {\n return this.normalize(options.projectRoot);\n }\n\n // 2. Check PROJECT_ROOT environment variable (plugin commands)\n const projectRootEnv = process.env['PROJECT_ROOT'];\n if (projectRootEnv !== undefined && projectRootEnv !== '') {\n return this.normalize(projectRootEnv);\n }\n\n // 3. Try git root detection (most reliable for repos)\n const gitRoot = this.findGitRoot(process.cwd());\n if (gitRoot !== null) {\n return gitRoot;\n }\n\n // 4. Check PWD environment variable (MCP server, shells) - only if not in a git repo\n const pwdEnv = process.env['PWD'];\n if (pwdEnv !== undefined && pwdEnv !== '') {\n return this.normalize(pwdEnv);\n }\n\n // 5. 
Fallback to process.cwd()\n return process.cwd();\n }\n\n /**\n * Find git repository root by walking up the directory tree looking for .git\n */\n static findGitRoot(startPath: string): string | null {\n let currentPath = normalize(startPath);\n const root = normalize(sep); // Root filesystem (/ on Unix, C:\\ on Windows)\n\n // Walk up directory tree\n while (currentPath !== root) {\n const gitPath = join(currentPath, '.git');\n\n if (existsSync(gitPath)) {\n try {\n const stats = statSync(gitPath);\n // .git can be a directory (normal repo) or file (submodule/worktree)\n if (stats.isDirectory() || stats.isFile()) {\n return currentPath;\n }\n } catch {\n // Ignore stat errors, continue searching\n }\n }\n\n // Move up one directory\n const parentPath = dirname(currentPath);\n if (parentPath === currentPath) {\n // Reached root without finding .git\n break;\n }\n currentPath = parentPath;\n }\n\n return null;\n }\n\n /**\n * Normalize path by resolving symlinks and normalizing separators\n */\n static normalize(path: string): string {\n try {\n // Resolve symlinks to real path\n const realPath = realpathSync(path);\n // Normalize separators\n return normalize(realPath);\n } catch {\n // If realpath fails (path doesn't exist), just normalize\n return normalize(path);\n }\n }\n\n /**\n * Validate that a path exists and is a directory\n */\n static validate(path: string): boolean {\n try {\n const stats = statSync(path);\n return stats.isDirectory();\n } catch {\n return false;\n }\n }\n}\n","/**\n * Large payload handling utilities for logging\n *\n * Handles large content (raw HTML, MCP responses) by:\n * - Truncating to preview in log entries\n * - Optionally dumping full content to separate files at trace level\n */\n\nimport { createHash } from 'node:crypto';\nimport { writeFileSync, mkdirSync, existsSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { getLogDirectory, isLevelEnabled } from './logger.js';\n\n/** Maximum characters for log preview */\nconst MAX_PREVIEW_LENGTH = 500;\n\n/** Minimum size to trigger payload dump (10KB) */\nconst PAYLOAD_DUMP_THRESHOLD = 10_000;\n\n/** Summary of a large payload for logging */\nexport interface PayloadSummary {\n /** Truncated preview of content */\n preview: string;\n /** Size in bytes */\n sizeBytes: number;\n /** Short hash for identification */\n hash: string;\n /** Filename if full content was dumped (trace level only) */\n payloadFile?: string;\n}\n\n/** Get the payload dump directory */\nfunction getPayloadDir(): string {\n const dir = join(getLogDirectory(), 'payload');\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n return dir;\n}\n\n/** Generate a safe filename from an identifier */\nfunction safeFilename(identifier: string): string {\n return identifier.replace(/[^a-zA-Z0-9-]/g, '_').substring(0, 50);\n}\n\n/**\n * Summarize a large payload for logging\n *\n * Creates a summary with:\n * - Truncated preview (first 500 chars)\n * - Size in bytes\n * - Short MD5 hash for identification\n * - Optional full dump to file at trace level\n *\n * @param content - The full content to summarize\n * @param type - Type identifier (e.g., 'raw-html', 'mcp-response')\n * @param identifier - Unique identifier (e.g., URL, query)\n * @param dumpFull - Whether to dump full content to file (default: trace level check)\n * @returns PayloadSummary for inclusion in log entry\n *\n * @example\n * logger.info({\n * url,\n * ...summarizePayload(html, 'raw-html', url),\n * }, 'Fetched HTML');\n */\nexport function 
summarizePayload(\n content: string,\n type: string,\n identifier: string,\n dumpFull: boolean = isLevelEnabled('trace')\n): PayloadSummary {\n const sizeBytes = Buffer.byteLength(content, 'utf8');\n const hash = createHash('md5').update(content).digest('hex').substring(0, 12);\n const preview = truncateForLog(content, MAX_PREVIEW_LENGTH);\n\n const baseSummary = { preview, sizeBytes, hash };\n\n // Dump full payload to file if enabled and above threshold\n if (dumpFull && sizeBytes > PAYLOAD_DUMP_THRESHOLD) {\n const timestamp = new Date().toISOString().replace(/[:.]/g, '-');\n const safeId = safeFilename(identifier);\n const filename = `${timestamp}-${type}-${safeId}-${hash}.json`;\n const filepath = join(getPayloadDir(), filename);\n\n writeFileSync(\n filepath,\n JSON.stringify(\n {\n timestamp: new Date().toISOString(),\n type,\n identifier,\n sizeBytes,\n content,\n },\n null,\n 2\n )\n );\n\n return { ...baseSummary, payloadFile: filename };\n }\n\n return baseSummary;\n}\n\n/**\n * Truncate content for logging with ellipsis indicator\n *\n * @param content - Content to truncate\n * @param maxLength - Maximum length (default: 500)\n * @returns Truncated string with '... [truncated]' if needed\n */\nexport function truncateForLog(content: string, maxLength: number = MAX_PREVIEW_LENGTH): string {\n if (content.length <= maxLength) {\n return content;\n }\n return `${content.substring(0, maxLength)}... [truncated]`;\n}\n","import { randomUUID } from 'crypto';\nimport fs from 'fs';\nimport path from 'path';\nimport { JobSchema } from '../types/job.js';\nimport { Result, ok, err } from '../types/result.js';\nimport { atomicWriteFileSync } from '../utils/atomic-write.js';\nimport type { Job, CreateJobParams, UpdateJobParams, JobStatus } from '../types/job.js';\n\nexport class JobService {\n private readonly jobsDir: string;\n\n constructor(dataDir?: string) {\n // Default to ~/.local/share/bluera-knowledge/jobs\n let baseDir: string;\n if (dataDir !== undefined) {\n baseDir = dataDir;\n } else {\n const homeDir = process.env['HOME'] ?? process.env['USERPROFILE'];\n if (homeDir === undefined) {\n throw new Error('HOME or USERPROFILE environment variable is required');\n }\n baseDir = path.join(homeDir, '.local/share/bluera-knowledge');\n }\n this.jobsDir = path.join(baseDir, 'jobs');\n\n // Ensure jobs directory exists\n if (!fs.existsSync(this.jobsDir)) {\n fs.mkdirSync(this.jobsDir, { recursive: true });\n }\n }\n\n /**\n * Create a new job\n */\n createJob(params: CreateJobParams): Job {\n const job: Job = {\n id: `job_${randomUUID().replace(/-/g, '').substring(0, 12)}`,\n type: params.type,\n status: 'pending',\n progress: 0,\n message: params.message ?? 
`${params.type} job created`,\n details: params.details,\n createdAt: new Date().toISOString(),\n updatedAt: new Date().toISOString(),\n };\n\n // Write job to file\n this.writeJob(job);\n\n return job;\n }\n\n /**\n * Update an existing job\n */\n updateJob(jobId: string, updates: UpdateJobParams): void {\n const job = this.getJob(jobId);\n\n if (!job) {\n throw new Error(`Job ${jobId} not found`);\n }\n\n // Merge updates\n if (updates.status !== undefined) {\n job.status = updates.status;\n }\n if (updates.progress !== undefined) {\n job.progress = updates.progress;\n }\n if (updates.message !== undefined) {\n job.message = updates.message;\n }\n if (updates.details !== undefined) {\n job.details = { ...job.details, ...updates.details };\n }\n\n job.updatedAt = new Date().toISOString();\n\n // Write updated job\n this.writeJob(job);\n }\n\n /**\n * Get a job by ID\n */\n getJob(jobId: string): Job | null {\n const jobFile = path.join(this.jobsDir, `${jobId}.json`);\n\n if (!fs.existsSync(jobFile)) {\n return null;\n }\n\n try {\n const content = fs.readFileSync(jobFile, 'utf-8');\n return JobSchema.parse(JSON.parse(content));\n } catch (error) {\n throw new Error(\n `Failed to read job ${jobId}: ${error instanceof Error ? error.message : String(error)}`\n );\n }\n }\n\n /**\n * List all jobs with optional status filter\n */\n listJobs(statusFilter?: JobStatus | JobStatus[]): Job[] {\n if (!fs.existsSync(this.jobsDir)) {\n return [];\n }\n\n const files = fs.readdirSync(this.jobsDir);\n const jobs: Job[] = [];\n\n for (const file of files) {\n if (!file.endsWith('.json') || file.endsWith('.pid')) {\n continue;\n }\n\n try {\n const content = fs.readFileSync(path.join(this.jobsDir, file), 'utf-8');\n const job = JobSchema.parse(JSON.parse(content));\n\n if (statusFilter !== undefined) {\n const filters = Array.isArray(statusFilter) ? statusFilter : [statusFilter];\n if (filters.includes(job.status)) {\n jobs.push(job);\n }\n } else {\n jobs.push(job);\n }\n } catch (error) {\n throw new Error(\n `Failed to read job file ${file}: ${error instanceof Error ? 
error.message : String(error)}`\n );\n }\n }\n\n // Sort by updated time (most recent first)\n jobs.sort((a, b) => new Date(b.updatedAt).getTime() - new Date(a.updatedAt).getTime());\n\n return jobs;\n }\n\n /**\n * List active jobs (pending or running)\n */\n listActiveJobs(): Job[] {\n return this.listJobs(['pending', 'running']);\n }\n\n /**\n * Cancel a job\n */\n cancelJob(jobId: string): Result<void> {\n const job = this.getJob(jobId);\n\n if (!job) {\n return err(new Error(`Job ${jobId} not found`));\n }\n\n if (job.status === 'completed' || job.status === 'failed') {\n return err(new Error(`Cannot cancel ${job.status} job`));\n }\n\n if (job.status === 'cancelled') {\n return ok(undefined);\n }\n\n // Update job status\n this.updateJob(jobId, {\n status: 'cancelled',\n message: 'Job cancelled by user',\n details: { cancelledAt: new Date().toISOString() },\n });\n\n // Kill worker process if it exists\n const pidFile = path.join(this.jobsDir, `${jobId}.pid`);\n if (fs.existsSync(pidFile)) {\n try {\n const pid = parseInt(fs.readFileSync(pidFile, 'utf-8'), 10);\n // Validate PID: must be positive integer > 0\n // PID 0 = sends to process group (DANGEROUS - kills terminal!)\n // Negative PIDs have special meanings in kill()\n if (!Number.isNaN(pid) && Number.isInteger(pid) && pid > 0) {\n process.kill(pid, 'SIGTERM');\n }\n } catch {\n // Process may have already exited, ignore\n }\n // Always delete the PID file, even if kill failed\n try {\n fs.unlinkSync(pidFile);\n } catch {\n // Ignore if file already deleted\n }\n }\n\n return ok(undefined);\n }\n\n /**\n * Clean up old completed/failed/cancelled jobs\n */\n cleanupOldJobs(olderThanHours: number = 24): number {\n const jobs = this.listJobs();\n const cutoffTime = Date.now() - olderThanHours * 60 * 60 * 1000;\n let cleaned = 0;\n\n for (const job of jobs) {\n if (\n (job.status === 'completed' || job.status === 'failed' || job.status === 'cancelled') &&\n new Date(job.updatedAt).getTime() < cutoffTime\n ) {\n const jobFile = path.join(this.jobsDir, `${job.id}.json`);\n try {\n fs.unlinkSync(jobFile);\n cleaned++;\n } catch (error) {\n throw new Error(\n `Failed to delete job file ${job.id}: ${error instanceof Error ? error.message : String(error)}`\n );\n }\n }\n }\n\n return cleaned;\n }\n\n /**\n * Clean up stale pending jobs that never started or got stuck\n *\n * @param olderThanHours - Consider pending jobs stale after this many hours (default 2)\n * @param options - Options for cleanup behavior\n * @param options.markAsFailed - If true, mark jobs as failed instead of deleting\n * @returns Number of jobs cleaned up or marked as failed\n */\n cleanupStalePendingJobs(\n olderThanHours: number = 2,\n options: { markAsFailed?: boolean } = {}\n ): number {\n const jobs = this.listJobs();\n const cutoffTime = Date.now() - olderThanHours * 60 * 60 * 1000;\n let cleaned = 0;\n\n for (const job of jobs) {\n if (job.status === 'pending' && new Date(job.updatedAt).getTime() < cutoffTime) {\n const jobFile = path.join(this.jobsDir, `${job.id}.json`);\n\n if (options.markAsFailed === true) {\n // Mark as failed instead of deleting\n this.updateJob(job.id, {\n status: 'failed',\n message: `Job marked as stale - pending for over ${String(olderThanHours)} hours without progress`,\n });\n } else {\n // Delete the job file\n try {\n fs.unlinkSync(jobFile);\n } catch (error) {\n throw new Error(\n `Failed to delete stale job ${job.id}: ${error instanceof Error ? 
error.message : String(error)}`\n );\n }\n }\n cleaned++;\n }\n }\n\n return cleaned;\n }\n\n /**\n * Delete a specific job\n */\n deleteJob(jobId: string): boolean {\n const jobFile = path.join(this.jobsDir, `${jobId}.json`);\n\n if (!fs.existsSync(jobFile)) {\n return false;\n }\n\n try {\n fs.unlinkSync(jobFile);\n return true;\n } catch (error) {\n throw new Error(\n `Failed to delete job ${jobId}: ${error instanceof Error ? error.message : String(error)}`\n );\n }\n }\n\n /**\n * Write job to file\n */\n private writeJob(job: Job): void {\n const jobFile = path.join(this.jobsDir, `${job.id}.json`);\n atomicWriteFileSync(jobFile, JSON.stringify(job, null, 2));\n }\n}\n","import { z } from 'zod';\n\n// ============================================================================\n// Zod Schemas\n// ============================================================================\n\nexport const JobTypeSchema = z.enum(['clone', 'index', 'crawl']);\nexport const JobStatusSchema = z.enum(['pending', 'running', 'completed', 'failed', 'cancelled']);\n\nexport const JobDetailsSchema = z.object({\n storeName: z.string().optional(),\n storeId: z.string().optional(),\n url: z.string().optional(),\n path: z.string().optional(),\n filesProcessed: z.number().optional(),\n totalFiles: z.number().optional(),\n startedAt: z.string().optional(),\n completedAt: z.string().optional(),\n cancelledAt: z.string().optional(),\n error: z.string().optional(),\n // Crawl-specific fields\n crawlInstruction: z.string().optional(),\n extractInstruction: z.string().optional(),\n maxPages: z.number().optional(),\n simple: z.boolean().optional(),\n useHeadless: z.boolean().optional(),\n pagesCrawled: z.number().optional(),\n});\n\nexport const JobSchema = z.object({\n id: z.string(),\n type: JobTypeSchema,\n status: JobStatusSchema,\n progress: z.number().min(0).max(100),\n message: z.string(),\n details: JobDetailsSchema.default({}),\n createdAt: z.string(),\n updatedAt: z.string(),\n});\n\n// ============================================================================\n// Types (inferred from schemas)\n// ============================================================================\n\nexport type JobType = z.infer<typeof JobTypeSchema>;\nexport type JobStatus = z.infer<typeof JobStatusSchema>;\nexport type JobDetails = z.infer<typeof JobDetailsSchema>;\nexport type Job = z.infer<typeof JobSchema>;\n\nexport interface CreateJobParams {\n type: JobType;\n details: JobDetails;\n message?: string;\n}\n\nexport interface UpdateJobParams {\n status?: JobStatus;\n progress?: number;\n message?: string;\n details?: Partial<JobDetails>;\n}\n","export type Result<T, E = Error> =\n | { readonly success: true; readonly data: T }\n | { readonly success: false; readonly error: E };\n\nexport function ok<T>(data: T): Result<T, never> {\n return { success: true, data };\n}\n\nexport function err<E>(error: E): Result<never, E> {\n return { success: false, error };\n}\n\nexport function isOk<T, E>(result: Result<T, E>): result is { success: true; data: T } {\n return result.success;\n}\n\nexport function isErr<T, E>(result: Result<T, E>): result is { success: false; error: E } {\n return !result.success;\n}\n\nexport function unwrap<T, E>(result: Result<T, E>): T {\n if (isOk(result)) {\n return result.data;\n }\n // Type guard ensures result has 'error' property\n if (isErr(result)) {\n if (result.error instanceof Error) {\n throw result.error;\n }\n throw new Error(String(result.error));\n }\n // This should never happen due to 
discriminated union\n throw new Error('Invalid result type');\n}\n\nexport function unwrapOr<T, E>(result: Result<T, E>, defaultValue: T): T {\n if (isOk(result)) {\n return result.data;\n }\n return defaultValue;\n}\n","import { writeFileSync, renameSync, mkdirSync } from 'node:fs';\nimport { writeFile, rename, mkdir } from 'node:fs/promises';\nimport { dirname } from 'node:path';\n\n/**\n * Atomically write content to a file.\n *\n * Writes to a temporary file first, then renames it to the target path.\n * The rename operation is atomic on POSIX systems, ensuring that the file\n * is never in a partially-written state even if the process crashes.\n *\n * @param filePath - The target file path\n * @param content - The content to write\n */\nexport async function atomicWriteFile(filePath: string, content: string): Promise<void> {\n // Ensure parent directory exists\n await mkdir(dirname(filePath), { recursive: true });\n\n const tempPath = `${filePath}.tmp.${String(Date.now())}.${String(process.pid)}`;\n await writeFile(tempPath, content, 'utf-8');\n await rename(tempPath, filePath);\n}\n\n/**\n * Synchronously and atomically write content to a file.\n *\n * Writes to a temporary file first, then renames it to the target path.\n * The rename operation is atomic on POSIX systems, ensuring that the file\n * is never in a partially-written state even if the process crashes.\n *\n * @param filePath - The target file path\n * @param content - The content to write\n */\nexport function atomicWriteFileSync(filePath: string, content: string): void {\n // Ensure parent directory exists\n mkdirSync(dirname(filePath), { recursive: true });\n\n const tempPath = `${filePath}.tmp.${String(Date.now())}.${String(process.pid)}`;\n writeFileSync(tempPath, content, 'utf-8');\n renameSync(tempPath, filePath);\n}\n","import { readFile, writeFile, mkdir, rm } from 'node:fs/promises';\nimport { join, dirname } from 'node:path';\nimport { ASTParser } from '../analysis/ast-parser.js';\nimport { CodeGraph, type GraphNode } from '../analysis/code-graph.js';\nimport { GoASTParser } from '../analysis/go-ast-parser.js';\nimport { ParserFactory } from '../analysis/parser-factory.js';\nimport { RustASTParser } from '../analysis/rust-ast-parser.js';\nimport type { PythonBridge } from '../crawl/bridge.js';\nimport type { StoreId } from '../types/brands.js';\nimport type { CacheInvalidationEvent, CacheInvalidationListener } from '../types/cache-events.js';\n\ninterface SerializedGraph {\n nodes: Array<{\n id: string;\n file: string;\n type: string;\n name: string;\n exported: boolean;\n startLine: number;\n endLine: number;\n signature?: string;\n }>;\n edges: Array<{\n from: string;\n to: string;\n type: string;\n confidence: number;\n }>;\n}\n\n/**\n * Service for building, persisting, and querying code graphs.\n * Code graphs track relationships between code elements (functions, classes, etc.)\n * for enhanced search context.\n */\nexport class CodeGraphService {\n private readonly dataDir: string;\n private readonly parser: ASTParser;\n private readonly parserFactory: ParserFactory;\n private readonly graphCache: Map<string, CodeGraph>;\n private readonly cacheListeners: Set<CacheInvalidationListener>;\n\n constructor(dataDir: string, pythonBridge?: PythonBridge) {\n this.dataDir = dataDir;\n this.parser = new ASTParser();\n this.parserFactory = new ParserFactory(pythonBridge);\n this.graphCache = new Map();\n this.cacheListeners = new Set();\n }\n\n /**\n * Subscribe to cache invalidation events.\n * Returns an 
unsubscribe function.\n */\n onCacheInvalidation(listener: CacheInvalidationListener): () => void {\n this.cacheListeners.add(listener);\n return () => {\n this.cacheListeners.delete(listener);\n };\n }\n\n /**\n * Emit a cache invalidation event to all listeners.\n */\n private emitCacheInvalidation(event: CacheInvalidationEvent): void {\n for (const listener of this.cacheListeners) {\n listener(event);\n }\n }\n\n /**\n * Build a code graph from source files.\n */\n async buildGraph(files: Array<{ path: string; content: string }>): Promise<CodeGraph> {\n const graph = new CodeGraph();\n\n for (const file of files) {\n const ext = file.path.split('.').pop() ?? '';\n if (!['ts', 'tsx', 'js', 'jsx', 'py', 'rs', 'go'].includes(ext)) continue;\n\n // Parse nodes (functions, classes, etc.) using the factory\n const nodes = await this.parserFactory.parseFile(file.path, file.content);\n graph.addNodes(nodes, file.path);\n\n // Parse imports and add edges\n if (ext === 'rs') {\n // Use RustASTParser for Rust imports\n const rustParser = new RustASTParser();\n const imports = rustParser.extractImports(file.content);\n for (const imp of imports) {\n if (!imp.isType) {\n graph.addImport(file.path, imp.source, imp.specifiers);\n }\n }\n } else if (ext === 'go') {\n // Use GoASTParser for Go imports\n const goParser = new GoASTParser();\n const imports = goParser.extractImports(file.content);\n for (const imp of imports) {\n if (!imp.isType) {\n graph.addImport(file.path, imp.source, imp.specifiers);\n }\n }\n } else if (ext !== 'py') {\n // Use ASTParser for JS/TS imports (Python imports handled by Python parser)\n const imports = this.parser.extractImports(file.content);\n for (const imp of imports) {\n if (!imp.isType) {\n graph.addImport(file.path, imp.source, imp.specifiers);\n }\n }\n }\n\n // Analyze call relationships for each function/method\n for (const node of nodes) {\n const lines = file.content.split('\\n');\n\n if (node.type === 'function') {\n // Extract the function body for call analysis\n const functionCode = lines.slice(node.startLine - 1, node.endLine).join('\\n');\n graph.analyzeCallRelationships(functionCode, file.path, node.name);\n } else if (node.type === 'class' && node.methods !== undefined) {\n // For classes, analyze each method separately\n for (const method of node.methods) {\n const methodCode = lines.slice(method.startLine - 1, method.endLine).join('\\n');\n graph.analyzeCallRelationships(methodCode, file.path, `${node.name}.${method.name}`);\n }\n }\n }\n }\n\n return graph;\n }\n\n /**\n * Save a code graph for a store.\n */\n async saveGraph(storeId: StoreId, graph: CodeGraph): Promise<void> {\n const graphPath = this.getGraphPath(storeId);\n await mkdir(dirname(graphPath), { recursive: true });\n\n const serialized = graph.toJSON();\n await writeFile(graphPath, JSON.stringify(serialized, null, 2));\n\n // Notify listeners that the graph has been updated\n this.emitCacheInvalidation({ type: 'graph-updated', storeId });\n }\n\n /**\n * Delete the code graph file for a store.\n * Silently succeeds if the file doesn't exist.\n */\n async deleteGraph(storeId: StoreId): Promise<void> {\n const graphPath = this.getGraphPath(storeId);\n await rm(graphPath, { force: true });\n this.graphCache.delete(storeId);\n\n // Notify listeners that the graph has been deleted\n this.emitCacheInvalidation({ type: 'graph-deleted', storeId });\n }\n\n /**\n * Load a code graph for a store.\n * Returns undefined if no graph exists.\n */\n async loadGraph(storeId: StoreId): 
Promise<CodeGraph | undefined> {\n // Check cache first\n const cached = this.graphCache.get(storeId);\n if (cached) return cached;\n\n const graphPath = this.getGraphPath(storeId);\n\n try {\n const content = await readFile(graphPath, 'utf-8');\n const parsed: unknown = JSON.parse(content);\n\n // Validate structure\n if (!this.isSerializedGraph(parsed)) {\n return undefined;\n }\n\n const serialized = parsed;\n const graph = new CodeGraph();\n\n // Restore nodes\n for (const node of serialized.nodes) {\n const nodeType = this.validateNodeType(node.type);\n if (!nodeType) continue;\n\n // Method nodes are added directly to the graph since they're already separate nodes\n if (nodeType === 'method') {\n const graphNode: GraphNode = {\n id: node.id,\n file: node.file,\n type: 'method',\n name: node.name,\n exported: node.exported,\n startLine: node.startLine,\n endLine: node.endLine,\n };\n if (node.signature !== undefined) {\n graphNode.signature = node.signature;\n }\n graph.addGraphNode(graphNode);\n continue;\n }\n\n const codeNode: {\n type: 'function' | 'class' | 'interface' | 'type' | 'const';\n name: string;\n exported: boolean;\n startLine: number;\n endLine: number;\n signature?: string;\n } = {\n type: nodeType,\n name: node.name,\n exported: node.exported,\n startLine: node.startLine,\n endLine: node.endLine,\n };\n if (node.signature !== undefined) {\n codeNode.signature = node.signature;\n }\n graph.addNodes([codeNode], node.file);\n }\n\n // Restore edges\n for (const edge of serialized.edges) {\n const edgeType = this.validateEdgeType(edge.type);\n if (!edgeType) continue;\n\n graph.addEdge({\n from: edge.from,\n to: edge.to,\n type: edgeType,\n confidence: edge.confidence,\n });\n }\n\n this.graphCache.set(storeId, graph);\n return graph;\n } catch {\n return undefined;\n }\n }\n\n /**\n * Get usage stats for a code element.\n */\n getUsageStats(\n graph: CodeGraph,\n filePath: string,\n symbolName: string\n ): { calledBy: number; calls: number } {\n const nodeId = `${filePath}:${symbolName}`;\n return {\n calledBy: graph.getCalledByCount(nodeId),\n calls: graph.getCallsCount(nodeId),\n };\n }\n\n /**\n * Get related code (callers and callees) for a code element.\n */\n getRelatedCode(\n graph: CodeGraph,\n filePath: string,\n symbolName: string\n ): Array<{ id: string; relationship: string }> {\n const nodeId = `${filePath}:${symbolName}`;\n const related: Array<{ id: string; relationship: string }> = [];\n\n // Get callers (incoming call edges)\n const incoming = graph.getIncomingEdges(nodeId);\n for (const edge of incoming) {\n if (edge.type === 'calls') {\n related.push({ id: edge.from, relationship: 'calls this' });\n }\n }\n\n // Get callees (outgoing call edges)\n const outgoing = graph.getEdges(nodeId);\n for (const edge of outgoing) {\n if (edge.type === 'calls') {\n related.push({ id: edge.to, relationship: 'called by this' });\n }\n }\n\n return related;\n }\n\n /**\n * Clear cached graphs.\n */\n clearCache(): void {\n this.graphCache.clear();\n }\n\n private getGraphPath(storeId: StoreId): string {\n return join(this.dataDir, 'graphs', `${storeId}.json`);\n }\n\n /**\n * Type guard for SerializedGraph structure.\n */\n private isSerializedGraph(value: unknown): value is SerializedGraph {\n if (typeof value !== 'object' || value === null) return false;\n // Use 'in' operator for property checking\n if (!('nodes' in value) || !('edges' in value)) return false;\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions -- narrowed via 'in' checks 
above\n const obj = value as { nodes: unknown; edges: unknown };\n return Array.isArray(obj.nodes) && Array.isArray(obj.edges);\n }\n\n /**\n * Type guard for valid node types.\n */\n private isValidNodeType(\n type: string\n ): type is 'function' | 'class' | 'interface' | 'type' | 'const' | 'method' {\n return ['function', 'class', 'interface', 'type', 'const', 'method'].includes(type);\n }\n\n /**\n * Validate and return a node type, or undefined if invalid.\n */\n private validateNodeType(\n type: string\n ): 'function' | 'class' | 'interface' | 'type' | 'const' | 'method' | undefined {\n if (this.isValidNodeType(type)) {\n return type;\n }\n return undefined;\n }\n\n /**\n * Type guard for valid edge types.\n */\n private isValidEdgeType(type: string): type is 'calls' | 'imports' | 'extends' | 'implements' {\n return ['calls', 'imports', 'extends', 'implements'].includes(type);\n }\n\n /**\n * Validate and return an edge type, or undefined if invalid.\n */\n private validateEdgeType(\n type: string\n ): 'calls' | 'imports' | 'extends' | 'implements' | undefined {\n if (this.isValidEdgeType(type)) {\n return type;\n }\n return undefined;\n }\n}\n","import { parse, type ParserPlugin } from '@babel/parser';\nimport traverseModule from '@babel/traverse';\nimport * as t from '@babel/types';\nimport type { NodePath } from '@babel/traverse';\n\n// Handle both ESM and CJS module formats\ntype TraverseFunction = (ast: t.File, visitor: Record<string, unknown>) => void;\nfunction getTraverse(mod: unknown): TraverseFunction {\n if (typeof mod === 'function') {\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n return mod as TraverseFunction;\n }\n if (mod !== null && typeof mod === 'object' && 'default' in mod) {\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n const withDefault = mod as { default: unknown };\n if (typeof withDefault.default === 'function') {\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n return withDefault.default as TraverseFunction;\n }\n }\n throw new Error('Invalid traverse module export');\n}\nconst traverse = getTraverse(traverseModule);\n\nexport interface CodeNode {\n type: 'function' | 'class' | 'interface' | 'type' | 'const';\n name: string;\n exported: boolean;\n async?: boolean;\n startLine: number;\n endLine: number;\n signature?: string;\n methods?: Array<{\n name: string;\n async: boolean;\n signature: string;\n startLine: number;\n endLine: number;\n }>;\n}\n\nexport interface ImportInfo {\n source: string;\n specifiers: string[];\n isType: boolean;\n}\n\nexport class ASTParser {\n parse(code: string, language: 'typescript' | 'javascript'): CodeNode[] {\n try {\n const plugins: ParserPlugin[] = ['jsx'];\n if (language === 'typescript') {\n plugins.push('typescript');\n }\n\n const ast = parse(code, {\n sourceType: 'module',\n plugins,\n });\n\n const nodes: CodeNode[] = [];\n\n traverse(ast, {\n FunctionDeclaration: (path: NodePath<t.FunctionDeclaration>) => {\n const node = path.node;\n if (!node.id) return;\n\n const exported =\n path.parent.type === 'ExportNamedDeclaration' ||\n path.parent.type === 'ExportDefaultDeclaration';\n\n nodes.push({\n type: 'function',\n name: node.id.name,\n exported,\n async: node.async,\n startLine: node.loc?.start.line ?? 0,\n endLine: node.loc?.end.line ?? 
0,\n signature: this.extractFunctionSignature(node),\n });\n },\n\n ClassDeclaration: (path: NodePath<t.ClassDeclaration>) => {\n const node = path.node;\n if (!node.id) return;\n\n const exported =\n path.parent.type === 'ExportNamedDeclaration' ||\n path.parent.type === 'ExportDefaultDeclaration';\n\n const methods: CodeNode['methods'] = [];\n\n for (const member of node.body.body) {\n if (t.isClassMethod(member) && t.isIdentifier(member.key)) {\n methods.push({\n name: member.key.name,\n async: member.async,\n signature: this.extractMethodSignature(member),\n startLine: member.loc?.start.line ?? 0,\n endLine: member.loc?.end.line ?? 0,\n });\n }\n }\n\n nodes.push({\n type: 'class',\n name: node.id.name,\n exported,\n startLine: node.loc?.start.line ?? 0,\n endLine: node.loc?.end.line ?? 0,\n methods,\n });\n },\n\n TSInterfaceDeclaration: (path: NodePath<t.TSInterfaceDeclaration>) => {\n const node = path.node;\n\n const exported = path.parent.type === 'ExportNamedDeclaration';\n\n nodes.push({\n type: 'interface',\n name: node.id.name,\n exported,\n startLine: node.loc?.start.line ?? 0,\n endLine: node.loc?.end.line ?? 0,\n });\n },\n });\n\n return nodes;\n } catch {\n // Return empty array for malformed code\n return [];\n }\n }\n\n extractImports(code: string): ImportInfo[] {\n try {\n const ast = parse(code, {\n sourceType: 'module',\n plugins: ['typescript', 'jsx'],\n });\n\n const imports: ImportInfo[] = [];\n\n traverse(ast, {\n ImportDeclaration: (path: NodePath<t.ImportDeclaration>) => {\n const node = path.node;\n const specifiers: string[] = [];\n\n for (const spec of node.specifiers) {\n if (t.isImportDefaultSpecifier(spec)) {\n specifiers.push(spec.local.name);\n } else if (t.isImportSpecifier(spec)) {\n specifiers.push(spec.local.name);\n } else if (t.isImportNamespaceSpecifier(spec)) {\n specifiers.push(spec.local.name);\n }\n }\n\n imports.push({\n source: node.source.value,\n specifiers,\n isType: node.importKind === 'type',\n });\n },\n });\n\n return imports;\n } catch {\n // Return empty array for malformed code\n return [];\n }\n }\n\n private extractFunctionSignature(node: t.FunctionDeclaration): string {\n const params = node.params\n .map((p) => {\n if (t.isIdentifier(p)) return p.name;\n return 'param';\n })\n .join(', ');\n\n return `${node.id?.name ?? 'anonymous'}(${params})`;\n }\n\n private extractMethodSignature(node: t.ClassMethod): string {\n const params = node.params\n .map((p) => {\n if (t.isIdentifier(p)) return p.name;\n return 'param';\n })\n .join(', ');\n\n const name = t.isIdentifier(node.key) ? 
node.key.name : 'method';\n return `${name}(${params})`;\n }\n}\n","import type { CodeNode } from './ast-parser.js';\n\nexport interface GraphNode {\n id: string;\n file: string;\n type: 'function' | 'class' | 'interface' | 'type' | 'const' | 'method';\n name: string;\n exported: boolean;\n startLine: number;\n endLine: number;\n signature?: string;\n}\n\nexport interface GraphEdge {\n from: string;\n to: string;\n type: 'calls' | 'imports' | 'extends' | 'implements';\n confidence: number;\n}\n\nexport class CodeGraph {\n private readonly nodes: Map<string, GraphNode> = new Map<string, GraphNode>();\n private readonly edges: Map<string, GraphEdge[]> = new Map<string, GraphEdge[]>();\n\n addNodes(nodes: CodeNode[], file: string): void {\n for (const node of nodes) {\n const id = `${file}:${node.name}`;\n\n const graphNode: GraphNode = {\n id,\n file,\n type: node.type,\n name: node.name,\n exported: node.exported,\n startLine: node.startLine,\n endLine: node.endLine,\n };\n\n if (node.signature !== undefined) {\n graphNode.signature = node.signature;\n }\n\n this.nodes.set(id, graphNode);\n\n // Initialize edges array for this node\n if (!this.edges.has(id)) {\n this.edges.set(id, []);\n }\n\n // If this is a class with methods, create separate nodes for each method\n if (node.type === 'class' && node.methods !== undefined) {\n for (const method of node.methods) {\n const methodId = `${file}:${node.name}.${method.name}`;\n\n const methodNode: GraphNode = {\n id: methodId,\n file,\n type: 'method',\n name: method.name,\n exported: node.exported, // Methods inherit export status from class\n startLine: method.startLine,\n endLine: method.endLine,\n signature: method.signature,\n };\n\n this.nodes.set(methodId, methodNode);\n\n // Initialize edges array for this method\n if (!this.edges.has(methodId)) {\n this.edges.set(methodId, []);\n }\n }\n }\n }\n }\n\n addImport(fromFile: string, toFile: string, specifiers: string[]): void {\n // Normalize the toFile path (resolve relative imports)\n const resolvedTo = this.resolveImportPath(fromFile, toFile);\n\n for (const spec of specifiers) {\n const edge: GraphEdge = {\n from: fromFile,\n to: `${resolvedTo}:${spec}`,\n type: 'imports',\n confidence: 1.0,\n };\n\n const edges = this.edges.get(fromFile) ?? [];\n edges.push(edge);\n this.edges.set(fromFile, edges);\n }\n }\n\n analyzeCallRelationships(code: string, file: string, functionName: string): void {\n const nodeId = `${file}:${functionName}`;\n\n // Simple regex-based call detection (can be enhanced with AST later)\n const callPattern = /\\b([a-zA-Z_$][a-zA-Z0-9_$]*)\\s*\\(/g;\n const calls = new Set<string>();\n\n let match;\n while ((match = callPattern.exec(code)) !== null) {\n if (match[1] !== undefined && match[1] !== '') {\n calls.add(match[1]);\n }\n }\n\n const edges = this.edges.get(nodeId) ?? [];\n\n for (const calledFunction of calls) {\n // Try to find the called function in the graph\n const targetNode = this.findNodeByName(calledFunction);\n\n if (targetNode) {\n edges.push({\n from: nodeId,\n to: targetNode.id,\n type: 'calls',\n confidence: 0.8, // Lower confidence for regex-based detection\n });\n } else {\n // Unknown function, possibly from import\n edges.push({\n from: nodeId,\n to: `unknown:${calledFunction}`,\n type: 'calls',\n confidence: 0.5,\n });\n }\n }\n\n this.edges.set(nodeId, edges);\n }\n\n getNode(id: string): GraphNode | undefined {\n return this.nodes.get(id);\n }\n\n getEdges(nodeId: string): GraphEdge[] {\n return this.edges.get(nodeId) ?? 
[];\n }\n\n /**\n * Add an edge to the graph (used when restoring from serialized data)\n */\n addEdge(edge: GraphEdge): void {\n const edges = this.edges.get(edge.from) ?? [];\n edges.push(edge);\n this.edges.set(edge.from, edges);\n }\n\n /**\n * Add a graph node directly (used when restoring from serialized data)\n */\n addGraphNode(node: GraphNode): void {\n this.nodes.set(node.id, node);\n\n // Initialize edges array for this node if it doesn't exist\n if (!this.edges.has(node.id)) {\n this.edges.set(node.id, []);\n }\n }\n\n /**\n * Get edges where this node is the target (callers of this function)\n */\n getIncomingEdges(nodeId: string): GraphEdge[] {\n const incoming: GraphEdge[] = [];\n for (const edges of this.edges.values()) {\n for (const edge of edges) {\n if (edge.to === nodeId) {\n incoming.push(edge);\n }\n }\n }\n return incoming;\n }\n\n /**\n * Count how many nodes call this node\n */\n getCalledByCount(nodeId: string): number {\n return this.getIncomingEdges(nodeId).filter((e) => e.type === 'calls').length;\n }\n\n /**\n * Count how many nodes this node calls\n */\n getCallsCount(nodeId: string): number {\n return this.getEdges(nodeId).filter((e) => e.type === 'calls').length;\n }\n\n getAllNodes(): GraphNode[] {\n return Array.from(this.nodes.values());\n }\n\n private findNodeByName(name: string): GraphNode | undefined {\n for (const node of this.nodes.values()) {\n if (node.name === name) {\n return node;\n }\n }\n return undefined;\n }\n\n private resolveImportPath(fromFile: string, importPath: string): string {\n // Simple resolution - can be enhanced\n if (importPath.startsWith('.')) {\n // Relative import\n const fromDir = fromFile.split('/').slice(0, -1).join('/');\n const parts = importPath.split('/');\n\n let resolved = fromDir;\n for (const part of parts) {\n if (part === '..') {\n resolved = resolved.split('/').slice(0, -1).join('/');\n } else if (part !== '.') {\n resolved += `/${part}`;\n }\n }\n\n return resolved.replace(/\\.js$/, '');\n }\n\n // Package import\n return importPath;\n }\n\n toJSON(): {\n nodes: GraphNode[];\n edges: Array<{ from: string; to: string; type: string; confidence: number }>;\n } {\n const allEdges: GraphEdge[] = [];\n for (const edges of this.edges.values()) {\n allEdges.push(...edges);\n }\n\n return {\n nodes: Array.from(this.nodes.values()),\n edges: allEdges.map((e) => ({\n from: e.from,\n to: e.to,\n type: e.type,\n confidence: e.confidence,\n })),\n };\n }\n}\n","/**\n * Tree-sitter infrastructure for parsing Rust and Go code.\n * Provides utilities for AST traversal, querying, and position conversion.\n *\n * NOTE: tree-sitter requires native module compilation which may not be available\n * on all platforms (e.g., macOS with newer Node versions). Use isTreeSitterAvailable()\n * to check before calling parsing functions.\n */\n\nimport type Parser from 'tree-sitter';\n\n// Lazy-loaded tree-sitter modules (native module may not be available on all platforms)\nlet TreeSitterParser: typeof Parser | null = null;\nlet GoLanguage: Parser.Language | null = null;\nlet RustLanguage: Parser.Language | null = null;\nlet _initialized = false;\nlet _available = false;\n\n/**\n * Reset internal state for testing. 
DO NOT USE IN PRODUCTION CODE.\n * @internal\n */\nexport function _resetTreeSitterState(forceUnavailable = false): void {\n TreeSitterParser = null;\n GoLanguage = null;\n RustLanguage = null;\n _initialized = forceUnavailable;\n _available = false;\n}\n\n/**\n * Check if tree-sitter native module is available on this platform.\n * Call this before using any tree-sitter parsing functions.\n */\nexport function isTreeSitterAvailable(): boolean {\n if (!_initialized) {\n try {\n // Dynamic require for native modules that may not be available\n // eslint-disable-next-line @typescript-eslint/no-require-imports, @typescript-eslint/no-unsafe-assignment -- Lazy load native module\n TreeSitterParser = require('tree-sitter');\n // eslint-disable-next-line @typescript-eslint/no-require-imports, @typescript-eslint/no-unsafe-assignment -- Lazy load native module\n GoLanguage = require('tree-sitter-go');\n // eslint-disable-next-line @typescript-eslint/no-require-imports, @typescript-eslint/no-unsafe-assignment -- Lazy load native module\n RustLanguage = require('tree-sitter-rust');\n _available = true;\n } catch {\n // Native module not available (e.g., no prebuilds for darwin-arm64)\n _available = false;\n }\n _initialized = true;\n }\n return _available;\n}\n\nexport interface TreeSitterPosition {\n row: number;\n column: number;\n}\n\nexport interface TreeSitterNode {\n type: string;\n text: string;\n startPosition: TreeSitterPosition;\n endPosition: TreeSitterPosition;\n startIndex: number;\n endIndex: number;\n childCount: number;\n namedChildCount: number;\n children: TreeSitterNode[];\n namedChildren: TreeSitterNode[];\n parent: TreeSitterNode | null;\n nextSibling: TreeSitterNode | null;\n previousSibling: TreeSitterNode | null;\n firstChild: TreeSitterNode | null;\n lastChild: TreeSitterNode | null;\n firstNamedChild: TreeSitterNode | null;\n lastNamedChild: TreeSitterNode | null;\n child(index: number): TreeSitterNode | null;\n namedChild(index: number): TreeSitterNode | null;\n childForFieldName(fieldName: string): TreeSitterNode | null;\n descendantsOfType(type: string | string[]): TreeSitterNode[];\n}\n\nexport interface TreeSitterTree {\n rootNode: TreeSitterNode;\n edit(delta: unknown): void;\n walk(): unknown;\n}\n\n/**\n * Initialize a tree-sitter parser for Rust.\n * Returns null if tree-sitter is not available.\n */\nexport function createRustParser(): Parser | null {\n if (!isTreeSitterAvailable() || TreeSitterParser === null || RustLanguage === null) {\n return null;\n }\n // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call -- Dynamic native module\n const parser: Parser = new TreeSitterParser();\n parser.setLanguage(RustLanguage);\n return parser;\n}\n\n/**\n * Parse Rust source code into an AST.\n * Returns null if tree-sitter is not available or code is malformed.\n */\nexport function parseRustCode(code: string): TreeSitterTree | null {\n try {\n const parser = createRustParser();\n if (parser === null) {\n return null;\n }\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return -- tree-sitter returns compatible type\n return parser.parse(code);\n } catch {\n // Return null for malformed code\n return null;\n }\n}\n\n/**\n * Initialize a tree-sitter parser for Go.\n * Returns null if tree-sitter is not available.\n */\nexport function createGoParser(): Parser | null {\n if (!isTreeSitterAvailable() || TreeSitterParser === null || GoLanguage === null) {\n return null;\n }\n // eslint-disable-next-line 
@typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call -- Dynamic native module\n const parser: Parser = new TreeSitterParser();\n parser.setLanguage(GoLanguage);\n return parser;\n}\n\n/**\n * Parse Go source code into an AST.\n * Returns null if tree-sitter is not available or code is malformed.\n */\nexport function parseGoCode(code: string): TreeSitterTree | null {\n try {\n const parser = createGoParser();\n if (parser === null) {\n return null;\n }\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return -- tree-sitter returns compatible type\n return parser.parse(code);\n } catch {\n // Return null for malformed code\n return null;\n }\n}\n\n/**\n * Convert tree-sitter position (0-indexed) to line number (1-indexed)\n */\nexport function positionToLineNumber(position: TreeSitterPosition): number {\n return position.row + 1;\n}\n\n/**\n * Get the text content of a node\n */\nexport function getNodeText(node: TreeSitterNode): string {\n return node.text;\n}\n\n/**\n * Get all children of a specific type\n */\nexport function getChildrenOfType(node: TreeSitterNode, type: string): TreeSitterNode[] {\n return node.children.filter((child) => child.type === type);\n}\n\n/**\n * Get the first child of a specific type\n */\nexport function getFirstChildOfType(node: TreeSitterNode, type: string): TreeSitterNode | null {\n return node.children.find((child) => child.type === type) ?? null;\n}\n\n/**\n * Get child by field name (e.g., \"name\", \"body\", \"parameters\")\n */\nexport function getChildByFieldName(\n node: TreeSitterNode,\n fieldName: string\n): TreeSitterNode | null {\n return node.childForFieldName(fieldName);\n}\n\n/**\n * Check if node has a visibility modifier (pub)\n */\nexport function hasVisibilityModifier(node: TreeSitterNode): boolean {\n return node.children.some((child) => child.type === 'visibility_modifier');\n}\n\n/**\n * Get visibility modifier text (e.g., \"pub\", \"pub(crate)\")\n */\nexport function getVisibilityModifier(node: TreeSitterNode): string | null {\n const visNode = node.children.find((child) => child.type === 'visibility_modifier');\n return visNode !== undefined ? 
visNode.text : null;\n}\n\n/**\n * Check if a function is async\n */\nexport function isAsyncFunction(node: TreeSitterNode): boolean {\n // Check for 'async' keyword in function_item or function_signature_item\n return node.children.some((child) => child.type === 'async' || child.text === 'async');\n}\n\n/**\n * Check if a function is unsafe\n */\nexport function isUnsafeFunction(node: TreeSitterNode): boolean {\n return node.children.some((child) => child.type === 'unsafe' || child.text === 'unsafe');\n}\n\n/**\n * Extract function signature including generics and parameters\n */\nexport function getFunctionSignature(node: TreeSitterNode): string {\n // Extract the full signature by getting text from name to return type\n const nameNode = getChildByFieldName(node, 'name');\n const parametersNode = getChildByFieldName(node, 'parameters');\n const returnTypeNode = getChildByFieldName(node, 'return_type');\n const typeParametersNode = getChildByFieldName(node, 'type_parameters');\n\n if (nameNode === null) {\n return '';\n }\n\n let signature = nameNode.text;\n\n // Add type parameters (generics)\n if (typeParametersNode !== null) {\n signature += typeParametersNode.text;\n }\n\n // Add parameters\n if (parametersNode !== null) {\n signature += parametersNode.text;\n }\n\n // Add return type\n if (returnTypeNode !== null) {\n signature += ` ${returnTypeNode.text}`;\n }\n\n return signature;\n}\n\n/**\n * Query nodes of specific type from the tree\n * @param tree The tree-sitter tree\n * @param nodeType The type of nodes to find (e.g., 'function_item', 'struct_item')\n * @returns Array of matching nodes\n */\nexport function queryNodesByType(\n tree: TreeSitterTree,\n nodeType: string | string[]\n): TreeSitterNode[] {\n const types = Array.isArray(nodeType) ? 
nodeType : [nodeType];\n return tree.rootNode.descendantsOfType(types);\n}\n\n/**\n * Extract use statement import path\n */\nexport function extractImportPath(useNode: TreeSitterNode): string {\n // Get the use_declaration argument\n const argumentNode = getChildByFieldName(useNode, 'argument');\n if (argumentNode === null) {\n return '';\n }\n return argumentNode.text;\n}\n","import {\n parseGoCode,\n queryNodesByType,\n positionToLineNumber,\n getChildByFieldName,\n getFunctionSignature,\n getFirstChildOfType,\n type TreeSitterNode,\n type TreeSitterTree,\n} from './tree-sitter-parser.js';\nimport type { CodeNode, ImportInfo } from './ast-parser.js';\n\n/**\n * Parser for Go code using tree-sitter\n * Extracts functions, methods, structs, interfaces, types, constants, and imports\n */\nexport class GoASTParser {\n /**\n * Parse Go code into CodeNode array\n * @param code Go source code\n * @param filePath File path for error context\n * @returns Array of CodeNode objects representing Go constructs\n */\n parse(code: string, _filePath: string): CodeNode[] {\n try {\n const tree = parseGoCode(code);\n if (tree === null) {\n // Malformed code - return empty array\n return [];\n }\n\n const nodes: CodeNode[] = [];\n\n // Parse functions\n const functions = this.parseFunctions(tree);\n nodes.push(...functions);\n\n // Parse structs\n const structs = this.parseStructs(tree);\n nodes.push(...structs);\n\n // Parse interfaces\n const interfaces = this.parseInterfaces(tree);\n nodes.push(...interfaces);\n\n // Parse type aliases\n const types = this.parseTypeAliases(tree);\n nodes.push(...types);\n\n // Parse constants and variables\n const constants = this.parseConstants(tree);\n nodes.push(...constants);\n\n // Parse methods and attach to structs\n this.parseMethods(tree, nodes);\n\n return nodes;\n } catch {\n // Return empty array for any parsing errors\n return [];\n }\n }\n\n /**\n * Extract imports from Go code\n * @param code Go source code\n * @returns Array of ImportInfo objects\n */\n extractImports(code: string): ImportInfo[] {\n try {\n const tree = parseGoCode(code);\n if (tree === null) {\n return [];\n }\n\n const imports: ImportInfo[] = [];\n const importDecls = queryNodesByType(tree, 'import_declaration');\n\n for (const importDecl of importDecls) {\n const importSpecs = importDecl.descendantsOfType('import_spec');\n\n for (const spec of importSpecs) {\n const pathNode = getChildByFieldName(spec, 'path');\n if (pathNode === null) {\n continue;\n }\n\n // Extract string content from interpreted_string_literal\n const stringContent = pathNode.descendantsOfType('interpreted_string_literal_content')[0];\n const path =\n stringContent !== undefined ? 
stringContent.text : pathNode.text.replace(/\"/g, '');\n\n if (path !== '') {\n imports.push({\n source: path,\n specifiers: [],\n isType: false,\n });\n }\n }\n }\n\n return imports;\n } catch {\n return [];\n }\n }\n\n /**\n * Parse function declarations\n */\n private parseFunctions(tree: TreeSitterTree): CodeNode[] {\n const functionNodes = queryNodesByType(tree, 'function_declaration');\n const nodes: CodeNode[] = [];\n\n for (const fnNode of functionNodes) {\n const nameNode = getChildByFieldName(fnNode, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const exported = this.isExported(name);\n const startLine = positionToLineNumber(fnNode.startPosition);\n const endLine = positionToLineNumber(fnNode.endPosition);\n const signature = getFunctionSignature(fnNode);\n\n nodes.push({\n type: 'function',\n name,\n exported,\n async: false,\n startLine,\n endLine,\n signature,\n });\n }\n\n return nodes;\n }\n\n /**\n * Parse struct definitions\n */\n private parseStructs(tree: TreeSitterTree): CodeNode[] {\n const typeDecls = queryNodesByType(tree, 'type_declaration');\n const nodes: CodeNode[] = [];\n\n for (const typeDecl of typeDecls) {\n // Get type_spec child node\n const typeSpec = getFirstChildOfType(typeDecl, 'type_spec');\n if (typeSpec === null) {\n continue;\n }\n\n const nameNode = getChildByFieldName(typeSpec, 'name');\n const typeNode = getChildByFieldName(typeSpec, 'type');\n\n if (nameNode === null || typeNode === null) {\n continue;\n }\n\n // Check if it's a struct type\n if (typeNode.type !== 'struct_type') {\n continue;\n }\n\n const name = nameNode.text;\n const exported = this.isExported(name);\n const startLine = positionToLineNumber(typeDecl.startPosition);\n const endLine = positionToLineNumber(typeDecl.endPosition);\n\n nodes.push({\n type: 'class',\n name,\n exported,\n startLine,\n endLine,\n signature: name,\n methods: [],\n });\n }\n\n return nodes;\n }\n\n /**\n * Parse interface definitions\n */\n private parseInterfaces(tree: TreeSitterTree): CodeNode[] {\n const typeDecls = queryNodesByType(tree, 'type_declaration');\n const nodes: CodeNode[] = [];\n\n for (const typeDecl of typeDecls) {\n const typeSpec = getFirstChildOfType(typeDecl, 'type_spec');\n if (typeSpec === null) {\n continue;\n }\n\n const nameNode = getChildByFieldName(typeSpec, 'name');\n const typeNode = getChildByFieldName(typeSpec, 'type');\n\n if (nameNode === null || typeNode === null) {\n continue;\n }\n\n // Check if it's an interface type\n if (typeNode.type !== 'interface_type') {\n continue;\n }\n\n const name = nameNode.text;\n const exported = this.isExported(name);\n const startLine = positionToLineNumber(typeDecl.startPosition);\n const endLine = positionToLineNumber(typeDecl.endPosition);\n\n // Extract interface methods\n const methods = this.extractInterfaceMethods(typeNode);\n\n nodes.push({\n type: 'interface',\n name,\n exported,\n startLine,\n endLine,\n signature: name,\n methods,\n });\n }\n\n return nodes;\n }\n\n /**\n * Parse type aliases\n */\n private parseTypeAliases(tree: TreeSitterTree): CodeNode[] {\n const typeDecls = queryNodesByType(tree, 'type_declaration');\n const nodes: CodeNode[] = [];\n\n for (const typeDecl of typeDecls) {\n const typeSpec = getFirstChildOfType(typeDecl, 'type_spec');\n if (typeSpec === null) {\n continue;\n }\n\n const nameNode = getChildByFieldName(typeSpec, 'name');\n const typeNode = getChildByFieldName(typeSpec, 'type');\n\n if (nameNode === null || typeNode === null) {\n continue;\n }\n\n // 
Skip struct and interface types (handled by other methods)\n if (typeNode.type === 'struct_type' || typeNode.type === 'interface_type') {\n continue;\n }\n\n const name = nameNode.text;\n const exported = this.isExported(name);\n const startLine = positionToLineNumber(typeDecl.startPosition);\n const endLine = positionToLineNumber(typeDecl.endPosition);\n const signature = `${name} = ${typeNode.text}`;\n\n nodes.push({\n type: 'type',\n name,\n exported,\n startLine,\n endLine,\n signature,\n });\n }\n\n return nodes;\n }\n\n /**\n * Parse constants and variables\n */\n private parseConstants(tree: TreeSitterTree): CodeNode[] {\n const nodes: CodeNode[] = [];\n\n // Parse const declarations\n const constDecls = queryNodesByType(tree, 'const_declaration');\n for (const constDecl of constDecls) {\n const specs = constDecl.descendantsOfType('const_spec');\n for (const spec of specs) {\n const nameNode = getChildByFieldName(spec, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const exported = this.isExported(name);\n const startLine = positionToLineNumber(spec.startPosition);\n const endLine = positionToLineNumber(spec.endPosition);\n\n const typeNode = getChildByFieldName(spec, 'type');\n const signature = typeNode !== null ? `${name}: ${typeNode.text}` : name;\n\n nodes.push({\n type: 'const',\n name,\n exported,\n startLine,\n endLine,\n signature,\n });\n }\n }\n\n // Parse var declarations\n const varDecls = queryNodesByType(tree, 'var_declaration');\n for (const varDecl of varDecls) {\n const specs = varDecl.descendantsOfType('var_spec');\n for (const spec of specs) {\n const nameNode = getChildByFieldName(spec, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const exported = this.isExported(name);\n const startLine = positionToLineNumber(spec.startPosition);\n const endLine = positionToLineNumber(spec.endPosition);\n\n const typeNode = getChildByFieldName(spec, 'type');\n const signature = typeNode !== null ? 
`${name}: ${typeNode.text}` : name;\n\n nodes.push({\n type: 'const',\n name,\n exported,\n startLine,\n endLine,\n signature,\n });\n }\n }\n\n return nodes;\n }\n\n /**\n * Parse methods and attach to corresponding structs\n */\n private parseMethods(tree: TreeSitterTree, nodes: CodeNode[]): void {\n const methodNodes = queryNodesByType(tree, 'method_declaration');\n\n for (const methodNode of methodNodes) {\n const receiverType = this.getReceiverType(methodNode);\n if (receiverType === null) {\n continue;\n }\n\n const nameNode = getChildByFieldName(methodNode, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const signature = getFunctionSignature(methodNode);\n const startLine = positionToLineNumber(methodNode.startPosition);\n const endLine = positionToLineNumber(methodNode.endPosition);\n\n // Find the corresponding struct and attach method\n const structNode = nodes.find((node) => node.type === 'class' && node.name === receiverType);\n\n if (structNode?.methods !== undefined) {\n structNode.methods.push({\n name,\n async: false,\n signature,\n startLine,\n endLine,\n });\n }\n }\n }\n\n /**\n * Extract methods from interface definition\n */\n private extractInterfaceMethods(interfaceNode: TreeSitterNode): Array<{\n name: string;\n async: boolean;\n signature: string;\n startLine: number;\n endLine: number;\n }> {\n const methods: Array<{\n name: string;\n async: boolean;\n signature: string;\n startLine: number;\n endLine: number;\n }> = [];\n\n const methodElems = interfaceNode.descendantsOfType('method_elem');\n\n for (const methodElem of methodElems) {\n const nameNode = getChildByFieldName(methodElem, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const signature = getFunctionSignature(methodElem);\n const startLine = positionToLineNumber(methodElem.startPosition);\n const endLine = positionToLineNumber(methodElem.endPosition);\n\n methods.push({\n name,\n async: false,\n signature,\n startLine,\n endLine,\n });\n }\n\n return methods;\n }\n\n /**\n * Get the receiver type name for a method\n */\n private getReceiverType(methodNode: TreeSitterNode): string | null {\n const receiverNode = getChildByFieldName(methodNode, 'receiver');\n if (receiverNode === null) {\n return null;\n }\n\n const paramDecl = getFirstChildOfType(receiverNode, 'parameter_declaration');\n if (paramDecl === null) {\n return null;\n }\n\n const typeNode = getChildByFieldName(paramDecl, 'type');\n if (typeNode === null) {\n return null;\n }\n\n // Handle pointer receivers (*Type)\n if (typeNode.type === 'pointer_type') {\n const innerType = typeNode.children.find((child) => child.type === 'type_identifier');\n return innerType !== undefined ? 
innerType.text : null;\n }\n\n // Handle value receivers (Type)\n if (typeNode.type === 'type_identifier') {\n return typeNode.text;\n }\n\n return null;\n }\n\n /**\n * Check if a name is exported (starts with uppercase letter)\n */\n private isExported(name: string): boolean {\n if (name.length === 0) {\n return false;\n }\n const firstChar = name[0];\n if (firstChar === undefined) {\n return false;\n }\n return firstChar === firstChar.toUpperCase();\n }\n}\n","import path from 'node:path';\nimport { AdapterRegistry } from './adapter-registry.js';\nimport { ASTParser, type CodeNode } from './ast-parser.js';\nimport { GoASTParser } from './go-ast-parser.js';\nimport { PythonASTParser } from './python-ast-parser.js';\nimport { RustASTParser } from './rust-ast-parser.js';\nimport type { PythonBridge } from '../crawl/bridge.js';\n\nexport class ParserFactory {\n constructor(private readonly pythonBridge?: PythonBridge) {}\n\n async parseFile(filePath: string, code: string): Promise<CodeNode[]> {\n const ext = path.extname(filePath);\n\n if (['.ts', '.tsx'].includes(ext)) {\n const parser = new ASTParser();\n return parser.parse(code, 'typescript');\n }\n\n if (['.js', '.jsx'].includes(ext)) {\n const parser = new ASTParser();\n return parser.parse(code, 'javascript');\n }\n\n if (ext === '.py') {\n if (!this.pythonBridge) {\n throw new Error('Python bridge not available for parsing Python files');\n }\n const parser = new PythonASTParser(this.pythonBridge);\n return parser.parse(code, filePath);\n }\n\n if (ext === '.rs') {\n const parser = new RustASTParser();\n return parser.parse(code, filePath);\n }\n\n if (ext === '.go') {\n const parser = new GoASTParser();\n return parser.parse(code, filePath);\n }\n\n // Check for registered language adapters\n const registry = AdapterRegistry.getInstance();\n const adapter = registry.getByExtension(ext);\n if (adapter !== undefined) {\n return adapter.parse(code, filePath);\n }\n\n return [];\n }\n}\n","import type { CodeNode } from './ast-parser.js';\nimport type { PythonBridge, ParsePythonResult } from '../crawl/bridge.js';\n\nexport class PythonASTParser {\n constructor(private readonly bridge: PythonBridge) {}\n\n async parse(code: string, filePath: string): Promise<CodeNode[]> {\n const result: ParsePythonResult = await this.bridge.parsePython(code, filePath);\n\n return result.nodes.map((node) => {\n const codeNode: CodeNode = {\n type: node.type,\n name: node.name,\n exported: node.exported,\n startLine: node.startLine,\n endLine: node.endLine,\n };\n\n if (node.async !== undefined) {\n codeNode.async = node.async;\n }\n\n if (node.signature !== undefined) {\n codeNode.signature = node.signature;\n }\n\n if (node.methods !== undefined) {\n codeNode.methods = node.methods;\n }\n\n return codeNode;\n });\n }\n}\n","import {\n parseRustCode,\n queryNodesByType,\n positionToLineNumber,\n getChildByFieldName,\n hasVisibilityModifier,\n isAsyncFunction,\n getFunctionSignature,\n extractImportPath,\n type TreeSitterNode,\n type TreeSitterTree,\n} from './tree-sitter-parser.js';\nimport type { CodeNode, ImportInfo } from './ast-parser.js';\n\n/**\n * Parser for Rust code using tree-sitter\n * Extracts functions, structs, traits, types, constants, and imports\n */\nexport class RustASTParser {\n /**\n * Parse Rust code into CodeNode array\n * @param code Rust source code\n * @param filePath File path for error context\n * @returns Array of CodeNode objects representing Rust constructs\n */\n parse(code: string, _filePath: string): CodeNode[] {\n try 
{\n const tree = parseRustCode(code);\n if (tree === null) {\n // Malformed code - return empty array\n return [];\n }\n\n const nodes: CodeNode[] = [];\n\n // Parse functions\n const functions = this.parseFunctions(tree);\n nodes.push(...functions);\n\n // Parse structs\n const structs = this.parseStructs(tree);\n nodes.push(...structs);\n\n // Parse traits\n const traits = this.parseTraits(tree);\n nodes.push(...traits);\n\n // Parse type aliases\n const types = this.parseTypeAliases(tree);\n nodes.push(...types);\n\n // Parse constants and statics\n const constants = this.parseConstants(tree);\n nodes.push(...constants);\n\n // Parse impl blocks and attach methods to structs\n this.parseImplBlocks(tree, nodes);\n\n return nodes;\n } catch {\n // Return empty array for any parsing errors\n return [];\n }\n }\n\n /**\n * Extract imports from Rust code\n * @param code Rust source code\n * @returns Array of ImportInfo objects\n */\n extractImports(code: string): ImportInfo[] {\n try {\n const tree = parseRustCode(code);\n if (tree === null) {\n return [];\n }\n\n const useDeclarations = queryNodesByType(tree, 'use_declaration');\n const imports: ImportInfo[] = [];\n\n for (const useNode of useDeclarations) {\n const importPath = extractImportPath(useNode);\n if (importPath === '') {\n continue;\n }\n\n // Parse the import path to extract module and specifiers\n const { source, specifiers } = this.parseImportPath(importPath);\n\n imports.push({\n source,\n specifiers,\n isType: false, // Rust doesn't distinguish type-only imports at syntax level\n });\n }\n\n return imports;\n } catch {\n return [];\n }\n }\n\n /**\n * Parse function declarations (excluding impl block methods)\n */\n private parseFunctions(tree: TreeSitterTree): CodeNode[] {\n const functionNodes = queryNodesByType(tree, 'function_item');\n const nodes: CodeNode[] = [];\n\n for (const fnNode of functionNodes) {\n // Skip functions inside impl blocks - they'll be handled as methods\n if (this.isInsideImplBlock(fnNode)) {\n continue;\n }\n\n const nameNode = getChildByFieldName(fnNode, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const exported = hasVisibilityModifier(fnNode);\n const async = isAsyncFunction(fnNode);\n const startLine = positionToLineNumber(fnNode.startPosition);\n const endLine = positionToLineNumber(fnNode.endPosition);\n const signature = getFunctionSignature(fnNode);\n\n nodes.push({\n type: 'function',\n name,\n exported,\n async,\n startLine,\n endLine,\n signature,\n });\n }\n\n return nodes;\n }\n\n /**\n * Check if a node is inside an impl block\n */\n private isInsideImplBlock(node: TreeSitterNode): boolean {\n let current = node.parent;\n while (current !== null) {\n if (current.type === 'impl_item') {\n return true;\n }\n current = current.parent;\n }\n return false;\n }\n\n /**\n * Parse struct definitions\n */\n private parseStructs(tree: TreeSitterTree): CodeNode[] {\n const structNodes = queryNodesByType(tree, 'struct_item');\n const nodes: CodeNode[] = [];\n\n for (const structNode of structNodes) {\n const nameNode = getChildByFieldName(structNode, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const exported = hasVisibilityModifier(structNode);\n const startLine = positionToLineNumber(structNode.startPosition);\n const endLine = positionToLineNumber(structNode.endPosition);\n\n // Get type parameters (generics) if present\n const typeParamsNode = getChildByFieldName(structNode, 'type_parameters');\n const 
signature = typeParamsNode !== null ? `${name}${typeParamsNode.text}` : name;\n\n nodes.push({\n type: 'class',\n name,\n exported,\n startLine,\n endLine,\n signature,\n methods: [], // Will be populated by parseImplBlocks\n });\n }\n\n return nodes;\n }\n\n /**\n * Parse trait definitions\n */\n private parseTraits(tree: TreeSitterTree): CodeNode[] {\n const traitNodes = queryNodesByType(tree, 'trait_item');\n const nodes: CodeNode[] = [];\n\n for (const traitNode of traitNodes) {\n const nameNode = getChildByFieldName(traitNode, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const exported = hasVisibilityModifier(traitNode);\n const startLine = positionToLineNumber(traitNode.startPosition);\n const endLine = positionToLineNumber(traitNode.endPosition);\n\n // Get type parameters (generics) if present\n const typeParamsNode = getChildByFieldName(traitNode, 'type_parameters');\n const signature = typeParamsNode !== null ? `${name}${typeParamsNode.text}` : name;\n\n // Extract trait methods\n const methods = this.extractTraitMethods(traitNode);\n\n nodes.push({\n type: 'interface',\n name,\n exported,\n startLine,\n endLine,\n signature,\n methods,\n });\n }\n\n return nodes;\n }\n\n /**\n * Parse type aliases\n */\n private parseTypeAliases(tree: TreeSitterTree): CodeNode[] {\n const typeNodes = queryNodesByType(tree, 'type_item');\n const nodes: CodeNode[] = [];\n\n for (const typeNode of typeNodes) {\n const nameNode = getChildByFieldName(typeNode, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const exported = hasVisibilityModifier(typeNode);\n const startLine = positionToLineNumber(typeNode.startPosition);\n const endLine = positionToLineNumber(typeNode.endPosition);\n\n // Get the full type alias definition\n const valueNode = getChildByFieldName(typeNode, 'type');\n const signature = valueNode !== null ? `${name} = ${valueNode.text}` : name;\n\n nodes.push({\n type: 'type',\n name,\n exported,\n startLine,\n endLine,\n signature,\n });\n }\n\n return nodes;\n }\n\n /**\n * Parse constants and statics\n */\n private parseConstants(tree: TreeSitterTree): CodeNode[] {\n const constNodes = queryNodesByType(tree, ['const_item', 'static_item']);\n const nodes: CodeNode[] = [];\n\n for (const constNode of constNodes) {\n const nameNode = getChildByFieldName(constNode, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const exported = hasVisibilityModifier(constNode);\n const startLine = positionToLineNumber(constNode.startPosition);\n const endLine = positionToLineNumber(constNode.endPosition);\n\n // Get type annotation\n const typeNode = getChildByFieldName(constNode, 'type');\n const signature = typeNode !== null ? 
`${name}: ${typeNode.text}` : name;\n\n nodes.push({\n type: 'const',\n name,\n exported,\n startLine,\n endLine,\n signature,\n });\n }\n\n return nodes;\n }\n\n /**\n * Parse impl blocks and attach methods to corresponding structs\n */\n private parseImplBlocks(tree: TreeSitterTree, nodes: CodeNode[]): void {\n const implNodes = queryNodesByType(tree, 'impl_item');\n\n for (const implNode of implNodes) {\n // Get the type being implemented\n const typeNode = getChildByFieldName(implNode, 'type');\n if (typeNode === null) {\n continue;\n }\n\n const typeName = typeNode.text;\n\n // Extract methods from impl block\n const methods = this.extractImplMethods(implNode);\n\n // Find the corresponding struct and attach methods\n const structNode = nodes.find((node) => node.type === 'class' && node.name === typeName);\n\n if (structNode?.methods !== undefined) {\n structNode.methods.push(...methods);\n }\n }\n }\n\n /**\n * Extract methods from trait definition\n */\n private extractTraitMethods(traitNode: TreeSitterNode): Array<{\n name: string;\n async: boolean;\n signature: string;\n startLine: number;\n endLine: number;\n }> {\n const methods: Array<{\n name: string;\n async: boolean;\n signature: string;\n startLine: number;\n endLine: number;\n }> = [];\n\n // Get declaration_list (trait body)\n const bodyNode = getChildByFieldName(traitNode, 'body');\n if (bodyNode === null) {\n return methods;\n }\n\n // Find all function_signature_item nodes (trait method declarations)\n const functionSignatures = bodyNode.descendantsOfType('function_signature_item');\n\n for (const fnSigNode of functionSignatures) {\n const nameNode = getChildByFieldName(fnSigNode, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const async = isAsyncFunction(fnSigNode);\n const signature = getFunctionSignature(fnSigNode);\n const startLine = positionToLineNumber(fnSigNode.startPosition);\n const endLine = positionToLineNumber(fnSigNode.endPosition);\n\n methods.push({\n name,\n async,\n signature,\n startLine,\n endLine,\n });\n }\n\n return methods;\n }\n\n /**\n * Extract methods from impl block\n */\n private extractImplMethods(implNode: TreeSitterNode): Array<{\n name: string;\n async: boolean;\n signature: string;\n startLine: number;\n endLine: number;\n }> {\n const methods: Array<{\n name: string;\n async: boolean;\n signature: string;\n startLine: number;\n endLine: number;\n }> = [];\n\n // Get declaration_list (impl body)\n const bodyNode = getChildByFieldName(implNode, 'body');\n if (bodyNode === null) {\n return methods;\n }\n\n // Find all function_item nodes (impl methods)\n const functionItems = bodyNode.descendantsOfType('function_item');\n\n for (const fnNode of functionItems) {\n const nameNode = getChildByFieldName(fnNode, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const async = isAsyncFunction(fnNode);\n const signature = getFunctionSignature(fnNode);\n const startLine = positionToLineNumber(fnNode.startPosition);\n const endLine = positionToLineNumber(fnNode.endPosition);\n\n methods.push({\n name,\n async,\n signature,\n startLine,\n endLine,\n });\n }\n\n return methods;\n }\n\n /**\n * Parse import path into source and specifiers\n * Examples:\n * - \"std::collections::HashMap\" -> { source: \"std::collections\", specifiers: [\"HashMap\"] }\n * - \"crate::utils::*\" -> { source: \"crate::utils\", specifiers: [\"*\"] }\n * - \"super::Type\" -> { source: \"super\", specifiers: [\"Type\"] }\n */\n private 
parseImportPath(importPath: string): { source: string; specifiers: string[] } {\n // Remove whitespace\n const path = importPath.trim();\n\n // Handle glob imports (use std::io::*)\n if (path.includes('::*')) {\n const source = path.replace('::*', '');\n return { source, specifiers: ['*'] };\n }\n\n // Handle scoped imports: use std::io::{Read, Write}\n const scopedMatch = path.match(/^(.+)::\\{(.+)\\}$/);\n if (scopedMatch !== null) {\n const source = scopedMatch[1] ?? '';\n const specifiersStr = scopedMatch[2] ?? '';\n const specifiers = specifiersStr.split(',').map((s) => s.trim());\n return { source, specifiers };\n }\n\n // Handle simple imports: use std::collections::HashMap\n const parts = path.split('::');\n if (parts.length > 1) {\n const specifiers = [parts[parts.length - 1] ?? ''];\n const source = parts.slice(0, -1).join('::');\n return { source, specifiers };\n }\n\n // Single item import\n return { source: '', specifiers: [path] };\n }\n}\n","import { readFile, access } from 'node:fs/promises';\nimport { homedir } from 'node:os';\nimport { isAbsolute, join, resolve } from 'node:path';\nimport { ProjectRootService } from './project-root.service.js';\nimport { DEFAULT_CONFIG } from '../types/config.js';\nimport { atomicWriteFile } from '../utils/atomic-write.js';\nimport { deepMerge } from '../utils/deep-merge.js';\nimport type { AppConfig } from '../types/config.js';\n\n/** Default config path relative to project root */\nconst DEFAULT_CONFIG_PATH = '.bluera/bluera-knowledge/config.json';\n\n/**\n * Check if a file exists\n */\nasync function fileExists(path: string): Promise<boolean> {\n try {\n await access(path);\n return true;\n } catch {\n return false;\n }\n}\n\nexport class ConfigService {\n private readonly configPath: string;\n private readonly dataDir: string;\n private readonly projectRoot: string;\n private config: AppConfig | null = null;\n\n constructor(configPath?: string, dataDir?: string, projectRoot?: string) {\n // Resolve project root using hierarchical detection\n this.projectRoot = projectRoot ?? ProjectRootService.resolve();\n\n // Resolve configPath - per-repo by default\n // Explicit paths are resolved against projectRoot (handles ~ and relative paths)\n if (configPath !== undefined && configPath !== '') {\n this.configPath = this.expandPath(configPath, this.projectRoot);\n } else {\n this.configPath = join(this.projectRoot, DEFAULT_CONFIG_PATH);\n }\n\n // Resolve dataDir - per-repo by default\n // Explicit paths are resolved against projectRoot (handles ~ and relative paths)\n if (dataDir !== undefined && dataDir !== '') {\n this.dataDir = this.expandPath(dataDir, this.projectRoot);\n } else {\n this.dataDir = this.expandPath(DEFAULT_CONFIG.dataDir, this.projectRoot);\n }\n }\n\n /**\n * Get the resolved project root directory.\n */\n resolveProjectRoot(): string {\n return this.projectRoot;\n }\n\n async load(): Promise<AppConfig> {\n if (this.config !== null) {\n return this.config;\n }\n\n const exists = await fileExists(this.configPath);\n if (!exists) {\n // First run - create config file with defaults\n this.config = { ...DEFAULT_CONFIG };\n await this.save(this.config);\n return this.config;\n }\n\n // File exists - load it (throws on corruption per CLAUDE.md \"fail early\")\n const content = await readFile(this.configPath, 'utf-8');\n try {\n this.config = deepMerge(DEFAULT_CONFIG, JSON.parse(content));\n } catch (error) {\n throw new Error(\n `Failed to parse config file at ${this.configPath}: ${error instanceof Error ? 
error.message : String(error)}`\n );\n }\n\n return this.config;\n }\n\n async save(config: AppConfig): Promise<void> {\n await atomicWriteFile(this.configPath, JSON.stringify(config, null, 2));\n this.config = config;\n }\n\n resolveDataDir(): string {\n return this.dataDir;\n }\n\n resolveConfigPath(): string {\n return this.configPath;\n }\n\n private expandPath(path: string, baseDir: string): string {\n // Expand ~ to home directory\n if (path.startsWith('~')) {\n return path.replace('~', homedir());\n }\n // Resolve relative paths against base directory (not process.cwd())\n // Uses isAbsolute() for cross-platform compatibility (Windows paths like C:\\data)\n if (!isAbsolute(path)) {\n return resolve(baseDir, path);\n }\n // Return absolute paths as-is\n return path;\n }\n}\n","export interface EmbeddingConfig {\n readonly model: string;\n readonly batchSize: number;\n // Note: dimensions is fixed at 384 (determined by all-MiniLM-L6-v2 model)\n}\n\nexport interface IndexingConfig {\n readonly concurrency: number;\n readonly chunkSize: number;\n readonly chunkOverlap: number;\n readonly ignorePatterns: readonly string[];\n}\n\nexport interface SearchConfig {\n readonly defaultMode: 'vector' | 'fts' | 'hybrid';\n readonly defaultLimit: number;\n}\n\nexport interface CrawlConfig {\n readonly userAgent: string;\n readonly timeout: number;\n readonly maxConcurrency: number;\n}\n\nexport interface ServerConfig {\n readonly port: number;\n readonly host: string;\n}\n\nexport interface AppConfig {\n readonly version: number;\n readonly dataDir: string;\n readonly embedding: EmbeddingConfig;\n readonly indexing: IndexingConfig;\n readonly search: SearchConfig;\n readonly crawl: CrawlConfig;\n readonly server: ServerConfig;\n}\n\nexport const DEFAULT_CONFIG: AppConfig = {\n version: 1,\n dataDir: '.bluera/bluera-knowledge/data',\n embedding: {\n model: 'Xenova/all-MiniLM-L6-v2',\n batchSize: 32,\n },\n indexing: {\n concurrency: 4,\n chunkSize: 1000,\n chunkOverlap: 150,\n ignorePatterns: ['node_modules/**', '.git/**', '*.min.js', '*.map'],\n },\n search: {\n defaultMode: 'hybrid',\n defaultLimit: 10,\n },\n crawl: {\n userAgent: 'BlueraKnowledge/1.0',\n timeout: 30000,\n maxConcurrency: 3,\n },\n server: {\n port: 3847,\n host: '127.0.0.1',\n },\n};\n","/**\n * Deep merge utility for config objects.\n *\n * Recursively merges overrides into defaults:\n * - Objects: recursively merge nested properties\n * - Arrays: replace entirely (don't concat)\n * - Primitives/null/undefined: use override value when defined\n */\n\n/**\n * Check if a value is a plain object (not null, array, Date, etc.)\n */\nfunction isPlainObject(value: unknown): value is Record<string, unknown> {\n return (\n typeof value === 'object' && value !== null && !Array.isArray(value) && !(value instanceof Date)\n );\n}\n\n/**\n * Deep merge two objects, with overrides taking precedence.\n *\n * Accepts `unknown` as the second parameter to work with JSON.parse() output\n * without requiring type assertions at the call site.\n *\n * @param defaults - The base object with default values (typed)\n * @param overrides - Object with values to override (can be unknown from JSON.parse)\n * @returns A new object with merged values, typed as the defaults type\n *\n * @example\n * ```typescript\n * const defaults = {\n * search: { mode: 'hybrid', limit: 10, rrf: { k: 40 } }\n * };\n * const overrides = JSON.parse('{\"search\": {\"mode\": \"vector\"}}');\n * const result = deepMerge(defaults, overrides);\n * // { search: { mode: 'vector', 
limit: 10, rrf: { k: 40 } } }\n * ```\n */\nexport function deepMerge<T extends object>(defaults: T, overrides: unknown): T {\n // If overrides is not a plain object, return defaults unchanged\n if (!isPlainObject(overrides)) {\n return { ...defaults };\n }\n\n // Use internal helper that works with Record types\n // Type assertions unavoidable here: we need to bridge generic T to Record<string, unknown>\n // for iteration while preserving the return type. This is safe because we spread defaults\n // and only add/replace properties that exist in overrides.\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n const defaultsRecord = defaults as T & Record<string, unknown>;\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n return deepMergeRecords(defaultsRecord, overrides) as T;\n}\n\n/**\n * Internal implementation that works with Record types.\n * Separated to satisfy TypeScript's type system without assertions.\n */\nfunction deepMergeRecords(\n defaults: Record<string, unknown>,\n overrides: Record<string, unknown>\n): Record<string, unknown> {\n const result: Record<string, unknown> = { ...defaults };\n\n for (const key of Object.keys(overrides)) {\n const defaultValue = defaults[key];\n const overrideValue = overrides[key];\n\n // Skip undefined overrides (treat as \"not specified\")\n if (overrideValue === undefined) {\n continue;\n }\n\n // If both values are plain objects, recursively merge\n if (isPlainObject(defaultValue) && isPlainObject(overrideValue)) {\n result[key] = deepMergeRecords(defaultValue, overrideValue);\n } else {\n // Arrays, primitives, null, Date, etc. - use override directly\n result[key] = overrideValue;\n }\n }\n\n return result;\n}\n","import { readFile, writeFile, access } from 'node:fs/promises';\nimport { join } from 'node:path';\n\n/**\n * Required .gitignore patterns for Bluera Knowledge\n *\n * These patterns ensure:\n * - The .bluera/ data directory (vector DB, cloned repos) is ignored\n * - The .bluera/ logs directory is ignored\n * - Config files are NOT ignored (can be committed for team sharing):\n * - stores.config.json (store definitions)\n * - config.json (app configuration)\n * - skill-activation.json (skill activation preferences)\n *\n * IMPORTANT: Git ignores children of ignored directories. To un-ignore a nested\n * file, you must first un-ignore each parent directory in the path. The order is:\n * 1. Ignore .bluera/ (everything ignored by default)\n * 2. Un-ignore .bluera/ itself (allow traversing into it)\n * 3. Un-ignore .bluera/bluera-knowledge/ (allow traversing deeper)\n * 4. Un-ignore the specific files we want tracked\n * 5. 
Re-ignore .bluera/bluera-knowledge/data/ and logs/ (keep untracked)\n */\nconst REQUIRED_PATTERNS = [\n '.bluera/',\n '!.bluera/',\n '!.bluera/bluera-knowledge/',\n '!.bluera/bluera-knowledge/stores.config.json',\n '!.bluera/bluera-knowledge/config.json',\n '!.bluera/bluera-knowledge/skill-activation.json',\n '.bluera/bluera-knowledge/data/',\n '.bluera/bluera-knowledge/logs/',\n];\n\n/**\n * Header comment for the gitignore section\n */\nconst SECTION_HEADER = `\n# Bluera Knowledge\n# Config files (stores.config.json, config.json, skill-activation.json) can be committed\n# Data directory (vector DB, cloned repos) and logs are not committed\n`;\n\n/**\n * Check if a file exists\n */\nasync function fileExists(path: string): Promise<boolean> {\n try {\n await access(path);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Service for managing .gitignore patterns for Bluera Knowledge.\n *\n * When stores are created, this service ensures the project's .gitignore\n * is updated to:\n * - Ignore the .bluera/ data directory (not committed)\n * - Allow committing .bluera/bluera-knowledge/stores.config.json (for team sharing)\n */\nexport class GitignoreService {\n private readonly gitignorePath: string;\n\n constructor(projectRoot: string) {\n this.gitignorePath = join(projectRoot, '.gitignore');\n }\n\n /**\n * Check if all required patterns are present in .gitignore\n */\n async hasRequiredPatterns(): Promise<boolean> {\n const exists = await fileExists(this.gitignorePath);\n if (!exists) {\n return false;\n }\n\n const content = await readFile(this.gitignorePath, 'utf-8');\n const lines = content.split('\\n').map((l) => l.trim());\n\n for (const pattern of REQUIRED_PATTERNS) {\n if (!lines.includes(pattern)) {\n return false;\n }\n }\n\n return true;\n }\n\n /**\n * Ensure required .gitignore patterns are present.\n *\n * - Creates .gitignore if it doesn't exist\n * - Appends missing patterns if .gitignore exists\n * - Does nothing if all patterns are already present\n *\n * @returns Object with updated flag and descriptive message\n */\n async ensureGitignorePatterns(): Promise<{ updated: boolean; message: string }> {\n const exists = await fileExists(this.gitignorePath);\n\n if (!exists) {\n // Create new .gitignore with our patterns\n const content = `${SECTION_HEADER.trim()}\\n${REQUIRED_PATTERNS.join('\\n')}\\n`;\n await writeFile(this.gitignorePath, content);\n return {\n updated: true,\n message: 'Created .gitignore with Bluera Knowledge patterns',\n };\n }\n\n // Read existing content\n const existingContent = await readFile(this.gitignorePath, 'utf-8');\n const lines = existingContent.split('\\n').map((l) => l.trim());\n\n // Find missing patterns\n const missingPatterns = REQUIRED_PATTERNS.filter((pattern) => !lines.includes(pattern));\n\n if (missingPatterns.length === 0) {\n return {\n updated: false,\n message: 'All Bluera Knowledge patterns already present in .gitignore',\n };\n }\n\n // Append missing patterns\n let newContent = existingContent;\n if (!newContent.endsWith('\\n')) {\n newContent += '\\n';\n }\n\n newContent += SECTION_HEADER;\n newContent += `${missingPatterns.join('\\n')}\\n`;\n\n await writeFile(this.gitignorePath, newContent);\n\n return {\n updated: true,\n message: `Updated .gitignore with ${String(missingPatterns.length)} Bluera Knowledge pattern(s)`,\n };\n }\n\n /**\n * Get the path to the .gitignore file\n */\n getGitignorePath(): string {\n return this.gitignorePath;\n }\n}\n","import { createHash } from 'node:crypto';\nimport { readFile, 
readdir } from 'node:fs/promises';\nimport { join, extname, basename, relative } from 'node:path';\nimport { ChunkingService } from './chunking.service.js';\nimport { DriftService } from './drift.service.js';\nimport { createLogger } from '../logging/index.js';\nimport { createDocumentId } from '../types/brands.js';\nimport { ok, err } from '../types/result.js';\nimport { parseIgnorePatternsForScanning } from '../utils/ignore-patterns.js';\nimport type { CodeGraphService } from './code-graph.service.js';\nimport type { ManifestService } from './manifest.service.js';\nimport type { EmbeddingEngine } from '../db/embeddings.js';\nimport type { LanceStore } from '../db/lance.js';\nimport type { DocumentId } from '../types/brands.js';\nimport type { Document } from '../types/document.js';\nimport type { TypedStoreManifest, TypedFileState } from '../types/manifest.js';\nimport type { ProgressCallback } from '../types/progress.js';\nimport type { Result } from '../types/result.js';\nimport type { Store, FileStore, RepoStore } from '../types/store.js';\n\nconst logger = createLogger('index-service');\n\ninterface IndexResult {\n filesIndexed: number;\n chunksCreated: number;\n timeMs: number;\n}\n\ninterface IndexOptions {\n chunkSize?: number;\n chunkOverlap?: number;\n codeGraphService?: CodeGraphService;\n concurrency?: number;\n manifestService?: ManifestService;\n ignorePatterns?: readonly string[];\n}\n\ninterface IncrementalIndexResult extends IndexResult {\n filesAdded: number;\n filesModified: number;\n filesDeleted: number;\n filesUnchanged: number;\n}\n\nconst TEXT_EXTENSIONS = new Set([\n // Text/docs\n '.txt',\n '.md',\n '.rst',\n '.adoc',\n // JavaScript/TypeScript\n '.js',\n '.ts',\n '.jsx',\n '.tsx',\n '.mjs',\n '.cjs',\n '.mts',\n '.cts',\n // Config/data\n '.json',\n '.yaml',\n '.yml',\n '.toml',\n '.ini',\n '.env',\n // Web\n '.html',\n '.htm',\n '.css',\n '.scss',\n '.sass',\n '.less',\n '.vue',\n '.svelte',\n // Python\n '.py',\n '.pyi',\n '.pyx',\n // Ruby\n '.rb',\n '.erb',\n '.rake',\n // Go\n '.go',\n // Rust\n '.rs',\n // Java/JVM\n '.java',\n '.kt',\n '.kts',\n '.scala',\n '.groovy',\n '.gradle',\n // C/C++\n '.c',\n '.cpp',\n '.cc',\n '.cxx',\n '.h',\n '.hpp',\n '.hxx',\n // C#/.NET\n '.cs',\n '.fs',\n '.vb',\n // Swift/Objective-C\n '.swift',\n '.m',\n '.mm',\n // PHP\n '.php',\n // Shell\n '.sh',\n '.bash',\n '.zsh',\n '.fish',\n '.ps1',\n '.psm1',\n // SQL\n '.sql',\n // Other\n '.xml',\n '.graphql',\n '.gql',\n '.proto',\n '.lua',\n '.r',\n '.R',\n '.jl',\n '.ex',\n '.exs',\n '.erl',\n '.hrl',\n '.clj',\n '.cljs',\n '.cljc',\n '.hs',\n '.elm',\n '.dart',\n '.pl',\n '.pm',\n '.tcl',\n '.vim',\n '.zig',\n '.nim',\n '.v',\n '.tf',\n '.hcl',\n '.dockerfile',\n '.makefile',\n '.cmake',\n]);\n\nexport class IndexService {\n private readonly lanceStore: LanceStore;\n private readonly embeddingEngine: EmbeddingEngine;\n private readonly chunker: ChunkingService;\n private readonly codeGraphService: CodeGraphService | undefined;\n private readonly manifestService: ManifestService | undefined;\n private readonly driftService: DriftService;\n private readonly concurrency: number;\n private readonly ignoreDirs: Set<string>;\n private readonly ignoreFilePatterns: Array<(filename: string) => boolean>;\n\n constructor(\n lanceStore: LanceStore,\n embeddingEngine: EmbeddingEngine,\n options: IndexOptions = {}\n ) {\n this.lanceStore = lanceStore;\n this.embeddingEngine = embeddingEngine;\n this.chunker = new ChunkingService({\n chunkSize: options.chunkSize ?? 
1000,\n chunkOverlap: options.chunkOverlap ?? 150,\n });\n this.codeGraphService = options.codeGraphService;\n this.manifestService = options.manifestService;\n this.driftService = new DriftService();\n this.concurrency = options.concurrency ?? 4;\n\n const parsed = parseIgnorePatternsForScanning(options.ignorePatterns ?? []);\n this.ignoreDirs = parsed.dirs;\n this.ignoreFilePatterns = parsed.fileMatchers;\n }\n\n async indexStore(store: Store, onProgress?: ProgressCallback): Promise<Result<IndexResult>> {\n logger.info(\n {\n storeId: store.id,\n storeName: store.name,\n storeType: store.type,\n },\n 'Starting store indexing'\n );\n\n try {\n if (store.type === 'file' || store.type === 'repo') {\n return await this.indexFileStore(store, onProgress);\n }\n\n logger.error(\n { storeId: store.id, storeType: store.type },\n 'Unsupported store type for indexing'\n );\n return err(new Error(`Indexing not supported for store type: ${store.type}`));\n } catch (error) {\n logger.error(\n {\n storeId: store.id,\n error: error instanceof Error ? error.message : String(error),\n },\n 'Store indexing failed'\n );\n return err(error instanceof Error ? error : new Error(String(error)));\n }\n }\n\n /**\n * Incrementally index a store, only processing changed files.\n * Requires manifestService to be configured.\n *\n * @param store - The store to index\n * @param onProgress - Optional progress callback\n * @returns Result with incremental index statistics\n */\n async indexStoreIncremental(\n store: Store,\n onProgress?: ProgressCallback\n ): Promise<Result<IncrementalIndexResult>> {\n if (this.manifestService === undefined) {\n return err(new Error('ManifestService required for incremental indexing'));\n }\n\n if (store.type !== 'file' && store.type !== 'repo') {\n return err(new Error(`Incremental indexing not supported for store type: ${store.type}`));\n }\n\n logger.info(\n {\n storeId: store.id,\n storeName: store.name,\n storeType: store.type,\n },\n 'Starting incremental store indexing'\n );\n\n const startTime = Date.now();\n\n try {\n // Load manifest\n const manifest = await this.manifestService.load(store.id);\n\n // Scan current files\n const filePaths = await this.scanDirectory(store.path);\n const currentFiles = await Promise.all(\n filePaths.map((path) => this.driftService.getFileState(path))\n );\n\n // Detect changes\n const drift = await this.driftService.detectChanges(manifest, currentFiles);\n\n logger.debug(\n {\n storeId: store.id,\n added: drift.added.length,\n modified: drift.modified.length,\n deleted: drift.deleted.length,\n unchanged: drift.unchanged.length,\n },\n 'Drift detection complete'\n );\n\n // Collect document IDs to delete (from modified and deleted files)\n const documentIdsToDelete: DocumentId[] = [];\n for (const path of [...drift.modified, ...drift.deleted]) {\n const fileState = manifest.files[path];\n if (fileState !== undefined) {\n documentIdsToDelete.push(...fileState.documentIds);\n }\n }\n\n // Delete old documents\n if (documentIdsToDelete.length > 0) {\n await this.lanceStore.deleteDocuments(store.id, documentIdsToDelete);\n logger.debug(\n { storeId: store.id, count: documentIdsToDelete.length },\n 'Deleted old documents'\n );\n }\n\n // Process new and modified files\n const filesToProcess = [...drift.added, ...drift.modified];\n const totalFiles = filesToProcess.length;\n\n onProgress?.({\n type: 'start',\n current: 0,\n total: totalFiles,\n message: `Processing ${String(totalFiles)} changed files`,\n });\n\n const documents: Document[] = [];\n 
const newManifestFiles: Record<string, TypedFileState> = {};\n let filesProcessed = 0;\n\n // Keep unchanged files in manifest\n for (const path of drift.unchanged) {\n const existingState = manifest.files[path];\n if (existingState !== undefined) {\n newManifestFiles[path] = existingState;\n }\n }\n\n // Process changed files in parallel batches\n for (let i = 0; i < filesToProcess.length; i += this.concurrency) {\n const batch = filesToProcess.slice(i, i + this.concurrency);\n\n const batchResults = await Promise.all(\n batch.map(async (filePath) => {\n try {\n const result = await this.processFile(filePath, store);\n const documentIds = result.documents.map((d) => d.id);\n\n // Create file state for manifest\n const { state } = await this.driftService.createFileState(filePath, documentIds);\n\n return {\n filePath,\n documents: result.documents,\n fileState: state,\n };\n } catch (error) {\n logger.warn(\n { filePath, error: error instanceof Error ? error.message : String(error) },\n 'Failed to process file during incremental indexing, skipping'\n );\n return null;\n }\n })\n );\n\n // Collect results (skip null entries from failed files)\n for (const result of batchResults) {\n if (result !== null) {\n documents.push(...result.documents);\n newManifestFiles[result.filePath] = result.fileState;\n }\n }\n\n filesProcessed += batch.length;\n\n onProgress?.({\n type: 'progress',\n current: filesProcessed,\n total: totalFiles,\n message: `Processed ${String(filesProcessed)}/${String(totalFiles)} files`,\n });\n }\n\n // Add new documents\n if (documents.length > 0) {\n await this.lanceStore.addDocuments(store.id, documents);\n }\n\n // Recreate FTS index if any changes occurred (deletions or additions)\n if (documentIdsToDelete.length > 0 || documents.length > 0) {\n await this.lanceStore.createFtsIndex(store.id);\n }\n\n // Rebuild code graph if service available and source files changed\n if (this.codeGraphService) {\n const sourceExtensions = ['.ts', '.tsx', '.js', '.jsx', '.py', '.rs', '.go'];\n const hasSourceChanges =\n filesToProcess.some((p) => sourceExtensions.includes(extname(p).toLowerCase())) ||\n drift.deleted.some((p) => sourceExtensions.includes(extname(p).toLowerCase()));\n\n if (hasSourceChanges) {\n // Rebuild full graph from all current source files (simpler than incremental updates)\n const allSourceFiles: Array<{ path: string; content: string }> = [];\n const allPaths = [...drift.unchanged, ...filesToProcess];\n\n for (const filePath of allPaths) {\n const ext = extname(filePath).toLowerCase();\n if (sourceExtensions.includes(ext)) {\n try {\n const content = await readFile(filePath, 'utf-8');\n allSourceFiles.push({ path: filePath, content });\n } catch {\n // File may have been deleted between scan and read\n }\n }\n }\n\n if (allSourceFiles.length > 0) {\n const graph = await this.codeGraphService.buildGraph(allSourceFiles);\n await this.codeGraphService.saveGraph(store.id, graph);\n logger.debug(\n { storeId: store.id, sourceFiles: allSourceFiles.length },\n 'Rebuilt code graph during incremental indexing'\n );\n } else {\n // No source files remain - delete stale graph\n await this.codeGraphService.deleteGraph(store.id);\n logger.debug(\n { storeId: store.id },\n 'Deleted stale code graph (no source files remain)'\n );\n }\n }\n }\n\n // Save updated manifest\n const updatedManifest: TypedStoreManifest = {\n version: 1,\n storeId: store.id,\n indexedAt: new Date().toISOString(),\n files: newManifestFiles,\n };\n await this.manifestService.save(updatedManifest);\n\n 
onProgress?.({\n type: 'complete',\n current: totalFiles,\n total: totalFiles,\n message: 'Incremental indexing complete',\n });\n\n const timeMs = Date.now() - startTime;\n\n logger.info(\n {\n storeId: store.id,\n storeName: store.name,\n filesAdded: drift.added.length,\n filesModified: drift.modified.length,\n filesDeleted: drift.deleted.length,\n filesUnchanged: drift.unchanged.length,\n chunksCreated: documents.length,\n timeMs,\n },\n 'Incremental indexing complete'\n );\n\n return ok({\n filesIndexed: filesToProcess.length,\n chunksCreated: documents.length,\n timeMs,\n filesAdded: drift.added.length,\n filesModified: drift.modified.length,\n filesDeleted: drift.deleted.length,\n filesUnchanged: drift.unchanged.length,\n });\n } catch (error) {\n logger.error(\n {\n storeId: store.id,\n error: error instanceof Error ? error.message : String(error),\n },\n 'Incremental indexing failed'\n );\n return err(error instanceof Error ? error : new Error(String(error)));\n }\n }\n\n private async indexFileStore(\n store: FileStore | RepoStore,\n onProgress?: ProgressCallback\n ): Promise<Result<IndexResult>> {\n const startTime = Date.now();\n\n // Clear existing documents before full re-index to prevent duplicates\n await this.lanceStore.clearAllDocuments(store.id);\n\n // Clear stale manifest to ensure fresh incremental indexing later\n if (this.manifestService) {\n await this.manifestService.delete(store.id);\n }\n\n const files = await this.scanDirectory(store.path);\n const documents: Document[] = [];\n let filesProcessed = 0;\n\n logger.debug(\n {\n storeId: store.id,\n path: store.path,\n fileCount: files.length,\n concurrency: this.concurrency,\n },\n 'Files scanned for indexing'\n );\n\n // Collect source files for code graph building\n const sourceFiles: Array<{ path: string; content: string }> = [];\n\n // Emit start event\n onProgress?.({\n type: 'start',\n current: 0,\n total: files.length,\n message: 'Starting index',\n });\n\n // Process files in parallel batches\n for (let i = 0; i < files.length; i += this.concurrency) {\n const batch = files.slice(i, i + this.concurrency);\n\n const batchResults = await Promise.all(\n batch.map(async (filePath) => {\n try {\n return await this.processFile(filePath, store);\n } catch (error) {\n logger.warn(\n { filePath, error: error instanceof Error ? 
error.message : String(error) },\n 'Failed to process file, skipping'\n );\n return { documents: [], sourceFile: undefined };\n }\n })\n );\n\n // Collect results from batch\n for (const result of batchResults) {\n documents.push(...result.documents);\n if (result.sourceFile !== undefined) {\n sourceFiles.push(result.sourceFile);\n }\n }\n\n filesProcessed += batch.length;\n\n // Emit progress event after each batch\n onProgress?.({\n type: 'progress',\n current: filesProcessed,\n total: files.length,\n message: `Indexed ${String(filesProcessed)}/${String(files.length)} files`,\n });\n }\n\n if (documents.length > 0) {\n await this.lanceStore.addDocuments(store.id, documents);\n // Create FTS index for full-text search\n await this.lanceStore.createFtsIndex(store.id);\n }\n\n // Build and save code graph if service is available and we have source files\n if (this.codeGraphService && sourceFiles.length > 0) {\n const graph = await this.codeGraphService.buildGraph(sourceFiles);\n await this.codeGraphService.saveGraph(store.id, graph);\n } else if (this.codeGraphService) {\n // No source files - delete any stale graph\n await this.codeGraphService.deleteGraph(store.id);\n }\n\n // Emit complete event\n onProgress?.({\n type: 'complete',\n current: files.length,\n total: files.length,\n message: 'Indexing complete',\n });\n\n const timeMs = Date.now() - startTime;\n\n logger.info(\n {\n storeId: store.id,\n storeName: store.name,\n filesIndexed: filesProcessed,\n chunksCreated: documents.length,\n sourceFilesForGraph: sourceFiles.length,\n timeMs,\n },\n 'Store indexing complete'\n );\n\n return ok({\n filesIndexed: filesProcessed,\n chunksCreated: documents.length,\n timeMs,\n });\n }\n\n /**\n * Process a single file: read, chunk, embed, and return documents.\n * Extracted for parallel processing.\n */\n private async processFile(\n filePath: string,\n store: FileStore | RepoStore\n ): Promise<{\n documents: Document[];\n sourceFile: { path: string; content: string } | undefined;\n }> {\n const content = await readFile(filePath, 'utf-8');\n const fileHash = createHash('md5').update(content).digest('hex');\n const chunks = this.chunker.chunk(content, filePath);\n\n // Use relative path for document ID to ensure consistency across machines\n // and prevent collisions between files with identical content\n const relativePath = relative(store.path, filePath);\n const pathHash = createHash('md5').update(relativePath).digest('hex').slice(0, 8);\n\n const ext = extname(filePath).toLowerCase();\n const fileName = basename(filePath).toLowerCase();\n const fileType = this.classifyFileType(ext, fileName, filePath);\n\n // Track source file for code graph (supports JS/TS, Python, Rust, Go)\n const sourceFile = ['.ts', '.tsx', '.js', '.jsx', '.py', '.rs', '.go'].includes(ext)\n ? 
{ path: filePath, content }\n : undefined;\n\n // Skip files with no chunks (empty files)\n if (chunks.length === 0) {\n return { documents: [], sourceFile };\n }\n\n // Batch embed all chunks from this file\n const chunkContents = chunks.map((c) => c.content);\n const vectors = await this.embeddingEngine.embedBatch(chunkContents);\n\n const documents: Document[] = [];\n for (let i = 0; i < chunks.length; i++) {\n const chunk = chunks[i];\n const vector = vectors[i];\n\n // Fail fast if chunk/vector mismatch (should never happen)\n if (chunk === undefined || vector === undefined) {\n throw new Error(\n `Chunk/vector mismatch at index ${String(i)}: chunk=${String(chunk !== undefined)}, vector=${String(vector !== undefined)}`\n );\n }\n\n // Include pathHash in ID to prevent collisions when files have identical content\n const chunkId =\n chunks.length > 1\n ? `${store.id}-${pathHash}-${fileHash}-${String(chunk.chunkIndex)}`\n : `${store.id}-${pathHash}-${fileHash}`;\n\n documents.push({\n id: createDocumentId(chunkId),\n content: chunk.content,\n vector,\n metadata: {\n type: chunks.length > 1 ? 'chunk' : 'file',\n storeId: store.id,\n path: filePath,\n indexedAt: new Date().toISOString(),\n fileHash,\n chunkIndex: chunk.chunkIndex,\n totalChunks: chunk.totalChunks,\n fileType,\n sectionHeader: chunk.sectionHeader,\n functionName: chunk.functionName,\n hasDocComments: /\\/\\*\\*[\\s\\S]*?\\*\\//.test(chunk.content),\n docSummary: chunk.docSummary,\n },\n });\n }\n\n return { documents, sourceFile };\n }\n\n private async scanDirectory(dir: string): Promise<string[]> {\n const files: string[] = [];\n const entries = await readdir(dir, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = join(dir, entry.name);\n\n if (entry.isDirectory()) {\n // Skip directories matching ignore patterns\n if (!this.ignoreDirs.has(entry.name)) {\n files.push(...(await this.scanDirectory(fullPath)));\n }\n } else if (entry.isFile()) {\n // Skip files matching ignore patterns (e.g., *.min.js, *.map)\n const shouldIgnore = this.ignoreFilePatterns.some((matcher) => matcher(entry.name));\n if (shouldIgnore) {\n continue;\n }\n\n const ext = extname(entry.name).toLowerCase();\n if (TEXT_EXTENSIONS.has(ext)) {\n files.push(fullPath);\n }\n }\n }\n\n return files;\n }\n\n /**\n * Classify file type for ranking purposes.\n * Documentation files rank higher than source code for documentation queries.\n * Phase 4: Enhanced to detect internal implementation files.\n */\n private classifyFileType(ext: string, fileName: string, filePath: string): string {\n // Documentation files\n if (ext === '.md') {\n // CHANGELOG files get their own category for intent-based penalties\n if (fileName === 'changelog.md' || fileName === 'changes.md' || /changelog/i.test(fileName)) {\n return 'changelog';\n }\n // Special doc files get highest priority\n if (['readme.md', 'migration.md', 'contributing.md'].includes(fileName)) {\n return 'documentation-primary';\n }\n // Check path for documentation indicators\n if (/\\/(docs?|documentation|guides?|tutorials?|articles?)\\//i.test(filePath)) {\n return 'documentation';\n }\n return 'documentation';\n }\n\n // Test files\n if (/\\.(test|spec)\\.[jt]sx?$/.test(fileName) || /\\/__tests__\\//.test(filePath)) {\n return 'test';\n }\n\n // Example files\n if (/\\/examples?\\//.test(filePath) || fileName.includes('example')) {\n return 'example';\n }\n\n // Config files\n if (/^(tsconfig|package|\\.eslint|\\.prettier|vite\\.config|next\\.config)/i.test(fileName)) {\n 
return 'config';\n }\n\n // Source code - distinguish between internal and public-facing\n if (['.ts', '.tsx', '.js', '.jsx', '.py', '.go', '.rs', '.java'].includes(ext)) {\n // Internal implementation files (monorepo packages, lib internals)\n // These patterns indicate internal/core implementation code\n if (this.isInternalImplementation(filePath, fileName)) {\n return 'source-internal';\n }\n return 'source';\n }\n\n return 'other';\n }\n\n /**\n * Detect if a source file is internal implementation code.\n * Internal code should rank lower than public-facing APIs and docs.\n */\n private isInternalImplementation(filePath: string, fileName: string): boolean {\n const pathLower = filePath.toLowerCase();\n const fileNameLower = fileName.toLowerCase();\n\n // Monorepo internal packages (like Vue's packages/*/src/)\n if (/\\/packages\\/[^/]+\\/src\\//.test(pathLower)) {\n // Exception: index files often export public APIs\n if (fileNameLower === 'index.ts' || fileNameLower === 'index.js') {\n return false;\n }\n return true;\n }\n\n // Internal/core directories\n if (/\\/(internal|lib\\/core|core\\/src|_internal|private)\\//.test(pathLower)) {\n return true;\n }\n\n // Compiler/transform internals (often not what users want)\n if (\n /\\/(compiler|transforms?|parse|codegen)\\//.test(pathLower) &&\n !fileNameLower.includes('readme') &&\n !fileNameLower.includes('index')\n ) {\n return true;\n }\n\n return false;\n }\n}\n\n/**\n * Classify web content type based on URL patterns and page title.\n * Used for ranking boosts similar to local file classification.\n */\nexport function classifyWebContentType(url: string, title?: string): string {\n const urlLower = url.toLowerCase();\n const titleLower = (title ?? '').toLowerCase();\n\n // API reference documentation → documentation-primary (1.8x boost)\n if (\n /\\/api[-/]?(ref|reference|docs?)?\\//i.test(urlLower) ||\n /api\\s*(reference|documentation)/i.test(titleLower)\n ) {\n return 'documentation-primary';\n }\n\n // Getting started / tutorials → documentation-primary (1.8x boost)\n if (\n /\\/(getting[-_]?started|quickstart|tutorial|setup)\\b/i.test(urlLower) ||\n /(getting started|quickstart|tutorial)/i.test(titleLower)\n ) {\n return 'documentation-primary';\n }\n\n // General docs paths → documentation (1.5x boost)\n if (/\\/(docs?|documentation|reference|learn|manual|guide)/i.test(urlLower)) {\n return 'documentation';\n }\n\n // Examples and demos → example (1.4x boost)\n if (/\\/(examples?|demos?|samples?|cookbook)/i.test(urlLower)) {\n return 'example';\n }\n\n // Changelog → changelog (special handling in intent boosts)\n if (/changelog|release[-_]?notes/i.test(urlLower)) {\n return 'changelog';\n }\n\n // Blog posts → lower priority\n if (/\\/blog\\//i.test(urlLower)) {\n return 'other';\n }\n\n // Web content without specific path indicators is treated as documentation\n return 'documentation';\n}\n","export interface ChunkConfig {\n chunkSize: number;\n chunkOverlap: number;\n}\n\nexport interface Chunk {\n content: string;\n chunkIndex: number;\n totalChunks: number;\n startOffset: number;\n endOffset: number;\n /** Section header if this chunk starts a markdown section */\n sectionHeader?: string | undefined;\n /** Function or class name if this chunk contains a code declaration */\n functionName?: string | undefined;\n /** JSDoc/comment summary extracted from this chunk */\n docSummary?: string | undefined;\n}\n\n/**\n * Preset configurations for different content types.\n * Code uses smaller chunks for precise symbol 
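A short usage sketch for the exported `classifyWebContentType` helper above; the expected return values follow from the URL patterns shown, and the import path is an assumption for illustration.

```typescript
import { classifyWebContentType } from './index.service.js'; // path assumed

classifyWebContentType('https://example.com/docs/api-reference/');   // 'documentation-primary'
classifyWebContentType('https://example.com/guide/getting-started'); // 'documentation-primary'
classifyWebContentType('https://example.com/examples/todo-app');     // 'example'
classifyWebContentType('https://example.com/changelog');             // 'changelog'
classifyWebContentType('https://example.com/blog/release-post');     // 'other'
```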
matching.\n * Web/docs use larger chunks to preserve prose context.\n */\nconst CHUNK_PRESETS = {\n code: { chunkSize: 768, chunkOverlap: 100 },\n web: { chunkSize: 1200, chunkOverlap: 200 },\n docs: { chunkSize: 1200, chunkOverlap: 200 },\n} as const;\n\nexport type ContentType = keyof typeof CHUNK_PRESETS;\n\nexport class ChunkingService {\n private readonly chunkSize: number;\n private readonly chunkOverlap: number;\n\n constructor(config: ChunkConfig) {\n if (config.chunkOverlap >= config.chunkSize) {\n throw new Error(\n `chunkOverlap (${String(config.chunkOverlap)}) must be less than chunkSize (${String(config.chunkSize)})`\n );\n }\n this.chunkSize = config.chunkSize;\n this.chunkOverlap = config.chunkOverlap;\n }\n\n /**\n * Create a ChunkingService with preset configuration for a content type.\n * - 'code': Smaller chunks (768/100) for precise code symbol matching\n * - 'web': Larger chunks (1200/200) for web prose content\n * - 'docs': Larger chunks (1200/200) for documentation\n */\n static forContentType(type: ContentType): ChunkingService {\n return new ChunkingService(CHUNK_PRESETS[type]);\n }\n\n /**\n * Chunk text content. Uses semantic chunking for Markdown and code files,\n * falling back to sliding window for other content.\n */\n chunk(text: string, filePath?: string): Chunk[] {\n // Use semantic chunking for Markdown files\n if (filePath !== undefined && filePath !== '' && /\\.md$/i.test(filePath)) {\n return this.chunkMarkdown(text);\n }\n\n // Use semantic chunking for TypeScript/JavaScript files\n if (filePath !== undefined && filePath !== '' && /\\.(ts|tsx|js|jsx)$/i.test(filePath)) {\n return this.chunkCode(text);\n }\n\n return this.chunkSlidingWindow(text);\n }\n\n /**\n * Semantic chunking for Markdown files.\n * Splits on section headers to keep related content together.\n */\n private chunkMarkdown(text: string): Chunk[] {\n // Match markdown headers (# through ####)\n const headerRegex = /^(#{1,4})\\s+(.+)$/gm;\n const sections: Array<{ header: string; content: string; startOffset: number }> = [];\n\n let lastIndex = 0;\n let lastHeader = '';\n let match: RegExpExecArray | null;\n\n while ((match = headerRegex.exec(text)) !== null) {\n // Save previous section\n if (match.index > lastIndex) {\n const content = text.slice(lastIndex, match.index).trim();\n if (content) {\n sections.push({\n header: lastHeader,\n content: content,\n startOffset: lastIndex,\n });\n }\n }\n lastHeader = match[2] ?? 
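A hedged usage sketch for the `ChunkingService` presets and dispatcher above; the import path and sample Markdown are assumptions.

```typescript
import { ChunkingService } from './chunking.service.js'; // path assumed

// 'code' preset: 768-char chunks with 100-char overlap; 'web'/'docs': 1200/200.
const codeChunker = ChunkingService.forContentType('code');
const docChunker = ChunkingService.forContentType('docs');

// Markdown goes through header-aware semantic chunking; other text falls back
// to the sliding window.
const chunks = docChunker.chunk(
  '# Title\n\nSome prose...\n\n## Section\n\nMore prose.',
  'README.md'
);
for (const c of chunks) {
  console.log(c.chunkIndex, c.sectionHeader, c.content.length);
}

// Invalid configs fail fast: overlap must be strictly less than chunk size.
// new ChunkingService({ chunkSize: 100, chunkOverlap: 100 }); // throws
```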
'';\n lastIndex = match.index;\n }\n\n // Add final section\n const finalContent = text.slice(lastIndex).trim();\n if (finalContent) {\n sections.push({\n header: lastHeader,\n content: finalContent,\n startOffset: lastIndex,\n });\n }\n\n // If no sections found, fall back to sliding window\n if (sections.length === 0) {\n return this.chunkSlidingWindow(text);\n }\n\n // Convert sections to chunks, splitting large sections if needed\n const chunks: Chunk[] = [];\n\n for (const section of sections) {\n if (section.content.length <= this.chunkSize) {\n // Section fits in one chunk\n chunks.push({\n content: section.content,\n chunkIndex: chunks.length,\n totalChunks: 0,\n startOffset: section.startOffset,\n endOffset: section.startOffset + section.content.length,\n sectionHeader: section.header || undefined,\n });\n } else {\n // Split large section using sliding window\n const sectionChunks = this.chunkSlidingWindow(section.content);\n for (const subChunk of sectionChunks) {\n chunks.push({\n ...subChunk,\n chunkIndex: chunks.length,\n startOffset: section.startOffset + subChunk.startOffset,\n endOffset: section.startOffset + subChunk.endOffset,\n sectionHeader: section.header || undefined,\n });\n }\n }\n }\n\n // Set totalChunks\n for (const chunk of chunks) {\n chunk.totalChunks = chunks.length;\n }\n\n return chunks;\n }\n\n /**\n * Semantic chunking for TypeScript/JavaScript code files.\n * Splits on top-level declarations to keep functions/classes together.\n */\n private chunkCode(text: string): Chunk[] {\n // Match top-level declarations with optional JSDoc/comments before them\n const declarationRegex =\n /^(?:\\/\\*\\*[\\s\\S]*?\\*\\/\\s*)?(?:export\\s+)?(?:default\\s+)?(?:async\\s+)?(?:function|class|interface|type|const|let|var|enum)\\s+(\\w+)/gm;\n const declarations: Array<{ startOffset: number; endOffset: number; name?: string }> = [];\n\n let match: RegExpExecArray | null;\n while ((match = declarationRegex.exec(text)) !== null) {\n const name = match[1];\n const decl: { startOffset: number; endOffset: number; name?: string } = {\n startOffset: match.index,\n endOffset: match.index,\n };\n if (name !== undefined) {\n decl.name = name;\n }\n declarations.push(decl);\n }\n\n // If no declarations found, use sliding window\n if (declarations.length === 0) {\n return this.chunkSlidingWindow(text);\n }\n\n // Find end of each declaration using brace-aware boundary detection\n for (let i = 0; i < declarations.length; i++) {\n const currentDecl = declarations[i];\n const nextDecl = declarations[i + 1];\n if (currentDecl === undefined) continue;\n\n // For declarations that likely have braces (functions, classes, enums)\n // use smart boundary detection\n const declText = text.slice(currentDecl.startOffset);\n if (\n /^(?:\\/\\*\\*[\\s\\S]*?\\*\\/\\s*)?(?:export\\s+)?(?:async\\s+)?(?:function|class|enum)\\s+/m.test(\n declText\n )\n ) {\n const boundary = this.findDeclarationEnd(declText);\n if (boundary > 0) {\n currentDecl.endOffset = currentDecl.startOffset + boundary;\n } else {\n // Fall back to next declaration or EOF\n currentDecl.endOffset = nextDecl !== undefined ? nextDecl.startOffset : text.length;\n }\n } else {\n // For other declarations (interface, type, const, let, var), use next declaration or EOF\n currentDecl.endOffset = nextDecl !== undefined ? 
nextDecl.startOffset : text.length;\n }\n }\n\n const chunks: Chunk[] = [];\n\n for (const decl of declarations) {\n const content = text.slice(decl.startOffset, decl.endOffset).trim();\n\n if (content.length <= this.chunkSize) {\n // Declaration fits in one chunk\n chunks.push({\n content,\n chunkIndex: chunks.length,\n totalChunks: 0,\n startOffset: decl.startOffset,\n endOffset: decl.endOffset,\n functionName: decl.name,\n });\n } else {\n // Split large declaration with sliding window\n const declChunks = this.chunkSlidingWindow(content);\n for (const subChunk of declChunks) {\n chunks.push({\n ...subChunk,\n chunkIndex: chunks.length,\n startOffset: decl.startOffset + subChunk.startOffset,\n endOffset: decl.startOffset + subChunk.endOffset,\n functionName: decl.name,\n });\n }\n }\n }\n\n // Set totalChunks\n for (const chunk of chunks) {\n chunk.totalChunks = chunks.length;\n }\n\n return chunks.length > 0 ? chunks : this.chunkSlidingWindow(text);\n }\n\n /**\n * Find the end of a code declaration by counting braces while ignoring\n * braces inside strings and comments.\n * Returns the offset where the declaration ends, or -1 if not found.\n */\n private findDeclarationEnd(text: string): number {\n let braceCount = 0;\n let inString = false;\n let inSingleLineComment = false;\n let inMultiLineComment = false;\n let stringChar = '';\n let i = 0;\n let foundFirstBrace = false;\n\n // Find the first opening brace\n while (i < text.length) {\n const char = text[i];\n const nextChar = i + 1 < text.length ? text[i + 1] : '';\n\n // Handle comments\n if (!inString && !inMultiLineComment && char === '/' && nextChar === '/') {\n inSingleLineComment = true;\n i += 2;\n continue;\n }\n\n if (!inString && !inSingleLineComment && char === '/' && nextChar === '*') {\n inMultiLineComment = true;\n i += 2;\n continue;\n }\n\n if (inMultiLineComment && char === '*' && nextChar === '/') {\n inMultiLineComment = false;\n i += 2;\n continue;\n }\n\n if (inSingleLineComment && char === '\\n') {\n inSingleLineComment = false;\n i++;\n continue;\n }\n\n // Skip if in comment\n if (inSingleLineComment || inMultiLineComment) {\n i++;\n continue;\n }\n\n // Handle strings\n if (!inString && (char === '\"' || char === \"'\" || char === '`')) {\n inString = true;\n stringChar = char;\n i++;\n continue;\n }\n\n if (inString && char === '\\\\') {\n // Skip escaped character\n i += 2;\n continue;\n }\n\n if (inString && char === stringChar) {\n inString = false;\n stringChar = '';\n i++;\n continue;\n }\n\n // Skip if in string\n if (inString) {\n i++;\n continue;\n }\n\n // Count braces\n if (char === '{') {\n braceCount++;\n foundFirstBrace = true;\n } else if (char === '}') {\n braceCount--;\n if (foundFirstBrace && braceCount === 0) {\n // Found the closing brace\n return i + 1;\n }\n }\n\n i++;\n }\n\n // If we didn't find a complete declaration, return -1\n return -1;\n }\n\n /**\n * Traditional sliding window chunking for non-Markdown content.\n */\n private chunkSlidingWindow(text: string): Chunk[] {\n if (text.length <= this.chunkSize) {\n return [\n {\n content: text,\n chunkIndex: 0,\n totalChunks: 1,\n startOffset: 0,\n endOffset: text.length,\n },\n ];\n }\n\n const chunks: Chunk[] = [];\n const step = this.chunkSize - this.chunkOverlap;\n let start = 0;\n\n while (start < text.length) {\n const end = Math.min(start + this.chunkSize, text.length);\n chunks.push({\n content: text.slice(start, end),\n chunkIndex: chunks.length,\n totalChunks: 0,\n startOffset: start,\n endOffset: end,\n });\n start += 
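A worked example of the sliding-window arithmetic used by the fallback chunker here: each window starts `chunkSize - chunkOverlap` characters after the previous one and the last window stops at end of text. This is a standalone sketch of the offsets only, not the service's code.

```typescript
// Compute the [start, end) offsets a sliding-window chunker would produce.
function windowOffsets(
  textLength: number,
  chunkSize: number,
  chunkOverlap: number
): Array<[number, number]> {
  const step = chunkSize - chunkOverlap;
  const out: Array<[number, number]> = [];
  let start = 0;
  while (start < textLength) {
    const end = Math.min(start + chunkSize, textLength);
    out.push([start, end]);
    if (end === textLength) break;
    start += step;
  }
  return out;
}

// 2500 chars with the 'web' preset (1200/200) → [0,1200], [1000,2200], [2000,2500]
console.log(windowOffsets(2500, 1200, 200));
```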
step;\n if (end === text.length) break;\n }\n\n // Set totalChunks\n for (const chunk of chunks) {\n chunk.totalChunks = chunks.length;\n }\n\n return chunks;\n }\n}\n","import { createHash } from 'node:crypto';\nimport { readFile, stat } from 'node:fs/promises';\nimport type { TypedStoreManifest, TypedFileState, DriftResult } from '../types/manifest.js';\n\n/**\n * Current state of a file on disk.\n * Used for comparison against manifest.\n */\nexport interface CurrentFileState {\n path: string;\n mtime: number;\n size: number;\n}\n\n/**\n * Service for detecting file changes between disk state and manifest.\n *\n * Uses two-phase detection for efficiency:\n * - Phase 1 (fast): Compare mtime and size\n * - Phase 2 (deep): Compute hash for files that changed in phase 1\n *\n * This approach minimizes disk I/O by avoiding hash computation for unchanged files.\n */\nexport class DriftService {\n /**\n * Detect changes between current files and manifest.\n *\n * @param manifest - The stored manifest from last index\n * @param currentFiles - Current files on disk with mtime/size\n * @returns Classification of files into added, modified, deleted, unchanged\n */\n async detectChanges(\n manifest: TypedStoreManifest,\n currentFiles: CurrentFileState[]\n ): Promise<DriftResult> {\n const result: DriftResult = {\n added: [],\n modified: [],\n deleted: [],\n unchanged: [],\n };\n\n // Build a set of current file paths for quick lookup\n const currentPathSet = new Set(currentFiles.map((f) => f.path));\n const manifestPaths = new Set(Object.keys(manifest.files));\n\n // Find deleted files (in manifest but not on disk)\n for (const path of manifestPaths) {\n if (!currentPathSet.has(path)) {\n result.deleted.push(path);\n }\n }\n\n // Process current files\n const potentiallyModified: CurrentFileState[] = [];\n\n for (const file of currentFiles) {\n const manifestState = manifest.files[file.path];\n\n if (manifestState === undefined) {\n // New file (not in manifest)\n result.added.push(file.path);\n } else {\n // Phase 1: Fast check - compare mtime and size\n if (file.mtime === manifestState.mtime && file.size === manifestState.size) {\n // Same mtime and size - assume unchanged\n result.unchanged.push(file.path);\n } else {\n // mtime or size changed - need phase 2 check\n potentiallyModified.push(file);\n }\n }\n }\n\n // Phase 2: Deep check - compute hash for potentially modified files\n for (const file of potentiallyModified) {\n const manifestState = manifest.files[file.path];\n if (manifestState === undefined) {\n // Should not happen, but handle gracefully\n result.added.push(file.path);\n continue;\n }\n\n const currentHash = await this.computeFileHash(file.path);\n\n if (currentHash === manifestState.hash) {\n // Hash matches - file content unchanged (only metadata changed)\n result.unchanged.push(file.path);\n } else {\n // Hash differs - file actually modified\n result.modified.push(file.path);\n }\n }\n\n return result;\n }\n\n /**\n * Get the current state of a file on disk.\n */\n async getFileState(path: string): Promise<CurrentFileState> {\n const stats = await stat(path);\n return {\n path,\n mtime: stats.mtimeMs,\n size: stats.size,\n };\n }\n\n /**\n * Compute MD5 hash of a file.\n */\n async computeFileHash(path: string): Promise<string> {\n const content = await readFile(path);\n return createHash('md5').update(content).digest('hex');\n }\n\n /**\n * Create a file state entry for the manifest after indexing.\n *\n * @param path - File path\n * @param documentIds - Document IDs 
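A hedged usage sketch for the two-phase drift detection in `DriftService` above; import paths and file paths are assumptions.

```typescript
import { DriftService } from './drift.service.js';          // path assumed
import type { TypedStoreManifest } from '../types/manifest.js'; // path assumed

const drift = new DriftService();

async function reportDrift(manifest: TypedStoreManifest, paths: string[]): Promise<void> {
  // Phase 1 input: cheap stat() metadata for every file currently on disk.
  const currentFiles = await Promise.all(paths.map((p) => drift.getFileState(p)));

  // detectChanges compares mtime + size first and only hashes files that look changed.
  const result = await drift.detectChanges(manifest, currentFiles);
  console.log('added:', result.added.length);
  console.log('modified:', result.modified.length);
  console.log('deleted:', result.deleted.length);
  console.log('unchanged:', result.unchanged.length);
}
```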
created from this file\n * @returns File state for manifest\n */\n async createFileState(\n path: string,\n documentIds: string[]\n ): Promise<{ state: TypedFileState; hash: string }> {\n const stats = await stat(path);\n const content = await readFile(path);\n const hash = createHash('md5').update(content).digest('hex');\n\n // Import createDocumentId dynamically to avoid circular deps\n const { createDocumentId } = await import('../types/brands.js');\n\n return {\n state: {\n mtime: stats.mtimeMs,\n size: stats.size,\n hash,\n documentIds: documentIds.map((id) => createDocumentId(id)),\n },\n hash,\n };\n }\n}\n","import { readFile, access, mkdir } from 'node:fs/promises';\nimport { join } from 'node:path';\nimport { createDocumentId } from '../types/brands.js';\nimport { StoreManifestSchema, createEmptyManifest } from '../types/manifest.js';\nimport { atomicWriteFile } from '../utils/atomic-write.js';\nimport type { StoreId } from '../types/brands.js';\nimport type { TypedStoreManifest, TypedFileState } from '../types/manifest.js';\n\n/**\n * Service for managing store manifests.\n *\n * Manifests track the state of indexed files to enable incremental re-indexing.\n * They are stored in the data directory under manifests/{storeId}.manifest.json.\n */\nexport class ManifestService {\n private readonly manifestsDir: string;\n\n constructor(dataDir: string) {\n this.manifestsDir = join(dataDir, 'manifests');\n }\n\n /**\n * Initialize the manifests directory.\n */\n async initialize(): Promise<void> {\n await mkdir(this.manifestsDir, { recursive: true });\n }\n\n /**\n * Get the file path for a store's manifest.\n */\n getManifestPath(storeId: StoreId): string {\n return join(this.manifestsDir, `${storeId}.manifest.json`);\n }\n\n /**\n * Load a store's manifest.\n * Returns an empty manifest if one doesn't exist.\n * Throws on parse/validation errors (fail fast).\n */\n async load(storeId: StoreId): Promise<TypedStoreManifest> {\n const manifestPath = this.getManifestPath(storeId);\n\n const exists = await this.fileExists(manifestPath);\n if (!exists) {\n return createEmptyManifest(storeId);\n }\n\n const content = await readFile(manifestPath, 'utf-8');\n let parsed: unknown;\n try {\n parsed = JSON.parse(content);\n } catch (error) {\n throw new Error(\n `Failed to parse manifest at ${manifestPath}: ${\n error instanceof Error ? 
error.message : String(error)\n }`\n );\n }\n\n const result = StoreManifestSchema.safeParse(parsed);\n if (!result.success) {\n throw new Error(`Invalid manifest at ${manifestPath}: ${result.error.message}`);\n }\n\n // Convert to typed manifest with branded types\n return this.toTypedManifest(result.data, storeId);\n }\n\n /**\n * Save a store's manifest atomically.\n */\n async save(manifest: TypedStoreManifest): Promise<void> {\n const manifestPath = this.getManifestPath(manifest.storeId);\n\n // Update indexedAt timestamp\n const toSave = {\n ...manifest,\n indexedAt: new Date().toISOString(),\n };\n\n await atomicWriteFile(manifestPath, JSON.stringify(toSave, null, 2));\n }\n\n /**\n * Delete a store's manifest.\n * Called when a store is deleted or during full re-index.\n */\n async delete(storeId: StoreId): Promise<void> {\n const manifestPath = this.getManifestPath(storeId);\n const { unlink } = await import('node:fs/promises');\n\n const exists = await this.fileExists(manifestPath);\n if (exists) {\n await unlink(manifestPath);\n }\n }\n\n /**\n * Check if a file exists.\n */\n private async fileExists(path: string): Promise<boolean> {\n try {\n await access(path);\n return true;\n } catch {\n return false;\n }\n }\n\n /**\n * Convert a parsed manifest to a typed manifest with branded types.\n */\n private toTypedManifest(\n data: { version: 1; storeId: string; indexedAt: string; files: Record<string, FileStateRaw> },\n storeId: StoreId\n ): TypedStoreManifest {\n const files: Record<string, TypedFileState> = {};\n\n for (const [path, state] of Object.entries(data.files)) {\n files[path] = {\n mtime: state.mtime,\n size: state.size,\n hash: state.hash,\n documentIds: state.documentIds.map((id) => createDocumentId(id)),\n };\n }\n\n return {\n version: 1,\n storeId,\n indexedAt: data.indexedAt,\n files,\n };\n }\n}\n\n/** Raw file state from parsed JSON (matches FileStateSchema) */\ninterface FileStateRaw {\n mtime: number;\n size: number;\n hash: string;\n documentIds: string[];\n}\n","import { z } from 'zod';\nimport type { StoreId, DocumentId } from './brands.js';\n\n/**\n * Manifest types for tracking indexed file state.\n *\n * The manifest enables incremental indexing by tracking:\n * - File metadata (mtime, size) for fast change detection\n * - Content hash for deep verification\n * - Document IDs for cleanup on file changes\n */\n\n// ============================================================================\n// File State Schema\n// ============================================================================\n\n/**\n * State of a single indexed file.\n * Used for change detection in two phases:\n * - Phase 1 (fast): mtime + size comparison\n * - Phase 2 (deep): hash comparison for files that passed phase 1\n */\nexport const FileStateSchema = z.object({\n /** File modification time in milliseconds since epoch */\n mtime: z.number(),\n /** File size in bytes */\n size: z.number(),\n /** MD5 hash of file content */\n hash: z.string(),\n /** Document IDs created from this file (for cleanup) */\n documentIds: z.array(z.string()),\n});\n\nexport type FileState = z.infer<typeof FileStateSchema>;\n\n// ============================================================================\n// Store Manifest Schema\n// ============================================================================\n\n/**\n * Manifest for a single store.\n * Tracks the state of all indexed files to enable incremental re-indexing.\n */\nexport const StoreManifestSchema = z.object({\n /** Schema version for 
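A usage sketch for `ManifestService` above. The data directory, store ID, and import paths are assumptions; `StoreId` is a branded string type, so the cast below is only for the sketch.

```typescript
import { ManifestService } from './manifest.service.js'; // path assumed
import type { StoreId } from '../types/brands.js';       // path assumed

async function touchManifest(): Promise<void> {
  const manifests = new ManifestService('/tmp/bluera-data'); // dataDir assumed
  await manifests.initialize();

  const storeId = 'docs-store' as StoreId; // illustrative cast only

  // load() returns an empty manifest when none exists yet and throws on
  // unparseable or schema-invalid JSON (fail fast).
  const manifest = await manifests.load(storeId);
  console.log(Object.keys(manifest.files).length, 'files tracked');

  // save() refreshes indexedAt and writes the manifest atomically.
  await manifests.save(manifest);
}
```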
future migrations */\n version: z.literal(1),\n /** Store ID this manifest belongs to */\n storeId: z.string(),\n /** When the manifest was last updated */\n indexedAt: z.string(),\n /** Map of file paths to their state */\n files: z.record(z.string(), FileStateSchema),\n});\n\nexport type StoreManifest = z.infer<typeof StoreManifestSchema>;\n\n// ============================================================================\n// Branded Type Wrappers\n// ============================================================================\n\n/**\n * Type-safe manifest with branded StoreId.\n * Use this in service code for proper type safety.\n */\nexport interface TypedStoreManifest {\n version: 1;\n storeId: StoreId;\n indexedAt: string;\n files: Record<string, TypedFileState>;\n}\n\n/**\n * Type-safe file state with branded DocumentIds.\n */\nexport interface TypedFileState {\n mtime: number;\n size: number;\n hash: string;\n documentIds: DocumentId[];\n}\n\n// ============================================================================\n// Change Detection Types\n// ============================================================================\n\n/**\n * Result of comparing current files against manifest.\n */\nexport interface DriftResult {\n /** Files that exist on disk but not in manifest */\n added: string[];\n /** Files that exist in both but have changed */\n modified: string[];\n /** Files that exist in manifest but not on disk */\n deleted: string[];\n /** Files that are unchanged */\n unchanged: string[];\n}\n\n// ============================================================================\n// Default Manifest\n// ============================================================================\n\n/**\n * Create an empty manifest for a store.\n */\nexport function createEmptyManifest(storeId: StoreId): TypedStoreManifest {\n return {\n version: 1,\n storeId,\n indexedAt: new Date().toISOString(),\n files: {},\n };\n}\n","export interface CodeUnit {\n type: 'function' | 'class' | 'interface' | 'type' | 'const' | 'documentation' | 'example';\n name: string;\n signature: string;\n fullContent: string;\n startLine: number;\n endLine: number;\n language: string;\n}\n\nexport class CodeUnitService {\n extractCodeUnit(code: string, symbolName: string, language: string): CodeUnit | undefined {\n const lines = code.split('\\n');\n\n // Find the line containing the symbol\n let startLine = -1;\n let type: CodeUnit['type'] = 'function';\n\n for (let i = 0; i < lines.length; i++) {\n const line = lines[i] ?? 
'';\n\n if (line.includes(`function ${symbolName}`)) {\n startLine = i + 1; // 1-indexed\n type = 'function';\n break;\n }\n\n if (line.includes(`class ${symbolName}`)) {\n startLine = i + 1;\n type = 'class';\n break;\n }\n\n // Check for interface declarations\n if (line.match(new RegExp(`interface\\\\s+${symbolName}(?:\\\\s|{|<)`))) {\n startLine = i + 1;\n type = 'interface';\n break;\n }\n\n // Check for type declarations\n if (line.match(new RegExp(`type\\\\s+${symbolName}(?:\\\\s|=|<)`))) {\n startLine = i + 1;\n type = 'type';\n break;\n }\n\n // Check for arrow functions: const/let/var name = ...\n if (line.match(new RegExp(`(?:const|let|var)\\\\s+${symbolName}\\\\s*=`))) {\n startLine = i + 1;\n type = 'const';\n break;\n }\n }\n\n if (startLine === -1) return undefined;\n\n // Find end line using state machine that tracks strings and comments\n let endLine = startLine;\n let braceCount = 0;\n let foundFirstBrace = false;\n\n // For type aliases without braces (e.g., \"type UserId = string;\"), find semicolon\n if (type === 'type') {\n const firstLine = lines[startLine - 1] ?? '';\n if (!firstLine.includes('{') && firstLine.includes(';')) {\n // Single-line type alias\n endLine = startLine;\n const fullContent = firstLine;\n const signature = this.extractSignature(firstLine, symbolName, type);\n return {\n type,\n name: symbolName,\n signature,\n fullContent,\n startLine,\n endLine,\n language,\n };\n }\n }\n\n // State machine for tracking context\n let inSingleQuote = false;\n let inDoubleQuote = false;\n let inTemplateLiteral = false;\n let inMultiLineComment = false;\n\n for (let i = startLine - 1; i < lines.length; i++) {\n const line = lines[i] ?? '';\n let inSingleLineComment = false;\n\n for (let j = 0; j < line.length; j++) {\n const char = line[j];\n const prevChar = j > 0 ? line[j - 1] : '';\n const nextChar = j < line.length - 1 ? line[j + 1] : '';\n\n // Skip escaped characters within strings\n if (prevChar === '\\\\' && (inSingleQuote || inDoubleQuote || inTemplateLiteral)) {\n continue;\n }\n\n // Inside multi-line comment - only look for end marker\n if (inMultiLineComment) {\n if (char === '*' && nextChar === '/') {\n inMultiLineComment = false;\n j++; // Skip the /\n }\n continue;\n }\n\n // Inside single-line comment - skip rest of line\n if (inSingleLineComment) {\n continue;\n }\n\n // Inside a string - only look for closing delimiter\n if (inSingleQuote) {\n if (char === \"'\") inSingleQuote = false;\n continue;\n }\n if (inDoubleQuote) {\n if (char === '\"') inDoubleQuote = false;\n continue;\n }\n if (inTemplateLiteral) {\n if (char === '`') inTemplateLiteral = false;\n continue;\n }\n\n // Not inside any special context - check for context starters\n if (char === '/' && nextChar === '*') {\n inMultiLineComment = true;\n j++; // Skip the *\n continue;\n }\n if (char === '/' && nextChar === '/') {\n inSingleLineComment = true;\n continue;\n }\n if (char === \"'\") {\n inSingleQuote = true;\n continue;\n }\n if (char === '\"') {\n inDoubleQuote = true;\n continue;\n }\n if (char === '`') {\n inTemplateLiteral = true;\n continue;\n }\n\n // Count braces (we're not inside any string or comment)\n if (char === '{') {\n braceCount++;\n foundFirstBrace = true;\n }\n if (char === '}') braceCount--;\n }\n\n if (foundFirstBrace && braceCount === 0) {\n endLine = i + 1;\n break;\n }\n }\n\n const fullContent = lines.slice(startLine - 1, endLine).join('\\n');\n\n // Extract signature (first line, cleaned)\n const firstLine = lines[startLine - 1] ?? 
'';\n const signature = this.extractSignature(firstLine, symbolName, type);\n\n return {\n type,\n name: symbolName,\n signature,\n fullContent,\n startLine,\n endLine,\n language,\n };\n }\n\n private extractSignature(line: string, name: string, type: string): string {\n // Remove 'export', 'async', trim whitespace\n const sig = line\n .replace(/^\\s*export\\s+/, '')\n .replace(/^\\s*async\\s+/, '')\n .trim();\n\n if (type === 'function') {\n // Extract just \"functionName(params): returnType\"\n // Supports: simple types, generics (Promise<T>), arrays (T[]), unions (T | null)\n const match = sig.match(/function\\s+(\\w+\\([^)]*\\):\\s*[\\w<>[\\],\\s|]+)/);\n if (match?.[1] !== undefined && match[1].length > 0) return match[1].trim();\n }\n\n if (type === 'class') {\n return `class ${name}`;\n }\n\n if (type === 'interface') {\n return `interface ${name}`;\n }\n\n if (type === 'type') {\n // For type aliases, include generics if present\n const typeMatch = sig.match(new RegExp(`type\\\\s+(${name}(?:<[^>]+>)?)\\\\s*=`));\n if (typeMatch?.[1] !== undefined && typeMatch[1].length > 0) {\n return `type ${typeMatch[1]}`;\n }\n return `type ${name}`;\n }\n\n if (type === 'const') {\n // For arrow functions, extract the variable declaration part\n // Example: const myFunc = (param: string): void => ...\n // Returns: const myFunc = (param: string): void\n const arrowMatch = sig.match(\n new RegExp(\n `((?:const|let|var)\\\\s+${name}\\\\s*=\\\\s*(?:async\\\\s+)?\\\\([^)]*\\\\)(?::\\\\s*[^=]+)?)`\n )\n );\n const matchedSig = arrowMatch?.[1];\n if (matchedSig !== undefined && matchedSig !== '') return matchedSig.trim();\n\n // Fallback for simple arrow functions without params\n return `const ${name}`;\n }\n\n return sig;\n }\n}\n","import { CodeUnitService } from './code-unit.service.js';\nimport { createLogger } from '../logging/index.js';\nimport type { CodeGraphService } from './code-graph.service.js';\nimport type { CodeGraph } from '../analysis/code-graph.js';\nimport type { EmbeddingEngine } from '../db/embeddings.js';\nimport type { LanceStore } from '../db/lance.js';\nimport type { StoreId } from '../types/brands.js';\nimport type { SearchConfig } from '../types/config.js';\nimport type {\n SearchQuery,\n SearchResponse,\n SearchResult,\n SearchConfidence,\n DetailLevel,\n CodeUnit,\n SearchIntent,\n} from '../types/search.js';\n\nconst logger = createLogger('search-service');\n\n/**\n * Query intent classification for context-aware ranking.\n * Different intents prioritize different content types.\n */\nexport type QueryIntent = 'how-to' | 'implementation' | 'conceptual' | 'comparison' | 'debugging';\n\n/**\n * Classified intent with confidence score for multi-intent queries.\n */\nexport interface ClassifiedIntent {\n intent: QueryIntent;\n confidence: number;\n}\n\n/**\n * Intent-based file type multipliers - CONSERVATIVE version.\n * Applied on top of base file-type boosts.\n * Lessons learned: Too-aggressive penalties hurt when corpus lacks ideal content.\n * These values provide gentle guidance rather than dramatic reranking.\n */\nconst INTENT_FILE_BOOSTS: Record<QueryIntent, Record<string, number>> = {\n 'how-to': {\n 'documentation-primary': 1.3, // Strong boost for docs\n documentation: 1.2,\n example: 1.5, // Examples are ideal for \"how to\"\n source: 0.85, // Moderate penalty - source might still have good content\n 'source-internal': 0.7, // Stronger penalty - internal code less useful\n test: 0.8,\n config: 0.7,\n changelog: 0.6, // Changelogs rarely answer \"how to\" 
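A hedged sketch of extracting a code unit by symbol name with the `CodeUnitService` above; the import path and sample source are assumptions.

```typescript
import { CodeUnitService } from './code-unit.service.js'; // path assumed

const service = new CodeUnitService();
const source = [
  '/** Adds two numbers. */',
  'export function add(a: number, b: number): number {',
  '  return a + b;',
  '}',
].join('\n');

const unit = service.extractCodeUnit(source, 'add', 'typescript');
if (unit) {
  console.log(unit.type);      // 'function'
  console.log(unit.startLine); // 2 (1-indexed line of the declaration)
  console.log(unit.signature); // 'add(a: number, b: number): number'
}
```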
questions\n other: 0.9,\n },\n implementation: {\n 'documentation-primary': 0.95,\n documentation: 1.0,\n example: 1.0,\n source: 1.1, // Slight boost for source code\n 'source-internal': 1.05, // Internal code can be relevant\n test: 1.0,\n config: 0.95,\n changelog: 0.8, // Might reference implementation changes\n other: 1.0,\n },\n conceptual: {\n 'documentation-primary': 1.1,\n documentation: 1.05,\n example: 1.0,\n source: 0.95,\n 'source-internal': 0.9,\n test: 0.9,\n config: 0.85,\n changelog: 0.7, // Sometimes explains concepts behind changes\n other: 0.95,\n },\n comparison: {\n 'documentation-primary': 1.15,\n documentation: 1.1,\n example: 1.05,\n source: 0.9,\n 'source-internal': 0.85,\n test: 0.9,\n config: 0.85,\n changelog: 0.9, // Version comparisons can be useful\n other: 0.95,\n },\n debugging: {\n 'documentation-primary': 1.0,\n documentation: 1.0,\n example: 1.05,\n source: 1.0, // Source code helps with debugging\n 'source-internal': 0.95,\n test: 1.05, // Tests can show expected behavior\n config: 0.9,\n changelog: 1.1, // Often contains bug fixes and known issues\n other: 1.0,\n },\n};\n\n// Known frameworks/technologies for context-aware boosting\nconst FRAMEWORK_PATTERNS: Array<{ pattern: RegExp; terms: string[] }> = [\n { pattern: /\\bexpress\\b/i, terms: ['express', 'expressjs', 'express.js'] },\n { pattern: /\\bhono\\b/i, terms: ['hono'] },\n { pattern: /\\bzod\\b/i, terms: ['zod'] },\n { pattern: /\\breact\\b/i, terms: ['react', 'reactjs', 'react.js'] },\n { pattern: /\\bvue\\b/i, terms: ['vue', 'vuejs', 'vue.js', 'vue3'] },\n { pattern: /\\bnode\\b/i, terms: ['node', 'nodejs', 'node.js'] },\n { pattern: /\\btypescript\\b/i, terms: ['typescript', 'ts'] },\n { pattern: /\\bjwt\\b/i, terms: ['jwt', 'jsonwebtoken', 'json-web-token'] },\n];\n\n// Pattern definitions for intent classification\nconst HOW_TO_PATTERNS = [\n /how (do|can|should|would) (i|you|we)/i,\n /how to\\b/i,\n /what('s| is) the (best |right |correct )?(way|approach) to/i,\n /i (need|want|have) to/i,\n /show me how/i,\n /\\bwhat's the syntax\\b/i,\n /\\bhow do i (use|create|make|set up|configure|implement|add|get)\\b/i,\n /\\bi'm (trying|building|creating|making)\\b/i,\n];\n\nconst IMPLEMENTATION_PATTERNS = [\n /how (does|is) .* (implemented|work internally)/i,\n /\\binternal(ly)?\\b/i,\n /\\bsource code\\b/i,\n /\\bunder the hood\\b/i,\n /\\bimplementation (of|details?)\\b/i,\n];\n\nconst COMPARISON_PATTERNS = [\n /\\b(vs\\.?|versus)\\b/i,\n /\\bdifference(s)? 
between\\b/i,\n /\\bcompare\\b/i,\n /\\bshould (i|we) use .* or\\b/i,\n /\\bwhat's the difference\\b/i,\n /\\bwhich (one|is better)\\b/i,\n /\\bwhen (should|to) use\\b/i,\n];\n\nconst DEBUGGING_PATTERNS = [\n /\\b(error|bug|issue|problem|crash|fail|broken|wrong)\\b/i,\n /\\bdoesn't (work|compile|run)\\b/i,\n /\\bisn't (working|updating|rendering)\\b/i,\n /\\bwhy (is|does|doesn't|isn't)\\b/i,\n /\\bwhat('s| is) (wrong|happening|going on)\\b/i,\n /\\bwhat am i doing wrong\\b/i,\n /\\bnot (working|updating|showing)\\b/i,\n /\\bhow do i (fix|debug|solve|resolve)\\b/i,\n];\n\nconst CONCEPTUAL_PATTERNS = [\n /\\bwhat (is|are)\\b/i,\n /\\bexplain\\b/i,\n /\\bwhat does .* (mean|do)\\b/i,\n /\\bhow does .* work\\b/i,\n /\\bwhat('s| is) the (purpose|point|idea)\\b/i,\n];\n\n/**\n * Classify query intents with confidence scores.\n * Returns all matching intents, allowing queries to have multiple intents.\n */\nfunction classifyQueryIntents(query: string): ClassifiedIntent[] {\n const q = query.toLowerCase();\n const intents: ClassifiedIntent[] = [];\n\n // Check all pattern groups and add matching intents with confidence\n if (IMPLEMENTATION_PATTERNS.some((p) => p.test(q))) {\n intents.push({ intent: 'implementation', confidence: 0.9 });\n }\n\n if (DEBUGGING_PATTERNS.some((p) => p.test(q))) {\n intents.push({ intent: 'debugging', confidence: 0.85 });\n }\n\n if (COMPARISON_PATTERNS.some((p) => p.test(q))) {\n intents.push({ intent: 'comparison', confidence: 0.8 });\n }\n\n if (HOW_TO_PATTERNS.some((p) => p.test(q))) {\n intents.push({ intent: 'how-to', confidence: 0.75 });\n }\n\n if (CONCEPTUAL_PATTERNS.some((p) => p.test(q))) {\n intents.push({ intent: 'conceptual', confidence: 0.7 });\n }\n\n // If no patterns match, use how-to as the baseline intent\n if (intents.length === 0) {\n intents.push({ intent: 'how-to', confidence: 0.5 });\n }\n\n // Sort by confidence descending\n return intents.sort((a, b) => b.confidence - a.confidence);\n}\n\n/**\n * Get primary intent for logging/display purposes.\n */\nfunction getPrimaryIntent(intents: ClassifiedIntent[]): QueryIntent {\n return intents[0]?.intent ?? 'how-to';\n}\n\n/**\n * Map MCP SearchIntent to internal QueryIntent.\n * This allows users to override auto-classification via the API.\n */\nfunction mapSearchIntentToQueryIntent(intent: SearchIntent): QueryIntent {\n switch (intent) {\n case 'find-pattern':\n case 'find-implementation':\n case 'find-definition':\n return 'implementation';\n case 'find-usage':\n case 'find-documentation':\n return 'how-to';\n }\n}\n\n/**\n * RRF presets for different content types.\n * Web/docs content uses higher k to reduce noise from repetitive structure.\n */\nconst RRF_PRESETS = {\n code: { k: 20, vectorWeight: 0.6, ftsWeight: 0.4 },\n web: { k: 30, vectorWeight: 0.55, ftsWeight: 0.45 },\n} as const;\n\n/**\n * Detect if results are primarily web content (have urls vs file paths).\n */\nfunction detectContentType(results: SearchResult[]): 'web' | 'code' {\n const webCount = results.filter((r) => 'url' in r.metadata).length;\n return webCount > results.length / 2 ? 
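An illustration of how the module-private `classifyQueryIntents` helper above treats a few queries. It is not imported here (the function is not exported); these are worked examples derived from the pattern tables and confidence values shown.

```typescript
// Expected classifications, highest confidence first.
const examples: Array<{ query: string; intents: Array<[string, number]> }> = [
  // Matches HOW_TO_PATTERNS ("how do i use ...") only.
  { query: 'how do I use zod with express', intents: [['how-to', 0.75]] },
  // Matches COMPARISON ("difference between") and CONCEPTUAL ("what is").
  {
    query: 'what is the difference between vue and react',
    intents: [['comparison', 0.8], ['conceptual', 0.7]],
  },
  // Matches DEBUGGING ("why isn't").
  { query: "why isn't my component rendering", intents: [['debugging', 0.85]] },
  // No pattern matches → falls back to how-to at 0.5 confidence.
  { query: 'lance vector store schema', intents: [['how-to', 0.5]] },
];
console.table(examples);
```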
'web' : 'code';\n}\n\nexport class SearchService {\n private readonly lanceStore: LanceStore;\n private readonly embeddingEngine: EmbeddingEngine;\n private readonly codeUnitService: CodeUnitService;\n private readonly codeGraphService: CodeGraphService | undefined;\n private readonly graphCache: Map<string, CodeGraph | null>;\n private readonly searchConfig: SearchConfig | undefined;\n private readonly unsubscribeCacheInvalidation: (() => void) | undefined;\n\n constructor(\n lanceStore: LanceStore,\n embeddingEngine: EmbeddingEngine,\n codeGraphService?: CodeGraphService,\n searchConfig?: SearchConfig\n ) {\n this.lanceStore = lanceStore;\n this.embeddingEngine = embeddingEngine;\n this.codeUnitService = new CodeUnitService();\n this.codeGraphService = codeGraphService;\n this.graphCache = new Map();\n this.searchConfig = searchConfig;\n\n // Subscribe to cache invalidation events from CodeGraphService\n if (codeGraphService) {\n this.unsubscribeCacheInvalidation = codeGraphService.onCacheInvalidation((event) => {\n // Clear our cached graph when it's updated or deleted\n this.graphCache.delete(event.storeId);\n });\n }\n }\n\n /**\n * Clean up resources (unsubscribe from events).\n * Call this when destroying the service.\n */\n cleanup(): void {\n this.unsubscribeCacheInvalidation?.();\n }\n\n /**\n * Load code graph for a store, with caching.\n * Returns null if no graph is available.\n */\n private async loadGraphForStore(storeId: StoreId): Promise<CodeGraph | null> {\n if (!this.codeGraphService) return null;\n\n const cached = this.graphCache.get(storeId);\n if (cached !== undefined) return cached;\n\n const graph = await this.codeGraphService.loadGraph(storeId);\n const result = graph ?? null;\n this.graphCache.set(storeId, result);\n return result;\n }\n\n /**\n * Calculate confidence level based on max raw vector similarity score.\n * Configurable via environment variables, with sensible defaults for CLI usage.\n */\n private calculateConfidence(maxRawScore: number): SearchConfidence {\n const highThreshold = parseFloat(process.env['SEARCH_CONFIDENCE_HIGH'] ?? '0.5');\n const mediumThreshold = parseFloat(process.env['SEARCH_CONFIDENCE_MEDIUM'] ?? '0.3');\n\n if (maxRawScore >= highThreshold) return 'high';\n if (maxRawScore >= mediumThreshold) return 'medium';\n return 'low';\n }\n\n async search(query: SearchQuery): Promise<SearchResponse> {\n const startTime = Date.now();\n const mode = query.mode ?? this.searchConfig?.defaultMode ?? 'hybrid';\n const limit = query.limit ?? this.searchConfig?.defaultLimit ?? 10;\n const stores = query.stores ?? [];\n const detail = query.detail ?? 'minimal';\n\n // Auto-classify intents from query text (used for logging and when user doesn't specify intent)\n const intents = classifyQueryIntents(query.query);\n\n // Use user-provided intent if available, otherwise use auto-classified\n const primaryIntent =\n query.intent !== undefined\n ? 
mapSearchIntentToQueryIntent(query.intent)\n : getPrimaryIntent(intents);\n\n logger.debug(\n {\n query: query.query,\n mode,\n limit,\n stores,\n detail,\n intent: primaryIntent,\n userIntent: query.intent,\n autoClassifiedIntents: intents,\n minRelevance: query.minRelevance,\n },\n 'Search query received'\n );\n\n let allResults: SearchResult[] = [];\n let maxRawScore = 0;\n\n // Fetch more results than needed to allow for deduplication\n const fetchLimit = limit * 3;\n\n if (mode === 'vector') {\n // For vector mode, call vectorSearchRaw once and reuse results\n // This avoids double embedding cost (vectorSearch calls vectorSearchRaw internally)\n const rawResults = await this.vectorSearchRaw(query.query, stores, fetchLimit);\n maxRawScore = rawResults.length > 0 ? (rawResults[0]?.score ?? 0) : 0;\n // Apply same normalization logic as vectorSearch without re-embedding\n allResults = this.normalizeAndFilterScores(rawResults, query.threshold).slice(0, fetchLimit);\n } else if (mode === 'fts') {\n // FTS mode doesn't have vector similarity, so no confidence calculation\n allResults = await this.ftsSearch(query.query, stores, fetchLimit);\n } else {\n // Hybrid: combine vector and FTS with RRF, get maxRawScore for confidence\n const hybridResult = await this.hybridSearchWithMetadata(\n query.query,\n stores,\n fetchLimit,\n query.threshold\n );\n allResults = hybridResult.results;\n maxRawScore = hybridResult.maxRawScore;\n }\n\n // Apply minRelevance filter - if max raw score is below threshold, return empty\n // Skip in FTS mode since there are no vector scores to compare against\n if (query.minRelevance !== undefined) {\n if (mode === 'fts') {\n logger.warn(\n { query: query.query, minRelevance: query.minRelevance },\n 'minRelevance filter ignored in FTS mode (no vector scores available)'\n );\n } else if (maxRawScore < query.minRelevance) {\n const timeMs = Date.now() - startTime;\n logger.info(\n {\n query: query.query,\n mode,\n maxRawScore,\n minRelevance: query.minRelevance,\n timeMs,\n },\n 'Search filtered by minRelevance - no sufficiently relevant results'\n );\n\n return {\n query: query.query,\n mode,\n stores,\n results: [],\n totalResults: 0,\n timeMs,\n confidence: this.calculateConfidence(maxRawScore),\n maxRawScore,\n };\n }\n }\n\n // Deduplicate by source file - keep best chunk per source (considers query relevance)\n const dedupedResults = this.deduplicateBySource(allResults, query.query);\n const resultsToEnhance = dedupedResults.slice(0, limit);\n\n // Load code graphs for stores in results (for contextual/full detail levels)\n const graphs = new Map<string, CodeGraph | null>();\n if (detail === 'contextual' || detail === 'full') {\n const storeIds = new Set(resultsToEnhance.map((r) => r.metadata.storeId));\n for (const storeId of storeIds) {\n graphs.set(storeId, await this.loadGraphForStore(storeId));\n }\n }\n\n // Enhance results with progressive context\n const enhancedResults = resultsToEnhance.map((r) => {\n const graph = graphs.get(r.metadata.storeId) ?? null;\n return this.addProgressiveContext(r, query.query, detail, graph);\n });\n\n const timeMs = Date.now() - startTime;\n const confidence = mode !== 'fts' ? this.calculateConfidence(maxRawScore) : undefined;\n\n logger.info(\n {\n query: query.query,\n mode,\n resultCount: enhancedResults.length,\n dedupedFrom: allResults.length,\n intents: intents.map((i) => `${i.intent}(${i.confidence.toFixed(2)})`),\n maxRawScore: mode !== 'fts' ? 
maxRawScore : undefined,\n confidence,\n timeMs,\n },\n 'Search complete'\n );\n\n return {\n query: query.query,\n mode,\n stores,\n results: enhancedResults,\n totalResults: enhancedResults.length,\n timeMs,\n confidence,\n maxRawScore: mode !== 'fts' ? maxRawScore : undefined,\n };\n }\n\n /**\n * Deduplicate results by source file path.\n * Keeps the best chunk for each unique source, considering both score and query relevance.\n */\n private deduplicateBySource(results: SearchResult[], query: string): SearchResult[] {\n const bySource = new Map<string, SearchResult>();\n const queryTerms = query\n .toLowerCase()\n .split(/\\s+/)\n .filter((t) => t.length > 2);\n\n for (const result of results) {\n // Use storeId + file path as the source key to distinguish same paths across stores\n // (or url for web content, or id as last resort)\n const storeId = result.metadata.storeId;\n const source = result.metadata.path ?? result.metadata.url ?? result.id;\n const sourceKey = `${storeId}:${source}`;\n\n const existing = bySource.get(sourceKey);\n if (!existing) {\n bySource.set(sourceKey, result);\n } else {\n // Score-weighted relevance: accounts for fileType/framework boosts\n const existingTermCount = this.countQueryTerms(existing.content, queryTerms);\n const newTermCount = this.countQueryTerms(result.content, queryTerms);\n\n // Weight term count by score to account for ranking boosts\n const existingRelevance = existingTermCount * existing.score;\n const newRelevance = newTermCount * result.score;\n\n if (newRelevance > existingRelevance) {\n bySource.set(sourceKey, result);\n }\n }\n }\n\n // Return results sorted by score\n return Array.from(bySource.values()).sort((a, b) => b.score - a.score);\n }\n\n /**\n * Count how many query terms appear in the content.\n */\n private countQueryTerms(content: string, queryTerms: string[]): number {\n const lowerContent = content.toLowerCase();\n return queryTerms.filter((term) => lowerContent.includes(term)).length;\n }\n\n /**\n * Normalize scores to 0-1 range and optionally filter by threshold.\n * This ensures threshold values match displayed scores (UX consistency).\n *\n * Edge case handling:\n * - If there's only 1 result or all results have the same score, normalization\n * would make them all 1.0. In this case, we keep the raw scores to allow\n * threshold filtering to work meaningfully on absolute quality.\n */\n private normalizeAndFilterScores(results: SearchResult[], threshold?: number): SearchResult[] {\n if (results.length === 0) return [];\n\n // Sort by score descending\n const sorted = [...results].sort((a, b) => b.score - a.score);\n\n // Get score range for normalization\n const first = sorted[0];\n const last = sorted[sorted.length - 1];\n if (first === undefined || last === undefined) return [];\n\n const maxScore = first.score;\n const minScore = last.score;\n const range = maxScore - minScore;\n\n // Only normalize when there's meaningful score variation\n // If all scores are the same (range = 0), keep raw scores for threshold filtering\n const normalized =\n range > 0\n ? 
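A hedged usage sketch for `SearchService.search` above. Constructing `LanceStore` and `EmbeddingEngine` is out of scope here, so they are assumed to already exist; the store ID, import paths, and query are illustrative.

```typescript
import { SearchService } from './search.service.js';      // path assumed
import type { LanceStore } from '../db/lance.js';          // path assumed
import type { EmbeddingEngine } from '../db/embeddings.js';
import type { StoreId } from '../types/brands.js';

async function runSearch(lanceStore: LanceStore, embeddings: EmbeddingEngine): Promise<void> {
  const search = new SearchService(lanceStore, embeddings);

  const response = await search.search({
    query: 'how do I configure hybrid search',
    mode: 'hybrid',                     // 'vector' | 'fts' | 'hybrid' (default)
    stores: ['docs-store' as StoreId],  // illustrative store ID
    limit: 5,
    detail: 'contextual',               // adds interfaces/imports/usage context
    minRelevance: 0.3,                  // return nothing if best raw score < 0.3
  });

  console.log(response.confidence, response.maxRawScore);
  for (const r of response.results) {
    console.log(r.score.toFixed(3), r.summary?.location);
  }

  search.cleanup(); // unsubscribe from code-graph cache invalidation events
}
```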
sorted.map((r) => ({\n ...r,\n score: Math.round(((r.score - minScore) / range) * 1000000) / 1000000,\n }))\n : sorted; // Keep raw scores when no variation (allows threshold to filter by quality)\n\n // Apply threshold filter on scores\n if (threshold !== undefined) {\n return normalized.filter((r) => r.score >= threshold);\n }\n\n return normalized;\n }\n\n /**\n * Fetch raw vector search results without normalization.\n * Returns results with raw cosine similarity scores [0-1].\n */\n private async vectorSearchRaw(\n query: string,\n stores: readonly StoreId[],\n limit: number\n ): Promise<SearchResult[]> {\n const queryVector = await this.embeddingEngine.embed(query);\n const results: SearchResult[] = [];\n\n for (const storeId of stores) {\n const hits = await this.lanceStore.search(storeId, queryVector, limit);\n results.push(\n ...hits.map((r) => ({\n id: r.id,\n score: r.score, // Raw cosine similarity (1 - distance)\n content: r.content,\n metadata: r.metadata,\n }))\n );\n }\n\n return results.sort((a, b) => b.score - a.score).slice(0, limit);\n }\n\n private async ftsSearch(\n query: string,\n stores: readonly StoreId[],\n limit: number\n ): Promise<SearchResult[]> {\n const results: SearchResult[] = [];\n\n for (const storeId of stores) {\n try {\n const hits = await this.lanceStore.fullTextSearch(storeId, query, limit);\n results.push(\n ...hits.map((r) => ({\n id: r.id,\n score: r.score,\n content: r.content,\n metadata: r.metadata,\n }))\n );\n } catch {\n // FTS index may not exist for this store - continue with other stores\n // and rely on vector search results. This is expected behavior since\n // FTS indexing is optional and hybrid search works with vector-only.\n }\n }\n\n return results.sort((a, b) => b.score - a.score).slice(0, limit);\n }\n\n /**\n * Internal hybrid search result with additional metadata for confidence calculation.\n */\n private async hybridSearchWithMetadata(\n query: string,\n stores: readonly StoreId[],\n limit: number,\n threshold?: number\n ): Promise<{ results: SearchResult[]; maxRawScore: number }> {\n // Classify query intents for context-aware ranking (supports multiple intents)\n const intents = classifyQueryIntents(query);\n\n // Get raw vector results (unnormalized) to track raw cosine similarity\n // We use these for both raw score tracking and as the basis for normalized vector results\n const rawVectorResults = await this.vectorSearchRaw(query, stores, limit * 2);\n\n // Build map of raw vector scores by document ID\n const rawVectorScores = new Map<string, number>();\n rawVectorResults.forEach((r) => {\n rawVectorScores.set(r.id, r.score);\n });\n\n // Track max raw score for confidence calculation\n const maxRawScore = rawVectorResults.length > 0 ? (rawVectorResults[0]?.score ?? 
0) : 0;\n\n // Normalize raw vector results directly (avoids duplicate embedding call)\n // Don't apply threshold here - it's applied to final RRF-normalized scores at the end\n const vectorResults = this.normalizeAndFilterScores(rawVectorResults);\n\n // Get FTS results in parallel (only one call needed now)\n const ftsResults = await this.ftsSearch(query, stores, limit * 2);\n\n // Build rank maps\n const vectorRanks = new Map<string, number>();\n const ftsRanks = new Map<string, number>();\n const allDocs = new Map<string, SearchResult>();\n\n vectorResults.forEach((r, i) => {\n vectorRanks.set(r.id, i + 1);\n allDocs.set(r.id, r);\n });\n\n ftsResults.forEach((r, i) => {\n ftsRanks.set(r.id, i + 1);\n if (!allDocs.has(r.id)) {\n allDocs.set(r.id, r);\n }\n });\n\n // Calculate RRF scores with file-type boosting and preserve ranking metadata\n const rrfScores: Array<{\n id: string;\n score: number;\n result: SearchResult;\n rawVectorScore: number | undefined;\n metadata: {\n vectorRank?: number;\n ftsRank?: number;\n vectorRRF: number;\n ftsRRF: number;\n fileTypeBoost: number;\n frameworkBoost: number;\n urlKeywordBoost: number;\n pathKeywordBoost: number;\n rawVectorScore?: number;\n };\n }> = [];\n\n // Select RRF config based on content type (web vs code)\n const contentType = detectContentType([...allDocs.values()]);\n const { k, vectorWeight, ftsWeight } = RRF_PRESETS[contentType];\n\n for (const [id, result] of allDocs) {\n const vectorRank = vectorRanks.get(id) ?? Infinity;\n const ftsRank = ftsRanks.get(id) ?? Infinity;\n const rawVectorScore = rawVectorScores.get(id);\n\n const vectorRRF = vectorRank !== Infinity ? vectorWeight / (k + vectorRank) : 0;\n const ftsRRF = ftsRank !== Infinity ? ftsWeight / (k + ftsRank) : 0;\n\n // Apply file-type boost (base + multi-intent-adjusted)\n const fileTypeBoost = this.getFileTypeBoost(\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n result.metadata['fileType'] as string | undefined,\n intents\n );\n\n // Apply framework context boost\n const frameworkBoost = this.getFrameworkContextBoost(query, result);\n\n // Apply URL keyword boost (helps \"troubleshooting\" find /troubleshooting pages)\n const urlKeywordBoost = this.getUrlKeywordBoost(query, result);\n\n // Apply path keyword boost (helps \"dispatcher\" find async_dispatcher.py)\n const pathKeywordBoost = this.getPathKeywordBoost(query, result);\n\n const metadata: {\n vectorRank?: number;\n ftsRank?: number;\n vectorRRF: number;\n ftsRRF: number;\n fileTypeBoost: number;\n frameworkBoost: number;\n urlKeywordBoost: number;\n pathKeywordBoost: number;\n rawVectorScore?: number;\n } = {\n vectorRRF,\n ftsRRF,\n fileTypeBoost,\n frameworkBoost,\n urlKeywordBoost,\n pathKeywordBoost,\n };\n\n if (vectorRank !== Infinity) {\n metadata.vectorRank = vectorRank;\n }\n if (ftsRank !== Infinity) {\n metadata.ftsRank = ftsRank;\n }\n if (rawVectorScore !== undefined) {\n metadata.rawVectorScore = rawVectorScore;\n }\n\n rrfScores.push({\n id,\n score:\n (vectorRRF + ftsRRF) *\n fileTypeBoost *\n frameworkBoost *\n urlKeywordBoost *\n pathKeywordBoost,\n result,\n rawVectorScore,\n metadata,\n });\n }\n\n // Sort by RRF score\n const sorted = rrfScores.sort((a, b) => b.score - a.score).slice(0, limit);\n\n // Normalize scores to 0-1 range for better interpretability\n let normalizedResults: SearchResult[];\n\n if (sorted.length > 0) {\n const first = sorted[0];\n const last = sorted[sorted.length - 1];\n if (first === undefined || last === undefined) {\n 
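A worked example of the weighted Reciprocal Rank Fusion used above, with the 'code' preset (k = 20, vectorWeight = 0.6, ftsWeight = 0.4). Ranks are 1-based, and a document missing from one list contributes 0 for that component; the full score in the service additionally multiplies file-type, framework, URL-keyword, and path-keyword boosts. This standalone function is a sketch of the fusion step only.

```typescript
// Weighted RRF for a single document given its rank in each result list.
function rrfScore(
  vectorRank: number | undefined,
  ftsRank: number | undefined,
  k = 20,
  vectorWeight = 0.6,
  ftsWeight = 0.4
): number {
  const vectorRRF = vectorRank !== undefined ? vectorWeight / (k + vectorRank) : 0;
  const ftsRRF = ftsRank !== undefined ? ftsWeight / (k + ftsRank) : 0;
  return vectorRRF + ftsRRF;
}

// Rank 1 in both lists:        0.6/21 + 0.4/21 ≈ 0.0476
// Rank 1 in vector only:       0.6/21          ≈ 0.0286
// Rank 3 vector, rank 1 FTS:   0.6/23 + 0.4/21 ≈ 0.0451
console.log(rrfScore(1, 1), rrfScore(1, undefined), rrfScore(3, 1));
```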
normalizedResults = sorted.map((r) => ({\n ...r.result,\n score: r.score,\n rankingMetadata: r.metadata,\n }));\n } else {\n const maxScore = first.score;\n const minScore = last.score;\n const range = maxScore - minScore;\n\n if (range > 0) {\n // Round to avoid floating point precision issues in threshold comparisons\n normalizedResults = sorted.map((r) => ({\n ...r.result,\n score: Math.round(((r.score - minScore) / range) * 1000000) / 1000000,\n rankingMetadata: r.metadata,\n }));\n } else {\n // All same score - keep raw scores (allows threshold to filter by quality)\n normalizedResults = sorted.map((r) => ({\n ...r.result,\n score: r.score,\n rankingMetadata: r.metadata,\n }));\n }\n }\n } else {\n normalizedResults = [];\n }\n\n // Apply threshold filter on normalized scores (UX consistency)\n if (threshold !== undefined) {\n normalizedResults = normalizedResults.filter((r) => r.score >= threshold);\n }\n\n return { results: normalizedResults, maxRawScore };\n }\n\n async searchAllStores(query: SearchQuery, storeIds: StoreId[]): Promise<SearchResponse> {\n return this.search({\n ...query,\n stores: storeIds,\n });\n }\n\n /**\n * Get a score multiplier based on file type and query intent.\n * Documentation files get a strong boost to surface them higher.\n * Phase 4: Strengthened boosts for better documentation ranking.\n * Phase 1: Intent-based adjustments for context-aware ranking.\n */\n private getFileTypeBoost(fileType: string | undefined, intents: ClassifiedIntent[]): number {\n // Base file-type boosts\n let baseBoost: number;\n switch (fileType) {\n case 'documentation-primary':\n baseBoost = 1.8; // README, guides get very strong boost\n break;\n case 'documentation':\n baseBoost = 1.5; // docs/, tutorials/ get strong boost\n break;\n case 'example':\n baseBoost = 1.4; // examples/, demos/ are highly valuable\n break;\n case 'source':\n baseBoost = 1.0; // Source code baseline\n break;\n case 'source-internal':\n baseBoost = 0.75; // Internal implementation files (not too harsh)\n break;\n case 'test':\n baseBoost = parseFloat(process.env['SEARCH_TEST_FILE_BOOST'] ?? '0.5');\n break;\n case 'config':\n baseBoost = 0.5; // Config files rarely answer questions\n break;\n case 'changelog':\n baseBoost = 0.7; // Changelogs secondary to docs and examples\n break;\n default:\n baseBoost = 1.0;\n }\n\n // Blend intent-based multipliers weighted by confidence\n let weightedMultiplier = 0;\n let totalConfidence = 0;\n\n for (const { intent, confidence } of intents) {\n const intentBoosts = INTENT_FILE_BOOSTS[intent];\n const multiplier = intentBoosts[fileType ?? 'other'] ?? 1.0;\n weightedMultiplier += multiplier * confidence;\n totalConfidence += confidence;\n }\n\n const blendedMultiplier = totalConfidence > 0 ? 
weightedMultiplier / totalConfidence : 1.0;\n const finalBoost = baseBoost * blendedMultiplier;\n\n // Cap test file boost to prevent intent multipliers from overriding the penalty\n if (fileType === 'test') {\n return Math.min(finalBoost, 0.6);\n }\n\n return finalBoost;\n }\n\n /**\n * Get a score multiplier based on URL keyword matching.\n * Boosts results where URL path contains significant query keywords.\n * This helps queries like \"troubleshooting\" rank /troubleshooting pages first.\n */\n private getUrlKeywordBoost(query: string, result: SearchResult): number {\n const url = result.metadata.url;\n if (url === undefined || url === '') return 1.0;\n\n // Extract path segments from URL and normalize\n const urlPath = url.toLowerCase().replace(/[^a-z0-9]+/g, ' ');\n\n // Common stop words to filter from queries\n const stopWords = new Set([\n 'how',\n 'to',\n 'the',\n 'a',\n 'an',\n 'is',\n 'are',\n 'what',\n 'why',\n 'when',\n 'where',\n 'can',\n 'do',\n 'does',\n 'i',\n 'my',\n 'your',\n 'it',\n 'in',\n 'on',\n 'for',\n 'with',\n 'this',\n 'that',\n 'get',\n 'use',\n 'using',\n ]);\n\n // Extract meaningful query terms\n const queryTerms = query\n .toLowerCase()\n .split(/\\s+/)\n .filter((t) => t.length > 2 && !stopWords.has(t));\n\n if (queryTerms.length === 0) return 1.0;\n\n // Count matching terms in URL path\n const matchingTerms = queryTerms.filter((term) => urlPath.includes(term));\n\n if (matchingTerms.length === 0) return 1.0;\n\n // Boost based on proportion of matching terms\n // Single match: ~1.5, all terms match: ~2.0\n const matchRatio = matchingTerms.length / queryTerms.length;\n return 1.0 + 1.0 * matchRatio;\n }\n\n /**\n * Get a score multiplier based on file path keyword matching.\n * Boosts results where file path contains significant query keywords.\n * This helps queries like \"dispatcher\" rank async_dispatcher.py higher.\n */\n private getPathKeywordBoost(query: string, result: SearchResult): number {\n const path = result.metadata.path;\n if (path === undefined || path === '') return 1.0;\n\n // Extract path segments and normalize (split on slashes, dots, underscores, etc.)\n const pathSegments = path.toLowerCase().replace(/[^a-z0-9]+/g, ' ');\n\n // Common stop words to filter from queries\n const stopWords = new Set([\n 'how',\n 'to',\n 'the',\n 'a',\n 'an',\n 'is',\n 'are',\n 'what',\n 'why',\n 'when',\n 'where',\n 'can',\n 'do',\n 'does',\n 'i',\n 'my',\n 'your',\n 'it',\n 'in',\n 'on',\n 'for',\n 'with',\n 'this',\n 'that',\n 'get',\n 'use',\n 'using',\n ]);\n\n // Extract meaningful query terms\n const queryTerms = query\n .toLowerCase()\n .split(/\\s+/)\n .filter((t) => t.length > 2 && !stopWords.has(t));\n\n if (queryTerms.length === 0) return 1.0;\n\n // Count matching terms in file path\n const matchingTerms = queryTerms.filter((term) => pathSegments.includes(term));\n\n if (matchingTerms.length === 0) return 1.0;\n\n // Boost based on proportion of matching terms\n // Single match: ~1.5, all terms match: ~2.0\n const matchRatio = matchingTerms.length / queryTerms.length;\n return 1.0 + 1.0 * matchRatio;\n }\n\n /**\n * Get a score multiplier based on framework context.\n * If query mentions a framework, boost results from that framework's files.\n */\n private getFrameworkContextBoost(query: string, result: SearchResult): number {\n const path = result.metadata.path ?? result.metadata.url ?? 
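A worked example of the confidence-weighted boost blending in `getFileTypeBoost` above. The numbers come from the base boosts and the INTENT_FILE_BOOSTS table; the query is assumed to have classified as how-to (0.75) plus conceptual (0.7), e.g. "what is the best way to ...". The helper is a sketch of the arithmetic, not the service's method.

```typescript
// final boost = base boost * (sum of multiplier * confidence) / (sum of confidence)
function blendBoost(
  baseBoost: number,
  perIntentMultipliers: Array<{ multiplier: number; confidence: number }>
): number {
  let weighted = 0;
  let total = 0;
  for (const { multiplier, confidence } of perIntentMultipliers) {
    weighted += multiplier * confidence;
    total += confidence;
  }
  return baseBoost * (total > 0 ? weighted / total : 1.0);
}

// documentation-primary: 1.8 * ((1.3*0.75 + 1.1*0.7) / 1.45) ≈ 2.17
console.log(blendBoost(1.8, [
  { multiplier: 1.3, confidence: 0.75 },
  { multiplier: 1.1, confidence: 0.7 },
]));

// plain source: 1.0 * ((0.85*0.75 + 0.95*0.7) / 1.45) ≈ 0.90
console.log(blendBoost(1.0, [
  { multiplier: 0.85, confidence: 0.75 },
  { multiplier: 0.95, confidence: 0.7 },
]));
```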
'';\n const content = result.content.toLowerCase();\n const pathLower = path.toLowerCase();\n\n // Check if query mentions any known frameworks\n for (const { pattern, terms } of FRAMEWORK_PATTERNS) {\n if (pattern.test(query)) {\n // Query mentions this framework - check if result is from that framework\n const resultMatchesFramework = terms.some(\n (term) => pathLower.includes(term) || content.includes(term)\n );\n\n if (resultMatchesFramework) {\n return 1.5; // Strong boost for matching framework\n } else {\n return 0.8; // Moderate penalty for non-matching when framework is specified\n }\n }\n }\n\n return 1.0; // No framework context in query\n }\n\n private addProgressiveContext(\n result: SearchResult,\n query: string,\n detail: DetailLevel,\n graph: CodeGraph | null\n ): SearchResult {\n const enhanced = { ...result };\n\n // Layer 1: Always add summary\n const path = result.metadata.path ?? result.metadata.url ?? 'unknown';\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n const fileType = result.metadata['fileType'] as string | undefined;\n\n // Try to extract code unit\n const codeUnit = this.extractCodeUnitFromResult(result);\n const symbolName = codeUnit?.name ?? this.extractSymbolName(result.content);\n\n enhanced.summary = {\n type: this.inferType(fileType, codeUnit),\n name: symbolName,\n signature: codeUnit?.signature ?? '',\n purpose: this.generatePurpose(result.content, query),\n location: `${path}${codeUnit ? `:${String(codeUnit.startLine)}` : ''}`,\n relevanceReason: this.generateRelevanceReason(result, query),\n };\n\n // Layer 2: Add context if requested\n if (detail === 'contextual' || detail === 'full') {\n // Get usage stats from code graph if available\n const usage = this.getUsageFromGraph(graph, path, symbolName);\n\n enhanced.context = {\n interfaces: this.extractInterfaces(result.content),\n keyImports: this.extractImports(result.content),\n relatedConcepts: this.extractConcepts(result.content, query),\n usage,\n };\n }\n\n // Layer 3: Add full context if requested\n if (detail === 'full') {\n // Get related code from graph if available\n const relatedCode = this.getRelatedCodeFromGraph(graph, path, symbolName);\n\n enhanced.full = {\n completeCode: codeUnit?.fullContent ?? result.content,\n relatedCode,\n documentation: this.extractDocumentation(result.content),\n tests: undefined,\n };\n }\n\n return enhanced;\n }\n\n private extractCodeUnitFromResult(result: SearchResult): CodeUnit | undefined {\n const path = result.metadata.path;\n if (path === undefined || path === '') return undefined;\n\n const ext = path.split('.').pop() ?? '';\n const language =\n ext === 'ts' || ext === 'tsx'\n ? 'typescript'\n : ext === 'js' || ext === 'jsx'\n ? 
'javascript'\n : ext;\n\n // Try to find a symbol name in the content\n const symbolName = this.extractSymbolName(result.content);\n if (symbolName === '') return undefined;\n\n return this.codeUnitService.extractCodeUnit(result.content, symbolName, language);\n }\n\n private extractSymbolName(content: string): string {\n // Extract function or class name\n const funcMatch = content.match(/(?:export\\s+)?(?:async\\s+)?function\\s+(\\w+)/);\n if (funcMatch?.[1] !== undefined && funcMatch[1] !== '') return funcMatch[1];\n\n const classMatch = content.match(/(?:export\\s+)?class\\s+(\\w+)/);\n if (classMatch?.[1] !== undefined && classMatch[1] !== '') return classMatch[1];\n\n const constMatch = content.match(/(?:export\\s+)?const\\s+(\\w+)/);\n if (constMatch?.[1] !== undefined && constMatch[1] !== '') return constMatch[1];\n\n // Fallback: return \"(anonymous)\" for unnamed symbols\n return '(anonymous)';\n }\n\n private inferType(\n fileType: string | undefined,\n codeUnit: CodeUnit | undefined\n ): import('../types/search.js').ResultSummary['type'] {\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n if (codeUnit) return codeUnit.type as import('../types/search.js').ResultSummary['type'];\n if (fileType === 'documentation' || fileType === 'documentation-primary')\n return 'documentation';\n return 'function';\n }\n\n private generatePurpose(content: string, query: string): string {\n // Extract first line of JSDoc comment if present\n const docMatch = content.match(/\\/\\*\\*\\s*\\n\\s*\\*\\s*([^\\n]+)/);\n if (docMatch?.[1] !== undefined && docMatch[1] !== '') return docMatch[1].trim();\n\n const lines = content.split('\\n');\n const queryTerms = query\n .toLowerCase()\n .split(/\\s+/)\n .filter((t) => t.length > 2);\n\n // Helper to check if line is skippable (imports, declarations)\n const shouldSkip = (cleaned: string): boolean => {\n return (\n cleaned.startsWith('import ') ||\n cleaned.startsWith('export ') ||\n cleaned.startsWith('interface ') ||\n cleaned.startsWith('type ')\n );\n };\n\n // Helper to score a line based on query term matches\n const scoreLine = (cleaned: string): number => {\n const lowerLine = cleaned.toLowerCase();\n return queryTerms.filter((term) => lowerLine.includes(term)).length;\n };\n\n // Helper to check if line is meaningful (length, not a comment)\n const isMeaningful = (cleaned: string): boolean => {\n if (cleaned.length === 0) return false;\n if (cleaned.startsWith('//') || cleaned.startsWith('/*')) return false;\n // Accept Markdown headings\n if (cleaned.startsWith('#') && cleaned.length > 3) return true;\n // Accept lines 15+ chars\n return cleaned.length >= 15;\n };\n\n // First pass: find lines with query terms, preferring complete sentences\n let bestLine: string | null = null;\n let bestScore = 0;\n\n for (const line of lines) {\n const cleaned = line.trim();\n if (shouldSkip(cleaned) || !isMeaningful(cleaned)) continue;\n\n let score = scoreLine(cleaned);\n\n // Boost score for complete sentences (end with period, !, ?)\n if (/[.!?]$/.test(cleaned)) {\n score += 0.5;\n }\n\n // Boost score for code examples (contains function calls or assignments)\n // Favor complete patterns: function calls WITH arguments, assignments with values\n if (/\\w+\\([^)]*\\)|=\\s*\\w+\\(|=>/.test(cleaned)) {\n score += 0.6; // Enhanced boost to preserve code examples in snippets\n }\n\n if (score > bestScore) {\n bestScore = score;\n bestLine = cleaned;\n }\n }\n\n // If we found a line with query terms, use it\n if (bestLine !== null && 
bestLine !== '' && bestScore > 0) {\n if (bestLine.length > 150) {\n const firstSentence = bestLine.match(/^[^.!?]+[.!?]/);\n if (firstSentence && firstSentence[0].length >= 20 && firstSentence[0].length <= 150) {\n return firstSentence[0].trim();\n }\n return `${bestLine.substring(0, 147)}...`;\n }\n return bestLine;\n }\n\n // Fallback: first meaningful line (original logic)\n for (const line of lines) {\n const cleaned = line.trim();\n if (shouldSkip(cleaned) || !isMeaningful(cleaned)) continue;\n\n if (cleaned.length > 150) {\n const firstSentence = cleaned.match(/^[^.!?]+[.!?]/);\n if (firstSentence && firstSentence[0].length >= 20 && firstSentence[0].length <= 150) {\n return firstSentence[0].trim();\n }\n return `${cleaned.substring(0, 147)}...`;\n }\n\n return cleaned;\n }\n\n return 'Code related to query';\n }\n\n private generateRelevanceReason(result: SearchResult, query: string): string {\n const queryTerms = query\n .toLowerCase()\n .split(/\\s+/)\n .filter((t) => t.length > 2);\n const contentLower = result.content.toLowerCase();\n\n const matchedTerms = queryTerms.filter((term) => contentLower.includes(term));\n\n if (matchedTerms.length > 0) {\n return `Matches: ${matchedTerms.join(', ')}`;\n }\n\n return 'Semantically similar to query';\n }\n\n private extractInterfaces(content: string): string[] {\n const interfaces: string[] = [];\n const matches = content.matchAll(/interface\\s+(\\w+)/g);\n for (const match of matches) {\n if (match[1] !== undefined && match[1] !== '') interfaces.push(match[1]);\n }\n return interfaces;\n }\n\n private extractImports(content: string): string[] {\n const imports: string[] = [];\n const matches = content.matchAll(/import\\s+.*?from\\s+['\"]([^'\"]+)['\"]/g);\n for (const match of matches) {\n if (match[1] !== undefined && match[1] !== '') imports.push(match[1]);\n }\n return imports.slice(0, 5); // Top 5\n }\n\n private extractConcepts(content: string, _query: string): string[] {\n // TODO: Use _query parameter to prioritize query-related concepts in future enhancement\n\n // Common stopwords to filter out\n const stopwords = new Set([\n 'this',\n 'that',\n 'these',\n 'those',\n 'from',\n 'with',\n 'have',\n 'will',\n 'would',\n 'should',\n 'could',\n 'about',\n 'been',\n 'were',\n 'being',\n 'function',\n 'return',\n 'const',\n 'import',\n 'export',\n 'default',\n 'type',\n 'interface',\n 'class',\n 'extends',\n 'implements',\n 'async',\n 'await',\n 'then',\n 'catch',\n 'throw',\n 'error',\n 'undefined',\n 'null',\n 'true',\n 'false',\n 'void',\n 'number',\n 'string',\n 'boolean',\n 'object',\n 'array',\n 'promise',\n 'callback',\n 'resolve',\n 'reject',\n 'value',\n 'param',\n 'params',\n 'args',\n 'props',\n 'options',\n 'config',\n 'data',\n ]);\n\n // Simple keyword extraction\n const words = content.toLowerCase().match(/\\b[a-z]{4,}\\b/g) ?? [];\n const frequency = new Map<string, number>();\n\n for (const word of words) {\n // Skip stopwords\n if (stopwords.has(word)) continue;\n\n frequency.set(word, (frequency.get(word) ?? 
0) + 1);\n }\n\n return Array.from(frequency.entries())\n .sort((a, b) => b[1] - a[1])\n .slice(0, 5)\n .map(([word]) => word);\n }\n\n private extractDocumentation(content: string): string {\n const docMatch = content.match(/\\/\\*\\*([\\s\\S]*?)\\*\\//);\n if (docMatch?.[1] !== undefined && docMatch[1] !== '') {\n return docMatch[1]\n .split('\\n')\n .map((line) => line.replace(/^\\s*\\*\\s?/, '').trim())\n .filter((line) => line.length > 0)\n .join('\\n');\n }\n return '';\n }\n\n /**\n * Get usage stats from code graph.\n * Returns default values if no graph is available.\n */\n private getUsageFromGraph(\n graph: CodeGraph | null,\n filePath: string,\n symbolName: string\n ): { calledBy: number; calls: number } {\n if (!graph || symbolName === '' || symbolName === '(anonymous)') {\n return { calledBy: 0, calls: 0 };\n }\n\n const nodeId = `${filePath}:${symbolName}`;\n return {\n calledBy: graph.getCalledByCount(nodeId),\n calls: graph.getCallsCount(nodeId),\n };\n }\n\n /**\n * Get related code from graph.\n * Returns callers and callees for the symbol.\n */\n private getRelatedCodeFromGraph(\n graph: CodeGraph | null,\n filePath: string,\n symbolName: string\n ): Array<{ file: string; summary: string; relationship: string }> {\n if (!graph || symbolName === '' || symbolName === '(anonymous)') {\n return [];\n }\n\n const nodeId = `${filePath}:${symbolName}`;\n const related: Array<{ file: string; summary: string; relationship: string }> = [];\n\n // Get callers (incoming edges)\n const incoming = graph.getIncomingEdges(nodeId);\n for (const edge of incoming) {\n if (edge.type === 'calls') {\n // Parse file:symbol from edge.from\n const [file, symbol] = this.parseNodeId(edge.from);\n related.push({\n file,\n summary: symbol ? `${symbol}()` : 'unknown',\n relationship: 'calls this',\n });\n }\n }\n\n // Get callees (outgoing edges)\n const outgoing = graph.getEdges(nodeId);\n for (const edge of outgoing) {\n if (edge.type === 'calls') {\n // Parse file:symbol from edge.to\n const [file, symbol] = this.parseNodeId(edge.to);\n related.push({\n file,\n summary: symbol ? `${symbol}()` : 'unknown',\n relationship: 'called by this',\n });\n }\n }\n\n // Limit to top 10 related items\n return related.slice(0, 10);\n }\n\n /**\n * Parse a node ID into file path and symbol name.\n */\n private parseNodeId(nodeId: string): [string, string] {\n const lastColon = nodeId.lastIndexOf(':');\n if (lastColon === -1) {\n return [nodeId, ''];\n }\n return [nodeId.substring(0, lastColon), nodeId.substring(lastColon + 1)];\n }\n}\n","import { readFile, access } from 'node:fs/promises';\nimport { resolve, isAbsolute, join } from 'node:path';\nimport { ProjectRootService } from './project-root.service.js';\nimport {\n StoreDefinitionsConfigSchema,\n DEFAULT_STORE_DEFINITIONS_CONFIG,\n} from '../types/store-definition.js';\nimport { atomicWriteFile } from '../utils/atomic-write.js';\nimport type { StoreDefinitionsConfig, StoreDefinition } from '../types/store-definition.js';\n\n/**\n * Check if a file exists\n */\nasync function fileExists(path: string): Promise<boolean> {\n try {\n await access(path);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Service for managing git-committable store definitions.\n *\n * Store definitions are saved to `.bluera/bluera-knowledge/stores.config.json`\n * within the project root. 
This file is designed to be committed to version\n * control, allowing teams to share store configurations.\n *\n * The actual store data (vector embeddings, cloned repos) lives in the data\n * directory and should be gitignored.\n */\nexport class StoreDefinitionService {\n private readonly configPath: string;\n private readonly projectRoot: string;\n private config: StoreDefinitionsConfig | null = null;\n\n constructor(projectRoot?: string) {\n this.projectRoot = projectRoot ?? ProjectRootService.resolve();\n this.configPath = join(this.projectRoot, '.bluera/bluera-knowledge/stores.config.json');\n }\n\n /**\n * Load store definitions from config file.\n * Returns empty config if file doesn't exist.\n * Throws on parse/validation errors (fail fast per CLAUDE.md).\n */\n async load(): Promise<StoreDefinitionsConfig> {\n if (this.config !== null) {\n return this.config;\n }\n\n const exists = await fileExists(this.configPath);\n if (!exists) {\n // Deep clone to avoid mutating the shared default\n this.config = {\n ...DEFAULT_STORE_DEFINITIONS_CONFIG,\n stores: [...DEFAULT_STORE_DEFINITIONS_CONFIG.stores],\n };\n return this.config;\n }\n\n const content = await readFile(this.configPath, 'utf-8');\n let parsed: unknown;\n try {\n parsed = JSON.parse(content);\n } catch (error) {\n throw new Error(\n `Failed to parse store definitions at ${this.configPath}: ${\n error instanceof Error ? error.message : String(error)\n }`\n );\n }\n\n const result = StoreDefinitionsConfigSchema.safeParse(parsed);\n if (!result.success) {\n throw new Error(`Invalid store definitions at ${this.configPath}: ${result.error.message}`);\n }\n\n this.config = result.data;\n return this.config;\n }\n\n /**\n * Save store definitions to config file.\n */\n async save(config: StoreDefinitionsConfig): Promise<void> {\n await atomicWriteFile(this.configPath, JSON.stringify(config, null, 2));\n this.config = config;\n }\n\n /**\n * Add a store definition.\n * Throws if a definition with the same name already exists.\n */\n async addDefinition(definition: StoreDefinition): Promise<void> {\n const config = await this.load();\n const existing = config.stores.find((s) => s.name === definition.name);\n if (existing !== undefined) {\n throw new Error(`Store definition \"${definition.name}\" already exists`);\n }\n config.stores.push(definition);\n await this.save(config);\n }\n\n /**\n * Remove a store definition by name.\n * Returns true if removed, false if not found.\n */\n async removeDefinition(name: string): Promise<boolean> {\n const config = await this.load();\n const index = config.stores.findIndex((s) => s.name === name);\n if (index === -1) {\n return false;\n }\n config.stores.splice(index, 1);\n await this.save(config);\n return true;\n }\n\n /**\n * Update an existing store definition.\n * Only updates the provided fields, preserving others.\n * Throws if definition not found.\n */\n async updateDefinition(\n name: string,\n updates: { description?: string; tags?: string[] }\n ): Promise<void> {\n const config = await this.load();\n const index = config.stores.findIndex((s) => s.name === name);\n if (index === -1) {\n throw new Error(`Store definition \"${name}\" not found`);\n }\n\n // Merge updates while preserving type safety\n // We only allow updating common optional fields (description, tags)\n const existing = config.stores[index];\n if (existing === undefined) {\n throw new Error(`Store definition \"${name}\" not found at index ${String(index)}`);\n }\n if (updates.description !== undefined) {\n 
existing.description = updates.description;\n }\n if (updates.tags !== undefined) {\n existing.tags = updates.tags;\n }\n await this.save(config);\n }\n\n /**\n * Get a store definition by name.\n * Returns undefined if not found.\n */\n async getByName(name: string): Promise<StoreDefinition | undefined> {\n const config = await this.load();\n return config.stores.find((s) => s.name === name);\n }\n\n /**\n * Check if any definitions exist.\n */\n async hasDefinitions(): Promise<boolean> {\n const config = await this.load();\n return config.stores.length > 0;\n }\n\n /**\n * Resolve a file store path relative to project root.\n */\n resolvePath(path: string): string {\n if (isAbsolute(path)) {\n return path;\n }\n return resolve(this.projectRoot, path);\n }\n\n /**\n * Get the config file path.\n */\n getConfigPath(): string {\n return this.configPath;\n }\n\n /**\n * Get the project root.\n */\n getProjectRoot(): string {\n return this.projectRoot;\n }\n\n /**\n * Clear the cached config (useful for testing).\n */\n clearCache(): void {\n this.config = null;\n }\n}\n","import { z } from 'zod';\n\n/**\n * Store definition schemas for git-committable configuration.\n *\n * Store definitions capture the essential information needed to recreate\n * a store, without the runtime data (vector embeddings, cloned repos).\n * This allows teams to share store configurations via version control.\n */\n\n// ============================================================================\n// Base Schema\n// ============================================================================\n\n/**\n * Base fields common to all store definitions\n */\nconst BaseStoreDefinitionSchema = z.object({\n name: z.string().min(1, 'Store name is required'),\n description: z.string().optional(),\n tags: z.array(z.string()).optional(),\n});\n\n// ============================================================================\n// File Store Definition\n// ============================================================================\n\n/**\n * File store definition - references a local directory.\n * Path can be relative (resolved against project root) or absolute.\n */\nexport const FileStoreDefinitionSchema = BaseStoreDefinitionSchema.extend({\n type: z.literal('file'),\n path: z.string().min(1, 'Path is required for file stores'),\n});\n\nexport type FileStoreDefinition = z.infer<typeof FileStoreDefinitionSchema>;\n\n// ============================================================================\n// Repo Store Definition\n// ============================================================================\n\n/**\n * Validates git repository URLs, supporting both standard URLs and SCP-style SSH URLs.\n * - Standard URLs: https://github.com/org/repo.git, ssh://git@github.com/org/repo.git\n * - SCP-style SSH: git@github.com:org/repo.git\n */\nconst GitUrlSchema = z.string().refine(\n (val) => {\n // Accept standard URLs (http://, https://, ssh://, git://)\n try {\n new URL(val);\n return true;\n } catch {\n // Accept SCP-style SSH URLs: git@host:org/repo.git or git@host:org/repo\n return /^git@[\\w.-]+:[\\w./-]+$/.test(val);\n }\n },\n { message: 'Must be a valid URL or SSH URL (git@host:path)' }\n);\n\n/**\n * Repo store definition - references a git repository.\n * The repo will be cloned on sync.\n */\nexport const RepoStoreDefinitionSchema = BaseStoreDefinitionSchema.extend({\n type: z.literal('repo'),\n url: GitUrlSchema,\n branch: z.string().optional(),\n depth: z.number().int().positive('Depth must be a positive 
integer').optional(),\n});\n\nexport type RepoStoreDefinition = z.infer<typeof RepoStoreDefinitionSchema>;\n\n// ============================================================================\n// Web Store Definition\n// ============================================================================\n\n/**\n * Web store definition - references a website to crawl.\n * Supports intelligent crawling with natural language instructions.\n */\nexport const WebStoreDefinitionSchema = BaseStoreDefinitionSchema.extend({\n type: z.literal('web'),\n url: z.url('Valid URL is required for web stores'),\n depth: z.number().int().min(0, 'Depth must be non-negative').default(1),\n maxPages: z.number().int().positive('maxPages must be a positive integer').optional(),\n crawlInstructions: z.string().optional(),\n extractInstructions: z.string().optional(),\n});\n\nexport type WebStoreDefinition = z.infer<typeof WebStoreDefinitionSchema>;\n\n// ============================================================================\n// Union Type\n// ============================================================================\n\n/**\n * Discriminated union of all store definition types.\n * Use the `type` field to narrow the type.\n */\nexport const StoreDefinitionSchema = z.discriminatedUnion('type', [\n FileStoreDefinitionSchema,\n RepoStoreDefinitionSchema,\n WebStoreDefinitionSchema,\n]);\n\nexport type StoreDefinition = z.infer<typeof StoreDefinitionSchema>;\n\n// ============================================================================\n// Config Schema\n// ============================================================================\n\n/**\n * Root configuration schema for store definitions.\n * Version field enables future schema migrations.\n */\nexport const StoreDefinitionsConfigSchema = z.object({\n version: z.literal(1),\n stores: z.array(StoreDefinitionSchema),\n});\n\nexport type StoreDefinitionsConfig = z.infer<typeof StoreDefinitionsConfigSchema>;\n\n// ============================================================================\n// Type Guards\n// ============================================================================\n\nexport function isFileStoreDefinition(def: StoreDefinition): def is FileStoreDefinition {\n return def.type === 'file';\n}\n\nexport function isRepoStoreDefinition(def: StoreDefinition): def is RepoStoreDefinition {\n return def.type === 'repo';\n}\n\nexport function isWebStoreDefinition(def: StoreDefinition): def is WebStoreDefinition {\n return def.type === 'web';\n}\n\n// ============================================================================\n// Default Config\n// ============================================================================\n\nexport const DEFAULT_STORE_DEFINITIONS_CONFIG: StoreDefinitionsConfig = {\n version: 1,\n stores: [],\n};\n","import { randomUUID } from 'node:crypto';\nimport { readFile, mkdir, stat, access } from 'node:fs/promises';\nimport { join, resolve } from 'node:path';\nimport { cloneRepository } from '../plugin/git-clone.js';\nimport { createStoreId } from '../types/brands.js';\nimport { ok, err } from '../types/result.js';\nimport { atomicWriteFile } from '../utils/atomic-write.js';\nimport type { GitignoreService } from './gitignore.service.js';\nimport type { StoreDefinitionService } from './store-definition.service.js';\nimport type { StoreId } from '../types/brands.js';\nimport type { Result } from '../types/result.js';\nimport type {\n StoreDefinition,\n FileStoreDefinition,\n RepoStoreDefinition,\n WebStoreDefinition,\n} from 
'../types/store-definition.js';\nimport type { Store, FileStore, RepoStore, WebStore, StoreType } from '../types/store.js';\n\n/**\n * Check if a file exists\n */\nasync function fileExists(path: string): Promise<boolean> {\n try {\n await access(path);\n return true;\n } catch {\n return false;\n }\n}\n\nexport interface CreateStoreInput {\n name: string;\n type: StoreType;\n path?: string | undefined;\n url?: string | undefined;\n description?: string | undefined;\n tags?: string[] | undefined;\n branch?: string | undefined;\n depth?: number | undefined;\n // Web store crawl options\n maxPages?: number | undefined;\n crawlInstructions?: string | undefined;\n extractInstructions?: string | undefined;\n}\n\nexport interface StoreServiceOptions {\n /** Optional definition service for auto-updating git-committable config */\n definitionService?: StoreDefinitionService;\n /** Optional gitignore service for ensuring .gitignore patterns */\n gitignoreService?: GitignoreService;\n /** Optional project root for resolving relative paths */\n projectRoot?: string;\n}\n\nexport interface OperationOptions {\n /** Skip syncing to store definitions (used by stores:sync command) */\n skipDefinitionSync?: boolean;\n}\n\ninterface StoreRegistry {\n stores: Store[];\n}\n\nexport class StoreService {\n private readonly dataDir: string;\n private readonly definitionService: StoreDefinitionService | undefined;\n private readonly gitignoreService: GitignoreService | undefined;\n private readonly projectRoot: string | undefined;\n private registry: StoreRegistry = { stores: [] };\n\n constructor(dataDir: string, options?: StoreServiceOptions) {\n this.dataDir = dataDir;\n this.definitionService = options?.definitionService ?? undefined;\n this.gitignoreService = options?.gitignoreService ?? undefined;\n this.projectRoot = options?.projectRoot ?? undefined;\n }\n\n async initialize(): Promise<void> {\n await mkdir(this.dataDir, { recursive: true });\n await this.loadRegistry();\n }\n\n /**\n * Convert a Store and CreateStoreInput to a StoreDefinition for persistence.\n * Returns undefined for stores that shouldn't be persisted (e.g., local repo stores).\n */\n private createDefinitionFromStore(\n store: Store,\n input: CreateStoreInput\n ): StoreDefinition | undefined {\n // Copy tags array to convert from readonly to mutable\n const tags = store.tags !== undefined ? [...store.tags] : undefined;\n const base = {\n name: store.name,\n description: store.description,\n tags,\n };\n\n switch (store.type) {\n case 'file': {\n const fileStore = store;\n const fileDef: FileStoreDefinition = {\n ...base,\n type: 'file',\n // Use original input path if provided (may be relative), otherwise use normalized\n path: input.path ?? 
fileStore.path,\n };\n return fileDef;\n }\n case 'repo': {\n const repoStore = store;\n // Local repo stores (no URL) are machine-specific; skip definition sync\n if (repoStore.url === undefined) {\n return undefined;\n }\n const repoDef: RepoStoreDefinition = {\n ...base,\n type: 'repo',\n url: repoStore.url,\n branch: repoStore.branch,\n depth: input.depth,\n };\n return repoDef;\n }\n case 'web': {\n const webStore = store;\n const webDef: WebStoreDefinition = {\n ...base,\n type: 'web',\n url: webStore.url,\n depth: webStore.depth,\n maxPages: input.maxPages,\n crawlInstructions: input.crawlInstructions,\n extractInstructions: input.extractInstructions,\n };\n return webDef;\n }\n }\n }\n\n /**\n * Create a StoreDefinition from an existing store (without original input).\n * Used when updating/renaming stores where we don't have the original input.\n * Returns undefined for stores that shouldn't be persisted (e.g., local repo stores).\n */\n private createDefinitionFromExistingStore(store: Store): StoreDefinition | undefined {\n // Copy tags array to convert from readonly to mutable\n const tags = store.tags !== undefined ? [...store.tags] : undefined;\n const base = {\n name: store.name,\n description: store.description,\n tags,\n };\n\n switch (store.type) {\n case 'file': {\n const fileDef: FileStoreDefinition = {\n ...base,\n type: 'file',\n path: store.path,\n };\n return fileDef;\n }\n case 'repo': {\n // Local repo stores (no URL) are machine-specific; skip definition sync\n if (store.url === undefined) {\n return undefined;\n }\n const repoDef: RepoStoreDefinition = {\n ...base,\n type: 'repo',\n url: store.url,\n branch: store.branch,\n depth: store.depth,\n };\n return repoDef;\n }\n case 'web': {\n const webDef: WebStoreDefinition = {\n ...base,\n type: 'web',\n url: store.url,\n depth: store.depth,\n maxPages: store.maxPages,\n crawlInstructions: store.crawlInstructions,\n extractInstructions: store.extractInstructions,\n };\n return webDef;\n }\n }\n }\n\n async create(input: CreateStoreInput, options?: OperationOptions): Promise<Result<Store>> {\n if (!input.name || input.name.trim() === '') {\n return err(new Error('Store name cannot be empty'));\n }\n\n const existing = await this.getByName(input.name);\n if (existing !== undefined) {\n return err(new Error(`Store with name \"${input.name}\" already exists`));\n }\n\n const id = createStoreId(randomUUID());\n const now = new Date();\n\n let store: Store;\n\n switch (input.type) {\n case 'file': {\n if (input.path === undefined) {\n return err(new Error('Path is required for file stores'));\n }\n // Normalize path to absolute path, using projectRoot if available\n const normalizedPath =\n this.projectRoot !== undefined\n ? 
resolve(this.projectRoot, input.path)\n : resolve(input.path);\n // Validate directory exists\n try {\n const stats = await stat(normalizedPath);\n if (!stats.isDirectory()) {\n return err(new Error(`Path is not a directory: ${normalizedPath}`));\n }\n } catch {\n return err(new Error(`Directory does not exist: ${normalizedPath}`));\n }\n store = {\n type: 'file',\n id,\n name: input.name,\n path: normalizedPath,\n description: input.description,\n tags: input.tags,\n status: 'ready',\n createdAt: now,\n updatedAt: now,\n } satisfies FileStore;\n break;\n }\n\n case 'repo': {\n let repoPath = input.path;\n\n // If URL provided, clone it\n if (input.url !== undefined) {\n const cloneDir = join(this.dataDir, 'repos', id);\n const result = await cloneRepository({\n url: input.url,\n targetDir: cloneDir,\n ...(input.branch !== undefined ? { branch: input.branch } : {}),\n depth: input.depth ?? 1,\n });\n\n if (!result.success) {\n return err(result.error);\n }\n repoPath = result.data;\n }\n\n if (repoPath === undefined) {\n return err(new Error('Path or URL required for repo stores'));\n }\n\n // Normalize path to absolute path, using projectRoot if available\n const normalizedRepoPath =\n this.projectRoot !== undefined ? resolve(this.projectRoot, repoPath) : resolve(repoPath);\n\n // Validate local repo path exists (only for local repos without URL)\n if (input.url === undefined) {\n try {\n const stats = await stat(normalizedRepoPath);\n if (!stats.isDirectory()) {\n return err(new Error(`Path is not a directory: ${normalizedRepoPath}`));\n }\n } catch {\n return err(new Error(`Repository path does not exist: ${normalizedRepoPath}`));\n }\n }\n\n store = {\n type: 'repo',\n id,\n name: input.name,\n path: normalizedRepoPath,\n url: input.url,\n branch: input.branch,\n depth: input.depth ?? 1,\n description: input.description,\n tags: input.tags,\n status: 'ready',\n createdAt: now,\n updatedAt: now,\n } satisfies RepoStore;\n break;\n }\n\n case 'web':\n if (input.url === undefined) {\n return err(new Error('URL is required for web stores'));\n }\n store = {\n type: 'web',\n id,\n name: input.name,\n url: input.url,\n depth: input.depth ?? 
1,\n maxPages: input.maxPages,\n crawlInstructions: input.crawlInstructions,\n extractInstructions: input.extractInstructions,\n description: input.description,\n tags: input.tags,\n status: 'ready',\n createdAt: now,\n updatedAt: now,\n } satisfies WebStore;\n break;\n\n default: {\n // Exhaustive check - if this is reached, input.type is invalid\n const invalidType: never = input.type;\n return err(new Error(`Invalid store type: ${String(invalidType)}`));\n }\n }\n\n this.registry.stores.push(store);\n await this.saveRegistry();\n\n // Ensure .gitignore has required patterns\n if (this.gitignoreService !== undefined) {\n await this.gitignoreService.ensureGitignorePatterns();\n }\n\n // Sync to store definitions if service is available and not skipped\n if (this.definitionService !== undefined && options?.skipDefinitionSync !== true) {\n const definition = this.createDefinitionFromStore(store, input);\n // Only add if definition was created (local repo stores return undefined)\n if (definition !== undefined) {\n await this.definitionService.addDefinition(definition);\n }\n }\n\n return ok(store);\n }\n\n async list(type?: StoreType): Promise<Store[]> {\n if (type !== undefined) {\n return Promise.resolve(this.registry.stores.filter((s) => s.type === type));\n }\n return Promise.resolve([...this.registry.stores]);\n }\n\n async get(id: StoreId): Promise<Store | undefined> {\n return Promise.resolve(this.registry.stores.find((s) => s.id === id));\n }\n\n async getByName(name: string): Promise<Store | undefined> {\n return Promise.resolve(this.registry.stores.find((s) => s.name === name));\n }\n\n async getByIdOrName(idOrName: string): Promise<Store | undefined> {\n return Promise.resolve(\n this.registry.stores.find((s) => s.id === idOrName || s.name === idOrName)\n );\n }\n\n async update(\n id: StoreId,\n updates: Partial<Pick<Store, 'name' | 'description' | 'tags'>>,\n options?: OperationOptions\n ): Promise<Result<Store>> {\n const index = this.registry.stores.findIndex((s) => s.id === id);\n if (index === -1) {\n return err(new Error(`Store not found: ${id}`));\n }\n\n const store = this.registry.stores[index];\n if (store === undefined) {\n return err(new Error(`Store not found: ${id}`));\n }\n\n // Validate name is not empty when provided\n if (updates.name?.trim() === '') {\n return err(new Error('Store name cannot be empty'));\n }\n\n // Check for duplicate name when renaming\n const isRenaming = updates.name !== undefined && updates.name !== store.name;\n if (isRenaming) {\n const existing = this.registry.stores.find((s) => s.name === updates.name && s.id !== id);\n if (existing !== undefined) {\n return err(new Error(`Store with name '${updates.name}' already exists`));\n }\n }\n\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n const updated = {\n ...store,\n ...updates,\n updatedAt: new Date(),\n } as Store;\n\n this.registry.stores[index] = updated;\n await this.saveRegistry();\n\n // Sync to store definitions if service is available and not skipped\n if (this.definitionService !== undefined && options?.skipDefinitionSync !== true) {\n if (isRenaming) {\n // When renaming: remove old definition and add new one with updated store data\n await this.definitionService.removeDefinition(store.name);\n const newDefinition = this.createDefinitionFromExistingStore(updated);\n // Only add if store type supports definitions (local repo stores don't)\n if (newDefinition !== undefined) {\n await this.definitionService.addDefinition(newDefinition);\n }\n } else 
{\n // Not renaming: just update description/tags on existing definition\n const defUpdates: { description?: string; tags?: string[] } = {};\n if (updates.description !== undefined) {\n defUpdates.description = updates.description;\n }\n if (updates.tags !== undefined) {\n // Copy tags array to convert from readonly to mutable\n defUpdates.tags = [...updates.tags];\n }\n // Only update if there are actual changes to sync\n if (Object.keys(defUpdates).length > 0) {\n await this.definitionService.updateDefinition(store.name, defUpdates);\n }\n }\n }\n\n return ok(updated);\n }\n\n async delete(id: StoreId, options?: OperationOptions): Promise<Result<void>> {\n const index = this.registry.stores.findIndex((s) => s.id === id);\n if (index === -1) {\n return err(new Error(`Store not found: ${id}`));\n }\n\n const store = this.registry.stores[index];\n if (store === undefined) {\n return err(new Error(`Store not found: ${id}`));\n }\n\n const storeName = store.name;\n this.registry.stores.splice(index, 1);\n await this.saveRegistry();\n\n // Sync to store definitions if service is available and not skipped\n if (this.definitionService !== undefined && options?.skipDefinitionSync !== true) {\n await this.definitionService.removeDefinition(storeName);\n }\n\n return ok(undefined);\n }\n\n private async loadRegistry(): Promise<void> {\n const registryPath = join(this.dataDir, 'stores.json');\n const exists = await fileExists(registryPath);\n\n if (!exists) {\n // First run - create empty registry\n this.registry = { stores: [] };\n await this.saveRegistry();\n return;\n }\n\n // File exists - load it (throws on corruption per CLAUDE.md \"fail early\")\n const content = await readFile(registryPath, 'utf-8');\n try {\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n const data = JSON.parse(content) as { stores: (Store | null)[] };\n this.registry = {\n stores: data.stores\n .filter((s): s is Store => s !== null)\n .map((s) => ({\n ...s,\n id: createStoreId(s.id),\n createdAt: new Date(s.createdAt),\n updatedAt: new Date(s.updatedAt),\n })),\n };\n } catch (error) {\n throw new Error(\n `Failed to parse store registry at ${registryPath}: ${error instanceof Error ? 
error.message : String(error)}`\n );\n }\n }\n\n private async saveRegistry(): Promise<void> {\n const registryPath = join(this.dataDir, 'stores.json');\n await atomicWriteFile(registryPath, JSON.stringify(this.registry, null, 2));\n }\n}\n","import { spawn } from 'node:child_process';\nimport { mkdir } from 'node:fs/promises';\nimport { ok, err } from '../types/result.js';\nimport type { Result } from '../types/result.js';\n\nexport interface CloneOptions {\n url: string;\n targetDir: string;\n branch?: string;\n depth?: number;\n}\n\nexport async function cloneRepository(options: CloneOptions): Promise<Result<string>> {\n const { url, targetDir, branch, depth = 1 } = options;\n\n await mkdir(targetDir, { recursive: true });\n\n const args = ['clone', '--depth', String(depth)];\n if (branch !== undefined) {\n args.push('--branch', branch);\n }\n args.push(url, targetDir);\n\n return new Promise((resolve) => {\n const git = spawn('git', args, { stdio: ['ignore', 'pipe', 'pipe'] });\n\n let stderr = '';\n git.stderr.on('data', (data: Buffer) => {\n stderr += data.toString();\n });\n\n git.on('error', (error: Error) => {\n resolve(err(error));\n });\n\n git.on('close', (code: number | null) => {\n if (code === 0) {\n resolve(ok(targetDir));\n } else {\n resolve(err(new Error(`Git clone failed: ${stderr}`)));\n }\n });\n });\n}\n\nexport function isGitUrl(source: string): boolean {\n return source.startsWith('http://') || source.startsWith('https://') || source.startsWith('git@');\n}\n\nexport function extractRepoName(url: string): string {\n const match = /\\/([^/]+?)(\\.git)?$/.exec(url);\n const name = match?.[1];\n if (name === undefined) {\n return 'repository';\n }\n return name;\n}\n","import { spawn, type ChildProcess } from 'node:child_process';\nimport { randomUUID } from 'node:crypto';\nimport { existsSync } from 'node:fs';\nimport path from 'node:path';\nimport { createInterface, type Interface as ReadlineInterface } from 'node:readline';\nimport { fileURLToPath } from 'node:url';\nimport { ZodError } from 'zod';\nimport { type ParsePythonResult, validateParsePythonResult } from './schemas.js';\nimport { createLogger } from '../logging/index.js';\n\nconst logger = createLogger('python-bridge');\n\n// Re-export for backwards compatibility\nexport type { ParsePythonResult };\n\ninterface PendingRequest {\n resolve: (v: ParsePythonResult) => void;\n reject: (e: Error) => void;\n timeout: NodeJS.Timeout;\n}\n\n/**\n * Get the system Python executable name based on platform.\n * Windows uses 'python', Unix-like systems use 'python3'.\n */\nfunction getPythonExecutable(): string {\n return process.platform === 'win32' ? 
'python' : 'python3';\n}\n\n/**\n * Get the venv Python path based on platform.\n * Windows: .venv/Scripts/python.exe\n * Unix-like: .venv/bin/python3\n */\nfunction getVenvPythonPath(pluginRoot: string): string {\n if (process.platform === 'win32') {\n return path.join(pluginRoot, '.venv', 'Scripts', 'python.exe');\n }\n return path.join(pluginRoot, '.venv', 'bin', 'python3');\n}\n\nexport class PythonBridge {\n private process: ChildProcess | null = null;\n private readonly pending: Map<string, PendingRequest> = new Map();\n private stoppingIntentionally = false;\n private stdoutReadline: ReadlineInterface | null = null;\n private stderrReadline: ReadlineInterface | null = null;\n\n start(): Promise<void> {\n if (this.process) return Promise.resolve();\n\n // Compute absolute path to Python worker using import.meta.url\n // This works both in development (src/) and production (dist/)\n const currentFilePath = fileURLToPath(import.meta.url);\n // Platform-agnostic check: match both /dist/ and \\dist\\ (Windows)\n const distPattern = `${path.sep}dist${path.sep}`;\n const isProduction = currentFilePath.includes(distPattern);\n\n let pythonWorkerPath: string;\n let pythonPath: string;\n\n if (isProduction) {\n // Production: Find dist dir and go to sibling python/ directory\n const distIndex = currentFilePath.indexOf(distPattern);\n const pluginRoot = currentFilePath.substring(0, distIndex);\n pythonWorkerPath = path.join(pluginRoot, 'python', 'ast_worker.py');\n\n // Use venv python if available (installed by check-dependencies.sh hook)\n const venvPython = getVenvPythonPath(pluginRoot);\n pythonPath = existsSync(venvPython) ? venvPython : getPythonExecutable();\n } else {\n // Development: Go up from src/crawl to find python/\n const srcDir = path.dirname(path.dirname(currentFilePath));\n const projectRoot = path.dirname(srcDir);\n pythonWorkerPath = path.join(projectRoot, 'python', 'ast_worker.py');\n\n // Development: Use system python (user manages their own environment)\n pythonPath = getPythonExecutable();\n }\n\n logger.debug(\n { pythonWorkerPath, pythonPath, currentFilePath, isProduction },\n 'Starting Python bridge process'\n );\n\n this.process = spawn(pythonPath, [pythonWorkerPath], {\n stdio: ['pipe', 'pipe', 'pipe'],\n });\n\n // Add error handler for process spawn errors\n this.process.on('error', (err) => {\n logger.error({ error: err.message, stack: err.stack }, 'Python bridge process error');\n this.rejectAllPending(new Error(`Process error: ${err.message}`));\n });\n\n // Add exit handler to detect non-zero exits\n this.process.on('exit', (code, signal) => {\n if (code !== 0 && code !== null) {\n logger.error({ code }, 'Python bridge process exited with non-zero code');\n this.rejectAllPending(new Error(`Process exited with code ${String(code)}`));\n } else if (signal && !this.stoppingIntentionally) {\n // Only log if we didn't intentionally stop the process\n logger.error({ signal }, 'Python bridge process killed with signal');\n this.rejectAllPending(new Error(`Process killed with signal ${signal}`));\n }\n this.process = null;\n this.stoppingIntentionally = false;\n });\n\n // Add stderr logging\n if (this.process.stderr) {\n this.stderrReadline = createInterface({ input: this.process.stderr });\n this.stderrReadline.on('line', (line) => {\n logger.warn({ stderr: line }, 'Python bridge stderr output');\n });\n }\n\n if (this.process.stdout === null) {\n this.process.kill(); // Kill process to prevent zombie\n this.process = null; // Clean up reference\n return 
Promise.reject(new Error('Python bridge process stdout is null'));\n }\n this.stdoutReadline = createInterface({ input: this.process.stdout });\n this.stdoutReadline.on('line', (line) => {\n // Filter out non-JSON lines (Python logging output)\n if (!line.trim().startsWith('{')) {\n return;\n }\n\n try {\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n const response = JSON.parse(line) as {\n id: string;\n error?: { message: string };\n result?: unknown;\n };\n const pending = this.pending.get(response.id);\n if (pending !== undefined) {\n if (response.error !== undefined) {\n clearTimeout(pending.timeout);\n this.pending.delete(response.id);\n pending.reject(new Error(response.error.message));\n } else if (response.result !== undefined) {\n clearTimeout(pending.timeout);\n this.pending.delete(response.id);\n\n // Validate response structure\n try {\n const validated = validateParsePythonResult(response.result);\n pending.resolve(validated);\n } catch (error: unknown) {\n // Log validation failure with original response for debugging\n if (error instanceof ZodError) {\n logger.error(\n {\n issues: error.issues,\n response: JSON.stringify(response.result),\n },\n 'Python bridge response validation failed'\n );\n pending.reject(\n new Error(`Invalid response format from Python bridge: ${error.message}`)\n );\n } else {\n const errorMessage = error instanceof Error ? error.message : String(error);\n logger.error({ error: errorMessage }, 'Response validation error');\n pending.reject(new Error(`Response validation error: ${errorMessage}`));\n }\n }\n }\n // If neither result nor error, leave pending (will timeout)\n }\n } catch (err) {\n logger.error(\n {\n error: err instanceof Error ? err.message : String(err),\n line,\n },\n 'Failed to parse JSON response from Python bridge'\n );\n }\n });\n\n return Promise.resolve();\n }\n\n async parsePython(\n code: string,\n filePath: string,\n timeoutMs: number = 10000\n ): Promise<ParsePythonResult> {\n if (!this.process) await this.start();\n\n const id = randomUUID();\n const request = {\n jsonrpc: '2.0',\n id,\n method: 'parse_python',\n params: { code, filePath },\n };\n\n return new Promise((resolve, reject) => {\n const timeout = setTimeout(() => {\n const pending = this.pending.get(id);\n if (pending) {\n this.pending.delete(id);\n reject(\n new Error(`Python parsing timeout after ${String(timeoutMs)}ms for file: ${filePath}`)\n );\n }\n }, timeoutMs);\n\n this.pending.set(id, {\n resolve,\n reject,\n timeout,\n });\n if (!this.process?.stdin) {\n reject(new Error('Python bridge process not available'));\n return;\n }\n this.process.stdin.write(`${JSON.stringify(request)}\\n`);\n });\n }\n\n stop(): Promise<void> {\n if (!this.process) {\n return Promise.resolve();\n }\n\n return new Promise((resolve) => {\n this.stoppingIntentionally = true;\n this.rejectAllPending(new Error('Python bridge stopped'));\n\n // Close readline interfaces to prevent resource leaks\n if (this.stdoutReadline) {\n this.stdoutReadline.close();\n this.stdoutReadline = null;\n }\n if (this.stderrReadline) {\n this.stderrReadline.close();\n this.stderrReadline = null;\n }\n\n // Wait for process to actually exit before resolving\n const proc = this.process;\n if (proc === null) {\n resolve();\n return;\n }\n\n // Set up exit handler to resolve when process terminates\n const onExit = (): void => {\n resolve();\n };\n proc.once('exit', onExit);\n\n // Send SIGTERM to gracefully stop\n proc.kill();\n\n // Safety timeout in case process doesn't 
exit within 1 second\n setTimeout(() => {\n proc.removeListener('exit', onExit);\n if (this.process === proc) {\n proc.kill('SIGKILL'); // Force kill\n this.process = null;\n }\n resolve();\n }, 1000);\n });\n }\n\n private rejectAllPending(error: Error): void {\n for (const pending of this.pending.values()) {\n clearTimeout(pending.timeout);\n pending.reject(error);\n }\n this.pending.clear();\n }\n}\n","import { z } from 'zod';\n\n// Schema for Python AST parsing response\nconst MethodInfoSchema = z.object({\n name: z.string(),\n async: z.boolean(),\n signature: z.string(),\n startLine: z.number(),\n endLine: z.number(),\n calls: z.array(z.string()),\n});\n\nconst CodeNodeSchema = z.object({\n type: z.enum(['function', 'class']),\n name: z.string(),\n exported: z.boolean(),\n startLine: z.number(),\n endLine: z.number(),\n async: z.boolean().optional(),\n signature: z.string().optional(),\n calls: z.array(z.string()).optional(),\n methods: z.array(MethodInfoSchema).optional(),\n});\n\nconst ImportInfoSchema = z.object({\n source: z.string(),\n imported: z.string(),\n alias: z.string().optional().nullable(),\n});\n\nexport const ParsePythonResultSchema = z.object({\n nodes: z.array(CodeNodeSchema),\n imports: z.array(ImportInfoSchema),\n});\n\nexport type ParsePythonResult = z.infer<typeof ParsePythonResultSchema>;\n\n/**\n * Validates a Python AST parsing response from Python bridge.\n * Throws ZodError if the response doesn't match the expected schema.\n *\n * @param data - Raw data from Python bridge\n * @returns Validated ParsePythonResult\n * @throws {z.ZodError} If validation fails\n */\nexport function validateParsePythonResult(data: unknown): ParsePythonResult {\n return ParsePythonResultSchema.parse(data);\n}\n","import { homedir } from 'node:os';\nimport { join } from 'node:path';\nimport { pipeline, env, type FeatureExtractionPipeline } from '@huggingface/transformers';\n\n// Set cache directory to ~/.cache/huggingface-transformers (outside node_modules)\n// This allows CI caching and prevents model re-downloads on each npm install\nenv.cacheDir = join(homedir(), '.cache', 'huggingface-transformers');\n\nexport class EmbeddingEngine {\n private extractor: FeatureExtractionPipeline | null = null;\n // eslint-disable-next-line @typescript-eslint/prefer-readonly -- mutated in embed()\n private _dimensions: number | null = null;\n private readonly modelName: string;\n private readonly batchSize: number;\n\n constructor(modelName = 'Xenova/all-MiniLM-L6-v2', batchSize = 32) {\n this.modelName = modelName;\n this.batchSize = batchSize;\n }\n\n async initialize(): Promise<void> {\n if (this.extractor !== null) return;\n // @ts-expect-error TS2590: TypeScript can't represent the complex union type from pipeline()\n // This is a known limitation with @huggingface/transformers overloaded signatures\n this.extractor = await pipeline('feature-extraction', this.modelName, {\n dtype: 'fp32',\n });\n }\n\n async embed(text: string): Promise<number[]> {\n if (this.extractor === null) {\n await this.initialize();\n }\n if (this.extractor === null) {\n throw new Error('Failed to initialize embedding model');\n }\n const output = await this.extractor(text, {\n pooling: 'mean',\n normalize: true,\n });\n const result = Array.from(output.data);\n // Cache dimensions from first embedding result\n this._dimensions ??= result.length;\n return result.map((v) => Number(v));\n }\n\n async embedBatch(texts: string[]): Promise<number[][]> {\n const results: number[][] = [];\n\n for (let i = 0; i < 
texts.length; i += this.batchSize) {\n const batch = texts.slice(i, i + this.batchSize);\n\n // Process batch in parallel using Promise.all\n const batchResults = await Promise.all(batch.map((text) => this.embed(text)));\n\n results.push(...batchResults);\n\n // Small delay between batches to prevent memory issues\n if (i + this.batchSize < texts.length) {\n await new Promise((resolve) => setTimeout(resolve, 100));\n }\n }\n\n return results;\n }\n\n /**\n * Get cached embedding dimensions. Throws if embed() hasn't been called yet.\n * Use ensureDimensions() if you need to guarantee dimensions are available.\n */\n getDimensions(): number {\n if (this._dimensions === null) {\n throw new Error('Cannot get dimensions before first embed() call');\n }\n return this._dimensions;\n }\n\n /**\n * Ensure dimensions are available, initializing the model if needed.\n * Returns the embedding dimensions for the current model.\n */\n async ensureDimensions(): Promise<number> {\n if (this._dimensions === null) {\n // Embed empty string to determine dimensions\n await this.embed('');\n }\n if (this._dimensions === null) {\n throw new Error('Failed to determine embedding dimensions');\n }\n return this._dimensions;\n }\n\n /**\n * Dispose the embedding pipeline to free resources.\n * Should be called before process exit to prevent ONNX runtime cleanup issues on macOS.\n */\n async dispose(): Promise<void> {\n if (this.extractor !== null) {\n await this.extractor.dispose();\n this.extractor = null;\n }\n }\n}\n","import * as lancedb from '@lancedb/lancedb';\nimport { createDocumentId } from '../types/brands.js';\nimport { DocumentMetadataSchema } from '../types/document.js';\nimport type { StoreId, DocumentId } from '../types/brands.js';\nimport type { Document, DocumentMetadata } from '../types/document.js';\nimport type { Table, Connection } from '@lancedb/lancedb';\n\ninterface LanceDocument {\n id: string;\n content: string;\n vector: number[];\n metadata: string; // JSON serialized\n [key: string]: unknown;\n}\n\ninterface SearchHit {\n id: string;\n content: string;\n metadata: string;\n _distance: number;\n}\n\nexport class LanceStore {\n private connection: Connection | null = null;\n private readonly tables: Map<string, Table> = new Map();\n private readonly dataDir: string;\n // eslint-disable-next-line @typescript-eslint/prefer-readonly -- set via setDimensions()\n private _dimensions: number | null = null;\n\n constructor(dataDir: string) {\n this.dataDir = dataDir;\n }\n\n /**\n * Set the embedding dimensions. Must be called before initialize().\n * This allows dimensions to be derived from the embedding model at runtime.\n * Idempotent: subsequent calls are ignored if dimensions are already set.\n */\n setDimensions(dimensions: number): void {\n this._dimensions ??= dimensions;\n }\n\n async initialize(storeId: StoreId): Promise<void> {\n if (this._dimensions === null) {\n throw new Error('Dimensions not set. 
Call setDimensions() before initialize().');\n }\n\n this.connection ??= await lancedb.connect(this.dataDir);\n\n const tableName = this.getTableName(storeId);\n const tableNames = await this.connection.tableNames();\n\n if (!tableNames.includes(tableName)) {\n // Create table with initial schema\n const table = await this.connection.createTable(tableName, [\n {\n id: '__init__',\n content: '',\n vector: new Array(this._dimensions).fill(0),\n metadata: '{}',\n },\n ]);\n // Delete the init row\n await table.delete('id = \"__init__\"');\n this.tables.set(tableName, table);\n } else {\n const table = await this.connection.openTable(tableName);\n this.tables.set(tableName, table);\n }\n }\n\n async addDocuments(storeId: StoreId, documents: Document[]): Promise<void> {\n const table = await this.getTable(storeId);\n const lanceDocuments: LanceDocument[] = documents.map((doc) => ({\n id: doc.id,\n content: doc.content,\n vector: [...doc.vector],\n metadata: JSON.stringify(doc.metadata),\n }));\n await table.add(lanceDocuments);\n }\n\n async deleteDocuments(storeId: StoreId, documentIds: DocumentId[]): Promise<void> {\n if (documentIds.length === 0) {\n return;\n }\n const table = await this.getTable(storeId);\n const idList = documentIds.map((id) => `\"${id}\"`).join(', ');\n await table.delete(`id IN (${idList})`);\n }\n\n async clearAllDocuments(storeId: StoreId): Promise<void> {\n const table = await this.getTable(storeId);\n await table.delete('id IS NOT NULL');\n }\n\n async search(\n storeId: StoreId,\n vector: number[],\n limit: number,\n // threshold is kept for API compatibility but filtering is done after normalization\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n _threshold?: number\n ): Promise<\n Array<{ id: DocumentId; content: string; score: number; metadata: DocumentMetadata }>\n > {\n const table = await this.getTable(storeId);\n const query = table.vectorSearch(vector).limit(limit).distanceType('cosine');\n\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n const results = (await query.toArray()) as SearchHit[];\n\n // Return all results - threshold filtering is applied after score normalization\n // in search.service.ts to match displayed scores\n return results.map((r) => {\n const metadata = DocumentMetadataSchema.parse(JSON.parse(r.metadata));\n return {\n id: createDocumentId(r.id),\n content: r.content,\n score: 1 - r._distance,\n // Schema validates structure, cast to branded type\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n metadata: metadata as DocumentMetadata,\n };\n });\n }\n\n async createFtsIndex(storeId: StoreId): Promise<void> {\n const table = await this.getTable(storeId);\n await table.createIndex('content', {\n config: lancedb.Index.fts(),\n });\n }\n\n async fullTextSearch(\n storeId: StoreId,\n query: string,\n limit: number\n ): Promise<\n Array<{ id: DocumentId; content: string; score: number; metadata: DocumentMetadata }>\n > {\n const table = await this.getTable(storeId);\n\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n const results = (await table.search(query, 'fts').limit(limit).toArray()) as Array<{\n id: string;\n content: string;\n metadata: string;\n _score: number;\n }>;\n\n return results.map((r) => {\n const metadata = DocumentMetadataSchema.parse(JSON.parse(r.metadata));\n return {\n id: createDocumentId(r.id),\n content: r.content,\n score: r._score,\n // Schema validates structure, cast to branded type\n // 
eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n metadata: metadata as DocumentMetadata,\n };\n });\n }\n\n async deleteStore(storeId: StoreId): Promise<void> {\n const tableName = this.getTableName(storeId);\n // Connect on-demand - no dimensions needed for listing/dropping tables\n this.connection ??= await lancedb.connect(this.dataDir);\n const tableNames = await this.connection.tableNames();\n if (tableNames.includes(tableName)) {\n await this.connection.dropTable(tableName);\n this.tables.delete(tableName);\n }\n }\n\n close(): void {\n this.tables.clear();\n if (this.connection !== null) {\n this.connection.close();\n this.connection = null;\n }\n }\n\n /**\n * Async close for API consistency. Calls sync close() internally.\n * Do NOT call process.exit() after this - let the event loop drain\n * naturally so native threads can complete cleanup.\n */\n closeAsync(): Promise<void> {\n this.close();\n return Promise.resolve();\n }\n\n private getTableName(storeId: StoreId): string {\n return `documents_${storeId}`;\n }\n\n private async getTable(storeId: StoreId): Promise<Table> {\n const tableName = this.getTableName(storeId);\n let table = this.tables.get(tableName);\n if (table === undefined) {\n await this.initialize(storeId);\n table = this.tables.get(tableName);\n }\n if (table === undefined) {\n throw new Error(`Table not found for store: ${storeId}`);\n }\n return table;\n }\n}\n","import { z } from 'zod';\nimport type { DocumentId, StoreId } from './brands.js';\n\n// ============================================================================\n// Zod Schemas\n// ============================================================================\n\nexport const DocumentTypeSchema = z.enum(['file', 'chunk', 'web']);\n\nexport const DocumentMetadataSchema = z\n .object({\n path: z.string().optional(),\n url: z.string().optional(),\n type: DocumentTypeSchema,\n storeId: z.string(),\n indexedAt: z.string(), // ISO 8601 string (what JSON serialization produces)\n fileHash: z.string().optional(),\n chunkIndex: z.number().optional(),\n totalChunks: z.number().optional(),\n })\n .loose(); // Allow additional fields per index signature\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport type DocumentType = z.infer<typeof DocumentTypeSchema>;\n\nexport interface DocumentMetadata {\n readonly path?: string | undefined;\n readonly url?: string | undefined;\n readonly type: DocumentType;\n readonly storeId: StoreId;\n readonly indexedAt: string; // ISO 8601 string\n readonly fileHash?: string | undefined;\n readonly chunkIndex?: number | undefined;\n readonly totalChunks?: number | undefined;\n readonly [key: string]: unknown;\n}\n\nexport interface Document {\n readonly id: DocumentId;\n readonly content: string;\n readonly vector: readonly number[];\n readonly metadata: DocumentMetadata;\n}\n\nexport interface DocumentChunk {\n readonly id: DocumentId;\n readonly content: string;\n readonly startLine?: number | undefined;\n readonly endLine?: number | undefined;\n readonly metadata: DocumentMetadata;\n}\n","import { CodeGraphService } from './code-graph.service.js';\nimport { ConfigService } from './config.service.js';\nimport { GitignoreService } from './gitignore.service.js';\nimport { IndexService } from './index.service.js';\nimport { ManifestService } from './manifest.service.js';\nimport { SearchService } from 
'./search.service.js';\nimport { StoreDefinitionService } from './store-definition.service.js';\nimport { StoreService } from './store.service.js';\nimport { PythonBridge } from '../crawl/bridge.js';\nimport { EmbeddingEngine } from '../db/embeddings.js';\nimport { LanceStore } from '../db/lance.js';\nimport { createLogger, shutdownLogger } from '../logging/index.js';\nimport type { StoreServiceOptions } from './store.service.js';\nimport type { AppConfig } from '../types/config.js';\n\nconst logger = createLogger('services');\n\nexport { ConfigService } from './config.service.js';\nexport { StoreService } from './store.service.js';\nexport { SearchService } from './search.service.js';\nexport { IndexService } from './index.service.js';\nexport { JobService } from './job.service.js';\nexport { WatchService } from './watch.service.js';\nexport { ChunkingService } from './chunking.service.js';\nexport { CodeGraphService } from './code-graph.service.js';\n\nexport interface ServiceContainer {\n config: ConfigService;\n store: StoreService;\n search: SearchService;\n index: IndexService;\n lance: LanceStore;\n embeddings: EmbeddingEngine;\n codeGraph: CodeGraphService;\n pythonBridge: PythonBridge;\n manifest: ManifestService;\n}\n\n/**\n * Lazy service container that defers heavy initialization until first use.\n *\n * Initialization strategy:\n * - Eager (lightweight): config, store, lance (wrapper only), pythonBridge (started for fork safety)\n * - Lazy (heavy): embeddings (3-10s model load), search, index, codeGraph\n *\n * IMPORTANT: PythonBridge must be started BEFORE lancedb.connect() is called.\n * LanceDB's native Rust code is not fork-safe - spawning subprocesses after\n * lancedb is loaded corrupts the mutex state, causing crashes on shutdown.\n */\nexport class LazyServiceContainer implements ServiceContainer {\n // Eagerly initialized (lightweight)\n readonly config: ConfigService;\n readonly store: StoreService;\n readonly lance: LanceStore;\n readonly pythonBridge: PythonBridge;\n\n // Configuration for lazy initialization\n private readonly appConfig: AppConfig;\n private readonly dataDir: string;\n\n // Lazily initialized (heavy)\n // eslint-disable-next-line @typescript-eslint/prefer-readonly -- mutated in lazy getter\n private _manifest: ManifestService | null = null;\n private _embeddings: EmbeddingEngine | null = null;\n private _codeGraph: CodeGraphService | null = null;\n private _search: SearchService | null = null;\n private _index: IndexService | null = null;\n\n constructor(\n config: ConfigService,\n appConfig: AppConfig,\n dataDir: string,\n store: StoreService,\n lance: LanceStore,\n pythonBridge: PythonBridge\n ) {\n this.config = config;\n this.appConfig = appConfig;\n this.dataDir = dataDir;\n this.store = store;\n this.lance = lance;\n this.pythonBridge = pythonBridge;\n }\n\n /**\n * EmbeddingEngine is lazily created on first access.\n * Model loading (3-10s) is deferred until embed() is called.\n */\n get embeddings(): EmbeddingEngine {\n if (this._embeddings === null) {\n logger.debug('Lazy-initializing EmbeddingEngine');\n this._embeddings = new EmbeddingEngine(\n this.appConfig.embedding.model,\n this.appConfig.embedding.batchSize\n );\n }\n return this._embeddings;\n }\n\n /**\n * CodeGraphService is lazily created on first access.\n */\n get codeGraph(): CodeGraphService {\n if (this._codeGraph === null) {\n logger.debug('Lazy-initializing CodeGraphService');\n this._codeGraph = new CodeGraphService(this.dataDir, this.pythonBridge);\n }\n return 
this._codeGraph;\n }\n\n /**\n * SearchService is lazily created on first access.\n */\n get search(): SearchService {\n if (this._search === null) {\n logger.debug('Lazy-initializing SearchService');\n this._search = new SearchService(\n this.lance,\n this.embeddings,\n this.codeGraph,\n this.appConfig.search\n );\n }\n return this._search;\n }\n\n /**\n * IndexService is lazily created on first access.\n */\n get index(): IndexService {\n if (this._index === null) {\n logger.debug('Lazy-initializing IndexService');\n this._index = new IndexService(this.lance, this.embeddings, {\n codeGraphService: this.codeGraph,\n manifestService: this.manifest,\n chunkSize: this.appConfig.indexing.chunkSize,\n chunkOverlap: this.appConfig.indexing.chunkOverlap,\n concurrency: this.appConfig.indexing.concurrency,\n ignorePatterns: this.appConfig.indexing.ignorePatterns,\n });\n }\n return this._index;\n }\n\n /**\n * ManifestService is lazily created on first access.\n */\n get manifest(): ManifestService {\n if (this._manifest === null) {\n logger.debug('Lazy-initializing ManifestService');\n this._manifest = new ManifestService(this.dataDir);\n }\n return this._manifest;\n }\n\n /**\n * Check if embeddings have been initialized (for cleanup purposes).\n */\n get hasEmbeddings(): boolean {\n return this._embeddings !== null;\n }\n\n /**\n * Check if search service has been initialized (for cleanup purposes).\n */\n get hasSearch(): boolean {\n return this._search !== null;\n }\n}\n\n/**\n * Create lazy service container for MCP server.\n *\n * This defers heavy initialization (embeddings model loading) until first use,\n * reducing MCP server startup time from ~5s to <500ms.\n *\n * PythonBridge is started eagerly to avoid fork-safety issues with LanceDB.\n */\nexport async function createLazyServices(\n configPath?: string,\n dataDir?: string,\n projectRoot?: string\n): Promise<LazyServiceContainer> {\n logger.info({ configPath, dataDir, projectRoot }, 'Initializing lazy services');\n const startTime = Date.now();\n\n const config = new ConfigService(configPath, dataDir, projectRoot);\n const appConfig = await config.load();\n const resolvedDataDir = config.resolveDataDir();\n\n // IMPORTANT: Start PythonBridge BEFORE creating LanceStore.\n // LanceDB's native Rust code is not fork-safe. 
Spawning subprocesses after\n // lancedb is loaded corrupts the mutex state, causing crashes on shutdown.\n const pythonBridge = new PythonBridge();\n await pythonBridge.start();\n\n // Now safe to create LanceStore wrapper (doesn't connect until initialize() is called)\n const lance = new LanceStore(resolvedDataDir);\n\n // Create project-root-dependent services using resolved project root\n const resolvedProjectRoot = config.resolveProjectRoot();\n const definitionService = new StoreDefinitionService(resolvedProjectRoot);\n const gitignoreService = new GitignoreService(resolvedProjectRoot);\n const storeOptions: StoreServiceOptions = {\n definitionService,\n gitignoreService,\n projectRoot: resolvedProjectRoot,\n };\n\n const store = new StoreService(resolvedDataDir, storeOptions);\n await store.initialize();\n\n const durationMs = Date.now() - startTime;\n logger.info(\n { dataDir: resolvedDataDir, projectRoot: resolvedProjectRoot, durationMs },\n 'Lazy services initialized'\n );\n\n return new LazyServiceContainer(config, appConfig, resolvedDataDir, store, lance, pythonBridge);\n}\n\n/**\n * Create services with eager initialization (for CLI commands).\n *\n * This initializes all services including the embedding model upfront.\n * Use createLazyServices() for MCP server to reduce startup time.\n */\nexport async function createServices(\n configPath?: string,\n dataDir?: string,\n projectRoot?: string\n): Promise<ServiceContainer> {\n logger.info({ configPath, dataDir, projectRoot }, 'Initializing services');\n\n const config = new ConfigService(configPath, dataDir, projectRoot);\n const appConfig = await config.load();\n const resolvedDataDir = config.resolveDataDir();\n\n // IMPORTANT: Start PythonBridge BEFORE creating LanceStore.\n // LanceDB's native Rust code is not fork-safe. 
Spawning subprocesses after\n // lancedb is loaded corrupts the mutex state, causing crashes on shutdown.\n const pythonBridge = new PythonBridge();\n await pythonBridge.start();\n\n // Now safe to initialize lancedb and other services\n const lance = new LanceStore(resolvedDataDir);\n const embeddings = new EmbeddingEngine(appConfig.embedding.model, appConfig.embedding.batchSize);\n\n await embeddings.initialize();\n\n // Create project-root-dependent services using resolved project root\n const resolvedProjectRoot = config.resolveProjectRoot();\n const definitionService = new StoreDefinitionService(resolvedProjectRoot);\n const gitignoreService = new GitignoreService(resolvedProjectRoot);\n const storeOptions: StoreServiceOptions = {\n definitionService,\n gitignoreService,\n projectRoot: resolvedProjectRoot,\n };\n\n const store = new StoreService(resolvedDataDir, storeOptions);\n await store.initialize();\n\n const codeGraph = new CodeGraphService(resolvedDataDir, pythonBridge);\n const manifest = new ManifestService(resolvedDataDir);\n const search = new SearchService(lance, embeddings, codeGraph, appConfig.search);\n const index = new IndexService(lance, embeddings, {\n codeGraphService: codeGraph,\n manifestService: manifest,\n chunkSize: appConfig.indexing.chunkSize,\n chunkOverlap: appConfig.indexing.chunkOverlap,\n concurrency: appConfig.indexing.concurrency,\n ignorePatterns: appConfig.indexing.ignorePatterns,\n });\n\n logger.info(\n { dataDir: resolvedDataDir, projectRoot: resolvedProjectRoot },\n 'Services initialized successfully'\n );\n\n return {\n config,\n store,\n search,\n index,\n lance,\n embeddings,\n codeGraph,\n pythonBridge,\n manifest,\n };\n}\n\n/**\n * Cleanly shut down all services, stopping background processes.\n * Call this after CLI commands complete to allow the process to exit.\n * Attempts all cleanup operations and throws if any fail.\n *\n * For LazyServiceContainer, only disposes embeddings if they were initialized.\n */\nexport async function destroyServices(services: ServiceContainer): Promise<void> {\n logger.info('Shutting down services');\n const errors: Error[] = [];\n\n // IMPORTANT: Shutdown in reverse order of initialization (LIFO).\n // PythonBridge must stop BEFORE LanceStore closes to avoid mutex corruption.\n // LanceDB's native Rust code is not fork-safe and has threading issues\n // if subprocess signals are sent while lancedb is shutting down.\n\n // 0. Clean up SearchService event subscriptions (no async, just unsubscribe)\n // Skip for lazy containers where search was never accessed (avoids triggering initialization)\n const isLazyContainer = services instanceof LazyServiceContainer;\n const shouldCleanupSearch = !isLazyContainer || services.hasSearch;\n\n if (shouldCleanupSearch) {\n services.search.cleanup();\n } else {\n logger.debug('Skipping search cleanup (not initialized)');\n }\n\n // 1. Stop Python bridge first (reverse of init: started first, stopped first)\n try {\n await services.pythonBridge.stop();\n } catch (e) {\n const error = e instanceof Error ? e : new Error(String(e));\n logger.error({ error }, 'Error stopping Python bridge');\n errors.push(error);\n }\n\n // 2. Dispose embedding engine (only if initialized for lazy containers)\n const shouldDisposeEmbeddings = !isLazyContainer || services.hasEmbeddings;\n\n if (shouldDisposeEmbeddings) {\n try {\n await services.embeddings.dispose();\n } catch (e) {\n const error = e instanceof Error ? 
e : new Error(String(e));\n logger.error({ error }, 'Error disposing EmbeddingEngine');\n errors.push(error);\n }\n } else {\n logger.debug('Skipping embeddings disposal (not initialized)');\n }\n\n // 3. Close LanceStore last (reverse of init: created after PythonBridge started)\n try {\n await services.lance.closeAsync();\n } catch (e) {\n const error = e instanceof Error ? e : new Error(String(e));\n logger.error({ error }, 'Error closing LanceStore');\n errors.push(error);\n }\n\n await shutdownLogger();\n\n // Throw if any errors occurred during cleanup\n if (errors.length === 1 && errors[0] !== undefined) {\n throw new Error(`Service shutdown failed: ${errors[0].message}`, { cause: errors[0] });\n } else if (errors.length > 1) {\n throw new AggregateError(errors, 'Multiple errors during service shutdown');\n }\n}\n"],"mappings":";;;;;;;;;;;;AAsBO,IAAM,kBAAN,MAAM,iBAAgB;AAAA,EAC3B,OAAe;AAAA;AAAA,EAGE,uBAAuB,oBAAI,IAA6B;AAAA;AAAA,EAGxD,sBAAsB,oBAAI,IAA6B;AAAA,EAEhE,cAAc;AAAA,EAEtB;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,cAA+B;AACpC,qBAAgB,aAAa,IAAI,iBAAgB;AACjD,WAAO,iBAAgB;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,gBAAsB;AAC3B,qBAAgB,WAAW;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,SAAS,SAAgC;AAEvC,QAAI,KAAK,qBAAqB,IAAI,QAAQ,UAAU,GAAG;AACrD;AAAA,IACF;AAGA,eAAW,OAAO,QAAQ,YAAY;AACpC,YAAM,gBAAgB,KAAK,mBAAmB,GAAG;AACjD,YAAM,kBAAkB,KAAK,oBAAoB,IAAI,aAAa;AAClE,UAAI,oBAAoB,QAAW;AACjC,cAAM,IAAI;AAAA,UACR,cAAc,aAAa,uCAAuC,gBAAgB,UAAU;AAAA,QAC9F;AAAA,MACF;AAAA,IACF;AAGA,SAAK,qBAAqB,IAAI,QAAQ,YAAY,OAAO;AAGzD,eAAW,OAAO,QAAQ,YAAY;AACpC,YAAM,gBAAgB,KAAK,mBAAmB,GAAG;AACjD,WAAK,oBAAoB,IAAI,eAAe,OAAO;AAAA,IACrD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,WAAW,YAA6B;AACtC,UAAM,UAAU,KAAK,qBAAqB,IAAI,UAAU;AACxD,QAAI,YAAY,QAAW;AACzB,aAAO;AAAA,IACT;AAGA,SAAK,qBAAqB,OAAO,UAAU;AAG3C,eAAW,OAAO,QAAQ,YAAY;AACpC,YAAM,gBAAgB,KAAK,mBAAmB,GAAG;AACjD,WAAK,oBAAoB,OAAO,aAAa;AAAA,IAC/C;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,eAAe,KAA0C;AACvD,UAAM,gBAAgB,KAAK,mBAAmB,GAAG;AACjD,WAAO,KAAK,oBAAoB,IAAI,aAAa;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,gBAAgB,YAAiD;AAC/D,WAAO,KAAK,qBAAqB,IAAI,UAAU;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,iBAAoC;AAClC,WAAO,MAAM,KAAK,KAAK,qBAAqB,OAAO,CAAC;AAAA,EACtD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,aAAa,KAAsB;AACjC,UAAM,gBAAgB,KAAK,mBAAmB,GAAG;AACjD,WAAO,KAAK,oBAAoB,IAAI,aAAa;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,KAAqB;AAC9C,WAAO,IAAI,WAAW,GAAG,IAAI,MAAM,IAAI,GAAG;AAAA,EAC5C;AACF;;;AChJA,SAAS,WAAW,cAAAA,mBAAkB;AACtC,SAAS,QAAAC,aAAY;AACrB,OAAO,UAA+C;;;ACZtD,SAAS,YAAY,UAAU,oBAAoB;AACnD,SAAS,SAAS,MAAM,WAAW,WAAW;AAiBvC,IAAM,qBAAN,MAAyB;AAAA;AAAA;AAAA;AAAA,EAI9B,OAAO,QAAQ,SAAsC;AAEnD,QAAI,SAAS,gBAAgB,UAAa,QAAQ,gBAAgB,IAAI;AACpE,aAAO,KAAK,UAAU,QAAQ,WAAW;AAAA,IAC3C;AAGA,UAAM,iBAAiB,QAAQ,IAAI,cAAc;AACjD,QAAI,mBAAmB,UAAa,mBAAmB,IAAI;AACzD,aAAO,KAAK,UAAU,cAAc;AAAA,IACtC;AAGA,UAAM,UAAU,KAAK,YAAY,QAAQ,IAAI,CAAC;AAC9C,QAAI,YAAY,MAAM;AACpB,aAAO;AAAA,IACT;AAGA,UAAM,SAAS,QAAQ,IAAI,KAAK;AAChC,QAAI,WAAW,UAAa,WAAW,IAAI;AACzC,aAAO,KAAK,UAAU,MAAM;AAAA,IAC9B;AAGA,WAAO,QAAQ,IAAI;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,YAAY,WAAkC;AACnD,QAAI,cAAc,UAAU,SAAS;AACrC,UAAM,OAAO,UAAU,GAAG;AAG1B,WAAO,gBAAgB,MAAM;AAC3B,YAAM,UAAU,KAAK,aAAa,MAAM;AAExC,UAAI,WAAW,OAAO,GAAG;AACvB,YAAI;AACF,gBAAM,QAAQ,SAAS,OAAO;AAE9B,cAAI,MAAM,YAAY,KAAK,MAAM,OAAO,GAAG;AACzC,mBAAO;AAAA,UACT;AAAA,QACF,QAAQ;AAAA,QAER;AAAA,MACF;AAGA,YAAM,aAAa,QAAQ,WAAW;AACtC,UAAI,eAAe,aAAa;AAE9B;AAAA,MACF;AACA,oBAAc;AAAA,IAChB;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,UAAUC,OAAsB;AACrC,QAAI;AAEF,YAAM,WAAW,aAAaA,KAAI;AA
ElC,aAAO,UAAU,QAAQ;AAAA,IAC3B,QAAQ;AAEN,aAAO,UAAUA,KAAI;AAAA,IACvB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,SAASA,OAAuB;AACrC,QAAI;AACF,YAAM,QAAQ,SAASA,KAAI;AAC3B,aAAO,MAAM,YAAY;AAAA,IAC3B,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AACF;;;AD7FA,IAAM,eAAoC,CAAC,SAAS,SAAS,QAAQ,QAAQ,SAAS,OAAO;AAC7F,IAAM,mBAAwC,IAAI,IAAI,YAAY;AAGlE,SAAS,YAAoB;AAC3B,QAAM,cAAc,mBAAmB,QAAQ;AAC/C,SAAOC,MAAK,aAAa,WAAW,oBAAoB,MAAM;AAChE;AAGA,SAAS,eAAuB;AAC9B,QAAM,SAAS,UAAU;AACzB,MAAI,CAACC,YAAW,MAAM,GAAG;AACvB,cAAU,QAAQ,EAAE,WAAW,KAAK,CAAC;AAAA,EACvC;AACA,SAAO;AACT;AAGA,SAAS,gBAAgB,OAAkC;AACzD,SAAO,iBAAiB,IAAI,KAAK;AACnC;AAGA,SAAS,cAAwB;AAC/B,QAAM,QAAQ,QAAQ,IAAI,WAAW,GAAG,YAAY;AAEpD,MAAI,UAAU,UAAa,UAAU,IAAI;AACvC,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,gBAAgB,KAAK,GAAG;AAC3B,UAAM,IAAI,MAAM,uBAAuB,KAAK,oBAAoB,aAAa,KAAK,IAAI,CAAC,EAAE;AAAA,EAC3F;AAEA,SAAO;AACT;AAGA,IAAI,aAA4B;AAGhC,SAAS,mBAA2B;AAClC,MAAI,eAAe,MAAM;AACvB,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,aAAa;AAC5B,QAAM,UAAUD,MAAK,QAAQ,SAAS;AACtC,QAAM,QAAQ,YAAY;AAE1B,QAAM,UAAyB;AAAA,IAC7B;AAAA,IACA,WAAW,KAAK,iBAAiB;AAAA,IACjC,YAAY;AAAA,MACV,OAAO,CAAC,WAAW,EAAE,OAAO,MAAM;AAAA,IACpC;AAAA,IACA,WAAW;AAAA,MACT,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,MAAM;AAAA,QACN,MAAM;AAAA;AAAA,QACN,OAAO,EAAE,OAAO,EAAE;AAAA;AAAA,QAClB,OAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAEA,eAAa,KAAK,OAAO;AACzB,SAAO;AACT;AAYO,SAAS,aAAa,QAAwB;AACnD,QAAM,OAAO,iBAAiB;AAC9B,SAAO,KAAK,MAAM,EAAE,OAAO,CAAC;AAC9B;AAYO,SAAS,eAAe,OAA0B;AACvD,QAAM,eAAe,YAAY;AACjC,QAAM,eAAe,aAAa,QAAQ,YAAY;AACtD,QAAM,aAAa,aAAa,QAAQ,KAAK;AAC7C,SAAO,cAAc;AACvB;AAKO,SAAS,kBAA0B;AACxC,SAAO,UAAU;AACnB;AAKO,SAAS,iBAAgC;AAC9C,SAAO,IAAI,QAAQ,CAACE,aAAY;AAC9B,QAAI,eAAe,MAAM;AACvB,iBAAW,MAAM;AAEjB,iBAAW,MAAM;AACf,qBAAa;AACb,QAAAA,SAAQ;AAAA,MACV,GAAG,GAAG;AAAA,IACR,OAAO;AACL,MAAAA,SAAQ;AAAA,IACV;AAAA,EACF,CAAC;AACH;;;AEzIA,SAAS,kBAAkB;AAC3B,SAAS,eAAe,aAAAC,YAAW,cAAAC,mBAAkB;AACrD,SAAS,QAAAC,aAAY;AAIrB,IAAM,qBAAqB;AAG3B,IAAM,yBAAyB;AAe/B,SAAS,gBAAwB;AAC/B,QAAM,MAAMC,MAAK,gBAAgB,GAAG,SAAS;AAC7C,MAAI,CAACC,YAAW,GAAG,GAAG;AACpB,IAAAC,WAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,EACpC;AACA,SAAO;AACT;AAGA,SAAS,aAAa,YAA4B;AAChD,SAAO,WAAW,QAAQ,kBAAkB,GAAG,EAAE,UAAU,GAAG,EAAE;AAClE;AAuBO,SAAS,iBACd,SACA,MACA,YACA,WAAoB,eAAe,OAAO,GAC1B;AAChB,QAAM,YAAY,OAAO,WAAW,SAAS,MAAM;AACnD,QAAM,OAAO,WAAW,KAAK,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK,EAAE,UAAU,GAAG,EAAE;AAC5E,QAAM,UAAU,eAAe,SAAS,kBAAkB;AAE1D,QAAM,cAAc,EAAE,SAAS,WAAW,KAAK;AAG/C,MAAI,YAAY,YAAY,wBAAwB;AAClD,UAAM,aAAY,oBAAI,KAAK,GAAE,YAAY,EAAE,QAAQ,SAAS,GAAG;AAC/D,UAAM,SAAS,aAAa,UAAU;AACtC,UAAM,WAAW,GAAG,SAAS,IAAI,IAAI,IAAI,MAAM,IAAI,IAAI;AACvD,UAAM,WAAWF,MAAK,cAAc,GAAG,QAAQ;AAE/C;AAAA,MACE;AAAA,MACA,KAAK;AAAA,QACH;AAAA,UACE,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,UAClC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAEA,WAAO,EAAE,GAAG,aAAa,aAAa,SAAS;AAAA,EACjD;AAEA,SAAO;AACT;AASO,SAAS,eAAe,SAAiB,YAAoB,oBAA4B;AAC9F,MAAI,QAAQ,UAAU,WAAW;AAC/B,WAAO;AAAA,EACT;AACA,SAAO,GAAG,QAAQ,UAAU,GAAG,SAAS,CAAC;AAC3C;;;ACtHA,SAAS,kBAAkB;AAC3B,OAAO,QAAQ;AACf,OAAO,UAAU;;;ACFjB,SAAS,SAAS;AAMX,IAAM,gBAAgB,EAAE,KAAK,CAAC,SAAS,SAAS,OAAO,CAAC;AACxD,IAAM,kBAAkB,EAAE,KAAK,CAAC,WAAW,WAAW,aAAa,UAAU,WAAW,CAAC;AAEzF,IAAM,mBAAmB,EAAE,OAAO;AAAA,EACvC,WAAW,EAAE,OAAO,EAAE,SAAS;AAAA,EAC/B,SAAS,EAAE,OAAO,EAAE,SAAS;AAAA,EAC7B,KAAK,EAAE,OAAO,EAAE,SAAS;AAAA,EACzB,MAAM,EAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,gBAAgB,EAAE,OAAO,EAAE,SAAS;AAAA,EACpC,YAAY,EAAE,OAAO,EAAE,SAAS;AAAA,EAChC,WAAW,EAAE,OAAO,EAAE,SAAS;AAAA,EAC/B,aAAa,EAAE,OAAO,EAAE,SAAS;AAAA,EACjC,aAAa,EAAE,OAAO,EAAE,SAAS;AAAA,EACjC,OAAO,EAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAE3B,kBAAkB,EAAE,OAAO,EAAE,SAAS;AAAA,EACtC,oBAAoB,EAAE,OAAO,EAAE
,SAAS;AAAA,EACxC,UAAU,EAAE,OAAO,EAAE,SAAS;AAAA,EAC9B,QAAQ,EAAE,QAAQ,EAAE,SAAS;AAAA,EAC7B,aAAa,EAAE,QAAQ,EAAE,SAAS;AAAA,EAClC,cAAc,EAAE,OAAO,EAAE,SAAS;AACpC,CAAC;AAEM,IAAM,YAAY,EAAE,OAAO;AAAA,EAChC,IAAI,EAAE,OAAO;AAAA,EACb,MAAM;AAAA,EACN,QAAQ;AAAA,EACR,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,EAAE,IAAI,GAAG;AAAA,EACnC,SAAS,EAAE,OAAO;AAAA,EAClB,SAAS,iBAAiB,QAAQ,CAAC,CAAC;AAAA,EACpC,WAAW,EAAE,OAAO;AAAA,EACpB,WAAW,EAAE,OAAO;AACtB,CAAC;;;AClCM,SAAS,GAAM,MAA2B;AAC/C,SAAO,EAAE,SAAS,MAAM,KAAK;AAC/B;AAEO,SAAS,IAAO,OAA4B;AACjD,SAAO,EAAE,SAAS,OAAO,MAAM;AACjC;;;ACVA,SAAS,iBAAAG,gBAAe,YAAY,aAAAC,kBAAiB;AACrD,SAAS,WAAW,QAAQ,aAAa;AACzC,SAAS,WAAAC,gBAAe;AAYxB,eAAsB,gBAAgB,UAAkB,SAAgC;AAEtF,QAAM,MAAMA,SAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AAElD,QAAM,WAAW,GAAG,QAAQ,QAAQ,OAAO,KAAK,IAAI,CAAC,CAAC,IAAI,OAAO,QAAQ,GAAG,CAAC;AAC7E,QAAM,UAAU,UAAU,SAAS,OAAO;AAC1C,QAAM,OAAO,UAAU,QAAQ;AACjC;AAYO,SAAS,oBAAoB,UAAkB,SAAuB;AAE3E,EAAAD,WAAUC,SAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AAEhD,QAAM,WAAW,GAAG,QAAQ,QAAQ,OAAO,KAAK,IAAI,CAAC,CAAC,IAAI,OAAO,QAAQ,GAAG,CAAC;AAC7E,EAAAF,eAAc,UAAU,SAAS,OAAO;AACxC,aAAW,UAAU,QAAQ;AAC/B;;;AHhCO,IAAM,aAAN,MAAiB;AAAA,EACL;AAAA,EAEjB,YAAY,SAAkB;AAE5B,QAAI;AACJ,QAAI,YAAY,QAAW;AACzB,gBAAU;AAAA,IACZ,OAAO;AACL,YAAM,UAAU,QAAQ,IAAI,MAAM,KAAK,QAAQ,IAAI,aAAa;AAChE,UAAI,YAAY,QAAW;AACzB,cAAM,IAAI,MAAM,sDAAsD;AAAA,MACxE;AACA,gBAAU,KAAK,KAAK,SAAS,+BAA+B;AAAA,IAC9D;AACA,SAAK,UAAU,KAAK,KAAK,SAAS,MAAM;AAGxC,QAAI,CAAC,GAAG,WAAW,KAAK,OAAO,GAAG;AAChC,SAAG,UAAU,KAAK,SAAS,EAAE,WAAW,KAAK,CAAC;AAAA,IAChD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,QAA8B;AACtC,UAAM,MAAW;AAAA,MACf,IAAI,OAAO,WAAW,EAAE,QAAQ,MAAM,EAAE,EAAE,UAAU,GAAG,EAAE,CAAC;AAAA,MAC1D,MAAM,OAAO;AAAA,MACb,QAAQ;AAAA,MACR,UAAU;AAAA,MACV,SAAS,OAAO,WAAW,GAAG,OAAO,IAAI;AAAA,MACzC,SAAS,OAAO;AAAA,MAChB,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,MAClC,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IACpC;AAGA,SAAK,SAAS,GAAG;AAEjB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,OAAe,SAAgC;AACvD,UAAM,MAAM,KAAK,OAAO,KAAK;AAE7B,QAAI,CAAC,KAAK;AACR,YAAM,IAAI,MAAM,OAAO,KAAK,YAAY;AAAA,IAC1C;AAGA,QAAI,QAAQ,WAAW,QAAW;AAChC,UAAI,SAAS,QAAQ;AAAA,IACvB;AACA,QAAI,QAAQ,aAAa,QAAW;AAClC,UAAI,WAAW,QAAQ;AAAA,IACzB;AACA,QAAI,QAAQ,YAAY,QAAW;AACjC,UAAI,UAAU,QAAQ;AAAA,IACxB;AACA,QAAI,QAAQ,YAAY,QAAW;AACjC,UAAI,UAAU,EAAE,GAAG,IAAI,SAAS,GAAG,QAAQ,QAAQ;AAAA,IACrD;AAEA,QAAI,aAAY,oBAAI,KAAK,GAAE,YAAY;AAGvC,SAAK,SAAS,GAAG;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,OAA2B;AAChC,UAAM,UAAU,KAAK,KAAK,KAAK,SAAS,GAAG,KAAK,OAAO;AAEvD,QAAI,CAAC,GAAG,WAAW,OAAO,GAAG;AAC3B,aAAO;AAAA,IACT;AAEA,QAAI;AACF,YAAM,UAAU,GAAG,aAAa,SAAS,OAAO;AAChD,aAAO,UAAU,MAAM,KAAK,MAAM,OAAO,CAAC;AAAA,IAC5C,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,sBAAsB,KAAK,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MACxF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS,cAA+C;AACtD,QAAI,CAAC,GAAG,WAAW,KAAK,OAAO,GAAG;AAChC,aAAO,CAAC;AAAA,IACV;AAEA,UAAM,QAAQ,GAAG,YAAY,KAAK,OAAO;AACzC,UAAM,OAAc,CAAC;AAErB,eAAW,QAAQ,OAAO;AACxB,UAAI,CAAC,KAAK,SAAS,OAAO,KAAK,KAAK,SAAS,MAAM,GAAG;AACpD;AAAA,MACF;AAEA,UAAI;AACF,cAAM,UAAU,GAAG,aAAa,KAAK,KAAK,KAAK,SAAS,IAAI,GAAG,OAAO;AACtE,cAAM,MAAM,UAAU,MAAM,KAAK,MAAM,OAAO,CAAC;AAE/C,YAAI,iBAAiB,QAAW;AAC9B,gBAAM,UAAU,MAAM,QAAQ,YAAY,IAAI,eAAe,CAAC,YAAY;AAC1E,cAAI,QAAQ,SAAS,IAAI,MAAM,GAAG;AAChC,iBAAK,KAAK,GAAG;AAAA,UACf;AAAA,QACF,OAAO;AACL,eAAK,KAAK,GAAG;AAAA,QACf;AAAA,MACF,SAAS,OAAO;AACd,cAAM,IAAI;AAAA,UACR,2BAA2B,IAAI,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,QAC5F;AAAA,MACF;AAAA,IACF;AAGA,SAAK,KAAK,CAAC,GAAG,MAAM,IAAI,KAAK,EAAE,SAAS,EAAE,QAAQ,IAAI,IAAI,KAAK,EAAE,SAAS,EAAE,QAAQ,CAAC;AAErF,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAwB;AACtB,WAAO,KAAK,SAAS,CAAC,WAAW,SAAS
,CAAC;AAAA,EAC7C;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,OAA6B;AACrC,UAAM,MAAM,KAAK,OAAO,KAAK;AAE7B,QAAI,CAAC,KAAK;AACR,aAAO,IAAI,IAAI,MAAM,OAAO,KAAK,YAAY,CAAC;AAAA,IAChD;AAEA,QAAI,IAAI,WAAW,eAAe,IAAI,WAAW,UAAU;AACzD,aAAO,IAAI,IAAI,MAAM,iBAAiB,IAAI,MAAM,MAAM,CAAC;AAAA,IACzD;AAEA,QAAI,IAAI,WAAW,aAAa;AAC9B,aAAO,GAAG,MAAS;AAAA,IACrB;AAGA,SAAK,UAAU,OAAO;AAAA,MACpB,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,SAAS,EAAE,cAAa,oBAAI,KAAK,GAAE,YAAY,EAAE;AAAA,IACnD,CAAC;AAGD,UAAM,UAAU,KAAK,KAAK,KAAK,SAAS,GAAG,KAAK,MAAM;AACtD,QAAI,GAAG,WAAW,OAAO,GAAG;AAC1B,UAAI;AACF,cAAM,MAAM,SAAS,GAAG,aAAa,SAAS,OAAO,GAAG,EAAE;AAI1D,YAAI,CAAC,OAAO,MAAM,GAAG,KAAK,OAAO,UAAU,GAAG,KAAK,MAAM,GAAG;AAC1D,kBAAQ,KAAK,KAAK,SAAS;AAAA,QAC7B;AAAA,MACF,QAAQ;AAAA,MAER;AAEA,UAAI;AACF,WAAG,WAAW,OAAO;AAAA,MACvB,QAAQ;AAAA,MAER;AAAA,IACF;AAEA,WAAO,GAAG,MAAS;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA,EAKA,eAAe,iBAAyB,IAAY;AAClD,UAAM,OAAO,KAAK,SAAS;AAC3B,UAAM,aAAa,KAAK,IAAI,IAAI,iBAAiB,KAAK,KAAK;AAC3D,QAAI,UAAU;AAEd,eAAW,OAAO,MAAM;AACtB,WACG,IAAI,WAAW,eAAe,IAAI,WAAW,YAAY,IAAI,WAAW,gBACzE,IAAI,KAAK,IAAI,SAAS,EAAE,QAAQ,IAAI,YACpC;AACA,cAAM,UAAU,KAAK,KAAK,KAAK,SAAS,GAAG,IAAI,EAAE,OAAO;AACxD,YAAI;AACF,aAAG,WAAW,OAAO;AACrB;AAAA,QACF,SAAS,OAAO;AACd,gBAAM,IAAI;AAAA,YACR,6BAA6B,IAAI,EAAE,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,UAChG;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,wBACE,iBAAyB,GACzB,UAAsC,CAAC,GAC/B;AACR,UAAM,OAAO,KAAK,SAAS;AAC3B,UAAM,aAAa,KAAK,IAAI,IAAI,iBAAiB,KAAK,KAAK;AAC3D,QAAI,UAAU;AAEd,eAAW,OAAO,MAAM;AACtB,UAAI,IAAI,WAAW,aAAa,IAAI,KAAK,IAAI,SAAS,EAAE,QAAQ,IAAI,YAAY;AAC9E,cAAM,UAAU,KAAK,KAAK,KAAK,SAAS,GAAG,IAAI,EAAE,OAAO;AAExD,YAAI,QAAQ,iBAAiB,MAAM;AAEjC,eAAK,UAAU,IAAI,IAAI;AAAA,YACrB,QAAQ;AAAA,YACR,SAAS,0CAA0C,OAAO,cAAc,CAAC;AAAA,UAC3E,CAAC;AAAA,QACH,OAAO;AAEL,cAAI;AACF,eAAG,WAAW,OAAO;AAAA,UACvB,SAAS,OAAO;AACd,kBAAM,IAAI;AAAA,cACR,8BAA8B,IAAI,EAAE,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,YACjG;AAAA,UACF;AAAA,QACF;AACA;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,OAAwB;AAChC,UAAM,UAAU,KAAK,KAAK,KAAK,SAAS,GAAG,KAAK,OAAO;AAEvD,QAAI,CAAC,GAAG,WAAW,OAAO,GAAG;AAC3B,aAAO;AAAA,IACT;AAEA,QAAI;AACF,SAAG,WAAW,OAAO;AACrB,aAAO;AAAA,IACT,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,wBAAwB,KAAK,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MAC1F;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,SAAS,KAAgB;AAC/B,UAAM,UAAU,KAAK,KAAK,KAAK,SAAS,GAAG,IAAI,EAAE,OAAO;AACxD,wBAAoB,SAAS,KAAK,UAAU,KAAK,MAAM,CAAC,CAAC;AAAA,EAC3D;AACF;;;AI1SA,SAAS,UAAU,aAAAG,YAAW,SAAAC,QAAO,UAAU;AAC/C,SAAS,QAAAC,OAAM,WAAAC,gBAAe;;;ACD9B,SAAS,aAAgC;AACzC,OAAO,oBAAoB;AAC3B,YAAY,OAAO;AAKnB,SAAS,YAAY,KAAgC;AACnD,MAAI,OAAO,QAAQ,YAAY;AAE7B,WAAO;AAAA,EACT;AACA,MAAI,QAAQ,QAAQ,OAAO,QAAQ,YAAY,aAAa,KAAK;AAE/D,UAAM,cAAc;AACpB,QAAI,OAAO,YAAY,YAAY,YAAY;AAE7C,aAAO,YAAY;AAAA,IACrB;AAAA,EACF;AACA,QAAM,IAAI,MAAM,gCAAgC;AAClD;AACA,IAAM,WAAW,YAAY,cAAc;AAyBpC,IAAM,YAAN,MAAgB;AAAA,EACrB,MAAM,MAAc,UAAmD;AACrE,QAAI;AACF,YAAM,UAA0B,CAAC,KAAK;AACtC,UAAI,aAAa,cAAc;AAC7B,gBAAQ,KAAK,YAAY;AAAA,MAC3B;AAEA,YAAM,MAAM,MAAM,MAAM;AAAA,QACtB,YAAY;AAAA,QACZ;AAAA,MACF,CAAC;AAED,YAAM,QAAoB,CAAC;AAE3B,eAAS,KAAK;AAAA,QACZ,qBAAqB,CAACC,UAA0C;AAC9D,gBAAM,OAAOA,MAAK;AAClB,cAAI,CAAC,KAAK,GAAI;AAEd,gBAAM,WACJA,MAAK,OAAO,SAAS,4BACrBA,MAAK,OAAO,SAAS;AAEvB,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN,MAAM,KAAK,GAAG;AAAA,YACd;AAAA,YACA,OAAO,KAAK;AAAA,YACZ,WAAW,KAAK,KAAK,MAAM,QAAQ;AAAA,YACnC,SAAS,KAAK,KAAK,IAAI,QAAQ;AAAA,YAC/B,WAAW,KAAK,yBAAyB,IAAI;AAAA,UAC/C,CAAC;AAAA,QACH;AAAA,QAEA,kBAAkB,CAACA,UAAuC;AACxD,gBAAM,OAAOA,MAAK;AAClB,cAAI,CAAC,KAAK,GAAI;AAEd,gBAAM,WACJA,MAAK,OAAO,SAAS,4BACrBA,
MAAK,OAAO,SAAS;AAEvB,gBAAM,UAA+B,CAAC;AAEtC,qBAAW,UAAU,KAAK,KAAK,MAAM;AACnC,gBAAM,gBAAc,MAAM,KAAO,eAAa,OAAO,GAAG,GAAG;AACzD,sBAAQ,KAAK;AAAA,gBACX,MAAM,OAAO,IAAI;AAAA,gBACjB,OAAO,OAAO;AAAA,gBACd,WAAW,KAAK,uBAAuB,MAAM;AAAA,gBAC7C,WAAW,OAAO,KAAK,MAAM,QAAQ;AAAA,gBACrC,SAAS,OAAO,KAAK,IAAI,QAAQ;AAAA,cACnC,CAAC;AAAA,YACH;AAAA,UACF;AAEA,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN,MAAM,KAAK,GAAG;AAAA,YACd;AAAA,YACA,WAAW,KAAK,KAAK,MAAM,QAAQ;AAAA,YACnC,SAAS,KAAK,KAAK,IAAI,QAAQ;AAAA,YAC/B;AAAA,UACF,CAAC;AAAA,QACH;AAAA,QAEA,wBAAwB,CAACA,UAA6C;AACpE,gBAAM,OAAOA,MAAK;AAElB,gBAAM,WAAWA,MAAK,OAAO,SAAS;AAEtC,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN,MAAM,KAAK,GAAG;AAAA,YACd;AAAA,YACA,WAAW,KAAK,KAAK,MAAM,QAAQ;AAAA,YACnC,SAAS,KAAK,KAAK,IAAI,QAAQ;AAAA,UACjC,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAED,aAAO;AAAA,IACT,QAAQ;AAEN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA,EAEA,eAAe,MAA4B;AACzC,QAAI;AACF,YAAM,MAAM,MAAM,MAAM;AAAA,QACtB,YAAY;AAAA,QACZ,SAAS,CAAC,cAAc,KAAK;AAAA,MAC/B,CAAC;AAED,YAAM,UAAwB,CAAC;AAE/B,eAAS,KAAK;AAAA,QACZ,mBAAmB,CAACA,UAAwC;AAC1D,gBAAM,OAAOA,MAAK;AAClB,gBAAM,aAAuB,CAAC;AAE9B,qBAAW,QAAQ,KAAK,YAAY;AAClC,gBAAM,2BAAyB,IAAI,GAAG;AACpC,yBAAW,KAAK,KAAK,MAAM,IAAI;AAAA,YACjC,WAAa,oBAAkB,IAAI,GAAG;AACpC,yBAAW,KAAK,KAAK,MAAM,IAAI;AAAA,YACjC,WAAa,6BAA2B,IAAI,GAAG;AAC7C,yBAAW,KAAK,KAAK,MAAM,IAAI;AAAA,YACjC;AAAA,UACF;AAEA,kBAAQ,KAAK;AAAA,YACX,QAAQ,KAAK,OAAO;AAAA,YACpB;AAAA,YACA,QAAQ,KAAK,eAAe;AAAA,UAC9B,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAED,aAAO;AAAA,IACT,QAAQ;AAEN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA,EAEQ,yBAAyB,MAAqC;AACpE,UAAM,SAAS,KAAK,OACjB,IAAI,CAAC,MAAM;AACV,UAAM,eAAa,CAAC,EAAG,QAAO,EAAE;AAChC,aAAO;AAAA,IACT,CAAC,EACA,KAAK,IAAI;AAEZ,WAAO,GAAG,KAAK,IAAI,QAAQ,WAAW,IAAI,MAAM;AAAA,EAClD;AAAA,EAEQ,uBAAuB,MAA6B;AAC1D,UAAM,SAAS,KAAK,OACjB,IAAI,CAAC,MAAM;AACV,UAAM,eAAa,CAAC,EAAG,QAAO,EAAE;AAChC,aAAO;AAAA,IACT,CAAC,EACA,KAAK,IAAI;AAEZ,UAAM,OAAS,eAAa,KAAK,GAAG,IAAI,KAAK,IAAI,OAAO;AACxD,WAAO,GAAG,IAAI,IAAI,MAAM;AAAA,EAC1B;AACF;;;ACjLO,IAAM,YAAN,MAAgB;AAAA,EACJ,QAAgC,oBAAI,IAAuB;AAAA,EAC3D,QAAkC,oBAAI,IAAyB;AAAA,EAEhF,SAAS,OAAmB,MAAoB;AAC9C,eAAW,QAAQ,OAAO;AACxB,YAAM,KAAK,GAAG,IAAI,IAAI,KAAK,IAAI;AAE/B,YAAM,YAAuB;AAAA,QAC3B;AAAA,QACA;AAAA,QACA,MAAM,KAAK;AAAA,QACX,MAAM,KAAK;AAAA,QACX,UAAU,KAAK;AAAA,QACf,WAAW,KAAK;AAAA,QAChB,SAAS,KAAK;AAAA,MAChB;AAEA,UAAI,KAAK,cAAc,QAAW;AAChC,kBAAU,YAAY,KAAK;AAAA,MAC7B;AAEA,WAAK,MAAM,IAAI,IAAI,SAAS;AAG5B,UAAI,CAAC,KAAK,MAAM,IAAI,EAAE,GAAG;AACvB,aAAK,MAAM,IAAI,IAAI,CAAC,CAAC;AAAA,MACvB;AAGA,UAAI,KAAK,SAAS,WAAW,KAAK,YAAY,QAAW;AACvD,mBAAW,UAAU,KAAK,SAAS;AACjC,gBAAM,WAAW,GAAG,IAAI,IAAI,KAAK,IAAI,IAAI,OAAO,IAAI;AAEpD,gBAAM,aAAwB;AAAA,YAC5B,IAAI;AAAA,YACJ;AAAA,YACA,MAAM;AAAA,YACN,MAAM,OAAO;AAAA,YACb,UAAU,KAAK;AAAA;AAAA,YACf,WAAW,OAAO;AAAA,YAClB,SAAS,OAAO;AAAA,YAChB,WAAW,OAAO;AAAA,UACpB;AAEA,eAAK,MAAM,IAAI,UAAU,UAAU;AAGnC,cAAI,CAAC,KAAK,MAAM,IAAI,QAAQ,GAAG;AAC7B,iBAAK,MAAM,IAAI,UAAU,CAAC,CAAC;AAAA,UAC7B;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,UAAU,UAAkB,QAAgB,YAA4B;AAEtE,UAAM,aAAa,KAAK,kBAAkB,UAAU,MAAM;AAE1D,eAAW,QAAQ,YAAY;AAC7B,YAAM,OAAkB;AAAA,QACtB,MAAM;AAAA,QACN,IAAI,GAAG,UAAU,IAAI,IAAI;AAAA,QACzB,MAAM;AAAA,QACN,YAAY;AAAA,MACd;AAEA,YAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ,KAAK,CAAC;AAC3C,YAAM,KAAK,IAAI;AACf,WAAK,MAAM,IAAI,UAAU,KAAK;AAAA,IAChC;AAAA,EACF;AAAA,EAEA,yBAAyB,MAAc,MAAc,cAA4B;AAC/E,UAAM,SAAS,GAAG,IAAI,IAAI,YAAY;AAGtC,UAAM,cAAc;AACpB,UAAM,QAAQ,oBAAI,IAAY;AAE9B,QAAI;AACJ,YAAQ,QAAQ,YAAY,KAAK,IAAI,OAAO,MAAM;AAChD,UAAI,MAAM,CAAC,MAAM,UAAa,MAAM,CAAC,MAAM,IAAI;AAC7C,cAAM,IAAI,MAAM,CAAC,CAAC;AAAA,MACpB;AAAA,IACF;AAEA,UAAM,QAAQ,KAAK,MAAM,IAAI,MAAM,KAAK,CAAC;AAEzC,eAAW,kBAAkB,OAAO;AAElC,YAAM,aAAa,KAAK,eAAe,cAAc;AAErD,UAA
I,YAAY;AACd,cAAM,KAAK;AAAA,UACT,MAAM;AAAA,UACN,IAAI,WAAW;AAAA,UACf,MAAM;AAAA,UACN,YAAY;AAAA;AAAA,QACd,CAAC;AAAA,MACH,OAAO;AAEL,cAAM,KAAK;AAAA,UACT,MAAM;AAAA,UACN,IAAI,WAAW,cAAc;AAAA,UAC7B,MAAM;AAAA,UACN,YAAY;AAAA,QACd,CAAC;AAAA,MACH;AAAA,IACF;AAEA,SAAK,MAAM,IAAI,QAAQ,KAAK;AAAA,EAC9B;AAAA,EAEA,QAAQ,IAAmC;AACzC,WAAO,KAAK,MAAM,IAAI,EAAE;AAAA,EAC1B;AAAA,EAEA,SAAS,QAA6B;AACpC,WAAO,KAAK,MAAM,IAAI,MAAM,KAAK,CAAC;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA,EAKA,QAAQ,MAAuB;AAC7B,UAAM,QAAQ,KAAK,MAAM,IAAI,KAAK,IAAI,KAAK,CAAC;AAC5C,UAAM,KAAK,IAAI;AACf,SAAK,MAAM,IAAI,KAAK,MAAM,KAAK;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,MAAuB;AAClC,SAAK,MAAM,IAAI,KAAK,IAAI,IAAI;AAG5B,QAAI,CAAC,KAAK,MAAM,IAAI,KAAK,EAAE,GAAG;AAC5B,WAAK,MAAM,IAAI,KAAK,IAAI,CAAC,CAAC;AAAA,IAC5B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,QAA6B;AAC5C,UAAM,WAAwB,CAAC;AAC/B,eAAW,SAAS,KAAK,MAAM,OAAO,GAAG;AACvC,iBAAW,QAAQ,OAAO;AACxB,YAAI,KAAK,OAAO,QAAQ;AACtB,mBAAS,KAAK,IAAI;AAAA,QACpB;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,QAAwB;AACvC,WAAO,KAAK,iBAAiB,MAAM,EAAE,OAAO,CAAC,MAAM,EAAE,SAAS,OAAO,EAAE;AAAA,EACzE;AAAA;AAAA;AAAA;AAAA,EAKA,cAAc,QAAwB;AACpC,WAAO,KAAK,SAAS,MAAM,EAAE,OAAO,CAAC,MAAM,EAAE,SAAS,OAAO,EAAE;AAAA,EACjE;AAAA,EAEA,cAA2B;AACzB,WAAO,MAAM,KAAK,KAAK,MAAM,OAAO,CAAC;AAAA,EACvC;AAAA,EAEQ,eAAe,MAAqC;AAC1D,eAAW,QAAQ,KAAK,MAAM,OAAO,GAAG;AACtC,UAAI,KAAK,SAAS,MAAM;AACtB,eAAO;AAAA,MACT;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,kBAAkB,UAAkB,YAA4B;AAEtE,QAAI,WAAW,WAAW,GAAG,GAAG;AAE9B,YAAM,UAAU,SAAS,MAAM,GAAG,EAAE,MAAM,GAAG,EAAE,EAAE,KAAK,GAAG;AACzD,YAAM,QAAQ,WAAW,MAAM,GAAG;AAElC,UAAI,WAAW;AACf,iBAAW,QAAQ,OAAO;AACxB,YAAI,SAAS,MAAM;AACjB,qBAAW,SAAS,MAAM,GAAG,EAAE,MAAM,GAAG,EAAE,EAAE,KAAK,GAAG;AAAA,QACtD,WAAW,SAAS,KAAK;AACvB,sBAAY,IAAI,IAAI;AAAA,QACtB;AAAA,MACF;AAEA,aAAO,SAAS,QAAQ,SAAS,EAAE;AAAA,IACrC;AAGA,WAAO;AAAA,EACT;AAAA,EAEA,SAGE;AACA,UAAM,WAAwB,CAAC;AAC/B,eAAW,SAAS,KAAK,MAAM,OAAO,GAAG;AACvC,eAAS,KAAK,GAAG,KAAK;AAAA,IACxB;AAEA,WAAO;AAAA,MACL,OAAO,MAAM,KAAK,KAAK,MAAM,OAAO,CAAC;AAAA,MACrC,OAAO,SAAS,IAAI,CAAC,OAAO;AAAA,QAC1B,MAAM,EAAE;AAAA,QACR,IAAI,EAAE;AAAA,QACN,MAAM,EAAE;AAAA,QACR,YAAY,EAAE;AAAA,MAChB,EAAE;AAAA,IACJ;AAAA,EACF;AACF;;;AC5OA,IAAI,mBAAyC;AAC7C,IAAI,aAAqC;AACzC,IAAI,eAAuC;AAC3C,IAAI,eAAe;AACnB,IAAI,aAAa;AAkBV,SAAS,wBAAiC;AAC/C,MAAI,CAAC,cAAc;AACjB,QAAI;AAGF,yBAAmB,UAAQ,aAAa;AAExC,mBAAa,UAAQ,gBAAgB;AAErC,qBAAe,UAAQ,kBAAkB;AACzC,mBAAa;AAAA,IACf,QAAQ;AAEN,mBAAa;AAAA,IACf;AACA,mBAAe;AAAA,EACjB;AACA,SAAO;AACT;AAyCO,SAAS,mBAAkC;AAChD,MAAI,CAAC,sBAAsB,KAAK,qBAAqB,QAAQ,iBAAiB,MAAM;AAClF,WAAO;AAAA,EACT;AAEA,QAAM,SAAiB,IAAI,iBAAiB;AAC5C,SAAO,YAAY,YAAY;AAC/B,SAAO;AACT;AAMO,SAAS,cAAc,MAAqC;AACjE,MAAI;AACF,UAAM,SAAS,iBAAiB;AAChC,QAAI,WAAW,MAAM;AACnB,aAAO;AAAA,IACT;AAEA,WAAO,OAAO,MAAM,IAAI;AAAA,EAC1B,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;AAMO,SAAS,iBAAgC;AAC9C,MAAI,CAAC,sBAAsB,KAAK,qBAAqB,QAAQ,eAAe,MAAM;AAChF,WAAO;AAAA,EACT;AAEA,QAAM,SAAiB,IAAI,iBAAiB;AAC5C,SAAO,YAAY,UAAU;AAC7B,SAAO;AACT;AAMO,SAAS,YAAY,MAAqC;AAC/D,MAAI;AACF,UAAM,SAAS,eAAe;AAC9B,QAAI,WAAW,MAAM;AACnB,aAAO;AAAA,IACT;AAEA,WAAO,OAAO,MAAM,IAAI;AAAA,EAC1B,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;AAKO,SAAS,qBAAqB,UAAsC;AACzE,SAAO,SAAS,MAAM;AACxB;AAmBO,SAAS,oBAAoB,MAAsB,MAAqC;AAC7F,SAAO,KAAK,SAAS,KAAK,CAAC,UAAU,MAAM,SAAS,IAAI,KAAK;AAC/D;AAKO,SAAS,oBACd,MACA,WACuB;AACvB,SAAO,KAAK,kBAAkB,SAAS;AACzC;AAKO,SAAS,sBAAsB,MAA+B;AACnE,SAAO,KAAK,SAAS,KAAK,CAAC,UAAU,MAAM,SAAS,qBAAqB;AAC3E;AAaO,SAAS,gBAAgB,MAA+B;AAE7D,SAAO,KAAK,SAAS,KAAK,CAAC,UAAU,MAAM,SAAS,WAAW,MAAM,SAAS,OAAO;AACvF;AAYO,SAAS,qBAAqB,MAA8B;AAEjE,QAAM,WAAW,oBAAoB,MAAM,MAAM;AACjD,QAAM,iBAAiB,oBAAoB,MAAM,YAAY;AAC7D,QAAM,iBAAiB,oBAAoB,MAAM,
aAAa;AAC9D,QAAM,qBAAqB,oBAAoB,MAAM,iBAAiB;AAEtE,MAAI,aAAa,MAAM;AACrB,WAAO;AAAA,EACT;AAEA,MAAI,YAAY,SAAS;AAGzB,MAAI,uBAAuB,MAAM;AAC/B,iBAAa,mBAAmB;AAAA,EAClC;AAGA,MAAI,mBAAmB,MAAM;AAC3B,iBAAa,eAAe;AAAA,EAC9B;AAGA,MAAI,mBAAmB,MAAM;AAC3B,iBAAa,IAAI,eAAe,IAAI;AAAA,EACtC;AAEA,SAAO;AACT;AAQO,SAAS,iBACd,MACA,UACkB;AAClB,QAAM,QAAQ,MAAM,QAAQ,QAAQ,IAAI,WAAW,CAAC,QAAQ;AAC5D,SAAO,KAAK,SAAS,kBAAkB,KAAK;AAC9C;AAKO,SAAS,kBAAkB,SAAiC;AAEjE,QAAM,eAAe,oBAAoB,SAAS,UAAU;AAC5D,MAAI,iBAAiB,MAAM;AACzB,WAAO;AAAA,EACT;AACA,SAAO,aAAa;AACtB;;;ACvQO,IAAM,cAAN,MAAkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOvB,MAAM,MAAc,WAA+B;AACjD,QAAI;AACF,YAAM,OAAO,YAAY,IAAI;AAC7B,UAAI,SAAS,MAAM;AAEjB,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,QAAoB,CAAC;AAG3B,YAAM,YAAY,KAAK,eAAe,IAAI;AAC1C,YAAM,KAAK,GAAG,SAAS;AAGvB,YAAM,UAAU,KAAK,aAAa,IAAI;AACtC,YAAM,KAAK,GAAG,OAAO;AAGrB,YAAM,aAAa,KAAK,gBAAgB,IAAI;AAC5C,YAAM,KAAK,GAAG,UAAU;AAGxB,YAAM,QAAQ,KAAK,iBAAiB,IAAI;AACxC,YAAM,KAAK,GAAG,KAAK;AAGnB,YAAM,YAAY,KAAK,eAAe,IAAI;AAC1C,YAAM,KAAK,GAAG,SAAS;AAGvB,WAAK,aAAa,MAAM,KAAK;AAE7B,aAAO;AAAA,IACT,QAAQ;AAEN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,eAAe,MAA4B;AACzC,QAAI;AACF,YAAM,OAAO,YAAY,IAAI;AAC7B,UAAI,SAAS,MAAM;AACjB,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,UAAwB,CAAC;AAC/B,YAAM,cAAc,iBAAiB,MAAM,oBAAoB;AAE/D,iBAAW,cAAc,aAAa;AACpC,cAAM,cAAc,WAAW,kBAAkB,aAAa;AAE9D,mBAAW,QAAQ,aAAa;AAC9B,gBAAM,WAAW,oBAAoB,MAAM,MAAM;AACjD,cAAI,aAAa,MAAM;AACrB;AAAA,UACF;AAGA,gBAAM,gBAAgB,SAAS,kBAAkB,oCAAoC,EAAE,CAAC;AACxF,gBAAMC,QACJ,kBAAkB,SAAY,cAAc,OAAO,SAAS,KAAK,QAAQ,MAAM,EAAE;AAEnF,cAAIA,UAAS,IAAI;AACf,oBAAQ,KAAK;AAAA,cACX,QAAQA;AAAA,cACR,YAAY,CAAC;AAAA,cACb,QAAQ;AAAA,YACV,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAEA,aAAO;AAAA,IACT,QAAQ;AACN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,MAAkC;AACvD,UAAM,gBAAgB,iBAAiB,MAAM,sBAAsB;AACnE,UAAM,QAAoB,CAAC;AAE3B,eAAW,UAAU,eAAe;AAClC,YAAM,WAAW,oBAAoB,QAAQ,MAAM;AACnD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,WAAW,KAAK,WAAW,IAAI;AACrC,YAAM,YAAY,qBAAqB,OAAO,aAAa;AAC3D,YAAM,UAAU,qBAAqB,OAAO,WAAW;AACvD,YAAM,YAAY,qBAAqB,MAAM;AAE7C,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA,OAAO;AAAA,QACP;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,MAAkC;AACrD,UAAM,YAAY,iBAAiB,MAAM,kBAAkB;AAC3D,UAAM,QAAoB,CAAC;AAE3B,eAAW,YAAY,WAAW;AAEhC,YAAM,WAAW,oBAAoB,UAAU,WAAW;AAC1D,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,WAAW,oBAAoB,UAAU,MAAM;AACrD,YAAM,WAAW,oBAAoB,UAAU,MAAM;AAErD,UAAI,aAAa,QAAQ,aAAa,MAAM;AAC1C;AAAA,MACF;AAGA,UAAI,SAAS,SAAS,eAAe;AACnC;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,WAAW,KAAK,WAAW,IAAI;AACrC,YAAM,YAAY,qBAAqB,SAAS,aAAa;AAC7D,YAAM,UAAU,qBAAqB,SAAS,WAAW;AAEzD,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,WAAW;AAAA,QACX,SAAS,CAAC;AAAA,MACZ,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,MAAkC;AACxD,UAAM,YAAY,iBAAiB,MAAM,kBAAkB;AAC3D,UAAM,QAAoB,CAAC;AAE3B,eAAW,YAAY,WAAW;AAChC,YAAM,WAAW,oBAAoB,UAAU,WAAW;AAC1D,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,WAAW,oBAAoB,UAAU,MAAM;AACrD,YAAM,WAAW,oBAAoB,UAAU,MAAM;AAErD,UAAI,aAAa,QAAQ,aAAa,MAAM;AAC1C;AAAA,MACF;AAGA,UAAI,SAAS,SAAS,kBAAkB;AACtC;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,WAAW,KAAK,WAAW,IAAI;AACrC,YAAM,YAAY,qBAAqB,SAAS,aAAa;AAC7D,YAAM,UAAU,qBAAqB,SAAS,WAAW;AAGzD,YAAM,UAAU,KAAK,wBAAwB,QAAQ;AAErD,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,WAAW;AAAA,QACX;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,MAAkC;AACzD,UAAM,YAAY,iBAAiB,MAAM,kBAAkB;AAC3D,UAAM,QAAoB,CAAC;AAE3B,eAAW,YAAY,WAAW;AAChC,YAAM
,WAAW,oBAAoB,UAAU,WAAW;AAC1D,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,WAAW,oBAAoB,UAAU,MAAM;AACrD,YAAM,WAAW,oBAAoB,UAAU,MAAM;AAErD,UAAI,aAAa,QAAQ,aAAa,MAAM;AAC1C;AAAA,MACF;AAGA,UAAI,SAAS,SAAS,iBAAiB,SAAS,SAAS,kBAAkB;AACzE;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,WAAW,KAAK,WAAW,IAAI;AACrC,YAAM,YAAY,qBAAqB,SAAS,aAAa;AAC7D,YAAM,UAAU,qBAAqB,SAAS,WAAW;AACzD,YAAM,YAAY,GAAG,IAAI,MAAM,SAAS,IAAI;AAE5C,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,MAAkC;AACvD,UAAM,QAAoB,CAAC;AAG3B,UAAM,aAAa,iBAAiB,MAAM,mBAAmB;AAC7D,eAAW,aAAa,YAAY;AAClC,YAAM,QAAQ,UAAU,kBAAkB,YAAY;AACtD,iBAAW,QAAQ,OAAO;AACxB,cAAM,WAAW,oBAAoB,MAAM,MAAM;AACjD,YAAI,aAAa,MAAM;AACrB;AAAA,QACF;AAEA,cAAM,OAAO,SAAS;AACtB,cAAM,WAAW,KAAK,WAAW,IAAI;AACrC,cAAM,YAAY,qBAAqB,KAAK,aAAa;AACzD,cAAM,UAAU,qBAAqB,KAAK,WAAW;AAErD,cAAM,WAAW,oBAAoB,MAAM,MAAM;AACjD,cAAM,YAAY,aAAa,OAAO,GAAG,IAAI,KAAK,SAAS,IAAI,KAAK;AAEpE,cAAM,KAAK;AAAA,UACT,MAAM;AAAA,UACN;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAGA,UAAM,WAAW,iBAAiB,MAAM,iBAAiB;AACzD,eAAW,WAAW,UAAU;AAC9B,YAAM,QAAQ,QAAQ,kBAAkB,UAAU;AAClD,iBAAW,QAAQ,OAAO;AACxB,cAAM,WAAW,oBAAoB,MAAM,MAAM;AACjD,YAAI,aAAa,MAAM;AACrB;AAAA,QACF;AAEA,cAAM,OAAO,SAAS;AACtB,cAAM,WAAW,KAAK,WAAW,IAAI;AACrC,cAAM,YAAY,qBAAqB,KAAK,aAAa;AACzD,cAAM,UAAU,qBAAqB,KAAK,WAAW;AAErD,cAAM,WAAW,oBAAoB,MAAM,MAAM;AACjD,cAAM,YAAY,aAAa,OAAO,GAAG,IAAI,KAAK,SAAS,IAAI,KAAK;AAEpE,cAAM,KAAK;AAAA,UACT,MAAM;AAAA,UACN;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,MAAsB,OAAyB;AAClE,UAAM,cAAc,iBAAiB,MAAM,oBAAoB;AAE/D,eAAW,cAAc,aAAa;AACpC,YAAM,eAAe,KAAK,gBAAgB,UAAU;AACpD,UAAI,iBAAiB,MAAM;AACzB;AAAA,MACF;AAEA,YAAM,WAAW,oBAAoB,YAAY,MAAM;AACvD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,YAAY,qBAAqB,UAAU;AACjD,YAAM,YAAY,qBAAqB,WAAW,aAAa;AAC/D,YAAM,UAAU,qBAAqB,WAAW,WAAW;AAG3D,YAAM,aAAa,MAAM,KAAK,CAAC,SAAS,KAAK,SAAS,WAAW,KAAK,SAAS,YAAY;AAE3F,UAAI,YAAY,YAAY,QAAW;AACrC,mBAAW,QAAQ,KAAK;AAAA,UACtB;AAAA,UACA,OAAO;AAAA,UACP;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,wBAAwB,eAM7B;AACD,UAAM,UAMD,CAAC;AAEN,UAAM,cAAc,cAAc,kBAAkB,aAAa;AAEjE,eAAW,cAAc,aAAa;AACpC,YAAM,WAAW,oBAAoB,YAAY,MAAM;AACvD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,YAAY,qBAAqB,UAAU;AACjD,YAAM,YAAY,qBAAqB,WAAW,aAAa;AAC/D,YAAM,UAAU,qBAAqB,WAAW,WAAW;AAE3D,cAAQ,KAAK;AAAA,QACX;AAAA,QACA,OAAO;AAAA,QACP;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,YAA2C;AACjE,UAAM,eAAe,oBAAoB,YAAY,UAAU;AAC/D,QAAI,iBAAiB,MAAM;AACzB,aAAO;AAAA,IACT;AAEA,UAAM,YAAY,oBAAoB,cAAc,uBAAuB;AAC3E,QAAI,cAAc,MAAM;AACtB,aAAO;AAAA,IACT;AAEA,UAAM,WAAW,oBAAoB,WAAW,MAAM;AACtD,QAAI,aAAa,MAAM;AACrB,aAAO;AAAA,IACT;AAGA,QAAI,SAAS,SAAS,gBAAgB;AACpC,YAAM,YAAY,SAAS,SAAS,KAAK,CAAC,UAAU,MAAM,SAAS,iBAAiB;AACpF,aAAO,cAAc,SAAY,UAAU,OAAO;AAAA,IACpD;AAGA,QAAI,SAAS,SAAS,mBAAmB;AACvC,aAAO,SAAS;AAAA,IAClB;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,WAAW,MAAuB;AACxC,QAAI,KAAK,WAAW,GAAG;AACrB,aAAO;AAAA,IACT;AACA,UAAM,YAAY,KAAK,CAAC;AACxB,QAAI,cAAc,QAAW;AAC3B,aAAO;AAAA,IACT;AACA,WAAO,cAAc,UAAU,YAAY;AAAA,EAC7C;AACF;;;ACtdA,OAAOC,WAAU;;;ACGV,IAAM,kBAAN,MAAsB;AAAA,EAC3B,YAA6B,QAAsB;AAAtB;AAAA,EAAuB;AAAA,EAEpD,MAAM,MAAM,MAAc,UAAuC;AAC/D,UAAM,SAA4B,MAAM,KAAK,OAAO,YAAY,MAAM,QAAQ;AAE9E,WAAO,OAAO,MAAM,IAAI,CAAC,SAAS;AAChC,YAAM,WAAqB;AAAA,QACzB,MAAM,KAAK;AAAA,QACX,MAAM,KAAK;AAAA,QACX,UAAU,KAAK;AAAA,QACf,WAAW,KAAK;AAAA,QAChB,SAAS,KAAK;AAA
A,MAChB;AAEA,UAAI,KAAK,UAAU,QAAW;AAC5B,iBAAS,QAAQ,KAAK;AAAA,MACxB;AAEA,UAAI,KAAK,cAAc,QAAW;AAChC,iBAAS,YAAY,KAAK;AAAA,MAC5B;AAEA,UAAI,KAAK,YAAY,QAAW;AAC9B,iBAAS,UAAU,KAAK;AAAA,MAC1B;AAEA,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AACF;;;ACfO,IAAM,gBAAN,MAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOzB,MAAM,MAAc,WAA+B;AACjD,QAAI;AACF,YAAM,OAAO,cAAc,IAAI;AAC/B,UAAI,SAAS,MAAM;AAEjB,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,QAAoB,CAAC;AAG3B,YAAM,YAAY,KAAK,eAAe,IAAI;AAC1C,YAAM,KAAK,GAAG,SAAS;AAGvB,YAAM,UAAU,KAAK,aAAa,IAAI;AACtC,YAAM,KAAK,GAAG,OAAO;AAGrB,YAAM,SAAS,KAAK,YAAY,IAAI;AACpC,YAAM,KAAK,GAAG,MAAM;AAGpB,YAAM,QAAQ,KAAK,iBAAiB,IAAI;AACxC,YAAM,KAAK,GAAG,KAAK;AAGnB,YAAM,YAAY,KAAK,eAAe,IAAI;AAC1C,YAAM,KAAK,GAAG,SAAS;AAGvB,WAAK,gBAAgB,MAAM,KAAK;AAEhC,aAAO;AAAA,IACT,QAAQ;AAEN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,eAAe,MAA4B;AACzC,QAAI;AACF,YAAM,OAAO,cAAc,IAAI;AAC/B,UAAI,SAAS,MAAM;AACjB,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,kBAAkB,iBAAiB,MAAM,iBAAiB;AAChE,YAAM,UAAwB,CAAC;AAE/B,iBAAW,WAAW,iBAAiB;AACrC,cAAM,aAAa,kBAAkB,OAAO;AAC5C,YAAI,eAAe,IAAI;AACrB;AAAA,QACF;AAGA,cAAM,EAAE,QAAQ,WAAW,IAAI,KAAK,gBAAgB,UAAU;AAE9D,gBAAQ,KAAK;AAAA,UACX;AAAA,UACA;AAAA,UACA,QAAQ;AAAA;AAAA,QACV,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,IACT,QAAQ;AACN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,MAAkC;AACvD,UAAM,gBAAgB,iBAAiB,MAAM,eAAe;AAC5D,UAAM,QAAoB,CAAC;AAE3B,eAAW,UAAU,eAAe;AAElC,UAAI,KAAK,kBAAkB,MAAM,GAAG;AAClC;AAAA,MACF;AAEA,YAAM,WAAW,oBAAoB,QAAQ,MAAM;AACnD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,WAAW,sBAAsB,MAAM;AAC7C,YAAM,QAAQ,gBAAgB,MAAM;AACpC,YAAM,YAAY,qBAAqB,OAAO,aAAa;AAC3D,YAAM,UAAU,qBAAqB,OAAO,WAAW;AACvD,YAAM,YAAY,qBAAqB,MAAM;AAE7C,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,MAA+B;AACvD,QAAI,UAAU,KAAK;AACnB,WAAO,YAAY,MAAM;AACvB,UAAI,QAAQ,SAAS,aAAa;AAChC,eAAO;AAAA,MACT;AACA,gBAAU,QAAQ;AAAA,IACpB;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,MAAkC;AACrD,UAAM,cAAc,iBAAiB,MAAM,aAAa;AACxD,UAAM,QAAoB,CAAC;AAE3B,eAAW,cAAc,aAAa;AACpC,YAAM,WAAW,oBAAoB,YAAY,MAAM;AACvD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,WAAW,sBAAsB,UAAU;AACjD,YAAM,YAAY,qBAAqB,WAAW,aAAa;AAC/D,YAAM,UAAU,qBAAqB,WAAW,WAAW;AAG3D,YAAM,iBAAiB,oBAAoB,YAAY,iBAAiB;AACxE,YAAM,YAAY,mBAAmB,OAAO,GAAG,IAAI,GAAG,eAAe,IAAI,KAAK;AAE9E,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,SAAS,CAAC;AAAA;AAAA,MACZ,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,YAAY,MAAkC;AACpD,UAAM,aAAa,iBAAiB,MAAM,YAAY;AACtD,UAAM,QAAoB,CAAC;AAE3B,eAAW,aAAa,YAAY;AAClC,YAAM,WAAW,oBAAoB,WAAW,MAAM;AACtD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,WAAW,sBAAsB,SAAS;AAChD,YAAM,YAAY,qBAAqB,UAAU,aAAa;AAC9D,YAAM,UAAU,qBAAqB,UAAU,WAAW;AAG1D,YAAM,iBAAiB,oBAAoB,WAAW,iBAAiB;AACvE,YAAM,YAAY,mBAAmB,OAAO,GAAG,IAAI,GAAG,eAAe,IAAI,KAAK;AAG9E,YAAM,UAAU,KAAK,oBAAoB,SAAS;AAElD,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,MAAkC;AACzD,UAAM,YAAY,iBAAiB,MAAM,WAAW;AACpD,UAAM,QAAoB,CAAC;AAE3B,eAAW,YAAY,WAAW;AAChC,YAAM,WAAW,oBAAoB,UAAU,MAAM;AACrD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,WAAW,sBAAsB,QAAQ;AAC/C,YAAM,YAAY,qBAAqB,SAAS,aAAa;AAC7D,YAAM,UAAU,qBAAqB,SAAS,WAAW;AAGzD,YAAM,YAAY,oBAAoB,UAAU,MAAM;AACtD,YAAM,YAAY,cAAc,OAAO,GAAG,IAAI,MAAM,UAAU,IAAI,KAAK;AAEvE,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;
AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,MAAkC;AACvD,UAAM,aAAa,iBAAiB,MAAM,CAAC,cAAc,aAAa,CAAC;AACvE,UAAM,QAAoB,CAAC;AAE3B,eAAW,aAAa,YAAY;AAClC,YAAM,WAAW,oBAAoB,WAAW,MAAM;AACtD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,WAAW,sBAAsB,SAAS;AAChD,YAAM,YAAY,qBAAqB,UAAU,aAAa;AAC9D,YAAM,UAAU,qBAAqB,UAAU,WAAW;AAG1D,YAAM,WAAW,oBAAoB,WAAW,MAAM;AACtD,YAAM,YAAY,aAAa,OAAO,GAAG,IAAI,KAAK,SAAS,IAAI,KAAK;AAEpE,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,MAAsB,OAAyB;AACrE,UAAM,YAAY,iBAAiB,MAAM,WAAW;AAEpD,eAAW,YAAY,WAAW;AAEhC,YAAM,WAAW,oBAAoB,UAAU,MAAM;AACrD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,WAAW,SAAS;AAG1B,YAAM,UAAU,KAAK,mBAAmB,QAAQ;AAGhD,YAAM,aAAa,MAAM,KAAK,CAAC,SAAS,KAAK,SAAS,WAAW,KAAK,SAAS,QAAQ;AAEvF,UAAI,YAAY,YAAY,QAAW;AACrC,mBAAW,QAAQ,KAAK,GAAG,OAAO;AAAA,MACpC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,oBAAoB,WAMzB;AACD,UAAM,UAMD,CAAC;AAGN,UAAM,WAAW,oBAAoB,WAAW,MAAM;AACtD,QAAI,aAAa,MAAM;AACrB,aAAO;AAAA,IACT;AAGA,UAAM,qBAAqB,SAAS,kBAAkB,yBAAyB;AAE/E,eAAW,aAAa,oBAAoB;AAC1C,YAAM,WAAW,oBAAoB,WAAW,MAAM;AACtD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,QAAQ,gBAAgB,SAAS;AACvC,YAAM,YAAY,qBAAqB,SAAS;AAChD,YAAM,YAAY,qBAAqB,UAAU,aAAa;AAC9D,YAAM,UAAU,qBAAqB,UAAU,WAAW;AAE1D,cAAQ,KAAK;AAAA,QACX;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,UAMxB;AACD,UAAM,UAMD,CAAC;AAGN,UAAM,WAAW,oBAAoB,UAAU,MAAM;AACrD,QAAI,aAAa,MAAM;AACrB,aAAO;AAAA,IACT;AAGA,UAAM,gBAAgB,SAAS,kBAAkB,eAAe;AAEhE,eAAW,UAAU,eAAe;AAClC,YAAM,WAAW,oBAAoB,QAAQ,MAAM;AACnD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,QAAQ,gBAAgB,MAAM;AACpC,YAAM,YAAY,qBAAqB,MAAM;AAC7C,YAAM,YAAY,qBAAqB,OAAO,aAAa;AAC3D,YAAM,UAAU,qBAAqB,OAAO,WAAW;AAEvD,cAAQ,KAAK;AAAA,QACX;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASQ,gBAAgB,YAA8D;AAEpF,UAAMC,QAAO,WAAW,KAAK;AAG7B,QAAIA,MAAK,SAAS,KAAK,GAAG;AACxB,YAAM,SAASA,MAAK,QAAQ,OAAO,EAAE;AACrC,aAAO,EAAE,QAAQ,YAAY,CAAC,GAAG,EAAE;AAAA,IACrC;AAGA,UAAM,cAAcA,MAAK,MAAM,kBAAkB;AACjD,QAAI,gBAAgB,MAAM;AACxB,YAAM,SAAS,YAAY,CAAC,KAAK;AACjC,YAAM,gBAAgB,YAAY,CAAC,KAAK;AACxC,YAAM,aAAa,cAAc,MAAM,GAAG,EAAE,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC;AAC/D,aAAO,EAAE,QAAQ,WAAW;AAAA,IAC9B;AAGA,UAAM,QAAQA,MAAK,MAAM,IAAI;AAC7B,QAAI,MAAM,SAAS,GAAG;AACpB,YAAM,aAAa,CAAC,MAAM,MAAM,SAAS,CAAC,KAAK,EAAE;AACjD,YAAM,SAAS,MAAM,MAAM,GAAG,EAAE,EAAE,KAAK,IAAI;AAC3C,aAAO,EAAE,QAAQ,WAAW;AAAA,IAC9B;AAGA,WAAO,EAAE,QAAQ,IAAI,YAAY,CAACA,KAAI,EAAE;AAAA,EAC1C;AACF;;;AF1cO,IAAM,gBAAN,MAAoB;AAAA,EACzB,YAA6B,cAA6B;AAA7B;AAAA,EAA8B;AAAA,EAE3D,MAAM,UAAU,UAAkB,MAAmC;AACnE,UAAM,MAAMC,MAAK,QAAQ,QAAQ;AAEjC,QAAI,CAAC,OAAO,MAAM,EAAE,SAAS,GAAG,GAAG;AACjC,YAAM,SAAS,IAAI,UAAU;AAC7B,aAAO,OAAO,MAAM,MAAM,YAAY;AAAA,IACxC;AAEA,QAAI,CAAC,OAAO,MAAM,EAAE,SAAS,GAAG,GAAG;AACjC,YAAM,SAAS,IAAI,UAAU;AAC7B,aAAO,OAAO,MAAM,MAAM,YAAY;AAAA,IACxC;AAEA,QAAI,QAAQ,OAAO;AACjB,UAAI,CAAC,KAAK,cAAc;AACtB,cAAM,IAAI,MAAM,sDAAsD;AAAA,MACxE;AACA,YAAM,SAAS,IAAI,gBAAgB,KAAK,YAAY;AACpD,aAAO,OAAO,MAAM,MAAM,QAAQ;AAAA,IACpC;AAEA,QAAI,QAAQ,OAAO;AACjB,YAAM,SAAS,IAAI,cAAc;AACjC,aAAO,OAAO,MAAM,MAAM,QAAQ;AAAA,IACpC;AAEA,QAAI,QAAQ,OAAO;AACjB,YAAM,SAAS,IAAI,YAAY;AAC/B,aAAO,OAAO,MAAM,MAAM,QAAQ;AAAA,IACpC;AAGA,UAAM,WAAW,gBAAgB,YAAY;AAC7C,UAAM,UAAU,SAAS,eAAe,GAAG;AAC3C,QAAI,YAAY,QAAW;AACzB,aAAO,QAAQ,MAAM,MAAM,QAAQ;AAAA,IACrC;AAEA,WAAO,CAAC;AAAA,EACV;AACF;;;ALhBO,IAAM,mBAAN,MAAuB;AAAA,EACX;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA
;AAAA,EAEjB,YAAY,SAAiB,cAA6B;AACxD,SAAK,UAAU;AACf,SAAK,SAAS,IAAI,UAAU;AAC5B,SAAK,gBAAgB,IAAI,cAAc,YAAY;AACnD,SAAK,aAAa,oBAAI,IAAI;AAC1B,SAAK,iBAAiB,oBAAI,IAAI;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,oBAAoB,UAAiD;AACnE,SAAK,eAAe,IAAI,QAAQ;AAChC,WAAO,MAAM;AACX,WAAK,eAAe,OAAO,QAAQ;AAAA,IACrC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,sBAAsB,OAAqC;AACjE,eAAW,YAAY,KAAK,gBAAgB;AAC1C,eAAS,KAAK;AAAA,IAChB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAW,OAAqE;AACpF,UAAM,QAAQ,IAAI,UAAU;AAE5B,eAAW,QAAQ,OAAO;AACxB,YAAM,MAAM,KAAK,KAAK,MAAM,GAAG,EAAE,IAAI,KAAK;AAC1C,UAAI,CAAC,CAAC,MAAM,OAAO,MAAM,OAAO,MAAM,MAAM,IAAI,EAAE,SAAS,GAAG,EAAG;AAGjE,YAAM,QAAQ,MAAM,KAAK,cAAc,UAAU,KAAK,MAAM,KAAK,OAAO;AACxE,YAAM,SAAS,OAAO,KAAK,IAAI;AAG/B,UAAI,QAAQ,MAAM;AAEhB,cAAM,aAAa,IAAI,cAAc;AACrC,cAAM,UAAU,WAAW,eAAe,KAAK,OAAO;AACtD,mBAAW,OAAO,SAAS;AACzB,cAAI,CAAC,IAAI,QAAQ;AACf,kBAAM,UAAU,KAAK,MAAM,IAAI,QAAQ,IAAI,UAAU;AAAA,UACvD;AAAA,QACF;AAAA,MACF,WAAW,QAAQ,MAAM;AAEvB,cAAM,WAAW,IAAI,YAAY;AACjC,cAAM,UAAU,SAAS,eAAe,KAAK,OAAO;AACpD,mBAAW,OAAO,SAAS;AACzB,cAAI,CAAC,IAAI,QAAQ;AACf,kBAAM,UAAU,KAAK,MAAM,IAAI,QAAQ,IAAI,UAAU;AAAA,UACvD;AAAA,QACF;AAAA,MACF,WAAW,QAAQ,MAAM;AAEvB,cAAM,UAAU,KAAK,OAAO,eAAe,KAAK,OAAO;AACvD,mBAAW,OAAO,SAAS;AACzB,cAAI,CAAC,IAAI,QAAQ;AACf,kBAAM,UAAU,KAAK,MAAM,IAAI,QAAQ,IAAI,UAAU;AAAA,UACvD;AAAA,QACF;AAAA,MACF;AAGA,iBAAW,QAAQ,OAAO;AACxB,cAAM,QAAQ,KAAK,QAAQ,MAAM,IAAI;AAErC,YAAI,KAAK,SAAS,YAAY;AAE5B,gBAAM,eAAe,MAAM,MAAM,KAAK,YAAY,GAAG,KAAK,OAAO,EAAE,KAAK,IAAI;AAC5E,gBAAM,yBAAyB,cAAc,KAAK,MAAM,KAAK,IAAI;AAAA,QACnE,WAAW,KAAK,SAAS,WAAW,KAAK,YAAY,QAAW;AAE9D,qBAAW,UAAU,KAAK,SAAS;AACjC,kBAAM,aAAa,MAAM,MAAM,OAAO,YAAY,GAAG,OAAO,OAAO,EAAE,KAAK,IAAI;AAC9E,kBAAM,yBAAyB,YAAY,KAAK,MAAM,GAAG,KAAK,IAAI,IAAI,OAAO,IAAI,EAAE;AAAA,UACrF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAU,SAAkB,OAAiC;AACjE,UAAM,YAAY,KAAK,aAAa,OAAO;AAC3C,UAAMC,OAAMC,SAAQ,SAAS,GAAG,EAAE,WAAW,KAAK,CAAC;AAEnD,UAAM,aAAa,MAAM,OAAO;AAChC,UAAMC,WAAU,WAAW,KAAK,UAAU,YAAY,MAAM,CAAC,CAAC;AAG9D,SAAK,sBAAsB,EAAE,MAAM,iBAAiB,QAAQ,CAAC;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,YAAY,SAAiC;AACjD,UAAM,YAAY,KAAK,aAAa,OAAO;AAC3C,UAAM,GAAG,WAAW,EAAE,OAAO,KAAK,CAAC;AACnC,SAAK,WAAW,OAAO,OAAO;AAG9B,SAAK,sBAAsB,EAAE,MAAM,iBAAiB,QAAQ,CAAC;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,UAAU,SAAkD;AAEhE,UAAM,SAAS,KAAK,WAAW,IAAI,OAAO;AAC1C,QAAI,OAAQ,QAAO;AAEnB,UAAM,YAAY,KAAK,aAAa,OAAO;AAE3C,QAAI;AACF,YAAM,UAAU,MAAM,SAAS,WAAW,OAAO;AACjD,YAAM,SAAkB,KAAK,MAAM,OAAO;AAG1C,UAAI,CAAC,KAAK,kBAAkB,MAAM,GAAG;AACnC,eAAO;AAAA,MACT;AAEA,YAAM,aAAa;AACnB,YAAM,QAAQ,IAAI,UAAU;AAG5B,iBAAW,QAAQ,WAAW,OAAO;AACnC,cAAM,WAAW,KAAK,iBAAiB,KAAK,IAAI;AAChD,YAAI,CAAC,SAAU;AAGf,YAAI,aAAa,UAAU;AACzB,gBAAM,YAAuB;AAAA,YAC3B,IAAI,KAAK;AAAA,YACT,MAAM,KAAK;AAAA,YACX,MAAM;AAAA,YACN,MAAM,KAAK;AAAA,YACX,UAAU,KAAK;AAAA,YACf,WAAW,KAAK;AAAA,YAChB,SAAS,KAAK;AAAA,UAChB;AACA,cAAI,KAAK,cAAc,QAAW;AAChC,sBAAU,YAAY,KAAK;AAAA,UAC7B;AACA,gBAAM,aAAa,SAAS;AAC5B;AAAA,QACF;AAEA,cAAM,WAOF;AAAA,UACF,MAAM;AAAA,UACN,MAAM,KAAK;AAAA,UACX,UAAU,KAAK;AAAA,UACf,WAAW,KAAK;AAAA,UAChB,SAAS,KAAK;AAAA,QAChB;AACA,YAAI,KAAK,cAAc,QAAW;AAChC,mBAAS,YAAY,KAAK;AAAA,QAC5B;AACA,cAAM,SAAS,CAAC,QAAQ,GAAG,KAAK,IAAI;AAAA,MACtC;AAGA,iBAAW,QAAQ,WAAW,OAAO;AACnC,cAAM,WAAW,KAAK,iBAAiB,KAAK,IAAI;AAChD,YAAI,CAAC,SAAU;AAEf,cAAM,QAAQ;AAAA,UACZ,MAAM,KAAK;AAAA,UACX,IAAI,KAAK;AAAA,UACT,MAAM;AAAA,UACN,YAAY,KAAK;AAAA,QACnB,CAAC;AAAA,MACH;AAEA,WAAK,WAAW,IAAI,SAAS,KAAK;AAClC,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,cACE,OACA,UACA,YACqC;AACrC,UAAM,SAAS,GAAG,QAAQ,IAAI,UAAU;AACxC,WAAO;AAAA,MACL,UAAU,MAAM,iBAAiB,MAAM;AAAA,MA
CvC,OAAO,MAAM,cAAc,MAAM;AAAA,IACnC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,eACE,OACA,UACA,YAC6C;AAC7C,UAAM,SAAS,GAAG,QAAQ,IAAI,UAAU;AACxC,UAAM,UAAuD,CAAC;AAG9D,UAAM,WAAW,MAAM,iBAAiB,MAAM;AAC9C,eAAW,QAAQ,UAAU;AAC3B,UAAI,KAAK,SAAS,SAAS;AACzB,gBAAQ,KAAK,EAAE,IAAI,KAAK,MAAM,cAAc,aAAa,CAAC;AAAA,MAC5D;AAAA,IACF;AAGA,UAAM,WAAW,MAAM,SAAS,MAAM;AACtC,eAAW,QAAQ,UAAU;AAC3B,UAAI,KAAK,SAAS,SAAS;AACzB,gBAAQ,KAAK,EAAE,IAAI,KAAK,IAAI,cAAc,iBAAiB,CAAC;AAAA,MAC9D;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,aAAmB;AACjB,SAAK,WAAW,MAAM;AAAA,EACxB;AAAA,EAEQ,aAAa,SAA0B;AAC7C,WAAOC,MAAK,KAAK,SAAS,UAAU,GAAG,OAAO,OAAO;AAAA,EACvD;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,OAA0C;AAClE,QAAI,OAAO,UAAU,YAAY,UAAU,KAAM,QAAO;AAExD,QAAI,EAAE,WAAW,UAAU,EAAE,WAAW,OAAQ,QAAO;AAEvD,UAAM,MAAM;AACZ,WAAO,MAAM,QAAQ,IAAI,KAAK,KAAK,MAAM,QAAQ,IAAI,KAAK;AAAA,EAC5D;AAAA;AAAA;AAAA;AAAA,EAKQ,gBACN,MAC0E;AAC1E,WAAO,CAAC,YAAY,SAAS,aAAa,QAAQ,SAAS,QAAQ,EAAE,SAAS,IAAI;AAAA,EACpF;AAAA;AAAA;AAAA;AAAA,EAKQ,iBACN,MAC8E;AAC9E,QAAI,KAAK,gBAAgB,IAAI,GAAG;AAC9B,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,MAAsE;AAC5F,WAAO,CAAC,SAAS,WAAW,WAAW,YAAY,EAAE,SAAS,IAAI;AAAA,EACpE;AAAA;AAAA;AAAA;AAAA,EAKQ,iBACN,MAC4D;AAC5D,QAAI,KAAK,gBAAgB,IAAI,GAAG;AAC9B,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AACF;;;AQlWA,SAAS,YAAAC,WAAU,cAAc;AACjC,SAAS,eAAe;AACxB,SAAS,YAAY,QAAAC,OAAM,eAAe;;;ACqCnC,IAAM,iBAA4B;AAAA,EACvC,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,IACT,OAAO;AAAA,IACP,WAAW;AAAA,EACb;AAAA,EACA,UAAU;AAAA,IACR,aAAa;AAAA,IACb,WAAW;AAAA,IACX,cAAc;AAAA,IACd,gBAAgB,CAAC,mBAAmB,WAAW,YAAY,OAAO;AAAA,EACpE;AAAA,EACA,QAAQ;AAAA,IACN,aAAa;AAAA,IACb,cAAc;AAAA,EAChB;AAAA,EACA,OAAO;AAAA,IACL,WAAW;AAAA,IACX,SAAS;AAAA,IACT,gBAAgB;AAAA,EAClB;AAAA,EACA,QAAQ;AAAA,IACN,MAAM;AAAA,IACN,MAAM;AAAA,EACR;AACF;;;ACrDA,SAAS,cAAc,OAAkD;AACvE,SACE,OAAO,UAAU,YAAY,UAAU,QAAQ,CAAC,MAAM,QAAQ,KAAK,KAAK,EAAE,iBAAiB;AAE/F;AAsBO,SAAS,UAA4B,UAAa,WAAuB;AAE9E,MAAI,CAAC,cAAc,SAAS,GAAG;AAC7B,WAAO,EAAE,GAAG,SAAS;AAAA,EACvB;AAOA,QAAM,iBAAiB;AAEvB,SAAO,iBAAiB,gBAAgB,SAAS;AACnD;AAMA,SAAS,iBACP,UACA,WACyB;AACzB,QAAM,SAAkC,EAAE,GAAG,SAAS;AAEtD,aAAW,OAAO,OAAO,KAAK,SAAS,GAAG;AACxC,UAAM,eAAe,SAAS,GAAG;AACjC,UAAM,gBAAgB,UAAU,GAAG;AAGnC,QAAI,kBAAkB,QAAW;AAC/B;AAAA,IACF;AAGA,QAAI,cAAc,YAAY,KAAK,cAAc,aAAa,GAAG;AAC/D,aAAO,GAAG,IAAI,iBAAiB,cAAc,aAAa;AAAA,IAC5D,OAAO;AAEL,aAAO,GAAG,IAAI;AAAA,IAChB;AAAA,EACF;AAEA,SAAO;AACT;;;AFzEA,IAAM,sBAAsB;AAK5B,eAAe,WAAWC,OAAgC;AACxD,MAAI;AACF,UAAM,OAAOA,KAAI;AACjB,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEO,IAAM,gBAAN,MAAoB;AAAA,EACR;AAAA,EACA;AAAA,EACA;AAAA,EACT,SAA2B;AAAA,EAEnC,YAAY,YAAqB,SAAkB,aAAsB;AAEvE,SAAK,cAAc,eAAe,mBAAmB,QAAQ;AAI7D,QAAI,eAAe,UAAa,eAAe,IAAI;AACjD,WAAK,aAAa,KAAK,WAAW,YAAY,KAAK,WAAW;AAAA,IAChE,OAAO;AACL,WAAK,aAAaC,MAAK,KAAK,aAAa,mBAAmB;AAAA,IAC9D;AAIA,QAAI,YAAY,UAAa,YAAY,IAAI;AAC3C,WAAK,UAAU,KAAK,WAAW,SAAS,KAAK,WAAW;AAAA,IAC1D,OAAO;AACL,WAAK,UAAU,KAAK,WAAW,eAAe,SAAS,KAAK,WAAW;AAAA,IACzE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,qBAA6B;AAC3B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,OAA2B;AAC/B,QAAI,KAAK,WAAW,MAAM;AACxB,aAAO,KAAK;AAAA,IACd;AAEA,UAAM,SAAS,MAAM,WAAW,KAAK,UAAU;AAC/C,QAAI,CAAC,QAAQ;AAEX,WAAK,SAAS,EAAE,GAAG,eAAe;AAClC,YAAM,KAAK,KAAK,KAAK,MAAM;AAC3B,aAAO,KAAK;AAAA,IACd;AAGA,UAAM,UAAU,MAAMC,UAAS,KAAK,YAAY,OAAO;AACvD,QAAI;AACF,WAAK,SAAS,UAAU,gBAAgB,KAAK,MAAM,OAAO,CAAC;AAAA,IAC7D,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,kCAAkC,KAAK,UAAU,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MAC9G;AAAA,IACF;AAEA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,KAAK,QAAkC;AAC3C,UAAM,gBAAgB,KAAK,YAAY,KAAK,UAAU,QAAQ,MAAM,CAAC,CAAC;AACtE,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,iBAAyB;AACvB,WAAO,KAAK
;AAAA,EACd;AAAA,EAEA,oBAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEQ,WAAWF,OAAc,SAAyB;AAExD,QAAIA,MAAK,WAAW,GAAG,GAAG;AACxB,aAAOA,MAAK,QAAQ,KAAK,QAAQ,CAAC;AAAA,IACpC;AAGA,QAAI,CAAC,WAAWA,KAAI,GAAG;AACrB,aAAO,QAAQ,SAASA,KAAI;AAAA,IAC9B;AAEA,WAAOA;AAAA,EACT;AACF;;;AG9GA,SAAS,YAAAG,WAAU,aAAAC,YAAW,UAAAC,eAAc;AAC5C,SAAS,QAAAC,aAAY;AAqBrB,IAAM,oBAAoB;AAAA,EACxB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAKA,IAAM,iBAAiB;AAAA;AAAA;AAAA;AAAA;AASvB,eAAeC,YAAWC,OAAgC;AACxD,MAAI;AACF,UAAMH,QAAOG,KAAI;AACjB,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAUO,IAAM,mBAAN,MAAuB;AAAA,EACX;AAAA,EAEjB,YAAY,aAAqB;AAC/B,SAAK,gBAAgBF,MAAK,aAAa,YAAY;AAAA,EACrD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,sBAAwC;AAC5C,UAAM,SAAS,MAAMC,YAAW,KAAK,aAAa;AAClD,QAAI,CAAC,QAAQ;AACX,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,MAAMJ,UAAS,KAAK,eAAe,OAAO;AAC1D,UAAM,QAAQ,QAAQ,MAAM,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC;AAErD,eAAW,WAAW,mBAAmB;AACvC,UAAI,CAAC,MAAM,SAAS,OAAO,GAAG;AAC5B,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,0BAA0E;AAC9E,UAAM,SAAS,MAAMI,YAAW,KAAK,aAAa;AAElD,QAAI,CAAC,QAAQ;AAEX,YAAM,UAAU,GAAG,eAAe,KAAK,CAAC;AAAA,EAAK,kBAAkB,KAAK,IAAI,CAAC;AAAA;AACzE,YAAMH,WAAU,KAAK,eAAe,OAAO;AAC3C,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS;AAAA,MACX;AAAA,IACF;AAGA,UAAM,kBAAkB,MAAMD,UAAS,KAAK,eAAe,OAAO;AAClE,UAAM,QAAQ,gBAAgB,MAAM,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC;AAG7D,UAAM,kBAAkB,kBAAkB,OAAO,CAAC,YAAY,CAAC,MAAM,SAAS,OAAO,CAAC;AAEtF,QAAI,gBAAgB,WAAW,GAAG;AAChC,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS;AAAA,MACX;AAAA,IACF;AAGA,QAAI,aAAa;AACjB,QAAI,CAAC,WAAW,SAAS,IAAI,GAAG;AAC9B,oBAAc;AAAA,IAChB;AAEA,kBAAc;AACd,kBAAc,GAAG,gBAAgB,KAAK,IAAI,CAAC;AAAA;AAE3C,UAAMC,WAAU,KAAK,eAAe,UAAU;AAE9C,WAAO;AAAA,MACL,SAAS;AAAA,MACT,SAAS,2BAA2B,OAAO,gBAAgB,MAAM,CAAC;AAAA,IACpE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,mBAA2B;AACzB,WAAO,KAAK;AAAA,EACd;AACF;;;ACrJA,SAAS,cAAAK,mBAAkB;AAC3B,SAAS,YAAAC,WAAU,eAAe;AAClC,SAAS,QAAAC,OAAM,SAAS,UAAU,gBAAgB;;;ACsBlD,IAAM,gBAAgB;AAAA,EACpB,MAAM,EAAE,WAAW,KAAK,cAAc,IAAI;AAAA,EAC1C,KAAK,EAAE,WAAW,MAAM,cAAc,IAAI;AAAA,EAC1C,MAAM,EAAE,WAAW,MAAM,cAAc,IAAI;AAC7C;AAIO,IAAM,kBAAN,MAAM,iBAAgB;AAAA,EACV;AAAA,EACA;AAAA,EAEjB,YAAY,QAAqB;AAC/B,QAAI,OAAO,gBAAgB,OAAO,WAAW;AAC3C,YAAM,IAAI;AAAA,QACR,iBAAiB,OAAO,OAAO,YAAY,CAAC,kCAAkC,OAAO,OAAO,SAAS,CAAC;AAAA,MACxG;AAAA,IACF;AACA,SAAK,YAAY,OAAO;AACxB,SAAK,eAAe,OAAO;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,OAAO,eAAe,MAAoC;AACxD,WAAO,IAAI,iBAAgB,cAAc,IAAI,CAAC;AAAA,EAChD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,MAAc,UAA4B;AAE9C,QAAI,aAAa,UAAa,aAAa,MAAM,SAAS,KAAK,QAAQ,GAAG;AACxE,aAAO,KAAK,cAAc,IAAI;AAAA,IAChC;AAGA,QAAI,aAAa,UAAa,aAAa,MAAM,sBAAsB,KAAK,QAAQ,GAAG;AACrF,aAAO,KAAK,UAAU,IAAI;AAAA,IAC5B;AAEA,WAAO,KAAK,mBAAmB,IAAI;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,cAAc,MAAuB;AAE3C,UAAM,cAAc;AACpB,UAAM,WAA4E,CAAC;AAEnF,QAAI,YAAY;AAChB,QAAI,aAAa;AACjB,QAAI;AAEJ,YAAQ,QAAQ,YAAY,KAAK,IAAI,OAAO,MAAM;AAEhD,UAAI,MAAM,QAAQ,WAAW;AAC3B,cAAM,UAAU,KAAK,MAAM,WAAW,MAAM,KAAK,EAAE,KAAK;AACxD,YAAI,SAAS;AACX,mBAAS,KAAK;AAAA,YACZ,QAAQ;AAAA,YACR;AAAA,YACA,aAAa;AAAA,UACf,CAAC;AAAA,QACH;AAAA,MACF;AACA,mBAAa,MAAM,CAAC,KAAK;AACzB,kBAAY,MAAM;AAAA,IACpB;AAGA,UAAM,eAAe,KAAK,MAAM,SAAS,EAAE,KAAK;AAChD,QAAI,cAAc;AAChB,eAAS,KAAK;AAAA,QACZ,QAAQ;AAAA,QACR,SAAS;AAAA,QACT,aAAa;AAAA,MACf,CAAC;AAAA,IACH;AAGA,QAAI,SAAS,WAAW,GAAG;AACzB,aAAO,KAAK,mBAAmB,IAAI;AAAA,IACrC;AAGA,UAAM,SAAkB,CAAC;AAEzB,eAAW,WAAW,UAAU;AAC9B,UAAI,QAAQ,QAAQ,UAAU,KAAK,WAAW;AAE5C,eAAO,KAAK;AAAA,UACV,SAAS,QAAQ;AAAA,UACjB,YAAY,OAAO;AAAA,UACnB,aAAa;AAAA,UACb,aAAa,QAAQ;AAAA,UACrB,WAAW,QAAQ,cAAc,QAAQ,QAAQ;AAAA
,UACjD,eAAe,QAAQ,UAAU;AAAA,QACnC,CAAC;AAAA,MACH,OAAO;AAEL,cAAM,gBAAgB,KAAK,mBAAmB,QAAQ,OAAO;AAC7D,mBAAW,YAAY,eAAe;AACpC,iBAAO,KAAK;AAAA,YACV,GAAG;AAAA,YACH,YAAY,OAAO;AAAA,YACnB,aAAa,QAAQ,cAAc,SAAS;AAAA,YAC5C,WAAW,QAAQ,cAAc,SAAS;AAAA,YAC1C,eAAe,QAAQ,UAAU;AAAA,UACnC,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAGA,eAAW,SAAS,QAAQ;AAC1B,YAAM,cAAc,OAAO;AAAA,IAC7B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,UAAU,MAAuB;AAEvC,UAAM,mBACJ;AACF,UAAM,eAAiF,CAAC;AAExF,QAAI;AACJ,YAAQ,QAAQ,iBAAiB,KAAK,IAAI,OAAO,MAAM;AACrD,YAAM,OAAO,MAAM,CAAC;AACpB,YAAM,OAAkE;AAAA,QACtE,aAAa,MAAM;AAAA,QACnB,WAAW,MAAM;AAAA,MACnB;AACA,UAAI,SAAS,QAAW;AACtB,aAAK,OAAO;AAAA,MACd;AACA,mBAAa,KAAK,IAAI;AAAA,IACxB;AAGA,QAAI,aAAa,WAAW,GAAG;AAC7B,aAAO,KAAK,mBAAmB,IAAI;AAAA,IACrC;AAGA,aAAS,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK;AAC5C,YAAM,cAAc,aAAa,CAAC;AAClC,YAAM,WAAW,aAAa,IAAI,CAAC;AACnC,UAAI,gBAAgB,OAAW;AAI/B,YAAM,WAAW,KAAK,MAAM,YAAY,WAAW;AACnD,UACE,oFAAoF;AAAA,QAClF;AAAA,MACF,GACA;AACA,cAAM,WAAW,KAAK,mBAAmB,QAAQ;AACjD,YAAI,WAAW,GAAG;AAChB,sBAAY,YAAY,YAAY,cAAc;AAAA,QACpD,OAAO;AAEL,sBAAY,YAAY,aAAa,SAAY,SAAS,cAAc,KAAK;AAAA,QAC/E;AAAA,MACF,OAAO;AAEL,oBAAY,YAAY,aAAa,SAAY,SAAS,cAAc,KAAK;AAAA,MAC/E;AAAA,IACF;AAEA,UAAM,SAAkB,CAAC;AAEzB,eAAW,QAAQ,cAAc;AAC/B,YAAM,UAAU,KAAK,MAAM,KAAK,aAAa,KAAK,SAAS,EAAE,KAAK;AAElE,UAAI,QAAQ,UAAU,KAAK,WAAW;AAEpC,eAAO,KAAK;AAAA,UACV;AAAA,UACA,YAAY,OAAO;AAAA,UACnB,aAAa;AAAA,UACb,aAAa,KAAK;AAAA,UAClB,WAAW,KAAK;AAAA,UAChB,cAAc,KAAK;AAAA,QACrB,CAAC;AAAA,MACH,OAAO;AAEL,cAAM,aAAa,KAAK,mBAAmB,OAAO;AAClD,mBAAW,YAAY,YAAY;AACjC,iBAAO,KAAK;AAAA,YACV,GAAG;AAAA,YACH,YAAY,OAAO;AAAA,YACnB,aAAa,KAAK,cAAc,SAAS;AAAA,YACzC,WAAW,KAAK,cAAc,SAAS;AAAA,YACvC,cAAc,KAAK;AAAA,UACrB,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAGA,eAAW,SAAS,QAAQ;AAC1B,YAAM,cAAc,OAAO;AAAA,IAC7B;AAEA,WAAO,OAAO,SAAS,IAAI,SAAS,KAAK,mBAAmB,IAAI;AAAA,EAClE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,mBAAmB,MAAsB;AAC/C,QAAI,aAAa;AACjB,QAAI,WAAW;AACf,QAAI,sBAAsB;AAC1B,QAAI,qBAAqB;AACzB,QAAI,aAAa;AACjB,QAAI,IAAI;AACR,QAAI,kBAAkB;AAGtB,WAAO,IAAI,KAAK,QAAQ;AACtB,YAAM,OAAO,KAAK,CAAC;AACnB,YAAM,WAAW,IAAI,IAAI,KAAK,SAAS,KAAK,IAAI,CAAC,IAAI;AAGrD,UAAI,CAAC,YAAY,CAAC,sBAAsB,SAAS,OAAO,aAAa,KAAK;AACxE,8BAAsB;AACtB,aAAK;AACL;AAAA,MACF;AAEA,UAAI,CAAC,YAAY,CAAC,uBAAuB,SAAS,OAAO,aAAa,KAAK;AACzE,6BAAqB;AACrB,aAAK;AACL;AAAA,MACF;AAEA,UAAI,sBAAsB,SAAS,OAAO,aAAa,KAAK;AAC1D,6BAAqB;AACrB,aAAK;AACL;AAAA,MACF;AAEA,UAAI,uBAAuB,SAAS,MAAM;AACxC,8BAAsB;AACtB;AACA;AAAA,MACF;AAGA,UAAI,uBAAuB,oBAAoB;AAC7C;AACA;AAAA,MACF;AAGA,UAAI,CAAC,aAAa,SAAS,OAAO,SAAS,OAAO,SAAS,MAAM;AAC/D,mBAAW;AACX,qBAAa;AACb;AACA;AAAA,MACF;AAEA,UAAI,YAAY,SAAS,MAAM;AAE7B,aAAK;AACL;AAAA,MACF;AAEA,UAAI,YAAY,SAAS,YAAY;AACnC,mBAAW;AACX,qBAAa;AACb;AACA;AAAA,MACF;AAGA,UAAI,UAAU;AACZ;AACA;AAAA,MACF;AAGA,UAAI,SAAS,KAAK;AAChB;AACA,0BAAkB;AAAA,MACpB,WAAW,SAAS,KAAK;AACvB;AACA,YAAI,mBAAmB,eAAe,GAAG;AAEvC,iBAAO,IAAI;AAAA,QACb;AAAA,MACF;AAEA;AAAA,IACF;AAGA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,MAAuB;AAChD,QAAI,KAAK,UAAU,KAAK,WAAW;AACjC,aAAO;AAAA,QACL;AAAA,UACE,SAAS;AAAA,UACT,YAAY;AAAA,UACZ,aAAa;AAAA,UACb,aAAa;AAAA,UACb,WAAW,KAAK;AAAA,QAClB;AAAA,MACF;AAAA,IACF;AAEA,UAAM,SAAkB,CAAC;AACzB,UAAM,OAAO,KAAK,YAAY,KAAK;AACnC,QAAI,QAAQ;AAEZ,WAAO,QAAQ,KAAK,QAAQ;AAC1B,YAAM,MAAM,KAAK,IAAI,QAAQ,KAAK,WAAW,KAAK,MAAM;AACxD,aAAO,KAAK;AAAA,QACV,SAAS,KAAK,MAAM,OAAO,GAAG;AAAA,QAC9B,YAAY,OAAO;AAAA,QACnB,aAAa;AAAA,QACb,aAAa;AAAA,QACb,WAAW;AAAA,MACb,CAAC;AACD,eAAS;AACT,UAAI,QAAQ,KAAK,OAAQ;AAAA,IAC3B;AAGA,eAAW,SAAS,QAAQ;AAC1B,YAAM,cAAc,OAAO;AAAA,IAC7B;AAEA,WAAO;AAAA,EACT;AACF;;;AChYA,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,YAAAC,WAAU,YAAY;AAsBxB,IAAM,eAAN,MAAmB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA
;AAAA;AAAA,EAQxB,MAAM,cACJ,UACA,cACsB;AACtB,UAAM,SAAsB;AAAA,MAC1B,OAAO,CAAC;AAAA,MACR,UAAU,CAAC;AAAA,MACX,SAAS,CAAC;AAAA,MACV,WAAW,CAAC;AAAA,IACd;AAGA,UAAM,iBAAiB,IAAI,IAAI,aAAa,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC;AAC9D,UAAM,gBAAgB,IAAI,IAAI,OAAO,KAAK,SAAS,KAAK,CAAC;AAGzD,eAAWC,SAAQ,eAAe;AAChC,UAAI,CAAC,eAAe,IAAIA,KAAI,GAAG;AAC7B,eAAO,QAAQ,KAAKA,KAAI;AAAA,MAC1B;AAAA,IACF;AAGA,UAAM,sBAA0C,CAAC;AAEjD,eAAW,QAAQ,cAAc;AAC/B,YAAM,gBAAgB,SAAS,MAAM,KAAK,IAAI;AAE9C,UAAI,kBAAkB,QAAW;AAE/B,eAAO,MAAM,KAAK,KAAK,IAAI;AAAA,MAC7B,OAAO;AAEL,YAAI,KAAK,UAAU,cAAc,SAAS,KAAK,SAAS,cAAc,MAAM;AAE1E,iBAAO,UAAU,KAAK,KAAK,IAAI;AAAA,QACjC,OAAO;AAEL,8BAAoB,KAAK,IAAI;AAAA,QAC/B;AAAA,MACF;AAAA,IACF;AAGA,eAAW,QAAQ,qBAAqB;AACtC,YAAM,gBAAgB,SAAS,MAAM,KAAK,IAAI;AAC9C,UAAI,kBAAkB,QAAW;AAE/B,eAAO,MAAM,KAAK,KAAK,IAAI;AAC3B;AAAA,MACF;AAEA,YAAM,cAAc,MAAM,KAAK,gBAAgB,KAAK,IAAI;AAExD,UAAI,gBAAgB,cAAc,MAAM;AAEtC,eAAO,UAAU,KAAK,KAAK,IAAI;AAAA,MACjC,OAAO;AAEL,eAAO,SAAS,KAAK,KAAK,IAAI;AAAA,MAChC;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAAaA,OAAyC;AAC1D,UAAM,QAAQ,MAAM,KAAKA,KAAI;AAC7B,WAAO;AAAA,MACL,MAAAA;AAAA,MACA,OAAO,MAAM;AAAA,MACb,MAAM,MAAM;AAAA,IACd;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,gBAAgBA,OAA+B;AACnD,UAAM,UAAU,MAAMD,UAASC,KAAI;AACnC,WAAOF,YAAW,KAAK,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK;AAAA,EACvD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,gBACJE,OACA,aACkD;AAClD,UAAM,QAAQ,MAAM,KAAKA,KAAI;AAC7B,UAAM,UAAU,MAAMD,UAASC,KAAI;AACnC,UAAM,OAAOF,YAAW,KAAK,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK;AAG3D,UAAM,EAAE,kBAAAG,kBAAiB,IAAI,MAAM,OAAO,sBAAoB;AAE9D,WAAO;AAAA,MACL,OAAO;AAAA,QACL,OAAO,MAAM;AAAA,QACb,MAAM,MAAM;AAAA,QACZ;AAAA,QACA,aAAa,YAAY,IAAI,CAAC,OAAOA,kBAAiB,EAAE,CAAC;AAAA,MAC3D;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;;;AF7HA,IAAM,SAAS,aAAa,eAAe;AAwB3C,IAAM,kBAAkB,oBAAI,IAAI;AAAA;AAAA,EAE9B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA;AAAA,EAEA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAEM,IAAM,eAAN,MAAmB;AAAA,EACP;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEjB,YACE,YACA,iBACA,UAAwB,CAAC,GACzB;AACA,SAAK,aAAa;AAClB,SAAK,kBAAkB;AACvB,SAAK,UAAU,IAAI,gBAAgB;AAAA,MACjC,WAAW,QAAQ,aAAa;AAAA,MAChC,cAAc,QAAQ,gBAAgB;AAAA,IACxC,CAAC;AACD,SAAK,mBAAmB,QAAQ;AAChC,SAAK,kBAAkB,QAAQ;AAC/B,SAAK,eAAe,IAAI,aAAa;AACrC,SAAK,cAAc,QAAQ,eAAe;AAE1C,UAAM,SAAS,+BAA+B,QAAQ,kBAAkB,CAAC,CAAC;AAC1E,SAAK,aAAa,OAAO;AACzB,SAAK,qBAAqB,OAAO;AAAA,EACnC;AAAA,EAEA,MAAM,WAAW,OAAc,YAA6D;AAC1F,WAAO;AAAA,MACL;AAAA,QACE,SAAS,MAAM;AAAA,QACf,WAAW,MAAM;AAAA,QACjB,WAAW,MAAM;AAAA,MACnB;AAAA,MACA;AAAA,IACF;AAEA,QAAI;AACF,UAAI,MAAM,SAAS,UAAU,MAAM,SAAS,QAAQ;AAClD,eAAO,MAAM,KAAK,eAAe,OAAO,UAAU;AAAA,MACpD;AAEA,aAAO;AAAA,QACL,EAAE,SAAS,MAAM,IAAI,WAAW,MAAM,KAAK;AAAA,QAC3C;
AAAA,MACF;AACA,aAAO,IAAI,IAAI,MAAM,0CAA0C,MAAM,IAAI,EAAE,CAAC;AAAA,IAC9E,SAAS,OAAO;AACd,aAAO;AAAA,QACL;AAAA,UACE,SAAS,MAAM;AAAA,UACf,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QAC9D;AAAA,QACA;AAAA,MACF;AACA,aAAO,IAAI,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAAA,IACtE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,sBACJ,OACA,YACyC;AACzC,QAAI,KAAK,oBAAoB,QAAW;AACtC,aAAO,IAAI,IAAI,MAAM,mDAAmD,CAAC;AAAA,IAC3E;AAEA,QAAI,MAAM,SAAS,UAAU,MAAM,SAAS,QAAQ;AAClD,aAAO,IAAI,IAAI,MAAM,sDAAsD,MAAM,IAAI,EAAE,CAAC;AAAA,IAC1F;AAEA,WAAO;AAAA,MACL;AAAA,QACE,SAAS,MAAM;AAAA,QACf,WAAW,MAAM;AAAA,QACjB,WAAW,MAAM;AAAA,MACnB;AAAA,MACA;AAAA,IACF;AAEA,UAAM,YAAY,KAAK,IAAI;AAE3B,QAAI;AAEF,YAAM,WAAW,MAAM,KAAK,gBAAgB,KAAK,MAAM,EAAE;AAGzD,YAAM,YAAY,MAAM,KAAK,cAAc,MAAM,IAAI;AACrD,YAAM,eAAe,MAAM,QAAQ;AAAA,QACjC,UAAU,IAAI,CAACC,UAAS,KAAK,aAAa,aAAaA,KAAI,CAAC;AAAA,MAC9D;AAGA,YAAM,QAAQ,MAAM,KAAK,aAAa,cAAc,UAAU,YAAY;AAE1E,aAAO;AAAA,QACL;AAAA,UACE,SAAS,MAAM;AAAA,UACf,OAAO,MAAM,MAAM;AAAA,UACnB,UAAU,MAAM,SAAS;AAAA,UACzB,SAAS,MAAM,QAAQ;AAAA,UACvB,WAAW,MAAM,UAAU;AAAA,QAC7B;AAAA,QACA;AAAA,MACF;AAGA,YAAM,sBAAoC,CAAC;AAC3C,iBAAWA,SAAQ,CAAC,GAAG,MAAM,UAAU,GAAG,MAAM,OAAO,GAAG;AACxD,cAAM,YAAY,SAAS,MAAMA,KAAI;AACrC,YAAI,cAAc,QAAW;AAC3B,8BAAoB,KAAK,GAAG,UAAU,WAAW;AAAA,QACnD;AAAA,MACF;AAGA,UAAI,oBAAoB,SAAS,GAAG;AAClC,cAAM,KAAK,WAAW,gBAAgB,MAAM,IAAI,mBAAmB;AACnE,eAAO;AAAA,UACL,EAAE,SAAS,MAAM,IAAI,OAAO,oBAAoB,OAAO;AAAA,UACvD;AAAA,QACF;AAAA,MACF;AAGA,YAAM,iBAAiB,CAAC,GAAG,MAAM,OAAO,GAAG,MAAM,QAAQ;AACzD,YAAM,aAAa,eAAe;AAElC,mBAAa;AAAA,QACX,MAAM;AAAA,QACN,SAAS;AAAA,QACT,OAAO;AAAA,QACP,SAAS,cAAc,OAAO,UAAU,CAAC;AAAA,MAC3C,CAAC;AAED,YAAM,YAAwB,CAAC;AAC/B,YAAM,mBAAmD,CAAC;AAC1D,UAAI,iBAAiB;AAGrB,iBAAWA,SAAQ,MAAM,WAAW;AAClC,cAAM,gBAAgB,SAAS,MAAMA,KAAI;AACzC,YAAI,kBAAkB,QAAW;AAC/B,2BAAiBA,KAAI,IAAI;AAAA,QAC3B;AAAA,MACF;AAGA,eAAS,IAAI,GAAG,IAAI,eAAe,QAAQ,KAAK,KAAK,aAAa;AAChE,cAAM,QAAQ,eAAe,MAAM,GAAG,IAAI,KAAK,WAAW;AAE1D,cAAM,eAAe,MAAM,QAAQ;AAAA,UACjC,MAAM,IAAI,OAAO,aAAa;AAC5B,gBAAI;AACF,oBAAM,SAAS,MAAM,KAAK,YAAY,UAAU,KAAK;AACrD,oBAAM,cAAc,OAAO,UAAU,IAAI,CAAC,MAAM,EAAE,EAAE;AAGpD,oBAAM,EAAE,MAAM,IAAI,MAAM,KAAK,aAAa,gBAAgB,UAAU,WAAW;AAE/E,qBAAO;AAAA,gBACL;AAAA,gBACA,WAAW,OAAO;AAAA,gBAClB,WAAW;AAAA,cACb;AAAA,YACF,SAAS,OAAO;AACd,qBAAO;AAAA,gBACL,EAAE,UAAU,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,EAAE;AAAA,gBAC1E;AAAA,cACF;AACA,qBAAO;AAAA,YACT;AAAA,UACF,CAAC;AAAA,QACH;AAGA,mBAAW,UAAU,cAAc;AACjC,cAAI,WAAW,MAAM;AACnB,sBAAU,KAAK,GAAG,OAAO,SAAS;AAClC,6BAAiB,OAAO,QAAQ,IAAI,OAAO;AAAA,UAC7C;AAAA,QACF;AAEA,0BAAkB,MAAM;AAExB,qBAAa;AAAA,UACX,MAAM;AAAA,UACN,SAAS;AAAA,UACT,OAAO;AAAA,UACP,SAAS,aAAa,OAAO,cAAc,CAAC,IAAI,OAAO,UAAU,CAAC;AAAA,QACpE,CAAC;AAAA,MACH;AAGA,UAAI,UAAU,SAAS,GAAG;AACxB,cAAM,KAAK,WAAW,aAAa,MAAM,IAAI,SAAS;AAAA,MACxD;AAGA,UAAI,oBAAoB,SAAS,KAAK,UAAU,SAAS,GAAG;AAC1D,cAAM,KAAK,WAAW,eAAe,MAAM,EAAE;AAAA,MAC/C;AAGA,UAAI,KAAK,kBAAkB;AACzB,cAAM,mBAAmB,CAAC,OAAO,QAAQ,OAAO,QAAQ,OAAO,OAAO,KAAK;AAC3E,cAAM,mBACJ,eAAe,KAAK,CAAC,MAAM,iBAAiB,SAAS,QAAQ,CAAC,EAAE,YAAY,CAAC,CAAC,KAC9E,MAAM,QAAQ,KAAK,CAAC,MAAM,iBAAiB,SAAS,QAAQ,CAAC,EAAE,YAAY,CAAC,CAAC;AAE/E,YAAI,kBAAkB;AAEpB,gBAAM,iBAA2D,CAAC;AAClE,gBAAM,WAAW,CAAC,GAAG,MAAM,WAAW,GAAG,cAAc;AAEvD,qBAAW,YAAY,UAAU;AAC/B,kBAAM,MAAM,QAAQ,QAAQ,EAAE,YAAY;AAC1C,gBAAI,iBAAiB,SAAS,GAAG,GAAG;AAClC,kBAAI;AACF,sBAAM,UAAU,MAAMC,UAAS,UAAU,OAAO;AAChD,+BAAe,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AAAA,cACjD,QAAQ;AAAA,cAER;AAAA,YACF;AAAA,UACF;AAEA,cAAI,eAAe,SAAS,GAAG;AAC7B,kBAAM,QAAQ,MAAM,KAAK,iBAAiB,WAAW,cAAc;AACnE,kBAAM,KAAK,iBAAiB,UAAU,MAAM,IAAI,KAAK;AACrD,mBAAO;AAAA,cACL,EAAE,SAAS,MAAM,IAAI,aAAa,eAAe,OAAO;AAAA,cACxD;AAAA,YACF;AAAA,
UACF,OAAO;AAEL,kBAAM,KAAK,iBAAiB,YAAY,MAAM,EAAE;AAChD,mBAAO;AAAA,cACL,EAAE,SAAS,MAAM,GAAG;AAAA,cACpB;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAGA,YAAM,kBAAsC;AAAA,QAC1C,SAAS;AAAA,QACT,SAAS,MAAM;AAAA,QACf,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,OAAO;AAAA,MACT;AACA,YAAM,KAAK,gBAAgB,KAAK,eAAe;AAE/C,mBAAa;AAAA,QACX,MAAM;AAAA,QACN,SAAS;AAAA,QACT,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAED,YAAM,SAAS,KAAK,IAAI,IAAI;AAE5B,aAAO;AAAA,QACL;AAAA,UACE,SAAS,MAAM;AAAA,UACf,WAAW,MAAM;AAAA,UACjB,YAAY,MAAM,MAAM;AAAA,UACxB,eAAe,MAAM,SAAS;AAAA,UAC9B,cAAc,MAAM,QAAQ;AAAA,UAC5B,gBAAgB,MAAM,UAAU;AAAA,UAChC,eAAe,UAAU;AAAA,UACzB;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAEA,aAAO,GAAG;AAAA,QACR,cAAc,eAAe;AAAA,QAC7B,eAAe,UAAU;AAAA,QACzB;AAAA,QACA,YAAY,MAAM,MAAM;AAAA,QACxB,eAAe,MAAM,SAAS;AAAA,QAC9B,cAAc,MAAM,QAAQ;AAAA,QAC5B,gBAAgB,MAAM,UAAU;AAAA,MAClC,CAAC;AAAA,IACH,SAAS,OAAO;AACd,aAAO;AAAA,QACL;AAAA,UACE,SAAS,MAAM;AAAA,UACf,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QAC9D;AAAA,QACA;AAAA,MACF;AACA,aAAO,IAAI,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAAA,IACtE;AAAA,EACF;AAAA,EAEA,MAAc,eACZ,OACA,YAC8B;AAC9B,UAAM,YAAY,KAAK,IAAI;AAG3B,UAAM,KAAK,WAAW,kBAAkB,MAAM,EAAE;AAGhD,QAAI,KAAK,iBAAiB;AACxB,YAAM,KAAK,gBAAgB,OAAO,MAAM,EAAE;AAAA,IAC5C;AAEA,UAAM,QAAQ,MAAM,KAAK,cAAc,MAAM,IAAI;AACjD,UAAM,YAAwB,CAAC;AAC/B,QAAI,iBAAiB;AAErB,WAAO;AAAA,MACL;AAAA,QACE,SAAS,MAAM;AAAA,QACf,MAAM,MAAM;AAAA,QACZ,WAAW,MAAM;AAAA,QACjB,aAAa,KAAK;AAAA,MACpB;AAAA,MACA;AAAA,IACF;AAGA,UAAM,cAAwD,CAAC;AAG/D,iBAAa;AAAA,MACX,MAAM;AAAA,MACN,SAAS;AAAA,MACT,OAAO,MAAM;AAAA,MACb,SAAS;AAAA,IACX,CAAC;AAGD,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,KAAK,aAAa;AACvD,YAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,KAAK,WAAW;AAEjD,YAAM,eAAe,MAAM,QAAQ;AAAA,QACjC,MAAM,IAAI,OAAO,aAAa;AAC5B,cAAI;AACF,mBAAO,MAAM,KAAK,YAAY,UAAU,KAAK;AAAA,UAC/C,SAAS,OAAO;AACd,mBAAO;AAAA,cACL,EAAE,UAAU,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,EAAE;AAAA,cAC1E;AAAA,YACF;AACA,mBAAO,EAAE,WAAW,CAAC,GAAG,YAAY,OAAU;AAAA,UAChD;AAAA,QACF,CAAC;AAAA,MACH;AAGA,iBAAW,UAAU,cAAc;AACjC,kBAAU,KAAK,GAAG,OAAO,SAAS;AAClC,YAAI,OAAO,eAAe,QAAW;AACnC,sBAAY,KAAK,OAAO,UAAU;AAAA,QACpC;AAAA,MACF;AAEA,wBAAkB,MAAM;AAGxB,mBAAa;AAAA,QACX,MAAM;AAAA,QACN,SAAS;AAAA,QACT,OAAO,MAAM;AAAA,QACb,SAAS,WAAW,OAAO,cAAc,CAAC,IAAI,OAAO,MAAM,MAAM,CAAC;AAAA,MACpE,CAAC;AAAA,IACH;AAEA,QAAI,UAAU,SAAS,GAAG;AACxB,YAAM,KAAK,WAAW,aAAa,MAAM,IAAI,SAAS;AAEtD,YAAM,KAAK,WAAW,eAAe,MAAM,EAAE;AAAA,IAC/C;AAGA,QAAI,KAAK,oBAAoB,YAAY,SAAS,GAAG;AACnD,YAAM,QAAQ,MAAM,KAAK,iBAAiB,WAAW,WAAW;AAChE,YAAM,KAAK,iBAAiB,UAAU,MAAM,IAAI,KAAK;AAAA,IACvD,WAAW,KAAK,kBAAkB;AAEhC,YAAM,KAAK,iBAAiB,YAAY,MAAM,EAAE;AAAA,IAClD;AAGA,iBAAa;AAAA,MACX,MAAM;AAAA,MACN,SAAS,MAAM;AAAA,MACf,OAAO,MAAM;AAAA,MACb,SAAS;AAAA,IACX,CAAC;AAED,UAAM,SAAS,KAAK,IAAI,IAAI;AAE5B,WAAO;AAAA,MACL;AAAA,QACE,SAAS,MAAM;AAAA,QACf,WAAW,MAAM;AAAA,QACjB,cAAc;AAAA,QACd,eAAe,UAAU;AAAA,QACzB,qBAAqB,YAAY;AAAA,QACjC;AAAA,MACF;AAAA,MACA;AAAA,IACF;AAEA,WAAO,GAAG;AAAA,MACR,cAAc;AAAA,MACd,eAAe,UAAU;AAAA,MACzB;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,YACZ,UACA,OAIC;AACD,UAAM,UAAU,MAAMA,UAAS,UAAU,OAAO;AAChD,UAAM,WAAWC,YAAW,KAAK,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK;AAC/D,UAAM,SAAS,KAAK,QAAQ,MAAM,SAAS,QAAQ;AAInD,UAAM,eAAe,SAAS,MAAM,MAAM,QAAQ;AAClD,UAAM,WAAWA,YAAW,KAAK,EAAE,OAAO,YAAY,EAAE,OAAO,KAAK,EAAE,MAAM,GAAG,CAAC;AAEhF,UAAM,MAAM,QAAQ,QAAQ,EAAE,YAAY;AAC1C,UAAM,WAAW,SAAS,QAAQ,EAAE,YAAY;AAChD,UAAM,WAAW,KAAK,iBAAiB,KAAK,UAAU,QAAQ;AAG9D,UAAM,aAAa,CAAC,OAAO,QAAQ,OAAO,QAAQ,OAAO,OAAO,KAAK,EAAE,SAAS,GAAG,IAC/E,EAAE,MAAM,UAAU,QAAQ,IAC1B;AAGJ,QAAI,OAAO,WAAW,GAAG;AACvB,aAAO,EAAE,WAAW,CAAC,GAAG,WAAW;AAAA,IACrC;AAGA,UAAM,gBAAgB,OAAO,IAAI,CAAC,MAAM,EAAE,OAAO;AACjD,UAAM,UAAU
,MAAM,KAAK,gBAAgB,WAAW,aAAa;AAEnE,UAAM,YAAwB,CAAC;AAC/B,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,YAAM,QAAQ,OAAO,CAAC;AACtB,YAAM,SAAS,QAAQ,CAAC;AAGxB,UAAI,UAAU,UAAa,WAAW,QAAW;AAC/C,cAAM,IAAI;AAAA,UACR,kCAAkC,OAAO,CAAC,CAAC,WAAW,OAAO,UAAU,MAAS,CAAC,YAAY,OAAO,WAAW,MAAS,CAAC;AAAA,QAC3H;AAAA,MACF;AAGA,YAAM,UACJ,OAAO,SAAS,IACZ,GAAG,MAAM,EAAE,IAAI,QAAQ,IAAI,QAAQ,IAAI,OAAO,MAAM,UAAU,CAAC,KAC/D,GAAG,MAAM,EAAE,IAAI,QAAQ,IAAI,QAAQ;AAEzC,gBAAU,KAAK;AAAA,QACb,IAAI,iBAAiB,OAAO;AAAA,QAC5B,SAAS,MAAM;AAAA,QACf;AAAA,QACA,UAAU;AAAA,UACR,MAAM,OAAO,SAAS,IAAI,UAAU;AAAA,UACpC,SAAS,MAAM;AAAA,UACf,MAAM;AAAA,UACN,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,UAClC;AAAA,UACA,YAAY,MAAM;AAAA,UAClB,aAAa,MAAM;AAAA,UACnB;AAAA,UACA,eAAe,MAAM;AAAA,UACrB,cAAc,MAAM;AAAA,UACpB,gBAAgB,qBAAqB,KAAK,MAAM,OAAO;AAAA,UACvD,YAAY,MAAM;AAAA,QACpB;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO,EAAE,WAAW,WAAW;AAAA,EACjC;AAAA,EAEA,MAAc,cAAc,KAAgC;AAC1D,UAAM,QAAkB,CAAC;AACzB,UAAM,UAAU,MAAM,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAE1D,eAAW,SAAS,SAAS;AAC3B,YAAM,WAAWC,MAAK,KAAK,MAAM,IAAI;AAErC,UAAI,MAAM,YAAY,GAAG;AAEvB,YAAI,CAAC,KAAK,WAAW,IAAI,MAAM,IAAI,GAAG;AACpC,gBAAM,KAAK,GAAI,MAAM,KAAK,cAAc,QAAQ,CAAE;AAAA,QACpD;AAAA,MACF,WAAW,MAAM,OAAO,GAAG;AAEzB,cAAM,eAAe,KAAK,mBAAmB,KAAK,CAAC,YAAY,QAAQ,MAAM,IAAI,CAAC;AAClF,YAAI,cAAc;AAChB;AAAA,QACF;AAEA,cAAM,MAAM,QAAQ,MAAM,IAAI,EAAE,YAAY;AAC5C,YAAI,gBAAgB,IAAI,GAAG,GAAG;AAC5B,gBAAM,KAAK,QAAQ;AAAA,QACrB;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,iBAAiB,KAAa,UAAkB,UAA0B;AAEhF,QAAI,QAAQ,OAAO;AAEjB,UAAI,aAAa,kBAAkB,aAAa,gBAAgB,aAAa,KAAK,QAAQ,GAAG;AAC3F,eAAO;AAAA,MACT;AAEA,UAAI,CAAC,aAAa,gBAAgB,iBAAiB,EAAE,SAAS,QAAQ,GAAG;AACvE,eAAO;AAAA,MACT;AAEA,UAAI,0DAA0D,KAAK,QAAQ,GAAG;AAC5E,eAAO;AAAA,MACT;AACA,aAAO;AAAA,IACT;AAGA,QAAI,0BAA0B,KAAK,QAAQ,KAAK,gBAAgB,KAAK,QAAQ,GAAG;AAC9E,aAAO;AAAA,IACT;AAGA,QAAI,gBAAgB,KAAK,QAAQ,KAAK,SAAS,SAAS,SAAS,GAAG;AAClE,aAAO;AAAA,IACT;AAGA,QAAI,qEAAqE,KAAK,QAAQ,GAAG;AACvF,aAAO;AAAA,IACT;AAGA,QAAI,CAAC,OAAO,QAAQ,OAAO,QAAQ,OAAO,OAAO,OAAO,OAAO,EAAE,SAAS,GAAG,GAAG;AAG9E,UAAI,KAAK,yBAAyB,UAAU,QAAQ,GAAG;AACrD,eAAO;AAAA,MACT;AACA,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,yBAAyB,UAAkB,UAA2B;AAC5E,UAAM,YAAY,SAAS,YAAY;AACvC,UAAM,gBAAgB,SAAS,YAAY;AAG3C,QAAI,2BAA2B,KAAK,SAAS,GAAG;AAE9C,UAAI,kBAAkB,cAAc,kBAAkB,YAAY;AAChE,eAAO;AAAA,MACT;AACA,aAAO;AAAA,IACT;AAGA,QAAI,uDAAuD,KAAK,SAAS,GAAG;AAC1E,aAAO;AAAA,IACT;AAGA,QACE,2CAA2C,KAAK,SAAS,KACzD,CAAC,cAAc,SAAS,QAAQ,KAChC,CAAC,cAAc,SAAS,OAAO,GAC/B;AACA,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AACF;AAMO,SAAS,uBAAuB,KAAa,OAAwB;AAC1E,QAAM,WAAW,IAAI,YAAY;AACjC,QAAM,cAAc,SAAS,IAAI,YAAY;AAG7C,MACE,sCAAsC,KAAK,QAAQ,KACnD,mCAAmC,KAAK,UAAU,GAClD;AACA,WAAO;AAAA,EACT;AAGA,MACE,uDAAuD,KAAK,QAAQ,KACpE,yCAAyC,KAAK,UAAU,GACxD;AACA,WAAO;AAAA,EACT;AAGA,MAAI,wDAAwD,KAAK,QAAQ,GAAG;AAC1E,WAAO;AAAA,EACT;AAGA,MAAI,0CAA0C,KAAK,QAAQ,GAAG;AAC5D,WAAO;AAAA,EACT;AAGA,MAAI,+BAA+B,KAAK,QAAQ,GAAG;AACjD,WAAO;AAAA,EACT;AAGA,MAAI,YAAY,KAAK,QAAQ,GAAG;AAC9B,WAAO;AAAA,EACT;AAGA,SAAO;AACT;;;AG1zBA,SAAS,YAAAC,WAAU,UAAAC,SAAQ,SAAAC,cAAa;AACxC,SAAS,QAAAC,aAAY;;;ACDrB,SAAS,KAAAC,UAAS;AAsBX,IAAM,kBAAkBA,GAAE,OAAO;AAAA;AAAA,EAEtC,OAAOA,GAAE,OAAO;AAAA;AAAA,EAEhB,MAAMA,GAAE,OAAO;AAAA;AAAA,EAEf,MAAMA,GAAE,OAAO;AAAA;AAAA,EAEf,aAAaA,GAAE,MAAMA,GAAE,OAAO,CAAC;AACjC,CAAC;AAYM,IAAM,sBAAsBA,GAAE,OAAO;AAAA;AAAA,EAE1C,SAASA,GAAE,QAAQ,CAAC;AAAA;AAAA,EAEpB,SAASA,GAAE,OAAO;AAAA;AAAA,EAElB,WAAWA,GAAE,OAAO;AAAA;AAAA,EAEpB,OAAOA,GAAE,OAAOA,GAAE,OAAO,GAAG,eAAe;AAC7C,CAAC;AAsDM,SAAS,oBAAoB,SAAsC;AACxE,SAAO;AAAA,IACL,SAAS;AAAA,IACT;AAAA,IACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IAClC,OAAO,CAAC;AAAA,EACV;AACF;;;
ADnGO,IAAM,kBAAN,MAAsB;AAAA,EACV;AAAA,EAEjB,YAAY,SAAiB;AAC3B,SAAK,eAAeC,MAAK,SAAS,WAAW;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAA4B;AAChC,UAAMC,OAAM,KAAK,cAAc,EAAE,WAAW,KAAK,CAAC;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAgB,SAA0B;AACxC,WAAOD,MAAK,KAAK,cAAc,GAAG,OAAO,gBAAgB;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,KAAK,SAA+C;AACxD,UAAM,eAAe,KAAK,gBAAgB,OAAO;AAEjD,UAAM,SAAS,MAAM,KAAK,WAAW,YAAY;AACjD,QAAI,CAAC,QAAQ;AACX,aAAO,oBAAoB,OAAO;AAAA,IACpC;AAEA,UAAM,UAAU,MAAME,UAAS,cAAc,OAAO;AACpD,QAAI;AACJ,QAAI;AACF,eAAS,KAAK,MAAM,OAAO;AAAA,IAC7B,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,+BAA+B,YAAY,KACzC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CACvD;AAAA,MACF;AAAA,IACF;AAEA,UAAM,SAAS,oBAAoB,UAAU,MAAM;AACnD,QAAI,CAAC,OAAO,SAAS;AACnB,YAAM,IAAI,MAAM,uBAAuB,YAAY,KAAK,OAAO,MAAM,OAAO,EAAE;AAAA,IAChF;AAGA,WAAO,KAAK,gBAAgB,OAAO,MAAM,OAAO;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,KAAK,UAA6C;AACtD,UAAM,eAAe,KAAK,gBAAgB,SAAS,OAAO;AAG1D,UAAM,SAAS;AAAA,MACb,GAAG;AAAA,MACH,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IACpC;AAEA,UAAM,gBAAgB,cAAc,KAAK,UAAU,QAAQ,MAAM,CAAC,CAAC;AAAA,EACrE;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,OAAO,SAAiC;AAC5C,UAAM,eAAe,KAAK,gBAAgB,OAAO;AACjD,UAAM,EAAE,OAAO,IAAI,MAAM,OAAO,aAAkB;AAElD,UAAM,SAAS,MAAM,KAAK,WAAW,YAAY;AACjD,QAAI,QAAQ;AACV,YAAM,OAAO,YAAY;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,WAAWC,OAAgC;AACvD,QAAI;AACF,YAAMC,QAAOD,KAAI;AACjB,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,gBACN,MACA,SACoB;AACpB,UAAM,QAAwC,CAAC;AAE/C,eAAW,CAACA,OAAM,KAAK,KAAK,OAAO,QAAQ,KAAK,KAAK,GAAG;AACtD,YAAMA,KAAI,IAAI;AAAA,QACZ,OAAO,MAAM;AAAA,QACb,MAAM,MAAM;AAAA,QACZ,MAAM,MAAM;AAAA,QACZ,aAAa,MAAM,YAAY,IAAI,CAAC,OAAO,iBAAiB,EAAE,CAAC;AAAA,MACjE;AAAA,IACF;AAEA,WAAO;AAAA,MACL,SAAS;AAAA,MACT;AAAA,MACA,WAAW,KAAK;AAAA,MAChB;AAAA,IACF;AAAA,EACF;AACF;;;AE7HO,IAAM,kBAAN,MAAsB;AAAA,EAC3B,gBAAgB,MAAc,YAAoB,UAAwC;AACxF,UAAM,QAAQ,KAAK,MAAM,IAAI;AAG7B,QAAI,YAAY;AAChB,QAAI,OAAyB;AAE7B,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,YAAM,OAAO,MAAM,CAAC,KAAK;AAEzB,UAAI,KAAK,SAAS,YAAY,UAAU,EAAE,GAAG;AAC3C,oBAAY,IAAI;AAChB,eAAO;AACP;AAAA,MACF;AAEA,UAAI,KAAK,SAAS,SAAS,UAAU,EAAE,GAAG;AACxC,oBAAY,IAAI;AAChB,eAAO;AACP;AAAA,MACF;AAGA,UAAI,KAAK,MAAM,IAAI,OAAO,gBAAgB,UAAU,aAAa,CAAC,GAAG;AACnE,oBAAY,IAAI;AAChB,eAAO;AACP;AAAA,MACF;AAGA,UAAI,KAAK,MAAM,IAAI,OAAO,WAAW,UAAU,aAAa,CAAC,GAAG;AAC9D,oBAAY,IAAI;AAChB,eAAO;AACP;AAAA,MACF;AAGA,UAAI,KAAK,MAAM,IAAI,OAAO,wBAAwB,UAAU,OAAO,CAAC,GAAG;AACrE,oBAAY,IAAI;AAChB,eAAO;AACP;AAAA,MACF;AAAA,IACF;AAEA,QAAI,cAAc,GAAI,QAAO;AAG7B,QAAI,UAAU;AACd,QAAI,aAAa;AACjB,QAAI,kBAAkB;AAGtB,QAAI,SAAS,QAAQ;AACnB,YAAME,aAAY,MAAM,YAAY,CAAC,KAAK;AAC1C,UAAI,CAACA,WAAU,SAAS,GAAG,KAAKA,WAAU,SAAS,GAAG,GAAG;AAEvD,kBAAU;AACV,cAAMC,eAAcD;AACpB,cAAME,aAAY,KAAK,iBAAiBF,YAAW,YAAY,IAAI;AACnE,eAAO;AAAA,UACL;AAAA,UACA,MAAM;AAAA,UACN,WAAAE;AAAA,UACA,aAAAD;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,QAAI,gBAAgB;AACpB,QAAI,gBAAgB;AACpB,QAAI,oBAAoB;AACxB,QAAI,qBAAqB;AAEzB,aAAS,IAAI,YAAY,GAAG,IAAI,MAAM,QAAQ,KAAK;AACjD,YAAM,OAAO,MAAM,CAAC,KAAK;AACzB,UAAI,sBAAsB;AAE1B,eAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,cAAM,OAAO,KAAK,CAAC;AACnB,cAAM,WAAW,IAAI,IAAI,KAAK,IAAI,CAAC,IAAI;AACvC,cAAM,WAAW,IAAI,KAAK,SAAS,IAAI,KAAK,IAAI,CAAC,IAAI;AAGrD,YAAI,aAAa,SAAS,iBAAiB,iBAAiB,oBAAoB;AAC9E;AAAA,QACF;AAGA,YAAI,oBAAoB;AACtB,cAAI,SAAS,OAAO,aAAa,KAAK;AACpC,iCAAqB;AACrB;AAAA,UACF;AACA;AAAA,QACF;AAGA,YAAI,qBAAqB;AACvB;AAAA,QACF;AAGA,YAAI,eAAe;AACjB,cAAI,SAAS,IAAK,iBAAgB;AAClC;AAAA,QACF;AACA,YAAI,eAAe;AACjB,cAAI,SAAS,IAAK,iBAAgB;AAClC;AAAA,QACF;AACA,YAAI,mBAAmB;AACrB,cAAI,SAAS,IAAK,qBAAoB;AACtC;AAAA,QACF;AAGA,YAAI,SAAS,OAAO,
aAAa,KAAK;AACpC,+BAAqB;AACrB;AACA;AAAA,QACF;AACA,YAAI,SAAS,OAAO,aAAa,KAAK;AACpC,gCAAsB;AACtB;AAAA,QACF;AACA,YAAI,SAAS,KAAK;AAChB,0BAAgB;AAChB;AAAA,QACF;AACA,YAAI,SAAS,KAAK;AAChB,0BAAgB;AAChB;AAAA,QACF;AACA,YAAI,SAAS,KAAK;AAChB,8BAAoB;AACpB;AAAA,QACF;AAGA,YAAI,SAAS,KAAK;AAChB;AACA,4BAAkB;AAAA,QACpB;AACA,YAAI,SAAS,IAAK;AAAA,MACpB;AAEA,UAAI,mBAAmB,eAAe,GAAG;AACvC,kBAAU,IAAI;AACd;AAAA,MACF;AAAA,IACF;AAEA,UAAM,cAAc,MAAM,MAAM,YAAY,GAAG,OAAO,EAAE,KAAK,IAAI;AAGjE,UAAM,YAAY,MAAM,YAAY,CAAC,KAAK;AAC1C,UAAM,YAAY,KAAK,iBAAiB,WAAW,YAAY,IAAI;AAEnE,WAAO;AAAA,MACL;AAAA,MACA,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,iBAAiB,MAAc,MAAc,MAAsB;AAEzE,UAAM,MAAM,KACT,QAAQ,iBAAiB,EAAE,EAC3B,QAAQ,gBAAgB,EAAE,EAC1B,KAAK;AAER,QAAI,SAAS,YAAY;AAGvB,YAAM,QAAQ,IAAI,MAAM,6CAA6C;AACrE,UAAI,QAAQ,CAAC,MAAM,UAAa,MAAM,CAAC,EAAE,SAAS,EAAG,QAAO,MAAM,CAAC,EAAE,KAAK;AAAA,IAC5E;AAEA,QAAI,SAAS,SAAS;AACpB,aAAO,SAAS,IAAI;AAAA,IACtB;AAEA,QAAI,SAAS,aAAa;AACxB,aAAO,aAAa,IAAI;AAAA,IAC1B;AAEA,QAAI,SAAS,QAAQ;AAEnB,YAAM,YAAY,IAAI,MAAM,IAAI,OAAO,YAAY,IAAI,oBAAoB,CAAC;AAC5E,UAAI,YAAY,CAAC,MAAM,UAAa,UAAU,CAAC,EAAE,SAAS,GAAG;AAC3D,eAAO,QAAQ,UAAU,CAAC,CAAC;AAAA,MAC7B;AACA,aAAO,QAAQ,IAAI;AAAA,IACrB;AAEA,QAAI,SAAS,SAAS;AAIpB,YAAM,aAAa,IAAI;AAAA,QACrB,IAAI;AAAA,UACF,yBAAyB,IAAI;AAAA,QAC/B;AAAA,MACF;AACA,YAAM,aAAa,aAAa,CAAC;AACjC,UAAI,eAAe,UAAa,eAAe,GAAI,QAAO,WAAW,KAAK;AAG1E,aAAO,SAAS,IAAI;AAAA,IACtB;AAEA,WAAO;AAAA,EACT;AACF;;;ACvNA,IAAME,UAAS,aAAa,gBAAgB;AAsB5C,IAAM,qBAAkE;AAAA,EACtE,UAAU;AAAA,IACR,yBAAyB;AAAA;AAAA,IACzB,eAAe;AAAA,IACf,SAAS;AAAA;AAAA,IACT,QAAQ;AAAA;AAAA,IACR,mBAAmB;AAAA;AAAA,IACnB,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,WAAW;AAAA;AAAA,IACX,OAAO;AAAA,EACT;AAAA,EACA,gBAAgB;AAAA,IACd,yBAAyB;AAAA,IACzB,eAAe;AAAA,IACf,SAAS;AAAA,IACT,QAAQ;AAAA;AAAA,IACR,mBAAmB;AAAA;AAAA,IACnB,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,WAAW;AAAA;AAAA,IACX,OAAO;AAAA,EACT;AAAA,EACA,YAAY;AAAA,IACV,yBAAyB;AAAA,IACzB,eAAe;AAAA,IACf,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,mBAAmB;AAAA,IACnB,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,WAAW;AAAA;AAAA,IACX,OAAO;AAAA,EACT;AAAA,EACA,YAAY;AAAA,IACV,yBAAyB;AAAA,IACzB,eAAe;AAAA,IACf,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,mBAAmB;AAAA,IACnB,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,WAAW;AAAA;AAAA,IACX,OAAO;AAAA,EACT;AAAA,EACA,WAAW;AAAA,IACT,yBAAyB;AAAA,IACzB,eAAe;AAAA,IACf,SAAS;AAAA,IACT,QAAQ;AAAA;AAAA,IACR,mBAAmB;AAAA,IACnB,MAAM;AAAA;AAAA,IACN,QAAQ;AAAA,IACR,WAAW;AAAA;AAAA,IACX,OAAO;AAAA,EACT;AACF;AAGA,IAAM,qBAAkE;AAAA,EACtE,EAAE,SAAS,gBAAgB,OAAO,CAAC,WAAW,aAAa,YAAY,EAAE;AAAA,EACzE,EAAE,SAAS,aAAa,OAAO,CAAC,MAAM,EAAE;AAAA,EACxC,EAAE,SAAS,YAAY,OAAO,CAAC,KAAK,EAAE;AAAA,EACtC,EAAE,SAAS,cAAc,OAAO,CAAC,SAAS,WAAW,UAAU,EAAE;AAAA,EACjE,EAAE,SAAS,YAAY,OAAO,CAAC,OAAO,SAAS,UAAU,MAAM,EAAE;AAAA,EACjE,EAAE,SAAS,aAAa,OAAO,CAAC,QAAQ,UAAU,SAAS,EAAE;AAAA,EAC7D,EAAE,SAAS,mBAAmB,OAAO,CAAC,cAAc,IAAI,EAAE;AAAA,EAC1D,EAAE,SAAS,YAAY,OAAO,CAAC,OAAO,gBAAgB,gBAAgB,EAAE;AAC1E;AAGA,IAAM,kBAAkB;AAAA,EACtB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,0BAA0B;AAAA,EAC9B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,sBAAsB;AAAA,EAC1B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,qBAAqB;AAAA,EACzB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,sBAAsB;AAAA,EAC1B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAMA,SAAS,qBAAqB,OAAmC;AAC/D,QAAM,IAAI,MAAM,YAAY;AAC5B,QAAM,UAA8B,CAAC;AAGrC,MAAI,wBAAwB,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,GAAG;AAClD,YAAQ,KAAK,EAAE,QAAQ,kBAAkB,YAAY,IAAI,CAAC;AAAA,EAC5D;AAEA,MAAI,mBAAmB,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,GAAG;AAC7C,YAAQ,KAAK,EAAE,QAAQ
,aAAa,YAAY,KAAK,CAAC;AAAA,EACxD;AAEA,MAAI,oBAAoB,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,GAAG;AAC9C,YAAQ,KAAK,EAAE,QAAQ,cAAc,YAAY,IAAI,CAAC;AAAA,EACxD;AAEA,MAAI,gBAAgB,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,GAAG;AAC1C,YAAQ,KAAK,EAAE,QAAQ,UAAU,YAAY,KAAK,CAAC;AAAA,EACrD;AAEA,MAAI,oBAAoB,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,GAAG;AAC9C,YAAQ,KAAK,EAAE,QAAQ,cAAc,YAAY,IAAI,CAAC;AAAA,EACxD;AAGA,MAAI,QAAQ,WAAW,GAAG;AACxB,YAAQ,KAAK,EAAE,QAAQ,UAAU,YAAY,IAAI,CAAC;AAAA,EACpD;AAGA,SAAO,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU;AAC3D;AAKA,SAAS,iBAAiB,SAA0C;AAClE,SAAO,QAAQ,CAAC,GAAG,UAAU;AAC/B;AAMA,SAAS,6BAA6B,QAAmC;AACvE,UAAQ,QAAQ;AAAA,IACd,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,EACX;AACF;AAMA,IAAM,cAAc;AAAA,EAClB,MAAM,EAAE,GAAG,IAAI,cAAc,KAAK,WAAW,IAAI;AAAA,EACjD,KAAK,EAAE,GAAG,IAAI,cAAc,MAAM,WAAW,KAAK;AACpD;AAKA,SAAS,kBAAkB,SAAyC;AAClE,QAAM,WAAW,QAAQ,OAAO,CAAC,MAAM,SAAS,EAAE,QAAQ,EAAE;AAC5D,SAAO,WAAW,QAAQ,SAAS,IAAI,QAAQ;AACjD;AAEO,IAAM,gBAAN,MAAoB;AAAA,EACR;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEjB,YACE,YACA,iBACA,kBACA,cACA;AACA,SAAK,aAAa;AAClB,SAAK,kBAAkB;AACvB,SAAK,kBAAkB,IAAI,gBAAgB;AAC3C,SAAK,mBAAmB;AACxB,SAAK,aAAa,oBAAI,IAAI;AAC1B,SAAK,eAAe;AAGpB,QAAI,kBAAkB;AACpB,WAAK,+BAA+B,iBAAiB,oBAAoB,CAAC,UAAU;AAElF,aAAK,WAAW,OAAO,MAAM,OAAO;AAAA,MACtC,CAAC;AAAA,IACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,UAAgB;AACd,SAAK,+BAA+B;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,kBAAkB,SAA6C;AAC3E,QAAI,CAAC,KAAK,iBAAkB,QAAO;AAEnC,UAAM,SAAS,KAAK,WAAW,IAAI,OAAO;AAC1C,QAAI,WAAW,OAAW,QAAO;AAEjC,UAAM,QAAQ,MAAM,KAAK,iBAAiB,UAAU,OAAO;AAC3D,UAAM,SAAS,SAAS;AACxB,SAAK,WAAW,IAAI,SAAS,MAAM;AACnC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,oBAAoB,aAAuC;AACjE,UAAM,gBAAgB,WAAW,QAAQ,IAAI,wBAAwB,KAAK,KAAK;AAC/E,UAAM,kBAAkB,WAAW,QAAQ,IAAI,0BAA0B,KAAK,KAAK;AAEnF,QAAI,eAAe,cAAe,QAAO;AACzC,QAAI,eAAe,gBAAiB,QAAO;AAC3C,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,OAAO,OAA6C;AACxD,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,OAAO,MAAM,QAAQ,KAAK,cAAc,eAAe;AAC7D,UAAM,QAAQ,MAAM,SAAS,KAAK,cAAc,gBAAgB;AAChE,UAAM,SAAS,MAAM,UAAU,CAAC;AAChC,UAAM,SAAS,MAAM,UAAU;AAG/B,UAAM,UAAU,qBAAqB,MAAM,KAAK;AAGhD,UAAM,gBACJ,MAAM,WAAW,SACb,6BAA6B,MAAM,MAAM,IACzC,iBAAiB,OAAO;AAE9B,IAAAA,QAAO;AAAA,MACL;AAAA,QACE,OAAO,MAAM;AAAA,QACb;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,QAAQ;AAAA,QACR,YAAY,MAAM;AAAA,QAClB,uBAAuB;AAAA,QACvB,cAAc,MAAM;AAAA,MACtB;AAAA,MACA;AAAA,IACF;AAEA,QAAI,aAA6B,CAAC;AAClC,QAAI,cAAc;AAGlB,UAAM,aAAa,QAAQ;AAE3B,QAAI,SAAS,UAAU;AAGrB,YAAM,aAAa,MAAM,KAAK,gBAAgB,MAAM,OAAO,QAAQ,UAAU;AAC7E,oBAAc,WAAW,SAAS,IAAK,WAAW,CAAC,GAAG,SAAS,IAAK;AAEpE,mBAAa,KAAK,yBAAyB,YAAY,MAAM,SAAS,EAAE,MAAM,GAAG,UAAU;AAAA,IAC7F,WAAW,SAAS,OAAO;AAEzB,mBAAa,MAAM,KAAK,UAAU,MAAM,OAAO,QAAQ,UAAU;AAAA,IACnE,OAAO;AAEL,YAAM,eAAe,MAAM,KAAK;AAAA,QAC9B,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA,MAAM;AAAA,MACR;AACA,mBAAa,aAAa;AAC1B,oBAAc,aAAa;AAAA,IAC7B;AAIA,QAAI,MAAM,iBAAiB,QAAW;AACpC,UAAI,SAAS,OAAO;AAClB,QAAAA,QAAO;AAAA,UACL,EAAE,OAAO,MAAM,OAAO,cAAc,MAAM,aAAa;AAAA,UACvD;AAAA,QACF;AAAA,MACF,WAAW,cAAc,MAAM,cAAc;AAC3C,cAAMC,UAAS,KAAK,IAAI,IAAI;AAC5B,QAAAD,QAAO;AAAA,UACL;AAAA,YACE,OAAO,MAAM;AAAA,YACb;AAAA,YACA;AAAA,YACA,cAAc,MAAM;AAAA,YACpB,QAAAC;AAAA,UACF;AAAA,UACA;AAAA,QACF;AAEA,eAAO;AAAA,UACL,OAAO,MAAM;AAAA,UACb;AAAA,UACA;AAAA,UACA,SAAS,CAAC;AAAA,UACV,cAAc;AAAA,UACd,QAAAA;AAAA,UACA,YAAY,KAAK,oBAAoB,WAAW;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,UAAM,iBAAiB,KAAK,oBAAoB,YAAY,MAAM,KAAK;AACvE,UAAM,mBAAmB,eAAe,MAAM,GAAG,KAAK;AAGtD,UAAM,SAAS,oBAAI,IAA8B;AACjD,QAAI,WAAW,gBAAgB,WAAW,QAAQ;AAChD,YAAM,WAAW,IAAI,IAAI,iBAAiB,IAAI,CAAC,MAAM,EAAE,SAAS,OAAO,CAAC;AACx
E,iBAAW,WAAW,UAAU;AAC9B,eAAO,IAAI,SAAS,MAAM,KAAK,kBAAkB,OAAO,CAAC;AAAA,MAC3D;AAAA,IACF;AAGA,UAAM,kBAAkB,iBAAiB,IAAI,CAAC,MAAM;AAClD,YAAM,QAAQ,OAAO,IAAI,EAAE,SAAS,OAAO,KAAK;AAChD,aAAO,KAAK,sBAAsB,GAAG,MAAM,OAAO,QAAQ,KAAK;AAAA,IACjE,CAAC;AAED,UAAM,SAAS,KAAK,IAAI,IAAI;AAC5B,UAAM,aAAa,SAAS,QAAQ,KAAK,oBAAoB,WAAW,IAAI;AAE5E,IAAAD,QAAO;AAAA,MACL;AAAA,QACE,OAAO,MAAM;AAAA,QACb;AAAA,QACA,aAAa,gBAAgB;AAAA,QAC7B,aAAa,WAAW;AAAA,QACxB,SAAS,QAAQ,IAAI,CAAC,MAAM,GAAG,EAAE,MAAM,IAAI,EAAE,WAAW,QAAQ,CAAC,CAAC,GAAG;AAAA,QACrE,aAAa,SAAS,QAAQ,cAAc;AAAA,QAC5C;AAAA,QACA;AAAA,MACF;AAAA,MACA;AAAA,IACF;AAEA,WAAO;AAAA,MACL,OAAO,MAAM;AAAA,MACb;AAAA,MACA;AAAA,MACA,SAAS;AAAA,MACT,cAAc,gBAAgB;AAAA,MAC9B;AAAA,MACA;AAAA,MACA,aAAa,SAAS,QAAQ,cAAc;AAAA,IAC9C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,oBAAoB,SAAyB,OAA+B;AAClF,UAAM,WAAW,oBAAI,IAA0B;AAC/C,UAAM,aAAa,MAChB,YAAY,EACZ,MAAM,KAAK,EACX,OAAO,CAACE,OAAMA,GAAE,SAAS,CAAC;AAE7B,eAAW,UAAU,SAAS;AAG5B,YAAM,UAAU,OAAO,SAAS;AAChC,YAAM,SAAS,OAAO,SAAS,QAAQ,OAAO,SAAS,OAAO,OAAO;AACrE,YAAM,YAAY,GAAG,OAAO,IAAI,MAAM;AAEtC,YAAM,WAAW,SAAS,IAAI,SAAS;AACvC,UAAI,CAAC,UAAU;AACb,iBAAS,IAAI,WAAW,MAAM;AAAA,MAChC,OAAO;AAEL,cAAM,oBAAoB,KAAK,gBAAgB,SAAS,SAAS,UAAU;AAC3E,cAAM,eAAe,KAAK,gBAAgB,OAAO,SAAS,UAAU;AAGpE,cAAM,oBAAoB,oBAAoB,SAAS;AACvD,cAAM,eAAe,eAAe,OAAO;AAE3C,YAAI,eAAe,mBAAmB;AACpC,mBAAS,IAAI,WAAW,MAAM;AAAA,QAChC;AAAA,MACF;AAAA,IACF;AAGA,WAAO,MAAM,KAAK,SAAS,OAAO,CAAC,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAAA,EACvE;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,SAAiB,YAA8B;AACrE,UAAM,eAAe,QAAQ,YAAY;AACzC,WAAO,WAAW,OAAO,CAAC,SAAS,aAAa,SAAS,IAAI,CAAC,EAAE;AAAA,EAClE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWQ,yBAAyB,SAAyB,WAAoC;AAC5F,QAAI,QAAQ,WAAW,EAAG,QAAO,CAAC;AAGlC,UAAM,SAAS,CAAC,GAAG,OAAO,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAG5D,UAAM,QAAQ,OAAO,CAAC;AACtB,UAAM,OAAO,OAAO,OAAO,SAAS,CAAC;AACrC,QAAI,UAAU,UAAa,SAAS,OAAW,QAAO,CAAC;AAEvD,UAAM,WAAW,MAAM;AACvB,UAAM,WAAW,KAAK;AACtB,UAAM,QAAQ,WAAW;AAIzB,UAAM,aACJ,QAAQ,IACJ,OAAO,IAAI,CAAC,OAAO;AAAA,MACjB,GAAG;AAAA,MACH,OAAO,KAAK,OAAQ,EAAE,QAAQ,YAAY,QAAS,GAAO,IAAI;AAAA,IAChE,EAAE,IACF;AAGN,QAAI,cAAc,QAAW;AAC3B,aAAO,WAAW,OAAO,CAAC,MAAM,EAAE,SAAS,SAAS;AAAA,IACtD;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,gBACZ,OACA,QACA,OACyB;AACzB,UAAM,cAAc,MAAM,KAAK,gBAAgB,MAAM,KAAK;AAC1D,UAAM,UAA0B,CAAC;AAEjC,eAAW,WAAW,QAAQ;AAC5B,YAAM,OAAO,MAAM,KAAK,WAAW,OAAO,SAAS,aAAa,KAAK;AACrE,cAAQ;AAAA,QACN,GAAG,KAAK,IAAI,CAAC,OAAO;AAAA,UAClB,IAAI,EAAE;AAAA,UACN,OAAO,EAAE;AAAA;AAAA,UACT,SAAS,EAAE;AAAA,UACX,UAAU,EAAE;AAAA,QACd,EAAE;AAAA,MACJ;AAAA,IACF;AAEA,WAAO,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK,EAAE,MAAM,GAAG,KAAK;AAAA,EACjE;AAAA,EAEA,MAAc,UACZ,OACA,QACA,OACyB;AACzB,UAAM,UAA0B,CAAC;AAEjC,eAAW,WAAW,QAAQ;AAC5B,UAAI;AACF,cAAM,OAAO,MAAM,KAAK,WAAW,eAAe,SAAS,OAAO,KAAK;AACvE,gBAAQ;AAAA,UACN,GAAG,KAAK,IAAI,CAAC,OAAO;AAAA,YAClB,IAAI,EAAE;AAAA,YACN,OAAO,EAAE;AAAA,YACT,SAAS,EAAE;AAAA,YACX,UAAU,EAAE;AAAA,UACd,EAAE;AAAA,QACJ;AAAA,MACF,QAAQ;AAAA,MAIR;AAAA,IACF;AAEA,WAAO,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK,EAAE,MAAM,GAAG,KAAK;AAAA,EACjE;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,yBACZ,OACA,QACA,OACA,WAC2D;AAE3D,UAAM,UAAU,qBAAqB,KAAK;AAI1C,UAAM,mBAAmB,MAAM,KAAK,gBAAgB,OAAO,QAAQ,QAAQ,CAAC;AAG5E,UAAM,kBAAkB,oBAAI,IAAoB;AAChD,qBAAiB,QAAQ,CAAC,MAAM;AAC9B,sBAAgB,IAAI,EAAE,IAAI,EAAE,KAAK;AAAA,IACnC,CAAC;AAGD,UAAM,cAAc,iBAAiB,SAAS,IAAK,iBAAiB,CAAC,GAAG,SAAS,IAAK;AAItF,UAAM,gBAAgB,KAAK,yBAAyB,gBAAgB;AAGpE,UAAM,aAAa,MAAM,KAAK,UAAU,OAAO,QAAQ,QAAQ,CAAC;AAGhE,UAAM,cAAc,oBAAI,IAAoB;AAC5C,UAAM,WAAW,oBAAI,IAAoB;AACzC,UAAM,UAAU,oBAAI,IAA0B;AAE9C,kBAAc,QAAQ,CAAC,GAAG,MAAM;AAC9B,kBAAY,IAA
I,EAAE,IAAI,IAAI,CAAC;AAC3B,cAAQ,IAAI,EAAE,IAAI,CAAC;AAAA,IACrB,CAAC;AAED,eAAW,QAAQ,CAAC,GAAG,MAAM;AAC3B,eAAS,IAAI,EAAE,IAAI,IAAI,CAAC;AACxB,UAAI,CAAC,QAAQ,IAAI,EAAE,EAAE,GAAG;AACtB,gBAAQ,IAAI,EAAE,IAAI,CAAC;AAAA,MACrB;AAAA,IACF,CAAC;AAGD,UAAM,YAgBD,CAAC;AAGN,UAAM,cAAc,kBAAkB,CAAC,GAAG,QAAQ,OAAO,CAAC,CAAC;AAC3D,UAAM,EAAE,GAAG,cAAc,UAAU,IAAI,YAAY,WAAW;AAE9D,eAAW,CAAC,IAAI,MAAM,KAAK,SAAS;AAClC,YAAM,aAAa,YAAY,IAAI,EAAE,KAAK;AAC1C,YAAM,UAAU,SAAS,IAAI,EAAE,KAAK;AACpC,YAAM,iBAAiB,gBAAgB,IAAI,EAAE;AAE7C,YAAM,YAAY,eAAe,WAAW,gBAAgB,IAAI,cAAc;AAC9E,YAAM,SAAS,YAAY,WAAW,aAAa,IAAI,WAAW;AAGlE,YAAM,gBAAgB,KAAK;AAAA;AAAA,QAEzB,OAAO,SAAS,UAAU;AAAA,QAC1B;AAAA,MACF;AAGA,YAAM,iBAAiB,KAAK,yBAAyB,OAAO,MAAM;AAGlE,YAAM,kBAAkB,KAAK,mBAAmB,OAAO,MAAM;AAG7D,YAAM,mBAAmB,KAAK,oBAAoB,OAAO,MAAM;AAE/D,YAAM,WAUF;AAAA,QACF;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAEA,UAAI,eAAe,UAAU;AAC3B,iBAAS,aAAa;AAAA,MACxB;AACA,UAAI,YAAY,UAAU;AACxB,iBAAS,UAAU;AAAA,MACrB;AACA,UAAI,mBAAmB,QAAW;AAChC,iBAAS,iBAAiB;AAAA,MAC5B;AAEA,gBAAU,KAAK;AAAA,QACb;AAAA,QACA,QACG,YAAY,UACb,gBACA,iBACA,kBACA;AAAA,QACF;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAGA,UAAM,SAAS,UAAU,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK,EAAE,MAAM,GAAG,KAAK;AAGzE,QAAI;AAEJ,QAAI,OAAO,SAAS,GAAG;AACrB,YAAM,QAAQ,OAAO,CAAC;AACtB,YAAM,OAAO,OAAO,OAAO,SAAS,CAAC;AACrC,UAAI,UAAU,UAAa,SAAS,QAAW;AAC7C,4BAAoB,OAAO,IAAI,CAAC,OAAO;AAAA,UACrC,GAAG,EAAE;AAAA,UACL,OAAO,EAAE;AAAA,UACT,iBAAiB,EAAE;AAAA,QACrB,EAAE;AAAA,MACJ,OAAO;AACL,cAAM,WAAW,MAAM;AACvB,cAAM,WAAW,KAAK;AACtB,cAAM,QAAQ,WAAW;AAEzB,YAAI,QAAQ,GAAG;AAEb,8BAAoB,OAAO,IAAI,CAAC,OAAO;AAAA,YACrC,GAAG,EAAE;AAAA,YACL,OAAO,KAAK,OAAQ,EAAE,QAAQ,YAAY,QAAS,GAAO,IAAI;AAAA,YAC9D,iBAAiB,EAAE;AAAA,UACrB,EAAE;AAAA,QACJ,OAAO;AAEL,8BAAoB,OAAO,IAAI,CAAC,OAAO;AAAA,YACrC,GAAG,EAAE;AAAA,YACL,OAAO,EAAE;AAAA,YACT,iBAAiB,EAAE;AAAA,UACrB,EAAE;AAAA,QACJ;AAAA,MACF;AAAA,IACF,OAAO;AACL,0BAAoB,CAAC;AAAA,IACvB;AAGA,QAAI,cAAc,QAAW;AAC3B,0BAAoB,kBAAkB,OAAO,CAAC,MAAM,EAAE,SAAS,SAAS;AAAA,IAC1E;AAEA,WAAO,EAAE,SAAS,mBAAmB,YAAY;AAAA,EACnD;AAAA,EAEA,MAAM,gBAAgB,OAAoB,UAA8C;AACtF,WAAO,KAAK,OAAO;AAAA,MACjB,GAAG;AAAA,MACH,QAAQ;AAAA,IACV,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,iBAAiB,UAA8B,SAAqC;AAE1F,QAAI;AACJ,YAAQ,UAAU;AAAA,MAChB,KAAK;AACH,oBAAY;AACZ;AAAA,MACF,KAAK;AACH,oBAAY;AACZ;AAAA,MACF,KAAK;AACH,oBAAY;AACZ;AAAA,MACF,KAAK;AACH,oBAAY;AACZ;AAAA,MACF,KAAK;AACH,oBAAY;AACZ;AAAA,MACF,KAAK;AACH,oBAAY,WAAW,QAAQ,IAAI,wBAAwB,KAAK,KAAK;AACrE;AAAA,MACF,KAAK;AACH,oBAAY;AACZ;AAAA,MACF,KAAK;AACH,oBAAY;AACZ;AAAA,MACF;AACE,oBAAY;AAAA,IAChB;AAGA,QAAI,qBAAqB;AACzB,QAAI,kBAAkB;AAEtB,eAAW,EAAE,QAAQ,WAAW,KAAK,SAAS;AAC5C,YAAM,eAAe,mBAAmB,MAAM;AAC9C,YAAM,aAAa,aAAa,YAAY,OAAO,KAAK;AACxD,4BAAsB,aAAa;AACnC,yBAAmB;AAAA,IACrB;AAEA,UAAM,oBAAoB,kBAAkB,IAAI,qBAAqB,kBAAkB;AACvF,UAAM,aAAa,YAAY;AAG/B,QAAI,aAAa,QAAQ;AACvB,aAAO,KAAK,IAAI,YAAY,GAAG;AAAA,IACjC;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,mBAAmB,OAAe,QAA8B;AACtE,UAAM,MAAM,OAAO,SAAS;AAC5B,QAAI,QAAQ,UAAa,QAAQ,GAAI,QAAO;AAG5C,UAAM,UAAU,IAAI,YAAY,EAAE,QAAQ,eAAe,GAAG;AAG5D,UAAM,YAAY,oBAAI,IAAI;AAAA,MACxB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAGD,UAAM,aAAa,MAChB,YAAY,EACZ,MAAM,KAAK,EACX,OAAO,CAACA,OAAMA,GAAE,SAAS,KAAK,CAAC,UAAU,IAAIA,EAAC,CAAC;AAElD,QAAI,WAAW,WAAW,EAAG,QAAO;AAGpC,UAAM,gBAAgB,WAAW,OAAO,CAAC,SAAS,QAAQ,SAAS,IAAI,CAAC;AAExE,QAAI,cAAc,WAAW,EAAG,QAAO;AAIvC,UAAM,a
AAa,cAAc,SAAS,WAAW;AACrD,WAAO,IAAM,IAAM;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,oBAAoB,OAAe,QAA8B;AACvE,UAAMC,QAAO,OAAO,SAAS;AAC7B,QAAIA,UAAS,UAAaA,UAAS,GAAI,QAAO;AAG9C,UAAM,eAAeA,MAAK,YAAY,EAAE,QAAQ,eAAe,GAAG;AAGlE,UAAM,YAAY,oBAAI,IAAI;AAAA,MACxB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAGD,UAAM,aAAa,MAChB,YAAY,EACZ,MAAM,KAAK,EACX,OAAO,CAACD,OAAMA,GAAE,SAAS,KAAK,CAAC,UAAU,IAAIA,EAAC,CAAC;AAElD,QAAI,WAAW,WAAW,EAAG,QAAO;AAGpC,UAAM,gBAAgB,WAAW,OAAO,CAAC,SAAS,aAAa,SAAS,IAAI,CAAC;AAE7E,QAAI,cAAc,WAAW,EAAG,QAAO;AAIvC,UAAM,aAAa,cAAc,SAAS,WAAW;AACrD,WAAO,IAAM,IAAM;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,yBAAyB,OAAe,QAA8B;AAC5E,UAAMC,QAAO,OAAO,SAAS,QAAQ,OAAO,SAAS,OAAO;AAC5D,UAAM,UAAU,OAAO,QAAQ,YAAY;AAC3C,UAAM,YAAYA,MAAK,YAAY;AAGnC,eAAW,EAAE,SAAS,MAAM,KAAK,oBAAoB;AACnD,UAAI,QAAQ,KAAK,KAAK,GAAG;AAEvB,cAAM,yBAAyB,MAAM;AAAA,UACnC,CAAC,SAAS,UAAU,SAAS,IAAI,KAAK,QAAQ,SAAS,IAAI;AAAA,QAC7D;AAEA,YAAI,wBAAwB;AAC1B,iBAAO;AAAA,QACT,OAAO;AACL,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,sBACN,QACA,OACA,QACA,OACc;AACd,UAAM,WAAW,EAAE,GAAG,OAAO;AAG7B,UAAMA,QAAO,OAAO,SAAS,QAAQ,OAAO,SAAS,OAAO;AAE5D,UAAM,WAAW,OAAO,SAAS,UAAU;AAG3C,UAAM,WAAW,KAAK,0BAA0B,MAAM;AACtD,UAAM,aAAa,UAAU,QAAQ,KAAK,kBAAkB,OAAO,OAAO;AAE1E,aAAS,UAAU;AAAA,MACjB,MAAM,KAAK,UAAU,UAAU,QAAQ;AAAA,MACvC,MAAM;AAAA,MACN,WAAW,UAAU,aAAa;AAAA,MAClC,SAAS,KAAK,gBAAgB,OAAO,SAAS,KAAK;AAAA,MACnD,UAAU,GAAGA,KAAI,GAAG,WAAW,IAAI,OAAO,SAAS,SAAS,CAAC,KAAK,EAAE;AAAA,MACpE,iBAAiB,KAAK,wBAAwB,QAAQ,KAAK;AAAA,IAC7D;AAGA,QAAI,WAAW,gBAAgB,WAAW,QAAQ;AAEhD,YAAM,QAAQ,KAAK,kBAAkB,OAAOA,OAAM,UAAU;AAE5D,eAAS,UAAU;AAAA,QACjB,YAAY,KAAK,kBAAkB,OAAO,OAAO;AAAA,QACjD,YAAY,KAAK,eAAe,OAAO,OAAO;AAAA,QAC9C,iBAAiB,KAAK,gBAAgB,OAAO,SAAS,KAAK;AAAA,QAC3D;AAAA,MACF;AAAA,IACF;AAGA,QAAI,WAAW,QAAQ;AAErB,YAAM,cAAc,KAAK,wBAAwB,OAAOA,OAAM,UAAU;AAExE,eAAS,OAAO;AAAA,QACd,cAAc,UAAU,eAAe,OAAO;AAAA,QAC9C;AAAA,QACA,eAAe,KAAK,qBAAqB,OAAO,OAAO;AAAA,QACvD,OAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,0BAA0B,QAA4C;AAC5E,UAAMA,QAAO,OAAO,SAAS;AAC7B,QAAIA,UAAS,UAAaA,UAAS,GAAI,QAAO;AAE9C,UAAM,MAAMA,MAAK,MAAM,GAAG,EAAE,IAAI,KAAK;AACrC,UAAM,WACJ,QAAQ,QAAQ,QAAQ,QACpB,eACA,QAAQ,QAAQ,QAAQ,QACtB,eACA;AAGR,UAAM,aAAa,KAAK,kBAAkB,OAAO,OAAO;AACxD,QAAI,eAAe,GAAI,QAAO;AAE9B,WAAO,KAAK,gBAAgB,gBAAgB,OAAO,SAAS,YAAY,QAAQ;AAAA,EAClF;AAAA,EAEQ,kBAAkB,SAAyB;AAEjD,UAAM,YAAY,QAAQ,MAAM,6CAA6C;AAC7E,QAAI,YAAY,CAAC,MAAM,UAAa,UAAU,CAAC,MAAM,GAAI,QAAO,UAAU,CAAC;AAE3E,UAAM,aAAa,QAAQ,MAAM,6BAA6B;AAC9D,QAAI,aAAa,CAAC,MAAM,UAAa,WAAW,CAAC,MAAM,GAAI,QAAO,WAAW,CAAC;AAE9E,UAAM,aAAa,QAAQ,MAAM,6BAA6B;AAC9D,QAAI,aAAa,CAAC,MAAM,UAAa,WAAW,CAAC,MAAM,GAAI,QAAO,WAAW,CAAC;AAG9E,WAAO;AAAA,EACT;AAAA,EAEQ,UACN,UACA,UACoD;AAEpD,QAAI,SAAU,QAAO,SAAS;AAC9B,QAAI,aAAa,mBAAmB,aAAa;AAC/C,aAAO;AACT,WAAO;AAAA,EACT;AAAA,EAEQ,gBAAgB,SAAiB,OAAuB;AAE9D,UAAM,WAAW,QAAQ,MAAM,6BAA6B;AAC5D,QAAI,WAAW,CAAC,MAAM,UAAa,SAAS,CAAC,MAAM,GAAI,QAAO,SAAS,CAAC,EAAE,KAAK;AAE/E,UAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,UAAM,aAAa,MAChB,YAAY,EACZ,MAAM,KAAK,EACX,OAAO,CAACD,OAAMA,GAAE,SAAS,CAAC;AAG7B,UAAM,aAAa,CAAC,YAA6B;AAC/C,aACE,QAAQ,WAAW,SAAS,KAC5B,QAAQ,WAAW,SAAS,KAC5B,QAAQ,WAAW,YAAY,KAC/B,QAAQ,WAAW,OAAO;AAAA,IAE9B;AAGA,UAAM,YAAY,CAAC,YAA4B;AAC7C,YAAM,YAAY,QAAQ,YAAY;AACtC,aAAO,WAAW,OAAO,CAAC,SAAS,UAAU,SAAS,IAAI,CAAC,EAAE;AAAA,IAC/D;AAGA,UAAM,eAAe,CAAC,YAA6B;AACjD,UAAI,QAAQ,WAAW,EAAG,QAAO;AACjC,UAAI,QAAQ,WAAW,IAAI,KAAK,QAAQ,WAAW,IAAI,EAAG,Q
AAO;AAEjE,UAAI,QAAQ,WAAW,GAAG,KAAK,QAAQ,SAAS,EAAG,QAAO;AAE1D,aAAO,QAAQ,UAAU;AAAA,IAC3B;AAGA,QAAI,WAA0B;AAC9B,QAAI,YAAY;AAEhB,eAAW,QAAQ,OAAO;AACxB,YAAM,UAAU,KAAK,KAAK;AAC1B,UAAI,WAAW,OAAO,KAAK,CAAC,aAAa,OAAO,EAAG;AAEnD,UAAI,QAAQ,UAAU,OAAO;AAG7B,UAAI,SAAS,KAAK,OAAO,GAAG;AAC1B,iBAAS;AAAA,MACX;AAIA,UAAI,4BAA4B,KAAK,OAAO,GAAG;AAC7C,iBAAS;AAAA,MACX;AAEA,UAAI,QAAQ,WAAW;AACrB,oBAAY;AACZ,mBAAW;AAAA,MACb;AAAA,IACF;AAGA,QAAI,aAAa,QAAQ,aAAa,MAAM,YAAY,GAAG;AACzD,UAAI,SAAS,SAAS,KAAK;AACzB,cAAM,gBAAgB,SAAS,MAAM,eAAe;AACpD,YAAI,iBAAiB,cAAc,CAAC,EAAE,UAAU,MAAM,cAAc,CAAC,EAAE,UAAU,KAAK;AACpF,iBAAO,cAAc,CAAC,EAAE,KAAK;AAAA,QAC/B;AACA,eAAO,GAAG,SAAS,UAAU,GAAG,GAAG,CAAC;AAAA,MACtC;AACA,aAAO;AAAA,IACT;AAGA,eAAW,QAAQ,OAAO;AACxB,YAAM,UAAU,KAAK,KAAK;AAC1B,UAAI,WAAW,OAAO,KAAK,CAAC,aAAa,OAAO,EAAG;AAEnD,UAAI,QAAQ,SAAS,KAAK;AACxB,cAAM,gBAAgB,QAAQ,MAAM,eAAe;AACnD,YAAI,iBAAiB,cAAc,CAAC,EAAE,UAAU,MAAM,cAAc,CAAC,EAAE,UAAU,KAAK;AACpF,iBAAO,cAAc,CAAC,EAAE,KAAK;AAAA,QAC/B;AACA,eAAO,GAAG,QAAQ,UAAU,GAAG,GAAG,CAAC;AAAA,MACrC;AAEA,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,wBAAwB,QAAsB,OAAuB;AAC3E,UAAM,aAAa,MAChB,YAAY,EACZ,MAAM,KAAK,EACX,OAAO,CAACA,OAAMA,GAAE,SAAS,CAAC;AAC7B,UAAM,eAAe,OAAO,QAAQ,YAAY;AAEhD,UAAM,eAAe,WAAW,OAAO,CAAC,SAAS,aAAa,SAAS,IAAI,CAAC;AAE5E,QAAI,aAAa,SAAS,GAAG;AAC3B,aAAO,YAAY,aAAa,KAAK,IAAI,CAAC;AAAA,IAC5C;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,kBAAkB,SAA2B;AACnD,UAAM,aAAuB,CAAC;AAC9B,UAAM,UAAU,QAAQ,SAAS,oBAAoB;AACrD,eAAW,SAAS,SAAS;AAC3B,UAAI,MAAM,CAAC,MAAM,UAAa,MAAM,CAAC,MAAM,GAAI,YAAW,KAAK,MAAM,CAAC,CAAC;AAAA,IACzE;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,eAAe,SAA2B;AAChD,UAAM,UAAoB,CAAC;AAC3B,UAAM,UAAU,QAAQ,SAAS,sCAAsC;AACvE,eAAW,SAAS,SAAS;AAC3B,UAAI,MAAM,CAAC,MAAM,UAAa,MAAM,CAAC,MAAM,GAAI,SAAQ,KAAK,MAAM,CAAC,CAAC;AAAA,IACtE;AACA,WAAO,QAAQ,MAAM,GAAG,CAAC;AAAA,EAC3B;AAAA,EAEQ,gBAAgB,SAAiB,QAA0B;AAIjE,UAAM,YAAY,oBAAI,IAAI;AAAA,MACxB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAGD,UAAM,QAAQ,QAAQ,YAAY,EAAE,MAAM,gBAAgB,KAAK,CAAC;AAChE,UAAM,YAAY,oBAAI,IAAoB;AAE1C,eAAW,QAAQ,OAAO;AAExB,UAAI,UAAU,IAAI,IAAI,EAAG;AAEzB,gBAAU,IAAI,OAAO,UAAU,IAAI,IAAI,KAAK,KAAK,CAAC;AAAA,IACpD;AAEA,WAAO,MAAM,KAAK,UAAU,QAAQ,CAAC,EAClC,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC,EAC1B,MAAM,GAAG,CAAC,EACV,IAAI,CAAC,CAAC,IAAI,MAAM,IAAI;AAAA,EACzB;AAAA,EAEQ,qBAAqB,SAAyB;AACpD,UAAM,WAAW,QAAQ,MAAM,sBAAsB;AACrD,QAAI,WAAW,CAAC,MAAM,UAAa,SAAS,CAAC,MAAM,IAAI;AACrD,aAAO,SAAS,CAAC,EACd,MAAM,IAAI,EACV,IAAI,CAAC,SAAS,KAAK,QAAQ,aAAa,EAAE,EAAE,KAAK,CAAC,EAClD,OAAO,CAAC,SAAS,KAAK,SAAS,CAAC,EAChC,KAAK,IAAI;AAAA,IACd;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,kBACN,OACA,UACA,YACqC;AACrC,QAAI,CAAC,SAAS,eAAe,MAAM,eAAe,eAAe;AAC/D,aAAO,EAAE,UAAU,GAAG,OAAO,EAAE;AAAA,IACjC;AAEA,UAAM,SAAS,GAAG,QAAQ,IAAI,UAAU;AACxC,WAAO;AAAA,MACL,UAAU,MAAM,iBAAiB,MAAM;AAAA,MACvC,OAAO,MAAM,cAAc,MAAM;AAAA,IACnC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,wBACN,OACA,UACA,YACgE;AAChE,QAAI,CAAC,SAAS,eAAe,MAAM,eAAe,eAAe;AAC/D,aAAO,CAAC;AAAA,IACV;AAEA,UAAM,SAAS,GAAG,QAAQ,IAAI,UAAU;AACxC,UAAM,UAA0E,CAAC;AAGjF,UAAM,WAAW,MAAM,iBAAiB,MAAM;AAC9C,eAAW,QAAQ,UAAU;AAC3B,UAAI,KAAK,SAAS,SAAS;AAEzB,cAAM,CAAC,MAAM,MA
AM,IAAI,KAAK,YAAY,KAAK,IAAI;AACjD,gBAAQ,KAAK;AAAA,UACX;AAAA,UACA,SAAS,SAAS,GAAG,MAAM,OAAO;AAAA,UAClC,cAAc;AAAA,QAChB,CAAC;AAAA,MACH;AAAA,IACF;AAGA,UAAM,WAAW,MAAM,SAAS,MAAM;AACtC,eAAW,QAAQ,UAAU;AAC3B,UAAI,KAAK,SAAS,SAAS;AAEzB,cAAM,CAAC,MAAM,MAAM,IAAI,KAAK,YAAY,KAAK,EAAE;AAC/C,gBAAQ,KAAK;AAAA,UACX;AAAA,UACA,SAAS,SAAS,GAAG,MAAM,OAAO;AAAA,UAClC,cAAc;AAAA,QAChB,CAAC;AAAA,MACH;AAAA,IACF;AAGA,WAAO,QAAQ,MAAM,GAAG,EAAE;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA,EAKQ,YAAY,QAAkC;AACpD,UAAM,YAAY,OAAO,YAAY,GAAG;AACxC,QAAI,cAAc,IAAI;AACpB,aAAO,CAAC,QAAQ,EAAE;AAAA,IACpB;AACA,WAAO,CAAC,OAAO,UAAU,GAAG,SAAS,GAAG,OAAO,UAAU,YAAY,CAAC,CAAC;AAAA,EACzE;AACF;;;ACl3CA,SAAS,YAAAE,WAAU,UAAAC,eAAc;AACjC,SAAS,WAAAC,UAAS,cAAAC,aAAY,QAAAC,aAAY;;;ACD1C,SAAS,KAAAC,UAAS;AAiBlB,IAAM,4BAA4BA,GAAE,OAAO;AAAA,EACzC,MAAMA,GAAE,OAAO,EAAE,IAAI,GAAG,wBAAwB;AAAA,EAChD,aAAaA,GAAE,OAAO,EAAE,SAAS;AAAA,EACjC,MAAMA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,SAAS;AACrC,CAAC;AAUM,IAAM,4BAA4B,0BAA0B,OAAO;AAAA,EACxE,MAAMA,GAAE,QAAQ,MAAM;AAAA,EACtB,MAAMA,GAAE,OAAO,EAAE,IAAI,GAAG,kCAAkC;AAC5D,CAAC;AAaD,IAAM,eAAeA,GAAE,OAAO,EAAE;AAAA,EAC9B,CAAC,QAAQ;AAEP,QAAI;AACF,UAAI,IAAI,GAAG;AACX,aAAO;AAAA,IACT,QAAQ;AAEN,aAAO,yBAAyB,KAAK,GAAG;AAAA,IAC1C;AAAA,EACF;AAAA,EACA,EAAE,SAAS,iDAAiD;AAC9D;AAMO,IAAM,4BAA4B,0BAA0B,OAAO;AAAA,EACxE,MAAMA,GAAE,QAAQ,MAAM;AAAA,EACtB,KAAK;AAAA,EACL,QAAQA,GAAE,OAAO,EAAE,SAAS;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,IAAI,EAAE,SAAS,kCAAkC,EAAE,SAAS;AAChF,CAAC;AAYM,IAAM,2BAA2B,0BAA0B,OAAO;AAAA,EACvE,MAAMA,GAAE,QAAQ,KAAK;AAAA,EACrB,KAAKA,GAAE,IAAI,sCAAsC;AAAA,EACjD,OAAOA,GAAE,OAAO,EAAE,IAAI,EAAE,IAAI,GAAG,4BAA4B,EAAE,QAAQ,CAAC;AAAA,EACtE,UAAUA,GAAE,OAAO,EAAE,IAAI,EAAE,SAAS,qCAAqC,EAAE,SAAS;AAAA,EACpF,mBAAmBA,GAAE,OAAO,EAAE,SAAS;AAAA,EACvC,qBAAqBA,GAAE,OAAO,EAAE,SAAS;AAC3C,CAAC;AAYM,IAAM,wBAAwBA,GAAE,mBAAmB,QAAQ;AAAA,EAChE;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAYM,IAAM,+BAA+BA,GAAE,OAAO;AAAA,EACnD,SAASA,GAAE,QAAQ,CAAC;AAAA,EACpB,QAAQA,GAAE,MAAM,qBAAqB;AACvC,CAAC;AAQM,SAAS,sBAAsB,KAAkD;AACtF,SAAO,IAAI,SAAS;AACtB;AAEO,SAAS,sBAAsB,KAAkD;AACtF,SAAO,IAAI,SAAS;AACtB;AAEO,SAAS,qBAAqB,KAAiD;AACpF,SAAO,IAAI,SAAS;AACtB;AAMO,IAAM,mCAA2D;AAAA,EACtE,SAAS;AAAA,EACT,QAAQ,CAAC;AACX;;;ADtIA,eAAeC,YAAWC,OAAgC;AACxD,MAAI;AACF,UAAMC,QAAOD,KAAI;AACjB,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAYO,IAAM,yBAAN,MAA6B;AAAA,EACjB;AAAA,EACA;AAAA,EACT,SAAwC;AAAA,EAEhD,YAAY,aAAsB;AAChC,SAAK,cAAc,eAAe,mBAAmB,QAAQ;AAC7D,SAAK,aAAaE,MAAK,KAAK,aAAa,6CAA6C;AAAA,EACxF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,OAAwC;AAC5C,QAAI,KAAK,WAAW,MAAM;AACxB,aAAO,KAAK;AAAA,IACd;AAEA,UAAM,SAAS,MAAMH,YAAW,KAAK,UAAU;AAC/C,QAAI,CAAC,QAAQ;AAEX,WAAK,SAAS;AAAA,QACZ,GAAG;AAAA,QACH,QAAQ,CAAC,GAAG,iCAAiC,MAAM;AAAA,MACrD;AACA,aAAO,KAAK;AAAA,IACd;AAEA,UAAM,UAAU,MAAMI,UAAS,KAAK,YAAY,OAAO;AACvD,QAAI;AACJ,QAAI;AACF,eAAS,KAAK,MAAM,OAAO;AAAA,IAC7B,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,wCAAwC,KAAK,UAAU,KACrD,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CACvD;AAAA,MACF;AAAA,IACF;AAEA,UAAM,SAAS,6BAA6B,UAAU,MAAM;AAC5D,QAAI,CAAC,OAAO,SAAS;AACnB,YAAM,IAAI,MAAM,gCAAgC,KAAK,UAAU,KAAK,OAAO,MAAM,OAAO,EAAE;AAAA,IAC5F;AAEA,SAAK,SAAS,OAAO;AACrB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,KAAK,QAA+C;AACxD,UAAM,gBAAgB,KAAK,YAAY,KAAK,UAAU,QAAQ,MAAM,CAAC,CAAC;AACtE,SAAK,SAAS;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,cAAc,YAA4C;AAC9D,UAAM,SAAS,MAAM,KAAK,KAAK;AAC/B,UAAM,WAAW,OAAO,OAAO,KAAK,CAAC,MAAM,EAAE,SAAS,WAAW,IAAI;AACrE,QAAI,aAAa,QAAW;AAC1B,YAAM,IAAI,MAAM,qBAAqB,WAAW,IAAI,kBAAkB;AAAA,IACxE;AACA,WAAO,OAAO,KAAK,UAAU;AAC7B,UAAM,KAAK,KAAK,MAAM;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,iBAAiB,MAAgC;AACrD,UAAM,SAAS,MAAM,KAAK,KAAK;AAC/B,UAAM,QAAQ,OAAO,OAAO,
UAAU,CAAC,MAAM,EAAE,SAAS,IAAI;AAC5D,QAAI,UAAU,IAAI;AAChB,aAAO;AAAA,IACT;AACA,WAAO,OAAO,OAAO,OAAO,CAAC;AAC7B,UAAM,KAAK,KAAK,MAAM;AACtB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,iBACJ,MACA,SACe;AACf,UAAM,SAAS,MAAM,KAAK,KAAK;AAC/B,UAAM,QAAQ,OAAO,OAAO,UAAU,CAAC,MAAM,EAAE,SAAS,IAAI;AAC5D,QAAI,UAAU,IAAI;AAChB,YAAM,IAAI,MAAM,qBAAqB,IAAI,aAAa;AAAA,IACxD;AAIA,UAAM,WAAW,OAAO,OAAO,KAAK;AACpC,QAAI,aAAa,QAAW;AAC1B,YAAM,IAAI,MAAM,qBAAqB,IAAI,wBAAwB,OAAO,KAAK,CAAC,EAAE;AAAA,IAClF;AACA,QAAI,QAAQ,gBAAgB,QAAW;AACrC,eAAS,cAAc,QAAQ;AAAA,IACjC;AACA,QAAI,QAAQ,SAAS,QAAW;AAC9B,eAAS,OAAO,QAAQ;AAAA,IAC1B;AACA,UAAM,KAAK,KAAK,MAAM;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,UAAU,MAAoD;AAClE,UAAM,SAAS,MAAM,KAAK,KAAK;AAC/B,WAAO,OAAO,OAAO,KAAK,CAAC,MAAM,EAAE,SAAS,IAAI;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBAAmC;AACvC,UAAM,SAAS,MAAM,KAAK,KAAK;AAC/B,WAAO,OAAO,OAAO,SAAS;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA,EAKA,YAAYH,OAAsB;AAChC,QAAII,YAAWJ,KAAI,GAAG;AACpB,aAAOA;AAAA,IACT;AACA,WAAOK,SAAQ,KAAK,aAAaL,KAAI;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAyB;AACvB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,aAAmB;AACjB,SAAK,SAAS;AAAA,EAChB;AACF;;;AErMA,SAAS,cAAAM,mBAAkB;AAC3B,SAAS,YAAAC,WAAU,SAAAC,QAAO,QAAAC,OAAM,UAAAC,eAAc;AAC9C,SAAS,QAAAC,QAAM,WAAAC,gBAAe;;;ACF9B,SAAS,aAAa;AACtB,SAAS,SAAAC,cAAa;AAWtB,eAAsB,gBAAgB,SAAgD;AACpF,QAAM,EAAE,KAAK,WAAW,QAAQ,QAAQ,EAAE,IAAI;AAE9C,QAAMC,OAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAE1C,QAAM,OAAO,CAAC,SAAS,WAAW,OAAO,KAAK,CAAC;AAC/C,MAAI,WAAW,QAAW;AACxB,SAAK,KAAK,YAAY,MAAM;AAAA,EAC9B;AACA,OAAK,KAAK,KAAK,SAAS;AAExB,SAAO,IAAI,QAAQ,CAACC,aAAY;AAC9B,UAAM,MAAM,MAAM,OAAO,MAAM,EAAE,OAAO,CAAC,UAAU,QAAQ,MAAM,EAAE,CAAC;AAEpE,QAAI,SAAS;AACb,QAAI,OAAO,GAAG,QAAQ,CAAC,SAAiB;AACtC,gBAAU,KAAK,SAAS;AAAA,IAC1B,CAAC;AAED,QAAI,GAAG,SAAS,CAAC,UAAiB;AAChC,MAAAA,SAAQ,IAAI,KAAK,CAAC;AAAA,IACpB,CAAC;AAED,QAAI,GAAG,SAAS,CAAC,SAAwB;AACvC,UAAI,SAAS,GAAG;AACd,QAAAA,SAAQ,GAAG,SAAS,CAAC;AAAA,MACvB,OAAO;AACL,QAAAA,SAAQ,IAAI,IAAI,MAAM,qBAAqB,MAAM,EAAE,CAAC,CAAC;AAAA,MACvD;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AACH;AAEO,SAAS,SAAS,QAAyB;AAChD,SAAO,OAAO,WAAW,SAAS,KAAK,OAAO,WAAW,UAAU,KAAK,OAAO,WAAW,MAAM;AAClG;AAEO,SAAS,gBAAgB,KAAqB;AACnD,QAAM,QAAQ,sBAAsB,KAAK,GAAG;AAC5C,QAAM,OAAO,QAAQ,CAAC;AACtB,MAAI,SAAS,QAAW;AACtB,WAAO;AAAA,EACT;AACA,SAAO;AACT;;;ADlCA,eAAeC,YAAWC,OAAgC;AACxD,MAAI;AACF,UAAMC,QAAOD,KAAI;AACjB,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAmCO,IAAM,eAAN,MAAmB;AAAA,EACP;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACT,WAA0B,EAAE,QAAQ,CAAC,EAAE;AAAA,EAE/C,YAAY,SAAiB,SAA+B;AAC1D,SAAK,UAAU;AACf,SAAK,oBAAoB,SAAS,qBAAqB;AACvD,SAAK,mBAAmB,SAAS,oBAAoB;AACrD,SAAK,cAAc,SAAS,eAAe;AAAA,EAC7C;AAAA,EAEA,MAAM,aAA4B;AAChC,UAAME,OAAM,KAAK,SAAS,EAAE,WAAW,KAAK,CAAC;AAC7C,UAAM,KAAK,aAAa;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,0BACN,OACA,OAC6B;AAE7B,UAAM,OAAO,MAAM,SAAS,SAAY,CAAC,GAAG,MAAM,IAAI,IAAI;AAC1D,UAAM,OAAO;AAAA,MACX,MAAM,MAAM;AAAA,MACZ,aAAa,MAAM;AAAA,MACnB;AAAA,IACF;AAEA,YAAQ,MAAM,MAAM;AAAA,MAClB,KAAK,QAAQ;AACX,cAAM,YAAY;AAClB,cAAM,UAA+B;AAAA,UACnC,GAAG;AAAA,UACH,MAAM;AAAA;AAAA,UAEN,MAAM,MAAM,QAAQ,UAAU;AAAA,QAChC;AACA,eAAO;AAAA,MACT;AAAA,MACA,KAAK,QAAQ;AACX,cAAM,YAAY;AAElB,YAAI,UAAU,QAAQ,QAAW;AAC/B,iBAAO;AAAA,QACT;AACA,cAAM,UAA+B;AAAA,UACnC,GAAG;AAAA,UACH,MAAM;AAAA,UACN,KAAK,UAAU;AAAA,UACf,QAAQ,UAAU;AAAA,UAClB,OAAO,MAAM;AAAA,QACf;AACA,eAAO;AAAA,MACT;AAAA,MACA,KAAK,OAAO;AACV,cAAM,WAAW;AACjB,cAAM,SAA6B;AAAA,UACjC,GAAG;AAAA,UACH,MAAM;AAAA,UACN,KAAK,SAAS;AAAA,UACd,OAAO,SAAS;AAAA,UAChB,UAAU,MAAM;AAAA,UAChB,mBAAmB,MAAM;AAAA,UACzB,qBAAqB,MAAM;AAAA,QAC7B;AACA,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAA
A;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,kCAAkC,OAA2C;AAEnF,UAAM,OAAO,MAAM,SAAS,SAAY,CAAC,GAAG,MAAM,IAAI,IAAI;AAC1D,UAAM,OAAO;AAAA,MACX,MAAM,MAAM;AAAA,MACZ,aAAa,MAAM;AAAA,MACnB;AAAA,IACF;AAEA,YAAQ,MAAM,MAAM;AAAA,MAClB,KAAK,QAAQ;AACX,cAAM,UAA+B;AAAA,UACnC,GAAG;AAAA,UACH,MAAM;AAAA,UACN,MAAM,MAAM;AAAA,QACd;AACA,eAAO;AAAA,MACT;AAAA,MACA,KAAK,QAAQ;AAEX,YAAI,MAAM,QAAQ,QAAW;AAC3B,iBAAO;AAAA,QACT;AACA,cAAM,UAA+B;AAAA,UACnC,GAAG;AAAA,UACH,MAAM;AAAA,UACN,KAAK,MAAM;AAAA,UACX,QAAQ,MAAM;AAAA,UACd,OAAO,MAAM;AAAA,QACf;AACA,eAAO;AAAA,MACT;AAAA,MACA,KAAK,OAAO;AACV,cAAM,SAA6B;AAAA,UACjC,GAAG;AAAA,UACH,MAAM;AAAA,UACN,KAAK,MAAM;AAAA,UACX,OAAO,MAAM;AAAA,UACb,UAAU,MAAM;AAAA,UAChB,mBAAmB,MAAM;AAAA,UACzB,qBAAqB,MAAM;AAAA,QAC7B;AACA,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,OAAO,OAAyB,SAAoD;AACxF,QAAI,CAAC,MAAM,QAAQ,MAAM,KAAK,KAAK,MAAM,IAAI;AAC3C,aAAO,IAAI,IAAI,MAAM,4BAA4B,CAAC;AAAA,IACpD;AAEA,UAAM,WAAW,MAAM,KAAK,UAAU,MAAM,IAAI;AAChD,QAAI,aAAa,QAAW;AAC1B,aAAO,IAAI,IAAI,MAAM,oBAAoB,MAAM,IAAI,kBAAkB,CAAC;AAAA,IACxE;AAEA,UAAM,KAAK,cAAcC,YAAW,CAAC;AACrC,UAAM,MAAM,oBAAI,KAAK;AAErB,QAAI;AAEJ,YAAQ,MAAM,MAAM;AAAA,MAClB,KAAK,QAAQ;AACX,YAAI,MAAM,SAAS,QAAW;AAC5B,iBAAO,IAAI,IAAI,MAAM,kCAAkC,CAAC;AAAA,QAC1D;AAEA,cAAM,iBACJ,KAAK,gBAAgB,SACjBC,SAAQ,KAAK,aAAa,MAAM,IAAI,IACpCA,SAAQ,MAAM,IAAI;AAExB,YAAI;AACF,gBAAM,QAAQ,MAAMC,MAAK,cAAc;AACvC,cAAI,CAAC,MAAM,YAAY,GAAG;AACxB,mBAAO,IAAI,IAAI,MAAM,4BAA4B,cAAc,EAAE,CAAC;AAAA,UACpE;AAAA,QACF,QAAQ;AACN,iBAAO,IAAI,IAAI,MAAM,6BAA6B,cAAc,EAAE,CAAC;AAAA,QACrE;AACA,gBAAQ;AAAA,UACN,MAAM;AAAA,UACN;AAAA,UACA,MAAM,MAAM;AAAA,UACZ,MAAM;AAAA,UACN,aAAa,MAAM;AAAA,UACnB,MAAM,MAAM;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,WAAW;AAAA,QACb;AACA;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,YAAI,WAAW,MAAM;AAGrB,YAAI,MAAM,QAAQ,QAAW;AAC3B,gBAAM,WAAWC,OAAK,KAAK,SAAS,SAAS,EAAE;AAC/C,gBAAM,SAAS,MAAM,gBAAgB;AAAA,YACnC,KAAK,MAAM;AAAA,YACX,WAAW;AAAA,YACX,GAAI,MAAM,WAAW,SAAY,EAAE,QAAQ,MAAM,OAAO,IAAI,CAAC;AAAA,YAC7D,OAAO,MAAM,SAAS;AAAA,UACxB,CAAC;AAED,cAAI,CAAC,OAAO,SAAS;AACnB,mBAAO,IAAI,OAAO,KAAK;AAAA,UACzB;AACA,qBAAW,OAAO;AAAA,QACpB;AAEA,YAAI,aAAa,QAAW;AAC1B,iBAAO,IAAI,IAAI,MAAM,sCAAsC,CAAC;AAAA,QAC9D;AAGA,cAAM,qBACJ,KAAK,gBAAgB,SAAYF,SAAQ,KAAK,aAAa,QAAQ,IAAIA,SAAQ,QAAQ;AAGzF,YAAI,MAAM,QAAQ,QAAW;AAC3B,cAAI;AACF,kBAAM,QAAQ,MAAMC,MAAK,kBAAkB;AAC3C,gBAAI,CAAC,MAAM,YAAY,GAAG;AACxB,qBAAO,IAAI,IAAI,MAAM,4BAA4B,kBAAkB,EAAE,CAAC;AAAA,YACxE;AAAA,UACF,QAAQ;AACN,mBAAO,IAAI,IAAI,MAAM,mCAAmC,kBAAkB,EAAE,CAAC;AAAA,UAC/E;AAAA,QACF;AAEA,gBAAQ;AAAA,UACN,MAAM;AAAA,UACN;AAAA,UACA,MAAM,MAAM;AAAA,UACZ,MAAM;AAAA,UACN,KAAK,MAAM;AAAA,UACX,QAAQ,MAAM;AAAA,UACd,OAAO,MAAM,SAAS;AAAA,UACtB,aAAa,MAAM;AAAA,UACnB,MAAM,MAAM;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,WAAW;AAAA,QACb;AACA;AAAA,MACF;AAAA,MAEA,KAAK;AACH,YAAI,MAAM,QAAQ,QAAW;AAC3B,iBAAO,IAAI,IAAI,MAAM,gCAAgC,CAAC;AAAA,QACxD;AACA,gBAAQ;AAAA,UACN,MAAM;AAAA,UACN;AAAA,UACA,MAAM,MAAM;AAAA,UACZ,KAAK,MAAM;AAAA,UACX,OAAO,MAAM,SAAS;AAAA,UACtB,UAAU,MAAM;AAAA,UAChB,mBAAmB,MAAM;AAAA,UACzB,qBAAqB,MAAM;AAAA,UAC3B,aAAa,MAAM;AAAA,UACnB,MAAM,MAAM;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,WAAW;AAAA,QACb;AACA;AAAA,MAEF,SAAS;AAEP,cAAM,cAAqB,MAAM;AACjC,eAAO,IAAI,IAAI,MAAM,uBAAuB,OAAO,WAAW,CAAC,EAAE,CAAC;AAAA,MACpE;AAAA,IACF;AAEA,SAAK,SAAS,OAAO,KAAK,KAAK;AAC/B,UAAM,KAAK,aAAa;AAGxB,QAAI,KAAK,qBAAqB,QAAW;AACvC,YAAM,KAAK,iBAAiB,wBAAwB;AAAA,IACtD;AAGA,QAAI,KAAK,sBAAsB,UAAa,SAAS,uBAAuB,MAAM;AAChF,YAAM,aAAa,KAAK,0BAA0B,OAAO,KAAK;AAE9D,UAAI,eAAe,QAAW;AAC5B,cAAM,KAAK,kBAAkB,cAAc,UAAU;AAAA,MACvD;AAAA,IACF;AAEA,WAAO,GAAG,KAAK;AAAA,EACjB;AAAA,EAEA,MAAM,KAAK,MAAoC;AAC7C,QAAI,SAAS,QAAW;AACtB,aAAO,QAAQ,QAAQ,KAAK,SAAS,OAAO,OAAO,CAAC,MAAM,EAAE,SAAS
,IAAI,CAAC;AAAA,IAC5E;AACA,WAAO,QAAQ,QAAQ,CAAC,GAAG,KAAK,SAAS,MAAM,CAAC;AAAA,EAClD;AAAA,EAEA,MAAM,IAAI,IAAyC;AACjD,WAAO,QAAQ,QAAQ,KAAK,SAAS,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,CAAC;AAAA,EACtE;AAAA,EAEA,MAAM,UAAU,MAA0C;AACxD,WAAO,QAAQ,QAAQ,KAAK,SAAS,OAAO,KAAK,CAAC,MAAM,EAAE,SAAS,IAAI,CAAC;AAAA,EAC1E;AAAA,EAEA,MAAM,cAAc,UAA8C;AAChE,WAAO,QAAQ;AAAA,MACb,KAAK,SAAS,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,YAAY,EAAE,SAAS,QAAQ;AAAA,IAC3E;AAAA,EACF;AAAA,EAEA,MAAM,OACJ,IACA,SACA,SACwB;AACxB,UAAM,QAAQ,KAAK,SAAS,OAAO,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE;AAC/D,QAAI,UAAU,IAAI;AAChB,aAAO,IAAI,IAAI,MAAM,oBAAoB,EAAE,EAAE,CAAC;AAAA,IAChD;AAEA,UAAM,QAAQ,KAAK,SAAS,OAAO,KAAK;AACxC,QAAI,UAAU,QAAW;AACvB,aAAO,IAAI,IAAI,MAAM,oBAAoB,EAAE,EAAE,CAAC;AAAA,IAChD;AAGA,QAAI,QAAQ,MAAM,KAAK,MAAM,IAAI;AAC/B,aAAO,IAAI,IAAI,MAAM,4BAA4B,CAAC;AAAA,IACpD;AAGA,UAAM,aAAa,QAAQ,SAAS,UAAa,QAAQ,SAAS,MAAM;AACxE,QAAI,YAAY;AACd,YAAM,WAAW,KAAK,SAAS,OAAO,KAAK,CAAC,MAAM,EAAE,SAAS,QAAQ,QAAQ,EAAE,OAAO,EAAE;AACxF,UAAI,aAAa,QAAW;AAC1B,eAAO,IAAI,IAAI,MAAM,oBAAoB,QAAQ,IAAI,kBAAkB,CAAC;AAAA,MAC1E;AAAA,IACF;AAGA,UAAM,UAAU;AAAA,MACd,GAAG;AAAA,MACH,GAAG;AAAA,MACH,WAAW,oBAAI,KAAK;AAAA,IACtB;AAEA,SAAK,SAAS,OAAO,KAAK,IAAI;AAC9B,UAAM,KAAK,aAAa;AAGxB,QAAI,KAAK,sBAAsB,UAAa,SAAS,uBAAuB,MAAM;AAChF,UAAI,YAAY;AAEd,cAAM,KAAK,kBAAkB,iBAAiB,MAAM,IAAI;AACxD,cAAM,gBAAgB,KAAK,kCAAkC,OAAO;AAEpE,YAAI,kBAAkB,QAAW;AAC/B,gBAAM,KAAK,kBAAkB,cAAc,aAAa;AAAA,QAC1D;AAAA,MACF,OAAO;AAEL,cAAM,aAAwD,CAAC;AAC/D,YAAI,QAAQ,gBAAgB,QAAW;AACrC,qBAAW,cAAc,QAAQ;AAAA,QACnC;AACA,YAAI,QAAQ,SAAS,QAAW;AAE9B,qBAAW,OAAO,CAAC,GAAG,QAAQ,IAAI;AAAA,QACpC;AAEA,YAAI,OAAO,KAAK,UAAU,EAAE,SAAS,GAAG;AACtC,gBAAM,KAAK,kBAAkB,iBAAiB,MAAM,MAAM,UAAU;AAAA,QACtE;AAAA,MACF;AAAA,IACF;AAEA,WAAO,GAAG,OAAO;AAAA,EACnB;AAAA,EAEA,MAAM,OAAO,IAAa,SAAmD;AAC3E,UAAM,QAAQ,KAAK,SAAS,OAAO,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE;AAC/D,QAAI,UAAU,IAAI;AAChB,aAAO,IAAI,IAAI,MAAM,oBAAoB,EAAE,EAAE,CAAC;AAAA,IAChD;AAEA,UAAM,QAAQ,KAAK,SAAS,OAAO,KAAK;AACxC,QAAI,UAAU,QAAW;AACvB,aAAO,IAAI,IAAI,MAAM,oBAAoB,EAAE,EAAE,CAAC;AAAA,IAChD;AAEA,UAAM,YAAY,MAAM;AACxB,SAAK,SAAS,OAAO,OAAO,OAAO,CAAC;AACpC,UAAM,KAAK,aAAa;AAGxB,QAAI,KAAK,sBAAsB,UAAa,SAAS,uBAAuB,MAAM;AAChF,YAAM,KAAK,kBAAkB,iBAAiB,SAAS;AAAA,IACzD;AAEA,WAAO,GAAG,MAAS;AAAA,EACrB;AAAA,EAEA,MAAc,eAA8B;AAC1C,UAAM,eAAeC,OAAK,KAAK,SAAS,aAAa;AACrD,UAAM,SAAS,MAAMP,YAAW,YAAY;AAE5C,QAAI,CAAC,QAAQ;AAEX,WAAK,WAAW,EAAE,QAAQ,CAAC,EAAE;AAC7B,YAAM,KAAK,aAAa;AACxB;AAAA,IACF;AAGA,UAAM,UAAU,MAAMQ,UAAS,cAAc,OAAO;AACpD,QAAI;AAEF,YAAM,OAAO,KAAK,MAAM,OAAO;AAC/B,WAAK,WAAW;AAAA,QACd,QAAQ,KAAK,OACV,OAAO,CAAC,MAAkB,MAAM,IAAI,EACpC,IAAI,CAAC,OAAO;AAAA,UACX,GAAG;AAAA,UACH,IAAI,cAAc,EAAE,EAAE;AAAA,UACtB,WAAW,IAAI,KAAK,EAAE,SAAS;AAAA,UAC/B,WAAW,IAAI,KAAK,EAAE,SAAS;AAAA,QACjC,EAAE;AAAA,MACN;AAAA,IACF,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,qCAAqC,YAAY,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MAC9G;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAc,eAA8B;AAC1C,UAAM,eAAeD,OAAK,KAAK,SAAS,aAAa;AACrD,UAAM,gBAAgB,cAAc,KAAK,UAAU,KAAK,UAAU,MAAM,CAAC,CAAC;AAAA,EAC5E;AACF;;;AE/eA,SAAS,SAAAE,cAAgC;AACzC,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,cAAAC,mBAAkB;AAC3B,OAAOC,WAAU;AACjB,SAAS,uBAA4D;AACrE,SAAS,qBAAqB;AAC9B,SAAS,gBAAgB;;;ACNzB,SAAS,KAAAC,UAAS;AAGlB,IAAM,mBAAmBA,GAAE,OAAO;AAAA,EAChC,MAAMA,GAAE,OAAO;AAAA,EACf,OAAOA,GAAE,QAAQ;AAAA,EACjB,WAAWA,GAAE,OAAO;AAAA,EACpB,WAAWA,GAAE,OAAO;AAAA,EACpB,SAASA,GAAE,OAAO;AAAA,EAClB,OAAOA,GAAE,MAAMA,GAAE,OAAO,CAAC;AAC3B,CAAC;AAED,IAAM,iBAAiBA,GAAE,OAAO;AAAA,EAC9B,MAAMA,GAAE,KAAK,CAAC,YAAY,OAAO,CAAC;AAAA,EAClC,MAAMA,GAAE,OAAO;AAAA,EACf,UAAUA,GAAE,QAAQ;AAAA,EACpB,WAAWA,GAAE,OAAO;AAAA,EACpB,SAASA,GAAE,OAAO;AAAA,EAClB,OAAOA,GAAE,QAAQ,EAAE,SAAS;AAAA,EAC5B,W
AAWA,GAAE,OAAO,EAAE,SAAS;AAAA,EAC/B,OAAOA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,SAAS;AAAA,EACpC,SAASA,GAAE,MAAM,gBAAgB,EAAE,SAAS;AAC9C,CAAC;AAED,IAAM,mBAAmBA,GAAE,OAAO;AAAA,EAChC,QAAQA,GAAE,OAAO;AAAA,EACjB,UAAUA,GAAE,OAAO;AAAA,EACnB,OAAOA,GAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AACxC,CAAC;AAEM,IAAM,0BAA0BA,GAAE,OAAO;AAAA,EAC9C,OAAOA,GAAE,MAAM,cAAc;AAAA,EAC7B,SAASA,GAAE,MAAM,gBAAgB;AACnC,CAAC;AAYM,SAAS,0BAA0B,MAAkC;AAC1E,SAAO,wBAAwB,MAAM,IAAI;AAC3C;;;ADrCA,IAAMC,UAAS,aAAa,eAAe;AAe3C,SAAS,sBAA8B;AACrC,SAAO,QAAQ,aAAa,UAAU,WAAW;AACnD;AAOA,SAAS,kBAAkB,YAA4B;AACrD,MAAI,QAAQ,aAAa,SAAS;AAChC,WAAOC,MAAK,KAAK,YAAY,SAAS,WAAW,YAAY;AAAA,EAC/D;AACA,SAAOA,MAAK,KAAK,YAAY,SAAS,OAAO,SAAS;AACxD;AAEO,IAAM,eAAN,MAAmB;AAAA,EAChB,UAA+B;AAAA,EACtB,UAAuC,oBAAI,IAAI;AAAA,EACxD,wBAAwB;AAAA,EACxB,iBAA2C;AAAA,EAC3C,iBAA2C;AAAA,EAEnD,QAAuB;AACrB,QAAI,KAAK,QAAS,QAAO,QAAQ,QAAQ;AAIzC,UAAM,kBAAkB,cAAc,YAAY,GAAG;AAErD,UAAM,cAAc,GAAGA,MAAK,GAAG,OAAOA,MAAK,GAAG;AAC9C,UAAM,eAAe,gBAAgB,SAAS,WAAW;AAEzD,QAAI;AACJ,QAAI;AAEJ,QAAI,cAAc;AAEhB,YAAM,YAAY,gBAAgB,QAAQ,WAAW;AACrD,YAAM,aAAa,gBAAgB,UAAU,GAAG,SAAS;AACzD,yBAAmBA,MAAK,KAAK,YAAY,UAAU,eAAe;AAGlE,YAAM,aAAa,kBAAkB,UAAU;AAC/C,mBAAaC,YAAW,UAAU,IAAI,aAAa,oBAAoB;AAAA,IACzE,OAAO;AAEL,YAAM,SAASD,MAAK,QAAQA,MAAK,QAAQ,eAAe,CAAC;AACzD,YAAM,cAAcA,MAAK,QAAQ,MAAM;AACvC,yBAAmBA,MAAK,KAAK,aAAa,UAAU,eAAe;AAGnE,mBAAa,oBAAoB;AAAA,IACnC;AAEA,IAAAD,QAAO;AAAA,MACL,EAAE,kBAAkB,YAAY,iBAAiB,aAAa;AAAA,MAC9D;AAAA,IACF;AAEA,SAAK,UAAUG,OAAM,YAAY,CAAC,gBAAgB,GAAG;AAAA,MACnD,OAAO,CAAC,QAAQ,QAAQ,MAAM;AAAA,IAChC,CAAC;AAGD,SAAK,QAAQ,GAAG,SAAS,CAACC,SAAQ;AAChC,MAAAJ,QAAO,MAAM,EAAE,OAAOI,KAAI,SAAS,OAAOA,KAAI,MAAM,GAAG,6BAA6B;AACpF,WAAK,iBAAiB,IAAI,MAAM,kBAAkBA,KAAI,OAAO,EAAE,CAAC;AAAA,IAClE,CAAC;AAGD,SAAK,QAAQ,GAAG,QAAQ,CAAC,MAAM,WAAW;AACxC,UAAI,SAAS,KAAK,SAAS,MAAM;AAC/B,QAAAJ,QAAO,MAAM,EAAE,KAAK,GAAG,iDAAiD;AACxE,aAAK,iBAAiB,IAAI,MAAM,4BAA4B,OAAO,IAAI,CAAC,EAAE,CAAC;AAAA,MAC7E,WAAW,UAAU,CAAC,KAAK,uBAAuB;AAEhD,QAAAA,QAAO,MAAM,EAAE,OAAO,GAAG,0CAA0C;AACnE,aAAK,iBAAiB,IAAI,MAAM,8BAA8B,MAAM,EAAE,CAAC;AAAA,MACzE;AACA,WAAK,UAAU;AACf,WAAK,wBAAwB;AAAA,IAC/B,CAAC;AAGD,QAAI,KAAK,QAAQ,QAAQ;AACvB,WAAK,iBAAiB,gBAAgB,EAAE,OAAO,KAAK,QAAQ,OAAO,CAAC;AACpE,WAAK,eAAe,GAAG,QAAQ,CAAC,SAAS;AACvC,QAAAA,QAAO,KAAK,EAAE,QAAQ,KAAK,GAAG,6BAA6B;AAAA,MAC7D,CAAC;AAAA,IACH;AAEA,QAAI,KAAK,QAAQ,WAAW,MAAM;AAChC,WAAK,QAAQ,KAAK;AAClB,WAAK,UAAU;AACf,aAAO,QAAQ,OAAO,IAAI,MAAM,sCAAsC,CAAC;AAAA,IACzE;AACA,SAAK,iBAAiB,gBAAgB,EAAE,OAAO,KAAK,QAAQ,OAAO,CAAC;AACpE,SAAK,eAAe,GAAG,QAAQ,CAAC,SAAS;AAEvC,UAAI,CAAC,KAAK,KAAK,EAAE,WAAW,GAAG,GAAG;AAChC;AAAA,MACF;AAEA,UAAI;AAEF,cAAM,WAAW,KAAK,MAAM,IAAI;AAKhC,cAAM,UAAU,KAAK,QAAQ,IAAI,SAAS,EAAE;AAC5C,YAAI,YAAY,QAAW;AACzB,cAAI,SAAS,UAAU,QAAW;AAChC,yBAAa,QAAQ,OAAO;AAC5B,iBAAK,QAAQ,OAAO,SAAS,EAAE;AAC/B,oBAAQ,OAAO,IAAI,MAAM,SAAS,MAAM,OAAO,CAAC;AAAA,UAClD,WAAW,SAAS,WAAW,QAAW;AACxC,yBAAa,QAAQ,OAAO;AAC5B,iBAAK,QAAQ,OAAO,SAAS,EAAE;AAG/B,gBAAI;AACF,oBAAM,YAAY,0BAA0B,SAAS,MAAM;AAC3D,sBAAQ,QAAQ,SAAS;AAAA,YAC3B,SAAS,OAAgB;AAEvB,kBAAI,iBAAiB,UAAU;AAC7B,gBAAAA,QAAO;AAAA,kBACL;AAAA,oBACE,QAAQ,MAAM;AAAA,oBACd,UAAU,KAAK,UAAU,SAAS,MAAM;AAAA,kBAC1C;AAAA,kBACA;AAAA,gBACF;AACA,wBAAQ;AAAA,kBACN,IAAI,MAAM,+CAA+C,MAAM,OAAO,EAAE;AAAA,gBAC1E;AAAA,cACF,OAAO;AACL,sBAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAC1E,gBAAAA,QAAO,MAAM,EAAE,OAAO,aAAa,GAAG,2BAA2B;AACjE,wBAAQ,OAAO,IAAI,MAAM,8BAA8B,YAAY,EAAE,CAAC;AAAA,cACxE;AAAA,YACF;AAAA,UACF;AAAA,QAEF;AAAA,MACF,SAASI,MAAK;AACZ,QAAAJ,QAAO;AAAA,UACL;AAAA,YACE,OAAOI,gBAAe,QAAQA,KAAI,UAAU,OAAOA,IAAG;AAAA,YACtD;AAAA,UACF;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA,EAEA,MAAM,YACJ,MACA,UACA,YAAo
B,KACQ;AAC5B,QAAI,CAAC,KAAK,QAAS,OAAM,KAAK,MAAM;AAEpC,UAAM,KAAKC,YAAW;AACtB,UAAM,UAAU;AAAA,MACd,SAAS;AAAA,MACT;AAAA,MACA,QAAQ;AAAA,MACR,QAAQ,EAAE,MAAM,SAAS;AAAA,IAC3B;AAEA,WAAO,IAAI,QAAQ,CAACC,UAAS,WAAW;AACtC,YAAM,UAAU,WAAW,MAAM;AAC/B,cAAM,UAAU,KAAK,QAAQ,IAAI,EAAE;AACnC,YAAI,SAAS;AACX,eAAK,QAAQ,OAAO,EAAE;AACtB;AAAA,YACE,IAAI,MAAM,gCAAgC,OAAO,SAAS,CAAC,gBAAgB,QAAQ,EAAE;AAAA,UACvF;AAAA,QACF;AAAA,MACF,GAAG,SAAS;AAEZ,WAAK,QAAQ,IAAI,IAAI;AAAA,QACnB,SAAAA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AACD,UAAI,CAAC,KAAK,SAAS,OAAO;AACxB,eAAO,IAAI,MAAM,qCAAqC,CAAC;AACvD;AAAA,MACF;AACA,WAAK,QAAQ,MAAM,MAAM,GAAG,KAAK,UAAU,OAAO,CAAC;AAAA,CAAI;AAAA,IACzD,CAAC;AAAA,EACH;AAAA,EAEA,OAAsB;AACpB,QAAI,CAAC,KAAK,SAAS;AACjB,aAAO,QAAQ,QAAQ;AAAA,IACzB;AAEA,WAAO,IAAI,QAAQ,CAACA,aAAY;AAC9B,WAAK,wBAAwB;AAC7B,WAAK,iBAAiB,IAAI,MAAM,uBAAuB,CAAC;AAGxD,UAAI,KAAK,gBAAgB;AACvB,aAAK,eAAe,MAAM;AAC1B,aAAK,iBAAiB;AAAA,MACxB;AACA,UAAI,KAAK,gBAAgB;AACvB,aAAK,eAAe,MAAM;AAC1B,aAAK,iBAAiB;AAAA,MACxB;AAGA,YAAM,OAAO,KAAK;AAClB,UAAI,SAAS,MAAM;AACjB,QAAAA,SAAQ;AACR;AAAA,MACF;AAGA,YAAM,SAAS,MAAY;AACzB,QAAAA,SAAQ;AAAA,MACV;AACA,WAAK,KAAK,QAAQ,MAAM;AAGxB,WAAK,KAAK;AAGV,iBAAW,MAAM;AACf,aAAK,eAAe,QAAQ,MAAM;AAClC,YAAI,KAAK,YAAY,MAAM;AACzB,eAAK,KAAK,SAAS;AACnB,eAAK,UAAU;AAAA,QACjB;AACA,QAAAA,SAAQ;AAAA,MACV,GAAG,GAAI;AAAA,IACT,CAAC;AAAA,EACH;AAAA,EAEQ,iBAAiB,OAAoB;AAC3C,eAAW,WAAW,KAAK,QAAQ,OAAO,GAAG;AAC3C,mBAAa,QAAQ,OAAO;AAC5B,cAAQ,OAAO,KAAK;AAAA,IACtB;AACA,SAAK,QAAQ,MAAM;AAAA,EACrB;AACF;;;AEvRA,SAAS,WAAAC,gBAAe;AACxB,SAAS,QAAAC,cAAY;AACrB,SAAS,UAAU,WAA2C;AAI9D,IAAI,WAAWA,OAAKD,SAAQ,GAAG,UAAU,0BAA0B;AAE5D,IAAM,kBAAN,MAAsB;AAAA,EACnB,YAA8C;AAAA;AAAA,EAE9C,cAA6B;AAAA,EACpB;AAAA,EACA;AAAA,EAEjB,YAAY,YAAY,2BAA2B,YAAY,IAAI;AACjE,SAAK,YAAY;AACjB,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAM,aAA4B;AAChC,QAAI,KAAK,cAAc,KAAM;AAG7B,SAAK,YAAY,MAAM,SAAS,sBAAsB,KAAK,WAAW;AAAA,MACpE,OAAO;AAAA,IACT,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,MAAM,MAAiC;AAC3C,QAAI,KAAK,cAAc,MAAM;AAC3B,YAAM,KAAK,WAAW;AAAA,IACxB;AACA,QAAI,KAAK,cAAc,MAAM;AAC3B,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AACA,UAAM,SAAS,MAAM,KAAK,UAAU,MAAM;AAAA,MACxC,SAAS;AAAA,MACT,WAAW;AAAA,IACb,CAAC;AACD,UAAM,SAAS,MAAM,KAAK,OAAO,IAAI;AAErC,SAAK,gBAAgB,OAAO;AAC5B,WAAO,OAAO,IAAI,CAAC,MAAM,OAAO,CAAC,CAAC;AAAA,EACpC;AAAA,EAEA,MAAM,WAAW,OAAsC;AACrD,UAAM,UAAsB,CAAC;AAE7B,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,KAAK,WAAW;AACrD,YAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,KAAK,SAAS;AAG/C,YAAM,eAAe,MAAM,QAAQ,IAAI,MAAM,IAAI,CAAC,SAAS,KAAK,MAAM,IAAI,CAAC,CAAC;AAE5E,cAAQ,KAAK,GAAG,YAAY;AAG5B,UAAI,IAAI,KAAK,YAAY,MAAM,QAAQ;AACrC,cAAM,IAAI,QAAQ,CAACE,aAAY,WAAWA,UAAS,GAAG,CAAC;AAAA,MACzD;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,gBAAwB;AACtB,QAAI,KAAK,gBAAgB,MAAM;AAC7B,YAAM,IAAI,MAAM,iDAAiD;AAAA,IACnE;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,mBAAoC;AACxC,QAAI,KAAK,gBAAgB,MAAM;AAE7B,YAAM,KAAK,MAAM,EAAE;AAAA,IACrB;AACA,QAAI,KAAK,gBAAgB,MAAM;AAC7B,YAAM,IAAI,MAAM,0CAA0C;AAAA,IAC5D;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,UAAyB;AAC7B,QAAI,KAAK,cAAc,MAAM;AAC3B,YAAM,KAAK,UAAU,QAAQ;AAC7B,WAAK,YAAY;AAAA,IACnB;AAAA,EACF;AACF;;;ACtGA,YAAY,aAAa;;;ACAzB,SAAS,KAAAC,UAAS;AAOX,IAAM,qBAAqBA,GAAE,KAAK,CAAC,QAAQ,SAAS,KAAK,CAAC;AAE1D,IAAM,yBAAyBA,GACnC,OAAO;AAAA,EACN,MAAMA,GAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,KAAKA,GAAE,OAAO,EAAE,SAAS;AAAA,EACzB,MAAM;AAAA,EACN,SAASA,GAAE,OAAO;AAAA,EAClB,WAAWA,GAAE,OAAO;AAAA;AAAA,EACpB,UAAUA,GAAE,OAAO,EAAE,SAAS;AAAA,EAC9B,YAAYA,GAAE,OAAO,EAAE,SAAS;AAAA,EAChC,aAAaA,GAAE,OAAO,EAAE,SAAS;AACnC,CAAC,EACA,MAAM;;;ADEF,IAAM,aAAN,MAAiB;AAAA,EACd,aAAgC;AAAA,EACvB,SAA6B,oBAAI,IAAI;AAAA,EACrC;AAAA;AAAA,EAET,cAA6B;AAAA,EAErC,Y
AAY,SAAiB;AAC3B,SAAK,UAAU;AAAA,EACjB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,cAAc,YAA0B;AACtC,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,MAAM,WAAW,SAAiC;AAChD,QAAI,KAAK,gBAAgB,MAAM;AAC7B,YAAM,IAAI,MAAM,+DAA+D;AAAA,IACjF;AAEA,SAAK,eAAe,MAAc,gBAAQ,KAAK,OAAO;AAEtD,UAAM,YAAY,KAAK,aAAa,OAAO;AAC3C,UAAM,aAAa,MAAM,KAAK,WAAW,WAAW;AAEpD,QAAI,CAAC,WAAW,SAAS,SAAS,GAAG;AAEnC,YAAM,QAAQ,MAAM,KAAK,WAAW,YAAY,WAAW;AAAA,QACzD;AAAA,UACE,IAAI;AAAA,UACJ,SAAS;AAAA,UACT,QAAQ,IAAI,MAAM,KAAK,WAAW,EAAE,KAAK,CAAC;AAAA,UAC1C,UAAU;AAAA,QACZ;AAAA,MACF,CAAC;AAED,YAAM,MAAM,OAAO,iBAAiB;AACpC,WAAK,OAAO,IAAI,WAAW,KAAK;AAAA,IAClC,OAAO;AACL,YAAM,QAAQ,MAAM,KAAK,WAAW,UAAU,SAAS;AACvD,WAAK,OAAO,IAAI,WAAW,KAAK;AAAA,IAClC;AAAA,EACF;AAAA,EAEA,MAAM,aAAa,SAAkB,WAAsC;AACzE,UAAM,QAAQ,MAAM,KAAK,SAAS,OAAO;AACzC,UAAM,iBAAkC,UAAU,IAAI,CAAC,SAAS;AAAA,MAC9D,IAAI,IAAI;AAAA,MACR,SAAS,IAAI;AAAA,MACb,QAAQ,CAAC,GAAG,IAAI,MAAM;AAAA,MACtB,UAAU,KAAK,UAAU,IAAI,QAAQ;AAAA,IACvC,EAAE;AACF,UAAM,MAAM,IAAI,cAAc;AAAA,EAChC;AAAA,EAEA,MAAM,gBAAgB,SAAkB,aAA0C;AAChF,QAAI,YAAY,WAAW,GAAG;AAC5B;AAAA,IACF;AACA,UAAM,QAAQ,MAAM,KAAK,SAAS,OAAO;AACzC,UAAM,SAAS,YAAY,IAAI,CAAC,OAAO,IAAI,EAAE,GAAG,EAAE,KAAK,IAAI;AAC3D,UAAM,MAAM,OAAO,UAAU,MAAM,GAAG;AAAA,EACxC;AAAA,EAEA,MAAM,kBAAkB,SAAiC;AACvD,UAAM,QAAQ,MAAM,KAAK,SAAS,OAAO;AACzC,UAAM,MAAM,OAAO,gBAAgB;AAAA,EACrC;AAAA,EAEA,MAAM,OACJ,SACA,QACA,OAGA,YAGA;AACA,UAAM,QAAQ,MAAM,KAAK,SAAS,OAAO;AACzC,UAAM,QAAQ,MAAM,aAAa,MAAM,EAAE,MAAM,KAAK,EAAE,aAAa,QAAQ;AAG3E,UAAM,UAAW,MAAM,MAAM,QAAQ;AAIrC,WAAO,QAAQ,IAAI,CAAC,MAAM;AACxB,YAAM,WAAW,uBAAuB,MAAM,KAAK,MAAM,EAAE,QAAQ,CAAC;AACpE,aAAO;AAAA,QACL,IAAI,iBAAiB,EAAE,EAAE;AAAA,QACzB,SAAS,EAAE;AAAA,QACX,OAAO,IAAI,EAAE;AAAA;AAAA;AAAA,QAGb;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,eAAe,SAAiC;AACpD,UAAM,QAAQ,MAAM,KAAK,SAAS,OAAO;AACzC,UAAM,MAAM,YAAY,WAAW;AAAA,MACjC,QAAgB,cAAM,IAAI;AAAA,IAC5B,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,eACJ,SACA,OACA,OAGA;AACA,UAAM,QAAQ,MAAM,KAAK,SAAS,OAAO;AAGzC,UAAM,UAAW,MAAM,MAAM,OAAO,OAAO,KAAK,EAAE,MAAM,KAAK,EAAE,QAAQ;AAOvE,WAAO,QAAQ,IAAI,CAAC,MAAM;AACxB,YAAM,WAAW,uBAAuB,MAAM,KAAK,MAAM,EAAE,QAAQ,CAAC;AACpE,aAAO;AAAA,QACL,IAAI,iBAAiB,EAAE,EAAE;AAAA,QACzB,SAAS,EAAE;AAAA,QACX,OAAO,EAAE;AAAA;AAAA;AAAA,QAGT;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,YAAY,SAAiC;AACjD,UAAM,YAAY,KAAK,aAAa,OAAO;AAE3C,SAAK,eAAe,MAAc,gBAAQ,KAAK,OAAO;AACtD,UAAM,aAAa,MAAM,KAAK,WAAW,WAAW;AACpD,QAAI,WAAW,SAAS,SAAS,GAAG;AAClC,YAAM,KAAK,WAAW,UAAU,SAAS;AACzC,WAAK,OAAO,OAAO,SAAS;AAAA,IAC9B;AAAA,EACF;AAAA,EAEA,QAAc;AACZ,SAAK,OAAO,MAAM;AAClB,QAAI,KAAK,eAAe,MAAM;AAC5B,WAAK,WAAW,MAAM;AACtB,WAAK,aAAa;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,aAA4B;AAC1B,SAAK,MAAM;AACX,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA,EAEQ,aAAa,SAA0B;AAC7C,WAAO,aAAa,OAAO;AAAA,EAC7B;AAAA,EAEA,MAAc,SAAS,SAAkC;AACvD,UAAM,YAAY,KAAK,aAAa,OAAO;AAC3C,QAAI,QAAQ,KAAK,OAAO,IAAI,SAAS;AACrC,QAAI,UAAU,QAAW;AACvB,YAAM,KAAK,WAAW,OAAO;AAC7B,cAAQ,KAAK,OAAO,IAAI,SAAS;AAAA,IACnC;AACA,QAAI,UAAU,QAAW;AACvB,YAAM,IAAI,MAAM,8BAA8B,OAAO,EAAE;AAAA,IACzD;AACA,WAAO;AAAA,EACT;AACF;;;AElMA,IAAMC,UAAS,aAAa,UAAU;AAkC/B,IAAM,uBAAN,MAAuD;AAAA;AAAA,EAEnD;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGQ;AAAA,EACA;AAAA;AAAA;AAAA,EAIT,YAAoC;AAAA,EACpC,cAAsC;AAAA,EACtC,aAAsC;AAAA,EACtC,UAAgC;AAAA,EAChC,SAA8B;AAAA,EAEtC,YACE,QACA,WACA,SACA,OACA,OACA,cACA;AACA,SAAK,SAAS;AACd,SAAK,YAAY;AACjB,SAAK,UAAU;AACf,SAAK,QAAQ;AACb,SAAK,QAAQ;AACb,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,IAAI,aAA8B;AAChC,QAAI,KAAK,gBAAgB,MAAM;AAC7B,MAAAA,QAAO,MAAM,mCAAmC;AAChD,WAAK,cAAc,IAAI;AAAA,QACrB,KAAK,UAAU,UAAU;AAAA,QACzB,KAAK,UAAU,UAAU;AAAA,MAC3B;AAAA,IACF;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EA
KA,IAAI,YAA8B;AAChC,QAAI,KAAK,eAAe,MAAM;AAC5B,MAAAA,QAAO,MAAM,oCAAoC;AACjD,WAAK,aAAa,IAAI,iBAAiB,KAAK,SAAS,KAAK,YAAY;AAAA,IACxE;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,SAAwB;AAC1B,QAAI,KAAK,YAAY,MAAM;AACzB,MAAAA,QAAO,MAAM,iCAAiC;AAC9C,WAAK,UAAU,IAAI;AAAA,QACjB,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK,UAAU;AAAA,MACjB;AAAA,IACF;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,QAAsB;AACxB,QAAI,KAAK,WAAW,MAAM;AACxB,MAAAA,QAAO,MAAM,gCAAgC;AAC7C,WAAK,SAAS,IAAI,aAAa,KAAK,OAAO,KAAK,YAAY;AAAA,QAC1D,kBAAkB,KAAK;AAAA,QACvB,iBAAiB,KAAK;AAAA,QACtB,WAAW,KAAK,UAAU,SAAS;AAAA,QACnC,cAAc,KAAK,UAAU,SAAS;AAAA,QACtC,aAAa,KAAK,UAAU,SAAS;AAAA,QACrC,gBAAgB,KAAK,UAAU,SAAS;AAAA,MAC1C,CAAC;AAAA,IACH;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,WAA4B;AAC9B,QAAI,KAAK,cAAc,MAAM;AAC3B,MAAAA,QAAO,MAAM,mCAAmC;AAChD,WAAK,YAAY,IAAI,gBAAgB,KAAK,OAAO;AAAA,IACnD;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,gBAAyB;AAC3B,WAAO,KAAK,gBAAgB;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,YAAqB;AACvB,WAAO,KAAK,YAAY;AAAA,EAC1B;AACF;AAUA,eAAsB,mBACpB,YACA,SACA,aAC+B;AAC/B,EAAAA,QAAO,KAAK,EAAE,YAAY,SAAS,YAAY,GAAG,4BAA4B;AAC9E,QAAM,YAAY,KAAK,IAAI;AAE3B,QAAM,SAAS,IAAI,cAAc,YAAY,SAAS,WAAW;AACjE,QAAM,YAAY,MAAM,OAAO,KAAK;AACpC,QAAM,kBAAkB,OAAO,eAAe;AAK9C,QAAM,eAAe,IAAI,aAAa;AACtC,QAAM,aAAa,MAAM;AAGzB,QAAM,QAAQ,IAAI,WAAW,eAAe;AAG5C,QAAM,sBAAsB,OAAO,mBAAmB;AACtD,QAAM,oBAAoB,IAAI,uBAAuB,mBAAmB;AACxE,QAAM,mBAAmB,IAAI,iBAAiB,mBAAmB;AACjE,QAAM,eAAoC;AAAA,IACxC;AAAA,IACA;AAAA,IACA,aAAa;AAAA,EACf;AAEA,QAAM,QAAQ,IAAI,aAAa,iBAAiB,YAAY;AAC5D,QAAM,MAAM,WAAW;AAEvB,QAAM,aAAa,KAAK,IAAI,IAAI;AAChC,EAAAA,QAAO;AAAA,IACL,EAAE,SAAS,iBAAiB,aAAa,qBAAqB,WAAW;AAAA,IACzE;AAAA,EACF;AAEA,SAAO,IAAI,qBAAqB,QAAQ,WAAW,iBAAiB,OAAO,OAAO,YAAY;AAChG;AAQA,eAAsB,eACpB,YACA,SACA,aAC2B;AAC3B,EAAAA,QAAO,KAAK,EAAE,YAAY,SAAS,YAAY,GAAG,uBAAuB;AAEzE,QAAM,SAAS,IAAI,cAAc,YAAY,SAAS,WAAW;AACjE,QAAM,YAAY,MAAM,OAAO,KAAK;AACpC,QAAM,kBAAkB,OAAO,eAAe;AAK9C,QAAM,eAAe,IAAI,aAAa;AACtC,QAAM,aAAa,MAAM;AAGzB,QAAM,QAAQ,IAAI,WAAW,eAAe;AAC5C,QAAM,aAAa,IAAI,gBAAgB,UAAU,UAAU,OAAO,UAAU,UAAU,SAAS;AAE/F,QAAM,WAAW,WAAW;AAG5B,QAAM,sBAAsB,OAAO,mBAAmB;AACtD,QAAM,oBAAoB,IAAI,uBAAuB,mBAAmB;AACxE,QAAM,mBAAmB,IAAI,iBAAiB,mBAAmB;AACjE,QAAM,eAAoC;AAAA,IACxC;AAAA,IACA;AAAA,IACA,aAAa;AAAA,EACf;AAEA,QAAM,QAAQ,IAAI,aAAa,iBAAiB,YAAY;AAC5D,QAAM,MAAM,WAAW;AAEvB,QAAM,YAAY,IAAI,iBAAiB,iBAAiB,YAAY;AACpE,QAAM,WAAW,IAAI,gBAAgB,eAAe;AACpD,QAAM,SAAS,IAAI,cAAc,OAAO,YAAY,WAAW,UAAU,MAAM;AAC/E,QAAM,QAAQ,IAAI,aAAa,OAAO,YAAY;AAAA,IAChD,kBAAkB;AAAA,IAClB,iBAAiB;AAAA,IACjB,WAAW,UAAU,SAAS;AAAA,IAC9B,cAAc,UAAU,SAAS;AAAA,IACjC,aAAa,UAAU,SAAS;AAAA,IAChC,gBAAgB,UAAU,SAAS;AAAA,EACrC,CAAC;AAED,EAAAA,QAAO;AAAA,IACL,EAAE,SAAS,iBAAiB,aAAa,oBAAoB;AAAA,IAC7D;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AASA,eAAsB,gBAAgB,UAA2C;AAC/E,EAAAA,QAAO,KAAK,wBAAwB;AACpC,QAAM,SAAkB,CAAC;AASzB,QAAM,kBAAkB,oBAAoB;AAC5C,QAAM,sBAAsB,CAAC,mBAAmB,SAAS;AAEzD,MAAI,qBAAqB;AACvB,aAAS,OAAO,QAAQ;AAAA,EAC1B,OAAO;AACL,IAAAA,QAAO,MAAM,2CAA2C;AAAA,EAC1D;AAGA,MAAI;AACF,UAAM,SAAS,aAAa,KAAK;AAAA,EACnC,SAAS,GAAG;AACV,UAAM,QAAQ,aAAa,QAAQ,IAAI,IAAI,MAAM,OAAO,CAAC,CAAC;AAC1D,IAAAA,QAAO,MAAM,EAAE,MAAM,GAAG,8BAA8B;AACtD,WAAO,KAAK,KAAK;AAAA,EACnB;AAGA,QAAM,0BAA0B,CAAC,mBAAmB,SAAS;AAE7D,MAAI,yBAAyB;AAC3B,QAAI;AACF,YAAM,SAAS,WAAW,QAAQ;AAAA,IACpC,SAAS,GAAG;AACV,YAAM,QAAQ,aAAa,QAAQ,IAAI,IAAI,MAAM,OAAO,CAAC,CAAC;AAC1D,MAAAA,QAAO,MAAM,EAAE,MAAM,GAAG,iCAAiC;AACzD,aAAO,KAAK,KAAK;AAAA,IACnB;AAAA,EACF,OAAO;AACL,IAAAA,QAAO,MAAM,gDAAgD;AAAA,EAC/D;AAGA,MAAI;AACF,UAAM,SAAS,MAAM,W
AAW;AAAA,EAClC,SAAS,GAAG;AACV,UAAM,QAAQ,aAAa,QAAQ,IAAI,IAAI,MAAM,OAAO,CAAC,CAAC;AAC1D,IAAAA,QAAO,MAAM,EAAE,MAAM,GAAG,0BAA0B;AAClD,WAAO,KAAK,KAAK;AAAA,EACnB;AAEA,QAAM,eAAe;AAGrB,MAAI,OAAO,WAAW,KAAK,OAAO,CAAC,MAAM,QAAW;AAClD,UAAM,IAAI,MAAM,4BAA4B,OAAO,CAAC,EAAE,OAAO,IAAI,EAAE,OAAO,OAAO,CAAC,EAAE,CAAC;AAAA,EACvF,WAAW,OAAO,SAAS,GAAG;AAC5B,UAAM,IAAI,eAAe,QAAQ,yCAAyC;AAAA,EAC5E;AACF;","names":["existsSync","join","path","join","existsSync","resolve","mkdirSync","existsSync","join","join","existsSync","mkdirSync","writeFileSync","mkdirSync","dirname","writeFile","mkdir","join","dirname","path","path","path","path","path","mkdir","dirname","writeFile","join","readFile","join","path","join","readFile","readFile","writeFile","access","join","fileExists","path","createHash","readFile","join","createHash","readFile","path","createDocumentId","path","readFile","createHash","join","readFile","access","mkdir","join","z","join","mkdir","readFile","path","access","firstLine","fullContent","signature","logger","timeMs","t","path","readFile","access","resolve","isAbsolute","join","z","fileExists","path","access","join","readFile","isAbsolute","resolve","randomUUID","readFile","mkdir","stat","access","join","resolve","mkdir","mkdir","resolve","fileExists","path","access","mkdir","randomUUID","resolve","stat","join","readFile","spawn","randomUUID","existsSync","path","z","logger","path","existsSync","spawn","err","randomUUID","resolve","homedir","join","resolve","z","logger"]}
1
+ {"version":3,"sources":["../src/analysis/adapter-registry.ts","../src/logging/logger.ts","../src/services/project-root.service.ts","../src/logging/payload.ts","../src/services/job.service.ts","../src/types/job.ts","../src/types/result.ts","../src/utils/atomic-write.ts","../src/services/code-graph.service.ts","../src/analysis/ast-parser.ts","../src/analysis/code-graph.ts","../src/analysis/tree-sitter-parser.ts","../src/analysis/go-ast-parser.ts","../src/analysis/parser-factory.ts","../src/analysis/python-ast-parser.ts","../src/analysis/rust-ast-parser.ts","../src/services/config.service.ts","../src/types/config.ts","../src/utils/deep-merge.ts","../src/services/gitignore.service.ts","../src/services/index.service.ts","../src/services/chunking.service.ts","../src/services/drift.service.ts","../src/services/manifest.service.ts","../src/types/manifest.ts","../src/services/code-unit.service.ts","../src/services/search.service.ts","../src/services/store-definition.service.ts","../src/types/store-definition.ts","../src/services/store.service.ts","../src/plugin/git-clone.ts","../src/crawl/bridge.ts","../src/crawl/schemas.ts","../src/db/embeddings.ts","../src/db/lance.ts","../src/types/document.ts","../src/services/index.ts"],"sourcesContent":["/**\n * Adapter Registry\n *\n * Singleton registry for language adapters. Provides lookup by extension\n * or language ID.\n *\n * @example\n * ```typescript\n * // Register an adapter\n * const registry = AdapterRegistry.getInstance();\n * registry.register(zilAdapter);\n *\n * // Look up by extension\n * const adapter = registry.getByExtension('.zil');\n * if (adapter) {\n * const nodes = adapter.parse(content, filePath);\n * }\n * ```\n */\n\nimport type { LanguageAdapter } from './language-adapter.js';\n\nexport class AdapterRegistry {\n private static instance: AdapterRegistry | undefined;\n\n /** Map from languageId to adapter */\n private readonly adaptersByLanguageId = new Map<string, LanguageAdapter>();\n\n /** Map from extension to adapter */\n private readonly adaptersByExtension = new Map<string, LanguageAdapter>();\n\n private constructor() {\n // Private constructor for singleton\n }\n\n /**\n * Get the singleton instance of the registry.\n */\n static getInstance(): AdapterRegistry {\n AdapterRegistry.instance ??= new AdapterRegistry();\n return AdapterRegistry.instance;\n }\n\n /**\n * Reset the singleton instance (for testing).\n */\n static resetInstance(): void {\n AdapterRegistry.instance = undefined;\n }\n\n /**\n * Register a language adapter.\n *\n * @param adapter - The adapter to register\n * @throws If a different adapter with the same extension is already registered\n */\n register(adapter: LanguageAdapter): void {\n // Skip if already registered with same languageId (idempotent)\n if (this.adaptersByLanguageId.has(adapter.languageId)) {\n return;\n }\n\n // Check for extension conflicts with other adapters\n for (const ext of adapter.extensions) {\n const normalizedExt = this.normalizeExtension(ext);\n const existingAdapter = this.adaptersByExtension.get(normalizedExt);\n if (existingAdapter !== undefined) {\n throw new Error(\n `Extension \"${normalizedExt}\" is already registered by adapter \"${existingAdapter.languageId}\"`\n );\n }\n }\n\n // Register by languageId\n this.adaptersByLanguageId.set(adapter.languageId, adapter);\n\n // Register by each extension\n for (const ext of adapter.extensions) {\n const normalizedExt = this.normalizeExtension(ext);\n this.adaptersByExtension.set(normalizedExt, adapter);\n }\n }\n\n 
/**\n * Unregister a language adapter by its language ID.\n *\n * @param languageId - The language ID to unregister\n * @returns true if the adapter was found and removed, false otherwise\n */\n unregister(languageId: string): boolean {\n const adapter = this.adaptersByLanguageId.get(languageId);\n if (adapter === undefined) {\n return false;\n }\n\n // Remove from languageId map\n this.adaptersByLanguageId.delete(languageId);\n\n // Remove from extension map\n for (const ext of adapter.extensions) {\n const normalizedExt = this.normalizeExtension(ext);\n this.adaptersByExtension.delete(normalizedExt);\n }\n\n return true;\n }\n\n /**\n * Get an adapter by file extension.\n *\n * @param ext - File extension (with or without leading dot)\n * @returns The adapter if found, undefined otherwise\n */\n getByExtension(ext: string): LanguageAdapter | undefined {\n const normalizedExt = this.normalizeExtension(ext);\n return this.adaptersByExtension.get(normalizedExt);\n }\n\n /**\n * Get an adapter by language ID.\n *\n * @param languageId - The unique language identifier\n * @returns The adapter if found, undefined otherwise\n */\n getByLanguageId(languageId: string): LanguageAdapter | undefined {\n return this.adaptersByLanguageId.get(languageId);\n }\n\n /**\n * Get all registered adapters.\n *\n * @returns Array of all registered adapters\n */\n getAllAdapters(): LanguageAdapter[] {\n return Array.from(this.adaptersByLanguageId.values());\n }\n\n /**\n * Check if an extension is registered.\n *\n * @param ext - File extension (with or without leading dot)\n * @returns true if the extension is registered\n */\n hasExtension(ext: string): boolean {\n const normalizedExt = this.normalizeExtension(ext);\n return this.adaptersByExtension.has(normalizedExt);\n }\n\n /**\n * Normalize extension to always have a leading dot.\n */\n private normalizeExtension(ext: string): string {\n return ext.startsWith('.') ? 
ext : `.${ext}`;\n }\n}\n","/**\n * Core logger factory using pino with file-based rolling logs\n *\n * Features:\n * - File-only output (no console pollution for Claude Code)\n * - Size-based rotation (10MB, keeps 5 files)\n * - LOG_LEVEL env var control (trace/debug/info/warn/error/fatal)\n * - Child loggers per module for context\n */\n\nimport { mkdirSync, existsSync } from 'node:fs';\nimport { join } from 'node:path';\nimport pino, { type Logger, type LoggerOptions } from 'pino';\nimport { ProjectRootService } from '../services/project-root.service.js';\n\n/** Valid log levels */\nexport type LogLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error' | 'fatal';\n\nconst VALID_LEVELS: readonly LogLevel[] = ['trace', 'debug', 'info', 'warn', 'error', 'fatal'];\nconst VALID_LEVELS_SET: ReadonlySet<string> = new Set(VALID_LEVELS);\n\n/** Default log directory under project root (per-repo) */\nfunction getLogDir(): string {\n const projectRoot = ProjectRootService.resolve();\n return join(projectRoot, '.bluera', 'bluera-knowledge', 'logs');\n}\n\n/** Resolve and create log directory - fails fast if cannot create */\nfunction ensureLogDir(): string {\n const logDir = getLogDir();\n if (!existsSync(logDir)) {\n mkdirSync(logDir, { recursive: true });\n }\n return logDir;\n}\n\n/** Check if a string is a valid log level */\nfunction isValidLogLevel(level: string): level is LogLevel {\n return VALID_LEVELS_SET.has(level);\n}\n\n/** Get log level from environment - fails fast on invalid value */\nfunction getLogLevel(): LogLevel {\n const level = process.env['LOG_LEVEL']?.toLowerCase();\n\n if (level === undefined || level === '') {\n return 'info';\n }\n\n if (!isValidLogLevel(level)) {\n throw new Error(`Invalid LOG_LEVEL: \"${level}\". Valid values: ${VALID_LEVELS.join(', ')}`);\n }\n\n return level;\n}\n\n/** Root logger instance - lazily initialized */\nlet rootLogger: Logger | null = null;\n\n/** Initialize the root logger with pino-roll transport */\nfunction initializeLogger(): Logger {\n if (rootLogger !== null) {\n return rootLogger;\n }\n\n const logDir = ensureLogDir();\n const logFile = join(logDir, 'app.log');\n const level = getLogLevel();\n\n const options: LoggerOptions = {\n level,\n timestamp: pino.stdTimeFunctions.isoTime,\n formatters: {\n level: (label) => ({ level: label }),\n },\n transport: {\n target: 'pino-roll',\n options: {\n file: logFile,\n size: '10m', // 10MB rotation\n limit: { count: 5 }, // Keep 5 rotated files\n mkdir: true,\n },\n },\n };\n\n rootLogger = pino(options);\n return rootLogger;\n}\n\n/**\n * Create a named child logger for a specific module\n *\n * @param module - Module name (e.g., 'crawler', 'mcp-server', 'search-service')\n * @returns Logger instance with module context\n *\n * @example\n * const logger = createLogger('crawler');\n * logger.info({ url }, 'Fetching page');\n */\nexport function createLogger(module: string): Logger {\n const root = initializeLogger();\n return root.child({ module });\n}\n\n/**\n * Get the current log level\n */\nexport function getCurrentLogLevel(): LogLevel {\n return getLogLevel();\n}\n\n/**\n * Check if a specific log level is enabled\n */\nexport function isLevelEnabled(level: LogLevel): boolean {\n const currentLevel = getLogLevel();\n const currentIndex = VALID_LEVELS.indexOf(currentLevel);\n const checkIndex = VALID_LEVELS.indexOf(level);\n return checkIndex >= currentIndex;\n}\n\n/**\n * Get the log directory path\n */\nexport function getLogDirectory(): string {\n return getLogDir();\n}\n\n/**\n * Flush 
and shutdown the logger - call before process exit\n */\nexport function shutdownLogger(): Promise<void> {\n return new Promise((resolve) => {\n if (rootLogger !== null) {\n rootLogger.flush();\n // Give time for async transport to flush\n setTimeout(() => {\n rootLogger = null;\n resolve();\n }, 100);\n } else {\n resolve();\n }\n });\n}\n","import { existsSync, statSync, realpathSync } from 'node:fs';\nimport { dirname, join, normalize, sep } from 'node:path';\n\nexport interface ProjectRootOptions {\n readonly projectRoot?: string | undefined;\n}\n\n/**\n * Service for resolving the project root directory using a hierarchical detection strategy.\n *\n * Resolution hierarchy:\n * 1. Explicit projectRoot option (highest priority)\n * 2. PROJECT_ROOT environment variable (set by plugin commands)\n * 3. Git root detection (walk up to find .git directory)\n * 4. PWD environment variable (set by MCP server and shells)\n * 5. process.cwd() (fallback)\n */\n// eslint-disable-next-line @typescript-eslint/no-extraneous-class\nexport class ProjectRootService {\n /**\n * Resolve project root directory using hierarchical detection.\n */\n static resolve(options?: ProjectRootOptions): string {\n // 1. Check explicit option first\n if (options?.projectRoot !== undefined && options.projectRoot !== '') {\n return this.normalize(options.projectRoot);\n }\n\n // 2. Check PROJECT_ROOT environment variable (plugin commands)\n const projectRootEnv = process.env['PROJECT_ROOT'];\n if (projectRootEnv !== undefined && projectRootEnv !== '') {\n return this.normalize(projectRootEnv);\n }\n\n // 3. Try git root detection (most reliable for repos)\n const gitRoot = this.findGitRoot(process.cwd());\n if (gitRoot !== null) {\n return gitRoot;\n }\n\n // 4. Check PWD environment variable (MCP server, shells) - only if not in a git repo\n const pwdEnv = process.env['PWD'];\n if (pwdEnv !== undefined && pwdEnv !== '') {\n return this.normalize(pwdEnv);\n }\n\n // 5. 
Fallback to process.cwd()\n return process.cwd();\n }\n\n /**\n * Find git repository root by walking up the directory tree looking for .git\n */\n static findGitRoot(startPath: string): string | null {\n let currentPath = normalize(startPath);\n const root = normalize(sep); // Root filesystem (/ on Unix, C:\\ on Windows)\n\n // Walk up directory tree\n while (currentPath !== root) {\n const gitPath = join(currentPath, '.git');\n\n if (existsSync(gitPath)) {\n try {\n const stats = statSync(gitPath);\n // .git can be a directory (normal repo) or file (submodule/worktree)\n if (stats.isDirectory() || stats.isFile()) {\n return currentPath;\n }\n } catch {\n // Ignore stat errors, continue searching\n }\n }\n\n // Move up one directory\n const parentPath = dirname(currentPath);\n if (parentPath === currentPath) {\n // Reached root without finding .git\n break;\n }\n currentPath = parentPath;\n }\n\n return null;\n }\n\n /**\n * Normalize path by resolving symlinks and normalizing separators\n */\n static normalize(path: string): string {\n try {\n // Resolve symlinks to real path\n const realPath = realpathSync(path);\n // Normalize separators\n return normalize(realPath);\n } catch {\n // If realpath fails (path doesn't exist), just normalize\n return normalize(path);\n }\n }\n\n /**\n * Validate that a path exists and is a directory\n */\n static validate(path: string): boolean {\n try {\n const stats = statSync(path);\n return stats.isDirectory();\n } catch {\n return false;\n }\n }\n}\n","/**\n * Large payload handling utilities for logging\n *\n * Handles large content (raw HTML, MCP responses) by:\n * - Truncating to preview in log entries\n * - Optionally dumping full content to separate files at trace level\n */\n\nimport { createHash } from 'node:crypto';\nimport { writeFileSync, mkdirSync, existsSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { getLogDirectory, isLevelEnabled } from './logger.js';\n\n/** Maximum characters for log preview */\nconst MAX_PREVIEW_LENGTH = 500;\n\n/** Minimum size to trigger payload dump (10KB) */\nconst PAYLOAD_DUMP_THRESHOLD = 10_000;\n\n/** Summary of a large payload for logging */\nexport interface PayloadSummary {\n /** Truncated preview of content */\n preview: string;\n /** Size in bytes */\n sizeBytes: number;\n /** Short hash for identification */\n hash: string;\n /** Filename if full content was dumped (trace level only) */\n payloadFile?: string;\n}\n\n/** Get the payload dump directory */\nfunction getPayloadDir(): string {\n const dir = join(getLogDirectory(), 'payload');\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n return dir;\n}\n\n/** Generate a safe filename from an identifier */\nfunction safeFilename(identifier: string): string {\n return identifier.replace(/[^a-zA-Z0-9-]/g, '_').substring(0, 50);\n}\n\n/**\n * Summarize a large payload for logging\n *\n * Creates a summary with:\n * - Truncated preview (first 500 chars)\n * - Size in bytes\n * - Short MD5 hash for identification\n * - Optional full dump to file at trace level\n *\n * @param content - The full content to summarize\n * @param type - Type identifier (e.g., 'raw-html', 'mcp-response')\n * @param identifier - Unique identifier (e.g., URL, query)\n * @param dumpFull - Whether to dump full content to file (default: trace level check)\n * @returns PayloadSummary for inclusion in log entry\n *\n * @example\n * logger.info({\n * url,\n * ...summarizePayload(html, 'raw-html', url),\n * }, 'Fetched HTML');\n */\nexport function 
summarizePayload(\n content: string,\n type: string,\n identifier: string,\n dumpFull: boolean = isLevelEnabled('trace')\n): PayloadSummary {\n const sizeBytes = Buffer.byteLength(content, 'utf8');\n const hash = createHash('md5').update(content).digest('hex').substring(0, 12);\n const preview = truncateForLog(content, MAX_PREVIEW_LENGTH);\n\n const baseSummary = { preview, sizeBytes, hash };\n\n // Dump full payload to file if enabled and above threshold\n if (dumpFull && sizeBytes > PAYLOAD_DUMP_THRESHOLD) {\n const timestamp = new Date().toISOString().replace(/[:.]/g, '-');\n const safeId = safeFilename(identifier);\n const filename = `${timestamp}-${type}-${safeId}-${hash}.json`;\n const filepath = join(getPayloadDir(), filename);\n\n writeFileSync(\n filepath,\n JSON.stringify(\n {\n timestamp: new Date().toISOString(),\n type,\n identifier,\n sizeBytes,\n content,\n },\n null,\n 2\n )\n );\n\n return { ...baseSummary, payloadFile: filename };\n }\n\n return baseSummary;\n}\n\n/**\n * Truncate content for logging with ellipsis indicator\n *\n * @param content - Content to truncate\n * @param maxLength - Maximum length (default: 500)\n * @returns Truncated string with '... [truncated]' if needed\n */\nexport function truncateForLog(content: string, maxLength: number = MAX_PREVIEW_LENGTH): string {\n if (content.length <= maxLength) {\n return content;\n }\n return `${content.substring(0, maxLength)}... [truncated]`;\n}\n","import { randomUUID } from 'crypto';\nimport fs from 'fs';\nimport path from 'path';\nimport { JobSchema } from '../types/job.js';\nimport { Result, ok, err } from '../types/result.js';\nimport { atomicWriteFileSync } from '../utils/atomic-write.js';\nimport type { Job, CreateJobParams, UpdateJobParams, JobStatus } from '../types/job.js';\n\nexport class JobService {\n private readonly jobsDir: string;\n\n constructor(dataDir?: string) {\n // Default to ~/.local/share/bluera-knowledge/jobs\n let baseDir: string;\n if (dataDir !== undefined) {\n baseDir = dataDir;\n } else {\n const homeDir = process.env['HOME'] ?? process.env['USERPROFILE'];\n if (homeDir === undefined) {\n throw new Error('HOME or USERPROFILE environment variable is required');\n }\n baseDir = path.join(homeDir, '.local/share/bluera-knowledge');\n }\n this.jobsDir = path.join(baseDir, 'jobs');\n\n // Ensure jobs directory exists\n if (!fs.existsSync(this.jobsDir)) {\n fs.mkdirSync(this.jobsDir, { recursive: true });\n }\n }\n\n /**\n * Create a new job\n */\n createJob(params: CreateJobParams): Job {\n const job: Job = {\n id: `job_${randomUUID().replace(/-/g, '').substring(0, 12)}`,\n type: params.type,\n status: 'pending',\n progress: 0,\n message: params.message ?? 
`${params.type} job created`,\n details: params.details,\n createdAt: new Date().toISOString(),\n updatedAt: new Date().toISOString(),\n };\n\n // Write job to file\n this.writeJob(job);\n\n return job;\n }\n\n /**\n * Update an existing job\n */\n updateJob(jobId: string, updates: UpdateJobParams): void {\n const job = this.getJob(jobId);\n\n if (!job) {\n throw new Error(`Job ${jobId} not found`);\n }\n\n // Merge updates\n if (updates.status !== undefined) {\n job.status = updates.status;\n }\n if (updates.progress !== undefined) {\n job.progress = updates.progress;\n }\n if (updates.message !== undefined) {\n job.message = updates.message;\n }\n if (updates.details !== undefined) {\n job.details = { ...job.details, ...updates.details };\n }\n\n job.updatedAt = new Date().toISOString();\n\n // Write updated job\n this.writeJob(job);\n }\n\n /**\n * Get a job by ID\n */\n getJob(jobId: string): Job | null {\n const jobFile = path.join(this.jobsDir, `${jobId}.json`);\n\n if (!fs.existsSync(jobFile)) {\n return null;\n }\n\n try {\n const content = fs.readFileSync(jobFile, 'utf-8');\n return JobSchema.parse(JSON.parse(content));\n } catch (error) {\n throw new Error(\n `Failed to read job ${jobId}: ${error instanceof Error ? error.message : String(error)}`\n );\n }\n }\n\n /**\n * List all jobs with optional status filter\n */\n listJobs(statusFilter?: JobStatus | JobStatus[]): Job[] {\n if (!fs.existsSync(this.jobsDir)) {\n return [];\n }\n\n const files = fs.readdirSync(this.jobsDir);\n const jobs: Job[] = [];\n\n for (const file of files) {\n if (!file.endsWith('.json') || file.endsWith('.pid')) {\n continue;\n }\n\n try {\n const content = fs.readFileSync(path.join(this.jobsDir, file), 'utf-8');\n const job = JobSchema.parse(JSON.parse(content));\n\n if (statusFilter !== undefined) {\n const filters = Array.isArray(statusFilter) ? statusFilter : [statusFilter];\n if (filters.includes(job.status)) {\n jobs.push(job);\n }\n } else {\n jobs.push(job);\n }\n } catch (error) {\n throw new Error(\n `Failed to read job file ${file}: ${error instanceof Error ? 
error.message : String(error)}`\n );\n }\n }\n\n // Sort by updated time (most recent first)\n jobs.sort((a, b) => new Date(b.updatedAt).getTime() - new Date(a.updatedAt).getTime());\n\n return jobs;\n }\n\n /**\n * List active jobs (pending or running)\n */\n listActiveJobs(): Job[] {\n return this.listJobs(['pending', 'running']);\n }\n\n /**\n * Cancel a job\n */\n cancelJob(jobId: string): Result<void> {\n const job = this.getJob(jobId);\n\n if (!job) {\n return err(new Error(`Job ${jobId} not found`));\n }\n\n if (job.status === 'completed' || job.status === 'failed') {\n return err(new Error(`Cannot cancel ${job.status} job`));\n }\n\n if (job.status === 'cancelled') {\n return ok(undefined);\n }\n\n // Update job status\n this.updateJob(jobId, {\n status: 'cancelled',\n message: 'Job cancelled by user',\n details: { cancelledAt: new Date().toISOString() },\n });\n\n // Kill worker process if it exists\n const pidFile = path.join(this.jobsDir, `${jobId}.pid`);\n if (fs.existsSync(pidFile)) {\n try {\n const pid = parseInt(fs.readFileSync(pidFile, 'utf-8'), 10);\n // Validate PID: must be positive integer > 0\n // PID 0 = sends to process group (DANGEROUS - kills terminal!)\n // Negative PIDs have special meanings in kill()\n if (!Number.isNaN(pid) && Number.isInteger(pid) && pid > 0) {\n process.kill(pid, 'SIGTERM');\n }\n } catch {\n // Process may have already exited, ignore\n }\n // Always delete the PID file, even if kill failed\n try {\n fs.unlinkSync(pidFile);\n } catch {\n // Ignore if file already deleted\n }\n }\n\n return ok(undefined);\n }\n\n /**\n * Clean up old completed/failed/cancelled jobs\n */\n cleanupOldJobs(olderThanHours: number = 24): number {\n const jobs = this.listJobs();\n const cutoffTime = Date.now() - olderThanHours * 60 * 60 * 1000;\n let cleaned = 0;\n\n for (const job of jobs) {\n if (\n (job.status === 'completed' || job.status === 'failed' || job.status === 'cancelled') &&\n new Date(job.updatedAt).getTime() < cutoffTime\n ) {\n const jobFile = path.join(this.jobsDir, `${job.id}.json`);\n try {\n fs.unlinkSync(jobFile);\n cleaned++;\n } catch (error) {\n throw new Error(\n `Failed to delete job file ${job.id}: ${error instanceof Error ? error.message : String(error)}`\n );\n }\n }\n }\n\n return cleaned;\n }\n\n /**\n * Clean up stale pending jobs that never started or got stuck\n *\n * @param olderThanHours - Consider pending jobs stale after this many hours (default 2)\n * @param options - Options for cleanup behavior\n * @param options.markAsFailed - If true, mark jobs as failed instead of deleting\n * @returns Number of jobs cleaned up or marked as failed\n */\n cleanupStalePendingJobs(\n olderThanHours: number = 2,\n options: { markAsFailed?: boolean } = {}\n ): number {\n const jobs = this.listJobs();\n const cutoffTime = Date.now() - olderThanHours * 60 * 60 * 1000;\n let cleaned = 0;\n\n for (const job of jobs) {\n if (job.status === 'pending' && new Date(job.updatedAt).getTime() < cutoffTime) {\n const jobFile = path.join(this.jobsDir, `${job.id}.json`);\n\n if (options.markAsFailed === true) {\n // Mark as failed instead of deleting\n this.updateJob(job.id, {\n status: 'failed',\n message: `Job marked as stale - pending for over ${String(olderThanHours)} hours without progress`,\n });\n } else {\n // Delete the job file\n try {\n fs.unlinkSync(jobFile);\n } catch (error) {\n throw new Error(\n `Failed to delete stale job ${job.id}: ${error instanceof Error ? 
error.message : String(error)}`\n );\n }\n }\n cleaned++;\n }\n }\n\n return cleaned;\n }\n\n /**\n * Delete a specific job\n */\n deleteJob(jobId: string): boolean {\n const jobFile = path.join(this.jobsDir, `${jobId}.json`);\n\n if (!fs.existsSync(jobFile)) {\n return false;\n }\n\n try {\n fs.unlinkSync(jobFile);\n return true;\n } catch (error) {\n throw new Error(\n `Failed to delete job ${jobId}: ${error instanceof Error ? error.message : String(error)}`\n );\n }\n }\n\n /**\n * Write job to file\n */\n private writeJob(job: Job): void {\n const jobFile = path.join(this.jobsDir, `${job.id}.json`);\n atomicWriteFileSync(jobFile, JSON.stringify(job, null, 2));\n }\n}\n","import { z } from 'zod';\n\n// ============================================================================\n// Zod Schemas\n// ============================================================================\n\nexport const JobTypeSchema = z.enum(['clone', 'index', 'crawl']);\nexport const JobStatusSchema = z.enum(['pending', 'running', 'completed', 'failed', 'cancelled']);\n\nexport const JobDetailsSchema = z.object({\n storeName: z.string().optional(),\n storeId: z.string().optional(),\n url: z.string().optional(),\n path: z.string().optional(),\n filesProcessed: z.number().optional(),\n totalFiles: z.number().optional(),\n startedAt: z.string().optional(),\n completedAt: z.string().optional(),\n cancelledAt: z.string().optional(),\n error: z.string().optional(),\n // Crawl-specific fields\n crawlInstruction: z.string().optional(),\n extractInstruction: z.string().optional(),\n maxPages: z.number().optional(),\n simple: z.boolean().optional(),\n useHeadless: z.boolean().optional(),\n pagesCrawled: z.number().optional(),\n});\n\nexport const JobSchema = z.object({\n id: z.string(),\n type: JobTypeSchema,\n status: JobStatusSchema,\n progress: z.number().min(0).max(100),\n message: z.string(),\n details: JobDetailsSchema.default({}),\n createdAt: z.string(),\n updatedAt: z.string(),\n});\n\n// ============================================================================\n// Types (inferred from schemas)\n// ============================================================================\n\nexport type JobType = z.infer<typeof JobTypeSchema>;\nexport type JobStatus = z.infer<typeof JobStatusSchema>;\nexport type JobDetails = z.infer<typeof JobDetailsSchema>;\nexport type Job = z.infer<typeof JobSchema>;\n\nexport interface CreateJobParams {\n type: JobType;\n details: JobDetails;\n message?: string;\n}\n\nexport interface UpdateJobParams {\n status?: JobStatus;\n progress?: number;\n message?: string;\n details?: Partial<JobDetails>;\n}\n","export type Result<T, E = Error> =\n | { readonly success: true; readonly data: T }\n | { readonly success: false; readonly error: E };\n\nexport function ok<T>(data: T): Result<T, never> {\n return { success: true, data };\n}\n\nexport function err<E>(error: E): Result<never, E> {\n return { success: false, error };\n}\n\nexport function isOk<T, E>(result: Result<T, E>): result is { success: true; data: T } {\n return result.success;\n}\n\nexport function isErr<T, E>(result: Result<T, E>): result is { success: false; error: E } {\n return !result.success;\n}\n\nexport function unwrap<T, E>(result: Result<T, E>): T {\n if (isOk(result)) {\n return result.data;\n }\n // Type guard ensures result has 'error' property\n if (isErr(result)) {\n if (result.error instanceof Error) {\n throw result.error;\n }\n throw new Error(String(result.error));\n }\n // This should never happen due to 
discriminated union\n throw new Error('Invalid result type');\n}\n\nexport function unwrapOr<T, E>(result: Result<T, E>, defaultValue: T): T {\n if (isOk(result)) {\n return result.data;\n }\n return defaultValue;\n}\n","import { writeFileSync, renameSync, mkdirSync } from 'node:fs';\nimport { writeFile, rename, mkdir } from 'node:fs/promises';\nimport { dirname } from 'node:path';\n\n/**\n * Atomically write content to a file.\n *\n * Writes to a temporary file first, then renames it to the target path.\n * The rename operation is atomic on POSIX systems, ensuring that the file\n * is never in a partially-written state even if the process crashes.\n *\n * @param filePath - The target file path\n * @param content - The content to write\n */\nexport async function atomicWriteFile(filePath: string, content: string): Promise<void> {\n // Ensure parent directory exists\n await mkdir(dirname(filePath), { recursive: true });\n\n const tempPath = `${filePath}.tmp.${String(Date.now())}.${String(process.pid)}`;\n await writeFile(tempPath, content, 'utf-8');\n await rename(tempPath, filePath);\n}\n\n/**\n * Synchronously and atomically write content to a file.\n *\n * Writes to a temporary file first, then renames it to the target path.\n * The rename operation is atomic on POSIX systems, ensuring that the file\n * is never in a partially-written state even if the process crashes.\n *\n * @param filePath - The target file path\n * @param content - The content to write\n */\nexport function atomicWriteFileSync(filePath: string, content: string): void {\n // Ensure parent directory exists\n mkdirSync(dirname(filePath), { recursive: true });\n\n const tempPath = `${filePath}.tmp.${String(Date.now())}.${String(process.pid)}`;\n writeFileSync(tempPath, content, 'utf-8');\n renameSync(tempPath, filePath);\n}\n","import { readFile, writeFile, mkdir, rm } from 'node:fs/promises';\nimport { join, dirname } from 'node:path';\nimport { ASTParser } from '../analysis/ast-parser.js';\nimport { CodeGraph, type GraphNode } from '../analysis/code-graph.js';\nimport { GoASTParser } from '../analysis/go-ast-parser.js';\nimport { ParserFactory } from '../analysis/parser-factory.js';\nimport { RustASTParser } from '../analysis/rust-ast-parser.js';\nimport type { PythonBridge } from '../crawl/bridge.js';\nimport type { StoreId } from '../types/brands.js';\nimport type { CacheInvalidationEvent, CacheInvalidationListener } from '../types/cache-events.js';\n\ninterface SerializedGraph {\n nodes: Array<{\n id: string;\n file: string;\n type: string;\n name: string;\n exported: boolean;\n startLine: number;\n endLine: number;\n signature?: string;\n }>;\n edges: Array<{\n from: string;\n to: string;\n type: string;\n confidence: number;\n }>;\n}\n\n/**\n * Service for building, persisting, and querying code graphs.\n * Code graphs track relationships between code elements (functions, classes, etc.)\n * for enhanced search context.\n */\nexport class CodeGraphService {\n private readonly dataDir: string;\n private readonly parser: ASTParser;\n private readonly parserFactory: ParserFactory;\n private readonly graphCache: Map<string, CodeGraph>;\n private readonly cacheListeners: Set<CacheInvalidationListener>;\n\n constructor(dataDir: string, pythonBridge?: PythonBridge) {\n this.dataDir = dataDir;\n this.parser = new ASTParser();\n this.parserFactory = new ParserFactory(pythonBridge);\n this.graphCache = new Map();\n this.cacheListeners = new Set();\n }\n\n /**\n * Subscribe to cache invalidation events.\n * Returns an 
unsubscribe function.\n */\n onCacheInvalidation(listener: CacheInvalidationListener): () => void {\n this.cacheListeners.add(listener);\n return () => {\n this.cacheListeners.delete(listener);\n };\n }\n\n /**\n * Emit a cache invalidation event to all listeners.\n */\n private emitCacheInvalidation(event: CacheInvalidationEvent): void {\n for (const listener of this.cacheListeners) {\n listener(event);\n }\n }\n\n /**\n * Build a code graph from source files.\n */\n async buildGraph(files: Array<{ path: string; content: string }>): Promise<CodeGraph> {\n const graph = new CodeGraph();\n\n for (const file of files) {\n const ext = file.path.split('.').pop() ?? '';\n if (!['ts', 'tsx', 'js', 'jsx', 'py', 'rs', 'go'].includes(ext)) continue;\n\n // Parse nodes (functions, classes, etc.) using the factory\n const nodes = await this.parserFactory.parseFile(file.path, file.content);\n graph.addNodes(nodes, file.path);\n\n // Parse imports and add edges\n if (ext === 'rs') {\n // Use RustASTParser for Rust imports\n const rustParser = new RustASTParser();\n const imports = rustParser.extractImports(file.content);\n for (const imp of imports) {\n if (!imp.isType) {\n graph.addImport(file.path, imp.source, imp.specifiers);\n }\n }\n } else if (ext === 'go') {\n // Use GoASTParser for Go imports\n const goParser = new GoASTParser();\n const imports = goParser.extractImports(file.content);\n for (const imp of imports) {\n if (!imp.isType) {\n graph.addImport(file.path, imp.source, imp.specifiers);\n }\n }\n } else if (ext !== 'py') {\n // Use ASTParser for JS/TS imports (Python imports handled by Python parser)\n const imports = this.parser.extractImports(file.content);\n for (const imp of imports) {\n if (!imp.isType) {\n graph.addImport(file.path, imp.source, imp.specifiers);\n }\n }\n }\n\n // Analyze call relationships for each function/method\n for (const node of nodes) {\n const lines = file.content.split('\\n');\n\n if (node.type === 'function') {\n // Extract the function body for call analysis\n const functionCode = lines.slice(node.startLine - 1, node.endLine).join('\\n');\n graph.analyzeCallRelationships(functionCode, file.path, node.name);\n } else if (node.type === 'class' && node.methods !== undefined) {\n // For classes, analyze each method separately\n for (const method of node.methods) {\n const methodCode = lines.slice(method.startLine - 1, method.endLine).join('\\n');\n graph.analyzeCallRelationships(methodCode, file.path, `${node.name}.${method.name}`);\n }\n }\n }\n }\n\n return graph;\n }\n\n /**\n * Save a code graph for a store.\n */\n async saveGraph(storeId: StoreId, graph: CodeGraph): Promise<void> {\n const graphPath = this.getGraphPath(storeId);\n await mkdir(dirname(graphPath), { recursive: true });\n\n const serialized = graph.toJSON();\n await writeFile(graphPath, JSON.stringify(serialized, null, 2));\n\n // Notify listeners that the graph has been updated\n this.emitCacheInvalidation({ type: 'graph-updated', storeId });\n }\n\n /**\n * Delete the code graph file for a store.\n * Silently succeeds if the file doesn't exist.\n */\n async deleteGraph(storeId: StoreId): Promise<void> {\n const graphPath = this.getGraphPath(storeId);\n await rm(graphPath, { force: true });\n this.graphCache.delete(storeId);\n\n // Notify listeners that the graph has been deleted\n this.emitCacheInvalidation({ type: 'graph-deleted', storeId });\n }\n\n /**\n * Load a code graph for a store.\n * Returns undefined if no graph exists.\n */\n async loadGraph(storeId: StoreId): 
Promise<CodeGraph | undefined> {\n // Check cache first\n const cached = this.graphCache.get(storeId);\n if (cached) return cached;\n\n const graphPath = this.getGraphPath(storeId);\n\n try {\n const content = await readFile(graphPath, 'utf-8');\n const parsed: unknown = JSON.parse(content);\n\n // Validate structure\n if (!this.isSerializedGraph(parsed)) {\n return undefined;\n }\n\n const serialized = parsed;\n const graph = new CodeGraph();\n\n // Restore nodes\n for (const node of serialized.nodes) {\n const nodeType = this.validateNodeType(node.type);\n if (!nodeType) continue;\n\n // Method nodes are added directly to the graph since they're already separate nodes\n if (nodeType === 'method') {\n const graphNode: GraphNode = {\n id: node.id,\n file: node.file,\n type: 'method',\n name: node.name,\n exported: node.exported,\n startLine: node.startLine,\n endLine: node.endLine,\n };\n if (node.signature !== undefined) {\n graphNode.signature = node.signature;\n }\n graph.addGraphNode(graphNode);\n continue;\n }\n\n const codeNode: {\n type: 'function' | 'class' | 'interface' | 'type' | 'const';\n name: string;\n exported: boolean;\n startLine: number;\n endLine: number;\n signature?: string;\n } = {\n type: nodeType,\n name: node.name,\n exported: node.exported,\n startLine: node.startLine,\n endLine: node.endLine,\n };\n if (node.signature !== undefined) {\n codeNode.signature = node.signature;\n }\n graph.addNodes([codeNode], node.file);\n }\n\n // Restore edges\n for (const edge of serialized.edges) {\n const edgeType = this.validateEdgeType(edge.type);\n if (!edgeType) continue;\n\n graph.addEdge({\n from: edge.from,\n to: edge.to,\n type: edgeType,\n confidence: edge.confidence,\n });\n }\n\n this.graphCache.set(storeId, graph);\n return graph;\n } catch {\n return undefined;\n }\n }\n\n /**\n * Get usage stats for a code element.\n */\n getUsageStats(\n graph: CodeGraph,\n filePath: string,\n symbolName: string\n ): { calledBy: number; calls: number } {\n const nodeId = `${filePath}:${symbolName}`;\n return {\n calledBy: graph.getCalledByCount(nodeId),\n calls: graph.getCallsCount(nodeId),\n };\n }\n\n /**\n * Get related code (callers and callees) for a code element.\n */\n getRelatedCode(\n graph: CodeGraph,\n filePath: string,\n symbolName: string\n ): Array<{ id: string; relationship: string }> {\n const nodeId = `${filePath}:${symbolName}`;\n const related: Array<{ id: string; relationship: string }> = [];\n\n // Get callers (incoming call edges)\n const incoming = graph.getIncomingEdges(nodeId);\n for (const edge of incoming) {\n if (edge.type === 'calls') {\n related.push({ id: edge.from, relationship: 'calls this' });\n }\n }\n\n // Get callees (outgoing call edges)\n const outgoing = graph.getEdges(nodeId);\n for (const edge of outgoing) {\n if (edge.type === 'calls') {\n related.push({ id: edge.to, relationship: 'called by this' });\n }\n }\n\n return related;\n }\n\n /**\n * Clear cached graphs.\n */\n clearCache(): void {\n this.graphCache.clear();\n }\n\n private getGraphPath(storeId: StoreId): string {\n return join(this.dataDir, 'graphs', `${storeId}.json`);\n }\n\n /**\n * Type guard for SerializedGraph structure.\n */\n private isSerializedGraph(value: unknown): value is SerializedGraph {\n if (typeof value !== 'object' || value === null) return false;\n // Use 'in' operator for property checking\n if (!('nodes' in value) || !('edges' in value)) return false;\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions -- narrowed via 'in' checks 
above\n const obj = value as { nodes: unknown; edges: unknown };\n return Array.isArray(obj.nodes) && Array.isArray(obj.edges);\n }\n\n /**\n * Type guard for valid node types.\n */\n private isValidNodeType(\n type: string\n ): type is 'function' | 'class' | 'interface' | 'type' | 'const' | 'method' {\n return ['function', 'class', 'interface', 'type', 'const', 'method'].includes(type);\n }\n\n /**\n * Validate and return a node type, or undefined if invalid.\n */\n private validateNodeType(\n type: string\n ): 'function' | 'class' | 'interface' | 'type' | 'const' | 'method' | undefined {\n if (this.isValidNodeType(type)) {\n return type;\n }\n return undefined;\n }\n\n /**\n * Type guard for valid edge types.\n */\n private isValidEdgeType(type: string): type is 'calls' | 'imports' | 'extends' | 'implements' {\n return ['calls', 'imports', 'extends', 'implements'].includes(type);\n }\n\n /**\n * Validate and return an edge type, or undefined if invalid.\n */\n private validateEdgeType(\n type: string\n ): 'calls' | 'imports' | 'extends' | 'implements' | undefined {\n if (this.isValidEdgeType(type)) {\n return type;\n }\n return undefined;\n }\n}\n","import { parse, type ParserPlugin } from '@babel/parser';\nimport traverseModule from '@babel/traverse';\nimport * as t from '@babel/types';\nimport type { NodePath } from '@babel/traverse';\n\n// Handle both ESM and CJS module formats\ntype TraverseFunction = (ast: t.File, visitor: Record<string, unknown>) => void;\nfunction getTraverse(mod: unknown): TraverseFunction {\n if (typeof mod === 'function') {\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n return mod as TraverseFunction;\n }\n if (mod !== null && typeof mod === 'object' && 'default' in mod) {\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n const withDefault = mod as { default: unknown };\n if (typeof withDefault.default === 'function') {\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n return withDefault.default as TraverseFunction;\n }\n }\n throw new Error('Invalid traverse module export');\n}\nconst traverse = getTraverse(traverseModule);\n\nexport interface CodeNode {\n type: 'function' | 'class' | 'interface' | 'type' | 'const';\n name: string;\n exported: boolean;\n async?: boolean;\n startLine: number;\n endLine: number;\n signature?: string;\n methods?: Array<{\n name: string;\n async: boolean;\n signature: string;\n startLine: number;\n endLine: number;\n }>;\n}\n\nexport interface ImportInfo {\n source: string;\n specifiers: string[];\n isType: boolean;\n}\n\nexport class ASTParser {\n parse(code: string, language: 'typescript' | 'javascript'): CodeNode[] {\n try {\n const plugins: ParserPlugin[] = ['jsx'];\n if (language === 'typescript') {\n plugins.push('typescript');\n }\n\n const ast = parse(code, {\n sourceType: 'module',\n plugins,\n });\n\n const nodes: CodeNode[] = [];\n\n traverse(ast, {\n FunctionDeclaration: (path: NodePath<t.FunctionDeclaration>) => {\n const node = path.node;\n if (!node.id) return;\n\n const exported =\n path.parent.type === 'ExportNamedDeclaration' ||\n path.parent.type === 'ExportDefaultDeclaration';\n\n nodes.push({\n type: 'function',\n name: node.id.name,\n exported,\n async: node.async,\n startLine: node.loc?.start.line ?? 0,\n endLine: node.loc?.end.line ?? 
0,\n signature: this.extractFunctionSignature(node),\n });\n },\n\n ClassDeclaration: (path: NodePath<t.ClassDeclaration>) => {\n const node = path.node;\n if (!node.id) return;\n\n const exported =\n path.parent.type === 'ExportNamedDeclaration' ||\n path.parent.type === 'ExportDefaultDeclaration';\n\n const methods: CodeNode['methods'] = [];\n\n for (const member of node.body.body) {\n if (t.isClassMethod(member) && t.isIdentifier(member.key)) {\n methods.push({\n name: member.key.name,\n async: member.async,\n signature: this.extractMethodSignature(member),\n startLine: member.loc?.start.line ?? 0,\n endLine: member.loc?.end.line ?? 0,\n });\n }\n }\n\n nodes.push({\n type: 'class',\n name: node.id.name,\n exported,\n startLine: node.loc?.start.line ?? 0,\n endLine: node.loc?.end.line ?? 0,\n methods,\n });\n },\n\n TSInterfaceDeclaration: (path: NodePath<t.TSInterfaceDeclaration>) => {\n const node = path.node;\n\n const exported = path.parent.type === 'ExportNamedDeclaration';\n\n nodes.push({\n type: 'interface',\n name: node.id.name,\n exported,\n startLine: node.loc?.start.line ?? 0,\n endLine: node.loc?.end.line ?? 0,\n });\n },\n });\n\n return nodes;\n } catch {\n // Return empty array for malformed code\n return [];\n }\n }\n\n extractImports(code: string): ImportInfo[] {\n try {\n const ast = parse(code, {\n sourceType: 'module',\n plugins: ['typescript', 'jsx'],\n });\n\n const imports: ImportInfo[] = [];\n\n traverse(ast, {\n ImportDeclaration: (path: NodePath<t.ImportDeclaration>) => {\n const node = path.node;\n const specifiers: string[] = [];\n\n for (const spec of node.specifiers) {\n if (t.isImportDefaultSpecifier(spec)) {\n specifiers.push(spec.local.name);\n } else if (t.isImportSpecifier(spec)) {\n specifiers.push(spec.local.name);\n } else if (t.isImportNamespaceSpecifier(spec)) {\n specifiers.push(spec.local.name);\n }\n }\n\n imports.push({\n source: node.source.value,\n specifiers,\n isType: node.importKind === 'type',\n });\n },\n });\n\n return imports;\n } catch {\n // Return empty array for malformed code\n return [];\n }\n }\n\n private extractFunctionSignature(node: t.FunctionDeclaration): string {\n const params = node.params\n .map((p) => {\n if (t.isIdentifier(p)) return p.name;\n return 'param';\n })\n .join(', ');\n\n return `${node.id?.name ?? 'anonymous'}(${params})`;\n }\n\n private extractMethodSignature(node: t.ClassMethod): string {\n const params = node.params\n .map((p) => {\n if (t.isIdentifier(p)) return p.name;\n return 'param';\n })\n .join(', ');\n\n const name = t.isIdentifier(node.key) ? 
node.key.name : 'method';\n return `${name}(${params})`;\n }\n}\n","import type { CodeNode } from './ast-parser.js';\n\nexport interface GraphNode {\n id: string;\n file: string;\n type: 'function' | 'class' | 'interface' | 'type' | 'const' | 'method';\n name: string;\n exported: boolean;\n startLine: number;\n endLine: number;\n signature?: string;\n}\n\nexport interface GraphEdge {\n from: string;\n to: string;\n type: 'calls' | 'imports' | 'extends' | 'implements';\n confidence: number;\n}\n\nexport class CodeGraph {\n private readonly nodes: Map<string, GraphNode> = new Map<string, GraphNode>();\n private readonly edges: Map<string, GraphEdge[]> = new Map<string, GraphEdge[]>();\n\n addNodes(nodes: CodeNode[], file: string): void {\n for (const node of nodes) {\n const id = `${file}:${node.name}`;\n\n const graphNode: GraphNode = {\n id,\n file,\n type: node.type,\n name: node.name,\n exported: node.exported,\n startLine: node.startLine,\n endLine: node.endLine,\n };\n\n if (node.signature !== undefined) {\n graphNode.signature = node.signature;\n }\n\n this.nodes.set(id, graphNode);\n\n // Initialize edges array for this node\n if (!this.edges.has(id)) {\n this.edges.set(id, []);\n }\n\n // If this is a class with methods, create separate nodes for each method\n if (node.type === 'class' && node.methods !== undefined) {\n for (const method of node.methods) {\n const methodId = `${file}:${node.name}.${method.name}`;\n\n const methodNode: GraphNode = {\n id: methodId,\n file,\n type: 'method',\n name: method.name,\n exported: node.exported, // Methods inherit export status from class\n startLine: method.startLine,\n endLine: method.endLine,\n signature: method.signature,\n };\n\n this.nodes.set(methodId, methodNode);\n\n // Initialize edges array for this method\n if (!this.edges.has(methodId)) {\n this.edges.set(methodId, []);\n }\n }\n }\n }\n }\n\n addImport(fromFile: string, toFile: string, specifiers: string[]): void {\n // Normalize the toFile path (resolve relative imports)\n const resolvedTo = this.resolveImportPath(fromFile, toFile);\n\n for (const spec of specifiers) {\n const edge: GraphEdge = {\n from: fromFile,\n to: `${resolvedTo}:${spec}`,\n type: 'imports',\n confidence: 1.0,\n };\n\n const edges = this.edges.get(fromFile) ?? [];\n edges.push(edge);\n this.edges.set(fromFile, edges);\n }\n }\n\n analyzeCallRelationships(code: string, file: string, functionName: string): void {\n const nodeId = `${file}:${functionName}`;\n\n // Simple regex-based call detection (can be enhanced with AST later)\n const callPattern = /\\b([a-zA-Z_$][a-zA-Z0-9_$]*)\\s*\\(/g;\n const calls = new Set<string>();\n\n let match;\n while ((match = callPattern.exec(code)) !== null) {\n if (match[1] !== undefined && match[1] !== '') {\n calls.add(match[1]);\n }\n }\n\n const edges = this.edges.get(nodeId) ?? [];\n\n for (const calledFunction of calls) {\n // Try to find the called function in the graph\n const targetNode = this.findNodeByName(calledFunction);\n\n if (targetNode) {\n edges.push({\n from: nodeId,\n to: targetNode.id,\n type: 'calls',\n confidence: 0.8, // Lower confidence for regex-based detection\n });\n } else {\n // Unknown function, possibly from import\n edges.push({\n from: nodeId,\n to: `unknown:${calledFunction}`,\n type: 'calls',\n confidence: 0.5,\n });\n }\n }\n\n this.edges.set(nodeId, edges);\n }\n\n getNode(id: string): GraphNode | undefined {\n return this.nodes.get(id);\n }\n\n getEdges(nodeId: string): GraphEdge[] {\n return this.edges.get(nodeId) ?? 
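/* nodes with no recorded edges return an empty list rather than undefined */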
[];\n }\n\n /**\n * Add an edge to the graph (used when restoring from serialized data)\n */\n addEdge(edge: GraphEdge): void {\n const edges = this.edges.get(edge.from) ?? [];\n edges.push(edge);\n this.edges.set(edge.from, edges);\n }\n\n /**\n * Add a graph node directly (used when restoring from serialized data)\n */\n addGraphNode(node: GraphNode): void {\n this.nodes.set(node.id, node);\n\n // Initialize edges array for this node if it doesn't exist\n if (!this.edges.has(node.id)) {\n this.edges.set(node.id, []);\n }\n }\n\n /**\n * Get edges where this node is the target (callers of this function)\n */\n getIncomingEdges(nodeId: string): GraphEdge[] {\n const incoming: GraphEdge[] = [];\n for (const edges of this.edges.values()) {\n for (const edge of edges) {\n if (edge.to === nodeId) {\n incoming.push(edge);\n }\n }\n }\n return incoming;\n }\n\n /**\n * Count how many nodes call this node\n */\n getCalledByCount(nodeId: string): number {\n return this.getIncomingEdges(nodeId).filter((e) => e.type === 'calls').length;\n }\n\n /**\n * Count how many nodes this node calls\n */\n getCallsCount(nodeId: string): number {\n return this.getEdges(nodeId).filter((e) => e.type === 'calls').length;\n }\n\n getAllNodes(): GraphNode[] {\n return Array.from(this.nodes.values());\n }\n\n private findNodeByName(name: string): GraphNode | undefined {\n for (const node of this.nodes.values()) {\n if (node.name === name) {\n return node;\n }\n }\n return undefined;\n }\n\n private resolveImportPath(fromFile: string, importPath: string): string {\n // Simple resolution - can be enhanced\n if (importPath.startsWith('.')) {\n // Relative import\n const fromDir = fromFile.split('/').slice(0, -1).join('/');\n const parts = importPath.split('/');\n\n let resolved = fromDir;\n for (const part of parts) {\n if (part === '..') {\n resolved = resolved.split('/').slice(0, -1).join('/');\n } else if (part !== '.') {\n resolved += `/${part}`;\n }\n }\n\n return resolved.replace(/\\.js$/, '');\n }\n\n // Package import\n return importPath;\n }\n\n toJSON(): {\n nodes: GraphNode[];\n edges: Array<{ from: string; to: string; type: string; confidence: number }>;\n } {\n const allEdges: GraphEdge[] = [];\n for (const edges of this.edges.values()) {\n allEdges.push(...edges);\n }\n\n return {\n nodes: Array.from(this.nodes.values()),\n edges: allEdges.map((e) => ({\n from: e.from,\n to: e.to,\n type: e.type,\n confidence: e.confidence,\n })),\n };\n }\n}\n","/**\n * Tree-sitter infrastructure for parsing Rust and Go code.\n * Provides utilities for AST traversal, querying, and position conversion.\n *\n * NOTE: tree-sitter requires native module compilation which may not be available\n * on all platforms (e.g., macOS with newer Node versions). Use isTreeSitterAvailable()\n * to check before calling parsing functions.\n */\n\nimport type Parser from 'tree-sitter';\n\n// Lazy-loaded tree-sitter modules (native module may not be available on all platforms)\nlet TreeSitterParser: typeof Parser | null = null;\nlet GoLanguage: Parser.Language | null = null;\nlet RustLanguage: Parser.Language | null = null;\nlet _initialized = false;\nlet _available = false;\n\n/**\n * Reset internal state for testing. 
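Passing forceUnavailable=true marks tree-sitter as initialized but unavailable, simulating a platform where the native module cannot be loaded.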
DO NOT USE IN PRODUCTION CODE.\n * @internal\n */\nexport function _resetTreeSitterState(forceUnavailable = false): void {\n TreeSitterParser = null;\n GoLanguage = null;\n RustLanguage = null;\n _initialized = forceUnavailable;\n _available = false;\n}\n\n/**\n * Check if tree-sitter native module is available on this platform.\n * Call this before using any tree-sitter parsing functions.\n */\nexport function isTreeSitterAvailable(): boolean {\n if (!_initialized) {\n try {\n // Dynamic require for native modules that may not be available\n // eslint-disable-next-line @typescript-eslint/no-require-imports, @typescript-eslint/no-unsafe-assignment -- Lazy load native module\n TreeSitterParser = require('tree-sitter');\n // eslint-disable-next-line @typescript-eslint/no-require-imports, @typescript-eslint/no-unsafe-assignment -- Lazy load native module\n GoLanguage = require('tree-sitter-go');\n // eslint-disable-next-line @typescript-eslint/no-require-imports, @typescript-eslint/no-unsafe-assignment -- Lazy load native module\n RustLanguage = require('tree-sitter-rust');\n _available = true;\n } catch {\n // Native module not available (e.g., no prebuilds for darwin-arm64)\n _available = false;\n }\n _initialized = true;\n }\n return _available;\n}\n\nexport interface TreeSitterPosition {\n row: number;\n column: number;\n}\n\nexport interface TreeSitterNode {\n type: string;\n text: string;\n startPosition: TreeSitterPosition;\n endPosition: TreeSitterPosition;\n startIndex: number;\n endIndex: number;\n childCount: number;\n namedChildCount: number;\n children: TreeSitterNode[];\n namedChildren: TreeSitterNode[];\n parent: TreeSitterNode | null;\n nextSibling: TreeSitterNode | null;\n previousSibling: TreeSitterNode | null;\n firstChild: TreeSitterNode | null;\n lastChild: TreeSitterNode | null;\n firstNamedChild: TreeSitterNode | null;\n lastNamedChild: TreeSitterNode | null;\n child(index: number): TreeSitterNode | null;\n namedChild(index: number): TreeSitterNode | null;\n childForFieldName(fieldName: string): TreeSitterNode | null;\n descendantsOfType(type: string | string[]): TreeSitterNode[];\n}\n\nexport interface TreeSitterTree {\n rootNode: TreeSitterNode;\n edit(delta: unknown): void;\n walk(): unknown;\n}\n\n/**\n * Initialize a tree-sitter parser for Rust.\n * Returns null if tree-sitter is not available.\n */\nexport function createRustParser(): Parser | null {\n if (!isTreeSitterAvailable() || TreeSitterParser === null || RustLanguage === null) {\n return null;\n }\n // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call -- Dynamic native module\n const parser: Parser = new TreeSitterParser();\n parser.setLanguage(RustLanguage);\n return parser;\n}\n\n/**\n * Parse Rust source code into an AST.\n * Returns null if tree-sitter is not available or code is malformed.\n */\nexport function parseRustCode(code: string): TreeSitterTree | null {\n try {\n const parser = createRustParser();\n if (parser === null) {\n return null;\n }\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return -- tree-sitter returns compatible type\n return parser.parse(code);\n } catch {\n // Return null for malformed code\n return null;\n }\n}\n\n/**\n * Initialize a tree-sitter parser for Go.\n * Returns null if tree-sitter is not available.\n */\nexport function createGoParser(): Parser | null {\n if (!isTreeSitterAvailable() || TreeSitterParser === null || GoLanguage === null) {\n return null;\n }\n // eslint-disable-next-line 
@typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call -- Dynamic native module\n const parser: Parser = new TreeSitterParser();\n parser.setLanguage(GoLanguage);\n return parser;\n}\n\n/**\n * Parse Go source code into an AST.\n * Returns null if tree-sitter is not available or code is malformed.\n */\nexport function parseGoCode(code: string): TreeSitterTree | null {\n try {\n const parser = createGoParser();\n if (parser === null) {\n return null;\n }\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return -- tree-sitter returns compatible type\n return parser.parse(code);\n } catch {\n // Return null for malformed code\n return null;\n }\n}\n\n/**\n * Convert tree-sitter position (0-indexed) to line number (1-indexed)\n */\nexport function positionToLineNumber(position: TreeSitterPosition): number {\n return position.row + 1;\n}\n\n/**\n * Get the text content of a node\n */\nexport function getNodeText(node: TreeSitterNode): string {\n return node.text;\n}\n\n/**\n * Get all children of a specific type\n */\nexport function getChildrenOfType(node: TreeSitterNode, type: string): TreeSitterNode[] {\n return node.children.filter((child) => child.type === type);\n}\n\n/**\n * Get the first child of a specific type\n */\nexport function getFirstChildOfType(node: TreeSitterNode, type: string): TreeSitterNode | null {\n return node.children.find((child) => child.type === type) ?? null;\n}\n\n/**\n * Get child by field name (e.g., \"name\", \"body\", \"parameters\")\n */\nexport function getChildByFieldName(\n node: TreeSitterNode,\n fieldName: string\n): TreeSitterNode | null {\n return node.childForFieldName(fieldName);\n}\n\n/**\n * Check if node has a visibility modifier (pub)\n */\nexport function hasVisibilityModifier(node: TreeSitterNode): boolean {\n return node.children.some((child) => child.type === 'visibility_modifier');\n}\n\n/**\n * Get visibility modifier text (e.g., \"pub\", \"pub(crate)\")\n */\nexport function getVisibilityModifier(node: TreeSitterNode): string | null {\n const visNode = node.children.find((child) => child.type === 'visibility_modifier');\n return visNode !== undefined ? 
visNode.text : null;\n}\n\n/**\n * Check if a function is async\n */\nexport function isAsyncFunction(node: TreeSitterNode): boolean {\n // Check for 'async' keyword in function_item or function_signature_item\n return node.children.some((child) => child.type === 'async' || child.text === 'async');\n}\n\n/**\n * Check if a function is unsafe\n */\nexport function isUnsafeFunction(node: TreeSitterNode): boolean {\n return node.children.some((child) => child.type === 'unsafe' || child.text === 'unsafe');\n}\n\n/**\n * Extract function signature including generics and parameters\n */\nexport function getFunctionSignature(node: TreeSitterNode): string {\n // Extract the full signature by getting text from name to return type\n const nameNode = getChildByFieldName(node, 'name');\n const parametersNode = getChildByFieldName(node, 'parameters');\n const returnTypeNode = getChildByFieldName(node, 'return_type');\n const typeParametersNode = getChildByFieldName(node, 'type_parameters');\n\n if (nameNode === null) {\n return '';\n }\n\n let signature = nameNode.text;\n\n // Add type parameters (generics)\n if (typeParametersNode !== null) {\n signature += typeParametersNode.text;\n }\n\n // Add parameters\n if (parametersNode !== null) {\n signature += parametersNode.text;\n }\n\n // Add return type\n if (returnTypeNode !== null) {\n signature += ` ${returnTypeNode.text}`;\n }\n\n return signature;\n}\n\n/**\n * Query nodes of specific type from the tree\n * @param tree The tree-sitter tree\n * @param nodeType The type of nodes to find (e.g., 'function_item', 'struct_item')\n * @returns Array of matching nodes\n */\nexport function queryNodesByType(\n tree: TreeSitterTree,\n nodeType: string | string[]\n): TreeSitterNode[] {\n const types = Array.isArray(nodeType) ? 
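/* accept a single type or a list; normalize so descendantsOfType always receives an array */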
nodeType : [nodeType];\n return tree.rootNode.descendantsOfType(types);\n}\n\n/**\n * Extract use statement import path\n */\nexport function extractImportPath(useNode: TreeSitterNode): string {\n // Get the use_declaration argument\n const argumentNode = getChildByFieldName(useNode, 'argument');\n if (argumentNode === null) {\n return '';\n }\n return argumentNode.text;\n}\n","import {\n parseGoCode,\n queryNodesByType,\n positionToLineNumber,\n getChildByFieldName,\n getFunctionSignature,\n getFirstChildOfType,\n type TreeSitterNode,\n type TreeSitterTree,\n} from './tree-sitter-parser.js';\nimport type { CodeNode, ImportInfo } from './ast-parser.js';\n\n/**\n * Parser for Go code using tree-sitter\n * Extracts functions, methods, structs, interfaces, types, constants, and imports\n */\nexport class GoASTParser {\n /**\n * Parse Go code into CodeNode array\n * @param code Go source code\n * @param filePath File path for error context\n * @returns Array of CodeNode objects representing Go constructs\n */\n parse(code: string, _filePath: string): CodeNode[] {\n try {\n const tree = parseGoCode(code);\n if (tree === null) {\n // Malformed code - return empty array\n return [];\n }\n\n const nodes: CodeNode[] = [];\n\n // Parse functions\n const functions = this.parseFunctions(tree);\n nodes.push(...functions);\n\n // Parse structs\n const structs = this.parseStructs(tree);\n nodes.push(...structs);\n\n // Parse interfaces\n const interfaces = this.parseInterfaces(tree);\n nodes.push(...interfaces);\n\n // Parse type aliases\n const types = this.parseTypeAliases(tree);\n nodes.push(...types);\n\n // Parse constants and variables\n const constants = this.parseConstants(tree);\n nodes.push(...constants);\n\n // Parse methods and attach to structs\n this.parseMethods(tree, nodes);\n\n return nodes;\n } catch {\n // Return empty array for any parsing errors\n return [];\n }\n }\n\n /**\n * Extract imports from Go code\n * @param code Go source code\n * @returns Array of ImportInfo objects\n */\n extractImports(code: string): ImportInfo[] {\n try {\n const tree = parseGoCode(code);\n if (tree === null) {\n return [];\n }\n\n const imports: ImportInfo[] = [];\n const importDecls = queryNodesByType(tree, 'import_declaration');\n\n for (const importDecl of importDecls) {\n const importSpecs = importDecl.descendantsOfType('import_spec');\n\n for (const spec of importSpecs) {\n const pathNode = getChildByFieldName(spec, 'path');\n if (pathNode === null) {\n continue;\n }\n\n // Extract string content from interpreted_string_literal\n const stringContent = pathNode.descendantsOfType('interpreted_string_literal_content')[0];\n const path =\n stringContent !== undefined ? 
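/* no separate content node: strip the surrounding quotes from the raw literal instead */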
stringContent.text : pathNode.text.replace(/\"/g, '');\n\n if (path !== '') {\n imports.push({\n source: path,\n specifiers: [],\n isType: false,\n });\n }\n }\n }\n\n return imports;\n } catch {\n return [];\n }\n }\n\n /**\n * Parse function declarations\n */\n private parseFunctions(tree: TreeSitterTree): CodeNode[] {\n const functionNodes = queryNodesByType(tree, 'function_declaration');\n const nodes: CodeNode[] = [];\n\n for (const fnNode of functionNodes) {\n const nameNode = getChildByFieldName(fnNode, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const exported = this.isExported(name);\n const startLine = positionToLineNumber(fnNode.startPosition);\n const endLine = positionToLineNumber(fnNode.endPosition);\n const signature = getFunctionSignature(fnNode);\n\n nodes.push({\n type: 'function',\n name,\n exported,\n async: false,\n startLine,\n endLine,\n signature,\n });\n }\n\n return nodes;\n }\n\n /**\n * Parse struct definitions\n */\n private parseStructs(tree: TreeSitterTree): CodeNode[] {\n const typeDecls = queryNodesByType(tree, 'type_declaration');\n const nodes: CodeNode[] = [];\n\n for (const typeDecl of typeDecls) {\n // Get type_spec child node\n const typeSpec = getFirstChildOfType(typeDecl, 'type_spec');\n if (typeSpec === null) {\n continue;\n }\n\n const nameNode = getChildByFieldName(typeSpec, 'name');\n const typeNode = getChildByFieldName(typeSpec, 'type');\n\n if (nameNode === null || typeNode === null) {\n continue;\n }\n\n // Check if it's a struct type\n if (typeNode.type !== 'struct_type') {\n continue;\n }\n\n const name = nameNode.text;\n const exported = this.isExported(name);\n const startLine = positionToLineNumber(typeDecl.startPosition);\n const endLine = positionToLineNumber(typeDecl.endPosition);\n\n nodes.push({\n type: 'class',\n name,\n exported,\n startLine,\n endLine,\n signature: name,\n methods: [],\n });\n }\n\n return nodes;\n }\n\n /**\n * Parse interface definitions\n */\n private parseInterfaces(tree: TreeSitterTree): CodeNode[] {\n const typeDecls = queryNodesByType(tree, 'type_declaration');\n const nodes: CodeNode[] = [];\n\n for (const typeDecl of typeDecls) {\n const typeSpec = getFirstChildOfType(typeDecl, 'type_spec');\n if (typeSpec === null) {\n continue;\n }\n\n const nameNode = getChildByFieldName(typeSpec, 'name');\n const typeNode = getChildByFieldName(typeSpec, 'type');\n\n if (nameNode === null || typeNode === null) {\n continue;\n }\n\n // Check if it's an interface type\n if (typeNode.type !== 'interface_type') {\n continue;\n }\n\n const name = nameNode.text;\n const exported = this.isExported(name);\n const startLine = positionToLineNumber(typeDecl.startPosition);\n const endLine = positionToLineNumber(typeDecl.endPosition);\n\n // Extract interface methods\n const methods = this.extractInterfaceMethods(typeNode);\n\n nodes.push({\n type: 'interface',\n name,\n exported,\n startLine,\n endLine,\n signature: name,\n methods,\n });\n }\n\n return nodes;\n }\n\n /**\n * Parse type aliases\n */\n private parseTypeAliases(tree: TreeSitterTree): CodeNode[] {\n const typeDecls = queryNodesByType(tree, 'type_declaration');\n const nodes: CodeNode[] = [];\n\n for (const typeDecl of typeDecls) {\n const typeSpec = getFirstChildOfType(typeDecl, 'type_spec');\n if (typeSpec === null) {\n continue;\n }\n\n const nameNode = getChildByFieldName(typeSpec, 'name');\n const typeNode = getChildByFieldName(typeSpec, 'type');\n\n if (nameNode === null || typeNode === null) {\n continue;\n }\n\n // 
Skip struct and interface types (handled by other methods)\n if (typeNode.type === 'struct_type' || typeNode.type === 'interface_type') {\n continue;\n }\n\n const name = nameNode.text;\n const exported = this.isExported(name);\n const startLine = positionToLineNumber(typeDecl.startPosition);\n const endLine = positionToLineNumber(typeDecl.endPosition);\n const signature = `${name} = ${typeNode.text}`;\n\n nodes.push({\n type: 'type',\n name,\n exported,\n startLine,\n endLine,\n signature,\n });\n }\n\n return nodes;\n }\n\n /**\n * Parse constants and variables\n */\n private parseConstants(tree: TreeSitterTree): CodeNode[] {\n const nodes: CodeNode[] = [];\n\n // Parse const declarations\n const constDecls = queryNodesByType(tree, 'const_declaration');\n for (const constDecl of constDecls) {\n const specs = constDecl.descendantsOfType('const_spec');\n for (const spec of specs) {\n const nameNode = getChildByFieldName(spec, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const exported = this.isExported(name);\n const startLine = positionToLineNumber(spec.startPosition);\n const endLine = positionToLineNumber(spec.endPosition);\n\n const typeNode = getChildByFieldName(spec, 'type');\n const signature = typeNode !== null ? `${name}: ${typeNode.text}` : name;\n\n nodes.push({\n type: 'const',\n name,\n exported,\n startLine,\n endLine,\n signature,\n });\n }\n }\n\n // Parse var declarations\n const varDecls = queryNodesByType(tree, 'var_declaration');\n for (const varDecl of varDecls) {\n const specs = varDecl.descendantsOfType('var_spec');\n for (const spec of specs) {\n const nameNode = getChildByFieldName(spec, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const exported = this.isExported(name);\n const startLine = positionToLineNumber(spec.startPosition);\n const endLine = positionToLineNumber(spec.endPosition);\n\n const typeNode = getChildByFieldName(spec, 'type');\n const signature = typeNode !== null ? 
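/* specs without a type annotation fall back to the bare name as their signature */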
`${name}: ${typeNode.text}` : name;\n\n nodes.push({\n type: 'const',\n name,\n exported,\n startLine,\n endLine,\n signature,\n });\n }\n }\n\n return nodes;\n }\n\n /**\n * Parse methods and attach to corresponding structs\n */\n private parseMethods(tree: TreeSitterTree, nodes: CodeNode[]): void {\n const methodNodes = queryNodesByType(tree, 'method_declaration');\n\n for (const methodNode of methodNodes) {\n const receiverType = this.getReceiverType(methodNode);\n if (receiverType === null) {\n continue;\n }\n\n const nameNode = getChildByFieldName(methodNode, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const signature = getFunctionSignature(methodNode);\n const startLine = positionToLineNumber(methodNode.startPosition);\n const endLine = positionToLineNumber(methodNode.endPosition);\n\n // Find the corresponding struct and attach method\n const structNode = nodes.find((node) => node.type === 'class' && node.name === receiverType);\n\n if (structNode?.methods !== undefined) {\n structNode.methods.push({\n name,\n async: false,\n signature,\n startLine,\n endLine,\n });\n }\n }\n }\n\n /**\n * Extract methods from interface definition\n */\n private extractInterfaceMethods(interfaceNode: TreeSitterNode): Array<{\n name: string;\n async: boolean;\n signature: string;\n startLine: number;\n endLine: number;\n }> {\n const methods: Array<{\n name: string;\n async: boolean;\n signature: string;\n startLine: number;\n endLine: number;\n }> = [];\n\n const methodElems = interfaceNode.descendantsOfType('method_elem');\n\n for (const methodElem of methodElems) {\n const nameNode = getChildByFieldName(methodElem, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const signature = getFunctionSignature(methodElem);\n const startLine = positionToLineNumber(methodElem.startPosition);\n const endLine = positionToLineNumber(methodElem.endPosition);\n\n methods.push({\n name,\n async: false,\n signature,\n startLine,\n endLine,\n });\n }\n\n return methods;\n }\n\n /**\n * Get the receiver type name for a method\n */\n private getReceiverType(methodNode: TreeSitterNode): string | null {\n const receiverNode = getChildByFieldName(methodNode, 'receiver');\n if (receiverNode === null) {\n return null;\n }\n\n const paramDecl = getFirstChildOfType(receiverNode, 'parameter_declaration');\n if (paramDecl === null) {\n return null;\n }\n\n const typeNode = getChildByFieldName(paramDecl, 'type');\n if (typeNode === null) {\n return null;\n }\n\n // Handle pointer receivers (*Type)\n if (typeNode.type === 'pointer_type') {\n const innerType = typeNode.children.find((child) => child.type === 'type_identifier');\n return innerType !== undefined ? 
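/* pointer receiver: return the underlying type name without the '*' */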
innerType.text : null;\n }\n\n // Handle value receivers (Type)\n if (typeNode.type === 'type_identifier') {\n return typeNode.text;\n }\n\n return null;\n }\n\n /**\n * Check if a name is exported (starts with uppercase letter)\n */\n private isExported(name: string): boolean {\n if (name.length === 0) {\n return false;\n }\n const firstChar = name[0];\n if (firstChar === undefined) {\n return false;\n }\n return firstChar === firstChar.toUpperCase();\n }\n}\n","import path from 'node:path';\nimport { AdapterRegistry } from './adapter-registry.js';\nimport { ASTParser, type CodeNode } from './ast-parser.js';\nimport { GoASTParser } from './go-ast-parser.js';\nimport { PythonASTParser } from './python-ast-parser.js';\nimport { RustASTParser } from './rust-ast-parser.js';\nimport type { PythonBridge } from '../crawl/bridge.js';\n\nexport class ParserFactory {\n constructor(private readonly pythonBridge?: PythonBridge) {}\n\n async parseFile(filePath: string, code: string): Promise<CodeNode[]> {\n const ext = path.extname(filePath);\n\n if (['.ts', '.tsx'].includes(ext)) {\n const parser = new ASTParser();\n return parser.parse(code, 'typescript');\n }\n\n if (['.js', '.jsx'].includes(ext)) {\n const parser = new ASTParser();\n return parser.parse(code, 'javascript');\n }\n\n if (ext === '.py') {\n if (!this.pythonBridge) {\n throw new Error('Python bridge not available for parsing Python files');\n }\n const parser = new PythonASTParser(this.pythonBridge);\n return parser.parse(code, filePath);\n }\n\n if (ext === '.rs') {\n const parser = new RustASTParser();\n return parser.parse(code, filePath);\n }\n\n if (ext === '.go') {\n const parser = new GoASTParser();\n return parser.parse(code, filePath);\n }\n\n // Check for registered language adapters\n const registry = AdapterRegistry.getInstance();\n const adapter = registry.getByExtension(ext);\n if (adapter !== undefined) {\n return adapter.parse(code, filePath);\n }\n\n return [];\n }\n}\n","import type { CodeNode } from './ast-parser.js';\nimport type { PythonBridge, ParsePythonResult } from '../crawl/bridge.js';\n\nexport class PythonASTParser {\n constructor(private readonly bridge: PythonBridge) {}\n\n async parse(code: string, filePath: string): Promise<CodeNode[]> {\n const result: ParsePythonResult = await this.bridge.parsePython(code, filePath);\n\n return result.nodes.map((node) => {\n const codeNode: CodeNode = {\n type: node.type,\n name: node.name,\n exported: node.exported,\n startLine: node.startLine,\n endLine: node.endLine,\n };\n\n if (node.async !== undefined) {\n codeNode.async = node.async;\n }\n\n if (node.signature !== undefined) {\n codeNode.signature = node.signature;\n }\n\n if (node.methods !== undefined) {\n codeNode.methods = node.methods;\n }\n\n return codeNode;\n });\n }\n}\n","import {\n parseRustCode,\n queryNodesByType,\n positionToLineNumber,\n getChildByFieldName,\n hasVisibilityModifier,\n isAsyncFunction,\n getFunctionSignature,\n extractImportPath,\n type TreeSitterNode,\n type TreeSitterTree,\n} from './tree-sitter-parser.js';\nimport type { CodeNode, ImportInfo } from './ast-parser.js';\n\n/**\n * Parser for Rust code using tree-sitter\n * Extracts functions, structs, traits, types, constants, and imports\n */\nexport class RustASTParser {\n /**\n * Parse Rust code into CodeNode array\n * @param code Rust source code\n * @param filePath File path for error context\n * @returns Array of CodeNode objects representing Rust constructs\n */\n parse(code: string, _filePath: string): CodeNode[] {\n try 
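/* a missing native module or malformed source yields an empty node list (null tree, or the catch below) */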
{\n const tree = parseRustCode(code);\n if (tree === null) {\n // Malformed code - return empty array\n return [];\n }\n\n const nodes: CodeNode[] = [];\n\n // Parse functions\n const functions = this.parseFunctions(tree);\n nodes.push(...functions);\n\n // Parse structs\n const structs = this.parseStructs(tree);\n nodes.push(...structs);\n\n // Parse traits\n const traits = this.parseTraits(tree);\n nodes.push(...traits);\n\n // Parse type aliases\n const types = this.parseTypeAliases(tree);\n nodes.push(...types);\n\n // Parse constants and statics\n const constants = this.parseConstants(tree);\n nodes.push(...constants);\n\n // Parse impl blocks and attach methods to structs\n this.parseImplBlocks(tree, nodes);\n\n return nodes;\n } catch {\n // Return empty array for any parsing errors\n return [];\n }\n }\n\n /**\n * Extract imports from Rust code\n * @param code Rust source code\n * @returns Array of ImportInfo objects\n */\n extractImports(code: string): ImportInfo[] {\n try {\n const tree = parseRustCode(code);\n if (tree === null) {\n return [];\n }\n\n const useDeclarations = queryNodesByType(tree, 'use_declaration');\n const imports: ImportInfo[] = [];\n\n for (const useNode of useDeclarations) {\n const importPath = extractImportPath(useNode);\n if (importPath === '') {\n continue;\n }\n\n // Parse the import path to extract module and specifiers\n const { source, specifiers } = this.parseImportPath(importPath);\n\n imports.push({\n source,\n specifiers,\n isType: false, // Rust doesn't distinguish type-only imports at syntax level\n });\n }\n\n return imports;\n } catch {\n return [];\n }\n }\n\n /**\n * Parse function declarations (excluding impl block methods)\n */\n private parseFunctions(tree: TreeSitterTree): CodeNode[] {\n const functionNodes = queryNodesByType(tree, 'function_item');\n const nodes: CodeNode[] = [];\n\n for (const fnNode of functionNodes) {\n // Skip functions inside impl blocks - they'll be handled as methods\n if (this.isInsideImplBlock(fnNode)) {\n continue;\n }\n\n const nameNode = getChildByFieldName(fnNode, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const exported = hasVisibilityModifier(fnNode);\n const async = isAsyncFunction(fnNode);\n const startLine = positionToLineNumber(fnNode.startPosition);\n const endLine = positionToLineNumber(fnNode.endPosition);\n const signature = getFunctionSignature(fnNode);\n\n nodes.push({\n type: 'function',\n name,\n exported,\n async,\n startLine,\n endLine,\n signature,\n });\n }\n\n return nodes;\n }\n\n /**\n * Check if a node is inside an impl block\n */\n private isInsideImplBlock(node: TreeSitterNode): boolean {\n let current = node.parent;\n while (current !== null) {\n if (current.type === 'impl_item') {\n return true;\n }\n current = current.parent;\n }\n return false;\n }\n\n /**\n * Parse struct definitions\n */\n private parseStructs(tree: TreeSitterTree): CodeNode[] {\n const structNodes = queryNodesByType(tree, 'struct_item');\n const nodes: CodeNode[] = [];\n\n for (const structNode of structNodes) {\n const nameNode = getChildByFieldName(structNode, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const exported = hasVisibilityModifier(structNode);\n const startLine = positionToLineNumber(structNode.startPosition);\n const endLine = positionToLineNumber(structNode.endPosition);\n\n // Get type parameters (generics) if present\n const typeParamsNode = getChildByFieldName(structNode, 'type_parameters');\n const 
signature = typeParamsNode !== null ? `${name}${typeParamsNode.text}` : name;\n\n nodes.push({\n type: 'class',\n name,\n exported,\n startLine,\n endLine,\n signature,\n methods: [], // Will be populated by parseImplBlocks\n });\n }\n\n return nodes;\n }\n\n /**\n * Parse trait definitions\n */\n private parseTraits(tree: TreeSitterTree): CodeNode[] {\n const traitNodes = queryNodesByType(tree, 'trait_item');\n const nodes: CodeNode[] = [];\n\n for (const traitNode of traitNodes) {\n const nameNode = getChildByFieldName(traitNode, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const exported = hasVisibilityModifier(traitNode);\n const startLine = positionToLineNumber(traitNode.startPosition);\n const endLine = positionToLineNumber(traitNode.endPosition);\n\n // Get type parameters (generics) if present\n const typeParamsNode = getChildByFieldName(traitNode, 'type_parameters');\n const signature = typeParamsNode !== null ? `${name}${typeParamsNode.text}` : name;\n\n // Extract trait methods\n const methods = this.extractTraitMethods(traitNode);\n\n nodes.push({\n type: 'interface',\n name,\n exported,\n startLine,\n endLine,\n signature,\n methods,\n });\n }\n\n return nodes;\n }\n\n /**\n * Parse type aliases\n */\n private parseTypeAliases(tree: TreeSitterTree): CodeNode[] {\n const typeNodes = queryNodesByType(tree, 'type_item');\n const nodes: CodeNode[] = [];\n\n for (const typeNode of typeNodes) {\n const nameNode = getChildByFieldName(typeNode, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const exported = hasVisibilityModifier(typeNode);\n const startLine = positionToLineNumber(typeNode.startPosition);\n const endLine = positionToLineNumber(typeNode.endPosition);\n\n // Get the full type alias definition\n const valueNode = getChildByFieldName(typeNode, 'type');\n const signature = valueNode !== null ? `${name} = ${valueNode.text}` : name;\n\n nodes.push({\n type: 'type',\n name,\n exported,\n startLine,\n endLine,\n signature,\n });\n }\n\n return nodes;\n }\n\n /**\n * Parse constants and statics\n */\n private parseConstants(tree: TreeSitterTree): CodeNode[] {\n const constNodes = queryNodesByType(tree, ['const_item', 'static_item']);\n const nodes: CodeNode[] = [];\n\n for (const constNode of constNodes) {\n const nameNode = getChildByFieldName(constNode, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const exported = hasVisibilityModifier(constNode);\n const startLine = positionToLineNumber(constNode.startPosition);\n const endLine = positionToLineNumber(constNode.endPosition);\n\n // Get type annotation\n const typeNode = getChildByFieldName(constNode, 'type');\n const signature = typeNode !== null ? 
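/* constants and statics without a type annotation use the bare name as their signature */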
`${name}: ${typeNode.text}` : name;\n\n nodes.push({\n type: 'const',\n name,\n exported,\n startLine,\n endLine,\n signature,\n });\n }\n\n return nodes;\n }\n\n /**\n * Parse impl blocks and attach methods to corresponding structs\n */\n private parseImplBlocks(tree: TreeSitterTree, nodes: CodeNode[]): void {\n const implNodes = queryNodesByType(tree, 'impl_item');\n\n for (const implNode of implNodes) {\n // Get the type being implemented\n const typeNode = getChildByFieldName(implNode, 'type');\n if (typeNode === null) {\n continue;\n }\n\n const typeName = typeNode.text;\n\n // Extract methods from impl block\n const methods = this.extractImplMethods(implNode);\n\n // Find the corresponding struct and attach methods\n const structNode = nodes.find((node) => node.type === 'class' && node.name === typeName);\n\n if (structNode?.methods !== undefined) {\n structNode.methods.push(...methods);\n }\n }\n }\n\n /**\n * Extract methods from trait definition\n */\n private extractTraitMethods(traitNode: TreeSitterNode): Array<{\n name: string;\n async: boolean;\n signature: string;\n startLine: number;\n endLine: number;\n }> {\n const methods: Array<{\n name: string;\n async: boolean;\n signature: string;\n startLine: number;\n endLine: number;\n }> = [];\n\n // Get declaration_list (trait body)\n const bodyNode = getChildByFieldName(traitNode, 'body');\n if (bodyNode === null) {\n return methods;\n }\n\n // Find all function_signature_item nodes (trait method declarations)\n const functionSignatures = bodyNode.descendantsOfType('function_signature_item');\n\n for (const fnSigNode of functionSignatures) {\n const nameNode = getChildByFieldName(fnSigNode, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const async = isAsyncFunction(fnSigNode);\n const signature = getFunctionSignature(fnSigNode);\n const startLine = positionToLineNumber(fnSigNode.startPosition);\n const endLine = positionToLineNumber(fnSigNode.endPosition);\n\n methods.push({\n name,\n async,\n signature,\n startLine,\n endLine,\n });\n }\n\n return methods;\n }\n\n /**\n * Extract methods from impl block\n */\n private extractImplMethods(implNode: TreeSitterNode): Array<{\n name: string;\n async: boolean;\n signature: string;\n startLine: number;\n endLine: number;\n }> {\n const methods: Array<{\n name: string;\n async: boolean;\n signature: string;\n startLine: number;\n endLine: number;\n }> = [];\n\n // Get declaration_list (impl body)\n const bodyNode = getChildByFieldName(implNode, 'body');\n if (bodyNode === null) {\n return methods;\n }\n\n // Find all function_item nodes (impl methods)\n const functionItems = bodyNode.descendantsOfType('function_item');\n\n for (const fnNode of functionItems) {\n const nameNode = getChildByFieldName(fnNode, 'name');\n if (nameNode === null) {\n continue;\n }\n\n const name = nameNode.text;\n const async = isAsyncFunction(fnNode);\n const signature = getFunctionSignature(fnNode);\n const startLine = positionToLineNumber(fnNode.startPosition);\n const endLine = positionToLineNumber(fnNode.endPosition);\n\n methods.push({\n name,\n async,\n signature,\n startLine,\n endLine,\n });\n }\n\n return methods;\n }\n\n /**\n * Parse import path into source and specifiers\n * Examples:\n * - \"std::collections::HashMap\" -> { source: \"std::collections\", specifiers: [\"HashMap\"] }\n * - \"crate::utils::*\" -> { source: \"crate::utils\", specifiers: [\"*\"] }\n * - \"super::Type\" -> { source: \"super\", specifiers: [\"Type\"] }\n */\n private 
parseImportPath(importPath: string): { source: string; specifiers: string[] } {\n // Remove whitespace\n const path = importPath.trim();\n\n // Handle glob imports (use std::io::*)\n if (path.includes('::*')) {\n const source = path.replace('::*', '');\n return { source, specifiers: ['*'] };\n }\n\n // Handle scoped imports: use std::io::{Read, Write}\n const scopedMatch = path.match(/^(.+)::\\{(.+)\\}$/);\n if (scopedMatch !== null) {\n const source = scopedMatch[1] ?? '';\n const specifiersStr = scopedMatch[2] ?? '';\n const specifiers = specifiersStr.split(',').map((s) => s.trim());\n return { source, specifiers };\n }\n\n // Handle simple imports: use std::collections::HashMap\n const parts = path.split('::');\n if (parts.length > 1) {\n const specifiers = [parts[parts.length - 1] ?? ''];\n const source = parts.slice(0, -1).join('::');\n return { source, specifiers };\n }\n\n // Single item import\n return { source: '', specifiers: [path] };\n }\n}\n","import { readFile, access } from 'node:fs/promises';\nimport { homedir } from 'node:os';\nimport { isAbsolute, join, resolve } from 'node:path';\nimport { ProjectRootService } from './project-root.service.js';\nimport { DEFAULT_CONFIG } from '../types/config.js';\nimport { atomicWriteFile } from '../utils/atomic-write.js';\nimport { deepMerge } from '../utils/deep-merge.js';\nimport type { AppConfig } from '../types/config.js';\n\n/** Default config path relative to project root */\nconst DEFAULT_CONFIG_PATH = '.bluera/bluera-knowledge/config.json';\n\n/**\n * Check if a file exists\n */\nasync function fileExists(path: string): Promise<boolean> {\n try {\n await access(path);\n return true;\n } catch {\n return false;\n }\n}\n\nexport class ConfigService {\n private readonly configPath: string;\n private readonly dataDir: string;\n private readonly projectRoot: string;\n private config: AppConfig | null = null;\n\n constructor(configPath?: string, dataDir?: string, projectRoot?: string) {\n // Resolve project root using hierarchical detection\n this.projectRoot = projectRoot ?? ProjectRootService.resolve();\n\n // Resolve configPath - per-repo by default\n // Explicit paths are resolved against projectRoot (handles ~ and relative paths)\n if (configPath !== undefined && configPath !== '') {\n this.configPath = this.expandPath(configPath, this.projectRoot);\n } else {\n this.configPath = join(this.projectRoot, DEFAULT_CONFIG_PATH);\n }\n\n // Resolve dataDir - per-repo by default\n // Explicit paths are resolved against projectRoot (handles ~ and relative paths)\n if (dataDir !== undefined && dataDir !== '') {\n this.dataDir = this.expandPath(dataDir, this.projectRoot);\n } else {\n this.dataDir = this.expandPath(DEFAULT_CONFIG.dataDir, this.projectRoot);\n }\n }\n\n /**\n * Get the resolved project root directory.\n */\n resolveProjectRoot(): string {\n return this.projectRoot;\n }\n\n async load(): Promise<AppConfig> {\n if (this.config !== null) {\n return this.config;\n }\n\n const exists = await fileExists(this.configPath);\n if (!exists) {\n // First run - create config file with defaults\n this.config = { ...DEFAULT_CONFIG };\n await this.save(this.config);\n return this.config;\n }\n\n // File exists - load it (throws on corruption per CLAUDE.md \"fail early\")\n const content = await readFile(this.configPath, 'utf-8');\n try {\n this.config = deepMerge(DEFAULT_CONFIG, JSON.parse(content));\n } catch (error) {\n throw new Error(\n `Failed to parse config file at ${this.configPath}: ${error instanceof Error ? 
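/* normalize unknown throwables into a readable message */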
error.message : String(error)}`\n );\n }\n\n return this.config;\n }\n\n async save(config: AppConfig): Promise<void> {\n await atomicWriteFile(this.configPath, JSON.stringify(config, null, 2));\n this.config = config;\n }\n\n resolveDataDir(): string {\n return this.dataDir;\n }\n\n resolveConfigPath(): string {\n return this.configPath;\n }\n\n private expandPath(path: string, baseDir: string): string {\n // Expand ~ to home directory\n if (path.startsWith('~')) {\n return path.replace('~', homedir());\n }\n // Resolve relative paths against base directory (not process.cwd())\n // Uses isAbsolute() for cross-platform compatibility (Windows paths like C:\\data)\n if (!isAbsolute(path)) {\n return resolve(baseDir, path);\n }\n // Return absolute paths as-is\n return path;\n }\n}\n","export interface EmbeddingConfig {\n readonly model: string;\n readonly batchSize: number;\n // Note: dimensions is fixed at 384 (determined by all-MiniLM-L6-v2 model)\n}\n\nexport interface IndexingConfig {\n readonly concurrency: number;\n readonly chunkSize: number;\n readonly chunkOverlap: number;\n readonly ignorePatterns: readonly string[];\n}\n\nexport interface SearchConfig {\n readonly defaultMode: 'vector' | 'fts' | 'hybrid';\n readonly defaultLimit: number;\n}\n\nexport interface CrawlConfig {\n readonly userAgent: string;\n readonly timeout: number;\n readonly maxConcurrency: number;\n}\n\nexport interface ServerConfig {\n readonly port: number;\n readonly host: string;\n}\n\nexport interface AppConfig {\n readonly version: number;\n readonly dataDir: string;\n readonly embedding: EmbeddingConfig;\n readonly indexing: IndexingConfig;\n readonly search: SearchConfig;\n readonly crawl: CrawlConfig;\n readonly server: ServerConfig;\n}\n\nexport const DEFAULT_CONFIG: AppConfig = {\n version: 1,\n dataDir: '.bluera/bluera-knowledge/data',\n embedding: {\n model: 'Xenova/all-MiniLM-L6-v2',\n batchSize: 32,\n },\n indexing: {\n concurrency: 4,\n chunkSize: 1000,\n chunkOverlap: 150,\n ignorePatterns: ['node_modules/**', '.git/**', '*.min.js', '*.map'],\n },\n search: {\n defaultMode: 'hybrid',\n defaultLimit: 10,\n },\n crawl: {\n userAgent: 'BlueraKnowledge/1.0',\n timeout: 30000,\n maxConcurrency: 3,\n },\n server: {\n port: 3847,\n host: '127.0.0.1',\n },\n};\n","/**\n * Deep merge utility for config objects.\n *\n * Recursively merges overrides into defaults:\n * - Objects: recursively merge nested properties\n * - Arrays: replace entirely (don't concat)\n * - Primitives/null/undefined: use override value when defined\n */\n\n/**\n * Check if a value is a plain object (not null, array, Date, etc.)\n */\nfunction isPlainObject(value: unknown): value is Record<string, unknown> {\n return (\n typeof value === 'object' && value !== null && !Array.isArray(value) && !(value instanceof Date)\n );\n}\n\n/**\n * Deep merge two objects, with overrides taking precedence.\n *\n * Accepts `unknown` as the second parameter to work with JSON.parse() output\n * without requiring type assertions at the call site.\n *\n * @param defaults - The base object with default values (typed)\n * @param overrides - Object with values to override (can be unknown from JSON.parse)\n * @returns A new object with merged values, typed as the defaults type\n *\n * @example\n * ```typescript\n * const defaults = {\n * search: { mode: 'hybrid', limit: 10, rrf: { k: 40 } }\n * };\n * const overrides = JSON.parse('{\"search\": {\"mode\": \"vector\"}}');\n * const result = deepMerge(defaults, overrides);\n * // { search: { mode: 'vector', 
limit: 10, rrf: { k: 40 } } }\n * ```\n */\nexport function deepMerge<T extends object>(defaults: T, overrides: unknown): T {\n // If overrides is not a plain object, return defaults unchanged\n if (!isPlainObject(overrides)) {\n return { ...defaults };\n }\n\n // Use internal helper that works with Record types\n // Type assertions unavoidable here: we need to bridge generic T to Record<string, unknown>\n // for iteration while preserving the return type. This is safe because we spread defaults\n // and only add/replace properties that exist in overrides.\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n const defaultsRecord = defaults as T & Record<string, unknown>;\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n return deepMergeRecords(defaultsRecord, overrides) as T;\n}\n\n/**\n * Internal implementation that works with Record types.\n * Separated to satisfy TypeScript's type system without assertions.\n */\nfunction deepMergeRecords(\n defaults: Record<string, unknown>,\n overrides: Record<string, unknown>\n): Record<string, unknown> {\n const result: Record<string, unknown> = { ...defaults };\n\n for (const key of Object.keys(overrides)) {\n const defaultValue = defaults[key];\n const overrideValue = overrides[key];\n\n // Skip undefined overrides (treat as \"not specified\")\n if (overrideValue === undefined) {\n continue;\n }\n\n // If both values are plain objects, recursively merge\n if (isPlainObject(defaultValue) && isPlainObject(overrideValue)) {\n result[key] = deepMergeRecords(defaultValue, overrideValue);\n } else {\n // Arrays, primitives, null, Date, etc. - use override directly\n result[key] = overrideValue;\n }\n }\n\n return result;\n}\n","import { readFile, writeFile, access } from 'node:fs/promises';\nimport { join } from 'node:path';\n\n/**\n * Required .gitignore patterns for Bluera Knowledge\n *\n * These patterns ensure:\n * - The .bluera/ data directory (vector DB, cloned repos) is ignored\n * - The .bluera/ logs directory is ignored\n * - Config files are NOT ignored (can be committed for team sharing):\n * - stores.config.json (store definitions)\n * - config.json (app configuration)\n * - skill-activation.json (skill activation preferences)\n *\n * IMPORTANT: Git ignores children of ignored directories. To un-ignore a nested\n * file, you must first un-ignore each parent directory in the path. The order is:\n * 1. Ignore .bluera/ (everything ignored by default)\n * 2. Un-ignore .bluera/ itself (allow traversing into it)\n * 3. Un-ignore .bluera/bluera-knowledge/ (allow traversing deeper)\n * 4. Un-ignore the specific files we want tracked\n * 5. 
Re-ignore .bluera/bluera-knowledge/data/ and logs/ (keep untracked)\n */\nconst REQUIRED_PATTERNS = [\n '.bluera/',\n '!.bluera/',\n '!.bluera/bluera-knowledge/',\n '!.bluera/bluera-knowledge/stores.config.json',\n '!.bluera/bluera-knowledge/config.json',\n '!.bluera/bluera-knowledge/skill-activation.json',\n '.bluera/bluera-knowledge/data/',\n '.bluera/bluera-knowledge/logs/',\n];\n\n/**\n * Header comment for the gitignore section\n */\nconst SECTION_HEADER = `\n# Bluera Knowledge\n# Config files (stores.config.json, config.json, skill-activation.json) can be committed\n# Data directory (vector DB, cloned repos) and logs are not committed\n`;\n\n/**\n * Check if a file exists\n */\nasync function fileExists(path: string): Promise<boolean> {\n try {\n await access(path);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Service for managing .gitignore patterns for Bluera Knowledge.\n *\n * When stores are created, this service ensures the project's .gitignore\n * is updated to:\n * - Ignore the .bluera/ data directory (not committed)\n * - Allow committing .bluera/bluera-knowledge/stores.config.json (for team sharing)\n */\nexport class GitignoreService {\n private readonly gitignorePath: string;\n\n constructor(projectRoot: string) {\n this.gitignorePath = join(projectRoot, '.gitignore');\n }\n\n /**\n * Check if all required patterns are present in .gitignore\n */\n async hasRequiredPatterns(): Promise<boolean> {\n const exists = await fileExists(this.gitignorePath);\n if (!exists) {\n return false;\n }\n\n const content = await readFile(this.gitignorePath, 'utf-8');\n const lines = content.split('\\n').map((l) => l.trim());\n\n for (const pattern of REQUIRED_PATTERNS) {\n if (!lines.includes(pattern)) {\n return false;\n }\n }\n\n return true;\n }\n\n /**\n * Ensure required .gitignore patterns are present.\n *\n * - Creates .gitignore if it doesn't exist\n * - Appends missing patterns if .gitignore exists\n * - Does nothing if all patterns are already present\n *\n * @returns Object with updated flag and descriptive message\n */\n async ensureGitignorePatterns(): Promise<{ updated: boolean; message: string }> {\n const exists = await fileExists(this.gitignorePath);\n\n if (!exists) {\n // Create new .gitignore with our patterns\n const content = `${SECTION_HEADER.trim()}\\n${REQUIRED_PATTERNS.join('\\n')}\\n`;\n await writeFile(this.gitignorePath, content);\n return {\n updated: true,\n message: 'Created .gitignore with Bluera Knowledge patterns',\n };\n }\n\n // Read existing content\n const existingContent = await readFile(this.gitignorePath, 'utf-8');\n const lines = existingContent.split('\\n').map((l) => l.trim());\n\n // Find missing patterns\n const missingPatterns = REQUIRED_PATTERNS.filter((pattern) => !lines.includes(pattern));\n\n if (missingPatterns.length === 0) {\n return {\n updated: false,\n message: 'All Bluera Knowledge patterns already present in .gitignore',\n };\n }\n\n // Append missing patterns\n let newContent = existingContent;\n if (!newContent.endsWith('\\n')) {\n newContent += '\\n';\n }\n\n newContent += SECTION_HEADER;\n newContent += `${missingPatterns.join('\\n')}\\n`;\n\n await writeFile(this.gitignorePath, newContent);\n\n return {\n updated: true,\n message: `Updated .gitignore with ${String(missingPatterns.length)} Bluera Knowledge pattern(s)`,\n };\n }\n\n /**\n * Get the path to the .gitignore file\n */\n getGitignorePath(): string {\n return this.gitignorePath;\n }\n}\n","import { createHash } from 'node:crypto';\nimport { readFile, 
readdir } from 'node:fs/promises';\nimport { join, extname, basename, relative } from 'node:path';\nimport { ChunkingService } from './chunking.service.js';\nimport { DriftService } from './drift.service.js';\nimport { createLogger } from '../logging/index.js';\nimport { createDocumentId } from '../types/brands.js';\nimport { ok, err } from '../types/result.js';\nimport { parseIgnorePatternsForScanning } from '../utils/ignore-patterns.js';\nimport type { CodeGraphService } from './code-graph.service.js';\nimport type { ManifestService } from './manifest.service.js';\nimport type { EmbeddingEngine } from '../db/embeddings.js';\nimport type { LanceStore } from '../db/lance.js';\nimport type { DocumentId } from '../types/brands.js';\nimport type { Document } from '../types/document.js';\nimport type { TypedStoreManifest, TypedFileState } from '../types/manifest.js';\nimport type { ProgressCallback } from '../types/progress.js';\nimport type { Result } from '../types/result.js';\nimport type { Store, FileStore, RepoStore } from '../types/store.js';\n\nconst logger = createLogger('index-service');\n\ninterface IndexResult {\n filesIndexed: number;\n chunksCreated: number;\n timeMs: number;\n}\n\ninterface IndexOptions {\n chunkSize?: number;\n chunkOverlap?: number;\n codeGraphService?: CodeGraphService;\n concurrency?: number;\n manifestService?: ManifestService;\n ignorePatterns?: readonly string[];\n}\n\ninterface IncrementalIndexResult extends IndexResult {\n filesAdded: number;\n filesModified: number;\n filesDeleted: number;\n filesUnchanged: number;\n}\n\nconst TEXT_EXTENSIONS = new Set([\n // Text/docs\n '.txt',\n '.md',\n '.rst',\n '.adoc',\n // JavaScript/TypeScript\n '.js',\n '.ts',\n '.jsx',\n '.tsx',\n '.mjs',\n '.cjs',\n '.mts',\n '.cts',\n // Config/data\n '.json',\n '.yaml',\n '.yml',\n '.toml',\n '.ini',\n '.env',\n // Web\n '.html',\n '.htm',\n '.css',\n '.scss',\n '.sass',\n '.less',\n '.vue',\n '.svelte',\n // Python\n '.py',\n '.pyi',\n '.pyx',\n // Ruby\n '.rb',\n '.erb',\n '.rake',\n // Go\n '.go',\n // Rust\n '.rs',\n // Java/JVM\n '.java',\n '.kt',\n '.kts',\n '.scala',\n '.groovy',\n '.gradle',\n // C/C++\n '.c',\n '.cpp',\n '.cc',\n '.cxx',\n '.h',\n '.hpp',\n '.hxx',\n // C#/.NET\n '.cs',\n '.fs',\n '.vb',\n // Swift/Objective-C\n '.swift',\n '.m',\n '.mm',\n // PHP\n '.php',\n // Shell\n '.sh',\n '.bash',\n '.zsh',\n '.fish',\n '.ps1',\n '.psm1',\n // SQL\n '.sql',\n // Other\n '.xml',\n '.graphql',\n '.gql',\n '.proto',\n '.lua',\n '.r',\n '.R',\n '.jl',\n '.ex',\n '.exs',\n '.erl',\n '.hrl',\n '.clj',\n '.cljs',\n '.cljc',\n '.hs',\n '.elm',\n '.dart',\n '.pl',\n '.pm',\n '.tcl',\n '.vim',\n '.zig',\n '.nim',\n '.v',\n '.tf',\n '.hcl',\n '.dockerfile',\n '.makefile',\n '.cmake',\n]);\n\nexport class IndexService {\n private readonly lanceStore: LanceStore;\n private readonly embeddingEngine: EmbeddingEngine;\n private readonly chunker: ChunkingService;\n private readonly codeGraphService: CodeGraphService | undefined;\n private readonly manifestService: ManifestService | undefined;\n private readonly driftService: DriftService;\n private readonly concurrency: number;\n private readonly ignoreDirs: Set<string>;\n private readonly ignoreFilePatterns: Array<(filename: string) => boolean>;\n\n constructor(\n lanceStore: LanceStore,\n embeddingEngine: EmbeddingEngine,\n options: IndexOptions = {}\n ) {\n this.lanceStore = lanceStore;\n this.embeddingEngine = embeddingEngine;\n this.chunker = new ChunkingService({\n chunkSize: options.chunkSize ?? 
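/* fallbacks mirror DEFAULT_CONFIG.indexing (chunkSize 1000, chunkOverlap 150) */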
1000,\n chunkOverlap: options.chunkOverlap ?? 150,\n });\n this.codeGraphService = options.codeGraphService;\n this.manifestService = options.manifestService;\n this.driftService = new DriftService();\n this.concurrency = options.concurrency ?? 4;\n\n const parsed = parseIgnorePatternsForScanning(options.ignorePatterns ?? []);\n this.ignoreDirs = parsed.dirs;\n this.ignoreFilePatterns = parsed.fileMatchers;\n }\n\n async indexStore(store: Store, onProgress?: ProgressCallback): Promise<Result<IndexResult>> {\n logger.info(\n {\n storeId: store.id,\n storeName: store.name,\n storeType: store.type,\n },\n 'Starting store indexing'\n );\n\n try {\n if (store.type === 'file' || store.type === 'repo') {\n return await this.indexFileStore(store, onProgress);\n }\n\n logger.error(\n { storeId: store.id, storeType: store.type },\n 'Unsupported store type for indexing'\n );\n return err(new Error(`Indexing not supported for store type: ${store.type}`));\n } catch (error) {\n logger.error(\n {\n storeId: store.id,\n error: error instanceof Error ? error.message : String(error),\n },\n 'Store indexing failed'\n );\n return err(error instanceof Error ? error : new Error(String(error)));\n }\n }\n\n /**\n * Incrementally index a store, only processing changed files.\n * Requires manifestService to be configured.\n *\n * @param store - The store to index\n * @param onProgress - Optional progress callback\n * @returns Result with incremental index statistics\n */\n async indexStoreIncremental(\n store: Store,\n onProgress?: ProgressCallback\n ): Promise<Result<IncrementalIndexResult>> {\n if (this.manifestService === undefined) {\n return err(new Error('ManifestService required for incremental indexing'));\n }\n\n if (store.type !== 'file' && store.type !== 'repo') {\n return err(new Error(`Incremental indexing not supported for store type: ${store.type}`));\n }\n\n logger.info(\n {\n storeId: store.id,\n storeName: store.name,\n storeType: store.type,\n },\n 'Starting incremental store indexing'\n );\n\n const startTime = Date.now();\n\n try {\n // Load manifest\n const manifest = await this.manifestService.load(store.id);\n\n // Scan current files\n const filePaths = await this.scanDirectory(store.path);\n const currentFiles = await Promise.all(\n filePaths.map((path) => this.driftService.getFileState(path))\n );\n\n // Detect changes\n const drift = await this.driftService.detectChanges(manifest, currentFiles);\n\n logger.debug(\n {\n storeId: store.id,\n added: drift.added.length,\n modified: drift.modified.length,\n deleted: drift.deleted.length,\n unchanged: drift.unchanged.length,\n },\n 'Drift detection complete'\n );\n\n // Collect document IDs to delete (from modified and deleted files)\n const documentIdsToDelete: DocumentId[] = [];\n for (const path of [...drift.modified, ...drift.deleted]) {\n const fileState = manifest.files[path];\n if (fileState !== undefined) {\n documentIdsToDelete.push(...fileState.documentIds);\n }\n }\n\n // Delete old documents\n if (documentIdsToDelete.length > 0) {\n await this.lanceStore.deleteDocuments(store.id, documentIdsToDelete);\n logger.debug(\n { storeId: store.id, count: documentIdsToDelete.length },\n 'Deleted old documents'\n );\n }\n\n // Process new and modified files\n const filesToProcess = [...drift.added, ...drift.modified];\n const totalFiles = filesToProcess.length;\n\n onProgress?.({\n type: 'start',\n current: 0,\n total: totalFiles,\n message: `Processing ${String(totalFiles)} changed files`,\n });\n\n const documents: Document[] = [];\n 
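/* newManifestFiles becomes the rebuilt manifest: unchanged files keep their prior state, changed files get fresh state below */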
const newManifestFiles: Record<string, TypedFileState> = {};\n let filesProcessed = 0;\n\n // Keep unchanged files in manifest\n for (const path of drift.unchanged) {\n const existingState = manifest.files[path];\n if (existingState !== undefined) {\n newManifestFiles[path] = existingState;\n }\n }\n\n // Process changed files in parallel batches\n for (let i = 0; i < filesToProcess.length; i += this.concurrency) {\n const batch = filesToProcess.slice(i, i + this.concurrency);\n\n const batchResults = await Promise.all(\n batch.map(async (filePath) => {\n try {\n const result = await this.processFile(filePath, store);\n const documentIds = result.documents.map((d) => d.id);\n\n // Create file state for manifest\n const { state } = await this.driftService.createFileState(filePath, documentIds);\n\n return {\n filePath,\n documents: result.documents,\n fileState: state,\n };\n } catch (error) {\n logger.warn(\n { filePath, error: error instanceof Error ? error.message : String(error) },\n 'Failed to process file during incremental indexing, skipping'\n );\n return null;\n }\n })\n );\n\n // Collect results (skip null entries from failed files)\n for (const result of batchResults) {\n if (result !== null) {\n documents.push(...result.documents);\n newManifestFiles[result.filePath] = result.fileState;\n }\n }\n\n filesProcessed += batch.length;\n\n onProgress?.({\n type: 'progress',\n current: filesProcessed,\n total: totalFiles,\n message: `Processed ${String(filesProcessed)}/${String(totalFiles)} files`,\n });\n }\n\n // Add new documents\n if (documents.length > 0) {\n await this.lanceStore.addDocuments(store.id, documents);\n }\n\n // Recreate FTS index if any changes occurred (deletions or additions)\n if (documentIdsToDelete.length > 0 || documents.length > 0) {\n await this.lanceStore.createFtsIndex(store.id);\n }\n\n // Rebuild code graph if service available and source files changed\n if (this.codeGraphService) {\n const sourceExtensions = ['.ts', '.tsx', '.js', '.jsx', '.py', '.rs', '.go'];\n const hasSourceChanges =\n filesToProcess.some((p) => sourceExtensions.includes(extname(p).toLowerCase())) ||\n drift.deleted.some((p) => sourceExtensions.includes(extname(p).toLowerCase()));\n\n if (hasSourceChanges) {\n // Rebuild full graph from all current source files (simpler than incremental updates)\n const allSourceFiles: Array<{ path: string; content: string }> = [];\n const allPaths = [...drift.unchanged, ...filesToProcess];\n\n for (const filePath of allPaths) {\n const ext = extname(filePath).toLowerCase();\n if (sourceExtensions.includes(ext)) {\n try {\n const content = await readFile(filePath, 'utf-8');\n allSourceFiles.push({ path: filePath, content });\n } catch {\n // File may have been deleted between scan and read\n }\n }\n }\n\n if (allSourceFiles.length > 0) {\n const graph = await this.codeGraphService.buildGraph(allSourceFiles);\n await this.codeGraphService.saveGraph(store.id, graph);\n logger.debug(\n { storeId: store.id, sourceFiles: allSourceFiles.length },\n 'Rebuilt code graph during incremental indexing'\n );\n } else {\n // No source files remain - delete stale graph\n await this.codeGraphService.deleteGraph(store.id);\n logger.debug(\n { storeId: store.id },\n 'Deleted stale code graph (no source files remain)'\n );\n }\n }\n }\n\n // Save updated manifest\n const updatedManifest: TypedStoreManifest = {\n version: 1,\n storeId: store.id,\n indexedAt: new Date().toISOString(),\n files: newManifestFiles,\n };\n await this.manifestService.save(updatedManifest);\n\n 
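// Hypothetical follow-up (caller-side names assumed, sketch only): a later run reloads this\n      // manifest via ManifestService.load(store.id), and DriftService.detectChanges() then reports\n      // only the added/modified/deleted paths, e.g.\n      //   const next = await indexService.indexStoreIncremental(store, (p) => logger.info(p.message));\n      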
onProgress?.({\n type: 'complete',\n current: totalFiles,\n total: totalFiles,\n message: 'Incremental indexing complete',\n });\n\n const timeMs = Date.now() - startTime;\n\n logger.info(\n {\n storeId: store.id,\n storeName: store.name,\n filesAdded: drift.added.length,\n filesModified: drift.modified.length,\n filesDeleted: drift.deleted.length,\n filesUnchanged: drift.unchanged.length,\n chunksCreated: documents.length,\n timeMs,\n },\n 'Incremental indexing complete'\n );\n\n return ok({\n filesIndexed: filesToProcess.length,\n chunksCreated: documents.length,\n timeMs,\n filesAdded: drift.added.length,\n filesModified: drift.modified.length,\n filesDeleted: drift.deleted.length,\n filesUnchanged: drift.unchanged.length,\n });\n } catch (error) {\n logger.error(\n {\n storeId: store.id,\n error: error instanceof Error ? error.message : String(error),\n },\n 'Incremental indexing failed'\n );\n return err(error instanceof Error ? error : new Error(String(error)));\n }\n }\n\n private async indexFileStore(\n store: FileStore | RepoStore,\n onProgress?: ProgressCallback\n ): Promise<Result<IndexResult>> {\n const startTime = Date.now();\n\n // Clear existing documents before full re-index to prevent duplicates\n await this.lanceStore.clearAllDocuments(store.id);\n\n // Clear stale manifest to ensure fresh incremental indexing later\n if (this.manifestService) {\n await this.manifestService.delete(store.id);\n }\n\n const files = await this.scanDirectory(store.path);\n const documents: Document[] = [];\n let filesProcessed = 0;\n\n logger.debug(\n {\n storeId: store.id,\n path: store.path,\n fileCount: files.length,\n concurrency: this.concurrency,\n },\n 'Files scanned for indexing'\n );\n\n // Collect source files for code graph building\n const sourceFiles: Array<{ path: string; content: string }> = [];\n\n // Emit start event\n onProgress?.({\n type: 'start',\n current: 0,\n total: files.length,\n message: 'Starting index',\n });\n\n // Process files in parallel batches\n for (let i = 0; i < files.length; i += this.concurrency) {\n const batch = files.slice(i, i + this.concurrency);\n\n const batchResults = await Promise.all(\n batch.map(async (filePath) => {\n try {\n return await this.processFile(filePath, store);\n } catch (error) {\n logger.warn(\n { filePath, error: error instanceof Error ? 
error.message : String(error) },\n 'Failed to process file, skipping'\n );\n return { documents: [], sourceFile: undefined };\n }\n })\n );\n\n // Collect results from batch\n for (const result of batchResults) {\n documents.push(...result.documents);\n if (result.sourceFile !== undefined) {\n sourceFiles.push(result.sourceFile);\n }\n }\n\n filesProcessed += batch.length;\n\n // Emit progress event after each batch\n onProgress?.({\n type: 'progress',\n current: filesProcessed,\n total: files.length,\n message: `Indexed ${String(filesProcessed)}/${String(files.length)} files`,\n });\n }\n\n if (documents.length > 0) {\n await this.lanceStore.addDocuments(store.id, documents);\n // Create FTS index for full-text search\n await this.lanceStore.createFtsIndex(store.id);\n }\n\n // Build and save code graph if service is available and we have source files\n if (this.codeGraphService && sourceFiles.length > 0) {\n const graph = await this.codeGraphService.buildGraph(sourceFiles);\n await this.codeGraphService.saveGraph(store.id, graph);\n } else if (this.codeGraphService) {\n // No source files - delete any stale graph\n await this.codeGraphService.deleteGraph(store.id);\n }\n\n // Emit complete event\n onProgress?.({\n type: 'complete',\n current: files.length,\n total: files.length,\n message: 'Indexing complete',\n });\n\n const timeMs = Date.now() - startTime;\n\n logger.info(\n {\n storeId: store.id,\n storeName: store.name,\n filesIndexed: filesProcessed,\n chunksCreated: documents.length,\n sourceFilesForGraph: sourceFiles.length,\n timeMs,\n },\n 'Store indexing complete'\n );\n\n return ok({\n filesIndexed: filesProcessed,\n chunksCreated: documents.length,\n timeMs,\n });\n }\n\n /**\n * Process a single file: read, chunk, embed, and return documents.\n * Extracted for parallel processing.\n */\n private async processFile(\n filePath: string,\n store: FileStore | RepoStore\n ): Promise<{\n documents: Document[];\n sourceFile: { path: string; content: string } | undefined;\n }> {\n const content = await readFile(filePath, 'utf-8');\n const fileHash = createHash('md5').update(content).digest('hex');\n const chunks = this.chunker.chunk(content, filePath);\n\n // Use relative path for document ID to ensure consistency across machines\n // and prevent collisions between files with identical content\n const relativePath = relative(store.path, filePath);\n const pathHash = createHash('md5').update(relativePath).digest('hex').slice(0, 8);\n\n const ext = extname(filePath).toLowerCase();\n const fileName = basename(filePath).toLowerCase();\n const fileType = this.classifyFileType(ext, fileName, filePath);\n\n // Track source file for code graph (supports JS/TS, Python, Rust, Go)\n const sourceFile = ['.ts', '.tsx', '.js', '.jsx', '.py', '.rs', '.go'].includes(ext)\n ? 
{ path: filePath, content }\n : undefined;\n\n // Skip files with no chunks (empty files)\n if (chunks.length === 0) {\n return { documents: [], sourceFile };\n }\n\n // Batch embed all chunks from this file\n const chunkContents = chunks.map((c) => c.content);\n const vectors = await this.embeddingEngine.embedBatch(chunkContents);\n\n const documents: Document[] = [];\n for (let i = 0; i < chunks.length; i++) {\n const chunk = chunks[i];\n const vector = vectors[i];\n\n // Fail fast if chunk/vector mismatch (should never happen)\n if (chunk === undefined || vector === undefined) {\n throw new Error(\n `Chunk/vector mismatch at index ${String(i)}: chunk=${String(chunk !== undefined)}, vector=${String(vector !== undefined)}`\n );\n }\n\n // Include pathHash in ID to prevent collisions when files have identical content\n const chunkId =\n chunks.length > 1\n ? `${store.id}-${pathHash}-${fileHash}-${String(chunk.chunkIndex)}`\n : `${store.id}-${pathHash}-${fileHash}`;\n\n documents.push({\n id: createDocumentId(chunkId),\n content: chunk.content,\n vector,\n metadata: {\n type: chunks.length > 1 ? 'chunk' : 'file',\n storeId: store.id,\n path: filePath,\n indexedAt: new Date().toISOString(),\n fileHash,\n chunkIndex: chunk.chunkIndex,\n totalChunks: chunk.totalChunks,\n fileType,\n sectionHeader: chunk.sectionHeader,\n functionName: chunk.functionName,\n hasDocComments: /\\/\\*\\*[\\s\\S]*?\\*\\//.test(chunk.content),\n docSummary: chunk.docSummary,\n },\n });\n }\n\n return { documents, sourceFile };\n }\n\n private async scanDirectory(dir: string): Promise<string[]> {\n const files: string[] = [];\n const entries = await readdir(dir, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = join(dir, entry.name);\n\n if (entry.isDirectory()) {\n // Skip directories matching ignore patterns\n if (!this.ignoreDirs.has(entry.name)) {\n files.push(...(await this.scanDirectory(fullPath)));\n }\n } else if (entry.isFile()) {\n // Skip files matching ignore patterns (e.g., *.min.js, *.map)\n const shouldIgnore = this.ignoreFilePatterns.some((matcher) => matcher(entry.name));\n if (shouldIgnore) {\n continue;\n }\n\n const ext = extname(entry.name).toLowerCase();\n if (TEXT_EXTENSIONS.has(ext)) {\n files.push(fullPath);\n }\n }\n }\n\n return files;\n }\n\n /**\n * Classify file type for ranking purposes.\n * Documentation files rank higher than source code for documentation queries.\n * Phase 4: Enhanced to detect internal implementation files.\n */\n private classifyFileType(ext: string, fileName: string, filePath: string): string {\n // Documentation files\n if (ext === '.md') {\n // CHANGELOG files get their own category for intent-based penalties\n if (fileName === 'changelog.md' || fileName === 'changes.md' || /changelog/i.test(fileName)) {\n return 'changelog';\n }\n // Special doc files get highest priority\n if (['readme.md', 'migration.md', 'contributing.md'].includes(fileName)) {\n return 'documentation-primary';\n }\n // Check path for documentation indicators\n if (/\\/(docs?|documentation|guides?|tutorials?|articles?)\\//i.test(filePath)) {\n return 'documentation';\n }\n return 'documentation';\n }\n\n // Test files\n if (/\\.(test|spec)\\.[jt]sx?$/.test(fileName) || /\\/__tests__\\//.test(filePath)) {\n return 'test';\n }\n\n // Example files\n if (/\\/examples?\\//.test(filePath) || fileName.includes('example')) {\n return 'example';\n }\n\n // Config files\n if (/^(tsconfig|package|\\.eslint|\\.prettier|vite\\.config|next\\.config)/i.test(fileName)) {\n 
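// Illustrative matches for the pattern above: "tsconfig.json", "package.json", ".eslintrc.cjs",\n      // "vite.config.ts" (fileName is already lowercased by the caller, and the regex is /i anyway).\n      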
return 'config';\n }\n\n // Source code - distinguish between internal and public-facing\n if (['.ts', '.tsx', '.js', '.jsx', '.py', '.go', '.rs', '.java'].includes(ext)) {\n // Internal implementation files (monorepo packages, lib internals)\n // These patterns indicate internal/core implementation code\n if (this.isInternalImplementation(filePath, fileName)) {\n return 'source-internal';\n }\n return 'source';\n }\n\n return 'other';\n }\n\n /**\n * Detect if a source file is internal implementation code.\n * Internal code should rank lower than public-facing APIs and docs.\n */\n private isInternalImplementation(filePath: string, fileName: string): boolean {\n const pathLower = filePath.toLowerCase();\n const fileNameLower = fileName.toLowerCase();\n\n // Monorepo internal packages (like Vue's packages/*/src/)\n if (/\\/packages\\/[^/]+\\/src\\//.test(pathLower)) {\n // Exception: index files often export public APIs\n if (fileNameLower === 'index.ts' || fileNameLower === 'index.js') {\n return false;\n }\n return true;\n }\n\n // Internal/core directories\n if (/\\/(internal|lib\\/core|core\\/src|_internal|private)\\//.test(pathLower)) {\n return true;\n }\n\n // Compiler/transform internals (often not what users want)\n if (\n /\\/(compiler|transforms?|parse|codegen)\\//.test(pathLower) &&\n !fileNameLower.includes('readme') &&\n !fileNameLower.includes('index')\n ) {\n return true;\n }\n\n return false;\n }\n}\n\n/**\n * Classify web content type based on URL patterns and page title.\n * Used for ranking boosts similar to local file classification.\n */\nexport function classifyWebContentType(url: string, title?: string): string {\n const urlLower = url.toLowerCase();\n const titleLower = (title ?? '').toLowerCase();\n\n // API reference documentation → documentation-primary (1.8x boost)\n if (\n /\\/api[-/]?(ref|reference|docs?)?\\//i.test(urlLower) ||\n /api\\s*(reference|documentation)/i.test(titleLower)\n ) {\n return 'documentation-primary';\n }\n\n // Getting started / tutorials → documentation-primary (1.8x boost)\n if (\n /\\/(getting[-_]?started|quickstart|tutorial|setup)\\b/i.test(urlLower) ||\n /(getting started|quickstart|tutorial)/i.test(titleLower)\n ) {\n return 'documentation-primary';\n }\n\n // General docs paths → documentation (1.5x boost)\n if (/\\/(docs?|documentation|reference|learn|manual|guide)/i.test(urlLower)) {\n return 'documentation';\n }\n\n // Examples and demos → example (1.4x boost)\n if (/\\/(examples?|demos?|samples?|cookbook)/i.test(urlLower)) {\n return 'example';\n }\n\n // Changelog → changelog (special handling in intent boosts)\n if (/changelog|release[-_]?notes/i.test(urlLower)) {\n return 'changelog';\n }\n\n // Blog posts → lower priority\n if (/\\/blog\\//i.test(urlLower)) {\n return 'other';\n }\n\n // Web content without specific path indicators is treated as documentation\n return 'documentation';\n}\n","export interface ChunkConfig {\n chunkSize: number;\n chunkOverlap: number;\n}\n\nexport interface Chunk {\n content: string;\n chunkIndex: number;\n totalChunks: number;\n startOffset: number;\n endOffset: number;\n /** Section header if this chunk starts a markdown section */\n sectionHeader?: string | undefined;\n /** Function or class name if this chunk contains a code declaration */\n functionName?: string | undefined;\n /** JSDoc/comment summary extracted from this chunk */\n docSummary?: string | undefined;\n}\n\n/**\n * Preset configurations for different content types.\n * Code uses smaller chunks for precise symbol 
matching.\n * Web/docs use larger chunks to preserve prose context.\n */\nconst CHUNK_PRESETS = {\n code: { chunkSize: 768, chunkOverlap: 100 },\n web: { chunkSize: 1200, chunkOverlap: 200 },\n docs: { chunkSize: 1200, chunkOverlap: 200 },\n} as const;\n\nexport type ContentType = keyof typeof CHUNK_PRESETS;\n\nexport class ChunkingService {\n private readonly chunkSize: number;\n private readonly chunkOverlap: number;\n\n constructor(config: ChunkConfig) {\n if (config.chunkOverlap >= config.chunkSize) {\n throw new Error(\n `chunkOverlap (${String(config.chunkOverlap)}) must be less than chunkSize (${String(config.chunkSize)})`\n );\n }\n this.chunkSize = config.chunkSize;\n this.chunkOverlap = config.chunkOverlap;\n }\n\n /**\n * Create a ChunkingService with preset configuration for a content type.\n * - 'code': Smaller chunks (768/100) for precise code symbol matching\n * - 'web': Larger chunks (1200/200) for web prose content\n * - 'docs': Larger chunks (1200/200) for documentation\n */\n static forContentType(type: ContentType): ChunkingService {\n return new ChunkingService(CHUNK_PRESETS[type]);\n }\n\n /**\n * Chunk text content. Uses semantic chunking for Markdown and code files,\n * falling back to sliding window for other content.\n */\n chunk(text: string, filePath?: string): Chunk[] {\n // Use semantic chunking for Markdown files\n if (filePath !== undefined && filePath !== '' && /\\.md$/i.test(filePath)) {\n return this.chunkMarkdown(text);\n }\n\n // Use semantic chunking for TypeScript/JavaScript files\n if (filePath !== undefined && filePath !== '' && /\\.(ts|tsx|js|jsx)$/i.test(filePath)) {\n return this.chunkCode(text);\n }\n\n return this.chunkSlidingWindow(text);\n }\n\n /**\n * Semantic chunking for Markdown files.\n * Splits on section headers to keep related content together.\n */\n private chunkMarkdown(text: string): Chunk[] {\n // Match markdown headers (# through ####)\n const headerRegex = /^(#{1,4})\\s+(.+)$/gm;\n const sections: Array<{ header: string; content: string; startOffset: number }> = [];\n\n let lastIndex = 0;\n let lastHeader = '';\n let match: RegExpExecArray | null;\n\n while ((match = headerRegex.exec(text)) !== null) {\n // Save previous section\n if (match.index > lastIndex) {\n const content = text.slice(lastIndex, match.index).trim();\n if (content) {\n sections.push({\n header: lastHeader,\n content: content,\n startOffset: lastIndex,\n });\n }\n }\n lastHeader = match[2] ?? 
'';\n lastIndex = match.index;\n }\n\n // Add final section\n const finalContent = text.slice(lastIndex).trim();\n if (finalContent) {\n sections.push({\n header: lastHeader,\n content: finalContent,\n startOffset: lastIndex,\n });\n }\n\n // If no sections found, fall back to sliding window\n if (sections.length === 0) {\n return this.chunkSlidingWindow(text);\n }\n\n // Convert sections to chunks, splitting large sections if needed\n const chunks: Chunk[] = [];\n\n for (const section of sections) {\n if (section.content.length <= this.chunkSize) {\n // Section fits in one chunk\n chunks.push({\n content: section.content,\n chunkIndex: chunks.length,\n totalChunks: 0,\n startOffset: section.startOffset,\n endOffset: section.startOffset + section.content.length,\n sectionHeader: section.header || undefined,\n });\n } else {\n // Split large section using sliding window\n const sectionChunks = this.chunkSlidingWindow(section.content);\n for (const subChunk of sectionChunks) {\n chunks.push({\n ...subChunk,\n chunkIndex: chunks.length,\n startOffset: section.startOffset + subChunk.startOffset,\n endOffset: section.startOffset + subChunk.endOffset,\n sectionHeader: section.header || undefined,\n });\n }\n }\n }\n\n // Set totalChunks\n for (const chunk of chunks) {\n chunk.totalChunks = chunks.length;\n }\n\n return chunks;\n }\n\n /**\n * Semantic chunking for TypeScript/JavaScript code files.\n * Splits on top-level declarations to keep functions/classes together.\n */\n private chunkCode(text: string): Chunk[] {\n // Match top-level declarations with optional JSDoc/comments before them\n const declarationRegex =\n /^(?:\\/\\*\\*[\\s\\S]*?\\*\\/\\s*)?(?:export\\s+)?(?:default\\s+)?(?:async\\s+)?(?:function|class|interface|type|const|let|var|enum)\\s+(\\w+)/gm;\n const declarations: Array<{ startOffset: number; endOffset: number; name?: string }> = [];\n\n let match: RegExpExecArray | null;\n while ((match = declarationRegex.exec(text)) !== null) {\n const name = match[1];\n const decl: { startOffset: number; endOffset: number; name?: string } = {\n startOffset: match.index,\n endOffset: match.index,\n };\n if (name !== undefined) {\n decl.name = name;\n }\n declarations.push(decl);\n }\n\n // If no declarations found, use sliding window\n if (declarations.length === 0) {\n return this.chunkSlidingWindow(text);\n }\n\n // Find end of each declaration using brace-aware boundary detection\n for (let i = 0; i < declarations.length; i++) {\n const currentDecl = declarations[i];\n const nextDecl = declarations[i + 1];\n if (currentDecl === undefined) continue;\n\n // For declarations that likely have braces (functions, classes, enums)\n // use smart boundary detection\n const declText = text.slice(currentDecl.startOffset);\n if (\n /^(?:\\/\\*\\*[\\s\\S]*?\\*\\/\\s*)?(?:export\\s+)?(?:async\\s+)?(?:function|class|enum)\\s+/m.test(\n declText\n )\n ) {\n const boundary = this.findDeclarationEnd(declText);\n if (boundary > 0) {\n currentDecl.endOffset = currentDecl.startOffset + boundary;\n } else {\n // Fall back to next declaration or EOF\n currentDecl.endOffset = nextDecl !== undefined ? nextDecl.startOffset : text.length;\n }\n } else {\n // For other declarations (interface, type, const, let, var), use next declaration or EOF\n currentDecl.endOffset = nextDecl !== undefined ? 
nextDecl.startOffset : text.length;\n }\n }\n\n const chunks: Chunk[] = [];\n\n for (const decl of declarations) {\n const content = text.slice(decl.startOffset, decl.endOffset).trim();\n\n if (content.length <= this.chunkSize) {\n // Declaration fits in one chunk\n chunks.push({\n content,\n chunkIndex: chunks.length,\n totalChunks: 0,\n startOffset: decl.startOffset,\n endOffset: decl.endOffset,\n functionName: decl.name,\n });\n } else {\n // Split large declaration with sliding window\n const declChunks = this.chunkSlidingWindow(content);\n for (const subChunk of declChunks) {\n chunks.push({\n ...subChunk,\n chunkIndex: chunks.length,\n startOffset: decl.startOffset + subChunk.startOffset,\n endOffset: decl.startOffset + subChunk.endOffset,\n functionName: decl.name,\n });\n }\n }\n }\n\n // Set totalChunks\n for (const chunk of chunks) {\n chunk.totalChunks = chunks.length;\n }\n\n return chunks.length > 0 ? chunks : this.chunkSlidingWindow(text);\n }\n\n /**\n * Find the end of a code declaration by counting braces while ignoring\n * braces inside strings and comments.\n * Returns the offset where the declaration ends, or -1 if not found.\n */\n private findDeclarationEnd(text: string): number {\n let braceCount = 0;\n let inString = false;\n let inSingleLineComment = false;\n let inMultiLineComment = false;\n let stringChar = '';\n let i = 0;\n let foundFirstBrace = false;\n\n // Find the first opening brace\n while (i < text.length) {\n const char = text[i];\n const nextChar = i + 1 < text.length ? text[i + 1] : '';\n\n // Handle comments\n if (!inString && !inMultiLineComment && char === '/' && nextChar === '/') {\n inSingleLineComment = true;\n i += 2;\n continue;\n }\n\n if (!inString && !inSingleLineComment && char === '/' && nextChar === '*') {\n inMultiLineComment = true;\n i += 2;\n continue;\n }\n\n if (inMultiLineComment && char === '*' && nextChar === '/') {\n inMultiLineComment = false;\n i += 2;\n continue;\n }\n\n if (inSingleLineComment && char === '\\n') {\n inSingleLineComment = false;\n i++;\n continue;\n }\n\n // Skip if in comment\n if (inSingleLineComment || inMultiLineComment) {\n i++;\n continue;\n }\n\n // Handle strings\n if (!inString && (char === '\"' || char === \"'\" || char === '`')) {\n inString = true;\n stringChar = char;\n i++;\n continue;\n }\n\n if (inString && char === '\\\\') {\n // Skip escaped character\n i += 2;\n continue;\n }\n\n if (inString && char === stringChar) {\n inString = false;\n stringChar = '';\n i++;\n continue;\n }\n\n // Skip if in string\n if (inString) {\n i++;\n continue;\n }\n\n // Count braces\n if (char === '{') {\n braceCount++;\n foundFirstBrace = true;\n } else if (char === '}') {\n braceCount--;\n if (foundFirstBrace && braceCount === 0) {\n // Found the closing brace\n return i + 1;\n }\n }\n\n i++;\n }\n\n // If we didn't find a complete declaration, return -1\n return -1;\n }\n\n /**\n * Traditional sliding window chunking for non-Markdown content.\n */\n private chunkSlidingWindow(text: string): Chunk[] {\n if (text.length <= this.chunkSize) {\n return [\n {\n content: text,\n chunkIndex: 0,\n totalChunks: 1,\n startOffset: 0,\n endOffset: text.length,\n },\n ];\n }\n\n const chunks: Chunk[] = [];\n const step = this.chunkSize - this.chunkOverlap;\n let start = 0;\n\n while (start < text.length) {\n const end = Math.min(start + this.chunkSize, text.length);\n chunks.push({\n content: text.slice(start, end),\n chunkIndex: chunks.length,\n totalChunks: 0,\n startOffset: start,\n endOffset: end,\n });\n start += 
step;\n if (end === text.length) break;\n }\n\n // Set totalChunks\n for (const chunk of chunks) {\n chunk.totalChunks = chunks.length;\n }\n\n return chunks;\n }\n}\n","import { createHash } from 'node:crypto';\nimport { readFile, stat } from 'node:fs/promises';\nimport type { TypedStoreManifest, TypedFileState, DriftResult } from '../types/manifest.js';\n\n/**\n * Current state of a file on disk.\n * Used for comparison against manifest.\n */\nexport interface CurrentFileState {\n path: string;\n mtime: number;\n size: number;\n}\n\n/**\n * Service for detecting file changes between disk state and manifest.\n *\n * Uses two-phase detection for efficiency:\n * - Phase 1 (fast): Compare mtime and size\n * - Phase 2 (deep): Compute hash for files that changed in phase 1\n *\n * This approach minimizes disk I/O by avoiding hash computation for unchanged files.\n */\nexport class DriftService {\n /**\n * Detect changes between current files and manifest.\n *\n * @param manifest - The stored manifest from last index\n * @param currentFiles - Current files on disk with mtime/size\n * @returns Classification of files into added, modified, deleted, unchanged\n */\n async detectChanges(\n manifest: TypedStoreManifest,\n currentFiles: CurrentFileState[]\n ): Promise<DriftResult> {\n const result: DriftResult = {\n added: [],\n modified: [],\n deleted: [],\n unchanged: [],\n };\n\n // Build a set of current file paths for quick lookup\n const currentPathSet = new Set(currentFiles.map((f) => f.path));\n const manifestPaths = new Set(Object.keys(manifest.files));\n\n // Find deleted files (in manifest but not on disk)\n for (const path of manifestPaths) {\n if (!currentPathSet.has(path)) {\n result.deleted.push(path);\n }\n }\n\n // Process current files\n const potentiallyModified: CurrentFileState[] = [];\n\n for (const file of currentFiles) {\n const manifestState = manifest.files[file.path];\n\n if (manifestState === undefined) {\n // New file (not in manifest)\n result.added.push(file.path);\n } else {\n // Phase 1: Fast check - compare mtime and size\n if (file.mtime === manifestState.mtime && file.size === manifestState.size) {\n // Same mtime and size - assume unchanged\n result.unchanged.push(file.path);\n } else {\n // mtime or size changed - need phase 2 check\n potentiallyModified.push(file);\n }\n }\n }\n\n // Phase 2: Deep check - compute hash for potentially modified files\n for (const file of potentiallyModified) {\n const manifestState = manifest.files[file.path];\n if (manifestState === undefined) {\n // Should not happen, but handle gracefully\n result.added.push(file.path);\n continue;\n }\n\n const currentHash = await this.computeFileHash(file.path);\n\n if (currentHash === manifestState.hash) {\n // Hash matches - file content unchanged (only metadata changed)\n result.unchanged.push(file.path);\n } else {\n // Hash differs - file actually modified\n result.modified.push(file.path);\n }\n }\n\n return result;\n }\n\n /**\n * Get the current state of a file on disk.\n */\n async getFileState(path: string): Promise<CurrentFileState> {\n const stats = await stat(path);\n return {\n path,\n mtime: stats.mtimeMs,\n size: stats.size,\n };\n }\n\n /**\n * Compute MD5 hash of a file.\n */\n async computeFileHash(path: string): Promise<string> {\n const content = await readFile(path);\n return createHash('md5').update(content).digest('hex');\n }\n\n /**\n * Create a file state entry for the manifest after indexing.\n *\n * @param path - File path\n * @param documentIds - Document IDs 
created from this file\n * @returns File state for manifest\n */\n async createFileState(\n path: string,\n documentIds: string[]\n ): Promise<{ state: TypedFileState; hash: string }> {\n const stats = await stat(path);\n const content = await readFile(path);\n const hash = createHash('md5').update(content).digest('hex');\n\n // Import createDocumentId dynamically to avoid circular deps\n const { createDocumentId } = await import('../types/brands.js');\n\n return {\n state: {\n mtime: stats.mtimeMs,\n size: stats.size,\n hash,\n documentIds: documentIds.map((id) => createDocumentId(id)),\n },\n hash,\n };\n }\n}\n","import { readFile, access, mkdir } from 'node:fs/promises';\nimport { join } from 'node:path';\nimport { createDocumentId } from '../types/brands.js';\nimport { StoreManifestSchema, createEmptyManifest } from '../types/manifest.js';\nimport { atomicWriteFile } from '../utils/atomic-write.js';\nimport type { StoreId } from '../types/brands.js';\nimport type { TypedStoreManifest, TypedFileState } from '../types/manifest.js';\n\n/**\n * Service for managing store manifests.\n *\n * Manifests track the state of indexed files to enable incremental re-indexing.\n * They are stored in the data directory under manifests/{storeId}.manifest.json.\n */\nexport class ManifestService {\n private readonly manifestsDir: string;\n\n constructor(dataDir: string) {\n this.manifestsDir = join(dataDir, 'manifests');\n }\n\n /**\n * Initialize the manifests directory.\n */\n async initialize(): Promise<void> {\n await mkdir(this.manifestsDir, { recursive: true });\n }\n\n /**\n * Get the file path for a store's manifest.\n */\n getManifestPath(storeId: StoreId): string {\n return join(this.manifestsDir, `${storeId}.manifest.json`);\n }\n\n /**\n * Load a store's manifest.\n * Returns an empty manifest if one doesn't exist.\n * Throws on parse/validation errors (fail fast).\n */\n async load(storeId: StoreId): Promise<TypedStoreManifest> {\n const manifestPath = this.getManifestPath(storeId);\n\n const exists = await this.fileExists(manifestPath);\n if (!exists) {\n return createEmptyManifest(storeId);\n }\n\n const content = await readFile(manifestPath, 'utf-8');\n let parsed: unknown;\n try {\n parsed = JSON.parse(content);\n } catch (error) {\n throw new Error(\n `Failed to parse manifest at ${manifestPath}: ${\n error instanceof Error ? 
error.message : String(error)\n }`\n );\n }\n\n const result = StoreManifestSchema.safeParse(parsed);\n if (!result.success) {\n throw new Error(`Invalid manifest at ${manifestPath}: ${result.error.message}`);\n }\n\n // Convert to typed manifest with branded types\n return this.toTypedManifest(result.data, storeId);\n }\n\n /**\n * Save a store's manifest atomically.\n */\n async save(manifest: TypedStoreManifest): Promise<void> {\n const manifestPath = this.getManifestPath(manifest.storeId);\n\n // Update indexedAt timestamp\n const toSave = {\n ...manifest,\n indexedAt: new Date().toISOString(),\n };\n\n await atomicWriteFile(manifestPath, JSON.stringify(toSave, null, 2));\n }\n\n /**\n * Delete a store's manifest.\n * Called when a store is deleted or during full re-index.\n */\n async delete(storeId: StoreId): Promise<void> {\n const manifestPath = this.getManifestPath(storeId);\n const { unlink } = await import('node:fs/promises');\n\n const exists = await this.fileExists(manifestPath);\n if (exists) {\n await unlink(manifestPath);\n }\n }\n\n /**\n * Check if a file exists.\n */\n private async fileExists(path: string): Promise<boolean> {\n try {\n await access(path);\n return true;\n } catch {\n return false;\n }\n }\n\n /**\n * Convert a parsed manifest to a typed manifest with branded types.\n */\n private toTypedManifest(\n data: { version: 1; storeId: string; indexedAt: string; files: Record<string, FileStateRaw> },\n storeId: StoreId\n ): TypedStoreManifest {\n const files: Record<string, TypedFileState> = {};\n\n for (const [path, state] of Object.entries(data.files)) {\n files[path] = {\n mtime: state.mtime,\n size: state.size,\n hash: state.hash,\n documentIds: state.documentIds.map((id) => createDocumentId(id)),\n };\n }\n\n return {\n version: 1,\n storeId,\n indexedAt: data.indexedAt,\n files,\n };\n }\n}\n\n/** Raw file state from parsed JSON (matches FileStateSchema) */\ninterface FileStateRaw {\n mtime: number;\n size: number;\n hash: string;\n documentIds: string[];\n}\n","import { z } from 'zod';\nimport type { StoreId, DocumentId } from './brands.js';\n\n/**\n * Manifest types for tracking indexed file state.\n *\n * The manifest enables incremental indexing by tracking:\n * - File metadata (mtime, size) for fast change detection\n * - Content hash for deep verification\n * - Document IDs for cleanup on file changes\n */\n\n// ============================================================================\n// File State Schema\n// ============================================================================\n\n/**\n * State of a single indexed file.\n * Used for change detection in two phases:\n * - Phase 1 (fast): mtime + size comparison\n * - Phase 2 (deep): hash comparison for files that passed phase 1\n */\nexport const FileStateSchema = z.object({\n /** File modification time in milliseconds since epoch */\n mtime: z.number(),\n /** File size in bytes */\n size: z.number(),\n /** MD5 hash of file content */\n hash: z.string(),\n /** Document IDs created from this file (for cleanup) */\n documentIds: z.array(z.string()),\n});\n\nexport type FileState = z.infer<typeof FileStateSchema>;\n\n// ============================================================================\n// Store Manifest Schema\n// ============================================================================\n\n/**\n * Manifest for a single store.\n * Tracks the state of all indexed files to enable incremental re-indexing.\n */\nexport const StoreManifestSchema = z.object({\n /** Schema version for 
future migrations */\n version: z.literal(1),\n /** Store ID this manifest belongs to */\n storeId: z.string(),\n /** When the manifest was last updated */\n indexedAt: z.string(),\n /** Map of file paths to their state */\n files: z.record(z.string(), FileStateSchema),\n});\n\nexport type StoreManifest = z.infer<typeof StoreManifestSchema>;\n\n// ============================================================================\n// Branded Type Wrappers\n// ============================================================================\n\n/**\n * Type-safe manifest with branded StoreId.\n * Use this in service code for proper type safety.\n */\nexport interface TypedStoreManifest {\n version: 1;\n storeId: StoreId;\n indexedAt: string;\n files: Record<string, TypedFileState>;\n}\n\n/**\n * Type-safe file state with branded DocumentIds.\n */\nexport interface TypedFileState {\n mtime: number;\n size: number;\n hash: string;\n documentIds: DocumentId[];\n}\n\n// ============================================================================\n// Change Detection Types\n// ============================================================================\n\n/**\n * Result of comparing current files against manifest.\n */\nexport interface DriftResult {\n /** Files that exist on disk but not in manifest */\n added: string[];\n /** Files that exist in both but have changed */\n modified: string[];\n /** Files that exist in manifest but not on disk */\n deleted: string[];\n /** Files that are unchanged */\n unchanged: string[];\n}\n\n// ============================================================================\n// Default Manifest\n// ============================================================================\n\n/**\n * Create an empty manifest for a store.\n */\nexport function createEmptyManifest(storeId: StoreId): TypedStoreManifest {\n return {\n version: 1,\n storeId,\n indexedAt: new Date().toISOString(),\n files: {},\n };\n}\n","export interface CodeUnit {\n type: 'function' | 'class' | 'interface' | 'type' | 'const' | 'documentation' | 'example';\n name: string;\n signature: string;\n fullContent: string;\n startLine: number;\n endLine: number;\n language: string;\n}\n\nexport class CodeUnitService {\n extractCodeUnit(code: string, symbolName: string, language: string): CodeUnit | undefined {\n const lines = code.split('\\n');\n\n // Find the line containing the symbol\n let startLine = -1;\n let type: CodeUnit['type'] = 'function';\n\n for (let i = 0; i < lines.length; i++) {\n const line = lines[i] ?? 
'';\n\n if (line.includes(`function ${symbolName}`)) {\n startLine = i + 1; // 1-indexed\n type = 'function';\n break;\n }\n\n if (line.includes(`class ${symbolName}`)) {\n startLine = i + 1;\n type = 'class';\n break;\n }\n\n // Check for interface declarations\n if (line.match(new RegExp(`interface\\\\s+${symbolName}(?:\\\\s|{|<)`))) {\n startLine = i + 1;\n type = 'interface';\n break;\n }\n\n // Check for type declarations\n if (line.match(new RegExp(`type\\\\s+${symbolName}(?:\\\\s|=|<)`))) {\n startLine = i + 1;\n type = 'type';\n break;\n }\n\n // Check for arrow functions: const/let/var name = ...\n if (line.match(new RegExp(`(?:const|let|var)\\\\s+${symbolName}\\\\s*=`))) {\n startLine = i + 1;\n type = 'const';\n break;\n }\n }\n\n if (startLine === -1) return undefined;\n\n // Find end line using state machine that tracks strings and comments\n let endLine = startLine;\n let braceCount = 0;\n let foundFirstBrace = false;\n\n // For type aliases without braces (e.g., \"type UserId = string;\"), find semicolon\n if (type === 'type') {\n const firstLine = lines[startLine - 1] ?? '';\n if (!firstLine.includes('{') && firstLine.includes(';')) {\n // Single-line type alias\n endLine = startLine;\n const fullContent = firstLine;\n const signature = this.extractSignature(firstLine, symbolName, type);\n return {\n type,\n name: symbolName,\n signature,\n fullContent,\n startLine,\n endLine,\n language,\n };\n }\n }\n\n // State machine for tracking context\n let inSingleQuote = false;\n let inDoubleQuote = false;\n let inTemplateLiteral = false;\n let inMultiLineComment = false;\n\n for (let i = startLine - 1; i < lines.length; i++) {\n const line = lines[i] ?? '';\n let inSingleLineComment = false;\n\n for (let j = 0; j < line.length; j++) {\n const char = line[j];\n const prevChar = j > 0 ? line[j - 1] : '';\n const nextChar = j < line.length - 1 ? line[j + 1] : '';\n\n // Skip escaped characters within strings\n if (prevChar === '\\\\' && (inSingleQuote || inDoubleQuote || inTemplateLiteral)) {\n continue;\n }\n\n // Inside multi-line comment - only look for end marker\n if (inMultiLineComment) {\n if (char === '*' && nextChar === '/') {\n inMultiLineComment = false;\n j++; // Skip the /\n }\n continue;\n }\n\n // Inside single-line comment - skip rest of line\n if (inSingleLineComment) {\n continue;\n }\n\n // Inside a string - only look for closing delimiter\n if (inSingleQuote) {\n if (char === \"'\") inSingleQuote = false;\n continue;\n }\n if (inDoubleQuote) {\n if (char === '\"') inDoubleQuote = false;\n continue;\n }\n if (inTemplateLiteral) {\n if (char === '`') inTemplateLiteral = false;\n continue;\n }\n\n // Not inside any special context - check for context starters\n if (char === '/' && nextChar === '*') {\n inMultiLineComment = true;\n j++; // Skip the *\n continue;\n }\n if (char === '/' && nextChar === '/') {\n inSingleLineComment = true;\n continue;\n }\n if (char === \"'\") {\n inSingleQuote = true;\n continue;\n }\n if (char === '\"') {\n inDoubleQuote = true;\n continue;\n }\n if (char === '`') {\n inTemplateLiteral = true;\n continue;\n }\n\n // Count braces (we're not inside any string or comment)\n if (char === '{') {\n braceCount++;\n foundFirstBrace = true;\n }\n if (char === '}') braceCount--;\n }\n\n if (foundFirstBrace && braceCount === 0) {\n endLine = i + 1;\n break;\n }\n }\n\n const fullContent = lines.slice(startLine - 1, endLine).join('\\n');\n\n // Extract signature (first line, cleaned)\n const firstLine = lines[startLine - 1] ?? 
'';\n const signature = this.extractSignature(firstLine, symbolName, type);\n\n return {\n type,\n name: symbolName,\n signature,\n fullContent,\n startLine,\n endLine,\n language,\n };\n }\n\n private extractSignature(line: string, name: string, type: string): string {\n // Remove 'export', 'async', trim whitespace\n const sig = line\n .replace(/^\\s*export\\s+/, '')\n .replace(/^\\s*async\\s+/, '')\n .trim();\n\n if (type === 'function') {\n // Extract just \"functionName(params): returnType\"\n // Supports: simple types, generics (Promise<T>), arrays (T[]), unions (T | null)\n const match = sig.match(/function\\s+(\\w+\\([^)]*\\):\\s*[\\w<>[\\],\\s|]+)/);\n if (match?.[1] !== undefined && match[1].length > 0) return match[1].trim();\n }\n\n if (type === 'class') {\n return `class ${name}`;\n }\n\n if (type === 'interface') {\n return `interface ${name}`;\n }\n\n if (type === 'type') {\n // For type aliases, include generics if present\n const typeMatch = sig.match(new RegExp(`type\\\\s+(${name}(?:<[^>]+>)?)\\\\s*=`));\n if (typeMatch?.[1] !== undefined && typeMatch[1].length > 0) {\n return `type ${typeMatch[1]}`;\n }\n return `type ${name}`;\n }\n\n if (type === 'const') {\n // For arrow functions, extract the variable declaration part\n // Example: const myFunc = (param: string): void => ...\n // Returns: const myFunc = (param: string): void\n const arrowMatch = sig.match(\n new RegExp(\n `((?:const|let|var)\\\\s+${name}\\\\s*=\\\\s*(?:async\\\\s+)?\\\\([^)]*\\\\)(?::\\\\s*[^=]+)?)`\n )\n );\n const matchedSig = arrowMatch?.[1];\n if (matchedSig !== undefined && matchedSig !== '') return matchedSig.trim();\n\n // Fallback for simple arrow functions without params\n return `const ${name}`;\n }\n\n return sig;\n }\n}\n","import { CodeUnitService } from './code-unit.service.js';\nimport { createLogger } from '../logging/index.js';\nimport type { CodeGraphService } from './code-graph.service.js';\nimport type { CodeGraph } from '../analysis/code-graph.js';\nimport type { EmbeddingEngine } from '../db/embeddings.js';\nimport type { LanceStore } from '../db/lance.js';\nimport type { StoreId } from '../types/brands.js';\nimport type { SearchConfig } from '../types/config.js';\nimport type {\n SearchQuery,\n SearchResponse,\n SearchResult,\n SearchConfidence,\n DetailLevel,\n CodeUnit,\n SearchIntent,\n} from '../types/search.js';\n\nconst logger = createLogger('search-service');\n\n/**\n * Query intent classification for context-aware ranking.\n * Different intents prioritize different content types.\n */\nexport type QueryIntent = 'how-to' | 'implementation' | 'conceptual' | 'comparison' | 'debugging';\n\n/**\n * Classified intent with confidence score for multi-intent queries.\n */\nexport interface ClassifiedIntent {\n intent: QueryIntent;\n confidence: number;\n}\n\n/**\n * Intent-based file type multipliers - CONSERVATIVE version.\n * Applied on top of base file-type boosts.\n * Lessons learned: Too-aggressive penalties hurt when corpus lacks ideal content.\n * These values provide gentle guidance rather than dramatic reranking.\n */\nconst INTENT_FILE_BOOSTS: Record<QueryIntent, Record<string, number>> = {\n 'how-to': {\n 'documentation-primary': 1.3, // Strong boost for docs\n documentation: 1.2,\n example: 1.5, // Examples are ideal for \"how to\"\n source: 0.85, // Moderate penalty - source might still have good content\n 'source-internal': 0.7, // Stronger penalty - internal code less useful\n test: 0.8,\n config: 0.7,\n changelog: 0.6, // Changelogs rarely answer \"how to\" 
questions\n other: 0.9,\n },\n implementation: {\n 'documentation-primary': 0.95,\n documentation: 1.0,\n example: 1.0,\n source: 1.1, // Slight boost for source code\n 'source-internal': 1.05, // Internal code can be relevant\n test: 1.0,\n config: 0.95,\n changelog: 0.8, // Might reference implementation changes\n other: 1.0,\n },\n conceptual: {\n 'documentation-primary': 1.1,\n documentation: 1.05,\n example: 1.0,\n source: 0.95,\n 'source-internal': 0.9,\n test: 0.9,\n config: 0.85,\n changelog: 0.7, // Sometimes explains concepts behind changes\n other: 0.95,\n },\n comparison: {\n 'documentation-primary': 1.15,\n documentation: 1.1,\n example: 1.05,\n source: 0.9,\n 'source-internal': 0.85,\n test: 0.9,\n config: 0.85,\n changelog: 0.9, // Version comparisons can be useful\n other: 0.95,\n },\n debugging: {\n 'documentation-primary': 1.0,\n documentation: 1.0,\n example: 1.05,\n source: 1.0, // Source code helps with debugging\n 'source-internal': 0.95,\n test: 1.05, // Tests can show expected behavior\n config: 0.9,\n changelog: 1.1, // Often contains bug fixes and known issues\n other: 1.0,\n },\n};\n\n// Known frameworks/technologies for context-aware boosting\nconst FRAMEWORK_PATTERNS: Array<{ pattern: RegExp; terms: string[] }> = [\n { pattern: /\\bexpress\\b/i, terms: ['express', 'expressjs', 'express.js'] },\n { pattern: /\\bhono\\b/i, terms: ['hono'] },\n { pattern: /\\bzod\\b/i, terms: ['zod'] },\n { pattern: /\\breact\\b/i, terms: ['react', 'reactjs', 'react.js'] },\n { pattern: /\\bvue\\b/i, terms: ['vue', 'vuejs', 'vue.js', 'vue3'] },\n { pattern: /\\bnode\\b/i, terms: ['node', 'nodejs', 'node.js'] },\n { pattern: /\\btypescript\\b/i, terms: ['typescript', 'ts'] },\n { pattern: /\\bjwt\\b/i, terms: ['jwt', 'jsonwebtoken', 'json-web-token'] },\n];\n\n// Pattern definitions for intent classification\nconst HOW_TO_PATTERNS = [\n /how (do|can|should|would) (i|you|we)/i,\n /how to\\b/i,\n /what('s| is) the (best |right |correct )?(way|approach) to/i,\n /i (need|want|have) to/i,\n /show me how/i,\n /\\bwhat's the syntax\\b/i,\n /\\bhow do i (use|create|make|set up|configure|implement|add|get)\\b/i,\n /\\bi'm (trying|building|creating|making)\\b/i,\n];\n\nconst IMPLEMENTATION_PATTERNS = [\n /how (does|is) .* (implemented|work internally)/i,\n /\\binternal(ly)?\\b/i,\n /\\bsource code\\b/i,\n /\\bunder the hood\\b/i,\n /\\bimplementation (of|details?)\\b/i,\n];\n\nconst COMPARISON_PATTERNS = [\n /\\b(vs\\.?|versus)\\b/i,\n /\\bdifference(s)? 
between\\b/i,\n /\\bcompare\\b/i,\n /\\bshould (i|we) use .* or\\b/i,\n /\\bwhat's the difference\\b/i,\n /\\bwhich (one|is better)\\b/i,\n /\\bwhen (should|to) use\\b/i,\n];\n\nconst DEBUGGING_PATTERNS = [\n /\\b(error|bug|issue|problem|crash|fail|broken|wrong)\\b/i,\n /\\bdoesn't (work|compile|run)\\b/i,\n /\\bisn't (working|updating|rendering)\\b/i,\n /\\bwhy (is|does|doesn't|isn't)\\b/i,\n /\\bwhat('s| is) (wrong|happening|going on)\\b/i,\n /\\bwhat am i doing wrong\\b/i,\n /\\bnot (working|updating|showing)\\b/i,\n /\\bhow do i (fix|debug|solve|resolve)\\b/i,\n];\n\nconst CONCEPTUAL_PATTERNS = [\n /\\bwhat (is|are)\\b/i,\n /\\bexplain\\b/i,\n /\\bwhat does .* (mean|do)\\b/i,\n /\\bhow does .* work\\b/i,\n /\\bwhat('s| is) the (purpose|point|idea)\\b/i,\n];\n\n/**\n * Classify query intents with confidence scores.\n * Returns all matching intents, allowing queries to have multiple intents.\n */\nfunction classifyQueryIntents(query: string): ClassifiedIntent[] {\n const q = query.toLowerCase();\n const intents: ClassifiedIntent[] = [];\n\n // Check all pattern groups and add matching intents with confidence\n if (IMPLEMENTATION_PATTERNS.some((p) => p.test(q))) {\n intents.push({ intent: 'implementation', confidence: 0.9 });\n }\n\n if (DEBUGGING_PATTERNS.some((p) => p.test(q))) {\n intents.push({ intent: 'debugging', confidence: 0.85 });\n }\n\n if (COMPARISON_PATTERNS.some((p) => p.test(q))) {\n intents.push({ intent: 'comparison', confidence: 0.8 });\n }\n\n if (HOW_TO_PATTERNS.some((p) => p.test(q))) {\n intents.push({ intent: 'how-to', confidence: 0.75 });\n }\n\n if (CONCEPTUAL_PATTERNS.some((p) => p.test(q))) {\n intents.push({ intent: 'conceptual', confidence: 0.7 });\n }\n\n // If no patterns match, use how-to as the baseline intent\n if (intents.length === 0) {\n intents.push({ intent: 'how-to', confidence: 0.5 });\n }\n\n // Sort by confidence descending\n return intents.sort((a, b) => b.confidence - a.confidence);\n}\n\n/**\n * Get primary intent for logging/display purposes.\n */\nfunction getPrimaryIntent(intents: ClassifiedIntent[]): QueryIntent {\n return intents[0]?.intent ?? 'how-to';\n}\n\n/**\n * Map MCP SearchIntent to internal QueryIntent.\n * This allows users to override auto-classification via the API.\n */\nfunction mapSearchIntentToQueryIntent(intent: SearchIntent): QueryIntent {\n switch (intent) {\n case 'find-pattern':\n case 'find-implementation':\n case 'find-definition':\n return 'implementation';\n case 'find-usage':\n case 'find-documentation':\n return 'how-to';\n }\n}\n\n/**\n * RRF presets for different content types.\n * Web/docs content uses higher k to reduce noise from repetitive structure.\n */\nconst RRF_PRESETS = {\n code: { k: 20, vectorWeight: 0.6, ftsWeight: 0.4 },\n web: { k: 30, vectorWeight: 0.55, ftsWeight: 0.45 },\n} as const;\n\n/**\n * Detect if results are primarily web content (have urls vs file paths).\n */\nfunction detectContentType(results: SearchResult[]): 'web' | 'code' {\n const webCount = results.filter((r) => 'url' in r.metadata).length;\n return webCount > results.length / 2 ? 
'web' : 'code';\n}\n\nexport class SearchService {\n private readonly lanceStore: LanceStore;\n private readonly embeddingEngine: EmbeddingEngine;\n private readonly codeUnitService: CodeUnitService;\n private readonly codeGraphService: CodeGraphService | undefined;\n private readonly graphCache: Map<string, CodeGraph | null>;\n private readonly searchConfig: SearchConfig | undefined;\n private readonly unsubscribeCacheInvalidation: (() => void) | undefined;\n\n constructor(\n lanceStore: LanceStore,\n embeddingEngine: EmbeddingEngine,\n codeGraphService?: CodeGraphService,\n searchConfig?: SearchConfig\n ) {\n this.lanceStore = lanceStore;\n this.embeddingEngine = embeddingEngine;\n this.codeUnitService = new CodeUnitService();\n this.codeGraphService = codeGraphService;\n this.graphCache = new Map();\n this.searchConfig = searchConfig;\n\n // Subscribe to cache invalidation events from CodeGraphService\n if (codeGraphService) {\n this.unsubscribeCacheInvalidation = codeGraphService.onCacheInvalidation((event) => {\n // Clear our cached graph when it's updated or deleted\n this.graphCache.delete(event.storeId);\n });\n }\n }\n\n /**\n * Clean up resources (unsubscribe from events).\n * Call this when destroying the service.\n */\n cleanup(): void {\n this.unsubscribeCacheInvalidation?.();\n }\n\n /**\n * Load code graph for a store, with caching.\n * Returns null if no graph is available.\n */\n private async loadGraphForStore(storeId: StoreId): Promise<CodeGraph | null> {\n if (!this.codeGraphService) return null;\n\n const cached = this.graphCache.get(storeId);\n if (cached !== undefined) return cached;\n\n const graph = await this.codeGraphService.loadGraph(storeId);\n const result = graph ?? null;\n this.graphCache.set(storeId, result);\n return result;\n }\n\n /**\n * Calculate confidence level based on max raw vector similarity score.\n * Configurable via environment variables, with sensible defaults for CLI usage.\n */\n private calculateConfidence(maxRawScore: number): SearchConfidence {\n const highThreshold = parseFloat(process.env['SEARCH_CONFIDENCE_HIGH'] ?? '0.5');\n const mediumThreshold = parseFloat(process.env['SEARCH_CONFIDENCE_MEDIUM'] ?? '0.3');\n\n if (maxRawScore >= highThreshold) return 'high';\n if (maxRawScore >= mediumThreshold) return 'medium';\n return 'low';\n }\n\n async search(query: SearchQuery): Promise<SearchResponse> {\n const startTime = Date.now();\n const mode = query.mode ?? this.searchConfig?.defaultMode ?? 'hybrid';\n const limit = query.limit ?? this.searchConfig?.defaultLimit ?? 10;\n const stores = query.stores ?? [];\n const detail = query.detail ?? 'minimal';\n\n // Auto-classify intents from query text (used for logging and when user doesn't specify intent)\n const intents = classifyQueryIntents(query.query);\n\n // Use user-provided intent if available, otherwise use auto-classified\n const primaryIntent =\n query.intent !== undefined\n ? 
mapSearchIntentToQueryIntent(query.intent)\n : getPrimaryIntent(intents);\n\n logger.debug(\n {\n query: query.query,\n mode,\n limit,\n stores,\n detail,\n intent: primaryIntent,\n userIntent: query.intent,\n autoClassifiedIntents: intents,\n minRelevance: query.minRelevance,\n },\n 'Search query received'\n );\n\n let allResults: SearchResult[] = [];\n let maxRawScore = 0;\n\n // Fetch more results than needed to allow for deduplication\n const fetchLimit = limit * 3;\n\n if (mode === 'vector') {\n // For vector mode, call vectorSearchRaw once and reuse results\n // This avoids double embedding cost (vectorSearch calls vectorSearchRaw internally)\n const rawResults = await this.vectorSearchRaw(query.query, stores, fetchLimit);\n maxRawScore = rawResults.length > 0 ? (rawResults[0]?.score ?? 0) : 0;\n // Apply same normalization logic as vectorSearch without re-embedding\n allResults = this.normalizeAndFilterScores(rawResults, query.threshold).slice(0, fetchLimit);\n } else if (mode === 'fts') {\n // FTS mode doesn't have vector similarity, so no confidence calculation\n allResults = await this.ftsSearch(query.query, stores, fetchLimit);\n } else {\n // Hybrid: combine vector and FTS with RRF, get maxRawScore for confidence\n const hybridResult = await this.hybridSearchWithMetadata(\n query.query,\n stores,\n fetchLimit,\n query.threshold\n );\n allResults = hybridResult.results;\n maxRawScore = hybridResult.maxRawScore;\n }\n\n // Apply minRelevance filter - if max raw score is below threshold, return empty\n // Skip in FTS mode since there are no vector scores to compare against\n if (query.minRelevance !== undefined) {\n if (mode === 'fts') {\n logger.warn(\n { query: query.query, minRelevance: query.minRelevance },\n 'minRelevance filter ignored in FTS mode (no vector scores available)'\n );\n } else if (maxRawScore < query.minRelevance) {\n const timeMs = Date.now() - startTime;\n logger.info(\n {\n query: query.query,\n mode,\n maxRawScore,\n minRelevance: query.minRelevance,\n timeMs,\n },\n 'Search filtered by minRelevance - no sufficiently relevant results'\n );\n\n return {\n query: query.query,\n mode,\n stores,\n results: [],\n totalResults: 0,\n timeMs,\n confidence: this.calculateConfidence(maxRawScore),\n maxRawScore,\n };\n }\n }\n\n // Deduplicate by source file - keep best chunk per source (considers query relevance)\n const dedupedResults = this.deduplicateBySource(allResults, query.query);\n const resultsToEnhance = dedupedResults.slice(0, limit);\n\n // Load code graphs for stores in results (for contextual/full detail levels)\n const graphs = new Map<string, CodeGraph | null>();\n if (detail === 'contextual' || detail === 'full') {\n const storeIds = new Set(resultsToEnhance.map((r) => r.metadata.storeId));\n for (const storeId of storeIds) {\n graphs.set(storeId, await this.loadGraphForStore(storeId));\n }\n }\n\n // Enhance results with progressive context\n const enhancedResults = resultsToEnhance.map((r) => {\n const graph = graphs.get(r.metadata.storeId) ?? null;\n return this.addProgressiveContext(r, query.query, detail, graph);\n });\n\n const timeMs = Date.now() - startTime;\n const confidence = mode !== 'fts' ? this.calculateConfidence(maxRawScore) : undefined;\n\n logger.info(\n {\n query: query.query,\n mode,\n resultCount: enhancedResults.length,\n dedupedFrom: allResults.length,\n intents: intents.map((i) => `${i.intent}(${i.confidence.toFixed(2)})`),\n maxRawScore: mode !== 'fts' ? 
maxRawScore : undefined,\n confidence,\n timeMs,\n },\n 'Search complete'\n );\n\n return {\n query: query.query,\n mode,\n stores,\n results: enhancedResults,\n totalResults: enhancedResults.length,\n timeMs,\n confidence,\n maxRawScore: mode !== 'fts' ? maxRawScore : undefined,\n };\n }\n\n /**\n * Deduplicate results by source file path.\n * Keeps the best chunk for each unique source, considering both score and query relevance.\n */\n private deduplicateBySource(results: SearchResult[], query: string): SearchResult[] {\n const bySource = new Map<string, SearchResult>();\n const queryTerms = query\n .toLowerCase()\n .split(/\\s+/)\n .filter((t) => t.length > 2);\n\n for (const result of results) {\n // Use storeId + file path as the source key to distinguish same paths across stores\n // (or url for web content, or id as last resort)\n const storeId = result.metadata.storeId;\n const source = result.metadata.path ?? result.metadata.url ?? result.id;\n const sourceKey = `${storeId}:${source}`;\n\n const existing = bySource.get(sourceKey);\n if (!existing) {\n bySource.set(sourceKey, result);\n } else {\n // Score-weighted relevance: accounts for fileType/framework boosts\n const existingTermCount = this.countQueryTerms(existing.content, queryTerms);\n const newTermCount = this.countQueryTerms(result.content, queryTerms);\n\n // Weight term count by score to account for ranking boosts\n const existingRelevance = existingTermCount * existing.score;\n const newRelevance = newTermCount * result.score;\n\n if (newRelevance > existingRelevance) {\n bySource.set(sourceKey, result);\n }\n }\n }\n\n // Return results sorted by score\n return Array.from(bySource.values()).sort((a, b) => b.score - a.score);\n }\n\n /**\n * Count how many query terms appear in the content.\n */\n private countQueryTerms(content: string, queryTerms: string[]): number {\n const lowerContent = content.toLowerCase();\n return queryTerms.filter((term) => lowerContent.includes(term)).length;\n }\n\n /**\n * Normalize scores to 0-1 range and optionally filter by threshold.\n * This ensures threshold values match displayed scores (UX consistency).\n *\n * Edge case handling:\n * - If there's only 1 result or all results have the same score, normalization\n * would make them all 1.0. In this case, we keep the raw scores to allow\n * threshold filtering to work meaningfully on absolute quality.\n */\n private normalizeAndFilterScores(results: SearchResult[], threshold?: number): SearchResult[] {\n if (results.length === 0) return [];\n\n // Sort by score descending\n const sorted = [...results].sort((a, b) => b.score - a.score);\n\n // Get score range for normalization\n const first = sorted[0];\n const last = sorted[sorted.length - 1];\n if (first === undefined || last === undefined) return [];\n\n const maxScore = first.score;\n const minScore = last.score;\n const range = maxScore - minScore;\n\n // Only normalize when there's meaningful score variation\n // If all scores are the same (range = 0), keep raw scores for threshold filtering\n const normalized =\n range > 0\n ? 
sorted.map((r) => ({\n ...r,\n score: Math.round(((r.score - minScore) / range) * 1000000) / 1000000,\n }))\n : sorted; // Keep raw scores when no variation (allows threshold to filter by quality)\n\n // Apply threshold filter on scores\n if (threshold !== undefined) {\n return normalized.filter((r) => r.score >= threshold);\n }\n\n return normalized;\n }\n\n /**\n * Fetch raw vector search results without normalization.\n * Returns results with raw cosine similarity scores [0-1].\n */\n private async vectorSearchRaw(\n query: string,\n stores: readonly StoreId[],\n limit: number\n ): Promise<SearchResult[]> {\n const queryVector = await this.embeddingEngine.embed(query);\n const results: SearchResult[] = [];\n\n for (const storeId of stores) {\n const hits = await this.lanceStore.search(storeId, queryVector, limit);\n results.push(\n ...hits.map((r) => ({\n id: r.id,\n score: r.score, // Raw cosine similarity (1 - distance)\n content: r.content,\n metadata: r.metadata,\n }))\n );\n }\n\n return results.sort((a, b) => b.score - a.score).slice(0, limit);\n }\n\n private async ftsSearch(\n query: string,\n stores: readonly StoreId[],\n limit: number\n ): Promise<SearchResult[]> {\n const results: SearchResult[] = [];\n\n for (const storeId of stores) {\n try {\n const hits = await this.lanceStore.fullTextSearch(storeId, query, limit);\n results.push(\n ...hits.map((r) => ({\n id: r.id,\n score: r.score,\n content: r.content,\n metadata: r.metadata,\n }))\n );\n } catch {\n // FTS index may not exist for this store - continue with other stores\n // and rely on vector search results. This is expected behavior since\n // FTS indexing is optional and hybrid search works with vector-only.\n }\n }\n\n return results.sort((a, b) => b.score - a.score).slice(0, limit);\n }\n\n /**\n * Internal hybrid search result with additional metadata for confidence calculation.\n */\n private async hybridSearchWithMetadata(\n query: string,\n stores: readonly StoreId[],\n limit: number,\n threshold?: number\n ): Promise<{ results: SearchResult[]; maxRawScore: number }> {\n // Classify query intents for context-aware ranking (supports multiple intents)\n const intents = classifyQueryIntents(query);\n\n // Get raw vector results (unnormalized) to track raw cosine similarity\n // We use these for both raw score tracking and as the basis for normalized vector results\n const rawVectorResults = await this.vectorSearchRaw(query, stores, limit * 2);\n\n // Build map of raw vector scores by document ID\n const rawVectorScores = new Map<string, number>();\n rawVectorResults.forEach((r) => {\n rawVectorScores.set(r.id, r.score);\n });\n\n // Track max raw score for confidence calculation\n const maxRawScore = rawVectorResults.length > 0 ? (rawVectorResults[0]?.score ?? 
0) : 0;\n\n // Normalize raw vector results directly (avoids duplicate embedding call)\n // Don't apply threshold here - it's applied to final RRF-normalized scores at the end\n const vectorResults = this.normalizeAndFilterScores(rawVectorResults);\n\n // Get FTS results in parallel (only one call needed now)\n const ftsResults = await this.ftsSearch(query, stores, limit * 2);\n\n // Build rank maps\n const vectorRanks = new Map<string, number>();\n const ftsRanks = new Map<string, number>();\n const allDocs = new Map<string, SearchResult>();\n\n vectorResults.forEach((r, i) => {\n vectorRanks.set(r.id, i + 1);\n allDocs.set(r.id, r);\n });\n\n ftsResults.forEach((r, i) => {\n ftsRanks.set(r.id, i + 1);\n if (!allDocs.has(r.id)) {\n allDocs.set(r.id, r);\n }\n });\n\n // Calculate RRF scores with file-type boosting and preserve ranking metadata\n const rrfScores: Array<{\n id: string;\n score: number;\n result: SearchResult;\n rawVectorScore: number | undefined;\n metadata: {\n vectorRank?: number;\n ftsRank?: number;\n vectorRRF: number;\n ftsRRF: number;\n fileTypeBoost: number;\n frameworkBoost: number;\n urlKeywordBoost: number;\n pathKeywordBoost: number;\n rawVectorScore?: number;\n };\n }> = [];\n\n // Select RRF config based on content type (web vs code)\n const contentType = detectContentType([...allDocs.values()]);\n const { k, vectorWeight, ftsWeight } = RRF_PRESETS[contentType];\n\n for (const [id, result] of allDocs) {\n const vectorRank = vectorRanks.get(id) ?? Infinity;\n const ftsRank = ftsRanks.get(id) ?? Infinity;\n const rawVectorScore = rawVectorScores.get(id);\n\n const vectorRRF = vectorRank !== Infinity ? vectorWeight / (k + vectorRank) : 0;\n const ftsRRF = ftsRank !== Infinity ? ftsWeight / (k + ftsRank) : 0;\n\n // Apply file-type boost (base + multi-intent-adjusted)\n const fileTypeBoost = this.getFileTypeBoost(\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n result.metadata['fileType'] as string | undefined,\n intents\n );\n\n // Apply framework context boost\n const frameworkBoost = this.getFrameworkContextBoost(query, result);\n\n // Apply URL keyword boost (helps \"troubleshooting\" find /troubleshooting pages)\n const urlKeywordBoost = this.getUrlKeywordBoost(query, result);\n\n // Apply path keyword boost (helps \"dispatcher\" find async_dispatcher.py)\n const pathKeywordBoost = this.getPathKeywordBoost(query, result);\n\n const metadata: {\n vectorRank?: number;\n ftsRank?: number;\n vectorRRF: number;\n ftsRRF: number;\n fileTypeBoost: number;\n frameworkBoost: number;\n urlKeywordBoost: number;\n pathKeywordBoost: number;\n rawVectorScore?: number;\n } = {\n vectorRRF,\n ftsRRF,\n fileTypeBoost,\n frameworkBoost,\n urlKeywordBoost,\n pathKeywordBoost,\n };\n\n if (vectorRank !== Infinity) {\n metadata.vectorRank = vectorRank;\n }\n if (ftsRank !== Infinity) {\n metadata.ftsRank = ftsRank;\n }\n if (rawVectorScore !== undefined) {\n metadata.rawVectorScore = rawVectorScore;\n }\n\n rrfScores.push({\n id,\n score:\n (vectorRRF + ftsRRF) *\n fileTypeBoost *\n frameworkBoost *\n urlKeywordBoost *\n pathKeywordBoost,\n result,\n rawVectorScore,\n metadata,\n });\n }\n\n // Sort by RRF score\n const sorted = rrfScores.sort((a, b) => b.score - a.score).slice(0, limit);\n\n // Normalize scores to 0-1 range for better interpretability\n let normalizedResults: SearchResult[];\n\n if (sorted.length > 0) {\n const first = sorted[0];\n const last = sorted[sorted.length - 1];\n if (first === undefined || last === undefined) {\n 
normalizedResults = sorted.map((r) => ({\n ...r.result,\n score: r.score,\n rankingMetadata: r.metadata,\n }));\n } else {\n const maxScore = first.score;\n const minScore = last.score;\n const range = maxScore - minScore;\n\n if (range > 0) {\n // Round to avoid floating point precision issues in threshold comparisons\n normalizedResults = sorted.map((r) => ({\n ...r.result,\n score: Math.round(((r.score - minScore) / range) * 1000000) / 1000000,\n rankingMetadata: r.metadata,\n }));\n } else {\n // All same score - keep raw scores (allows threshold to filter by quality)\n normalizedResults = sorted.map((r) => ({\n ...r.result,\n score: r.score,\n rankingMetadata: r.metadata,\n }));\n }\n }\n } else {\n normalizedResults = [];\n }\n\n // Apply threshold filter on normalized scores (UX consistency)\n if (threshold !== undefined) {\n normalizedResults = normalizedResults.filter((r) => r.score >= threshold);\n }\n\n return { results: normalizedResults, maxRawScore };\n }\n\n async searchAllStores(query: SearchQuery, storeIds: StoreId[]): Promise<SearchResponse> {\n return this.search({\n ...query,\n stores: storeIds,\n });\n }\n\n /**\n * Get a score multiplier based on file type and query intent.\n * Documentation files get a strong boost to surface them higher.\n * Phase 4: Strengthened boosts for better documentation ranking.\n * Phase 1: Intent-based adjustments for context-aware ranking.\n */\n private getFileTypeBoost(fileType: string | undefined, intents: ClassifiedIntent[]): number {\n // Base file-type boosts\n let baseBoost: number;\n switch (fileType) {\n case 'documentation-primary':\n baseBoost = 1.8; // README, guides get very strong boost\n break;\n case 'documentation':\n baseBoost = 1.5; // docs/, tutorials/ get strong boost\n break;\n case 'example':\n baseBoost = 1.4; // examples/, demos/ are highly valuable\n break;\n case 'source':\n baseBoost = 1.0; // Source code baseline\n break;\n case 'source-internal':\n baseBoost = 0.75; // Internal implementation files (not too harsh)\n break;\n case 'test':\n baseBoost = parseFloat(process.env['SEARCH_TEST_FILE_BOOST'] ?? '0.5');\n break;\n case 'config':\n baseBoost = 0.5; // Config files rarely answer questions\n break;\n case 'changelog':\n baseBoost = 0.7; // Changelogs secondary to docs and examples\n break;\n default:\n baseBoost = 1.0;\n }\n\n // Blend intent-based multipliers weighted by confidence\n let weightedMultiplier = 0;\n let totalConfidence = 0;\n\n for (const { intent, confidence } of intents) {\n const intentBoosts = INTENT_FILE_BOOSTS[intent];\n const multiplier = intentBoosts[fileType ?? 'other'] ?? 1.0;\n weightedMultiplier += multiplier * confidence;\n totalConfidence += confidence;\n }\n\n const blendedMultiplier = totalConfidence > 0 ? 
weightedMultiplier / totalConfidence : 1.0;\n const finalBoost = baseBoost * blendedMultiplier;\n\n // Cap test file boost to prevent intent multipliers from overriding the penalty\n if (fileType === 'test') {\n return Math.min(finalBoost, 0.6);\n }\n\n return finalBoost;\n }\n\n /**\n * Get a score multiplier based on URL keyword matching.\n * Boosts results where URL path contains significant query keywords.\n * This helps queries like \"troubleshooting\" rank /troubleshooting pages first.\n */\n private getUrlKeywordBoost(query: string, result: SearchResult): number {\n const url = result.metadata.url;\n if (url === undefined || url === '') return 1.0;\n\n // Extract path segments from URL and normalize\n const urlPath = url.toLowerCase().replace(/[^a-z0-9]+/g, ' ');\n\n // Common stop words to filter from queries\n const stopWords = new Set([\n 'how',\n 'to',\n 'the',\n 'a',\n 'an',\n 'is',\n 'are',\n 'what',\n 'why',\n 'when',\n 'where',\n 'can',\n 'do',\n 'does',\n 'i',\n 'my',\n 'your',\n 'it',\n 'in',\n 'on',\n 'for',\n 'with',\n 'this',\n 'that',\n 'get',\n 'use',\n 'using',\n ]);\n\n // Extract meaningful query terms\n const queryTerms = query\n .toLowerCase()\n .split(/\\s+/)\n .filter((t) => t.length > 2 && !stopWords.has(t));\n\n if (queryTerms.length === 0) return 1.0;\n\n // Count matching terms in URL path\n const matchingTerms = queryTerms.filter((term) => urlPath.includes(term));\n\n if (matchingTerms.length === 0) return 1.0;\n\n // Boost based on proportion of matching terms\n // Single match: ~1.5, all terms match: ~2.0\n const matchRatio = matchingTerms.length / queryTerms.length;\n return 1.0 + 1.0 * matchRatio;\n }\n\n /**\n * Get a score multiplier based on file path keyword matching.\n * Boosts results where file path contains significant query keywords.\n * This helps queries like \"dispatcher\" rank async_dispatcher.py higher.\n */\n private getPathKeywordBoost(query: string, result: SearchResult): number {\n const path = result.metadata.path;\n if (path === undefined || path === '') return 1.0;\n\n // Extract path segments and normalize (split on slashes, dots, underscores, etc.)\n const pathSegments = path.toLowerCase().replace(/[^a-z0-9]+/g, ' ');\n\n // Common stop words to filter from queries\n const stopWords = new Set([\n 'how',\n 'to',\n 'the',\n 'a',\n 'an',\n 'is',\n 'are',\n 'what',\n 'why',\n 'when',\n 'where',\n 'can',\n 'do',\n 'does',\n 'i',\n 'my',\n 'your',\n 'it',\n 'in',\n 'on',\n 'for',\n 'with',\n 'this',\n 'that',\n 'get',\n 'use',\n 'using',\n ]);\n\n // Extract meaningful query terms\n const queryTerms = query\n .toLowerCase()\n .split(/\\s+/)\n .filter((t) => t.length > 2 && !stopWords.has(t));\n\n if (queryTerms.length === 0) return 1.0;\n\n // Count matching terms in file path\n const matchingTerms = queryTerms.filter((term) => pathSegments.includes(term));\n\n if (matchingTerms.length === 0) return 1.0;\n\n // Boost based on proportion of matching terms\n // Single match: ~1.5, all terms match: ~2.0\n const matchRatio = matchingTerms.length / queryTerms.length;\n return 1.0 + 1.0 * matchRatio;\n }\n\n /**\n * Get a score multiplier based on framework context.\n * If query mentions a framework, boost results from that framework's files.\n */\n private getFrameworkContextBoost(query: string, result: SearchResult): number {\n const path = result.metadata.path ?? result.metadata.url ?? 
'';\n const content = result.content.toLowerCase();\n const pathLower = path.toLowerCase();\n\n // Check if query mentions any known frameworks\n for (const { pattern, terms } of FRAMEWORK_PATTERNS) {\n if (pattern.test(query)) {\n // Query mentions this framework - check if result is from that framework\n const resultMatchesFramework = terms.some(\n (term) => pathLower.includes(term) || content.includes(term)\n );\n\n if (resultMatchesFramework) {\n return 1.5; // Strong boost for matching framework\n } else {\n return 0.8; // Moderate penalty for non-matching when framework is specified\n }\n }\n }\n\n return 1.0; // No framework context in query\n }\n\n private addProgressiveContext(\n result: SearchResult,\n query: string,\n detail: DetailLevel,\n graph: CodeGraph | null\n ): SearchResult {\n const enhanced = { ...result };\n\n // Layer 1: Always add summary\n const path = result.metadata.path ?? result.metadata.url ?? 'unknown';\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n const fileType = result.metadata['fileType'] as string | undefined;\n\n // Try to extract code unit\n const codeUnit = this.extractCodeUnitFromResult(result);\n const symbolName = codeUnit?.name ?? this.extractSymbolName(result.content);\n\n enhanced.summary = {\n type: this.inferType(fileType, codeUnit),\n name: symbolName,\n signature: codeUnit?.signature ?? '',\n purpose: this.generatePurpose(result.content, query),\n location: `${path}${codeUnit ? `:${String(codeUnit.startLine)}` : ''}`,\n relevanceReason: this.generateRelevanceReason(result, query),\n };\n\n // Layer 2: Add context if requested\n if (detail === 'contextual' || detail === 'full') {\n // Get usage stats from code graph if available\n const usage = this.getUsageFromGraph(graph, path, symbolName);\n\n enhanced.context = {\n interfaces: this.extractInterfaces(result.content),\n keyImports: this.extractImports(result.content),\n relatedConcepts: this.extractConcepts(result.content, query),\n usage,\n };\n }\n\n // Layer 3: Add full context if requested\n if (detail === 'full') {\n // Get related code from graph if available\n const relatedCode = this.getRelatedCodeFromGraph(graph, path, symbolName);\n\n enhanced.full = {\n completeCode: codeUnit?.fullContent ?? result.content,\n relatedCode,\n documentation: this.extractDocumentation(result.content),\n tests: undefined,\n };\n }\n\n return enhanced;\n }\n\n private extractCodeUnitFromResult(result: SearchResult): CodeUnit | undefined {\n const path = result.metadata.path;\n if (path === undefined || path === '') return undefined;\n\n const ext = path.split('.').pop() ?? '';\n const language =\n ext === 'ts' || ext === 'tsx'\n ? 'typescript'\n : ext === 'js' || ext === 'jsx'\n ? 
'javascript'\n : ext;\n\n // Try to find a symbol name in the content\n const symbolName = this.extractSymbolName(result.content);\n if (symbolName === '') return undefined;\n\n return this.codeUnitService.extractCodeUnit(result.content, symbolName, language);\n }\n\n private extractSymbolName(content: string): string {\n // Extract function or class name\n const funcMatch = content.match(/(?:export\\s+)?(?:async\\s+)?function\\s+(\\w+)/);\n if (funcMatch?.[1] !== undefined && funcMatch[1] !== '') return funcMatch[1];\n\n const classMatch = content.match(/(?:export\\s+)?class\\s+(\\w+)/);\n if (classMatch?.[1] !== undefined && classMatch[1] !== '') return classMatch[1];\n\n const constMatch = content.match(/(?:export\\s+)?const\\s+(\\w+)/);\n if (constMatch?.[1] !== undefined && constMatch[1] !== '') return constMatch[1];\n\n // Fallback: return \"(anonymous)\" for unnamed symbols\n return '(anonymous)';\n }\n\n private inferType(\n fileType: string | undefined,\n codeUnit: CodeUnit | undefined\n ): import('../types/search.js').ResultSummary['type'] {\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n if (codeUnit) return codeUnit.type as import('../types/search.js').ResultSummary['type'];\n if (fileType === 'documentation' || fileType === 'documentation-primary')\n return 'documentation';\n return 'function';\n }\n\n private generatePurpose(content: string, query: string): string {\n // Extract first line of JSDoc comment if present\n const docMatch = content.match(/\\/\\*\\*\\s*\\n\\s*\\*\\s*([^\\n]+)/);\n if (docMatch?.[1] !== undefined && docMatch[1] !== '') return docMatch[1].trim();\n\n const lines = content.split('\\n');\n const queryTerms = query\n .toLowerCase()\n .split(/\\s+/)\n .filter((t) => t.length > 2);\n\n // Helper to check if line is skippable (imports, declarations)\n const shouldSkip = (cleaned: string): boolean => {\n return (\n cleaned.startsWith('import ') ||\n cleaned.startsWith('export ') ||\n cleaned.startsWith('interface ') ||\n cleaned.startsWith('type ')\n );\n };\n\n // Helper to score a line based on query term matches\n const scoreLine = (cleaned: string): number => {\n const lowerLine = cleaned.toLowerCase();\n return queryTerms.filter((term) => lowerLine.includes(term)).length;\n };\n\n // Helper to check if line is meaningful (length, not a comment)\n const isMeaningful = (cleaned: string): boolean => {\n if (cleaned.length === 0) return false;\n if (cleaned.startsWith('//') || cleaned.startsWith('/*')) return false;\n // Accept Markdown headings\n if (cleaned.startsWith('#') && cleaned.length > 3) return true;\n // Accept lines 15+ chars\n return cleaned.length >= 15;\n };\n\n // First pass: find lines with query terms, preferring complete sentences\n let bestLine: string | null = null;\n let bestScore = 0;\n\n for (const line of lines) {\n const cleaned = line.trim();\n if (shouldSkip(cleaned) || !isMeaningful(cleaned)) continue;\n\n let score = scoreLine(cleaned);\n\n // Boost score for complete sentences (end with period, !, ?)\n if (/[.!?]$/.test(cleaned)) {\n score += 0.5;\n }\n\n // Boost score for code examples (contains function calls or assignments)\n // Favor complete patterns: function calls WITH arguments, assignments with values\n if (/\\w+\\([^)]*\\)|=\\s*\\w+\\(|=>/.test(cleaned)) {\n score += 0.6; // Enhanced boost to preserve code examples in snippets\n }\n\n if (score > bestScore) {\n bestScore = score;\n bestLine = cleaned;\n }\n }\n\n // If we found a line with query terms, use it\n if (bestLine !== null && 
bestLine !== '' && bestScore > 0) {\n if (bestLine.length > 150) {\n const firstSentence = bestLine.match(/^[^.!?]+[.!?]/);\n if (firstSentence && firstSentence[0].length >= 20 && firstSentence[0].length <= 150) {\n return firstSentence[0].trim();\n }\n return `${bestLine.substring(0, 147)}...`;\n }\n return bestLine;\n }\n\n // Fallback: first meaningful line (original logic)\n for (const line of lines) {\n const cleaned = line.trim();\n if (shouldSkip(cleaned) || !isMeaningful(cleaned)) continue;\n\n if (cleaned.length > 150) {\n const firstSentence = cleaned.match(/^[^.!?]+[.!?]/);\n if (firstSentence && firstSentence[0].length >= 20 && firstSentence[0].length <= 150) {\n return firstSentence[0].trim();\n }\n return `${cleaned.substring(0, 147)}...`;\n }\n\n return cleaned;\n }\n\n return 'Code related to query';\n }\n\n private generateRelevanceReason(result: SearchResult, query: string): string {\n const queryTerms = query\n .toLowerCase()\n .split(/\\s+/)\n .filter((t) => t.length > 2);\n const contentLower = result.content.toLowerCase();\n\n const matchedTerms = queryTerms.filter((term) => contentLower.includes(term));\n\n if (matchedTerms.length > 0) {\n return `Matches: ${matchedTerms.join(', ')}`;\n }\n\n return 'Semantically similar to query';\n }\n\n private extractInterfaces(content: string): string[] {\n const interfaces: string[] = [];\n const matches = content.matchAll(/interface\\s+(\\w+)/g);\n for (const match of matches) {\n if (match[1] !== undefined && match[1] !== '') interfaces.push(match[1]);\n }\n return interfaces;\n }\n\n private extractImports(content: string): string[] {\n const imports: string[] = [];\n const matches = content.matchAll(/import\\s+.*?from\\s+['\"]([^'\"]+)['\"]/g);\n for (const match of matches) {\n if (match[1] !== undefined && match[1] !== '') imports.push(match[1]);\n }\n return imports.slice(0, 5); // Top 5\n }\n\n private extractConcepts(content: string, _query: string): string[] {\n // TODO: Use _query parameter to prioritize query-related concepts in future enhancement\n\n // Common stopwords to filter out\n const stopwords = new Set([\n 'this',\n 'that',\n 'these',\n 'those',\n 'from',\n 'with',\n 'have',\n 'will',\n 'would',\n 'should',\n 'could',\n 'about',\n 'been',\n 'were',\n 'being',\n 'function',\n 'return',\n 'const',\n 'import',\n 'export',\n 'default',\n 'type',\n 'interface',\n 'class',\n 'extends',\n 'implements',\n 'async',\n 'await',\n 'then',\n 'catch',\n 'throw',\n 'error',\n 'undefined',\n 'null',\n 'true',\n 'false',\n 'void',\n 'number',\n 'string',\n 'boolean',\n 'object',\n 'array',\n 'promise',\n 'callback',\n 'resolve',\n 'reject',\n 'value',\n 'param',\n 'params',\n 'args',\n 'props',\n 'options',\n 'config',\n 'data',\n ]);\n\n // Simple keyword extraction\n const words = content.toLowerCase().match(/\\b[a-z]{4,}\\b/g) ?? [];\n const frequency = new Map<string, number>();\n\n for (const word of words) {\n // Skip stopwords\n if (stopwords.has(word)) continue;\n\n frequency.set(word, (frequency.get(word) ?? 
0) + 1);\n }\n\n return Array.from(frequency.entries())\n .sort((a, b) => b[1] - a[1])\n .slice(0, 5)\n .map(([word]) => word);\n }\n\n private extractDocumentation(content: string): string {\n const docMatch = content.match(/\\/\\*\\*([\\s\\S]*?)\\*\\//);\n if (docMatch?.[1] !== undefined && docMatch[1] !== '') {\n return docMatch[1]\n .split('\\n')\n .map((line) => line.replace(/^\\s*\\*\\s?/, '').trim())\n .filter((line) => line.length > 0)\n .join('\\n');\n }\n return '';\n }\n\n /**\n * Get usage stats from code graph.\n * Returns default values if no graph is available.\n */\n private getUsageFromGraph(\n graph: CodeGraph | null,\n filePath: string,\n symbolName: string\n ): { calledBy: number; calls: number } {\n if (!graph || symbolName === '' || symbolName === '(anonymous)') {\n return { calledBy: 0, calls: 0 };\n }\n\n const nodeId = `${filePath}:${symbolName}`;\n return {\n calledBy: graph.getCalledByCount(nodeId),\n calls: graph.getCallsCount(nodeId),\n };\n }\n\n /**\n * Get related code from graph.\n * Returns callers and callees for the symbol.\n */\n private getRelatedCodeFromGraph(\n graph: CodeGraph | null,\n filePath: string,\n symbolName: string\n ): Array<{ file: string; summary: string; relationship: string }> {\n if (!graph || symbolName === '' || symbolName === '(anonymous)') {\n return [];\n }\n\n const nodeId = `${filePath}:${symbolName}`;\n const related: Array<{ file: string; summary: string; relationship: string }> = [];\n\n // Get callers (incoming edges)\n const incoming = graph.getIncomingEdges(nodeId);\n for (const edge of incoming) {\n if (edge.type === 'calls') {\n // Parse file:symbol from edge.from\n const [file, symbol] = this.parseNodeId(edge.from);\n related.push({\n file,\n summary: symbol ? `${symbol}()` : 'unknown',\n relationship: 'calls this',\n });\n }\n }\n\n // Get callees (outgoing edges)\n const outgoing = graph.getEdges(nodeId);\n for (const edge of outgoing) {\n if (edge.type === 'calls') {\n // Parse file:symbol from edge.to\n const [file, symbol] = this.parseNodeId(edge.to);\n related.push({\n file,\n summary: symbol ? `${symbol}()` : 'unknown',\n relationship: 'called by this',\n });\n }\n }\n\n // Limit to top 10 related items\n return related.slice(0, 10);\n }\n\n /**\n * Parse a node ID into file path and symbol name.\n */\n private parseNodeId(nodeId: string): [string, string] {\n const lastColon = nodeId.lastIndexOf(':');\n if (lastColon === -1) {\n return [nodeId, ''];\n }\n return [nodeId.substring(0, lastColon), nodeId.substring(lastColon + 1)];\n }\n}\n","import { readFile, access } from 'node:fs/promises';\nimport { resolve, isAbsolute, join } from 'node:path';\nimport { ProjectRootService } from './project-root.service.js';\nimport {\n StoreDefinitionsConfigSchema,\n DEFAULT_STORE_DEFINITIONS_CONFIG,\n} from '../types/store-definition.js';\nimport { atomicWriteFile } from '../utils/atomic-write.js';\nimport type { StoreDefinitionsConfig, StoreDefinition } from '../types/store-definition.js';\n\n/**\n * Check if a file exists\n */\nasync function fileExists(path: string): Promise<boolean> {\n try {\n await access(path);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Service for managing git-committable store definitions.\n *\n * Store definitions are saved to `.bluera/bluera-knowledge/stores.config.json`\n * within the project root. 
This file is designed to be committed to version\n * control, allowing teams to share store configurations.\n *\n * The actual store data (vector embeddings, cloned repos) lives in the data\n * directory and should be gitignored.\n */\nexport class StoreDefinitionService {\n private readonly configPath: string;\n private readonly projectRoot: string;\n private config: StoreDefinitionsConfig | null = null;\n\n constructor(projectRoot?: string) {\n this.projectRoot = projectRoot ?? ProjectRootService.resolve();\n this.configPath = join(this.projectRoot, '.bluera/bluera-knowledge/stores.config.json');\n }\n\n /**\n * Load store definitions from config file.\n * Returns empty config if file doesn't exist.\n * Throws on parse/validation errors (fail fast per CLAUDE.md).\n */\n async load(): Promise<StoreDefinitionsConfig> {\n if (this.config !== null) {\n return this.config;\n }\n\n const exists = await fileExists(this.configPath);\n if (!exists) {\n // Deep clone to avoid mutating the shared default\n this.config = {\n ...DEFAULT_STORE_DEFINITIONS_CONFIG,\n stores: [...DEFAULT_STORE_DEFINITIONS_CONFIG.stores],\n };\n return this.config;\n }\n\n const content = await readFile(this.configPath, 'utf-8');\n let parsed: unknown;\n try {\n parsed = JSON.parse(content);\n } catch (error) {\n throw new Error(\n `Failed to parse store definitions at ${this.configPath}: ${\n error instanceof Error ? error.message : String(error)\n }`\n );\n }\n\n const result = StoreDefinitionsConfigSchema.safeParse(parsed);\n if (!result.success) {\n throw new Error(`Invalid store definitions at ${this.configPath}: ${result.error.message}`);\n }\n\n this.config = result.data;\n return this.config;\n }\n\n /**\n * Save store definitions to config file.\n */\n async save(config: StoreDefinitionsConfig): Promise<void> {\n await atomicWriteFile(this.configPath, JSON.stringify(config, null, 2));\n this.config = config;\n }\n\n /**\n * Add a store definition.\n * Throws if a definition with the same name already exists.\n */\n async addDefinition(definition: StoreDefinition): Promise<void> {\n const config = await this.load();\n const existing = config.stores.find((s) => s.name === definition.name);\n if (existing !== undefined) {\n throw new Error(`Store definition \"${definition.name}\" already exists`);\n }\n config.stores.push(definition);\n await this.save(config);\n }\n\n /**\n * Remove a store definition by name.\n * Returns true if removed, false if not found.\n */\n async removeDefinition(name: string): Promise<boolean> {\n const config = await this.load();\n const index = config.stores.findIndex((s) => s.name === name);\n if (index === -1) {\n return false;\n }\n config.stores.splice(index, 1);\n await this.save(config);\n return true;\n }\n\n /**\n * Update an existing store definition.\n * Only updates the provided fields, preserving others.\n * Throws if definition not found.\n */\n async updateDefinition(\n name: string,\n updates: { description?: string; tags?: string[] }\n ): Promise<void> {\n const config = await this.load();\n const index = config.stores.findIndex((s) => s.name === name);\n if (index === -1) {\n throw new Error(`Store definition \"${name}\" not found`);\n }\n\n // Merge updates while preserving type safety\n // We only allow updating common optional fields (description, tags)\n const existing = config.stores[index];\n if (existing === undefined) {\n throw new Error(`Store definition \"${name}\" not found at index ${String(index)}`);\n }\n if (updates.description !== undefined) {\n 
existing.description = updates.description;\n }\n if (updates.tags !== undefined) {\n existing.tags = updates.tags;\n }\n await this.save(config);\n }\n\n /**\n * Get a store definition by name.\n * Returns undefined if not found.\n */\n async getByName(name: string): Promise<StoreDefinition | undefined> {\n const config = await this.load();\n return config.stores.find((s) => s.name === name);\n }\n\n /**\n * Check if any definitions exist.\n */\n async hasDefinitions(): Promise<boolean> {\n const config = await this.load();\n return config.stores.length > 0;\n }\n\n /**\n * Resolve a file store path relative to project root.\n */\n resolvePath(path: string): string {\n if (isAbsolute(path)) {\n return path;\n }\n return resolve(this.projectRoot, path);\n }\n\n /**\n * Get the config file path.\n */\n getConfigPath(): string {\n return this.configPath;\n }\n\n /**\n * Get the project root.\n */\n getProjectRoot(): string {\n return this.projectRoot;\n }\n\n /**\n * Clear the cached config (useful for testing).\n */\n clearCache(): void {\n this.config = null;\n }\n}\n","import { z } from 'zod';\n\n/**\n * Store definition schemas for git-committable configuration.\n *\n * Store definitions capture the essential information needed to recreate\n * a store, without the runtime data (vector embeddings, cloned repos).\n * This allows teams to share store configurations via version control.\n */\n\n// ============================================================================\n// Base Schema\n// ============================================================================\n\n/**\n * Base fields common to all store definitions\n */\nconst BaseStoreDefinitionSchema = z.object({\n name: z.string().min(1, 'Store name is required'),\n description: z.string().optional(),\n tags: z.array(z.string()).optional(),\n});\n\n// ============================================================================\n// File Store Definition\n// ============================================================================\n\n/**\n * File store definition - references a local directory.\n * Path can be relative (resolved against project root) or absolute.\n */\nexport const FileStoreDefinitionSchema = BaseStoreDefinitionSchema.extend({\n type: z.literal('file'),\n path: z.string().min(1, 'Path is required for file stores'),\n});\n\nexport type FileStoreDefinition = z.infer<typeof FileStoreDefinitionSchema>;\n\n// ============================================================================\n// Repo Store Definition\n// ============================================================================\n\n/**\n * Validates git repository URLs, supporting both standard URLs and SCP-style SSH URLs.\n * - Standard URLs: https://github.com/org/repo.git, ssh://git@github.com/org/repo.git\n * - SCP-style SSH: git@github.com:org/repo.git\n */\nconst GitUrlSchema = z.string().refine(\n (val) => {\n // Accept standard URLs (http://, https://, ssh://, git://)\n try {\n new URL(val);\n return true;\n } catch {\n // Accept SCP-style SSH URLs: git@host:org/repo.git or git@host:org/repo\n return /^git@[\\w.-]+:[\\w./-]+$/.test(val);\n }\n },\n { message: 'Must be a valid URL or SSH URL (git@host:path)' }\n);\n\n/**\n * Repo store definition - references a git repository.\n * The repo will be cloned on sync.\n */\nexport const RepoStoreDefinitionSchema = BaseStoreDefinitionSchema.extend({\n type: z.literal('repo'),\n url: GitUrlSchema,\n branch: z.string().optional(),\n depth: z.number().int().positive('Depth must be a positive 
integer').optional(),\n});\n\nexport type RepoStoreDefinition = z.infer<typeof RepoStoreDefinitionSchema>;\n\n// ============================================================================\n// Web Store Definition\n// ============================================================================\n\n/**\n * Web store definition - references a website to crawl.\n * Supports intelligent crawling with natural language instructions.\n */\nexport const WebStoreDefinitionSchema = BaseStoreDefinitionSchema.extend({\n type: z.literal('web'),\n url: z.url('Valid URL is required for web stores'),\n depth: z.number().int().min(0, 'Depth must be non-negative').default(1),\n maxPages: z.number().int().positive('maxPages must be a positive integer').optional(),\n crawlInstructions: z.string().optional(),\n extractInstructions: z.string().optional(),\n});\n\nexport type WebStoreDefinition = z.infer<typeof WebStoreDefinitionSchema>;\n\n// ============================================================================\n// Union Type\n// ============================================================================\n\n/**\n * Discriminated union of all store definition types.\n * Use the `type` field to narrow the type.\n */\nexport const StoreDefinitionSchema = z.discriminatedUnion('type', [\n FileStoreDefinitionSchema,\n RepoStoreDefinitionSchema,\n WebStoreDefinitionSchema,\n]);\n\nexport type StoreDefinition = z.infer<typeof StoreDefinitionSchema>;\n\n// ============================================================================\n// Config Schema\n// ============================================================================\n\n/**\n * Root configuration schema for store definitions.\n * Version field enables future schema migrations.\n */\nexport const StoreDefinitionsConfigSchema = z.object({\n version: z.literal(1),\n stores: z.array(StoreDefinitionSchema),\n});\n\nexport type StoreDefinitionsConfig = z.infer<typeof StoreDefinitionsConfigSchema>;\n\n// ============================================================================\n// Type Guards\n// ============================================================================\n\nexport function isFileStoreDefinition(def: StoreDefinition): def is FileStoreDefinition {\n return def.type === 'file';\n}\n\nexport function isRepoStoreDefinition(def: StoreDefinition): def is RepoStoreDefinition {\n return def.type === 'repo';\n}\n\nexport function isWebStoreDefinition(def: StoreDefinition): def is WebStoreDefinition {\n return def.type === 'web';\n}\n\n// ============================================================================\n// Default Config\n// ============================================================================\n\nexport const DEFAULT_STORE_DEFINITIONS_CONFIG: StoreDefinitionsConfig = {\n version: 1,\n stores: [],\n};\n","import { randomUUID } from 'node:crypto';\nimport { readFile, mkdir, stat, access } from 'node:fs/promises';\nimport { join, resolve } from 'node:path';\nimport { cloneRepository } from '../plugin/git-clone.js';\nimport { createStoreId } from '../types/brands.js';\nimport { ok, err } from '../types/result.js';\nimport { atomicWriteFile } from '../utils/atomic-write.js';\nimport type { GitignoreService } from './gitignore.service.js';\nimport type { StoreDefinitionService } from './store-definition.service.js';\nimport type { StoreId } from '../types/brands.js';\nimport type { Result } from '../types/result.js';\nimport type {\n StoreDefinition,\n FileStoreDefinition,\n RepoStoreDefinition,\n WebStoreDefinition,\n} from 
'../types/store-definition.js';\nimport type { Store, FileStore, RepoStore, WebStore, StoreType } from '../types/store.js';\n\n/**\n * Check if a file exists\n */\nasync function fileExists(path: string): Promise<boolean> {\n try {\n await access(path);\n return true;\n } catch {\n return false;\n }\n}\n\nexport interface CreateStoreInput {\n name: string;\n type: StoreType;\n path?: string | undefined;\n url?: string | undefined;\n description?: string | undefined;\n tags?: string[] | undefined;\n branch?: string | undefined;\n depth?: number | undefined;\n // Web store crawl options\n maxPages?: number | undefined;\n crawlInstructions?: string | undefined;\n extractInstructions?: string | undefined;\n}\n\nexport interface StoreServiceOptions {\n /** Optional definition service for auto-updating git-committable config */\n definitionService?: StoreDefinitionService;\n /** Optional gitignore service for ensuring .gitignore patterns */\n gitignoreService?: GitignoreService;\n /** Optional project root for resolving relative paths */\n projectRoot?: string;\n}\n\nexport interface OperationOptions {\n /** Skip syncing to store definitions (used by stores:sync command) */\n skipDefinitionSync?: boolean;\n}\n\ninterface StoreRegistry {\n stores: Store[];\n}\n\nexport class StoreService {\n private readonly dataDir: string;\n private readonly definitionService: StoreDefinitionService | undefined;\n private readonly gitignoreService: GitignoreService | undefined;\n private readonly projectRoot: string | undefined;\n private registry: StoreRegistry = { stores: [] };\n\n constructor(dataDir: string, options?: StoreServiceOptions) {\n this.dataDir = dataDir;\n this.definitionService = options?.definitionService ?? undefined;\n this.gitignoreService = options?.gitignoreService ?? undefined;\n this.projectRoot = options?.projectRoot ?? undefined;\n }\n\n async initialize(): Promise<void> {\n await mkdir(this.dataDir, { recursive: true });\n await this.loadRegistry();\n }\n\n /**\n * Convert a Store and CreateStoreInput to a StoreDefinition for persistence.\n * Returns undefined for stores that shouldn't be persisted (e.g., local repo stores).\n */\n private createDefinitionFromStore(\n store: Store,\n input: CreateStoreInput\n ): StoreDefinition | undefined {\n // Copy tags array to convert from readonly to mutable\n const tags = store.tags !== undefined ? [...store.tags] : undefined;\n const base = {\n name: store.name,\n description: store.description,\n tags,\n };\n\n switch (store.type) {\n case 'file': {\n const fileStore = store;\n const fileDef: FileStoreDefinition = {\n ...base,\n type: 'file',\n // Use original input path if provided (may be relative), otherwise use normalized\n path: input.path ?? 
fileStore.path,\n };\n return fileDef;\n }\n case 'repo': {\n const repoStore = store;\n // Local repo stores (no URL) are machine-specific; skip definition sync\n if (repoStore.url === undefined) {\n return undefined;\n }\n const repoDef: RepoStoreDefinition = {\n ...base,\n type: 'repo',\n url: repoStore.url,\n branch: repoStore.branch,\n depth: input.depth,\n };\n return repoDef;\n }\n case 'web': {\n const webStore = store;\n const webDef: WebStoreDefinition = {\n ...base,\n type: 'web',\n url: webStore.url,\n depth: webStore.depth,\n maxPages: input.maxPages,\n crawlInstructions: input.crawlInstructions,\n extractInstructions: input.extractInstructions,\n };\n return webDef;\n }\n }\n }\n\n /**\n * Create a StoreDefinition from an existing store (without original input).\n * Used when updating/renaming stores where we don't have the original input.\n * Returns undefined for stores that shouldn't be persisted (e.g., local repo stores).\n */\n private createDefinitionFromExistingStore(store: Store): StoreDefinition | undefined {\n // Copy tags array to convert from readonly to mutable\n const tags = store.tags !== undefined ? [...store.tags] : undefined;\n const base = {\n name: store.name,\n description: store.description,\n tags,\n };\n\n switch (store.type) {\n case 'file': {\n const fileDef: FileStoreDefinition = {\n ...base,\n type: 'file',\n path: store.path,\n };\n return fileDef;\n }\n case 'repo': {\n // Local repo stores (no URL) are machine-specific; skip definition sync\n if (store.url === undefined) {\n return undefined;\n }\n const repoDef: RepoStoreDefinition = {\n ...base,\n type: 'repo',\n url: store.url,\n branch: store.branch,\n depth: store.depth,\n };\n return repoDef;\n }\n case 'web': {\n const webDef: WebStoreDefinition = {\n ...base,\n type: 'web',\n url: store.url,\n depth: store.depth,\n maxPages: store.maxPages,\n crawlInstructions: store.crawlInstructions,\n extractInstructions: store.extractInstructions,\n };\n return webDef;\n }\n }\n }\n\n async create(input: CreateStoreInput, options?: OperationOptions): Promise<Result<Store>> {\n if (!input.name || input.name.trim() === '') {\n return err(new Error('Store name cannot be empty'));\n }\n\n const existing = await this.getByName(input.name);\n if (existing !== undefined) {\n return err(new Error(`Store with name \"${input.name}\" already exists`));\n }\n\n const id = createStoreId(randomUUID());\n const now = new Date();\n\n let store: Store;\n\n switch (input.type) {\n case 'file': {\n if (input.path === undefined) {\n return err(new Error('Path is required for file stores'));\n }\n // Normalize path to absolute path, using projectRoot if available\n const normalizedPath =\n this.projectRoot !== undefined\n ? 
resolve(this.projectRoot, input.path)\n : resolve(input.path);\n // Validate directory exists\n try {\n const stats = await stat(normalizedPath);\n if (!stats.isDirectory()) {\n return err(new Error(`Path is not a directory: ${normalizedPath}`));\n }\n } catch {\n return err(new Error(`Directory does not exist: ${normalizedPath}`));\n }\n store = {\n type: 'file',\n id,\n name: input.name,\n path: normalizedPath,\n description: input.description,\n tags: input.tags,\n status: 'ready',\n createdAt: now,\n updatedAt: now,\n } satisfies FileStore;\n break;\n }\n\n case 'repo': {\n let repoPath = input.path;\n\n // If URL provided, clone it\n if (input.url !== undefined) {\n const cloneDir = join(this.dataDir, 'repos', id);\n const result = await cloneRepository({\n url: input.url,\n targetDir: cloneDir,\n ...(input.branch !== undefined ? { branch: input.branch } : {}),\n depth: input.depth ?? 1,\n });\n\n if (!result.success) {\n return err(result.error);\n }\n repoPath = result.data;\n }\n\n if (repoPath === undefined) {\n return err(new Error('Path or URL required for repo stores'));\n }\n\n // Normalize path to absolute path, using projectRoot if available\n const normalizedRepoPath =\n this.projectRoot !== undefined ? resolve(this.projectRoot, repoPath) : resolve(repoPath);\n\n // Validate local repo path exists (only for local repos without URL)\n if (input.url === undefined) {\n try {\n const stats = await stat(normalizedRepoPath);\n if (!stats.isDirectory()) {\n return err(new Error(`Path is not a directory: ${normalizedRepoPath}`));\n }\n } catch {\n return err(new Error(`Repository path does not exist: ${normalizedRepoPath}`));\n }\n }\n\n store = {\n type: 'repo',\n id,\n name: input.name,\n path: normalizedRepoPath,\n url: input.url,\n branch: input.branch,\n depth: input.depth ?? 1,\n description: input.description,\n tags: input.tags,\n status: 'ready',\n createdAt: now,\n updatedAt: now,\n } satisfies RepoStore;\n break;\n }\n\n case 'web':\n if (input.url === undefined) {\n return err(new Error('URL is required for web stores'));\n }\n store = {\n type: 'web',\n id,\n name: input.name,\n url: input.url,\n depth: input.depth ?? 
1,\n maxPages: input.maxPages,\n crawlInstructions: input.crawlInstructions,\n extractInstructions: input.extractInstructions,\n description: input.description,\n tags: input.tags,\n status: 'ready',\n createdAt: now,\n updatedAt: now,\n } satisfies WebStore;\n break;\n\n default: {\n // Exhaustive check - if this is reached, input.type is invalid\n const invalidType: never = input.type;\n return err(new Error(`Invalid store type: ${String(invalidType)}`));\n }\n }\n\n this.registry.stores.push(store);\n await this.saveRegistry();\n\n // Ensure .gitignore has required patterns\n if (this.gitignoreService !== undefined) {\n await this.gitignoreService.ensureGitignorePatterns();\n }\n\n // Sync to store definitions if service is available and not skipped\n if (this.definitionService !== undefined && options?.skipDefinitionSync !== true) {\n const definition = this.createDefinitionFromStore(store, input);\n // Only add if definition was created (local repo stores return undefined)\n if (definition !== undefined) {\n await this.definitionService.addDefinition(definition);\n }\n }\n\n return ok(store);\n }\n\n async list(type?: StoreType): Promise<Store[]> {\n if (type !== undefined) {\n return Promise.resolve(this.registry.stores.filter((s) => s.type === type));\n }\n return Promise.resolve([...this.registry.stores]);\n }\n\n async get(id: StoreId): Promise<Store | undefined> {\n return Promise.resolve(this.registry.stores.find((s) => s.id === id));\n }\n\n async getByName(name: string): Promise<Store | undefined> {\n return Promise.resolve(this.registry.stores.find((s) => s.name === name));\n }\n\n async getByIdOrName(idOrName: string): Promise<Store | undefined> {\n return Promise.resolve(\n this.registry.stores.find((s) => s.id === idOrName || s.name === idOrName)\n );\n }\n\n async update(\n id: StoreId,\n updates: Partial<Pick<Store, 'name' | 'description' | 'tags'>>,\n options?: OperationOptions\n ): Promise<Result<Store>> {\n const index = this.registry.stores.findIndex((s) => s.id === id);\n if (index === -1) {\n return err(new Error(`Store not found: ${id}`));\n }\n\n const store = this.registry.stores[index];\n if (store === undefined) {\n return err(new Error(`Store not found: ${id}`));\n }\n\n // Validate name is not empty when provided\n if (updates.name?.trim() === '') {\n return err(new Error('Store name cannot be empty'));\n }\n\n // Check for duplicate name when renaming\n const isRenaming = updates.name !== undefined && updates.name !== store.name;\n if (isRenaming) {\n const existing = this.registry.stores.find((s) => s.name === updates.name && s.id !== id);\n if (existing !== undefined) {\n return err(new Error(`Store with name '${updates.name}' already exists`));\n }\n }\n\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n const updated = {\n ...store,\n ...updates,\n updatedAt: new Date(),\n } as Store;\n\n this.registry.stores[index] = updated;\n await this.saveRegistry();\n\n // Sync to store definitions if service is available and not skipped\n if (this.definitionService !== undefined && options?.skipDefinitionSync !== true) {\n if (isRenaming) {\n // When renaming: remove old definition and add new one with updated store data\n await this.definitionService.removeDefinition(store.name);\n const newDefinition = this.createDefinitionFromExistingStore(updated);\n // Only add if store type supports definitions (local repo stores don't)\n if (newDefinition !== undefined) {\n await this.definitionService.addDefinition(newDefinition);\n }\n } else 
{\n // Not renaming: just update description/tags on existing definition\n const defUpdates: { description?: string; tags?: string[] } = {};\n if (updates.description !== undefined) {\n defUpdates.description = updates.description;\n }\n if (updates.tags !== undefined) {\n // Copy tags array to convert from readonly to mutable\n defUpdates.tags = [...updates.tags];\n }\n // Only update if there are actual changes to sync\n if (Object.keys(defUpdates).length > 0) {\n await this.definitionService.updateDefinition(store.name, defUpdates);\n }\n }\n }\n\n return ok(updated);\n }\n\n async delete(id: StoreId, options?: OperationOptions): Promise<Result<void>> {\n const index = this.registry.stores.findIndex((s) => s.id === id);\n if (index === -1) {\n return err(new Error(`Store not found: ${id}`));\n }\n\n const store = this.registry.stores[index];\n if (store === undefined) {\n return err(new Error(`Store not found: ${id}`));\n }\n\n const storeName = store.name;\n this.registry.stores.splice(index, 1);\n await this.saveRegistry();\n\n // Sync to store definitions if service is available and not skipped\n if (this.definitionService !== undefined && options?.skipDefinitionSync !== true) {\n await this.definitionService.removeDefinition(storeName);\n }\n\n return ok(undefined);\n }\n\n private async loadRegistry(): Promise<void> {\n const registryPath = join(this.dataDir, 'stores.json');\n const exists = await fileExists(registryPath);\n\n if (!exists) {\n // First run - create empty registry\n this.registry = { stores: [] };\n await this.saveRegistry();\n return;\n }\n\n // File exists - load it (throws on corruption per CLAUDE.md \"fail early\")\n const content = await readFile(registryPath, 'utf-8');\n try {\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n const data = JSON.parse(content) as { stores: (Store | null)[] };\n this.registry = {\n stores: data.stores\n .filter((s): s is Store => s !== null)\n .map((s) => ({\n ...s,\n id: createStoreId(s.id),\n createdAt: new Date(s.createdAt),\n updatedAt: new Date(s.updatedAt),\n })),\n };\n } catch (error) {\n throw new Error(\n `Failed to parse store registry at ${registryPath}: ${error instanceof Error ? 
error.message : String(error)}`\n );\n }\n }\n\n private async saveRegistry(): Promise<void> {\n const registryPath = join(this.dataDir, 'stores.json');\n await atomicWriteFile(registryPath, JSON.stringify(this.registry, null, 2));\n }\n}\n","import { spawn } from 'node:child_process';\nimport { mkdir } from 'node:fs/promises';\nimport { ok, err } from '../types/result.js';\nimport type { Result } from '../types/result.js';\n\nexport interface CloneOptions {\n url: string;\n targetDir: string;\n branch?: string;\n depth?: number;\n}\n\nexport async function cloneRepository(options: CloneOptions): Promise<Result<string>> {\n const { url, targetDir, branch, depth = 1 } = options;\n\n await mkdir(targetDir, { recursive: true });\n\n const args = ['clone', '--depth', String(depth)];\n if (branch !== undefined) {\n args.push('--branch', branch);\n }\n args.push(url, targetDir);\n\n return new Promise((resolve) => {\n const git = spawn('git', args, { stdio: ['ignore', 'pipe', 'pipe'] });\n\n let stderr = '';\n git.stderr.on('data', (data: Buffer) => {\n stderr += data.toString();\n });\n\n git.on('error', (error: Error) => {\n resolve(err(error));\n });\n\n git.on('close', (code: number | null) => {\n if (code === 0) {\n resolve(ok(targetDir));\n } else {\n resolve(err(new Error(`Git clone failed: ${stderr}`)));\n }\n });\n });\n}\n\nexport function isGitUrl(source: string): boolean {\n return source.startsWith('http://') || source.startsWith('https://') || source.startsWith('git@');\n}\n\nexport function extractRepoName(url: string): string {\n const match = /\\/([^/]+?)(\\.git)?$/.exec(url);\n const name = match?.[1];\n if (name === undefined) {\n return 'repository';\n }\n return name;\n}\n","import { spawn, type ChildProcess } from 'node:child_process';\nimport { randomUUID } from 'node:crypto';\nimport { existsSync } from 'node:fs';\nimport path from 'node:path';\nimport { createInterface, type Interface as ReadlineInterface } from 'node:readline';\nimport { fileURLToPath } from 'node:url';\nimport { ZodError } from 'zod';\nimport { type ParsePythonResult, validateParsePythonResult } from './schemas.js';\nimport { createLogger } from '../logging/index.js';\n\nconst logger = createLogger('python-bridge');\n\n// Re-export for backwards compatibility\nexport type { ParsePythonResult };\n\ninterface PendingRequest {\n resolve: (v: ParsePythonResult) => void;\n reject: (e: Error) => void;\n timeout: NodeJS.Timeout;\n}\n\n/**\n * Get the system Python executable name based on platform.\n * Windows uses 'python', Unix-like systems use 'python3'.\n */\nfunction getPythonExecutable(): string {\n return process.platform === 'win32' ? 
'python' : 'python3';\n}\n\n/**\n * Get the venv Python path based on platform.\n * Windows: .venv/Scripts/python.exe\n * Unix-like: .venv/bin/python3\n */\nfunction getVenvPythonPath(pluginRoot: string): string {\n if (process.platform === 'win32') {\n return path.join(pluginRoot, '.venv', 'Scripts', 'python.exe');\n }\n return path.join(pluginRoot, '.venv', 'bin', 'python3');\n}\n\nexport class PythonBridge {\n private process: ChildProcess | null = null;\n private readonly pending: Map<string, PendingRequest> = new Map();\n private stoppingIntentionally = false;\n private stdoutReadline: ReadlineInterface | null = null;\n private stderrReadline: ReadlineInterface | null = null;\n\n start(): Promise<void> {\n if (this.process) return Promise.resolve();\n\n // Compute absolute path to Python worker using import.meta.url\n // This works both in development (src/) and production (dist/)\n const currentFilePath = fileURLToPath(import.meta.url);\n // Platform-agnostic check: match both /dist/ and \\dist\\ (Windows)\n const distPattern = `${path.sep}dist${path.sep}`;\n const isProduction = currentFilePath.includes(distPattern);\n\n let pythonWorkerPath: string;\n let pythonPath: string;\n\n if (isProduction) {\n // Production: Find dist dir and go to sibling python/ directory\n const distIndex = currentFilePath.indexOf(distPattern);\n const pluginRoot = currentFilePath.substring(0, distIndex);\n pythonWorkerPath = path.join(pluginRoot, 'python', 'ast_worker.py');\n\n // Use venv python if available (installed by check-dependencies.sh hook)\n const venvPython = getVenvPythonPath(pluginRoot);\n pythonPath = existsSync(venvPython) ? venvPython : getPythonExecutable();\n } else {\n // Development: Go up from src/crawl to find python/\n const srcDir = path.dirname(path.dirname(currentFilePath));\n const projectRoot = path.dirname(srcDir);\n pythonWorkerPath = path.join(projectRoot, 'python', 'ast_worker.py');\n\n // Development: Use system python (user manages their own environment)\n pythonPath = getPythonExecutable();\n }\n\n logger.debug(\n { pythonWorkerPath, pythonPath, currentFilePath, isProduction },\n 'Starting Python bridge process'\n );\n\n this.process = spawn(pythonPath, [pythonWorkerPath], {\n stdio: ['pipe', 'pipe', 'pipe'],\n });\n\n // Add error handler for process spawn errors\n this.process.on('error', (err) => {\n logger.error({ error: err.message, stack: err.stack }, 'Python bridge process error');\n this.rejectAllPending(new Error(`Process error: ${err.message}`));\n });\n\n // Add exit handler to detect non-zero exits\n this.process.on('exit', (code, signal) => {\n if (code !== 0 && code !== null) {\n logger.error({ code }, 'Python bridge process exited with non-zero code');\n this.rejectAllPending(new Error(`Process exited with code ${String(code)}`));\n } else if (signal && !this.stoppingIntentionally) {\n // Only log if we didn't intentionally stop the process\n logger.error({ signal }, 'Python bridge process killed with signal');\n this.rejectAllPending(new Error(`Process killed with signal ${signal}`));\n }\n this.process = null;\n this.stoppingIntentionally = false;\n });\n\n // Add stderr logging\n if (this.process.stderr) {\n this.stderrReadline = createInterface({ input: this.process.stderr });\n this.stderrReadline.on('line', (line) => {\n logger.warn({ stderr: line }, 'Python bridge stderr output');\n });\n }\n\n if (this.process.stdout === null) {\n this.process.kill(); // Kill process to prevent zombie\n this.process = null; // Clean up reference\n return 
Promise.reject(new Error('Python bridge process stdout is null'));\n }\n this.stdoutReadline = createInterface({ input: this.process.stdout });\n this.stdoutReadline.on('line', (line) => {\n // Filter out non-JSON lines (Python logging output)\n if (!line.trim().startsWith('{')) {\n return;\n }\n\n try {\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n const response = JSON.parse(line) as {\n id: string;\n error?: { message: string };\n result?: unknown;\n };\n const pending = this.pending.get(response.id);\n if (pending !== undefined) {\n if (response.error !== undefined) {\n clearTimeout(pending.timeout);\n this.pending.delete(response.id);\n pending.reject(new Error(response.error.message));\n } else if (response.result !== undefined) {\n clearTimeout(pending.timeout);\n this.pending.delete(response.id);\n\n // Validate response structure\n try {\n const validated = validateParsePythonResult(response.result);\n pending.resolve(validated);\n } catch (error: unknown) {\n // Log validation failure with original response for debugging\n if (error instanceof ZodError) {\n logger.error(\n {\n issues: error.issues,\n response: JSON.stringify(response.result),\n },\n 'Python bridge response validation failed'\n );\n pending.reject(\n new Error(`Invalid response format from Python bridge: ${error.message}`)\n );\n } else {\n const errorMessage = error instanceof Error ? error.message : String(error);\n logger.error({ error: errorMessage }, 'Response validation error');\n pending.reject(new Error(`Response validation error: ${errorMessage}`));\n }\n }\n }\n // If neither result nor error, leave pending (will timeout)\n }\n } catch (err) {\n logger.error(\n {\n error: err instanceof Error ? err.message : String(err),\n line,\n },\n 'Failed to parse JSON response from Python bridge'\n );\n }\n });\n\n return Promise.resolve();\n }\n\n async parsePython(\n code: string,\n filePath: string,\n timeoutMs: number = 10000\n ): Promise<ParsePythonResult> {\n if (!this.process) await this.start();\n\n const id = randomUUID();\n const request = {\n jsonrpc: '2.0',\n id,\n method: 'parse_python',\n params: { code, filePath },\n };\n\n return new Promise((resolve, reject) => {\n const timeout = setTimeout(() => {\n const pending = this.pending.get(id);\n if (pending) {\n this.pending.delete(id);\n reject(\n new Error(`Python parsing timeout after ${String(timeoutMs)}ms for file: ${filePath}`)\n );\n }\n }, timeoutMs);\n\n this.pending.set(id, {\n resolve,\n reject,\n timeout,\n });\n if (!this.process?.stdin) {\n reject(new Error('Python bridge process not available'));\n return;\n }\n this.process.stdin.write(`${JSON.stringify(request)}\\n`);\n });\n }\n\n stop(): Promise<void> {\n if (!this.process) {\n return Promise.resolve();\n }\n\n return new Promise((resolve) => {\n this.stoppingIntentionally = true;\n this.rejectAllPending(new Error('Python bridge stopped'));\n\n // Close readline interfaces to prevent resource leaks\n if (this.stdoutReadline) {\n this.stdoutReadline.close();\n this.stdoutReadline = null;\n }\n if (this.stderrReadline) {\n this.stderrReadline.close();\n this.stderrReadline = null;\n }\n\n // Wait for process to actually exit before resolving\n const proc = this.process;\n if (proc === null) {\n resolve();\n return;\n }\n\n // Set up exit handler to resolve when process terminates\n const onExit = (): void => {\n resolve();\n };\n proc.once('exit', onExit);\n\n // Send SIGTERM to gracefully stop\n proc.kill();\n\n // Safety timeout in case process doesn't 
exit within 1 second\n setTimeout(() => {\n proc.removeListener('exit', onExit);\n if (this.process === proc) {\n proc.kill('SIGKILL'); // Force kill\n this.process = null;\n }\n resolve();\n }, 1000);\n });\n }\n\n private rejectAllPending(error: Error): void {\n for (const pending of this.pending.values()) {\n clearTimeout(pending.timeout);\n pending.reject(error);\n }\n this.pending.clear();\n }\n}\n","import { z } from 'zod';\n\n// Schema for Python AST parsing response\nconst MethodInfoSchema = z.object({\n name: z.string(),\n async: z.boolean(),\n signature: z.string(),\n startLine: z.number(),\n endLine: z.number(),\n calls: z.array(z.string()),\n});\n\nconst CodeNodeSchema = z.object({\n type: z.enum(['function', 'class']),\n name: z.string(),\n exported: z.boolean(),\n startLine: z.number(),\n endLine: z.number(),\n async: z.boolean().optional(),\n signature: z.string().optional(),\n calls: z.array(z.string()).optional(),\n methods: z.array(MethodInfoSchema).optional(),\n});\n\nconst ImportInfoSchema = z.object({\n source: z.string(),\n imported: z.string(),\n alias: z.string().optional().nullable(),\n});\n\nexport const ParsePythonResultSchema = z.object({\n nodes: z.array(CodeNodeSchema),\n imports: z.array(ImportInfoSchema),\n});\n\nexport type ParsePythonResult = z.infer<typeof ParsePythonResultSchema>;\n\n/**\n * Validates a Python AST parsing response from Python bridge.\n * Throws ZodError if the response doesn't match the expected schema.\n *\n * @param data - Raw data from Python bridge\n * @returns Validated ParsePythonResult\n * @throws {z.ZodError} If validation fails\n */\nexport function validateParsePythonResult(data: unknown): ParsePythonResult {\n return ParsePythonResultSchema.parse(data);\n}\n","import { homedir } from 'node:os';\nimport { join } from 'node:path';\nimport { pipeline, env, type FeatureExtractionPipeline } from '@huggingface/transformers';\n\n// Set cache directory to ~/.cache/huggingface-transformers (outside node_modules)\n// This allows CI caching and prevents model re-downloads on each npm install\nenv.cacheDir = join(homedir(), '.cache', 'huggingface-transformers');\n\nexport class EmbeddingEngine {\n private extractor: FeatureExtractionPipeline | null = null;\n private initPromise: Promise<void> | null = null;\n // eslint-disable-next-line @typescript-eslint/prefer-readonly -- mutated in embed() and embedBatch()\n private _dimensions: number | null = null;\n // eslint-disable-next-line @typescript-eslint/prefer-readonly -- mutated in dispose()\n private disposed = false;\n private readonly modelName: string;\n private readonly batchSize: number;\n\n constructor(modelName = 'Xenova/all-MiniLM-L6-v2', batchSize = 32) {\n this.modelName = modelName;\n this.batchSize = batchSize;\n }\n\n /**\n * Guard against use-after-dispose\n */\n private assertNotDisposed(): void {\n if (this.disposed) {\n throw new Error('EmbeddingEngine has been disposed');\n }\n }\n\n /**\n * Initialize the embedding pipeline (concurrency-safe).\n * Multiple concurrent calls will share the same initialization promise.\n */\n async initialize(): Promise<void> {\n this.assertNotDisposed();\n if (this.extractor !== null) return;\n\n this.initPromise ??= (async (): Promise<void> => {\n try {\n // @ts-expect-error TS2590: TypeScript can't represent the complex union type from pipeline()\n // This is a known limitation with @huggingface/transformers overloaded signatures\n this.extractor = await pipeline('feature-extraction', this.modelName, {\n dtype: 'fp32',\n });\n } catch 
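The Zod schema above is what guards the bridge boundary. A small validation sketch (assumed import path), showing both the throwing helper and the non-throwing `safeParse` form that the same schema supports:

```typescript
import { ParsePythonResultSchema, validateParsePythonResult } from './crawl/schemas.js';

const raw: unknown = JSON.parse('{"nodes":[],"imports":[]}');

// Throwing form: raises ZodError if the bridge response has the wrong shape.
const result = validateParsePythonResult(raw);

// Non-throwing form via the exported schema.
const checked = ParsePythonResultSchema.safeParse(raw);
if (!checked.success) {
  console.error(checked.error.issues);
}

console.log(result.imports.length, checked.success); // 0 true
```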
(error) {\n // Allow retry on failure\n this.initPromise = null;\n throw error;\n }\n })();\n await this.initPromise;\n }\n\n async embed(text: string): Promise<number[]> {\n this.assertNotDisposed();\n if (this.extractor === null) {\n await this.initialize();\n }\n if (this.extractor === null) {\n throw new Error('Failed to initialize embedding model');\n }\n const output = await this.extractor(text, {\n pooling: 'mean',\n normalize: true,\n });\n // Convert typed array to number[] (output.data is Float32Array-like)\n const result = Array.from(output.data, (v) => Number(v));\n // Cache dimensions from result length\n this._dimensions ??= result.length;\n return result;\n }\n\n async embedBatch(texts: string[]): Promise<number[][]> {\n this.assertNotDisposed();\n if (this.extractor === null) {\n await this.initialize();\n }\n if (this.extractor === null) {\n throw new Error('Failed to initialize embedding model');\n }\n\n const results: number[][] = [];\n\n for (let i = 0; i < texts.length; i += this.batchSize) {\n const batch = texts.slice(i, i + this.batchSize);\n\n // True batching: single pipeline call for entire batch\n const output = await this.extractor(batch, {\n pooling: 'mean',\n normalize: true,\n });\n\n // Unpack batch results from tensor\n // Output dims are [batchSize, embeddingDim] with pooling\n const dim = output.dims[output.dims.length - 1] ?? 0;\n for (let b = 0; b < batch.length; b++) {\n const start = b * dim;\n const end = start + dim;\n results.push(Array.from(output.data.slice(start, end), (v) => Number(v)));\n }\n\n // Cache dimensions\n this._dimensions ??= dim;\n\n // Yield event loop between batches (not fixed 100ms delay)\n if (i + this.batchSize < texts.length) {\n await new Promise((resolve) => setImmediate(resolve));\n }\n }\n\n return results;\n }\n\n /**\n * Get cached embedding dimensions. 
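A usage sketch for the embedding engine shown above (model name and batch size mirror the defaults in the constructor; the import path is assumed):

```typescript
import { EmbeddingEngine } from './db/embeddings.js';

const engine = new EmbeddingEngine('Xenova/all-MiniLM-L6-v2', 32);
await engine.initialize(); // concurrent callers share one init promise

const single = await engine.embed('hello world');         // number[]
const batch = await engine.embedBatch(['alpha', 'beta']); // number[][], one pipeline call per 32 texts

console.log(single.length, batch.length); // e.g. 384 2
await engine.dispose(); // free ONNX resources before process exit
```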
Throws if embed() hasn't been called yet.\n * Use ensureDimensions() if you need to guarantee dimensions are available.\n */\n getDimensions(): number {\n if (this._dimensions === null) {\n throw new Error('Cannot get dimensions before first embed() call');\n }\n return this._dimensions;\n }\n\n /**\n * Ensure dimensions are available, initializing the model if needed.\n * Returns the embedding dimensions for the current model.\n */\n async ensureDimensions(): Promise<number> {\n if (this._dimensions === null) {\n // Use non-empty probe string (more reliable than empty string)\n await this.embed('dimension probe');\n }\n if (this._dimensions === null) {\n throw new Error('Failed to determine embedding dimensions');\n }\n return this._dimensions;\n }\n\n /**\n * Dispose the embedding pipeline to free resources.\n * Should be called before process exit to prevent ONNX runtime cleanup issues on macOS.\n * After disposal, this engine cannot be used again.\n */\n async dispose(): Promise<void> {\n if (this.extractor !== null) {\n await this.extractor.dispose();\n this.extractor = null;\n }\n this.initPromise = null;\n this._dimensions = null;\n this.disposed = true;\n }\n}\n","import * as lancedb from '@lancedb/lancedb';\nimport { createDocumentId } from '../types/brands.js';\nimport { DocumentMetadataSchema } from '../types/document.js';\nimport type { StoreId, DocumentId } from '../types/brands.js';\nimport type { Document, DocumentMetadata } from '../types/document.js';\nimport type { Table, Connection } from '@lancedb/lancedb';\n\ninterface LanceDocument {\n id: string;\n content: string;\n vector: number[];\n metadata: string; // JSON serialized\n [key: string]: unknown;\n}\n\ninterface SearchHit {\n id: string;\n content: string;\n metadata: string;\n _distance: number;\n}\n\nexport class LanceStore {\n private connection: Connection | null = null;\n private readonly tables: Map<string, Table> = new Map();\n private readonly dataDir: string;\n // eslint-disable-next-line @typescript-eslint/prefer-readonly -- set via setDimensions()\n private _dimensions: number | null = null;\n\n constructor(dataDir: string) {\n this.dataDir = dataDir;\n }\n\n /**\n * Set the embedding dimensions. Must be called before initialize().\n * This allows dimensions to be derived from the embedding model at runtime.\n * Idempotent: subsequent calls are ignored if dimensions are already set.\n */\n setDimensions(dimensions: number): void {\n this._dimensions ??= dimensions;\n }\n\n async initialize(storeId: StoreId): Promise<void> {\n if (this._dimensions === null) {\n throw new Error('Dimensions not set. 
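`ensureDimensions()` exists so that LanceStore's `setDimensions()` can be fed from the model at runtime instead of a hard-coded constant. A wiring sketch under assumptions: the import paths, data directory, and the direct `as StoreId` cast are illustrative only (the real code presumably builds branded ids through a factory):

```typescript
import { EmbeddingEngine } from './db/embeddings.js';
import { LanceStore } from './db/lance.js';
import type { StoreId } from './types/brands.js';

const engine = new EmbeddingEngine();
const lance = new LanceStore('./data');

const dims = await engine.ensureDimensions(); // probes the model if needed
lance.setDimensions(dims);                    // must happen before initialize()

const storeId = 'docs' as StoreId;            // assumption: stand-in for the real branded-id factory
await lance.initialize(storeId);
```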
Call setDimensions() before initialize().');\n }\n\n this.connection ??= await lancedb.connect(this.dataDir);\n\n const tableName = this.getTableName(storeId);\n const tableNames = await this.connection.tableNames();\n\n if (!tableNames.includes(tableName)) {\n // Create table with initial schema\n const table = await this.connection.createTable(tableName, [\n {\n id: '__init__',\n content: '',\n vector: new Array(this._dimensions).fill(0),\n metadata: '{}',\n },\n ]);\n // Delete the init row\n await table.delete('id = \"__init__\"');\n this.tables.set(tableName, table);\n } else {\n const table = await this.connection.openTable(tableName);\n this.tables.set(tableName, table);\n }\n }\n\n async addDocuments(storeId: StoreId, documents: Document[]): Promise<void> {\n const table = await this.getTable(storeId);\n const lanceDocuments: LanceDocument[] = documents.map((doc) => ({\n id: doc.id,\n content: doc.content,\n vector: [...doc.vector],\n metadata: JSON.stringify(doc.metadata),\n }));\n await table.add(lanceDocuments);\n }\n\n async deleteDocuments(storeId: StoreId, documentIds: DocumentId[]): Promise<void> {\n if (documentIds.length === 0) {\n return;\n }\n const table = await this.getTable(storeId);\n const idList = documentIds.map((id) => `\"${id}\"`).join(', ');\n await table.delete(`id IN (${idList})`);\n }\n\n async clearAllDocuments(storeId: StoreId): Promise<void> {\n const table = await this.getTable(storeId);\n await table.delete('id IS NOT NULL');\n }\n\n async search(\n storeId: StoreId,\n vector: number[],\n limit: number,\n // threshold is kept for API compatibility but filtering is done after normalization\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n _threshold?: number\n ): Promise<\n Array<{ id: DocumentId; content: string; score: number; metadata: DocumentMetadata }>\n > {\n const table = await this.getTable(storeId);\n const query = table.vectorSearch(vector).limit(limit).distanceType('cosine');\n\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n const results = (await query.toArray()) as SearchHit[];\n\n // Return all results - threshold filtering is applied after score normalization\n // in search.service.ts to match displayed scores\n return results.map((r) => {\n const metadata = DocumentMetadataSchema.parse(JSON.parse(r.metadata));\n return {\n id: createDocumentId(r.id),\n content: r.content,\n score: 1 - r._distance,\n // Schema validates structure, cast to branded type\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n metadata: metadata as DocumentMetadata,\n };\n });\n }\n\n async createFtsIndex(storeId: StoreId): Promise<void> {\n const table = await this.getTable(storeId);\n await table.createIndex('content', {\n config: lancedb.Index.fts(),\n });\n }\n\n async fullTextSearch(\n storeId: StoreId,\n query: string,\n limit: number\n ): Promise<\n Array<{ id: DocumentId; content: string; score: number; metadata: DocumentMetadata }>\n > {\n const table = await this.getTable(storeId);\n\n // eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n const results = (await table.search(query, 'fts').limit(limit).toArray()) as Array<{\n id: string;\n content: string;\n metadata: string;\n _score: number;\n }>;\n\n return results.map((r) => {\n const metadata = DocumentMetadataSchema.parse(JSON.parse(r.metadata));\n return {\n id: createDocumentId(r.id),\n content: r.content,\n score: r._score,\n // Schema validates structure, cast to branded type\n // 
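A sketch of the add/search round trip against the table API above (ids are built with the exported `createDocumentId`; the `as StoreId` cast and paths are assumptions for illustration):

```typescript
import { LanceStore } from './db/lance.js';
import { createDocumentId } from './types/brands.js';
import type { StoreId } from './types/brands.js';
import type { Document } from './types/document.js';

const storeId = 'docs' as StoreId; // assumption: stand-in for the real branded-id factory
const lance = new LanceStore('./data');
lance.setDimensions(3);
await lance.initialize(storeId);

const doc: Document = {
  id: createDocumentId('doc-1'),
  content: 'hello world',
  vector: [0.1, 0.2, 0.3],
  metadata: { type: 'chunk', storeId, indexedAt: new Date().toISOString() },
};

await lance.addDocuments(storeId, [doc]);
const hits = await lance.search(storeId, [0.1, 0.2, 0.3], 5);
console.log(hits[0]?.score); // 1 - cosine distance; threshold filtering happens upstream
```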
eslint-disable-next-line @typescript-eslint/consistent-type-assertions\n metadata: metadata as DocumentMetadata,\n };\n });\n }\n\n async deleteStore(storeId: StoreId): Promise<void> {\n const tableName = this.getTableName(storeId);\n // Connect on-demand - no dimensions needed for listing/dropping tables\n this.connection ??= await lancedb.connect(this.dataDir);\n const tableNames = await this.connection.tableNames();\n if (tableNames.includes(tableName)) {\n await this.connection.dropTable(tableName);\n this.tables.delete(tableName);\n }\n }\n\n close(): void {\n this.tables.clear();\n if (this.connection !== null) {\n this.connection.close();\n this.connection = null;\n }\n }\n\n /**\n * Async close for API consistency. Calls sync close() internally.\n * Do NOT call process.exit() after this - let the event loop drain\n * naturally so native threads can complete cleanup.\n */\n closeAsync(): Promise<void> {\n this.close();\n return Promise.resolve();\n }\n\n private getTableName(storeId: StoreId): string {\n return `documents_${storeId}`;\n }\n\n private async getTable(storeId: StoreId): Promise<Table> {\n const tableName = this.getTableName(storeId);\n let table = this.tables.get(tableName);\n if (table === undefined) {\n await this.initialize(storeId);\n table = this.tables.get(tableName);\n }\n if (table === undefined) {\n throw new Error(`Table not found for store: ${storeId}`);\n }\n return table;\n }\n}\n","import { z } from 'zod';\nimport type { DocumentId, StoreId } from './brands.js';\n\n// ============================================================================\n// Zod Schemas\n// ============================================================================\n\nexport const DocumentTypeSchema = z.enum(['file', 'chunk', 'web']);\n\nexport const DocumentMetadataSchema = z\n .object({\n path: z.string().optional(),\n url: z.string().optional(),\n type: DocumentTypeSchema,\n storeId: z.string(),\n indexedAt: z.string(), // ISO 8601 string (what JSON serialization produces)\n fileHash: z.string().optional(),\n chunkIndex: z.number().optional(),\n totalChunks: z.number().optional(),\n })\n .loose(); // Allow additional fields per index signature\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport type DocumentType = z.infer<typeof DocumentTypeSchema>;\n\nexport interface DocumentMetadata {\n readonly path?: string | undefined;\n readonly url?: string | undefined;\n readonly type: DocumentType;\n readonly storeId: StoreId;\n readonly indexedAt: string; // ISO 8601 string\n readonly fileHash?: string | undefined;\n readonly chunkIndex?: number | undefined;\n readonly totalChunks?: number | undefined;\n readonly [key: string]: unknown;\n}\n\nexport interface Document {\n readonly id: DocumentId;\n readonly content: string;\n readonly vector: readonly number[];\n readonly metadata: DocumentMetadata;\n}\n\nexport interface DocumentChunk {\n readonly id: DocumentId;\n readonly content: string;\n readonly startLine?: number | undefined;\n readonly endLine?: number | undefined;\n readonly metadata: DocumentMetadata;\n}\n","import { CodeGraphService } from './code-graph.service.js';\nimport { ConfigService } from './config.service.js';\nimport { GitignoreService } from './gitignore.service.js';\nimport { IndexService } from './index.service.js';\nimport { ManifestService } from './manifest.service.js';\nimport { SearchService } from 
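The metadata schema above is declared `.loose()`, so extra keys written by indexers survive the JSON round trip. A small sketch of that behavior (assumed import path):

```typescript
import { DocumentMetadataSchema } from './types/document.js';

const stored = JSON.stringify({
  type: 'web',
  storeId: 'docs',
  indexedAt: new Date().toISOString(),
  url: 'https://example.com',
  customTag: 'anything', // accepted because the schema is .loose()
});

const metadata = DocumentMetadataSchema.parse(JSON.parse(stored));
console.log(metadata.type, metadata['customTag']); // 'web' 'anything'
```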
'./search.service.js';\nimport { StoreDefinitionService } from './store-definition.service.js';\nimport { StoreService } from './store.service.js';\nimport { PythonBridge } from '../crawl/bridge.js';\nimport { EmbeddingEngine } from '../db/embeddings.js';\nimport { LanceStore } from '../db/lance.js';\nimport { createLogger, shutdownLogger } from '../logging/index.js';\nimport type { StoreServiceOptions } from './store.service.js';\nimport type { AppConfig } from '../types/config.js';\n\nconst logger = createLogger('services');\n\nexport { ConfigService } from './config.service.js';\nexport { StoreService } from './store.service.js';\nexport { SearchService } from './search.service.js';\nexport { IndexService } from './index.service.js';\nexport { JobService } from './job.service.js';\nexport { WatchService } from './watch.service.js';\nexport { ChunkingService } from './chunking.service.js';\nexport { CodeGraphService } from './code-graph.service.js';\n\nexport interface ServiceContainer {\n config: ConfigService;\n store: StoreService;\n search: SearchService;\n index: IndexService;\n lance: LanceStore;\n embeddings: EmbeddingEngine;\n codeGraph: CodeGraphService;\n pythonBridge: PythonBridge;\n manifest: ManifestService;\n}\n\n/**\n * Lazy service container that defers heavy initialization until first use.\n *\n * Initialization strategy:\n * - Eager (lightweight): config, store, lance (wrapper only), pythonBridge (started for fork safety)\n * - Lazy (heavy): embeddings (3-10s model load), search, index, codeGraph\n *\n * IMPORTANT: PythonBridge must be started BEFORE lancedb.connect() is called.\n * LanceDB's native Rust code is not fork-safe - spawning subprocesses after\n * lancedb is loaded corrupts the mutex state, causing crashes on shutdown.\n */\nexport class LazyServiceContainer implements ServiceContainer {\n // Eagerly initialized (lightweight)\n readonly config: ConfigService;\n readonly store: StoreService;\n readonly lance: LanceStore;\n readonly pythonBridge: PythonBridge;\n\n // Configuration for lazy initialization\n private readonly appConfig: AppConfig;\n private readonly dataDir: string;\n\n // Lazily initialized (heavy)\n // eslint-disable-next-line @typescript-eslint/prefer-readonly -- mutated in lazy getter\n private _manifest: ManifestService | null = null;\n private _embeddings: EmbeddingEngine | null = null;\n private _codeGraph: CodeGraphService | null = null;\n private _search: SearchService | null = null;\n private _index: IndexService | null = null;\n\n constructor(\n config: ConfigService,\n appConfig: AppConfig,\n dataDir: string,\n store: StoreService,\n lance: LanceStore,\n pythonBridge: PythonBridge\n ) {\n this.config = config;\n this.appConfig = appConfig;\n this.dataDir = dataDir;\n this.store = store;\n this.lance = lance;\n this.pythonBridge = pythonBridge;\n }\n\n /**\n * EmbeddingEngine is lazily created on first access.\n * Model loading (3-10s) is deferred until embed() is called.\n */\n get embeddings(): EmbeddingEngine {\n if (this._embeddings === null) {\n logger.debug('Lazy-initializing EmbeddingEngine');\n this._embeddings = new EmbeddingEngine(\n this.appConfig.embedding.model,\n this.appConfig.embedding.batchSize\n );\n }\n return this._embeddings;\n }\n\n /**\n * CodeGraphService is lazily created on first access.\n */\n get codeGraph(): CodeGraphService {\n if (this._codeGraph === null) {\n logger.debug('Lazy-initializing CodeGraphService');\n this._codeGraph = new CodeGraphService(this.dataDir, this.pythonBridge);\n }\n return 
this._codeGraph;\n }\n\n /**\n * SearchService is lazily created on first access.\n */\n get search(): SearchService {\n if (this._search === null) {\n logger.debug('Lazy-initializing SearchService');\n this._search = new SearchService(\n this.lance,\n this.embeddings,\n this.codeGraph,\n this.appConfig.search\n );\n }\n return this._search;\n }\n\n /**\n * IndexService is lazily created on first access.\n */\n get index(): IndexService {\n if (this._index === null) {\n logger.debug('Lazy-initializing IndexService');\n this._index = new IndexService(this.lance, this.embeddings, {\n codeGraphService: this.codeGraph,\n manifestService: this.manifest,\n chunkSize: this.appConfig.indexing.chunkSize,\n chunkOverlap: this.appConfig.indexing.chunkOverlap,\n concurrency: this.appConfig.indexing.concurrency,\n ignorePatterns: this.appConfig.indexing.ignorePatterns,\n });\n }\n return this._index;\n }\n\n /**\n * ManifestService is lazily created on first access.\n */\n get manifest(): ManifestService {\n if (this._manifest === null) {\n logger.debug('Lazy-initializing ManifestService');\n this._manifest = new ManifestService(this.dataDir);\n }\n return this._manifest;\n }\n\n /**\n * Check if embeddings have been initialized (for cleanup purposes).\n */\n get hasEmbeddings(): boolean {\n return this._embeddings !== null;\n }\n\n /**\n * Check if search service has been initialized (for cleanup purposes).\n */\n get hasSearch(): boolean {\n return this._search !== null;\n }\n}\n\n/**\n * Create lazy service container for MCP server.\n *\n * This defers heavy initialization (embeddings model loading) until first use,\n * reducing MCP server startup time from ~5s to <500ms.\n *\n * PythonBridge is started eagerly to avoid fork-safety issues with LanceDB.\n */\nexport async function createLazyServices(\n configPath?: string,\n dataDir?: string,\n projectRoot?: string\n): Promise<LazyServiceContainer> {\n logger.info({ configPath, dataDir, projectRoot }, 'Initializing lazy services');\n const startTime = Date.now();\n\n const config = new ConfigService(configPath, dataDir, projectRoot);\n const appConfig = await config.load();\n const resolvedDataDir = config.resolveDataDir();\n\n // IMPORTANT: Start PythonBridge BEFORE creating LanceStore.\n // LanceDB's native Rust code is not fork-safe. 
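The getters above all follow the same null-check-then-cache shape. A generic sketch of that pattern, reduced to its essentials (illustrative only, not code from the package):

```typescript
// Minimal lazy-getter sketch: construction is deferred until first access,
// then the instance is cached; an "initialized" probe mirrors hasEmbeddings/hasSearch.
class Lazy<T> {
  private value: T | null = null;
  constructor(private readonly factory: () => T) {}

  get(): T {
    this.value ??= this.factory();
    return this.value;
  }

  get initialized(): boolean {
    return this.value !== null;
  }
}

const heavy = new Lazy(() => ({ loadedAt: Date.now() }));
console.log(heavy.initialized);                        // false: nothing built yet
console.log(heavy.get().loadedAt, heavy.initialized);  // value, then true
```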
Spawning subprocesses after\n // lancedb is loaded corrupts the mutex state, causing crashes on shutdown.\n const pythonBridge = new PythonBridge();\n await pythonBridge.start();\n\n // Now safe to create LanceStore wrapper (doesn't connect until initialize() is called)\n const lance = new LanceStore(resolvedDataDir);\n\n // Create project-root-dependent services using resolved project root\n const resolvedProjectRoot = config.resolveProjectRoot();\n const definitionService = new StoreDefinitionService(resolvedProjectRoot);\n const gitignoreService = new GitignoreService(resolvedProjectRoot);\n const storeOptions: StoreServiceOptions = {\n definitionService,\n gitignoreService,\n projectRoot: resolvedProjectRoot,\n };\n\n const store = new StoreService(resolvedDataDir, storeOptions);\n await store.initialize();\n\n const durationMs = Date.now() - startTime;\n logger.info(\n { dataDir: resolvedDataDir, projectRoot: resolvedProjectRoot, durationMs },\n 'Lazy services initialized'\n );\n\n return new LazyServiceContainer(config, appConfig, resolvedDataDir, store, lance, pythonBridge);\n}\n\n/**\n * Create services with eager initialization (for CLI commands).\n *\n * This initializes all services including the embedding model upfront.\n * Use createLazyServices() for MCP server to reduce startup time.\n */\nexport async function createServices(\n configPath?: string,\n dataDir?: string,\n projectRoot?: string\n): Promise<ServiceContainer> {\n logger.info({ configPath, dataDir, projectRoot }, 'Initializing services');\n\n const config = new ConfigService(configPath, dataDir, projectRoot);\n const appConfig = await config.load();\n const resolvedDataDir = config.resolveDataDir();\n\n // IMPORTANT: Start PythonBridge BEFORE creating LanceStore.\n // LanceDB's native Rust code is not fork-safe. 
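A startup sketch for the lazy path (import path assumed; all three arguments are optional). Touching `embeddings` is what triggers the deferred model load:

```typescript
import { createLazyServices, destroyServices } from './services/index.js';

const services = await createLazyServices(undefined, './data', process.cwd());
try {
  console.log(services.hasEmbeddings);                       // false: model not loaded yet
  const dims = await services.embeddings.ensureDimensions(); // lazily loads the model
  console.log(services.hasEmbeddings, dims);                 // true, e.g. 384
} finally {
  await destroyServices(services); // LIFO shutdown; skips never-initialized services
}
```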
Spawning subprocesses after\n // lancedb is loaded corrupts the mutex state, causing crashes on shutdown.\n const pythonBridge = new PythonBridge();\n await pythonBridge.start();\n\n // Now safe to initialize lancedb and other services\n const lance = new LanceStore(resolvedDataDir);\n const embeddings = new EmbeddingEngine(appConfig.embedding.model, appConfig.embedding.batchSize);\n\n await embeddings.initialize();\n\n // Create project-root-dependent services using resolved project root\n const resolvedProjectRoot = config.resolveProjectRoot();\n const definitionService = new StoreDefinitionService(resolvedProjectRoot);\n const gitignoreService = new GitignoreService(resolvedProjectRoot);\n const storeOptions: StoreServiceOptions = {\n definitionService,\n gitignoreService,\n projectRoot: resolvedProjectRoot,\n };\n\n const store = new StoreService(resolvedDataDir, storeOptions);\n await store.initialize();\n\n const codeGraph = new CodeGraphService(resolvedDataDir, pythonBridge);\n const manifest = new ManifestService(resolvedDataDir);\n const search = new SearchService(lance, embeddings, codeGraph, appConfig.search);\n const index = new IndexService(lance, embeddings, {\n codeGraphService: codeGraph,\n manifestService: manifest,\n chunkSize: appConfig.indexing.chunkSize,\n chunkOverlap: appConfig.indexing.chunkOverlap,\n concurrency: appConfig.indexing.concurrency,\n ignorePatterns: appConfig.indexing.ignorePatterns,\n });\n\n logger.info(\n { dataDir: resolvedDataDir, projectRoot: resolvedProjectRoot },\n 'Services initialized successfully'\n );\n\n return {\n config,\n store,\n search,\n index,\n lance,\n embeddings,\n codeGraph,\n pythonBridge,\n manifest,\n };\n}\n\n/**\n * Cleanly shut down all services, stopping background processes.\n * Call this after CLI commands complete to allow the process to exit.\n * Attempts all cleanup operations and throws if any fail.\n *\n * For LazyServiceContainer, only disposes embeddings if they were initialized.\n */\nexport async function destroyServices(services: ServiceContainer): Promise<void> {\n logger.info('Shutting down services');\n const errors: Error[] = [];\n\n // IMPORTANT: Shutdown in reverse order of initialization (LIFO).\n // PythonBridge must stop BEFORE LanceStore closes to avoid mutex corruption.\n // LanceDB's native Rust code is not fork-safe and has threading issues\n // if subprocess signals are sent while lancedb is shutting down.\n\n // 0. Clean up SearchService event subscriptions (no async, just unsubscribe)\n // Skip for lazy containers where search was never accessed (avoids triggering initialization)\n const isLazyContainer = services instanceof LazyServiceContainer;\n const shouldCleanupSearch = !isLazyContainer || services.hasSearch;\n\n if (shouldCleanupSearch) {\n services.search.cleanup();\n } else {\n logger.debug('Skipping search cleanup (not initialized)');\n }\n\n // 1. Stop Python bridge first (reverse of init: started first, stopped first)\n try {\n await services.pythonBridge.stop();\n } catch (e) {\n const error = e instanceof Error ? e : new Error(String(e));\n logger.error({ error }, 'Error stopping Python bridge');\n errors.push(error);\n }\n\n // 2. Dispose embedding engine (only if initialized for lazy containers)\n const shouldDisposeEmbeddings = !isLazyContainer || services.hasEmbeddings;\n\n if (shouldDisposeEmbeddings) {\n try {\n await services.embeddings.dispose();\n } catch (e) {\n const error = e instanceof Error ? 
e : new Error(String(e));\n logger.error({ error }, 'Error disposing EmbeddingEngine');\n errors.push(error);\n }\n } else {\n logger.debug('Skipping embeddings disposal (not initialized)');\n }\n\n // 3. Close LanceStore last (reverse of init: created after PythonBridge started)\n try {\n await services.lance.closeAsync();\n } catch (e) {\n const error = e instanceof Error ? e : new Error(String(e));\n logger.error({ error }, 'Error closing LanceStore');\n errors.push(error);\n }\n\n await shutdownLogger();\n\n // Throw if any errors occurred during cleanup\n if (errors.length === 1 && errors[0] !== undefined) {\n throw new Error(`Service shutdown failed: ${errors[0].message}`, { cause: errors[0] });\n } else if (errors.length > 1) {\n throw new AggregateError(errors, 'Multiple errors during service shutdown');\n }\n}\n"],"mappings":";;;;;;;;;;;;AAsBO,IAAM,kBAAN,MAAM,iBAAgB;AAAA,EAC3B,OAAe;AAAA;AAAA,EAGE,uBAAuB,oBAAI,IAA6B;AAAA;AAAA,EAGxD,sBAAsB,oBAAI,IAA6B;AAAA,EAEhE,cAAc;AAAA,EAEtB;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,cAA+B;AACpC,qBAAgB,aAAa,IAAI,iBAAgB;AACjD,WAAO,iBAAgB;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,gBAAsB;AAC3B,qBAAgB,WAAW;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,SAAS,SAAgC;AAEvC,QAAI,KAAK,qBAAqB,IAAI,QAAQ,UAAU,GAAG;AACrD;AAAA,IACF;AAGA,eAAW,OAAO,QAAQ,YAAY;AACpC,YAAM,gBAAgB,KAAK,mBAAmB,GAAG;AACjD,YAAM,kBAAkB,KAAK,oBAAoB,IAAI,aAAa;AAClE,UAAI,oBAAoB,QAAW;AACjC,cAAM,IAAI;AAAA,UACR,cAAc,aAAa,uCAAuC,gBAAgB,UAAU;AAAA,QAC9F;AAAA,MACF;AAAA,IACF;AAGA,SAAK,qBAAqB,IAAI,QAAQ,YAAY,OAAO;AAGzD,eAAW,OAAO,QAAQ,YAAY;AACpC,YAAM,gBAAgB,KAAK,mBAAmB,GAAG;AACjD,WAAK,oBAAoB,IAAI,eAAe,OAAO;AAAA,IACrD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,WAAW,YAA6B;AACtC,UAAM,UAAU,KAAK,qBAAqB,IAAI,UAAU;AACxD,QAAI,YAAY,QAAW;AACzB,aAAO;AAAA,IACT;AAGA,SAAK,qBAAqB,OAAO,UAAU;AAG3C,eAAW,OAAO,QAAQ,YAAY;AACpC,YAAM,gBAAgB,KAAK,mBAAmB,GAAG;AACjD,WAAK,oBAAoB,OAAO,aAAa;AAAA,IAC/C;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,eAAe,KAA0C;AACvD,UAAM,gBAAgB,KAAK,mBAAmB,GAAG;AACjD,WAAO,KAAK,oBAAoB,IAAI,aAAa;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,gBAAgB,YAAiD;AAC/D,WAAO,KAAK,qBAAqB,IAAI,UAAU;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,iBAAoC;AAClC,WAAO,MAAM,KAAK,KAAK,qBAAqB,OAAO,CAAC;AAAA,EACtD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,aAAa,KAAsB;AACjC,UAAM,gBAAgB,KAAK,mBAAmB,GAAG;AACjD,WAAO,KAAK,oBAAoB,IAAI,aAAa;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,KAAqB;AAC9C,WAAO,IAAI,WAAW,GAAG,IAAI,MAAM,IAAI,GAAG;AAAA,EAC5C;AACF;;;AChJA,SAAS,WAAW,cAAAA,mBAAkB;AACtC,SAAS,QAAAC,aAAY;AACrB,OAAO,UAA+C;;;ACZtD,SAAS,YAAY,UAAU,oBAAoB;AACnD,SAAS,SAAS,MAAM,WAAW,WAAW;AAiBvC,IAAM,qBAAN,MAAyB;AAAA;AAAA;AAAA;AAAA,EAI9B,OAAO,QAAQ,SAAsC;AAEnD,QAAI,SAAS,gBAAgB,UAAa,QAAQ,gBAAgB,IAAI;AACpE,aAAO,KAAK,UAAU,QAAQ,WAAW;AAAA,IAC3C;AAGA,UAAM,iBAAiB,QAAQ,IAAI,cAAc;AACjD,QAAI,mBAAmB,UAAa,mBAAmB,IAAI;AACzD,aAAO,KAAK,UAAU,cAAc;AAAA,IACtC;AAGA,UAAM,UAAU,KAAK,YAAY,QAAQ,IAAI,CAAC;AAC9C,QAAI,YAAY,MAAM;AACpB,aAAO;AAAA,IACT;AAGA,UAAM,SAAS,QAAQ,IAAI,KAAK;AAChC,QAAI,WAAW,UAAa,WAAW,IAAI;AACzC,aAAO,KAAK,UAAU,MAAM;AAAA,IAC9B;AAGA,WAAO,QAAQ,IAAI;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,YAAY,WAAkC;AACnD,QAAI,cAAc,UAAU,SAAS;AACrC,UAAM,OAAO,UAAU,GAAG;AAG1B,WAAO,gBAAgB,MAAM;AAC3B,YAAM,UAAU,KAAK,aAAa,MAAM;AAExC,UAAI,WAAW,OAAO,GAAG;AACvB,YAAI;AACF,gBAAM,QAAQ,SAAS,OAAO;AAE9B,cAAI,MAAM,YAAY,KAAK,MAAM,OAAO,GAAG;AACzC,mBAAO;AAAA,UACT;AAAA,QACF,QAAQ;AAAA,QAER;AAAA,MACF;AAGA,YAAM,aAAa,QAAQ,WAAW;AACtC,UAAI,eAAe,aAAa;AAE9B;AAAA,MACF;AACA,oBAAc;AAAA,IAChB;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,UAAUC,OAAsB;AACrC,QAAI;AAEF,YAAM,WAAW,aAAaA,KAAI;AA
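On the caller side, the shutdown contract above means `destroyServices()` should always run and may surface either a single wrapped `Error` or an `AggregateError`. A sketch (assumed import path):

```typescript
import { createServices, destroyServices } from './services/index.js';

const services = await createServices();
try {
  // ... work with services.index / services.search ...
} finally {
  try {
    await destroyServices(services); // bridge, then embeddings, then LanceStore (LIFO)
  } catch (err) {
    if (err instanceof AggregateError) {
      for (const e of err.errors) console.error(e);
    } else {
      console.error(err);
    }
  }
}
```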
ElC,aAAO,UAAU,QAAQ;AAAA,IAC3B,QAAQ;AAEN,aAAO,UAAUA,KAAI;AAAA,IACvB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,SAASA,OAAuB;AACrC,QAAI;AACF,YAAM,QAAQ,SAASA,KAAI;AAC3B,aAAO,MAAM,YAAY;AAAA,IAC3B,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AACF;;;AD7FA,IAAM,eAAoC,CAAC,SAAS,SAAS,QAAQ,QAAQ,SAAS,OAAO;AAC7F,IAAM,mBAAwC,IAAI,IAAI,YAAY;AAGlE,SAAS,YAAoB;AAC3B,QAAM,cAAc,mBAAmB,QAAQ;AAC/C,SAAOC,MAAK,aAAa,WAAW,oBAAoB,MAAM;AAChE;AAGA,SAAS,eAAuB;AAC9B,QAAM,SAAS,UAAU;AACzB,MAAI,CAACC,YAAW,MAAM,GAAG;AACvB,cAAU,QAAQ,EAAE,WAAW,KAAK,CAAC;AAAA,EACvC;AACA,SAAO;AACT;AAGA,SAAS,gBAAgB,OAAkC;AACzD,SAAO,iBAAiB,IAAI,KAAK;AACnC;AAGA,SAAS,cAAwB;AAC/B,QAAM,QAAQ,QAAQ,IAAI,WAAW,GAAG,YAAY;AAEpD,MAAI,UAAU,UAAa,UAAU,IAAI;AACvC,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,gBAAgB,KAAK,GAAG;AAC3B,UAAM,IAAI,MAAM,uBAAuB,KAAK,oBAAoB,aAAa,KAAK,IAAI,CAAC,EAAE;AAAA,EAC3F;AAEA,SAAO;AACT;AAGA,IAAI,aAA4B;AAGhC,SAAS,mBAA2B;AAClC,MAAI,eAAe,MAAM;AACvB,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,aAAa;AAC5B,QAAM,UAAUD,MAAK,QAAQ,SAAS;AACtC,QAAM,QAAQ,YAAY;AAE1B,QAAM,UAAyB;AAAA,IAC7B;AAAA,IACA,WAAW,KAAK,iBAAiB;AAAA,IACjC,YAAY;AAAA,MACV,OAAO,CAAC,WAAW,EAAE,OAAO,MAAM;AAAA,IACpC;AAAA,IACA,WAAW;AAAA,MACT,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,MAAM;AAAA,QACN,MAAM;AAAA;AAAA,QACN,OAAO,EAAE,OAAO,EAAE;AAAA;AAAA,QAClB,OAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAEA,eAAa,KAAK,OAAO;AACzB,SAAO;AACT;AAYO,SAAS,aAAa,QAAwB;AACnD,QAAM,OAAO,iBAAiB;AAC9B,SAAO,KAAK,MAAM,EAAE,OAAO,CAAC;AAC9B;AAYO,SAAS,eAAe,OAA0B;AACvD,QAAM,eAAe,YAAY;AACjC,QAAM,eAAe,aAAa,QAAQ,YAAY;AACtD,QAAM,aAAa,aAAa,QAAQ,KAAK;AAC7C,SAAO,cAAc;AACvB;AAKO,SAAS,kBAA0B;AACxC,SAAO,UAAU;AACnB;AAKO,SAAS,iBAAgC;AAC9C,SAAO,IAAI,QAAQ,CAACE,aAAY;AAC9B,QAAI,eAAe,MAAM;AACvB,iBAAW,MAAM;AAEjB,iBAAW,MAAM;AACf,qBAAa;AACb,QAAAA,SAAQ;AAAA,MACV,GAAG,GAAG;AAAA,IACR,OAAO;AACL,MAAAA,SAAQ;AAAA,IACV;AAAA,EACF,CAAC;AACH;;;AEzIA,SAAS,kBAAkB;AAC3B,SAAS,eAAe,aAAAC,YAAW,cAAAC,mBAAkB;AACrD,SAAS,QAAAC,aAAY;AAIrB,IAAM,qBAAqB;AAG3B,IAAM,yBAAyB;AAe/B,SAAS,gBAAwB;AAC/B,QAAM,MAAMC,MAAK,gBAAgB,GAAG,SAAS;AAC7C,MAAI,CAACC,YAAW,GAAG,GAAG;AACpB,IAAAC,WAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,EACpC;AACA,SAAO;AACT;AAGA,SAAS,aAAa,YAA4B;AAChD,SAAO,WAAW,QAAQ,kBAAkB,GAAG,EAAE,UAAU,GAAG,EAAE;AAClE;AAuBO,SAAS,iBACd,SACA,MACA,YACA,WAAoB,eAAe,OAAO,GAC1B;AAChB,QAAM,YAAY,OAAO,WAAW,SAAS,MAAM;AACnD,QAAM,OAAO,WAAW,KAAK,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK,EAAE,UAAU,GAAG,EAAE;AAC5E,QAAM,UAAU,eAAe,SAAS,kBAAkB;AAE1D,QAAM,cAAc,EAAE,SAAS,WAAW,KAAK;AAG/C,MAAI,YAAY,YAAY,wBAAwB;AAClD,UAAM,aAAY,oBAAI,KAAK,GAAE,YAAY,EAAE,QAAQ,SAAS,GAAG;AAC/D,UAAM,SAAS,aAAa,UAAU;AACtC,UAAM,WAAW,GAAG,SAAS,IAAI,IAAI,IAAI,MAAM,IAAI,IAAI;AACvD,UAAM,WAAWF,MAAK,cAAc,GAAG,QAAQ;AAE/C;AAAA,MACE;AAAA,MACA,KAAK;AAAA,QACH;AAAA,UACE,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,UAClC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAEA,WAAO,EAAE,GAAG,aAAa,aAAa,SAAS;AAAA,EACjD;AAEA,SAAO;AACT;AASO,SAAS,eAAe,SAAiB,YAAoB,oBAA4B;AAC9F,MAAI,QAAQ,UAAU,WAAW;AAC/B,WAAO;AAAA,EACT;AACA,SAAO,GAAG,QAAQ,UAAU,GAAG,SAAS,CAAC;AAC3C;;;ACtHA,SAAS,kBAAkB;AAC3B,OAAO,QAAQ;AACf,OAAO,UAAU;;;ACFjB,SAAS,SAAS;AAMX,IAAM,gBAAgB,EAAE,KAAK,CAAC,SAAS,SAAS,OAAO,CAAC;AACxD,IAAM,kBAAkB,EAAE,KAAK,CAAC,WAAW,WAAW,aAAa,UAAU,WAAW,CAAC;AAEzF,IAAM,mBAAmB,EAAE,OAAO;AAAA,EACvC,WAAW,EAAE,OAAO,EAAE,SAAS;AAAA,EAC/B,SAAS,EAAE,OAAO,EAAE,SAAS;AAAA,EAC7B,KAAK,EAAE,OAAO,EAAE,SAAS;AAAA,EACzB,MAAM,EAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,gBAAgB,EAAE,OAAO,EAAE,SAAS;AAAA,EACpC,YAAY,EAAE,OAAO,EAAE,SAAS;AAAA,EAChC,WAAW,EAAE,OAAO,EAAE,SAAS;AAAA,EAC/B,aAAa,EAAE,OAAO,EAAE,SAAS;AAAA,EACjC,aAAa,EAAE,OAAO,EAAE,SAAS;AAAA,EACjC,OAAO,EAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAE3B,kBAAkB,EAAE,OAAO,EAAE,SAAS;AAAA,EACtC,oBAAoB,EAAE,OAAO,EAAE
,SAAS;AAAA,EACxC,UAAU,EAAE,OAAO,EAAE,SAAS;AAAA,EAC9B,QAAQ,EAAE,QAAQ,EAAE,SAAS;AAAA,EAC7B,aAAa,EAAE,QAAQ,EAAE,SAAS;AAAA,EAClC,cAAc,EAAE,OAAO,EAAE,SAAS;AACpC,CAAC;AAEM,IAAM,YAAY,EAAE,OAAO;AAAA,EAChC,IAAI,EAAE,OAAO;AAAA,EACb,MAAM;AAAA,EACN,QAAQ;AAAA,EACR,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,EAAE,IAAI,GAAG;AAAA,EACnC,SAAS,EAAE,OAAO;AAAA,EAClB,SAAS,iBAAiB,QAAQ,CAAC,CAAC;AAAA,EACpC,WAAW,EAAE,OAAO;AAAA,EACpB,WAAW,EAAE,OAAO;AACtB,CAAC;;;AClCM,SAAS,GAAM,MAA2B;AAC/C,SAAO,EAAE,SAAS,MAAM,KAAK;AAC/B;AAEO,SAAS,IAAO,OAA4B;AACjD,SAAO,EAAE,SAAS,OAAO,MAAM;AACjC;;;ACVA,SAAS,iBAAAG,gBAAe,YAAY,aAAAC,kBAAiB;AACrD,SAAS,WAAW,QAAQ,aAAa;AACzC,SAAS,WAAAC,gBAAe;AAYxB,eAAsB,gBAAgB,UAAkB,SAAgC;AAEtF,QAAM,MAAMA,SAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AAElD,QAAM,WAAW,GAAG,QAAQ,QAAQ,OAAO,KAAK,IAAI,CAAC,CAAC,IAAI,OAAO,QAAQ,GAAG,CAAC;AAC7E,QAAM,UAAU,UAAU,SAAS,OAAO;AAC1C,QAAM,OAAO,UAAU,QAAQ;AACjC;AAYO,SAAS,oBAAoB,UAAkB,SAAuB;AAE3E,EAAAD,WAAUC,SAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AAEhD,QAAM,WAAW,GAAG,QAAQ,QAAQ,OAAO,KAAK,IAAI,CAAC,CAAC,IAAI,OAAO,QAAQ,GAAG,CAAC;AAC7E,EAAAF,eAAc,UAAU,SAAS,OAAO;AACxC,aAAW,UAAU,QAAQ;AAC/B;;;AHhCO,IAAM,aAAN,MAAiB;AAAA,EACL;AAAA,EAEjB,YAAY,SAAkB;AAE5B,QAAI;AACJ,QAAI,YAAY,QAAW;AACzB,gBAAU;AAAA,IACZ,OAAO;AACL,YAAM,UAAU,QAAQ,IAAI,MAAM,KAAK,QAAQ,IAAI,aAAa;AAChE,UAAI,YAAY,QAAW;AACzB,cAAM,IAAI,MAAM,sDAAsD;AAAA,MACxE;AACA,gBAAU,KAAK,KAAK,SAAS,+BAA+B;AAAA,IAC9D;AACA,SAAK,UAAU,KAAK,KAAK,SAAS,MAAM;AAGxC,QAAI,CAAC,GAAG,WAAW,KAAK,OAAO,GAAG;AAChC,SAAG,UAAU,KAAK,SAAS,EAAE,WAAW,KAAK,CAAC;AAAA,IAChD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,QAA8B;AACtC,UAAM,MAAW;AAAA,MACf,IAAI,OAAO,WAAW,EAAE,QAAQ,MAAM,EAAE,EAAE,UAAU,GAAG,EAAE,CAAC;AAAA,MAC1D,MAAM,OAAO;AAAA,MACb,QAAQ;AAAA,MACR,UAAU;AAAA,MACV,SAAS,OAAO,WAAW,GAAG,OAAO,IAAI;AAAA,MACzC,SAAS,OAAO;AAAA,MAChB,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,MAClC,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IACpC;AAGA,SAAK,SAAS,GAAG;AAEjB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,OAAe,SAAgC;AACvD,UAAM,MAAM,KAAK,OAAO,KAAK;AAE7B,QAAI,CAAC,KAAK;AACR,YAAM,IAAI,MAAM,OAAO,KAAK,YAAY;AAAA,IAC1C;AAGA,QAAI,QAAQ,WAAW,QAAW;AAChC,UAAI,SAAS,QAAQ;AAAA,IACvB;AACA,QAAI,QAAQ,aAAa,QAAW;AAClC,UAAI,WAAW,QAAQ;AAAA,IACzB;AACA,QAAI,QAAQ,YAAY,QAAW;AACjC,UAAI,UAAU,QAAQ;AAAA,IACxB;AACA,QAAI,QAAQ,YAAY,QAAW;AACjC,UAAI,UAAU,EAAE,GAAG,IAAI,SAAS,GAAG,QAAQ,QAAQ;AAAA,IACrD;AAEA,QAAI,aAAY,oBAAI,KAAK,GAAE,YAAY;AAGvC,SAAK,SAAS,GAAG;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,OAA2B;AAChC,UAAM,UAAU,KAAK,KAAK,KAAK,SAAS,GAAG,KAAK,OAAO;AAEvD,QAAI,CAAC,GAAG,WAAW,OAAO,GAAG;AAC3B,aAAO;AAAA,IACT;AAEA,QAAI;AACF,YAAM,UAAU,GAAG,aAAa,SAAS,OAAO;AAChD,aAAO,UAAU,MAAM,KAAK,MAAM,OAAO,CAAC;AAAA,IAC5C,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,sBAAsB,KAAK,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MACxF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS,cAA+C;AACtD,QAAI,CAAC,GAAG,WAAW,KAAK,OAAO,GAAG;AAChC,aAAO,CAAC;AAAA,IACV;AAEA,UAAM,QAAQ,GAAG,YAAY,KAAK,OAAO;AACzC,UAAM,OAAc,CAAC;AAErB,eAAW,QAAQ,OAAO;AACxB,UAAI,CAAC,KAAK,SAAS,OAAO,KAAK,KAAK,SAAS,MAAM,GAAG;AACpD;AAAA,MACF;AAEA,UAAI;AACF,cAAM,UAAU,GAAG,aAAa,KAAK,KAAK,KAAK,SAAS,IAAI,GAAG,OAAO;AACtE,cAAM,MAAM,UAAU,MAAM,KAAK,MAAM,OAAO,CAAC;AAE/C,YAAI,iBAAiB,QAAW;AAC9B,gBAAM,UAAU,MAAM,QAAQ,YAAY,IAAI,eAAe,CAAC,YAAY;AAC1E,cAAI,QAAQ,SAAS,IAAI,MAAM,GAAG;AAChC,iBAAK,KAAK,GAAG;AAAA,UACf;AAAA,QACF,OAAO;AACL,eAAK,KAAK,GAAG;AAAA,QACf;AAAA,MACF,SAAS,OAAO;AACd,cAAM,IAAI;AAAA,UACR,2BAA2B,IAAI,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,QAC5F;AAAA,MACF;AAAA,IACF;AAGA,SAAK,KAAK,CAAC,GAAG,MAAM,IAAI,KAAK,EAAE,SAAS,EAAE,QAAQ,IAAI,IAAI,KAAK,EAAE,SAAS,EAAE,QAAQ,CAAC;AAErF,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAwB;AACtB,WAAO,KAAK,SAAS,CAAC,WAAW,SAAS
,CAAC;AAAA,EAC7C;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,OAA6B;AACrC,UAAM,MAAM,KAAK,OAAO,KAAK;AAE7B,QAAI,CAAC,KAAK;AACR,aAAO,IAAI,IAAI,MAAM,OAAO,KAAK,YAAY,CAAC;AAAA,IAChD;AAEA,QAAI,IAAI,WAAW,eAAe,IAAI,WAAW,UAAU;AACzD,aAAO,IAAI,IAAI,MAAM,iBAAiB,IAAI,MAAM,MAAM,CAAC;AAAA,IACzD;AAEA,QAAI,IAAI,WAAW,aAAa;AAC9B,aAAO,GAAG,MAAS;AAAA,IACrB;AAGA,SAAK,UAAU,OAAO;AAAA,MACpB,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,SAAS,EAAE,cAAa,oBAAI,KAAK,GAAE,YAAY,EAAE;AAAA,IACnD,CAAC;AAGD,UAAM,UAAU,KAAK,KAAK,KAAK,SAAS,GAAG,KAAK,MAAM;AACtD,QAAI,GAAG,WAAW,OAAO,GAAG;AAC1B,UAAI;AACF,cAAM,MAAM,SAAS,GAAG,aAAa,SAAS,OAAO,GAAG,EAAE;AAI1D,YAAI,CAAC,OAAO,MAAM,GAAG,KAAK,OAAO,UAAU,GAAG,KAAK,MAAM,GAAG;AAC1D,kBAAQ,KAAK,KAAK,SAAS;AAAA,QAC7B;AAAA,MACF,QAAQ;AAAA,MAER;AAEA,UAAI;AACF,WAAG,WAAW,OAAO;AAAA,MACvB,QAAQ;AAAA,MAER;AAAA,IACF;AAEA,WAAO,GAAG,MAAS;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA,EAKA,eAAe,iBAAyB,IAAY;AAClD,UAAM,OAAO,KAAK,SAAS;AAC3B,UAAM,aAAa,KAAK,IAAI,IAAI,iBAAiB,KAAK,KAAK;AAC3D,QAAI,UAAU;AAEd,eAAW,OAAO,MAAM;AACtB,WACG,IAAI,WAAW,eAAe,IAAI,WAAW,YAAY,IAAI,WAAW,gBACzE,IAAI,KAAK,IAAI,SAAS,EAAE,QAAQ,IAAI,YACpC;AACA,cAAM,UAAU,KAAK,KAAK,KAAK,SAAS,GAAG,IAAI,EAAE,OAAO;AACxD,YAAI;AACF,aAAG,WAAW,OAAO;AACrB;AAAA,QACF,SAAS,OAAO;AACd,gBAAM,IAAI;AAAA,YACR,6BAA6B,IAAI,EAAE,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,UAChG;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,wBACE,iBAAyB,GACzB,UAAsC,CAAC,GAC/B;AACR,UAAM,OAAO,KAAK,SAAS;AAC3B,UAAM,aAAa,KAAK,IAAI,IAAI,iBAAiB,KAAK,KAAK;AAC3D,QAAI,UAAU;AAEd,eAAW,OAAO,MAAM;AACtB,UAAI,IAAI,WAAW,aAAa,IAAI,KAAK,IAAI,SAAS,EAAE,QAAQ,IAAI,YAAY;AAC9E,cAAM,UAAU,KAAK,KAAK,KAAK,SAAS,GAAG,IAAI,EAAE,OAAO;AAExD,YAAI,QAAQ,iBAAiB,MAAM;AAEjC,eAAK,UAAU,IAAI,IAAI;AAAA,YACrB,QAAQ;AAAA,YACR,SAAS,0CAA0C,OAAO,cAAc,CAAC;AAAA,UAC3E,CAAC;AAAA,QACH,OAAO;AAEL,cAAI;AACF,eAAG,WAAW,OAAO;AAAA,UACvB,SAAS,OAAO;AACd,kBAAM,IAAI;AAAA,cACR,8BAA8B,IAAI,EAAE,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,YACjG;AAAA,UACF;AAAA,QACF;AACA;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,OAAwB;AAChC,UAAM,UAAU,KAAK,KAAK,KAAK,SAAS,GAAG,KAAK,OAAO;AAEvD,QAAI,CAAC,GAAG,WAAW,OAAO,GAAG;AAC3B,aAAO;AAAA,IACT;AAEA,QAAI;AACF,SAAG,WAAW,OAAO;AACrB,aAAO;AAAA,IACT,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,wBAAwB,KAAK,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MAC1F;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,SAAS,KAAgB;AAC/B,UAAM,UAAU,KAAK,KAAK,KAAK,SAAS,GAAG,IAAI,EAAE,OAAO;AACxD,wBAAoB,SAAS,KAAK,UAAU,KAAK,MAAM,CAAC,CAAC;AAAA,EAC3D;AACF;;;AI1SA,SAAS,UAAU,aAAAG,YAAW,SAAAC,QAAO,UAAU;AAC/C,SAAS,QAAAC,OAAM,WAAAC,gBAAe;;;ACD9B,SAAS,aAAgC;AACzC,OAAO,oBAAoB;AAC3B,YAAY,OAAO;AAKnB,SAAS,YAAY,KAAgC;AACnD,MAAI,OAAO,QAAQ,YAAY;AAE7B,WAAO;AAAA,EACT;AACA,MAAI,QAAQ,QAAQ,OAAO,QAAQ,YAAY,aAAa,KAAK;AAE/D,UAAM,cAAc;AACpB,QAAI,OAAO,YAAY,YAAY,YAAY;AAE7C,aAAO,YAAY;AAAA,IACrB;AAAA,EACF;AACA,QAAM,IAAI,MAAM,gCAAgC;AAClD;AACA,IAAM,WAAW,YAAY,cAAc;AAyBpC,IAAM,YAAN,MAAgB;AAAA,EACrB,MAAM,MAAc,UAAmD;AACrE,QAAI;AACF,YAAM,UAA0B,CAAC,KAAK;AACtC,UAAI,aAAa,cAAc;AAC7B,gBAAQ,KAAK,YAAY;AAAA,MAC3B;AAEA,YAAM,MAAM,MAAM,MAAM;AAAA,QACtB,YAAY;AAAA,QACZ;AAAA,MACF,CAAC;AAED,YAAM,QAAoB,CAAC;AAE3B,eAAS,KAAK;AAAA,QACZ,qBAAqB,CAACC,UAA0C;AAC9D,gBAAM,OAAOA,MAAK;AAClB,cAAI,CAAC,KAAK,GAAI;AAEd,gBAAM,WACJA,MAAK,OAAO,SAAS,4BACrBA,MAAK,OAAO,SAAS;AAEvB,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN,MAAM,KAAK,GAAG;AAAA,YACd;AAAA,YACA,OAAO,KAAK;AAAA,YACZ,WAAW,KAAK,KAAK,MAAM,QAAQ;AAAA,YACnC,SAAS,KAAK,KAAK,IAAI,QAAQ;AAAA,YAC/B,WAAW,KAAK,yBAAyB,IAAI;AAAA,UAC/C,CAAC;AAAA,QACH;AAAA,QAEA,kBAAkB,CAACA,UAAuC;AACxD,gBAAM,OAAOA,MAAK;AAClB,cAAI,CAAC,KAAK,GAAI;AAEd,gBAAM,WACJA,MAAK,OAAO,SAAS,4BACrBA,
MAAK,OAAO,SAAS;AAEvB,gBAAM,UAA+B,CAAC;AAEtC,qBAAW,UAAU,KAAK,KAAK,MAAM;AACnC,gBAAM,gBAAc,MAAM,KAAO,eAAa,OAAO,GAAG,GAAG;AACzD,sBAAQ,KAAK;AAAA,gBACX,MAAM,OAAO,IAAI;AAAA,gBACjB,OAAO,OAAO;AAAA,gBACd,WAAW,KAAK,uBAAuB,MAAM;AAAA,gBAC7C,WAAW,OAAO,KAAK,MAAM,QAAQ;AAAA,gBACrC,SAAS,OAAO,KAAK,IAAI,QAAQ;AAAA,cACnC,CAAC;AAAA,YACH;AAAA,UACF;AAEA,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN,MAAM,KAAK,GAAG;AAAA,YACd;AAAA,YACA,WAAW,KAAK,KAAK,MAAM,QAAQ;AAAA,YACnC,SAAS,KAAK,KAAK,IAAI,QAAQ;AAAA,YAC/B;AAAA,UACF,CAAC;AAAA,QACH;AAAA,QAEA,wBAAwB,CAACA,UAA6C;AACpE,gBAAM,OAAOA,MAAK;AAElB,gBAAM,WAAWA,MAAK,OAAO,SAAS;AAEtC,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN,MAAM,KAAK,GAAG;AAAA,YACd;AAAA,YACA,WAAW,KAAK,KAAK,MAAM,QAAQ;AAAA,YACnC,SAAS,KAAK,KAAK,IAAI,QAAQ;AAAA,UACjC,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAED,aAAO;AAAA,IACT,QAAQ;AAEN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA,EAEA,eAAe,MAA4B;AACzC,QAAI;AACF,YAAM,MAAM,MAAM,MAAM;AAAA,QACtB,YAAY;AAAA,QACZ,SAAS,CAAC,cAAc,KAAK;AAAA,MAC/B,CAAC;AAED,YAAM,UAAwB,CAAC;AAE/B,eAAS,KAAK;AAAA,QACZ,mBAAmB,CAACA,UAAwC;AAC1D,gBAAM,OAAOA,MAAK;AAClB,gBAAM,aAAuB,CAAC;AAE9B,qBAAW,QAAQ,KAAK,YAAY;AAClC,gBAAM,2BAAyB,IAAI,GAAG;AACpC,yBAAW,KAAK,KAAK,MAAM,IAAI;AAAA,YACjC,WAAa,oBAAkB,IAAI,GAAG;AACpC,yBAAW,KAAK,KAAK,MAAM,IAAI;AAAA,YACjC,WAAa,6BAA2B,IAAI,GAAG;AAC7C,yBAAW,KAAK,KAAK,MAAM,IAAI;AAAA,YACjC;AAAA,UACF;AAEA,kBAAQ,KAAK;AAAA,YACX,QAAQ,KAAK,OAAO;AAAA,YACpB;AAAA,YACA,QAAQ,KAAK,eAAe;AAAA,UAC9B,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAED,aAAO;AAAA,IACT,QAAQ;AAEN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA,EAEQ,yBAAyB,MAAqC;AACpE,UAAM,SAAS,KAAK,OACjB,IAAI,CAAC,MAAM;AACV,UAAM,eAAa,CAAC,EAAG,QAAO,EAAE;AAChC,aAAO;AAAA,IACT,CAAC,EACA,KAAK,IAAI;AAEZ,WAAO,GAAG,KAAK,IAAI,QAAQ,WAAW,IAAI,MAAM;AAAA,EAClD;AAAA,EAEQ,uBAAuB,MAA6B;AAC1D,UAAM,SAAS,KAAK,OACjB,IAAI,CAAC,MAAM;AACV,UAAM,eAAa,CAAC,EAAG,QAAO,EAAE;AAChC,aAAO;AAAA,IACT,CAAC,EACA,KAAK,IAAI;AAEZ,UAAM,OAAS,eAAa,KAAK,GAAG,IAAI,KAAK,IAAI,OAAO;AACxD,WAAO,GAAG,IAAI,IAAI,MAAM;AAAA,EAC1B;AACF;;;ACjLO,IAAM,YAAN,MAAgB;AAAA,EACJ,QAAgC,oBAAI,IAAuB;AAAA,EAC3D,QAAkC,oBAAI,IAAyB;AAAA,EAEhF,SAAS,OAAmB,MAAoB;AAC9C,eAAW,QAAQ,OAAO;AACxB,YAAM,KAAK,GAAG,IAAI,IAAI,KAAK,IAAI;AAE/B,YAAM,YAAuB;AAAA,QAC3B;AAAA,QACA;AAAA,QACA,MAAM,KAAK;AAAA,QACX,MAAM,KAAK;AAAA,QACX,UAAU,KAAK;AAAA,QACf,WAAW,KAAK;AAAA,QAChB,SAAS,KAAK;AAAA,MAChB;AAEA,UAAI,KAAK,cAAc,QAAW;AAChC,kBAAU,YAAY,KAAK;AAAA,MAC7B;AAEA,WAAK,MAAM,IAAI,IAAI,SAAS;AAG5B,UAAI,CAAC,KAAK,MAAM,IAAI,EAAE,GAAG;AACvB,aAAK,MAAM,IAAI,IAAI,CAAC,CAAC;AAAA,MACvB;AAGA,UAAI,KAAK,SAAS,WAAW,KAAK,YAAY,QAAW;AACvD,mBAAW,UAAU,KAAK,SAAS;AACjC,gBAAM,WAAW,GAAG,IAAI,IAAI,KAAK,IAAI,IAAI,OAAO,IAAI;AAEpD,gBAAM,aAAwB;AAAA,YAC5B,IAAI;AAAA,YACJ;AAAA,YACA,MAAM;AAAA,YACN,MAAM,OAAO;AAAA,YACb,UAAU,KAAK;AAAA;AAAA,YACf,WAAW,OAAO;AAAA,YAClB,SAAS,OAAO;AAAA,YAChB,WAAW,OAAO;AAAA,UACpB;AAEA,eAAK,MAAM,IAAI,UAAU,UAAU;AAGnC,cAAI,CAAC,KAAK,MAAM,IAAI,QAAQ,GAAG;AAC7B,iBAAK,MAAM,IAAI,UAAU,CAAC,CAAC;AAAA,UAC7B;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,UAAU,UAAkB,QAAgB,YAA4B;AAEtE,UAAM,aAAa,KAAK,kBAAkB,UAAU,MAAM;AAE1D,eAAW,QAAQ,YAAY;AAC7B,YAAM,OAAkB;AAAA,QACtB,MAAM;AAAA,QACN,IAAI,GAAG,UAAU,IAAI,IAAI;AAAA,QACzB,MAAM;AAAA,QACN,YAAY;AAAA,MACd;AAEA,YAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ,KAAK,CAAC;AAC3C,YAAM,KAAK,IAAI;AACf,WAAK,MAAM,IAAI,UAAU,KAAK;AAAA,IAChC;AAAA,EACF;AAAA,EAEA,yBAAyB,MAAc,MAAc,cAA4B;AAC/E,UAAM,SAAS,GAAG,IAAI,IAAI,YAAY;AAGtC,UAAM,cAAc;AACpB,UAAM,QAAQ,oBAAI,IAAY;AAE9B,QAAI;AACJ,YAAQ,QAAQ,YAAY,KAAK,IAAI,OAAO,MAAM;AAChD,UAAI,MAAM,CAAC,MAAM,UAAa,MAAM,CAAC,MAAM,IAAI;AAC7C,cAAM,IAAI,MAAM,CAAC,CAAC;AAAA,MACpB;AAAA,IACF;AAEA,UAAM,QAAQ,KAAK,MAAM,IAAI,MAAM,KAAK,CAAC;AAEzC,eAAW,kBAAkB,OAAO;AAElC,YAAM,aAAa,KAAK,eAAe,cAAc;AAErD,UAA
I,YAAY;AACd,cAAM,KAAK;AAAA,UACT,MAAM;AAAA,UACN,IAAI,WAAW;AAAA,UACf,MAAM;AAAA,UACN,YAAY;AAAA;AAAA,QACd,CAAC;AAAA,MACH,OAAO;AAEL,cAAM,KAAK;AAAA,UACT,MAAM;AAAA,UACN,IAAI,WAAW,cAAc;AAAA,UAC7B,MAAM;AAAA,UACN,YAAY;AAAA,QACd,CAAC;AAAA,MACH;AAAA,IACF;AAEA,SAAK,MAAM,IAAI,QAAQ,KAAK;AAAA,EAC9B;AAAA,EAEA,QAAQ,IAAmC;AACzC,WAAO,KAAK,MAAM,IAAI,EAAE;AAAA,EAC1B;AAAA,EAEA,SAAS,QAA6B;AACpC,WAAO,KAAK,MAAM,IAAI,MAAM,KAAK,CAAC;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA,EAKA,QAAQ,MAAuB;AAC7B,UAAM,QAAQ,KAAK,MAAM,IAAI,KAAK,IAAI,KAAK,CAAC;AAC5C,UAAM,KAAK,IAAI;AACf,SAAK,MAAM,IAAI,KAAK,MAAM,KAAK;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,MAAuB;AAClC,SAAK,MAAM,IAAI,KAAK,IAAI,IAAI;AAG5B,QAAI,CAAC,KAAK,MAAM,IAAI,KAAK,EAAE,GAAG;AAC5B,WAAK,MAAM,IAAI,KAAK,IAAI,CAAC,CAAC;AAAA,IAC5B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,QAA6B;AAC5C,UAAM,WAAwB,CAAC;AAC/B,eAAW,SAAS,KAAK,MAAM,OAAO,GAAG;AACvC,iBAAW,QAAQ,OAAO;AACxB,YAAI,KAAK,OAAO,QAAQ;AACtB,mBAAS,KAAK,IAAI;AAAA,QACpB;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,QAAwB;AACvC,WAAO,KAAK,iBAAiB,MAAM,EAAE,OAAO,CAAC,MAAM,EAAE,SAAS,OAAO,EAAE;AAAA,EACzE;AAAA;AAAA;AAAA;AAAA,EAKA,cAAc,QAAwB;AACpC,WAAO,KAAK,SAAS,MAAM,EAAE,OAAO,CAAC,MAAM,EAAE,SAAS,OAAO,EAAE;AAAA,EACjE;AAAA,EAEA,cAA2B;AACzB,WAAO,MAAM,KAAK,KAAK,MAAM,OAAO,CAAC;AAAA,EACvC;AAAA,EAEQ,eAAe,MAAqC;AAC1D,eAAW,QAAQ,KAAK,MAAM,OAAO,GAAG;AACtC,UAAI,KAAK,SAAS,MAAM;AACtB,eAAO;AAAA,MACT;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,kBAAkB,UAAkB,YAA4B;AAEtE,QAAI,WAAW,WAAW,GAAG,GAAG;AAE9B,YAAM,UAAU,SAAS,MAAM,GAAG,EAAE,MAAM,GAAG,EAAE,EAAE,KAAK,GAAG;AACzD,YAAM,QAAQ,WAAW,MAAM,GAAG;AAElC,UAAI,WAAW;AACf,iBAAW,QAAQ,OAAO;AACxB,YAAI,SAAS,MAAM;AACjB,qBAAW,SAAS,MAAM,GAAG,EAAE,MAAM,GAAG,EAAE,EAAE,KAAK,GAAG;AAAA,QACtD,WAAW,SAAS,KAAK;AACvB,sBAAY,IAAI,IAAI;AAAA,QACtB;AAAA,MACF;AAEA,aAAO,SAAS,QAAQ,SAAS,EAAE;AAAA,IACrC;AAGA,WAAO;AAAA,EACT;AAAA,EAEA,SAGE;AACA,UAAM,WAAwB,CAAC;AAC/B,eAAW,SAAS,KAAK,MAAM,OAAO,GAAG;AACvC,eAAS,KAAK,GAAG,KAAK;AAAA,IACxB;AAEA,WAAO;AAAA,MACL,OAAO,MAAM,KAAK,KAAK,MAAM,OAAO,CAAC;AAAA,MACrC,OAAO,SAAS,IAAI,CAAC,OAAO;AAAA,QAC1B,MAAM,EAAE;AAAA,QACR,IAAI,EAAE;AAAA,QACN,MAAM,EAAE;AAAA,QACR,YAAY,EAAE;AAAA,MAChB,EAAE;AAAA,IACJ;AAAA,EACF;AACF;;;AC5OA,IAAI,mBAAyC;AAC7C,IAAI,aAAqC;AACzC,IAAI,eAAuC;AAC3C,IAAI,eAAe;AACnB,IAAI,aAAa;AAkBV,SAAS,wBAAiC;AAC/C,MAAI,CAAC,cAAc;AACjB,QAAI;AAGF,yBAAmB,UAAQ,aAAa;AAExC,mBAAa,UAAQ,gBAAgB;AAErC,qBAAe,UAAQ,kBAAkB;AACzC,mBAAa;AAAA,IACf,QAAQ;AAEN,mBAAa;AAAA,IACf;AACA,mBAAe;AAAA,EACjB;AACA,SAAO;AACT;AAyCO,SAAS,mBAAkC;AAChD,MAAI,CAAC,sBAAsB,KAAK,qBAAqB,QAAQ,iBAAiB,MAAM;AAClF,WAAO;AAAA,EACT;AAEA,QAAM,SAAiB,IAAI,iBAAiB;AAC5C,SAAO,YAAY,YAAY;AAC/B,SAAO;AACT;AAMO,SAAS,cAAc,MAAqC;AACjE,MAAI;AACF,UAAM,SAAS,iBAAiB;AAChC,QAAI,WAAW,MAAM;AACnB,aAAO;AAAA,IACT;AAEA,WAAO,OAAO,MAAM,IAAI;AAAA,EAC1B,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;AAMO,SAAS,iBAAgC;AAC9C,MAAI,CAAC,sBAAsB,KAAK,qBAAqB,QAAQ,eAAe,MAAM;AAChF,WAAO;AAAA,EACT;AAEA,QAAM,SAAiB,IAAI,iBAAiB;AAC5C,SAAO,YAAY,UAAU;AAC7B,SAAO;AACT;AAMO,SAAS,YAAY,MAAqC;AAC/D,MAAI;AACF,UAAM,SAAS,eAAe;AAC9B,QAAI,WAAW,MAAM;AACnB,aAAO;AAAA,IACT;AAEA,WAAO,OAAO,MAAM,IAAI;AAAA,EAC1B,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;AAKO,SAAS,qBAAqB,UAAsC;AACzE,SAAO,SAAS,MAAM;AACxB;AAmBO,SAAS,oBAAoB,MAAsB,MAAqC;AAC7F,SAAO,KAAK,SAAS,KAAK,CAAC,UAAU,MAAM,SAAS,IAAI,KAAK;AAC/D;AAKO,SAAS,oBACd,MACA,WACuB;AACvB,SAAO,KAAK,kBAAkB,SAAS;AACzC;AAKO,SAAS,sBAAsB,MAA+B;AACnE,SAAO,KAAK,SAAS,KAAK,CAAC,UAAU,MAAM,SAAS,qBAAqB;AAC3E;AAaO,SAAS,gBAAgB,MAA+B;AAE7D,SAAO,KAAK,SAAS,KAAK,CAAC,UAAU,MAAM,SAAS,WAAW,MAAM,SAAS,OAAO;AACvF;AAYO,SAAS,qBAAqB,MAA8B;AAEjE,QAAM,WAAW,oBAAoB,MAAM,MAAM;AACjD,QAAM,iBAAiB,oBAAoB,MAAM,YAAY;AAC7D,QAAM,iBAAiB,oBAAoB,MAAM,
aAAa;AAC9D,QAAM,qBAAqB,oBAAoB,MAAM,iBAAiB;AAEtE,MAAI,aAAa,MAAM;AACrB,WAAO;AAAA,EACT;AAEA,MAAI,YAAY,SAAS;AAGzB,MAAI,uBAAuB,MAAM;AAC/B,iBAAa,mBAAmB;AAAA,EAClC;AAGA,MAAI,mBAAmB,MAAM;AAC3B,iBAAa,eAAe;AAAA,EAC9B;AAGA,MAAI,mBAAmB,MAAM;AAC3B,iBAAa,IAAI,eAAe,IAAI;AAAA,EACtC;AAEA,SAAO;AACT;AAQO,SAAS,iBACd,MACA,UACkB;AAClB,QAAM,QAAQ,MAAM,QAAQ,QAAQ,IAAI,WAAW,CAAC,QAAQ;AAC5D,SAAO,KAAK,SAAS,kBAAkB,KAAK;AAC9C;AAKO,SAAS,kBAAkB,SAAiC;AAEjE,QAAM,eAAe,oBAAoB,SAAS,UAAU;AAC5D,MAAI,iBAAiB,MAAM;AACzB,WAAO;AAAA,EACT;AACA,SAAO,aAAa;AACtB;;;ACvQO,IAAM,cAAN,MAAkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOvB,MAAM,MAAc,WAA+B;AACjD,QAAI;AACF,YAAM,OAAO,YAAY,IAAI;AAC7B,UAAI,SAAS,MAAM;AAEjB,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,QAAoB,CAAC;AAG3B,YAAM,YAAY,KAAK,eAAe,IAAI;AAC1C,YAAM,KAAK,GAAG,SAAS;AAGvB,YAAM,UAAU,KAAK,aAAa,IAAI;AACtC,YAAM,KAAK,GAAG,OAAO;AAGrB,YAAM,aAAa,KAAK,gBAAgB,IAAI;AAC5C,YAAM,KAAK,GAAG,UAAU;AAGxB,YAAM,QAAQ,KAAK,iBAAiB,IAAI;AACxC,YAAM,KAAK,GAAG,KAAK;AAGnB,YAAM,YAAY,KAAK,eAAe,IAAI;AAC1C,YAAM,KAAK,GAAG,SAAS;AAGvB,WAAK,aAAa,MAAM,KAAK;AAE7B,aAAO;AAAA,IACT,QAAQ;AAEN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,eAAe,MAA4B;AACzC,QAAI;AACF,YAAM,OAAO,YAAY,IAAI;AAC7B,UAAI,SAAS,MAAM;AACjB,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,UAAwB,CAAC;AAC/B,YAAM,cAAc,iBAAiB,MAAM,oBAAoB;AAE/D,iBAAW,cAAc,aAAa;AACpC,cAAM,cAAc,WAAW,kBAAkB,aAAa;AAE9D,mBAAW,QAAQ,aAAa;AAC9B,gBAAM,WAAW,oBAAoB,MAAM,MAAM;AACjD,cAAI,aAAa,MAAM;AACrB;AAAA,UACF;AAGA,gBAAM,gBAAgB,SAAS,kBAAkB,oCAAoC,EAAE,CAAC;AACxF,gBAAMC,QACJ,kBAAkB,SAAY,cAAc,OAAO,SAAS,KAAK,QAAQ,MAAM,EAAE;AAEnF,cAAIA,UAAS,IAAI;AACf,oBAAQ,KAAK;AAAA,cACX,QAAQA;AAAA,cACR,YAAY,CAAC;AAAA,cACb,QAAQ;AAAA,YACV,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAEA,aAAO;AAAA,IACT,QAAQ;AACN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,MAAkC;AACvD,UAAM,gBAAgB,iBAAiB,MAAM,sBAAsB;AACnE,UAAM,QAAoB,CAAC;AAE3B,eAAW,UAAU,eAAe;AAClC,YAAM,WAAW,oBAAoB,QAAQ,MAAM;AACnD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,WAAW,KAAK,WAAW,IAAI;AACrC,YAAM,YAAY,qBAAqB,OAAO,aAAa;AAC3D,YAAM,UAAU,qBAAqB,OAAO,WAAW;AACvD,YAAM,YAAY,qBAAqB,MAAM;AAE7C,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA,OAAO;AAAA,QACP;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,MAAkC;AACrD,UAAM,YAAY,iBAAiB,MAAM,kBAAkB;AAC3D,UAAM,QAAoB,CAAC;AAE3B,eAAW,YAAY,WAAW;AAEhC,YAAM,WAAW,oBAAoB,UAAU,WAAW;AAC1D,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,WAAW,oBAAoB,UAAU,MAAM;AACrD,YAAM,WAAW,oBAAoB,UAAU,MAAM;AAErD,UAAI,aAAa,QAAQ,aAAa,MAAM;AAC1C;AAAA,MACF;AAGA,UAAI,SAAS,SAAS,eAAe;AACnC;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,WAAW,KAAK,WAAW,IAAI;AACrC,YAAM,YAAY,qBAAqB,SAAS,aAAa;AAC7D,YAAM,UAAU,qBAAqB,SAAS,WAAW;AAEzD,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,WAAW;AAAA,QACX,SAAS,CAAC;AAAA,MACZ,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,MAAkC;AACxD,UAAM,YAAY,iBAAiB,MAAM,kBAAkB;AAC3D,UAAM,QAAoB,CAAC;AAE3B,eAAW,YAAY,WAAW;AAChC,YAAM,WAAW,oBAAoB,UAAU,WAAW;AAC1D,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,WAAW,oBAAoB,UAAU,MAAM;AACrD,YAAM,WAAW,oBAAoB,UAAU,MAAM;AAErD,UAAI,aAAa,QAAQ,aAAa,MAAM;AAC1C;AAAA,MACF;AAGA,UAAI,SAAS,SAAS,kBAAkB;AACtC;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,WAAW,KAAK,WAAW,IAAI;AACrC,YAAM,YAAY,qBAAqB,SAAS,aAAa;AAC7D,YAAM,UAAU,qBAAqB,SAAS,WAAW;AAGzD,YAAM,UAAU,KAAK,wBAAwB,QAAQ;AAErD,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,WAAW;AAAA,QACX;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,MAAkC;AACzD,UAAM,YAAY,iBAAiB,MAAM,kBAAkB;AAC3D,UAAM,QAAoB,CAAC;AAE3B,eAAW,YAAY,WAAW;AAChC,YAAM
,WAAW,oBAAoB,UAAU,WAAW;AAC1D,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,WAAW,oBAAoB,UAAU,MAAM;AACrD,YAAM,WAAW,oBAAoB,UAAU,MAAM;AAErD,UAAI,aAAa,QAAQ,aAAa,MAAM;AAC1C;AAAA,MACF;AAGA,UAAI,SAAS,SAAS,iBAAiB,SAAS,SAAS,kBAAkB;AACzE;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,WAAW,KAAK,WAAW,IAAI;AACrC,YAAM,YAAY,qBAAqB,SAAS,aAAa;AAC7D,YAAM,UAAU,qBAAqB,SAAS,WAAW;AACzD,YAAM,YAAY,GAAG,IAAI,MAAM,SAAS,IAAI;AAE5C,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,MAAkC;AACvD,UAAM,QAAoB,CAAC;AAG3B,UAAM,aAAa,iBAAiB,MAAM,mBAAmB;AAC7D,eAAW,aAAa,YAAY;AAClC,YAAM,QAAQ,UAAU,kBAAkB,YAAY;AACtD,iBAAW,QAAQ,OAAO;AACxB,cAAM,WAAW,oBAAoB,MAAM,MAAM;AACjD,YAAI,aAAa,MAAM;AACrB;AAAA,QACF;AAEA,cAAM,OAAO,SAAS;AACtB,cAAM,WAAW,KAAK,WAAW,IAAI;AACrC,cAAM,YAAY,qBAAqB,KAAK,aAAa;AACzD,cAAM,UAAU,qBAAqB,KAAK,WAAW;AAErD,cAAM,WAAW,oBAAoB,MAAM,MAAM;AACjD,cAAM,YAAY,aAAa,OAAO,GAAG,IAAI,KAAK,SAAS,IAAI,KAAK;AAEpE,cAAM,KAAK;AAAA,UACT,MAAM;AAAA,UACN;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAGA,UAAM,WAAW,iBAAiB,MAAM,iBAAiB;AACzD,eAAW,WAAW,UAAU;AAC9B,YAAM,QAAQ,QAAQ,kBAAkB,UAAU;AAClD,iBAAW,QAAQ,OAAO;AACxB,cAAM,WAAW,oBAAoB,MAAM,MAAM;AACjD,YAAI,aAAa,MAAM;AACrB;AAAA,QACF;AAEA,cAAM,OAAO,SAAS;AACtB,cAAM,WAAW,KAAK,WAAW,IAAI;AACrC,cAAM,YAAY,qBAAqB,KAAK,aAAa;AACzD,cAAM,UAAU,qBAAqB,KAAK,WAAW;AAErD,cAAM,WAAW,oBAAoB,MAAM,MAAM;AACjD,cAAM,YAAY,aAAa,OAAO,GAAG,IAAI,KAAK,SAAS,IAAI,KAAK;AAEpE,cAAM,KAAK;AAAA,UACT,MAAM;AAAA,UACN;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,MAAsB,OAAyB;AAClE,UAAM,cAAc,iBAAiB,MAAM,oBAAoB;AAE/D,eAAW,cAAc,aAAa;AACpC,YAAM,eAAe,KAAK,gBAAgB,UAAU;AACpD,UAAI,iBAAiB,MAAM;AACzB;AAAA,MACF;AAEA,YAAM,WAAW,oBAAoB,YAAY,MAAM;AACvD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,YAAY,qBAAqB,UAAU;AACjD,YAAM,YAAY,qBAAqB,WAAW,aAAa;AAC/D,YAAM,UAAU,qBAAqB,WAAW,WAAW;AAG3D,YAAM,aAAa,MAAM,KAAK,CAAC,SAAS,KAAK,SAAS,WAAW,KAAK,SAAS,YAAY;AAE3F,UAAI,YAAY,YAAY,QAAW;AACrC,mBAAW,QAAQ,KAAK;AAAA,UACtB;AAAA,UACA,OAAO;AAAA,UACP;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,wBAAwB,eAM7B;AACD,UAAM,UAMD,CAAC;AAEN,UAAM,cAAc,cAAc,kBAAkB,aAAa;AAEjE,eAAW,cAAc,aAAa;AACpC,YAAM,WAAW,oBAAoB,YAAY,MAAM;AACvD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,YAAY,qBAAqB,UAAU;AACjD,YAAM,YAAY,qBAAqB,WAAW,aAAa;AAC/D,YAAM,UAAU,qBAAqB,WAAW,WAAW;AAE3D,cAAQ,KAAK;AAAA,QACX;AAAA,QACA,OAAO;AAAA,QACP;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,YAA2C;AACjE,UAAM,eAAe,oBAAoB,YAAY,UAAU;AAC/D,QAAI,iBAAiB,MAAM;AACzB,aAAO;AAAA,IACT;AAEA,UAAM,YAAY,oBAAoB,cAAc,uBAAuB;AAC3E,QAAI,cAAc,MAAM;AACtB,aAAO;AAAA,IACT;AAEA,UAAM,WAAW,oBAAoB,WAAW,MAAM;AACtD,QAAI,aAAa,MAAM;AACrB,aAAO;AAAA,IACT;AAGA,QAAI,SAAS,SAAS,gBAAgB;AACpC,YAAM,YAAY,SAAS,SAAS,KAAK,CAAC,UAAU,MAAM,SAAS,iBAAiB;AACpF,aAAO,cAAc,SAAY,UAAU,OAAO;AAAA,IACpD;AAGA,QAAI,SAAS,SAAS,mBAAmB;AACvC,aAAO,SAAS;AAAA,IAClB;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,WAAW,MAAuB;AACxC,QAAI,KAAK,WAAW,GAAG;AACrB,aAAO;AAAA,IACT;AACA,UAAM,YAAY,KAAK,CAAC;AACxB,QAAI,cAAc,QAAW;AAC3B,aAAO;AAAA,IACT;AACA,WAAO,cAAc,UAAU,YAAY;AAAA,EAC7C;AACF;;;ACtdA,OAAOC,WAAU;;;ACGV,IAAM,kBAAN,MAAsB;AAAA,EAC3B,YAA6B,QAAsB;AAAtB;AAAA,EAAuB;AAAA,EAEpD,MAAM,MAAM,MAAc,UAAuC;AAC/D,UAAM,SAA4B,MAAM,KAAK,OAAO,YAAY,MAAM,QAAQ;AAE9E,WAAO,OAAO,MAAM,IAAI,CAAC,SAAS;AAChC,YAAM,WAAqB;AAAA,QACzB,MAAM,KAAK;AAAA,QACX,MAAM,KAAK;AAAA,QACX,UAAU,KAAK;AAAA,QACf,WAAW,KAAK;AAAA,QAChB,SAAS,KAAK;AAA
A,MAChB;AAEA,UAAI,KAAK,UAAU,QAAW;AAC5B,iBAAS,QAAQ,KAAK;AAAA,MACxB;AAEA,UAAI,KAAK,cAAc,QAAW;AAChC,iBAAS,YAAY,KAAK;AAAA,MAC5B;AAEA,UAAI,KAAK,YAAY,QAAW;AAC9B,iBAAS,UAAU,KAAK;AAAA,MAC1B;AAEA,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AACF;;;ACfO,IAAM,gBAAN,MAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOzB,MAAM,MAAc,WAA+B;AACjD,QAAI;AACF,YAAM,OAAO,cAAc,IAAI;AAC/B,UAAI,SAAS,MAAM;AAEjB,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,QAAoB,CAAC;AAG3B,YAAM,YAAY,KAAK,eAAe,IAAI;AAC1C,YAAM,KAAK,GAAG,SAAS;AAGvB,YAAM,UAAU,KAAK,aAAa,IAAI;AACtC,YAAM,KAAK,GAAG,OAAO;AAGrB,YAAM,SAAS,KAAK,YAAY,IAAI;AACpC,YAAM,KAAK,GAAG,MAAM;AAGpB,YAAM,QAAQ,KAAK,iBAAiB,IAAI;AACxC,YAAM,KAAK,GAAG,KAAK;AAGnB,YAAM,YAAY,KAAK,eAAe,IAAI;AAC1C,YAAM,KAAK,GAAG,SAAS;AAGvB,WAAK,gBAAgB,MAAM,KAAK;AAEhC,aAAO;AAAA,IACT,QAAQ;AAEN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,eAAe,MAA4B;AACzC,QAAI;AACF,YAAM,OAAO,cAAc,IAAI;AAC/B,UAAI,SAAS,MAAM;AACjB,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,kBAAkB,iBAAiB,MAAM,iBAAiB;AAChE,YAAM,UAAwB,CAAC;AAE/B,iBAAW,WAAW,iBAAiB;AACrC,cAAM,aAAa,kBAAkB,OAAO;AAC5C,YAAI,eAAe,IAAI;AACrB;AAAA,QACF;AAGA,cAAM,EAAE,QAAQ,WAAW,IAAI,KAAK,gBAAgB,UAAU;AAE9D,gBAAQ,KAAK;AAAA,UACX;AAAA,UACA;AAAA,UACA,QAAQ;AAAA;AAAA,QACV,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,IACT,QAAQ;AACN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,MAAkC;AACvD,UAAM,gBAAgB,iBAAiB,MAAM,eAAe;AAC5D,UAAM,QAAoB,CAAC;AAE3B,eAAW,UAAU,eAAe;AAElC,UAAI,KAAK,kBAAkB,MAAM,GAAG;AAClC;AAAA,MACF;AAEA,YAAM,WAAW,oBAAoB,QAAQ,MAAM;AACnD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,WAAW,sBAAsB,MAAM;AAC7C,YAAM,QAAQ,gBAAgB,MAAM;AACpC,YAAM,YAAY,qBAAqB,OAAO,aAAa;AAC3D,YAAM,UAAU,qBAAqB,OAAO,WAAW;AACvD,YAAM,YAAY,qBAAqB,MAAM;AAE7C,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,MAA+B;AACvD,QAAI,UAAU,KAAK;AACnB,WAAO,YAAY,MAAM;AACvB,UAAI,QAAQ,SAAS,aAAa;AAChC,eAAO;AAAA,MACT;AACA,gBAAU,QAAQ;AAAA,IACpB;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,MAAkC;AACrD,UAAM,cAAc,iBAAiB,MAAM,aAAa;AACxD,UAAM,QAAoB,CAAC;AAE3B,eAAW,cAAc,aAAa;AACpC,YAAM,WAAW,oBAAoB,YAAY,MAAM;AACvD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,WAAW,sBAAsB,UAAU;AACjD,YAAM,YAAY,qBAAqB,WAAW,aAAa;AAC/D,YAAM,UAAU,qBAAqB,WAAW,WAAW;AAG3D,YAAM,iBAAiB,oBAAoB,YAAY,iBAAiB;AACxE,YAAM,YAAY,mBAAmB,OAAO,GAAG,IAAI,GAAG,eAAe,IAAI,KAAK;AAE9E,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,SAAS,CAAC;AAAA;AAAA,MACZ,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,YAAY,MAAkC;AACpD,UAAM,aAAa,iBAAiB,MAAM,YAAY;AACtD,UAAM,QAAoB,CAAC;AAE3B,eAAW,aAAa,YAAY;AAClC,YAAM,WAAW,oBAAoB,WAAW,MAAM;AACtD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,WAAW,sBAAsB,SAAS;AAChD,YAAM,YAAY,qBAAqB,UAAU,aAAa;AAC9D,YAAM,UAAU,qBAAqB,UAAU,WAAW;AAG1D,YAAM,iBAAiB,oBAAoB,WAAW,iBAAiB;AACvE,YAAM,YAAY,mBAAmB,OAAO,GAAG,IAAI,GAAG,eAAe,IAAI,KAAK;AAG9E,YAAM,UAAU,KAAK,oBAAoB,SAAS;AAElD,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,MAAkC;AACzD,UAAM,YAAY,iBAAiB,MAAM,WAAW;AACpD,UAAM,QAAoB,CAAC;AAE3B,eAAW,YAAY,WAAW;AAChC,YAAM,WAAW,oBAAoB,UAAU,MAAM;AACrD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,WAAW,sBAAsB,QAAQ;AAC/C,YAAM,YAAY,qBAAqB,SAAS,aAAa;AAC7D,YAAM,UAAU,qBAAqB,SAAS,WAAW;AAGzD,YAAM,YAAY,oBAAoB,UAAU,MAAM;AACtD,YAAM,YAAY,cAAc,OAAO,GAAG,IAAI,MAAM,UAAU,IAAI,KAAK;AAEvE,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;
AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,MAAkC;AACvD,UAAM,aAAa,iBAAiB,MAAM,CAAC,cAAc,aAAa,CAAC;AACvE,UAAM,QAAoB,CAAC;AAE3B,eAAW,aAAa,YAAY;AAClC,YAAM,WAAW,oBAAoB,WAAW,MAAM;AACtD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,WAAW,sBAAsB,SAAS;AAChD,YAAM,YAAY,qBAAqB,UAAU,aAAa;AAC9D,YAAM,UAAU,qBAAqB,UAAU,WAAW;AAG1D,YAAM,WAAW,oBAAoB,WAAW,MAAM;AACtD,YAAM,YAAY,aAAa,OAAO,GAAG,IAAI,KAAK,SAAS,IAAI,KAAK;AAEpE,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,MAAsB,OAAyB;AACrE,UAAM,YAAY,iBAAiB,MAAM,WAAW;AAEpD,eAAW,YAAY,WAAW;AAEhC,YAAM,WAAW,oBAAoB,UAAU,MAAM;AACrD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,WAAW,SAAS;AAG1B,YAAM,UAAU,KAAK,mBAAmB,QAAQ;AAGhD,YAAM,aAAa,MAAM,KAAK,CAAC,SAAS,KAAK,SAAS,WAAW,KAAK,SAAS,QAAQ;AAEvF,UAAI,YAAY,YAAY,QAAW;AACrC,mBAAW,QAAQ,KAAK,GAAG,OAAO;AAAA,MACpC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,oBAAoB,WAMzB;AACD,UAAM,UAMD,CAAC;AAGN,UAAM,WAAW,oBAAoB,WAAW,MAAM;AACtD,QAAI,aAAa,MAAM;AACrB,aAAO;AAAA,IACT;AAGA,UAAM,qBAAqB,SAAS,kBAAkB,yBAAyB;AAE/E,eAAW,aAAa,oBAAoB;AAC1C,YAAM,WAAW,oBAAoB,WAAW,MAAM;AACtD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,QAAQ,gBAAgB,SAAS;AACvC,YAAM,YAAY,qBAAqB,SAAS;AAChD,YAAM,YAAY,qBAAqB,UAAU,aAAa;AAC9D,YAAM,UAAU,qBAAqB,UAAU,WAAW;AAE1D,cAAQ,KAAK;AAAA,QACX;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,UAMxB;AACD,UAAM,UAMD,CAAC;AAGN,UAAM,WAAW,oBAAoB,UAAU,MAAM;AACrD,QAAI,aAAa,MAAM;AACrB,aAAO;AAAA,IACT;AAGA,UAAM,gBAAgB,SAAS,kBAAkB,eAAe;AAEhE,eAAW,UAAU,eAAe;AAClC,YAAM,WAAW,oBAAoB,QAAQ,MAAM;AACnD,UAAI,aAAa,MAAM;AACrB;AAAA,MACF;AAEA,YAAM,OAAO,SAAS;AACtB,YAAM,QAAQ,gBAAgB,MAAM;AACpC,YAAM,YAAY,qBAAqB,MAAM;AAC7C,YAAM,YAAY,qBAAqB,OAAO,aAAa;AAC3D,YAAM,UAAU,qBAAqB,OAAO,WAAW;AAEvD,cAAQ,KAAK;AAAA,QACX;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASQ,gBAAgB,YAA8D;AAEpF,UAAMC,QAAO,WAAW,KAAK;AAG7B,QAAIA,MAAK,SAAS,KAAK,GAAG;AACxB,YAAM,SAASA,MAAK,QAAQ,OAAO,EAAE;AACrC,aAAO,EAAE,QAAQ,YAAY,CAAC,GAAG,EAAE;AAAA,IACrC;AAGA,UAAM,cAAcA,MAAK,MAAM,kBAAkB;AACjD,QAAI,gBAAgB,MAAM;AACxB,YAAM,SAAS,YAAY,CAAC,KAAK;AACjC,YAAM,gBAAgB,YAAY,CAAC,KAAK;AACxC,YAAM,aAAa,cAAc,MAAM,GAAG,EAAE,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC;AAC/D,aAAO,EAAE,QAAQ,WAAW;AAAA,IAC9B;AAGA,UAAM,QAAQA,MAAK,MAAM,IAAI;AAC7B,QAAI,MAAM,SAAS,GAAG;AACpB,YAAM,aAAa,CAAC,MAAM,MAAM,SAAS,CAAC,KAAK,EAAE;AACjD,YAAM,SAAS,MAAM,MAAM,GAAG,EAAE,EAAE,KAAK,IAAI;AAC3C,aAAO,EAAE,QAAQ,WAAW;AAAA,IAC9B;AAGA,WAAO,EAAE,QAAQ,IAAI,YAAY,CAACA,KAAI,EAAE;AAAA,EAC1C;AACF;;;AF1cO,IAAM,gBAAN,MAAoB;AAAA,EACzB,YAA6B,cAA6B;AAA7B;AAAA,EAA8B;AAAA,EAE3D,MAAM,UAAU,UAAkB,MAAmC;AACnE,UAAM,MAAMC,MAAK,QAAQ,QAAQ;AAEjC,QAAI,CAAC,OAAO,MAAM,EAAE,SAAS,GAAG,GAAG;AACjC,YAAM,SAAS,IAAI,UAAU;AAC7B,aAAO,OAAO,MAAM,MAAM,YAAY;AAAA,IACxC;AAEA,QAAI,CAAC,OAAO,MAAM,EAAE,SAAS,GAAG,GAAG;AACjC,YAAM,SAAS,IAAI,UAAU;AAC7B,aAAO,OAAO,MAAM,MAAM,YAAY;AAAA,IACxC;AAEA,QAAI,QAAQ,OAAO;AACjB,UAAI,CAAC,KAAK,cAAc;AACtB,cAAM,IAAI,MAAM,sDAAsD;AAAA,MACxE;AACA,YAAM,SAAS,IAAI,gBAAgB,KAAK,YAAY;AACpD,aAAO,OAAO,MAAM,MAAM,QAAQ;AAAA,IACpC;AAEA,QAAI,QAAQ,OAAO;AACjB,YAAM,SAAS,IAAI,cAAc;AACjC,aAAO,OAAO,MAAM,MAAM,QAAQ;AAAA,IACpC;AAEA,QAAI,QAAQ,OAAO;AACjB,YAAM,SAAS,IAAI,YAAY;AAC/B,aAAO,OAAO,MAAM,MAAM,QAAQ;AAAA,IACpC;AAGA,UAAM,WAAW,gBAAgB,YAAY;AAC7C,UAAM,UAAU,SAAS,eAAe,GAAG;AAC3C,QAAI,YAAY,QAAW;AACzB,aAAO,QAAQ,MAAM,MAAM,QAAQ;AAAA,IACrC;AAEA,WAAO,CAAC;AAAA,EACV;AACF;;;ALhBO,IAAM,mBAAN,MAAuB;AAAA,EACX;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA
;AAAA,EAEjB,YAAY,SAAiB,cAA6B;AACxD,SAAK,UAAU;AACf,SAAK,SAAS,IAAI,UAAU;AAC5B,SAAK,gBAAgB,IAAI,cAAc,YAAY;AACnD,SAAK,aAAa,oBAAI,IAAI;AAC1B,SAAK,iBAAiB,oBAAI,IAAI;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,oBAAoB,UAAiD;AACnE,SAAK,eAAe,IAAI,QAAQ;AAChC,WAAO,MAAM;AACX,WAAK,eAAe,OAAO,QAAQ;AAAA,IACrC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,sBAAsB,OAAqC;AACjE,eAAW,YAAY,KAAK,gBAAgB;AAC1C,eAAS,KAAK;AAAA,IAChB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAW,OAAqE;AACpF,UAAM,QAAQ,IAAI,UAAU;AAE5B,eAAW,QAAQ,OAAO;AACxB,YAAM,MAAM,KAAK,KAAK,MAAM,GAAG,EAAE,IAAI,KAAK;AAC1C,UAAI,CAAC,CAAC,MAAM,OAAO,MAAM,OAAO,MAAM,MAAM,IAAI,EAAE,SAAS,GAAG,EAAG;AAGjE,YAAM,QAAQ,MAAM,KAAK,cAAc,UAAU,KAAK,MAAM,KAAK,OAAO;AACxE,YAAM,SAAS,OAAO,KAAK,IAAI;AAG/B,UAAI,QAAQ,MAAM;AAEhB,cAAM,aAAa,IAAI,cAAc;AACrC,cAAM,UAAU,WAAW,eAAe,KAAK,OAAO;AACtD,mBAAW,OAAO,SAAS;AACzB,cAAI,CAAC,IAAI,QAAQ;AACf,kBAAM,UAAU,KAAK,MAAM,IAAI,QAAQ,IAAI,UAAU;AAAA,UACvD;AAAA,QACF;AAAA,MACF,WAAW,QAAQ,MAAM;AAEvB,cAAM,WAAW,IAAI,YAAY;AACjC,cAAM,UAAU,SAAS,eAAe,KAAK,OAAO;AACpD,mBAAW,OAAO,SAAS;AACzB,cAAI,CAAC,IAAI,QAAQ;AACf,kBAAM,UAAU,KAAK,MAAM,IAAI,QAAQ,IAAI,UAAU;AAAA,UACvD;AAAA,QACF;AAAA,MACF,WAAW,QAAQ,MAAM;AAEvB,cAAM,UAAU,KAAK,OAAO,eAAe,KAAK,OAAO;AACvD,mBAAW,OAAO,SAAS;AACzB,cAAI,CAAC,IAAI,QAAQ;AACf,kBAAM,UAAU,KAAK,MAAM,IAAI,QAAQ,IAAI,UAAU;AAAA,UACvD;AAAA,QACF;AAAA,MACF;AAGA,iBAAW,QAAQ,OAAO;AACxB,cAAM,QAAQ,KAAK,QAAQ,MAAM,IAAI;AAErC,YAAI,KAAK,SAAS,YAAY;AAE5B,gBAAM,eAAe,MAAM,MAAM,KAAK,YAAY,GAAG,KAAK,OAAO,EAAE,KAAK,IAAI;AAC5E,gBAAM,yBAAyB,cAAc,KAAK,MAAM,KAAK,IAAI;AAAA,QACnE,WAAW,KAAK,SAAS,WAAW,KAAK,YAAY,QAAW;AAE9D,qBAAW,UAAU,KAAK,SAAS;AACjC,kBAAM,aAAa,MAAM,MAAM,OAAO,YAAY,GAAG,OAAO,OAAO,EAAE,KAAK,IAAI;AAC9E,kBAAM,yBAAyB,YAAY,KAAK,MAAM,GAAG,KAAK,IAAI,IAAI,OAAO,IAAI,EAAE;AAAA,UACrF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAU,SAAkB,OAAiC;AACjE,UAAM,YAAY,KAAK,aAAa,OAAO;AAC3C,UAAMC,OAAMC,SAAQ,SAAS,GAAG,EAAE,WAAW,KAAK,CAAC;AAEnD,UAAM,aAAa,MAAM,OAAO;AAChC,UAAMC,WAAU,WAAW,KAAK,UAAU,YAAY,MAAM,CAAC,CAAC;AAG9D,SAAK,sBAAsB,EAAE,MAAM,iBAAiB,QAAQ,CAAC;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,YAAY,SAAiC;AACjD,UAAM,YAAY,KAAK,aAAa,OAAO;AAC3C,UAAM,GAAG,WAAW,EAAE,OAAO,KAAK,CAAC;AACnC,SAAK,WAAW,OAAO,OAAO;AAG9B,SAAK,sBAAsB,EAAE,MAAM,iBAAiB,QAAQ,CAAC;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,UAAU,SAAkD;AAEhE,UAAM,SAAS,KAAK,WAAW,IAAI,OAAO;AAC1C,QAAI,OAAQ,QAAO;AAEnB,UAAM,YAAY,KAAK,aAAa,OAAO;AAE3C,QAAI;AACF,YAAM,UAAU,MAAM,SAAS,WAAW,OAAO;AACjD,YAAM,SAAkB,KAAK,MAAM,OAAO;AAG1C,UAAI,CAAC,KAAK,kBAAkB,MAAM,GAAG;AACnC,eAAO;AAAA,MACT;AAEA,YAAM,aAAa;AACnB,YAAM,QAAQ,IAAI,UAAU;AAG5B,iBAAW,QAAQ,WAAW,OAAO;AACnC,cAAM,WAAW,KAAK,iBAAiB,KAAK,IAAI;AAChD,YAAI,CAAC,SAAU;AAGf,YAAI,aAAa,UAAU;AACzB,gBAAM,YAAuB;AAAA,YAC3B,IAAI,KAAK;AAAA,YACT,MAAM,KAAK;AAAA,YACX,MAAM;AAAA,YACN,MAAM,KAAK;AAAA,YACX,UAAU,KAAK;AAAA,YACf,WAAW,KAAK;AAAA,YAChB,SAAS,KAAK;AAAA,UAChB;AACA,cAAI,KAAK,cAAc,QAAW;AAChC,sBAAU,YAAY,KAAK;AAAA,UAC7B;AACA,gBAAM,aAAa,SAAS;AAC5B;AAAA,QACF;AAEA,cAAM,WAOF;AAAA,UACF,MAAM;AAAA,UACN,MAAM,KAAK;AAAA,UACX,UAAU,KAAK;AAAA,UACf,WAAW,KAAK;AAAA,UAChB,SAAS,KAAK;AAAA,QAChB;AACA,YAAI,KAAK,cAAc,QAAW;AAChC,mBAAS,YAAY,KAAK;AAAA,QAC5B;AACA,cAAM,SAAS,CAAC,QAAQ,GAAG,KAAK,IAAI;AAAA,MACtC;AAGA,iBAAW,QAAQ,WAAW,OAAO;AACnC,cAAM,WAAW,KAAK,iBAAiB,KAAK,IAAI;AAChD,YAAI,CAAC,SAAU;AAEf,cAAM,QAAQ;AAAA,UACZ,MAAM,KAAK;AAAA,UACX,IAAI,KAAK;AAAA,UACT,MAAM;AAAA,UACN,YAAY,KAAK;AAAA,QACnB,CAAC;AAAA,MACH;AAEA,WAAK,WAAW,IAAI,SAAS,KAAK;AAClC,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,cACE,OACA,UACA,YACqC;AACrC,UAAM,SAAS,GAAG,QAAQ,IAAI,UAAU;AACxC,WAAO;AAAA,MACL,UAAU,MAAM,iBAAiB,MAAM;AAAA,MA
CvC,OAAO,MAAM,cAAc,MAAM;AAAA,IACnC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,eACE,OACA,UACA,YAC6C;AAC7C,UAAM,SAAS,GAAG,QAAQ,IAAI,UAAU;AACxC,UAAM,UAAuD,CAAC;AAG9D,UAAM,WAAW,MAAM,iBAAiB,MAAM;AAC9C,eAAW,QAAQ,UAAU;AAC3B,UAAI,KAAK,SAAS,SAAS;AACzB,gBAAQ,KAAK,EAAE,IAAI,KAAK,MAAM,cAAc,aAAa,CAAC;AAAA,MAC5D;AAAA,IACF;AAGA,UAAM,WAAW,MAAM,SAAS,MAAM;AACtC,eAAW,QAAQ,UAAU;AAC3B,UAAI,KAAK,SAAS,SAAS;AACzB,gBAAQ,KAAK,EAAE,IAAI,KAAK,IAAI,cAAc,iBAAiB,CAAC;AAAA,MAC9D;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,aAAmB;AACjB,SAAK,WAAW,MAAM;AAAA,EACxB;AAAA,EAEQ,aAAa,SAA0B;AAC7C,WAAOC,MAAK,KAAK,SAAS,UAAU,GAAG,OAAO,OAAO;AAAA,EACvD;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,OAA0C;AAClE,QAAI,OAAO,UAAU,YAAY,UAAU,KAAM,QAAO;AAExD,QAAI,EAAE,WAAW,UAAU,EAAE,WAAW,OAAQ,QAAO;AAEvD,UAAM,MAAM;AACZ,WAAO,MAAM,QAAQ,IAAI,KAAK,KAAK,MAAM,QAAQ,IAAI,KAAK;AAAA,EAC5D;AAAA;AAAA;AAAA;AAAA,EAKQ,gBACN,MAC0E;AAC1E,WAAO,CAAC,YAAY,SAAS,aAAa,QAAQ,SAAS,QAAQ,EAAE,SAAS,IAAI;AAAA,EACpF;AAAA;AAAA;AAAA;AAAA,EAKQ,iBACN,MAC8E;AAC9E,QAAI,KAAK,gBAAgB,IAAI,GAAG;AAC9B,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,MAAsE;AAC5F,WAAO,CAAC,SAAS,WAAW,WAAW,YAAY,EAAE,SAAS,IAAI;AAAA,EACpE;AAAA;AAAA;AAAA;AAAA,EAKQ,iBACN,MAC4D;AAC5D,QAAI,KAAK,gBAAgB,IAAI,GAAG;AAC9B,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AACF;;;AQlWA,SAAS,YAAAC,WAAU,cAAc;AACjC,SAAS,eAAe;AACxB,SAAS,YAAY,QAAAC,OAAM,eAAe;;;ACqCnC,IAAM,iBAA4B;AAAA,EACvC,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,IACT,OAAO;AAAA,IACP,WAAW;AAAA,EACb;AAAA,EACA,UAAU;AAAA,IACR,aAAa;AAAA,IACb,WAAW;AAAA,IACX,cAAc;AAAA,IACd,gBAAgB,CAAC,mBAAmB,WAAW,YAAY,OAAO;AAAA,EACpE;AAAA,EACA,QAAQ;AAAA,IACN,aAAa;AAAA,IACb,cAAc;AAAA,EAChB;AAAA,EACA,OAAO;AAAA,IACL,WAAW;AAAA,IACX,SAAS;AAAA,IACT,gBAAgB;AAAA,EAClB;AAAA,EACA,QAAQ;AAAA,IACN,MAAM;AAAA,IACN,MAAM;AAAA,EACR;AACF;;;ACrDA,SAAS,cAAc,OAAkD;AACvE,SACE,OAAO,UAAU,YAAY,UAAU,QAAQ,CAAC,MAAM,QAAQ,KAAK,KAAK,EAAE,iBAAiB;AAE/F;AAsBO,SAAS,UAA4B,UAAa,WAAuB;AAE9E,MAAI,CAAC,cAAc,SAAS,GAAG;AAC7B,WAAO,EAAE,GAAG,SAAS;AAAA,EACvB;AAOA,QAAM,iBAAiB;AAEvB,SAAO,iBAAiB,gBAAgB,SAAS;AACnD;AAMA,SAAS,iBACP,UACA,WACyB;AACzB,QAAM,SAAkC,EAAE,GAAG,SAAS;AAEtD,aAAW,OAAO,OAAO,KAAK,SAAS,GAAG;AACxC,UAAM,eAAe,SAAS,GAAG;AACjC,UAAM,gBAAgB,UAAU,GAAG;AAGnC,QAAI,kBAAkB,QAAW;AAC/B;AAAA,IACF;AAGA,QAAI,cAAc,YAAY,KAAK,cAAc,aAAa,GAAG;AAC/D,aAAO,GAAG,IAAI,iBAAiB,cAAc,aAAa;AAAA,IAC5D,OAAO;AAEL,aAAO,GAAG,IAAI;AAAA,IAChB;AAAA,EACF;AAEA,SAAO;AACT;;;AFzEA,IAAM,sBAAsB;AAK5B,eAAe,WAAWC,OAAgC;AACxD,MAAI;AACF,UAAM,OAAOA,KAAI;AACjB,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEO,IAAM,gBAAN,MAAoB;AAAA,EACR;AAAA,EACA;AAAA,EACA;AAAA,EACT,SAA2B;AAAA,EAEnC,YAAY,YAAqB,SAAkB,aAAsB;AAEvE,SAAK,cAAc,eAAe,mBAAmB,QAAQ;AAI7D,QAAI,eAAe,UAAa,eAAe,IAAI;AACjD,WAAK,aAAa,KAAK,WAAW,YAAY,KAAK,WAAW;AAAA,IAChE,OAAO;AACL,WAAK,aAAaC,MAAK,KAAK,aAAa,mBAAmB;AAAA,IAC9D;AAIA,QAAI,YAAY,UAAa,YAAY,IAAI;AAC3C,WAAK,UAAU,KAAK,WAAW,SAAS,KAAK,WAAW;AAAA,IAC1D,OAAO;AACL,WAAK,UAAU,KAAK,WAAW,eAAe,SAAS,KAAK,WAAW;AAAA,IACzE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,qBAA6B;AAC3B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,OAA2B;AAC/B,QAAI,KAAK,WAAW,MAAM;AACxB,aAAO,KAAK;AAAA,IACd;AAEA,UAAM,SAAS,MAAM,WAAW,KAAK,UAAU;AAC/C,QAAI,CAAC,QAAQ;AAEX,WAAK,SAAS,EAAE,GAAG,eAAe;AAClC,YAAM,KAAK,KAAK,KAAK,MAAM;AAC3B,aAAO,KAAK;AAAA,IACd;AAGA,UAAM,UAAU,MAAMC,UAAS,KAAK,YAAY,OAAO;AACvD,QAAI;AACF,WAAK,SAAS,UAAU,gBAAgB,KAAK,MAAM,OAAO,CAAC;AAAA,IAC7D,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,kCAAkC,KAAK,UAAU,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MAC9G;AAAA,IACF;AAEA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,KAAK,QAAkC;AAC3C,UAAM,gBAAgB,KAAK,YAAY,KAAK,UAAU,QAAQ,MAAM,CAAC,CAAC;AACtE,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,iBAAyB;AACvB,WAAO,KAAK
;AAAA,EACd;AAAA,EAEA,oBAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEQ,WAAWF,OAAc,SAAyB;AAExD,QAAIA,MAAK,WAAW,GAAG,GAAG;AACxB,aAAOA,MAAK,QAAQ,KAAK,QAAQ,CAAC;AAAA,IACpC;AAGA,QAAI,CAAC,WAAWA,KAAI,GAAG;AACrB,aAAO,QAAQ,SAASA,KAAI;AAAA,IAC9B;AAEA,WAAOA;AAAA,EACT;AACF;;;AG9GA,SAAS,YAAAG,WAAU,aAAAC,YAAW,UAAAC,eAAc;AAC5C,SAAS,QAAAC,aAAY;AAqBrB,IAAM,oBAAoB;AAAA,EACxB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAKA,IAAM,iBAAiB;AAAA;AAAA;AAAA;AAAA;AASvB,eAAeC,YAAWC,OAAgC;AACxD,MAAI;AACF,UAAMH,QAAOG,KAAI;AACjB,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAUO,IAAM,mBAAN,MAAuB;AAAA,EACX;AAAA,EAEjB,YAAY,aAAqB;AAC/B,SAAK,gBAAgBF,MAAK,aAAa,YAAY;AAAA,EACrD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,sBAAwC;AAC5C,UAAM,SAAS,MAAMC,YAAW,KAAK,aAAa;AAClD,QAAI,CAAC,QAAQ;AACX,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,MAAMJ,UAAS,KAAK,eAAe,OAAO;AAC1D,UAAM,QAAQ,QAAQ,MAAM,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC;AAErD,eAAW,WAAW,mBAAmB;AACvC,UAAI,CAAC,MAAM,SAAS,OAAO,GAAG;AAC5B,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,0BAA0E;AAC9E,UAAM,SAAS,MAAMI,YAAW,KAAK,aAAa;AAElD,QAAI,CAAC,QAAQ;AAEX,YAAM,UAAU,GAAG,eAAe,KAAK,CAAC;AAAA,EAAK,kBAAkB,KAAK,IAAI,CAAC;AAAA;AACzE,YAAMH,WAAU,KAAK,eAAe,OAAO;AAC3C,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS;AAAA,MACX;AAAA,IACF;AAGA,UAAM,kBAAkB,MAAMD,UAAS,KAAK,eAAe,OAAO;AAClE,UAAM,QAAQ,gBAAgB,MAAM,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC;AAG7D,UAAM,kBAAkB,kBAAkB,OAAO,CAAC,YAAY,CAAC,MAAM,SAAS,OAAO,CAAC;AAEtF,QAAI,gBAAgB,WAAW,GAAG;AAChC,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS;AAAA,MACX;AAAA,IACF;AAGA,QAAI,aAAa;AACjB,QAAI,CAAC,WAAW,SAAS,IAAI,GAAG;AAC9B,oBAAc;AAAA,IAChB;AAEA,kBAAc;AACd,kBAAc,GAAG,gBAAgB,KAAK,IAAI,CAAC;AAAA;AAE3C,UAAMC,WAAU,KAAK,eAAe,UAAU;AAE9C,WAAO;AAAA,MACL,SAAS;AAAA,MACT,SAAS,2BAA2B,OAAO,gBAAgB,MAAM,CAAC;AAAA,IACpE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,mBAA2B;AACzB,WAAO,KAAK;AAAA,EACd;AACF;;;ACrJA,SAAS,cAAAK,mBAAkB;AAC3B,SAAS,YAAAC,WAAU,eAAe;AAClC,SAAS,QAAAC,OAAM,SAAS,UAAU,gBAAgB;;;ACsBlD,IAAM,gBAAgB;AAAA,EACpB,MAAM,EAAE,WAAW,KAAK,cAAc,IAAI;AAAA,EAC1C,KAAK,EAAE,WAAW,MAAM,cAAc,IAAI;AAAA,EAC1C,MAAM,EAAE,WAAW,MAAM,cAAc,IAAI;AAC7C;AAIO,IAAM,kBAAN,MAAM,iBAAgB;AAAA,EACV;AAAA,EACA;AAAA,EAEjB,YAAY,QAAqB;AAC/B,QAAI,OAAO,gBAAgB,OAAO,WAAW;AAC3C,YAAM,IAAI;AAAA,QACR,iBAAiB,OAAO,OAAO,YAAY,CAAC,kCAAkC,OAAO,OAAO,SAAS,CAAC;AAAA,MACxG;AAAA,IACF;AACA,SAAK,YAAY,OAAO;AACxB,SAAK,eAAe,OAAO;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,OAAO,eAAe,MAAoC;AACxD,WAAO,IAAI,iBAAgB,cAAc,IAAI,CAAC;AAAA,EAChD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,MAAc,UAA4B;AAE9C,QAAI,aAAa,UAAa,aAAa,MAAM,SAAS,KAAK,QAAQ,GAAG;AACxE,aAAO,KAAK,cAAc,IAAI;AAAA,IAChC;AAGA,QAAI,aAAa,UAAa,aAAa,MAAM,sBAAsB,KAAK,QAAQ,GAAG;AACrF,aAAO,KAAK,UAAU,IAAI;AAAA,IAC5B;AAEA,WAAO,KAAK,mBAAmB,IAAI;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,cAAc,MAAuB;AAE3C,UAAM,cAAc;AACpB,UAAM,WAA4E,CAAC;AAEnF,QAAI,YAAY;AAChB,QAAI,aAAa;AACjB,QAAI;AAEJ,YAAQ,QAAQ,YAAY,KAAK,IAAI,OAAO,MAAM;AAEhD,UAAI,MAAM,QAAQ,WAAW;AAC3B,cAAM,UAAU,KAAK,MAAM,WAAW,MAAM,KAAK,EAAE,KAAK;AACxD,YAAI,SAAS;AACX,mBAAS,KAAK;AAAA,YACZ,QAAQ;AAAA,YACR;AAAA,YACA,aAAa;AAAA,UACf,CAAC;AAAA,QACH;AAAA,MACF;AACA,mBAAa,MAAM,CAAC,KAAK;AACzB,kBAAY,MAAM;AAAA,IACpB;AAGA,UAAM,eAAe,KAAK,MAAM,SAAS,EAAE,KAAK;AAChD,QAAI,cAAc;AAChB,eAAS,KAAK;AAAA,QACZ,QAAQ;AAAA,QACR,SAAS;AAAA,QACT,aAAa;AAAA,MACf,CAAC;AAAA,IACH;AAGA,QAAI,SAAS,WAAW,GAAG;AACzB,aAAO,KAAK,mBAAmB,IAAI;AAAA,IACrC;AAGA,UAAM,SAAkB,CAAC;AAEzB,eAAW,WAAW,UAAU;AAC9B,UAAI,QAAQ,QAAQ,UAAU,KAAK,WAAW;AAE5C,eAAO,KAAK;AAAA,UACV,SAAS,QAAQ;AAAA,UACjB,YAAY,OAAO;AAAA,UACnB,aAAa;AAAA,UACb,aAAa,QAAQ;AAAA,UACrB,WAAW,QAAQ,cAAc,QAAQ,QAAQ;AAAA
,UACjD,eAAe,QAAQ,UAAU;AAAA,QACnC,CAAC;AAAA,MACH,OAAO;AAEL,cAAM,gBAAgB,KAAK,mBAAmB,QAAQ,OAAO;AAC7D,mBAAW,YAAY,eAAe;AACpC,iBAAO,KAAK;AAAA,YACV,GAAG;AAAA,YACH,YAAY,OAAO;AAAA,YACnB,aAAa,QAAQ,cAAc,SAAS;AAAA,YAC5C,WAAW,QAAQ,cAAc,SAAS;AAAA,YAC1C,eAAe,QAAQ,UAAU;AAAA,UACnC,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAGA,eAAW,SAAS,QAAQ;AAC1B,YAAM,cAAc,OAAO;AAAA,IAC7B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,UAAU,MAAuB;AAEvC,UAAM,mBACJ;AACF,UAAM,eAAiF,CAAC;AAExF,QAAI;AACJ,YAAQ,QAAQ,iBAAiB,KAAK,IAAI,OAAO,MAAM;AACrD,YAAM,OAAO,MAAM,CAAC;AACpB,YAAM,OAAkE;AAAA,QACtE,aAAa,MAAM;AAAA,QACnB,WAAW,MAAM;AAAA,MACnB;AACA,UAAI,SAAS,QAAW;AACtB,aAAK,OAAO;AAAA,MACd;AACA,mBAAa,KAAK,IAAI;AAAA,IACxB;AAGA,QAAI,aAAa,WAAW,GAAG;AAC7B,aAAO,KAAK,mBAAmB,IAAI;AAAA,IACrC;AAGA,aAAS,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK;AAC5C,YAAM,cAAc,aAAa,CAAC;AAClC,YAAM,WAAW,aAAa,IAAI,CAAC;AACnC,UAAI,gBAAgB,OAAW;AAI/B,YAAM,WAAW,KAAK,MAAM,YAAY,WAAW;AACnD,UACE,oFAAoF;AAAA,QAClF;AAAA,MACF,GACA;AACA,cAAM,WAAW,KAAK,mBAAmB,QAAQ;AACjD,YAAI,WAAW,GAAG;AAChB,sBAAY,YAAY,YAAY,cAAc;AAAA,QACpD,OAAO;AAEL,sBAAY,YAAY,aAAa,SAAY,SAAS,cAAc,KAAK;AAAA,QAC/E;AAAA,MACF,OAAO;AAEL,oBAAY,YAAY,aAAa,SAAY,SAAS,cAAc,KAAK;AAAA,MAC/E;AAAA,IACF;AAEA,UAAM,SAAkB,CAAC;AAEzB,eAAW,QAAQ,cAAc;AAC/B,YAAM,UAAU,KAAK,MAAM,KAAK,aAAa,KAAK,SAAS,EAAE,KAAK;AAElE,UAAI,QAAQ,UAAU,KAAK,WAAW;AAEpC,eAAO,KAAK;AAAA,UACV;AAAA,UACA,YAAY,OAAO;AAAA,UACnB,aAAa;AAAA,UACb,aAAa,KAAK;AAAA,UAClB,WAAW,KAAK;AAAA,UAChB,cAAc,KAAK;AAAA,QACrB,CAAC;AAAA,MACH,OAAO;AAEL,cAAM,aAAa,KAAK,mBAAmB,OAAO;AAClD,mBAAW,YAAY,YAAY;AACjC,iBAAO,KAAK;AAAA,YACV,GAAG;AAAA,YACH,YAAY,OAAO;AAAA,YACnB,aAAa,KAAK,cAAc,SAAS;AAAA,YACzC,WAAW,KAAK,cAAc,SAAS;AAAA,YACvC,cAAc,KAAK;AAAA,UACrB,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAGA,eAAW,SAAS,QAAQ;AAC1B,YAAM,cAAc,OAAO;AAAA,IAC7B;AAEA,WAAO,OAAO,SAAS,IAAI,SAAS,KAAK,mBAAmB,IAAI;AAAA,EAClE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,mBAAmB,MAAsB;AAC/C,QAAI,aAAa;AACjB,QAAI,WAAW;AACf,QAAI,sBAAsB;AAC1B,QAAI,qBAAqB;AACzB,QAAI,aAAa;AACjB,QAAI,IAAI;AACR,QAAI,kBAAkB;AAGtB,WAAO,IAAI,KAAK,QAAQ;AACtB,YAAM,OAAO,KAAK,CAAC;AACnB,YAAM,WAAW,IAAI,IAAI,KAAK,SAAS,KAAK,IAAI,CAAC,IAAI;AAGrD,UAAI,CAAC,YAAY,CAAC,sBAAsB,SAAS,OAAO,aAAa,KAAK;AACxE,8BAAsB;AACtB,aAAK;AACL;AAAA,MACF;AAEA,UAAI,CAAC,YAAY,CAAC,uBAAuB,SAAS,OAAO,aAAa,KAAK;AACzE,6BAAqB;AACrB,aAAK;AACL;AAAA,MACF;AAEA,UAAI,sBAAsB,SAAS,OAAO,aAAa,KAAK;AAC1D,6BAAqB;AACrB,aAAK;AACL;AAAA,MACF;AAEA,UAAI,uBAAuB,SAAS,MAAM;AACxC,8BAAsB;AACtB;AACA;AAAA,MACF;AAGA,UAAI,uBAAuB,oBAAoB;AAC7C;AACA;AAAA,MACF;AAGA,UAAI,CAAC,aAAa,SAAS,OAAO,SAAS,OAAO,SAAS,MAAM;AAC/D,mBAAW;AACX,qBAAa;AACb;AACA;AAAA,MACF;AAEA,UAAI,YAAY,SAAS,MAAM;AAE7B,aAAK;AACL;AAAA,MACF;AAEA,UAAI,YAAY,SAAS,YAAY;AACnC,mBAAW;AACX,qBAAa;AACb;AACA;AAAA,MACF;AAGA,UAAI,UAAU;AACZ;AACA;AAAA,MACF;AAGA,UAAI,SAAS,KAAK;AAChB;AACA,0BAAkB;AAAA,MACpB,WAAW,SAAS,KAAK;AACvB;AACA,YAAI,mBAAmB,eAAe,GAAG;AAEvC,iBAAO,IAAI;AAAA,QACb;AAAA,MACF;AAEA;AAAA,IACF;AAGA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,MAAuB;AAChD,QAAI,KAAK,UAAU,KAAK,WAAW;AACjC,aAAO;AAAA,QACL;AAAA,UACE,SAAS;AAAA,UACT,YAAY;AAAA,UACZ,aAAa;AAAA,UACb,aAAa;AAAA,UACb,WAAW,KAAK;AAAA,QAClB;AAAA,MACF;AAAA,IACF;AAEA,UAAM,SAAkB,CAAC;AACzB,UAAM,OAAO,KAAK,YAAY,KAAK;AACnC,QAAI,QAAQ;AAEZ,WAAO,QAAQ,KAAK,QAAQ;AAC1B,YAAM,MAAM,KAAK,IAAI,QAAQ,KAAK,WAAW,KAAK,MAAM;AACxD,aAAO,KAAK;AAAA,QACV,SAAS,KAAK,MAAM,OAAO,GAAG;AAAA,QAC9B,YAAY,OAAO;AAAA,QACnB,aAAa;AAAA,QACb,aAAa;AAAA,QACb,WAAW;AAAA,MACb,CAAC;AACD,eAAS;AACT,UAAI,QAAQ,KAAK,OAAQ;AAAA,IAC3B;AAGA,eAAW,SAAS,QAAQ;AAC1B,YAAM,cAAc,OAAO;AAAA,IAC7B;AAEA,WAAO;AAAA,EACT;AACF;;;AChYA,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,YAAAC,WAAU,YAAY;AAsBxB,IAAM,eAAN,MAAmB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA
;AAAA;AAAA,EAQxB,MAAM,cACJ,UACA,cACsB;AACtB,UAAM,SAAsB;AAAA,MAC1B,OAAO,CAAC;AAAA,MACR,UAAU,CAAC;AAAA,MACX,SAAS,CAAC;AAAA,MACV,WAAW,CAAC;AAAA,IACd;AAGA,UAAM,iBAAiB,IAAI,IAAI,aAAa,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC;AAC9D,UAAM,gBAAgB,IAAI,IAAI,OAAO,KAAK,SAAS,KAAK,CAAC;AAGzD,eAAWC,SAAQ,eAAe;AAChC,UAAI,CAAC,eAAe,IAAIA,KAAI,GAAG;AAC7B,eAAO,QAAQ,KAAKA,KAAI;AAAA,MAC1B;AAAA,IACF;AAGA,UAAM,sBAA0C,CAAC;AAEjD,eAAW,QAAQ,cAAc;AAC/B,YAAM,gBAAgB,SAAS,MAAM,KAAK,IAAI;AAE9C,UAAI,kBAAkB,QAAW;AAE/B,eAAO,MAAM,KAAK,KAAK,IAAI;AAAA,MAC7B,OAAO;AAEL,YAAI,KAAK,UAAU,cAAc,SAAS,KAAK,SAAS,cAAc,MAAM;AAE1E,iBAAO,UAAU,KAAK,KAAK,IAAI;AAAA,QACjC,OAAO;AAEL,8BAAoB,KAAK,IAAI;AAAA,QAC/B;AAAA,MACF;AAAA,IACF;AAGA,eAAW,QAAQ,qBAAqB;AACtC,YAAM,gBAAgB,SAAS,MAAM,KAAK,IAAI;AAC9C,UAAI,kBAAkB,QAAW;AAE/B,eAAO,MAAM,KAAK,KAAK,IAAI;AAC3B;AAAA,MACF;AAEA,YAAM,cAAc,MAAM,KAAK,gBAAgB,KAAK,IAAI;AAExD,UAAI,gBAAgB,cAAc,MAAM;AAEtC,eAAO,UAAU,KAAK,KAAK,IAAI;AAAA,MACjC,OAAO;AAEL,eAAO,SAAS,KAAK,KAAK,IAAI;AAAA,MAChC;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAAaA,OAAyC;AAC1D,UAAM,QAAQ,MAAM,KAAKA,KAAI;AAC7B,WAAO;AAAA,MACL,MAAAA;AAAA,MACA,OAAO,MAAM;AAAA,MACb,MAAM,MAAM;AAAA,IACd;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,gBAAgBA,OAA+B;AACnD,UAAM,UAAU,MAAMD,UAASC,KAAI;AACnC,WAAOF,YAAW,KAAK,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK;AAAA,EACvD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,gBACJE,OACA,aACkD;AAClD,UAAM,QAAQ,MAAM,KAAKA,KAAI;AAC7B,UAAM,UAAU,MAAMD,UAASC,KAAI;AACnC,UAAM,OAAOF,YAAW,KAAK,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK;AAG3D,UAAM,EAAE,kBAAAG,kBAAiB,IAAI,MAAM,OAAO,sBAAoB;AAE9D,WAAO;AAAA,MACL,OAAO;AAAA,QACL,OAAO,MAAM;AAAA,QACb,MAAM,MAAM;AAAA,QACZ;AAAA,QACA,aAAa,YAAY,IAAI,CAAC,OAAOA,kBAAiB,EAAE,CAAC;AAAA,MAC3D;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;;;AF7HA,IAAM,SAAS,aAAa,eAAe;AAwB3C,IAAM,kBAAkB,oBAAI,IAAI;AAAA;AAAA,EAE9B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA;AAAA,EAEA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAEM,IAAM,eAAN,MAAmB;AAAA,EACP;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEjB,YACE,YACA,iBACA,UAAwB,CAAC,GACzB;AACA,SAAK,aAAa;AAClB,SAAK,kBAAkB;AACvB,SAAK,UAAU,IAAI,gBAAgB;AAAA,MACjC,WAAW,QAAQ,aAAa;AAAA,MAChC,cAAc,QAAQ,gBAAgB;AAAA,IACxC,CAAC;AACD,SAAK,mBAAmB,QAAQ;AAChC,SAAK,kBAAkB,QAAQ;AAC/B,SAAK,eAAe,IAAI,aAAa;AACrC,SAAK,cAAc,QAAQ,eAAe;AAE1C,UAAM,SAAS,+BAA+B,QAAQ,kBAAkB,CAAC,CAAC;AAC1E,SAAK,aAAa,OAAO;AACzB,SAAK,qBAAqB,OAAO;AAAA,EACnC;AAAA,EAEA,MAAM,WAAW,OAAc,YAA6D;AAC1F,WAAO;AAAA,MACL;AAAA,QACE,SAAS,MAAM;AAAA,QACf,WAAW,MAAM;AAAA,QACjB,WAAW,MAAM;AAAA,MACnB;AAAA,MACA;AAAA,IACF;AAEA,QAAI;AACF,UAAI,MAAM,SAAS,UAAU,MAAM,SAAS,QAAQ;AAClD,eAAO,MAAM,KAAK,eAAe,OAAO,UAAU;AAAA,MACpD;AAEA,aAAO;AAAA,QACL,EAAE,SAAS,MAAM,IAAI,WAAW,MAAM,KAAK;AAAA,QAC3C;
AAAA,MACF;AACA,aAAO,IAAI,IAAI,MAAM,0CAA0C,MAAM,IAAI,EAAE,CAAC;AAAA,IAC9E,SAAS,OAAO;AACd,aAAO;AAAA,QACL;AAAA,UACE,SAAS,MAAM;AAAA,UACf,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QAC9D;AAAA,QACA;AAAA,MACF;AACA,aAAO,IAAI,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAAA,IACtE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,sBACJ,OACA,YACyC;AACzC,QAAI,KAAK,oBAAoB,QAAW;AACtC,aAAO,IAAI,IAAI,MAAM,mDAAmD,CAAC;AAAA,IAC3E;AAEA,QAAI,MAAM,SAAS,UAAU,MAAM,SAAS,QAAQ;AAClD,aAAO,IAAI,IAAI,MAAM,sDAAsD,MAAM,IAAI,EAAE,CAAC;AAAA,IAC1F;AAEA,WAAO;AAAA,MACL;AAAA,QACE,SAAS,MAAM;AAAA,QACf,WAAW,MAAM;AAAA,QACjB,WAAW,MAAM;AAAA,MACnB;AAAA,MACA;AAAA,IACF;AAEA,UAAM,YAAY,KAAK,IAAI;AAE3B,QAAI;AAEF,YAAM,WAAW,MAAM,KAAK,gBAAgB,KAAK,MAAM,EAAE;AAGzD,YAAM,YAAY,MAAM,KAAK,cAAc,MAAM,IAAI;AACrD,YAAM,eAAe,MAAM,QAAQ;AAAA,QACjC,UAAU,IAAI,CAACC,UAAS,KAAK,aAAa,aAAaA,KAAI,CAAC;AAAA,MAC9D;AAGA,YAAM,QAAQ,MAAM,KAAK,aAAa,cAAc,UAAU,YAAY;AAE1E,aAAO;AAAA,QACL;AAAA,UACE,SAAS,MAAM;AAAA,UACf,OAAO,MAAM,MAAM;AAAA,UACnB,UAAU,MAAM,SAAS;AAAA,UACzB,SAAS,MAAM,QAAQ;AAAA,UACvB,WAAW,MAAM,UAAU;AAAA,QAC7B;AAAA,QACA;AAAA,MACF;AAGA,YAAM,sBAAoC,CAAC;AAC3C,iBAAWA,SAAQ,CAAC,GAAG,MAAM,UAAU,GAAG,MAAM,OAAO,GAAG;AACxD,cAAM,YAAY,SAAS,MAAMA,KAAI;AACrC,YAAI,cAAc,QAAW;AAC3B,8BAAoB,KAAK,GAAG,UAAU,WAAW;AAAA,QACnD;AAAA,MACF;AAGA,UAAI,oBAAoB,SAAS,GAAG;AAClC,cAAM,KAAK,WAAW,gBAAgB,MAAM,IAAI,mBAAmB;AACnE,eAAO;AAAA,UACL,EAAE,SAAS,MAAM,IAAI,OAAO,oBAAoB,OAAO;AAAA,UACvD;AAAA,QACF;AAAA,MACF;AAGA,YAAM,iBAAiB,CAAC,GAAG,MAAM,OAAO,GAAG,MAAM,QAAQ;AACzD,YAAM,aAAa,eAAe;AAElC,mBAAa;AAAA,QACX,MAAM;AAAA,QACN,SAAS;AAAA,QACT,OAAO;AAAA,QACP,SAAS,cAAc,OAAO,UAAU,CAAC;AAAA,MAC3C,CAAC;AAED,YAAM,YAAwB,CAAC;AAC/B,YAAM,mBAAmD,CAAC;AAC1D,UAAI,iBAAiB;AAGrB,iBAAWA,SAAQ,MAAM,WAAW;AAClC,cAAM,gBAAgB,SAAS,MAAMA,KAAI;AACzC,YAAI,kBAAkB,QAAW;AAC/B,2BAAiBA,KAAI,IAAI;AAAA,QAC3B;AAAA,MACF;AAGA,eAAS,IAAI,GAAG,IAAI,eAAe,QAAQ,KAAK,KAAK,aAAa;AAChE,cAAM,QAAQ,eAAe,MAAM,GAAG,IAAI,KAAK,WAAW;AAE1D,cAAM,eAAe,MAAM,QAAQ;AAAA,UACjC,MAAM,IAAI,OAAO,aAAa;AAC5B,gBAAI;AACF,oBAAM,SAAS,MAAM,KAAK,YAAY,UAAU,KAAK;AACrD,oBAAM,cAAc,OAAO,UAAU,IAAI,CAAC,MAAM,EAAE,EAAE;AAGpD,oBAAM,EAAE,MAAM,IAAI,MAAM,KAAK,aAAa,gBAAgB,UAAU,WAAW;AAE/E,qBAAO;AAAA,gBACL;AAAA,gBACA,WAAW,OAAO;AAAA,gBAClB,WAAW;AAAA,cACb;AAAA,YACF,SAAS,OAAO;AACd,qBAAO;AAAA,gBACL,EAAE,UAAU,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,EAAE;AAAA,gBAC1E;AAAA,cACF;AACA,qBAAO;AAAA,YACT;AAAA,UACF,CAAC;AAAA,QACH;AAGA,mBAAW,UAAU,cAAc;AACjC,cAAI,WAAW,MAAM;AACnB,sBAAU,KAAK,GAAG,OAAO,SAAS;AAClC,6BAAiB,OAAO,QAAQ,IAAI,OAAO;AAAA,UAC7C;AAAA,QACF;AAEA,0BAAkB,MAAM;AAExB,qBAAa;AAAA,UACX,MAAM;AAAA,UACN,SAAS;AAAA,UACT,OAAO;AAAA,UACP,SAAS,aAAa,OAAO,cAAc,CAAC,IAAI,OAAO,UAAU,CAAC;AAAA,QACpE,CAAC;AAAA,MACH;AAGA,UAAI,UAAU,SAAS,GAAG;AACxB,cAAM,KAAK,WAAW,aAAa,MAAM,IAAI,SAAS;AAAA,MACxD;AAGA,UAAI,oBAAoB,SAAS,KAAK,UAAU,SAAS,GAAG;AAC1D,cAAM,KAAK,WAAW,eAAe,MAAM,EAAE;AAAA,MAC/C;AAGA,UAAI,KAAK,kBAAkB;AACzB,cAAM,mBAAmB,CAAC,OAAO,QAAQ,OAAO,QAAQ,OAAO,OAAO,KAAK;AAC3E,cAAM,mBACJ,eAAe,KAAK,CAAC,MAAM,iBAAiB,SAAS,QAAQ,CAAC,EAAE,YAAY,CAAC,CAAC,KAC9E,MAAM,QAAQ,KAAK,CAAC,MAAM,iBAAiB,SAAS,QAAQ,CAAC,EAAE,YAAY,CAAC,CAAC;AAE/E,YAAI,kBAAkB;AAEpB,gBAAM,iBAA2D,CAAC;AAClE,gBAAM,WAAW,CAAC,GAAG,MAAM,WAAW,GAAG,cAAc;AAEvD,qBAAW,YAAY,UAAU;AAC/B,kBAAM,MAAM,QAAQ,QAAQ,EAAE,YAAY;AAC1C,gBAAI,iBAAiB,SAAS,GAAG,GAAG;AAClC,kBAAI;AACF,sBAAM,UAAU,MAAMC,UAAS,UAAU,OAAO;AAChD,+BAAe,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AAAA,cACjD,QAAQ;AAAA,cAER;AAAA,YACF;AAAA,UACF;AAEA,cAAI,eAAe,SAAS,GAAG;AAC7B,kBAAM,QAAQ,MAAM,KAAK,iBAAiB,WAAW,cAAc;AACnE,kBAAM,KAAK,iBAAiB,UAAU,MAAM,IAAI,KAAK;AACrD,mBAAO;AAAA,cACL,EAAE,SAAS,MAAM,IAAI,aAAa,eAAe,OAAO;AAAA,cACxD;AAAA,YACF;AAAA,
UACF,OAAO;AAEL,kBAAM,KAAK,iBAAiB,YAAY,MAAM,EAAE;AAChD,mBAAO;AAAA,cACL,EAAE,SAAS,MAAM,GAAG;AAAA,cACpB;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAGA,YAAM,kBAAsC;AAAA,QAC1C,SAAS;AAAA,QACT,SAAS,MAAM;AAAA,QACf,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,OAAO;AAAA,MACT;AACA,YAAM,KAAK,gBAAgB,KAAK,eAAe;AAE/C,mBAAa;AAAA,QACX,MAAM;AAAA,QACN,SAAS;AAAA,QACT,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAED,YAAM,SAAS,KAAK,IAAI,IAAI;AAE5B,aAAO;AAAA,QACL;AAAA,UACE,SAAS,MAAM;AAAA,UACf,WAAW,MAAM;AAAA,UACjB,YAAY,MAAM,MAAM;AAAA,UACxB,eAAe,MAAM,SAAS;AAAA,UAC9B,cAAc,MAAM,QAAQ;AAAA,UAC5B,gBAAgB,MAAM,UAAU;AAAA,UAChC,eAAe,UAAU;AAAA,UACzB;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAEA,aAAO,GAAG;AAAA,QACR,cAAc,eAAe;AAAA,QAC7B,eAAe,UAAU;AAAA,QACzB;AAAA,QACA,YAAY,MAAM,MAAM;AAAA,QACxB,eAAe,MAAM,SAAS;AAAA,QAC9B,cAAc,MAAM,QAAQ;AAAA,QAC5B,gBAAgB,MAAM,UAAU;AAAA,MAClC,CAAC;AAAA,IACH,SAAS,OAAO;AACd,aAAO;AAAA,QACL;AAAA,UACE,SAAS,MAAM;AAAA,UACf,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QAC9D;AAAA,QACA;AAAA,MACF;AACA,aAAO,IAAI,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAAA,IACtE;AAAA,EACF;AAAA,EAEA,MAAc,eACZ,OACA,YAC8B;AAC9B,UAAM,YAAY,KAAK,IAAI;AAG3B,UAAM,KAAK,WAAW,kBAAkB,MAAM,EAAE;AAGhD,QAAI,KAAK,iBAAiB;AACxB,YAAM,KAAK,gBAAgB,OAAO,MAAM,EAAE;AAAA,IAC5C;AAEA,UAAM,QAAQ,MAAM,KAAK,cAAc,MAAM,IAAI;AACjD,UAAM,YAAwB,CAAC;AAC/B,QAAI,iBAAiB;AAErB,WAAO;AAAA,MACL;AAAA,QACE,SAAS,MAAM;AAAA,QACf,MAAM,MAAM;AAAA,QACZ,WAAW,MAAM;AAAA,QACjB,aAAa,KAAK;AAAA,MACpB;AAAA,MACA;AAAA,IACF;AAGA,UAAM,cAAwD,CAAC;AAG/D,iBAAa;AAAA,MACX,MAAM;AAAA,MACN,SAAS;AAAA,MACT,OAAO,MAAM;AAAA,MACb,SAAS;AAAA,IACX,CAAC;AAGD,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,KAAK,aAAa;AACvD,YAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,KAAK,WAAW;AAEjD,YAAM,eAAe,MAAM,QAAQ;AAAA,QACjC,MAAM,IAAI,OAAO,aAAa;AAC5B,cAAI;AACF,mBAAO,MAAM,KAAK,YAAY,UAAU,KAAK;AAAA,UAC/C,SAAS,OAAO;AACd,mBAAO;AAAA,cACL,EAAE,UAAU,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,EAAE;AAAA,cAC1E;AAAA,YACF;AACA,mBAAO,EAAE,WAAW,CAAC,GAAG,YAAY,OAAU;AAAA,UAChD;AAAA,QACF,CAAC;AAAA,MACH;AAGA,iBAAW,UAAU,cAAc;AACjC,kBAAU,KAAK,GAAG,OAAO,SAAS;AAClC,YAAI,OAAO,eAAe,QAAW;AACnC,sBAAY,KAAK,OAAO,UAAU;AAAA,QACpC;AAAA,MACF;AAEA,wBAAkB,MAAM;AAGxB,mBAAa;AAAA,QACX,MAAM;AAAA,QACN,SAAS;AAAA,QACT,OAAO,MAAM;AAAA,QACb,SAAS,WAAW,OAAO,cAAc,CAAC,IAAI,OAAO,MAAM,MAAM,CAAC;AAAA,MACpE,CAAC;AAAA,IACH;AAEA,QAAI,UAAU,SAAS,GAAG;AACxB,YAAM,KAAK,WAAW,aAAa,MAAM,IAAI,SAAS;AAEtD,YAAM,KAAK,WAAW,eAAe,MAAM,EAAE;AAAA,IAC/C;AAGA,QAAI,KAAK,oBAAoB,YAAY,SAAS,GAAG;AACnD,YAAM,QAAQ,MAAM,KAAK,iBAAiB,WAAW,WAAW;AAChE,YAAM,KAAK,iBAAiB,UAAU,MAAM,IAAI,KAAK;AAAA,IACvD,WAAW,KAAK,kBAAkB;AAEhC,YAAM,KAAK,iBAAiB,YAAY,MAAM,EAAE;AAAA,IAClD;AAGA,iBAAa;AAAA,MACX,MAAM;AAAA,MACN,SAAS,MAAM;AAAA,MACf,OAAO,MAAM;AAAA,MACb,SAAS;AAAA,IACX,CAAC;AAED,UAAM,SAAS,KAAK,IAAI,IAAI;AAE5B,WAAO;AAAA,MACL;AAAA,QACE,SAAS,MAAM;AAAA,QACf,WAAW,MAAM;AAAA,QACjB,cAAc;AAAA,QACd,eAAe,UAAU;AAAA,QACzB,qBAAqB,YAAY;AAAA,QACjC;AAAA,MACF;AAAA,MACA;AAAA,IACF;AAEA,WAAO,GAAG;AAAA,MACR,cAAc;AAAA,MACd,eAAe,UAAU;AAAA,MACzB;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,YACZ,UACA,OAIC;AACD,UAAM,UAAU,MAAMA,UAAS,UAAU,OAAO;AAChD,UAAM,WAAWC,YAAW,KAAK,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK;AAC/D,UAAM,SAAS,KAAK,QAAQ,MAAM,SAAS,QAAQ;AAInD,UAAM,eAAe,SAAS,MAAM,MAAM,QAAQ;AAClD,UAAM,WAAWA,YAAW,KAAK,EAAE,OAAO,YAAY,EAAE,OAAO,KAAK,EAAE,MAAM,GAAG,CAAC;AAEhF,UAAM,MAAM,QAAQ,QAAQ,EAAE,YAAY;AAC1C,UAAM,WAAW,SAAS,QAAQ,EAAE,YAAY;AAChD,UAAM,WAAW,KAAK,iBAAiB,KAAK,UAAU,QAAQ;AAG9D,UAAM,aAAa,CAAC,OAAO,QAAQ,OAAO,QAAQ,OAAO,OAAO,KAAK,EAAE,SAAS,GAAG,IAC/E,EAAE,MAAM,UAAU,QAAQ,IAC1B;AAGJ,QAAI,OAAO,WAAW,GAAG;AACvB,aAAO,EAAE,WAAW,CAAC,GAAG,WAAW;AAAA,IACrC;AAGA,UAAM,gBAAgB,OAAO,IAAI,CAAC,MAAM,EAAE,OAAO;AACjD,UAAM,UAAU
,MAAM,KAAK,gBAAgB,WAAW,aAAa;AAEnE,UAAM,YAAwB,CAAC;AAC/B,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,YAAM,QAAQ,OAAO,CAAC;AACtB,YAAM,SAAS,QAAQ,CAAC;AAGxB,UAAI,UAAU,UAAa,WAAW,QAAW;AAC/C,cAAM,IAAI;AAAA,UACR,kCAAkC,OAAO,CAAC,CAAC,WAAW,OAAO,UAAU,MAAS,CAAC,YAAY,OAAO,WAAW,MAAS,CAAC;AAAA,QAC3H;AAAA,MACF;AAGA,YAAM,UACJ,OAAO,SAAS,IACZ,GAAG,MAAM,EAAE,IAAI,QAAQ,IAAI,QAAQ,IAAI,OAAO,MAAM,UAAU,CAAC,KAC/D,GAAG,MAAM,EAAE,IAAI,QAAQ,IAAI,QAAQ;AAEzC,gBAAU,KAAK;AAAA,QACb,IAAI,iBAAiB,OAAO;AAAA,QAC5B,SAAS,MAAM;AAAA,QACf;AAAA,QACA,UAAU;AAAA,UACR,MAAM,OAAO,SAAS,IAAI,UAAU;AAAA,UACpC,SAAS,MAAM;AAAA,UACf,MAAM;AAAA,UACN,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,UAClC;AAAA,UACA,YAAY,MAAM;AAAA,UAClB,aAAa,MAAM;AAAA,UACnB;AAAA,UACA,eAAe,MAAM;AAAA,UACrB,cAAc,MAAM;AAAA,UACpB,gBAAgB,qBAAqB,KAAK,MAAM,OAAO;AAAA,UACvD,YAAY,MAAM;AAAA,QACpB;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO,EAAE,WAAW,WAAW;AAAA,EACjC;AAAA,EAEA,MAAc,cAAc,KAAgC;AAC1D,UAAM,QAAkB,CAAC;AACzB,UAAM,UAAU,MAAM,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAE1D,eAAW,SAAS,SAAS;AAC3B,YAAM,WAAWC,MAAK,KAAK,MAAM,IAAI;AAErC,UAAI,MAAM,YAAY,GAAG;AAEvB,YAAI,CAAC,KAAK,WAAW,IAAI,MAAM,IAAI,GAAG;AACpC,gBAAM,KAAK,GAAI,MAAM,KAAK,cAAc,QAAQ,CAAE;AAAA,QACpD;AAAA,MACF,WAAW,MAAM,OAAO,GAAG;AAEzB,cAAM,eAAe,KAAK,mBAAmB,KAAK,CAAC,YAAY,QAAQ,MAAM,IAAI,CAAC;AAClF,YAAI,cAAc;AAChB;AAAA,QACF;AAEA,cAAM,MAAM,QAAQ,MAAM,IAAI,EAAE,YAAY;AAC5C,YAAI,gBAAgB,IAAI,GAAG,GAAG;AAC5B,gBAAM,KAAK,QAAQ;AAAA,QACrB;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,iBAAiB,KAAa,UAAkB,UAA0B;AAEhF,QAAI,QAAQ,OAAO;AAEjB,UAAI,aAAa,kBAAkB,aAAa,gBAAgB,aAAa,KAAK,QAAQ,GAAG;AAC3F,eAAO;AAAA,MACT;AAEA,UAAI,CAAC,aAAa,gBAAgB,iBAAiB,EAAE,SAAS,QAAQ,GAAG;AACvE,eAAO;AAAA,MACT;AAEA,UAAI,0DAA0D,KAAK,QAAQ,GAAG;AAC5E,eAAO;AAAA,MACT;AACA,aAAO;AAAA,IACT;AAGA,QAAI,0BAA0B,KAAK,QAAQ,KAAK,gBAAgB,KAAK,QAAQ,GAAG;AAC9E,aAAO;AAAA,IACT;AAGA,QAAI,gBAAgB,KAAK,QAAQ,KAAK,SAAS,SAAS,SAAS,GAAG;AAClE,aAAO;AAAA,IACT;AAGA,QAAI,qEAAqE,KAAK,QAAQ,GAAG;AACvF,aAAO;AAAA,IACT;AAGA,QAAI,CAAC,OAAO,QAAQ,OAAO,QAAQ,OAAO,OAAO,OAAO,OAAO,EAAE,SAAS,GAAG,GAAG;AAG9E,UAAI,KAAK,yBAAyB,UAAU,QAAQ,GAAG;AACrD,eAAO;AAAA,MACT;AACA,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,yBAAyB,UAAkB,UAA2B;AAC5E,UAAM,YAAY,SAAS,YAAY;AACvC,UAAM,gBAAgB,SAAS,YAAY;AAG3C,QAAI,2BAA2B,KAAK,SAAS,GAAG;AAE9C,UAAI,kBAAkB,cAAc,kBAAkB,YAAY;AAChE,eAAO;AAAA,MACT;AACA,aAAO;AAAA,IACT;AAGA,QAAI,uDAAuD,KAAK,SAAS,GAAG;AAC1E,aAAO;AAAA,IACT;AAGA,QACE,2CAA2C,KAAK,SAAS,KACzD,CAAC,cAAc,SAAS,QAAQ,KAChC,CAAC,cAAc,SAAS,OAAO,GAC/B;AACA,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AACF;AAMO,SAAS,uBAAuB,KAAa,OAAwB;AAC1E,QAAM,WAAW,IAAI,YAAY;AACjC,QAAM,cAAc,SAAS,IAAI,YAAY;AAG7C,MACE,sCAAsC,KAAK,QAAQ,KACnD,mCAAmC,KAAK,UAAU,GAClD;AACA,WAAO;AAAA,EACT;AAGA,MACE,uDAAuD,KAAK,QAAQ,KACpE,yCAAyC,KAAK,UAAU,GACxD;AACA,WAAO;AAAA,EACT;AAGA,MAAI,wDAAwD,KAAK,QAAQ,GAAG;AAC1E,WAAO;AAAA,EACT;AAGA,MAAI,0CAA0C,KAAK,QAAQ,GAAG;AAC5D,WAAO;AAAA,EACT;AAGA,MAAI,+BAA+B,KAAK,QAAQ,GAAG;AACjD,WAAO;AAAA,EACT;AAGA,MAAI,YAAY,KAAK,QAAQ,GAAG;AAC9B,WAAO;AAAA,EACT;AAGA,SAAO;AACT;;;AG1zBA,SAAS,YAAAC,WAAU,UAAAC,SAAQ,SAAAC,cAAa;AACxC,SAAS,QAAAC,aAAY;;;ACDrB,SAAS,KAAAC,UAAS;AAsBX,IAAM,kBAAkBA,GAAE,OAAO;AAAA;AAAA,EAEtC,OAAOA,GAAE,OAAO;AAAA;AAAA,EAEhB,MAAMA,GAAE,OAAO;AAAA;AAAA,EAEf,MAAMA,GAAE,OAAO;AAAA;AAAA,EAEf,aAAaA,GAAE,MAAMA,GAAE,OAAO,CAAC;AACjC,CAAC;AAYM,IAAM,sBAAsBA,GAAE,OAAO;AAAA;AAAA,EAE1C,SAASA,GAAE,QAAQ,CAAC;AAAA;AAAA,EAEpB,SAASA,GAAE,OAAO;AAAA;AAAA,EAElB,WAAWA,GAAE,OAAO;AAAA;AAAA,EAEpB,OAAOA,GAAE,OAAOA,GAAE,OAAO,GAAG,eAAe;AAC7C,CAAC;AAsDM,SAAS,oBAAoB,SAAsC;AACxE,SAAO;AAAA,IACL,SAAS;AAAA,IACT;AAAA,IACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IAClC,OAAO,CAAC;AAAA,EACV;AACF;;;
ADnGO,IAAM,kBAAN,MAAsB;AAAA,EACV;AAAA,EAEjB,YAAY,SAAiB;AAC3B,SAAK,eAAeC,MAAK,SAAS,WAAW;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAA4B;AAChC,UAAMC,OAAM,KAAK,cAAc,EAAE,WAAW,KAAK,CAAC;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAgB,SAA0B;AACxC,WAAOD,MAAK,KAAK,cAAc,GAAG,OAAO,gBAAgB;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,KAAK,SAA+C;AACxD,UAAM,eAAe,KAAK,gBAAgB,OAAO;AAEjD,UAAM,SAAS,MAAM,KAAK,WAAW,YAAY;AACjD,QAAI,CAAC,QAAQ;AACX,aAAO,oBAAoB,OAAO;AAAA,IACpC;AAEA,UAAM,UAAU,MAAME,UAAS,cAAc,OAAO;AACpD,QAAI;AACJ,QAAI;AACF,eAAS,KAAK,MAAM,OAAO;AAAA,IAC7B,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,+BAA+B,YAAY,KACzC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CACvD;AAAA,MACF;AAAA,IACF;AAEA,UAAM,SAAS,oBAAoB,UAAU,MAAM;AACnD,QAAI,CAAC,OAAO,SAAS;AACnB,YAAM,IAAI,MAAM,uBAAuB,YAAY,KAAK,OAAO,MAAM,OAAO,EAAE;AAAA,IAChF;AAGA,WAAO,KAAK,gBAAgB,OAAO,MAAM,OAAO;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,KAAK,UAA6C;AACtD,UAAM,eAAe,KAAK,gBAAgB,SAAS,OAAO;AAG1D,UAAM,SAAS;AAAA,MACb,GAAG;AAAA,MACH,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IACpC;AAEA,UAAM,gBAAgB,cAAc,KAAK,UAAU,QAAQ,MAAM,CAAC,CAAC;AAAA,EACrE;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,OAAO,SAAiC;AAC5C,UAAM,eAAe,KAAK,gBAAgB,OAAO;AACjD,UAAM,EAAE,OAAO,IAAI,MAAM,OAAO,aAAkB;AAElD,UAAM,SAAS,MAAM,KAAK,WAAW,YAAY;AACjD,QAAI,QAAQ;AACV,YAAM,OAAO,YAAY;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,WAAWC,OAAgC;AACvD,QAAI;AACF,YAAMC,QAAOD,KAAI;AACjB,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,gBACN,MACA,SACoB;AACpB,UAAM,QAAwC,CAAC;AAE/C,eAAW,CAACA,OAAM,KAAK,KAAK,OAAO,QAAQ,KAAK,KAAK,GAAG;AACtD,YAAMA,KAAI,IAAI;AAAA,QACZ,OAAO,MAAM;AAAA,QACb,MAAM,MAAM;AAAA,QACZ,MAAM,MAAM;AAAA,QACZ,aAAa,MAAM,YAAY,IAAI,CAAC,OAAO,iBAAiB,EAAE,CAAC;AAAA,MACjE;AAAA,IACF;AAEA,WAAO;AAAA,MACL,SAAS;AAAA,MACT;AAAA,MACA,WAAW,KAAK;AAAA,MAChB;AAAA,IACF;AAAA,EACF;AACF;;;AE7HO,IAAM,kBAAN,MAAsB;AAAA,EAC3B,gBAAgB,MAAc,YAAoB,UAAwC;AACxF,UAAM,QAAQ,KAAK,MAAM,IAAI;AAG7B,QAAI,YAAY;AAChB,QAAI,OAAyB;AAE7B,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,YAAM,OAAO,MAAM,CAAC,KAAK;AAEzB,UAAI,KAAK,SAAS,YAAY,UAAU,EAAE,GAAG;AAC3C,oBAAY,IAAI;AAChB,eAAO;AACP;AAAA,MACF;AAEA,UAAI,KAAK,SAAS,SAAS,UAAU,EAAE,GAAG;AACxC,oBAAY,IAAI;AAChB,eAAO;AACP;AAAA,MACF;AAGA,UAAI,KAAK,MAAM,IAAI,OAAO,gBAAgB,UAAU,aAAa,CAAC,GAAG;AACnE,oBAAY,IAAI;AAChB,eAAO;AACP;AAAA,MACF;AAGA,UAAI,KAAK,MAAM,IAAI,OAAO,WAAW,UAAU,aAAa,CAAC,GAAG;AAC9D,oBAAY,IAAI;AAChB,eAAO;AACP;AAAA,MACF;AAGA,UAAI,KAAK,MAAM,IAAI,OAAO,wBAAwB,UAAU,OAAO,CAAC,GAAG;AACrE,oBAAY,IAAI;AAChB,eAAO;AACP;AAAA,MACF;AAAA,IACF;AAEA,QAAI,cAAc,GAAI,QAAO;AAG7B,QAAI,UAAU;AACd,QAAI,aAAa;AACjB,QAAI,kBAAkB;AAGtB,QAAI,SAAS,QAAQ;AACnB,YAAME,aAAY,MAAM,YAAY,CAAC,KAAK;AAC1C,UAAI,CAACA,WAAU,SAAS,GAAG,KAAKA,WAAU,SAAS,GAAG,GAAG;AAEvD,kBAAU;AACV,cAAMC,eAAcD;AACpB,cAAME,aAAY,KAAK,iBAAiBF,YAAW,YAAY,IAAI;AACnE,eAAO;AAAA,UACL;AAAA,UACA,MAAM;AAAA,UACN,WAAAE;AAAA,UACA,aAAAD;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,QAAI,gBAAgB;AACpB,QAAI,gBAAgB;AACpB,QAAI,oBAAoB;AACxB,QAAI,qBAAqB;AAEzB,aAAS,IAAI,YAAY,GAAG,IAAI,MAAM,QAAQ,KAAK;AACjD,YAAM,OAAO,MAAM,CAAC,KAAK;AACzB,UAAI,sBAAsB;AAE1B,eAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,cAAM,OAAO,KAAK,CAAC;AACnB,cAAM,WAAW,IAAI,IAAI,KAAK,IAAI,CAAC,IAAI;AACvC,cAAM,WAAW,IAAI,KAAK,SAAS,IAAI,KAAK,IAAI,CAAC,IAAI;AAGrD,YAAI,aAAa,SAAS,iBAAiB,iBAAiB,oBAAoB;AAC9E;AAAA,QACF;AAGA,YAAI,oBAAoB;AACtB,cAAI,SAAS,OAAO,aAAa,KAAK;AACpC,iCAAqB;AACrB;AAAA,UACF;AACA;AAAA,QACF;AAGA,YAAI,qBAAqB;AACvB;AAAA,QACF;AAGA,YAAI,eAAe;AACjB,cAAI,SAAS,IAAK,iBAAgB;AAClC;AAAA,QACF;AACA,YAAI,eAAe;AACjB,cAAI,SAAS,IAAK,iBAAgB;AAClC;AAAA,QACF;AACA,YAAI,mBAAmB;AACrB,cAAI,SAAS,IAAK,qBAAoB;AACtC;AAAA,QACF;AAGA,YAAI,SAAS,OAAO,
aAAa,KAAK;AACpC,+BAAqB;AACrB;AACA;AAAA,QACF;AACA,YAAI,SAAS,OAAO,aAAa,KAAK;AACpC,gCAAsB;AACtB;AAAA,QACF;AACA,YAAI,SAAS,KAAK;AAChB,0BAAgB;AAChB;AAAA,QACF;AACA,YAAI,SAAS,KAAK;AAChB,0BAAgB;AAChB;AAAA,QACF;AACA,YAAI,SAAS,KAAK;AAChB,8BAAoB;AACpB;AAAA,QACF;AAGA,YAAI,SAAS,KAAK;AAChB;AACA,4BAAkB;AAAA,QACpB;AACA,YAAI,SAAS,IAAK;AAAA,MACpB;AAEA,UAAI,mBAAmB,eAAe,GAAG;AACvC,kBAAU,IAAI;AACd;AAAA,MACF;AAAA,IACF;AAEA,UAAM,cAAc,MAAM,MAAM,YAAY,GAAG,OAAO,EAAE,KAAK,IAAI;AAGjE,UAAM,YAAY,MAAM,YAAY,CAAC,KAAK;AAC1C,UAAM,YAAY,KAAK,iBAAiB,WAAW,YAAY,IAAI;AAEnE,WAAO;AAAA,MACL;AAAA,MACA,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,iBAAiB,MAAc,MAAc,MAAsB;AAEzE,UAAM,MAAM,KACT,QAAQ,iBAAiB,EAAE,EAC3B,QAAQ,gBAAgB,EAAE,EAC1B,KAAK;AAER,QAAI,SAAS,YAAY;AAGvB,YAAM,QAAQ,IAAI,MAAM,6CAA6C;AACrE,UAAI,QAAQ,CAAC,MAAM,UAAa,MAAM,CAAC,EAAE,SAAS,EAAG,QAAO,MAAM,CAAC,EAAE,KAAK;AAAA,IAC5E;AAEA,QAAI,SAAS,SAAS;AACpB,aAAO,SAAS,IAAI;AAAA,IACtB;AAEA,QAAI,SAAS,aAAa;AACxB,aAAO,aAAa,IAAI;AAAA,IAC1B;AAEA,QAAI,SAAS,QAAQ;AAEnB,YAAM,YAAY,IAAI,MAAM,IAAI,OAAO,YAAY,IAAI,oBAAoB,CAAC;AAC5E,UAAI,YAAY,CAAC,MAAM,UAAa,UAAU,CAAC,EAAE,SAAS,GAAG;AAC3D,eAAO,QAAQ,UAAU,CAAC,CAAC;AAAA,MAC7B;AACA,aAAO,QAAQ,IAAI;AAAA,IACrB;AAEA,QAAI,SAAS,SAAS;AAIpB,YAAM,aAAa,IAAI;AAAA,QACrB,IAAI;AAAA,UACF,yBAAyB,IAAI;AAAA,QAC/B;AAAA,MACF;AACA,YAAM,aAAa,aAAa,CAAC;AACjC,UAAI,eAAe,UAAa,eAAe,GAAI,QAAO,WAAW,KAAK;AAG1E,aAAO,SAAS,IAAI;AAAA,IACtB;AAEA,WAAO;AAAA,EACT;AACF;;;ACvNA,IAAME,UAAS,aAAa,gBAAgB;AAsB5C,IAAM,qBAAkE;AAAA,EACtE,UAAU;AAAA,IACR,yBAAyB;AAAA;AAAA,IACzB,eAAe;AAAA,IACf,SAAS;AAAA;AAAA,IACT,QAAQ;AAAA;AAAA,IACR,mBAAmB;AAAA;AAAA,IACnB,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,WAAW;AAAA;AAAA,IACX,OAAO;AAAA,EACT;AAAA,EACA,gBAAgB;AAAA,IACd,yBAAyB;AAAA,IACzB,eAAe;AAAA,IACf,SAAS;AAAA,IACT,QAAQ;AAAA;AAAA,IACR,mBAAmB;AAAA;AAAA,IACnB,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,WAAW;AAAA;AAAA,IACX,OAAO;AAAA,EACT;AAAA,EACA,YAAY;AAAA,IACV,yBAAyB;AAAA,IACzB,eAAe;AAAA,IACf,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,mBAAmB;AAAA,IACnB,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,WAAW;AAAA;AAAA,IACX,OAAO;AAAA,EACT;AAAA,EACA,YAAY;AAAA,IACV,yBAAyB;AAAA,IACzB,eAAe;AAAA,IACf,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,mBAAmB;AAAA,IACnB,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,WAAW;AAAA;AAAA,IACX,OAAO;AAAA,EACT;AAAA,EACA,WAAW;AAAA,IACT,yBAAyB;AAAA,IACzB,eAAe;AAAA,IACf,SAAS;AAAA,IACT,QAAQ;AAAA;AAAA,IACR,mBAAmB;AAAA,IACnB,MAAM;AAAA;AAAA,IACN,QAAQ;AAAA,IACR,WAAW;AAAA;AAAA,IACX,OAAO;AAAA,EACT;AACF;AAGA,IAAM,qBAAkE;AAAA,EACtE,EAAE,SAAS,gBAAgB,OAAO,CAAC,WAAW,aAAa,YAAY,EAAE;AAAA,EACzE,EAAE,SAAS,aAAa,OAAO,CAAC,MAAM,EAAE;AAAA,EACxC,EAAE,SAAS,YAAY,OAAO,CAAC,KAAK,EAAE;AAAA,EACtC,EAAE,SAAS,cAAc,OAAO,CAAC,SAAS,WAAW,UAAU,EAAE;AAAA,EACjE,EAAE,SAAS,YAAY,OAAO,CAAC,OAAO,SAAS,UAAU,MAAM,EAAE;AAAA,EACjE,EAAE,SAAS,aAAa,OAAO,CAAC,QAAQ,UAAU,SAAS,EAAE;AAAA,EAC7D,EAAE,SAAS,mBAAmB,OAAO,CAAC,cAAc,IAAI,EAAE;AAAA,EAC1D,EAAE,SAAS,YAAY,OAAO,CAAC,OAAO,gBAAgB,gBAAgB,EAAE;AAC1E;AAGA,IAAM,kBAAkB;AAAA,EACtB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,0BAA0B;AAAA,EAC9B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,sBAAsB;AAAA,EAC1B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,qBAAqB;AAAA,EACzB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,sBAAsB;AAAA,EAC1B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAMA,SAAS,qBAAqB,OAAmC;AAC/D,QAAM,IAAI,MAAM,YAAY;AAC5B,QAAM,UAA8B,CAAC;AAGrC,MAAI,wBAAwB,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,GAAG;AAClD,YAAQ,KAAK,EAAE,QAAQ,kBAAkB,YAAY,IAAI,CAAC;AAAA,EAC5D;AAEA,MAAI,mBAAmB,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,GAAG;AAC7C,YAAQ,KAAK,EAAE,QAAQ
,aAAa,YAAY,KAAK,CAAC;AAAA,EACxD;AAEA,MAAI,oBAAoB,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,GAAG;AAC9C,YAAQ,KAAK,EAAE,QAAQ,cAAc,YAAY,IAAI,CAAC;AAAA,EACxD;AAEA,MAAI,gBAAgB,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,GAAG;AAC1C,YAAQ,KAAK,EAAE,QAAQ,UAAU,YAAY,KAAK,CAAC;AAAA,EACrD;AAEA,MAAI,oBAAoB,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,GAAG;AAC9C,YAAQ,KAAK,EAAE,QAAQ,cAAc,YAAY,IAAI,CAAC;AAAA,EACxD;AAGA,MAAI,QAAQ,WAAW,GAAG;AACxB,YAAQ,KAAK,EAAE,QAAQ,UAAU,YAAY,IAAI,CAAC;AAAA,EACpD;AAGA,SAAO,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU;AAC3D;AAKA,SAAS,iBAAiB,SAA0C;AAClE,SAAO,QAAQ,CAAC,GAAG,UAAU;AAC/B;AAMA,SAAS,6BAA6B,QAAmC;AACvE,UAAQ,QAAQ;AAAA,IACd,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,EACX;AACF;AAMA,IAAM,cAAc;AAAA,EAClB,MAAM,EAAE,GAAG,IAAI,cAAc,KAAK,WAAW,IAAI;AAAA,EACjD,KAAK,EAAE,GAAG,IAAI,cAAc,MAAM,WAAW,KAAK;AACpD;AAKA,SAAS,kBAAkB,SAAyC;AAClE,QAAM,WAAW,QAAQ,OAAO,CAAC,MAAM,SAAS,EAAE,QAAQ,EAAE;AAC5D,SAAO,WAAW,QAAQ,SAAS,IAAI,QAAQ;AACjD;AAEO,IAAM,gBAAN,MAAoB;AAAA,EACR;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEjB,YACE,YACA,iBACA,kBACA,cACA;AACA,SAAK,aAAa;AAClB,SAAK,kBAAkB;AACvB,SAAK,kBAAkB,IAAI,gBAAgB;AAC3C,SAAK,mBAAmB;AACxB,SAAK,aAAa,oBAAI,IAAI;AAC1B,SAAK,eAAe;AAGpB,QAAI,kBAAkB;AACpB,WAAK,+BAA+B,iBAAiB,oBAAoB,CAAC,UAAU;AAElF,aAAK,WAAW,OAAO,MAAM,OAAO;AAAA,MACtC,CAAC;AAAA,IACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,UAAgB;AACd,SAAK,+BAA+B;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,kBAAkB,SAA6C;AAC3E,QAAI,CAAC,KAAK,iBAAkB,QAAO;AAEnC,UAAM,SAAS,KAAK,WAAW,IAAI,OAAO;AAC1C,QAAI,WAAW,OAAW,QAAO;AAEjC,UAAM,QAAQ,MAAM,KAAK,iBAAiB,UAAU,OAAO;AAC3D,UAAM,SAAS,SAAS;AACxB,SAAK,WAAW,IAAI,SAAS,MAAM;AACnC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,oBAAoB,aAAuC;AACjE,UAAM,gBAAgB,WAAW,QAAQ,IAAI,wBAAwB,KAAK,KAAK;AAC/E,UAAM,kBAAkB,WAAW,QAAQ,IAAI,0BAA0B,KAAK,KAAK;AAEnF,QAAI,eAAe,cAAe,QAAO;AACzC,QAAI,eAAe,gBAAiB,QAAO;AAC3C,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,OAAO,OAA6C;AACxD,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,OAAO,MAAM,QAAQ,KAAK,cAAc,eAAe;AAC7D,UAAM,QAAQ,MAAM,SAAS,KAAK,cAAc,gBAAgB;AAChE,UAAM,SAAS,MAAM,UAAU,CAAC;AAChC,UAAM,SAAS,MAAM,UAAU;AAG/B,UAAM,UAAU,qBAAqB,MAAM,KAAK;AAGhD,UAAM,gBACJ,MAAM,WAAW,SACb,6BAA6B,MAAM,MAAM,IACzC,iBAAiB,OAAO;AAE9B,IAAAA,QAAO;AAAA,MACL;AAAA,QACE,OAAO,MAAM;AAAA,QACb;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,QAAQ;AAAA,QACR,YAAY,MAAM;AAAA,QAClB,uBAAuB;AAAA,QACvB,cAAc,MAAM;AAAA,MACtB;AAAA,MACA;AAAA,IACF;AAEA,QAAI,aAA6B,CAAC;AAClC,QAAI,cAAc;AAGlB,UAAM,aAAa,QAAQ;AAE3B,QAAI,SAAS,UAAU;AAGrB,YAAM,aAAa,MAAM,KAAK,gBAAgB,MAAM,OAAO,QAAQ,UAAU;AAC7E,oBAAc,WAAW,SAAS,IAAK,WAAW,CAAC,GAAG,SAAS,IAAK;AAEpE,mBAAa,KAAK,yBAAyB,YAAY,MAAM,SAAS,EAAE,MAAM,GAAG,UAAU;AAAA,IAC7F,WAAW,SAAS,OAAO;AAEzB,mBAAa,MAAM,KAAK,UAAU,MAAM,OAAO,QAAQ,UAAU;AAAA,IACnE,OAAO;AAEL,YAAM,eAAe,MAAM,KAAK;AAAA,QAC9B,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA,MAAM;AAAA,MACR;AACA,mBAAa,aAAa;AAC1B,oBAAc,aAAa;AAAA,IAC7B;AAIA,QAAI,MAAM,iBAAiB,QAAW;AACpC,UAAI,SAAS,OAAO;AAClB,QAAAA,QAAO;AAAA,UACL,EAAE,OAAO,MAAM,OAAO,cAAc,MAAM,aAAa;AAAA,UACvD;AAAA,QACF;AAAA,MACF,WAAW,cAAc,MAAM,cAAc;AAC3C,cAAMC,UAAS,KAAK,IAAI,IAAI;AAC5B,QAAAD,QAAO;AAAA,UACL;AAAA,YACE,OAAO,MAAM;AAAA,YACb;AAAA,YACA;AAAA,YACA,cAAc,MAAM;AAAA,YACpB,QAAAC;AAAA,UACF;AAAA,UACA;AAAA,QACF;AAEA,eAAO;AAAA,UACL,OAAO,MAAM;AAAA,UACb;AAAA,UACA;AAAA,UACA,SAAS,CAAC;AAAA,UACV,cAAc;AAAA,UACd,QAAAA;AAAA,UACA,YAAY,KAAK,oBAAoB,WAAW;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,UAAM,iBAAiB,KAAK,oBAAoB,YAAY,MAAM,KAAK;AACvE,UAAM,mBAAmB,eAAe,MAAM,GAAG,KAAK;AAGtD,UAAM,SAAS,oBAAI,IAA8B;AACjD,QAAI,WAAW,gBAAgB,WAAW,QAAQ;AAChD,YAAM,WAAW,IAAI,IAAI,iBAAiB,IAAI,CAAC,MAAM,EAAE,SAAS,OAAO,CAAC;AACx
E,iBAAW,WAAW,UAAU;AAC9B,eAAO,IAAI,SAAS,MAAM,KAAK,kBAAkB,OAAO,CAAC;AAAA,MAC3D;AAAA,IACF;AAGA,UAAM,kBAAkB,iBAAiB,IAAI,CAAC,MAAM;AAClD,YAAM,QAAQ,OAAO,IAAI,EAAE,SAAS,OAAO,KAAK;AAChD,aAAO,KAAK,sBAAsB,GAAG,MAAM,OAAO,QAAQ,KAAK;AAAA,IACjE,CAAC;AAED,UAAM,SAAS,KAAK,IAAI,IAAI;AAC5B,UAAM,aAAa,SAAS,QAAQ,KAAK,oBAAoB,WAAW,IAAI;AAE5E,IAAAD,QAAO;AAAA,MACL;AAAA,QACE,OAAO,MAAM;AAAA,QACb;AAAA,QACA,aAAa,gBAAgB;AAAA,QAC7B,aAAa,WAAW;AAAA,QACxB,SAAS,QAAQ,IAAI,CAAC,MAAM,GAAG,EAAE,MAAM,IAAI,EAAE,WAAW,QAAQ,CAAC,CAAC,GAAG;AAAA,QACrE,aAAa,SAAS,QAAQ,cAAc;AAAA,QAC5C;AAAA,QACA;AAAA,MACF;AAAA,MACA;AAAA,IACF;AAEA,WAAO;AAAA,MACL,OAAO,MAAM;AAAA,MACb;AAAA,MACA;AAAA,MACA,SAAS;AAAA,MACT,cAAc,gBAAgB;AAAA,MAC9B;AAAA,MACA;AAAA,MACA,aAAa,SAAS,QAAQ,cAAc;AAAA,IAC9C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,oBAAoB,SAAyB,OAA+B;AAClF,UAAM,WAAW,oBAAI,IAA0B;AAC/C,UAAM,aAAa,MAChB,YAAY,EACZ,MAAM,KAAK,EACX,OAAO,CAACE,OAAMA,GAAE,SAAS,CAAC;AAE7B,eAAW,UAAU,SAAS;AAG5B,YAAM,UAAU,OAAO,SAAS;AAChC,YAAM,SAAS,OAAO,SAAS,QAAQ,OAAO,SAAS,OAAO,OAAO;AACrE,YAAM,YAAY,GAAG,OAAO,IAAI,MAAM;AAEtC,YAAM,WAAW,SAAS,IAAI,SAAS;AACvC,UAAI,CAAC,UAAU;AACb,iBAAS,IAAI,WAAW,MAAM;AAAA,MAChC,OAAO;AAEL,cAAM,oBAAoB,KAAK,gBAAgB,SAAS,SAAS,UAAU;AAC3E,cAAM,eAAe,KAAK,gBAAgB,OAAO,SAAS,UAAU;AAGpE,cAAM,oBAAoB,oBAAoB,SAAS;AACvD,cAAM,eAAe,eAAe,OAAO;AAE3C,YAAI,eAAe,mBAAmB;AACpC,mBAAS,IAAI,WAAW,MAAM;AAAA,QAChC;AAAA,MACF;AAAA,IACF;AAGA,WAAO,MAAM,KAAK,SAAS,OAAO,CAAC,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAAA,EACvE;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,SAAiB,YAA8B;AACrE,UAAM,eAAe,QAAQ,YAAY;AACzC,WAAO,WAAW,OAAO,CAAC,SAAS,aAAa,SAAS,IAAI,CAAC,EAAE;AAAA,EAClE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWQ,yBAAyB,SAAyB,WAAoC;AAC5F,QAAI,QAAQ,WAAW,EAAG,QAAO,CAAC;AAGlC,UAAM,SAAS,CAAC,GAAG,OAAO,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAG5D,UAAM,QAAQ,OAAO,CAAC;AACtB,UAAM,OAAO,OAAO,OAAO,SAAS,CAAC;AACrC,QAAI,UAAU,UAAa,SAAS,OAAW,QAAO,CAAC;AAEvD,UAAM,WAAW,MAAM;AACvB,UAAM,WAAW,KAAK;AACtB,UAAM,QAAQ,WAAW;AAIzB,UAAM,aACJ,QAAQ,IACJ,OAAO,IAAI,CAAC,OAAO;AAAA,MACjB,GAAG;AAAA,MACH,OAAO,KAAK,OAAQ,EAAE,QAAQ,YAAY,QAAS,GAAO,IAAI;AAAA,IAChE,EAAE,IACF;AAGN,QAAI,cAAc,QAAW;AAC3B,aAAO,WAAW,OAAO,CAAC,MAAM,EAAE,SAAS,SAAS;AAAA,IACtD;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,gBACZ,OACA,QACA,OACyB;AACzB,UAAM,cAAc,MAAM,KAAK,gBAAgB,MAAM,KAAK;AAC1D,UAAM,UAA0B,CAAC;AAEjC,eAAW,WAAW,QAAQ;AAC5B,YAAM,OAAO,MAAM,KAAK,WAAW,OAAO,SAAS,aAAa,KAAK;AACrE,cAAQ;AAAA,QACN,GAAG,KAAK,IAAI,CAAC,OAAO;AAAA,UAClB,IAAI,EAAE;AAAA,UACN,OAAO,EAAE;AAAA;AAAA,UACT,SAAS,EAAE;AAAA,UACX,UAAU,EAAE;AAAA,QACd,EAAE;AAAA,MACJ;AAAA,IACF;AAEA,WAAO,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK,EAAE,MAAM,GAAG,KAAK;AAAA,EACjE;AAAA,EAEA,MAAc,UACZ,OACA,QACA,OACyB;AACzB,UAAM,UAA0B,CAAC;AAEjC,eAAW,WAAW,QAAQ;AAC5B,UAAI;AACF,cAAM,OAAO,MAAM,KAAK,WAAW,eAAe,SAAS,OAAO,KAAK;AACvE,gBAAQ;AAAA,UACN,GAAG,KAAK,IAAI,CAAC,OAAO;AAAA,YAClB,IAAI,EAAE;AAAA,YACN,OAAO,EAAE;AAAA,YACT,SAAS,EAAE;AAAA,YACX,UAAU,EAAE;AAAA,UACd,EAAE;AAAA,QACJ;AAAA,MACF,QAAQ;AAAA,MAIR;AAAA,IACF;AAEA,WAAO,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK,EAAE,MAAM,GAAG,KAAK;AAAA,EACjE;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,yBACZ,OACA,QACA,OACA,WAC2D;AAE3D,UAAM,UAAU,qBAAqB,KAAK;AAI1C,UAAM,mBAAmB,MAAM,KAAK,gBAAgB,OAAO,QAAQ,QAAQ,CAAC;AAG5E,UAAM,kBAAkB,oBAAI,IAAoB;AAChD,qBAAiB,QAAQ,CAAC,MAAM;AAC9B,sBAAgB,IAAI,EAAE,IAAI,EAAE,KAAK;AAAA,IACnC,CAAC;AAGD,UAAM,cAAc,iBAAiB,SAAS,IAAK,iBAAiB,CAAC,GAAG,SAAS,IAAK;AAItF,UAAM,gBAAgB,KAAK,yBAAyB,gBAAgB;AAGpE,UAAM,aAAa,MAAM,KAAK,UAAU,OAAO,QAAQ,QAAQ,CAAC;AAGhE,UAAM,cAAc,oBAAI,IAAoB;AAC5C,UAAM,WAAW,oBAAI,IAAoB;AACzC,UAAM,UAAU,oBAAI,IAA0B;AAE9C,kBAAc,QAAQ,CAAC,GAAG,MAAM;AAC9B,kBAAY,IAA
I,EAAE,IAAI,IAAI,CAAC;AAC3B,cAAQ,IAAI,EAAE,IAAI,CAAC;AAAA,IACrB,CAAC;AAED,eAAW,QAAQ,CAAC,GAAG,MAAM;AAC3B,eAAS,IAAI,EAAE,IAAI,IAAI,CAAC;AACxB,UAAI,CAAC,QAAQ,IAAI,EAAE,EAAE,GAAG;AACtB,gBAAQ,IAAI,EAAE,IAAI,CAAC;AAAA,MACrB;AAAA,IACF,CAAC;AAGD,UAAM,YAgBD,CAAC;AAGN,UAAM,cAAc,kBAAkB,CAAC,GAAG,QAAQ,OAAO,CAAC,CAAC;AAC3D,UAAM,EAAE,GAAG,cAAc,UAAU,IAAI,YAAY,WAAW;AAE9D,eAAW,CAAC,IAAI,MAAM,KAAK,SAAS;AAClC,YAAM,aAAa,YAAY,IAAI,EAAE,KAAK;AAC1C,YAAM,UAAU,SAAS,IAAI,EAAE,KAAK;AACpC,YAAM,iBAAiB,gBAAgB,IAAI,EAAE;AAE7C,YAAM,YAAY,eAAe,WAAW,gBAAgB,IAAI,cAAc;AAC9E,YAAM,SAAS,YAAY,WAAW,aAAa,IAAI,WAAW;AAGlE,YAAM,gBAAgB,KAAK;AAAA;AAAA,QAEzB,OAAO,SAAS,UAAU;AAAA,QAC1B;AAAA,MACF;AAGA,YAAM,iBAAiB,KAAK,yBAAyB,OAAO,MAAM;AAGlE,YAAM,kBAAkB,KAAK,mBAAmB,OAAO,MAAM;AAG7D,YAAM,mBAAmB,KAAK,oBAAoB,OAAO,MAAM;AAE/D,YAAM,WAUF;AAAA,QACF;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAEA,UAAI,eAAe,UAAU;AAC3B,iBAAS,aAAa;AAAA,MACxB;AACA,UAAI,YAAY,UAAU;AACxB,iBAAS,UAAU;AAAA,MACrB;AACA,UAAI,mBAAmB,QAAW;AAChC,iBAAS,iBAAiB;AAAA,MAC5B;AAEA,gBAAU,KAAK;AAAA,QACb;AAAA,QACA,QACG,YAAY,UACb,gBACA,iBACA,kBACA;AAAA,QACF;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAGA,UAAM,SAAS,UAAU,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK,EAAE,MAAM,GAAG,KAAK;AAGzE,QAAI;AAEJ,QAAI,OAAO,SAAS,GAAG;AACrB,YAAM,QAAQ,OAAO,CAAC;AACtB,YAAM,OAAO,OAAO,OAAO,SAAS,CAAC;AACrC,UAAI,UAAU,UAAa,SAAS,QAAW;AAC7C,4BAAoB,OAAO,IAAI,CAAC,OAAO;AAAA,UACrC,GAAG,EAAE;AAAA,UACL,OAAO,EAAE;AAAA,UACT,iBAAiB,EAAE;AAAA,QACrB,EAAE;AAAA,MACJ,OAAO;AACL,cAAM,WAAW,MAAM;AACvB,cAAM,WAAW,KAAK;AACtB,cAAM,QAAQ,WAAW;AAEzB,YAAI,QAAQ,GAAG;AAEb,8BAAoB,OAAO,IAAI,CAAC,OAAO;AAAA,YACrC,GAAG,EAAE;AAAA,YACL,OAAO,KAAK,OAAQ,EAAE,QAAQ,YAAY,QAAS,GAAO,IAAI;AAAA,YAC9D,iBAAiB,EAAE;AAAA,UACrB,EAAE;AAAA,QACJ,OAAO;AAEL,8BAAoB,OAAO,IAAI,CAAC,OAAO;AAAA,YACrC,GAAG,EAAE;AAAA,YACL,OAAO,EAAE;AAAA,YACT,iBAAiB,EAAE;AAAA,UACrB,EAAE;AAAA,QACJ;AAAA,MACF;AAAA,IACF,OAAO;AACL,0BAAoB,CAAC;AAAA,IACvB;AAGA,QAAI,cAAc,QAAW;AAC3B,0BAAoB,kBAAkB,OAAO,CAAC,MAAM,EAAE,SAAS,SAAS;AAAA,IAC1E;AAEA,WAAO,EAAE,SAAS,mBAAmB,YAAY;AAAA,EACnD;AAAA,EAEA,MAAM,gBAAgB,OAAoB,UAA8C;AACtF,WAAO,KAAK,OAAO;AAAA,MACjB,GAAG;AAAA,MACH,QAAQ;AAAA,IACV,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,iBAAiB,UAA8B,SAAqC;AAE1F,QAAI;AACJ,YAAQ,UAAU;AAAA,MAChB,KAAK;AACH,oBAAY;AACZ;AAAA,MACF,KAAK;AACH,oBAAY;AACZ;AAAA,MACF,KAAK;AACH,oBAAY;AACZ;AAAA,MACF,KAAK;AACH,oBAAY;AACZ;AAAA,MACF,KAAK;AACH,oBAAY;AACZ;AAAA,MACF,KAAK;AACH,oBAAY,WAAW,QAAQ,IAAI,wBAAwB,KAAK,KAAK;AACrE;AAAA,MACF,KAAK;AACH,oBAAY;AACZ;AAAA,MACF,KAAK;AACH,oBAAY;AACZ;AAAA,MACF;AACE,oBAAY;AAAA,IAChB;AAGA,QAAI,qBAAqB;AACzB,QAAI,kBAAkB;AAEtB,eAAW,EAAE,QAAQ,WAAW,KAAK,SAAS;AAC5C,YAAM,eAAe,mBAAmB,MAAM;AAC9C,YAAM,aAAa,aAAa,YAAY,OAAO,KAAK;AACxD,4BAAsB,aAAa;AACnC,yBAAmB;AAAA,IACrB;AAEA,UAAM,oBAAoB,kBAAkB,IAAI,qBAAqB,kBAAkB;AACvF,UAAM,aAAa,YAAY;AAG/B,QAAI,aAAa,QAAQ;AACvB,aAAO,KAAK,IAAI,YAAY,GAAG;AAAA,IACjC;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,mBAAmB,OAAe,QAA8B;AACtE,UAAM,MAAM,OAAO,SAAS;AAC5B,QAAI,QAAQ,UAAa,QAAQ,GAAI,QAAO;AAG5C,UAAM,UAAU,IAAI,YAAY,EAAE,QAAQ,eAAe,GAAG;AAG5D,UAAM,YAAY,oBAAI,IAAI;AAAA,MACxB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAGD,UAAM,aAAa,MAChB,YAAY,EACZ,MAAM,KAAK,EACX,OAAO,CAACA,OAAMA,GAAE,SAAS,KAAK,CAAC,UAAU,IAAIA,EAAC,CAAC;AAElD,QAAI,WAAW,WAAW,EAAG,QAAO;AAGpC,UAAM,gBAAgB,WAAW,OAAO,CAAC,SAAS,QAAQ,SAAS,IAAI,CAAC;AAExE,QAAI,cAAc,WAAW,EAAG,QAAO;AAIvC,UAAM,a
AAa,cAAc,SAAS,WAAW;AACrD,WAAO,IAAM,IAAM;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,oBAAoB,OAAe,QAA8B;AACvE,UAAMC,QAAO,OAAO,SAAS;AAC7B,QAAIA,UAAS,UAAaA,UAAS,GAAI,QAAO;AAG9C,UAAM,eAAeA,MAAK,YAAY,EAAE,QAAQ,eAAe,GAAG;AAGlE,UAAM,YAAY,oBAAI,IAAI;AAAA,MACxB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAGD,UAAM,aAAa,MAChB,YAAY,EACZ,MAAM,KAAK,EACX,OAAO,CAACD,OAAMA,GAAE,SAAS,KAAK,CAAC,UAAU,IAAIA,EAAC,CAAC;AAElD,QAAI,WAAW,WAAW,EAAG,QAAO;AAGpC,UAAM,gBAAgB,WAAW,OAAO,CAAC,SAAS,aAAa,SAAS,IAAI,CAAC;AAE7E,QAAI,cAAc,WAAW,EAAG,QAAO;AAIvC,UAAM,aAAa,cAAc,SAAS,WAAW;AACrD,WAAO,IAAM,IAAM;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,yBAAyB,OAAe,QAA8B;AAC5E,UAAMC,QAAO,OAAO,SAAS,QAAQ,OAAO,SAAS,OAAO;AAC5D,UAAM,UAAU,OAAO,QAAQ,YAAY;AAC3C,UAAM,YAAYA,MAAK,YAAY;AAGnC,eAAW,EAAE,SAAS,MAAM,KAAK,oBAAoB;AACnD,UAAI,QAAQ,KAAK,KAAK,GAAG;AAEvB,cAAM,yBAAyB,MAAM;AAAA,UACnC,CAAC,SAAS,UAAU,SAAS,IAAI,KAAK,QAAQ,SAAS,IAAI;AAAA,QAC7D;AAEA,YAAI,wBAAwB;AAC1B,iBAAO;AAAA,QACT,OAAO;AACL,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,sBACN,QACA,OACA,QACA,OACc;AACd,UAAM,WAAW,EAAE,GAAG,OAAO;AAG7B,UAAMA,QAAO,OAAO,SAAS,QAAQ,OAAO,SAAS,OAAO;AAE5D,UAAM,WAAW,OAAO,SAAS,UAAU;AAG3C,UAAM,WAAW,KAAK,0BAA0B,MAAM;AACtD,UAAM,aAAa,UAAU,QAAQ,KAAK,kBAAkB,OAAO,OAAO;AAE1E,aAAS,UAAU;AAAA,MACjB,MAAM,KAAK,UAAU,UAAU,QAAQ;AAAA,MACvC,MAAM;AAAA,MACN,WAAW,UAAU,aAAa;AAAA,MAClC,SAAS,KAAK,gBAAgB,OAAO,SAAS,KAAK;AAAA,MACnD,UAAU,GAAGA,KAAI,GAAG,WAAW,IAAI,OAAO,SAAS,SAAS,CAAC,KAAK,EAAE;AAAA,MACpE,iBAAiB,KAAK,wBAAwB,QAAQ,KAAK;AAAA,IAC7D;AAGA,QAAI,WAAW,gBAAgB,WAAW,QAAQ;AAEhD,YAAM,QAAQ,KAAK,kBAAkB,OAAOA,OAAM,UAAU;AAE5D,eAAS,UAAU;AAAA,QACjB,YAAY,KAAK,kBAAkB,OAAO,OAAO;AAAA,QACjD,YAAY,KAAK,eAAe,OAAO,OAAO;AAAA,QAC9C,iBAAiB,KAAK,gBAAgB,OAAO,SAAS,KAAK;AAAA,QAC3D;AAAA,MACF;AAAA,IACF;AAGA,QAAI,WAAW,QAAQ;AAErB,YAAM,cAAc,KAAK,wBAAwB,OAAOA,OAAM,UAAU;AAExE,eAAS,OAAO;AAAA,QACd,cAAc,UAAU,eAAe,OAAO;AAAA,QAC9C;AAAA,QACA,eAAe,KAAK,qBAAqB,OAAO,OAAO;AAAA,QACvD,OAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,0BAA0B,QAA4C;AAC5E,UAAMA,QAAO,OAAO,SAAS;AAC7B,QAAIA,UAAS,UAAaA,UAAS,GAAI,QAAO;AAE9C,UAAM,MAAMA,MAAK,MAAM,GAAG,EAAE,IAAI,KAAK;AACrC,UAAM,WACJ,QAAQ,QAAQ,QAAQ,QACpB,eACA,QAAQ,QAAQ,QAAQ,QACtB,eACA;AAGR,UAAM,aAAa,KAAK,kBAAkB,OAAO,OAAO;AACxD,QAAI,eAAe,GAAI,QAAO;AAE9B,WAAO,KAAK,gBAAgB,gBAAgB,OAAO,SAAS,YAAY,QAAQ;AAAA,EAClF;AAAA,EAEQ,kBAAkB,SAAyB;AAEjD,UAAM,YAAY,QAAQ,MAAM,6CAA6C;AAC7E,QAAI,YAAY,CAAC,MAAM,UAAa,UAAU,CAAC,MAAM,GAAI,QAAO,UAAU,CAAC;AAE3E,UAAM,aAAa,QAAQ,MAAM,6BAA6B;AAC9D,QAAI,aAAa,CAAC,MAAM,UAAa,WAAW,CAAC,MAAM,GAAI,QAAO,WAAW,CAAC;AAE9E,UAAM,aAAa,QAAQ,MAAM,6BAA6B;AAC9D,QAAI,aAAa,CAAC,MAAM,UAAa,WAAW,CAAC,MAAM,GAAI,QAAO,WAAW,CAAC;AAG9E,WAAO;AAAA,EACT;AAAA,EAEQ,UACN,UACA,UACoD;AAEpD,QAAI,SAAU,QAAO,SAAS;AAC9B,QAAI,aAAa,mBAAmB,aAAa;AAC/C,aAAO;AACT,WAAO;AAAA,EACT;AAAA,EAEQ,gBAAgB,SAAiB,OAAuB;AAE9D,UAAM,WAAW,QAAQ,MAAM,6BAA6B;AAC5D,QAAI,WAAW,CAAC,MAAM,UAAa,SAAS,CAAC,MAAM,GAAI,QAAO,SAAS,CAAC,EAAE,KAAK;AAE/E,UAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,UAAM,aAAa,MAChB,YAAY,EACZ,MAAM,KAAK,EACX,OAAO,CAACD,OAAMA,GAAE,SAAS,CAAC;AAG7B,UAAM,aAAa,CAAC,YAA6B;AAC/C,aACE,QAAQ,WAAW,SAAS,KAC5B,QAAQ,WAAW,SAAS,KAC5B,QAAQ,WAAW,YAAY,KAC/B,QAAQ,WAAW,OAAO;AAAA,IAE9B;AAGA,UAAM,YAAY,CAAC,YAA4B;AAC7C,YAAM,YAAY,QAAQ,YAAY;AACtC,aAAO,WAAW,OAAO,CAAC,SAAS,UAAU,SAAS,IAAI,CAAC,EAAE;AAAA,IAC/D;AAGA,UAAM,eAAe,CAAC,YAA6B;AACjD,UAAI,QAAQ,WAAW,EAAG,QAAO;AACjC,UAAI,QAAQ,WAAW,IAAI,KAAK,QAAQ,WAAW,IAAI,EAAG,Q
AAO;AAEjE,UAAI,QAAQ,WAAW,GAAG,KAAK,QAAQ,SAAS,EAAG,QAAO;AAE1D,aAAO,QAAQ,UAAU;AAAA,IAC3B;AAGA,QAAI,WAA0B;AAC9B,QAAI,YAAY;AAEhB,eAAW,QAAQ,OAAO;AACxB,YAAM,UAAU,KAAK,KAAK;AAC1B,UAAI,WAAW,OAAO,KAAK,CAAC,aAAa,OAAO,EAAG;AAEnD,UAAI,QAAQ,UAAU,OAAO;AAG7B,UAAI,SAAS,KAAK,OAAO,GAAG;AAC1B,iBAAS;AAAA,MACX;AAIA,UAAI,4BAA4B,KAAK,OAAO,GAAG;AAC7C,iBAAS;AAAA,MACX;AAEA,UAAI,QAAQ,WAAW;AACrB,oBAAY;AACZ,mBAAW;AAAA,MACb;AAAA,IACF;AAGA,QAAI,aAAa,QAAQ,aAAa,MAAM,YAAY,GAAG;AACzD,UAAI,SAAS,SAAS,KAAK;AACzB,cAAM,gBAAgB,SAAS,MAAM,eAAe;AACpD,YAAI,iBAAiB,cAAc,CAAC,EAAE,UAAU,MAAM,cAAc,CAAC,EAAE,UAAU,KAAK;AACpF,iBAAO,cAAc,CAAC,EAAE,KAAK;AAAA,QAC/B;AACA,eAAO,GAAG,SAAS,UAAU,GAAG,GAAG,CAAC;AAAA,MACtC;AACA,aAAO;AAAA,IACT;AAGA,eAAW,QAAQ,OAAO;AACxB,YAAM,UAAU,KAAK,KAAK;AAC1B,UAAI,WAAW,OAAO,KAAK,CAAC,aAAa,OAAO,EAAG;AAEnD,UAAI,QAAQ,SAAS,KAAK;AACxB,cAAM,gBAAgB,QAAQ,MAAM,eAAe;AACnD,YAAI,iBAAiB,cAAc,CAAC,EAAE,UAAU,MAAM,cAAc,CAAC,EAAE,UAAU,KAAK;AACpF,iBAAO,cAAc,CAAC,EAAE,KAAK;AAAA,QAC/B;AACA,eAAO,GAAG,QAAQ,UAAU,GAAG,GAAG,CAAC;AAAA,MACrC;AAEA,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,wBAAwB,QAAsB,OAAuB;AAC3E,UAAM,aAAa,MAChB,YAAY,EACZ,MAAM,KAAK,EACX,OAAO,CAACA,OAAMA,GAAE,SAAS,CAAC;AAC7B,UAAM,eAAe,OAAO,QAAQ,YAAY;AAEhD,UAAM,eAAe,WAAW,OAAO,CAAC,SAAS,aAAa,SAAS,IAAI,CAAC;AAE5E,QAAI,aAAa,SAAS,GAAG;AAC3B,aAAO,YAAY,aAAa,KAAK,IAAI,CAAC;AAAA,IAC5C;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,kBAAkB,SAA2B;AACnD,UAAM,aAAuB,CAAC;AAC9B,UAAM,UAAU,QAAQ,SAAS,oBAAoB;AACrD,eAAW,SAAS,SAAS;AAC3B,UAAI,MAAM,CAAC,MAAM,UAAa,MAAM,CAAC,MAAM,GAAI,YAAW,KAAK,MAAM,CAAC,CAAC;AAAA,IACzE;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,eAAe,SAA2B;AAChD,UAAM,UAAoB,CAAC;AAC3B,UAAM,UAAU,QAAQ,SAAS,sCAAsC;AACvE,eAAW,SAAS,SAAS;AAC3B,UAAI,MAAM,CAAC,MAAM,UAAa,MAAM,CAAC,MAAM,GAAI,SAAQ,KAAK,MAAM,CAAC,CAAC;AAAA,IACtE;AACA,WAAO,QAAQ,MAAM,GAAG,CAAC;AAAA,EAC3B;AAAA,EAEQ,gBAAgB,SAAiB,QAA0B;AAIjE,UAAM,YAAY,oBAAI,IAAI;AAAA,MACxB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAGD,UAAM,QAAQ,QAAQ,YAAY,EAAE,MAAM,gBAAgB,KAAK,CAAC;AAChE,UAAM,YAAY,oBAAI,IAAoB;AAE1C,eAAW,QAAQ,OAAO;AAExB,UAAI,UAAU,IAAI,IAAI,EAAG;AAEzB,gBAAU,IAAI,OAAO,UAAU,IAAI,IAAI,KAAK,KAAK,CAAC;AAAA,IACpD;AAEA,WAAO,MAAM,KAAK,UAAU,QAAQ,CAAC,EAClC,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC,EAC1B,MAAM,GAAG,CAAC,EACV,IAAI,CAAC,CAAC,IAAI,MAAM,IAAI;AAAA,EACzB;AAAA,EAEQ,qBAAqB,SAAyB;AACpD,UAAM,WAAW,QAAQ,MAAM,sBAAsB;AACrD,QAAI,WAAW,CAAC,MAAM,UAAa,SAAS,CAAC,MAAM,IAAI;AACrD,aAAO,SAAS,CAAC,EACd,MAAM,IAAI,EACV,IAAI,CAAC,SAAS,KAAK,QAAQ,aAAa,EAAE,EAAE,KAAK,CAAC,EAClD,OAAO,CAAC,SAAS,KAAK,SAAS,CAAC,EAChC,KAAK,IAAI;AAAA,IACd;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,kBACN,OACA,UACA,YACqC;AACrC,QAAI,CAAC,SAAS,eAAe,MAAM,eAAe,eAAe;AAC/D,aAAO,EAAE,UAAU,GAAG,OAAO,EAAE;AAAA,IACjC;AAEA,UAAM,SAAS,GAAG,QAAQ,IAAI,UAAU;AACxC,WAAO;AAAA,MACL,UAAU,MAAM,iBAAiB,MAAM;AAAA,MACvC,OAAO,MAAM,cAAc,MAAM;AAAA,IACnC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,wBACN,OACA,UACA,YACgE;AAChE,QAAI,CAAC,SAAS,eAAe,MAAM,eAAe,eAAe;AAC/D,aAAO,CAAC;AAAA,IACV;AAEA,UAAM,SAAS,GAAG,QAAQ,IAAI,UAAU;AACxC,UAAM,UAA0E,CAAC;AAGjF,UAAM,WAAW,MAAM,iBAAiB,MAAM;AAC9C,eAAW,QAAQ,UAAU;AAC3B,UAAI,KAAK,SAAS,SAAS;AAEzB,cAAM,CAAC,MAAM,MA
AM,IAAI,KAAK,YAAY,KAAK,IAAI;AACjD,gBAAQ,KAAK;AAAA,UACX;AAAA,UACA,SAAS,SAAS,GAAG,MAAM,OAAO;AAAA,UAClC,cAAc;AAAA,QAChB,CAAC;AAAA,MACH;AAAA,IACF;AAGA,UAAM,WAAW,MAAM,SAAS,MAAM;AACtC,eAAW,QAAQ,UAAU;AAC3B,UAAI,KAAK,SAAS,SAAS;AAEzB,cAAM,CAAC,MAAM,MAAM,IAAI,KAAK,YAAY,KAAK,EAAE;AAC/C,gBAAQ,KAAK;AAAA,UACX;AAAA,UACA,SAAS,SAAS,GAAG,MAAM,OAAO;AAAA,UAClC,cAAc;AAAA,QAChB,CAAC;AAAA,MACH;AAAA,IACF;AAGA,WAAO,QAAQ,MAAM,GAAG,EAAE;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA,EAKQ,YAAY,QAAkC;AACpD,UAAM,YAAY,OAAO,YAAY,GAAG;AACxC,QAAI,cAAc,IAAI;AACpB,aAAO,CAAC,QAAQ,EAAE;AAAA,IACpB;AACA,WAAO,CAAC,OAAO,UAAU,GAAG,SAAS,GAAG,OAAO,UAAU,YAAY,CAAC,CAAC;AAAA,EACzE;AACF;;;ACl3CA,SAAS,YAAAE,WAAU,UAAAC,eAAc;AACjC,SAAS,WAAAC,UAAS,cAAAC,aAAY,QAAAC,aAAY;;;ACD1C,SAAS,KAAAC,UAAS;AAiBlB,IAAM,4BAA4BA,GAAE,OAAO;AAAA,EACzC,MAAMA,GAAE,OAAO,EAAE,IAAI,GAAG,wBAAwB;AAAA,EAChD,aAAaA,GAAE,OAAO,EAAE,SAAS;AAAA,EACjC,MAAMA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,SAAS;AACrC,CAAC;AAUM,IAAM,4BAA4B,0BAA0B,OAAO;AAAA,EACxE,MAAMA,GAAE,QAAQ,MAAM;AAAA,EACtB,MAAMA,GAAE,OAAO,EAAE,IAAI,GAAG,kCAAkC;AAC5D,CAAC;AAaD,IAAM,eAAeA,GAAE,OAAO,EAAE;AAAA,EAC9B,CAAC,QAAQ;AAEP,QAAI;AACF,UAAI,IAAI,GAAG;AACX,aAAO;AAAA,IACT,QAAQ;AAEN,aAAO,yBAAyB,KAAK,GAAG;AAAA,IAC1C;AAAA,EACF;AAAA,EACA,EAAE,SAAS,iDAAiD;AAC9D;AAMO,IAAM,4BAA4B,0BAA0B,OAAO;AAAA,EACxE,MAAMA,GAAE,QAAQ,MAAM;AAAA,EACtB,KAAK;AAAA,EACL,QAAQA,GAAE,OAAO,EAAE,SAAS;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,IAAI,EAAE,SAAS,kCAAkC,EAAE,SAAS;AAChF,CAAC;AAYM,IAAM,2BAA2B,0BAA0B,OAAO;AAAA,EACvE,MAAMA,GAAE,QAAQ,KAAK;AAAA,EACrB,KAAKA,GAAE,IAAI,sCAAsC;AAAA,EACjD,OAAOA,GAAE,OAAO,EAAE,IAAI,EAAE,IAAI,GAAG,4BAA4B,EAAE,QAAQ,CAAC;AAAA,EACtE,UAAUA,GAAE,OAAO,EAAE,IAAI,EAAE,SAAS,qCAAqC,EAAE,SAAS;AAAA,EACpF,mBAAmBA,GAAE,OAAO,EAAE,SAAS;AAAA,EACvC,qBAAqBA,GAAE,OAAO,EAAE,SAAS;AAC3C,CAAC;AAYM,IAAM,wBAAwBA,GAAE,mBAAmB,QAAQ;AAAA,EAChE;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAYM,IAAM,+BAA+BA,GAAE,OAAO;AAAA,EACnD,SAASA,GAAE,QAAQ,CAAC;AAAA,EACpB,QAAQA,GAAE,MAAM,qBAAqB;AACvC,CAAC;AAQM,SAAS,sBAAsB,KAAkD;AACtF,SAAO,IAAI,SAAS;AACtB;AAEO,SAAS,sBAAsB,KAAkD;AACtF,SAAO,IAAI,SAAS;AACtB;AAEO,SAAS,qBAAqB,KAAiD;AACpF,SAAO,IAAI,SAAS;AACtB;AAMO,IAAM,mCAA2D;AAAA,EACtE,SAAS;AAAA,EACT,QAAQ,CAAC;AACX;;;ADtIA,eAAeC,YAAWC,OAAgC;AACxD,MAAI;AACF,UAAMC,QAAOD,KAAI;AACjB,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAYO,IAAM,yBAAN,MAA6B;AAAA,EACjB;AAAA,EACA;AAAA,EACT,SAAwC;AAAA,EAEhD,YAAY,aAAsB;AAChC,SAAK,cAAc,eAAe,mBAAmB,QAAQ;AAC7D,SAAK,aAAaE,MAAK,KAAK,aAAa,6CAA6C;AAAA,EACxF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,OAAwC;AAC5C,QAAI,KAAK,WAAW,MAAM;AACxB,aAAO,KAAK;AAAA,IACd;AAEA,UAAM,SAAS,MAAMH,YAAW,KAAK,UAAU;AAC/C,QAAI,CAAC,QAAQ;AAEX,WAAK,SAAS;AAAA,QACZ,GAAG;AAAA,QACH,QAAQ,CAAC,GAAG,iCAAiC,MAAM;AAAA,MACrD;AACA,aAAO,KAAK;AAAA,IACd;AAEA,UAAM,UAAU,MAAMI,UAAS,KAAK,YAAY,OAAO;AACvD,QAAI;AACJ,QAAI;AACF,eAAS,KAAK,MAAM,OAAO;AAAA,IAC7B,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,wCAAwC,KAAK,UAAU,KACrD,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CACvD;AAAA,MACF;AAAA,IACF;AAEA,UAAM,SAAS,6BAA6B,UAAU,MAAM;AAC5D,QAAI,CAAC,OAAO,SAAS;AACnB,YAAM,IAAI,MAAM,gCAAgC,KAAK,UAAU,KAAK,OAAO,MAAM,OAAO,EAAE;AAAA,IAC5F;AAEA,SAAK,SAAS,OAAO;AACrB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,KAAK,QAA+C;AACxD,UAAM,gBAAgB,KAAK,YAAY,KAAK,UAAU,QAAQ,MAAM,CAAC,CAAC;AACtE,SAAK,SAAS;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,cAAc,YAA4C;AAC9D,UAAM,SAAS,MAAM,KAAK,KAAK;AAC/B,UAAM,WAAW,OAAO,OAAO,KAAK,CAAC,MAAM,EAAE,SAAS,WAAW,IAAI;AACrE,QAAI,aAAa,QAAW;AAC1B,YAAM,IAAI,MAAM,qBAAqB,WAAW,IAAI,kBAAkB;AAAA,IACxE;AACA,WAAO,OAAO,KAAK,UAAU;AAC7B,UAAM,KAAK,KAAK,MAAM;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,iBAAiB,MAAgC;AACrD,UAAM,SAAS,MAAM,KAAK,KAAK;AAC/B,UAAM,QAAQ,OAAO,OAAO,
UAAU,CAAC,MAAM,EAAE,SAAS,IAAI;AAC5D,QAAI,UAAU,IAAI;AAChB,aAAO;AAAA,IACT;AACA,WAAO,OAAO,OAAO,OAAO,CAAC;AAC7B,UAAM,KAAK,KAAK,MAAM;AACtB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,iBACJ,MACA,SACe;AACf,UAAM,SAAS,MAAM,KAAK,KAAK;AAC/B,UAAM,QAAQ,OAAO,OAAO,UAAU,CAAC,MAAM,EAAE,SAAS,IAAI;AAC5D,QAAI,UAAU,IAAI;AAChB,YAAM,IAAI,MAAM,qBAAqB,IAAI,aAAa;AAAA,IACxD;AAIA,UAAM,WAAW,OAAO,OAAO,KAAK;AACpC,QAAI,aAAa,QAAW;AAC1B,YAAM,IAAI,MAAM,qBAAqB,IAAI,wBAAwB,OAAO,KAAK,CAAC,EAAE;AAAA,IAClF;AACA,QAAI,QAAQ,gBAAgB,QAAW;AACrC,eAAS,cAAc,QAAQ;AAAA,IACjC;AACA,QAAI,QAAQ,SAAS,QAAW;AAC9B,eAAS,OAAO,QAAQ;AAAA,IAC1B;AACA,UAAM,KAAK,KAAK,MAAM;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,UAAU,MAAoD;AAClE,UAAM,SAAS,MAAM,KAAK,KAAK;AAC/B,WAAO,OAAO,OAAO,KAAK,CAAC,MAAM,EAAE,SAAS,IAAI;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBAAmC;AACvC,UAAM,SAAS,MAAM,KAAK,KAAK;AAC/B,WAAO,OAAO,OAAO,SAAS;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA,EAKA,YAAYH,OAAsB;AAChC,QAAII,YAAWJ,KAAI,GAAG;AACpB,aAAOA;AAAA,IACT;AACA,WAAOK,SAAQ,KAAK,aAAaL,KAAI;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAyB;AACvB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,aAAmB;AACjB,SAAK,SAAS;AAAA,EAChB;AACF;;;AErMA,SAAS,cAAAM,mBAAkB;AAC3B,SAAS,YAAAC,WAAU,SAAAC,QAAO,QAAAC,OAAM,UAAAC,eAAc;AAC9C,SAAS,QAAAC,QAAM,WAAAC,gBAAe;;;ACF9B,SAAS,aAAa;AACtB,SAAS,SAAAC,cAAa;AAWtB,eAAsB,gBAAgB,SAAgD;AACpF,QAAM,EAAE,KAAK,WAAW,QAAQ,QAAQ,EAAE,IAAI;AAE9C,QAAMC,OAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAE1C,QAAM,OAAO,CAAC,SAAS,WAAW,OAAO,KAAK,CAAC;AAC/C,MAAI,WAAW,QAAW;AACxB,SAAK,KAAK,YAAY,MAAM;AAAA,EAC9B;AACA,OAAK,KAAK,KAAK,SAAS;AAExB,SAAO,IAAI,QAAQ,CAACC,aAAY;AAC9B,UAAM,MAAM,MAAM,OAAO,MAAM,EAAE,OAAO,CAAC,UAAU,QAAQ,MAAM,EAAE,CAAC;AAEpE,QAAI,SAAS;AACb,QAAI,OAAO,GAAG,QAAQ,CAAC,SAAiB;AACtC,gBAAU,KAAK,SAAS;AAAA,IAC1B,CAAC;AAED,QAAI,GAAG,SAAS,CAAC,UAAiB;AAChC,MAAAA,SAAQ,IAAI,KAAK,CAAC;AAAA,IACpB,CAAC;AAED,QAAI,GAAG,SAAS,CAAC,SAAwB;AACvC,UAAI,SAAS,GAAG;AACd,QAAAA,SAAQ,GAAG,SAAS,CAAC;AAAA,MACvB,OAAO;AACL,QAAAA,SAAQ,IAAI,IAAI,MAAM,qBAAqB,MAAM,EAAE,CAAC,CAAC;AAAA,MACvD;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AACH;AAEO,SAAS,SAAS,QAAyB;AAChD,SAAO,OAAO,WAAW,SAAS,KAAK,OAAO,WAAW,UAAU,KAAK,OAAO,WAAW,MAAM;AAClG;AAEO,SAAS,gBAAgB,KAAqB;AACnD,QAAM,QAAQ,sBAAsB,KAAK,GAAG;AAC5C,QAAM,OAAO,QAAQ,CAAC;AACtB,MAAI,SAAS,QAAW;AACtB,WAAO;AAAA,EACT;AACA,SAAO;AACT;;;ADlCA,eAAeC,YAAWC,OAAgC;AACxD,MAAI;AACF,UAAMC,QAAOD,KAAI;AACjB,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAmCO,IAAM,eAAN,MAAmB;AAAA,EACP;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACT,WAA0B,EAAE,QAAQ,CAAC,EAAE;AAAA,EAE/C,YAAY,SAAiB,SAA+B;AAC1D,SAAK,UAAU;AACf,SAAK,oBAAoB,SAAS,qBAAqB;AACvD,SAAK,mBAAmB,SAAS,oBAAoB;AACrD,SAAK,cAAc,SAAS,eAAe;AAAA,EAC7C;AAAA,EAEA,MAAM,aAA4B;AAChC,UAAME,OAAM,KAAK,SAAS,EAAE,WAAW,KAAK,CAAC;AAC7C,UAAM,KAAK,aAAa;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,0BACN,OACA,OAC6B;AAE7B,UAAM,OAAO,MAAM,SAAS,SAAY,CAAC,GAAG,MAAM,IAAI,IAAI;AAC1D,UAAM,OAAO;AAAA,MACX,MAAM,MAAM;AAAA,MACZ,aAAa,MAAM;AAAA,MACnB;AAAA,IACF;AAEA,YAAQ,MAAM,MAAM;AAAA,MAClB,KAAK,QAAQ;AACX,cAAM,YAAY;AAClB,cAAM,UAA+B;AAAA,UACnC,GAAG;AAAA,UACH,MAAM;AAAA;AAAA,UAEN,MAAM,MAAM,QAAQ,UAAU;AAAA,QAChC;AACA,eAAO;AAAA,MACT;AAAA,MACA,KAAK,QAAQ;AACX,cAAM,YAAY;AAElB,YAAI,UAAU,QAAQ,QAAW;AAC/B,iBAAO;AAAA,QACT;AACA,cAAM,UAA+B;AAAA,UACnC,GAAG;AAAA,UACH,MAAM;AAAA,UACN,KAAK,UAAU;AAAA,UACf,QAAQ,UAAU;AAAA,UAClB,OAAO,MAAM;AAAA,QACf;AACA,eAAO;AAAA,MACT;AAAA,MACA,KAAK,OAAO;AACV,cAAM,WAAW;AACjB,cAAM,SAA6B;AAAA,UACjC,GAAG;AAAA,UACH,MAAM;AAAA,UACN,KAAK,SAAS;AAAA,UACd,OAAO,SAAS;AAAA,UAChB,UAAU,MAAM;AAAA,UAChB,mBAAmB,MAAM;AAAA,UACzB,qBAAqB,MAAM;AAAA,QAC7B;AACA,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAA
A;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,kCAAkC,OAA2C;AAEnF,UAAM,OAAO,MAAM,SAAS,SAAY,CAAC,GAAG,MAAM,IAAI,IAAI;AAC1D,UAAM,OAAO;AAAA,MACX,MAAM,MAAM;AAAA,MACZ,aAAa,MAAM;AAAA,MACnB;AAAA,IACF;AAEA,YAAQ,MAAM,MAAM;AAAA,MAClB,KAAK,QAAQ;AACX,cAAM,UAA+B;AAAA,UACnC,GAAG;AAAA,UACH,MAAM;AAAA,UACN,MAAM,MAAM;AAAA,QACd;AACA,eAAO;AAAA,MACT;AAAA,MACA,KAAK,QAAQ;AAEX,YAAI,MAAM,QAAQ,QAAW;AAC3B,iBAAO;AAAA,QACT;AACA,cAAM,UAA+B;AAAA,UACnC,GAAG;AAAA,UACH,MAAM;AAAA,UACN,KAAK,MAAM;AAAA,UACX,QAAQ,MAAM;AAAA,UACd,OAAO,MAAM;AAAA,QACf;AACA,eAAO;AAAA,MACT;AAAA,MACA,KAAK,OAAO;AACV,cAAM,SAA6B;AAAA,UACjC,GAAG;AAAA,UACH,MAAM;AAAA,UACN,KAAK,MAAM;AAAA,UACX,OAAO,MAAM;AAAA,UACb,UAAU,MAAM;AAAA,UAChB,mBAAmB,MAAM;AAAA,UACzB,qBAAqB,MAAM;AAAA,QAC7B;AACA,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,OAAO,OAAyB,SAAoD;AACxF,QAAI,CAAC,MAAM,QAAQ,MAAM,KAAK,KAAK,MAAM,IAAI;AAC3C,aAAO,IAAI,IAAI,MAAM,4BAA4B,CAAC;AAAA,IACpD;AAEA,UAAM,WAAW,MAAM,KAAK,UAAU,MAAM,IAAI;AAChD,QAAI,aAAa,QAAW;AAC1B,aAAO,IAAI,IAAI,MAAM,oBAAoB,MAAM,IAAI,kBAAkB,CAAC;AAAA,IACxE;AAEA,UAAM,KAAK,cAAcC,YAAW,CAAC;AACrC,UAAM,MAAM,oBAAI,KAAK;AAErB,QAAI;AAEJ,YAAQ,MAAM,MAAM;AAAA,MAClB,KAAK,QAAQ;AACX,YAAI,MAAM,SAAS,QAAW;AAC5B,iBAAO,IAAI,IAAI,MAAM,kCAAkC,CAAC;AAAA,QAC1D;AAEA,cAAM,iBACJ,KAAK,gBAAgB,SACjBC,SAAQ,KAAK,aAAa,MAAM,IAAI,IACpCA,SAAQ,MAAM,IAAI;AAExB,YAAI;AACF,gBAAM,QAAQ,MAAMC,MAAK,cAAc;AACvC,cAAI,CAAC,MAAM,YAAY,GAAG;AACxB,mBAAO,IAAI,IAAI,MAAM,4BAA4B,cAAc,EAAE,CAAC;AAAA,UACpE;AAAA,QACF,QAAQ;AACN,iBAAO,IAAI,IAAI,MAAM,6BAA6B,cAAc,EAAE,CAAC;AAAA,QACrE;AACA,gBAAQ;AAAA,UACN,MAAM;AAAA,UACN;AAAA,UACA,MAAM,MAAM;AAAA,UACZ,MAAM;AAAA,UACN,aAAa,MAAM;AAAA,UACnB,MAAM,MAAM;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,WAAW;AAAA,QACb;AACA;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,YAAI,WAAW,MAAM;AAGrB,YAAI,MAAM,QAAQ,QAAW;AAC3B,gBAAM,WAAWC,OAAK,KAAK,SAAS,SAAS,EAAE;AAC/C,gBAAM,SAAS,MAAM,gBAAgB;AAAA,YACnC,KAAK,MAAM;AAAA,YACX,WAAW;AAAA,YACX,GAAI,MAAM,WAAW,SAAY,EAAE,QAAQ,MAAM,OAAO,IAAI,CAAC;AAAA,YAC7D,OAAO,MAAM,SAAS;AAAA,UACxB,CAAC;AAED,cAAI,CAAC,OAAO,SAAS;AACnB,mBAAO,IAAI,OAAO,KAAK;AAAA,UACzB;AACA,qBAAW,OAAO;AAAA,QACpB;AAEA,YAAI,aAAa,QAAW;AAC1B,iBAAO,IAAI,IAAI,MAAM,sCAAsC,CAAC;AAAA,QAC9D;AAGA,cAAM,qBACJ,KAAK,gBAAgB,SAAYF,SAAQ,KAAK,aAAa,QAAQ,IAAIA,SAAQ,QAAQ;AAGzF,YAAI,MAAM,QAAQ,QAAW;AAC3B,cAAI;AACF,kBAAM,QAAQ,MAAMC,MAAK,kBAAkB;AAC3C,gBAAI,CAAC,MAAM,YAAY,GAAG;AACxB,qBAAO,IAAI,IAAI,MAAM,4BAA4B,kBAAkB,EAAE,CAAC;AAAA,YACxE;AAAA,UACF,QAAQ;AACN,mBAAO,IAAI,IAAI,MAAM,mCAAmC,kBAAkB,EAAE,CAAC;AAAA,UAC/E;AAAA,QACF;AAEA,gBAAQ;AAAA,UACN,MAAM;AAAA,UACN;AAAA,UACA,MAAM,MAAM;AAAA,UACZ,MAAM;AAAA,UACN,KAAK,MAAM;AAAA,UACX,QAAQ,MAAM;AAAA,UACd,OAAO,MAAM,SAAS;AAAA,UACtB,aAAa,MAAM;AAAA,UACnB,MAAM,MAAM;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,WAAW;AAAA,QACb;AACA;AAAA,MACF;AAAA,MAEA,KAAK;AACH,YAAI,MAAM,QAAQ,QAAW;AAC3B,iBAAO,IAAI,IAAI,MAAM,gCAAgC,CAAC;AAAA,QACxD;AACA,gBAAQ;AAAA,UACN,MAAM;AAAA,UACN;AAAA,UACA,MAAM,MAAM;AAAA,UACZ,KAAK,MAAM;AAAA,UACX,OAAO,MAAM,SAAS;AAAA,UACtB,UAAU,MAAM;AAAA,UAChB,mBAAmB,MAAM;AAAA,UACzB,qBAAqB,MAAM;AAAA,UAC3B,aAAa,MAAM;AAAA,UACnB,MAAM,MAAM;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,WAAW;AAAA,QACb;AACA;AAAA,MAEF,SAAS;AAEP,cAAM,cAAqB,MAAM;AACjC,eAAO,IAAI,IAAI,MAAM,uBAAuB,OAAO,WAAW,CAAC,EAAE,CAAC;AAAA,MACpE;AAAA,IACF;AAEA,SAAK,SAAS,OAAO,KAAK,KAAK;AAC/B,UAAM,KAAK,aAAa;AAGxB,QAAI,KAAK,qBAAqB,QAAW;AACvC,YAAM,KAAK,iBAAiB,wBAAwB;AAAA,IACtD;AAGA,QAAI,KAAK,sBAAsB,UAAa,SAAS,uBAAuB,MAAM;AAChF,YAAM,aAAa,KAAK,0BAA0B,OAAO,KAAK;AAE9D,UAAI,eAAe,QAAW;AAC5B,cAAM,KAAK,kBAAkB,cAAc,UAAU;AAAA,MACvD;AAAA,IACF;AAEA,WAAO,GAAG,KAAK;AAAA,EACjB;AAAA,EAEA,MAAM,KAAK,MAAoC;AAC7C,QAAI,SAAS,QAAW;AACtB,aAAO,QAAQ,QAAQ,KAAK,SAAS,OAAO,OAAO,CAAC,MAAM,EAAE,SAAS
,IAAI,CAAC;AAAA,IAC5E;AACA,WAAO,QAAQ,QAAQ,CAAC,GAAG,KAAK,SAAS,MAAM,CAAC;AAAA,EAClD;AAAA,EAEA,MAAM,IAAI,IAAyC;AACjD,WAAO,QAAQ,QAAQ,KAAK,SAAS,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,CAAC;AAAA,EACtE;AAAA,EAEA,MAAM,UAAU,MAA0C;AACxD,WAAO,QAAQ,QAAQ,KAAK,SAAS,OAAO,KAAK,CAAC,MAAM,EAAE,SAAS,IAAI,CAAC;AAAA,EAC1E;AAAA,EAEA,MAAM,cAAc,UAA8C;AAChE,WAAO,QAAQ;AAAA,MACb,KAAK,SAAS,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,YAAY,EAAE,SAAS,QAAQ;AAAA,IAC3E;AAAA,EACF;AAAA,EAEA,MAAM,OACJ,IACA,SACA,SACwB;AACxB,UAAM,QAAQ,KAAK,SAAS,OAAO,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE;AAC/D,QAAI,UAAU,IAAI;AAChB,aAAO,IAAI,IAAI,MAAM,oBAAoB,EAAE,EAAE,CAAC;AAAA,IAChD;AAEA,UAAM,QAAQ,KAAK,SAAS,OAAO,KAAK;AACxC,QAAI,UAAU,QAAW;AACvB,aAAO,IAAI,IAAI,MAAM,oBAAoB,EAAE,EAAE,CAAC;AAAA,IAChD;AAGA,QAAI,QAAQ,MAAM,KAAK,MAAM,IAAI;AAC/B,aAAO,IAAI,IAAI,MAAM,4BAA4B,CAAC;AAAA,IACpD;AAGA,UAAM,aAAa,QAAQ,SAAS,UAAa,QAAQ,SAAS,MAAM;AACxE,QAAI,YAAY;AACd,YAAM,WAAW,KAAK,SAAS,OAAO,KAAK,CAAC,MAAM,EAAE,SAAS,QAAQ,QAAQ,EAAE,OAAO,EAAE;AACxF,UAAI,aAAa,QAAW;AAC1B,eAAO,IAAI,IAAI,MAAM,oBAAoB,QAAQ,IAAI,kBAAkB,CAAC;AAAA,MAC1E;AAAA,IACF;AAGA,UAAM,UAAU;AAAA,MACd,GAAG;AAAA,MACH,GAAG;AAAA,MACH,WAAW,oBAAI,KAAK;AAAA,IACtB;AAEA,SAAK,SAAS,OAAO,KAAK,IAAI;AAC9B,UAAM,KAAK,aAAa;AAGxB,QAAI,KAAK,sBAAsB,UAAa,SAAS,uBAAuB,MAAM;AAChF,UAAI,YAAY;AAEd,cAAM,KAAK,kBAAkB,iBAAiB,MAAM,IAAI;AACxD,cAAM,gBAAgB,KAAK,kCAAkC,OAAO;AAEpE,YAAI,kBAAkB,QAAW;AAC/B,gBAAM,KAAK,kBAAkB,cAAc,aAAa;AAAA,QAC1D;AAAA,MACF,OAAO;AAEL,cAAM,aAAwD,CAAC;AAC/D,YAAI,QAAQ,gBAAgB,QAAW;AACrC,qBAAW,cAAc,QAAQ;AAAA,QACnC;AACA,YAAI,QAAQ,SAAS,QAAW;AAE9B,qBAAW,OAAO,CAAC,GAAG,QAAQ,IAAI;AAAA,QACpC;AAEA,YAAI,OAAO,KAAK,UAAU,EAAE,SAAS,GAAG;AACtC,gBAAM,KAAK,kBAAkB,iBAAiB,MAAM,MAAM,UAAU;AAAA,QACtE;AAAA,MACF;AAAA,IACF;AAEA,WAAO,GAAG,OAAO;AAAA,EACnB;AAAA,EAEA,MAAM,OAAO,IAAa,SAAmD;AAC3E,UAAM,QAAQ,KAAK,SAAS,OAAO,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE;AAC/D,QAAI,UAAU,IAAI;AAChB,aAAO,IAAI,IAAI,MAAM,oBAAoB,EAAE,EAAE,CAAC;AAAA,IAChD;AAEA,UAAM,QAAQ,KAAK,SAAS,OAAO,KAAK;AACxC,QAAI,UAAU,QAAW;AACvB,aAAO,IAAI,IAAI,MAAM,oBAAoB,EAAE,EAAE,CAAC;AAAA,IAChD;AAEA,UAAM,YAAY,MAAM;AACxB,SAAK,SAAS,OAAO,OAAO,OAAO,CAAC;AACpC,UAAM,KAAK,aAAa;AAGxB,QAAI,KAAK,sBAAsB,UAAa,SAAS,uBAAuB,MAAM;AAChF,YAAM,KAAK,kBAAkB,iBAAiB,SAAS;AAAA,IACzD;AAEA,WAAO,GAAG,MAAS;AAAA,EACrB;AAAA,EAEA,MAAc,eAA8B;AAC1C,UAAM,eAAeC,OAAK,KAAK,SAAS,aAAa;AACrD,UAAM,SAAS,MAAMP,YAAW,YAAY;AAE5C,QAAI,CAAC,QAAQ;AAEX,WAAK,WAAW,EAAE,QAAQ,CAAC,EAAE;AAC7B,YAAM,KAAK,aAAa;AACxB;AAAA,IACF;AAGA,UAAM,UAAU,MAAMQ,UAAS,cAAc,OAAO;AACpD,QAAI;AAEF,YAAM,OAAO,KAAK,MAAM,OAAO;AAC/B,WAAK,WAAW;AAAA,QACd,QAAQ,KAAK,OACV,OAAO,CAAC,MAAkB,MAAM,IAAI,EACpC,IAAI,CAAC,OAAO;AAAA,UACX,GAAG;AAAA,UACH,IAAI,cAAc,EAAE,EAAE;AAAA,UACtB,WAAW,IAAI,KAAK,EAAE,SAAS;AAAA,UAC/B,WAAW,IAAI,KAAK,EAAE,SAAS;AAAA,QACjC,EAAE;AAAA,MACN;AAAA,IACF,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,qCAAqC,YAAY,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MAC9G;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAc,eAA8B;AAC1C,UAAM,eAAeD,OAAK,KAAK,SAAS,aAAa;AACrD,UAAM,gBAAgB,cAAc,KAAK,UAAU,KAAK,UAAU,MAAM,CAAC,CAAC;AAAA,EAC5E;AACF;;;AE/eA,SAAS,SAAAE,cAAgC;AACzC,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,cAAAC,mBAAkB;AAC3B,OAAOC,WAAU;AACjB,SAAS,uBAA4D;AACrE,SAAS,qBAAqB;AAC9B,SAAS,gBAAgB;;;ACNzB,SAAS,KAAAC,UAAS;AAGlB,IAAM,mBAAmBA,GAAE,OAAO;AAAA,EAChC,MAAMA,GAAE,OAAO;AAAA,EACf,OAAOA,GAAE,QAAQ;AAAA,EACjB,WAAWA,GAAE,OAAO;AAAA,EACpB,WAAWA,GAAE,OAAO;AAAA,EACpB,SAASA,GAAE,OAAO;AAAA,EAClB,OAAOA,GAAE,MAAMA,GAAE,OAAO,CAAC;AAC3B,CAAC;AAED,IAAM,iBAAiBA,GAAE,OAAO;AAAA,EAC9B,MAAMA,GAAE,KAAK,CAAC,YAAY,OAAO,CAAC;AAAA,EAClC,MAAMA,GAAE,OAAO;AAAA,EACf,UAAUA,GAAE,QAAQ;AAAA,EACpB,WAAWA,GAAE,OAAO;AAAA,EACpB,SAASA,GAAE,OAAO;AAAA,EAClB,OAAOA,GAAE,QAAQ,EAAE,SAAS;AAAA,EAC5B,W
AAWA,GAAE,OAAO,EAAE,SAAS;AAAA,EAC/B,OAAOA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,SAAS;AAAA,EACpC,SAASA,GAAE,MAAM,gBAAgB,EAAE,SAAS;AAC9C,CAAC;AAED,IAAM,mBAAmBA,GAAE,OAAO;AAAA,EAChC,QAAQA,GAAE,OAAO;AAAA,EACjB,UAAUA,GAAE,OAAO;AAAA,EACnB,OAAOA,GAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AACxC,CAAC;AAEM,IAAM,0BAA0BA,GAAE,OAAO;AAAA,EAC9C,OAAOA,GAAE,MAAM,cAAc;AAAA,EAC7B,SAASA,GAAE,MAAM,gBAAgB;AACnC,CAAC;AAYM,SAAS,0BAA0B,MAAkC;AAC1E,SAAO,wBAAwB,MAAM,IAAI;AAC3C;;;ADrCA,IAAMC,UAAS,aAAa,eAAe;AAe3C,SAAS,sBAA8B;AACrC,SAAO,QAAQ,aAAa,UAAU,WAAW;AACnD;AAOA,SAAS,kBAAkB,YAA4B;AACrD,MAAI,QAAQ,aAAa,SAAS;AAChC,WAAOC,MAAK,KAAK,YAAY,SAAS,WAAW,YAAY;AAAA,EAC/D;AACA,SAAOA,MAAK,KAAK,YAAY,SAAS,OAAO,SAAS;AACxD;AAEO,IAAM,eAAN,MAAmB;AAAA,EAChB,UAA+B;AAAA,EACtB,UAAuC,oBAAI,IAAI;AAAA,EACxD,wBAAwB;AAAA,EACxB,iBAA2C;AAAA,EAC3C,iBAA2C;AAAA,EAEnD,QAAuB;AACrB,QAAI,KAAK,QAAS,QAAO,QAAQ,QAAQ;AAIzC,UAAM,kBAAkB,cAAc,YAAY,GAAG;AAErD,UAAM,cAAc,GAAGA,MAAK,GAAG,OAAOA,MAAK,GAAG;AAC9C,UAAM,eAAe,gBAAgB,SAAS,WAAW;AAEzD,QAAI;AACJ,QAAI;AAEJ,QAAI,cAAc;AAEhB,YAAM,YAAY,gBAAgB,QAAQ,WAAW;AACrD,YAAM,aAAa,gBAAgB,UAAU,GAAG,SAAS;AACzD,yBAAmBA,MAAK,KAAK,YAAY,UAAU,eAAe;AAGlE,YAAM,aAAa,kBAAkB,UAAU;AAC/C,mBAAaC,YAAW,UAAU,IAAI,aAAa,oBAAoB;AAAA,IACzE,OAAO;AAEL,YAAM,SAASD,MAAK,QAAQA,MAAK,QAAQ,eAAe,CAAC;AACzD,YAAM,cAAcA,MAAK,QAAQ,MAAM;AACvC,yBAAmBA,MAAK,KAAK,aAAa,UAAU,eAAe;AAGnE,mBAAa,oBAAoB;AAAA,IACnC;AAEA,IAAAD,QAAO;AAAA,MACL,EAAE,kBAAkB,YAAY,iBAAiB,aAAa;AAAA,MAC9D;AAAA,IACF;AAEA,SAAK,UAAUG,OAAM,YAAY,CAAC,gBAAgB,GAAG;AAAA,MACnD,OAAO,CAAC,QAAQ,QAAQ,MAAM;AAAA,IAChC,CAAC;AAGD,SAAK,QAAQ,GAAG,SAAS,CAACC,SAAQ;AAChC,MAAAJ,QAAO,MAAM,EAAE,OAAOI,KAAI,SAAS,OAAOA,KAAI,MAAM,GAAG,6BAA6B;AACpF,WAAK,iBAAiB,IAAI,MAAM,kBAAkBA,KAAI,OAAO,EAAE,CAAC;AAAA,IAClE,CAAC;AAGD,SAAK,QAAQ,GAAG,QAAQ,CAAC,MAAM,WAAW;AACxC,UAAI,SAAS,KAAK,SAAS,MAAM;AAC/B,QAAAJ,QAAO,MAAM,EAAE,KAAK,GAAG,iDAAiD;AACxE,aAAK,iBAAiB,IAAI,MAAM,4BAA4B,OAAO,IAAI,CAAC,EAAE,CAAC;AAAA,MAC7E,WAAW,UAAU,CAAC,KAAK,uBAAuB;AAEhD,QAAAA,QAAO,MAAM,EAAE,OAAO,GAAG,0CAA0C;AACnE,aAAK,iBAAiB,IAAI,MAAM,8BAA8B,MAAM,EAAE,CAAC;AAAA,MACzE;AACA,WAAK,UAAU;AACf,WAAK,wBAAwB;AAAA,IAC/B,CAAC;AAGD,QAAI,KAAK,QAAQ,QAAQ;AACvB,WAAK,iBAAiB,gBAAgB,EAAE,OAAO,KAAK,QAAQ,OAAO,CAAC;AACpE,WAAK,eAAe,GAAG,QAAQ,CAAC,SAAS;AACvC,QAAAA,QAAO,KAAK,EAAE,QAAQ,KAAK,GAAG,6BAA6B;AAAA,MAC7D,CAAC;AAAA,IACH;AAEA,QAAI,KAAK,QAAQ,WAAW,MAAM;AAChC,WAAK,QAAQ,KAAK;AAClB,WAAK,UAAU;AACf,aAAO,QAAQ,OAAO,IAAI,MAAM,sCAAsC,CAAC;AAAA,IACzE;AACA,SAAK,iBAAiB,gBAAgB,EAAE,OAAO,KAAK,QAAQ,OAAO,CAAC;AACpE,SAAK,eAAe,GAAG,QAAQ,CAAC,SAAS;AAEvC,UAAI,CAAC,KAAK,KAAK,EAAE,WAAW,GAAG,GAAG;AAChC;AAAA,MACF;AAEA,UAAI;AAEF,cAAM,WAAW,KAAK,MAAM,IAAI;AAKhC,cAAM,UAAU,KAAK,QAAQ,IAAI,SAAS,EAAE;AAC5C,YAAI,YAAY,QAAW;AACzB,cAAI,SAAS,UAAU,QAAW;AAChC,yBAAa,QAAQ,OAAO;AAC5B,iBAAK,QAAQ,OAAO,SAAS,EAAE;AAC/B,oBAAQ,OAAO,IAAI,MAAM,SAAS,MAAM,OAAO,CAAC;AAAA,UAClD,WAAW,SAAS,WAAW,QAAW;AACxC,yBAAa,QAAQ,OAAO;AAC5B,iBAAK,QAAQ,OAAO,SAAS,EAAE;AAG/B,gBAAI;AACF,oBAAM,YAAY,0BAA0B,SAAS,MAAM;AAC3D,sBAAQ,QAAQ,SAAS;AAAA,YAC3B,SAAS,OAAgB;AAEvB,kBAAI,iBAAiB,UAAU;AAC7B,gBAAAA,QAAO;AAAA,kBACL;AAAA,oBACE,QAAQ,MAAM;AAAA,oBACd,UAAU,KAAK,UAAU,SAAS,MAAM;AAAA,kBAC1C;AAAA,kBACA;AAAA,gBACF;AACA,wBAAQ;AAAA,kBACN,IAAI,MAAM,+CAA+C,MAAM,OAAO,EAAE;AAAA,gBAC1E;AAAA,cACF,OAAO;AACL,sBAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAC1E,gBAAAA,QAAO,MAAM,EAAE,OAAO,aAAa,GAAG,2BAA2B;AACjE,wBAAQ,OAAO,IAAI,MAAM,8BAA8B,YAAY,EAAE,CAAC;AAAA,cACxE;AAAA,YACF;AAAA,UACF;AAAA,QAEF;AAAA,MACF,SAASI,MAAK;AACZ,QAAAJ,QAAO;AAAA,UACL;AAAA,YACE,OAAOI,gBAAe,QAAQA,KAAI,UAAU,OAAOA,IAAG;AAAA,YACtD;AAAA,UACF;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA,EAEA,MAAM,YACJ,MACA,UACA,YAAo
B,KACQ;AAC5B,QAAI,CAAC,KAAK,QAAS,OAAM,KAAK,MAAM;AAEpC,UAAM,KAAKC,YAAW;AACtB,UAAM,UAAU;AAAA,MACd,SAAS;AAAA,MACT;AAAA,MACA,QAAQ;AAAA,MACR,QAAQ,EAAE,MAAM,SAAS;AAAA,IAC3B;AAEA,WAAO,IAAI,QAAQ,CAACC,UAAS,WAAW;AACtC,YAAM,UAAU,WAAW,MAAM;AAC/B,cAAM,UAAU,KAAK,QAAQ,IAAI,EAAE;AACnC,YAAI,SAAS;AACX,eAAK,QAAQ,OAAO,EAAE;AACtB;AAAA,YACE,IAAI,MAAM,gCAAgC,OAAO,SAAS,CAAC,gBAAgB,QAAQ,EAAE;AAAA,UACvF;AAAA,QACF;AAAA,MACF,GAAG,SAAS;AAEZ,WAAK,QAAQ,IAAI,IAAI;AAAA,QACnB,SAAAA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AACD,UAAI,CAAC,KAAK,SAAS,OAAO;AACxB,eAAO,IAAI,MAAM,qCAAqC,CAAC;AACvD;AAAA,MACF;AACA,WAAK,QAAQ,MAAM,MAAM,GAAG,KAAK,UAAU,OAAO,CAAC;AAAA,CAAI;AAAA,IACzD,CAAC;AAAA,EACH;AAAA,EAEA,OAAsB;AACpB,QAAI,CAAC,KAAK,SAAS;AACjB,aAAO,QAAQ,QAAQ;AAAA,IACzB;AAEA,WAAO,IAAI,QAAQ,CAACA,aAAY;AAC9B,WAAK,wBAAwB;AAC7B,WAAK,iBAAiB,IAAI,MAAM,uBAAuB,CAAC;AAGxD,UAAI,KAAK,gBAAgB;AACvB,aAAK,eAAe,MAAM;AAC1B,aAAK,iBAAiB;AAAA,MACxB;AACA,UAAI,KAAK,gBAAgB;AACvB,aAAK,eAAe,MAAM;AAC1B,aAAK,iBAAiB;AAAA,MACxB;AAGA,YAAM,OAAO,KAAK;AAClB,UAAI,SAAS,MAAM;AACjB,QAAAA,SAAQ;AACR;AAAA,MACF;AAGA,YAAM,SAAS,MAAY;AACzB,QAAAA,SAAQ;AAAA,MACV;AACA,WAAK,KAAK,QAAQ,MAAM;AAGxB,WAAK,KAAK;AAGV,iBAAW,MAAM;AACf,aAAK,eAAe,QAAQ,MAAM;AAClC,YAAI,KAAK,YAAY,MAAM;AACzB,eAAK,KAAK,SAAS;AACnB,eAAK,UAAU;AAAA,QACjB;AACA,QAAAA,SAAQ;AAAA,MACV,GAAG,GAAI;AAAA,IACT,CAAC;AAAA,EACH;AAAA,EAEQ,iBAAiB,OAAoB;AAC3C,eAAW,WAAW,KAAK,QAAQ,OAAO,GAAG;AAC3C,mBAAa,QAAQ,OAAO;AAC5B,cAAQ,OAAO,KAAK;AAAA,IACtB;AACA,SAAK,QAAQ,MAAM;AAAA,EACrB;AACF;;;AEvRA,SAAS,WAAAC,gBAAe;AACxB,SAAS,QAAAC,cAAY;AACrB,SAAS,UAAU,WAA2C;AAI9D,IAAI,WAAWA,OAAKD,SAAQ,GAAG,UAAU,0BAA0B;AAE5D,IAAM,kBAAN,MAAsB;AAAA,EACnB,YAA8C;AAAA,EAC9C,cAAoC;AAAA;AAAA,EAEpC,cAA6B;AAAA;AAAA,EAE7B,WAAW;AAAA,EACF;AAAA,EACA;AAAA,EAEjB,YAAY,YAAY,2BAA2B,YAAY,IAAI;AACjE,SAAK,YAAY;AACjB,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAKQ,oBAA0B;AAChC,QAAI,KAAK,UAAU;AACjB,YAAM,IAAI,MAAM,mCAAmC;AAAA,IACrD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,aAA4B;AAChC,SAAK,kBAAkB;AACvB,QAAI,KAAK,cAAc,KAAM;AAE7B,SAAK,iBAAiB,YAA2B;AAC/C,UAAI;AAGF,aAAK,YAAY,MAAM,SAAS,sBAAsB,KAAK,WAAW;AAAA,UACpE,OAAO;AAAA,QACT,CAAC;AAAA,MACH,SAAS,OAAO;AAEd,aAAK,cAAc;AACnB,cAAM;AAAA,MACR;AAAA,IACF,GAAG;AACH,UAAM,KAAK;AAAA,EACb;AAAA,EAEA,MAAM,MAAM,MAAiC;AAC3C,SAAK,kBAAkB;AACvB,QAAI,KAAK,cAAc,MAAM;AAC3B,YAAM,KAAK,WAAW;AAAA,IACxB;AACA,QAAI,KAAK,cAAc,MAAM;AAC3B,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AACA,UAAM,SAAS,MAAM,KAAK,UAAU,MAAM;AAAA,MACxC,SAAS;AAAA,MACT,WAAW;AAAA,IACb,CAAC;AAED,UAAM,SAAS,MAAM,KAAK,OAAO,MAAM,CAAC,MAAM,OAAO,CAAC,CAAC;AAEvD,SAAK,gBAAgB,OAAO;AAC5B,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,WAAW,OAAsC;AACrD,SAAK,kBAAkB;AACvB,QAAI,KAAK,cAAc,MAAM;AAC3B,YAAM,KAAK,WAAW;AAAA,IACxB;AACA,QAAI,KAAK,cAAc,MAAM;AAC3B,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAEA,UAAM,UAAsB,CAAC;AAE7B,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,KAAK,WAAW;AACrD,YAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,KAAK,SAAS;AAG/C,YAAM,SAAS,MAAM,KAAK,UAAU,OAAO;AAAA,QACzC,SAAS;AAAA,QACT,WAAW;AAAA,MACb,CAAC;AAID,YAAM,MAAM,OAAO,KAAK,OAAO,KAAK,SAAS,CAAC,KAAK;AACnD,eAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,cAAM,QAAQ,IAAI;AAClB,cAAM,MAAM,QAAQ;AACpB,gBAAQ,KAAK,MAAM,KAAK,OAAO,KAAK,MAAM,OAAO,GAAG,GAAG,CAAC,MAAM,OAAO,CAAC,CAAC,CAAC;AAAA,MAC1E;AAGA,WAAK,gBAAgB;AAGrB,UAAI,IAAI,KAAK,YAAY,MAAM,QAAQ;AACrC,cAAM,IAAI,QAAQ,CAACE,aAAY,aAAaA,QAAO,CAAC;AAAA,MACtD;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,gBAAwB;AACtB,QAAI,KAAK,gBAAgB,MAAM;AAC7B,YAAM,IAAI,MAAM,iDAAiD;AAAA,IACnE;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,mBAAoC;AACxC,QAAI,KAAK,gBAAgB,MAAM;AAE7B,YAAM,KAAK,MAAM,iBAAiB;AAAA,IACpC;AACA,QAAI,KAAK,gBAAgB,MAAM;AAC7B,YAAM,IAAI,MAAM,0CAA0C;AAAA,IAC5D;AACA,WA
AO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,UAAyB;AAC7B,QAAI,KAAK,cAAc,MAAM;AAC3B,YAAM,KAAK,UAAU,QAAQ;AAC7B,WAAK,YAAY;AAAA,IACnB;AACA,SAAK,cAAc;AACnB,SAAK,cAAc;AACnB,SAAK,WAAW;AAAA,EAClB;AACF;;;AC5JA,YAAY,aAAa;;;ACAzB,SAAS,KAAAC,UAAS;AAOX,IAAM,qBAAqBA,GAAE,KAAK,CAAC,QAAQ,SAAS,KAAK,CAAC;AAE1D,IAAM,yBAAyBA,GACnC,OAAO;AAAA,EACN,MAAMA,GAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,KAAKA,GAAE,OAAO,EAAE,SAAS;AAAA,EACzB,MAAM;AAAA,EACN,SAASA,GAAE,OAAO;AAAA,EAClB,WAAWA,GAAE,OAAO;AAAA;AAAA,EACpB,UAAUA,GAAE,OAAO,EAAE,SAAS;AAAA,EAC9B,YAAYA,GAAE,OAAO,EAAE,SAAS;AAAA,EAChC,aAAaA,GAAE,OAAO,EAAE,SAAS;AACnC,CAAC,EACA,MAAM;;;ADEF,IAAM,aAAN,MAAiB;AAAA,EACd,aAAgC;AAAA,EACvB,SAA6B,oBAAI,IAAI;AAAA,EACrC;AAAA;AAAA,EAET,cAA6B;AAAA,EAErC,YAAY,SAAiB;AAC3B,SAAK,UAAU;AAAA,EACjB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,cAAc,YAA0B;AACtC,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,MAAM,WAAW,SAAiC;AAChD,QAAI,KAAK,gBAAgB,MAAM;AAC7B,YAAM,IAAI,MAAM,+DAA+D;AAAA,IACjF;AAEA,SAAK,eAAe,MAAc,gBAAQ,KAAK,OAAO;AAEtD,UAAM,YAAY,KAAK,aAAa,OAAO;AAC3C,UAAM,aAAa,MAAM,KAAK,WAAW,WAAW;AAEpD,QAAI,CAAC,WAAW,SAAS,SAAS,GAAG;AAEnC,YAAM,QAAQ,MAAM,KAAK,WAAW,YAAY,WAAW;AAAA,QACzD;AAAA,UACE,IAAI;AAAA,UACJ,SAAS;AAAA,UACT,QAAQ,IAAI,MAAM,KAAK,WAAW,EAAE,KAAK,CAAC;AAAA,UAC1C,UAAU;AAAA,QACZ;AAAA,MACF,CAAC;AAED,YAAM,MAAM,OAAO,iBAAiB;AACpC,WAAK,OAAO,IAAI,WAAW,KAAK;AAAA,IAClC,OAAO;AACL,YAAM,QAAQ,MAAM,KAAK,WAAW,UAAU,SAAS;AACvD,WAAK,OAAO,IAAI,WAAW,KAAK;AAAA,IAClC;AAAA,EACF;AAAA,EAEA,MAAM,aAAa,SAAkB,WAAsC;AACzE,UAAM,QAAQ,MAAM,KAAK,SAAS,OAAO;AACzC,UAAM,iBAAkC,UAAU,IAAI,CAAC,SAAS;AAAA,MAC9D,IAAI,IAAI;AAAA,MACR,SAAS,IAAI;AAAA,MACb,QAAQ,CAAC,GAAG,IAAI,MAAM;AAAA,MACtB,UAAU,KAAK,UAAU,IAAI,QAAQ;AAAA,IACvC,EAAE;AACF,UAAM,MAAM,IAAI,cAAc;AAAA,EAChC;AAAA,EAEA,MAAM,gBAAgB,SAAkB,aAA0C;AAChF,QAAI,YAAY,WAAW,GAAG;AAC5B;AAAA,IACF;AACA,UAAM,QAAQ,MAAM,KAAK,SAAS,OAAO;AACzC,UAAM,SAAS,YAAY,IAAI,CAAC,OAAO,IAAI,EAAE,GAAG,EAAE,KAAK,IAAI;AAC3D,UAAM,MAAM,OAAO,UAAU,MAAM,GAAG;AAAA,EACxC;AAAA,EAEA,MAAM,kBAAkB,SAAiC;AACvD,UAAM,QAAQ,MAAM,KAAK,SAAS,OAAO;AACzC,UAAM,MAAM,OAAO,gBAAgB;AAAA,EACrC;AAAA,EAEA,MAAM,OACJ,SACA,QACA,OAGA,YAGA;AACA,UAAM,QAAQ,MAAM,KAAK,SAAS,OAAO;AACzC,UAAM,QAAQ,MAAM,aAAa,MAAM,EAAE,MAAM,KAAK,EAAE,aAAa,QAAQ;AAG3E,UAAM,UAAW,MAAM,MAAM,QAAQ;AAIrC,WAAO,QAAQ,IAAI,CAAC,MAAM;AACxB,YAAM,WAAW,uBAAuB,MAAM,KAAK,MAAM,EAAE,QAAQ,CAAC;AACpE,aAAO;AAAA,QACL,IAAI,iBAAiB,EAAE,EAAE;AAAA,QACzB,SAAS,EAAE;AAAA,QACX,OAAO,IAAI,EAAE;AAAA;AAAA;AAAA,QAGb;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,eAAe,SAAiC;AACpD,UAAM,QAAQ,MAAM,KAAK,SAAS,OAAO;AACzC,UAAM,MAAM,YAAY,WAAW;AAAA,MACjC,QAAgB,cAAM,IAAI;AAAA,IAC5B,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,eACJ,SACA,OACA,OAGA;AACA,UAAM,QAAQ,MAAM,KAAK,SAAS,OAAO;AAGzC,UAAM,UAAW,MAAM,MAAM,OAAO,OAAO,KAAK,EAAE,MAAM,KAAK,EAAE,QAAQ;AAOvE,WAAO,QAAQ,IAAI,CAAC,MAAM;AACxB,YAAM,WAAW,uBAAuB,MAAM,KAAK,MAAM,EAAE,QAAQ,CAAC;AACpE,aAAO;AAAA,QACL,IAAI,iBAAiB,EAAE,EAAE;AAAA,QACzB,SAAS,EAAE;AAAA,QACX,OAAO,EAAE;AAAA;AAAA;AAAA,QAGT;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,YAAY,SAAiC;AACjD,UAAM,YAAY,KAAK,aAAa,OAAO;AAE3C,SAAK,eAAe,MAAc,gBAAQ,KAAK,OAAO;AACtD,UAAM,aAAa,MAAM,KAAK,WAAW,WAAW;AACpD,QAAI,WAAW,SAAS,SAAS,GAAG;AAClC,YAAM,KAAK,WAAW,UAAU,SAAS;AACzC,WAAK,OAAO,OAAO,SAAS;AAAA,IAC9B;AAAA,EACF;AAAA,EAEA,QAAc;AACZ,SAAK,OAAO,MAAM;AAClB,QAAI,KAAK,eAAe,MAAM;AAC5B,WAAK,WAAW,MAAM;AACtB,WAAK,aAAa;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,aAA4B;AAC1B,SAAK,MAAM;AACX,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA,EAEQ,aAAa,SAA0B;AAC7C,WAAO,aAAa,OAAO;AAAA,EAC7B;AAAA,EAEA,MAAc,SAAS,SAAkC;AACvD,UAAM,YAAY,KAAK,aAAa,OAAO;AAC3C,QAAI,QAAQ,KAAK,OAAO,IAAI,SAAS;AACrC,QAAI,UAAU,QAAW;AACvB,YAAM,KAAK,WAAW,OAAO;AAC7B,cAAQ,KAAK,OAAO
,IAAI,SAAS;AAAA,IACnC;AACA,QAAI,UAAU,QAAW;AACvB,YAAM,IAAI,MAAM,8BAA8B,OAAO,EAAE;AAAA,IACzD;AACA,WAAO;AAAA,EACT;AACF;;;AElMA,IAAMC,UAAS,aAAa,UAAU;AAkC/B,IAAM,uBAAN,MAAuD;AAAA;AAAA,EAEnD;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGQ;AAAA,EACA;AAAA;AAAA;AAAA,EAIT,YAAoC;AAAA,EACpC,cAAsC;AAAA,EACtC,aAAsC;AAAA,EACtC,UAAgC;AAAA,EAChC,SAA8B;AAAA,EAEtC,YACE,QACA,WACA,SACA,OACA,OACA,cACA;AACA,SAAK,SAAS;AACd,SAAK,YAAY;AACjB,SAAK,UAAU;AACf,SAAK,QAAQ;AACb,SAAK,QAAQ;AACb,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,IAAI,aAA8B;AAChC,QAAI,KAAK,gBAAgB,MAAM;AAC7B,MAAAA,QAAO,MAAM,mCAAmC;AAChD,WAAK,cAAc,IAAI;AAAA,QACrB,KAAK,UAAU,UAAU;AAAA,QACzB,KAAK,UAAU,UAAU;AAAA,MAC3B;AAAA,IACF;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,YAA8B;AAChC,QAAI,KAAK,eAAe,MAAM;AAC5B,MAAAA,QAAO,MAAM,oCAAoC;AACjD,WAAK,aAAa,IAAI,iBAAiB,KAAK,SAAS,KAAK,YAAY;AAAA,IACxE;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,SAAwB;AAC1B,QAAI,KAAK,YAAY,MAAM;AACzB,MAAAA,QAAO,MAAM,iCAAiC;AAC9C,WAAK,UAAU,IAAI;AAAA,QACjB,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK,UAAU;AAAA,MACjB;AAAA,IACF;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,QAAsB;AACxB,QAAI,KAAK,WAAW,MAAM;AACxB,MAAAA,QAAO,MAAM,gCAAgC;AAC7C,WAAK,SAAS,IAAI,aAAa,KAAK,OAAO,KAAK,YAAY;AAAA,QAC1D,kBAAkB,KAAK;AAAA,QACvB,iBAAiB,KAAK;AAAA,QACtB,WAAW,KAAK,UAAU,SAAS;AAAA,QACnC,cAAc,KAAK,UAAU,SAAS;AAAA,QACtC,aAAa,KAAK,UAAU,SAAS;AAAA,QACrC,gBAAgB,KAAK,UAAU,SAAS;AAAA,MAC1C,CAAC;AAAA,IACH;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,WAA4B;AAC9B,QAAI,KAAK,cAAc,MAAM;AAC3B,MAAAA,QAAO,MAAM,mCAAmC;AAChD,WAAK,YAAY,IAAI,gBAAgB,KAAK,OAAO;AAAA,IACnD;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,gBAAyB;AAC3B,WAAO,KAAK,gBAAgB;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,YAAqB;AACvB,WAAO,KAAK,YAAY;AAAA,EAC1B;AACF;AAUA,eAAsB,mBACpB,YACA,SACA,aAC+B;AAC/B,EAAAA,QAAO,KAAK,EAAE,YAAY,SAAS,YAAY,GAAG,4BAA4B;AAC9E,QAAM,YAAY,KAAK,IAAI;AAE3B,QAAM,SAAS,IAAI,cAAc,YAAY,SAAS,WAAW;AACjE,QAAM,YAAY,MAAM,OAAO,KAAK;AACpC,QAAM,kBAAkB,OAAO,eAAe;AAK9C,QAAM,eAAe,IAAI,aAAa;AACtC,QAAM,aAAa,MAAM;AAGzB,QAAM,QAAQ,IAAI,WAAW,eAAe;AAG5C,QAAM,sBAAsB,OAAO,mBAAmB;AACtD,QAAM,oBAAoB,IAAI,uBAAuB,mBAAmB;AACxE,QAAM,mBAAmB,IAAI,iBAAiB,mBAAmB;AACjE,QAAM,eAAoC;AAAA,IACxC;AAAA,IACA;AAAA,IACA,aAAa;AAAA,EACf;AAEA,QAAM,QAAQ,IAAI,aAAa,iBAAiB,YAAY;AAC5D,QAAM,MAAM,WAAW;AAEvB,QAAM,aAAa,KAAK,IAAI,IAAI;AAChC,EAAAA,QAAO;AAAA,IACL,EAAE,SAAS,iBAAiB,aAAa,qBAAqB,WAAW;AAAA,IACzE;AAAA,EACF;AAEA,SAAO,IAAI,qBAAqB,QAAQ,WAAW,iBAAiB,OAAO,OAAO,YAAY;AAChG;AAQA,eAAsB,eACpB,YACA,SACA,aAC2B;AAC3B,EAAAA,QAAO,KAAK,EAAE,YAAY,SAAS,YAAY,GAAG,uBAAuB;AAEzE,QAAM,SAAS,IAAI,cAAc,YAAY,SAAS,WAAW;AACjE,QAAM,YAAY,MAAM,OAAO,KAAK;AACpC,QAAM,kBAAkB,OAAO,eAAe;AAK9C,QAAM,eAAe,IAAI,aAAa;AACtC,QAAM,aAAa,MAAM;AAGzB,QAAM,QAAQ,IAAI,WAAW,eAAe;AAC5C,QAAM,aAAa,IAAI,gBAAgB,UAAU,UAAU,OAAO,UAAU,UAAU,SAAS;AAE/F,QAAM,WAAW,WAAW;AAG5B,QAAM,sBAAsB,OAAO,mBAAmB;AACtD,QAAM,oBAAoB,IAAI,uBAAuB,mBAAmB;AACxE,QAAM,mBAAmB,IAAI,iBAAiB,mBAAmB;AACjE,QAAM,eAAoC;AAAA,IACxC;AAAA,IACA;AAAA,IACA,aAAa;AAAA,EACf;AAEA,QAAM,QAAQ,IAAI,aAAa,iBAAiB,YAAY;AAC5D,QAAM,MAAM,WAAW;AAEvB,QAAM,YAAY,IAAI,iBAAiB,iBAAiB,YAAY;AACpE,QAAM,WAAW,IAAI,gBAAgB,eAAe;AACpD,QAAM,SAAS,IAAI,cAAc,OAAO,YAAY,WAAW,UAAU,MAAM;AAC/E,QAAM,QAAQ,IAAI,aAAa,OAAO,YAAY;AAAA,IAChD,kBAAkB;AAAA,IAClB,iBAAiB;AAAA,IACjB,WAAW,UAAU,SAAS;AAAA,IAC9B,cAAc,UAAU,SAAS;AAAA,IACjC,aAAa,UAAU,SAAS;AAAA,IAChC,gBAAgB,UAAU,SAAS;AAAA,EACrC,CAAC;AAED,EAAAA,QAAO;AAAA,IACL,EAAE,SAAS,iBAAiB,aAAa,oBAAoB;AAAA,IAC7D;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AASA,eAAsB,gBAAgB,UA
A2C;AAC/E,EAAAA,QAAO,KAAK,wBAAwB;AACpC,QAAM,SAAkB,CAAC;AASzB,QAAM,kBAAkB,oBAAoB;AAC5C,QAAM,sBAAsB,CAAC,mBAAmB,SAAS;AAEzD,MAAI,qBAAqB;AACvB,aAAS,OAAO,QAAQ;AAAA,EAC1B,OAAO;AACL,IAAAA,QAAO,MAAM,2CAA2C;AAAA,EAC1D;AAGA,MAAI;AACF,UAAM,SAAS,aAAa,KAAK;AAAA,EACnC,SAAS,GAAG;AACV,UAAM,QAAQ,aAAa,QAAQ,IAAI,IAAI,MAAM,OAAO,CAAC,CAAC;AAC1D,IAAAA,QAAO,MAAM,EAAE,MAAM,GAAG,8BAA8B;AACtD,WAAO,KAAK,KAAK;AAAA,EACnB;AAGA,QAAM,0BAA0B,CAAC,mBAAmB,SAAS;AAE7D,MAAI,yBAAyB;AAC3B,QAAI;AACF,YAAM,SAAS,WAAW,QAAQ;AAAA,IACpC,SAAS,GAAG;AACV,YAAM,QAAQ,aAAa,QAAQ,IAAI,IAAI,MAAM,OAAO,CAAC,CAAC;AAC1D,MAAAA,QAAO,MAAM,EAAE,MAAM,GAAG,iCAAiC;AACzD,aAAO,KAAK,KAAK;AAAA,IACnB;AAAA,EACF,OAAO;AACL,IAAAA,QAAO,MAAM,gDAAgD;AAAA,EAC/D;AAGA,MAAI;AACF,UAAM,SAAS,MAAM,WAAW;AAAA,EAClC,SAAS,GAAG;AACV,UAAM,QAAQ,aAAa,QAAQ,IAAI,IAAI,MAAM,OAAO,CAAC,CAAC;AAC1D,IAAAA,QAAO,MAAM,EAAE,MAAM,GAAG,0BAA0B;AAClD,WAAO,KAAK,KAAK;AAAA,EACnB;AAEA,QAAM,eAAe;AAGrB,MAAI,OAAO,WAAW,KAAK,OAAO,CAAC,MAAM,QAAW;AAClD,UAAM,IAAI,MAAM,4BAA4B,OAAO,CAAC,EAAE,OAAO,IAAI,EAAE,OAAO,OAAO,CAAC,EAAE,CAAC;AAAA,EACvF,WAAW,OAAO,SAAS,GAAG;AAC5B,UAAM,IAAI,eAAe,QAAQ,yCAAyC;AAAA,EAC5E;AACF;","names":["existsSync","join","path","join","existsSync","resolve","mkdirSync","existsSync","join","join","existsSync","mkdirSync","writeFileSync","mkdirSync","dirname","writeFile","mkdir","join","dirname","path","path","path","path","path","mkdir","dirname","writeFile","join","readFile","join","path","join","readFile","readFile","writeFile","access","join","fileExists","path","createHash","readFile","join","createHash","readFile","path","createDocumentId","path","readFile","createHash","join","readFile","access","mkdir","join","z","join","mkdir","readFile","path","access","firstLine","fullContent","signature","logger","timeMs","t","path","readFile","access","resolve","isAbsolute","join","z","fileExists","path","access","join","readFile","isAbsolute","resolve","randomUUID","readFile","mkdir","stat","access","join","resolve","mkdir","mkdir","resolve","fileExists","path","access","mkdir","randomUUID","resolve","stat","join","readFile","spawn","randomUUID","existsSync","path","z","logger","path","existsSync","spawn","err","randomUUID","resolve","homedir","join","resolve","z","logger"]}