@liendev/lien 0.12.0 → 0.14.0

This diff shows the contents of publicly released package versions as they appear in their respective public registries; it is provided for informational purposes only.
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/constants.ts","../src/config/schema.ts","../src/config/merge.ts","../src/config/migration.ts","../src/errors/codes.ts","../src/errors/index.ts","../src/config/service.ts","../src/git/utils.ts","../src/vectordb/version.ts","../src/indexer/scanner.ts","../src/indexer/symbol-extractor.ts","../src/indexer/chunker.ts","../src/embeddings/local.ts","../src/embeddings/types.ts","../src/vectordb/relevance.ts","../src/vectordb/intent-classifier.ts","../src/vectordb/lancedb.ts","../src/utils/version.ts","../src/indexer/manifest.ts","../src/git/tracker.ts","../src/indexer/change-detector.ts","../src/indexer/incremental.ts","../src/utils/loading-messages.ts","../src/indexer/index.ts","../src/cli/index.ts","../src/cli/init.ts","../src/utils/banner.ts","../src/frameworks/detector-service.ts","../src/frameworks/types.ts","../src/frameworks/nodejs/detector.ts","../src/frameworks/nodejs/config.ts","../src/frameworks/laravel/detector.ts","../src/frameworks/laravel/config.ts","../src/frameworks/shopify/detector.ts","../src/frameworks/shopify/config.ts","../src/frameworks/registry.ts","../src/cli/status.ts","../src/cli/index-cmd.ts","../src/cli/serve.ts","../src/mcp/server.ts","../src/mcp/utils/zod-to-json-schema.ts","../src/mcp/schemas/search.schema.ts","../src/mcp/schemas/similarity.schema.ts","../src/mcp/schemas/file.schema.ts","../src/mcp/schemas/symbols.schema.ts","../src/mcp/tools.ts","../src/watcher/index.ts","../src/mcp/utils/tool-wrapper.ts","../src/index.ts"],"sourcesContent":["/**\n * Centralized constants for the Lien project.\n * This file contains all magic numbers and configuration defaults\n * to ensure consistency across the codebase.\n */\n\n// Chunking settings\nexport const DEFAULT_CHUNK_SIZE = 75;\nexport const DEFAULT_CHUNK_OVERLAP = 10;\n\n// Concurrency and batching\nexport const DEFAULT_CONCURRENCY = 4;\nexport const DEFAULT_EMBEDDING_BATCH_SIZE = 50;\n\n// Micro-batching for event loop yielding\n// Process N embeddings at a time, then yield to event loop\n// This prevents UI freezing during CPU-intensive embedding generation\nexport const EMBEDDING_MICRO_BATCH_SIZE = 10;\n\n// Vector database batch size limits\n// Maximum batch size before splitting (prevents LanceDB errors on very large batches)\nexport const VECTOR_DB_MAX_BATCH_SIZE = 1000;\n// Minimum batch size for retry logic (stop splitting below this size)\nexport const VECTOR_DB_MIN_BATCH_SIZE = 10;\n\n// Embedding model configuration\nexport const EMBEDDING_DIMENSIONS = 384; // all-MiniLM-L6-v2\nexport const DEFAULT_EMBEDDING_MODEL = 'Xenova/all-MiniLM-L6-v2';\n\n// MCP server configuration\nexport const DEFAULT_PORT = 7133; // LIEN in leetspeak\nexport const VERSION_CHECK_INTERVAL_MS = 2000;\n\n// Git detection\nexport const DEFAULT_GIT_POLL_INTERVAL_MS = 10000; // Check every 10 seconds\n\n// File watching\nexport const DEFAULT_DEBOUNCE_MS = 1000;\n\n// Configuration version\nexport const CURRENT_CONFIG_VERSION = '0.3.0';\n\n// Index format version - bump on ANY breaking change to indexing\n// Examples that require version bump:\n// - Chunking algorithm changes\n// - Embedding model changes (e.g., switch from all-MiniLM-L6-v2 to another model)\n// - Vector DB schema changes (new metadata fields)\n// - Metadata structure changes\nexport const INDEX_FORMAT_VERSION = 1;\n\n","import {\n DEFAULT_CHUNK_SIZE,\n DEFAULT_CHUNK_OVERLAP,\n DEFAULT_CONCURRENCY,\n DEFAULT_EMBEDDING_BATCH_SIZE,\n DEFAULT_PORT,\n DEFAULT_GIT_POLL_INTERVAL_MS,\n DEFAULT_DEBOUNCE_MS,\n CURRENT_CONFIG_VERSION,\n} from 
'../constants.js';\n\n/**\n * Framework-specific configuration\n */\nexport interface FrameworkConfig {\n include: string[]; // File patterns relative to framework path\n exclude: string[]; // Exclude patterns relative to framework path\n}\n\n/**\n * Framework instance in a monorepo\n */\nexport interface FrameworkInstance {\n name: string; // 'nodejs', 'laravel'\n path: string; // '.', 'cognito-backend', 'packages/cli'\n enabled: boolean;\n config: FrameworkConfig;\n}\n\n/**\n * Main Lien configuration supporting monorepo setups\n */\nexport interface LienConfig {\n version: string;\n core: {\n chunkSize: number;\n chunkOverlap: number;\n concurrency: number;\n embeddingBatchSize: number;\n };\n mcp: {\n port: number;\n transport: 'stdio' | 'socket';\n autoIndexOnFirstRun: boolean;\n };\n gitDetection: {\n enabled: boolean;\n pollIntervalMs: number;\n };\n fileWatching: {\n enabled: boolean;\n debounceMs: number;\n };\n frameworks: FrameworkInstance[];\n}\n\n/**\n * Legacy config format for backwards compatibility\n * @deprecated Use LienConfig with frameworks array instead\n */\nexport interface LegacyLienConfig {\n version: string;\n indexing: {\n exclude: string[];\n include: string[];\n chunkSize: number;\n chunkOverlap: number;\n concurrency: number;\n embeddingBatchSize: number;\n };\n mcp: {\n port: number;\n transport: 'stdio' | 'socket';\n autoIndexOnFirstRun: boolean;\n };\n gitDetection: {\n enabled: boolean;\n pollIntervalMs: number;\n };\n fileWatching: {\n enabled: boolean;\n debounceMs: number;\n };\n}\n\n/**\n * Type guard to check if a config is the legacy format\n * @param config - Config object to check\n * @returns True if config is LegacyLienConfig\n */\nexport function isLegacyConfig(\n config: LienConfig | LegacyLienConfig\n): config is LegacyLienConfig {\n return 'indexing' in config && !('frameworks' in config);\n}\n\n/**\n * Type guard to check if a config is the modern format\n * @param config - Config object to check\n * @returns True if config is LienConfig\n */\nexport function isModernConfig(\n config: LienConfig | LegacyLienConfig\n): config is LienConfig {\n return 'frameworks' in config;\n}\n\n/**\n * Default configuration with empty frameworks array\n * Frameworks should be detected and added via lien init\n */\nexport const defaultConfig: LienConfig = {\n version: CURRENT_CONFIG_VERSION,\n core: {\n chunkSize: DEFAULT_CHUNK_SIZE,\n chunkOverlap: DEFAULT_CHUNK_OVERLAP,\n concurrency: DEFAULT_CONCURRENCY,\n embeddingBatchSize: DEFAULT_EMBEDDING_BATCH_SIZE,\n },\n mcp: {\n port: DEFAULT_PORT,\n transport: 'stdio',\n autoIndexOnFirstRun: true,\n },\n gitDetection: {\n enabled: true,\n pollIntervalMs: DEFAULT_GIT_POLL_INTERVAL_MS,\n },\n fileWatching: {\n enabled: true, // Enabled by default (fast with incremental indexing!)\n debounceMs: DEFAULT_DEBOUNCE_MS,\n },\n frameworks: [], // Will be populated by lien init via framework detection\n};\n\n","import { LienConfig } from './schema.js';\n\n/**\n * Deep merges user config with defaults, preserving user customizations.\n * User values always take precedence over defaults.\n * \n * @param defaults - The default configuration\n * @param user - The user's partial configuration\n * @returns Complete merged configuration\n */\nexport function deepMergeConfig(defaults: LienConfig, user: Partial<LienConfig>): LienConfig {\n return {\n version: user.version ?? 
defaults.version,\n core: {\n ...defaults.core,\n ...user.core,\n },\n mcp: {\n ...defaults.mcp,\n ...user.mcp,\n },\n gitDetection: {\n ...defaults.gitDetection,\n ...user.gitDetection,\n },\n fileWatching: {\n ...defaults.fileWatching,\n ...user.fileWatching,\n },\n frameworks: user.frameworks ?? defaults.frameworks,\n };\n}\n\n/**\n * Detects new fields that exist in the 'after' config but not in the 'before' config.\n * Returns a list of human-readable field paths.\n * \n * @param before - The existing config (potentially missing fields)\n * @param after - The complete config with all fields\n * @returns Array of new field paths (e.g., [\"mcp.autoIndexOnFirstRun\", \"gitDetection\"])\n */\nexport function detectNewFields(before: Record<string, any>, after: Record<string, any>): string[] {\n const newFields: string[] = [];\n\n // Check top-level sections\n for (const key of Object.keys(after)) {\n if (!(key in before)) {\n newFields.push(key);\n continue;\n }\n\n // Check nested fields for object sections\n if (typeof after[key] === 'object' && after[key] !== null && !Array.isArray(after[key])) {\n const beforeSection = (before[key] as Record<string, any>) || {};\n const afterSection = after[key] as Record<string, any>;\n\n for (const nestedKey of Object.keys(afterSection)) {\n if (!(nestedKey in beforeSection)) {\n newFields.push(`${key}.${nestedKey}`);\n }\n }\n }\n }\n\n return newFields;\n}\n\n","import fs from 'fs/promises';\nimport path from 'path';\nimport { LienConfig, LegacyLienConfig, FrameworkInstance, defaultConfig } from './schema.js';\n\n/**\n * Checks if a config object needs migration from v0.2.0 to v0.3.0\n */\nexport function needsMigration(config: any): boolean {\n // Check if config uses old structure:\n // - Has 'indexing' field instead of 'core' and 'frameworks'\n // - Or has no 'frameworks' field at all\n // - Or version is explicitly set to something < 0.3.0\n if (!config) {\n return false;\n }\n\n // If it has frameworks array, it's already in new format\n if (config.frameworks !== undefined) {\n return false;\n }\n\n // If it has 'indexing' field, it's the old format\n if (config.indexing !== undefined) {\n return true;\n }\n\n // If version is explicitly < 0.3.0\n if (config.version && config.version.startsWith('0.2')) {\n return true;\n }\n\n return false;\n}\n\n/**\n * Migrates a v0.2.0 config to v0.3.0 format\n */\nexport function migrateConfig(oldConfig: Partial<LegacyLienConfig>): LienConfig {\n // Start with default config structure\n const newConfig: LienConfig = {\n version: '0.3.0',\n core: {\n chunkSize: oldConfig.indexing?.chunkSize ?? defaultConfig.core.chunkSize,\n chunkOverlap: oldConfig.indexing?.chunkOverlap ?? defaultConfig.core.chunkOverlap,\n concurrency: oldConfig.indexing?.concurrency ?? defaultConfig.core.concurrency,\n embeddingBatchSize: oldConfig.indexing?.embeddingBatchSize ?? defaultConfig.core.embeddingBatchSize,\n },\n mcp: {\n port: oldConfig.mcp?.port ?? defaultConfig.mcp.port,\n transport: oldConfig.mcp?.transport ?? defaultConfig.mcp.transport,\n autoIndexOnFirstRun: oldConfig.mcp?.autoIndexOnFirstRun ?? defaultConfig.mcp.autoIndexOnFirstRun,\n },\n gitDetection: {\n enabled: oldConfig.gitDetection?.enabled ?? defaultConfig.gitDetection.enabled,\n pollIntervalMs: oldConfig.gitDetection?.pollIntervalMs ?? defaultConfig.gitDetection.pollIntervalMs,\n },\n fileWatching: {\n enabled: oldConfig.fileWatching?.enabled ?? defaultConfig.fileWatching.enabled,\n debounceMs: oldConfig.fileWatching?.debounceMs ?? 
defaultConfig.fileWatching.debounceMs,\n },\n frameworks: [],\n };\n\n // Convert old indexing config to a single \"generic\" framework\n if (oldConfig.indexing) {\n const genericFramework: FrameworkInstance = {\n name: 'generic',\n path: '.',\n enabled: true,\n config: {\n include: oldConfig.indexing.include ?? ['**/*.{ts,tsx,js,jsx,py,go,rs,java,c,cpp,cs}'],\n exclude: oldConfig.indexing.exclude ?? [\n '**/node_modules/**',\n '**/dist/**',\n '**/build/**',\n '**/.git/**',\n '**/coverage/**',\n '**/.next/**',\n '**/.nuxt/**',\n '**/vendor/**',\n ],\n },\n };\n\n newConfig.frameworks.push(genericFramework);\n } else {\n // No indexing config present, use defaults for generic framework\n const genericFramework: FrameworkInstance = {\n name: 'generic',\n path: '.',\n enabled: true,\n config: {\n include: ['**/*.{ts,tsx,js,jsx,py,go,rs,java,c,cpp,cs}'],\n exclude: [\n '**/node_modules/**',\n '**/dist/**',\n '**/build/**',\n '**/.git/**',\n '**/coverage/**',\n '**/.next/**',\n '**/.nuxt/**',\n '**/vendor/**',\n ],\n },\n };\n\n newConfig.frameworks.push(genericFramework);\n }\n\n return newConfig;\n}\n\n/**\n * Migrates config file and creates backup\n */\nexport async function migrateConfigFile(rootDir: string = process.cwd()): Promise<{\n migrated: boolean;\n backupPath?: string;\n config: LienConfig;\n}> {\n const configPath = path.join(rootDir, '.lien.config.json');\n\n try {\n // Read existing config\n const configContent = await fs.readFile(configPath, 'utf-8');\n const oldConfig = JSON.parse(configContent);\n\n // Check if migration is needed\n if (!needsMigration(oldConfig)) {\n return {\n migrated: false,\n config: oldConfig as LienConfig,\n };\n }\n\n // Perform migration\n const newConfig = migrateConfig(oldConfig);\n\n // Create backup\n const backupPath = `${configPath}.v0.2.0.backup`;\n await fs.copyFile(configPath, backupPath);\n\n // Write migrated config\n await fs.writeFile(configPath, JSON.stringify(newConfig, null, 2) + '\\n', 'utf-8');\n\n return {\n migrated: true,\n backupPath,\n config: newConfig,\n };\n } catch (error) {\n // If config doesn't exist, return default\n if ((error as NodeJS.ErrnoException).code === 'ENOENT') {\n return {\n migrated: false,\n config: defaultConfig,\n };\n }\n throw error;\n }\n}\n\n","/**\n * Error codes for all Lien-specific errors.\n * Used to identify error types programmatically.\n */\nexport enum LienErrorCode {\n // Configuration\n CONFIG_NOT_FOUND = 'CONFIG_NOT_FOUND',\n CONFIG_INVALID = 'CONFIG_INVALID',\n \n // Index\n INDEX_NOT_FOUND = 'INDEX_NOT_FOUND',\n INDEX_CORRUPTED = 'INDEX_CORRUPTED',\n \n // Embeddings\n EMBEDDING_MODEL_FAILED = 'EMBEDDING_MODEL_FAILED',\n EMBEDDING_GENERATION_FAILED = 'EMBEDDING_GENERATION_FAILED',\n \n // File System\n FILE_NOT_FOUND = 'FILE_NOT_FOUND',\n FILE_NOT_READABLE = 'FILE_NOT_READABLE',\n INVALID_PATH = 'INVALID_PATH',\n \n // Tool Input\n INVALID_INPUT = 'INVALID_INPUT',\n \n // System\n INTERNAL_ERROR = 'INTERNAL_ERROR',\n}\n\n","import { LienErrorCode } from './codes.js';\n\n// Re-export for consumers\nexport { LienErrorCode } from './codes.js';\n\n/**\n * Severity levels for errors\n */\nexport type ErrorSeverity = 'low' | 'medium' | 'high' | 'critical';\n\n/**\n * Base error class for all Lien-specific errors\n */\nexport class LienError extends Error {\n constructor(\n message: string,\n public readonly code: LienErrorCode,\n public readonly context?: Record<string, unknown>,\n public readonly severity: ErrorSeverity = 'medium',\n public readonly recoverable: boolean = true,\n public 
readonly retryable: boolean = false\n ) {\n super(message);\n this.name = 'LienError';\n \n // Maintains proper stack trace for where our error was thrown (only available on V8)\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n \n /**\n * Serialize error to JSON for MCP responses\n */\n toJSON() {\n return {\n error: this.message,\n code: this.code,\n severity: this.severity,\n recoverable: this.recoverable,\n context: this.context,\n };\n }\n \n /**\n * Check if this error is retryable\n */\n isRetryable(): boolean {\n return this.retryable;\n }\n \n /**\n * Check if this error is recoverable\n */\n isRecoverable(): boolean {\n return this.recoverable;\n }\n}\n\n/**\n * Configuration-related errors (loading, parsing, migration)\n */\nexport class ConfigError extends LienError {\n constructor(message: string, context?: Record<string, unknown>) {\n super(message, LienErrorCode.CONFIG_INVALID, context, 'medium', true, false);\n this.name = 'ConfigError';\n }\n}\n\n/**\n * Indexing-related errors (file processing, chunking)\n */\nexport class IndexingError extends LienError {\n constructor(\n message: string,\n public readonly file?: string,\n context?: Record<string, unknown>\n ) {\n super(message, LienErrorCode.INTERNAL_ERROR, { ...context, file }, 'medium', true, false);\n this.name = 'IndexingError';\n }\n}\n\n/**\n * Embedding generation errors\n */\nexport class EmbeddingError extends LienError {\n constructor(message: string, context?: Record<string, unknown>) {\n super(message, LienErrorCode.EMBEDDING_GENERATION_FAILED, context, 'high', true, true);\n this.name = 'EmbeddingError';\n }\n}\n\n/**\n * Vector database errors (connection, query, storage)\n */\nexport class DatabaseError extends LienError {\n constructor(message: string, context?: Record<string, unknown>) {\n super(message, LienErrorCode.INTERNAL_ERROR, context, 'high', true, true);\n this.name = 'DatabaseError';\n }\n}\n\n/**\n * Helper function to wrap unknown errors with context\n * @param error - Unknown error object to wrap\n * @param context - Context message describing what operation failed\n * @param additionalContext - Optional additional context data\n * @returns LienError with proper message and context\n */\nexport function wrapError(\n error: unknown,\n context: string,\n additionalContext?: Record<string, unknown>\n): LienError {\n const message = error instanceof Error ? error.message : String(error);\n const stack = error instanceof Error ? 
error.stack : undefined;\n \n const wrappedError = new LienError(\n `${context}: ${message}`,\n LienErrorCode.INTERNAL_ERROR,\n additionalContext\n );\n \n // Preserve original stack trace if available\n if (stack) {\n wrappedError.stack = `${wrappedError.stack}\\n\\nCaused by:\\n${stack}`;\n }\n \n return wrappedError;\n}\n\n/**\n * Type guard to check if an error is a LienError\n */\nexport function isLienError(error: unknown): error is LienError {\n return error instanceof LienError;\n}\n\n/**\n * Extract error message from unknown error type\n * @param error - Unknown error object\n * @returns Error message string\n */\nexport function getErrorMessage(error: unknown): string {\n if (error instanceof Error) {\n return error.message;\n }\n return String(error);\n}\n\n/**\n * Extract stack trace from unknown error type\n * @param error - Unknown error object\n * @returns Stack trace string or undefined\n */\nexport function getErrorStack(error: unknown): string | undefined {\n if (error instanceof Error) {\n return error.stack;\n }\n return undefined;\n}\n\n","import fs from 'fs/promises';\nimport path from 'path';\nimport { LienConfig, LegacyLienConfig, defaultConfig, isLegacyConfig, isModernConfig } from './schema.js';\nimport { deepMergeConfig } from './merge.js';\nimport { needsMigration as checkNeedsMigration, migrateConfig as performMigration } from './migration.js';\nimport { ConfigError, wrapError } from '../errors/index.js';\n\n/**\n * Validation result with errors and warnings\n */\nexport interface ValidationResult {\n valid: boolean;\n errors: string[];\n warnings: string[];\n}\n\n/**\n * Migration result with status and config\n */\nexport interface MigrationResult {\n migrated: boolean;\n backupPath?: string;\n config: LienConfig;\n}\n\n/**\n * ConfigService encapsulates all configuration operations including\n * loading, saving, migration, and validation.\n * \n * This service provides a single point of truth for config management\n * with comprehensive error handling and validation.\n */\nexport class ConfigService {\n private static readonly CONFIG_FILENAME = '.lien.config.json';\n \n /**\n * Load configuration from the specified directory.\n * Automatically handles migration if needed.\n * \n * @param rootDir - Root directory containing the config file\n * @returns Loaded and validated configuration\n * @throws {ConfigError} If config is invalid or cannot be loaded\n */\n async load(rootDir: string = process.cwd()): Promise<LienConfig> {\n const configPath = this.getConfigPath(rootDir);\n \n try {\n const configContent = await fs.readFile(configPath, 'utf-8');\n const userConfig = JSON.parse(configContent);\n \n // Check if migration is needed\n if (this.needsMigration(userConfig)) {\n console.log('🔄 Migrating config from v0.2.0 to v0.3.0...');\n \n const result = await this.migrate(rootDir);\n \n if (result.migrated && result.backupPath) {\n const backupFilename = path.basename(result.backupPath);\n console.log(`✅ Migration complete! 
Backup saved as ${backupFilename}`);\n console.log('📝 Your config now uses the framework-based structure.');\n }\n \n return result.config;\n }\n \n // Merge with defaults first\n const mergedConfig = deepMergeConfig(defaultConfig, userConfig as Partial<LienConfig>);\n \n // Then validate the merged config\n const validation = this.validate(mergedConfig);\n if (!validation.valid) {\n throw new ConfigError(\n `Invalid configuration:\\n${validation.errors.join('\\n')}`,\n { errors: validation.errors, warnings: validation.warnings }\n );\n }\n \n // Show warnings if any\n if (validation.warnings.length > 0) {\n console.warn('⚠️ Configuration warnings:');\n validation.warnings.forEach(warning => console.warn(` ${warning}`));\n }\n \n return mergedConfig;\n } catch (error) {\n if ((error as NodeJS.ErrnoException).code === 'ENOENT') {\n // Config doesn't exist, return defaults\n return defaultConfig;\n }\n \n if (error instanceof ConfigError) {\n throw error;\n }\n \n if (error instanceof SyntaxError) {\n throw new ConfigError(\n 'Failed to parse config file: Invalid JSON syntax',\n { path: configPath, originalError: error.message }\n );\n }\n \n throw wrapError(error, 'Failed to load configuration', { path: configPath });\n }\n }\n \n /**\n * Save configuration to the specified directory.\n * Validates the config before saving.\n * \n * @param rootDir - Root directory to save the config file\n * @param config - Configuration to save\n * @throws {ConfigError} If config is invalid or cannot be saved\n */\n async save(rootDir: string, config: LienConfig): Promise<void> {\n const configPath = this.getConfigPath(rootDir);\n \n // Validate before saving\n const validation = this.validate(config);\n if (!validation.valid) {\n throw new ConfigError(\n `Cannot save invalid configuration:\\n${validation.errors.join('\\n')}`,\n { errors: validation.errors }\n );\n }\n \n try {\n const configJson = JSON.stringify(config, null, 2) + '\\n';\n await fs.writeFile(configPath, configJson, 'utf-8');\n } catch (error) {\n throw wrapError(error, 'Failed to save configuration', { path: configPath });\n }\n }\n \n /**\n * Check if a configuration file exists in the specified directory.\n * \n * @param rootDir - Root directory to check\n * @returns True if config file exists\n */\n async exists(rootDir: string = process.cwd()): Promise<boolean> {\n const configPath = this.getConfigPath(rootDir);\n try {\n await fs.access(configPath);\n return true;\n } catch {\n return false;\n }\n }\n \n /**\n * Migrate configuration from v0.2.0 to v0.3.0 format.\n * Creates a backup of the original config file.\n * \n * @param rootDir - Root directory containing the config file\n * @returns Migration result with status and new config\n * @throws {ConfigError} If migration fails\n */\n async migrate(rootDir: string = process.cwd()): Promise<MigrationResult> {\n const configPath = this.getConfigPath(rootDir);\n \n try {\n // Read existing config\n const configContent = await fs.readFile(configPath, 'utf-8');\n const oldConfig = JSON.parse(configContent);\n \n // Check if migration is needed\n if (!this.needsMigration(oldConfig)) {\n return {\n migrated: false,\n config: oldConfig as LienConfig,\n };\n }\n \n // Perform migration\n const newConfig = performMigration(oldConfig);\n \n // Validate migrated config\n const validation = this.validate(newConfig);\n if (!validation.valid) {\n throw new ConfigError(\n `Migration produced invalid configuration:\\n${validation.errors.join('\\n')}`,\n { errors: validation.errors }\n );\n }\n \n // 
Create backup\n const backupPath = `${configPath}.v0.2.0.backup`;\n await fs.copyFile(configPath, backupPath);\n \n // Write migrated config\n await this.save(rootDir, newConfig);\n \n return {\n migrated: true,\n backupPath,\n config: newConfig,\n };\n } catch (error) {\n if ((error as NodeJS.ErrnoException).code === 'ENOENT') {\n return {\n migrated: false,\n config: defaultConfig,\n };\n }\n \n if (error instanceof ConfigError) {\n throw error;\n }\n \n throw wrapError(error, 'Configuration migration failed', { path: configPath });\n }\n }\n \n /**\n * Check if a config object needs migration from v0.2.0 to v0.3.0.\n * \n * @param config - Config object to check\n * @returns True if migration is needed\n */\n needsMigration(config: unknown): boolean {\n return checkNeedsMigration(config);\n }\n \n /**\n * Validate a configuration object.\n * Checks all constraints and returns detailed validation results.\n * \n * @param config - Configuration to validate\n * @returns Validation result with errors and warnings\n */\n validate(config: unknown): ValidationResult {\n const errors: string[] = [];\n const warnings: string[] = [];\n \n // Type check\n if (!config || typeof config !== 'object') {\n return {\n valid: false,\n errors: ['Configuration must be an object'],\n warnings: [],\n };\n }\n \n const cfg = config as Partial<LienConfig>;\n \n // Check for required top-level fields\n if (!cfg.version) {\n errors.push('Missing required field: version');\n }\n \n // Validate based on config type\n if (isModernConfig(cfg as LienConfig | LegacyLienConfig)) {\n this.validateModernConfig(cfg as LienConfig, errors, warnings);\n } else if (isLegacyConfig(cfg as LienConfig | LegacyLienConfig)) {\n this.validateLegacyConfig(cfg as LegacyLienConfig, errors, warnings);\n } else {\n errors.push('Configuration format not recognized. 
Must have either \"frameworks\" or \"indexing\" field');\n }\n \n return {\n valid: errors.length === 0,\n errors,\n warnings,\n };\n }\n \n /**\n * Validate a partial configuration object.\n * Useful for validating user input before merging with defaults.\n * \n * @param config - Partial configuration to validate\n * @returns Validation result with errors and warnings\n */\n validatePartial(config: Partial<LienConfig>): ValidationResult {\n const errors: string[] = [];\n const warnings: string[] = [];\n \n // Validate core settings if present\n if (config.core) {\n this.validateCoreConfig(config.core, errors, warnings);\n }\n \n // Validate MCP settings if present\n if (config.mcp) {\n this.validateMCPConfig(config.mcp, errors, warnings);\n }\n \n // Validate git detection settings if present\n if (config.gitDetection) {\n this.validateGitDetectionConfig(config.gitDetection, errors, warnings);\n }\n \n // Validate file watching settings if present\n if (config.fileWatching) {\n this.validateFileWatchingConfig(config.fileWatching, errors, warnings);\n }\n \n // Validate frameworks if present\n if (config.frameworks) {\n this.validateFrameworks(config.frameworks, errors, warnings);\n }\n \n return {\n valid: errors.length === 0,\n errors,\n warnings,\n };\n }\n \n /**\n * Get the full path to the config file\n */\n private getConfigPath(rootDir: string): string {\n return path.join(rootDir, ConfigService.CONFIG_FILENAME);\n }\n \n /**\n * Validate modern (v0.3.0+) configuration\n */\n private validateModernConfig(\n config: LienConfig,\n errors: string[],\n warnings: string[]\n ): void {\n // Validate core settings\n if (!config.core) {\n errors.push('Missing required field: core');\n return;\n }\n this.validateCoreConfig(config.core, errors, warnings);\n \n // Validate MCP settings\n if (!config.mcp) {\n errors.push('Missing required field: mcp');\n return;\n }\n this.validateMCPConfig(config.mcp, errors, warnings);\n \n // Validate git detection settings\n if (!config.gitDetection) {\n errors.push('Missing required field: gitDetection');\n return;\n }\n this.validateGitDetectionConfig(config.gitDetection, errors, warnings);\n \n // Validate file watching settings\n if (!config.fileWatching) {\n errors.push('Missing required field: fileWatching');\n return;\n }\n this.validateFileWatchingConfig(config.fileWatching, errors, warnings);\n \n // Validate frameworks\n if (!config.frameworks) {\n errors.push('Missing required field: frameworks');\n return;\n }\n this.validateFrameworks(config.frameworks, errors, warnings);\n }\n \n /**\n * Validate legacy (v0.2.0) configuration\n */\n private validateLegacyConfig(\n config: LegacyLienConfig,\n errors: string[],\n warnings: string[]\n ): void {\n warnings.push('Using legacy configuration format. 
Consider running \"lien init\" to migrate to v0.3.0');\n \n // Validate indexing settings\n if (!config.indexing) {\n errors.push('Missing required field: indexing');\n return;\n }\n \n const { indexing } = config;\n \n if (typeof indexing.chunkSize !== 'number' || indexing.chunkSize <= 0) {\n errors.push('indexing.chunkSize must be a positive number');\n }\n \n if (typeof indexing.chunkOverlap !== 'number' || indexing.chunkOverlap < 0) {\n errors.push('indexing.chunkOverlap must be a non-negative number');\n }\n \n if (typeof indexing.concurrency !== 'number' || indexing.concurrency < 1 || indexing.concurrency > 16) {\n errors.push('indexing.concurrency must be between 1 and 16');\n }\n \n if (typeof indexing.embeddingBatchSize !== 'number' || indexing.embeddingBatchSize <= 0) {\n errors.push('indexing.embeddingBatchSize must be a positive number');\n }\n \n // Validate MCP settings (same for both)\n if (config.mcp) {\n this.validateMCPConfig(config.mcp, errors, warnings);\n }\n }\n \n /**\n * Validate core configuration settings\n */\n private validateCoreConfig(\n core: Partial<LienConfig['core']>,\n errors: string[],\n warnings: string[]\n ): void {\n if (core.chunkSize !== undefined) {\n if (typeof core.chunkSize !== 'number' || core.chunkSize <= 0) {\n errors.push('core.chunkSize must be a positive number');\n } else if (core.chunkSize < 50) {\n warnings.push('core.chunkSize is very small (<50 lines). This may result in poor search quality');\n } else if (core.chunkSize > 500) {\n warnings.push('core.chunkSize is very large (>500 lines). This may impact performance');\n }\n }\n \n if (core.chunkOverlap !== undefined) {\n if (typeof core.chunkOverlap !== 'number' || core.chunkOverlap < 0) {\n errors.push('core.chunkOverlap must be a non-negative number');\n }\n }\n \n if (core.concurrency !== undefined) {\n if (typeof core.concurrency !== 'number' || core.concurrency < 1 || core.concurrency > 16) {\n errors.push('core.concurrency must be between 1 and 16');\n }\n }\n \n if (core.embeddingBatchSize !== undefined) {\n if (typeof core.embeddingBatchSize !== 'number' || core.embeddingBatchSize <= 0) {\n errors.push('core.embeddingBatchSize must be a positive number');\n } else if (core.embeddingBatchSize > 100) {\n warnings.push('core.embeddingBatchSize is very large (>100). 
This may cause memory issues');\n }\n }\n }\n \n /**\n * Validate MCP configuration settings\n */\n private validateMCPConfig(\n mcp: Partial<LienConfig['mcp']>,\n errors: string[],\n _warnings: string[]\n ): void {\n if (mcp.port !== undefined) {\n if (typeof mcp.port !== 'number' || mcp.port < 1024 || mcp.port > 65535) {\n errors.push('mcp.port must be between 1024 and 65535');\n }\n }\n \n if (mcp.transport !== undefined) {\n if (mcp.transport !== 'stdio' && mcp.transport !== 'socket') {\n errors.push('mcp.transport must be either \"stdio\" or \"socket\"');\n }\n }\n \n if (mcp.autoIndexOnFirstRun !== undefined) {\n if (typeof mcp.autoIndexOnFirstRun !== 'boolean') {\n errors.push('mcp.autoIndexOnFirstRun must be a boolean');\n }\n }\n }\n \n /**\n * Validate git detection configuration settings\n */\n private validateGitDetectionConfig(\n gitDetection: Partial<LienConfig['gitDetection']>,\n errors: string[],\n _warnings: string[]\n ): void {\n if (gitDetection.enabled !== undefined) {\n if (typeof gitDetection.enabled !== 'boolean') {\n errors.push('gitDetection.enabled must be a boolean');\n }\n }\n \n if (gitDetection.pollIntervalMs !== undefined) {\n if (typeof gitDetection.pollIntervalMs !== 'number' || gitDetection.pollIntervalMs < 100) {\n errors.push('gitDetection.pollIntervalMs must be at least 100ms');\n } else if (gitDetection.pollIntervalMs < 1000) {\n _warnings.push('gitDetection.pollIntervalMs is very short (<1s). This may impact performance');\n }\n }\n }\n \n /**\n * Validate file watching configuration settings\n */\n private validateFileWatchingConfig(\n fileWatching: Partial<LienConfig['fileWatching']>,\n errors: string[],\n warnings: string[]\n ): void {\n if (fileWatching.enabled !== undefined) {\n if (typeof fileWatching.enabled !== 'boolean') {\n errors.push('fileWatching.enabled must be a boolean');\n }\n }\n \n if (fileWatching.debounceMs !== undefined) {\n if (typeof fileWatching.debounceMs !== 'number' || fileWatching.debounceMs < 0) {\n errors.push('fileWatching.debounceMs must be a non-negative number');\n } else if (fileWatching.debounceMs < 100) {\n warnings.push('fileWatching.debounceMs is very short (<100ms). 
This may cause excessive reindexing');\n }\n }\n }\n \n /**\n * Validate frameworks configuration\n */\n private validateFrameworks(\n frameworks: unknown[],\n errors: string[],\n warnings: string[]\n ): void {\n if (!Array.isArray(frameworks)) {\n errors.push('frameworks must be an array');\n return;\n }\n \n frameworks.forEach((framework, index) => {\n if (!framework || typeof framework !== 'object') {\n errors.push(`frameworks[${index}] must be an object`);\n return;\n }\n \n const fw = framework as Partial<any>;\n \n // Validate required fields\n if (!fw.name) {\n errors.push(`frameworks[${index}] missing required field: name`);\n }\n \n if (fw.path === undefined) {\n errors.push(`frameworks[${index}] missing required field: path`);\n } else if (typeof fw.path !== 'string') {\n errors.push(`frameworks[${index}].path must be a string`);\n } else if (path.isAbsolute(fw.path)) {\n errors.push(`frameworks[${index}].path must be relative, got: ${fw.path}`);\n }\n \n if (fw.enabled === undefined) {\n errors.push(`frameworks[${index}] missing required field: enabled`);\n } else if (typeof fw.enabled !== 'boolean') {\n errors.push(`frameworks[${index}].enabled must be a boolean`);\n }\n \n if (!fw.config) {\n errors.push(`frameworks[${index}] missing required field: config`);\n } else {\n this.validateFrameworkConfig(fw.config, `frameworks[${index}].config`, errors, warnings);\n }\n });\n }\n \n /**\n * Validate framework-specific configuration\n */\n private validateFrameworkConfig(\n config: any,\n prefix: string,\n errors: string[],\n _warnings: string[]\n ): void {\n if (!config || typeof config !== 'object') {\n errors.push(`${prefix} must be an object`);\n return;\n }\n \n // Validate include patterns\n if (!Array.isArray(config.include)) {\n errors.push(`${prefix}.include must be an array`);\n } else {\n config.include.forEach((pattern: unknown, i: number) => {\n if (typeof pattern !== 'string') {\n errors.push(`${prefix}.include[${i}] must be a string`);\n }\n });\n }\n \n // Validate exclude patterns\n if (!Array.isArray(config.exclude)) {\n errors.push(`${prefix}.exclude must be an array`);\n } else {\n config.exclude.forEach((pattern: unknown, i: number) => {\n if (typeof pattern !== 'string') {\n errors.push(`${prefix}.exclude[${i}] must be a string`);\n }\n });\n }\n }\n}\n\n// Export a singleton instance for convenience\nexport const configService = new ConfigService();\n\n","import { exec } from 'child_process';\nimport { promisify } from 'util';\nimport fs from 'fs/promises';\nimport path from 'path';\n\nconst execAsync = promisify(exec);\n\n/**\n * Checks if a directory is a git repository.\n * \n * @param rootDir - Directory to check\n * @returns true if directory is a git repo, false otherwise\n */\nexport async function isGitRepo(rootDir: string): Promise<boolean> {\n try {\n const gitDir = path.join(rootDir, '.git');\n await fs.access(gitDir);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Gets the current git branch name.\n * \n * @param rootDir - Root directory of the git repository\n * @returns Branch name (e.g., \"main\", \"feature-branch\")\n * @throws Error if not a git repo or git command fails\n */\nexport async function getCurrentBranch(rootDir: string): Promise<string> {\n try {\n const { stdout } = await execAsync('git rev-parse --abbrev-ref HEAD', {\n cwd: rootDir,\n timeout: 5000, // 5 second timeout\n });\n return stdout.trim();\n } catch (error) {\n throw new Error(`Failed to get current branch: ${error}`);\n }\n}\n\n/**\n * Gets the current 
git commit SHA (HEAD).\n * \n * @param rootDir - Root directory of the git repository\n * @returns Commit SHA (full 40-character hash)\n * @throws Error if not a git repo or git command fails\n */\nexport async function getCurrentCommit(rootDir: string): Promise<string> {\n try {\n const { stdout } = await execAsync('git rev-parse HEAD', {\n cwd: rootDir,\n timeout: 5000,\n });\n return stdout.trim();\n } catch (error) {\n throw new Error(`Failed to get current commit: ${error}`);\n }\n}\n\n/**\n * Gets the list of files that changed between two git references.\n * \n * @param rootDir - Root directory of the git repository\n * @param fromRef - Starting reference (branch name, commit SHA, or tag)\n * @param toRef - Ending reference (branch name, commit SHA, or tag)\n * @returns Array of file paths (relative to repo root) that changed\n * @throws Error if git command fails\n */\nexport async function getChangedFiles(\n rootDir: string,\n fromRef: string,\n toRef: string\n): Promise<string[]> {\n try {\n const { stdout } = await execAsync(\n `git diff --name-only ${fromRef}...${toRef}`,\n {\n cwd: rootDir,\n timeout: 10000, // 10 second timeout for diffs\n }\n );\n \n const files = stdout\n .trim()\n .split('\\n')\n .filter(Boolean)\n .map(file => path.join(rootDir, file)); // Convert to absolute paths\n \n return files;\n } catch (error) {\n throw new Error(`Failed to get changed files: ${error}`);\n }\n}\n\n/**\n * Gets the list of files that changed in a specific commit.\n * \n * @param rootDir - Root directory of the git repository\n * @param commitSha - Commit SHA to check\n * @returns Array of file paths (absolute) that changed in this commit\n * @throws Error if git command fails\n */\nexport async function getChangedFilesInCommit(\n rootDir: string,\n commitSha: string\n): Promise<string[]> {\n try {\n const { stdout } = await execAsync(\n `git diff-tree --no-commit-id --name-only -r ${commitSha}`,\n {\n cwd: rootDir,\n timeout: 10000,\n }\n );\n \n const files = stdout\n .trim()\n .split('\\n')\n .filter(Boolean)\n .map(file => path.join(rootDir, file)); // Convert to absolute paths\n \n return files;\n } catch (error) {\n throw new Error(`Failed to get changed files in commit: ${error}`);\n }\n}\n\n/**\n * Gets the list of files that changed between two commits.\n * More efficient than getChangedFiles for commit-to-commit comparisons.\n * \n * @param rootDir - Root directory of the git repository\n * @param fromCommit - Starting commit SHA\n * @param toCommit - Ending commit SHA\n * @returns Array of file paths (absolute) that changed between commits\n * @throws Error if git command fails\n */\nexport async function getChangedFilesBetweenCommits(\n rootDir: string,\n fromCommit: string,\n toCommit: string\n): Promise<string[]> {\n try {\n const { stdout } = await execAsync(\n `git diff --name-only ${fromCommit} ${toCommit}`,\n {\n cwd: rootDir,\n timeout: 10000,\n }\n );\n \n const files = stdout\n .trim()\n .split('\\n')\n .filter(Boolean)\n .map(file => path.join(rootDir, file)); // Convert to absolute paths\n \n return files;\n } catch (error) {\n throw new Error(`Failed to get changed files between commits: ${error}`);\n }\n}\n\n/**\n * Checks if git is installed and available.\n * \n * @returns true if git is available, false otherwise\n */\nexport async function isGitAvailable(): Promise<boolean> {\n try {\n await execAsync('git --version', { timeout: 3000 });\n return true;\n } catch {\n return false;\n }\n}\n\n","import fs from 'fs/promises';\nimport path from 
'path';\n\nconst VERSION_FILE = '.lien-index-version';\n\n/**\n * Writes a version timestamp file to mark when the index was last updated.\n * This file is used by the MCP server to detect when it needs to reconnect.\n * \n * @param indexPath - Path to the index directory\n */\nexport async function writeVersionFile(indexPath: string): Promise<void> {\n try {\n const versionFilePath = path.join(indexPath, VERSION_FILE);\n const timestamp = Date.now().toString();\n await fs.writeFile(versionFilePath, timestamp, 'utf-8');\n } catch (error) {\n // Don't throw - version file is a convenience feature, not critical\n console.error(`Warning: Failed to write version file: ${error}`);\n }\n}\n\n/**\n * Reads the version timestamp from the index directory.\n * Returns 0 if the file doesn't exist (e.g., old index).\n * \n * @param indexPath - Path to the index directory\n * @returns Version timestamp, or 0 if not found\n */\nexport async function readVersionFile(indexPath: string): Promise<number> {\n try {\n const versionFilePath = path.join(indexPath, VERSION_FILE);\n const content = await fs.readFile(versionFilePath, 'utf-8');\n const timestamp = parseInt(content.trim(), 10);\n return isNaN(timestamp) ? 0 : timestamp;\n } catch (error) {\n // File doesn't exist or can't be read - treat as version 0\n return 0;\n }\n}\n\n","import { glob } from 'glob';\nimport ignore from 'ignore';\nimport fs from 'fs/promises';\nimport path from 'path';\nimport { ScanOptions } from './types.js';\nimport { LienConfig, FrameworkInstance } from '../config/schema.js';\n\n/**\n * Scan codebase using framework-aware configuration\n * @param rootDir - Project root directory\n * @param config - Lien configuration with frameworks\n * @returns Array of file paths relative to rootDir\n */\nexport async function scanCodebaseWithFrameworks(\n rootDir: string,\n config: LienConfig\n): Promise<string[]> {\n const allFiles: string[] = [];\n \n // Scan each framework\n for (const framework of config.frameworks) {\n if (!framework.enabled) {\n continue;\n }\n \n const frameworkFiles = await scanFramework(rootDir, framework);\n allFiles.push(...frameworkFiles);\n }\n \n return allFiles;\n}\n\n/**\n * Scan files for a specific framework instance\n */\nasync function scanFramework(\n rootDir: string,\n framework: FrameworkInstance\n): Promise<string[]> {\n const frameworkPath = path.join(rootDir, framework.path);\n \n // Load .gitignore from framework path\n const gitignorePath = path.join(frameworkPath, '.gitignore');\n let ig = ignore();\n \n try {\n const gitignoreContent = await fs.readFile(gitignorePath, 'utf-8');\n ig = ignore().add(gitignoreContent);\n } catch (e) {\n // No .gitignore in framework path, try root\n const rootGitignorePath = path.join(rootDir, '.gitignore');\n try {\n const gitignoreContent = await fs.readFile(rootGitignorePath, 'utf-8');\n ig = ignore().add(gitignoreContent);\n } catch (e) {\n // No .gitignore at all, that's fine\n }\n }\n \n // Add framework-specific exclusions\n ig.add([\n ...framework.config.exclude,\n '.lien/**',\n ]);\n \n // Find all files matching framework patterns\n const allFiles: string[] = [];\n \n for (const pattern of framework.config.include) {\n const files = await glob(pattern, {\n cwd: frameworkPath,\n absolute: false, // Get paths relative to framework path\n nodir: true,\n ignore: framework.config.exclude,\n });\n allFiles.push(...files);\n }\n \n // Remove duplicates\n const uniqueFiles = Array.from(new Set(allFiles));\n \n // Filter using ignore patterns and prefix with 
framework path\n return uniqueFiles\n .filter(file => !ig.ignores(file))\n .map(file => {\n // Return path relative to root: framework.path/file\n return framework.path === '.' \n ? file \n : path.join(framework.path, file);\n });\n}\n\n/**\n * Legacy scan function for backwards compatibility\n * @deprecated Use scanCodebaseWithFrameworks instead\n */\nexport async function scanCodebase(options: ScanOptions): Promise<string[]> {\n const { rootDir, includePatterns = [], excludePatterns = [] } = options;\n \n // Load .gitignore\n const gitignorePath = path.join(rootDir, '.gitignore');\n let ig = ignore();\n \n try {\n const gitignoreContent = await fs.readFile(gitignorePath, 'utf-8');\n ig = ignore().add(gitignoreContent);\n } catch (e) {\n // No .gitignore, that's fine\n }\n \n // Add default exclusions\n ig.add([\n 'node_modules/**',\n '.git/**',\n 'dist/**',\n 'build/**',\n '*.min.js',\n '*.min.css',\n '.lien/**',\n ...excludePatterns,\n ]);\n \n // Determine patterns to search for\n const patterns = includePatterns.length > 0 \n ? includePatterns \n : ['**/*.{ts,tsx,js,jsx,py,go,rs,java,cpp,c,h,md,mdx}'];\n \n // Find all code files\n const allFiles: string[] = [];\n \n for (const pattern of patterns) {\n const files = await glob(pattern, {\n cwd: rootDir,\n absolute: true,\n nodir: true,\n ignore: ['node_modules/**', '.git/**'],\n });\n allFiles.push(...files);\n }\n \n // Remove duplicates\n const uniqueFiles = Array.from(new Set(allFiles));\n \n // Filter using ignore patterns\n return uniqueFiles.filter(file => {\n const relativePath = path.relative(rootDir, file);\n return !ig.ignores(relativePath);\n });\n}\n\nexport function detectLanguage(filepath: string): string {\n const ext = path.extname(filepath).toLowerCase();\n \n const languageMap: Record<string, string> = {\n '.ts': 'typescript',\n '.tsx': 'typescript',\n '.js': 'javascript',\n '.jsx': 'javascript',\n '.mjs': 'javascript',\n '.cjs': 'javascript',\n '.vue': 'vue',\n '.py': 'python',\n '.go': 'go',\n '.rs': 'rust',\n '.java': 'java',\n '.cpp': 'cpp',\n '.cc': 'cpp',\n '.cxx': 'cpp',\n '.c': 'c',\n '.h': 'c',\n '.hpp': 'cpp',\n '.php': 'php',\n '.rb': 'ruby',\n '.swift': 'swift',\n '.kt': 'kotlin',\n '.cs': 'csharp',\n '.scala': 'scala',\n '.liquid': 'liquid',\n '.md': 'markdown',\n '.mdx': 'markdown',\n '.markdown': 'markdown',\n };\n \n return languageMap[ext] || 'unknown';\n}\n\n","/**\n * Symbol extraction utilities for different programming languages.\n * Extracts function, class, and interface names from code chunks for better indexing.\n */\n\nexport interface ExtractedSymbols {\n functions: string[];\n classes: string[];\n interfaces: string[];\n}\n\n/**\n * Extract symbols (functions, classes, interfaces) from code content.\n * \n * @param content - The code content to extract symbols from\n * @param language - The programming language of the content\n * @returns Extracted symbols organized by type\n */\nexport function extractSymbols(\n content: string,\n language: string\n): ExtractedSymbols {\n const symbols: ExtractedSymbols = {\n functions: [],\n classes: [],\n interfaces: [],\n };\n \n const normalizedLang = language.toLowerCase();\n \n switch (normalizedLang) {\n case 'typescript':\n case 'tsx':\n symbols.functions = extractTSFunctions(content);\n symbols.classes = extractTSClasses(content);\n symbols.interfaces = extractTSInterfaces(content);\n break;\n \n case 'javascript':\n case 'jsx':\n symbols.functions = extractJSFunctions(content);\n symbols.classes = extractJSClasses(content);\n break;\n \n case 
'python':\n case 'py':\n symbols.functions = extractPythonFunctions(content);\n symbols.classes = extractPythonClasses(content);\n break;\n \n case 'php':\n symbols.functions = extractPHPFunctions(content);\n symbols.classes = extractPHPClasses(content);\n symbols.interfaces = extractPHPInterfaces(content);\n break;\n \n case 'vue':\n // Extract from <script> blocks (handles both Options API and Composition API)\n symbols.functions = extractVueFunctions(content);\n symbols.classes = extractVueComponents(content);\n break;\n \n case 'go':\n symbols.functions = extractGoFunctions(content);\n symbols.interfaces = extractGoInterfaces(content);\n break;\n \n case 'java':\n symbols.functions = extractJavaFunctions(content);\n symbols.classes = extractJavaClasses(content);\n symbols.interfaces = extractJavaInterfaces(content);\n break;\n \n case 'csharp':\n case 'cs':\n symbols.functions = extractCSharpFunctions(content);\n symbols.classes = extractCSharpClasses(content);\n symbols.interfaces = extractCSharpInterfaces(content);\n break;\n \n case 'ruby':\n case 'rb':\n symbols.functions = extractRubyFunctions(content);\n symbols.classes = extractRubyClasses(content);\n break;\n \n case 'rust':\n case 'rs':\n symbols.functions = extractRustFunctions(content);\n break;\n }\n \n return symbols;\n}\n\n// TypeScript / JavaScript Functions\nfunction extractTSFunctions(content: string): string[] {\n const names = new Set<string>();\n \n // Regular functions: function name(...) or async function name(...)\n const functionMatches = content.matchAll(/(?:async\\s+)?function\\s+(\\w+)\\s*\\(/g);\n for (const match of functionMatches) {\n names.add(match[1]);\n }\n \n // Arrow functions: const/let/var name = (...) =>\n const arrowMatches = content.matchAll(/(?:const|let|var)\\s+(\\w+)\\s*=\\s*(?:async\\s*)?\\([^)]*\\)\\s*=>/g);\n for (const match of arrowMatches) {\n names.add(match[1]);\n }\n \n // Method definitions: name(...) { or async name(...) 
{\n const methodMatches = content.matchAll(/(?:async\\s+)?(\\w+)\\s*\\([^)]*\\)\\s*[:{]/g);\n for (const match of methodMatches) {\n // Exclude common keywords\n if (!['if', 'for', 'while', 'switch', 'catch'].includes(match[1])) {\n names.add(match[1]);\n }\n }\n \n // Export function\n const exportMatches = content.matchAll(/export\\s+(?:async\\s+)?function\\s+(\\w+)\\s*\\(/g);\n for (const match of exportMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction extractJSFunctions(content: string): string[] {\n return extractTSFunctions(content); // Same patterns\n}\n\nfunction extractTSClasses(content: string): string[] {\n const names = new Set<string>();\n \n // Class declarations: class Name or export class Name\n const classMatches = content.matchAll(/(?:export\\s+)?(?:abstract\\s+)?class\\s+(\\w+)/g);\n for (const match of classMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction extractJSClasses(content: string): string[] {\n return extractTSClasses(content); // Same patterns\n}\n\nfunction extractTSInterfaces(content: string): string[] {\n const names = new Set<string>();\n \n // Interface declarations: interface Name or export interface Name\n const interfaceMatches = content.matchAll(/(?:export\\s+)?interface\\s+(\\w+)/g);\n for (const match of interfaceMatches) {\n names.add(match[1]);\n }\n \n // Type aliases: type Name = or export type Name =\n const typeMatches = content.matchAll(/(?:export\\s+)?type\\s+(\\w+)\\s*=/g);\n for (const match of typeMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\n// Python Functions\nfunction extractPythonFunctions(content: string): string[] {\n const names = new Set<string>();\n \n // Function definitions: def name(...):\n const functionMatches = content.matchAll(/def\\s+(\\w+)\\s*\\(/g);\n for (const match of functionMatches) {\n names.add(match[1]);\n }\n \n // Async functions: async def name(...):\n const asyncMatches = content.matchAll(/async\\s+def\\s+(\\w+)\\s*\\(/g);\n for (const match of asyncMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction extractPythonClasses(content: string): string[] {\n const names = new Set<string>();\n \n // Class definitions: class Name or class Name(Base):\n const classMatches = content.matchAll(/class\\s+(\\w+)(?:\\s*\\(|:)/g);\n for (const match of classMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\n// PHP Functions\nfunction extractPHPFunctions(content: string): string[] {\n const names = new Set<string>();\n \n // Function definitions: function name(...) 
or public function name(...)\n const functionMatches = content.matchAll(/(?:public|private|protected)?\\s*function\\s+(\\w+)\\s*\\(/g);\n for (const match of functionMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction extractPHPClasses(content: string): string[] {\n const names = new Set<string>();\n \n // Class definitions: class Name or abstract class Name\n const classMatches = content.matchAll(/(?:abstract\\s+)?class\\s+(\\w+)/g);\n for (const match of classMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction extractPHPInterfaces(content: string): string[] {\n const names = new Set<string>();\n \n // Interface definitions: interface Name\n const interfaceMatches = content.matchAll(/interface\\s+(\\w+)/g);\n for (const match of interfaceMatches) {\n names.add(match[1]);\n }\n \n // Trait definitions: trait Name\n const traitMatches = content.matchAll(/trait\\s+(\\w+)/g);\n for (const match of traitMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\n// Go Functions\nfunction extractGoFunctions(content: string): string[] {\n const names = new Set<string>();\n \n // Function definitions: func Name(...) or func (r *Receiver) Name(...)\n const functionMatches = content.matchAll(/func\\s+(?:\\(\\w+\\s+\\*?\\w+\\)\\s+)?(\\w+)\\s*\\(/g);\n for (const match of functionMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction extractGoInterfaces(content: string): string[] {\n const names = new Set<string>();\n \n // Interface definitions: type Name interface {\n const interfaceMatches = content.matchAll(/type\\s+(\\w+)\\s+interface\\s*\\{/g);\n for (const match of interfaceMatches) {\n names.add(match[1]);\n }\n \n // Struct definitions: type Name struct {\n const structMatches = content.matchAll(/type\\s+(\\w+)\\s+struct\\s*\\{/g);\n for (const match of structMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\n// Java Functions\nfunction extractJavaFunctions(content: string): string[] {\n const names = new Set<string>();\n \n // Method definitions: public/private/protected return_type name(...)\n const methodMatches = content.matchAll(/(?:public|private|protected)\\s+(?:static\\s+)?(?:\\w+(?:<[^>]+>)?)\\s+(\\w+)\\s*\\(/g);\n for (const match of methodMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction extractJavaClasses(content: string): string[] {\n const names = new Set<string>();\n \n // Class definitions: public class Name or abstract class Name\n const classMatches = content.matchAll(/(?:public\\s+)?(?:abstract\\s+)?class\\s+(\\w+)/g);\n for (const match of classMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction extractJavaInterfaces(content: string): string[] {\n const names = new Set<string>();\n \n // Interface definitions: public interface Name\n const interfaceMatches = content.matchAll(/(?:public\\s+)?interface\\s+(\\w+)/g);\n for (const match of interfaceMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\n// C# Functions\nfunction extractCSharpFunctions(content: string): string[] {\n const names = new Set<string>();\n \n // Method definitions: public/private/protected return_type Name(...)\n const methodMatches = content.matchAll(/(?:public|private|protected|internal)\\s+(?:static\\s+)?(?:async\\s+)?(?:\\w+(?:<[^>]+>)?)\\s+(\\w+)\\s*\\(/g);\n for (const match of methodMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction 
extractCSharpClasses(content: string): string[] {\n const names = new Set<string>();\n \n // Class definitions: public class Name or abstract class Name\n const classMatches = content.matchAll(/(?:public|internal)?\\s*(?:abstract\\s+)?class\\s+(\\w+)/g);\n for (const match of classMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction extractCSharpInterfaces(content: string): string[] {\n const names = new Set<string>();\n \n // Interface definitions: public interface Name\n const interfaceMatches = content.matchAll(/(?:public|internal)?\\s*interface\\s+(\\w+)/g);\n for (const match of interfaceMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\n// Ruby Functions\nfunction extractRubyFunctions(content: string): string[] {\n const names = new Set<string>();\n \n // Method definitions: def name or def self.name\n const methodMatches = content.matchAll(/def\\s+(?:self\\.)?(\\w+)/g);\n for (const match of methodMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction extractRubyClasses(content: string): string[] {\n const names = new Set<string>();\n \n // Class definitions: class Name or class Name < Base\n const classMatches = content.matchAll(/class\\s+(\\w+)/g);\n for (const match of classMatches) {\n names.add(match[1]);\n }\n \n // Module definitions: module Name\n const moduleMatches = content.matchAll(/module\\s+(\\w+)/g);\n for (const match of moduleMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\n// Rust Functions\nfunction extractRustFunctions(content: string): string[] {\n const names = new Set<string>();\n \n // Function definitions: fn name(...) or pub fn name(...)\n const functionMatches = content.matchAll(/(?:pub\\s+)?fn\\s+(\\w+)\\s*\\(/g);\n for (const match of functionMatches) {\n names.add(match[1]);\n }\n \n // Struct definitions: struct Name {\n const structMatches = content.matchAll(/(?:pub\\s+)?struct\\s+(\\w+)/g);\n for (const match of structMatches) {\n names.add(match[1]);\n }\n \n // Trait definitions: trait Name {\n const traitMatches = content.matchAll(/(?:pub\\s+)?trait\\s+(\\w+)/g);\n for (const match of traitMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\n// Vue Functions\nfunction extractVueFunctions(content: string): string[] {\n const names = new Set<string>();\n \n // Extract script content from Vue SFC\n const scriptMatch = content.match(/<script[^>]*>([\\s\\S]*?)<\\/script>/);\n if (!scriptMatch) return [];\n \n const scriptContent = scriptMatch[1];\n \n // Composition API: const/function name = ...\n const compositionMatches = scriptContent.matchAll(/(?:const|function)\\s+(\\w+)\\s*=/g);\n for (const match of compositionMatches) {\n names.add(match[1]);\n }\n \n // Options API methods\n const methodMatches = scriptContent.matchAll(/(\\w+)\\s*\\([^)]*\\)\\s*{/g);\n for (const match of methodMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\n// Vue Components\nfunction extractVueComponents(content: string): string[] {\n const names = new Set<string>();\n \n // Extract component name from filename convention or export\n const scriptMatch = content.match(/<script[^>]*>([\\s\\S]*?)<\\/script>/);\n if (!scriptMatch) return [];\n \n const scriptContent = scriptMatch[1];\n \n // export default { name: 'ComponentName' }\n const nameMatch = scriptContent.match(/name:\\s*['\"](\\w+)['\"]/);\n if (nameMatch) {\n names.add(nameMatch[1]);\n }\n \n // defineComponent or <script setup> components\n const 
defineComponentMatch = scriptContent.match(/defineComponent\\s*\\(/);\n if (defineComponentMatch) {\n names.add('VueComponent');\n }\n \n return Array.from(names);\n}\n\n","import { CodeChunk } from './types.js';\nimport { detectLanguage } from './scanner.js';\nimport { extractSymbols } from './symbol-extractor.js';\n\nexport interface ChunkOptions {\n chunkSize?: number;\n chunkOverlap?: number;\n}\n\nexport function chunkFile(\n filepath: string,\n content: string,\n options: ChunkOptions = {}\n): CodeChunk[] {\n const { chunkSize = 75, chunkOverlap = 10 } = options;\n \n const lines = content.split('\\n');\n const chunks: CodeChunk[] = [];\n const language = detectLanguage(filepath);\n \n // Handle empty files\n if (lines.length === 0 || (lines.length === 1 && lines[0].trim() === '')) {\n return chunks;\n }\n \n // Chunk by lines with overlap\n for (let i = 0; i < lines.length; i += chunkSize - chunkOverlap) {\n const endLine = Math.min(i + chunkSize, lines.length);\n const chunkLines = lines.slice(i, endLine);\n const chunkContent = chunkLines.join('\\n');\n \n // Skip empty chunks\n if (chunkContent.trim().length === 0) {\n continue;\n }\n \n // Extract symbols from the chunk\n const symbols = extractSymbols(chunkContent, language);\n \n chunks.push({\n content: chunkContent,\n metadata: {\n file: filepath,\n startLine: i + 1,\n endLine: endLine,\n type: 'block', // MVP: all chunks are 'block' type\n language,\n symbols,\n },\n });\n \n // If we've reached the end, break\n if (endLine >= lines.length) {\n break;\n }\n }\n \n return chunks;\n}\n\nexport function chunkText(text: string, options: ChunkOptions = {}): string[] {\n const { chunkSize = 75, chunkOverlap = 10 } = options;\n \n const lines = text.split('\\n');\n const chunks: string[] = [];\n \n for (let i = 0; i < lines.length; i += chunkSize - chunkOverlap) {\n const endLine = Math.min(i + chunkSize, lines.length);\n const chunkLines = lines.slice(i, endLine);\n const chunkContent = chunkLines.join('\\n');\n \n if (chunkContent.trim().length > 0) {\n chunks.push(chunkContent);\n }\n \n if (endLine >= lines.length) {\n break;\n }\n }\n \n return chunks;\n}\n\n","import { pipeline, env, type FeatureExtractionPipeline } from '@xenova/transformers';\nimport { EmbeddingService } from './types.js';\nimport { EmbeddingError, wrapError } from '../errors/index.js';\nimport { DEFAULT_EMBEDDING_MODEL } from '../constants.js';\n\n// Configure transformers.js to cache models locally\nenv.allowRemoteModels = true;\nenv.allowLocalModels = true;\n\nexport class LocalEmbeddings implements EmbeddingService {\n private extractor: FeatureExtractionPipeline | null = null;\n private readonly modelName = DEFAULT_EMBEDDING_MODEL;\n private initPromise: Promise<void> | null = null;\n \n async initialize(): Promise<void> {\n // Prevent multiple simultaneous initializations\n if (this.initPromise) {\n return this.initPromise;\n }\n \n if (this.extractor) {\n return;\n }\n \n this.initPromise = (async () => {\n try {\n // This downloads ~100MB on first run, then caches in ~/.cache/huggingface\n this.extractor = await pipeline('feature-extraction', this.modelName) as FeatureExtractionPipeline;\n } catch (error: unknown) {\n this.initPromise = null;\n throw wrapError(error, 'Failed to initialize embedding model');\n }\n })();\n \n return this.initPromise;\n }\n \n async embed(text: string): Promise<Float32Array> {\n await this.initialize();\n \n if (!this.extractor) {\n throw new EmbeddingError('Embedding model not initialized');\n }\n \n try {\n const 
output = await this.extractor(text, {\n pooling: 'mean',\n normalize: true,\n });\n \n return output.data as Float32Array;\n } catch (error: unknown) {\n throw wrapError(error, 'Failed to generate embedding', { textLength: text.length });\n }\n }\n \n async embedBatch(texts: string[]): Promise<Float32Array[]> {\n await this.initialize();\n \n if (!this.extractor) {\n throw new EmbeddingError('Embedding model not initialized');\n }\n \n try {\n // Process embeddings with Promise.all for concurrent execution\n // Each call is sequential but Promise.all allows task interleaving\n const results = await Promise.all(\n texts.map(text => this.embed(text))\n );\n return results;\n } catch (error: unknown) {\n throw wrapError(error, 'Failed to generate batch embeddings', { batchSize: texts.length });\n }\n }\n}\n\n","import { EMBEDDING_DIMENSIONS } from '../constants.js';\n\nexport interface EmbeddingService {\n initialize(): Promise<void>;\n embed(text: string): Promise<Float32Array>;\n embedBatch(texts: string[]): Promise<Float32Array[]>;\n}\n\nexport const EMBEDDING_DIMENSION = EMBEDDING_DIMENSIONS;\n\n","/**\n * Relevance category based on semantic similarity score\n */\nexport type RelevanceCategory = 'highly_relevant' | 'relevant' | 'loosely_related' | 'not_relevant';\n\n/**\n * Calculate relevance category from cosine distance score.\n * \n * Lower scores indicate higher similarity (closer in vector space).\n * Thresholds based on observed score distributions from dogfooding.\n * \n * @param score - Cosine distance score from vector search\n * @returns Human-readable relevance category\n */\nexport function calculateRelevance(score: number): RelevanceCategory {\n if (score < 1.0) return 'highly_relevant';\n if (score < 1.3) return 'relevant';\n if (score < 1.5) return 'loosely_related';\n return 'not_relevant';\n}\n\n","/**\n * Query Intent Classification\n * \n * Classifies user search queries into three categories to apply\n * appropriate relevance boosting strategies:\n * \n * - LOCATION: \"Where is X?\" - User wants to find specific files/code\n * - CONCEPTUAL: \"How does X work?\" - User wants to understand concepts\n * - IMPLEMENTATION: \"How is X implemented?\" - User wants implementation details\n * \n * Examples:\n * - \"where is the auth handler\" → LOCATION\n * - \"how does authentication work\" → CONCEPTUAL\n * - \"how is authentication implemented\" → IMPLEMENTATION\n */\n\n/**\n * Query intent types for semantic search\n */\nexport enum QueryIntent {\n /** User wants to locate specific files or code (e.g., \"where is X\") */\n LOCATION = 'location',\n \n /** User wants to understand concepts/processes (e.g., \"how does X work\") */\n CONCEPTUAL = 'conceptual',\n \n /** User wants implementation details (e.g., \"how is X implemented\") */\n IMPLEMENTATION = 'implementation',\n}\n\n/**\n * Classifies a search query into one of three intent categories.\n * \n * Uses pattern matching to detect query intent:\n * - LOCATION: Queries about finding/locating code\n * - CONCEPTUAL: Queries about understanding processes/concepts\n * - IMPLEMENTATION: Queries about code implementation details\n * \n * @param query - The search query string\n * @returns The detected query intent (defaults to IMPLEMENTATION)\n * \n * @example\n * classifyQueryIntent(\"where is the user controller\") // → LOCATION\n * classifyQueryIntent(\"how does authentication work\") // → CONCEPTUAL\n * classifyQueryIntent(\"how is the API implemented\") // → IMPLEMENTATION\n */\nexport function classifyQueryIntent(query: 
string): QueryIntent {\n const lower = query.toLowerCase().trim();\n \n // LOCATION queries - user wants to find specific files\n // Patterns: \"where is/are\", \"find the\", \"locate\"\n if (\n lower.match(/where\\s+(is|are|does|can\\s+i\\s+find)/) ||\n lower.match(/find\\s+the\\s+/) ||\n lower.match(/locate\\s+/)\n ) {\n return QueryIntent.LOCATION;\n }\n \n // CONCEPTUAL queries - user wants to understand how things work\n // Patterns: \"how does X work\", \"what is/are\", \"explain\", \"understand\", etc.\n if (\n lower.match(/how\\s+does\\s+.*\\s+work/) ||\n lower.match(/what\\s+(is|are|does)/) ||\n lower.match(/explain\\s+/) ||\n lower.match(/understand\\s+/) ||\n lower.match(/\\b(process|workflow|architecture)\\b/)\n ) {\n return QueryIntent.CONCEPTUAL;\n }\n \n // IMPLEMENTATION queries - user wants code implementation details\n // Patterns: \"how is/are X implemented/built/coded\", \"implementation of\", \"source code for\"\n if (\n lower.match(/how\\s+(is|are)\\s+.*\\s+(implemented|built|coded)/) ||\n lower.match(/implementation\\s+of/) ||\n lower.match(/source\\s+code\\s+for/)\n ) {\n return QueryIntent.IMPLEMENTATION;\n }\n \n // Default to IMPLEMENTATION for ambiguous queries\n // This is the most common use case for code search\n return QueryIntent.IMPLEMENTATION;\n}\n\n","import * as lancedb from 'vectordb';\nimport path from 'path';\nimport os from 'os';\nimport crypto from 'crypto';\nimport { SearchResult, VectorDBInterface } from './types.js';\nimport { ChunkMetadata } from '../indexer/types.js';\nimport { EMBEDDING_DIMENSION } from '../embeddings/types.js';\nimport { readVersionFile, writeVersionFile } from './version.js';\nimport { DatabaseError, wrapError } from '../errors/index.js';\nimport { calculateRelevance } from './relevance.js';\nimport { QueryIntent, classifyQueryIntent } from './intent-classifier.js';\nimport { VECTOR_DB_MAX_BATCH_SIZE, VECTOR_DB_MIN_BATCH_SIZE } from '../constants.js';\n\n/**\n * Helper Functions for File Type Detection\n */\n\n/**\n * Check if a file is a documentation file.\n * Matches common documentation patterns across different ecosystems.\n * \n * @param filepath - Path to check\n * @returns True if file is documentation\n */\nfunction isDocumentationFile(filepath: string): boolean {\n const lower = filepath.toLowerCase();\n const filename = path.basename(filepath).toLowerCase();\n \n // README files\n if (filename.startsWith('readme')) return true;\n \n // CHANGELOG files\n if (filename.startsWith('changelog')) return true;\n \n // Markdown files (common for docs)\n if (filename.endsWith('.md') || filename.endsWith('.mdx') || filename.endsWith('.markdown')) {\n return true;\n }\n \n // Documentation directories\n if (\n lower.includes('/docs/') ||\n lower.includes('/documentation/') ||\n lower.includes('/wiki/') ||\n lower.includes('/.github/')\n ) {\n return true;\n }\n \n // Architecture/workflow documentation\n if (\n lower.includes('architecture') ||\n lower.includes('workflow') ||\n lower.includes('/flow/')\n ) {\n return true;\n }\n \n return false;\n}\n\n/**\n * Check if a file is a test file.\n * Matches common test file patterns.\n * \n * @param filepath - Path to check\n * @returns True if file is a test file\n */\nfunction isTestFile(filepath: string): boolean {\n const lower = filepath.toLowerCase();\n \n // Test directories\n if (\n lower.includes('/test/') ||\n lower.includes('/tests/') ||\n lower.includes('/__tests__/')\n ) {\n return true;\n }\n \n // Test file naming patterns\n if (\n lower.includes('.test.') ||\n 
lower.includes('.spec.') ||\n lower.includes('_test.') ||\n lower.includes('_spec.')\n ) {\n return true;\n }\n \n return false;\n}\n\n/**\n * Check if a file is a utility/helper file.\n * Matches common utility file patterns.\n * \n * @param filepath - Path to check\n * @returns True if file is a utility file\n */\nfunction isUtilityFile(filepath: string): boolean {\n const lower = filepath.toLowerCase();\n \n // Utility directories\n if (\n lower.includes('/utils/') ||\n lower.includes('/utilities/') ||\n lower.includes('/helpers/') ||\n lower.includes('/lib/')\n ) {\n return true;\n }\n \n // Utility file naming patterns\n if (\n lower.includes('.util.') ||\n lower.includes('.helper.') ||\n lower.includes('-util.') ||\n lower.includes('-helper.')\n ) {\n return true;\n }\n \n return false;\n}\n\n/**\n * Boost relevance score based on path matching.\n * If query tokens match directory names in the file path, improve the score.\n * \n * @param query - Original search query\n * @param filepath - Path to the file\n * @param baseScore - Original distance score from vector search\n * @returns Adjusted score (lower is better)\n */\nfunction boostPathRelevance(\n query: string,\n filepath: string,\n baseScore: number\n): number {\n const queryTokens = query.toLowerCase().split(/\\s+/);\n const pathSegments = filepath.toLowerCase().split('/');\n \n let boostFactor = 1.0;\n \n // Check if query mentions any directory name in the path\n for (const token of queryTokens) {\n // Skip very short tokens (like \"is\", \"a\", etc.)\n if (token.length <= 2) continue;\n \n // Check if this token appears in any path segment\n if (pathSegments.some(seg => seg.includes(token))) {\n boostFactor *= 0.9; // 10% boost (reduce distance)\n }\n }\n \n return baseScore * boostFactor;\n}\n\n/**\n * Boost relevance score based on filename matching.\n * If query tokens match the filename, significantly improve the score.\n * Exact matches get stronger boost than partial matches.\n * \n * @param query - Original search query\n * @param filepath - Path to the file\n * @param baseScore - Original distance score from vector search\n * @returns Adjusted score (lower is better)\n */\nfunction boostFilenameRelevance(\n query: string,\n filepath: string,\n baseScore: number\n): number {\n const filename = path.basename(filepath, path.extname(filepath)).toLowerCase();\n const queryTokens = query.toLowerCase().split(/\\s+/);\n \n let boostFactor = 1.0;\n \n // Check if any query token matches the filename\n for (const token of queryTokens) {\n // Skip very short tokens\n if (token.length <= 2) continue;\n \n // Exact match: 30% boost (stronger signal)\n if (filename === token) {\n boostFactor *= 0.70;\n }\n // Partial match: 20% boost\n else if (filename.includes(token)) {\n boostFactor *= 0.80;\n }\n }\n \n return baseScore * boostFactor;\n}\n\n/**\n * Intent-Specific Boosting Strategies\n */\n\n/**\n * Boost relevance for LOCATION intent queries.\n * \n * LOCATION queries (e.g., \"where is the auth handler\") need strong\n * filename and path matching with penalties for test files.\n * \n * Strategy:\n * - Filename exact match: 40% boost\n * - Filename partial match: 30% boost\n * - Path match: 15% boost\n * - Test file penalty: -10%\n * \n * @param query - Original search query\n * @param filepath - Path to the file\n * @param baseScore - Original distance score from vector search\n * @returns Boosted score (lower is better)\n */\nfunction boostForLocationIntent(\n query: string,\n filepath: string,\n baseScore: number\n): 
number {\n let score = baseScore;\n \n // Apply strong filename boosting\n const filename = path.basename(filepath, path.extname(filepath)).toLowerCase();\n const queryTokens = query.toLowerCase().split(/\\s+/);\n \n for (const token of queryTokens) {\n if (token.length <= 2) continue;\n \n // Exact match: 40% boost (very strong for location queries)\n if (filename === token) {\n score *= 0.60;\n }\n // Partial match: 30% boost\n else if (filename.includes(token)) {\n score *= 0.70;\n }\n }\n \n // Apply path boosting\n score = boostPathRelevance(query, filepath, score);\n \n // Penalize test files for location queries\n // Users usually want production code, not tests\n if (isTestFile(filepath)) {\n score *= 1.10; // 10% penalty (higher score = worse)\n }\n \n return score;\n}\n\n/**\n * Boost relevance for CONCEPTUAL intent queries.\n * \n * CONCEPTUAL queries (e.g., \"how does authentication work\") need\n * documentation and architecture files boosted.\n * \n * Strategy:\n * - Documentation files: 35% boost\n * - Architecture/flow files: Additional 10% boost\n * - Utility files: 5% penalty\n * - Reduced filename/path boosting: 10% filename, 5% path\n * \n * @param query - Original search query\n * @param filepath - Path to the file\n * @param baseScore - Original distance score from vector search\n * @returns Boosted score (lower is better)\n */\nfunction boostForConceptualIntent(\n query: string,\n filepath: string,\n baseScore: number\n): number {\n let score = baseScore;\n \n // Strong boost for documentation files\n if (isDocumentationFile(filepath)) {\n score *= 0.65; // 35% boost\n \n // Extra boost for architecture/workflow documentation\n const lower = filepath.toLowerCase();\n if (\n lower.includes('architecture') ||\n lower.includes('workflow') ||\n lower.includes('flow')\n ) {\n score *= 0.90; // Additional 10% boost\n }\n }\n \n // Light penalty for utility files (too low-level for conceptual queries)\n if (isUtilityFile(filepath)) {\n score *= 1.05; // 5% penalty\n }\n \n // Apply reduced filename/path boosting (less important for conceptual queries)\n const filename = path.basename(filepath, path.extname(filepath)).toLowerCase();\n const queryTokens = query.toLowerCase().split(/\\s+/);\n \n for (const token of queryTokens) {\n if (token.length <= 2) continue;\n \n // Reduced filename boost: 10%\n if (filename.includes(token)) {\n score *= 0.90;\n }\n }\n \n // Reduced path boost: 5%\n const pathSegments = filepath.toLowerCase().split(path.sep);\n for (const token of queryTokens) {\n if (token.length <= 2) continue;\n \n for (const segment of pathSegments) {\n if (segment.includes(token)) {\n score *= 0.95;\n break;\n }\n }\n }\n \n return score;\n}\n\n/**\n * Boost relevance for IMPLEMENTATION intent queries.\n * \n * IMPLEMENTATION queries (e.g., \"how is authentication implemented\")\n * need balanced boosting with moderate test file boost to show usage.\n * \n * Strategy:\n * - Filename exact match: 30% boost\n * - Filename partial match: 20% boost\n * - Path match: 10% boost\n * - Test files: 10% boost (to show real usage)\n * \n * This is the default/balanced strategy.\n * \n * @param query - Original search query\n * @param filepath - Path to the file\n * @param baseScore - Original distance score from vector search\n * @returns Boosted score (lower is better)\n */\nfunction boostForImplementationIntent(\n query: string,\n filepath: string,\n baseScore: number\n): number {\n let score = baseScore;\n \n // Apply standard filename boosting\n score = 
boostFilenameRelevance(query, filepath, score);\n \n // Apply standard path boosting\n score = boostPathRelevance(query, filepath, score);\n \n // Moderate boost for test files (they show real usage patterns)\n if (isTestFile(filepath)) {\n score *= 0.90; // 10% boost\n }\n \n return score;\n}\n\n/**\n * Apply all relevance boosting strategies to a search score.\n * \n * Uses query intent classification to apply appropriate boosting:\n * - LOCATION: Strong filename/path boost, test penalty\n * - CONCEPTUAL: Documentation boost, utility penalty\n * - IMPLEMENTATION: Balanced boost with test file boost\n * \n * @param query - Original search query (optional)\n * @param filepath - Path to the file\n * @param baseScore - Original distance score from vector search\n * @returns Boosted score (lower is better)\n */\nfunction applyRelevanceBoosting(\n query: string | undefined,\n filepath: string,\n baseScore: number\n): number {\n if (!query) {\n return baseScore;\n }\n \n // Classify query intent\n const intent = classifyQueryIntent(query);\n \n // Apply intent-specific boosting\n switch (intent) {\n case QueryIntent.LOCATION:\n return boostForLocationIntent(query, filepath, baseScore);\n \n case QueryIntent.CONCEPTUAL:\n return boostForConceptualIntent(query, filepath, baseScore);\n \n case QueryIntent.IMPLEMENTATION:\n return boostForImplementationIntent(query, filepath, baseScore);\n \n default:\n // Fallback to implementation strategy\n return boostForImplementationIntent(query, filepath, baseScore);\n }\n}\n\ntype LanceDBConnection = Awaited<ReturnType<typeof lancedb.connect>>;\ntype LanceDBTable = Awaited<ReturnType<LanceDBConnection['openTable']>>;\n\n/**\n * Database record structure as stored in LanceDB\n */\ninterface DBRecord {\n vector: number[];\n content: string;\n file: string;\n startLine: number;\n endLine: number;\n type: string;\n language: string;\n functionNames: string[];\n classNames: string[];\n interfaceNames: string[];\n _distance?: number; // Added by LanceDB for search results\n}\n\nexport class VectorDB implements VectorDBInterface {\n private db: LanceDBConnection | null = null;\n private table: LanceDBTable | null = null;\n public readonly dbPath: string;\n private readonly tableName = 'code_chunks';\n private lastVersionCheck: number = 0;\n private currentVersion: number = 0;\n \n constructor(projectRoot: string) {\n // Store in user's home directory under ~/.lien/indices/{projectName-hash}\n const projectName = path.basename(projectRoot);\n \n // Create unique identifier from full path to prevent collisions\n // This ensures projects with same name in different locations get separate indices\n const pathHash = crypto\n .createHash('md5')\n .update(projectRoot)\n .digest('hex')\n .substring(0, 8);\n \n this.dbPath = path.join(\n os.homedir(),\n '.lien',\n 'indices',\n `${projectName}-${pathHash}`\n );\n }\n \n async initialize(): Promise<void> {\n try {\n this.db = await lancedb.connect(this.dbPath);\n \n try {\n this.table = await this.db.openTable(this.tableName);\n } catch {\n // Table doesn't exist yet - will be created on first insert\n // Set table to null to signal it needs creation\n this.table = null;\n }\n \n // Read and cache the current version\n try {\n this.currentVersion = await readVersionFile(this.dbPath);\n } catch {\n // Version file doesn't exist yet, will be created on first index\n this.currentVersion = 0;\n }\n } catch (error: unknown) {\n throw wrapError(error, 'Failed to initialize vector database', { dbPath: this.dbPath });\n }\n }\n \n 
async insertBatch(\n vectors: Float32Array[],\n metadatas: ChunkMetadata[],\n contents: string[]\n ): Promise<void> {\n if (!this.db) {\n throw new DatabaseError('Vector database not initialized');\n }\n \n if (vectors.length !== metadatas.length || vectors.length !== contents.length) {\n throw new DatabaseError('Vectors, metadatas, and contents arrays must have the same length', {\n vectorsLength: vectors.length,\n metadatasLength: metadatas.length,\n contentsLength: contents.length,\n });\n }\n \n // Handle empty batch gracefully\n if (vectors.length === 0) {\n return;\n }\n \n // Split large batches into smaller chunks for better reliability\n if (vectors.length > VECTOR_DB_MAX_BATCH_SIZE) {\n // Split into smaller batches\n for (let i = 0; i < vectors.length; i += VECTOR_DB_MAX_BATCH_SIZE) {\n const batchVectors = vectors.slice(i, Math.min(i + VECTOR_DB_MAX_BATCH_SIZE, vectors.length));\n const batchMetadata = metadatas.slice(i, Math.min(i + VECTOR_DB_MAX_BATCH_SIZE, vectors.length));\n const batchContents = contents.slice(i, Math.min(i + VECTOR_DB_MAX_BATCH_SIZE, vectors.length));\n \n await this._insertBatchInternal(batchVectors, batchMetadata, batchContents);\n }\n } else {\n await this._insertBatchInternal(vectors, metadatas, contents);\n }\n }\n \n /**\n * Internal method to insert a single batch with iterative retry logic.\n * Uses a queue-based approach to avoid deep recursion on large batch failures.\n */\n private async _insertBatchInternal(\n vectors: Float32Array[],\n metadatas: ChunkMetadata[],\n contents: string[]\n ): Promise<void> {\n // Queue of batches to process (start with the full batch)\n interface BatchToProcess {\n vectors: Float32Array[];\n metadatas: ChunkMetadata[];\n contents: string[];\n }\n \n const queue: BatchToProcess[] = [{ vectors, metadatas, contents }];\n const failedRecords: BatchToProcess[] = [];\n \n // Process batches iteratively\n while (queue.length > 0) {\n const batch = queue.shift()!;\n \n try {\n const records = batch.vectors.map((vector, i) => ({\n vector: Array.from(vector),\n content: batch.contents[i],\n file: batch.metadatas[i].file,\n startLine: batch.metadatas[i].startLine,\n endLine: batch.metadatas[i].endLine,\n type: batch.metadatas[i].type,\n language: batch.metadatas[i].language,\n // Ensure arrays have at least empty string for Arrow type inference\n functionNames: (batch.metadatas[i].symbols?.functions && batch.metadatas[i].symbols.functions.length > 0) ? batch.metadatas[i].symbols.functions : [''],\n classNames: (batch.metadatas[i].symbols?.classes && batch.metadatas[i].symbols.classes.length > 0) ? batch.metadatas[i].symbols.classes : [''],\n interfaceNames: (batch.metadatas[i].symbols?.interfaces && batch.metadatas[i].symbols.interfaces.length > 0) ? 
batch.metadatas[i].symbols.interfaces : [''],\n }));\n \n // Create table if it doesn't exist, otherwise add to existing table\n if (!this.table) {\n // Let LanceDB createTable handle type inference from the data\n this.table = await this.db!.createTable(this.tableName, records) as LanceDBTable;\n } else {\n await this.table.add(records);\n }\n } catch (error) {\n // If batch has more than min size records, split and retry\n if (batch.vectors.length > VECTOR_DB_MIN_BATCH_SIZE) {\n const half = Math.floor(batch.vectors.length / 2);\n \n // Split in half and add back to queue\n queue.push({\n vectors: batch.vectors.slice(0, half),\n metadatas: batch.metadatas.slice(0, half),\n contents: batch.contents.slice(0, half),\n });\n queue.push({\n vectors: batch.vectors.slice(half),\n metadatas: batch.metadatas.slice(half),\n contents: batch.contents.slice(half),\n });\n } else {\n // Small batch failed - collect for final error report\n failedRecords.push(batch);\n }\n }\n }\n \n // If any small batches failed, throw error with details\n if (failedRecords.length > 0) {\n const totalFailed = failedRecords.reduce((sum, batch) => sum + batch.vectors.length, 0);\n throw new DatabaseError(\n `Failed to insert ${totalFailed} record(s) after retry attempts`,\n {\n failedBatches: failedRecords.length,\n totalRecords: totalFailed,\n sampleFile: failedRecords[0].metadatas[0].file,\n }\n );\n }\n }\n \n async search(\n queryVector: Float32Array,\n limit: number = 5,\n query?: string\n ): Promise<SearchResult[]> {\n if (!this.table) {\n throw new DatabaseError('Vector database not initialized');\n }\n \n try {\n // Request more results than needed to account for filtering and re-ranking\n const results = await this.table\n .search(Array.from(queryVector))\n .limit(limit + 20) // Get extra for re-ranking after boosting\n .execute();\n \n // Filter out empty content, apply boosting, then sort by boosted score\n const filtered = (results as unknown as DBRecord[])\n .filter((r: DBRecord) => \n r.content && \n r.content.trim().length > 0 &&\n r.file && \n r.file.length > 0\n )\n .map((r: DBRecord) => {\n const baseScore = r._distance ?? 0;\n const boostedScore = applyRelevanceBoosting(query, r.file, baseScore);\n \n return {\n content: r.content,\n metadata: {\n file: r.file,\n startLine: r.startLine,\n endLine: r.endLine,\n type: r.type as 'function' | 'class' | 'block',\n language: r.language,\n },\n score: boostedScore,\n relevance: calculateRelevance(boostedScore),\n };\n })\n .sort((a, b) => a.score - b.score) // Re-sort by boosted score\n .slice(0, limit); // Take only the requested number after re-ranking\n \n return filtered;\n } catch (error) {\n const errorMsg = String(error);\n \n // Detect corrupted index or missing data files (common after reindexing)\n if (errorMsg.includes('Not found:') || errorMsg.includes('.lance')) {\n // Attempt to reconnect - index may have been rebuilt\n try {\n await this.initialize();\n \n // Retry search with fresh connection\n const results = await this.table\n .search(Array.from(queryVector))\n .limit(limit + 20)\n .execute();\n \n return (results as unknown as DBRecord[])\n .filter((r: DBRecord) => \n r.content && \n r.content.trim().length > 0 &&\n r.file && \n r.file.length > 0\n )\n .map((r: DBRecord) => {\n const baseScore = r._distance ?? 
0;\n const boostedScore = applyRelevanceBoosting(query, r.file, baseScore);\n \n return {\n content: r.content,\n metadata: {\n file: r.file,\n startLine: r.startLine,\n endLine: r.endLine,\n type: r.type as 'function' | 'class' | 'block',\n language: r.language,\n },\n score: boostedScore,\n relevance: calculateRelevance(boostedScore),\n };\n })\n .sort((a, b) => a.score - b.score)\n .slice(0, limit);\n } catch (retryError: unknown) {\n throw new DatabaseError(\n `Index appears corrupted or outdated. Please restart the MCP server or run 'lien reindex' in the project directory.`,\n { originalError: retryError }\n );\n }\n }\n \n throw wrapError(error, 'Failed to search vector database');\n }\n }\n \n async scanWithFilter(options: {\n language?: string;\n pattern?: string;\n limit?: number;\n }): Promise<SearchResult[]> {\n if (!this.table) {\n throw new DatabaseError('Vector database not initialized');\n }\n \n const { language, pattern, limit = 100 } = options;\n \n try {\n // Use vector search with zero vector to get a large sample\n // This is a workaround since LanceDB doesn't have a direct scan API\n const zeroVector = Array(EMBEDDING_DIMENSION).fill(0);\n const query = this.table.search(zeroVector)\n .where('file != \"\"')\n .limit(Math.max(limit * 5, 200)); // Get a larger sample to ensure we have enough after filtering\n \n const results = await query.execute();\n \n // Filter in JavaScript for more reliable filtering\n let filtered = (results as unknown as DBRecord[]).filter((r: DBRecord) => \n r.content && \n r.content.trim().length > 0 &&\n r.file && \n r.file.length > 0\n );\n \n // Apply language filter\n if (language) {\n filtered = filtered.filter((r: DBRecord) => \n r.language && r.language.toLowerCase() === language.toLowerCase()\n );\n }\n \n // Apply regex pattern filter\n if (pattern) {\n const regex = new RegExp(pattern, 'i');\n filtered = filtered.filter((r: DBRecord) =>\n regex.test(r.content) || regex.test(r.file)\n );\n }\n \n return filtered.slice(0, limit).map((r: DBRecord) => {\n const score = 0;\n return {\n content: r.content,\n metadata: {\n file: r.file,\n startLine: r.startLine,\n endLine: r.endLine,\n type: r.type as 'function' | 'class' | 'block',\n language: r.language,\n },\n score,\n relevance: calculateRelevance(score),\n };\n });\n } catch (error) {\n throw wrapError(error, 'Failed to scan with filter');\n }\n }\n \n async querySymbols(options: {\n language?: string;\n pattern?: string;\n symbolType?: 'function' | 'class' | 'interface';\n limit?: number;\n }): Promise<SearchResult[]> {\n if (!this.table) {\n throw new DatabaseError('Vector database not initialized');\n }\n \n const { language, pattern, symbolType, limit = 50 } = options;\n \n try {\n // Use vector search with zero vector to get a large sample\n const zeroVector = Array(EMBEDDING_DIMENSION).fill(0);\n const query = this.table.search(zeroVector)\n .where('file != \"\"')\n .limit(Math.max(limit * 10, 500)); // Get a large sample to ensure we have enough after symbol filtering\n \n const results = await query.execute();\n \n // Filter in JavaScript for more precise control\n let filtered = (results as unknown as DBRecord[]).filter((r: DBRecord) => {\n // Basic validation\n if (!r.content || r.content.trim().length === 0) {\n return false;\n }\n if (!r.file || r.file.length === 0) {\n return false;\n }\n \n // Language filter\n if (language && (!r.language || r.language.toLowerCase() !== language.toLowerCase())) {\n return false;\n }\n \n // Get relevant symbol names based on symbolType\n 
const symbols = symbolType === 'function' ? (r.functionNames || []) :\n symbolType === 'class' ? (r.classNames || []) :\n symbolType === 'interface' ? (r.interfaceNames || []) :\n [...(r.functionNames || []), ...(r.classNames || []), ...(r.interfaceNames || [])];\n \n // Must have at least one symbol\n if (symbols.length === 0) {\n return false;\n }\n \n // Pattern filter on symbol names\n if (pattern) {\n const regex = new RegExp(pattern, 'i');\n return symbols.some((s: string) => regex.test(s));\n }\n \n return true;\n });\n \n return filtered.slice(0, limit).map((r: DBRecord) => {\n const score = 0;\n return {\n content: r.content,\n metadata: {\n file: r.file,\n startLine: r.startLine,\n endLine: r.endLine,\n type: r.type as 'function' | 'class' | 'block',\n language: r.language,\n symbols: {\n functions: r.functionNames || [],\n classes: r.classNames || [],\n interfaces: r.interfaceNames || [],\n },\n },\n score,\n relevance: calculateRelevance(score),\n };\n });\n } catch (error) {\n throw wrapError(error, 'Failed to query symbols');\n }\n }\n \n async clear(): Promise<void> {\n if (!this.db) {\n throw new DatabaseError('Vector database not initialized');\n }\n \n try {\n // Drop table if it exists\n if (this.table) {\n await this.db.dropTable(this.tableName);\n }\n // Set table to null - will be recreated on first insert\n this.table = null;\n } catch (error) {\n throw wrapError(error, 'Failed to clear vector database');\n }\n }\n \n /**\n * Deletes all chunks from a specific file.\n * Used for incremental reindexing when a file is deleted or needs to be re-indexed.\n * \n * @param filepath - Path to the file whose chunks should be deleted\n */\n async deleteByFile(filepath: string): Promise<void> {\n if (!this.table) {\n throw new DatabaseError('Vector database not initialized');\n }\n \n try {\n // Use LanceDB's SQL-like delete with predicate\n await this.table.delete(`file = \"${filepath}\"`);\n } catch (error) {\n throw wrapError(error, 'Failed to delete file from vector database');\n }\n }\n \n /**\n * Updates a file in the index by atomically deleting old chunks and inserting new ones.\n * This is the primary method for incremental reindexing.\n * \n * @param filepath - Path to the file being updated\n * @param vectors - New embedding vectors\n * @param metadatas - New chunk metadata\n * @param contents - New chunk contents\n */\n async updateFile(\n filepath: string,\n vectors: Float32Array[],\n metadatas: ChunkMetadata[],\n contents: string[]\n ): Promise<void> {\n if (!this.table) {\n throw new DatabaseError('Vector database not initialized');\n }\n \n try {\n // 1. Delete old chunks from this file\n await this.deleteByFile(filepath);\n \n // 2. Insert new chunks (if any)\n if (vectors.length > 0) {\n await this.insertBatch(vectors, metadatas, contents);\n }\n \n // 3. 
Update version file to trigger MCP reconnection\n await writeVersionFile(this.dbPath);\n } catch (error) {\n throw wrapError(error, 'Failed to update file in vector database');\n }\n }\n \n /**\n * Checks if the index version has changed since last check.\n * Uses caching to minimize I/O overhead (checks at most once per second).\n * \n * @returns true if version has changed, false otherwise\n */\n async checkVersion(): Promise<boolean> {\n const now = Date.now();\n \n // Cache version checks for 1 second to minimize I/O\n if (now - this.lastVersionCheck < 1000) {\n return false;\n }\n \n this.lastVersionCheck = now;\n \n try {\n const version = await readVersionFile(this.dbPath);\n \n if (version > this.currentVersion) {\n this.currentVersion = version;\n return true;\n }\n \n return false;\n } catch (error) {\n // If we can't read version file, don't reconnect\n return false;\n }\n }\n \n /**\n * Reconnects to the database by reinitializing the connection.\n * Used when the index has been rebuilt/reindexed.\n * Forces a complete reload from disk by closing existing connections first.\n */\n async reconnect(): Promise<void> {\n try {\n // Close existing connections to force reload from disk\n this.table = null;\n this.db = null;\n \n // Reinitialize with fresh connection\n await this.initialize();\n } catch (error) {\n throw wrapError(error, 'Failed to reconnect to vector database');\n }\n }\n \n /**\n * Gets the current index version (timestamp of last reindex).\n * \n * @returns Version timestamp, or 0 if unknown\n */\n getCurrentVersion(): number {\n return this.currentVersion;\n }\n \n /**\n * Gets the current index version as a human-readable date string.\n * \n * @returns Formatted date string, or 'Unknown' if no version\n */\n getVersionDate(): string {\n if (this.currentVersion === 0) {\n return 'Unknown';\n }\n return new Date(this.currentVersion).toLocaleString();\n }\n \n /**\n * Checks if the database contains real indexed data.\n * Used to detect first run and trigger auto-indexing.\n * \n * @returns true if database has real code chunks, false if empty or only schema rows\n */\n async hasData(): Promise<boolean> {\n if (!this.table) {\n return false;\n }\n \n try {\n const count = await this.table.countRows();\n \n // Check if table is empty\n if (count === 0) {\n return false;\n }\n \n // Check if all rows are empty (schema rows only)\n // Sample a few rows to verify they contain real data\n const sample = await this.table\n .search(Array(EMBEDDING_DIMENSION).fill(0))\n .limit(Math.min(count, 5))\n .execute();\n \n const hasRealData = (sample as unknown as DBRecord[]).some((r: DBRecord) => \n r.content && \n r.content.trim().length > 0\n );\n \n return hasRealData;\n } catch {\n // If any error occurs, assume no data\n return false;\n }\n }\n \n static async load(projectRoot: string): Promise<VectorDB> {\n const db = new VectorDB(projectRoot);\n await db.initialize();\n return db;\n }\n}\n\n","import { createRequire } from 'module';\nimport { fileURLToPath } from 'url';\nimport { dirname, join } from 'path';\n\n/**\n * Centralized package version loader.\n * Handles different build output structures (development vs production).\n * \n * Build scenarios:\n * - Development (ts-node): src/utils/version.ts → ../package.json\n * - Production (dist): dist/utils/version.js → ../package.json\n * - Nested builds: dist/something/version.js → ../../package.json\n */\n\n// Setup require for ESM\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = 
dirname(__filename);\nconst require = createRequire(import.meta.url);\n\nlet packageJson: { version: string; name?: string };\n\ntry {\n // Try relative to current file (works in most scenarios)\n packageJson = require(join(__dirname, '../package.json'));\n} catch {\n try {\n // Fallback: go up one more level (nested build output)\n packageJson = require(join(__dirname, '../../package.json'));\n } catch {\n // Last resort: hardcoded fallback (should never happen in production)\n console.warn('[Lien] Warning: Could not load package.json, using fallback version');\n packageJson = { version: '0.0.0-unknown' };\n }\n}\n\n/**\n * Get the current package version\n */\nexport function getPackageVersion(): string {\n return packageJson.version;\n}\n\n/**\n * Get the full package.json (for compatibility)\n */\nexport function getPackageInfo(): { version: string; name?: string } {\n return packageJson;\n}\n\n","import fs from 'fs/promises';\nimport path from 'path';\nimport { INDEX_FORMAT_VERSION } from '../constants.js';\nimport { GitState } from '../git/tracker.js';\nimport { getPackageVersion } from '../utils/version.js';\n\nconst MANIFEST_FILE = 'manifest.json';\n\n/**\n * Represents a single file in the index manifest\n */\nexport interface FileEntry {\n filepath: string;\n lastModified: number;\n chunkCount: number;\n}\n\n/**\n * Index manifest tracking all indexed files and version information\n */\nexport interface IndexManifest {\n formatVersion: number; // Index format version for compatibility checking\n lienVersion: string; // Lien package version (for reference)\n lastIndexed: number; // Timestamp of last indexing operation\n gitState?: GitState; // Last known git state\n files: Record<string, FileEntry>; // Map of filepath -> FileEntry (stored as object for JSON)\n}\n\n/**\n * Manages the index manifest file, tracking which files are indexed\n * and their metadata for incremental indexing support.\n * \n * The manifest includes version checking to invalidate indices when\n * Lien's indexing format changes (e.g., new chunking algorithm,\n * different embedding model, schema changes).\n */\nexport class ManifestManager {\n private manifestPath: string;\n private indexPath: string;\n \n /**\n * Promise-based lock to prevent race conditions during concurrent updates.\n * Ensures read-modify-write operations are atomic.\n */\n private updateLock = Promise.resolve();\n \n /**\n * Creates a new ManifestManager\n * @param indexPath - Path to the index directory (same as VectorDB path)\n */\n constructor(indexPath: string) {\n this.indexPath = indexPath;\n this.manifestPath = path.join(indexPath, MANIFEST_FILE);\n }\n \n /**\n * Loads the manifest from disk.\n * Returns null if:\n * - Manifest doesn't exist (first run)\n * - Manifest is corrupt\n * - Format version is incompatible (triggers full reindex)\n * \n * @returns Loaded manifest or null\n */\n async load(): Promise<IndexManifest | null> {\n try {\n const content = await fs.readFile(this.manifestPath, 'utf-8');\n const manifest = JSON.parse(content) as IndexManifest;\n \n // VERSION CHECK: Invalidate if format version doesn't match\n if (manifest.formatVersion !== INDEX_FORMAT_VERSION) {\n console.error(\n `[Lien] Index format v${manifest.formatVersion} is incompatible with current v${INDEX_FORMAT_VERSION}`\n );\n console.error(`[Lien] Full reindex required after Lien upgrade`);\n \n // Clear old manifest and return null (triggers full reindex)\n await this.clear();\n return null;\n }\n \n return manifest;\n } catch (error) {\n // File 
doesn't exist or is invalid - return null for first run\n if ((error as NodeJS.ErrnoException).code === 'ENOENT') {\n return null;\n }\n \n // Corrupt manifest - log warning and return null\n console.error(`[Lien] Warning: Failed to load manifest: ${error}`);\n return null;\n }\n }\n \n /**\n * Saves the manifest to disk.\n * Always saves with current format and package versions.\n * \n * @param manifest - Manifest to save\n */\n async save(manifest: IndexManifest): Promise<void> {\n try {\n // Ensure index directory exists\n await fs.mkdir(this.indexPath, { recursive: true });\n \n // Always save with current versions\n const manifestToSave: IndexManifest = {\n ...manifest,\n formatVersion: INDEX_FORMAT_VERSION,\n lienVersion: getPackageVersion(),\n lastIndexed: Date.now(),\n };\n \n const content = JSON.stringify(manifestToSave, null, 2);\n await fs.writeFile(this.manifestPath, content, 'utf-8');\n } catch (error) {\n // Don't throw - manifest is best-effort\n console.error(`[Lien] Warning: Failed to save manifest: ${error}`);\n }\n }\n \n /**\n * Adds or updates a file entry in the manifest.\n * Protected by lock to prevent race conditions during concurrent updates.\n * \n * @param filepath - Path to the file\n * @param entry - File entry metadata\n */\n async updateFile(filepath: string, entry: FileEntry): Promise<void> {\n // Chain this operation to the lock to ensure atomicity\n this.updateLock = this.updateLock.then(async () => {\n const manifest = await this.load() || this.createEmpty();\n manifest.files[filepath] = entry;\n await this.save(manifest);\n }).catch(error => {\n console.error(`[Lien] Failed to update manifest for ${filepath}: ${error}`);\n // Return to reset lock - don't let errors block future operations\n return undefined;\n });\n \n // Wait for this operation to complete\n await this.updateLock;\n }\n \n /**\n * Removes a file entry from the manifest.\n * Protected by lock to prevent race conditions during concurrent updates.\n * \n * Note: If the manifest doesn't exist, this is a no-op (not an error).\n * This can happen legitimately after clearing the index or on fresh installs.\n * \n * @param filepath - Path to the file to remove\n */\n async removeFile(filepath: string): Promise<void> {\n // Chain this operation to the lock to ensure atomicity\n this.updateLock = this.updateLock.then(async () => {\n const manifest = await this.load();\n if (!manifest) {\n // No manifest exists - nothing to remove from (expected in some scenarios)\n return;\n }\n \n delete manifest.files[filepath];\n await this.save(manifest);\n }).catch(error => {\n console.error(`[Lien] Failed to remove manifest entry for ${filepath}: ${error}`);\n // Return to reset lock - don't let errors block future operations\n return undefined;\n });\n \n // Wait for this operation to complete\n await this.updateLock;\n }\n \n /**\n * Updates multiple files at once (more efficient than individual updates).\n * Protected by lock to prevent race conditions during concurrent updates.\n * \n * @param entries - Array of file entries to update\n */\n async updateFiles(entries: FileEntry[]): Promise<void> {\n // Chain this operation to the lock to ensure atomicity\n this.updateLock = this.updateLock.then(async () => {\n const manifest = await this.load() || this.createEmpty();\n \n for (const entry of entries) {\n manifest.files[entry.filepath] = entry;\n }\n \n await this.save(manifest);\n }).catch(error => {\n console.error(`[Lien] Failed to update manifest for ${entries.length} files: ${error}`);\n // Return 
to reset lock - don't let errors block future operations\n return undefined;\n });\n \n // Wait for this operation to complete\n await this.updateLock;\n }\n \n /**\n * Updates the git state in the manifest.\n * Protected by lock to prevent race conditions during concurrent updates.\n * \n * @param gitState - Current git state\n */\n async updateGitState(gitState: GitState): Promise<void> {\n // Chain this operation to the lock to ensure atomicity\n this.updateLock = this.updateLock.then(async () => {\n const manifest = await this.load() || this.createEmpty();\n \n manifest.gitState = gitState;\n await this.save(manifest);\n }).catch(error => {\n console.error(`[Lien] Failed to update git state in manifest: ${error}`);\n // Return to reset lock - don't let errors block future operations\n return undefined;\n });\n \n // Wait for this operation to complete\n await this.updateLock;\n }\n \n /**\n * Gets the list of files currently in the manifest\n * \n * @returns Array of filepaths\n */\n async getIndexedFiles(): Promise<string[]> {\n const manifest = await this.load();\n if (!manifest) return [];\n \n return Object.keys(manifest.files);\n }\n \n /**\n * Detects which files have changed based on mtime comparison\n * \n * @param currentFiles - Map of current files with their mtimes\n * @returns Array of filepaths that have changed\n */\n async getChangedFiles(currentFiles: Map<string, number>): Promise<string[]> {\n const manifest = await this.load();\n if (!manifest) {\n // No manifest = all files are \"changed\" (need full index)\n return Array.from(currentFiles.keys());\n }\n \n const changedFiles: string[] = [];\n \n for (const [filepath, mtime] of currentFiles) {\n const entry = manifest.files[filepath];\n \n if (!entry) {\n // New file\n changedFiles.push(filepath);\n } else if (entry.lastModified < mtime) {\n // File modified since last index\n changedFiles.push(filepath);\n }\n }\n \n return changedFiles;\n }\n \n /**\n * Gets files that are in the manifest but not in the current file list\n * (i.e., deleted files)\n * \n * @param currentFiles - Set of current file paths\n * @returns Array of deleted file paths\n */\n async getDeletedFiles(currentFiles: Set<string>): Promise<string[]> {\n const manifest = await this.load();\n if (!manifest) return [];\n \n const deletedFiles: string[] = [];\n \n for (const filepath of Object.keys(manifest.files)) {\n if (!currentFiles.has(filepath)) {\n deletedFiles.push(filepath);\n }\n }\n \n return deletedFiles;\n }\n \n /**\n * Clears the manifest file\n */\n async clear(): Promise<void> {\n try {\n await fs.unlink(this.manifestPath);\n } catch (error) {\n // Ignore error if file doesn't exist\n if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {\n console.error(`[Lien] Warning: Failed to clear manifest: ${error}`);\n }\n }\n }\n \n /**\n * Creates an empty manifest with current version information\n * \n * @returns Empty manifest\n */\n private createEmpty(): IndexManifest {\n return {\n formatVersion: INDEX_FORMAT_VERSION,\n lienVersion: getPackageVersion(),\n lastIndexed: Date.now(),\n files: {},\n };\n }\n}\n\n","import fs from 'fs/promises';\nimport path from 'path';\nimport {\n isGitRepo,\n getCurrentBranch,\n getCurrentCommit,\n getChangedFiles,\n getChangedFilesBetweenCommits,\n} from './utils.js';\n\nexport interface GitState {\n branch: string;\n commit: string;\n timestamp: number;\n}\n\n/**\n * Tracks git state (branch and commit) and detects changes.\n * Persists state to disk to survive server restarts.\n */\nexport class 
GitStateTracker {\n private stateFile: string;\n private rootDir: string;\n private currentState: GitState | null = null;\n \n constructor(rootDir: string, indexPath: string) {\n this.rootDir = rootDir;\n this.stateFile = path.join(indexPath, '.git-state.json');\n }\n \n /**\n * Loads the last known git state from disk.\n * Returns null if no state file exists (first run).\n */\n private async loadState(): Promise<GitState | null> {\n try {\n const content = await fs.readFile(this.stateFile, 'utf-8');\n return JSON.parse(content);\n } catch {\n // File doesn't exist or is invalid - this is fine for first run\n return null;\n }\n }\n \n /**\n * Saves the current git state to disk.\n */\n private async saveState(state: GitState): Promise<void> {\n try {\n const content = JSON.stringify(state, null, 2);\n await fs.writeFile(this.stateFile, content, 'utf-8');\n } catch (error) {\n // Log but don't throw - state persistence is best-effort\n console.error(`[Lien] Warning: Failed to save git state: ${error}`);\n }\n }\n \n /**\n * Gets the current git state from the repository.\n * \n * @returns Current git state\n * @throws Error if git commands fail\n */\n private async getCurrentGitState(): Promise<GitState> {\n const branch = await getCurrentBranch(this.rootDir);\n const commit = await getCurrentCommit(this.rootDir);\n \n return {\n branch,\n commit,\n timestamp: Date.now(),\n };\n }\n \n /**\n * Initializes the tracker by loading saved state and checking current state.\n * Should be called once when MCP server starts.\n * \n * @returns Array of changed files if state changed, null if no changes or first run\n */\n async initialize(): Promise<string[] | null> {\n // Check if this is a git repo\n const isRepo = await isGitRepo(this.rootDir);\n if (!isRepo) {\n return null;\n }\n \n try {\n // Get current state\n this.currentState = await this.getCurrentGitState();\n \n // Load previous state\n const previousState = await this.loadState();\n \n if (!previousState) {\n // First run - save current state\n await this.saveState(this.currentState);\n return null;\n }\n \n // Check if state changed\n const branchChanged = previousState.branch !== this.currentState.branch;\n const commitChanged = previousState.commit !== this.currentState.commit;\n \n if (!branchChanged && !commitChanged) {\n // No changes\n return null;\n }\n \n // State changed - get list of changed files\n let changedFiles: string[] = [];\n \n if (branchChanged) {\n // Branch changed - compare current branch with previous branch\n try {\n changedFiles = await getChangedFiles(\n this.rootDir,\n previousState.branch,\n this.currentState.branch\n );\n } catch (error) {\n // If branches diverged too much or don't exist, fall back to commit diff\n console.error(`[Lien] Branch diff failed, using commit diff: ${error}`);\n changedFiles = await getChangedFilesBetweenCommits(\n this.rootDir,\n previousState.commit,\n this.currentState.commit\n );\n }\n } else if (commitChanged) {\n // Same branch, different commit\n changedFiles = await getChangedFilesBetweenCommits(\n this.rootDir,\n previousState.commit,\n this.currentState.commit\n );\n }\n \n // Save new state\n await this.saveState(this.currentState);\n \n return changedFiles;\n } catch (error) {\n console.error(`[Lien] Failed to initialize git tracker: ${error}`);\n return null;\n }\n }\n \n /**\n * Checks for git state changes since last check.\n * This is called periodically by the MCP server.\n * \n * @returns Array of changed files if state changed, null if no changes\n */\n async 
detectChanges(): Promise<string[] | null> {\n // Check if this is a git repo\n const isRepo = await isGitRepo(this.rootDir);\n if (!isRepo) {\n return null;\n }\n \n try {\n // Get current state\n const newState = await this.getCurrentGitState();\n \n // If we don't have a previous state, just save current and return\n if (!this.currentState) {\n this.currentState = newState;\n await this.saveState(newState);\n return null;\n }\n \n // Check if state changed\n const branchChanged = this.currentState.branch !== newState.branch;\n const commitChanged = this.currentState.commit !== newState.commit;\n \n if (!branchChanged && !commitChanged) {\n // No changes\n return null;\n }\n \n // State changed - get list of changed files\n let changedFiles: string[] = [];\n \n if (branchChanged) {\n // Branch changed\n try {\n changedFiles = await getChangedFiles(\n this.rootDir,\n this.currentState.branch,\n newState.branch\n );\n } catch (error) {\n // Fall back to commit diff\n console.error(`[Lien] Branch diff failed, using commit diff: ${error}`);\n changedFiles = await getChangedFilesBetweenCommits(\n this.rootDir,\n this.currentState.commit,\n newState.commit\n );\n }\n } else if (commitChanged) {\n // Same branch, different commit\n changedFiles = await getChangedFilesBetweenCommits(\n this.rootDir,\n this.currentState.commit,\n newState.commit\n );\n }\n \n // Update current state\n this.currentState = newState;\n await this.saveState(newState);\n \n return changedFiles;\n } catch (error) {\n console.error(`[Lien] Failed to detect git changes: ${error}`);\n return null;\n }\n }\n \n /**\n * Gets the current git state.\n * Useful for status display.\n */\n getState(): GitState | null {\n return this.currentState;\n }\n \n /**\n * Manually updates the saved state.\n * Useful after manual reindexing to sync state.\n */\n async updateState(): Promise<void> {\n try {\n this.currentState = await this.getCurrentGitState();\n await this.saveState(this.currentState);\n } catch (error) {\n console.error(`[Lien] Failed to update git state: ${error}`);\n }\n }\n}\n\n","import fs from 'fs/promises';\nimport { VectorDB } from '../vectordb/lancedb.js';\nimport { ManifestManager, IndexManifest } from './manifest.js';\nimport { scanCodebase, scanCodebaseWithFrameworks } from './scanner.js';\nimport { LienConfig, LegacyLienConfig, isModernConfig, isLegacyConfig } from '../config/schema.js';\nimport { GitStateTracker } from '../git/tracker.js';\nimport { isGitAvailable, isGitRepo, getChangedFiles } from '../git/utils.js';\n\n/**\n * Result of change detection, categorized by type of change\n */\nexport interface ChangeDetectionResult {\n added: string[]; // New files not in previous index\n modified: string[]; // Existing files that have been modified\n deleted: string[]; // Files that were indexed but no longer exist\n reason: 'mtime' | 'full' | 'git-state-changed'; // How changes were detected\n}\n\n/**\n * Detects which files have changed since last indexing.\n * Uses git state detection to handle branch switches, then falls back to mtime.\n * \n * @param rootDir - Root directory of the project\n * @param vectorDB - Initialized VectorDB instance\n * @param config - Lien configuration\n * @returns Change detection result\n */\nexport async function detectChanges(\n rootDir: string,\n vectorDB: VectorDB,\n config: LienConfig | LegacyLienConfig\n): Promise<ChangeDetectionResult> {\n const manifest = new ManifestManager(vectorDB.dbPath);\n const savedManifest = await manifest.load();\n \n // No manifest = first run 
= full index\n if (!savedManifest) {\n const allFiles = await getAllFiles(rootDir, config);\n return {\n added: allFiles,\n modified: [],\n deleted: [],\n reason: 'full',\n };\n }\n \n // Check if git state has changed (branch switch, new commits)\n // This is critical because git doesn't always update mtimes when checking out files\n const gitAvailable = await isGitAvailable();\n const isRepo = await isGitRepo(rootDir);\n \n if (gitAvailable && isRepo && savedManifest.gitState) {\n const gitTracker = new GitStateTracker(rootDir, vectorDB.dbPath);\n await gitTracker.initialize();\n \n const currentState = gitTracker.getState();\n \n // If branch or commit changed, use git to detect which files actually changed\n if (currentState && \n (currentState.branch !== savedManifest.gitState.branch ||\n currentState.commit !== savedManifest.gitState.commit)) {\n \n try {\n // Get files that changed between old and new commit using git diff\n const changedFilesPaths = await getChangedFiles(\n rootDir,\n savedManifest.gitState.commit,\n currentState.commit\n );\n const changedFilesSet = new Set(changedFilesPaths);\n \n // Get all current files to determine new files and deletions\n const allFiles = await getAllFiles(rootDir, config);\n const currentFileSet = new Set(allFiles);\n \n const added: string[] = [];\n const modified: string[] = [];\n const deleted: string[] = [];\n \n // Categorize changed files\n for (const filepath of changedFilesPaths) {\n if (currentFileSet.has(filepath)) {\n // File exists - check if it's new or modified\n if (savedManifest.files[filepath]) {\n modified.push(filepath);\n } else {\n added.push(filepath);\n }\n }\n // If file doesn't exist in current set, it will be caught by deletion logic below\n }\n \n // Find truly new files (not in git diff, but not in old manifest)\n for (const filepath of allFiles) {\n if (!savedManifest.files[filepath] && !changedFilesSet.has(filepath)) {\n added.push(filepath);\n }\n }\n \n // Compute deleted files: files in old manifest but not in new branch\n for (const filepath of Object.keys(savedManifest.files)) {\n if (!currentFileSet.has(filepath)) {\n deleted.push(filepath);\n }\n }\n \n return {\n added,\n modified,\n deleted,\n reason: 'git-state-changed',\n };\n } catch (error) {\n // If git diff fails, fall back to full reindex\n console.warn(`[Lien] Git diff failed, falling back to full reindex: ${error}`);\n const allFiles = await getAllFiles(rootDir, config);\n const currentFileSet = new Set(allFiles);\n \n const deleted: string[] = [];\n for (const filepath of Object.keys(savedManifest.files)) {\n if (!currentFileSet.has(filepath)) {\n deleted.push(filepath);\n }\n }\n \n return {\n added: allFiles,\n modified: [],\n deleted,\n reason: 'git-state-changed',\n };\n }\n }\n }\n \n // Use mtime-based detection for file-level changes\n return await mtimeBasedDetection(rootDir, savedManifest, config);\n}\n\n/**\n * Gets all files in the project based on configuration\n */\nasync function getAllFiles(\n rootDir: string,\n config: LienConfig | LegacyLienConfig\n): Promise<string[]> {\n if (isModernConfig(config) && config.frameworks.length > 0) {\n return await scanCodebaseWithFrameworks(rootDir, config);\n } else if (isLegacyConfig(config)) {\n return await scanCodebase({\n rootDir,\n includePatterns: config.indexing.include,\n excludePatterns: config.indexing.exclude,\n });\n } else {\n return await scanCodebase({\n rootDir,\n includePatterns: [],\n excludePatterns: [],\n });\n }\n}\n\n/**\n * Detects changes by comparing file 
modification times\n */\nasync function mtimeBasedDetection(\n rootDir: string,\n savedManifest: IndexManifest,\n config: LienConfig | LegacyLienConfig\n): Promise<ChangeDetectionResult> {\n const added: string[] = [];\n const modified: string[] = [];\n const deleted: string[] = [];\n \n // Get all current files\n const currentFiles = await getAllFiles(rootDir, config);\n const currentFileSet = new Set(currentFiles);\n \n // Get mtimes for all current files\n const fileStats = new Map<string, number>();\n \n for (const filepath of currentFiles) {\n try {\n const stats = await fs.stat(filepath);\n fileStats.set(filepath, stats.mtimeMs);\n } catch {\n // Ignore files we can't stat\n continue;\n }\n }\n \n // Check for new and modified files\n for (const [filepath, mtime] of fileStats) {\n const entry = savedManifest.files[filepath];\n \n if (!entry) {\n // New file\n added.push(filepath);\n } else if (entry.lastModified < mtime) {\n // File modified since last index\n modified.push(filepath);\n }\n }\n \n // Check for deleted files\n for (const filepath of Object.keys(savedManifest.files)) {\n if (!currentFileSet.has(filepath)) {\n deleted.push(filepath);\n }\n }\n \n return {\n added,\n modified,\n deleted,\n reason: 'mtime',\n };\n}\n\n","import fs from 'fs/promises';\nimport { chunkFile } from './chunker.js';\nimport { EmbeddingService } from '../embeddings/types.js';\nimport { VectorDB } from '../vectordb/lancedb.js';\nimport { LienConfig, LegacyLienConfig, isModernConfig, isLegacyConfig } from '../config/schema.js';\nimport { ManifestManager } from './manifest.js';\nimport { EMBEDDING_MICRO_BATCH_SIZE } from '../constants.js';\nimport { CodeChunk } from './types.js';\n\nexport interface IncrementalIndexOptions {\n verbose?: boolean;\n}\n\n/**\n * Result of processing a file's content into chunks and embeddings.\n */\ninterface ProcessFileResult {\n chunkCount: number;\n vectors: Float32Array[];\n chunks: CodeChunk[];\n texts: string[];\n}\n\n/**\n * Shared helper that processes file content into chunks and embeddings.\n * This is the core logic shared between indexSingleFile and indexMultipleFiles.\n * \n * Returns null for empty files (0 chunks), which callers should handle appropriately.\n * \n * @param filepath - Path to the file being processed\n * @param content - File content\n * @param embeddings - Embeddings service\n * @param config - Lien configuration\n * @param verbose - Whether to log verbose output\n * @returns ProcessFileResult for non-empty files, null for empty files\n */\nasync function processFileContent(\n filepath: string,\n content: string,\n embeddings: EmbeddingService,\n config: LienConfig | LegacyLienConfig,\n verbose: boolean\n): Promise<ProcessFileResult | null> {\n // Get chunk settings (support both v0.3.0 and legacy v0.2.0 configs)\n const chunkSize = isModernConfig(config)\n ? config.core.chunkSize\n : (isLegacyConfig(config) ? config.indexing.chunkSize : 75);\n const chunkOverlap = isModernConfig(config)\n ? config.core.chunkOverlap\n : (isLegacyConfig(config) ? 
config.indexing.chunkOverlap : 10);\n \n // Chunk the file\n const chunks = chunkFile(filepath, content, {\n chunkSize,\n chunkOverlap,\n });\n \n if (chunks.length === 0) {\n // Empty file - return null so caller can handle appropriately\n if (verbose) {\n console.error(`[Lien] Empty file: ${filepath}`);\n }\n return null;\n }\n \n // Generate embeddings for all chunks\n // Use micro-batching to prevent event loop blocking\n const texts = chunks.map(c => c.content);\n const vectors: Float32Array[] = [];\n \n for (let j = 0; j < texts.length; j += EMBEDDING_MICRO_BATCH_SIZE) {\n const microBatch = texts.slice(j, Math.min(j + EMBEDDING_MICRO_BATCH_SIZE, texts.length));\n const microResults = await embeddings.embedBatch(microBatch);\n vectors.push(...microResults);\n \n // Yield to event loop for responsiveness\n if (texts.length > EMBEDDING_MICRO_BATCH_SIZE) {\n await new Promise(resolve => setImmediate(resolve));\n }\n }\n \n return {\n chunkCount: chunks.length,\n vectors,\n chunks,\n texts,\n };\n}\n\n/**\n * Indexes a single file incrementally by updating its chunks in the vector database.\n * This is the core function for incremental reindexing - it handles file changes,\n * deletions, and additions.\n * \n * @param filepath - Absolute path to the file to index\n * @param vectorDB - Initialized VectorDB instance\n * @param embeddings - Initialized embeddings service\n * @param config - Lien configuration\n * @param options - Optional settings\n */\nexport async function indexSingleFile(\n filepath: string,\n vectorDB: VectorDB,\n embeddings: EmbeddingService,\n config: LienConfig | LegacyLienConfig,\n options: IncrementalIndexOptions = {}\n): Promise<void> {\n const { verbose } = options;\n \n try {\n // Check if file exists\n try {\n await fs.access(filepath);\n } catch {\n // File doesn't exist - delete from index and manifest\n if (verbose) {\n console.error(`[Lien] File deleted: ${filepath}`);\n }\n await vectorDB.deleteByFile(filepath);\n \n const manifest = new ManifestManager(vectorDB.dbPath);\n await manifest.removeFile(filepath);\n return;\n }\n \n // Read file content\n const content = await fs.readFile(filepath, 'utf-8');\n \n // Process file content (chunking + embeddings) - shared logic\n const result = await processFileContent(filepath, content, embeddings, config, verbose || false);\n \n // Get actual file mtime for manifest\n const stats = await fs.stat(filepath);\n const manifest = new ManifestManager(vectorDB.dbPath);\n \n if (result === null) {\n // Empty file - remove from vector DB but keep in manifest with chunkCount: 0\n await vectorDB.deleteByFile(filepath);\n await manifest.updateFile(filepath, {\n filepath,\n lastModified: stats.mtimeMs,\n chunkCount: 0,\n });\n return;\n }\n \n // Non-empty file - update in database (atomic: delete old + insert new)\n await vectorDB.updateFile(\n filepath,\n result.vectors,\n result.chunks.map(c => c.metadata),\n result.texts\n );\n \n // Update manifest after successful indexing\n await manifest.updateFile(filepath, {\n filepath,\n lastModified: stats.mtimeMs,\n chunkCount: result.chunkCount,\n });\n \n if (verbose) {\n console.error(`[Lien] ✓ Updated ${filepath} (${result.chunkCount} chunks)`);\n }\n } catch (error) {\n // Log error but don't throw - we want to continue with other files\n console.error(`[Lien] ⚠️ Failed to index ${filepath}: ${error}`);\n }\n}\n\n/**\n * Indexes multiple files incrementally.\n * Processes files sequentially for simplicity and reliability.\n * \n * Note: This function counts both 
successfully indexed files AND successfully\n * handled deletions (files that don't exist but were removed from the index).\n * \n * @param filepaths - Array of absolute file paths to index\n * @param vectorDB - Initialized VectorDB instance\n * @param embeddings - Initialized embeddings service\n * @param config - Lien configuration\n * @param options - Optional settings\n * @returns Number of successfully processed files (indexed or deleted)\n */\nexport async function indexMultipleFiles(\n filepaths: string[],\n vectorDB: VectorDB,\n embeddings: EmbeddingService,\n config: LienConfig | LegacyLienConfig,\n options: IncrementalIndexOptions = {}\n): Promise<number> {\n const { verbose } = options;\n let processedCount = 0;\n \n // Batch manifest updates for performance\n const manifestEntries: Array<{ filepath: string; chunkCount: number; mtime: number }> = [];\n \n // Process each file sequentially (simple and reliable)\n for (const filepath of filepaths) {\n // Try to read the file and get its stats\n let content: string;\n let fileMtime: number;\n try {\n const stats = await fs.stat(filepath);\n fileMtime = stats.mtimeMs;\n content = await fs.readFile(filepath, 'utf-8');\n } catch (error) {\n // File doesn't exist or couldn't be read - delete from index\n if (verbose) {\n console.error(`[Lien] File not readable: ${filepath}`);\n }\n try {\n await vectorDB.deleteByFile(filepath);\n const manifest = new ManifestManager(vectorDB.dbPath);\n await manifest.removeFile(filepath);\n } catch (error) {\n // Ignore errors if file wasn't in index\n if (verbose) {\n console.error(`[Lien] Note: ${filepath} not in index`);\n }\n }\n // Count as successfully processed (we handled the deletion)\n processedCount++;\n continue;\n }\n \n try {\n // Process file content (chunking + embeddings) - shared logic\n const result = await processFileContent(filepath, content, embeddings, config, verbose || false);\n \n if (result === null) {\n // Empty file - remove from vector DB but keep in manifest with chunkCount: 0\n try {\n await vectorDB.deleteByFile(filepath);\n } catch (error) {\n // Ignore errors if file wasn't in index\n }\n \n // Update manifest immediately for empty files (not batched)\n const manifest = new ManifestManager(vectorDB.dbPath);\n await manifest.updateFile(filepath, {\n filepath,\n lastModified: fileMtime,\n chunkCount: 0,\n });\n \n // Count as successful processing (handled empty file)\n processedCount++;\n continue;\n }\n \n // Non-empty file - delete old chunks if they exist\n try {\n await vectorDB.deleteByFile(filepath);\n } catch (error) {\n // Ignore - file might not be in index yet\n }\n \n // Insert new chunks\n await vectorDB.insertBatch(\n result.vectors,\n result.chunks.map(c => c.metadata),\n result.texts\n );\n \n // Queue manifest update (batch at end) with actual file mtime\n manifestEntries.push({\n filepath,\n chunkCount: result.chunkCount,\n mtime: fileMtime,\n });\n \n if (verbose) {\n console.error(`[Lien] ✓ Updated ${filepath} (${result.chunkCount} chunks)`);\n }\n \n processedCount++;\n } catch (error) {\n // Log error but don't throw - we want to continue with other files\n console.error(`[Lien] ⚠️ Failed to index ${filepath}: ${error}`);\n }\n }\n \n // Batch update manifest at the end (much faster than updating after each file)\n if (manifestEntries.length > 0) {\n const manifest = new ManifestManager(vectorDB.dbPath);\n await manifest.updateFiles(\n manifestEntries.map(entry => ({\n filepath: entry.filepath,\n lastModified: entry.mtime, // Use actual file mtime 
for accurate change detection\n chunkCount: entry.chunkCount,\n }))\n );\n }\n \n return processedCount;\n}\n\n","/**\n * Witty loading messages to keep users entertained during long operations.\n * Inspired by tools like npm, yarn, and other personality-driven CLIs.\n */\n\nconst INDEXING_MESSAGES = [\n 'Teaching AI to read your spaghetti code...',\n 'Convincing the LLM that your variable names make sense...',\n 'Indexing your TODO comments (so many TODOs)...',\n 'Building semantic links faster than you can say \"grep\"...',\n 'Making your codebase searchable (the good, the bad, and the ugly)...',\n 'Chunking code like a boss...',\n \"Feeding your code to the neural network (it's hungry)...\",\n \"Creating embeddings (it's like compression, but fancier)...\",\n 'Teaching machines to understand your midnight commits...',\n 'Vectorizing your technical debt...',\n \"Indexing... because Ctrl+F wasn't cutting it anymore...\",\n 'Making semantic connections (unlike your last refactor)...',\n 'Processing files faster than your CI pipeline...',\n 'Embedding wisdom from your comments (all 3 of them)...',\n 'Analyzing code semantics (yes, even that one function)...',\n 'Building search index (now with 100% more AI)...',\n \"Crunching vectors like it's nobody's business...\",\n 'Linking code fragments across the spacetime continuum...',\n 'Teaching transformers about your coding style...',\n 'Preparing for semantic search domination...',\n 'Indexing your genius (and that hacky workaround from 2019)...',\n \"Making your codebase AI-readable (you're welcome, future you)...\",\n 'Converting code to math (engineers love this trick)...',\n \"Building the neural net's mental model of your app...\",\n 'Chunking files like a lumberjack, but for code...',\n];\n\nconst EMBEDDING_MESSAGES = [\n 'Generating embeddings (math is happening)...',\n 'Teaching transformers about your forEach loops...',\n 'Converting code to 384-dimensional space (wild, right?)...',\n 'Running the neural network (the Matrix, but for code)...',\n 'Creating semantic vectors (fancy word for AI magic)...',\n 'Embedding your code into hyperspace...',\n 'Teaching the model what \"clean code\" means in your codebase...',\n 'Generating vectors faster than you can say \"AI\"...',\n 'Making math from your methods...',\n 'Transforming code into numbers (the AI way)...',\n 'Processing with transformers.js (yes, it runs locally!)...',\n \"Embedding semantics (your code's hidden meaning)...\",\n 'Vectorizing variables (alliteration achieved)...',\n 'Teaching AI the difference between foo and bar...',\n 'Creating embeddings (384 dimensions of awesome)...',\n];\n\nconst MODEL_LOADING_MESSAGES = [\n 'Waking up the neural network...',\n 'Loading transformer model (patience, young padawan)...',\n 'Downloading AI brain (first run only, promise!)...',\n 'Initializing the semantic search engine...',\n 'Booting up the language model (coffee break recommended)...',\n 'Loading 100MB of pure AI goodness...',\n 'Preparing the transformer for action...',\n 'Model loading (this is why we run locally)...',\n 'Spinning up the embedding generator...',\n 'Getting the AI ready for your codebase...',\n];\n\nlet currentIndexingIndex = 0;\nlet currentEmbeddingIndex = 0;\nlet currentModelIndex = 0;\n\n/**\n * Get the next witty message for the indexing process.\n * Messages are returned sequentially in a round-robin fashion.\n */\nexport function getIndexingMessage(): string {\n const message = INDEXING_MESSAGES[currentIndexingIndex % INDEXING_MESSAGES.length];\n 
currentIndexingIndex++;\n return message;\n}\n\n/**\n * Get the next witty message for the embedding generation process.\n * Messages are returned sequentially in a round-robin fashion.\n */\nexport function getEmbeddingMessage(): string {\n const message = EMBEDDING_MESSAGES[currentEmbeddingIndex % EMBEDDING_MESSAGES.length];\n currentEmbeddingIndex++;\n return message;\n}\n\n/**\n * Get the next witty message for the model loading process.\n * Messages are returned sequentially in a round-robin fashion.\n */\nexport function getModelLoadingMessage(): string {\n const message = MODEL_LOADING_MESSAGES[currentModelIndex % MODEL_LOADING_MESSAGES.length];\n currentModelIndex++;\n return message;\n}\n\n/**\n * Reset all message counters (useful for testing)\n */\nexport function resetMessageCounters(): void {\n currentIndexingIndex = 0;\n currentEmbeddingIndex = 0;\n currentModelIndex = 0;\n}\n\n","import fs from 'fs/promises';\nimport ora from 'ora';\nimport chalk from 'chalk';\nimport pLimit from 'p-limit';\nimport { scanCodebase, scanCodebaseWithFrameworks } from './scanner.js';\nimport { chunkFile } from './chunker.js';\nimport { LocalEmbeddings } from '../embeddings/local.js';\nimport { VectorDB } from '../vectordb/lancedb.js';\nimport { configService } from '../config/service.js';\nimport { CodeChunk } from './types.js';\nimport { writeVersionFile } from '../vectordb/version.js';\nimport { isLegacyConfig, isModernConfig } from '../config/schema.js';\nimport { ManifestManager } from './manifest.js';\nimport { detectChanges } from './change-detector.js';\nimport { indexMultipleFiles } from './incremental.js';\nimport { getIndexingMessage, getEmbeddingMessage, getModelLoadingMessage } from '../utils/loading-messages.js';\nimport { EMBEDDING_MICRO_BATCH_SIZE } from '../constants.js';\n\nexport interface IndexingOptions {\n rootDir?: string;\n verbose?: boolean;\n force?: boolean; // Force full reindex, skip incremental\n}\n\ninterface ChunkWithContent {\n chunk: CodeChunk;\n content: string;\n}\n\nexport async function indexCodebase(options: IndexingOptions = {}): Promise<void> {\n const rootDir = options.rootDir ?? process.cwd();\n const spinner = ora('Starting indexing process...').start();\n let updateInterval: NodeJS.Timeout | undefined;\n \n try {\n // 1. Load configuration\n spinner.text = 'Loading configuration...';\n const config = await configService.load(rootDir);\n \n // 1.5. Initialize vector database early (needed for manifest)\n spinner.text = 'Initializing vector database...';\n const vectorDB = new VectorDB(rootDir);\n await vectorDB.initialize();\n \n // 1.6. 
Try incremental indexing if manifest exists and not forced\n if (!options.force) {\n spinner.text = 'Checking for changes...';\n const manifest = new ManifestManager(vectorDB.dbPath);\n const savedManifest = await manifest.load();\n \n if (savedManifest) {\n // Detect changes using mtime\n const changes = await detectChanges(rootDir, vectorDB, config);\n \n if (changes.reason !== 'full') {\n const totalChanges = changes.added.length + changes.modified.length;\n const totalDeleted = changes.deleted.length;\n \n if (totalChanges === 0 && totalDeleted === 0) {\n spinner.succeed('No changes detected - index is up to date!');\n return;\n }\n \n spinner.succeed(\n `Detected changes: ${totalChanges} files to index, ${totalDeleted} to remove (${changes.reason} detection)`\n );\n \n // Initialize embeddings for incremental update\n spinner.start(getModelLoadingMessage());\n const embeddings = new LocalEmbeddings();\n await embeddings.initialize();\n spinner.succeed('Embedding model loaded');\n \n // Handle deletions\n if (totalDeleted > 0) {\n spinner.start(`Removing ${totalDeleted} deleted files...`);\n let removedCount = 0;\n for (const filepath of changes.deleted) {\n try {\n await vectorDB.deleteByFile(filepath);\n await manifest.removeFile(filepath);\n removedCount++;\n } catch (err) {\n spinner.warn(`Failed to remove file \"${filepath}\": ${err instanceof Error ? err.message : String(err)}`);\n }\n }\n spinner.succeed(`Removed ${removedCount}/${totalDeleted} deleted files`);\n }\n \n // Handle additions and modifications\n if (totalChanges > 0) {\n spinner.start(`Reindexing ${totalChanges} changed files...`);\n const filesToIndex = [...changes.added, ...changes.modified];\n const count = await indexMultipleFiles(\n filesToIndex,\n vectorDB,\n embeddings,\n config,\n { verbose: options.verbose }\n );\n \n // Update version file to trigger MCP reconnection\n await writeVersionFile(vectorDB.dbPath);\n \n spinner.succeed(\n `Incremental reindex complete: ${count}/${totalChanges} files indexed successfully`\n );\n }\n \n // Update git state after incremental indexing (for branch switch detection)\n const { isGitAvailable, isGitRepo } = await import('../git/utils.js');\n const { GitStateTracker } = await import('../git/tracker.js');\n const gitAvailable = await isGitAvailable();\n const isRepo = await isGitRepo(rootDir);\n \n if (gitAvailable && isRepo) {\n const gitTracker = new GitStateTracker(rootDir, vectorDB.dbPath);\n await gitTracker.initialize();\n const gitState = gitTracker.getState();\n if (gitState) {\n // Reuse existing manifest instance\n await manifest.updateGitState(gitState);\n }\n }\n \n console.log(chalk.dim('\\nNext step: Run'), chalk.bold('lien serve'), chalk.dim('to start the MCP server'));\n return; // Exit early - incremental index complete!\n }\n \n // If we get here, changes.reason === 'full', so continue with full index below\n spinner.text = 'Full reindex required...';\n }\n } else {\n spinner.text = 'Force flag enabled, performing full reindex...';\n }\n \n // 2. 
Scan for files (framework-aware if frameworks configured)\n spinner.text = 'Scanning codebase...';\n let files: string[];\n \n if (isModernConfig(config) && config.frameworks.length > 0) {\n // Use framework-aware scanning for new configs\n files = await scanCodebaseWithFrameworks(rootDir, config);\n } else if (isLegacyConfig(config)) {\n // Fall back to legacy scanning for old configs\n files = await scanCodebase({\n rootDir,\n includePatterns: config.indexing.include,\n excludePatterns: config.indexing.exclude,\n });\n } else {\n // Modern config with no frameworks - use empty patterns\n files = await scanCodebase({\n rootDir,\n includePatterns: [],\n excludePatterns: [],\n });\n }\n \n if (files.length === 0) {\n spinner.fail('No files found to index');\n return;\n }\n \n spinner.text = `Found ${files.length} files`;\n \n // 3. Initialize embeddings model\n spinner.text = getModelLoadingMessage();\n const embeddings = new LocalEmbeddings();\n await embeddings.initialize();\n spinner.succeed('Embedding model loaded');\n \n // 5. Process files concurrently\n const concurrency = isModernConfig(config) \n ? config.core.concurrency \n : 4;\n const embeddingBatchSize = isModernConfig(config)\n ? config.core.embeddingBatchSize\n : 50;\n // Use smaller batch size to keep UI responsive (process more frequently)\n const vectorDBBatchSize = 100;\n \n spinner.start(`Processing files with ${concurrency}x concurrency...`);\n \n const startTime = Date.now();\n let processedFiles = 0;\n let processedChunks = 0;\n \n // Accumulator for chunks across multiple files\n const chunkAccumulator: ChunkWithContent[] = [];\n const limit = pLimit(concurrency);\n \n // Track successfully indexed files for manifest\n const indexedFileEntries: Array<{ filepath: string; chunkCount: number; mtime: number }> = [];\n \n // Shared state for progress updates (decoupled from actual work)\n const progressState = {\n processedFiles: 0,\n totalFiles: files.length,\n wittyMessage: getIndexingMessage(),\n };\n \n // Start a periodic timer to update the spinner independently\n const SPINNER_UPDATE_INTERVAL_MS = 200; // How often to update spinner\n const MESSAGE_ROTATION_INTERVAL_MS = 8000; // How often to rotate message\n const MESSAGE_ROTATION_TICKS = Math.floor(MESSAGE_ROTATION_INTERVAL_MS / SPINNER_UPDATE_INTERVAL_MS);\n \n let spinnerTick = 0;\n updateInterval = setInterval(() => {\n // Rotate witty message periodically\n spinnerTick++;\n if (spinnerTick >= MESSAGE_ROTATION_TICKS) {\n progressState.wittyMessage = getIndexingMessage();\n spinnerTick = 0; // Reset counter to prevent unbounded growth\n }\n \n spinner.text = `${progressState.processedFiles}/${progressState.totalFiles} files | ${progressState.wittyMessage}`;\n }, SPINNER_UPDATE_INTERVAL_MS);\n \n // Function to process accumulated chunks\n const processAccumulatedChunks = async () => {\n if (chunkAccumulator.length === 0) return;\n \n const toProcess = chunkAccumulator.splice(0, chunkAccumulator.length);\n \n // Process embeddings in smaller batches AND insert incrementally to keep UI responsive\n for (let i = 0; i < toProcess.length; i += embeddingBatchSize) {\n const batch = toProcess.slice(i, Math.min(i + embeddingBatchSize, toProcess.length));\n \n // Update shared state (spinner updates automatically via interval)\n progressState.wittyMessage = getEmbeddingMessage();\n \n // Process embeddings in micro-batches to prevent event loop blocking\n // Transformers.js is CPU-intensive, so we yield control periodically\n const texts = batch.map(item => 
item.content);\n const embeddingVectors: Float32Array[] = [];\n \n for (let j = 0; j < texts.length; j += EMBEDDING_MICRO_BATCH_SIZE) {\n const microBatch = texts.slice(j, Math.min(j + EMBEDDING_MICRO_BATCH_SIZE, texts.length));\n const microResults = await embeddings.embedBatch(microBatch);\n embeddingVectors.push(...microResults);\n \n // Yield to event loop so spinner can update\n await new Promise(resolve => setImmediate(resolve));\n }\n \n processedChunks += batch.length;\n \n // Update state before DB insertion\n progressState.wittyMessage = `Inserting ${batch.length} chunks into vector space...`;\n \n await vectorDB.insertBatch(\n embeddingVectors,\n batch.map(item => item.chunk.metadata),\n texts\n );\n \n // Yield after DB insertion too\n await new Promise(resolve => setImmediate(resolve));\n }\n \n progressState.wittyMessage = getIndexingMessage();\n };\n \n // Process files with concurrency limit\n const filePromises = files.map((file) =>\n limit(async () => {\n try {\n // Get file stats to capture actual modification time\n const stats = await fs.stat(file);\n const content = await fs.readFile(file, 'utf-8');\n const chunkSize = isModernConfig(config)\n ? config.core.chunkSize\n : 75;\n const chunkOverlap = isModernConfig(config)\n ? config.core.chunkOverlap\n : 10;\n \n const chunks = chunkFile(file, content, {\n chunkSize,\n chunkOverlap,\n });\n \n if (chunks.length === 0) {\n processedFiles++;\n progressState.processedFiles = processedFiles;\n return;\n }\n \n // Add chunks to accumulator\n for (const chunk of chunks) {\n chunkAccumulator.push({\n chunk,\n content: chunk.content,\n });\n }\n \n // Track this file for manifest with actual file mtime\n indexedFileEntries.push({\n filepath: file,\n chunkCount: chunks.length,\n mtime: stats.mtimeMs,\n });\n \n processedFiles++;\n progressState.processedFiles = processedFiles;\n \n // Process when batch is large enough (use smaller batch for responsiveness)\n if (chunkAccumulator.length >= vectorDBBatchSize) {\n await processAccumulatedChunks();\n }\n } catch (error) {\n if (options.verbose) {\n console.error(chalk.yellow(`\\n⚠️ Skipping ${file}: ${error}`));\n }\n processedFiles++;\n progressState.processedFiles = processedFiles;\n }\n })\n );\n \n // Wait for all files to be processed\n await Promise.all(filePromises);\n \n // Process remaining chunks\n progressState.wittyMessage = 'Processing final chunks...';\n await processAccumulatedChunks();\n \n // Stop the progress update interval\n clearInterval(updateInterval);\n \n // Save manifest with all indexed files\n spinner.start('Saving index manifest...');\n const manifest = new ManifestManager(vectorDB.dbPath);\n await manifest.updateFiles(\n indexedFileEntries.map(entry => ({\n filepath: entry.filepath,\n lastModified: entry.mtime, // Use actual file mtime for accurate change detection\n chunkCount: entry.chunkCount,\n }))\n );\n \n // Save git state if in a git repo (for branch switch detection)\n const { isGitAvailable, isGitRepo } = await import('../git/utils.js');\n const { GitStateTracker } = await import('../git/tracker.js');\n const gitAvailable = await isGitAvailable();\n const isRepo = await isGitRepo(rootDir);\n \n if (gitAvailable && isRepo) {\n const gitTracker = new GitStateTracker(rootDir, vectorDB.dbPath);\n await gitTracker.initialize();\n const gitState = gitTracker.getState();\n if (gitState) {\n await manifest.updateGitState(gitState);\n }\n }\n \n spinner.succeed('Manifest saved');\n \n // Write version file to mark successful completion\n // This 
allows the MCP server to detect when reindexing is complete\n await writeVersionFile(vectorDB.dbPath);\n \n const totalTime = ((Date.now() - startTime) / 1000).toFixed(1);\n spinner.succeed(\n `Indexed ${processedFiles} files (${processedChunks} chunks) in ${totalTime}s using ${concurrency}x concurrency`\n );\n \n console.log(chalk.dim('\\nNext step: Run'), chalk.bold('lien serve'), chalk.dim('to start the MCP server'));\n } catch (error) {\n // Make sure to clear interval on error too\n if (updateInterval) {\n clearInterval(updateInterval);\n }\n spinner.fail(`Indexing failed: ${error}`);\n throw error;\n }\n}\n\n","import { Command } from 'commander';\nimport { createRequire } from 'module';\nimport { fileURLToPath } from 'url';\nimport { dirname, join } from 'path';\nimport { initCommand } from './init.js';\nimport { statusCommand } from './status.js';\nimport { indexCommand } from './index-cmd.js';\nimport { serveCommand } from './serve.js';\n\n// Get version from package.json dynamically\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\nconst require = createRequire(import.meta.url);\n\nlet packageJson;\ntry {\n packageJson = require(join(__dirname, '../package.json'));\n} catch {\n packageJson = require(join(__dirname, '../../package.json'));\n}\n\nexport const program = new Command();\n\nprogram\n .name('lien')\n .description('Local semantic code search for AI assistants via MCP')\n .version(packageJson.version);\n\nprogram\n .command('init')\n .description('Initialize Lien in the current directory')\n .option('-u, --upgrade', 'Upgrade existing config with new options')\n .option('-y, --yes', 'Skip interactive prompts and use defaults')\n .option('-p, --path <path>', 'Path to initialize (defaults to current directory)')\n .action(initCommand);\n\nprogram\n .command('index')\n .description('Index the codebase for semantic search')\n .option('-f, --force', 'Force full reindex (skip incremental)')\n .option('-w, --watch', 'Watch for changes and re-index automatically')\n .option('-v, --verbose', 'Show detailed logging during indexing')\n .action(indexCommand);\n\nprogram\n .command('serve')\n .description('Start the MCP server for Cursor integration')\n .option('-p, --port <port>', 'Port number (for future use)', '7133')\n .option('--no-watch', 'Disable file watching for this session')\n .option('-w, --watch', '[DEPRECATED] File watching is now enabled by default')\n .option('-r, --root <path>', 'Root directory to serve (defaults to current directory)')\n .action(serveCommand);\n\nprogram\n .command('status')\n .description('Show indexing status and statistics')\n .action(statusCommand);\n\n","import fs from 'fs/promises';\nimport path from 'path';\nimport { fileURLToPath } from 'url';\nimport chalk from 'chalk';\nimport inquirer from 'inquirer';\nimport { defaultConfig, LienConfig, FrameworkInstance, FrameworkConfig } from '../config/schema.js';\nimport { deepMergeConfig, detectNewFields } from '../config/merge.js';\nimport { showCompactBanner } from '../utils/banner.js';\nimport { needsMigration, migrateConfig } from '../config/migration.js';\nimport { detectAllFrameworks } from '../frameworks/detector-service.js';\nimport { getFrameworkDetector } from '../frameworks/registry.js';\n\n// ES module equivalent of __dirname\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = path.dirname(__filename);\n\nexport interface InitOptions {\n upgrade?: boolean;\n yes?: boolean;\n path?: string;\n}\n\nexport async function 
initCommand(options: InitOptions = {}) {\n const rootDir = options.path || process.cwd();\n const configPath = path.join(rootDir, '.lien.config.json');\n \n try {\n // Check if config already exists\n let configExists = false;\n try {\n await fs.access(configPath);\n configExists = true;\n } catch {\n // File doesn't exist\n }\n \n // Handle upgrade scenario\n if (configExists && options.upgrade) {\n await upgradeConfig(configPath);\n return;\n }\n \n // Warn if config exists and not upgrading\n if (configExists && !options.upgrade) {\n console.log(chalk.yellow('⚠️ .lien.config.json already exists'));\n console.log(chalk.dim('Run'), chalk.bold('lien init --upgrade'), chalk.dim('to merge new config options'));\n return;\n }\n \n // Create new config with framework detection\n if (!configExists) {\n await createNewConfig(rootDir, options);\n }\n } catch (error) {\n console.error(chalk.red('Error creating config file:'), error);\n process.exit(1);\n }\n}\n\nasync function createNewConfig(rootDir: string, options: InitOptions) {\n // Show banner for new initialization\n showCompactBanner();\n console.log(chalk.bold('Initializing Lien...\\n'));\n \n // 1. Run framework detection\n console.log(chalk.dim('🔍 Detecting frameworks in'), chalk.bold(rootDir));\n const detections = await detectAllFrameworks(rootDir);\n \n let frameworks: FrameworkInstance[] = [];\n \n if (detections.length === 0) {\n console.log(chalk.yellow('\\n⚠️ No frameworks detected'));\n \n if (!options.yes) {\n const { useGeneric } = await inquirer.prompt([\n {\n type: 'confirm',\n name: 'useGeneric',\n message: 'Create a generic config (index all supported file types)?',\n default: true,\n },\n ]);\n \n if (!useGeneric) {\n console.log(chalk.dim('Aborted.'));\n return;\n }\n }\n \n // Create generic framework\n frameworks.push({\n name: 'generic',\n path: '.',\n enabled: true,\n config: {\n include: ['**/*.{ts,tsx,js,jsx,py,go,rs,java,c,cpp,cs}'],\n exclude: [\n '**/node_modules/**',\n '**/dist/**',\n '**/build/**',\n '**/.git/**',\n '**/coverage/**',\n '**/.next/**',\n '**/.nuxt/**',\n '**/vendor/**',\n ],\n },\n });\n } else {\n // 2. Display detected frameworks\n console.log(chalk.green(`\\n✓ Found ${detections.length} framework(s):\\n`));\n \n for (const det of detections) {\n const pathDisplay = det.path === '.' ? 'root' : det.path;\n console.log(chalk.bold(` ${det.name}`), chalk.dim(`(${det.confidence} confidence)`));\n console.log(chalk.dim(` Location: ${pathDisplay}`));\n \n if (det.evidence.length > 0) {\n det.evidence.forEach((e) => {\n console.log(chalk.dim(` • ${e}`));\n });\n }\n console.log();\n }\n \n // 3. Interactive confirmation\n if (!options.yes) {\n const { confirm } = await inquirer.prompt([\n {\n type: 'confirm',\n name: 'confirm',\n message: 'Configure these frameworks?',\n default: true,\n },\n ]);\n \n if (!confirm) {\n console.log(chalk.dim('Aborted.'));\n return;\n }\n }\n \n // 4. 
Generate configs for each detected framework\n for (const det of detections) {\n const detector = getFrameworkDetector(det.name);\n if (!detector) {\n console.warn(chalk.yellow(`⚠️ No detector found for ${det.name}, skipping`));\n continue;\n }\n \n // Generate default config\n const frameworkConfig = await detector.generateConfig(rootDir, det.path);\n \n // Optional: Ask to customize (only in interactive mode)\n let shouldCustomize = false;\n if (!options.yes) {\n const { customize } = await inquirer.prompt([\n {\n type: 'confirm',\n name: 'customize',\n message: `Customize ${det.name} settings?`,\n default: false,\n },\n ]);\n shouldCustomize = customize;\n }\n \n let finalConfig = frameworkConfig;\n if (shouldCustomize) {\n const customized = await promptForCustomization(det.name, frameworkConfig);\n finalConfig = { ...frameworkConfig, ...customized };\n } else {\n const pathDisplay = det.path === '.' ? 'root' : det.path;\n console.log(chalk.dim(` → Using defaults for ${det.name} at ${pathDisplay}`));\n }\n \n frameworks.push({\n name: det.name,\n path: det.path,\n enabled: true,\n config: finalConfig,\n });\n }\n }\n \n // 5. Ask about Cursor rules installation\n if (!options.yes) {\n const { installCursorRules } = await inquirer.prompt([\n {\n type: 'confirm',\n name: 'installCursorRules',\n message: 'Install recommended Cursor rules?',\n default: true,\n },\n ]);\n \n if (installCursorRules) {\n try {\n const cursorRulesDir = path.join(rootDir, '.cursor');\n await fs.mkdir(cursorRulesDir, { recursive: true });\n \n // Find template - it's in the package root (same dir as package.json)\n // When compiled: everything bundles to dist/index.js, so __dirname is dist/\n // Go up one level from dist/ to reach package root\n const templatePath = path.join(__dirname, '../CURSOR_RULES_TEMPLATE.md');\n \n const rulesPath = path.join(cursorRulesDir, 'rules');\n let targetPath: string;\n let isDirectory = false;\n let isFile = false;\n\n try {\n const stats = await fs.stat(rulesPath);\n isDirectory = stats.isDirectory();\n isFile = stats.isFile();\n } catch {\n // Doesn't exist, that's fine\n }\n\n if (isDirectory) {\n // .cursor/rules is already a directory, create lien.mdc inside it\n targetPath = path.join(rulesPath, 'lien.mdc');\n await fs.copyFile(templatePath, targetPath);\n console.log(chalk.green('✓ Installed Cursor rules as .cursor/rules/lien.mdc'));\n } else if (isFile) {\n // .cursor/rules exists as a file - ask to convert to directory structure\n const { convertToDir } = await inquirer.prompt([\n {\n type: 'confirm',\n name: 'convertToDir',\n message: 'Existing .cursor/rules file found. Convert to directory and preserve your rules?',\n default: true,\n },\n ]);\n\n if (convertToDir) {\n // Convert file to directory structure\n // 1. Read existing rules\n const existingRules = await fs.readFile(rulesPath, 'utf-8');\n // 2. Delete the file\n await fs.unlink(rulesPath);\n // 3. Create rules as a directory\n await fs.mkdir(rulesPath);\n // 4. Save original rules as project.mdc\n await fs.writeFile(path.join(rulesPath, 'project.mdc'), existingRules);\n // 5. 
Add Lien rules as lien.mdc\n await fs.copyFile(templatePath, path.join(rulesPath, 'lien.mdc'));\n console.log(chalk.green('✓ Converted .cursor/rules to directory'));\n console.log(chalk.green(' - Your project rules: .cursor/rules/project.mdc'));\n console.log(chalk.green(' - Lien rules: .cursor/rules/lien.mdc'));\n } else {\n console.log(chalk.dim('Skipped Cursor rules installation (preserving existing file)'));\n }\n } else {\n // .cursor/rules doesn't exist, create it as a directory\n await fs.mkdir(rulesPath, { recursive: true });\n targetPath = path.join(rulesPath, 'lien.mdc');\n await fs.copyFile(templatePath, targetPath);\n console.log(chalk.green('✓ Installed Cursor rules as .cursor/rules/lien.mdc'));\n }\n } catch (error) {\n console.log(chalk.yellow('⚠️ Could not install Cursor rules'));\n console.log(chalk.dim(`Error: ${error instanceof Error ? error.message : 'Unknown error'}`));\n console.log(chalk.dim('You can manually copy CURSOR_RULES_TEMPLATE.md to .cursor/rules/lien.mdc'));\n }\n }\n }\n \n // 6. Build final config\n const config: LienConfig = {\n ...defaultConfig,\n frameworks,\n };\n \n // 7. Write config\n const configPath = path.join(rootDir, '.lien.config.json');\n await fs.writeFile(configPath, JSON.stringify(config, null, 2) + '\\n', 'utf-8');\n \n // 8. Show success message\n console.log(chalk.green('\\n✓ Created .lien.config.json'));\n console.log(chalk.green(`✓ Configured ${frameworks.length} framework(s)`));\n console.log(chalk.dim('\\nNext steps:'));\n console.log(chalk.dim(' 1. Run'), chalk.bold('lien index'), chalk.dim('to index your codebase'));\n console.log(chalk.dim(' 2. Run'), chalk.bold('lien serve'), chalk.dim('to start the MCP server'));\n console.log(chalk.dim(' 3. Configure Cursor to use the MCP server (see README.md)'));\n}\n\nasync function promptForCustomization(frameworkName: string, config: FrameworkConfig): Promise<Partial<FrameworkConfig>> {\n console.log(chalk.bold(`\\nCustomizing ${frameworkName} settings:`));\n \n const answers = await inquirer.prompt([\n {\n type: 'input',\n name: 'include',\n message: 'File patterns to include (comma-separated):',\n default: config.include.join(', '),\n filter: (input: string) => input.split(',').map(s => s.trim()),\n },\n {\n type: 'input',\n name: 'exclude',\n message: 'File patterns to exclude (comma-separated):',\n default: config.exclude.join(', '),\n filter: (input: string) => input.split(',').map(s => s.trim()),\n },\n ]);\n \n return {\n include: answers.include,\n exclude: answers.exclude,\n };\n}\n\nasync function upgradeConfig(configPath: string) {\n try {\n // 1. Backup existing config\n const backupPath = `${configPath}.backup`;\n await fs.copyFile(configPath, backupPath);\n \n // 2. Read existing config\n const existingContent = await fs.readFile(configPath, 'utf-8');\n const existingConfig = JSON.parse(existingContent);\n \n let upgradedConfig: LienConfig;\n let migrated = false;\n \n // 3. 
Check if migration is needed (v0.2.0 -> v0.3.0)\n if (needsMigration(existingConfig)) {\n console.log(chalk.blue('🔄 Migrating config from v0.2.0 to v0.3.0...'));\n upgradedConfig = migrateConfig(existingConfig);\n migrated = true;\n } else {\n // Just merge with defaults for v0.3.0 configs\n const newFields = detectNewFields(existingConfig, defaultConfig);\n upgradedConfig = deepMergeConfig(defaultConfig, existingConfig as Partial<LienConfig>);\n \n if (newFields.length > 0) {\n console.log(chalk.dim('\\nNew options added:'));\n newFields.forEach(field => console.log(chalk.dim(' •'), chalk.bold(field)));\n }\n }\n \n // 4. Write upgraded config\n await fs.writeFile(\n configPath,\n JSON.stringify(upgradedConfig, null, 2) + '\\n',\n 'utf-8'\n );\n \n // 5. Show results\n console.log(chalk.green('✓ Config upgraded successfully'));\n console.log(chalk.dim('Backup saved to:'), backupPath);\n \n if (migrated) {\n console.log(chalk.dim('\\n📝 Your config now uses the framework-based structure.'));\n }\n } catch (error) {\n console.error(chalk.red('Error upgrading config:'), error);\n throw error;\n }\n}\n","import figlet from 'figlet';\nimport chalk from 'chalk';\nimport { createRequire } from 'module';\nimport { fileURLToPath } from 'url';\nimport { dirname, join } from 'path';\n\n// Get package.json dynamically\n// In development: src/utils/banner.ts -> ../../package.json\n// In production (bundled): dist/index.js -> ../package.json\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\nconst require = createRequire(import.meta.url);\n\n// Try production path first (dist -> package.json), then dev path\nlet packageJson;\ntry {\n packageJson = require(join(__dirname, '../package.json'));\n} catch {\n packageJson = require(join(__dirname, '../../package.json'));\n}\n\n// Package info\nconst PACKAGE_NAME = packageJson.name;\nconst VERSION = packageJson.version;\n\n/**\n * Wrap text in a box with a footer line\n */\nfunction wrapInBox(text: string, footer: string, padding = 1): string {\n const lines = text.split('\\n').filter(line => line.trim().length > 0);\n \n // Use only the main content (logo) to determine box width\n const maxLength = Math.max(...lines.map(line => line.length));\n \n const horizontalBorder = '─'.repeat(maxLength + padding * 2);\n const top = `┌${horizontalBorder}┐`;\n const bottom = `└${horizontalBorder}┘`;\n const separator = `├${horizontalBorder}┤`;\n \n const paddedLines = lines.map(line => {\n const padRight = ' '.repeat(maxLength - line.length + padding);\n const padLeft = ' '.repeat(padding);\n return `│${padLeft}${line}${padRight}│`;\n });\n \n // Center the footer line\n const totalPad = maxLength - footer.length;\n const leftPad = Math.floor(totalPad / 2);\n const rightPad = totalPad - leftPad;\n const centeredFooter = ' '.repeat(leftPad) + footer + ' '.repeat(rightPad);\n \n const paddedFooter = `│${' '.repeat(padding)}${centeredFooter}${' '.repeat(padding)}│`;\n \n return [top, ...paddedLines, separator, paddedFooter, bottom].join('\\n');\n}\n\n/**\n * Display the gorgeous ANSI Shadow banner (uses stderr for MCP server)\n */\nexport function showBanner(): void {\n const banner = figlet.textSync('LIEN', {\n font: 'ANSI Shadow',\n horizontalLayout: 'fitted',\n verticalLayout: 'fitted',\n });\n\n const footer = `${PACKAGE_NAME} - v${VERSION}`;\n const boxedBanner = wrapInBox(banner.trim(), footer);\n console.error(chalk.cyan(boxedBanner));\n console.error(); // Empty line\n}\n\n/**\n * Display the gorgeous ANSI Shadow banner 
(uses stdout for CLI commands)\n */\nexport function showCompactBanner(): void {\n const banner = figlet.textSync('LIEN', {\n font: 'ANSI Shadow',\n horizontalLayout: 'fitted',\n verticalLayout: 'fitted',\n });\n\n const footer = `${PACKAGE_NAME} - v${VERSION}`;\n const boxedBanner = wrapInBox(banner.trim(), footer);\n console.log(chalk.cyan(boxedBanner));\n console.log(); // Empty line\n}\n\n","import fs from 'fs/promises';\nimport path from 'path';\nimport { DetectionResult, DetectionOptions, defaultDetectionOptions } from './types.js';\nimport { frameworkDetectors } from './registry.js';\n\n/**\n * Detect all frameworks in a monorepo by recursively scanning subdirectories\n * @param rootDir - Absolute path to project root\n * @param options - Detection options (max depth, skip dirs)\n * @returns Array of detected frameworks with their paths\n */\nexport async function detectAllFrameworks(\n rootDir: string,\n options: Partial<DetectionOptions> = {}\n): Promise<DetectionResult[]> {\n const opts = { ...defaultDetectionOptions, ...options };\n const results: DetectionResult[] = [];\n const visited = new Set<string>();\n \n // Detect at root first\n await detectAtPath(rootDir, '.', results, visited);\n \n // Recursively scan subdirectories\n await scanSubdirectories(rootDir, '.', results, visited, 0, opts);\n \n return results;\n}\n\n/**\n * Detect frameworks at a specific path\n */\nasync function detectAtPath(\n rootDir: string,\n relativePath: string,\n results: DetectionResult[],\n visited: Set<string>\n): Promise<void> {\n // Mark as visited\n const fullPath = path.join(rootDir, relativePath);\n if (visited.has(fullPath)) {\n return;\n }\n visited.add(fullPath);\n \n // Run all detectors and collect results\n const detectedAtPath: Array<DetectionResult & { priority: number }> = [];\n \n for (const detector of frameworkDetectors) {\n try {\n const result = await detector.detect(rootDir, relativePath);\n if (result.detected) {\n detectedAtPath.push({\n ...result,\n priority: detector.priority ?? 
0,\n });\n }\n } catch (error) {\n // Log error but continue with other detectors\n console.error(`Error running detector '${detector.name}' at ${relativePath}:`, error);\n }\n }\n \n // Conflict resolution: Allow multiple HIGH-confidence frameworks to coexist\n // This enables hybrid projects (e.g., Shopify + Node.js, Laravel + Node.js)\n if (detectedAtPath.length > 1) {\n // Separate frameworks by confidence level\n const highConfidence = detectedAtPath.filter(d => d.confidence === 'high');\n const mediumConfidence = detectedAtPath.filter(d => d.confidence === 'medium');\n const lowConfidence = detectedAtPath.filter(d => d.confidence === 'low');\n \n if (highConfidence.length > 1) {\n // Multiple HIGH-confidence frameworks -> keep all (hybrid/monorepo behavior)\n // Strip internal priority property before adding to results\n const cleanResults = highConfidence.map(({ priority, ...result }) => result);\n results.push(...cleanResults);\n const names = highConfidence.map(d => d.name).join(' + ');\n console.log(` → Detected hybrid project: ${names}`);\n \n // Log skipped medium/low confidence detections\n if (mediumConfidence.length > 0 || lowConfidence.length > 0) {\n const skippedNames = [...mediumConfidence, ...lowConfidence].map(d => d.name).join(', ');\n console.log(` → Skipping lower confidence detections: ${skippedNames}`);\n }\n } else if (highConfidence.length === 1) {\n // Only one HIGH-confidence framework\n const { priority, ...result } = highConfidence[0];\n results.push(result);\n \n // Log skipped medium/low confidence detections\n if (mediumConfidence.length > 0 || lowConfidence.length > 0) {\n const skippedNames = [...mediumConfidence, ...lowConfidence].map(d => d.name).join(', ');\n console.log(` → Skipping lower confidence detections: ${skippedNames}`);\n }\n } else if (mediumConfidence.length > 0) {\n // No HIGH confidence, but have MEDIUM -> use priority system\n mediumConfidence.sort((a, b) => b.priority - a.priority);\n const { priority, ...winner } = mediumConfidence[0];\n results.push(winner);\n \n // Skipped = remaining medium + all low confidence\n const skipped = [...mediumConfidence.slice(1), ...lowConfidence];\n if (skipped.length > 0) {\n const skippedNames = skipped.map(d => d.name).join(', ');\n console.log(` → Skipping ${skippedNames} at ${relativePath} (${winner.name} takes precedence)`);\n }\n } else if (lowConfidence.length > 0) {\n // Only LOW confidence -> use priority system\n lowConfidence.sort((a, b) => b.priority - a.priority);\n const { priority, ...winner } = lowConfidence[0];\n results.push(winner);\n \n // Skipped = remaining low confidence\n const skipped = lowConfidence.slice(1);\n if (skipped.length > 0) {\n const skippedNames = skipped.map(d => d.name).join(', ');\n console.log(` → Skipping ${skippedNames} at ${relativePath} (${winner.name} takes precedence)`);\n }\n }\n } else if (detectedAtPath.length === 1) {\n const { priority, ...result } = detectedAtPath[0];\n results.push(result);\n }\n}\n\n/**\n * Recursively scan subdirectories for frameworks\n */\nasync function scanSubdirectories(\n rootDir: string,\n relativePath: string,\n results: DetectionResult[],\n visited: Set<string>,\n depth: number,\n options: DetectionOptions\n): Promise<void> {\n // Check depth limit\n if (depth >= options.maxDepth) {\n return;\n }\n \n const fullPath = path.join(rootDir, relativePath);\n \n try {\n const entries = await fs.readdir(fullPath, { withFileTypes: true });\n \n // Process only directories\n const dirs = entries.filter(e => 
e.isDirectory());\n \n for (const dir of dirs) {\n // Skip directories in the skip list\n if (options.skipDirs.includes(dir.name)) {\n continue;\n }\n \n // Skip hidden directories (except .git, .github which are already in skipDirs)\n if (dir.name.startsWith('.')) {\n continue;\n }\n \n const subPath = relativePath === '.' \n ? dir.name \n : path.join(relativePath, dir.name);\n \n // Detect at this subdirectory\n await detectAtPath(rootDir, subPath, results, visited);\n \n // Recurse deeper\n await scanSubdirectories(rootDir, subPath, results, visited, depth + 1, options);\n }\n } catch (error) {\n // Silently skip directories we can't read (permission errors, etc.)\n return;\n }\n}\n\n/**\n * Get a human-readable summary of detected frameworks\n */\nexport function getDetectionSummary(results: DetectionResult[]): string {\n if (results.length === 0) {\n return 'No frameworks detected';\n }\n \n const lines: string[] = [];\n \n for (const result of results) {\n const pathDisplay = result.path === '.' ? 'root' : result.path;\n lines.push(`${result.name} at ${pathDisplay} (${result.confidence} confidence)`);\n \n if (result.evidence.length > 0) {\n result.evidence.forEach(e => {\n lines.push(` - ${e}`);\n });\n }\n }\n \n return lines.join('\\n');\n}\n\n","import { FrameworkConfig } from '../config/schema.js';\n\n/**\n * Result of framework detection\n */\nexport interface DetectionResult {\n detected: boolean;\n name: string; // 'nodejs', 'laravel'\n path: string; // Relative path from root: '.', 'packages/cli', 'cognito-backend'\n confidence: 'high' | 'medium' | 'low';\n evidence: string[]; // Human-readable evidence (e.g., \"Found package.json with jest\")\n version?: string; // Framework/language version if detectable\n}\n\n/**\n * Interface for framework detectors\n */\nexport interface FrameworkDetector {\n name: string; // Unique framework identifier\n \n /**\n * Priority for conflict resolution (higher = takes precedence)\n * - 100: Specific frameworks (Laravel, Rails, Django)\n * - 50: Generic frameworks (Node.js, Python)\n * - 0: Fallback/generic\n */\n priority?: number;\n \n /**\n * Detect if this framework exists at the given path\n * @param rootDir - Absolute path to project root\n * @param relativePath - Relative path from root to check (e.g., '.' 
or 'packages/cli')\n * @returns Detection result with evidence\n */\n detect(rootDir: string, relativePath: string): Promise<DetectionResult>;\n \n /**\n * Generate default configuration for this framework\n * @param rootDir - Absolute path to project root\n * @param relativePath - Relative path where framework was detected\n * @returns Framework-specific configuration\n */\n generateConfig(rootDir: string, relativePath: string): Promise<FrameworkConfig>;\n}\n\n/**\n * Options for framework detection\n */\nexport interface DetectionOptions {\n maxDepth: number; // Maximum directory depth to scan\n skipDirs: string[]; // Directories to skip (node_modules, vendor, etc.)\n}\n\n/**\n * Default detection options\n */\nexport const defaultDetectionOptions: DetectionOptions = {\n maxDepth: 3,\n skipDirs: [\n 'node_modules',\n 'vendor',\n 'dist',\n 'build',\n '.next',\n '.nuxt',\n 'coverage',\n '.git',\n '.idea',\n '.vscode',\n 'tmp',\n 'temp',\n ],\n};\n\n","import fs from 'fs/promises';\nimport path from 'path';\nimport { FrameworkDetector, DetectionResult } from '../types.js';\nimport { generateNodeJsConfig } from './config.js';\n\n/**\n * Node.js/TypeScript/JavaScript framework detector\n */\nexport const nodejsDetector: FrameworkDetector = {\n name: 'nodejs',\n priority: 50, // Generic, yields to specific frameworks like Laravel\n \n async detect(rootDir: string, relativePath: string): Promise<DetectionResult> {\n const fullPath = path.join(rootDir, relativePath);\n const result: DetectionResult = {\n detected: false,\n name: 'nodejs',\n path: relativePath,\n confidence: 'low',\n evidence: [],\n };\n \n // Check for package.json\n const packageJsonPath = path.join(fullPath, 'package.json');\n let packageJson: any = null;\n \n try {\n const content = await fs.readFile(packageJsonPath, 'utf-8');\n packageJson = JSON.parse(content);\n result.evidence.push('Found package.json');\n } catch {\n // No package.json, not a Node.js project\n return result;\n }\n \n // At this point, we know it's a Node.js project\n result.detected = true;\n result.confidence = 'high';\n \n // Check for TypeScript\n if (packageJson.devDependencies?.typescript || packageJson.dependencies?.typescript) {\n result.evidence.push('TypeScript detected');\n }\n \n // Check for testing frameworks\n const testFrameworks = [\n { name: 'jest', display: 'Jest' },\n { name: 'vitest', display: 'Vitest' },\n { name: 'mocha', display: 'Mocha' },\n { name: 'ava', display: 'AVA' },\n { name: '@playwright/test', display: 'Playwright' },\n ];\n \n for (const framework of testFrameworks) {\n if (\n packageJson.devDependencies?.[framework.name] || \n packageJson.dependencies?.[framework.name]\n ) {\n result.evidence.push(`${framework.display} test framework detected`);\n break; // Only mention first test framework found\n }\n }\n \n // Check for common frameworks/libraries\n const frameworks = [\n { name: 'next', display: 'Next.js' },\n { name: 'react', display: 'React' },\n { name: 'vue', display: 'Vue' },\n { name: 'express', display: 'Express' },\n { name: '@nestjs/core', display: 'NestJS' },\n ];\n \n for (const fw of frameworks) {\n if (packageJson.dependencies?.[fw.name]) {\n result.evidence.push(`${fw.display} detected`);\n break; // Only mention first framework found\n }\n }\n \n // Try to detect version from package.json engines or node version\n if (packageJson.engines?.node) {\n result.version = packageJson.engines.node;\n }\n \n return result;\n },\n \n async generateConfig(rootDir: string, relativePath: string) {\n return 
generateNodeJsConfig(rootDir, relativePath);\n },\n};\n\n","import { FrameworkConfig } from '../../config/schema.js';\n\n/**\n * Generate Node.js framework configuration\n */\nexport async function generateNodeJsConfig(\n _rootDir: string,\n _relativePath: string\n): Promise<FrameworkConfig> {\n return {\n include: [\n // Broader patterns to catch all common project structures\n // (frontend/, src/, lib/, app/, components/, etc.)\n '**/*.ts',\n '**/*.tsx',\n '**/*.js',\n '**/*.jsx',\n '**/*.vue',\n '**/*.mjs',\n '**/*.cjs',\n '**/*.md',\n '**/*.mdx',\n ],\n exclude: [\n 'node_modules/**',\n 'dist/**',\n 'build/**',\n 'coverage/**',\n '.next/**',\n '.nuxt/**',\n '.vite/**',\n '.lien/**',\n 'out/**',\n '*.min.js',\n '*.min.css',\n '*.bundle.js',\n \n // Test artifacts (source files are indexed, but not output)\n 'playwright-report/**',\n 'test-results/**',\n \n // Build/generated artifacts\n '__generated__/**',\n \n // Common build/cache directories\n '.cache/**',\n '.turbo/**',\n '.vercel/**',\n '.netlify/**',\n ],\n };\n}\n\n","import fs from 'fs/promises';\nimport path from 'path';\nimport { FrameworkDetector, DetectionResult } from '../types.js';\nimport { generateLaravelConfig } from './config.js';\n\n/**\n * Laravel/PHP framework detector\n */\nexport const laravelDetector: FrameworkDetector = {\n name: 'laravel',\n priority: 100, // Laravel takes precedence over Node.js\n \n async detect(rootDir: string, relativePath: string): Promise<DetectionResult> {\n const fullPath = path.join(rootDir, relativePath);\n const result: DetectionResult = {\n detected: false,\n name: 'laravel',\n path: relativePath,\n confidence: 'low',\n evidence: [],\n };\n \n // Check for composer.json with Laravel\n const composerJsonPath = path.join(fullPath, 'composer.json');\n let composerJson: any = null;\n \n try {\n const content = await fs.readFile(composerJsonPath, 'utf-8');\n composerJson = JSON.parse(content);\n result.evidence.push('Found composer.json');\n } catch {\n // No composer.json, not a Laravel project\n return result;\n }\n \n // Check if Laravel framework is in dependencies\n const hasLaravel = \n composerJson.require?.['laravel/framework'] ||\n composerJson['require-dev']?.['laravel/framework'];\n \n if (!hasLaravel) {\n // Has composer.json but not Laravel\n return result;\n }\n \n result.evidence.push('Laravel framework detected in composer.json');\n \n // Check for artisan file (strong indicator of Laravel)\n const artisanPath = path.join(fullPath, 'artisan');\n try {\n await fs.access(artisanPath);\n result.evidence.push('Found artisan file');\n result.confidence = 'high';\n } catch {\n result.confidence = 'medium';\n }\n \n // Check for typical Laravel directory structure\n const laravelDirs = ['app', 'routes', 'config', 'database'];\n let foundDirs = 0;\n \n for (const dir of laravelDirs) {\n try {\n const dirPath = path.join(fullPath, dir);\n const stats = await fs.stat(dirPath);\n if (stats.isDirectory()) {\n foundDirs++;\n }\n } catch {\n // Directory doesn't exist\n }\n }\n \n if (foundDirs >= 2) {\n result.evidence.push(`Laravel directory structure detected (${foundDirs}/${laravelDirs.length} dirs)`);\n result.confidence = 'high';\n }\n \n // Check for test directories\n const testDirsToCheck = [\n path.join(fullPath, 'tests', 'Feature'),\n path.join(fullPath, 'tests', 'Unit'),\n ];\n \n for (const testDir of testDirsToCheck) {\n try {\n const stats = await fs.stat(testDir);\n if (stats.isDirectory()) {\n const dirName = path.basename(path.dirname(testDir)) + '/' + 
path.basename(testDir);\n result.evidence.push(`Found ${dirName} test directory`);\n }\n } catch {\n // Test directory doesn't exist\n }\n }\n \n // Extract Laravel version if available\n if (composerJson.require?.['laravel/framework']) {\n result.version = composerJson.require['laravel/framework'];\n }\n \n result.detected = true;\n return result;\n },\n \n async generateConfig(rootDir: string, relativePath: string) {\n return generateLaravelConfig(rootDir, relativePath);\n },\n};\n\n","import { FrameworkConfig } from '../../config/schema.js';\n\n/**\n * Generate Laravel framework configuration\n */\nexport async function generateLaravelConfig(\n _rootDir: string,\n _relativePath: string\n): Promise<FrameworkConfig> {\n return {\n include: [\n // PHP backend\n 'app/**/*.php',\n 'routes/**/*.php',\n 'config/**/*.php',\n 'database/**/*.php',\n 'resources/**/*.php',\n 'tests/**/*.php',\n '*.php',\n // Frontend assets (Vue/React/Inertia) - Broadened for flexibility\n '**/*.js',\n '**/*.ts',\n '**/*.jsx',\n '**/*.tsx',\n '**/*.vue',\n // Blade templates\n 'resources/views/**/*.blade.php',\n // Documentation\n '**/*.md',\n '**/*.mdx',\n 'docs/**/*.md',\n 'README.md',\n 'CHANGELOG.md',\n ],\n exclude: [\n 'vendor/**',\n 'storage/**',\n 'bootstrap/cache/**',\n 'public/**',\n 'node_modules/**',\n 'dist/**',\n 'build/**',\n \n // Test artifacts (source files are indexed, but not output)\n 'playwright-report/**',\n 'test-results/**',\n 'coverage/**',\n \n // Build/generated artifacts\n '__generated__/**',\n \n // Frontend build outputs\n '.vite/**',\n '.nuxt/**',\n '.next/**',\n ],\n };\n}\n\n","import fs from 'fs/promises';\nimport path from 'path';\nimport { FrameworkDetector, DetectionResult } from '../types.js';\nimport { generateShopifyConfig } from './config.js';\n\n/**\n * Shopify Liquid theme framework detector\n */\nexport const shopifyDetector: FrameworkDetector = {\n name: 'shopify',\n priority: 100, // High priority (same as Laravel)\n \n async detect(rootDir: string, relativePath: string): Promise<DetectionResult> {\n const fullPath = path.join(rootDir, relativePath);\n const result: DetectionResult = {\n detected: false,\n name: 'shopify',\n path: relativePath,\n confidence: 'low',\n evidence: [],\n };\n \n // 1. Check for config/settings_schema.json (STRONGEST signal)\n const settingsSchemaPath = path.join(fullPath, 'config', 'settings_schema.json');\n let hasSettingsSchema = false;\n \n try {\n await fs.access(settingsSchemaPath);\n hasSettingsSchema = true;\n result.evidence.push('Found config/settings_schema.json');\n } catch {\n // Not present, continue checking other markers\n }\n \n // 2. Check for layout/theme.liquid\n const themeLayoutPath = path.join(fullPath, 'layout', 'theme.liquid');\n let hasThemeLayout = false;\n \n try {\n await fs.access(themeLayoutPath);\n hasThemeLayout = true;\n result.evidence.push('Found layout/theme.liquid');\n } catch {\n // Not present\n }\n \n // 3. Check for typical Shopify directories\n const shopifyDirs = ['sections', 'snippets', 'templates', 'locales'];\n let foundDirs = 0;\n \n for (const dir of shopifyDirs) {\n try {\n const dirPath = path.join(fullPath, dir);\n const stats = await fs.stat(dirPath);\n if (stats.isDirectory()) {\n foundDirs++;\n }\n } catch {\n // Directory doesn't exist\n }\n }\n \n if (foundDirs >= 2) {\n result.evidence.push(`Shopify directory structure detected (${foundDirs}/${shopifyDirs.length} dirs)`);\n }\n \n // 4. 
Check for shopify.theme.toml (Shopify CLI)\n try {\n const tomlPath = path.join(fullPath, 'shopify.theme.toml');\n await fs.access(tomlPath);\n result.evidence.push('Found shopify.theme.toml');\n } catch {\n // Optional file\n }\n \n // 5. Check for .shopifyignore\n try {\n const ignorePath = path.join(fullPath, '.shopifyignore');\n await fs.access(ignorePath);\n result.evidence.push('Found .shopifyignore');\n } catch {\n // Optional file\n }\n \n // Determine detection confidence with early returns\n // High: Has settings_schema.json + 2+ directories\n if (hasSettingsSchema && foundDirs >= 2) {\n result.detected = true;\n result.confidence = 'high';\n return result;\n }\n \n // Medium: Has settings_schema alone, OR has theme.liquid + 1+ directory\n if (hasSettingsSchema || (hasThemeLayout && foundDirs >= 1)) {\n result.detected = true;\n result.confidence = 'medium';\n return result;\n }\n \n // Medium: Has 3+ typical directories but no strong markers\n if (foundDirs >= 3) {\n result.detected = true;\n result.confidence = 'medium';\n return result;\n }\n \n // Not detected\n return result;\n },\n \n async generateConfig(rootDir: string, relativePath: string) {\n return generateShopifyConfig(rootDir, relativePath);\n },\n};\n\n","import { FrameworkConfig } from '../../config/schema.js';\n\n/**\n * Generate Shopify theme framework configuration\n */\nexport async function generateShopifyConfig(\n _rootDir: string,\n _relativePath: string\n): Promise<FrameworkConfig> {\n return {\n include: [\n // Core Liquid templates\n 'layout/**/*.liquid',\n 'sections/**/*.liquid',\n 'snippets/**/*.liquid',\n 'templates/**/*.liquid', // Matches any nesting level (e.g., templates/customers/account.liquid)\n \n // Theme editor blocks (Online Store 2.0)\n 'blocks/**/*.liquid',\n \n // Assets (CSS, JS with optional Liquid templating)\n 'assets/**/*.js',\n 'assets/**/*.js.liquid',\n 'assets/**/*.css',\n 'assets/**/*.css.liquid',\n 'assets/**/*.scss',\n 'assets/**/*.scss.liquid',\n \n // Configuration files\n 'config/*.json',\n \n // Locales (i18n)\n 'locales/*.json',\n \n // Documentation\n '*.md',\n 'docs/**/*.md',\n \n // Shopify-specific config files\n 'shopify.theme.toml',\n '.shopifyignore',\n ],\n exclude: [\n 'node_modules/**',\n 'dist/**',\n 'build/**',\n '.git/**',\n \n // Playwright/testing artifacts\n 'playwright-report/**',\n 'test-results/**',\n \n // Build/generated artifacts\n '__generated__/**',\n \n // Common frontend build outputs\n '.vite/**',\n '.nuxt/**',\n '.next/**',\n ],\n };\n}\n\n","import { FrameworkDetector } from './types.js';\nimport { nodejsDetector } from './nodejs/detector.js';\nimport { laravelDetector } from './laravel/detector.js';\nimport { shopifyDetector } from './shopify/detector.js';\n\n/**\n * Registry of all available framework detectors\n * Frameworks will be added as they are implemented\n */\nexport const frameworkDetectors: FrameworkDetector[] = [\n nodejsDetector,\n laravelDetector,\n shopifyDetector,\n];\n\n/**\n * Register a framework detector\n */\nexport function registerFramework(detector: FrameworkDetector): void {\n // Check if already registered\n const existing = frameworkDetectors.find(d => d.name === detector.name);\n if (existing) {\n console.warn(`Framework detector '${detector.name}' is already registered, skipping`);\n return;\n }\n \n frameworkDetectors.push(detector);\n}\n\n/**\n * Get a framework detector by name\n */\nexport function getFrameworkDetector(name: string): FrameworkDetector | undefined {\n return frameworkDetectors.find(d => d.name 
=== name);\n}\n\n","import chalk from 'chalk';\nimport fs from 'fs/promises';\nimport path from 'path';\nimport os from 'os';\nimport crypto from 'crypto';\nimport { configService } from '../config/service.js';\nimport { isGitRepo, getCurrentBranch, getCurrentCommit } from '../git/utils.js';\nimport { readVersionFile } from '../vectordb/version.js';\nimport { showCompactBanner } from '../utils/banner.js';\nimport { isModernConfig } from '../config/schema.js';\n\nexport async function statusCommand() {\n const rootDir = process.cwd();\n const projectName = path.basename(rootDir);\n \n // Use same hashing logic as VectorDB to show correct path\n const pathHash = crypto\n .createHash('md5')\n .update(rootDir)\n .digest('hex')\n .substring(0, 8);\n \n const indexPath = path.join(os.homedir(), '.lien', 'indices', `${projectName}-${pathHash}`);\n \n showCompactBanner();\n console.log(chalk.bold('Status\\n'));\n \n // Check if config exists\n const hasConfig = await configService.exists(rootDir);\n console.log(chalk.dim('Configuration:'), hasConfig ? chalk.green('✓ Found') : chalk.red('✗ Not initialized'));\n \n if (!hasConfig) {\n console.log(chalk.yellow('\\nRun'), chalk.bold('lien init'), chalk.yellow('to initialize'));\n return;\n }\n \n // Check if index exists\n try {\n const stats = await fs.stat(indexPath);\n console.log(chalk.dim('Index location:'), indexPath);\n console.log(chalk.dim('Index status:'), chalk.green('✓ Exists'));\n \n // Try to get directory size\n try {\n const files = await fs.readdir(indexPath, { recursive: true });\n console.log(chalk.dim('Index files:'), files.length);\n } catch (e) {\n // Ignore\n }\n \n console.log(chalk.dim('Last modified:'), stats.mtime.toLocaleString());\n \n // Show version file info\n try {\n const version = await readVersionFile(indexPath);\n if (version > 0) {\n const versionDate = new Date(version);\n console.log(chalk.dim('Last reindex:'), versionDate.toLocaleString());\n }\n } catch {\n // Ignore\n }\n } catch (error) {\n console.log(chalk.dim('Index status:'), chalk.yellow('✗ Not indexed'));\n console.log(chalk.yellow('\\nRun'), chalk.bold('lien index'), chalk.yellow('to index your codebase'));\n }\n \n // Load and show configuration settings\n try {\n const config = await configService.load(rootDir);\n \n console.log(chalk.bold('\\nFeatures:'));\n \n // Git detection status\n const isRepo = await isGitRepo(rootDir);\n if (config.gitDetection.enabled && isRepo) {\n console.log(chalk.dim('Git detection:'), chalk.green('✓ Enabled'));\n console.log(chalk.dim(' Poll interval:'), `${config.gitDetection.pollIntervalMs / 1000}s`);\n \n // Show current git state\n try {\n const branch = await getCurrentBranch(rootDir);\n const commit = await getCurrentCommit(rootDir);\n console.log(chalk.dim(' Current branch:'), branch);\n console.log(chalk.dim(' Current commit:'), commit.substring(0, 8));\n \n // Check if git state file exists\n const gitStateFile = path.join(indexPath, '.git-state.json');\n try {\n const gitStateContent = await fs.readFile(gitStateFile, 'utf-8');\n const gitState = JSON.parse(gitStateContent);\n if (gitState.branch !== branch || gitState.commit !== commit) {\n console.log(chalk.yellow(' ⚠️ Git state changed - will reindex on next serve'));\n }\n } catch {\n // Git state file doesn't exist yet\n }\n } catch {\n // Ignore git command errors\n }\n } else if (config.gitDetection.enabled && !isRepo) {\n console.log(chalk.dim('Git detection:'), chalk.yellow('Enabled (not a git repo)'));\n } else {\n console.log(chalk.dim('Git 
detection:'), chalk.gray('Disabled'));\n }\n \n // File watching status\n if (config.fileWatching.enabled) {\n console.log(chalk.dim('File watching:'), chalk.green('✓ Enabled'));\n console.log(chalk.dim(' Debounce:'), `${config.fileWatching.debounceMs}ms`);\n } else {\n console.log(chalk.dim('File watching:'), chalk.gray('Disabled'));\n console.log(chalk.dim(' Enable with:'), chalk.bold('lien serve --watch'));\n }\n \n // Indexing settings\n console.log(chalk.bold('\\nIndexing Settings:'));\n if (isModernConfig(config)) {\n console.log(chalk.dim('Concurrency:'), config.core.concurrency);\n console.log(chalk.dim('Batch size:'), config.core.embeddingBatchSize);\n console.log(chalk.dim('Chunk size:'), config.core.chunkSize);\n console.log(chalk.dim('Chunk overlap:'), config.core.chunkOverlap);\n }\n \n } catch (error) {\n console.log(chalk.yellow('\\nWarning: Could not load configuration'));\n }\n}\n\n","import chalk from 'chalk';\nimport { indexCodebase } from '../indexer/index.js';\nimport { showCompactBanner } from '../utils/banner.js';\n\nexport async function indexCommand(options: { watch?: boolean; verbose?: boolean; force?: boolean }) {\n showCompactBanner();\n \n try {\n // If force flag is set, clear the index and manifest first (clean slate)\n if (options.force) {\n const { VectorDB } = await import('../vectordb/lancedb.js');\n const { ManifestManager } = await import('../indexer/manifest.js');\n \n console.log(chalk.yellow('Clearing existing index and manifest...'));\n const vectorDB = new VectorDB(process.cwd());\n await vectorDB.initialize();\n await vectorDB.clear();\n \n // Also clear manifest\n const manifest = new ManifestManager(vectorDB.dbPath);\n await manifest.clear();\n \n console.log(chalk.green('✓ Index and manifest cleared\\n'));\n }\n \n await indexCodebase({\n rootDir: process.cwd(),\n verbose: options.verbose || false,\n force: options.force || false,\n });\n \n if (options.watch) {\n console.log(chalk.yellow('\\n⚠️ Watch mode not yet implemented'));\n // TODO: Implement file watching with chokidar\n }\n } catch (error) {\n console.error(chalk.red('Error during indexing:'), error);\n process.exit(1);\n }\n}\n\n","import chalk from 'chalk';\nimport fs from 'fs/promises';\nimport path from 'path';\nimport { startMCPServer } from '../mcp/server.js';\nimport { showBanner } from '../utils/banner.js';\n\nexport async function serveCommand(options: { port?: string; watch?: boolean; noWatch?: boolean; root?: string }) {\n const rootDir = options.root ? 
path.resolve(options.root) : process.cwd();\n \n try {\n // Validate root directory if --root was specified\n if (options.root) {\n try {\n const stats = await fs.stat(rootDir);\n if (!stats.isDirectory()) {\n console.error(chalk.red(`Error: --root path is not a directory: ${rootDir}`));\n process.exit(1);\n }\n } catch (error) {\n if ((error as NodeJS.ErrnoException).code === 'ENOENT') {\n console.error(chalk.red(`Error: --root directory does not exist: ${rootDir}`));\n } else if ((error as NodeJS.ErrnoException).code === 'EACCES') {\n console.error(chalk.red(`Error: --root directory is not accessible: ${rootDir}`));\n } else {\n console.error(chalk.red(`Error: Failed to access --root directory: ${rootDir}`));\n console.error(chalk.dim((error as Error).message));\n }\n process.exit(1);\n }\n }\n \n // Log to stderr since stdout is for MCP protocol\n showBanner();\n console.error(chalk.bold('Starting MCP server...\\n'));\n \n if (options.root) {\n console.error(chalk.dim(`Serving from: ${rootDir}\\n`));\n }\n \n // Handle deprecated --watch flag\n if (options.watch) {\n console.error(chalk.yellow('⚠️ --watch flag is deprecated (file watching is now default)'));\n console.error(chalk.dim(' Use --no-watch to disable file watching\\n'));\n }\n \n // Determine file watching state\n // Priority: --no-watch > --watch (deprecated) > config default\n const watch = options.noWatch ? false : options.watch ? true : undefined;\n \n await startMCPServer({\n rootDir,\n verbose: true,\n watch,\n });\n } catch (error) {\n console.error(chalk.red('Failed to start MCP server:'), error);\n process.exit(1);\n }\n}\n\n","import { Server } from '@modelcontextprotocol/sdk/server/index.js';\nimport { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';\nimport {\n CallToolRequestSchema,\n ListToolsRequestSchema,\n} from '@modelcontextprotocol/sdk/types.js';\nimport { createRequire } from 'module';\nimport { fileURLToPath } from 'url';\nimport { dirname, join } from 'path';\nimport { tools } from './tools.js';\nimport { VectorDB } from '../vectordb/lancedb.js';\nimport { LocalEmbeddings } from '../embeddings/local.js';\nimport { GitStateTracker } from '../git/tracker.js';\nimport { indexMultipleFiles, indexSingleFile } from '../indexer/incremental.js';\nimport { configService } from '../config/service.js';\nimport { ManifestManager } from '../indexer/manifest.js';\nimport { isGitAvailable, isGitRepo } from '../git/utils.js';\nimport { FileWatcher } from '../watcher/index.js';\nimport { VERSION_CHECK_INTERVAL_MS } from '../constants.js';\nimport { wrapToolHandler } from './utils/tool-wrapper.js';\nimport {\n SemanticSearchSchema,\n FindSimilarSchema,\n GetFileContextSchema,\n ListFunctionsSchema,\n} from './schemas/index.js';\nimport { LienError, LienErrorCode } from '../errors/index.js';\n\n// Get version from package.json dynamically\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\nconst require = createRequire(import.meta.url);\n\nlet packageJson: { name: string; version: string };\ntry {\n packageJson = require(join(__dirname, '../package.json'));\n} catch {\n packageJson = require(join(__dirname, '../../package.json'));\n}\n\nexport interface MCPServerOptions {\n rootDir: string;\n verbose?: boolean;\n watch?: boolean;\n}\n\nexport async function startMCPServer(options: MCPServerOptions): Promise<void> {\n const { rootDir, verbose, watch } = options;\n \n // Log to stderr (stdout is reserved for MCP protocol)\n const log = (message: string) => {\n 
if (verbose) {\n console.error(`[Lien MCP] ${message}`);\n }\n };\n \n log('Initializing MCP server...');\n \n // Initialize embeddings and vector DB\n const embeddings = new LocalEmbeddings();\n const vectorDB = new VectorDB(rootDir);\n \n try {\n log('Loading embedding model...');\n await embeddings.initialize();\n \n log('Loading vector database...');\n await vectorDB.initialize();\n \n log('Embeddings and vector DB ready');\n } catch (error) {\n console.error(`Failed to initialize: ${error}`);\n process.exit(1);\n }\n \n // Create MCP server\n const server = new Server(\n {\n name: 'lien',\n version: packageJson.version,\n },\n {\n capabilities: {\n tools: {},\n },\n }\n );\n \n // Register tool list handler\n server.setRequestHandler(ListToolsRequestSchema, async () => ({\n tools,\n }));\n \n // Helper function to check version and reconnect if needed\n const checkAndReconnect = async () => {\n try {\n const versionChanged = await vectorDB.checkVersion();\n if (versionChanged) {\n log('Index version changed, reconnecting to database...');\n await vectorDB.reconnect();\n log('Reconnected to updated index');\n }\n } catch (error) {\n // Log but don't throw - fall back to existing connection\n log(`Version check failed: ${error}`);\n }\n };\n \n // Helper to get current index metadata for responses\n const getIndexMetadata = () => ({\n indexVersion: vectorDB.getCurrentVersion(),\n indexDate: vectorDB.getVersionDate(),\n });\n \n // Start background polling for version changes (every 2 seconds)\n // This ensures we reconnect as soon as possible after reindex, even if no tool calls are made\n const versionCheckInterval = setInterval(async () => {\n await checkAndReconnect();\n }, VERSION_CHECK_INTERVAL_MS);\n \n // Register tool call handler\n server.setRequestHandler(CallToolRequestSchema, async (request) => {\n const { name, arguments: args } = request.params;\n \n log(`Handling tool call: ${name}`);\n \n try {\n switch (name) {\n case 'semantic_search':\n return await wrapToolHandler(\n SemanticSearchSchema,\n async (validatedArgs) => {\n log(`Searching for: \"${validatedArgs.query}\"`);\n \n // Check if index has been updated and reconnect if needed\n await checkAndReconnect();\n \n const queryEmbedding = await embeddings.embed(validatedArgs.query);\n const results = await vectorDB.search(queryEmbedding, validatedArgs.limit, validatedArgs.query);\n \n log(`Found ${results.length} results`);\n \n return {\n indexInfo: getIndexMetadata(),\n results,\n };\n }\n )(args);\n \n case 'find_similar':\n return await wrapToolHandler(\n FindSimilarSchema,\n async (validatedArgs) => {\n log(`Finding similar code...`);\n \n // Check if index has been updated and reconnect if needed\n await checkAndReconnect();\n \n const codeEmbedding = await embeddings.embed(validatedArgs.code);\n // Pass code as query for relevance boosting\n const results = await vectorDB.search(codeEmbedding, validatedArgs.limit, validatedArgs.code);\n \n log(`Found ${results.length} similar chunks`);\n \n return {\n indexInfo: getIndexMetadata(),\n results,\n };\n }\n )(args);\n \n case 'get_file_context':\n return await wrapToolHandler(\n GetFileContextSchema,\n async (validatedArgs) => {\n log(`Getting context for: ${validatedArgs.filepath}`);\n \n // Check if index has been updated and reconnect if needed\n await checkAndReconnect();\n \n // Search for chunks from this file by embedding the filepath\n // This is a simple approach; could be improved with metadata filtering\n const fileEmbedding = await 
embeddings.embed(validatedArgs.filepath);\n const allResults = await vectorDB.search(fileEmbedding, 50, validatedArgs.filepath);\n \n // Filter results to only include chunks from the target file\n const fileChunks = allResults.filter(r => \n r.metadata.file.includes(validatedArgs.filepath) || validatedArgs.filepath.includes(r.metadata.file)\n );\n \n let results = fileChunks;\n \n if (validatedArgs.includeRelated && fileChunks.length > 0) {\n // Get related chunks by searching with the first chunk's content\n const relatedEmbedding = await embeddings.embed(fileChunks[0].content);\n const related = await vectorDB.search(relatedEmbedding, 5, fileChunks[0].content);\n \n // Add related chunks that aren't from the same file\n const relatedOtherFiles = related.filter(r => \n !r.metadata.file.includes(validatedArgs.filepath) && !validatedArgs.filepath.includes(r.metadata.file)\n );\n \n results = [...fileChunks, ...relatedOtherFiles];\n }\n \n log(`Found ${results.length} chunks`);\n \n return {\n indexInfo: getIndexMetadata(),\n file: validatedArgs.filepath,\n chunks: results,\n };\n }\n )(args);\n \n case 'list_functions':\n return await wrapToolHandler(\n ListFunctionsSchema,\n async (validatedArgs) => {\n log('Listing functions with symbol metadata...');\n \n // Check if index has been updated and reconnect if needed\n await checkAndReconnect();\n \n let results;\n let usedMethod = 'symbols';\n \n try {\n // Try using symbol-based query first (v0.5.0+)\n results = await vectorDB.querySymbols({\n language: validatedArgs.language,\n pattern: validatedArgs.pattern,\n limit: 50,\n });\n \n // If no results and pattern was provided, it might be an old index\n // Fall back to content scanning\n if (results.length === 0 && (validatedArgs.language || validatedArgs.pattern)) {\n log('No symbol results, falling back to content scan...');\n results = await vectorDB.scanWithFilter({\n language: validatedArgs.language,\n pattern: validatedArgs.pattern,\n limit: 50,\n });\n usedMethod = 'content';\n }\n } catch (error) {\n // If querySymbols fails (e.g., old index without symbol fields), fall back\n log(`Symbol query failed, falling back to content scan: ${error}`);\n results = await vectorDB.scanWithFilter({\n language: validatedArgs.language,\n pattern: validatedArgs.pattern,\n limit: 50,\n });\n usedMethod = 'content';\n }\n \n log(`Found ${results.length} matches using ${usedMethod} method`);\n \n return {\n indexInfo: getIndexMetadata(),\n method: usedMethod,\n results,\n note: usedMethod === 'content' \n ? 'Using content search. Run \"lien reindex\" to enable faster symbol-based queries.'\n : undefined,\n };\n }\n )(args);\n \n default:\n throw new LienError(\n `Unknown tool: ${name}`,\n LienErrorCode.INVALID_INPUT,\n { requestedTool: name, availableTools: tools.map(t => t.name) },\n 'medium',\n false,\n false\n );\n }\n } catch (error) {\n // Handle errors at the switch level (e.g., unknown tool)\n if (error instanceof LienError) {\n return {\n isError: true,\n content: [{\n type: 'text' as const,\n text: JSON.stringify(error.toJSON(), null, 2),\n }],\n };\n }\n \n // Unexpected error\n console.error(`Unexpected error handling tool call ${name}:`, error);\n return {\n isError: true,\n content: [{\n type: 'text' as const,\n text: JSON.stringify({\n error: error instanceof Error ? 
error.message : 'Unknown error',\n code: LienErrorCode.INTERNAL_ERROR,\n tool: name,\n }, null, 2),\n }],\n };\n }\n });\n \n // Load configuration for auto-indexing, git detection, and file watching\n const config = await configService.load(rootDir);\n \n // Check if this is the first run (no data in index) and auto-index if needed\n const hasIndex = await vectorDB.hasData();\n \n if (!hasIndex && config.mcp.autoIndexOnFirstRun) {\n log('📦 No index found - running initial indexing...');\n log('⏱️ This may take 5-20 minutes depending on project size');\n \n try {\n // Import indexCodebase function\n const { indexCodebase } = await import('../indexer/index.js');\n await indexCodebase({ rootDir, verbose: true });\n log('✅ Initial indexing complete!');\n } catch (error) {\n log(`⚠️ Initial indexing failed: ${error}`);\n log('You can manually run: lien index');\n // Don't exit - server can still start, just won't have data\n }\n } else if (!hasIndex) {\n log('⚠️ No index found. Auto-indexing is disabled in config.');\n log('Run \"lien index\" to index your codebase.');\n }\n \n // Initialize git detection if enabled\n let gitTracker: GitStateTracker | null = null;\n let gitPollInterval: NodeJS.Timeout | null = null;\n let fileWatcher: FileWatcher | null = null;\n \n if (config.gitDetection.enabled) {\n const gitAvailable = await isGitAvailable();\n const isRepo = await isGitRepo(rootDir);\n \n if (gitAvailable && isRepo) {\n log('✓ Detected git repository');\n gitTracker = new GitStateTracker(rootDir, vectorDB.dbPath);\n \n // Check for git changes on startup\n try {\n log('Checking for git changes...');\n const changedFiles = await gitTracker.initialize();\n \n if (changedFiles && changedFiles.length > 0) {\n log(`🌿 Git changes detected: ${changedFiles.length} files changed`);\n log('Reindexing changed files...');\n \n const count = await indexMultipleFiles(\n changedFiles,\n vectorDB,\n embeddings,\n config,\n { verbose }\n );\n \n log(`✓ Reindexed ${count} files`);\n } else {\n log('✓ Index is up to date with git state');\n }\n } catch (error) {\n log(`Warning: Failed to check git state on startup: ${error}`);\n }\n \n // Start background polling for git changes\n log(`✓ Git detection enabled (checking every ${config.gitDetection.pollIntervalMs / 1000}s)`);\n \n gitPollInterval = setInterval(async () => {\n try {\n const changedFiles = await gitTracker!.detectChanges();\n \n if (changedFiles && changedFiles.length > 0) {\n log(`🌿 Git change detected: ${changedFiles.length} files changed`);\n log('Reindexing in background...');\n \n // Don't await - run in background\n indexMultipleFiles(\n changedFiles,\n vectorDB,\n embeddings,\n config,\n { verbose }\n ).then(count => {\n log(`✓ Background reindex complete: ${count} files`);\n }).catch(error => {\n log(`Warning: Background reindex failed: ${error}`);\n });\n }\n } catch (error) {\n log(`Warning: Git detection check failed: ${error}`);\n }\n }, config.gitDetection.pollIntervalMs);\n } else {\n if (!gitAvailable) {\n log('Git not available - git detection disabled');\n } else if (!isRepo) {\n log('Not a git repository - git detection disabled');\n }\n }\n } else {\n log('Git detection disabled by configuration');\n }\n \n // Initialize file watching if enabled\n // Priority: CLI flag if explicitly set (true/false), otherwise use config default\n const fileWatchingEnabled = watch !== undefined ? 
watch : config.fileWatching.enabled;\n \n if (fileWatchingEnabled) {\n log('👀 Starting file watcher...');\n fileWatcher = new FileWatcher(rootDir, config);\n \n try {\n await fileWatcher.start(async (event) => {\n const { type, filepath } = event;\n \n if (type === 'unlink') {\n // File deleted\n log(`🗑️ File deleted: ${filepath}`);\n try {\n await vectorDB.deleteByFile(filepath);\n \n // Update manifest\n const manifest = new ManifestManager(vectorDB.dbPath);\n await manifest.removeFile(filepath);\n \n log(`✓ Removed ${filepath} from index`);\n } catch (error) {\n log(`Warning: Failed to remove ${filepath}: ${error}`);\n }\n } else {\n // File added or changed\n const action = type === 'add' ? 'added' : 'changed';\n log(`📝 File ${action}: ${filepath}`);\n \n // Reindex in background\n indexSingleFile(filepath, vectorDB, embeddings, config, { verbose })\n .catch((error) => {\n log(`Warning: Failed to reindex ${filepath}: ${error}`);\n });\n }\n });\n \n const watchedCount = fileWatcher.getWatchedFiles().length;\n log(`✓ File watching enabled (watching ${watchedCount} files)`);\n } catch (error) {\n log(`Warning: Failed to start file watcher: ${error}`);\n fileWatcher = null;\n }\n }\n \n // Handle shutdown gracefully\n const cleanup = async () => {\n log('Shutting down MCP server...');\n clearInterval(versionCheckInterval);\n if (gitPollInterval) {\n clearInterval(gitPollInterval);\n }\n if (fileWatcher) {\n await fileWatcher.stop();\n }\n process.exit(0);\n };\n \n // Listen for termination signals\n process.on('SIGINT', cleanup);\n process.on('SIGTERM', cleanup);\n \n // Connect to stdio transport\n const transport = new StdioServerTransport();\n \n // Use SDK's transport callbacks for parent process detection\n // This avoids conflicts with the transport's stdin management\n transport.onclose = () => {\n log('Transport closed, parent process likely terminated');\n cleanup().catch(() => process.exit(0));\n };\n \n transport.onerror = (error) => {\n log(`Transport error: ${error}`);\n // Transport will close after error, onclose will handle cleanup\n };\n \n await server.connect(transport);\n \n log('MCP server started and listening on stdio');\n}\n\n","import { z } from 'zod';\nimport { zodToJsonSchema } from 'zod-to-json-schema';\n\n/**\n * Convert a Zod schema to an MCP tool schema.\n * \n * This utility generates JSON Schema from Zod schemas for use in MCP tool definitions.\n * The resulting schema includes all validation rules and descriptions from the Zod schema.\n * \n * @param zodSchema - The Zod schema to convert\n * @param name - The tool name\n * @param description - The tool description\n * @returns MCP-compatible tool schema object\n * \n * @example\n * ```typescript\n * const SearchSchema = z.object({\n * query: z.string().min(3).describe(\"Search query\"),\n * limit: z.number().default(5)\n * });\n * \n * const tool = toMCPToolSchema(\n * SearchSchema,\n * 'semantic_search',\n * 'Search the codebase semantically'\n * );\n * ```\n */\nexport function toMCPToolSchema(\n zodSchema: z.ZodSchema,\n name: string,\n description: string\n) {\n return {\n name,\n description,\n inputSchema: zodToJsonSchema(zodSchema, {\n target: 'jsonSchema7',\n $refStrategy: 'none',\n }),\n };\n}\n\n","import { z } from 'zod';\n\n/**\n * Schema for semantic search tool input.\n * \n * Validates query strings and result limits for semantic code search.\n * Includes rich descriptions to guide AI assistants on proper usage.\n */\nexport const SemanticSearchSchema = z.object({\n query: z.string()\n 
.min(3, \"Query must be at least 3 characters\")\n .max(500, \"Query too long (max 500 characters)\")\n .describe(\n \"Natural language description of what you're looking for.\\n\\n\" +\n \"Use full sentences describing functionality, not exact names.\\n\\n\" +\n \"Good examples:\\n\" +\n \" - 'handles user authentication'\\n\" +\n \" - 'validates email format'\\n\" +\n \" - 'processes payment transactions'\\n\\n\" +\n \"Bad examples:\\n\" +\n \" - 'auth' (too vague)\\n\" +\n \" - 'validateEmail' (use grep for exact names)\"\n ),\n \n limit: z.number()\n .int()\n .min(1, \"Limit must be at least 1\")\n .max(50, \"Limit cannot exceed 50\")\n .default(5)\n .describe(\n \"Number of results to return.\\n\\n\" +\n \"Default: 5\\n\" +\n \"Increase to 10-15 for broad exploration.\"\n ),\n});\n\n/**\n * Inferred TypeScript type for semantic search input\n */\nexport type SemanticSearchInput = z.infer<typeof SemanticSearchSchema>;\n\n","import { z } from 'zod';\n\n/**\n * Schema for find_similar tool input.\n * \n * Validates code snippets and result limits for similarity search.\n */\nexport const FindSimilarSchema = z.object({\n code: z.string()\n .min(10, \"Code snippet must be at least 10 characters\")\n .describe(\n \"Code snippet to find similar implementations for.\\n\\n\" +\n \"Provide a representative code sample that demonstrates the pattern \" +\n \"you want to find similar examples of in the codebase.\"\n ),\n \n limit: z.number()\n .int()\n .min(1, \"Limit must be at least 1\")\n .max(20, \"Limit cannot exceed 20\")\n .default(5)\n .describe(\n \"Number of similar code blocks to return.\\n\\n\" +\n \"Default: 5\"\n ),\n});\n\n/**\n * Inferred TypeScript type for find similar input\n */\nexport type FindSimilarInput = z.infer<typeof FindSimilarSchema>;\n\n","import { z } from 'zod';\n\n/**\n * Schema for get_file_context tool input.\n * \n * Validates file paths and context options for retrieving file-specific code chunks.\n */\nexport const GetFileContextSchema = z.object({\n filepath: z.string()\n .min(1, \"Filepath cannot be empty\")\n .describe(\n \"Relative path to file from workspace root.\\n\\n\" +\n \"Example: 'src/components/Button.tsx'\"\n ),\n \n includeRelated: z.boolean()\n .default(true)\n .describe(\n \"Include semantically related chunks from nearby code.\\n\\n\" +\n \"Default: true\\n\\n\" +\n \"When enabled, also returns related code from other files that are \" +\n \"semantically similar to the target file's contents.\"\n ),\n});\n\n/**\n * Inferred TypeScript type for file context input\n */\nexport type GetFileContextInput = z.infer<typeof GetFileContextSchema>;\n\n","import { z } from 'zod';\n\n/**\n * Schema for list_functions tool input.\n * \n * Validates pattern and language filters for symbol listing.\n */\nexport const ListFunctionsSchema = z.object({\n pattern: z.string()\n .optional()\n .describe(\n \"Regex pattern to match symbol names.\\n\\n\" +\n \"Examples:\\n\" +\n \" - '.*Controller.*' to find all Controllers\\n\" +\n \" - 'handle.*' to find handlers\\n\" +\n \" - '.*Service$' to find Services\\n\\n\" +\n \"If omitted, returns all symbols.\"\n ),\n \n language: z.string()\n .optional()\n .describe(\n \"Filter by programming language.\\n\\n\" +\n \"Examples: 'typescript', 'python', 'javascript', 'php'\\n\\n\" +\n \"If omitted, searches all languages.\"\n ),\n});\n\n/**\n * Inferred TypeScript type for list functions input\n */\nexport type ListFunctionsInput = z.infer<typeof ListFunctionsSchema>;\n\n","import { toMCPToolSchema } from 
'./utils/zod-to-json-schema.js';\nimport {\n SemanticSearchSchema,\n FindSimilarSchema,\n GetFileContextSchema,\n ListFunctionsSchema,\n} from './schemas/index.js';\n\n/**\n * MCP tool definitions with Zod-generated schemas.\n * \n * All schemas are automatically generated from Zod definitions,\n * providing type safety and rich validation at runtime.\n */\nexport const tools = [\n toMCPToolSchema(\n SemanticSearchSchema,\n 'semantic_search',\n 'Search the codebase semantically for relevant code using natural language. Results include a relevance category (highly_relevant, relevant, loosely_related, not_relevant) based on semantic similarity.'\n ),\n toMCPToolSchema(\n FindSimilarSchema,\n 'find_similar',\n 'Find code similar to a given code snippet. Results include a relevance category (highly_relevant, relevant, loosely_related, not_relevant) based on semantic similarity.'\n ),\n toMCPToolSchema(\n GetFileContextSchema,\n 'get_file_context',\n 'Get all chunks and related context for a specific file. Results include a relevance category (highly_relevant, relevant, loosely_related, not_relevant) based on semantic similarity.'\n ),\n toMCPToolSchema(\n ListFunctionsSchema,\n 'list_functions',\n 'List functions, classes, and interfaces by name pattern and language'\n ),\n];\n\n","import chokidar from 'chokidar';\nimport { LienConfig, LegacyLienConfig, isLegacyConfig, isModernConfig } from '../config/schema.js';\n\nexport interface FileChangeEvent {\n type: 'add' | 'change' | 'unlink';\n filepath: string;\n}\n\nexport type FileChangeHandler = (event: FileChangeEvent) => void | Promise<void>;\n\n/**\n * File watcher service that monitors code files for changes.\n * Uses chokidar for robust file watching with debouncing support.\n */\nexport class FileWatcher {\n private watcher: chokidar.FSWatcher | null = null;\n private debounceTimers: Map<string, NodeJS.Timeout> = new Map();\n private config: LienConfig | LegacyLienConfig;\n private rootDir: string;\n private onChangeHandler: FileChangeHandler | null = null;\n \n constructor(rootDir: string, config: LienConfig | LegacyLienConfig) {\n this.rootDir = rootDir;\n this.config = config;\n }\n \n /**\n * Starts watching files for changes.\n * \n * @param handler - Callback function called when files change\n */\n async start(handler: FileChangeHandler): Promise<void> {\n if (this.watcher) {\n throw new Error('File watcher is already running');\n }\n \n this.onChangeHandler = handler;\n \n // Get watch patterns based on config type\n let includePatterns: string[];\n let excludePatterns: string[];\n \n if (isLegacyConfig(this.config)) {\n includePatterns = this.config.indexing.include;\n excludePatterns = this.config.indexing.exclude;\n } else if (isModernConfig(this.config)) {\n // For modern configs, aggregate patterns from all frameworks\n includePatterns = this.config.frameworks.flatMap(f => f.config.include);\n excludePatterns = this.config.frameworks.flatMap(f => f.config.exclude);\n } else {\n includePatterns = ['**/*'];\n excludePatterns = [];\n }\n \n // Configure chokidar\n this.watcher = chokidar.watch(includePatterns, {\n cwd: this.rootDir,\n ignored: excludePatterns,\n persistent: true,\n ignoreInitial: true, // Don't trigger for existing files\n awaitWriteFinish: {\n stabilityThreshold: 500, // Wait 500ms for file to stop changing\n pollInterval: 100,\n },\n // Performance optimizations\n usePolling: false,\n interval: 100,\n binaryInterval: 300,\n });\n \n // Register event handlers with debouncing\n this.watcher\n .on('add', 
(filepath) => this.handleChange('add', filepath))\n .on('change', (filepath) => this.handleChange('change', filepath))\n .on('unlink', (filepath) => this.handleChange('unlink', filepath))\n .on('error', (error) => {\n console.error(`[Lien] File watcher error: ${error}`);\n });\n \n // Wait for watcher to be ready\n await new Promise<void>((resolve) => {\n this.watcher!.on('ready', () => {\n resolve();\n });\n });\n }\n \n /**\n * Handles a file change event with debouncing.\n * Debouncing prevents rapid reindexing when files are saved multiple times quickly.\n */\n private handleChange(type: 'add' | 'change' | 'unlink', filepath: string): void {\n // Clear existing debounce timer for this file\n const existingTimer = this.debounceTimers.get(filepath);\n if (existingTimer) {\n clearTimeout(existingTimer);\n }\n \n // Set new debounce timer\n const timer = setTimeout(() => {\n this.debounceTimers.delete(filepath);\n \n // Call handler\n if (this.onChangeHandler) {\n const absolutePath = filepath.startsWith('/')\n ? filepath\n : `${this.rootDir}/${filepath}`;\n \n try {\n const result = this.onChangeHandler({\n type,\n filepath: absolutePath,\n });\n \n // Handle async handlers\n if (result instanceof Promise) {\n result.catch((error) => {\n console.error(`[Lien] Error handling file change: ${error}`);\n });\n }\n } catch (error) {\n console.error(`[Lien] Error handling file change: ${error}`);\n }\n }\n }, this.config.fileWatching.debounceMs);\n \n this.debounceTimers.set(filepath, timer);\n }\n \n /**\n * Stops the file watcher and cleans up resources.\n */\n async stop(): Promise<void> {\n if (!this.watcher) {\n return;\n }\n \n // Clear all pending debounce timers\n for (const timer of this.debounceTimers.values()) {\n clearTimeout(timer);\n }\n this.debounceTimers.clear();\n \n // Close watcher\n await this.watcher.close();\n this.watcher = null;\n this.onChangeHandler = null;\n }\n \n /**\n * Gets the list of files currently being watched.\n */\n getWatchedFiles(): string[] {\n if (!this.watcher) {\n return [];\n }\n \n const watched = this.watcher.getWatched();\n const files: string[] = [];\n \n for (const [dir, filenames] of Object.entries(watched)) {\n for (const filename of filenames) {\n files.push(`${dir}/${filename}`);\n }\n }\n \n return files;\n }\n \n /**\n * Checks if the watcher is currently running.\n */\n isRunning(): boolean {\n return this.watcher !== null;\n }\n}\n\n","import { ZodSchema, ZodError } from 'zod';\nimport { LienError, LienErrorCode } from '../../errors/index.js';\n\n/**\n * Wrap a tool handler with Zod validation and error handling.\n * \n * This utility provides automatic:\n * - Input validation using Zod schemas\n * - Type-safe handler execution with inferred types\n * - Consistent error formatting for validation, Lien, and unexpected errors\n * - MCP-compatible response structure\n * \n * @param schema - Zod schema to validate tool inputs against\n * @param handler - Tool handler function that receives validated inputs\n * @returns Wrapped handler that validates inputs and handles errors\n * \n * @example\n * ```typescript\n * const SearchSchema = z.object({\n * query: z.string().min(3),\n * limit: z.number().default(5)\n * });\n * \n * const searchHandler = wrapToolHandler(\n * SearchSchema,\n * async (args) => {\n * // args is fully typed: { query: string; limit: number }\n * const results = await search(args.query, args.limit);\n * return { results };\n * }\n * );\n * \n * // Use in MCP server\n * return await 
searchHandler(request.params.arguments);\n * ```\n */\nexport function wrapToolHandler<T>(\n schema: ZodSchema<T>,\n handler: (validated: T) => Promise<any>\n) {\n return async (args: unknown) => {\n try {\n // Validate input with Zod\n const validated = schema.parse(args);\n \n // Execute handler with validated, typed input\n const result = await handler(validated);\n \n // Return MCP-compatible success response\n return {\n content: [{\n type: 'text' as const,\n text: JSON.stringify(result, null, 2),\n }],\n };\n \n } catch (error) {\n // Handle Zod validation errors\n if (error instanceof ZodError) {\n return {\n isError: true,\n content: [{\n type: 'text' as const,\n text: JSON.stringify({\n error: 'Invalid parameters',\n code: LienErrorCode.INVALID_INPUT,\n details: error.errors.map(e => ({\n field: e.path.join('.'),\n message: e.message,\n })),\n }, null, 2),\n }],\n };\n }\n \n // Handle known Lien errors\n if (error instanceof LienError) {\n return {\n isError: true,\n content: [{\n type: 'text' as const,\n text: JSON.stringify(error.toJSON(), null, 2),\n }],\n };\n }\n \n // Handle unexpected errors\n console.error('Unexpected error in tool handler:', error);\n return {\n isError: true,\n content: [{\n type: 'text' as const,\n text: JSON.stringify({\n error: error instanceof Error ? error.message : 'Unknown error',\n code: LienErrorCode.INTERNAL_ERROR,\n }, null, 2),\n }],\n };\n }\n };\n}\n\n","import { program } from './cli/index.js';\n\nprogram.parse();\n\n"],"mappings":";;;;;;;;;;;;AAAA,IAOa,oBACA,uBAGA,qBACA,8BAKA,4BAIA,0BAEA,0BAGA,sBACA,yBAGA,cACA,2BAGA,8BAGA,qBAGA,wBAQA;AAhDb;AAAA;AAAA;AAOO,IAAM,qBAAqB;AAC3B,IAAM,wBAAwB;AAG9B,IAAM,sBAAsB;AAC5B,IAAM,+BAA+B;AAKrC,IAAM,6BAA6B;AAInC,IAAM,2BAA2B;AAEjC,IAAM,2BAA2B;AAGjC,IAAM,uBAAuB;AAC7B,IAAM,0BAA0B;AAGhC,IAAM,eAAe;AACrB,IAAM,4BAA4B;AAGlC,IAAM,+BAA+B;AAGrC,IAAM,sBAAsB;AAG5B,IAAM,yBAAyB;AAQ/B,IAAM,uBAAuB;AAAA;AAAA;;;AC0C7B,SAAS,eACd,QAC4B;AAC5B,SAAO,cAAc,UAAU,EAAE,gBAAgB;AACnD;AAOO,SAAS,eACd,QACsB;AACtB,SAAO,gBAAgB;AACzB;AAzGA,IA+Ga;AA/Gb;AAAA;AAAA;AAAA;AA+GO,IAAM,gBAA4B;AAAA,MACvC,SAAS;AAAA,MACT,MAAM;AAAA,QACJ,WAAW;AAAA,QACX,cAAc;AAAA,QACd,aAAa;AAAA,QACb,oBAAoB;AAAA,MACtB;AAAA,MACA,KAAK;AAAA,QACH,MAAM;AAAA,QACN,WAAW;AAAA,QACX,qBAAqB;AAAA,MACvB;AAAA,MACA,cAAc;AAAA,QACZ,SAAS;AAAA,QACT,gBAAgB;AAAA,MAClB;AAAA,MACA,cAAc;AAAA,QACZ,SAAS;AAAA;AAAA,QACT,YAAY;AAAA,MACd;AAAA,MACA,YAAY,CAAC;AAAA;AAAA,IACf;AAAA;AAAA;;;AC3HO,SAAS,gBAAgB,UAAsB,MAAuC;AAC3F,SAAO;AAAA,IACL,SAAS,KAAK,WAAW,SAAS;AAAA,IAClC,MAAM;AAAA,MACJ,GAAG,SAAS;AAAA,MACZ,GAAG,KAAK;AAAA,IACV;AAAA,IACA,KAAK;AAAA,MACH,GAAG,SAAS;AAAA,MACZ,GAAG,KAAK;AAAA,IACV;AAAA,IACA,cAAc;AAAA,MACZ,GAAG,SAAS;AAAA,MACZ,GAAG,KAAK;AAAA,IACV;AAAA,IACA,cAAc;AAAA,MACZ,GAAG,SAAS;AAAA,MACZ,GAAG,KAAK;AAAA,IACV;AAAA,IACA,YAAY,KAAK,cAAc,SAAS;AAAA,EAC1C;AACF;AAUO,SAAS,gBAAgB,QAA6B,OAAsC;AACjG,QAAM,YAAsB,CAAC;AAG7B,aAAW,OAAO,OAAO,KAAK,KAAK,GAAG;AACpC,QAAI,EAAE,OAAO,SAAS;AACpB,gBAAU,KAAK,GAAG;AAClB;AAAA,IACF;AAGA,QAAI,OAAO,MAAM,GAAG,MAAM,YAAY,MAAM,GAAG,MAAM,QAAQ,CAAC,MAAM,QAAQ,MAAM,GAAG,CAAC,GAAG;AACvF,YAAM,gBAAiB,OAAO,GAAG,KAA6B,CAAC;AAC/D,YAAM,eAAe,MAAM,GAAG;AAE9B,iBAAW,aAAa,OAAO,KAAK,YAAY,GAAG;AACjD,YAAI,EAAE,aAAa,gBAAgB;AACjC,oBAAU,KAAK,GAAG,GAAG,IAAI,SAAS,EAAE;AAAA,QACtC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAjEA;AAAA;AAAA;AAAA;AAAA;;;ACOO,SAAS,eAAe,QAAsB;AAKnD,MAAI,CAAC,QAAQ;AACX,WAAO;AAAA,EACT;AAGA,MAAI,OAAO,eAAe,QAAW;AACnC,WAAO;AAAA,EACT;AAGA,MAAI,OAAO,aAAa,QAAW;AACjC,WAAO;AAAA,EACT;AAGA,MAAI,OAAO,WAAW,OAAO,QAAQ,WAAW,KAAK,GAAG;AACtD,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAKO,SAAS,cAAc,WAAkD;AAE9E,QAAM,YAAwB
;AAAA,IAC5B,SAAS;AAAA,IACT,MAAM;AAAA,MACJ,WAAW,UAAU,UAAU,aAAa,cAAc,KAAK;AAAA,MAC/D,cAAc,UAAU,UAAU,gBAAgB,cAAc,KAAK;AAAA,MACrE,aAAa,UAAU,UAAU,eAAe,cAAc,KAAK;AAAA,MACnE,oBAAoB,UAAU,UAAU,sBAAsB,cAAc,KAAK;AAAA,IACnF;AAAA,IACA,KAAK;AAAA,MACH,MAAM,UAAU,KAAK,QAAQ,cAAc,IAAI;AAAA,MAC/C,WAAW,UAAU,KAAK,aAAa,cAAc,IAAI;AAAA,MACzD,qBAAqB,UAAU,KAAK,uBAAuB,cAAc,IAAI;AAAA,IAC/E;AAAA,IACA,cAAc;AAAA,MACZ,SAAS,UAAU,cAAc,WAAW,cAAc,aAAa;AAAA,MACvE,gBAAgB,UAAU,cAAc,kBAAkB,cAAc,aAAa;AAAA,IACvF;AAAA,IACA,cAAc;AAAA,MACZ,SAAS,UAAU,cAAc,WAAW,cAAc,aAAa;AAAA,MACvE,YAAY,UAAU,cAAc,cAAc,cAAc,aAAa;AAAA,IAC/E;AAAA,IACA,YAAY,CAAC;AAAA,EACf;AAGA,MAAI,UAAU,UAAU;AACtB,UAAM,mBAAsC;AAAA,MAC1C,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,QAAQ;AAAA,QACN,SAAS,UAAU,SAAS,WAAW,CAAC,6CAA6C;AAAA,QACrF,SAAS,UAAU,SAAS,WAAW;AAAA,UACrC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,cAAU,WAAW,KAAK,gBAAgB;AAAA,EAC5C,OAAO;AAEL,UAAM,mBAAsC;AAAA,MAC1C,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,QAAQ;AAAA,QACN,SAAS,CAAC,6CAA6C;AAAA,QACvD,SAAS;AAAA,UACP;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,cAAU,WAAW,KAAK,gBAAgB;AAAA,EAC5C;AAEA,SAAO;AACT;AA9GA;AAAA;AAAA;AAEA;AAAA;AAAA;;;ACFA;AAAA;AAAA;AAAA;AAAA;;;AC8GO,SAAS,UACd,OACA,SACA,mBACW;AACX,QAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,QAAM,QAAQ,iBAAiB,QAAQ,MAAM,QAAQ;AAErD,QAAM,eAAe,IAAI;AAAA,IACvB,GAAG,OAAO,KAAK,OAAO;AAAA;AAAA,IAEtB;AAAA,EACF;AAGA,MAAI,OAAO;AACT,iBAAa,QAAQ,GAAG,aAAa,KAAK;AAAA;AAAA;AAAA,EAAmB,KAAK;AAAA,EACpE;AAEA,SAAO;AACT;AAlIA,IAaa,WAiDA,aAwBA,gBAUA;AAhGb;AAAA;AAAA;AAAA;AAGA;AAUO,IAAM,YAAN,cAAwB,MAAM;AAAA,MACnC,YACE,SACgB,MACA,SACA,WAA0B,UAC1B,cAAuB,MACvB,YAAqB,OACrC;AACA,cAAM,OAAO;AANG;AACA;AACA;AACA;AACA;AAGhB,aAAK,OAAO;AAGZ,YAAI,MAAM,mBAAmB;AAC3B,gBAAM,kBAAkB,MAAM,KAAK,WAAW;AAAA,QAChD;AAAA,MACF;AAAA;AAAA;AAAA;AAAA,MAKA,SAAS;AACP,eAAO;AAAA,UACL,OAAO,KAAK;AAAA,UACZ,MAAM,KAAK;AAAA,UACX,UAAU,KAAK;AAAA,UACf,aAAa,KAAK;AAAA,UAClB,SAAS,KAAK;AAAA,QAChB;AAAA,MACF;AAAA;AAAA;AAAA;AAAA,MAKA,cAAuB;AACrB,eAAO,KAAK;AAAA,MACd;AAAA;AAAA;AAAA;AAAA,MAKA,gBAAyB;AACvB,eAAO,KAAK;AAAA,MACd;AAAA,IACF;AAKO,IAAM,cAAN,cAA0B,UAAU;AAAA,MACzC,YAAY,SAAiB,SAAmC;AAC9D,cAAM,gDAAuC,SAAS,UAAU,MAAM,KAAK;AAC3E,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAmBO,IAAM,iBAAN,cAA6B,UAAU;AAAA,MAC5C,YAAY,SAAiB,SAAmC;AAC9D,cAAM,0EAAoD,SAAS,QAAQ,MAAM,IAAI;AACrF,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAKO,IAAM,gBAAN,cAA4B,UAAU;AAAA,MAC3C,YAAY,SAAiB,SAAmC;AAC9D,cAAM,gDAAuC,SAAS,QAAQ,MAAM,IAAI;AACxE,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAAA;AAAA;;;ACrGA,OAAOA,SAAQ;AACf,OAAOC,WAAU;AADjB,IAgCa,eAujBA;AAvlBb;AAAA;AAAA;AAEA;AACA;AACA;AACA;AA2BO,IAAM,gBAAN,MAAM,eAAc;AAAA,MACzB,OAAwB,kBAAkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAU1C,MAAM,KAAK,UAAkB,QAAQ,IAAI,GAAwB;AAC/D,cAAM,aAAa,KAAK,cAAc,OAAO;AAE7C,YAAI;AACF,gBAAM,gBAAgB,MAAMD,IAAG,SAAS,YAAY,OAAO;AAC3D,gBAAM,aAAa,KAAK,MAAM,aAAa;AAG3C,cAAI,KAAK,eAAe,UAAU,GAAG;AACnC,oBAAQ,IAAI,qDAA8C;AAE1D,kBAAM,SAAS,MAAM,KAAK,QAAQ,OAAO;AAEzC,gBAAI,OAAO,YAAY,OAAO,YAAY;AACxC,oBAAM,iBAAiBC,MAAK,SAAS,OAAO,UAAU;AACtD,sBAAQ,IAAI,8CAAyC,cAAc,EAAE;AACrE,sBAAQ,IAAI,+DAAwD;AAAA,YACtE;AAEA,mBAAO,OAAO;AAAA,UAChB;AAGA,gBAAM,eAAe,gBAAgB,eAAe,UAAiC;AAGrF,gBAAM,aAAa,KAAK,SAAS,YAAY;AAC7C,cAAI,CAAC,WAAW,OAAO;AACrB,kBAAM,IAAI;AAAA,cACR;AAAA,EAA2B,WAAW,OAAO,KAAK,IAAI,CAAC;AAAA,cACvD,EAAE,QAAQ,WAAW,QAAQ,UAAU,WAAW,SAAS;AAAA,YAC7D;AAAA,UACF;AAGA,cAAI,WAAW,SAAS,SAAS,GAAG;AAClC,oBAAQ,KAAK,uCAA6B;AAC1C,uBAAW,SAAS,QAAQ,aAAW,QAAQ,KAAK,MAAM,OAAO,EAAE,CAAC;AAAA,UACtE;AAEA,iBAAO;AAAA,QACT,SAAS,OAAO;AACd,cAAK,MAAgC,SAAS,UAAU;AAEtD,mBAA
O;AAAA,UACT;AAEA,cAAI,iBAAiB,aAAa;AAChC,kBAAM;AAAA,UACR;AAEA,cAAI,iBAAiB,aAAa;AAChC,kBAAM,IAAI;AAAA,cACR;AAAA,cACA,EAAE,MAAM,YAAY,eAAe,MAAM,QAAQ;AAAA,YACnD;AAAA,UACF;AAEA,gBAAM,UAAU,OAAO,gCAAgC,EAAE,MAAM,WAAW,CAAC;AAAA,QAC7E;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAUA,MAAM,KAAK,SAAiB,QAAmC;AAC7D,cAAM,aAAa,KAAK,cAAc,OAAO;AAG7C,cAAM,aAAa,KAAK,SAAS,MAAM;AACvC,YAAI,CAAC,WAAW,OAAO;AACrB,gBAAM,IAAI;AAAA,YACR;AAAA,EAAuC,WAAW,OAAO,KAAK,IAAI,CAAC;AAAA,YACnE,EAAE,QAAQ,WAAW,OAAO;AAAA,UAC9B;AAAA,QACF;AAEA,YAAI;AACF,gBAAM,aAAa,KAAK,UAAU,QAAQ,MAAM,CAAC,IAAI;AACrD,gBAAMD,IAAG,UAAU,YAAY,YAAY,OAAO;AAAA,QACpD,SAAS,OAAO;AACd,gBAAM,UAAU,OAAO,gCAAgC,EAAE,MAAM,WAAW,CAAC;AAAA,QAC7E;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,MAAM,OAAO,UAAkB,QAAQ,IAAI,GAAqB;AAC9D,cAAM,aAAa,KAAK,cAAc,OAAO;AAC7C,YAAI;AACF,gBAAMA,IAAG,OAAO,UAAU;AAC1B,iBAAO;AAAA,QACT,QAAQ;AACN,iBAAO;AAAA,QACT;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAUA,MAAM,QAAQ,UAAkB,QAAQ,IAAI,GAA6B;AACvE,cAAM,aAAa,KAAK,cAAc,OAAO;AAE7C,YAAI;AAEF,gBAAM,gBAAgB,MAAMA,IAAG,SAAS,YAAY,OAAO;AAC3D,gBAAM,YAAY,KAAK,MAAM,aAAa;AAG1C,cAAI,CAAC,KAAK,eAAe,SAAS,GAAG;AACnC,mBAAO;AAAA,cACL,UAAU;AAAA,cACV,QAAQ;AAAA,YACV;AAAA,UACF;AAGA,gBAAM,YAAY,cAAiB,SAAS;AAG5C,gBAAM,aAAa,KAAK,SAAS,SAAS;AAC1C,cAAI,CAAC,WAAW,OAAO;AACrB,kBAAM,IAAI;AAAA,cACR;AAAA,EAA8C,WAAW,OAAO,KAAK,IAAI,CAAC;AAAA,cAC1E,EAAE,QAAQ,WAAW,OAAO;AAAA,YAC9B;AAAA,UACF;AAGA,gBAAM,aAAa,GAAG,UAAU;AAChC,gBAAMA,IAAG,SAAS,YAAY,UAAU;AAGxC,gBAAM,KAAK,KAAK,SAAS,SAAS;AAElC,iBAAO;AAAA,YACL,UAAU;AAAA,YACV;AAAA,YACA,QAAQ;AAAA,UACV;AAAA,QACF,SAAS,OAAO;AACd,cAAK,MAAgC,SAAS,UAAU;AACtD,mBAAO;AAAA,cACL,UAAU;AAAA,cACV,QAAQ;AAAA,YACV;AAAA,UACF;AAEA,cAAI,iBAAiB,aAAa;AAChC,kBAAM;AAAA,UACR;AAEA,gBAAM,UAAU,OAAO,kCAAkC,EAAE,MAAM,WAAW,CAAC;AAAA,QAC/E;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,eAAe,QAA0B;AACvC,eAAO,eAAoB,MAAM;AAAA,MACnC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MASA,SAAS,QAAmC;AAC1C,cAAM,SAAmB,CAAC;AAC1B,cAAM,WAAqB,CAAC;AAG5B,YAAI,CAAC,UAAU,OAAO,WAAW,UAAU;AACzC,iBAAO;AAAA,YACL,OAAO;AAAA,YACP,QAAQ,CAAC,iCAAiC;AAAA,YAC1C,UAAU,CAAC;AAAA,UACb;AAAA,QACF;AAEA,cAAM,MAAM;AAGZ,YAAI,CAAC,IAAI,SAAS;AAChB,iBAAO,KAAK,iCAAiC;AAAA,QAC/C;AAGA,YAAI,eAAe,GAAoC,GAAG;AACxD,eAAK,qBAAqB,KAAmB,QAAQ,QAAQ;AAAA,QAC/D,WAAW,eAAe,GAAoC,GAAG;AAC/D,eAAK,qBAAqB,KAAyB,QAAQ,QAAQ;AAAA,QACrE,OAAO;AACL,iBAAO,KAAK,wFAAwF;AAAA,QACtG;AAEA,eAAO;AAAA,UACL,OAAO,OAAO,WAAW;AAAA,UACzB;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MASA,gBAAgB,QAA+C;AAC7D,cAAM,SAAmB,CAAC;AAC1B,cAAM,WAAqB,CAAC;AAG5B,YAAI,OAAO,MAAM;AACf,eAAK,mBAAmB,OAAO,MAAM,QAAQ,QAAQ;AAAA,QACvD;AAGA,YAAI,OAAO,KAAK;AACd,eAAK,kBAAkB,OAAO,KAAK,QAAQ,QAAQ;AAAA,QACrD;AAGA,YAAI,OAAO,cAAc;AACvB,eAAK,2BAA2B,OAAO,cAAc,QAAQ,QAAQ;AAAA,QACvE;AAGA,YAAI,OAAO,cAAc;AACvB,eAAK,2BAA2B,OAAO,cAAc,QAAQ,QAAQ;AAAA,QACvE;AAGA,YAAI,OAAO,YAAY;AACrB,eAAK,mBAAmB,OAAO,YAAY,QAAQ,QAAQ;AAAA,QAC7D;AAEA,eAAO;AAAA,UACL,OAAO,OAAO,WAAW;AAAA,UACzB;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA;AAAA;AAAA;AAAA,MAKQ,cAAc,SAAyB;AAC7C,eAAOC,MAAK,KAAK,SAAS,eAAc,eAAe;AAAA,MACzD;AAAA;AAAA;AAAA;AAAA,MAKQ,qBACN,QACA,QACA,UACM;AAEN,YAAI,CAAC,OAAO,MAAM;AAChB,iBAAO,KAAK,8BAA8B;AAC1C;AAAA,QACF;AACA,aAAK,mBAAmB,OAAO,MAAM,QAAQ,QAAQ;AAGrD,YAAI,CAAC,OAAO,KAAK;AACf,iBAAO,KAAK,6BAA6B;AACzC;AAAA,QACF;AACA,aAAK,kBAAkB,OAAO,KAAK,QAAQ,QAAQ;AAGnD,YAAI,CAAC,OAAO,cAAc;AACxB,iBAAO,KAAK,sCAAsC;AAClD;AAAA,QACF;AACA,aAAK,2BAA2B,OAAO,cAAc,QAAQ,QAAQ;AAGrE,YAAI,CAAC,OAAO,cAAc;AACxB,iBAAO,KAAK,sCAAsC;AAClD;AAAA,QACF;AACA,aAAK,2BAA2B,OAAO,cAAc,QAAQ,QAAQ;AAGrE,YAAI,CAAC,OAAO,YAAY;AACtB,iBAAO,KAAK,oCAAoC;AAChD;AAAA,QACF;AACA,aAAK,mBAAmB,OAA
O,YAAY,QAAQ,QAAQ;AAAA,MAC7D;AAAA;AAAA;AAAA;AAAA,MAKQ,qBACN,QACA,QACA,UACM;AACN,iBAAS,KAAK,sFAAsF;AAGpG,YAAI,CAAC,OAAO,UAAU;AACpB,iBAAO,KAAK,kCAAkC;AAC9C;AAAA,QACF;AAEA,cAAM,EAAE,SAAS,IAAI;AAErB,YAAI,OAAO,SAAS,cAAc,YAAY,SAAS,aAAa,GAAG;AACrE,iBAAO,KAAK,8CAA8C;AAAA,QAC5D;AAEA,YAAI,OAAO,SAAS,iBAAiB,YAAY,SAAS,eAAe,GAAG;AAC1E,iBAAO,KAAK,qDAAqD;AAAA,QACnE;AAEA,YAAI,OAAO,SAAS,gBAAgB,YAAY,SAAS,cAAc,KAAK,SAAS,cAAc,IAAI;AACrG,iBAAO,KAAK,+CAA+C;AAAA,QAC7D;AAEA,YAAI,OAAO,SAAS,uBAAuB,YAAY,SAAS,sBAAsB,GAAG;AACvF,iBAAO,KAAK,uDAAuD;AAAA,QACrE;AAGA,YAAI,OAAO,KAAK;AACd,eAAK,kBAAkB,OAAO,KAAK,QAAQ,QAAQ;AAAA,QACrD;AAAA,MACF;AAAA;AAAA;AAAA;AAAA,MAKQ,mBACN,MACA,QACA,UACM;AACN,YAAI,KAAK,cAAc,QAAW;AAChC,cAAI,OAAO,KAAK,cAAc,YAAY,KAAK,aAAa,GAAG;AAC7D,mBAAO,KAAK,0CAA0C;AAAA,UACxD,WAAW,KAAK,YAAY,IAAI;AAC9B,qBAAS,KAAK,kFAAkF;AAAA,UAClG,WAAW,KAAK,YAAY,KAAK;AAC/B,qBAAS,KAAK,wEAAwE;AAAA,UACxF;AAAA,QACF;AAEA,YAAI,KAAK,iBAAiB,QAAW;AACnC,cAAI,OAAO,KAAK,iBAAiB,YAAY,KAAK,eAAe,GAAG;AAClE,mBAAO,KAAK,iDAAiD;AAAA,UAC/D;AAAA,QACF;AAEA,YAAI,KAAK,gBAAgB,QAAW;AAClC,cAAI,OAAO,KAAK,gBAAgB,YAAY,KAAK,cAAc,KAAK,KAAK,cAAc,IAAI;AACzF,mBAAO,KAAK,2CAA2C;AAAA,UACzD;AAAA,QACF;AAEA,YAAI,KAAK,uBAAuB,QAAW;AACzC,cAAI,OAAO,KAAK,uBAAuB,YAAY,KAAK,sBAAsB,GAAG;AAC/E,mBAAO,KAAK,mDAAmD;AAAA,UACjE,WAAW,KAAK,qBAAqB,KAAK;AACxC,qBAAS,KAAK,4EAA4E;AAAA,UAC5F;AAAA,QACF;AAAA,MACF;AAAA;AAAA;AAAA;AAAA,MAKQ,kBACN,KACA,QACA,WACM;AACN,YAAI,IAAI,SAAS,QAAW;AAC1B,cAAI,OAAO,IAAI,SAAS,YAAY,IAAI,OAAO,QAAQ,IAAI,OAAO,OAAO;AACvE,mBAAO,KAAK,yCAAyC;AAAA,UACvD;AAAA,QACF;AAEA,YAAI,IAAI,cAAc,QAAW;AAC/B,cAAI,IAAI,cAAc,WAAW,IAAI,cAAc,UAAU;AAC3D,mBAAO,KAAK,kDAAkD;AAAA,UAChE;AAAA,QACF;AAEA,YAAI,IAAI,wBAAwB,QAAW;AACzC,cAAI,OAAO,IAAI,wBAAwB,WAAW;AAChD,mBAAO,KAAK,2CAA2C;AAAA,UACzD;AAAA,QACF;AAAA,MACF;AAAA;AAAA;AAAA;AAAA,MAKQ,2BACN,cACA,QACA,WACM;AACN,YAAI,aAAa,YAAY,QAAW;AACtC,cAAI,OAAO,aAAa,YAAY,WAAW;AAC7C,mBAAO,KAAK,wCAAwC;AAAA,UACtD;AAAA,QACF;AAEA,YAAI,aAAa,mBAAmB,QAAW;AAC7C,cAAI,OAAO,aAAa,mBAAmB,YAAY,aAAa,iBAAiB,KAAK;AACxF,mBAAO,KAAK,oDAAoD;AAAA,UAClE,WAAW,aAAa,iBAAiB,KAAM;AAC7C,sBAAU,KAAK,8EAA8E;AAAA,UAC/F;AAAA,QACF;AAAA,MACF;AAAA;AAAA;AAAA;AAAA,MAKQ,2BACN,cACA,QACA,UACM;AACN,YAAI,aAAa,YAAY,QAAW;AACtC,cAAI,OAAO,aAAa,YAAY,WAAW;AAC7C,mBAAO,KAAK,wCAAwC;AAAA,UACtD;AAAA,QACF;AAEA,YAAI,aAAa,eAAe,QAAW;AACzC,cAAI,OAAO,aAAa,eAAe,YAAY,aAAa,aAAa,GAAG;AAC9E,mBAAO,KAAK,uDAAuD;AAAA,UACrE,WAAW,aAAa,aAAa,KAAK;AACxC,qBAAS,KAAK,qFAAqF;AAAA,UACrG;AAAA,QACF;AAAA,MACF;AAAA;AAAA;AAAA;AAAA,MAKQ,mBACN,YACA,QACA,UACM;AACN,YAAI,CAAC,MAAM,QAAQ,UAAU,GAAG;AAC9B,iBAAO,KAAK,6BAA6B;AACzC;AAAA,QACF;AAEA,mBAAW,QAAQ,CAAC,WAAW,UAAU;AACvC,cAAI,CAAC,aAAa,OAAO,cAAc,UAAU;AAC/C,mBAAO,KAAK,cAAc,KAAK,qBAAqB;AACpD;AAAA,UACF;AAEA,gBAAM,KAAK;AAGX,cAAI,CAAC,GAAG,MAAM;AACZ,mBAAO,KAAK,cAAc,KAAK,gCAAgC;AAAA,UACjE;AAEA,cAAI,GAAG,SAAS,QAAW;AACzB,mBAAO,KAAK,cAAc,KAAK,gCAAgC;AAAA,UACjE,WAAW,OAAO,GAAG,SAAS,UAAU;AACtC,mBAAO,KAAK,cAAc,KAAK,yBAAyB;AAAA,UAC1D,WAAWA,MAAK,WAAW,GAAG,IAAI,GAAG;AACnC,mBAAO,KAAK,cAAc,KAAK,iCAAiC,GAAG,IAAI,EAAE;AAAA,UAC3E;AAEA,cAAI,GAAG,YAAY,QAAW;AAC5B,mBAAO,KAAK,cAAc,KAAK,mCAAmC;AAAA,UACpE,WAAW,OAAO,GAAG,YAAY,WAAW;AAC1C,mBAAO,KAAK,cAAc,KAAK,6BAA6B;AAAA,UAC9D;AAEA,cAAI,CAAC,GAAG,QAAQ;AACd,mBAAO,KAAK,cAAc,KAAK,kCAAkC;AAAA,UACnE,OAAO;AACL,iBAAK,wBAAwB,GAAG,QAAQ,cAAc,KAAK,YAAY,QAAQ,QAAQ;AAAA,UACzF;AAAA,QACF,CAAC;AAAA,MACH;AAAA;AAAA;AAAA;AAAA,MAKQ,wBACN,QACA,QACA,QACA,WACM;AACN,YAAI,CAAC,UAAU,OAAO,WAAW,UAAU;AACzC,iBAAO,KAAK,GAAG,MAAM,oBAAoB;AACzC;AAAA,QACF;AAGA,YAAI,CAAC,MAAM,QAAQ,OAAO,OAAO,GAAG;AAClC,iBAAO,KAAK,GAAG,MAAM,2BAA2B;AAAA,QAClD,OAAO;AACL,iBAAO,QAAQ,QAAQ,CAAC,SAAkB,MAAc;AACtD,gBAAI,OAAO,YAAY,UAAU;AAC/B,qBAAO,KAA
K,GAAG,MAAM,YAAY,CAAC,oBAAoB;AAAA,YACxD;AAAA,UACF,CAAC;AAAA,QACH;AAGA,YAAI,CAAC,MAAM,QAAQ,OAAO,OAAO,GAAG;AAClC,iBAAO,KAAK,GAAG,MAAM,2BAA2B;AAAA,QAClD,OAAO;AACL,iBAAO,QAAQ,QAAQ,CAAC,SAAkB,MAAc;AACtD,gBAAI,OAAO,YAAY,UAAU;AAC/B,qBAAO,KAAK,GAAG,MAAM,YAAY,CAAC,oBAAoB;AAAA,YACxD;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAGO,IAAM,gBAAgB,IAAI,cAAc;AAAA;AAAA;;;ACvlB/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAAS,YAAY;AACrB,SAAS,iBAAiB;AAC1B,OAAOC,SAAQ;AACf,OAAOC,WAAU;AAUjB,eAAsB,UAAU,SAAmC;AACjE,MAAI;AACF,UAAM,SAASA,MAAK,KAAK,SAAS,MAAM;AACxC,UAAMD,IAAG,OAAO,MAAM;AACtB,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AASA,eAAsB,iBAAiB,SAAkC;AACvE,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAM,UAAU,mCAAmC;AAAA,MACpE,KAAK;AAAA,MACL,SAAS;AAAA;AAAA,IACX,CAAC;AACD,WAAO,OAAO,KAAK;AAAA,EACrB,SAAS,OAAO;AACd,UAAM,IAAI,MAAM,iCAAiC,KAAK,EAAE;AAAA,EAC1D;AACF;AASA,eAAsB,iBAAiB,SAAkC;AACvE,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAM,UAAU,sBAAsB;AAAA,MACvD,KAAK;AAAA,MACL,SAAS;AAAA,IACX,CAAC;AACD,WAAO,OAAO,KAAK;AAAA,EACrB,SAAS,OAAO;AACd,UAAM,IAAI,MAAM,iCAAiC,KAAK,EAAE;AAAA,EAC1D;AACF;AAWA,eAAsB,gBACpB,SACA,SACA,OACmB;AACnB,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAM;AAAA,MACvB,wBAAwB,OAAO,MAAM,KAAK;AAAA,MAC1C;AAAA,QACE,KAAK;AAAA,QACL,SAAS;AAAA;AAAA,MACX;AAAA,IACF;AAEA,UAAM,QAAQ,OACX,KAAK,EACL,MAAM,IAAI,EACV,OAAO,OAAO,EACd,IAAI,UAAQC,MAAK,KAAK,SAAS,IAAI,CAAC;AAEvC,WAAO;AAAA,EACT,SAAS,OAAO;AACd,UAAM,IAAI,MAAM,gCAAgC,KAAK,EAAE;AAAA,EACzD;AACF;AAUA,eAAsB,wBACpB,SACA,WACmB;AACnB,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAM;AAAA,MACvB,+CAA+C,SAAS;AAAA,MACxD;AAAA,QACE,KAAK;AAAA,QACL,SAAS;AAAA,MACX;AAAA,IACF;AAEA,UAAM,QAAQ,OACX,KAAK,EACL,MAAM,IAAI,EACV,OAAO,OAAO,EACd,IAAI,UAAQA,MAAK,KAAK,SAAS,IAAI,CAAC;AAEvC,WAAO;AAAA,EACT,SAAS,OAAO;AACd,UAAM,IAAI,MAAM,0CAA0C,KAAK,EAAE;AAAA,EACnE;AACF;AAYA,eAAsB,8BACpB,SACA,YACA,UACmB;AACnB,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAM;AAAA,MACvB,wBAAwB,UAAU,IAAI,QAAQ;AAAA,MAC9C;AAAA,QACE,KAAK;AAAA,QACL,SAAS;AAAA,MACX;AAAA,IACF;AAEA,UAAM,QAAQ,OACX,KAAK,EACL,MAAM,IAAI,EACV,OAAO,OAAO,EACd,IAAI,UAAQA,MAAK,KAAK,SAAS,IAAI,CAAC;AAEvC,WAAO;AAAA,EACT,SAAS,OAAO;AACd,UAAM,IAAI,MAAM,gDAAgD,KAAK,EAAE;AAAA,EACzE;AACF;AAOA,eAAsB,iBAAmC;AACvD,MAAI;AACF,UAAM,UAAU,iBAAiB,EAAE,SAAS,IAAK,CAAC;AAClD,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAjLA,IAKM;AALN;AAAA;AAAA;AAKA,IAAM,YAAY,UAAU,IAAI;AAAA;AAAA;;;ACLhC,OAAOC,SAAQ;AACf,OAAOC,WAAU;AAUjB,eAAsB,iBAAiB,WAAkC;AACvE,MAAI;AACF,UAAM,kBAAkBA,MAAK,KAAK,WAAW,YAAY;AACzD,UAAM,YAAY,KAAK,IAAI,EAAE,SAAS;AACtC,UAAMD,IAAG,UAAU,iBAAiB,WAAW,OAAO;AAAA,EACxD,SAAS,OAAO;AAEd,YAAQ,MAAM,0CAA0C,KAAK,EAAE;AAAA,EACjE;AACF;AASA,eAAsB,gBAAgB,WAAoC;AACxE,MAAI;AACF,UAAM,kBAAkBC,MAAK,KAAK,WAAW,YAAY;AACzD,UAAM,UAAU,MAAMD,IAAG,SAAS,iBAAiB,OAAO;AAC1D,UAAM,YAAY,SAAS,QAAQ,KAAK,GAAG,EAAE;AAC7C,WAAO,MAAM,SAAS,IAAI,IAAI;AAAA,EAChC,SAAS,OAAO;AAEd,WAAO;AAAA,EACT;AACF;AAvCA,IAGM;AAHN;AAAA;AAAA;AAGA,IAAM,eAAe;AAAA;AAAA;;;ACHrB,SAAS,YAAY;AACrB,OAAO,YAAY;AACnB,OAAOE,UAAQ;AACf,OAAOC,YAAU;AAUjB,eAAsB,2BACpB,SACA,QACmB;AACnB,QAAM,WAAqB,CAAC;AAG5B,aAAW,aAAa,OAAO,YAAY;AACzC,QAAI,CAAC,UAAU,SAAS;AACtB;AAAA,IACF;AAEA,UAAM,iBAAiB,MAAM,cAAc,SAAS,SAAS;AAC7D,aAAS,KAAK,GAAG,cAAc;AAAA,EACjC;AAEA,SAAO;AACT;AAKA,eAAe,cACb,SACA,WACmB;AACnB,QAAM,gBAAgBA,OAAK,KAAK,SAAS,UAAU,IAAI;AAGvD,QAAM,gBAAgBA,OAAK,KAAK,eAAe,YAAY;AAC3D,MAAI,KAAK,OAAO;AAEhB,MAAI;AACF,UAAM,mBAAmB,MAAMD,KAAG,SAAS,eAAe,OAAO;AACjE,SAAK,OAAO,EAAE,IAAI,gBAAgB;AAAA,EACpC,SAAS,GAAG;AAEV,UAAM,oBAAoBC,OAAK,KAAK,SAAS,YAAY;AACzD,QAAI;AACF,YAAM,mBAAmB,MAAMD,KAAG,SAAS,mBAAmB,OAAO;AACrE,WAAK,OAAO,EAAE,IAAI,gBAAgB;AAAA,IACpC,SAASE,IAAG;AAAA,IAEZ;AAAA,EACF;AAGA,KAAG,IAAI;AAAA,IACL,GAAG,UAAU,OAAO;AAAA,IACp
B;AAAA,EACF,CAAC;AAGD,QAAM,WAAqB,CAAC;AAE5B,aAAW,WAAW,UAAU,OAAO,SAAS;AAC9C,UAAM,QAAQ,MAAM,KAAK,SAAS;AAAA,MAChC,KAAK;AAAA,MACL,UAAU;AAAA;AAAA,MACV,OAAO;AAAA,MACP,QAAQ,UAAU,OAAO;AAAA,IAC3B,CAAC;AACD,aAAS,KAAK,GAAG,KAAK;AAAA,EACxB;AAGA,QAAM,cAAc,MAAM,KAAK,IAAI,IAAI,QAAQ,CAAC;AAGhD,SAAO,YACJ,OAAO,UAAQ,CAAC,GAAG,QAAQ,IAAI,CAAC,EAChC,IAAI,UAAQ;AAEX,WAAO,UAAU,SAAS,MACtB,OACAD,OAAK,KAAK,UAAU,MAAM,IAAI;AAAA,EACpC,CAAC;AACL;AAMA,eAAsB,aAAa,SAAyC;AAC1E,QAAM,EAAE,SAAS,kBAAkB,CAAC,GAAG,kBAAkB,CAAC,EAAE,IAAI;AAGhE,QAAM,gBAAgBA,OAAK,KAAK,SAAS,YAAY;AACrD,MAAI,KAAK,OAAO;AAEhB,MAAI;AACF,UAAM,mBAAmB,MAAMD,KAAG,SAAS,eAAe,OAAO;AACjE,SAAK,OAAO,EAAE,IAAI,gBAAgB;AAAA,EACpC,SAAS,GAAG;AAAA,EAEZ;AAGA,KAAG,IAAI;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,GAAG;AAAA,EACL,CAAC;AAGD,QAAM,WAAW,gBAAgB,SAAS,IACtC,kBACA,CAAC,mDAAmD;AAGxD,QAAM,WAAqB,CAAC;AAE5B,aAAW,WAAW,UAAU;AAC9B,UAAM,QAAQ,MAAM,KAAK,SAAS;AAAA,MAChC,KAAK;AAAA,MACL,UAAU;AAAA,MACV,OAAO;AAAA,MACP,QAAQ,CAAC,mBAAmB,SAAS;AAAA,IACvC,CAAC;AACD,aAAS,KAAK,GAAG,KAAK;AAAA,EACxB;AAGA,QAAM,cAAc,MAAM,KAAK,IAAI,IAAI,QAAQ,CAAC;AAGhD,SAAO,YAAY,OAAO,UAAQ;AAChC,UAAM,eAAeC,OAAK,SAAS,SAAS,IAAI;AAChD,WAAO,CAAC,GAAG,QAAQ,YAAY;AAAA,EACjC,CAAC;AACH;AAEO,SAAS,eAAe,UAA0B;AACvD,QAAM,MAAMA,OAAK,QAAQ,QAAQ,EAAE,YAAY;AAE/C,QAAM,cAAsC;AAAA,IAC1C,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,UAAU;AAAA,IACV,OAAO;AAAA,IACP,OAAO;AAAA,IACP,UAAU;AAAA,IACV,WAAW;AAAA,IACX,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,aAAa;AAAA,EACf;AAEA,SAAO,YAAY,GAAG,KAAK;AAC7B;AAxLA;AAAA;AAAA;AAAA;AAAA;;;ACkBO,SAAS,eACd,SACA,UACkB;AAClB,QAAM,UAA4B;AAAA,IAChC,WAAW,CAAC;AAAA,IACZ,SAAS,CAAC;AAAA,IACV,YAAY,CAAC;AAAA,EACf;AAEA,QAAM,iBAAiB,SAAS,YAAY;AAE5C,UAAQ,gBAAgB;AAAA,IACtB,KAAK;AAAA,IACL,KAAK;AACH,cAAQ,YAAY,mBAAmB,OAAO;AAC9C,cAAQ,UAAU,iBAAiB,OAAO;AAC1C,cAAQ,aAAa,oBAAoB,OAAO;AAChD;AAAA,IAEF,KAAK;AAAA,IACL,KAAK;AACH,cAAQ,YAAY,mBAAmB,OAAO;AAC9C,cAAQ,UAAU,iBAAiB,OAAO;AAC1C;AAAA,IAEF,KAAK;AAAA,IACL,KAAK;AACH,cAAQ,YAAY,uBAAuB,OAAO;AAClD,cAAQ,UAAU,qBAAqB,OAAO;AAC9C;AAAA,IAEF,KAAK;AACH,cAAQ,YAAY,oBAAoB,OAAO;AAC/C,cAAQ,UAAU,kBAAkB,OAAO;AAC3C,cAAQ,aAAa,qBAAqB,OAAO;AACjD;AAAA,IAEF,KAAK;AAEH,cAAQ,YAAY,oBAAoB,OAAO;AAC/C,cAAQ,UAAU,qBAAqB,OAAO;AAC9C;AAAA,IAEF,KAAK;AACH,cAAQ,YAAY,mBAAmB,OAAO;AAC9C,cAAQ,aAAa,oBAAoB,OAAO;AAChD;AAAA,IAEF,KAAK;AACH,cAAQ,YAAY,qBAAqB,OAAO;AAChD,cAAQ,UAAU,mBAAmB,OAAO;AAC5C,cAAQ,aAAa,sBAAsB,OAAO;AAClD;AAAA,IAEF,KAAK;AAAA,IACL,KAAK;AACH,cAAQ,YAAY,uBAAuB,OAAO;AAClD,cAAQ,UAAU,qBAAqB,OAAO;AAC9C,cAAQ,aAAa,wBAAwB,OAAO;AACpD;AAAA,IAEF,KAAK;AAAA,IACL,KAAK;AACH,cAAQ,YAAY,qBAAqB,OAAO;AAChD,cAAQ,UAAU,mBAAmB,OAAO;AAC5C;AAAA,IAEF,KAAK;AAAA,IACL,KAAK;AACH,cAAQ,YAAY,qBAAqB,OAAO;AAChD;AAAA,EACJ;AAEA,SAAO;AACT;AAGA,SAAS,mBAAmB,SAA2B;AACrD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,kBAAkB,QAAQ,SAAS,qCAAqC;AAC9E,aAAW,SAAS,iBAAiB;AACnC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAGA,QAAM,eAAe,QAAQ,SAAS,8DAA8D;AACpG,aAAW,SAAS,cAAc;AAChC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAGA,QAAM,gBAAgB,QAAQ,SAAS,wCAAwC;AAC/E,aAAW,SAAS,eAAe;AAEjC,QAAI,CAAC,CAAC,MAAM,OAAO,SAAS,UAAU,OAAO,EAAE,SAAS,MAAM,CAAC,CAAC,GAAG;AACjE,YAAM,IAAI,MAAM,CAAC,CAAC;AAAA,IACpB;AAAA,EACF;AAGA,QAAM,gBAAgB,QAAQ,SAAS,8CAA8C;AACrF,aAAW,SAAS,eAAe;AACjC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,mBAAmB,SAA2B;AACrD,SAAO,mBAAmB,OAAO;AACnC;AAEA,SAAS,iBAAiB,SAA2B;AACnD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,eAA
e,QAAQ,SAAS,8CAA8C;AACpF,aAAW,SAAS,cAAc;AAChC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,iBAAiB,SAA2B;AACnD,SAAO,iBAAiB,OAAO;AACjC;AAEA,SAAS,oBAAoB,SAA2B;AACtD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,mBAAmB,QAAQ,SAAS,kCAAkC;AAC5E,aAAW,SAAS,kBAAkB;AACpC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAGA,QAAM,cAAc,QAAQ,SAAS,iCAAiC;AACtE,aAAW,SAAS,aAAa;AAC/B,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAGA,SAAS,uBAAuB,SAA2B;AACzD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,kBAAkB,QAAQ,SAAS,mBAAmB;AAC5D,aAAW,SAAS,iBAAiB;AACnC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAGA,QAAM,eAAe,QAAQ,SAAS,2BAA2B;AACjE,aAAW,SAAS,cAAc;AAChC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,qBAAqB,SAA2B;AACvD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,eAAe,QAAQ,SAAS,2BAA2B;AACjE,aAAW,SAAS,cAAc;AAChC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAGA,SAAS,oBAAoB,SAA2B;AACtD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,kBAAkB,QAAQ,SAAS,wDAAwD;AACjG,aAAW,SAAS,iBAAiB;AACnC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,kBAAkB,SAA2B;AACpD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,eAAe,QAAQ,SAAS,gCAAgC;AACtE,aAAW,SAAS,cAAc;AAChC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,qBAAqB,SAA2B;AACvD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,mBAAmB,QAAQ,SAAS,oBAAoB;AAC9D,aAAW,SAAS,kBAAkB;AACpC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAGA,QAAM,eAAe,QAAQ,SAAS,gBAAgB;AACtD,aAAW,SAAS,cAAc;AAChC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAGA,SAAS,mBAAmB,SAA2B;AACrD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,kBAAkB,QAAQ,SAAS,4CAA4C;AACrF,aAAW,SAAS,iBAAiB;AACnC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,oBAAoB,SAA2B;AACtD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,mBAAmB,QAAQ,SAAS,gCAAgC;AAC1E,aAAW,SAAS,kBAAkB;AACpC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAGA,QAAM,gBAAgB,QAAQ,SAAS,6BAA6B;AACpE,aAAW,SAAS,eAAe;AACjC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAGA,SAAS,qBAAqB,SAA2B;AACvD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,gBAAgB,QAAQ,SAAS,gFAAgF;AACvH,aAAW,SAAS,eAAe;AACjC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,mBAAmB,SAA2B;AACrD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,eAAe,QAAQ,SAAS,8CAA8C;AACpF,aAAW,SAAS,cAAc;AAChC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,sBAAsB,SAA2B;AACxD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,mBAAmB,QAAQ,SAAS,kCAAkC;AAC5E,aAAW,SAAS,kBAAkB;AACpC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAGA,SAAS,uBAAuB,SAA2B;AACzD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,gBAAgB,QAAQ,SAAS,sGAAsG;AAC7I,aAAW,SAAS,eAAe;AACjC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,qBAAqB,SAA2B;AACvD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,eAAe,QAAQ,SAAS,uDAAuD;AAC7F,aAAW,SAAS,cAAc;AAChC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,wBAAwB,SAA2B;AAC1D,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,mBAAmB,QAAQ,SAAS,2CAA2C;AACrF,aAAW,SAAS,kBAAkB;AACpC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAGA,SAAS,qBAAqB,SAA2B;AACvD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,gBAAgB,QAAQ,SAAS,yBAAyB;AAChE,aAAW,SAAS,eAAe;AACjC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,mBAAmB,SAA2B;AACrD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,eAAe,QAAQ,SAAS,gBAAgB;AACtD,aAAW,SAAS,cAAc;AAChC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAGA,QAAM,gBAAgB,QAAQ,SAAS,iBAAiB;AACxD,aAAW,SAAS,eAAe;AACjC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAGA,SAAS,qBAAqB,SAA2B;AACvD,QAAM,QAAQ,oB
AAI,IAAY;AAG9B,QAAM,kBAAkB,QAAQ,SAAS,6BAA6B;AACtE,aAAW,SAAS,iBAAiB;AACnC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAGA,QAAM,gBAAgB,QAAQ,SAAS,4BAA4B;AACnE,aAAW,SAAS,eAAe;AACjC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAGA,QAAM,eAAe,QAAQ,SAAS,2BAA2B;AACjE,aAAW,SAAS,cAAc;AAChC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAGA,SAAS,oBAAoB,SAA2B;AACtD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,cAAc,QAAQ,MAAM,mCAAmC;AACrE,MAAI,CAAC,YAAa,QAAO,CAAC;AAE1B,QAAM,gBAAgB,YAAY,CAAC;AAGnC,QAAM,qBAAqB,cAAc,SAAS,iCAAiC;AACnF,aAAW,SAAS,oBAAoB;AACtC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAGA,QAAM,gBAAgB,cAAc,SAAS,wBAAwB;AACrE,aAAW,SAAS,eAAe;AACjC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAGA,SAAS,qBAAqB,SAA2B;AACvD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,cAAc,QAAQ,MAAM,mCAAmC;AACrE,MAAI,CAAC,YAAa,QAAO,CAAC;AAE1B,QAAM,gBAAgB,YAAY,CAAC;AAGnC,QAAM,YAAY,cAAc,MAAM,uBAAuB;AAC7D,MAAI,WAAW;AACb,UAAM,IAAI,UAAU,CAAC,CAAC;AAAA,EACxB;AAGA,QAAM,uBAAuB,cAAc,MAAM,sBAAsB;AACvE,MAAI,sBAAsB;AACxB,UAAM,IAAI,cAAc;AAAA,EAC1B;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAlcA;AAAA;AAAA;AAAA;AAAA;;;ACSO,SAAS,UACd,UACA,SACA,UAAwB,CAAC,GACZ;AACb,QAAM,EAAE,YAAY,IAAI,eAAe,GAAG,IAAI;AAE9C,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,QAAM,SAAsB,CAAC;AAC7B,QAAM,WAAW,eAAe,QAAQ;AAGxC,MAAI,MAAM,WAAW,KAAM,MAAM,WAAW,KAAK,MAAM,CAAC,EAAE,KAAK,MAAM,IAAK;AACxE,WAAO;AAAA,EACT;AAGA,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,YAAY,cAAc;AAC/D,UAAM,UAAU,KAAK,IAAI,IAAI,WAAW,MAAM,MAAM;AACpD,UAAM,aAAa,MAAM,MAAM,GAAG,OAAO;AACzC,UAAM,eAAe,WAAW,KAAK,IAAI;AAGzC,QAAI,aAAa,KAAK,EAAE,WAAW,GAAG;AACpC;AAAA,IACF;AAGA,UAAM,UAAU,eAAe,cAAc,QAAQ;AAErD,WAAO,KAAK;AAAA,MACV,SAAS;AAAA,MACT,UAAU;AAAA,QACR,MAAM;AAAA,QACN,WAAW,IAAI;AAAA,QACf;AAAA,QACA,MAAM;AAAA;AAAA,QACN;AAAA,QACA;AAAA,MACF;AAAA,IACF,CAAC;AAGD,QAAI,WAAW,MAAM,QAAQ;AAC3B;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AA1DA;AAAA;AAAA;AACA;AACA;AAAA;AAAA;;;ACFA,SAAS,UAAU,WAA2C;AAA9D,IASa;AATb;AAAA;AAAA;AAEA;AACA;AAGA,QAAI,oBAAoB;AACxB,QAAI,mBAAmB;AAEhB,IAAM,kBAAN,MAAkD;AAAA,MAC/C,YAA8C;AAAA,MACrC,YAAY;AAAA,MACrB,cAAoC;AAAA,MAE5C,MAAM,aAA4B;AAEhC,YAAI,KAAK,aAAa;AACpB,iBAAO,KAAK;AAAA,QACd;AAEA,YAAI,KAAK,WAAW;AAClB;AAAA,QACF;AAEA,aAAK,eAAe,YAAY;AAC9B,cAAI;AAEF,iBAAK,YAAY,MAAM,SAAS,sBAAsB,KAAK,SAAS;AAAA,UACtE,SAAS,OAAgB;AACvB,iBAAK,cAAc;AACnB,kBAAM,UAAU,OAAO,sCAAsC;AAAA,UAC/D;AAAA,QACF,GAAG;AAEH,eAAO,KAAK;AAAA,MACd;AAAA,MAEA,MAAM,MAAM,MAAqC;AAC/C,cAAM,KAAK,WAAW;AAEtB,YAAI,CAAC,KAAK,WAAW;AACnB,gBAAM,IAAI,eAAe,iCAAiC;AAAA,QAC5D;AAEA,YAAI;AACF,gBAAM,SAAS,MAAM,KAAK,UAAU,MAAM;AAAA,YACxC,SAAS;AAAA,YACT,WAAW;AAAA,UACb,CAAC;AAED,iBAAO,OAAO;AAAA,QAChB,SAAS,OAAgB;AACvB,gBAAM,UAAU,OAAO,gCAAgC,EAAE,YAAY,KAAK,OAAO,CAAC;AAAA,QACpF;AAAA,MACF;AAAA,MAEA,MAAM,WAAW,OAA0C;AACzD,cAAM,KAAK,WAAW;AAEtB,YAAI,CAAC,KAAK,WAAW;AACnB,gBAAM,IAAI,eAAe,iCAAiC;AAAA,QAC5D;AAEA,YAAI;AAGF,gBAAM,UAAU,MAAM,QAAQ;AAAA,YAC5B,MAAM,IAAI,UAAQ,KAAK,MAAM,IAAI,CAAC;AAAA,UACpC;AACA,iBAAO;AAAA,QACT,SAAS,OAAgB;AACvB,gBAAM,UAAU,OAAO,uCAAuC,EAAE,WAAW,MAAM,OAAO,CAAC;AAAA,QAC3F;AAAA,MACF;AAAA,IACF;AAAA;AAAA;;;AC1EA,IAQa;AARb;AAAA;AAAA;AAAA;AAQO,IAAM,sBAAsB;AAAA;AAAA;;;ACM5B,SAAS,mBAAmB,OAAkC;AACnE,MAAI,QAAQ,EAAK,QAAO;AACxB,MAAI,QAAQ,IAAK,QAAO;AACxB,MAAI,QAAQ,IAAK,QAAO;AACxB,SAAO;AACT;AAnBA;AAAA;AAAA;AAAA;AAAA;;;AC8CO,SAAS,oBAAoB,OAA4B;AAC9D,QAAM,QAAQ,MAAM,YAAY,EAAE,KAAK;AAIvC,MACE,MAAM,MAAM,sCAAsC,KAClD,MAAM,MAAM,eAAe,KAC3B,MAAM,MAAM,WAAW,GACvB;AACA,WAAO;AAAA,EACT;AAIA,MACE,MAAM,MAAM,wBAAwB,KACpC,MAAM,MAAM,sBAAsB,KAClC,MAAM,MAAM,YAAY,KACxB,MAAM,MAAM,eAAe,KAC3B,MAAM,MAAM,qCAAqC,GACjD;AACA,WAAO;AAAA,EACT;AAIA,MACE,MAAM,MAAM,iDAAiD,KAC7D,MAAM,MAAM,qBAAqB,KACjC,MAAM,MAAM,qBAAqB,GACjC;AACA,WAAO;AAAA,EACT;AA
IA,SAAO;AACT;AApFA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA,YAAY,aAAa;AACzB,OAAOE,YAAU;AACjB,OAAOC,SAAQ;AACf,OAAOC,aAAY;AAqBnB,SAAS,oBAAoB,UAA2B;AACtD,QAAM,QAAQ,SAAS,YAAY;AACnC,QAAM,WAAWF,OAAK,SAAS,QAAQ,EAAE,YAAY;AAGrD,MAAI,SAAS,WAAW,QAAQ,EAAG,QAAO;AAG1C,MAAI,SAAS,WAAW,WAAW,EAAG,QAAO;AAG7C,MAAI,SAAS,SAAS,KAAK,KAAK,SAAS,SAAS,MAAM,KAAK,SAAS,SAAS,WAAW,GAAG;AAC3F,WAAO;AAAA,EACT;AAGA,MACE,MAAM,SAAS,QAAQ,KACvB,MAAM,SAAS,iBAAiB,KAChC,MAAM,SAAS,QAAQ,KACvB,MAAM,SAAS,WAAW,GAC1B;AACA,WAAO;AAAA,EACT;AAGA,MACE,MAAM,SAAS,cAAc,KAC7B,MAAM,SAAS,UAAU,KACzB,MAAM,SAAS,QAAQ,GACvB;AACA,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AASA,SAAS,WAAW,UAA2B;AAC7C,QAAM,QAAQ,SAAS,YAAY;AAGnC,MACE,MAAM,SAAS,QAAQ,KACvB,MAAM,SAAS,SAAS,KACxB,MAAM,SAAS,aAAa,GAC5B;AACA,WAAO;AAAA,EACT;AAGA,MACE,MAAM,SAAS,QAAQ,KACvB,MAAM,SAAS,QAAQ,KACvB,MAAM,SAAS,QAAQ,KACvB,MAAM,SAAS,QAAQ,GACvB;AACA,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AASA,SAAS,cAAc,UAA2B;AAChD,QAAM,QAAQ,SAAS,YAAY;AAGnC,MACE,MAAM,SAAS,SAAS,KACxB,MAAM,SAAS,aAAa,KAC5B,MAAM,SAAS,WAAW,KAC1B,MAAM,SAAS,OAAO,GACtB;AACA,WAAO;AAAA,EACT;AAGA,MACE,MAAM,SAAS,QAAQ,KACvB,MAAM,SAAS,UAAU,KACzB,MAAM,SAAS,QAAQ,KACvB,MAAM,SAAS,UAAU,GACzB;AACA,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAWA,SAAS,mBACP,OACA,UACA,WACQ;AACR,QAAM,cAAc,MAAM,YAAY,EAAE,MAAM,KAAK;AACnD,QAAM,eAAe,SAAS,YAAY,EAAE,MAAM,GAAG;AAErD,MAAI,cAAc;AAGlB,aAAW,SAAS,aAAa;AAE/B,QAAI,MAAM,UAAU,EAAG;AAGvB,QAAI,aAAa,KAAK,SAAO,IAAI,SAAS,KAAK,CAAC,GAAG;AACjD,qBAAe;AAAA,IACjB;AAAA,EACF;AAEA,SAAO,YAAY;AACrB;AAYA,SAAS,uBACP,OACA,UACA,WACQ;AACR,QAAM,WAAWA,OAAK,SAAS,UAAUA,OAAK,QAAQ,QAAQ,CAAC,EAAE,YAAY;AAC7E,QAAM,cAAc,MAAM,YAAY,EAAE,MAAM,KAAK;AAEnD,MAAI,cAAc;AAGlB,aAAW,SAAS,aAAa;AAE/B,QAAI,MAAM,UAAU,EAAG;AAGvB,QAAI,aAAa,OAAO;AACtB,qBAAe;AAAA,IACjB,WAES,SAAS,SAAS,KAAK,GAAG;AACjC,qBAAe;AAAA,IACjB;AAAA,EACF;AAEA,SAAO,YAAY;AACrB;AAuBA,SAAS,uBACP,OACA,UACA,WACQ;AACR,MAAI,QAAQ;AAGZ,QAAM,WAAWA,OAAK,SAAS,UAAUA,OAAK,QAAQ,QAAQ,CAAC,EAAE,YAAY;AAC7E,QAAM,cAAc,MAAM,YAAY,EAAE,MAAM,KAAK;AAEnD,aAAW,SAAS,aAAa;AAC/B,QAAI,MAAM,UAAU,EAAG;AAGvB,QAAI,aAAa,OAAO;AACtB,eAAS;AAAA,IACX,WAES,SAAS,SAAS,KAAK,GAAG;AACjC,eAAS;AAAA,IACX;AAAA,EACF;AAGA,UAAQ,mBAAmB,OAAO,UAAU,KAAK;AAIjD,MAAI,WAAW,QAAQ,GAAG;AACxB,aAAS;AAAA,EACX;AAEA,SAAO;AACT;AAmBA,SAAS,yBACP,OACA,UACA,WACQ;AACR,MAAI,QAAQ;AAGZ,MAAI,oBAAoB,QAAQ,GAAG;AACjC,aAAS;AAGT,UAAM,QAAQ,SAAS,YAAY;AACnC,QACE,MAAM,SAAS,cAAc,KAC7B,MAAM,SAAS,UAAU,KACzB,MAAM,SAAS,MAAM,GACrB;AACA,eAAS;AAAA,IACX;AAAA,EACF;AAGA,MAAI,cAAc,QAAQ,GAAG;AAC3B,aAAS;AAAA,EACX;AAGA,QAAM,WAAWA,OAAK,SAAS,UAAUA,OAAK,QAAQ,QAAQ,CAAC,EAAE,YAAY;AAC7E,QAAM,cAAc,MAAM,YAAY,EAAE,MAAM,KAAK;AAEnD,aAAW,SAAS,aAAa;AAC/B,QAAI,MAAM,UAAU,EAAG;AAGvB,QAAI,SAAS,SAAS,KAAK,GAAG;AAC5B,eAAS;AAAA,IACX;AAAA,EACF;AAGA,QAAM,eAAe,SAAS,YAAY,EAAE,MAAMA,OAAK,GAAG;AAC1D,aAAW,SAAS,aAAa;AAC/B,QAAI,MAAM,UAAU,EAAG;AAEvB,eAAW,WAAW,cAAc;AAClC,UAAI,QAAQ,SAAS,KAAK,GAAG;AAC3B,iBAAS;AACT;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAqBA,SAAS,6BACP,OACA,UACA,WACQ;AACR,MAAI,QAAQ;AAGZ,UAAQ,uBAAuB,OAAO,UAAU,KAAK;AAGrD,UAAQ,mBAAmB,OAAO,UAAU,KAAK;AAGjD,MAAI,WAAW,QAAQ,GAAG;AACxB,aAAS;AAAA,EACX;AAEA,SAAO;AACT;AAeA,SAAS,uBACP,OACA,UACA,WACQ;AACR,MAAI,CAAC,OAAO;AACV,WAAO;AAAA,EACT;AAGA,QAAM,SAAS,oBAAoB,KAAK;AAGxC,UAAQ,QAAQ;AAAA,IACd;AACE,aAAO,uBAAuB,OAAO,UAAU,SAAS;AAAA,IAE1D;AACE,aAAO,yBAAyB,OAAO,UAAU,SAAS;AAAA,IAE5D;AACE,aAAO,6BAA6B,OAAO,UAAU,SAAS;AAAA,IAEhE;AAEE,aAAO,6BAA6B,OAAO,UAAU,SAAS;AAAA,EAClE;AACF;AAvZA,IA6aa;AA7ab;AAAA;AAAA;AAMA;AACA;AACA;AACA;AACA;AACA;AAkaO,IAAM,WAAN,MAAM,UAAsC;AAAA,MACzC,KAA+B;AAAA,MAC/B,QAA6B;AAAA,MACrB;AAAA,MACC,YAAY;AAAA,MACrB,mBAA2B;AAAA,MAC3B,iBAAyB;AAAA,MAEjC,YAAY,aAAqB;AAE/B,cAAM,cAAcA,OAAK,SAAS,WAAW;AAI7C,cAA
M,WAAWE,QACd,WAAW,KAAK,EAChB,OAAO,WAAW,EAClB,OAAO,KAAK,EACZ,UAAU,GAAG,CAAC;AAEjB,aAAK,SAASF,OAAK;AAAA,UACjBC,IAAG,QAAQ;AAAA,UACX;AAAA,UACA;AAAA,UACA,GAAG,WAAW,IAAI,QAAQ;AAAA,QAC5B;AAAA,MACF;AAAA,MAEA,MAAM,aAA4B;AAChC,YAAI;AACF,eAAK,KAAK,MAAc,gBAAQ,KAAK,MAAM;AAE3C,cAAI;AACF,iBAAK,QAAQ,MAAM,KAAK,GAAG,UAAU,KAAK,SAAS;AAAA,UACrD,QAAQ;AAGN,iBAAK,QAAQ;AAAA,UACf;AAGA,cAAI;AACF,iBAAK,iBAAiB,MAAM,gBAAgB,KAAK,MAAM;AAAA,UACzD,QAAQ;AAEN,iBAAK,iBAAiB;AAAA,UACxB;AAAA,QACF,SAAS,OAAgB;AACvB,gBAAM,UAAU,OAAO,wCAAwC,EAAE,QAAQ,KAAK,OAAO,CAAC;AAAA,QACxF;AAAA,MACF;AAAA,MAEA,MAAM,YACJ,SACA,WACA,UACe;AACf,YAAI,CAAC,KAAK,IAAI;AACZ,gBAAM,IAAI,cAAc,iCAAiC;AAAA,QAC3D;AAEA,YAAI,QAAQ,WAAW,UAAU,UAAU,QAAQ,WAAW,SAAS,QAAQ;AAC7E,gBAAM,IAAI,cAAc,qEAAqE;AAAA,YAC3F,eAAe,QAAQ;AAAA,YACvB,iBAAiB,UAAU;AAAA,YAC3B,gBAAgB,SAAS;AAAA,UAC3B,CAAC;AAAA,QACH;AAGA,YAAI,QAAQ,WAAW,GAAG;AACxB;AAAA,QACF;AAGA,YAAI,QAAQ,SAAS,0BAA0B;AAE7C,mBAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK,0BAA0B;AACjE,kBAAM,eAAe,QAAQ,MAAM,GAAG,KAAK,IAAI,IAAI,0BAA0B,QAAQ,MAAM,CAAC;AAC5F,kBAAM,gBAAgB,UAAU,MAAM,GAAG,KAAK,IAAI,IAAI,0BAA0B,QAAQ,MAAM,CAAC;AAC/F,kBAAM,gBAAgB,SAAS,MAAM,GAAG,KAAK,IAAI,IAAI,0BAA0B,QAAQ,MAAM,CAAC;AAE9F,kBAAM,KAAK,qBAAqB,cAAc,eAAe,aAAa;AAAA,UAC5E;AAAA,QACF,OAAO;AACL,gBAAM,KAAK,qBAAqB,SAAS,WAAW,QAAQ;AAAA,QAC9D;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA,MAMA,MAAc,qBACZ,SACA,WACA,UACe;AAQf,cAAM,QAA0B,CAAC,EAAE,SAAS,WAAW,SAAS,CAAC;AACjE,cAAM,gBAAkC,CAAC;AAGzC,eAAO,MAAM,SAAS,GAAG;AACvB,gBAAM,QAAQ,MAAM,MAAM;AAE1B,cAAI;AACF,kBAAM,UAAU,MAAM,QAAQ,IAAI,CAAC,QAAQ,OAAO;AAAA,cAChD,QAAQ,MAAM,KAAK,MAAM;AAAA,cACzB,SAAS,MAAM,SAAS,CAAC;AAAA,cACzB,MAAM,MAAM,UAAU,CAAC,EAAE;AAAA,cACzB,WAAW,MAAM,UAAU,CAAC,EAAE;AAAA,cAC9B,SAAS,MAAM,UAAU,CAAC,EAAE;AAAA,cAC5B,MAAM,MAAM,UAAU,CAAC,EAAE;AAAA,cACzB,UAAU,MAAM,UAAU,CAAC,EAAE;AAAA;AAAA,cAE7B,eAAgB,MAAM,UAAU,CAAC,EAAE,SAAS,aAAa,MAAM,UAAU,CAAC,EAAE,QAAQ,UAAU,SAAS,IAAK,MAAM,UAAU,CAAC,EAAE,QAAQ,YAAY,CAAC,EAAE;AAAA,cACtJ,YAAa,MAAM,UAAU,CAAC,EAAE,SAAS,WAAW,MAAM,UAAU,CAAC,EAAE,QAAQ,QAAQ,SAAS,IAAK,MAAM,UAAU,CAAC,EAAE,QAAQ,UAAU,CAAC,EAAE;AAAA,cAC7I,gBAAiB,MAAM,UAAU,CAAC,EAAE,SAAS,cAAc,MAAM,UAAU,CAAC,EAAE,QAAQ,WAAW,SAAS,IAAK,MAAM,UAAU,CAAC,EAAE,QAAQ,aAAa,CAAC,EAAE;AAAA,YAC5J,EAAE;AAGF,gBAAI,CAAC,KAAK,OAAO;AAEf,mBAAK,QAAQ,MAAM,KAAK,GAAI,YAAY,KAAK,WAAW,OAAO;AAAA,YACjE,OAAO;AACL,oBAAM,KAAK,MAAM,IAAI,OAAO;AAAA,YAC9B;AAAA,UACF,SAAS,OAAO;AAEd,gBAAI,MAAM,QAAQ,SAAS,0BAA0B;AACnD,oBAAM,OAAO,KAAK,MAAM,MAAM,QAAQ,SAAS,CAAC;AAGhD,oBAAM,KAAK;AAAA,gBACT,SAAS,MAAM,QAAQ,MAAM,GAAG,IAAI;AAAA,gBACpC,WAAW,MAAM,UAAU,MAAM,GAAG,IAAI;AAAA,gBACxC,UAAU,MAAM,SAAS,MAAM,GAAG,IAAI;AAAA,cACxC,CAAC;AACD,oBAAM,KAAK;AAAA,gBACT,SAAS,MAAM,QAAQ,MAAM,IAAI;AAAA,gBACjC,WAAW,MAAM,UAAU,MAAM,IAAI;AAAA,gBACrC,UAAU,MAAM,SAAS,MAAM,IAAI;AAAA,cACrC,CAAC;AAAA,YACH,OAAO;AAEL,4BAAc,KAAK,KAAK;AAAA,YAC1B;AAAA,UACF;AAAA,QACF;AAGA,YAAI,cAAc,SAAS,GAAG;AAC5B,gBAAM,cAAc,cAAc,OAAO,CAAC,KAAK,UAAU,MAAM,MAAM,QAAQ,QAAQ,CAAC;AACtF,gBAAM,IAAI;AAAA,YACR,oBAAoB,WAAW;AAAA,YAC/B;AAAA,cACE,eAAe,cAAc;AAAA,cAC7B,cAAc;AAAA,cACd,YAAY,cAAc,CAAC,EAAE,UAAU,CAAC,EAAE;AAAA,YAC5C;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,MAEA,MAAM,OACJ,aACA,QAAgB,GAChB,OACyB;AACzB,YAAI,CAAC,KAAK,OAAO;AACf,gBAAM,IAAI,cAAc,iCAAiC;AAAA,QAC3D;AAEA,YAAI;AAEF,gBAAM,UAAU,MAAM,KAAK,MACxB,OAAO,MAAM,KAAK,WAAW,CAAC,EAC9B,MAAM,QAAQ,EAAE,EAChB,QAAQ;AAGX,gBAAM,WAAY,QACf;AAAA,YAAO,CAAC,MACP,EAAE,WACF,EAAE,QAAQ,KAAK,EAAE,SAAS,KAC1B,EAAE,QACF,EAAE,KAAK,SAAS;AAAA,UAClB,EACC,IAAI,CAAC,MAAgB;AACpB,kBAAM,YAAY,EAAE,aAAa;AACjC,kBAAM,eAAe,uBAAuB,OAAO,EAAE,MAAM,SAAS;AAEpE,mBAAO;AAAA,cACL,SAAS,EAAE;AAAA,cACX,UAAU;AAAA,gBACR,MAAM,EAAE;AAAA,gBACR,WAAW,EAAE;AAAA,gBACb,SAAS,EAAE;AAAA,gBACX,M
AAM,EAAE;AAAA,gBACR,UAAU,EAAE;AAAA,cACd;AAAA,cACA,OAAO;AAAA,cACP,WAAW,mBAAmB,YAAY;AAAA,YAC5C;AAAA,UACF,CAAC,EACA,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK,EAChC,MAAM,GAAG,KAAK;AAEjB,iBAAO;AAAA,QACT,SAAS,OAAO;AACd,gBAAM,WAAW,OAAO,KAAK;AAG7B,cAAI,SAAS,SAAS,YAAY,KAAK,SAAS,SAAS,QAAQ,GAAG;AAElE,gBAAI;AACF,oBAAM,KAAK,WAAW;AAGtB,oBAAM,UAAU,MAAM,KAAK,MACxB,OAAO,MAAM,KAAK,WAAW,CAAC,EAC9B,MAAM,QAAQ,EAAE,EAChB,QAAQ;AAEX,qBAAQ,QACL;AAAA,gBAAO,CAAC,MACP,EAAE,WACF,EAAE,QAAQ,KAAK,EAAE,SAAS,KAC1B,EAAE,QACF,EAAE,KAAK,SAAS;AAAA,cAClB,EACC,IAAI,CAAC,MAAgB;AACpB,sBAAM,YAAY,EAAE,aAAa;AACjC,sBAAM,eAAe,uBAAuB,OAAO,EAAE,MAAM,SAAS;AAEpE,uBAAO;AAAA,kBACL,SAAS,EAAE;AAAA,kBACX,UAAU;AAAA,oBACR,MAAM,EAAE;AAAA,oBACR,WAAW,EAAE;AAAA,oBACb,SAAS,EAAE;AAAA,oBACX,MAAM,EAAE;AAAA,oBACR,UAAU,EAAE;AAAA,kBACd;AAAA,kBACA,OAAO;AAAA,kBACP,WAAW,mBAAmB,YAAY;AAAA,gBAC5C;AAAA,cACF,CAAC,EACA,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK,EAChC,MAAM,GAAG,KAAK;AAAA,YACnB,SAAS,YAAqB;AAC5B,oBAAM,IAAI;AAAA,gBACR;AAAA,gBACA,EAAE,eAAe,WAAW;AAAA,cAC9B;AAAA,YACF;AAAA,UACF;AAEA,gBAAM,UAAU,OAAO,kCAAkC;AAAA,QAC3D;AAAA,MACF;AAAA,MAEA,MAAM,eAAe,SAIO;AAC1B,YAAI,CAAC,KAAK,OAAO;AACf,gBAAM,IAAI,cAAc,iCAAiC;AAAA,QAC3D;AAEA,cAAM,EAAE,UAAU,SAAS,QAAQ,IAAI,IAAI;AAE3C,YAAI;AAGF,gBAAM,aAAa,MAAM,mBAAmB,EAAE,KAAK,CAAC;AACpD,gBAAM,QAAQ,KAAK,MAAM,OAAO,UAAU,EACvC,MAAM,YAAY,EAClB,MAAM,KAAK,IAAI,QAAQ,GAAG,GAAG,CAAC;AAEjC,gBAAM,UAAU,MAAM,MAAM,QAAQ;AAGpC,cAAI,WAAY,QAAkC;AAAA,YAAO,CAAC,MACxD,EAAE,WACF,EAAE,QAAQ,KAAK,EAAE,SAAS,KAC1B,EAAE,QACF,EAAE,KAAK,SAAS;AAAA,UAClB;AAGA,cAAI,UAAU;AACZ,uBAAW,SAAS;AAAA,cAAO,CAAC,MAC1B,EAAE,YAAY,EAAE,SAAS,YAAY,MAAM,SAAS,YAAY;AAAA,YAClE;AAAA,UACF;AAGA,cAAI,SAAS;AACX,kBAAM,QAAQ,IAAI,OAAO,SAAS,GAAG;AACrC,uBAAW,SAAS;AAAA,cAAO,CAAC,MAC1B,MAAM,KAAK,EAAE,OAAO,KAAK,MAAM,KAAK,EAAE,IAAI;AAAA,YAC5C;AAAA,UACF;AAEA,iBAAO,SAAS,MAAM,GAAG,KAAK,EAAE,IAAI,CAAC,MAAgB;AACnD,kBAAM,QAAQ;AACd,mBAAO;AAAA,cACL,SAAS,EAAE;AAAA,cACX,UAAU;AAAA,gBACR,MAAM,EAAE;AAAA,gBACR,WAAW,EAAE;AAAA,gBACb,SAAS,EAAE;AAAA,gBACX,MAAM,EAAE;AAAA,gBACR,UAAU,EAAE;AAAA,cACd;AAAA,cACA;AAAA,cACA,WAAW,mBAAmB,KAAK;AAAA,YACrC;AAAA,UACF,CAAC;AAAA,QACH,SAAS,OAAO;AACd,gBAAM,UAAU,OAAO,4BAA4B;AAAA,QACrD;AAAA,MACF;AAAA,MAEA,MAAM,aAAa,SAKS;AAC1B,YAAI,CAAC,KAAK,OAAO;AACf,gBAAM,IAAI,cAAc,iCAAiC;AAAA,QAC3D;AAEA,cAAM,EAAE,UAAU,SAAS,YAAY,QAAQ,GAAG,IAAI;AAEtD,YAAI;AAEF,gBAAM,aAAa,MAAM,mBAAmB,EAAE,KAAK,CAAC;AACpD,gBAAM,QAAQ,KAAK,MAAM,OAAO,UAAU,EACvC,MAAM,YAAY,EAClB,MAAM,KAAK,IAAI,QAAQ,IAAI,GAAG,CAAC;AAElC,gBAAM,UAAU,MAAM,MAAM,QAAQ;AAGpC,cAAI,WAAY,QAAkC,OAAO,CAAC,MAAgB;AAExE,gBAAI,CAAC,EAAE,WAAW,EAAE,QAAQ,KAAK,EAAE,WAAW,GAAG;AAC/C,qBAAO;AAAA,YACT;AACA,gBAAI,CAAC,EAAE,QAAQ,EAAE,KAAK,WAAW,GAAG;AAClC,qBAAO;AAAA,YACT;AAGA,gBAAI,aAAa,CAAC,EAAE,YAAY,EAAE,SAAS,YAAY,MAAM,SAAS,YAAY,IAAI;AACpF,qBAAO;AAAA,YACT;AAGA,kBAAM,UAAU,eAAe,aAAc,EAAE,iBAAiB,CAAC,IAClD,eAAe,UAAW,EAAE,cAAc,CAAC,IAC3C,eAAe,cAAe,EAAE,kBAAkB,CAAC,IACnD,CAAC,GAAI,EAAE,iBAAiB,CAAC,GAAI,GAAI,EAAE,cAAc,CAAC,GAAI,GAAI,EAAE,kBAAkB,CAAC,CAAE;AAGhG,gBAAI,QAAQ,WAAW,GAAG;AACxB,qBAAO;AAAA,YACT;AAGA,gBAAI,SAAS;AACX,oBAAM,QAAQ,IAAI,OAAO,SAAS,GAAG;AACrC,qBAAO,QAAQ,KAAK,CAAC,MAAc,MAAM,KAAK,CAAC,CAAC;AAAA,YAClD;AAEA,mBAAO;AAAA,UACT,CAAC;AAED,iBAAO,SAAS,MAAM,GAAG,KAAK,EAAE,IAAI,CAAC,MAAgB;AACnD,kBAAM,QAAQ;AACd,mBAAO;AAAA,cACL,SAAS,EAAE;AAAA,cACX,UAAU;AAAA,gBACR,MAAM,EAAE;AAAA,gBACR,WAAW,EAAE;AAAA,gBACb,SAAS,EAAE;AAAA,gBACX,MAAM,EAAE;AAAA,gBACR,UAAU,EAAE;AAAA,gBACZ,SAAS;AAAA,kBACP,WAAW,EAAE,iBAAiB,CAAC;AAAA,kBAC/B,SAAS,EAAE,cAAc,CAAC;AAAA,kBAC1B,YAAY,EAAE,kBAAkB,CAAC;AAAA,gBACnC;AAAA,cACF;AAAA,cACA;AAAA,cACA,WAAW,mBAAmB,KAAK;AAAA,YACrC;AAAA,UACF,CAAC;AAAA,QACH,SAAS,OAAO;AACd,gBAAM
,UAAU,OAAO,yBAAyB;AAAA,QAClD;AAAA,MACF;AAAA,MAEA,MAAM,QAAuB;AAC3B,YAAI,CAAC,KAAK,IAAI;AACZ,gBAAM,IAAI,cAAc,iCAAiC;AAAA,QAC3D;AAEA,YAAI;AAEF,cAAI,KAAK,OAAO;AACd,kBAAM,KAAK,GAAG,UAAU,KAAK,SAAS;AAAA,UACxC;AAEA,eAAK,QAAQ;AAAA,QACf,SAAS,OAAO;AACd,gBAAM,UAAU,OAAO,iCAAiC;AAAA,QAC1D;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,MAAM,aAAa,UAAiC;AAClD,YAAI,CAAC,KAAK,OAAO;AACf,gBAAM,IAAI,cAAc,iCAAiC;AAAA,QAC3D;AAEA,YAAI;AAEF,gBAAM,KAAK,MAAM,OAAO,WAAW,QAAQ,GAAG;AAAA,QAChD,SAAS,OAAO;AACd,gBAAM,UAAU,OAAO,4CAA4C;AAAA,QACrE;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAWA,MAAM,WACJ,UACA,SACA,WACA,UACe;AACf,YAAI,CAAC,KAAK,OAAO;AACf,gBAAM,IAAI,cAAc,iCAAiC;AAAA,QAC3D;AAEA,YAAI;AAEF,gBAAM,KAAK,aAAa,QAAQ;AAGhC,cAAI,QAAQ,SAAS,GAAG;AACtB,kBAAM,KAAK,YAAY,SAAS,WAAW,QAAQ;AAAA,UACrD;AAGA,gBAAM,iBAAiB,KAAK,MAAM;AAAA,QACpC,SAAS,OAAO;AACd,gBAAM,UAAU,OAAO,0CAA0C;AAAA,QACnE;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,MAAM,eAAiC;AACrC,cAAM,MAAM,KAAK,IAAI;AAGrB,YAAI,MAAM,KAAK,mBAAmB,KAAM;AACtC,iBAAO;AAAA,QACT;AAEA,aAAK,mBAAmB;AAExB,YAAI;AACF,gBAAM,UAAU,MAAM,gBAAgB,KAAK,MAAM;AAEjD,cAAI,UAAU,KAAK,gBAAgB;AACjC,iBAAK,iBAAiB;AACtB,mBAAO;AAAA,UACT;AAEA,iBAAO;AAAA,QACT,SAAS,OAAO;AAEd,iBAAO;AAAA,QACT;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA,MAAM,YAA2B;AAC/B,YAAI;AAEF,eAAK,QAAQ;AACb,eAAK,KAAK;AAGV,gBAAM,KAAK,WAAW;AAAA,QACxB,SAAS,OAAO;AACd,gBAAM,UAAU,OAAO,wCAAwC;AAAA,QACjE;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA,oBAA4B;AAC1B,eAAO,KAAK;AAAA,MACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA,iBAAyB;AACvB,YAAI,KAAK,mBAAmB,GAAG;AAC7B,iBAAO;AAAA,QACT;AACA,eAAO,IAAI,KAAK,KAAK,cAAc,EAAE,eAAe;AAAA,MACtD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,MAAM,UAA4B;AAChC,YAAI,CAAC,KAAK,OAAO;AACf,iBAAO;AAAA,QACT;AAEA,YAAI;AACF,gBAAM,QAAQ,MAAM,KAAK,MAAM,UAAU;AAGzC,cAAI,UAAU,GAAG;AACf,mBAAO;AAAA,UACT;AAIA,gBAAM,SAAS,MAAM,KAAK,MACvB,OAAO,MAAM,mBAAmB,EAAE,KAAK,CAAC,CAAC,EACzC,MAAM,KAAK,IAAI,OAAO,CAAC,CAAC,EACxB,QAAQ;AAEX,gBAAM,cAAe,OAAiC;AAAA,YAAK,CAAC,MAC1D,EAAE,WACF,EAAE,QAAQ,KAAK,EAAE,SAAS;AAAA,UAC5B;AAEA,iBAAO;AAAA,QACT,QAAQ;AAEN,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,MAEA,aAAa,KAAK,aAAwC;AACxD,cAAM,KAAK,IAAI,UAAS,WAAW;AACnC,cAAM,GAAG,WAAW;AACpB,eAAO;AAAA,MACT;AAAA,IACF;AAAA;AAAA;;;ACngCA,SAAS,iBAAAE,sBAAqB;AAC9B,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,WAAAC,UAAS,QAAAC,aAAY;AAoCvB,SAAS,oBAA4B;AAC1C,SAAOC,aAAY;AACrB;AAxCA,IAeMC,aACAC,YACAC,UAEFH;AAnBJ,IAAAI,gBAAA;AAAA;AAAA;AAeA,IAAMH,cAAaJ,eAAc,YAAY,GAAG;AAChD,IAAMK,aAAYJ,SAAQG,WAAU;AACpC,IAAME,WAAUP,eAAc,YAAY,GAAG;AAI7C,QAAI;AAEF,MAAAI,eAAcG,SAAQJ,MAAKG,YAAW,iBAAiB,CAAC;AAAA,IAC1D,QAAQ;AACN,UAAI;AAEF,QAAAF,eAAcG,SAAQJ,MAAKG,YAAW,oBAAoB,CAAC;AAAA,MAC7D,QAAQ;AAEN,gBAAQ,KAAK,qEAAqE;AAClF,QAAAF,eAAc,EAAE,SAAS,gBAAgB;AAAA,MAC3C;AAAA,IACF;AAAA;AAAA;;;ACjCA;AAAA;AAAA;AAAA;AAAA,OAAOK,UAAQ;AACf,OAAOC,YAAU;AADjB,IAMM,eA8BO;AApCb;AAAA;AAAA;AAEA;AAEA,IAAAC;AAEA,IAAM,gBAAgB;AA8Bf,IAAM,kBAAN,MAAsB;AAAA,MACnB;AAAA,MACA;AAAA;AAAA;AAAA;AAAA;AAAA,MAMA,aAAa,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,MAMrC,YAAY,WAAmB;AAC7B,aAAK,YAAY;AACjB,aAAK,eAAeD,OAAK,KAAK,WAAW,aAAa;AAAA,MACxD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAWA,MAAM,OAAsC;AAC1C,YAAI;AACF,gBAAM,UAAU,MAAMD,KAAG,SAAS,KAAK,cAAc,OAAO;AAC5D,gBAAM,WAAW,KAAK,MAAM,OAAO;AAGnC,cAAI,SAAS,kBAAkB,sBAAsB;AACnD,oBAAQ;AAAA,cACN,wBAAwB,SAAS,aAAa,kCAAkC,oBAAoB;AAAA,YACtG;AACA,oBAAQ,MAAM,iDAAiD;AAG/D,kBAAM,KAAK,MAAM;AACjB,mBAAO;AAAA,UACT;AAEA,iBAAO;AAAA,QACT,SAAS,OAAO;AAEd,cAAK,MAAgC,SAAS,UAAU;AACtD,mBAAO;AAAA,UACT;AAGA,kBAAQ,MAAM,4CAA4C,KAAK,EAAE;AACjE,iBAAO;AAAA,QACT;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,MAAM,KAAK,UAAwC;AACjD,YAAI;AAEF,gBAAMA,KAAG,MAAM,KAAK,W
AAW,EAAE,WAAW,KAAK,CAAC;AAGlD,gBAAM,iBAAgC;AAAA,YACpC,GAAG;AAAA,YACH,eAAe;AAAA,YACf,aAAa,kBAAkB;AAAA,YAC/B,aAAa,KAAK,IAAI;AAAA,UACxB;AAEA,gBAAM,UAAU,KAAK,UAAU,gBAAgB,MAAM,CAAC;AACtD,gBAAMA,KAAG,UAAU,KAAK,cAAc,SAAS,OAAO;AAAA,QACxD,SAAS,OAAO;AAEd,kBAAQ,MAAM,4CAA4C,KAAK,EAAE;AAAA,QACnE;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MASA,MAAM,WAAW,UAAkB,OAAiC;AAElE,aAAK,aAAa,KAAK,WAAW,KAAK,YAAY;AACjD,gBAAM,WAAW,MAAM,KAAK,KAAK,KAAK,KAAK,YAAY;AACvD,mBAAS,MAAM,QAAQ,IAAI;AAC3B,gBAAM,KAAK,KAAK,QAAQ;AAAA,QAC1B,CAAC,EAAE,MAAM,WAAS;AAChB,kBAAQ,MAAM,wCAAwC,QAAQ,KAAK,KAAK,EAAE;AAE1E,iBAAO;AAAA,QACT,CAAC;AAGD,cAAM,KAAK;AAAA,MACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAWA,MAAM,WAAW,UAAiC;AAEhD,aAAK,aAAa,KAAK,WAAW,KAAK,YAAY;AACjD,gBAAM,WAAW,MAAM,KAAK,KAAK;AACjC,cAAI,CAAC,UAAU;AAEb;AAAA,UACF;AAEA,iBAAO,SAAS,MAAM,QAAQ;AAC9B,gBAAM,KAAK,KAAK,QAAQ;AAAA,QAC1B,CAAC,EAAE,MAAM,WAAS;AAChB,kBAAQ,MAAM,8CAA8C,QAAQ,KAAK,KAAK,EAAE;AAEhF,iBAAO;AAAA,QACT,CAAC;AAGD,cAAM,KAAK;AAAA,MACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,MAAM,YAAY,SAAqC;AAErD,aAAK,aAAa,KAAK,WAAW,KAAK,YAAY;AACjD,gBAAM,WAAW,MAAM,KAAK,KAAK,KAAK,KAAK,YAAY;AAEvD,qBAAW,SAAS,SAAS;AAC3B,qBAAS,MAAM,MAAM,QAAQ,IAAI;AAAA,UACnC;AAEA,gBAAM,KAAK,KAAK,QAAQ;AAAA,QAC1B,CAAC,EAAE,MAAM,WAAS;AAChB,kBAAQ,MAAM,wCAAwC,QAAQ,MAAM,WAAW,KAAK,EAAE;AAEtF,iBAAO;AAAA,QACT,CAAC;AAGD,cAAM,KAAK;AAAA,MACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,MAAM,eAAe,UAAmC;AAEtD,aAAK,aAAa,KAAK,WAAW,KAAK,YAAY;AACjD,gBAAM,WAAW,MAAM,KAAK,KAAK,KAAK,KAAK,YAAY;AAEvD,mBAAS,WAAW;AACpB,gBAAM,KAAK,KAAK,QAAQ;AAAA,QAC1B,CAAC,EAAE,MAAM,WAAS;AAChB,kBAAQ,MAAM,kDAAkD,KAAK,EAAE;AAEvE,iBAAO;AAAA,QACT,CAAC;AAGD,cAAM,KAAK;AAAA,MACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA,MAAM,kBAAqC;AACzC,cAAM,WAAW,MAAM,KAAK,KAAK;AACjC,YAAI,CAAC,SAAU,QAAO,CAAC;AAEvB,eAAO,OAAO,KAAK,SAAS,KAAK;AAAA,MACnC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,MAAM,gBAAgB,cAAsD;AAC1E,cAAM,WAAW,MAAM,KAAK,KAAK;AACjC,YAAI,CAAC,UAAU;AAEb,iBAAO,MAAM,KAAK,aAAa,KAAK,CAAC;AAAA,QACvC;AAEA,cAAM,eAAyB,CAAC;AAEhC,mBAAW,CAAC,UAAU,KAAK,KAAK,cAAc;AAC5C,gBAAM,QAAQ,SAAS,MAAM,QAAQ;AAErC,cAAI,CAAC,OAAO;AAEV,yBAAa,KAAK,QAAQ;AAAA,UAC5B,WAAW,MAAM,eAAe,OAAO;AAErC,yBAAa,KAAK,QAAQ;AAAA,UAC5B;AAAA,QACF;AAEA,eAAO;AAAA,MACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MASA,MAAM,gBAAgB,cAA8C;AAClE,cAAM,WAAW,MAAM,KAAK,KAAK;AACjC,YAAI,CAAC,SAAU,QAAO,CAAC;AAEvB,cAAM,eAAyB,CAAC;AAEhC,mBAAW,YAAY,OAAO,KAAK,SAAS,KAAK,GAAG;AAClD,cAAI,CAAC,aAAa,IAAI,QAAQ,GAAG;AAC/B,yBAAa,KAAK,QAAQ;AAAA,UAC5B;AAAA,QACF;AAEA,eAAO;AAAA,MACT;AAAA;AAAA;AAAA;AAAA,MAKA,MAAM,QAAuB;AAC3B,YAAI;AACF,gBAAMA,KAAG,OAAO,KAAK,YAAY;AAAA,QACnC,SAAS,OAAO;AAEd,cAAK,MAAgC,SAAS,UAAU;AACtD,oBAAQ,MAAM,6CAA6C,KAAK,EAAE;AAAA,UACpE;AAAA,QACF;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOQ,cAA6B;AACnC,eAAO;AAAA,UACL,eAAe;AAAA,UACf,aAAa,kBAAkB;AAAA,UAC/B,aAAa,KAAK,IAAI;AAAA,UACtB,OAAO,CAAC;AAAA,QACV;AAAA,MACF;AAAA,IACF;AAAA;AAAA;;;AC1TA;AAAA;AAAA;AAAA;AAAA,OAAOG,UAAQ;AACf,OAAOC,YAAU;AADjB,IAoBa;AApBb;AAAA;AAAA;AAEA;AAkBO,IAAM,kBAAN,MAAsB;AAAA,MACnB;AAAA,MACA;AAAA,MACA,eAAgC;AAAA,MAExC,YAAY,SAAiB,WAAmB;AAC9C,aAAK,UAAU;AACf,aAAK,YAAYA,OAAK,KAAK,WAAW,iBAAiB;AAAA,MACzD;AAAA;AAAA;AAAA;AAAA;AAAA,MAMA,MAAc,YAAsC;AAClD,YAAI;AACF,gBAAM,UAAU,MAAMD,KAAG,SAAS,KAAK,WAAW,OAAO;AACzD,iBAAO,KAAK,MAAM,OAAO;AAAA,QAC3B,QAAQ;AAEN,iBAAO;AAAA,QACT;AAAA,MACF;AAAA;AAAA;AAAA;AAAA,MAKA,MAAc,UAAU,OAAgC;AACtD,YAAI;AACF,gBAAM,UAAU,KAAK,UAAU,OAAO,MAAM,CAAC;AAC7C,gBAAMA,KAAG,UAAU,KAAK,WAAW,SAAS,OAAO;AAAA,QACrD,SAAS,OAAO;AAEd,kBAAQ,MAAM,6CAA6C,KAAK,EAAE;AAAA,QACpE;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,MAAc,qBAAwC;AACpD,cAAM,SAAS,MAAM,iBAAiB,KAAK,OAAO;AAClD,cAA
M,SAAS,MAAM,iBAAiB,KAAK,OAAO;AAElD,eAAO;AAAA,UACL;AAAA,UACA;AAAA,UACA,WAAW,KAAK,IAAI;AAAA,QACtB;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,MAAM,aAAuC;AAE3C,cAAM,SAAS,MAAM,UAAU,KAAK,OAAO;AAC3C,YAAI,CAAC,QAAQ;AACX,iBAAO;AAAA,QACT;AAEA,YAAI;AAEF,eAAK,eAAe,MAAM,KAAK,mBAAmB;AAGlD,gBAAM,gBAAgB,MAAM,KAAK,UAAU;AAE3C,cAAI,CAAC,eAAe;AAElB,kBAAM,KAAK,UAAU,KAAK,YAAY;AACtC,mBAAO;AAAA,UACT;AAGA,gBAAM,gBAAgB,cAAc,WAAW,KAAK,aAAa;AACjE,gBAAM,gBAAgB,cAAc,WAAW,KAAK,aAAa;AAEjE,cAAI,CAAC,iBAAiB,CAAC,eAAe;AAEpC,mBAAO;AAAA,UACT;AAGA,cAAI,eAAyB,CAAC;AAE9B,cAAI,eAAe;AAEjB,gBAAI;AACF,6BAAe,MAAM;AAAA,gBACnB,KAAK;AAAA,gBACL,cAAc;AAAA,gBACd,KAAK,aAAa;AAAA,cACpB;AAAA,YACF,SAAS,OAAO;AAEd,sBAAQ,MAAM,iDAAiD,KAAK,EAAE;AACtE,6BAAe,MAAM;AAAA,gBACnB,KAAK;AAAA,gBACL,cAAc;AAAA,gBACd,KAAK,aAAa;AAAA,cACpB;AAAA,YACF;AAAA,UACF,WAAW,eAAe;AAExB,2BAAe,MAAM;AAAA,cACnB,KAAK;AAAA,cACL,cAAc;AAAA,cACd,KAAK,aAAa;AAAA,YACpB;AAAA,UACF;AAGA,gBAAM,KAAK,UAAU,KAAK,YAAY;AAEtC,iBAAO;AAAA,QACT,SAAS,OAAO;AACd,kBAAQ,MAAM,4CAA4C,KAAK,EAAE;AACjE,iBAAO;AAAA,QACT;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,MAAM,gBAA0C;AAE9C,cAAM,SAAS,MAAM,UAAU,KAAK,OAAO;AAC3C,YAAI,CAAC,QAAQ;AACX,iBAAO;AAAA,QACT;AAEA,YAAI;AAEF,gBAAM,WAAW,MAAM,KAAK,mBAAmB;AAG/C,cAAI,CAAC,KAAK,cAAc;AACtB,iBAAK,eAAe;AACpB,kBAAM,KAAK,UAAU,QAAQ;AAC7B,mBAAO;AAAA,UACT;AAGA,gBAAM,gBAAgB,KAAK,aAAa,WAAW,SAAS;AAC5D,gBAAM,gBAAgB,KAAK,aAAa,WAAW,SAAS;AAE5D,cAAI,CAAC,iBAAiB,CAAC,eAAe;AAEpC,mBAAO;AAAA,UACT;AAGA,cAAI,eAAyB,CAAC;AAE9B,cAAI,eAAe;AAEjB,gBAAI;AACF,6BAAe,MAAM;AAAA,gBACnB,KAAK;AAAA,gBACL,KAAK,aAAa;AAAA,gBAClB,SAAS;AAAA,cACX;AAAA,YACF,SAAS,OAAO;AAEd,sBAAQ,MAAM,iDAAiD,KAAK,EAAE;AACtE,6BAAe,MAAM;AAAA,gBACnB,KAAK;AAAA,gBACL,KAAK,aAAa;AAAA,gBAClB,SAAS;AAAA,cACX;AAAA,YACF;AAAA,UACF,WAAW,eAAe;AAExB,2BAAe,MAAM;AAAA,cACnB,KAAK;AAAA,cACL,KAAK,aAAa;AAAA,cAClB,SAAS;AAAA,YACX;AAAA,UACF;AAGA,eAAK,eAAe;AACpB,gBAAM,KAAK,UAAU,QAAQ;AAE7B,iBAAO;AAAA,QACT,SAAS,OAAO;AACd,kBAAQ,MAAM,wCAAwC,KAAK,EAAE;AAC7D,iBAAO;AAAA,QACT;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA,MAMA,WAA4B;AAC1B,eAAO,KAAK;AAAA,MACd;AAAA;AAAA;AAAA;AAAA;AAAA,MAMA,MAAM,cAA6B;AACjC,YAAI;AACF,eAAK,eAAe,MAAM,KAAK,mBAAmB;AAClD,gBAAM,KAAK,UAAU,KAAK,YAAY;AAAA,QACxC,SAAS,OAAO;AACd,kBAAQ,MAAM,sCAAsC,KAAK,EAAE;AAAA,QAC7D;AAAA,MACF;AAAA,IACF;AAAA;AAAA;;;ACjPA,OAAOE,UAAQ;AA2Bf,eAAsB,cACpB,SACA,UACA,QACgC;AAChC,QAAM,WAAW,IAAI,gBAAgB,SAAS,MAAM;AACpD,QAAM,gBAAgB,MAAM,SAAS,KAAK;AAG1C,MAAI,CAAC,eAAe;AAClB,UAAM,WAAW,MAAM,YAAY,SAAS,MAAM;AAClD,WAAO;AAAA,MACL,OAAO;AAAA,MACP,UAAU,CAAC;AAAA,MACX,SAAS,CAAC;AAAA,MACV,QAAQ;AAAA,IACV;AAAA,EACF;AAIA,QAAM,eAAe,MAAM,eAAe;AAC1C,QAAM,SAAS,MAAM,UAAU,OAAO;AAEtC,MAAI,gBAAgB,UAAU,cAAc,UAAU;AACpD,UAAM,aAAa,IAAI,gBAAgB,SAAS,SAAS,MAAM;AAC/D,UAAM,WAAW,WAAW;AAE5B,UAAM,eAAe,WAAW,SAAS;AAGzC,QAAI,iBACC,aAAa,WAAW,cAAc,SAAS,UAC/C,aAAa,WAAW,cAAc,SAAS,SAAS;AAE3D,UAAI;AAEF,cAAM,oBAAoB,MAAM;AAAA,UAC9B;AAAA,UACA,cAAc,SAAS;AAAA,UACvB,aAAa;AAAA,QACf;AACA,cAAM,kBAAkB,IAAI,IAAI,iBAAiB;AAGjD,cAAM,WAAW,MAAM,YAAY,SAAS,MAAM;AAClD,cAAM,iBAAiB,IAAI,IAAI,QAAQ;AAEvC,cAAM,QAAkB,CAAC;AACzB,cAAM,WAAqB,CAAC;AAC5B,cAAM,UAAoB,CAAC;AAG3B,mBAAW,YAAY,mBAAmB;AACxC,cAAI,eAAe,IAAI,QAAQ,GAAG;AAEhC,gBAAI,cAAc,MAAM,QAAQ,GAAG;AACjC,uBAAS,KAAK,QAAQ;AAAA,YACxB,OAAO;AACL,oBAAM,KAAK,QAAQ;AAAA,YACrB;AAAA,UACF;AAAA,QAEF;AAGA,mBAAW,YAAY,UAAU;AAC/B,cAAI,CAAC,cAAc,MAAM,QAAQ,KAAK,CAAC,gBAAgB,IAAI,QAAQ,GAAG;AACpE,kBAAM,KAAK,QAAQ;AAAA,UACrB;AAAA,QACF;AAGA,mBAAW,YAAY,OAAO,KAAK,cAAc,KAAK,GAAG;AACvD,cAAI,CAAC,eAAe,IAAI,QAAQ,GAAG;AACjC,oBAAQ,KAAK,QAAQ;AAAA,UACvB;AAAA,QACF;AAEA,eAAO;AAAA,UACL;AAAA,UACA;AAAA,UACA;AAAA,UACA,QAAQ;AAAA,QACV;AAAA,MACF,SAAS,OAAO;AAEd,gBAAQ,KAAK,yDAAyD,KAAK,EAAE;AA
C7E,cAAM,WAAW,MAAM,YAAY,SAAS,MAAM;AAClD,cAAM,iBAAiB,IAAI,IAAI,QAAQ;AAEvC,cAAM,UAAoB,CAAC;AAC3B,mBAAW,YAAY,OAAO,KAAK,cAAc,KAAK,GAAG;AACvD,cAAI,CAAC,eAAe,IAAI,QAAQ,GAAG;AACjC,oBAAQ,KAAK,QAAQ;AAAA,UACvB;AAAA,QACF;AAEA,eAAO;AAAA,UACL,OAAO;AAAA,UACP,UAAU,CAAC;AAAA,UACX;AAAA,UACA,QAAQ;AAAA,QACV;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,SAAO,MAAM,oBAAoB,SAAS,eAAe,MAAM;AACjE;AAKA,eAAe,YACb,SACA,QACmB;AACnB,MAAI,eAAe,MAAM,KAAK,OAAO,WAAW,SAAS,GAAG;AAC1D,WAAO,MAAM,2BAA2B,SAAS,MAAM;AAAA,EACzD,WAAW,eAAe,MAAM,GAAG;AACjC,WAAO,MAAM,aAAa;AAAA,MACxB;AAAA,MACA,iBAAiB,OAAO,SAAS;AAAA,MACjC,iBAAiB,OAAO,SAAS;AAAA,IACnC,CAAC;AAAA,EACH,OAAO;AACL,WAAO,MAAM,aAAa;AAAA,MACxB;AAAA,MACA,iBAAiB,CAAC;AAAA,MAClB,iBAAiB,CAAC;AAAA,IACpB,CAAC;AAAA,EACH;AACF;AAKA,eAAe,oBACb,SACA,eACA,QACgC;AAChC,QAAM,QAAkB,CAAC;AACzB,QAAM,WAAqB,CAAC;AAC5B,QAAM,UAAoB,CAAC;AAG3B,QAAM,eAAe,MAAM,YAAY,SAAS,MAAM;AACtD,QAAM,iBAAiB,IAAI,IAAI,YAAY;AAG3C,QAAM,YAAY,oBAAI,IAAoB;AAE1C,aAAW,YAAY,cAAc;AACnC,QAAI;AACF,YAAM,QAAQ,MAAMA,KAAG,KAAK,QAAQ;AACpC,gBAAU,IAAI,UAAU,MAAM,OAAO;AAAA,IACvC,QAAQ;AAEN;AAAA,IACF;AAAA,EACF;AAGA,aAAW,CAAC,UAAU,KAAK,KAAK,WAAW;AACzC,UAAM,QAAQ,cAAc,MAAM,QAAQ;AAE1C,QAAI,CAAC,OAAO;AAEV,YAAM,KAAK,QAAQ;AAAA,IACrB,WAAW,MAAM,eAAe,OAAO;AAErC,eAAS,KAAK,QAAQ;AAAA,IACxB;AAAA,EACF;AAGA,aAAW,YAAY,OAAO,KAAK,cAAc,KAAK,GAAG;AACvD,QAAI,CAAC,eAAe,IAAI,QAAQ,GAAG;AACjC,cAAQ,KAAK,QAAQ;AAAA,IACvB;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,EACV;AACF;AA1NA;AAAA;AAAA;AAEA;AACA;AACA;AACA;AACA;AAAA;AAAA;;;ACNA,OAAOC,UAAQ;AAoCf,eAAe,mBACb,UACA,SACA,YACA,QACA,SACmC;AAEnC,QAAM,YAAY,eAAe,MAAM,IACnC,OAAO,KAAK,YACX,eAAe,MAAM,IAAI,OAAO,SAAS,YAAY;AAC1D,QAAM,eAAe,eAAe,MAAM,IACtC,OAAO,KAAK,eACX,eAAe,MAAM,IAAI,OAAO,SAAS,eAAe;AAG7D,QAAM,SAAS,UAAU,UAAU,SAAS;AAAA,IAC1C;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,OAAO,WAAW,GAAG;AAEvB,QAAI,SAAS;AACX,cAAQ,MAAM,sBAAsB,QAAQ,EAAE;AAAA,IAChD;AACA,WAAO;AAAA,EACT;AAIA,QAAM,QAAQ,OAAO,IAAI,OAAK,EAAE,OAAO;AACvC,QAAM,UAA0B,CAAC;AAEjC,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,4BAA4B;AACjE,UAAM,aAAa,MAAM,MAAM,GAAG,KAAK,IAAI,IAAI,4BAA4B,MAAM,MAAM,CAAC;AACxF,UAAM,eAAe,MAAM,WAAW,WAAW,UAAU;AAC3D,YAAQ,KAAK,GAAG,YAAY;AAG5B,QAAI,MAAM,SAAS,4BAA4B;AAC7C,YAAM,IAAI,QAAQ,aAAW,aAAa,OAAO,CAAC;AAAA,IACpD;AAAA,EACF;AAEA,SAAO;AAAA,IACL,YAAY,OAAO;AAAA,IACnB;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAaA,eAAsB,gBACpB,UACA,UACA,YACA,QACA,UAAmC,CAAC,GACrB;AACf,QAAM,EAAE,QAAQ,IAAI;AAEpB,MAAI;AAEF,QAAI;AACF,YAAMA,KAAG,OAAO,QAAQ;AAAA,IAC1B,QAAQ;AAEN,UAAI,SAAS;AACX,gBAAQ,MAAM,wBAAwB,QAAQ,EAAE;AAAA,MAClD;AACA,YAAM,SAAS,aAAa,QAAQ;AAEpC,YAAMC,YAAW,IAAI,gBAAgB,SAAS,MAAM;AACpD,YAAMA,UAAS,WAAW,QAAQ;AAClC;AAAA,IACF;AAGA,UAAM,UAAU,MAAMD,KAAG,SAAS,UAAU,OAAO;AAGnD,UAAM,SAAS,MAAM,mBAAmB,UAAU,SAAS,YAAY,QAAQ,WAAW,KAAK;AAG/F,UAAM,QAAQ,MAAMA,KAAG,KAAK,QAAQ;AACpC,UAAM,WAAW,IAAI,gBAAgB,SAAS,MAAM;AAEpD,QAAI,WAAW,MAAM;AAEnB,YAAM,SAAS,aAAa,QAAQ;AACpC,YAAM,SAAS,WAAW,UAAU;AAAA,QAClC;AAAA,QACA,cAAc,MAAM;AAAA,QACpB,YAAY;AAAA,MACd,CAAC;AACD;AAAA,IACF;AAGA,UAAM,SAAS;AAAA,MACb;AAAA,MACA,OAAO;AAAA,MACP,OAAO,OAAO,IAAI,OAAK,EAAE,QAAQ;AAAA,MACjC,OAAO;AAAA,IACT;AAGA,UAAM,SAAS,WAAW,UAAU;AAAA,MAClC;AAAA,MACA,cAAc,MAAM;AAAA,MACpB,YAAY,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,SAAS;AACX,cAAQ,MAAM,yBAAoB,QAAQ,KAAK,OAAO,UAAU,UAAU;AAAA,IAC5E;AAAA,EACF,SAAS,OAAO;AAEd,YAAQ,MAAM,wCAA8B,QAAQ,KAAK,KAAK,EAAE;AAAA,EAClE;AACF;AAgBA,eAAsB,mBACpB,WACA,UACA,YACA,QACA,UAAmC,CAAC,GACnB;AACjB,QAAM,EAAE,QAAQ,IAAI;AACpB,MAAI,iBAAiB;AAGrB,QAAM,kBAAkF,CAAC;AAGzF,aAAW,YAAY,WAAW;AAEhC,QAAI;AACJ,QAAI;AACJ,QAAI;AACF,YAAM,QAAQ,MAAMA,KAAG,KAAK,QAAQ;AACpC,kBAAY,MAAM;AAClB,gBAAU,MAAMA,KAAG,SAAS,UAAU,OAAO;AAAA,IAC/C,SAAS,OAAO;AAEd,UAAI,SAAS
;AACX,gBAAQ,MAAM,6BAA6B,QAAQ,EAAE;AAAA,MACvD;AACA,UAAI;AACF,cAAM,SAAS,aAAa,QAAQ;AACpC,cAAM,WAAW,IAAI,gBAAgB,SAAS,MAAM;AACpD,cAAM,SAAS,WAAW,QAAQ;AAAA,MACpC,SAASE,QAAO;AAEd,YAAI,SAAS;AACX,kBAAQ,MAAM,gBAAgB,QAAQ,eAAe;AAAA,QACvD;AAAA,MACF;AAEA;AACA;AAAA,IACF;AAEA,QAAI;AAEF,YAAM,SAAS,MAAM,mBAAmB,UAAU,SAAS,YAAY,QAAQ,WAAW,KAAK;AAE/F,UAAI,WAAW,MAAM;AAEnB,YAAI;AACF,gBAAM,SAAS,aAAa,QAAQ;AAAA,QACtC,SAAS,OAAO;AAAA,QAEhB;AAGA,cAAM,WAAW,IAAI,gBAAgB,SAAS,MAAM;AACpD,cAAM,SAAS,WAAW,UAAU;AAAA,UAClC;AAAA,UACA,cAAc;AAAA,UACd,YAAY;AAAA,QACd,CAAC;AAGD;AACA;AAAA,MACF;AAGA,UAAI;AACF,cAAM,SAAS,aAAa,QAAQ;AAAA,MACtC,SAAS,OAAO;AAAA,MAEhB;AAGA,YAAM,SAAS;AAAA,QACb,OAAO;AAAA,QACP,OAAO,OAAO,IAAI,OAAK,EAAE,QAAQ;AAAA,QACjC,OAAO;AAAA,MACT;AAGA,sBAAgB,KAAK;AAAA,QACnB;AAAA,QACA,YAAY,OAAO;AAAA,QACnB,OAAO;AAAA,MACT,CAAC;AAED,UAAI,SAAS;AACX,gBAAQ,MAAM,yBAAoB,QAAQ,KAAK,OAAO,UAAU,UAAU;AAAA,MAC5E;AAEA;AAAA,IACF,SAAS,OAAO;AAEd,cAAQ,MAAM,wCAA8B,QAAQ,KAAK,KAAK,EAAE;AAAA,IAChE;AAAA,EACF;AAGF,MAAI,gBAAgB,SAAS,GAAG;AAC9B,UAAM,WAAW,IAAI,gBAAgB,SAAS,MAAM;AACpD,UAAM,SAAS;AAAA,MACb,gBAAgB,IAAI,YAAU;AAAA,QAC5B,UAAU,MAAM;AAAA,QAChB,cAAc,MAAM;AAAA;AAAA,QACpB,YAAY,MAAM;AAAA,MACpB,EAAE;AAAA,IACJ;AAAA,EACF;AAEA,SAAO;AACT;AAxSA;AAAA;AAAA;AACA;AAGA;AACA;AACA;AAAA;AAAA;;;ACkEO,SAAS,qBAA6B;AAC3C,QAAM,UAAU,kBAAkB,uBAAuB,kBAAkB,MAAM;AACjF;AACA,SAAO;AACT;AAMO,SAAS,sBAA8B;AAC5C,QAAM,UAAU,mBAAmB,wBAAwB,mBAAmB,MAAM;AACpF;AACA,SAAO;AACT;AAMO,SAAS,yBAAiC;AAC/C,QAAM,UAAU,uBAAuB,oBAAoB,uBAAuB,MAAM;AACxF;AACA,SAAO;AACT;AAhGA,IAKM,mBA4BA,oBAkBA,wBAaF,sBACA,uBACA;AAlEJ;AAAA;AAAA;AAKA,IAAM,oBAAoB;AAAA,MACxB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,IAAM,qBAAqB;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,IAAM,yBAAyB;AAAA,MAC7B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,IAAI,uBAAuB;AAC3B,IAAI,wBAAwB;AAC5B,IAAI,oBAAoB;AAAA;AAAA;;;AClExB;AAAA;AAAA;AAAA;AAAA,OAAOC,UAAQ;AACf,OAAO,SAAS;AAChB,OAAOC,YAAW;AAClB,OAAO,YAAY;AA0BnB,eAAsB,cAAc,UAA2B,CAAC,GAAkB;AAChF,QAAM,UAAU,QAAQ,WAAW,QAAQ,IAAI;AAC/C,QAAM,UAAU,IAAI,8BAA8B,EAAE,MAAM;AAC1D,MAAI;AAEJ,MAAI;AAEF,YAAQ,OAAO;AACf,UAAM,SAAS,MAAM,cAAc,KAAK,OAAO;AAG/C,YAAQ,OAAO;AACf,UAAM,WAAW,IAAI,SAAS,OAAO;AACrC,UAAM,SAAS,WAAW;AAG1B,QAAI,CAAC,QAAQ,OAAO;AAClB,cAAQ,OAAO;AACf,YAAMC,YAAW,IAAI,gBAAgB,SAAS,MAAM;AACpD,YAAM,gBAAgB,MAAMA,UAAS,KAAK;AAE1C,UAAI,eAAe;AAEjB,cAAM,UAAU,MAAM,cAAc,SAAS,UAAU,MAAM;AAE7D,YAAI,QAAQ,WAAW,QAAQ;AAC7B,gBAAM,eAAe,QAAQ,MAAM,SAAS,QAAQ,SAAS;AAC7D,gBAAM,eAAe,QAAQ,QAAQ;AAErC,cAAI,iBAAiB,KAAK,iBAAiB,GAAG;AAC5C,oBAAQ,QAAQ,4CAA4C;AAC5D;AAAA,UACF;AAEA,kBAAQ;AAAA,YACN,qBAAqB,YAAY,oBAAoB,YAAY,eAAe,QAAQ,MAAM;AAAA,UAChG;AAGA,kBAAQ,MAAM,uBAAuB,CAAC;AACtC,gBAAMC,cAAa,IAAI,gBAAgB;AACvC,gBAAMA,YAAW,WAAW;AAC5B,kBAAQ,QAAQ,wBAAwB;AAGxC,cAAI,eAAe,GAAG;AACpB,oBAAQ,MAAM,YAAY,YAAY,mBAAmB;AACzD,gBAAI,eAAe;AACnB,uBAAW,YAAY,QAAQ,SAAS;AACtC,kBAAI;AACF,sBAAM,SAAS,aAAa,QAAQ;AACpC,sBAAMD,UAAS,WAAW,QAAQ;AAClC;AAAA,cACF,SAAS,KAAK;AACZ,wBAAQ,KAAK,0BAA0B,QAAQ,MAAM,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AAAA,cACzG;AAAA,YACF;AACA,oBAAQ,QAAQ,WAAW,YAAY,IAAI,YAAY,gBAAgB;AAAA,UACzE;AAGA,cAAI,eAAe,GAAG;AACpB,oBAAQ,MAAM,cAAc,YAAY,mBAAmB;AAC3D,kBAAM,eAAe,CAAC,GAAG,QAAQ,OAAO,GAAG,QAAQ,QAAQ;AAC3D,kBAAM,QAAQ,MAAM;AAAA,cAClB;AAAA,cACA;AAAA,cACAC;AAAA,cACA;AAAA,cACA,EAAE,SA
AS,QAAQ,QAAQ;AAAA,YAC7B;AAGA,kBAAM,iBAAiB,SAAS,MAAM;AAEtC,oBAAQ;AAAA,cACN,iCAAiC,KAAK,IAAI,YAAY;AAAA,YACxD;AAAA,UACF;AAGA,gBAAM,EAAE,gBAAAC,iBAAgB,WAAAC,WAAU,IAAI,MAAM;AAC5C,gBAAM,EAAE,iBAAAC,iBAAgB,IAAI,MAAM;AAClC,gBAAMC,gBAAe,MAAMH,gBAAe;AAC1C,gBAAMI,UAAS,MAAMH,WAAU,OAAO;AAEtC,cAAIE,iBAAgBC,SAAQ;AAC1B,kBAAM,aAAa,IAAIF,iBAAgB,SAAS,SAAS,MAAM;AAC/D,kBAAM,WAAW,WAAW;AAC5B,kBAAM,WAAW,WAAW,SAAS;AACrC,gBAAI,UAAU;AAEZ,oBAAMJ,UAAS,eAAe,QAAQ;AAAA,YACxC;AAAA,UACF;AAEA,kBAAQ,IAAID,OAAM,IAAI,kBAAkB,GAAGA,OAAM,KAAK,YAAY,GAAGA,OAAM,IAAI,yBAAyB,CAAC;AACzG;AAAA,QACF;AAGA,gBAAQ,OAAO;AAAA,MACjB;AAAA,IACF,OAAO;AACL,cAAQ,OAAO;AAAA,IACjB;AAGA,YAAQ,OAAO;AACf,QAAI;AAEJ,QAAI,eAAe,MAAM,KAAK,OAAO,WAAW,SAAS,GAAG;AAE1D,cAAQ,MAAM,2BAA2B,SAAS,MAAM;AAAA,IAC1D,WAAW,eAAe,MAAM,GAAG;AAEjC,cAAQ,MAAM,aAAa;AAAA,QACzB;AAAA,QACA,iBAAiB,OAAO,SAAS;AAAA,QACjC,iBAAiB,OAAO,SAAS;AAAA,MACnC,CAAC;AAAA,IACH,OAAO;AAEL,cAAQ,MAAM,aAAa;AAAA,QACzB;AAAA,QACA,iBAAiB,CAAC;AAAA,QAClB,iBAAiB,CAAC;AAAA,MACpB,CAAC;AAAA,IACH;AAEA,QAAI,MAAM,WAAW,GAAG;AACtB,cAAQ,KAAK,yBAAyB;AACtC;AAAA,IACF;AAEA,YAAQ,OAAO,SAAS,MAAM,MAAM;AAGpC,YAAQ,OAAO,uBAAuB;AACtC,UAAM,aAAa,IAAI,gBAAgB;AACvC,UAAM,WAAW,WAAW;AAC5B,YAAQ,QAAQ,wBAAwB;AAGxC,UAAM,cAAc,eAAe,MAAM,IACrC,OAAO,KAAK,cACZ;AACJ,UAAM,qBAAqB,eAAe,MAAM,IAC5C,OAAO,KAAK,qBACZ;AAEJ,UAAM,oBAAoB;AAE1B,YAAQ,MAAM,yBAAyB,WAAW,kBAAkB;AAEpE,UAAM,YAAY,KAAK,IAAI;AAC3B,QAAI,iBAAiB;AACrB,QAAI,kBAAkB;AAGtB,UAAM,mBAAuC,CAAC;AAC9C,UAAM,QAAQ,OAAO,WAAW;AAGhC,UAAM,qBAAqF,CAAC;AAG5F,UAAM,gBAAgB;AAAA,MACpB,gBAAgB;AAAA,MAChB,YAAY,MAAM;AAAA,MAClB,cAAc,mBAAmB;AAAA,IACnC;AAGA,UAAM,6BAA6B;AACnC,UAAM,+BAA+B;AACrC,UAAM,yBAAyB,KAAK,MAAM,+BAA+B,0BAA0B;AAEnG,QAAI,cAAc;AAClB,qBAAiB,YAAY,MAAM;AAEjC;AACA,UAAI,eAAe,wBAAwB;AACzC,sBAAc,eAAe,mBAAmB;AAChD,sBAAc;AAAA,MAChB;AAEA,cAAQ,OAAO,GAAG,cAAc,cAAc,IAAI,cAAc,UAAU,YAAY,cAAc,YAAY;AAAA,IAClH,GAAG,0BAA0B;AAG7B,UAAM,2BAA2B,YAAY;AAC3C,UAAI,iBAAiB,WAAW,EAAG;AAEnC,YAAM,YAAY,iBAAiB,OAAO,GAAG,iBAAiB,MAAM;AAGpE,eAAS,IAAI,GAAG,IAAI,UAAU,QAAQ,KAAK,oBAAoB;AAC7D,cAAM,QAAQ,UAAU,MAAM,GAAG,KAAK,IAAI,IAAI,oBAAoB,UAAU,MAAM,CAAC;AAGnF,sBAAc,eAAe,oBAAoB;AAIjD,cAAM,QAAQ,MAAM,IAAI,UAAQ,KAAK,OAAO;AAC5C,cAAM,mBAAmC,CAAC;AAE1C,iBAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,4BAA4B;AACjE,gBAAM,aAAa,MAAM,MAAM,GAAG,KAAK,IAAI,IAAI,4BAA4B,MAAM,MAAM,CAAC;AACxF,gBAAM,eAAe,MAAM,WAAW,WAAW,UAAU;AAC3D,2BAAiB,KAAK,GAAG,YAAY;AAGrC,gBAAM,IAAI,QAAQ,aAAW,aAAa,OAAO,CAAC;AAAA,QACpD;AAEA,2BAAmB,MAAM;AAGzB,sBAAc,eAAe,aAAa,MAAM,MAAM;AAEtD,cAAM,SAAS;AAAA,UACb;AAAA,UACA,MAAM,IAAI,UAAQ,KAAK,MAAM,QAAQ;AAAA,UACrC;AAAA,QACF;AAGA,cAAM,IAAI,QAAQ,aAAW,aAAa,OAAO,CAAC;AAAA,MACpD;AAEA,oBAAc,eAAe,mBAAmB;AAAA,IAClD;AAGA,UAAM,eAAe,MAAM;AAAA,MAAI,CAAC,SAC9B,MAAM,YAAY;AAChB,YAAI;AAEF,gBAAM,QAAQ,MAAMD,KAAG,KAAK,IAAI;AAChC,gBAAM,UAAU,MAAMA,KAAG,SAAS,MAAM,OAAO;AAC/C,gBAAM,YAAY,eAAe,MAAM,IACnC,OAAO,KAAK,YACZ;AACJ,gBAAM,eAAe,eAAe,MAAM,IACtC,OAAO,KAAK,eACZ;AAEJ,gBAAM,SAAS,UAAU,MAAM,SAAS;AAAA,YACtC;AAAA,YACA;AAAA,UACF,CAAC;AAED,cAAI,OAAO,WAAW,GAAG;AACvB;AACA,0BAAc,iBAAiB;AAC/B;AAAA,UACF;AAGA,qBAAW,SAAS,QAAQ;AAC1B,6BAAiB,KAAK;AAAA,cACpB;AAAA,cACA,SAAS,MAAM;AAAA,YACjB,CAAC;AAAA,UACH;AAGA,6BAAmB,KAAK;AAAA,YACtB,UAAU;AAAA,YACV,YAAY,OAAO;AAAA,YACnB,OAAO,MAAM;AAAA,UACf,CAAC;AAED;AACA,wBAAc,iBAAiB;AAG/B,cAAI,iBAAiB,UAAU,mBAAmB;AAChD,kBAAM,yBAAyB;AAAA,UACjC;AAAA,QACF,SAAS,OAAO;AACd,cAAI,QAAQ,SAAS;AACnB,oBAAQ,MAAMC,OAAM,OAAO;AAAA,yBAAkB,IAAI,KAAK,KAAK,EAAE,CAAC;AAAA,UAChE;AACA;AACA,wBAAc,iBAAiB;AAAA,QACjC;AAAA,MACF,CAAC;AAAA,IACH;AAGA,UAAM,QAAQ,IAAI,YAAY;AAG9B,kBAAc,eAAe;AAC7B,UAAM,yBAAyB;AAG/B,kBAAc,cAAc;AAG5B,YAAQ,MAAM,0BAA0B;AACxC,UAAM,WAAW,IAAI,gBAAgB,SAAS,MAAM;AACpD,UAAM,SAAS;AAAA,MACb,mBAAmB,
IAAI,YAAU;AAAA,QAC/B,UAAU,MAAM;AAAA,QAChB,cAAc,MAAM;AAAA;AAAA,QACpB,YAAY,MAAM;AAAA,MACpB,EAAE;AAAA,IACJ;AAGA,UAAM,EAAE,gBAAAG,iBAAgB,WAAAC,WAAU,IAAI,MAAM;AAC5C,UAAM,EAAE,iBAAAC,iBAAgB,IAAI,MAAM;AAClC,UAAM,eAAe,MAAMF,gBAAe;AAC1C,UAAM,SAAS,MAAMC,WAAU,OAAO;AAEtC,QAAI,gBAAgB,QAAQ;AAC1B,YAAM,aAAa,IAAIC,iBAAgB,SAAS,SAAS,MAAM;AAC/D,YAAM,WAAW,WAAW;AAC5B,YAAM,WAAW,WAAW,SAAS;AACrC,UAAI,UAAU;AACZ,cAAM,SAAS,eAAe,QAAQ;AAAA,MACxC;AAAA,IACF;AAEA,YAAQ,QAAQ,gBAAgB;AAIhC,UAAM,iBAAiB,SAAS,MAAM;AAEtC,UAAM,cAAc,KAAK,IAAI,IAAI,aAAa,KAAM,QAAQ,CAAC;AAC7D,YAAQ;AAAA,MACN,WAAW,cAAc,WAAW,eAAe,eAAe,SAAS,WAAW,WAAW;AAAA,IACnG;AAEA,YAAQ,IAAIL,OAAM,IAAI,kBAAkB,GAAGA,OAAM,KAAK,YAAY,GAAGA,OAAM,IAAI,yBAAyB,CAAC;AAAA,EAC3G,SAAS,OAAO;AAEd,QAAI,gBAAgB;AAClB,oBAAc,cAAc;AAAA,IAC9B;AACA,YAAQ,KAAK,oBAAoB,KAAK,EAAE;AACxC,UAAM;AAAA,EACR;AACF;AAzXA;AAAA;AAAA;AAIA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AAAA;;;AChBA,SAAS,eAAe;AACxB,SAAS,iBAAAQ,sBAAqB;AAC9B,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,WAAAC,UAAS,QAAAC,aAAY;;;ACE9B;AACA;AANA,OAAOC,SAAQ;AACf,OAAOC,WAAU;AACjB,SAAS,iBAAAC,sBAAqB;AAC9B,OAAOC,YAAW;AAClB,OAAO,cAAc;;;ACJrB,OAAO,YAAY;AACnB,OAAO,WAAW;AAClB,SAAS,qBAAqB;AAC9B,SAAS,qBAAqB;AAC9B,SAAS,SAAS,YAAY;AAK9B,IAAM,aAAa,cAAc,YAAY,GAAG;AAChD,IAAM,YAAY,QAAQ,UAAU;AACpC,IAAMC,WAAU,cAAc,YAAY,GAAG;AAG7C,IAAI;AACJ,IAAI;AACF,gBAAcA,SAAQ,KAAK,WAAW,iBAAiB,CAAC;AAC1D,QAAQ;AACN,gBAAcA,SAAQ,KAAK,WAAW,oBAAoB,CAAC;AAC7D;AAGA,IAAM,eAAe,YAAY;AACjC,IAAM,UAAU,YAAY;AAK5B,SAAS,UAAU,MAAc,QAAgB,UAAU,GAAW;AACpE,QAAM,QAAQ,KAAK,MAAM,IAAI,EAAE,OAAO,UAAQ,KAAK,KAAK,EAAE,SAAS,CAAC;AAGpE,QAAM,YAAY,KAAK,IAAI,GAAG,MAAM,IAAI,UAAQ,KAAK,MAAM,CAAC;AAE5D,QAAM,mBAAmB,SAAI,OAAO,YAAY,UAAU,CAAC;AAC3D,QAAM,MAAM,SAAI,gBAAgB;AAChC,QAAM,SAAS,SAAI,gBAAgB;AACnC,QAAM,YAAY,SAAI,gBAAgB;AAEtC,QAAM,cAAc,MAAM,IAAI,UAAQ;AACpC,UAAM,WAAW,IAAI,OAAO,YAAY,KAAK,SAAS,OAAO;AAC7D,UAAM,UAAU,IAAI,OAAO,OAAO;AAClC,WAAO,SAAI,OAAO,GAAG,IAAI,GAAG,QAAQ;AAAA,EACtC,CAAC;AAGD,QAAM,WAAW,YAAY,OAAO;AACpC,QAAM,UAAU,KAAK,MAAM,WAAW,CAAC;AACvC,QAAM,WAAW,WAAW;AAC5B,QAAM,iBAAiB,IAAI,OAAO,OAAO,IAAI,SAAS,IAAI,OAAO,QAAQ;AAEzE,QAAM,eAAe,SAAI,IAAI,OAAO,OAAO,CAAC,GAAG,cAAc,GAAG,IAAI,OAAO,OAAO,CAAC;AAEnF,SAAO,CAAC,KAAK,GAAG,aAAa,WAAW,cAAc,MAAM,EAAE,KAAK,IAAI;AACzE;AAKO,SAAS,aAAmB;AACjC,QAAM,SAAS,OAAO,SAAS,QAAQ;AAAA,IACrC,MAAM;AAAA,IACN,kBAAkB;AAAA,IAClB,gBAAgB;AAAA,EAClB,CAAC;AAED,QAAM,SAAS,GAAG,YAAY,OAAO,OAAO;AAC5C,QAAM,cAAc,UAAU,OAAO,KAAK,GAAG,MAAM;AACnD,UAAQ,MAAM,MAAM,KAAK,WAAW,CAAC;AACrC,UAAQ,MAAM;AAChB;AAKO,SAAS,oBAA0B;AACxC,QAAM,SAAS,OAAO,SAAS,QAAQ;AAAA,IACrC,MAAM;AAAA,IACN,kBAAkB;AAAA,IAClB,gBAAgB;AAAA,EAClB,CAAC;AAED,QAAM,SAAS,GAAG,YAAY,OAAO,OAAO;AAC5C,QAAM,cAAc,UAAU,OAAO,KAAK,GAAG,MAAM;AACnD,UAAQ,IAAI,MAAM,KAAK,WAAW,CAAC;AACnC,UAAQ,IAAI;AACd;;;AD9EA;;;AERA,OAAOC,SAAQ;AACf,OAAOC,WAAU;;;ACuDV,IAAM,0BAA4C;AAAA,EACvD,UAAU;AAAA,EACV,UAAU;AAAA,IACR;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;ACxEA,OAAO,QAAQ;AACf,OAAO,UAAU;;;ACIjB,eAAsB,qBACpB,UACA,eAC0B;AAC1B,SAAO;AAAA,IACL,SAAS;AAAA;AAAA;AAAA,MAGP;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,SAAS;AAAA,MACP;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAGA;AAAA,MACA;AAAA;AAAA,MAGA;AAAA;AAAA,MAGA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;;;AD3CO,IAAM,iBAAoC;AAAA,EAC/C,MAAM;AAAA,EACN,UAAU;AAAA;AAAA,EAEV,MAAM,OAAO,SAAiB,cAAgD;AAC5E,UAAM,WAAW,KAAK,KAAK,SAAS,YAAY;AAChD,UAAM,SAA0B;AAAA,MAC9B,UAAU;AAAA,MACV,MAAM;AAAA,MACN,MAAM;AAAA,MACN,YAAY;AAA
A,MACZ,UAAU,CAAC;AAAA,IACb;AAGA,UAAM,kBAAkB,KAAK,KAAK,UAAU,cAAc;AAC1D,QAAIC,eAAmB;AAEvB,QAAI;AACF,YAAM,UAAU,MAAM,GAAG,SAAS,iBAAiB,OAAO;AAC1D,MAAAA,eAAc,KAAK,MAAM,OAAO;AAChC,aAAO,SAAS,KAAK,oBAAoB;AAAA,IAC3C,QAAQ;AAEN,aAAO;AAAA,IACT;AAGA,WAAO,WAAW;AAClB,WAAO,aAAa;AAGpB,QAAIA,aAAY,iBAAiB,cAAcA,aAAY,cAAc,YAAY;AACnF,aAAO,SAAS,KAAK,qBAAqB;AAAA,IAC5C;AAGA,UAAM,iBAAiB;AAAA,MACrB,EAAE,MAAM,QAAQ,SAAS,OAAO;AAAA,MAChC,EAAE,MAAM,UAAU,SAAS,SAAS;AAAA,MACpC,EAAE,MAAM,SAAS,SAAS,QAAQ;AAAA,MAClC,EAAE,MAAM,OAAO,SAAS,MAAM;AAAA,MAC9B,EAAE,MAAM,oBAAoB,SAAS,aAAa;AAAA,IACpD;AAEA,eAAW,aAAa,gBAAgB;AACtC,UACEA,aAAY,kBAAkB,UAAU,IAAI,KAC5CA,aAAY,eAAe,UAAU,IAAI,GACzC;AACA,eAAO,SAAS,KAAK,GAAG,UAAU,OAAO,0BAA0B;AACnE;AAAA,MACF;AAAA,IACF;AAGA,UAAM,aAAa;AAAA,MACjB,EAAE,MAAM,QAAQ,SAAS,UAAU;AAAA,MACnC,EAAE,MAAM,SAAS,SAAS,QAAQ;AAAA,MAClC,EAAE,MAAM,OAAO,SAAS,MAAM;AAAA,MAC9B,EAAE,MAAM,WAAW,SAAS,UAAU;AAAA,MACtC,EAAE,MAAM,gBAAgB,SAAS,SAAS;AAAA,IAC5C;AAEA,eAAW,MAAM,YAAY;AAC3B,UAAIA,aAAY,eAAe,GAAG,IAAI,GAAG;AACvC,eAAO,SAAS,KAAK,GAAG,GAAG,OAAO,WAAW;AAC7C;AAAA,MACF;AAAA,IACF;AAGA,QAAIA,aAAY,SAAS,MAAM;AAC7B,aAAO,UAAUA,aAAY,QAAQ;AAAA,IACvC;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,eAAe,SAAiB,cAAsB;AAC1D,WAAO,qBAAqB,SAAS,YAAY;AAAA,EACnD;AACF;;;AE1FA,OAAOC,SAAQ;AACf,OAAOC,WAAU;;;ACIjB,eAAsB,sBACpB,UACA,eAC0B;AAC1B,SAAO;AAAA,IACL,SAAS;AAAA;AAAA,MAEP;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAEA;AAAA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,SAAS;AAAA,MACP;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAGA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAGA;AAAA;AAAA,MAGA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;;;ADjDO,IAAM,kBAAqC;AAAA,EAChD,MAAM;AAAA,EACN,UAAU;AAAA;AAAA,EAEV,MAAM,OAAO,SAAiB,cAAgD;AAC5E,UAAM,WAAWC,MAAK,KAAK,SAAS,YAAY;AAChD,UAAM,SAA0B;AAAA,MAC9B,UAAU;AAAA,MACV,MAAM;AAAA,MACN,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,UAAU,CAAC;AAAA,IACb;AAGA,UAAM,mBAAmBA,MAAK,KAAK,UAAU,eAAe;AAC5D,QAAI,eAAoB;AAExB,QAAI;AACF,YAAM,UAAU,MAAMC,IAAG,SAAS,kBAAkB,OAAO;AAC3D,qBAAe,KAAK,MAAM,OAAO;AACjC,aAAO,SAAS,KAAK,qBAAqB;AAAA,IAC5C,QAAQ;AAEN,aAAO;AAAA,IACT;AAGA,UAAM,aACJ,aAAa,UAAU,mBAAmB,KAC1C,aAAa,aAAa,IAAI,mBAAmB;AAEnD,QAAI,CAAC,YAAY;AAEf,aAAO;AAAA,IACT;AAEA,WAAO,SAAS,KAAK,6CAA6C;AAGlE,UAAM,cAAcD,MAAK,KAAK,UAAU,SAAS;AACjD,QAAI;AACF,YAAMC,IAAG,OAAO,WAAW;AAC3B,aAAO,SAAS,KAAK,oBAAoB;AACzC,aAAO,aAAa;AAAA,IACtB,QAAQ;AACN,aAAO,aAAa;AAAA,IACtB;AAGA,UAAM,cAAc,CAAC,OAAO,UAAU,UAAU,UAAU;AAC1D,QAAI,YAAY;AAEhB,eAAW,OAAO,aAAa;AAC7B,UAAI;AACF,cAAM,UAAUD,MAAK,KAAK,UAAU,GAAG;AACvC,cAAM,QAAQ,MAAMC,IAAG,KAAK,OAAO;AACnC,YAAI,MAAM,YAAY,GAAG;AACvB;AAAA,QACF;AAAA,MACF,QAAQ;AAAA,MAER;AAAA,IACF;AAEA,QAAI,aAAa,GAAG;AAClB,aAAO,SAAS,KAAK,yCAAyC,SAAS,IAAI,YAAY,MAAM,QAAQ;AACrG,aAAO,aAAa;AAAA,IACtB;AAGA,UAAM,kBAAkB;AAAA,MACtBD,MAAK,KAAK,UAAU,SAAS,SAAS;AAAA,MACtCA,MAAK,KAAK,UAAU,SAAS,MAAM;AAAA,IACrC;AAEA,eAAW,WAAW,iBAAiB;AACrC,UAAI;AACF,cAAM,QAAQ,MAAMC,IAAG,KAAK,OAAO;AACnC,YAAI,MAAM,YAAY,GAAG;AACvB,gBAAM,UAAUD,MAAK,SAASA,MAAK,QAAQ,OAAO,CAAC,IAAI,MAAMA,MAAK,SAAS,OAAO;AAClF,iBAAO,SAAS,KAAK,SAAS,OAAO,iBAAiB;AAAA,QACxD;AAAA,MACF,QAAQ;AAAA,MAER;AAAA,IACF;AAGA,QAAI,aAAa,UAAU,mBAAmB,GAAG;AAC/C,aAAO,UAAU,aAAa,QAAQ,mBAAmB;AAAA,IAC3D;AAEA,WAAO,WAAW;AAClB,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,eAAe,SAAiB,cAAsB;AAC1D,WAAO,sBAAsB,SAAS,YAAY;AAAA,EACpD;AACF;;;AE5GA,OAAOE,SAAQ;AACf,OAAOC,WAAU;;;ACIjB,eAAsB,sBACpB,UACA,eAC0B;AAC1B,SAAO;AAAA,IACL,SAAS;AAAA;AAAA,MAEP;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA;AAAA,MAGA;AAAA;AAAA,MAGA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAGA;AAAA;AAAA,
MAGA;AAAA;AAAA,MAGA;AAAA,MACA;AAAA;AAAA,MAGA;AAAA,MACA;AAAA,IACF;AAAA,IACA,SAAS;AAAA,MACP;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAGA;AAAA,MACA;AAAA;AAAA,MAGA;AAAA;AAAA,MAGA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;;;ADrDO,IAAM,kBAAqC;AAAA,EAChD,MAAM;AAAA,EACN,UAAU;AAAA;AAAA,EAEV,MAAM,OAAO,SAAiB,cAAgD;AAC5E,UAAM,WAAWC,MAAK,KAAK,SAAS,YAAY;AAChD,UAAM,SAA0B;AAAA,MAC9B,UAAU;AAAA,MACV,MAAM;AAAA,MACN,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,UAAU,CAAC;AAAA,IACb;AAGA,UAAM,qBAAqBA,MAAK,KAAK,UAAU,UAAU,sBAAsB;AAC/E,QAAI,oBAAoB;AAExB,QAAI;AACF,YAAMC,IAAG,OAAO,kBAAkB;AAClC,0BAAoB;AACpB,aAAO,SAAS,KAAK,mCAAmC;AAAA,IAC1D,QAAQ;AAAA,IAER;AAGA,UAAM,kBAAkBD,MAAK,KAAK,UAAU,UAAU,cAAc;AACpE,QAAI,iBAAiB;AAErB,QAAI;AACF,YAAMC,IAAG,OAAO,eAAe;AAC/B,uBAAiB;AACjB,aAAO,SAAS,KAAK,2BAA2B;AAAA,IAClD,QAAQ;AAAA,IAER;AAGA,UAAM,cAAc,CAAC,YAAY,YAAY,aAAa,SAAS;AACnE,QAAI,YAAY;AAEhB,eAAW,OAAO,aAAa;AAC7B,UAAI;AACF,cAAM,UAAUD,MAAK,KAAK,UAAU,GAAG;AACvC,cAAM,QAAQ,MAAMC,IAAG,KAAK,OAAO;AACnC,YAAI,MAAM,YAAY,GAAG;AACvB;AAAA,QACF;AAAA,MACF,QAAQ;AAAA,MAER;AAAA,IACF;AAEA,QAAI,aAAa,GAAG;AAClB,aAAO,SAAS,KAAK,yCAAyC,SAAS,IAAI,YAAY,MAAM,QAAQ;AAAA,IACvG;AAGA,QAAI;AACF,YAAM,WAAWD,MAAK,KAAK,UAAU,oBAAoB;AACzD,YAAMC,IAAG,OAAO,QAAQ;AACxB,aAAO,SAAS,KAAK,0BAA0B;AAAA,IACjD,QAAQ;AAAA,IAER;AAGA,QAAI;AACF,YAAM,aAAaD,MAAK,KAAK,UAAU,gBAAgB;AACvD,YAAMC,IAAG,OAAO,UAAU;AAC1B,aAAO,SAAS,KAAK,sBAAsB;AAAA,IAC7C,QAAQ;AAAA,IAER;AAIA,QAAI,qBAAqB,aAAa,GAAG;AACvC,aAAO,WAAW;AAClB,aAAO,aAAa;AACpB,aAAO;AAAA,IACT;AAGA,QAAI,qBAAsB,kBAAkB,aAAa,GAAI;AAC3D,aAAO,WAAW;AAClB,aAAO,aAAa;AACpB,aAAO;AAAA,IACT;AAGA,QAAI,aAAa,GAAG;AAClB,aAAO,WAAW;AAClB,aAAO,aAAa;AACpB,aAAO;AAAA,IACT;AAGA,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,eAAe,SAAiB,cAAsB;AAC1D,WAAO,sBAAsB,SAAS,YAAY;AAAA,EACpD;AACF;;;AExGO,IAAM,qBAA0C;AAAA,EACrD;AAAA,EACA;AAAA,EACA;AACF;AAmBO,SAAS,qBAAqB,MAA6C;AAChF,SAAO,mBAAmB,KAAK,OAAK,EAAE,SAAS,IAAI;AACrD;;;ARvBA,eAAsB,oBACpB,SACA,UAAqC,CAAC,GACV;AAC5B,QAAM,OAAO,EAAE,GAAG,yBAAyB,GAAG,QAAQ;AACtD,QAAM,UAA6B,CAAC;AACpC,QAAM,UAAU,oBAAI,IAAY;AAGhC,QAAM,aAAa,SAAS,KAAK,SAAS,OAAO;AAGjD,QAAM,mBAAmB,SAAS,KAAK,SAAS,SAAS,GAAG,IAAI;AAEhE,SAAO;AACT;AAKA,eAAe,aACb,SACA,cACA,SACA,SACe;AAEf,QAAM,WAAWC,MAAK,KAAK,SAAS,YAAY;AAChD,MAAI,QAAQ,IAAI,QAAQ,GAAG;AACzB;AAAA,EACF;AACA,UAAQ,IAAI,QAAQ;AAGpB,QAAM,iBAAgE,CAAC;AAEvE,aAAW,YAAY,oBAAoB;AACzC,QAAI;AACF,YAAM,SAAS,MAAM,SAAS,OAAO,SAAS,YAAY;AAC1D,UAAI,OAAO,UAAU;AACnB,uBAAe,KAAK;AAAA,UAClB,GAAG;AAAA,UACH,UAAU,SAAS,YAAY;AAAA,QACjC,CAAC;AAAA,MACH;AAAA,IACF,SAAS,OAAO;AAEd,cAAQ,MAAM,2BAA2B,SAAS,IAAI,QAAQ,YAAY,KAAK,KAAK;AAAA,IACtF;AAAA,EACF;AAIA,MAAI,eAAe,SAAS,GAAG;AAE7B,UAAM,iBAAiB,eAAe,OAAO,OAAK,EAAE,eAAe,MAAM;AACzE,UAAM,mBAAmB,eAAe,OAAO,OAAK,EAAE,eAAe,QAAQ;AAC7E,UAAM,gBAAgB,eAAe,OAAO,OAAK,EAAE,eAAe,KAAK;AAEvE,QAAI,eAAe,SAAS,GAAG;AAG7B,YAAM,eAAe,eAAe,IAAI,CAAC,EAAE,UAAU,GAAG,OAAO,MAAM,MAAM;AAC3E,cAAQ,KAAK,GAAG,YAAY;AAC5B,YAAM,QAAQ,eAAe,IAAI,OAAK,EAAE,IAAI,EAAE,KAAK,KAAK;AACxD,cAAQ,IAAI,qCAAgC,KAAK,EAAE;AAGnD,UAAI,iBAAiB,SAAS,KAAK,cAAc,SAAS,GAAG;AAC3D,cAAM,eAAe,CAAC,GAAG,kBAAkB,GAAG,aAAa,EAAE,IAAI,OAAK,EAAE,IAAI,EAAE,KAAK,IAAI;AACvF,gBAAQ,IAAI,kDAA6C,YAAY,EAAE;AAAA,MACzE;AAAA,IACF,WAAW,eAAe,WAAW,GAAG;AAEtC,YAAM,EAAE,UAAU,GAAG,OAAO,IAAI,eAAe,CAAC;AAChD,cAAQ,KAAK,MAAM;AAGnB,UAAI,iBAAiB,SAAS,KAAK,cAAc,SAAS,GAAG;AAC3D,cAAM,eAAe,CAAC,GAAG,kBAAkB,GAAG,aAAa,EAAE,IAAI,OAAK,EAAE,IAAI,EAAE,KAAK,IAAI;AACvF,gBAAQ,IAAI,kDAA6C,YAAY,EAAE;AAAA,MACzE;AAAA,IACF,WAAW,iBAAiB,SAAS,GAAG;AAEtC,uBAAiB,KAAK,CAAC,GAAG,MAAM,EAAE,WAAW,EAAE,QAAQ;AACvD,YAAM,EAAE,UAAU,GAAG,OAAO,IAAI,iBAAiB,CAAC;AAClD,cAAQ,KAAK,MAAM;AAGnB,YAAM,UAAU,CAAC,GAAG,iBAAiB,MAAM,CAAC,GAAG,GAAG,aAAa;AAC/D,UAAI,QAAQ,SAAS,GAAG;AACtB,cA
AM,eAAe,QAAQ,IAAI,OAAK,EAAE,IAAI,EAAE,KAAK,IAAI;AACvD,gBAAQ,IAAI,qBAAgB,YAAY,OAAO,YAAY,KAAK,OAAO,IAAI,oBAAoB;AAAA,MACjG;AAAA,IACF,WAAW,cAAc,SAAS,GAAG;AAEnC,oBAAc,KAAK,CAAC,GAAG,MAAM,EAAE,WAAW,EAAE,QAAQ;AACpD,YAAM,EAAE,UAAU,GAAG,OAAO,IAAI,cAAc,CAAC;AACjD,cAAQ,KAAK,MAAM;AAGjB,YAAM,UAAU,cAAc,MAAM,CAAC;AACvC,UAAI,QAAQ,SAAS,GAAG;AACtB,cAAM,eAAe,QAAQ,IAAI,OAAK,EAAE,IAAI,EAAE,KAAK,IAAI;AACvD,gBAAQ,IAAI,qBAAgB,YAAY,OAAO,YAAY,KAAK,OAAO,IAAI,oBAAoB;AAAA,MAC/F;AAAA,IACF;AAAA,EACF,WAAW,eAAe,WAAW,GAAG;AACtC,UAAM,EAAE,UAAU,GAAG,OAAO,IAAI,eAAe,CAAC;AAChD,YAAQ,KAAK,MAAM;AAAA,EACrB;AACF;AAKA,eAAe,mBACb,SACA,cACA,SACA,SACA,OACA,SACe;AAEf,MAAI,SAAS,QAAQ,UAAU;AAC7B;AAAA,EACF;AAEA,QAAM,WAAWA,MAAK,KAAK,SAAS,YAAY;AAEhD,MAAI;AACF,UAAM,UAAU,MAAMC,IAAG,QAAQ,UAAU,EAAE,eAAe,KAAK,CAAC;AAGlE,UAAM,OAAO,QAAQ,OAAO,OAAK,EAAE,YAAY,CAAC;AAEhD,eAAW,OAAO,MAAM;AAEtB,UAAI,QAAQ,SAAS,SAAS,IAAI,IAAI,GAAG;AACvC;AAAA,MACF;AAGA,UAAI,IAAI,KAAK,WAAW,GAAG,GAAG;AAC5B;AAAA,MACF;AAEA,YAAM,UAAU,iBAAiB,MAC7B,IAAI,OACJD,MAAK,KAAK,cAAc,IAAI,IAAI;AAGpC,YAAM,aAAa,SAAS,SAAS,SAAS,OAAO;AAGrD,YAAM,mBAAmB,SAAS,SAAS,SAAS,SAAS,QAAQ,GAAG,OAAO;AAAA,IACjF;AAAA,EACF,SAAS,OAAO;AAEd;AAAA,EACF;AACF;;;AFhKA,IAAME,cAAaC,eAAc,YAAY,GAAG;AAChD,IAAMC,aAAYC,MAAK,QAAQH,WAAU;AAQzC,eAAsB,YAAY,UAAuB,CAAC,GAAG;AAC3D,QAAM,UAAU,QAAQ,QAAQ,QAAQ,IAAI;AAC5C,QAAM,aAAaG,MAAK,KAAK,SAAS,mBAAmB;AAEzD,MAAI;AAEF,QAAI,eAAe;AACnB,QAAI;AACF,YAAMC,IAAG,OAAO,UAAU;AAC1B,qBAAe;AAAA,IACjB,QAAQ;AAAA,IAER;AAGA,QAAI,gBAAgB,QAAQ,SAAS;AACnC,YAAM,cAAc,UAAU;AAC9B;AAAA,IACF;AAGA,QAAI,gBAAgB,CAAC,QAAQ,SAAS;AACpC,cAAQ,IAAIC,OAAM,OAAO,gDAAsC,CAAC;AAChE,cAAQ,IAAIA,OAAM,IAAI,KAAK,GAAGA,OAAM,KAAK,qBAAqB,GAAGA,OAAM,IAAI,6BAA6B,CAAC;AACzG;AAAA,IACF;AAGA,QAAI,CAAC,cAAc;AACjB,YAAM,gBAAgB,SAAS,OAAO;AAAA,IACxC;AAAA,EACF,SAAS,OAAO;AACd,YAAQ,MAAMA,OAAM,IAAI,6BAA6B,GAAG,KAAK;AAC7D,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;AAEA,eAAe,gBAAgB,SAAiB,SAAsB;AAEpE,oBAAkB;AAClB,UAAQ,IAAIA,OAAM,KAAK,wBAAwB,CAAC;AAGhD,UAAQ,IAAIA,OAAM,IAAI,mCAA4B,GAAGA,OAAM,KAAK,OAAO,CAAC;AACxE,QAAM,aAAa,MAAM,oBAAoB,OAAO;AAEpD,MAAI,aAAkC,CAAC;AAEvC,MAAI,WAAW,WAAW,GAAG;AAC3B,YAAQ,IAAIA,OAAM,OAAO,wCAA8B,CAAC;AAExD,QAAI,CAAC,QAAQ,KAAK;AAChB,YAAM,EAAE,WAAW,IAAI,MAAM,SAAS,OAAO;AAAA,QAC3C;AAAA,UACE,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX;AAAA,MACF,CAAC;AAED,UAAI,CAAC,YAAY;AACf,gBAAQ,IAAIA,OAAM,IAAI,UAAU,CAAC;AACjC;AAAA,MACF;AAAA,IACF;AAGA,eAAW,KAAK;AAAA,MACd,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,QAAQ;AAAA,QACN,SAAS,CAAC,6CAA6C;AAAA,QACvD,SAAS;AAAA,UACP;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH,OAAO;AAEL,YAAQ,IAAIA,OAAM,MAAM;AAAA,eAAa,WAAW,MAAM;AAAA,CAAkB,CAAC;AAEzE,eAAW,OAAO,YAAY;AAC5B,YAAM,cAAc,IAAI,SAAS,MAAM,SAAS,IAAI;AACpD,cAAQ,IAAIA,OAAM,KAAK,KAAK,IAAI,IAAI,EAAE,GAAGA,OAAM,IAAI,IAAI,IAAI,UAAU,cAAc,CAAC;AACpF,cAAQ,IAAIA,OAAM,IAAI,iBAAiB,WAAW,EAAE,CAAC;AAErD,UAAI,IAAI,SAAS,SAAS,GAAG;AAC3B,YAAI,SAAS,QAAQ,CAAC,MAAM;AAC1B,kBAAQ,IAAIA,OAAM,IAAI,cAAS,CAAC,EAAE,CAAC;AAAA,QACrC,CAAC;AAAA,MACH;AACA,cAAQ,IAAI;AAAA,IACd;AAGA,QAAI,CAAC,QAAQ,KAAK;AAChB,YAAM,EAAE,QAAQ,IAAI,MAAM,SAAS,OAAO;AAAA,QACxC;AAAA,UACE,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX;AAAA,MACF,CAAC;AAED,UAAI,CAAC,SAAS;AACZ,gBAAQ,IAAIA,OAAM,IAAI,UAAU,CAAC;AACjC;AAAA,MACF;AAAA,IACF;AAGA,eAAW,OAAO,YAAY;AAC5B,YAAM,WAAW,qBAAqB,IAAI,IAAI;AAC9C,UAAI,CAAC,UAAU;AACb,gBAAQ,KAAKA,OAAM,OAAO,uCAA6B,IAAI,IAAI,YAAY,CAAC;AAC5E;AAAA,MACF;AAGA,YAAM,kBAAkB,MAAM,SAAS,eAAe,SAAS,IAAI,IAAI;AAGvE,UAAI,kBAAkB;AACtB,UAAI,CAAC,QAAQ,KAAK;AAChB,cAAM,EAAE,UAAU,IAAI,MAAM,SAAS,OAAO;AAAA,UAC1C;AAAA,YACE,MAAM;AAAA,YACN,MAAM
;AAAA,YACN,SAAS,aAAa,IAAI,IAAI;AAAA,YAC9B,SAAS;AAAA,UACX;AAAA,QACF,CAAC;AACD,0BAAkB;AAAA,MACpB;AAEA,UAAI,cAAc;AAClB,UAAI,iBAAiB;AACnB,cAAM,aAAa,MAAM,uBAAuB,IAAI,MAAM,eAAe;AACzE,sBAAc,EAAE,GAAG,iBAAiB,GAAG,WAAW;AAAA,MACpD,OAAO;AACL,cAAM,cAAc,IAAI,SAAS,MAAM,SAAS,IAAI;AACpD,gBAAQ,IAAIA,OAAM,IAAI,+BAA0B,IAAI,IAAI,OAAO,WAAW,EAAE,CAAC;AAAA,MAC/E;AAEA,iBAAW,KAAK;AAAA,QACd,MAAM,IAAI;AAAA,QACV,MAAM,IAAI;AAAA,QACV,SAAS;AAAA,QACT,QAAQ;AAAA,MACV,CAAC;AAAA,IACH;AAAA,EACF;AAGA,MAAI,CAAC,QAAQ,KAAK;AAChB,UAAM,EAAE,mBAAmB,IAAI,MAAM,SAAS,OAAO;AAAA,MACnD;AAAA,QACE,MAAM;AAAA,QACN,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX;AAAA,IACF,CAAC;AAED,QAAI,oBAAoB;AACtB,UAAI;AACF,cAAM,iBAAiBF,MAAK,KAAK,SAAS,SAAS;AACnD,cAAMC,IAAG,MAAM,gBAAgB,EAAE,WAAW,KAAK,CAAC;AAKlD,cAAM,eAAeD,MAAK,KAAKD,YAAW,6BAA6B;AAEvE,cAAM,YAAYC,MAAK,KAAK,gBAAgB,OAAO;AACnD,YAAI;AACJ,YAAI,cAAc;AAClB,YAAI,SAAS;AAEb,YAAI;AACF,gBAAM,QAAQ,MAAMC,IAAG,KAAK,SAAS;AACrC,wBAAc,MAAM,YAAY;AAChC,mBAAS,MAAM,OAAO;AAAA,QACxB,QAAQ;AAAA,QAER;AAEA,YAAI,aAAa;AAEf,uBAAaD,MAAK,KAAK,WAAW,UAAU;AAC5C,gBAAMC,IAAG,SAAS,cAAc,UAAU;AAC1C,kBAAQ,IAAIC,OAAM,MAAM,yDAAoD,CAAC;AAAA,QAC/E,WAAW,QAAQ;AAEjB,gBAAM,EAAE,aAAa,IAAI,MAAM,SAAS,OAAO;AAAA,YAC7C;AAAA,cACE,MAAM;AAAA,cACN,MAAM;AAAA,cACN,SAAS;AAAA,cACT,SAAS;AAAA,YACX;AAAA,UACF,CAAC;AAED,cAAI,cAAc;AAGhB,kBAAM,gBAAgB,MAAMD,IAAG,SAAS,WAAW,OAAO;AAE1D,kBAAMA,IAAG,OAAO,SAAS;AAEzB,kBAAMA,IAAG,MAAM,SAAS;AAExB,kBAAMA,IAAG,UAAUD,MAAK,KAAK,WAAW,aAAa,GAAG,aAAa;AAErE,kBAAMC,IAAG,SAAS,cAAcD,MAAK,KAAK,WAAW,UAAU,CAAC;AAChE,oBAAQ,IAAIE,OAAM,MAAM,6CAAwC,CAAC;AACjE,oBAAQ,IAAIA,OAAM,MAAM,mDAAmD,CAAC;AAC5E,oBAAQ,IAAIA,OAAM,MAAM,wCAAwC,CAAC;AAAA,UACnE,OAAO;AACL,oBAAQ,IAAIA,OAAM,IAAI,8DAA8D,CAAC;AAAA,UACvF;AAAA,QACF,OAAO;AAEL,gBAAMD,IAAG,MAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAC7C,uBAAaD,MAAK,KAAK,WAAW,UAAU;AAC5C,gBAAMC,IAAG,SAAS,cAAc,UAAU;AAC1C,kBAAQ,IAAIC,OAAM,MAAM,yDAAoD,CAAC;AAAA,QAC/E;AAAA,MACF,SAAS,OAAO;AACd,gBAAQ,IAAIA,OAAM,OAAO,8CAAoC,CAAC;AAC9D,gBAAQ,IAAIA,OAAM,IAAI,UAAU,iBAAiB,QAAQ,MAAM,UAAU,eAAe,EAAE,CAAC;AAC3F,gBAAQ,IAAIA,OAAM,IAAI,0EAA0E,CAAC;AAAA,MACnG;AAAA,IACF;AAAA,EACF;AAGA,QAAM,SAAqB;AAAA,IACzB,GAAG;AAAA,IACH;AAAA,EACF;AAGA,QAAM,aAAaF,MAAK,KAAK,SAAS,mBAAmB;AACzD,QAAMC,IAAG,UAAU,YAAY,KAAK,UAAU,QAAQ,MAAM,CAAC,IAAI,MAAM,OAAO;AAG9E,UAAQ,IAAIC,OAAM,MAAM,oCAA+B,CAAC;AACxD,UAAQ,IAAIA,OAAM,MAAM,qBAAgB,WAAW,MAAM,eAAe,CAAC;AACzE,UAAQ,IAAIA,OAAM,IAAI,eAAe,CAAC;AACtC,UAAQ,IAAIA,OAAM,IAAI,UAAU,GAAGA,OAAM,KAAK,YAAY,GAAGA,OAAM,IAAI,wBAAwB,CAAC;AAChG,UAAQ,IAAIA,OAAM,IAAI,UAAU,GAAGA,OAAM,KAAK,YAAY,GAAGA,OAAM,IAAI,yBAAyB,CAAC;AACjG,UAAQ,IAAIA,OAAM,IAAI,6DAA6D,CAAC;AACtF;AAEA,eAAe,uBAAuB,eAAuB,QAA4D;AACvH,UAAQ,IAAIA,OAAM,KAAK;AAAA,cAAiB,aAAa,YAAY,CAAC;AAElE,QAAM,UAAU,MAAM,SAAS,OAAO;AAAA,IACpC;AAAA,MACE,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,SAAS,OAAO,QAAQ,KAAK,IAAI;AAAA,MACjC,QAAQ,CAAC,UAAkB,MAAM,MAAM,GAAG,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC;AAAA,IAC/D;AAAA,IACA;AAAA,MACE,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,SAAS,OAAO,QAAQ,KAAK,IAAI;AAAA,MACjC,QAAQ,CAAC,UAAkB,MAAM,MAAM,GAAG,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC;AAAA,IAC/D;AAAA,EACF,CAAC;AAED,SAAO;AAAA,IACL,SAAS,QAAQ;AAAA,IACjB,SAAS,QAAQ;AAAA,EACnB;AACF;AAEA,eAAe,cAAc,YAAoB;AAC/C,MAAI;AAEF,UAAM,aAAa,GAAG,UAAU;AAChC,UAAMD,IAAG,SAAS,YAAY,UAAU;AAGxC,UAAM,kBAAkB,MAAMA,IAAG,SAAS,YAAY,OAAO;AAC7D,UAAM,iBAAiB,KAAK,MAAM,eAAe;AAEjD,QAAI;AACJ,QAAI,WAAW;AAGf,QAAI,eAAe,cAAc,GAAG;AAClC,cAAQ,IAAIC,OAAM,KAAK,qDAA8C,CAAC;AACtE,uBAAiB,cAAc,cAAc;AAC7C,iBAAW;AAAA,IACb,OAAO;AAEL,YAAM,YAAY,gBAAgB,gBAAgB,aAAa;AAC/D,uBAAiB,gBAAgB,eAAe,cAAqC;AAErF,UAAI,UAAU,SAAS,GAAG;AACxB,gBAAQ,IAAIA,OAAM,IAAI,sBAAsB,CAAC;AAC7C,kBAAU,QAAQ,WAAS,QAAQ,IAAIA,O
AAM,IAAI,UAAK,GAAGA,OAAM,KAAK,KAAK,CAAC,CAAC;AAAA,MAC7E;AAAA,IACF;AAGA,UAAMD,IAAG;AAAA,MACP;AAAA,MACA,KAAK,UAAU,gBAAgB,MAAM,CAAC,IAAI;AAAA,MAC1C;AAAA,IACF;AAGA,YAAQ,IAAIC,OAAM,MAAM,qCAAgC,CAAC;AACzD,YAAQ,IAAIA,OAAM,IAAI,kBAAkB,GAAG,UAAU;AAErD,QAAI,UAAU;AACZ,cAAQ,IAAIA,OAAM,IAAI,iEAA0D,CAAC;AAAA,IACnF;AAAA,EACF,SAAS,OAAO;AACd,YAAQ,MAAMA,OAAM,IAAI,yBAAyB,GAAG,KAAK;AACzD,UAAM;AAAA,EACR;AACF;;;AWnWA;AACA;AACA;AAPA,OAAOC,YAAW;AAClB,OAAOC,SAAQ;AACf,OAAOC,WAAU;AACjB,OAAO,QAAQ;AACf,OAAO,YAAY;AAKnB;AAEA,eAAsB,gBAAgB;AACpC,QAAM,UAAU,QAAQ,IAAI;AAC5B,QAAM,cAAcC,MAAK,SAAS,OAAO;AAGzC,QAAM,WAAW,OACd,WAAW,KAAK,EAChB,OAAO,OAAO,EACd,OAAO,KAAK,EACZ,UAAU,GAAG,CAAC;AAEjB,QAAM,YAAYA,MAAK,KAAK,GAAG,QAAQ,GAAG,SAAS,WAAW,GAAG,WAAW,IAAI,QAAQ,EAAE;AAE1F,oBAAkB;AAClB,UAAQ,IAAIC,OAAM,KAAK,UAAU,CAAC;AAGlC,QAAM,YAAY,MAAM,cAAc,OAAO,OAAO;AACpD,UAAQ,IAAIA,OAAM,IAAI,gBAAgB,GAAG,YAAYA,OAAM,MAAM,cAAS,IAAIA,OAAM,IAAI,wBAAmB,CAAC;AAE5G,MAAI,CAAC,WAAW;AACd,YAAQ,IAAIA,OAAM,OAAO,OAAO,GAAGA,OAAM,KAAK,WAAW,GAAGA,OAAM,OAAO,eAAe,CAAC;AACzF;AAAA,EACF;AAGA,MAAI;AACF,UAAM,QAAQ,MAAMC,IAAG,KAAK,SAAS;AACrC,YAAQ,IAAID,OAAM,IAAI,iBAAiB,GAAG,SAAS;AACnD,YAAQ,IAAIA,OAAM,IAAI,eAAe,GAAGA,OAAM,MAAM,eAAU,CAAC;AAG/D,QAAI;AACF,YAAM,QAAQ,MAAMC,IAAG,QAAQ,WAAW,EAAE,WAAW,KAAK,CAAC;AAC7D,cAAQ,IAAID,OAAM,IAAI,cAAc,GAAG,MAAM,MAAM;AAAA,IACrD,SAAS,GAAG;AAAA,IAEZ;AAEA,YAAQ,IAAIA,OAAM,IAAI,gBAAgB,GAAG,MAAM,MAAM,eAAe,CAAC;AAGrE,QAAI;AACF,YAAM,UAAU,MAAM,gBAAgB,SAAS;AAC/C,UAAI,UAAU,GAAG;AACf,cAAM,cAAc,IAAI,KAAK,OAAO;AACpC,gBAAQ,IAAIA,OAAM,IAAI,eAAe,GAAG,YAAY,eAAe,CAAC;AAAA,MACtE;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF,SAAS,OAAO;AACd,YAAQ,IAAIA,OAAM,IAAI,eAAe,GAAGA,OAAM,OAAO,oBAAe,CAAC;AACrE,YAAQ,IAAIA,OAAM,OAAO,OAAO,GAAGA,OAAM,KAAK,YAAY,GAAGA,OAAM,OAAO,wBAAwB,CAAC;AAAA,EACrG;AAGA,MAAI;AACF,UAAM,SAAS,MAAM,cAAc,KAAK,OAAO;AAE/C,YAAQ,IAAIA,OAAM,KAAK,aAAa,CAAC;AAGrC,UAAM,SAAS,MAAM,UAAU,OAAO;AACtC,QAAI,OAAO,aAAa,WAAW,QAAQ;AACzC,cAAQ,IAAIA,OAAM,IAAI,gBAAgB,GAAGA,OAAM,MAAM,gBAAW,CAAC;AACjE,cAAQ,IAAIA,OAAM,IAAI,kBAAkB,GAAG,GAAG,OAAO,aAAa,iBAAiB,GAAI,GAAG;AAG1F,UAAI;AACF,cAAM,SAAS,MAAM,iBAAiB,OAAO;AAC7C,cAAM,SAAS,MAAM,iBAAiB,OAAO;AAC7C,gBAAQ,IAAIA,OAAM,IAAI,mBAAmB,GAAG,MAAM;AAClD,gBAAQ,IAAIA,OAAM,IAAI,mBAAmB,GAAG,OAAO,UAAU,GAAG,CAAC,CAAC;AAGlE,cAAM,eAAeD,MAAK,KAAK,WAAW,iBAAiB;AAC3D,YAAI;AACF,gBAAM,kBAAkB,MAAME,IAAG,SAAS,cAAc,OAAO;AAC/D,gBAAM,WAAW,KAAK,MAAM,eAAe;AAC3C,cAAI,SAAS,WAAW,UAAU,SAAS,WAAW,QAAQ;AAC5D,oBAAQ,IAAID,OAAM,OAAO,gEAAsD,CAAC;AAAA,UAClF;AAAA,QACF,QAAQ;AAAA,QAER;AAAA,MACF,QAAQ;AAAA,MAER;AAAA,IACF,WAAW,OAAO,aAAa,WAAW,CAAC,QAAQ;AACjD,cAAQ,IAAIA,OAAM,IAAI,gBAAgB,GAAGA,OAAM,OAAO,0BAA0B,CAAC;AAAA,IACnF,OAAO;AACL,cAAQ,IAAIA,OAAM,IAAI,gBAAgB,GAAGA,OAAM,KAAK,UAAU,CAAC;AAAA,IACjE;AAGA,QAAI,OAAO,aAAa,SAAS;AAC/B,cAAQ,IAAIA,OAAM,IAAI,gBAAgB,GAAGA,OAAM,MAAM,gBAAW,CAAC;AACjE,cAAQ,IAAIA,OAAM,IAAI,aAAa,GAAG,GAAG,OAAO,aAAa,UAAU,IAAI;AAAA,IAC7E,OAAO;AACL,cAAQ,IAAIA,OAAM,IAAI,gBAAgB,GAAGA,OAAM,KAAK,UAAU,CAAC;AAC/D,cAAQ,IAAIA,OAAM,IAAI,gBAAgB,GAAGA,OAAM,KAAK,oBAAoB,CAAC;AAAA,IAC3E;AAGA,YAAQ,IAAIA,OAAM,KAAK,sBAAsB,CAAC;AAC9C,QAAI,eAAe,MAAM,GAAG;AAC1B,cAAQ,IAAIA,OAAM,IAAI,cAAc,GAAG,OAAO,KAAK,WAAW;AAC9D,cAAQ,IAAIA,OAAM,IAAI,aAAa,GAAG,OAAO,KAAK,kBAAkB;AACpE,cAAQ,IAAIA,OAAM,IAAI,aAAa,GAAG,OAAO,KAAK,SAAS;AAC3D,cAAQ,IAAIA,OAAM,IAAI,gBAAgB,GAAG,OAAO,KAAK,YAAY;AAAA,IACnE;AAAA,EAEF,SAAS,OAAO;AACd,YAAQ,IAAIA,OAAM,OAAO,yCAAyC,CAAC;AAAA,EACrE;AACF;;;AC9HA;AADA,OAAOE,YAAW;AAIlB,eAAsB,aAAa,SAAkE;AACnG,oBAAkB;AAElB,MAAI;AAEF,QAAI,QAAQ,OAAO;AACjB,YAAM,EAAE,UAAAC,UAAS,IAAI,MAAM;AAC3B,YAAM,EAAE,iBAAAC,iBAAgB,IAAI,MAAM;AAElC,cAAQ,IAAIC,OAAM,OAAO,yCAAyC,CAAC;AACnE,YAAM,WAAW,IAAIF,UAAS,QAAQ,IAAI,CAAC;AAC3C,YA
AM,SAAS,WAAW;AAC1B,YAAM,SAAS,MAAM;AAGrB,YAAM,WAAW,IAAIC,iBAAgB,SAAS,MAAM;AACpD,YAAM,SAAS,MAAM;AAErB,cAAQ,IAAIC,OAAM,MAAM,qCAAgC,CAAC;AAAA,IAC3D;AAEA,UAAM,cAAc;AAAA,MAClB,SAAS,QAAQ,IAAI;AAAA,MACrB,SAAS,QAAQ,WAAW;AAAA,MAC5B,OAAO,QAAQ,SAAS;AAAA,IAC1B,CAAC;AAED,QAAI,QAAQ,OAAO;AACjB,cAAQ,IAAIA,OAAM,OAAO,gDAAsC,CAAC;AAAA,IAElE;AAAA,EACF,SAAS,OAAO;AACd,YAAQ,MAAMA,OAAM,IAAI,wBAAwB,GAAG,KAAK;AACxD,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;;;ACvCA,OAAOC,YAAW;AAClB,OAAOC,UAAQ;AACf,OAAOC,YAAU;;;ACFjB,SAAS,cAAc;AACvB,SAAS,4BAA4B;AACrC;AAAA,EACE;AAAA,EACA;AAAA,OACK;AACP,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,WAAAC,UAAS,QAAAC,aAAY;;;ACP9B,SAAS,uBAAuB;AA2BzB,SAAS,gBACd,WACA,MACA,aACA;AACA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,aAAa,gBAAgB,WAAW;AAAA,MACtC,QAAQ;AAAA,MACR,cAAc;AAAA,IAChB,CAAC;AAAA,EACH;AACF;;;ACzCA,SAAS,SAAS;AAQX,IAAM,uBAAuB,EAAE,OAAO;AAAA,EAC3C,OAAO,EAAE,OAAO,EACb,IAAI,GAAG,qCAAqC,EAC5C,IAAI,KAAK,qCAAqC,EAC9C;AAAA,IACC;AAAA,EASF;AAAA,EAEF,OAAO,EAAE,OAAO,EACb,IAAI,EACJ,IAAI,GAAG,0BAA0B,EACjC,IAAI,IAAI,wBAAwB,EAChC,QAAQ,CAAC,EACT;AAAA,IACC;AAAA,EAGF;AACJ,CAAC;;;AClCD,SAAS,KAAAC,UAAS;AAOX,IAAM,oBAAoBA,GAAE,OAAO;AAAA,EACxC,MAAMA,GAAE,OAAO,EACZ,IAAI,IAAI,6CAA6C,EACrD;AAAA,IACC;AAAA,EAGF;AAAA,EAEF,OAAOA,GAAE,OAAO,EACb,IAAI,EACJ,IAAI,GAAG,0BAA0B,EACjC,IAAI,IAAI,wBAAwB,EAChC,QAAQ,CAAC,EACT;AAAA,IACC;AAAA,EAEF;AACJ,CAAC;;;ACzBD,SAAS,KAAAC,UAAS;AAOX,IAAM,uBAAuBA,GAAE,OAAO;AAAA,EAC3C,UAAUA,GAAE,OAAO,EAChB,IAAI,GAAG,0BAA0B,EACjC;AAAA,IACC;AAAA,EAEF;AAAA,EAEF,gBAAgBA,GAAE,QAAQ,EACvB,QAAQ,IAAI,EACZ;AAAA,IACC;AAAA,EAIF;AACJ,CAAC;;;ACvBD,SAAS,KAAAC,UAAS;AAOX,IAAM,sBAAsBA,GAAE,OAAO;AAAA,EAC1C,SAASA,GAAE,OAAO,EACf,SAAS,EACT;AAAA,IACC;AAAA,EAMF;AAAA,EAEF,UAAUA,GAAE,OAAO,EAChB,SAAS,EACT;AAAA,IACC;AAAA,EAGF;AACJ,CAAC;;;ACZM,IAAM,QAAQ;AAAA,EACnB;AAAA,IACE;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;ANzBA;AACA;AACA;AACA;AACA;AACA;AACA;;;AOfA;AADA,OAAO,cAAc;AAcd,IAAM,cAAN,MAAkB;AAAA,EACf,UAAqC;AAAA,EACrC,iBAA8C,oBAAI,IAAI;AAAA,EACtD;AAAA,EACA;AAAA,EACA,kBAA4C;AAAA,EAEpD,YAAY,SAAiB,QAAuC;AAClE,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,MAAM,SAA2C;AACrD,QAAI,KAAK,SAAS;AAChB,YAAM,IAAI,MAAM,iCAAiC;AAAA,IACnD;AAEA,SAAK,kBAAkB;AAGvB,QAAI;AACJ,QAAI;AAEJ,QAAI,eAAe,KAAK,MAAM,GAAG;AAC/B,wBAAkB,KAAK,OAAO,SAAS;AACvC,wBAAkB,KAAK,OAAO,SAAS;AAAA,IACzC,WAAW,eAAe,KAAK,MAAM,GAAG;AAEtC,wBAAkB,KAAK,OAAO,WAAW,QAAQ,OAAK,EAAE,OAAO,OAAO;AACtE,wBAAkB,KAAK,OAAO,WAAW,QAAQ,OAAK,EAAE,OAAO,OAAO;AAAA,IACxE,OAAO;AACL,wBAAkB,CAAC,MAAM;AACzB,wBAAkB,CAAC;AAAA,IACrB;AAGA,SAAK,UAAU,SAAS,MAAM,iBAAiB;AAAA,MAC7C,KAAK,KAAK;AAAA,MACV,SAAS;AAAA,MACT,YAAY;AAAA,MACZ,eAAe;AAAA;AAAA,MACf,kBAAkB;AAAA,QAChB,oBAAoB;AAAA;AAAA,QACpB,cAAc;AAAA,MAChB;AAAA;AAAA,MAEA,YAAY;AAAA,MACZ,UAAU;AAAA,MACV,gBAAgB;AAAA,IAClB,CAAC;AAGD,SAAK,QACF,GAAG,OAAO,CAAC,aAAa,KAAK,aAAa,OAAO,QAAQ,CAAC,EAC1D,GAAG,UAAU,CAAC,aAAa,KAAK,aAAa,UAAU,QAAQ,CAAC,EAChE,GAAG,UAAU,CAAC,aAAa,KAAK,aAAa,UAAU,QAAQ,CAAC,EAChE,GAAG,SAAS,CAAC,UAAU;AACtB,cAAQ,MAAM,8BAA8B,KAAK,EAAE;AAAA,IACrD,CAAC;AAGH,UAAM,IAAI,QAAc,CAAC,YAAY;AACnC,WAAK,QAAS,GAAG,SAAS,MAAM;AAC9B,gBAAQ;AAAA,MACV,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,aAAa,MAAmC,UAAwB;AAE9E,UAAM,gBAAgB,KAAK,eAAe,IAAI,QAAQ;AACtD,QAAI,eAAe;AACjB,mBAAa,aAAa;AAAA,IAC5B;AAGA,UAAM,QAAQ,WAAW,MAAM;AAC7B,WAAK,eAAe,OAAO,QAAQ;AAGnC,UAAI,KAAK,iBAAiB;AACxB,cAAM,eAAe,SAAS,WAAW,GAAG,IACxC,WACA,GAAG,KAAK,OAAO,IAAI,QAAQ;AAE/B,YAAI;AACF,gBAAM,SAAS,KAAK,gBAAgB;AAAA,YAClC;AAAA,YACA,UAAU;AAAA,
UACZ,CAAC;AAGD,cAAI,kBAAkB,SAAS;AAC7B,mBAAO,MAAM,CAAC,UAAU;AACtB,sBAAQ,MAAM,sCAAsC,KAAK,EAAE;AAAA,YAC7D,CAAC;AAAA,UACH;AAAA,QACF,SAAS,OAAO;AACd,kBAAQ,MAAM,sCAAsC,KAAK,EAAE;AAAA,QAC7D;AAAA,MACF;AAAA,IACF,GAAG,KAAK,OAAO,aAAa,UAAU;AAEtC,SAAK,eAAe,IAAI,UAAU,KAAK;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAsB;AAC1B,QAAI,CAAC,KAAK,SAAS;AACjB;AAAA,IACF;AAGA,eAAW,SAAS,KAAK,eAAe,OAAO,GAAG;AAChD,mBAAa,KAAK;AAAA,IACpB;AACA,SAAK,eAAe,MAAM;AAG1B,UAAM,KAAK,QAAQ,MAAM;AACzB,SAAK,UAAU;AACf,SAAK,kBAAkB;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAKA,kBAA4B;AAC1B,QAAI,CAAC,KAAK,SAAS;AACjB,aAAO,CAAC;AAAA,IACV;AAEA,UAAM,UAAU,KAAK,QAAQ,WAAW;AACxC,UAAM,QAAkB,CAAC;AAEzB,eAAW,CAAC,KAAK,SAAS,KAAK,OAAO,QAAQ,OAAO,GAAG;AACtD,iBAAW,YAAY,WAAW;AAChC,cAAM,KAAK,GAAG,GAAG,IAAI,QAAQ,EAAE;AAAA,MACjC;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,YAAqB;AACnB,WAAO,KAAK,YAAY;AAAA,EAC1B;AACF;;;AP7JA;;;AQjBA;AADA,SAAoB,gBAAgB;AAoC7B,SAAS,gBACd,QACA,SACA;AACA,SAAO,OAAO,SAAkB;AAC9B,QAAI;AAEF,YAAM,YAAY,OAAO,MAAM,IAAI;AAGnC,YAAM,SAAS,MAAM,QAAQ,SAAS;AAGtC,aAAO;AAAA,QACL,SAAS,CAAC;AAAA,UACR,MAAM;AAAA,UACN,MAAM,KAAK,UAAU,QAAQ,MAAM,CAAC;AAAA,QACtC,CAAC;AAAA,MACH;AAAA,IAEF,SAAS,OAAO;AAEd,UAAI,iBAAiB,UAAU;AAC7B,eAAO;AAAA,UACL,SAAS;AAAA,UACT,SAAS,CAAC;AAAA,YACR,MAAM;AAAA,YACN,MAAM,KAAK,UAAU;AAAA,cACnB,OAAO;AAAA,cACP;AAAA,cACA,SAAS,MAAM,OAAO,IAAI,QAAM;AAAA,gBAC9B,OAAO,EAAE,KAAK,KAAK,GAAG;AAAA,gBACtB,SAAS,EAAE;AAAA,cACb,EAAE;AAAA,YACJ,GAAG,MAAM,CAAC;AAAA,UACZ,CAAC;AAAA,QACH;AAAA,MACF;AAGA,UAAI,iBAAiB,WAAW;AAC9B,eAAO;AAAA,UACL,SAAS;AAAA,UACT,SAAS,CAAC;AAAA,YACR,MAAM;AAAA,YACN,MAAM,KAAK,UAAU,MAAM,OAAO,GAAG,MAAM,CAAC;AAAA,UAC9C,CAAC;AAAA,QACH;AAAA,MACF;AAGA,cAAQ,MAAM,qCAAqC,KAAK;AACxD,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,CAAC;AAAA,UACR,MAAM;AAAA,UACN,MAAM,KAAK,UAAU;AAAA,YACnB,OAAO,iBAAiB,QAAQ,MAAM,UAAU;AAAA,YAChD;AAAA,UACF,GAAG,MAAM,CAAC;AAAA,QACZ,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;;;AR1EA;AAGA,IAAMC,cAAaC,eAAc,YAAY,GAAG;AAChD,IAAMC,aAAYC,SAAQH,WAAU;AACpC,IAAMI,WAAUC,eAAc,YAAY,GAAG;AAE7C,IAAIC;AACJ,IAAI;AACF,EAAAA,eAAcF,SAAQG,MAAKL,YAAW,iBAAiB,CAAC;AAC1D,QAAQ;AACN,EAAAI,eAAcF,SAAQG,MAAKL,YAAW,oBAAoB,CAAC;AAC7D;AAQA,eAAsB,eAAe,SAA0C;AAC7E,QAAM,EAAE,SAAS,SAAS,MAAM,IAAI;AAGpC,QAAM,MAAM,CAAC,YAAoB;AAC/B,QAAI,SAAS;AACX,cAAQ,MAAM,cAAc,OAAO,EAAE;AAAA,IACvC;AAAA,EACF;AAEA,MAAI,4BAA4B;AAGhC,QAAM,aAAa,IAAI,gBAAgB;AACvC,QAAM,WAAW,IAAI,SAAS,OAAO;AAErC,MAAI;AACF,QAAI,4BAA4B;AAChC,UAAM,WAAW,WAAW;AAE5B,QAAI,4BAA4B;AAChC,UAAM,SAAS,WAAW;AAE1B,QAAI,gCAAgC;AAAA,EACtC,SAAS,OAAO;AACd,YAAQ,MAAM,yBAAyB,KAAK,EAAE;AAC9C,YAAQ,KAAK,CAAC;AAAA,EAChB;AAGA,QAAM,SAAS,IAAI;AAAA,IACjB;AAAA,MACE,MAAM;AAAA,MACN,SAASI,aAAY;AAAA,IACvB;AAAA,IACA;AAAA,MACE,cAAc;AAAA,QACZ,OAAO,CAAC;AAAA,MACV;AAAA,IACF;AAAA,EACF;AAGA,SAAO,kBAAkB,wBAAwB,aAAa;AAAA,IAC5D;AAAA,EACF,EAAE;AAGF,QAAM,oBAAoB,YAAY;AACpC,QAAI;AACF,YAAM,iBAAiB,MAAM,SAAS,aAAa;AACnD,UAAI,gBAAgB;AAClB,YAAI,oDAAoD;AACxD,cAAM,SAAS,UAAU;AACzB,YAAI,8BAA8B;AAAA,MACpC;AAAA,IACF,SAAS,OAAO;AAEd,UAAI,yBAAyB,KAAK,EAAE;AAAA,IACtC;AAAA,EACF;AAGA,QAAM,mBAAmB,OAAO;AAAA,IAC9B,cAAc,SAAS,kBAAkB;AAAA,IACzC,WAAW,SAAS,eAAe;AAAA,EACrC;AAIA,QAAM,uBAAuB,YAAY,YAAY;AACnD,UAAM,kBAAkB;AAAA,EAC1B,GAAG,yBAAyB;AAG5B,SAAO,kBAAkB,uBAAuB,OAAO,YAAY;AACjE,UAAM,EAAE,MAAM,WAAW,KAAK,IAAI,QAAQ;AAE1C,QAAI,uBAAuB,IAAI,EAAE;AAEjC,QAAI;AACF,cAAQ,MAAM;AAAA,QACd,KAAK;AACH,iBAAO,MAAM;AAAA,YACX;AAAA,YACA,OAAO,kBAAkB;AACvB,kBAAI,mBAAmB,cAAc,KAAK,GAAG;AAG7C,oBAAM,kBAAkB;AAExB,oBAAM,iBAAiB,MAAM,WAAW,MAAM,cAAc,KAAK;AACjE,oBAAM,UAAU,MAAM,SAAS,OAAO,gBAAgB,cAAc,OAAO,cAAc,KAAK;AAE9F,kBAAI,SAAS,QAAQ,MAAM,UAAU;AAErC,qBAAO;AAAA,gBACL,WAAW,iBAAiB;AAAA,gBAC5B;AAAA,cACF;AAAA,YACF;AAAA,UACF,EAAE,IA
AI;AAAA,QAER,KAAK;AACH,iBAAO,MAAM;AAAA,YACX;AAAA,YACA,OAAO,kBAAkB;AACvB,kBAAI,yBAAyB;AAG7B,oBAAM,kBAAkB;AAExB,oBAAM,gBAAgB,MAAM,WAAW,MAAM,cAAc,IAAI;AAE/D,oBAAM,UAAU,MAAM,SAAS,OAAO,eAAe,cAAc,OAAO,cAAc,IAAI;AAE5F,kBAAI,SAAS,QAAQ,MAAM,iBAAiB;AAE5C,qBAAO;AAAA,gBACL,WAAW,iBAAiB;AAAA,gBAC5B;AAAA,cACF;AAAA,YACF;AAAA,UACF,EAAE,IAAI;AAAA,QAER,KAAK;AACH,iBAAO,MAAM;AAAA,YACX;AAAA,YACA,OAAO,kBAAkB;AACvB,kBAAI,wBAAwB,cAAc,QAAQ,EAAE;AAGpD,oBAAM,kBAAkB;AAIxB,oBAAM,gBAAgB,MAAM,WAAW,MAAM,cAAc,QAAQ;AACnE,oBAAM,aAAa,MAAM,SAAS,OAAO,eAAe,IAAI,cAAc,QAAQ;AAGlF,oBAAM,aAAa,WAAW;AAAA,gBAAO,OACnC,EAAE,SAAS,KAAK,SAAS,cAAc,QAAQ,KAAK,cAAc,SAAS,SAAS,EAAE,SAAS,IAAI;AAAA,cACrG;AAEA,kBAAI,UAAU;AAEd,kBAAI,cAAc,kBAAkB,WAAW,SAAS,GAAG;AAEzD,sBAAM,mBAAmB,MAAM,WAAW,MAAM,WAAW,CAAC,EAAE,OAAO;AACrE,sBAAM,UAAU,MAAM,SAAS,OAAO,kBAAkB,GAAG,WAAW,CAAC,EAAE,OAAO;AAGhF,sBAAM,oBAAoB,QAAQ;AAAA,kBAAO,OACvC,CAAC,EAAE,SAAS,KAAK,SAAS,cAAc,QAAQ,KAAK,CAAC,cAAc,SAAS,SAAS,EAAE,SAAS,IAAI;AAAA,gBACvG;AAEA,0BAAU,CAAC,GAAG,YAAY,GAAG,iBAAiB;AAAA,cAChD;AAEA,kBAAI,SAAS,QAAQ,MAAM,SAAS;AAEpC,qBAAO;AAAA,gBACL,WAAW,iBAAiB;AAAA,gBAC5B,MAAM,cAAc;AAAA,gBACpB,QAAQ;AAAA,cACV;AAAA,YACF;AAAA,UACF,EAAE,IAAI;AAAA,QAER,KAAK;AACH,iBAAO,MAAM;AAAA,YACX;AAAA,YACA,OAAO,kBAAkB;AACvB,kBAAI,2CAA2C;AAG/C,oBAAM,kBAAkB;AAExB,kBAAI;AACJ,kBAAI,aAAa;AAEjB,kBAAI;AAEF,0BAAU,MAAM,SAAS,aAAa;AAAA,kBACpC,UAAU,cAAc;AAAA,kBACxB,SAAS,cAAc;AAAA,kBACvB,OAAO;AAAA,gBACT,CAAC;AAID,oBAAI,QAAQ,WAAW,MAAM,cAAc,YAAY,cAAc,UAAU;AAC7E,sBAAI,oDAAoD;AACxD,4BAAU,MAAM,SAAS,eAAe;AAAA,oBACtC,UAAU,cAAc;AAAA,oBACxB,SAAS,cAAc;AAAA,oBACvB,OAAO;AAAA,kBACT,CAAC;AACD,+BAAa;AAAA,gBACf;AAAA,cACF,SAAS,OAAO;AAEd,oBAAI,sDAAsD,KAAK,EAAE;AACjE,0BAAU,MAAM,SAAS,eAAe;AAAA,kBACtC,UAAU,cAAc;AAAA,kBACxB,SAAS,cAAc;AAAA,kBACvB,OAAO;AAAA,gBACT,CAAC;AACD,6BAAa;AAAA,cACf;AAEA,kBAAI,SAAS,QAAQ,MAAM,kBAAkB,UAAU,SAAS;AAEhE,qBAAO;AAAA,gBACL,WAAW,iBAAiB;AAAA,gBAC5B,QAAQ;AAAA,gBACR;AAAA,gBACA,MAAM,eAAe,YACjB,oFACA;AAAA,cACN;AAAA,YACF;AAAA,UACF,EAAE,IAAI;AAAA,QAER;AACE,gBAAM,IAAI;AAAA,YACR,iBAAiB,IAAI;AAAA;AAAA,YAErB,EAAE,eAAe,MAAM,gBAAgB,MAAM,IAAI,OAAK,EAAE,IAAI,EAAE;AAAA,YAC9D;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AAEd,UAAI,iBAAiB,WAAW;AAC9B,eAAO;AAAA,UACL,SAAS;AAAA,UACT,SAAS,CAAC;AAAA,YACR,MAAM;AAAA,YACN,MAAM,KAAK,UAAU,MAAM,OAAO,GAAG,MAAM,CAAC;AAAA,UAC9C,CAAC;AAAA,QACH;AAAA,MACF;AAGA,cAAQ,MAAM,uCAAuC,IAAI,KAAK,KAAK;AACnE,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,CAAC;AAAA,UACR,MAAM;AAAA,UACN,MAAM,KAAK,UAAU;AAAA,YACnB,OAAO,iBAAiB,QAAQ,MAAM,UAAU;AAAA,YAChD;AAAA,YACA,MAAM;AAAA,UACR,GAAG,MAAM,CAAC;AAAA,QACZ,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF,CAAC;AAGD,QAAM,SAAS,MAAM,cAAc,KAAK,OAAO;AAG/C,QAAM,WAAW,MAAM,SAAS,QAAQ;AAExC,MAAI,CAAC,YAAY,OAAO,IAAI,qBAAqB;AAC/C,QAAI,wDAAiD;AACrD,QAAI,oEAA0D;AAE9D,QAAI;AAEF,YAAM,EAAE,eAAAE,eAAc,IAAI,MAAM;AAChC,YAAMA,eAAc,EAAE,SAAS,SAAS,KAAK,CAAC;AAC9C,UAAI,mCAA8B;AAAA,IACpC,SAAS,OAAO;AACd,UAAI,0CAAgC,KAAK,EAAE;AAC3C,UAAI,kCAAkC;AAAA,IAExC;AAAA,EACF,WAAW,CAAC,UAAU;AACpB,QAAI,oEAA0D;AAC9D,QAAI,0CAA0C;AAAA,EAChD;AAGA,MAAI,aAAqC;AACzC,MAAI,kBAAyC;AAC7C,MAAI,cAAkC;AAEtC,MAAI,OAAO,aAAa,SAAS;AAC/B,UAAM,eAAe,MAAM,eAAe;AAC1C,UAAM,SAAS,MAAM,UAAU,OAAO;AAEtC,QAAI,gBAAgB,QAAQ;AAC1B,UAAI,gCAA2B;AAC/B,mBAAa,IAAI,gBAAgB,SAAS,SAAS,MAAM;AAGzD,UAAI;AACF,YAAI,6BAA6B;AACjC,cAAM,eAAe,MAAM,WAAW,WAAW;AAEjD,YAAI,gBAAgB,aAAa,SAAS,GAAG;AAC3C,cAAI,mCAA4B,aAAa,MAAM,gBAAgB;AACnE,cAAI,6BAA6B;AAEjC,gBAAM,QAAQ,MAAM;AAAA,YAClB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA,EAAE,QAAQ;AAAA,UACZ;AAEA,cAAI,oBAAe,KAAK,QAAQ;AAAA,QAClC,OAAO;AACL,cAAI,2CAAsC;AAAA,QAC5C;AAAA,MACF,SAAS,OAAO;AACd,YAAI,kDAAkD,KAAK,EAAE;AAAA,MAC/D;AAGA,UAAI,gDAA2C,OAAO,aAAa,
iBAAiB,GAAI,IAAI;AAE5F,wBAAkB,YAAY,YAAY;AACxC,YAAI;AACF,gBAAM,eAAe,MAAM,WAAY,cAAc;AAErD,cAAI,gBAAgB,aAAa,SAAS,GAAG;AAC3C,gBAAI,kCAA2B,aAAa,MAAM,gBAAgB;AAClE,gBAAI,6BAA6B;AAGjC;AAAA,cACE;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,cACA,EAAE,QAAQ;AAAA,YACZ,EAAE,KAAK,WAAS;AACd,kBAAI,uCAAkC,KAAK,QAAQ;AAAA,YACrD,CAAC,EAAE,MAAM,WAAS;AAChB,kBAAI,uCAAuC,KAAK,EAAE;AAAA,YACpD,CAAC;AAAA,UACH;AAAA,QACF,SAAS,OAAO;AACd,cAAI,wCAAwC,KAAK,EAAE;AAAA,QACrD;AAAA,MACF,GAAG,OAAO,aAAa,cAAc;AAAA,IACvC,OAAO;AACL,UAAI,CAAC,cAAc;AACjB,YAAI,4CAA4C;AAAA,MAClD,WAAW,CAAC,QAAQ;AAClB,YAAI,+CAA+C;AAAA,MACrD;AAAA,IACF;AAAA,EACF,OAAO;AACL,QAAI,yCAAyC;AAAA,EAC/C;AAIA,QAAM,sBAAsB,UAAU,SAAY,QAAQ,OAAO,aAAa;AAE9E,MAAI,qBAAqB;AACvB,QAAI,oCAA6B;AACjC,kBAAc,IAAI,YAAY,SAAS,MAAM;AAE7C,QAAI;AACF,YAAM,YAAY,MAAM,OAAO,UAAU;AACvC,cAAM,EAAE,MAAM,SAAS,IAAI;AAE3B,YAAI,SAAS,UAAU;AAErB,cAAI,kCAAsB,QAAQ,EAAE;AACpC,cAAI;AACF,kBAAM,SAAS,aAAa,QAAQ;AAGpC,kBAAM,WAAW,IAAI,gBAAgB,SAAS,MAAM;AACpD,kBAAM,SAAS,WAAW,QAAQ;AAElC,gBAAI,kBAAa,QAAQ,aAAa;AAAA,UACxC,SAAS,OAAO;AACd,gBAAI,6BAA6B,QAAQ,KAAK,KAAK,EAAE;AAAA,UACvD;AAAA,QACF,OAAO;AAEL,gBAAM,SAAS,SAAS,QAAQ,UAAU;AAC1C,cAAI,kBAAW,MAAM,KAAK,QAAQ,EAAE;AAGpC,0BAAgB,UAAU,UAAU,YAAY,QAAQ,EAAE,QAAQ,CAAC,EAChE,MAAM,CAAC,UAAU;AAChB,gBAAI,8BAA8B,QAAQ,KAAK,KAAK,EAAE;AAAA,UACxD,CAAC;AAAA,QACL;AAAA,MACF,CAAC;AAED,YAAM,eAAe,YAAY,gBAAgB,EAAE;AACnD,UAAI,0CAAqC,YAAY,SAAS;AAAA,IAChE,SAAS,OAAO;AACd,UAAI,0CAA0C,KAAK,EAAE;AACrD,oBAAc;AAAA,IAChB;AAAA,EACF;AAGA,QAAM,UAAU,YAAY;AAC1B,QAAI,6BAA6B;AACjC,kBAAc,oBAAoB;AAClC,QAAI,iBAAiB;AACnB,oBAAc,eAAe;AAAA,IAC/B;AACA,QAAI,aAAa;AACf,YAAM,YAAY,KAAK;AAAA,IACzB;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAGA,UAAQ,GAAG,UAAU,OAAO;AAC5B,UAAQ,GAAG,WAAW,OAAO;AAG7B,QAAM,YAAY,IAAI,qBAAqB;AAI3C,YAAU,UAAU,MAAM;AACxB,QAAI,oDAAoD;AACxD,YAAQ,EAAE,MAAM,MAAM,QAAQ,KAAK,CAAC,CAAC;AAAA,EACvC;AAEA,YAAU,UAAU,CAAC,UAAU;AAC7B,QAAI,oBAAoB,KAAK,EAAE;AAAA,EAEjC;AAEA,QAAM,OAAO,QAAQ,SAAS;AAE9B,MAAI,2CAA2C;AACjD;;;ADteA,eAAsB,aAAa,SAA+E;AAChH,QAAM,UAAU,QAAQ,OAAOC,OAAK,QAAQ,QAAQ,IAAI,IAAI,QAAQ,IAAI;AAExE,MAAI;AAEF,QAAI,QAAQ,MAAM;AAChB,UAAI;AACF,cAAM,QAAQ,MAAMC,KAAG,KAAK,OAAO;AACnC,YAAI,CAAC,MAAM,YAAY,GAAG;AACxB,kBAAQ,MAAMC,OAAM,IAAI,0CAA0C,OAAO,EAAE,CAAC;AAC5E,kBAAQ,KAAK,CAAC;AAAA,QAChB;AAAA,MACF,SAAS,OAAO;AACd,YAAK,MAAgC,SAAS,UAAU;AACtD,kBAAQ,MAAMA,OAAM,IAAI,2CAA2C,OAAO,EAAE,CAAC;AAAA,QAC/E,WAAY,MAAgC,SAAS,UAAU;AAC7D,kBAAQ,MAAMA,OAAM,IAAI,8CAA8C,OAAO,EAAE,CAAC;AAAA,QAClF,OAAO;AACL,kBAAQ,MAAMA,OAAM,IAAI,6CAA6C,OAAO,EAAE,CAAC;AAC/E,kBAAQ,MAAMA,OAAM,IAAK,MAAgB,OAAO,CAAC;AAAA,QACnD;AACA,gBAAQ,KAAK,CAAC;AAAA,MAChB;AAAA,IACF;AAGA,eAAW;AACX,YAAQ,MAAMA,OAAM,KAAK,0BAA0B,CAAC;AAEpD,QAAI,QAAQ,MAAM;AAChB,cAAQ,MAAMA,OAAM,IAAI,iBAAiB,OAAO;AAAA,CAAI,CAAC;AAAA,IACvD;AAGA,QAAI,QAAQ,OAAO;AACjB,cAAQ,MAAMA,OAAM,OAAO,yEAA+D,CAAC;AAC3F,cAAQ,MAAMA,OAAM,IAAI,+CAA+C,CAAC;AAAA,IAC1E;AAIA,UAAM,QAAQ,QAAQ,UAAU,QAAQ,QAAQ,QAAQ,OAAO;AAE/D,UAAM,eAAe;AAAA,MACnB;AAAA,MACA,SAAS;AAAA,MACT;AAAA,IACF,CAAC;AAAA,EACH,SAAS,OAAO;AACd,YAAQ,MAAMA,OAAM,IAAI,6BAA6B,GAAG,KAAK;AAC7D,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;;;AdhDA,IAAMC,cAAaC,eAAc,YAAY,GAAG;AAChD,IAAMC,aAAYC,SAAQH,WAAU;AACpC,IAAMI,WAAUC,eAAc,YAAY,GAAG;AAE7C,IAAIC;AACJ,IAAI;AACF,EAAAA,eAAcF,SAAQG,MAAKL,YAAW,iBAAiB,CAAC;AAC1D,QAAQ;AACN,EAAAI,eAAcF,SAAQG,MAAKL,YAAW,oBAAoB,CAAC;AAC7D;AAEO,IAAM,UAAU,IAAI,QAAQ;AAEnC,QACG,KAAK,MAAM,EACX,YAAY,sDAAsD,EAClE,QAAQI,aAAY,OAAO;AAE9B,QACG,QAAQ,MAAM,EACd,YAAY,0CAA0C,EACtD,OAAO,iBAAiB,0CAA0C,EAClE,OAAO,aAAa,2CAA2C,EAC/D,OAAO,qBAAqB,oDAAoD,EAChF,OAAO,WAAW;AAErB,QACG,QAAQ,OAAO,EACf,YAAY,wCAAwC,EACpD,OAAO,eAAe,uCAAuC,EAC7D,OAAO,eAAe,8CAA8C,EACpE,OAAO,iBAAiB,uCAAuC,EAC/D,OAAO,YAAY;AAEtB,QACG,QAAQ,OA
AO,EACf,YAAY,6CAA6C,EACzD,OAAO,qBAAqB,gCAAgC,MAAM,EAClE,OAAO,cAAc,wCAAwC,EAC7D,OAAO,eAAe,sDAAsD,EAC5E,OAAO,qBAAqB,yDAAyD,EACrF,OAAO,YAAY;AAEtB,QACG,QAAQ,QAAQ,EAChB,YAAY,qCAAqC,EACjD,OAAO,aAAa;;;AwBtDvB,QAAQ,MAAM;","names":["fs","path","fs","path","fs","path","fs","path","e","path","os","crypto","createRequire","fileURLToPath","dirname","join","packageJson","__filename","__dirname","require","init_version","fs","path","init_version","fs","path","fs","fs","manifest","error","fs","chalk","manifest","embeddings","isGitAvailable","isGitRepo","GitStateTracker","gitAvailable","isRepo","createRequire","fileURLToPath","dirname","join","fs","path","fileURLToPath","chalk","require","fs","path","packageJson","fs","path","path","fs","fs","path","path","fs","path","fs","__filename","fileURLToPath","__dirname","path","fs","chalk","chalk","fs","path","path","chalk","fs","chalk","VectorDB","ManifestManager","chalk","chalk","fs","path","createRequire","fileURLToPath","dirname","join","z","z","z","__filename","fileURLToPath","__dirname","dirname","require","createRequire","packageJson","join","indexCodebase","path","fs","chalk","__filename","fileURLToPath","__dirname","dirname","require","createRequire","packageJson","join"]}
1
+ {"version":3,"sources":["../src/utils/version.ts","../src/constants.ts","../src/config/schema.ts","../src/config/migration.ts","../src/config/merge.ts","../src/errors/codes.ts","../src/errors/index.ts","../src/config/service.ts","../src/git/utils.ts","../src/vectordb/version.ts","../src/indexer/scanner.ts","../src/indexer/symbol-extractor.ts","../src/indexer/ast/parser.ts","../src/indexer/ast/symbols.ts","../src/indexer/ast/traversers/typescript.ts","../src/indexer/ast/traversers/php.ts","../src/indexer/ast/traversers/index.ts","../src/indexer/ast/chunker.ts","../src/indexer/liquid-chunker.ts","../src/indexer/json-template-chunker.ts","../src/indexer/chunker.ts","../src/embeddings/local.ts","../src/embeddings/types.ts","../src/vectordb/relevance.ts","../src/vectordb/intent-classifier.ts","../src/vectordb/query.ts","../src/vectordb/batch-insert.ts","../src/vectordb/maintenance.ts","../src/vectordb/lancedb.ts","../src/indexer/manifest.ts","../src/git/tracker.ts","../src/indexer/change-detector.ts","../src/indexer/incremental.ts","../src/utils/loading-messages.ts","../src/indexer/index.ts","../src/cli/index.ts","../src/cli/init.ts","../src/utils/banner.ts","../src/config/migration-manager.ts","../src/frameworks/detector-service.ts","../src/frameworks/types.ts","../src/frameworks/nodejs/detector.ts","../src/frameworks/nodejs/config.ts","../src/frameworks/laravel/detector.ts","../src/frameworks/laravel/config.ts","../src/frameworks/shopify/detector.ts","../src/frameworks/shopify/config.ts","../src/frameworks/registry.ts","../src/cli/status.ts","../src/cli/index-cmd.ts","../src/cli/serve.ts","../src/mcp/server.ts","../src/mcp/utils/zod-to-json-schema.ts","../src/mcp/schemas/search.schema.ts","../src/mcp/schemas/similarity.schema.ts","../src/mcp/schemas/file.schema.ts","../src/mcp/schemas/symbols.schema.ts","../src/mcp/tools.ts","../src/watcher/index.ts","../src/mcp/utils/tool-wrapper.ts","../src/index.ts"],"sourcesContent":["import { createRequire } from 'module';\nimport { fileURLToPath } from 'url';\nimport { dirname, join } from 'path';\n\n/**\n * Centralized package version loader.\n * Handles different build output structures (development vs production).\n * \n * Build scenarios:\n * - Development (ts-node): src/utils/version.ts → ../package.json\n * - Production (dist): dist/utils/version.js → ../package.json\n * - Nested builds: dist/something/version.js → ../../package.json\n */\n\n// Setup require for ESM\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\nconst require = createRequire(import.meta.url);\n\nlet packageJson: { version: string; name?: string };\n\ntry {\n // Try relative to current file (works in most scenarios)\n packageJson = require(join(__dirname, '../package.json'));\n} catch {\n try {\n // Fallback: go up one more level (nested build output)\n packageJson = require(join(__dirname, '../../package.json'));\n } catch {\n // Last resort: hardcoded fallback (should never happen in production)\n console.warn('[Lien] Warning: Could not load package.json, using fallback version');\n packageJson = { version: '0.0.0-unknown' };\n }\n}\n\n/**\n * Get the current package version\n */\nexport function getPackageVersion(): string {\n return packageJson.version;\n}\n\n/**\n * Get the full package.json (for compatibility)\n */\nexport function getPackageInfo(): { version: string; name?: string } {\n return packageJson;\n}\n\n","/**\n * Centralized constants for the Lien project.\n * This file contains all magic numbers and configuration 
defaults\n * to ensure consistency across the codebase.\n */\n\nimport { getPackageVersion } from './utils/version.js';\n\n// Chunking settings\nexport const DEFAULT_CHUNK_SIZE = 75;\nexport const DEFAULT_CHUNK_OVERLAP = 10;\n\n// Concurrency and batching\nexport const DEFAULT_CONCURRENCY = 4;\nexport const DEFAULT_EMBEDDING_BATCH_SIZE = 50;\n\n// Micro-batching for event loop yielding\n// Process N embeddings at a time, then yield to event loop\n// This prevents UI freezing during CPU-intensive embedding generation\nexport const EMBEDDING_MICRO_BATCH_SIZE = 10;\n\n// Vector database batch size limits\n// Maximum batch size before splitting (prevents LanceDB errors on very large batches)\nexport const VECTOR_DB_MAX_BATCH_SIZE = 1000;\n// Minimum batch size for retry logic (stop splitting below this size)\nexport const VECTOR_DB_MIN_BATCH_SIZE = 10;\n\n// Embedding model configuration\nexport const EMBEDDING_DIMENSIONS = 384; // all-MiniLM-L6-v2\nexport const DEFAULT_EMBEDDING_MODEL = 'Xenova/all-MiniLM-L6-v2';\n\n// MCP server configuration\nexport const DEFAULT_PORT = 7133; // LIEN in leetspeak\nexport const VERSION_CHECK_INTERVAL_MS = 2000;\n\n// Git detection\nexport const DEFAULT_GIT_POLL_INTERVAL_MS = 10000; // Check every 10 seconds\n\n// File watching\nexport const DEFAULT_DEBOUNCE_MS = 1000;\n\n// Configuration version - always matches package version\n// Config format is tied to the package release that introduces it\nexport const CURRENT_CONFIG_VERSION = getPackageVersion();\n\n// Index format version - bump on ANY breaking change to indexing\n// Examples that require version bump:\n// - Chunking algorithm changes\n// - Embedding model changes (e.g., switch from all-MiniLM-L6-v2 to another model)\n// - Vector DB schema changes (new metadata fields)\n// - Metadata structure changes\n// v2: AST-based chunking + enhanced metadata (symbolName, complexity, etc.)\nexport const INDEX_FORMAT_VERSION = 2;\n\n","import {\n DEFAULT_CHUNK_SIZE,\n DEFAULT_CHUNK_OVERLAP,\n DEFAULT_CONCURRENCY,\n DEFAULT_EMBEDDING_BATCH_SIZE,\n DEFAULT_PORT,\n DEFAULT_GIT_POLL_INTERVAL_MS,\n DEFAULT_DEBOUNCE_MS,\n CURRENT_CONFIG_VERSION,\n} from '../constants.js';\n\n/**\n * Framework-specific configuration\n */\nexport interface FrameworkConfig {\n include: string[]; // File patterns relative to framework path\n exclude: string[]; // Exclude patterns relative to framework path\n}\n\n/**\n * Framework instance in a monorepo\n */\nexport interface FrameworkInstance {\n name: string; // 'nodejs', 'laravel'\n path: string; // '.', 'cognito-backend', 'packages/cli'\n enabled: boolean;\n config: FrameworkConfig;\n}\n\n/**\n * Main Lien configuration supporting monorepo setups\n */\nexport interface LienConfig {\n version: string;\n core: {\n chunkSize: number;\n chunkOverlap: number;\n concurrency: number;\n embeddingBatchSize: number;\n };\n chunking: {\n useAST: boolean; // Enable AST-based chunking (v0.13.0)\n astFallback: 'line-based' | 'error'; // Fallback strategy on AST errors\n };\n mcp: {\n port: number;\n transport: 'stdio' | 'socket';\n autoIndexOnFirstRun: boolean;\n };\n gitDetection: {\n enabled: boolean;\n pollIntervalMs: number;\n };\n fileWatching: {\n enabled: boolean;\n debounceMs: number;\n };\n frameworks: FrameworkInstance[];\n}\n\n/**\n * Legacy config format for backwards compatibility\n * @deprecated Use LienConfig with frameworks array instead\n */\nexport interface LegacyLienConfig {\n version: string;\n indexing: {\n exclude: string[];\n include: string[];\n chunkSize: number;\n 
chunkOverlap: number;\n concurrency: number;\n embeddingBatchSize: number;\n };\n mcp: {\n port: number;\n transport: 'stdio' | 'socket';\n autoIndexOnFirstRun: boolean;\n };\n gitDetection: {\n enabled: boolean;\n pollIntervalMs: number;\n };\n fileWatching: {\n enabled: boolean;\n debounceMs: number;\n };\n}\n\n/**\n * Type guard to check if a config is the legacy format\n * @param config - Config object to check\n * @returns True if config is LegacyLienConfig\n */\nexport function isLegacyConfig(\n config: LienConfig | LegacyLienConfig\n): config is LegacyLienConfig {\n return 'indexing' in config && !('frameworks' in config);\n}\n\n/**\n * Type guard to check if a config is the modern format\n * @param config - Config object to check\n * @returns True if config is LienConfig\n */\nexport function isModernConfig(\n config: LienConfig | LegacyLienConfig\n): config is LienConfig {\n return 'frameworks' in config;\n}\n\n/**\n * Default configuration with empty frameworks array\n * Frameworks should be detected and added via lien init\n */\nexport const defaultConfig: LienConfig = {\n version: CURRENT_CONFIG_VERSION,\n core: {\n chunkSize: DEFAULT_CHUNK_SIZE,\n chunkOverlap: DEFAULT_CHUNK_OVERLAP,\n concurrency: DEFAULT_CONCURRENCY,\n embeddingBatchSize: DEFAULT_EMBEDDING_BATCH_SIZE,\n },\n chunking: {\n useAST: true, // AST-based chunking enabled by default (v0.13.0)\n astFallback: 'line-based', // Fallback to line-based on errors\n },\n mcp: {\n port: DEFAULT_PORT,\n transport: 'stdio',\n autoIndexOnFirstRun: true,\n },\n gitDetection: {\n enabled: true,\n pollIntervalMs: DEFAULT_GIT_POLL_INTERVAL_MS,\n },\n fileWatching: {\n enabled: true, // Enabled by default (fast with incremental indexing!)\n debounceMs: DEFAULT_DEBOUNCE_MS,\n },\n frameworks: [], // Will be populated by lien init via framework detection\n};\n\n","import fs from 'fs/promises';\nimport path from 'path';\nimport { LienConfig, LegacyLienConfig, FrameworkInstance, defaultConfig } from './schema.js';\nimport { CURRENT_CONFIG_VERSION } from '../constants.js';\n\n/**\n * Checks if a config object needs migration from v0.2.0 to v0.3.0\n */\nexport function needsMigration(config: any): boolean {\n // Check if config uses old structure:\n // - Has 'indexing' field instead of 'core' and 'frameworks'\n // - Or has no 'frameworks' field at all\n // - Or version is explicitly set to something < 0.3.0\n // - Or missing 'chunking' field (v0.13.0)\n if (!config) {\n return false;\n }\n\n // If missing chunking config, needs migration to v0.13.0\n if (config.frameworks !== undefined && !config.chunking) {\n return true;\n }\n\n // If it has frameworks array and chunking, it's already in new format\n if (config.frameworks !== undefined && config.chunking !== undefined) {\n return false;\n }\n\n // If it has 'indexing' field, it's the old format\n if (config.indexing !== undefined) {\n return true;\n }\n\n // If version is explicitly < 0.3.0\n if (config.version && config.version.startsWith('0.2')) {\n return true;\n }\n\n return false;\n}\n\n/**\n * Migrates a v0.2.0 config to v0.3.0+ format\n */\nexport function migrateConfig(oldConfig: Partial<LegacyLienConfig | LienConfig>): LienConfig {\n // Start with default config structure\n const newConfig: LienConfig = {\n version: CURRENT_CONFIG_VERSION,\n core: {\n chunkSize: (oldConfig as any).indexing?.chunkSize ?? (oldConfig as any).core?.chunkSize ?? defaultConfig.core.chunkSize,\n chunkOverlap: (oldConfig as any).indexing?.chunkOverlap ?? (oldConfig as any).core?.chunkOverlap ?? 
defaultConfig.core.chunkOverlap,\n concurrency: (oldConfig as any).indexing?.concurrency ?? (oldConfig as any).core?.concurrency ?? defaultConfig.core.concurrency,\n embeddingBatchSize: (oldConfig as any).indexing?.embeddingBatchSize ?? (oldConfig as any).core?.embeddingBatchSize ?? defaultConfig.core.embeddingBatchSize,\n },\n chunking: {\n useAST: (oldConfig as any).chunking?.useAST ?? defaultConfig.chunking.useAST,\n astFallback: (oldConfig as any).chunking?.astFallback ?? defaultConfig.chunking.astFallback,\n },\n mcp: {\n port: oldConfig.mcp?.port ?? defaultConfig.mcp.port,\n transport: oldConfig.mcp?.transport ?? defaultConfig.mcp.transport,\n autoIndexOnFirstRun: oldConfig.mcp?.autoIndexOnFirstRun ?? defaultConfig.mcp.autoIndexOnFirstRun,\n },\n gitDetection: {\n enabled: oldConfig.gitDetection?.enabled ?? defaultConfig.gitDetection.enabled,\n pollIntervalMs: oldConfig.gitDetection?.pollIntervalMs ?? defaultConfig.gitDetection.pollIntervalMs,\n },\n fileWatching: {\n enabled: oldConfig.fileWatching?.enabled ?? defaultConfig.fileWatching.enabled,\n debounceMs: oldConfig.fileWatching?.debounceMs ?? defaultConfig.fileWatching.debounceMs,\n },\n frameworks: (oldConfig as any).frameworks ?? [],\n };\n\n // Convert old indexing config to a single \"generic\" framework (only for legacy configs)\n if ((oldConfig as any).indexing && newConfig.frameworks.length === 0) {\n const genericFramework: FrameworkInstance = {\n name: 'generic',\n path: '.',\n enabled: true,\n config: {\n include: (oldConfig as any).indexing.include ?? ['**/*.{ts,tsx,js,jsx,py,go,rs,java,c,cpp,cs}'],\n exclude: (oldConfig as any).indexing.exclude ?? [\n '**/node_modules/**',\n '**/dist/**',\n '**/build/**',\n '**/.git/**',\n '**/coverage/**',\n '**/.next/**',\n '**/.nuxt/**',\n '**/vendor/**',\n ],\n },\n };\n\n newConfig.frameworks.push(genericFramework);\n } else if (newConfig.frameworks.length === 0) {\n // No indexing config and no frameworks present, use defaults for generic framework\n const genericFramework: FrameworkInstance = {\n name: 'generic',\n path: '.',\n enabled: true,\n config: {\n include: ['**/*.{ts,tsx,js,jsx,py,go,rs,java,c,cpp,cs}'],\n exclude: [\n '**/node_modules/**',\n '**/dist/**',\n '**/build/**',\n '**/.git/**',\n '**/coverage/**',\n '**/.next/**',\n '**/.nuxt/**',\n '**/vendor/**',\n ],\n },\n };\n\n newConfig.frameworks.push(genericFramework);\n }\n\n return newConfig;\n}\n\n/**\n * Migrates config file and creates backup\n */\nexport async function migrateConfigFile(rootDir: string = process.cwd()): Promise<{\n migrated: boolean;\n backupPath?: string;\n config: LienConfig;\n}> {\n const configPath = path.join(rootDir, '.lien.config.json');\n\n try {\n // Read existing config\n const configContent = await fs.readFile(configPath, 'utf-8');\n const oldConfig = JSON.parse(configContent);\n\n // Check if migration is needed\n if (!needsMigration(oldConfig)) {\n return {\n migrated: false,\n config: oldConfig as LienConfig,\n };\n }\n\n // Perform migration\n const newConfig = migrateConfig(oldConfig);\n\n // Create backup\n const backupPath = `${configPath}.v0.2.0.backup`;\n await fs.copyFile(configPath, backupPath);\n\n // Write migrated config\n await fs.writeFile(configPath, JSON.stringify(newConfig, null, 2) + '\\n', 'utf-8');\n\n return {\n migrated: true,\n backupPath,\n config: newConfig,\n };\n } catch (error) {\n // If config doesn't exist, return default\n if ((error as NodeJS.ErrnoException).code === 'ENOENT') {\n return {\n migrated: false,\n config: defaultConfig,\n };\n }\n 
throw error;\n }\n}\n\n","import { LienConfig } from './schema.js';\n\n/**\n * Deep merges user config with defaults, preserving user customizations.\n * User values always take precedence over defaults.\n * \n * @param defaults - The default configuration\n * @param user - The user's partial configuration\n * @returns Complete merged configuration\n */\nexport function deepMergeConfig(defaults: LienConfig, user: Partial<LienConfig>): LienConfig {\n return {\n version: user.version ?? defaults.version,\n core: {\n ...defaults.core,\n ...user.core,\n },\n chunking: {\n ...defaults.chunking,\n ...user.chunking,\n },\n mcp: {\n ...defaults.mcp,\n ...user.mcp,\n },\n gitDetection: {\n ...defaults.gitDetection,\n ...user.gitDetection,\n },\n fileWatching: {\n ...defaults.fileWatching,\n ...user.fileWatching,\n },\n frameworks: user.frameworks ?? defaults.frameworks,\n };\n}\n\n/**\n * Detects new fields that exist in the 'after' config but not in the 'before' config.\n * Returns a list of human-readable field paths.\n * \n * @param before - The existing config (potentially missing fields)\n * @param after - The complete config with all fields\n * @returns Array of new field paths (e.g., [\"mcp.autoIndexOnFirstRun\", \"gitDetection\"])\n */\nexport function detectNewFields(before: Record<string, any>, after: Record<string, any>): string[] {\n const newFields: string[] = [];\n\n // Check top-level sections\n for (const key of Object.keys(after)) {\n if (!(key in before)) {\n newFields.push(key);\n continue;\n }\n\n // Check nested fields for object sections\n if (typeof after[key] === 'object' && after[key] !== null && !Array.isArray(after[key])) {\n const beforeSection = (before[key] as Record<string, any>) || {};\n const afterSection = after[key] as Record<string, any>;\n\n for (const nestedKey of Object.keys(afterSection)) {\n if (!(nestedKey in beforeSection)) {\n newFields.push(`${key}.${nestedKey}`);\n }\n }\n }\n }\n\n return newFields;\n}\n\n","/**\n * Error codes for all Lien-specific errors.\n * Used to identify error types programmatically.\n */\nexport enum LienErrorCode {\n // Configuration\n CONFIG_NOT_FOUND = 'CONFIG_NOT_FOUND',\n CONFIG_INVALID = 'CONFIG_INVALID',\n \n // Index\n INDEX_NOT_FOUND = 'INDEX_NOT_FOUND',\n INDEX_CORRUPTED = 'INDEX_CORRUPTED',\n \n // Embeddings\n EMBEDDING_MODEL_FAILED = 'EMBEDDING_MODEL_FAILED',\n EMBEDDING_GENERATION_FAILED = 'EMBEDDING_GENERATION_FAILED',\n \n // File System\n FILE_NOT_FOUND = 'FILE_NOT_FOUND',\n FILE_NOT_READABLE = 'FILE_NOT_READABLE',\n INVALID_PATH = 'INVALID_PATH',\n \n // Tool Input\n INVALID_INPUT = 'INVALID_INPUT',\n \n // System\n INTERNAL_ERROR = 'INTERNAL_ERROR',\n}\n\n","import { LienErrorCode } from './codes.js';\n\n// Re-export for consumers\nexport { LienErrorCode } from './codes.js';\n\n/**\n * Severity levels for errors\n */\nexport type ErrorSeverity = 'low' | 'medium' | 'high' | 'critical';\n\n/**\n * Base error class for all Lien-specific errors\n */\nexport class LienError extends Error {\n constructor(\n message: string,\n public readonly code: LienErrorCode,\n public readonly context?: Record<string, unknown>,\n public readonly severity: ErrorSeverity = 'medium',\n public readonly recoverable: boolean = true,\n public readonly retryable: boolean = false\n ) {\n super(message);\n this.name = 'LienError';\n \n // Maintains proper stack trace for where our error was thrown (only available on V8)\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n \n /**\n * Serialize error 
to JSON for MCP responses\n */\n toJSON() {\n return {\n error: this.message,\n code: this.code,\n severity: this.severity,\n recoverable: this.recoverable,\n context: this.context,\n };\n }\n \n /**\n * Check if this error is retryable\n */\n isRetryable(): boolean {\n return this.retryable;\n }\n \n /**\n * Check if this error is recoverable\n */\n isRecoverable(): boolean {\n return this.recoverable;\n }\n}\n\n/**\n * Configuration-related errors (loading, parsing, migration)\n */\nexport class ConfigError extends LienError {\n constructor(message: string, context?: Record<string, unknown>) {\n super(message, LienErrorCode.CONFIG_INVALID, context, 'medium', true, false);\n this.name = 'ConfigError';\n }\n}\n\n/**\n * Indexing-related errors (file processing, chunking)\n */\nexport class IndexingError extends LienError {\n constructor(\n message: string,\n public readonly file?: string,\n context?: Record<string, unknown>\n ) {\n super(message, LienErrorCode.INTERNAL_ERROR, { ...context, file }, 'medium', true, false);\n this.name = 'IndexingError';\n }\n}\n\n/**\n * Embedding generation errors\n */\nexport class EmbeddingError extends LienError {\n constructor(message: string, context?: Record<string, unknown>) {\n super(message, LienErrorCode.EMBEDDING_GENERATION_FAILED, context, 'high', true, true);\n this.name = 'EmbeddingError';\n }\n}\n\n/**\n * Vector database errors (connection, query, storage)\n */\nexport class DatabaseError extends LienError {\n constructor(message: string, context?: Record<string, unknown>) {\n super(message, LienErrorCode.INTERNAL_ERROR, context, 'high', true, true);\n this.name = 'DatabaseError';\n }\n}\n\n/**\n * Helper function to wrap unknown errors with context\n * @param error - Unknown error object to wrap\n * @param context - Context message describing what operation failed\n * @param additionalContext - Optional additional context data\n * @returns LienError with proper message and context\n */\nexport function wrapError(\n error: unknown,\n context: string,\n additionalContext?: Record<string, unknown>\n): LienError {\n const message = error instanceof Error ? error.message : String(error);\n const stack = error instanceof Error ? 
error.stack : undefined;\n \n const wrappedError = new LienError(\n `${context}: ${message}`,\n LienErrorCode.INTERNAL_ERROR,\n additionalContext\n );\n \n // Preserve original stack trace if available\n if (stack) {\n wrappedError.stack = `${wrappedError.stack}\\n\\nCaused by:\\n${stack}`;\n }\n \n return wrappedError;\n}\n\n/**\n * Type guard to check if an error is a LienError\n */\nexport function isLienError(error: unknown): error is LienError {\n return error instanceof LienError;\n}\n\n/**\n * Extract error message from unknown error type\n * @param error - Unknown error object\n * @returns Error message string\n */\nexport function getErrorMessage(error: unknown): string {\n if (error instanceof Error) {\n return error.message;\n }\n return String(error);\n}\n\n/**\n * Extract stack trace from unknown error type\n * @param error - Unknown error object\n * @returns Stack trace string or undefined\n */\nexport function getErrorStack(error: unknown): string | undefined {\n if (error instanceof Error) {\n return error.stack;\n }\n return undefined;\n}\n\n","import fs from 'fs/promises';\nimport path from 'path';\nimport { LienConfig, LegacyLienConfig, defaultConfig, isLegacyConfig, isModernConfig } from './schema.js';\nimport { deepMergeConfig } from './merge.js';\nimport { needsMigration as checkNeedsMigration, migrateConfig as performMigration } from './migration.js';\nimport { ConfigError, wrapError } from '../errors/index.js';\n\n/**\n * Validation result with errors and warnings\n */\nexport interface ValidationResult {\n valid: boolean;\n errors: string[];\n warnings: string[];\n}\n\n/**\n * Migration result with status and config\n */\nexport interface MigrationResult {\n migrated: boolean;\n backupPath?: string;\n config: LienConfig;\n}\n\n/**\n * ConfigService encapsulates all configuration operations including\n * loading, saving, migration, and validation.\n * \n * This service provides a single point of truth for config management\n * with comprehensive error handling and validation.\n */\nexport class ConfigService {\n private static readonly CONFIG_FILENAME = '.lien.config.json';\n \n /**\n * Load configuration from the specified directory.\n * Automatically handles migration if needed.\n * \n * @param rootDir - Root directory containing the config file\n * @returns Loaded and validated configuration\n * @throws {ConfigError} If config is invalid or cannot be loaded\n */\n async load(rootDir: string = process.cwd()): Promise<LienConfig> {\n const configPath = this.getConfigPath(rootDir);\n \n try {\n const configContent = await fs.readFile(configPath, 'utf-8');\n const userConfig = JSON.parse(configContent);\n \n // Check if migration is needed\n if (this.needsMigration(userConfig)) {\n console.log('🔄 Migrating config from v0.2.0 to v0.3.0...');\n \n const result = await this.migrate(rootDir);\n \n if (result.migrated && result.backupPath) {\n const backupFilename = path.basename(result.backupPath);\n console.log(`✅ Migration complete! 
Backup saved as ${backupFilename}`);\n console.log('📝 Your config now uses the framework-based structure.');\n }\n \n return result.config;\n }\n \n // Merge with defaults first\n const mergedConfig = deepMergeConfig(defaultConfig, userConfig as Partial<LienConfig>);\n \n // Then validate the merged config\n const validation = this.validate(mergedConfig);\n if (!validation.valid) {\n throw new ConfigError(\n `Invalid configuration:\\n${validation.errors.join('\\n')}`,\n { errors: validation.errors, warnings: validation.warnings }\n );\n }\n \n // Show warnings if any\n if (validation.warnings.length > 0) {\n console.warn('⚠️ Configuration warnings:');\n validation.warnings.forEach(warning => console.warn(` ${warning}`));\n }\n \n return mergedConfig;\n } catch (error) {\n if ((error as NodeJS.ErrnoException).code === 'ENOENT') {\n // Config doesn't exist, return defaults\n return defaultConfig;\n }\n \n if (error instanceof ConfigError) {\n throw error;\n }\n \n if (error instanceof SyntaxError) {\n throw new ConfigError(\n 'Failed to parse config file: Invalid JSON syntax',\n { path: configPath, originalError: error.message }\n );\n }\n \n throw wrapError(error, 'Failed to load configuration', { path: configPath });\n }\n }\n \n /**\n * Save configuration to the specified directory.\n * Validates the config before saving.\n * \n * @param rootDir - Root directory to save the config file\n * @param config - Configuration to save\n * @throws {ConfigError} If config is invalid or cannot be saved\n */\n async save(rootDir: string, config: LienConfig): Promise<void> {\n const configPath = this.getConfigPath(rootDir);\n \n // Validate before saving\n const validation = this.validate(config);\n if (!validation.valid) {\n throw new ConfigError(\n `Cannot save invalid configuration:\\n${validation.errors.join('\\n')}`,\n { errors: validation.errors }\n );\n }\n \n try {\n const configJson = JSON.stringify(config, null, 2) + '\\n';\n await fs.writeFile(configPath, configJson, 'utf-8');\n } catch (error) {\n throw wrapError(error, 'Failed to save configuration', { path: configPath });\n }\n }\n \n /**\n * Check if a configuration file exists in the specified directory.\n * \n * @param rootDir - Root directory to check\n * @returns True if config file exists\n */\n async exists(rootDir: string = process.cwd()): Promise<boolean> {\n const configPath = this.getConfigPath(rootDir);\n try {\n await fs.access(configPath);\n return true;\n } catch {\n return false;\n }\n }\n \n /**\n * Migrate configuration from v0.2.0 to v0.3.0 format.\n * Creates a backup of the original config file.\n * \n * @param rootDir - Root directory containing the config file\n * @returns Migration result with status and new config\n * @throws {ConfigError} If migration fails\n */\n async migrate(rootDir: string = process.cwd()): Promise<MigrationResult> {\n const configPath = this.getConfigPath(rootDir);\n \n try {\n // Read existing config\n const configContent = await fs.readFile(configPath, 'utf-8');\n const oldConfig = JSON.parse(configContent);\n \n // Check if migration is needed\n if (!this.needsMigration(oldConfig)) {\n return {\n migrated: false,\n config: oldConfig as LienConfig,\n };\n }\n \n // Perform migration\n const newConfig = performMigration(oldConfig);\n \n // Validate migrated config\n const validation = this.validate(newConfig);\n if (!validation.valid) {\n throw new ConfigError(\n `Migration produced invalid configuration:\\n${validation.errors.join('\\n')}`,\n { errors: validation.errors }\n );\n }\n \n // 
Create backup\n const backupPath = `${configPath}.v0.2.0.backup`;\n await fs.copyFile(configPath, backupPath);\n \n // Write migrated config\n await this.save(rootDir, newConfig);\n \n return {\n migrated: true,\n backupPath,\n config: newConfig,\n };\n } catch (error) {\n if ((error as NodeJS.ErrnoException).code === 'ENOENT') {\n return {\n migrated: false,\n config: defaultConfig,\n };\n }\n \n if (error instanceof ConfigError) {\n throw error;\n }\n \n throw wrapError(error, 'Configuration migration failed', { path: configPath });\n }\n }\n \n /**\n * Check if a config object needs migration from v0.2.0 to v0.3.0.\n * \n * @param config - Config object to check\n * @returns True if migration is needed\n */\n needsMigration(config: unknown): boolean {\n return checkNeedsMigration(config);\n }\n \n /**\n * Validate a configuration object.\n * Checks all constraints and returns detailed validation results.\n * \n * @param config - Configuration to validate\n * @returns Validation result with errors and warnings\n */\n validate(config: unknown): ValidationResult {\n const errors: string[] = [];\n const warnings: string[] = [];\n \n // Type check\n if (!config || typeof config !== 'object') {\n return {\n valid: false,\n errors: ['Configuration must be an object'],\n warnings: [],\n };\n }\n \n const cfg = config as Partial<LienConfig>;\n \n // Check for required top-level fields\n if (!cfg.version) {\n errors.push('Missing required field: version');\n }\n \n // Validate based on config type\n if (isModernConfig(cfg as LienConfig | LegacyLienConfig)) {\n this.validateModernConfig(cfg as LienConfig, errors, warnings);\n } else if (isLegacyConfig(cfg as LienConfig | LegacyLienConfig)) {\n this.validateLegacyConfig(cfg as LegacyLienConfig, errors, warnings);\n } else {\n errors.push('Configuration format not recognized. 
Must have either \"frameworks\" or \"indexing\" field');\n }\n \n return {\n valid: errors.length === 0,\n errors,\n warnings,\n };\n }\n \n /**\n * Validate a partial configuration object.\n * Useful for validating user input before merging with defaults.\n * \n * @param config - Partial configuration to validate\n * @returns Validation result with errors and warnings\n */\n validatePartial(config: Partial<LienConfig>): ValidationResult {\n const errors: string[] = [];\n const warnings: string[] = [];\n \n // Validate core settings if present\n if (config.core) {\n this.validateCoreConfig(config.core, errors, warnings);\n }\n \n // Validate MCP settings if present\n if (config.mcp) {\n this.validateMCPConfig(config.mcp, errors, warnings);\n }\n \n // Validate git detection settings if present\n if (config.gitDetection) {\n this.validateGitDetectionConfig(config.gitDetection, errors, warnings);\n }\n \n // Validate file watching settings if present\n if (config.fileWatching) {\n this.validateFileWatchingConfig(config.fileWatching, errors, warnings);\n }\n \n // Validate frameworks if present\n if (config.frameworks) {\n this.validateFrameworks(config.frameworks, errors, warnings);\n }\n \n return {\n valid: errors.length === 0,\n errors,\n warnings,\n };\n }\n \n /**\n * Get the full path to the config file\n */\n private getConfigPath(rootDir: string): string {\n return path.join(rootDir, ConfigService.CONFIG_FILENAME);\n }\n \n /**\n * Validate modern (v0.3.0+) configuration\n */\n private validateModernConfig(\n config: LienConfig,\n errors: string[],\n warnings: string[]\n ): void {\n // Validate core settings\n if (!config.core) {\n errors.push('Missing required field: core');\n return;\n }\n this.validateCoreConfig(config.core, errors, warnings);\n \n // Validate MCP settings\n if (!config.mcp) {\n errors.push('Missing required field: mcp');\n return;\n }\n this.validateMCPConfig(config.mcp, errors, warnings);\n \n // Validate git detection settings\n if (!config.gitDetection) {\n errors.push('Missing required field: gitDetection');\n return;\n }\n this.validateGitDetectionConfig(config.gitDetection, errors, warnings);\n \n // Validate file watching settings\n if (!config.fileWatching) {\n errors.push('Missing required field: fileWatching');\n return;\n }\n this.validateFileWatchingConfig(config.fileWatching, errors, warnings);\n \n // Validate frameworks\n if (!config.frameworks) {\n errors.push('Missing required field: frameworks');\n return;\n }\n this.validateFrameworks(config.frameworks, errors, warnings);\n }\n \n /**\n * Validate legacy (v0.2.0) configuration\n */\n private validateLegacyConfig(\n config: LegacyLienConfig,\n errors: string[],\n warnings: string[]\n ): void {\n warnings.push('Using legacy configuration format. 
Consider running \"lien init\" to migrate to v0.3.0');\n \n // Validate indexing settings\n if (!config.indexing) {\n errors.push('Missing required field: indexing');\n return;\n }\n \n const { indexing } = config;\n \n if (typeof indexing.chunkSize !== 'number' || indexing.chunkSize <= 0) {\n errors.push('indexing.chunkSize must be a positive number');\n }\n \n if (typeof indexing.chunkOverlap !== 'number' || indexing.chunkOverlap < 0) {\n errors.push('indexing.chunkOverlap must be a non-negative number');\n }\n \n if (typeof indexing.concurrency !== 'number' || indexing.concurrency < 1 || indexing.concurrency > 16) {\n errors.push('indexing.concurrency must be between 1 and 16');\n }\n \n if (typeof indexing.embeddingBatchSize !== 'number' || indexing.embeddingBatchSize <= 0) {\n errors.push('indexing.embeddingBatchSize must be a positive number');\n }\n \n // Validate MCP settings (same for both)\n if (config.mcp) {\n this.validateMCPConfig(config.mcp, errors, warnings);\n }\n }\n \n /**\n * Validate core configuration settings\n */\n private validateCoreConfig(\n core: Partial<LienConfig['core']>,\n errors: string[],\n warnings: string[]\n ): void {\n if (core.chunkSize !== undefined) {\n if (typeof core.chunkSize !== 'number' || core.chunkSize <= 0) {\n errors.push('core.chunkSize must be a positive number');\n } else if (core.chunkSize < 50) {\n warnings.push('core.chunkSize is very small (<50 lines). This may result in poor search quality');\n } else if (core.chunkSize > 500) {\n warnings.push('core.chunkSize is very large (>500 lines). This may impact performance');\n }\n }\n \n if (core.chunkOverlap !== undefined) {\n if (typeof core.chunkOverlap !== 'number' || core.chunkOverlap < 0) {\n errors.push('core.chunkOverlap must be a non-negative number');\n }\n }\n \n if (core.concurrency !== undefined) {\n if (typeof core.concurrency !== 'number' || core.concurrency < 1 || core.concurrency > 16) {\n errors.push('core.concurrency must be between 1 and 16');\n }\n }\n \n if (core.embeddingBatchSize !== undefined) {\n if (typeof core.embeddingBatchSize !== 'number' || core.embeddingBatchSize <= 0) {\n errors.push('core.embeddingBatchSize must be a positive number');\n } else if (core.embeddingBatchSize > 100) {\n warnings.push('core.embeddingBatchSize is very large (>100). 
This may cause memory issues');\n }\n }\n }\n \n /**\n * Validate MCP configuration settings\n */\n private validateMCPConfig(\n mcp: Partial<LienConfig['mcp']>,\n errors: string[],\n _warnings: string[]\n ): void {\n if (mcp.port !== undefined) {\n if (typeof mcp.port !== 'number' || mcp.port < 1024 || mcp.port > 65535) {\n errors.push('mcp.port must be between 1024 and 65535');\n }\n }\n \n if (mcp.transport !== undefined) {\n if (mcp.transport !== 'stdio' && mcp.transport !== 'socket') {\n errors.push('mcp.transport must be either \"stdio\" or \"socket\"');\n }\n }\n \n if (mcp.autoIndexOnFirstRun !== undefined) {\n if (typeof mcp.autoIndexOnFirstRun !== 'boolean') {\n errors.push('mcp.autoIndexOnFirstRun must be a boolean');\n }\n }\n }\n \n /**\n * Validate git detection configuration settings\n */\n private validateGitDetectionConfig(\n gitDetection: Partial<LienConfig['gitDetection']>,\n errors: string[],\n _warnings: string[]\n ): void {\n if (gitDetection.enabled !== undefined) {\n if (typeof gitDetection.enabled !== 'boolean') {\n errors.push('gitDetection.enabled must be a boolean');\n }\n }\n \n if (gitDetection.pollIntervalMs !== undefined) {\n if (typeof gitDetection.pollIntervalMs !== 'number' || gitDetection.pollIntervalMs < 100) {\n errors.push('gitDetection.pollIntervalMs must be at least 100ms');\n } else if (gitDetection.pollIntervalMs < 1000) {\n _warnings.push('gitDetection.pollIntervalMs is very short (<1s). This may impact performance');\n }\n }\n }\n \n /**\n * Validate file watching configuration settings\n */\n private validateFileWatchingConfig(\n fileWatching: Partial<LienConfig['fileWatching']>,\n errors: string[],\n warnings: string[]\n ): void {\n if (fileWatching.enabled !== undefined) {\n if (typeof fileWatching.enabled !== 'boolean') {\n errors.push('fileWatching.enabled must be a boolean');\n }\n }\n \n if (fileWatching.debounceMs !== undefined) {\n if (typeof fileWatching.debounceMs !== 'number' || fileWatching.debounceMs < 0) {\n errors.push('fileWatching.debounceMs must be a non-negative number');\n } else if (fileWatching.debounceMs < 100) {\n warnings.push('fileWatching.debounceMs is very short (<100ms). 
This may cause excessive reindexing');\n }\n }\n }\n \n /**\n * Validate frameworks configuration\n */\n private validateFrameworks(\n frameworks: unknown[],\n errors: string[],\n warnings: string[]\n ): void {\n if (!Array.isArray(frameworks)) {\n errors.push('frameworks must be an array');\n return;\n }\n \n frameworks.forEach((framework, index) => {\n if (!framework || typeof framework !== 'object') {\n errors.push(`frameworks[${index}] must be an object`);\n return;\n }\n \n const fw = framework as Partial<any>;\n \n // Validate required fields\n if (!fw.name) {\n errors.push(`frameworks[${index}] missing required field: name`);\n }\n \n if (fw.path === undefined) {\n errors.push(`frameworks[${index}] missing required field: path`);\n } else if (typeof fw.path !== 'string') {\n errors.push(`frameworks[${index}].path must be a string`);\n } else if (path.isAbsolute(fw.path)) {\n errors.push(`frameworks[${index}].path must be relative, got: ${fw.path}`);\n }\n \n if (fw.enabled === undefined) {\n errors.push(`frameworks[${index}] missing required field: enabled`);\n } else if (typeof fw.enabled !== 'boolean') {\n errors.push(`frameworks[${index}].enabled must be a boolean`);\n }\n \n if (!fw.config) {\n errors.push(`frameworks[${index}] missing required field: config`);\n } else {\n this.validateFrameworkConfig(fw.config, `frameworks[${index}].config`, errors, warnings);\n }\n });\n }\n \n /**\n * Validate framework-specific configuration\n */\n private validateFrameworkConfig(\n config: any,\n prefix: string,\n errors: string[],\n _warnings: string[]\n ): void {\n if (!config || typeof config !== 'object') {\n errors.push(`${prefix} must be an object`);\n return;\n }\n \n // Validate include patterns\n if (!Array.isArray(config.include)) {\n errors.push(`${prefix}.include must be an array`);\n } else {\n config.include.forEach((pattern: unknown, i: number) => {\n if (typeof pattern !== 'string') {\n errors.push(`${prefix}.include[${i}] must be a string`);\n }\n });\n }\n \n // Validate exclude patterns\n if (!Array.isArray(config.exclude)) {\n errors.push(`${prefix}.exclude must be an array`);\n } else {\n config.exclude.forEach((pattern: unknown, i: number) => {\n if (typeof pattern !== 'string') {\n errors.push(`${prefix}.exclude[${i}] must be a string`);\n }\n });\n }\n }\n}\n\n// Export a singleton instance for convenience\nexport const configService = new ConfigService();\n\n","import { exec } from 'child_process';\nimport { promisify } from 'util';\nimport fs from 'fs/promises';\nimport path from 'path';\n\nconst execAsync = promisify(exec);\n\n/**\n * Checks if a directory is a git repository.\n * \n * @param rootDir - Directory to check\n * @returns true if directory is a git repo, false otherwise\n */\nexport async function isGitRepo(rootDir: string): Promise<boolean> {\n try {\n const gitDir = path.join(rootDir, '.git');\n await fs.access(gitDir);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Gets the current git branch name.\n * \n * @param rootDir - Root directory of the git repository\n * @returns Branch name (e.g., \"main\", \"feature-branch\")\n * @throws Error if not a git repo or git command fails\n */\nexport async function getCurrentBranch(rootDir: string): Promise<string> {\n try {\n const { stdout } = await execAsync('git rev-parse --abbrev-ref HEAD', {\n cwd: rootDir,\n timeout: 5000, // 5 second timeout\n });\n return stdout.trim();\n } catch (error) {\n throw new Error(`Failed to get current branch: ${error}`);\n }\n}\n\n/**\n * Gets the current 
git commit SHA (HEAD).\n * \n * @param rootDir - Root directory of the git repository\n * @returns Commit SHA (full 40-character hash)\n * @throws Error if not a git repo or git command fails\n */\nexport async function getCurrentCommit(rootDir: string): Promise<string> {\n try {\n const { stdout } = await execAsync('git rev-parse HEAD', {\n cwd: rootDir,\n timeout: 5000,\n });\n return stdout.trim();\n } catch (error) {\n throw new Error(`Failed to get current commit: ${error}`);\n }\n}\n\n/**\n * Gets the list of files that changed between two git references.\n * \n * @param rootDir - Root directory of the git repository\n * @param fromRef - Starting reference (branch name, commit SHA, or tag)\n * @param toRef - Ending reference (branch name, commit SHA, or tag)\n * @returns Array of file paths (relative to repo root) that changed\n * @throws Error if git command fails\n */\nexport async function getChangedFiles(\n rootDir: string,\n fromRef: string,\n toRef: string\n): Promise<string[]> {\n try {\n const { stdout } = await execAsync(\n `git diff --name-only ${fromRef}...${toRef}`,\n {\n cwd: rootDir,\n timeout: 10000, // 10 second timeout for diffs\n }\n );\n \n const files = stdout\n .trim()\n .split('\\n')\n .filter(Boolean)\n .map(file => path.join(rootDir, file)); // Convert to absolute paths\n \n return files;\n } catch (error) {\n throw new Error(`Failed to get changed files: ${error}`);\n }\n}\n\n/**\n * Gets the list of files that changed in a specific commit.\n * \n * @param rootDir - Root directory of the git repository\n * @param commitSha - Commit SHA to check\n * @returns Array of file paths (absolute) that changed in this commit\n * @throws Error if git command fails\n */\nexport async function getChangedFilesInCommit(\n rootDir: string,\n commitSha: string\n): Promise<string[]> {\n try {\n const { stdout } = await execAsync(\n `git diff-tree --no-commit-id --name-only -r ${commitSha}`,\n {\n cwd: rootDir,\n timeout: 10000,\n }\n );\n \n const files = stdout\n .trim()\n .split('\\n')\n .filter(Boolean)\n .map(file => path.join(rootDir, file)); // Convert to absolute paths\n \n return files;\n } catch (error) {\n throw new Error(`Failed to get changed files in commit: ${error}`);\n }\n}\n\n/**\n * Gets the list of files that changed between two commits.\n * More efficient than getChangedFiles for commit-to-commit comparisons.\n * \n * @param rootDir - Root directory of the git repository\n * @param fromCommit - Starting commit SHA\n * @param toCommit - Ending commit SHA\n * @returns Array of file paths (absolute) that changed between commits\n * @throws Error if git command fails\n */\nexport async function getChangedFilesBetweenCommits(\n rootDir: string,\n fromCommit: string,\n toCommit: string\n): Promise<string[]> {\n try {\n const { stdout } = await execAsync(\n `git diff --name-only ${fromCommit} ${toCommit}`,\n {\n cwd: rootDir,\n timeout: 10000,\n }\n );\n \n const files = stdout\n .trim()\n .split('\\n')\n .filter(Boolean)\n .map(file => path.join(rootDir, file)); // Convert to absolute paths\n \n return files;\n } catch (error) {\n throw new Error(`Failed to get changed files between commits: ${error}`);\n }\n}\n\n/**\n * Checks if git is installed and available.\n * \n * @returns true if git is available, false otherwise\n */\nexport async function isGitAvailable(): Promise<boolean> {\n try {\n await execAsync('git --version', { timeout: 3000 });\n return true;\n } catch {\n return false;\n }\n}\n\n","import fs from 'fs/promises';\nimport path from 
'path';\n\nconst VERSION_FILE = '.lien-index-version';\n\n/**\n * Writes a version timestamp file to mark when the index was last updated.\n * This file is used by the MCP server to detect when it needs to reconnect.\n * \n * @param indexPath - Path to the index directory\n */\nexport async function writeVersionFile(indexPath: string): Promise<void> {\n try {\n const versionFilePath = path.join(indexPath, VERSION_FILE);\n const timestamp = Date.now().toString();\n await fs.writeFile(versionFilePath, timestamp, 'utf-8');\n } catch (error) {\n // Don't throw - version file is a convenience feature, not critical\n console.error(`Warning: Failed to write version file: ${error}`);\n }\n}\n\n/**\n * Reads the version timestamp from the index directory.\n * Returns 0 if the file doesn't exist (e.g., old index).\n * \n * @param indexPath - Path to the index directory\n * @returns Version timestamp, or 0 if not found\n */\nexport async function readVersionFile(indexPath: string): Promise<number> {\n try {\n const versionFilePath = path.join(indexPath, VERSION_FILE);\n const content = await fs.readFile(versionFilePath, 'utf-8');\n const timestamp = parseInt(content.trim(), 10);\n return isNaN(timestamp) ? 0 : timestamp;\n } catch (error) {\n // File doesn't exist or can't be read - treat as version 0\n return 0;\n }\n}\n\n","import { glob } from 'glob';\nimport ignore from 'ignore';\nimport fs from 'fs/promises';\nimport path from 'path';\nimport { ScanOptions } from './types.js';\nimport { LienConfig, FrameworkInstance } from '../config/schema.js';\n\n/**\n * Scan codebase using framework-aware configuration\n * @param rootDir - Project root directory\n * @param config - Lien configuration with frameworks\n * @returns Array of file paths relative to rootDir\n */\nexport async function scanCodebaseWithFrameworks(\n rootDir: string,\n config: LienConfig\n): Promise<string[]> {\n const allFiles: string[] = [];\n \n // Scan each framework\n for (const framework of config.frameworks) {\n if (!framework.enabled) {\n continue;\n }\n \n const frameworkFiles = await scanFramework(rootDir, framework);\n allFiles.push(...frameworkFiles);\n }\n \n return allFiles;\n}\n\n/**\n * Scan files for a specific framework instance\n */\nasync function scanFramework(\n rootDir: string,\n framework: FrameworkInstance\n): Promise<string[]> {\n const frameworkPath = path.join(rootDir, framework.path);\n \n // Load .gitignore from framework path\n const gitignorePath = path.join(frameworkPath, '.gitignore');\n let ig = ignore();\n \n try {\n const gitignoreContent = await fs.readFile(gitignorePath, 'utf-8');\n ig = ignore().add(gitignoreContent);\n } catch (e) {\n // No .gitignore in framework path, try root\n const rootGitignorePath = path.join(rootDir, '.gitignore');\n try {\n const gitignoreContent = await fs.readFile(rootGitignorePath, 'utf-8');\n ig = ignore().add(gitignoreContent);\n } catch (e) {\n // No .gitignore at all, that's fine\n }\n }\n \n // Add framework-specific exclusions\n ig.add([\n ...framework.config.exclude,\n '.lien/**',\n ]);\n \n // Find all files matching framework patterns\n const allFiles: string[] = [];\n \n for (const pattern of framework.config.include) {\n const files = await glob(pattern, {\n cwd: frameworkPath,\n absolute: false, // Get paths relative to framework path\n nodir: true,\n ignore: framework.config.exclude,\n });\n allFiles.push(...files);\n }\n \n // Remove duplicates\n const uniqueFiles = Array.from(new Set(allFiles));\n \n // Filter using ignore patterns and prefix with 
framework path\n return uniqueFiles\n .filter(file => !ig.ignores(file))\n .map(file => {\n // Return path relative to root: framework.path/file\n return framework.path === '.' \n ? file \n : path.join(framework.path, file);\n });\n}\n\n/**\n * Legacy scan function for backwards compatibility\n * @deprecated Use scanCodebaseWithFrameworks instead\n */\nexport async function scanCodebase(options: ScanOptions): Promise<string[]> {\n const { rootDir, includePatterns = [], excludePatterns = [] } = options;\n \n // Load .gitignore\n const gitignorePath = path.join(rootDir, '.gitignore');\n let ig = ignore();\n \n try {\n const gitignoreContent = await fs.readFile(gitignorePath, 'utf-8');\n ig = ignore().add(gitignoreContent);\n } catch (e) {\n // No .gitignore, that's fine\n }\n \n // Add default exclusions\n ig.add([\n 'node_modules/**',\n '.git/**',\n 'dist/**',\n 'build/**',\n '*.min.js',\n '*.min.css',\n '.lien/**',\n ...excludePatterns,\n ]);\n \n // Determine patterns to search for\n const patterns = includePatterns.length > 0 \n ? includePatterns \n : ['**/*.{ts,tsx,js,jsx,py,go,rs,java,cpp,c,h,md,mdx}'];\n \n // Find all code files\n const allFiles: string[] = [];\n \n for (const pattern of patterns) {\n const files = await glob(pattern, {\n cwd: rootDir,\n absolute: true,\n nodir: true,\n ignore: ['node_modules/**', '.git/**'],\n });\n allFiles.push(...files);\n }\n \n // Remove duplicates\n const uniqueFiles = Array.from(new Set(allFiles));\n \n // Filter using ignore patterns\n return uniqueFiles.filter(file => {\n const relativePath = path.relative(rootDir, file);\n return !ig.ignores(relativePath);\n });\n}\n\nexport function detectLanguage(filepath: string): string {\n const ext = path.extname(filepath).toLowerCase();\n \n const languageMap: Record<string, string> = {\n '.ts': 'typescript',\n '.tsx': 'typescript',\n '.js': 'javascript',\n '.jsx': 'javascript',\n '.mjs': 'javascript',\n '.cjs': 'javascript',\n '.vue': 'vue',\n '.py': 'python',\n '.go': 'go',\n '.rs': 'rust',\n '.java': 'java',\n '.cpp': 'cpp',\n '.cc': 'cpp',\n '.cxx': 'cpp',\n '.c': 'c',\n '.h': 'c',\n '.hpp': 'cpp',\n '.php': 'php',\n '.rb': 'ruby',\n '.swift': 'swift',\n '.kt': 'kotlin',\n '.cs': 'csharp',\n '.scala': 'scala',\n '.liquid': 'liquid',\n '.md': 'markdown',\n '.mdx': 'markdown',\n '.markdown': 'markdown',\n };\n \n return languageMap[ext] || 'unknown';\n}\n\n","/**\n * Symbol extraction utilities for different programming languages.\n * Extracts function, class, and interface names from code chunks for better indexing.\n */\n\nexport interface ExtractedSymbols {\n functions: string[];\n classes: string[];\n interfaces: string[];\n}\n\n/**\n * Extract symbols (functions, classes, interfaces) from code content.\n * \n * @param content - The code content to extract symbols from\n * @param language - The programming language of the content\n * @returns Extracted symbols organized by type\n */\nexport function extractSymbols(\n content: string,\n language: string\n): ExtractedSymbols {\n const symbols: ExtractedSymbols = {\n functions: [],\n classes: [],\n interfaces: [],\n };\n \n const normalizedLang = language.toLowerCase();\n \n switch (normalizedLang) {\n case 'typescript':\n case 'tsx':\n symbols.functions = extractTSFunctions(content);\n symbols.classes = extractTSClasses(content);\n symbols.interfaces = extractTSInterfaces(content);\n break;\n \n case 'javascript':\n case 'jsx':\n symbols.functions = extractJSFunctions(content);\n symbols.classes = extractJSClasses(content);\n break;\n \n case 
'python':\n case 'py':\n symbols.functions = extractPythonFunctions(content);\n symbols.classes = extractPythonClasses(content);\n break;\n \n case 'php':\n symbols.functions = extractPHPFunctions(content);\n symbols.classes = extractPHPClasses(content);\n symbols.interfaces = extractPHPInterfaces(content);\n break;\n \n case 'vue':\n // Extract from <script> blocks (handles both Options API and Composition API)\n symbols.functions = extractVueFunctions(content);\n symbols.classes = extractVueComponents(content);\n break;\n \n case 'go':\n symbols.functions = extractGoFunctions(content);\n symbols.interfaces = extractGoInterfaces(content);\n break;\n \n case 'java':\n symbols.functions = extractJavaFunctions(content);\n symbols.classes = extractJavaClasses(content);\n symbols.interfaces = extractJavaInterfaces(content);\n break;\n \n case 'csharp':\n case 'cs':\n symbols.functions = extractCSharpFunctions(content);\n symbols.classes = extractCSharpClasses(content);\n symbols.interfaces = extractCSharpInterfaces(content);\n break;\n \n case 'ruby':\n case 'rb':\n symbols.functions = extractRubyFunctions(content);\n symbols.classes = extractRubyClasses(content);\n break;\n \n case 'rust':\n case 'rs':\n symbols.functions = extractRustFunctions(content);\n break;\n }\n \n return symbols;\n}\n\n// TypeScript / JavaScript Functions\nfunction extractTSFunctions(content: string): string[] {\n const names = new Set<string>();\n \n // Regular functions: function name(...) or async function name(...)\n const functionMatches = content.matchAll(/(?:async\\s+)?function\\s+(\\w+)\\s*\\(/g);\n for (const match of functionMatches) {\n names.add(match[1]);\n }\n \n // Arrow functions: const/let/var name = (...) =>\n const arrowMatches = content.matchAll(/(?:const|let|var)\\s+(\\w+)\\s*=\\s*(?:async\\s*)?\\([^)]*\\)\\s*=>/g);\n for (const match of arrowMatches) {\n names.add(match[1]);\n }\n \n // Method definitions: name(...) { or async name(...) 
{\n const methodMatches = content.matchAll(/(?:async\\s+)?(\\w+)\\s*\\([^)]*\\)\\s*[:{]/g);\n for (const match of methodMatches) {\n // Exclude common keywords\n if (!['if', 'for', 'while', 'switch', 'catch'].includes(match[1])) {\n names.add(match[1]);\n }\n }\n \n // Export function\n const exportMatches = content.matchAll(/export\\s+(?:async\\s+)?function\\s+(\\w+)\\s*\\(/g);\n for (const match of exportMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction extractJSFunctions(content: string): string[] {\n return extractTSFunctions(content); // Same patterns\n}\n\nfunction extractTSClasses(content: string): string[] {\n const names = new Set<string>();\n \n // Class declarations: class Name or export class Name\n const classMatches = content.matchAll(/(?:export\\s+)?(?:abstract\\s+)?class\\s+(\\w+)/g);\n for (const match of classMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction extractJSClasses(content: string): string[] {\n return extractTSClasses(content); // Same patterns\n}\n\nfunction extractTSInterfaces(content: string): string[] {\n const names = new Set<string>();\n \n // Interface declarations: interface Name or export interface Name\n const interfaceMatches = content.matchAll(/(?:export\\s+)?interface\\s+(\\w+)/g);\n for (const match of interfaceMatches) {\n names.add(match[1]);\n }\n \n // Type aliases: type Name = or export type Name =\n const typeMatches = content.matchAll(/(?:export\\s+)?type\\s+(\\w+)\\s*=/g);\n for (const match of typeMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\n// Python Functions\nfunction extractPythonFunctions(content: string): string[] {\n const names = new Set<string>();\n \n // Function definitions: def name(...):\n const functionMatches = content.matchAll(/def\\s+(\\w+)\\s*\\(/g);\n for (const match of functionMatches) {\n names.add(match[1]);\n }\n \n // Async functions: async def name(...):\n const asyncMatches = content.matchAll(/async\\s+def\\s+(\\w+)\\s*\\(/g);\n for (const match of asyncMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction extractPythonClasses(content: string): string[] {\n const names = new Set<string>();\n \n // Class definitions: class Name or class Name(Base):\n const classMatches = content.matchAll(/class\\s+(\\w+)(?:\\s*\\(|:)/g);\n for (const match of classMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\n// PHP Functions\nfunction extractPHPFunctions(content: string): string[] {\n const names = new Set<string>();\n \n // Function definitions: function name(...) 
or public function name(...)\n const functionMatches = content.matchAll(/(?:public|private|protected)?\\s*function\\s+(\\w+)\\s*\\(/g);\n for (const match of functionMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction extractPHPClasses(content: string): string[] {\n const names = new Set<string>();\n \n // Class definitions: class Name or abstract class Name\n const classMatches = content.matchAll(/(?:abstract\\s+)?class\\s+(\\w+)/g);\n for (const match of classMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction extractPHPInterfaces(content: string): string[] {\n const names = new Set<string>();\n \n // Interface definitions: interface Name\n const interfaceMatches = content.matchAll(/interface\\s+(\\w+)/g);\n for (const match of interfaceMatches) {\n names.add(match[1]);\n }\n \n // Trait definitions: trait Name\n const traitMatches = content.matchAll(/trait\\s+(\\w+)/g);\n for (const match of traitMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\n// Go Functions\nfunction extractGoFunctions(content: string): string[] {\n const names = new Set<string>();\n \n // Function definitions: func Name(...) or func (r *Receiver) Name(...)\n const functionMatches = content.matchAll(/func\\s+(?:\\(\\w+\\s+\\*?\\w+\\)\\s+)?(\\w+)\\s*\\(/g);\n for (const match of functionMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction extractGoInterfaces(content: string): string[] {\n const names = new Set<string>();\n \n // Interface definitions: type Name interface {\n const interfaceMatches = content.matchAll(/type\\s+(\\w+)\\s+interface\\s*\\{/g);\n for (const match of interfaceMatches) {\n names.add(match[1]);\n }\n \n // Struct definitions: type Name struct {\n const structMatches = content.matchAll(/type\\s+(\\w+)\\s+struct\\s*\\{/g);\n for (const match of structMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\n// Java Functions\nfunction extractJavaFunctions(content: string): string[] {\n const names = new Set<string>();\n \n // Method definitions: public/private/protected return_type name(...)\n const methodMatches = content.matchAll(/(?:public|private|protected)\\s+(?:static\\s+)?(?:\\w+(?:<[^>]+>)?)\\s+(\\w+)\\s*\\(/g);\n for (const match of methodMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction extractJavaClasses(content: string): string[] {\n const names = new Set<string>();\n \n // Class definitions: public class Name or abstract class Name\n const classMatches = content.matchAll(/(?:public\\s+)?(?:abstract\\s+)?class\\s+(\\w+)/g);\n for (const match of classMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction extractJavaInterfaces(content: string): string[] {\n const names = new Set<string>();\n \n // Interface definitions: public interface Name\n const interfaceMatches = content.matchAll(/(?:public\\s+)?interface\\s+(\\w+)/g);\n for (const match of interfaceMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\n// C# Functions\nfunction extractCSharpFunctions(content: string): string[] {\n const names = new Set<string>();\n \n // Method definitions: public/private/protected return_type Name(...)\n const methodMatches = content.matchAll(/(?:public|private|protected|internal)\\s+(?:static\\s+)?(?:async\\s+)?(?:\\w+(?:<[^>]+>)?)\\s+(\\w+)\\s*\\(/g);\n for (const match of methodMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction 
extractCSharpClasses(content: string): string[] {\n const names = new Set<string>();\n \n // Class definitions: public class Name or abstract class Name\n const classMatches = content.matchAll(/(?:public|internal)?\\s*(?:abstract\\s+)?class\\s+(\\w+)/g);\n for (const match of classMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction extractCSharpInterfaces(content: string): string[] {\n const names = new Set<string>();\n \n // Interface definitions: public interface Name\n const interfaceMatches = content.matchAll(/(?:public|internal)?\\s*interface\\s+(\\w+)/g);\n for (const match of interfaceMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\n// Ruby Functions\nfunction extractRubyFunctions(content: string): string[] {\n const names = new Set<string>();\n \n // Method definitions: def name or def self.name\n const methodMatches = content.matchAll(/def\\s+(?:self\\.)?(\\w+)/g);\n for (const match of methodMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\nfunction extractRubyClasses(content: string): string[] {\n const names = new Set<string>();\n \n // Class definitions: class Name or class Name < Base\n const classMatches = content.matchAll(/class\\s+(\\w+)/g);\n for (const match of classMatches) {\n names.add(match[1]);\n }\n \n // Module definitions: module Name\n const moduleMatches = content.matchAll(/module\\s+(\\w+)/g);\n for (const match of moduleMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\n// Rust Functions\nfunction extractRustFunctions(content: string): string[] {\n const names = new Set<string>();\n \n // Function definitions: fn name(...) or pub fn name(...)\n const functionMatches = content.matchAll(/(?:pub\\s+)?fn\\s+(\\w+)\\s*\\(/g);\n for (const match of functionMatches) {\n names.add(match[1]);\n }\n \n // Struct definitions: struct Name {\n const structMatches = content.matchAll(/(?:pub\\s+)?struct\\s+(\\w+)/g);\n for (const match of structMatches) {\n names.add(match[1]);\n }\n \n // Trait definitions: trait Name {\n const traitMatches = content.matchAll(/(?:pub\\s+)?trait\\s+(\\w+)/g);\n for (const match of traitMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\n// Vue Functions\nfunction extractVueFunctions(content: string): string[] {\n const names = new Set<string>();\n \n // Extract script content from Vue SFC\n const scriptMatch = content.match(/<script[^>]*>([\\s\\S]*?)<\\/script>/);\n if (!scriptMatch) return [];\n \n const scriptContent = scriptMatch[1];\n \n // Composition API: const/function name = ...\n const compositionMatches = scriptContent.matchAll(/(?:const|function)\\s+(\\w+)\\s*=/g);\n for (const match of compositionMatches) {\n names.add(match[1]);\n }\n \n // Options API methods\n const methodMatches = scriptContent.matchAll(/(\\w+)\\s*\\([^)]*\\)\\s*{/g);\n for (const match of methodMatches) {\n names.add(match[1]);\n }\n \n return Array.from(names);\n}\n\n// Vue Components\nfunction extractVueComponents(content: string): string[] {\n const names = new Set<string>();\n \n // Extract component name from filename convention or export\n const scriptMatch = content.match(/<script[^>]*>([\\s\\S]*?)<\\/script>/);\n if (!scriptMatch) return [];\n \n const scriptContent = scriptMatch[1];\n \n // export default { name: 'ComponentName' }\n const nameMatch = scriptContent.match(/name:\\s*['\"](\\w+)['\"]/);\n if (nameMatch) {\n names.add(nameMatch[1]);\n }\n \n // defineComponent or <script setup> components\n const 
defineComponentMatch = scriptContent.match(/defineComponent\\s*\\(/);\n if (defineComponentMatch) {\n names.add('VueComponent');\n }\n \n return Array.from(names);\n}\n\n","import Parser from 'tree-sitter';\nimport TypeScript from 'tree-sitter-typescript';\nimport JavaScript from 'tree-sitter-javascript';\nimport PHPParser from 'tree-sitter-php';\nimport { extname } from 'path';\nimport type { ASTParseResult, SupportedLanguage } from './types.js';\n\n/**\n * Cache for parser instances to avoid recreating them\n */\nconst parserCache = new Map<SupportedLanguage, Parser>();\n\n/**\n * Tree-sitter language grammar type\n * Using any here due to type incompatibility between parser packages and tree-sitter core\n */\ntype TreeSitterLanguage = any;\n\n/**\n * Language configuration mapping\n */\nconst languageConfig: Record<SupportedLanguage, TreeSitterLanguage> = {\n typescript: TypeScript.typescript,\n javascript: JavaScript,\n php: PHPParser.php, // Note: tree-sitter-php exports both 'php' (mixed HTML/PHP) and 'php_only'\n};\n\n/**\n * Get or create a cached parser instance for a language\n */\nfunction getParser(language: SupportedLanguage): Parser {\n if (!parserCache.has(language)) {\n const parser = new Parser();\n const grammar = languageConfig[language];\n \n if (!grammar) {\n throw new Error(`No grammar available for language: ${language}`);\n }\n \n parser.setLanguage(grammar);\n parserCache.set(language, parser);\n }\n \n return parserCache.get(language)!;\n}\n\n/**\n * Detect language from file extension\n * Uses path.extname() to handle edge cases like multiple dots in filenames\n */\nexport function detectLanguage(filePath: string): SupportedLanguage | null {\n // extname returns extension with leading dot (e.g., '.ts')\n // Remove the dot and convert to lowercase\n const ext = extname(filePath).slice(1).toLowerCase();\n \n switch (ext) {\n case 'ts':\n case 'tsx':\n return 'typescript';\n case 'js':\n case 'jsx':\n case 'mjs':\n case 'cjs':\n return 'javascript';\n case 'php':\n return 'php';\n default:\n return null;\n }\n}\n\n/**\n * Check if a file is supported for AST parsing\n */\nexport function isASTSupported(filePath: string): boolean {\n return detectLanguage(filePath) !== null;\n}\n\n/**\n * Parse source code into an AST using Tree-sitter\n * \n * **Known Limitation:** Tree-sitter may throw \"Invalid argument\" errors on very large files\n * (1000+ lines). This is a limitation of Tree-sitter's internal buffer handling. When this\n * occurs, callers should fall back to line-based chunking (handled automatically by chunker.ts).\n * \n * @param content - Source code to parse\n * @param language - Programming language\n * @returns Parse result with tree or error\n */\nexport function parseAST(content: string, language: SupportedLanguage): ASTParseResult {\n try {\n const parser = getParser(language);\n const tree = parser.parse(content);\n \n // Check for parse errors (hasError is a property, not a method)\n if (tree.rootNode.hasError) {\n return {\n tree,\n error: 'Parse completed with errors',\n };\n }\n \n return { tree };\n } catch (error) {\n return {\n tree: null,\n error: error instanceof Error ? 
error.message : 'Unknown parse error',\n };\n }\n}\n\n/**\n * Clear parser cache (useful for testing)\n */\nexport function clearParserCache(): void {\n parserCache.clear();\n}\n\n","import type Parser from 'tree-sitter';\nimport type { SymbolInfo } from './types.js';\n\n/**\n * Type for symbol extractor functions\n */\ntype SymbolExtractor = (\n node: Parser.SyntaxNode,\n content: string,\n parentClass?: string\n) => SymbolInfo | null;\n\n/**\n * Extract function declaration info (function_declaration, function)\n */\nfunction extractFunctionInfo(\n node: Parser.SyntaxNode,\n content: string,\n parentClass?: string\n): SymbolInfo | null {\n const nameNode = node.childForFieldName('name');\n if (!nameNode) return null;\n \n return {\n name: nameNode.text,\n type: parentClass ? 'method' : 'function',\n startLine: node.startPosition.row + 1,\n endLine: node.endPosition.row + 1,\n parentClass,\n signature: extractSignature(node, content),\n parameters: extractParameters(node, content),\n returnType: extractReturnType(node, content),\n complexity: calculateComplexity(node),\n };\n }\n \n/**\n * Extract arrow function or function expression info\n */\nfunction extractArrowFunctionInfo(\n node: Parser.SyntaxNode,\n content: string,\n parentClass?: string\n): SymbolInfo | null {\n // Try to find variable name for arrow functions\n const parent = node.parent;\n let name = 'anonymous';\n \n if (parent?.type === 'variable_declarator') {\n const nameNode = parent.childForFieldName('name');\n name = nameNode?.text || 'anonymous';\n }\n \n return {\n name,\n type: parentClass ? 'method' : 'function',\n startLine: node.startPosition.row + 1,\n endLine: node.endPosition.row + 1,\n parentClass,\n signature: extractSignature(node, content),\n parameters: extractParameters(node, content),\n complexity: calculateComplexity(node),\n };\n }\n \n/**\n * Extract method definition info\n */\nfunction extractMethodInfo(\n node: Parser.SyntaxNode,\n content: string,\n parentClass?: string\n): SymbolInfo | null {\n const nameNode = node.childForFieldName('name');\n if (!nameNode) return null;\n \n return {\n name: nameNode.text,\n type: 'method',\n startLine: node.startPosition.row + 1,\n endLine: node.endPosition.row + 1,\n parentClass,\n signature: extractSignature(node, content),\n parameters: extractParameters(node, content),\n returnType: extractReturnType(node, content),\n complexity: calculateComplexity(node),\n };\n }\n \n/**\n * Extract class declaration info\n */\nfunction extractClassInfo(\n node: Parser.SyntaxNode,\n _content: string,\n _parentClass?: string\n): SymbolInfo | null {\n const nameNode = node.childForFieldName('name');\n if (!nameNode) return null;\n \n return {\n name: nameNode.text,\n type: 'class',\n startLine: node.startPosition.row + 1,\n endLine: node.endPosition.row + 1,\n signature: `class ${nameNode.text}`,\n };\n }\n \n/**\n * Extract interface declaration info (TypeScript)\n */\nfunction extractInterfaceInfo(\n node: Parser.SyntaxNode,\n _content: string,\n _parentClass?: string\n): SymbolInfo | null {\n const nameNode = node.childForFieldName('name');\n if (!nameNode) return null;\n \n return {\n name: nameNode.text,\n type: 'interface',\n startLine: node.startPosition.row + 1,\n endLine: node.endPosition.row + 1,\n signature: `interface ${nameNode.text}`,\n };\n }\n \n/**\n * Map of AST node types to their specialized extractors\n */\nconst symbolExtractors: Record<string, SymbolExtractor> = {\n // TypeScript/JavaScript\n 'function_declaration': extractFunctionInfo,\n 
'function': extractFunctionInfo,\n 'arrow_function': extractArrowFunctionInfo,\n 'function_expression': extractArrowFunctionInfo,\n 'method_definition': extractMethodInfo,\n 'class_declaration': extractClassInfo,\n 'interface_declaration': extractInterfaceInfo,\n \n // PHP\n 'function_definition': extractFunctionInfo, // PHP functions\n 'method_declaration': extractMethodInfo, // PHP methods\n};\n\n/**\n * Extract symbol information from an AST node using specialized extractors\n * \n * @param node - AST node to extract info from\n * @param content - Source code content\n * @param parentClass - Parent class name if this is a method\n * @returns Symbol information or null\n */\nexport function extractSymbolInfo(\n node: Parser.SyntaxNode,\n content: string,\n parentClass?: string\n): SymbolInfo | null {\n const extractor = symbolExtractors[node.type];\n return extractor ? extractor(node, content, parentClass) : null;\n}\n\n/**\n * Extract function/method signature\n */\nfunction extractSignature(node: Parser.SyntaxNode, content: string): string {\n // Get the first line of the function (up to opening brace or arrow)\n const startLine = node.startPosition.row;\n const lines = content.split('\\n');\n let signature = lines[startLine] || '';\n \n // If signature spans multiple lines, try to get up to the opening brace\n let currentLine = startLine;\n while (currentLine < node.endPosition.row && !signature.includes('{') && !signature.includes('=>')) {\n currentLine++;\n signature += ' ' + (lines[currentLine] || '');\n }\n \n // Clean up signature\n signature = signature.split('{')[0].split('=>')[0].trim();\n \n // Limit length\n if (signature.length > 200) {\n signature = signature.substring(0, 197) + '...';\n }\n \n return signature;\n}\n\n/**\n * Extract parameter list from function node\n * \n * Note: The `_content` parameter is unused in this function, but is kept for API consistency\n * with other extract functions (e.g., extractSignature).\n */\nfunction extractParameters(node: Parser.SyntaxNode, _content: string): string[] {\n const parameters: string[] = [];\n \n // Find parameters node\n const paramsNode = node.childForFieldName('parameters');\n if (!paramsNode) return parameters;\n \n // Traverse parameter nodes\n for (let i = 0; i < paramsNode.namedChildCount; i++) {\n const param = paramsNode.namedChild(i);\n if (param) {\n parameters.push(param.text);\n }\n }\n \n return parameters;\n}\n\n/**\n * Extract return type from function node (TypeScript)\n * \n * Note: The `_content` parameter is unused in this function, but is kept for API consistency\n * with other extract functions (e.g., extractSignature).\n */\nfunction extractReturnType(node: Parser.SyntaxNode, _content: string): string | undefined {\n const returnTypeNode = node.childForFieldName('return_type');\n if (!returnTypeNode) return undefined;\n \n return returnTypeNode.text;\n}\n\n/**\n * Calculate cyclomatic complexity of a function\n * \n * Complexity = 1 (base) + number of decision points\n * Decision points: if, while, do...while, for, for...in, for...of, foreach, case, catch, &&, ||, ?:\n */\nexport function calculateComplexity(node: Parser.SyntaxNode): number {\n let complexity = 1; // Base complexity\n \n const decisionPoints = [\n // TypeScript/JavaScript\n 'if_statement',\n 'while_statement',\n 'do_statement', // do...while loops\n 'for_statement',\n 'for_in_statement',\n 'for_of_statement', // for...of loops\n 'switch_case',\n 'catch_clause',\n 'ternary_expression',\n 'binary_expression', // For && and ||\n \n // 
PHP\n 'foreach_statement', // PHP foreach loops\n ];\n \n function traverse(n: Parser.SyntaxNode) {\n if (decisionPoints.includes(n.type)) {\n // For binary expressions, only count && and ||\n if (n.type === 'binary_expression') {\n const operator = n.childForFieldName('operator');\n if (operator && (operator.text === '&&' || operator.text === '||')) {\n complexity++;\n }\n } else {\n complexity++;\n }\n }\n \n // Traverse children\n for (let i = 0; i < n.namedChildCount; i++) {\n const child = n.namedChild(i);\n if (child) traverse(child);\n }\n }\n \n traverse(node);\n return complexity;\n}\n\n/**\n * Extract import statements from a file\n */\nexport function extractImports(rootNode: Parser.SyntaxNode): string[] {\n const imports: string[] = [];\n \n function traverse(node: Parser.SyntaxNode) {\n if (node.type === 'import_statement') {\n // Get the source (the string after 'from')\n const sourceNode = node.childForFieldName('source');\n if (sourceNode) {\n const importPath = sourceNode.text.replace(/['\"]/g, '');\n imports.push(importPath);\n }\n }\n \n // Only traverse top-level nodes for imports\n if (node === rootNode) {\n for (let i = 0; i < node.namedChildCount; i++) {\n const child = node.namedChild(i);\n if (child) traverse(child);\n }\n }\n }\n \n traverse(rootNode);\n return imports;\n}\n\n","import type Parser from 'tree-sitter';\nimport type { LanguageTraverser, DeclarationFunctionInfo } from './types.js';\n\n/**\n * TypeScript/JavaScript AST traverser\n * \n * Handles TypeScript and JavaScript AST node types and traversal patterns.\n * Both languages share the same AST structure (via tree-sitter-typescript).\n */\nexport class TypeScriptTraverser implements LanguageTraverser {\n targetNodeTypes = [\n 'function_declaration',\n 'function',\n 'interface_declaration',\n 'method_definition',\n 'lexical_declaration', // For const/let with arrow functions\n 'variable_declaration', // For var with functions\n ];\n \n containerTypes = [\n 'class_declaration', // We extract methods, not the class itself\n ];\n \n declarationTypes = [\n 'lexical_declaration', // const/let\n 'variable_declaration', // var\n ];\n \n functionTypes = [\n 'arrow_function',\n 'function_expression',\n 'function',\n ];\n \n shouldExtractChildren(node: Parser.SyntaxNode): boolean {\n return this.containerTypes.includes(node.type);\n }\n \n isDeclarationWithFunction(node: Parser.SyntaxNode): boolean {\n return this.declarationTypes.includes(node.type);\n }\n \n getContainerBody(node: Parser.SyntaxNode): Parser.SyntaxNode | null {\n if (node.type === 'class_declaration') {\n return node.childForFieldName('body');\n }\n return null;\n }\n \n shouldTraverseChildren(node: Parser.SyntaxNode): boolean {\n return node.type === 'program' || \n node.type === 'export_statement' ||\n node.type === 'class_body';\n }\n \n findParentContainerName(node: Parser.SyntaxNode): string | undefined {\n let current = node.parent;\n while (current) {\n if (current.type === 'class_declaration') {\n const nameNode = current.childForFieldName('name');\n return nameNode?.text;\n }\n current = current.parent;\n }\n return undefined;\n }\n \n /**\n * Check if a declaration node contains a function (arrow, function expression, etc.)\n */\n findFunctionInDeclaration(node: Parser.SyntaxNode): DeclarationFunctionInfo {\n const search = (n: Parser.SyntaxNode, depth: number): Parser.SyntaxNode | null => {\n if (depth > 3) return null; // Don't search too deep\n \n if (this.functionTypes.includes(n.type)) {\n return n;\n }\n \n for (let i = 0; i < 
n.childCount; i++) {\n const child = n.child(i);\n if (child) {\n const result = search(child, depth + 1);\n if (result) return result;\n }\n }\n \n return null;\n };\n \n const functionNode = search(node, 0);\n return {\n hasFunction: functionNode !== null,\n functionNode,\n };\n }\n}\n\n/**\n * JavaScript uses the same traverser as TypeScript\n */\nexport class JavaScriptTraverser extends TypeScriptTraverser {}\n\n","import type Parser from 'tree-sitter';\nimport type { LanguageTraverser, DeclarationFunctionInfo } from './types.js';\n\n/**\n * PHP AST traverser\n * \n * Handles PHP AST node types and traversal patterns.\n * PHP uses tree-sitter-php grammar.\n */\nexport class PHPTraverser implements LanguageTraverser {\n targetNodeTypes = [\n 'function_definition', // function foo() {}\n 'method_declaration', // public function bar() {}\n ];\n \n containerTypes = [\n 'class_declaration', // We extract methods, not the class itself\n 'trait_declaration', // PHP traits\n 'interface_declaration', // PHP interfaces (for interface methods)\n ];\n \n declarationTypes = [\n // PHP doesn't have arrow functions or const/let like JS\n // Functions are always defined with 'function' keyword\n ];\n \n functionTypes = [\n 'function_definition',\n 'method_declaration',\n ];\n \n shouldExtractChildren(node: Parser.SyntaxNode): boolean {\n return this.containerTypes.includes(node.type);\n }\n \n isDeclarationWithFunction(_node: Parser.SyntaxNode): boolean {\n // PHP doesn't have variable declarations with functions like JS/TS\n // Functions are always defined with 'function' keyword\n return false;\n }\n \n getContainerBody(node: Parser.SyntaxNode): Parser.SyntaxNode | null {\n if (node.type === 'class_declaration' || \n node.type === 'trait_declaration' ||\n node.type === 'interface_declaration') {\n // In PHP, the body is called 'declaration_list'\n return node.childForFieldName('body');\n }\n return null;\n }\n \n shouldTraverseChildren(node: Parser.SyntaxNode): boolean {\n return node.type === 'program' || // Top-level PHP file\n node.type === 'php' || // PHP block\n node.type === 'declaration_list'; // Body of class/trait/interface\n }\n \n findParentContainerName(node: Parser.SyntaxNode): string | undefined {\n let current = node.parent;\n while (current) {\n if (current.type === 'class_declaration' || \n current.type === 'trait_declaration') {\n const nameNode = current.childForFieldName('name');\n return nameNode?.text;\n }\n current = current.parent;\n }\n return undefined;\n }\n \n findFunctionInDeclaration(_node: Parser.SyntaxNode): DeclarationFunctionInfo {\n // PHP doesn't have this pattern\n return {\n hasFunction: false,\n functionNode: null,\n };\n }\n}\n\n","import type { SupportedLanguage } from '../types.js';\nimport type { LanguageTraverser } from './types.js';\nimport { TypeScriptTraverser, JavaScriptTraverser } from './typescript.js';\nimport { PHPTraverser } from './php.js';\n\nexport type { LanguageTraverser, DeclarationFunctionInfo } from './types.js';\n\n/**\n * Registry of language traversers\n * \n * Maps each supported language to its traverser implementation.\n * When adding a new language:\n * 1. Create a new traverser class implementing LanguageTraverser\n * 2. Add it to this registry\n * 3. 
Update SupportedLanguage type in ../types.ts\n */\nconst traverserRegistry: Record<SupportedLanguage, LanguageTraverser> = {\n typescript: new TypeScriptTraverser(),\n javascript: new JavaScriptTraverser(),\n php: new PHPTraverser(),\n};\n\n/**\n * Get the traverser for a specific language\n * \n * @param language - Programming language\n * @returns Language-specific traverser\n * @throws Error if language is not supported\n */\nexport function getTraverser(language: SupportedLanguage): LanguageTraverser {\n const traverser = traverserRegistry[language];\n \n if (!traverser) {\n throw new Error(`No traverser available for language: ${language}`);\n }\n \n return traverser;\n}\n\n/**\n * Check if a language has a traverser implementation\n * \n * @param language - Programming language\n * @returns True if traverser exists\n */\nexport function hasTraverser(language: SupportedLanguage): boolean {\n return language in traverserRegistry;\n}\n\n","import type Parser from 'tree-sitter';\nimport type { ASTChunk } from './types.js';\nimport { parseAST, detectLanguage, isASTSupported } from './parser.js';\nimport { extractSymbolInfo, extractImports } from './symbols.js';\nimport { getTraverser } from './traversers/index.js';\n\nexport interface ASTChunkOptions {\n maxChunkSize?: number; // Reserved for future use (smart splitting of large functions)\n minChunkSize?: number;\n}\n\n/**\n * Chunk a file using AST-based semantic boundaries\n * \n * Uses Tree-sitter to parse code into an AST and extract semantic chunks\n * (functions, classes, methods) that respect code structure.\n * \n * **Known Limitations:**\n * - Tree-sitter may fail with \"Invalid argument\" error on very large files (1000+ lines)\n * - When this occurs, Lien automatically falls back to line-based chunking\n * - Configure fallback behavior via `chunking.astFallback` ('line-based' or 'error')\n * \n * @param filepath - Path to the file\n * @param content - File content\n * @param options - Chunking options\n * @returns Array of AST-aware chunks\n * @throws Error if AST parsing fails and astFallback is 'error'\n */\nexport function chunkByAST(\n filepath: string,\n content: string,\n options: ASTChunkOptions = {}\n): ASTChunk[] {\n const { minChunkSize = 5 } = options;\n \n // Check if AST is supported for this file\n const language = detectLanguage(filepath);\n if (!language) {\n throw new Error(`Unsupported language for file: ${filepath}`);\n }\n \n // Parse the file\n const parseResult = parseAST(content, language);\n \n // If parsing failed, throw error (caller should fallback to line-based)\n if (!parseResult.tree) {\n throw new Error(`Failed to parse ${filepath}: ${parseResult.error}`);\n }\n \n const chunks: ASTChunk[] = [];\n const lines = content.split('\\n');\n const rootNode = parseResult.tree.rootNode;\n \n // Get language-specific traverser\n const traverser = getTraverser(language);\n \n // Extract file-level imports once\n const fileImports = extractImports(rootNode);\n \n // Find all top-level function and class declarations\n const topLevelNodes = findTopLevelNodes(rootNode, traverser);\n \n for (const node of topLevelNodes) {\n // For variable declarations, try to find the function inside\n let actualNode = node;\n if (traverser.isDeclarationWithFunction(node)) {\n const declInfo = traverser.findFunctionInDeclaration(node);\n if (declInfo.functionNode) {\n actualNode = declInfo.functionNode;\n }\n }\n \n // For methods, find the parent container name (e.g., class name)\n const parentClassName = 
traverser.findParentContainerName(actualNode);\n \n const symbolInfo = extractSymbolInfo(actualNode, content, parentClassName);\n \n // Extract the code for this node (use original node for full declaration)\n const nodeContent = getNodeContent(node, lines);\n \n // Create a chunk for this semantic unit\n // Note: Large functions are kept as single chunks (may exceed maxChunkSize)\n // This preserves semantic boundaries - better than splitting mid-function\n chunks.push(createChunk(filepath, node, nodeContent, symbolInfo, fileImports, language));\n }\n \n // Handle remaining code (imports, exports, top-level statements)\n const coveredRanges = topLevelNodes.map(n => ({\n start: n.startPosition.row,\n end: n.endPosition.row,\n }));\n \n const uncoveredChunks = extractUncoveredCode(\n lines,\n coveredRanges,\n filepath,\n minChunkSize,\n fileImports,\n language\n );\n \n chunks.push(...uncoveredChunks);\n \n // Sort chunks by line number\n chunks.sort((a, b) => a.metadata.startLine - b.metadata.startLine);\n \n return chunks;\n}\n\n/**\n * Find all top-level nodes that should become chunks\n * \n * Uses a language-specific traverser to handle different AST structures.\n * This function is now language-agnostic - all language-specific logic\n * is delegated to the traverser.\n * \n * @param rootNode - Root AST node\n * @param traverser - Language-specific traverser\n * @returns Array of nodes to extract as chunks\n */\nfunction findTopLevelNodes(\n rootNode: Parser.SyntaxNode,\n traverser: ReturnType<typeof getTraverser>\n): Parser.SyntaxNode[] {\n const nodes: Parser.SyntaxNode[] = [];\n \n function traverse(node: Parser.SyntaxNode, depth: number) {\n // Check if this is a declaration that might contain a function\n if (traverser.isDeclarationWithFunction(node) && depth === 0) {\n const declInfo = traverser.findFunctionInDeclaration(node);\n if (declInfo.hasFunction) {\n nodes.push(node);\n return;\n }\n }\n \n // Check if this is a target node type (function, method, etc.)\n if (depth <= 1 && traverser.targetNodeTypes.includes(node.type)) {\n nodes.push(node);\n return; // Don't traverse into this node\n }\n \n // Check if this is a container whose children should be extracted\n if (traverser.shouldExtractChildren(node)) {\n const body = traverser.getContainerBody(node);\n if (body) {\n traverse(body, depth + 1);\n }\n return;\n }\n \n // Check if we should traverse this node's children\n if (traverser.shouldTraverseChildren(node)) {\n for (let i = 0; i < node.namedChildCount; i++) {\n const child = node.namedChild(i);\n if (child) traverse(child, depth);\n }\n }\n }\n \n traverse(rootNode, 0);\n return nodes;\n}\n\n/**\n * Extract content for a specific AST node\n */\nfunction getNodeContent(node: Parser.SyntaxNode, lines: string[]): string {\n const startLine = node.startPosition.row;\n const endLine = node.endPosition.row;\n \n return lines.slice(startLine, endLine + 1).join('\\n');\n}\n\n/**\n * Create a chunk from an AST node\n */\nfunction createChunk(\n filepath: string,\n node: Parser.SyntaxNode,\n content: string,\n symbolInfo: ReturnType<typeof extractSymbolInfo>,\n imports: string[],\n language: string\n): ASTChunk {\n // Populate legacy symbols field for backward compatibility\n const symbols = {\n functions: [] as string[],\n classes: [] as string[],\n interfaces: [] as string[],\n };\n \n if (symbolInfo?.name) {\n // Populate legacy symbols arrays based on symbol type\n if (symbolInfo.type === 'function' || symbolInfo.type === 'method') {\n 
symbols.functions.push(symbolInfo.name);\n } else if (symbolInfo.type === 'class') {\n symbols.classes.push(symbolInfo.name);\n } else if (symbolInfo.type === 'interface') {\n symbols.interfaces.push(symbolInfo.name);\n }\n }\n \n return {\n content,\n metadata: {\n file: filepath,\n startLine: node.startPosition.row + 1,\n endLine: node.endPosition.row + 1,\n type: symbolInfo == null ? 'block' : (symbolInfo.type === 'class' ? 'class' : 'function'),\n language,\n // Legacy symbols field for backward compatibility\n symbols,\n // New AST-derived metadata\n symbolName: symbolInfo?.name,\n symbolType: symbolInfo?.type,\n parentClass: symbolInfo?.parentClass,\n complexity: symbolInfo?.complexity,\n parameters: symbolInfo?.parameters,\n signature: symbolInfo?.signature,\n imports,\n },\n };\n}\n\n/**\n * Represents a range of lines in a file\n */\ninterface LineRange {\n start: number;\n end: number;\n}\n\n/**\n * Find gaps between covered ranges (uncovered code)\n */\nfunction findUncoveredRanges(\n coveredRanges: LineRange[],\n totalLines: number\n): LineRange[] {\n const uncoveredRanges: LineRange[] = [];\n let currentStart = 0;\n \n // Sort covered ranges\n const sortedRanges = [...coveredRanges].sort((a, b) => a.start - b.start);\n \n for (const range of sortedRanges) {\n if (currentStart < range.start) {\n // There's a gap before this range\n uncoveredRanges.push({\n start: currentStart,\n end: range.start - 1,\n });\n }\n currentStart = range.end + 1;\n }\n \n // Handle remaining code after last covered range\n if (currentStart < totalLines) {\n uncoveredRanges.push({\n start: currentStart,\n end: totalLines - 1,\n });\n }\n \n return uncoveredRanges;\n}\n\n/**\n * Create a chunk from a line range\n */\nfunction createChunkFromRange(\n range: LineRange,\n lines: string[],\n filepath: string,\n language: string,\n imports: string[]\n): ASTChunk {\n const uncoveredLines = lines.slice(range.start, range.end + 1);\n const content = uncoveredLines.join('\\n').trim();\n \n return {\n content,\n metadata: {\n file: filepath,\n startLine: range.start + 1,\n endLine: range.end + 1,\n type: 'block',\n language,\n // Empty symbols for uncovered code (imports, exports, etc.)\n symbols: { functions: [], classes: [], interfaces: [] },\n imports,\n },\n };\n}\n\n/**\n * Validate that a chunk meets the minimum size requirements\n */\nfunction isValidChunk(chunk: ASTChunk, minChunkSize: number): boolean {\n const lineCount = chunk.metadata.endLine - chunk.metadata.startLine + 1;\n return chunk.content.length > 0 && lineCount >= minChunkSize;\n}\n\n/**\n * Extract code that wasn't covered by function/class chunks\n * (imports, exports, top-level statements)\n */\nfunction extractUncoveredCode(\n lines: string[],\n coveredRanges: Array<{ start: number; end: number }>,\n filepath: string,\n minChunkSize: number,\n imports: string[],\n language: string\n): ASTChunk[] {\n const uncoveredRanges = findUncoveredRanges(coveredRanges, lines.length);\n \n return uncoveredRanges\n .map(range => createChunkFromRange(range, lines, filepath, language, imports))\n .filter(chunk => isValidChunk(chunk, minChunkSize));\n}\n\n/**\n * Check if AST chunking should be used for a file\n */\nexport function shouldUseAST(filepath: string): boolean {\n return isASTSupported(filepath);\n}\n\n","import type { CodeChunk } from './types.js';\n\n/**\n * Liquid-specific chunking for Shopify themes\n * \n * Uses regex to identify special Liquid blocks (schema, style, javascript)\n * and keeps them as single semantic units\n 
*/\n\ninterface LiquidBlock {\n type: 'schema' | 'style' | 'javascript' | 'template';\n startLine: number;\n endLine: number;\n content: string;\n}\n\n/**\n * Extract schema name from JSON content\n * \n * Extracts the \"name\" field from Shopify schema JSON.\n * Uses JSON.parse to properly handle escaped quotes and other JSON edge cases.\n * \n * Example:\n * {% schema %}\n * {\n * \"name\": \"My \\\"Special\\\" Section\",\n * \"settings\": []\n * }\n * {% endschema %}\n * \n * Returns: 'My \"Special\" Section' (with literal quotes, unescaped)\n */\nfunction extractSchemaName(schemaContent: string): string | undefined {\n try {\n // Remove Liquid tags to isolate JSON content\n // Replace {% schema %} and {% endschema %} (with optional whitespace control)\n let jsonContent = schemaContent\n .replace(/\\{%-?\\s*schema\\s*-?%\\}/g, '')\n .replace(/\\{%-?\\s*endschema\\s*-?%\\}/g, '')\n .trim();\n \n // Parse the JSON\n const schema = JSON.parse(jsonContent);\n // Ensure name is a string before returning\n return typeof schema.name === 'string' ? schema.name : undefined;\n } catch (error) {\n // Invalid JSON - return undefined\n // This is acceptable: schema blocks with invalid JSON won't have names extracted\n }\n return undefined;\n}\n\n/**\n * Remove Liquid comment blocks from content to avoid extracting tags from comments\n * \n * Example:\n * {% comment %}Don't use {% render 'old-snippet' %}{% endcomment %}\n * → (removed)\n */\nfunction removeComments(content: string): string {\n // Remove {% comment %}...{% endcomment %} blocks (with optional whitespace control)\n return content.replace(/\\{%-?\\s*comment\\s*-?%\\}[\\s\\S]*?\\{%-?\\s*endcomment\\s*-?%\\}/g, '');\n}\n\n/**\n * Extract dependencies from {% render %}, {% include %}, and {% section %} tags\n * \n * Examples:\n * - {% render 'product-card' %} → 'product-card'\n * - {% render \"cart-item\", product: product %} → 'cart-item'\n * - {% include 'snippets/header' %} → 'snippets/header'\n * - {% section 'announcement-bar' %} → 'announcement-bar'\n * \n * Limitations:\n * - Does not handle escaped quotes in snippet names (e.g., {% render 'name\\'s' %})\n * - This is acceptable because Shopify snippet names map to filenames, and\n * filesystem restrictions prevent quotes in filenames (snippets/name's.liquid is invalid)\n * - In practice, Shopify snippet names use only alphanumeric, dash, and underscore\n * \n * Note: Expects content with comments already removed for performance\n * \n * @param contentWithoutComments - Content with Liquid comments already removed\n */\nfunction extractRenderTags(contentWithoutComments: string): string[] {\n const dependencies = new Set<string>();\n \n // Match {% render 'snippet-name' %} or {% render \"snippet-name\" %}\n // Note: Does not handle escaped quotes - see function docs for rationale\n const renderPattern = /\\{%-?\\s*render\\s+['\"]([^'\"]+)['\"]/g;\n let match;\n \n while ((match = renderPattern.exec(contentWithoutComments)) !== null) {\n dependencies.add(match[1]);\n }\n \n // Match {% include 'snippet-name' %} or {% include \"snippet-name\" %}\n const includePattern = /\\{%-?\\s*include\\s+['\"]([^'\"]+)['\"]/g;\n \n while ((match = includePattern.exec(contentWithoutComments)) !== null) {\n dependencies.add(match[1]);\n }\n \n // Match {% section 'section-name' %} or {% section \"section-name\" %}\n const sectionPattern = /\\{%-?\\s*section\\s+['\"]([^'\"]+)['\"]/g;\n \n while ((match = sectionPattern.exec(contentWithoutComments)) !== null) {\n dependencies.add(match[1]);\n }\n \n 
return Array.from(dependencies);\n}\n\n/**\n * Find all special Liquid blocks in the template\n * \n * Limitation: Does not support nested blocks of the same type.\n * - Matches first start tag with first end tag\n * - This is acceptable because Shopify Liquid does not allow nested blocks\n * - Example invalid: {% schema %}...{% schema %}...{% endschema %} (Shopify rejects this)\n * - If malformed input contains nested blocks, only outermost block is extracted\n */\nfunction findLiquidBlocks(content: string): LiquidBlock[] {\n const lines = content.split('\\n');\n const blocks: LiquidBlock[] = [];\n \n // Regex patterns for Liquid blocks\n // Note: Matches first start → first end (no nesting support, which is correct for Shopify)\n const blockPatterns = [\n { type: 'schema' as const, start: /\\{%-?\\s*schema\\s*-?%\\}/, end: /\\{%-?\\s*endschema\\s*-?%\\}/ },\n { type: 'style' as const, start: /\\{%-?\\s*style\\s*-?%\\}/, end: /\\{%-?\\s*endstyle\\s*-?%\\}/ },\n { type: 'javascript' as const, start: /\\{%-?\\s*javascript\\s*-?%\\}/, end: /\\{%-?\\s*endjavascript\\s*-?%\\}/ },\n ];\n \n for (const pattern of blockPatterns) {\n let searchStart = 0;\n \n while (searchStart < lines.length) {\n // Find start tag\n const startIdx = lines.findIndex((line, idx) => \n idx >= searchStart && pattern.start.test(line)\n );\n \n if (startIdx === -1) break;\n \n // Find end tag (allow same line for single-line blocks)\n const endIdx = lines.findIndex((line, idx) => \n idx >= startIdx && pattern.end.test(line)\n );\n \n if (endIdx === -1) {\n // No end tag found, treat rest as template\n break;\n }\n \n // Extract block content\n const blockContent = lines.slice(startIdx, endIdx + 1).join('\\n');\n \n blocks.push({\n type: pattern.type,\n startLine: startIdx,\n endLine: endIdx,\n content: blockContent,\n });\n \n searchStart = endIdx + 1;\n }\n }\n \n return blocks.sort((a, b) => a.startLine - b.startLine);\n}\n\n/**\n * Chunk a Liquid template file\n * \n * Special handling for:\n * - {% schema %} blocks (kept together, extract section name)\n * - {% style %} blocks (kept together) \n * - {% javascript %} blocks (kept together)\n * - {% render %}, {% include %}, and {% section %} tags (tracked as imports)\n * - Regular template content (chunked by lines)\n */\nexport function chunkLiquidFile(\n filepath: string,\n content: string,\n chunkSize: number = 75,\n chunkOverlap: number = 10\n): CodeChunk[] {\n const lines = content.split('\\n');\n const blocks = findLiquidBlocks(content);\n const chunks: CodeChunk[] = [];\n \n // Remove comments once for performance (avoids repeated regex operations)\n const contentWithoutComments = removeComments(content);\n const linesWithoutComments = contentWithoutComments.split('\\n');\n \n // Track which lines are covered by special blocks\n const coveredLines = new Set<number>();\n \n // Create chunks for special blocks\n for (const block of blocks) {\n // Mark lines as covered\n for (let i = block.startLine; i <= block.endLine; i++) {\n coveredLines.add(i);\n }\n \n // Extract metadata\n let symbolName: string | undefined;\n if (block.type === 'schema') {\n symbolName = extractSchemaName(block.content);\n }\n \n // Extract render/include tags from cleaned content (comments already removed)\n const blockContentWithoutComments = linesWithoutComments\n .slice(block.startLine, block.endLine + 1)\n .join('\\n');\n const imports = extractRenderTags(blockContentWithoutComments);\n \n const blockLineCount = block.endLine - block.startLine + 1;\n const maxBlockSize = chunkSize * 
3; // Allow blocks up to 3x chunk size before splitting\n \n // If block is reasonably sized, keep it as one chunk\n if (blockLineCount <= maxBlockSize) {\n chunks.push({\n content: block.content,\n metadata: {\n file: filepath,\n startLine: block.startLine + 1, // 1-indexed\n endLine: block.endLine + 1,\n language: 'liquid',\n type: 'block',\n symbolName,\n symbolType: block.type,\n imports: imports.length > 0 ? imports : undefined,\n },\n });\n } else {\n // Block is too large - split it into multiple chunks with overlap\n const blockLines = block.content.split('\\n');\n \n for (let offset = 0; offset < blockLines.length; offset += chunkSize - chunkOverlap) {\n const endOffset = Math.min(offset + chunkSize, blockLines.length);\n const chunkContent = blockLines.slice(offset, endOffset).join('\\n');\n \n if (chunkContent.trim().length > 0) {\n chunks.push({\n content: chunkContent,\n metadata: {\n file: filepath,\n startLine: block.startLine + offset + 1, // 1-indexed\n endLine: block.startLine + endOffset, // 1-indexed (endOffset already accounts for exclusivity)\n language: 'liquid',\n type: 'block',\n symbolName, // Preserve symbol name for all split chunks\n symbolType: block.type,\n imports: imports.length > 0 ? imports : undefined,\n },\n });\n }\n \n if (endOffset >= blockLines.length) break;\n }\n }\n }\n \n // Chunk uncovered template content\n let currentChunk: string[] = [];\n let chunkStartLine = 0;\n \n for (let i = 0; i < lines.length; i++) {\n // Skip lines covered by special blocks\n if (coveredLines.has(i)) {\n // Flush current chunk if any\n if (currentChunk.length > 0) {\n const chunkContent = currentChunk.join('\\n');\n \n // Only push non-empty chunks\n if (chunkContent.trim().length > 0) {\n // Extract from cleaned content (comments already removed)\n const cleanedChunk = linesWithoutComments.slice(chunkStartLine, i).join('\\n');\n const imports = extractRenderTags(cleanedChunk);\n \n chunks.push({\n content: chunkContent,\n metadata: {\n file: filepath,\n startLine: chunkStartLine + 1,\n endLine: i,\n language: 'liquid',\n type: 'template',\n imports: imports.length > 0 ? imports : undefined,\n },\n });\n }\n currentChunk = [];\n }\n continue;\n }\n \n // Start new chunk if needed\n if (currentChunk.length === 0) {\n chunkStartLine = i;\n }\n \n currentChunk.push(lines[i]);\n \n // Flush if chunk is full\n if (currentChunk.length >= chunkSize) {\n const chunkContent = currentChunk.join('\\n');\n \n // Only push non-empty chunks\n if (chunkContent.trim().length > 0) {\n // Extract from cleaned content (comments already removed)\n const cleanedChunk = linesWithoutComments.slice(chunkStartLine, i + 1).join('\\n');\n const imports = extractRenderTags(cleanedChunk);\n \n chunks.push({\n content: chunkContent,\n metadata: {\n file: filepath,\n startLine: chunkStartLine + 1,\n endLine: i + 1,\n language: 'liquid',\n type: 'template',\n imports: imports.length > 0 ? 
imports : undefined,\n },\n });\n }\n \n // Add overlap for next chunk\n currentChunk = currentChunk.slice(-chunkOverlap);\n chunkStartLine = Math.max(0, i + 1 - chunkOverlap);\n }\n }\n \n // Flush remaining chunk\n if (currentChunk.length > 0) {\n const chunkContent = currentChunk.join('\\n');\n \n // Skip empty or whitespace-only chunks\n if (chunkContent.trim().length === 0) {\n return chunks.sort((a, b) => a.metadata.startLine - b.metadata.startLine);\n }\n \n // Extract from cleaned content (comments already removed)\n const cleanedChunk = linesWithoutComments.slice(chunkStartLine, lines.length).join('\\n');\n const imports = extractRenderTags(cleanedChunk);\n \n chunks.push({\n content: chunkContent,\n metadata: {\n file: filepath,\n startLine: chunkStartLine + 1,\n endLine: lines.length,\n language: 'liquid',\n type: 'template',\n imports: imports.length > 0 ? imports : undefined,\n },\n });\n }\n \n // Sort by line number\n return chunks.sort((a, b) => a.metadata.startLine - b.metadata.startLine);\n}\n\n","import type { CodeChunk } from './types.js';\n\n/**\n * Shopify JSON template chunking\n * \n * JSON template files define which sections appear on a template page.\n * We extract section references to track dependencies.\n * \n * Example structure:\n * {\n * \"sections\": {\n * \"main\": { \"type\": \"main-product\", \"settings\": {...} },\n * \"recommendations\": { \"type\": \"product-recommendations\", \"settings\": {...} }\n * },\n * \"order\": [\"main\", \"recommendations\"]\n * }\n */\n\n/**\n * Extract section types from a Shopify JSON template\n * \n * These are the actual section file names (e.g., \"main-product\" → sections/main-product.liquid)\n */\nfunction extractSectionReferences(jsonContent: string): string[] {\n try {\n const template = JSON.parse(jsonContent);\n const sectionTypes = new Set<string>();\n \n // Extract from sections object\n if (template.sections && typeof template.sections === 'object') {\n for (const section of Object.values(template.sections)) {\n if (\n typeof section === 'object' && \n section !== null && \n 'type' in section && \n typeof section.type === 'string'\n ) {\n sectionTypes.add(section.type);\n }\n }\n }\n \n return Array.from(sectionTypes);\n } catch (error) {\n // Invalid JSON - return empty array\n console.warn(`[Lien] Failed to parse JSON template: ${error instanceof Error ? error.message : String(error)}`);\n return [];\n }\n}\n\n/**\n * Extract the template name from the filepath\n * \n * templates/customers/account.json → \"customers/account\"\n * templates/product.json → \"product\"\n */\nfunction extractTemplateName(filepath: string): string | undefined {\n // Match everything after templates/ up to .json\n const match = filepath.match(/templates\\/(.+)\\.json$/);\n return match ? 
match[1] : undefined;\n}\n\n/**\n * Chunk a Shopify JSON template file\n * \n * JSON templates are typically small (define section layout),\n * so we keep them as a single chunk and extract section references.\n */\nexport function chunkJSONTemplate(\n filepath: string,\n content: string\n): CodeChunk[] {\n // Skip empty files\n if (content.trim().length === 0) {\n return [];\n }\n \n const lines = content.split('\\n');\n const templateName = extractTemplateName(filepath);\n const sectionReferences = extractSectionReferences(content);\n \n return [{\n content,\n metadata: {\n file: filepath,\n startLine: 1,\n endLine: lines.length,\n language: 'json',\n type: 'template',\n symbolName: templateName,\n symbolType: 'template',\n imports: sectionReferences.length > 0 ? sectionReferences : undefined,\n },\n }];\n}\n\n","import { CodeChunk } from './types.js';\nimport { detectLanguage } from './scanner.js';\nimport { extractSymbols } from './symbol-extractor.js';\nimport { shouldUseAST, chunkByAST } from './ast/chunker.js';\nimport { chunkLiquidFile } from './liquid-chunker.js';\nimport { chunkJSONTemplate } from './json-template-chunker.js';\n\nexport interface ChunkOptions {\n chunkSize?: number;\n chunkOverlap?: number;\n useAST?: boolean; // Flag to enable AST-based chunking\n astFallback?: 'line-based' | 'error'; // How to handle AST parsing errors\n}\n\nexport function chunkFile(\n filepath: string,\n content: string,\n options: ChunkOptions = {}\n): CodeChunk[] {\n const { chunkSize = 75, chunkOverlap = 10, useAST = true, astFallback = 'line-based' } = options;\n \n // Special handling for Liquid files\n if (filepath.endsWith('.liquid')) {\n return chunkLiquidFile(filepath, content, chunkSize, chunkOverlap);\n }\n \n // Special handling for Shopify JSON template files (templates/**/*.json)\n // Use regex to ensure 'templates/' is a path segment, not part of another name\n // Matches: templates/product.json OR some-path/templates/customers/account.json\n // Rejects: my-templates/config.json OR node_modules/pkg/templates/file.json (filtered by scanner)\n if (filepath.endsWith('.json') && /(?:^|\\/)templates\\//.test(filepath)) {\n return chunkJSONTemplate(filepath, content);\n }\n \n // Try AST-based chunking for supported languages\n if (useAST && shouldUseAST(filepath)) {\n try {\n return chunkByAST(filepath, content, {\n minChunkSize: Math.floor(chunkSize / 10),\n });\n } catch (error) {\n // Handle AST errors based on configuration\n if (astFallback === 'error') {\n // Throw error if user wants strict AST-only behavior\n throw new Error(`AST chunking failed for ${filepath}: ${error instanceof Error ? 
error.message : String(error)}`);\n }\n // Otherwise fallback to line-based chunking\n console.warn(`AST chunking failed for ${filepath}, falling back to line-based:`, error);\n }\n }\n \n // Line-based chunking (original implementation)\n return chunkByLines(filepath, content, chunkSize, chunkOverlap);\n}\n\n/**\n * Original line-based chunking implementation\n */\nfunction chunkByLines(\n filepath: string,\n content: string,\n chunkSize: number,\n chunkOverlap: number\n): CodeChunk[] {\n const lines = content.split('\\n');\n const chunks: CodeChunk[] = [];\n const language = detectLanguage(filepath);\n \n // Handle empty files\n if (lines.length === 0 || (lines.length === 1 && lines[0].trim() === '')) {\n return chunks;\n }\n \n // Chunk by lines with overlap\n for (let i = 0; i < lines.length; i += chunkSize - chunkOverlap) {\n const endLine = Math.min(i + chunkSize, lines.length);\n const chunkLines = lines.slice(i, endLine);\n const chunkContent = chunkLines.join('\\n');\n \n // Skip empty chunks\n if (chunkContent.trim().length === 0) {\n continue;\n }\n \n // Extract symbols from the chunk\n const symbols = extractSymbols(chunkContent, language);\n \n chunks.push({\n content: chunkContent,\n metadata: {\n file: filepath,\n startLine: i + 1,\n endLine: endLine,\n type: 'block', // MVP: all chunks are 'block' type\n language,\n symbols,\n },\n });\n \n // If we've reached the end, break\n if (endLine >= lines.length) {\n break;\n }\n }\n \n return chunks;\n}\n\nexport function chunkText(text: string, options: ChunkOptions = {}): string[] {\n const { chunkSize = 75, chunkOverlap = 10 } = options;\n \n const lines = text.split('\\n');\n const chunks: string[] = [];\n \n for (let i = 0; i < lines.length; i += chunkSize - chunkOverlap) {\n const endLine = Math.min(i + chunkSize, lines.length);\n const chunkLines = lines.slice(i, endLine);\n const chunkContent = chunkLines.join('\\n');\n \n if (chunkContent.trim().length > 0) {\n chunks.push(chunkContent);\n }\n \n if (endLine >= lines.length) {\n break;\n }\n }\n \n return chunks;\n}\n\n","import { pipeline, env, type FeatureExtractionPipeline } from '@xenova/transformers';\nimport { EmbeddingService } from './types.js';\nimport { EmbeddingError, wrapError } from '../errors/index.js';\nimport { DEFAULT_EMBEDDING_MODEL } from '../constants.js';\n\n// Configure transformers.js to cache models locally\nenv.allowRemoteModels = true;\nenv.allowLocalModels = true;\n\nexport class LocalEmbeddings implements EmbeddingService {\n private extractor: FeatureExtractionPipeline | null = null;\n private readonly modelName = DEFAULT_EMBEDDING_MODEL;\n private initPromise: Promise<void> | null = null;\n \n async initialize(): Promise<void> {\n // Prevent multiple simultaneous initializations\n if (this.initPromise) {\n return this.initPromise;\n }\n \n if (this.extractor) {\n return;\n }\n \n this.initPromise = (async () => {\n try {\n // This downloads ~100MB on first run, then caches in ~/.cache/huggingface\n this.extractor = await pipeline('feature-extraction', this.modelName) as FeatureExtractionPipeline;\n } catch (error: unknown) {\n this.initPromise = null;\n throw wrapError(error, 'Failed to initialize embedding model');\n }\n })();\n \n return this.initPromise;\n }\n \n async embed(text: string): Promise<Float32Array> {\n await this.initialize();\n \n if (!this.extractor) {\n throw new EmbeddingError('Embedding model not initialized');\n }\n \n try {\n const output = await this.extractor(text, {\n pooling: 'mean',\n normalize: true,\n });\n \n 
return output.data as Float32Array;\n } catch (error: unknown) {\n throw wrapError(error, 'Failed to generate embedding', { textLength: text.length });\n }\n }\n \n async embedBatch(texts: string[]): Promise<Float32Array[]> {\n await this.initialize();\n \n if (!this.extractor) {\n throw new EmbeddingError('Embedding model not initialized');\n }\n \n try {\n // Process embeddings with Promise.all for concurrent execution\n // Each call is sequential but Promise.all allows task interleaving\n const results = await Promise.all(\n texts.map(text => this.embed(text))\n );\n return results;\n } catch (error: unknown) {\n throw wrapError(error, 'Failed to generate batch embeddings', { batchSize: texts.length });\n }\n }\n}\n\n","import { EMBEDDING_DIMENSIONS } from '../constants.js';\n\nexport interface EmbeddingService {\n initialize(): Promise<void>;\n embed(text: string): Promise<Float32Array>;\n embedBatch(texts: string[]): Promise<Float32Array[]>;\n}\n\nexport const EMBEDDING_DIMENSION = EMBEDDING_DIMENSIONS;\n\n","/**\n * Relevance category based on semantic similarity score\n */\nexport type RelevanceCategory = 'highly_relevant' | 'relevant' | 'loosely_related' | 'not_relevant';\n\n/**\n * Calculate relevance category from cosine distance score.\n * \n * Lower scores indicate higher similarity (closer in vector space).\n * Thresholds based on observed score distributions from dogfooding.\n * \n * @param score - Cosine distance score from vector search\n * @returns Human-readable relevance category\n */\nexport function calculateRelevance(score: number): RelevanceCategory {\n if (score < 1.0) return 'highly_relevant';\n if (score < 1.3) return 'relevant';\n if (score < 1.5) return 'loosely_related';\n return 'not_relevant';\n}\n\n","/**\n * Query Intent Classification\n * \n * Classifies user search queries into three categories to apply\n * appropriate relevance boosting strategies:\n * \n * - LOCATION: \"Where is X?\" - User wants to find specific files/code\n * - CONCEPTUAL: \"How does X work?\" - User wants to understand concepts\n * - IMPLEMENTATION: \"How is X implemented?\" - User wants implementation details\n * \n * Examples:\n * - \"where is the auth handler\" → LOCATION\n * - \"how does authentication work\" → CONCEPTUAL\n * - \"how is authentication implemented\" → IMPLEMENTATION\n */\n\n/**\n * Query intent types for semantic search\n */\nexport enum QueryIntent {\n /** User wants to locate specific files or code (e.g., \"where is X\") */\n LOCATION = 'location',\n \n /** User wants to understand concepts/processes (e.g., \"how does X work\") */\n CONCEPTUAL = 'conceptual',\n \n /** User wants implementation details (e.g., \"how is X implemented\") */\n IMPLEMENTATION = 'implementation',\n}\n\n/**\n * Classifies a search query into one of three intent categories.\n * \n * Uses pattern matching to detect query intent:\n * - LOCATION: Queries about finding/locating code\n * - CONCEPTUAL: Queries about understanding processes/concepts\n * - IMPLEMENTATION: Queries about code implementation details\n * \n * @param query - The search query string\n * @returns The detected query intent (defaults to IMPLEMENTATION)\n * \n * @example\n * classifyQueryIntent(\"where is the user controller\") // → LOCATION\n * classifyQueryIntent(\"how does authentication work\") // → CONCEPTUAL\n * classifyQueryIntent(\"how is the API implemented\") // → IMPLEMENTATION\n */\nexport function classifyQueryIntent(query: string): QueryIntent {\n const lower = query.toLowerCase().trim();\n \n // LOCATION 
queries - user wants to find specific files\n // Patterns: \"where is/are\", \"find the\", \"locate\"\n if (\n lower.match(/where\\s+(is|are|does|can\\s+i\\s+find)/) ||\n lower.match(/find\\s+the\\s+/) ||\n lower.match(/locate\\s+/)\n ) {\n return QueryIntent.LOCATION;\n }\n \n // CONCEPTUAL queries - user wants to understand how things work\n // Patterns: \"how does X work\", \"what is/are\", \"explain\", \"understand\", etc.\n if (\n lower.match(/how\\s+does\\s+.*\\s+work/) ||\n lower.match(/what\\s+(is|are|does)/) ||\n lower.match(/explain\\s+/) ||\n lower.match(/understand\\s+/) ||\n lower.match(/\\b(process|workflow|architecture)\\b/)\n ) {\n return QueryIntent.CONCEPTUAL;\n }\n \n // IMPLEMENTATION queries - user wants code implementation details\n // Patterns: \"how is/are X implemented/built/coded\", \"implementation of\", \"source code for\"\n if (\n lower.match(/how\\s+(is|are)\\s+.*\\s+(implemented|built|coded)/) ||\n lower.match(/implementation\\s+of/) ||\n lower.match(/source\\s+code\\s+for/)\n ) {\n return QueryIntent.IMPLEMENTATION;\n }\n \n // Default to IMPLEMENTATION for ambiguous queries\n // This is the most common use case for code search\n return QueryIntent.IMPLEMENTATION;\n}\n\n","import path from 'path';\nimport { SearchResult } from './types.js';\nimport { EMBEDDING_DIMENSION } from '../embeddings/types.js';\nimport { DatabaseError, wrapError } from '../errors/index.js';\nimport { calculateRelevance } from './relevance.js';\nimport { QueryIntent, classifyQueryIntent } from './intent-classifier.js';\n\n// TODO: Replace with proper type from lancedb-types.ts\n// Currently using 'any' because tests use incomplete mocks that don't satisfy full LanceDB interface\n// See: https://github.com/getlien/lien/issues/XXX\ntype LanceDBTable = any;\n\n/**\n * Database record structure as stored in LanceDB\n */\ninterface DBRecord {\n vector: number[];\n content: string;\n file: string;\n startLine: number;\n endLine: number;\n type: string;\n language: string;\n functionNames: string[];\n classNames: string[];\n interfaceNames: string[];\n // AST-derived metadata (v0.13.0)\n symbolName?: string;\n symbolType?: string;\n parentClass?: string;\n complexity?: number;\n parameters?: string[];\n signature?: string;\n imports?: string[];\n _distance?: number; // Added by LanceDB for search results\n}\n\n/**\n * Helper Functions for File Type Detection\n */\n\n/**\n * Check if a file is a documentation file.\n */\nfunction isDocumentationFile(filepath: string): boolean {\n const lower = filepath.toLowerCase();\n const filename = path.basename(filepath).toLowerCase();\n \n if (filename.startsWith('readme')) return true;\n if (filename.startsWith('changelog')) return true;\n if (filename.endsWith('.md') || filename.endsWith('.mdx') || filename.endsWith('.markdown')) {\n return true;\n }\n if (\n lower.includes('/docs/') ||\n lower.includes('/documentation/') ||\n lower.includes('/wiki/') ||\n lower.includes('/.github/')\n ) {\n return true;\n }\n if (\n lower.includes('architecture') ||\n lower.includes('workflow') ||\n lower.includes('/flow/')\n ) {\n return true;\n }\n \n return false;\n}\n\n/**\n * Check if a file is a test file.\n */\nfunction isTestFile(filepath: string): boolean {\n const lower = filepath.toLowerCase();\n \n if (\n lower.includes('/test/') ||\n lower.includes('/tests/') ||\n lower.includes('/__tests__/')\n ) {\n return true;\n }\n \n if (\n lower.includes('.test.') ||\n lower.includes('.spec.') ||\n lower.includes('_test.') ||\n lower.includes('_spec.')\n ) {\n 
return true;\n }\n \n return false;\n}\n\n/**\n * Check if a file is a utility/helper file.\n */\nfunction isUtilityFile(filepath: string): boolean {\n const lower = filepath.toLowerCase();\n \n if (\n lower.includes('/utils/') ||\n lower.includes('/utilities/') ||\n lower.includes('/helpers/') ||\n lower.includes('/lib/')\n ) {\n return true;\n }\n \n if (\n lower.includes('.util.') ||\n lower.includes('.helper.') ||\n lower.includes('-util.') ||\n lower.includes('-helper.')\n ) {\n return true;\n }\n \n return false;\n}\n\n/**\n * Boost relevance score based on path matching.\n */\nfunction boostPathRelevance(\n query: string,\n filepath: string,\n baseScore: number\n): number {\n const queryTokens = query.toLowerCase().split(/\\s+/);\n const pathSegments = filepath.toLowerCase().split('/');\n \n let boostFactor = 1.0;\n \n for (const token of queryTokens) {\n if (token.length <= 2) continue;\n if (pathSegments.some(seg => seg.includes(token))) {\n boostFactor *= 0.9;\n }\n }\n \n return baseScore * boostFactor;\n}\n\n/**\n * Boost relevance score based on filename matching.\n */\nfunction boostFilenameRelevance(\n query: string,\n filepath: string,\n baseScore: number\n): number {\n const filename = path.basename(filepath, path.extname(filepath)).toLowerCase();\n const queryTokens = query.toLowerCase().split(/\\s+/);\n \n let boostFactor = 1.0;\n \n for (const token of queryTokens) {\n if (token.length <= 2) continue;\n \n if (filename === token) {\n boostFactor *= 0.70;\n } else if (filename.includes(token)) {\n boostFactor *= 0.80;\n }\n }\n \n return baseScore * boostFactor;\n}\n\n/**\n * Boost relevance for LOCATION intent queries.\n */\nfunction boostForLocationIntent(\n query: string,\n filepath: string,\n baseScore: number\n): number {\n let score = baseScore;\n \n const filename = path.basename(filepath, path.extname(filepath)).toLowerCase();\n const queryTokens = query.toLowerCase().split(/\\s+/);\n \n for (const token of queryTokens) {\n if (token.length <= 2) continue;\n \n if (filename === token) {\n score *= 0.60;\n } else if (filename.includes(token)) {\n score *= 0.70;\n }\n }\n \n score = boostPathRelevance(query, filepath, score);\n \n if (isTestFile(filepath)) {\n score *= 1.10;\n }\n \n return score;\n}\n\n/**\n * Boost relevance for CONCEPTUAL intent queries.\n */\nfunction boostForConceptualIntent(\n query: string,\n filepath: string,\n baseScore: number\n): number {\n let score = baseScore;\n \n if (isDocumentationFile(filepath)) {\n score *= 0.65;\n \n const lower = filepath.toLowerCase();\n if (\n lower.includes('architecture') ||\n lower.includes('workflow') ||\n lower.includes('flow')\n ) {\n score *= 0.90;\n }\n }\n \n if (isUtilityFile(filepath)) {\n score *= 1.05;\n }\n \n const filename = path.basename(filepath, path.extname(filepath)).toLowerCase();\n const queryTokens = query.toLowerCase().split(/\\s+/);\n \n for (const token of queryTokens) {\n if (token.length <= 2) continue;\n if (filename.includes(token)) {\n score *= 0.90;\n }\n }\n \n const pathSegments = filepath.toLowerCase().split(path.sep);\n for (const token of queryTokens) {\n if (token.length <= 2) continue;\n \n for (const segment of pathSegments) {\n if (segment.includes(token)) {\n score *= 0.95;\n break;\n }\n }\n }\n \n return score;\n}\n\n/**\n * Boost relevance for IMPLEMENTATION intent queries.\n */\nfunction boostForImplementationIntent(\n query: string,\n filepath: string,\n baseScore: number\n): number {\n let score = baseScore;\n \n score = boostFilenameRelevance(query, filepath, 
score);\n score = boostPathRelevance(query, filepath, score);\n \n if (isTestFile(filepath)) {\n score *= 0.90;\n }\n \n return score;\n}\n\n/**\n * Apply all relevance boosting strategies to a search score.\n */\nfunction applyRelevanceBoosting(\n query: string | undefined,\n filepath: string,\n baseScore: number\n): number {\n if (!query) {\n return baseScore;\n }\n \n const intent = classifyQueryIntent(query);\n \n switch (intent) {\n case QueryIntent.LOCATION:\n return boostForLocationIntent(query, filepath, baseScore);\n \n case QueryIntent.CONCEPTUAL:\n return boostForConceptualIntent(query, filepath, baseScore);\n \n case QueryIntent.IMPLEMENTATION:\n return boostForImplementationIntent(query, filepath, baseScore);\n \n default:\n return boostForImplementationIntent(query, filepath, baseScore);\n }\n}\n\n/**\n * Convert a DBRecord to a SearchResult\n */\nfunction dbRecordToSearchResult(\n r: DBRecord,\n query?: string\n): SearchResult {\n const baseScore = r._distance ?? 0;\n const boostedScore = applyRelevanceBoosting(query, r.file, baseScore);\n \n return {\n content: r.content,\n metadata: {\n file: r.file,\n startLine: r.startLine,\n endLine: r.endLine,\n type: r.type as 'function' | 'class' | 'block',\n language: r.language,\n // AST-derived metadata (v0.13.0)\n symbolName: r.symbolName || undefined,\n symbolType: r.symbolType as 'function' | 'method' | 'class' | 'interface' | undefined,\n parentClass: r.parentClass || undefined,\n complexity: r.complexity || undefined,\n parameters: (r.parameters && r.parameters.length > 0 && r.parameters[0] !== '') ? r.parameters : undefined,\n signature: r.signature || undefined,\n imports: (r.imports && r.imports.length > 0 && r.imports[0] !== '') ? r.imports : undefined,\n },\n score: boostedScore,\n relevance: calculateRelevance(boostedScore),\n };\n}\n\n/**\n * Search the vector database\n */\nexport async function search(\n table: LanceDBTable,\n queryVector: Float32Array,\n limit: number = 5,\n query?: string\n): Promise<SearchResult[]> {\n if (!table) {\n throw new DatabaseError('Vector database not initialized');\n }\n \n try {\n const results = await table\n .search(Array.from(queryVector))\n .limit(limit + 20)\n .execute();\n \n const filtered = (results as unknown as DBRecord[])\n .filter((r: DBRecord) => \n r.content && \n r.content.trim().length > 0 &&\n r.file && \n r.file.length > 0\n )\n .map((r: DBRecord) => dbRecordToSearchResult(r, query))\n .sort((a, b) => a.score - b.score)\n .slice(0, limit);\n \n return filtered;\n } catch (error) {\n const errorMsg = String(error);\n \n // Detect corrupted index\n if (errorMsg.includes('Not found:') || errorMsg.includes('.lance')) {\n throw new DatabaseError(\n `Index appears corrupted or outdated. 
Please restart the MCP server or run 'lien reindex' in the project directory.`,\n { originalError: error }\n );\n }\n \n throw wrapError(error, 'Failed to search vector database');\n }\n}\n\n/**\n * Scan the database with filters\n */\nexport async function scanWithFilter(\n table: LanceDBTable,\n options: {\n language?: string;\n pattern?: string;\n limit?: number;\n }\n): Promise<SearchResult[]> {\n if (!table) {\n throw new DatabaseError('Vector database not initialized');\n }\n \n const { language, pattern, limit = 100 } = options;\n \n try {\n const zeroVector = Array(EMBEDDING_DIMENSION).fill(0);\n const query = table.search(zeroVector)\n .where('file != \"\"')\n .limit(Math.max(limit * 5, 200));\n \n const results = await query.execute();\n \n let filtered = (results as unknown as DBRecord[]).filter((r: DBRecord) => \n r.content && \n r.content.trim().length > 0 &&\n r.file && \n r.file.length > 0\n );\n \n if (language) {\n filtered = filtered.filter((r: DBRecord) => \n r.language && r.language.toLowerCase() === language.toLowerCase()\n );\n }\n \n if (pattern) {\n const regex = new RegExp(pattern, 'i');\n filtered = filtered.filter((r: DBRecord) =>\n regex.test(r.content) || regex.test(r.file)\n );\n }\n \n return filtered.slice(0, limit).map((r: DBRecord) => {\n const score = 0;\n return {\n content: r.content,\n metadata: {\n file: r.file,\n startLine: r.startLine,\n endLine: r.endLine,\n type: r.type as 'function' | 'class' | 'block',\n language: r.language,\n // AST-derived metadata (v0.13.0)\n symbolName: r.symbolName || undefined,\n symbolType: r.symbolType as 'function' | 'method' | 'class' | 'interface' | undefined,\n parentClass: r.parentClass || undefined,\n complexity: r.complexity || undefined,\n parameters: (r.parameters && r.parameters.length > 0 && r.parameters[0] !== '') ? r.parameters : undefined,\n signature: r.signature || undefined,\n imports: (r.imports && r.imports.length > 0 && r.imports[0] !== '') ? 
r.imports : undefined,\n },\n score,\n relevance: calculateRelevance(score),\n };\n });\n } catch (error) {\n throw wrapError(error, 'Failed to scan with filter');\n }\n}\n\n/**\n * Helper to check if a record matches the requested symbol type\n */\nfunction matchesSymbolType(\n record: DBRecord,\n symbolType: 'function' | 'class' | 'interface',\n symbols: string[]\n): boolean {\n // If AST-based symbolType exists, use it (more accurate)\n if (record.symbolType) {\n if (symbolType === 'function') {\n return record.symbolType === 'function' || record.symbolType === 'method';\n } else if (symbolType === 'class') {\n return record.symbolType === 'class';\n } else if (symbolType === 'interface') {\n return record.symbolType === 'interface';\n }\n return false;\n }\n \n // Fallback: check if pre-AST symbols array has valid entries\n return symbols.length > 0 && symbols.some((s: string) => s.length > 0 && s !== '');\n}\n\n/**\n * Query symbols (functions, classes, interfaces)\n */\nexport async function querySymbols(\n table: LanceDBTable,\n options: {\n language?: string;\n pattern?: string;\n symbolType?: 'function' | 'class' | 'interface';\n limit?: number;\n }\n): Promise<SearchResult[]> {\n if (!table) {\n throw new DatabaseError('Vector database not initialized');\n }\n \n const { language, pattern, symbolType, limit = 50 } = options;\n \n try {\n const zeroVector = Array(EMBEDDING_DIMENSION).fill(0);\n const query = table.search(zeroVector)\n .where('file != \"\"')\n .limit(Math.max(limit * 10, 500));\n \n const results = await query.execute();\n \n let filtered = (results as unknown as DBRecord[]).filter((r: DBRecord) => {\n if (!r.content || r.content.trim().length === 0) {\n return false;\n }\n if (!r.file || r.file.length === 0) {\n return false;\n }\n \n if (language && (!r.language || r.language.toLowerCase() !== language.toLowerCase())) {\n return false;\n }\n \n const symbols = symbolType === 'function' ? (r.functionNames || []) :\n symbolType === 'class' ? (r.classNames || []) :\n symbolType === 'interface' ? (r.interfaceNames || []) :\n [...(r.functionNames || []), ...(r.classNames || []), ...(r.interfaceNames || [])];\n \n const astSymbolName = r.symbolName || '';\n \n if (symbols.length === 0 && !astSymbolName) {\n return false;\n }\n \n if (pattern) {\n const regex = new RegExp(pattern, 'i');\n const matchesOldSymbols = symbols.some((s: string) => regex.test(s));\n const matchesASTSymbol = regex.test(astSymbolName);\n const nameMatches = matchesOldSymbols || matchesASTSymbol;\n \n if (!nameMatches) return false;\n \n if (symbolType) {\n return matchesSymbolType(r, symbolType, symbols);\n }\n \n return nameMatches;\n }\n \n if (symbolType) {\n return matchesSymbolType(r, symbolType, symbols);\n }\n \n return true;\n });\n \n return filtered.slice(0, limit).map((r: DBRecord) => {\n const score = 0;\n return {\n content: r.content,\n metadata: {\n file: r.file,\n startLine: r.startLine,\n endLine: r.endLine,\n type: r.type as 'function' | 'class' | 'block',\n language: r.language,\n symbols: {\n functions: (r.functionNames && r.functionNames.length > 0 && r.functionNames[0] !== '') ? r.functionNames : [],\n classes: (r.classNames && r.classNames.length > 0 && r.classNames[0] !== '') ? r.classNames : [],\n interfaces: (r.interfaceNames && r.interfaceNames.length > 0 && r.interfaceNames[0] !== '') ? 
r.interfaceNames : [],\n },\n // AST-derived metadata (v0.13.0)\n symbolName: r.symbolName || undefined,\n symbolType: r.symbolType as 'function' | 'method' | 'class' | 'interface' | undefined,\n parentClass: r.parentClass || undefined,\n complexity: r.complexity || undefined,\n parameters: (r.parameters && r.parameters.length > 0 && r.parameters[0] !== '') ? r.parameters : undefined,\n signature: r.signature || undefined,\n imports: (r.imports && r.imports.length > 0 && r.imports[0] !== '') ? r.imports : undefined,\n },\n score,\n relevance: calculateRelevance(score),\n };\n });\n } catch (error) {\n throw wrapError(error, 'Failed to query symbols');\n }\n}\n\n","import { ChunkMetadata } from '../indexer/types.js';\nimport { DatabaseError } from '../errors/index.js';\nimport { VECTOR_DB_MAX_BATCH_SIZE, VECTOR_DB_MIN_BATCH_SIZE } from '../constants.js';\n\n// TODO: Replace with proper types from lancedb-types.ts\n// Currently using 'any' because tests use incomplete mocks that don't satisfy full LanceDB interface\n// Proper types: Awaited<ReturnType<typeof lancedb.connect>> and Awaited<ReturnType<Connection['openTable']>>\ntype LanceDBConnection = any;\ntype LanceDBTable = any;\n\n/**\n * Insert a batch of vectors into the database\n * \n * @returns The table instance after insertion, or null only when:\n * - vectors.length === 0 AND table === null (no-op case)\n * For non-empty batches, always returns a valid table or throws.\n * @throws {DatabaseError} If database not initialized or insertion fails\n */\nexport async function insertBatch(\n db: LanceDBConnection,\n table: LanceDBTable | null,\n tableName: string,\n vectors: Float32Array[],\n metadatas: ChunkMetadata[],\n contents: string[]\n): Promise<LanceDBTable | null> {\n if (!db) {\n throw new DatabaseError('Vector database not initialized');\n }\n \n if (vectors.length !== metadatas.length || vectors.length !== contents.length) {\n throw new DatabaseError('Vectors, metadatas, and contents arrays must have the same length', {\n vectorsLength: vectors.length,\n metadatasLength: metadatas.length,\n contentsLength: contents.length,\n });\n }\n \n // Handle empty batch gracefully - return table as-is (could be null)\n if (vectors.length === 0) {\n return table;\n }\n \n // Split large batches into smaller chunks\n if (vectors.length > VECTOR_DB_MAX_BATCH_SIZE) {\n let currentTable = table;\n for (let i = 0; i < vectors.length; i += VECTOR_DB_MAX_BATCH_SIZE) {\n const batchVectors = vectors.slice(i, Math.min(i + VECTOR_DB_MAX_BATCH_SIZE, vectors.length));\n const batchMetadata = metadatas.slice(i, Math.min(i + VECTOR_DB_MAX_BATCH_SIZE, vectors.length));\n const batchContents = contents.slice(i, Math.min(i + VECTOR_DB_MAX_BATCH_SIZE, vectors.length));\n \n currentTable = await insertBatchInternal(db, currentTable, tableName, batchVectors, batchMetadata, batchContents);\n }\n if (!currentTable) {\n throw new DatabaseError('Failed to create table during batch insert');\n }\n return currentTable;\n } else {\n return insertBatchInternal(db, table, tableName, vectors, metadatas, contents);\n }\n}\n\n/**\n * Internal method to insert a single batch with iterative retry logic.\n * @returns Always returns a valid LanceDBTable or throws DatabaseError\n */\nasync function insertBatchInternal(\n db: LanceDBConnection,\n table: LanceDBTable | null,\n tableName: string,\n vectors: Float32Array[],\n metadatas: ChunkMetadata[],\n contents: string[]\n): Promise<LanceDBTable> {\n interface BatchToProcess {\n vectors: Float32Array[];\n metadatas: 
ChunkMetadata[];\n contents: string[];\n }\n \n const queue: BatchToProcess[] = [{ vectors, metadatas, contents }];\n const failedRecords: BatchToProcess[] = [];\n let currentTable = table;\n \n // Process batches iteratively\n while (queue.length > 0) {\n const batch = queue.shift();\n if (!batch) break; // Should never happen due to while condition, but satisfies type checker\n \n try {\n const records = batch.vectors.map((vector, i) => ({\n vector: Array.from(vector),\n content: batch.contents[i],\n file: batch.metadatas[i].file,\n startLine: batch.metadatas[i].startLine,\n endLine: batch.metadatas[i].endLine,\n type: batch.metadatas[i].type,\n language: batch.metadatas[i].language,\n // Ensure arrays have at least empty string for Arrow type inference\n functionNames: (batch.metadatas[i].symbols?.functions && batch.metadatas[i].symbols.functions.length > 0) ? batch.metadatas[i].symbols.functions : [''],\n classNames: (batch.metadatas[i].symbols?.classes && batch.metadatas[i].symbols.classes.length > 0) ? batch.metadatas[i].symbols.classes : [''],\n interfaceNames: (batch.metadatas[i].symbols?.interfaces && batch.metadatas[i].symbols.interfaces.length > 0) ? batch.metadatas[i].symbols.interfaces : [''],\n // AST-derived metadata (v0.13.0)\n symbolName: batch.metadatas[i].symbolName || '',\n symbolType: batch.metadatas[i].symbolType || '',\n parentClass: batch.metadatas[i].parentClass || '',\n complexity: batch.metadatas[i].complexity || 0,\n parameters: (batch.metadatas[i].parameters && batch.metadatas[i].parameters.length > 0) ? batch.metadatas[i].parameters : [''],\n signature: batch.metadatas[i].signature || '',\n imports: (batch.metadatas[i].imports && batch.metadatas[i].imports.length > 0) ? batch.metadatas[i].imports : [''],\n }));\n \n // Create table if it doesn't exist, otherwise add to existing table\n if (!currentTable) {\n currentTable = await db.createTable(tableName, records);\n } else {\n await currentTable.add(records);\n }\n } catch (error) {\n // If batch has more than min size records, split and retry\n if (batch.vectors.length > VECTOR_DB_MIN_BATCH_SIZE) {\n const half = Math.floor(batch.vectors.length / 2);\n \n // Split in half and add back to queue\n queue.push({\n vectors: batch.vectors.slice(0, half),\n metadatas: batch.metadatas.slice(0, half),\n contents: batch.contents.slice(0, half),\n });\n queue.push({\n vectors: batch.vectors.slice(half),\n metadatas: batch.metadatas.slice(half),\n contents: batch.contents.slice(half),\n });\n } else {\n // Small batch failed - collect for final error report\n failedRecords.push(batch);\n }\n }\n }\n \n // If any small batches failed, throw error with details\n if (failedRecords.length > 0) {\n const totalFailed = failedRecords.reduce((sum, batch) => sum + batch.vectors.length, 0);\n throw new DatabaseError(\n `Failed to insert ${totalFailed} record(s) after retry attempts`,\n {\n failedBatches: failedRecords.length,\n totalRecords: totalFailed,\n sampleFile: failedRecords[0].metadatas[0].file,\n }\n );\n }\n \n if (!currentTable) {\n throw new DatabaseError('Failed to create table during batch insert');\n }\n return currentTable;\n}\n\n","import { ChunkMetadata } from '../indexer/types.js';\nimport { DatabaseError, wrapError } from '../errors/index.js';\nimport { writeVersionFile } from './version.js';\nimport { insertBatch } from './batch-insert.js';\n\n// TODO: Replace with proper types from lancedb-types.ts\n// Currently using 'any' because tests use incomplete mocks that don't satisfy full LanceDB interface\n// 
Proper types: Awaited<ReturnType<typeof lancedb.connect>> and Awaited<ReturnType<Connection['openTable']>>\ntype LanceDBConnection = any;\ntype LanceDBTable = any;\n\n/**\n * Clear all data from the vector database\n */\nexport async function clear(\n db: LanceDBConnection,\n table: LanceDBTable | null,\n tableName: string\n): Promise<void> {\n if (!db) {\n throw new DatabaseError('Vector database not initialized');\n }\n \n try {\n // Drop table if it exists\n if (table) {\n await db.dropTable(tableName);\n }\n } catch (error) {\n throw wrapError(error, 'Failed to clear vector database');\n }\n}\n\n/**\n * Delete all chunks from a specific file\n */\nexport async function deleteByFile(\n table: LanceDBTable,\n filepath: string\n): Promise<void> {\n if (!table) {\n throw new DatabaseError('Vector database not initialized');\n }\n \n try {\n await table.delete(`file = \"${filepath}\"`);\n } catch (error) {\n throw wrapError(error, 'Failed to delete file from vector database');\n }\n}\n\n/**\n * Update a file in the index by atomically deleting old chunks and inserting new ones\n */\nexport async function updateFile(\n db: LanceDBConnection,\n table: LanceDBTable | null,\n tableName: string,\n dbPath: string,\n filepath: string,\n vectors: Float32Array[],\n metadatas: ChunkMetadata[],\n contents: string[]\n): Promise<LanceDBTable> {\n if (!table) {\n throw new DatabaseError('Vector database not initialized');\n }\n \n try {\n // 1. Delete old chunks from this file\n await deleteByFile(table, filepath);\n \n // 2. Insert new chunks (if any)\n let updatedTable = table;\n if (vectors.length > 0) {\n updatedTable = await insertBatch(db, table, tableName, vectors, metadatas, contents);\n if (!updatedTable) {\n throw new DatabaseError('insertBatch unexpectedly returned null');\n }\n }\n \n // 3. 
Update version file to trigger MCP reconnection\n await writeVersionFile(dbPath);\n \n return updatedTable;\n } catch (error) {\n throw wrapError(error, 'Failed to update file in vector database');\n }\n}\n\n","import * as lancedb from 'vectordb';\nimport path from 'path';\nimport os from 'os';\nimport crypto from 'crypto';\nimport { SearchResult, VectorDBInterface } from './types.js';\nimport { ChunkMetadata } from '../indexer/types.js';\nimport { EMBEDDING_DIMENSION } from '../embeddings/types.js';\nimport { readVersionFile } from './version.js';\nimport { DatabaseError, wrapError } from '../errors/index.js';\nimport * as queryOps from './query.js';\nimport * as batchOps from './batch-insert.js';\nimport * as maintenanceOps from './maintenance.js';\n\ntype LanceDBConnection = Awaited<ReturnType<typeof lancedb.connect>>;\ntype LanceDBTable = Awaited<ReturnType<LanceDBConnection['openTable']>>;\n\nexport class VectorDB implements VectorDBInterface {\n private db: LanceDBConnection | null = null;\n private table: LanceDBTable | null = null;\n public readonly dbPath: string;\n private readonly tableName = 'code_chunks';\n private lastVersionCheck: number = 0;\n private currentVersion: number = 0;\n \n constructor(projectRoot: string) {\n // Store in user's home directory under ~/.lien/indices/{projectName-hash}\n const projectName = path.basename(projectRoot);\n \n // Create unique identifier from full path to prevent collisions\n const pathHash = crypto\n .createHash('md5')\n .update(projectRoot)\n .digest('hex')\n .substring(0, 8);\n \n this.dbPath = path.join(\n os.homedir(),\n '.lien',\n 'indices',\n `${projectName}-${pathHash}`\n );\n }\n \n async initialize(): Promise<void> {\n try {\n this.db = await lancedb.connect(this.dbPath);\n \n try {\n this.table = await this.db.openTable(this.tableName);\n } catch {\n // Table doesn't exist yet - will be created on first insert\n this.table = null;\n }\n \n // Read and cache the current version\n try {\n this.currentVersion = await readVersionFile(this.dbPath);\n } catch {\n // Version file doesn't exist yet, will be created on first index\n this.currentVersion = 0;\n }\n } catch (error: unknown) {\n throw wrapError(error, 'Failed to initialize vector database', { dbPath: this.dbPath });\n }\n }\n \n async insertBatch(\n vectors: Float32Array[],\n metadatas: ChunkMetadata[],\n contents: string[]\n ): Promise<void> {\n if (!this.db) {\n throw new DatabaseError('Vector database not initialized');\n }\n // Note: insertBatch may return null for empty batches when table is null\n // This is correct behavior - empty batches are no-ops and don't create tables\n this.table = await batchOps.insertBatch(\n this.db,\n this.table,\n this.tableName,\n vectors,\n metadatas,\n contents\n );\n }\n \n async search(\n queryVector: Float32Array,\n limit: number = 5,\n query?: string\n ): Promise<SearchResult[]> {\n if (!this.table) {\n throw new DatabaseError('Vector database not initialized');\n }\n \n try {\n return await queryOps.search(this.table, queryVector, limit, query);\n } catch (error) {\n const errorMsg = String(error);\n \n // Detect corrupted index or missing data files\n if (errorMsg.includes('Not found:') || errorMsg.includes('.lance')) {\n // Attempt to reconnect - index may have been rebuilt\n try {\n await this.initialize();\n if (!this.table) {\n throw new DatabaseError('Vector database not initialized after reconnection');\n }\n return await queryOps.search(this.table, queryVector, limit, query);\n } catch (retryError: unknown) {\n throw new 
DatabaseError(\n `Index appears corrupted or outdated. Please restart the MCP server or run 'lien reindex' in the project directory.`,\n { originalError: retryError }\n );\n }\n }\n \n throw error;\n }\n }\n \n async scanWithFilter(options: {\n language?: string;\n pattern?: string;\n limit?: number;\n }): Promise<SearchResult[]> {\n if (!this.table) {\n throw new DatabaseError('Vector database not initialized');\n }\n return queryOps.scanWithFilter(this.table, options);\n }\n \n async querySymbols(options: {\n language?: string;\n pattern?: string;\n symbolType?: 'function' | 'class' | 'interface';\n limit?: number;\n }): Promise<SearchResult[]> {\n if (!this.table) {\n throw new DatabaseError('Vector database not initialized');\n }\n return queryOps.querySymbols(this.table, options);\n }\n \n async clear(): Promise<void> {\n if (!this.db) {\n throw new DatabaseError('Vector database not initialized');\n }\n await maintenanceOps.clear(this.db, this.table, this.tableName);\n this.table = null;\n }\n \n async deleteByFile(filepath: string): Promise<void> {\n if (!this.table) {\n throw new DatabaseError('Vector database not initialized');\n }\n await maintenanceOps.deleteByFile(this.table, filepath);\n }\n \n async updateFile(\n filepath: string,\n vectors: Float32Array[],\n metadatas: ChunkMetadata[],\n contents: string[]\n ): Promise<void> {\n if (!this.db) {\n throw new DatabaseError('Vector database connection not initialized');\n }\n if (!this.table) {\n throw new DatabaseError('Vector database table not initialized');\n }\n this.table = await maintenanceOps.updateFile(\n this.db,\n this.table,\n this.tableName,\n this.dbPath,\n filepath,\n vectors,\n metadatas,\n contents\n );\n }\n \n async checkVersion(): Promise<boolean> {\n const now = Date.now();\n \n // Cache version checks for 1 second to minimize I/O\n if (now - this.lastVersionCheck < 1000) {\n return false;\n }\n \n this.lastVersionCheck = now;\n \n try {\n const version = await readVersionFile(this.dbPath);\n \n if (version > this.currentVersion) {\n this.currentVersion = version;\n return true;\n }\n \n return false;\n } catch (error) {\n // If we can't read version file, don't reconnect\n return false;\n }\n }\n \n async reconnect(): Promise<void> {\n try {\n // Close existing connections to force reload from disk\n this.table = null;\n this.db = null;\n \n // Reinitialize with fresh connection\n await this.initialize();\n } catch (error) {\n throw wrapError(error, 'Failed to reconnect to vector database');\n }\n }\n \n getCurrentVersion(): number {\n return this.currentVersion;\n }\n \n getVersionDate(): string {\n if (this.currentVersion === 0) {\n return 'Unknown';\n }\n return new Date(this.currentVersion).toLocaleString();\n }\n \n async hasData(): Promise<boolean> {\n if (!this.table) {\n return false;\n }\n \n try {\n const count = await this.table.countRows();\n \n if (count === 0) {\n return false;\n }\n \n // Sample a few rows to verify they contain real data\n const sample = await this.table\n .search(Array(EMBEDDING_DIMENSION).fill(0))\n .limit(Math.min(count, 5))\n .execute();\n \n const hasRealData = (sample as unknown as any[]).some((r: any) => \n r.content && \n r.content.trim().length > 0\n );\n \n return hasRealData;\n } catch {\n // If any error occurs, assume no data\n return false;\n }\n }\n \n static async load(projectRoot: string): Promise<VectorDB> {\n const db = new VectorDB(projectRoot);\n await db.initialize();\n return db;\n }\n}\n","import fs from 'fs/promises';\nimport path from 
'path';\nimport { INDEX_FORMAT_VERSION } from '../constants.js';\nimport { GitState } from '../git/tracker.js';\nimport { getPackageVersion } from '../utils/version.js';\n\nconst MANIFEST_FILE = 'manifest.json';\n\n/**\n * Represents a single file in the index manifest\n */\nexport interface FileEntry {\n filepath: string;\n lastModified: number;\n chunkCount: number;\n}\n\n/**\n * Index manifest tracking all indexed files and version information\n */\nexport interface IndexManifest {\n formatVersion: number; // Index format version for compatibility checking\n lienVersion: string; // Lien package version (for reference)\n lastIndexed: number; // Timestamp of last indexing operation\n gitState?: GitState; // Last known git state\n files: Record<string, FileEntry>; // Map of filepath -> FileEntry (stored as object for JSON)\n}\n\n/**\n * Manages the index manifest file, tracking which files are indexed\n * and their metadata for incremental indexing support.\n * \n * The manifest includes version checking to invalidate indices when\n * Lien's indexing format changes (e.g., new chunking algorithm,\n * different embedding model, schema changes).\n */\nexport class ManifestManager {\n private manifestPath: string;\n private indexPath: string;\n \n /**\n * Promise-based lock to prevent race conditions during concurrent updates.\n * Ensures read-modify-write operations are atomic.\n */\n private updateLock = Promise.resolve();\n \n /**\n * Creates a new ManifestManager\n * @param indexPath - Path to the index directory (same as VectorDB path)\n */\n constructor(indexPath: string) {\n this.indexPath = indexPath;\n this.manifestPath = path.join(indexPath, MANIFEST_FILE);\n }\n \n /**\n * Loads the manifest from disk.\n * Returns null if:\n * - Manifest doesn't exist (first run)\n * - Manifest is corrupt\n * - Format version is incompatible (triggers full reindex)\n * \n * @returns Loaded manifest or null\n */\n async load(): Promise<IndexManifest | null> {\n try {\n const content = await fs.readFile(this.manifestPath, 'utf-8');\n const manifest = JSON.parse(content) as IndexManifest;\n \n // VERSION CHECK: Invalidate if format version doesn't match\n if (manifest.formatVersion !== INDEX_FORMAT_VERSION) {\n console.error(\n `[Lien] Index format v${manifest.formatVersion} is incompatible with current v${INDEX_FORMAT_VERSION}`\n );\n console.error(`[Lien] Full reindex required after Lien upgrade`);\n \n // Clear old manifest and return null (triggers full reindex)\n await this.clear();\n return null;\n }\n \n return manifest;\n } catch (error) {\n // File doesn't exist or is invalid - return null for first run\n if ((error as NodeJS.ErrnoException).code === 'ENOENT') {\n return null;\n }\n \n // Corrupt manifest - log warning and return null\n console.error(`[Lien] Warning: Failed to load manifest: ${error}`);\n return null;\n }\n }\n \n /**\n * Saves the manifest to disk.\n * Always saves with current format and package versions.\n * \n * @param manifest - Manifest to save\n */\n async save(manifest: IndexManifest): Promise<void> {\n try {\n // Ensure index directory exists\n await fs.mkdir(this.indexPath, { recursive: true });\n \n // Always save with current versions\n const manifestToSave: IndexManifest = {\n ...manifest,\n formatVersion: INDEX_FORMAT_VERSION,\n lienVersion: getPackageVersion(),\n lastIndexed: Date.now(),\n };\n \n const content = JSON.stringify(manifestToSave, null, 2);\n await fs.writeFile(this.manifestPath, content, 'utf-8');\n } catch (error) {\n // Don't throw - manifest is 
best-effort\n console.error(`[Lien] Warning: Failed to save manifest: ${error}`);\n }\n }\n \n /**\n * Adds or updates a file entry in the manifest.\n * Protected by lock to prevent race conditions during concurrent updates.\n * \n * @param filepath - Path to the file\n * @param entry - File entry metadata\n */\n async updateFile(filepath: string, entry: FileEntry): Promise<void> {\n // Chain this operation to the lock to ensure atomicity\n this.updateLock = this.updateLock.then(async () => {\n const manifest = await this.load() || this.createEmpty();\n manifest.files[filepath] = entry;\n await this.save(manifest);\n }).catch(error => {\n console.error(`[Lien] Failed to update manifest for ${filepath}: ${error}`);\n // Return to reset lock - don't let errors block future operations\n return undefined;\n });\n \n // Wait for this operation to complete\n await this.updateLock;\n }\n \n /**\n * Removes a file entry from the manifest.\n * Protected by lock to prevent race conditions during concurrent updates.\n * \n * Note: If the manifest doesn't exist, this is a no-op (not an error).\n * This can happen legitimately after clearing the index or on fresh installs.\n * \n * @param filepath - Path to the file to remove\n */\n async removeFile(filepath: string): Promise<void> {\n // Chain this operation to the lock to ensure atomicity\n this.updateLock = this.updateLock.then(async () => {\n const manifest = await this.load();\n if (!manifest) {\n // No manifest exists - nothing to remove from (expected in some scenarios)\n return;\n }\n \n delete manifest.files[filepath];\n await this.save(manifest);\n }).catch(error => {\n console.error(`[Lien] Failed to remove manifest entry for ${filepath}: ${error}`);\n // Return to reset lock - don't let errors block future operations\n return undefined;\n });\n \n // Wait for this operation to complete\n await this.updateLock;\n }\n \n /**\n * Updates multiple files at once (more efficient than individual updates).\n * Protected by lock to prevent race conditions during concurrent updates.\n * \n * @param entries - Array of file entries to update\n */\n async updateFiles(entries: FileEntry[]): Promise<void> {\n // Chain this operation to the lock to ensure atomicity\n this.updateLock = this.updateLock.then(async () => {\n const manifest = await this.load() || this.createEmpty();\n \n for (const entry of entries) {\n manifest.files[entry.filepath] = entry;\n }\n \n await this.save(manifest);\n }).catch(error => {\n console.error(`[Lien] Failed to update manifest for ${entries.length} files: ${error}`);\n // Return to reset lock - don't let errors block future operations\n return undefined;\n });\n \n // Wait for this operation to complete\n await this.updateLock;\n }\n \n /**\n * Updates the git state in the manifest.\n * Protected by lock to prevent race conditions during concurrent updates.\n * \n * @param gitState - Current git state\n */\n async updateGitState(gitState: GitState): Promise<void> {\n // Chain this operation to the lock to ensure atomicity\n this.updateLock = this.updateLock.then(async () => {\n const manifest = await this.load() || this.createEmpty();\n \n manifest.gitState = gitState;\n await this.save(manifest);\n }).catch(error => {\n console.error(`[Lien] Failed to update git state in manifest: ${error}`);\n // Return to reset lock - don't let errors block future operations\n return undefined;\n });\n \n // Wait for this operation to complete\n await this.updateLock;\n }\n \n /**\n * Gets the list of files currently in the manifest\n * 
\n * @returns Array of filepaths\n */\n async getIndexedFiles(): Promise<string[]> {\n const manifest = await this.load();\n if (!manifest) return [];\n \n return Object.keys(manifest.files);\n }\n \n /**\n * Detects which files have changed based on mtime comparison\n * \n * @param currentFiles - Map of current files with their mtimes\n * @returns Array of filepaths that have changed\n */\n async getChangedFiles(currentFiles: Map<string, number>): Promise<string[]> {\n const manifest = await this.load();\n if (!manifest) {\n // No manifest = all files are \"changed\" (need full index)\n return Array.from(currentFiles.keys());\n }\n \n const changedFiles: string[] = [];\n \n for (const [filepath, mtime] of currentFiles) {\n const entry = manifest.files[filepath];\n \n if (!entry) {\n // New file\n changedFiles.push(filepath);\n } else if (entry.lastModified < mtime) {\n // File modified since last index\n changedFiles.push(filepath);\n }\n }\n \n return changedFiles;\n }\n \n /**\n * Gets files that are in the manifest but not in the current file list\n * (i.e., deleted files)\n * \n * @param currentFiles - Set of current file paths\n * @returns Array of deleted file paths\n */\n async getDeletedFiles(currentFiles: Set<string>): Promise<string[]> {\n const manifest = await this.load();\n if (!manifest) return [];\n \n const deletedFiles: string[] = [];\n \n for (const filepath of Object.keys(manifest.files)) {\n if (!currentFiles.has(filepath)) {\n deletedFiles.push(filepath);\n }\n }\n \n return deletedFiles;\n }\n \n /**\n * Clears the manifest file\n */\n async clear(): Promise<void> {\n try {\n await fs.unlink(this.manifestPath);\n } catch (error) {\n // Ignore error if file doesn't exist\n if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {\n console.error(`[Lien] Warning: Failed to clear manifest: ${error}`);\n }\n }\n }\n \n /**\n * Creates an empty manifest with current version information\n * \n * @returns Empty manifest\n */\n private createEmpty(): IndexManifest {\n return {\n formatVersion: INDEX_FORMAT_VERSION,\n lienVersion: getPackageVersion(),\n lastIndexed: Date.now(),\n files: {},\n };\n }\n}\n\n","import fs from 'fs/promises';\nimport path from 'path';\nimport {\n isGitRepo,\n getCurrentBranch,\n getCurrentCommit,\n getChangedFiles,\n getChangedFilesBetweenCommits,\n} from './utils.js';\n\nexport interface GitState {\n branch: string;\n commit: string;\n timestamp: number;\n}\n\n/**\n * Tracks git state (branch and commit) and detects changes.\n * Persists state to disk to survive server restarts.\n */\nexport class GitStateTracker {\n private stateFile: string;\n private rootDir: string;\n private currentState: GitState | null = null;\n \n constructor(rootDir: string, indexPath: string) {\n this.rootDir = rootDir;\n this.stateFile = path.join(indexPath, '.git-state.json');\n }\n \n /**\n * Loads the last known git state from disk.\n * Returns null if no state file exists (first run).\n */\n private async loadState(): Promise<GitState | null> {\n try {\n const content = await fs.readFile(this.stateFile, 'utf-8');\n return JSON.parse(content);\n } catch {\n // File doesn't exist or is invalid - this is fine for first run\n return null;\n }\n }\n \n /**\n * Saves the current git state to disk.\n */\n private async saveState(state: GitState): Promise<void> {\n try {\n const content = JSON.stringify(state, null, 2);\n await fs.writeFile(this.stateFile, content, 'utf-8');\n } catch (error) {\n // Log but don't throw - state persistence is best-effort\n 
console.error(`[Lien] Warning: Failed to save git state: ${error}`);\n }\n }\n \n /**\n * Gets the current git state from the repository.\n * \n * @returns Current git state\n * @throws Error if git commands fail\n */\n private async getCurrentGitState(): Promise<GitState> {\n const branch = await getCurrentBranch(this.rootDir);\n const commit = await getCurrentCommit(this.rootDir);\n \n return {\n branch,\n commit,\n timestamp: Date.now(),\n };\n }\n \n /**\n * Initializes the tracker by loading saved state and checking current state.\n * Should be called once when MCP server starts.\n * \n * @returns Array of changed files if state changed, null if no changes or first run\n */\n async initialize(): Promise<string[] | null> {\n // Check if this is a git repo\n const isRepo = await isGitRepo(this.rootDir);\n if (!isRepo) {\n return null;\n }\n \n try {\n // Get current state\n this.currentState = await this.getCurrentGitState();\n \n // Load previous state\n const previousState = await this.loadState();\n \n if (!previousState) {\n // First run - save current state\n await this.saveState(this.currentState);\n return null;\n }\n \n // Check if state changed\n const branchChanged = previousState.branch !== this.currentState.branch;\n const commitChanged = previousState.commit !== this.currentState.commit;\n \n if (!branchChanged && !commitChanged) {\n // No changes\n return null;\n }\n \n // State changed - get list of changed files\n let changedFiles: string[] = [];\n \n if (branchChanged) {\n // Branch changed - compare current branch with previous branch\n try {\n changedFiles = await getChangedFiles(\n this.rootDir,\n previousState.branch,\n this.currentState.branch\n );\n } catch (error) {\n // If branches diverged too much or don't exist, fall back to commit diff\n console.error(`[Lien] Branch diff failed, using commit diff: ${error}`);\n changedFiles = await getChangedFilesBetweenCommits(\n this.rootDir,\n previousState.commit,\n this.currentState.commit\n );\n }\n } else if (commitChanged) {\n // Same branch, different commit\n changedFiles = await getChangedFilesBetweenCommits(\n this.rootDir,\n previousState.commit,\n this.currentState.commit\n );\n }\n \n // Save new state\n await this.saveState(this.currentState);\n \n return changedFiles;\n } catch (error) {\n console.error(`[Lien] Failed to initialize git tracker: ${error}`);\n return null;\n }\n }\n \n /**\n * Checks for git state changes since last check.\n * This is called periodically by the MCP server.\n * \n * @returns Array of changed files if state changed, null if no changes\n */\n async detectChanges(): Promise<string[] | null> {\n // Check if this is a git repo\n const isRepo = await isGitRepo(this.rootDir);\n if (!isRepo) {\n return null;\n }\n \n try {\n // Get current state\n const newState = await this.getCurrentGitState();\n \n // If we don't have a previous state, just save current and return\n if (!this.currentState) {\n this.currentState = newState;\n await this.saveState(newState);\n return null;\n }\n \n // Check if state changed\n const branchChanged = this.currentState.branch !== newState.branch;\n const commitChanged = this.currentState.commit !== newState.commit;\n \n if (!branchChanged && !commitChanged) {\n // No changes\n return null;\n }\n \n // State changed - get list of changed files\n let changedFiles: string[] = [];\n \n if (branchChanged) {\n // Branch changed\n try {\n changedFiles = await getChangedFiles(\n this.rootDir,\n this.currentState.branch,\n newState.branch\n );\n } catch (error) 
{\n // Fall back to commit diff\n console.error(`[Lien] Branch diff failed, using commit diff: ${error}`);\n changedFiles = await getChangedFilesBetweenCommits(\n this.rootDir,\n this.currentState.commit,\n newState.commit\n );\n }\n } else if (commitChanged) {\n // Same branch, different commit\n changedFiles = await getChangedFilesBetweenCommits(\n this.rootDir,\n this.currentState.commit,\n newState.commit\n );\n }\n \n // Update current state\n this.currentState = newState;\n await this.saveState(newState);\n \n return changedFiles;\n } catch (error) {\n console.error(`[Lien] Failed to detect git changes: ${error}`);\n return null;\n }\n }\n \n /**\n * Gets the current git state.\n * Useful for status display.\n */\n getState(): GitState | null {\n return this.currentState;\n }\n \n /**\n * Manually updates the saved state.\n * Useful after manual reindexing to sync state.\n */\n async updateState(): Promise<void> {\n try {\n this.currentState = await this.getCurrentGitState();\n await this.saveState(this.currentState);\n } catch (error) {\n console.error(`[Lien] Failed to update git state: ${error}`);\n }\n }\n}\n\n","import fs from 'fs/promises';\nimport { VectorDB } from '../vectordb/lancedb.js';\nimport { ManifestManager, IndexManifest } from './manifest.js';\nimport { scanCodebase, scanCodebaseWithFrameworks } from './scanner.js';\nimport { LienConfig, LegacyLienConfig, isModernConfig, isLegacyConfig } from '../config/schema.js';\nimport { GitStateTracker } from '../git/tracker.js';\nimport { isGitAvailable, isGitRepo, getChangedFiles } from '../git/utils.js';\n\n/**\n * Result of change detection, categorized by type of change\n */\nexport interface ChangeDetectionResult {\n added: string[]; // New files not in previous index\n modified: string[]; // Existing files that have been modified\n deleted: string[]; // Files that were indexed but no longer exist\n reason: 'mtime' | 'full' | 'git-state-changed'; // How changes were detected\n}\n\n/**\n * Detects which files have changed since last indexing.\n * Uses git state detection to handle branch switches, then falls back to mtime.\n * \n * @param rootDir - Root directory of the project\n * @param vectorDB - Initialized VectorDB instance\n * @param config - Lien configuration\n * @returns Change detection result\n */\nexport async function detectChanges(\n rootDir: string,\n vectorDB: VectorDB,\n config: LienConfig | LegacyLienConfig\n): Promise<ChangeDetectionResult> {\n const manifest = new ManifestManager(vectorDB.dbPath);\n const savedManifest = await manifest.load();\n \n // No manifest = first run = full index\n if (!savedManifest) {\n const allFiles = await getAllFiles(rootDir, config);\n return {\n added: allFiles,\n modified: [],\n deleted: [],\n reason: 'full',\n };\n }\n \n // Check if git state has changed (branch switch, new commits)\n // This is critical because git doesn't always update mtimes when checking out files\n const gitAvailable = await isGitAvailable();\n const isRepo = await isGitRepo(rootDir);\n \n if (gitAvailable && isRepo && savedManifest.gitState) {\n const gitTracker = new GitStateTracker(rootDir, vectorDB.dbPath);\n await gitTracker.initialize();\n \n const currentState = gitTracker.getState();\n \n // If branch or commit changed, use git to detect which files actually changed\n if (currentState && \n (currentState.branch !== savedManifest.gitState.branch ||\n currentState.commit !== savedManifest.gitState.commit)) {\n \n try {\n // Get files that changed between old and new commit using git diff\n 
const changedFilesPaths = await getChangedFiles(\n rootDir,\n savedManifest.gitState.commit,\n currentState.commit\n );\n const changedFilesSet = new Set(changedFilesPaths);\n \n // Get all current files to determine new files and deletions\n const allFiles = await getAllFiles(rootDir, config);\n const currentFileSet = new Set(allFiles);\n \n const added: string[] = [];\n const modified: string[] = [];\n const deleted: string[] = [];\n \n // Categorize changed files\n for (const filepath of changedFilesPaths) {\n if (currentFileSet.has(filepath)) {\n // File exists - check if it's new or modified\n if (savedManifest.files[filepath]) {\n modified.push(filepath);\n } else {\n added.push(filepath);\n }\n }\n // If file doesn't exist in current set, it will be caught by deletion logic below\n }\n \n // Find truly new files (not in git diff, but not in old manifest)\n for (const filepath of allFiles) {\n if (!savedManifest.files[filepath] && !changedFilesSet.has(filepath)) {\n added.push(filepath);\n }\n }\n \n // Compute deleted files: files in old manifest but not in new branch\n for (const filepath of Object.keys(savedManifest.files)) {\n if (!currentFileSet.has(filepath)) {\n deleted.push(filepath);\n }\n }\n \n return {\n added,\n modified,\n deleted,\n reason: 'git-state-changed',\n };\n } catch (error) {\n // If git diff fails, fall back to full reindex\n console.warn(`[Lien] Git diff failed, falling back to full reindex: ${error}`);\n const allFiles = await getAllFiles(rootDir, config);\n const currentFileSet = new Set(allFiles);\n \n const deleted: string[] = [];\n for (const filepath of Object.keys(savedManifest.files)) {\n if (!currentFileSet.has(filepath)) {\n deleted.push(filepath);\n }\n }\n \n return {\n added: allFiles,\n modified: [],\n deleted,\n reason: 'git-state-changed',\n };\n }\n }\n }\n \n // Use mtime-based detection for file-level changes\n return await mtimeBasedDetection(rootDir, savedManifest, config);\n}\n\n/**\n * Gets all files in the project based on configuration\n */\nasync function getAllFiles(\n rootDir: string,\n config: LienConfig | LegacyLienConfig\n): Promise<string[]> {\n if (isModernConfig(config) && config.frameworks.length > 0) {\n return await scanCodebaseWithFrameworks(rootDir, config);\n } else if (isLegacyConfig(config)) {\n return await scanCodebase({\n rootDir,\n includePatterns: config.indexing.include,\n excludePatterns: config.indexing.exclude,\n });\n } else {\n return await scanCodebase({\n rootDir,\n includePatterns: [],\n excludePatterns: [],\n });\n }\n}\n\n/**\n * Detects changes by comparing file modification times\n */\nasync function mtimeBasedDetection(\n rootDir: string,\n savedManifest: IndexManifest,\n config: LienConfig | LegacyLienConfig\n): Promise<ChangeDetectionResult> {\n const added: string[] = [];\n const modified: string[] = [];\n const deleted: string[] = [];\n \n // Get all current files\n const currentFiles = await getAllFiles(rootDir, config);\n const currentFileSet = new Set(currentFiles);\n \n // Get mtimes for all current files\n const fileStats = new Map<string, number>();\n \n for (const filepath of currentFiles) {\n try {\n const stats = await fs.stat(filepath);\n fileStats.set(filepath, stats.mtimeMs);\n } catch {\n // Ignore files we can't stat\n continue;\n }\n }\n \n // Check for new and modified files\n for (const [filepath, mtime] of fileStats) {\n const entry = savedManifest.files[filepath];\n \n if (!entry) {\n // New file\n added.push(filepath);\n } else if (entry.lastModified < mtime) {\n // File 
modified since last index\n modified.push(filepath);\n }\n }\n \n // Check for deleted files\n for (const filepath of Object.keys(savedManifest.files)) {\n if (!currentFileSet.has(filepath)) {\n deleted.push(filepath);\n }\n }\n \n return {\n added,\n modified,\n deleted,\n reason: 'mtime',\n };\n}\n\n","import fs from 'fs/promises';\nimport { chunkFile } from './chunker.js';\nimport { EmbeddingService } from '../embeddings/types.js';\nimport { VectorDB } from '../vectordb/lancedb.js';\nimport { LienConfig, LegacyLienConfig, isModernConfig, isLegacyConfig } from '../config/schema.js';\nimport { ManifestManager } from './manifest.js';\nimport { EMBEDDING_MICRO_BATCH_SIZE } from '../constants.js';\nimport { CodeChunk } from './types.js';\n\nexport interface IncrementalIndexOptions {\n verbose?: boolean;\n}\n\n/**\n * Result of processing a file's content into chunks and embeddings.\n */\ninterface ProcessFileResult {\n chunkCount: number;\n vectors: Float32Array[];\n chunks: CodeChunk[];\n texts: string[];\n}\n\n/**\n * Shared helper that processes file content into chunks and embeddings.\n * This is the core logic shared between indexSingleFile and indexMultipleFiles.\n * \n * Returns null for empty files (0 chunks), which callers should handle appropriately.\n * \n * @param filepath - Path to the file being processed\n * @param content - File content\n * @param embeddings - Embeddings service\n * @param config - Lien configuration\n * @param verbose - Whether to log verbose output\n * @returns ProcessFileResult for non-empty files, null for empty files\n */\nasync function processFileContent(\n filepath: string,\n content: string,\n embeddings: EmbeddingService,\n config: LienConfig | LegacyLienConfig,\n verbose: boolean\n): Promise<ProcessFileResult | null> {\n // Get chunk settings (support both v0.3.0 and legacy v0.2.0 configs)\n const chunkSize = isModernConfig(config)\n ? config.core.chunkSize\n : (isLegacyConfig(config) ? config.indexing.chunkSize : 75);\n const chunkOverlap = isModernConfig(config)\n ? config.core.chunkOverlap\n : (isLegacyConfig(config) ? config.indexing.chunkOverlap : 10);\n const useAST = isModernConfig(config)\n ? config.chunking.useAST\n : true;\n const astFallback = isModernConfig(config)\n ? 
config.chunking.astFallback\n : 'line-based';\n \n // Chunk the file\n const chunks = chunkFile(filepath, content, {\n chunkSize,\n chunkOverlap,\n useAST,\n astFallback,\n });\n \n if (chunks.length === 0) {\n // Empty file - return null so caller can handle appropriately\n if (verbose) {\n console.error(`[Lien] Empty file: ${filepath}`);\n }\n return null;\n }\n \n // Generate embeddings for all chunks\n // Use micro-batching to prevent event loop blocking\n const texts = chunks.map(c => c.content);\n const vectors: Float32Array[] = [];\n \n for (let j = 0; j < texts.length; j += EMBEDDING_MICRO_BATCH_SIZE) {\n const microBatch = texts.slice(j, Math.min(j + EMBEDDING_MICRO_BATCH_SIZE, texts.length));\n const microResults = await embeddings.embedBatch(microBatch);\n vectors.push(...microResults);\n \n // Yield to event loop for responsiveness\n if (texts.length > EMBEDDING_MICRO_BATCH_SIZE) {\n await new Promise(resolve => setImmediate(resolve));\n }\n }\n \n return {\n chunkCount: chunks.length,\n vectors,\n chunks,\n texts,\n };\n}\n\n/**\n * Indexes a single file incrementally by updating its chunks in the vector database.\n * This is the core function for incremental reindexing - it handles file changes,\n * deletions, and additions.\n * \n * @param filepath - Absolute path to the file to index\n * @param vectorDB - Initialized VectorDB instance\n * @param embeddings - Initialized embeddings service\n * @param config - Lien configuration\n * @param options - Optional settings\n */\nexport async function indexSingleFile(\n filepath: string,\n vectorDB: VectorDB,\n embeddings: EmbeddingService,\n config: LienConfig | LegacyLienConfig,\n options: IncrementalIndexOptions = {}\n): Promise<void> {\n const { verbose } = options;\n \n try {\n // Check if file exists\n try {\n await fs.access(filepath);\n } catch {\n // File doesn't exist - delete from index and manifest\n if (verbose) {\n console.error(`[Lien] File deleted: ${filepath}`);\n }\n await vectorDB.deleteByFile(filepath);\n \n const manifest = new ManifestManager(vectorDB.dbPath);\n await manifest.removeFile(filepath);\n return;\n }\n \n // Read file content\n const content = await fs.readFile(filepath, 'utf-8');\n \n // Process file content (chunking + embeddings) - shared logic\n const result = await processFileContent(filepath, content, embeddings, config, verbose || false);\n \n // Get actual file mtime for manifest\n const stats = await fs.stat(filepath);\n const manifest = new ManifestManager(vectorDB.dbPath);\n \n if (result === null) {\n // Empty file - remove from vector DB but keep in manifest with chunkCount: 0\n await vectorDB.deleteByFile(filepath);\n await manifest.updateFile(filepath, {\n filepath,\n lastModified: stats.mtimeMs,\n chunkCount: 0,\n });\n return;\n }\n \n // Non-empty file - update in database (atomic: delete old + insert new)\n await vectorDB.updateFile(\n filepath,\n result.vectors,\n result.chunks.map(c => c.metadata),\n result.texts\n );\n \n // Update manifest after successful indexing\n await manifest.updateFile(filepath, {\n filepath,\n lastModified: stats.mtimeMs,\n chunkCount: result.chunkCount,\n });\n \n if (verbose) {\n console.error(`[Lien] ✓ Updated ${filepath} (${result.chunkCount} chunks)`);\n }\n } catch (error) {\n // Log error but don't throw - we want to continue with other files\n console.error(`[Lien] ⚠️ Failed to index ${filepath}: ${error}`);\n }\n}\n\n/**\n * Indexes multiple files incrementally.\n * Processes files sequentially for simplicity and reliability.\n * \n * Note: 
This function counts both successfully indexed files AND successfully\n * handled deletions (files that don't exist but were removed from the index).\n * \n * @param filepaths - Array of absolute file paths to index\n * @param vectorDB - Initialized VectorDB instance\n * @param embeddings - Initialized embeddings service\n * @param config - Lien configuration\n * @param options - Optional settings\n * @returns Number of successfully processed files (indexed or deleted)\n */\nexport async function indexMultipleFiles(\n filepaths: string[],\n vectorDB: VectorDB,\n embeddings: EmbeddingService,\n config: LienConfig | LegacyLienConfig,\n options: IncrementalIndexOptions = {}\n): Promise<number> {\n const { verbose } = options;\n let processedCount = 0;\n \n // Batch manifest updates for performance\n const manifestEntries: Array<{ filepath: string; chunkCount: number; mtime: number }> = [];\n \n // Process each file sequentially (simple and reliable)\n for (const filepath of filepaths) {\n // Try to read the file and get its stats\n let content: string;\n let fileMtime: number;\n try {\n const stats = await fs.stat(filepath);\n fileMtime = stats.mtimeMs;\n content = await fs.readFile(filepath, 'utf-8');\n } catch (error) {\n // File doesn't exist or couldn't be read - delete from index\n if (verbose) {\n console.error(`[Lien] File not readable: ${filepath}`);\n }\n try {\n await vectorDB.deleteByFile(filepath);\n const manifest = new ManifestManager(vectorDB.dbPath);\n await manifest.removeFile(filepath);\n } catch (error) {\n // Ignore errors if file wasn't in index\n if (verbose) {\n console.error(`[Lien] Note: ${filepath} not in index`);\n }\n }\n // Count as successfully processed (we handled the deletion)\n processedCount++;\n continue;\n }\n \n try {\n // Process file content (chunking + embeddings) - shared logic\n const result = await processFileContent(filepath, content, embeddings, config, verbose || false);\n \n if (result === null) {\n // Empty file - remove from vector DB but keep in manifest with chunkCount: 0\n try {\n await vectorDB.deleteByFile(filepath);\n } catch (error) {\n // Ignore errors if file wasn't in index\n }\n \n // Update manifest immediately for empty files (not batched)\n const manifest = new ManifestManager(vectorDB.dbPath);\n await manifest.updateFile(filepath, {\n filepath,\n lastModified: fileMtime,\n chunkCount: 0,\n });\n \n // Count as successful processing (handled empty file)\n processedCount++;\n continue;\n }\n \n // Non-empty file - delete old chunks if they exist\n try {\n await vectorDB.deleteByFile(filepath);\n } catch (error) {\n // Ignore - file might not be in index yet\n }\n \n // Insert new chunks\n await vectorDB.insertBatch(\n result.vectors,\n result.chunks.map(c => c.metadata),\n result.texts\n );\n \n // Queue manifest update (batch at end) with actual file mtime\n manifestEntries.push({\n filepath,\n chunkCount: result.chunkCount,\n mtime: fileMtime,\n });\n \n if (verbose) {\n console.error(`[Lien] ✓ Updated ${filepath} (${result.chunkCount} chunks)`);\n }\n \n processedCount++;\n } catch (error) {\n // Log error but don't throw - we want to continue with other files\n console.error(`[Lien] ⚠️ Failed to index ${filepath}: ${error}`);\n }\n }\n \n // Batch update manifest at the end (much faster than updating after each file)\n if (manifestEntries.length > 0) {\n const manifest = new ManifestManager(vectorDB.dbPath);\n await manifest.updateFiles(\n manifestEntries.map(entry => ({\n filepath: entry.filepath,\n lastModified: entry.mtime, 
// Use actual file mtime for accurate change detection\n chunkCount: entry.chunkCount,\n }))\n );\n }\n \n return processedCount;\n}\n\n","/**\n * Witty loading messages to keep users entertained during long operations.\n * Inspired by tools like npm, yarn, and other personality-driven CLIs.\n */\n\nconst INDEXING_MESSAGES = [\n 'Teaching AI to read your spaghetti code...',\n 'Convincing the LLM that your variable names make sense...',\n 'Indexing your TODO comments (so many TODOs)...',\n 'Building semantic links faster than you can say \"grep\"...',\n 'Making your codebase searchable (the good, the bad, and the ugly)...',\n 'Chunking code like a boss...',\n \"Feeding your code to the neural network (it's hungry)...\",\n \"Creating embeddings (it's like compression, but fancier)...\",\n 'Teaching machines to understand your midnight commits...',\n 'Vectorizing your technical debt...',\n \"Indexing... because Ctrl+F wasn't cutting it anymore...\",\n 'Making semantic connections (unlike your last refactor)...',\n 'Processing files faster than your CI pipeline...',\n 'Embedding wisdom from your comments (all 3 of them)...',\n 'Analyzing code semantics (yes, even that one function)...',\n 'Building search index (now with 100% more AI)...',\n \"Crunching vectors like it's nobody's business...\",\n 'Linking code fragments across the spacetime continuum...',\n 'Teaching transformers about your coding style...',\n 'Preparing for semantic search domination...',\n 'Indexing your genius (and that hacky workaround from 2019)...',\n \"Making your codebase AI-readable (you're welcome, future you)...\",\n 'Converting code to math (engineers love this trick)...',\n \"Building the neural net's mental model of your app...\",\n 'Chunking files like a lumberjack, but for code...',\n];\n\nconst EMBEDDING_MESSAGES = [\n 'Generating embeddings (math is happening)...',\n 'Teaching transformers about your forEach loops...',\n 'Converting code to 384-dimensional space (wild, right?)...',\n 'Running the neural network (the Matrix, but for code)...',\n 'Creating semantic vectors (fancy word for AI magic)...',\n 'Embedding your code into hyperspace...',\n 'Teaching the model what \"clean code\" means in your codebase...',\n 'Generating vectors faster than you can say \"AI\"...',\n 'Making math from your methods...',\n 'Transforming code into numbers (the AI way)...',\n 'Processing with transformers.js (yes, it runs locally!)...',\n \"Embedding semantics (your code's hidden meaning)...\",\n 'Vectorizing variables (alliteration achieved)...',\n 'Teaching AI the difference between foo and bar...',\n 'Creating embeddings (384 dimensions of awesome)...',\n];\n\nconst MODEL_LOADING_MESSAGES = [\n 'Waking up the neural network...',\n 'Loading transformer model (patience, young padawan)...',\n 'Downloading AI brain (first run only, promise!)...',\n 'Initializing the semantic search engine...',\n 'Booting up the language model (coffee break recommended)...',\n 'Loading 100MB of pure AI goodness...',\n 'Preparing the transformer for action...',\n 'Model loading (this is why we run locally)...',\n 'Spinning up the embedding generator...',\n 'Getting the AI ready for your codebase...',\n];\n\nlet currentIndexingIndex = 0;\nlet currentEmbeddingIndex = 0;\nlet currentModelIndex = 0;\n\n/**\n * Get the next witty message for the indexing process.\n * Messages are returned sequentially in a round-robin fashion.\n */\nexport function getIndexingMessage(): string {\n const message = INDEXING_MESSAGES[currentIndexingIndex % 
INDEXING_MESSAGES.length];\n currentIndexingIndex++;\n return message;\n}\n\n/**\n * Get the next witty message for the embedding generation process.\n * Messages are returned sequentially in a round-robin fashion.\n */\nexport function getEmbeddingMessage(): string {\n const message = EMBEDDING_MESSAGES[currentEmbeddingIndex % EMBEDDING_MESSAGES.length];\n currentEmbeddingIndex++;\n return message;\n}\n\n/**\n * Get the next witty message for the model loading process.\n * Messages are returned sequentially in a round-robin fashion.\n */\nexport function getModelLoadingMessage(): string {\n const message = MODEL_LOADING_MESSAGES[currentModelIndex % MODEL_LOADING_MESSAGES.length];\n currentModelIndex++;\n return message;\n}\n\n/**\n * Reset all message counters (useful for testing)\n */\nexport function resetMessageCounters(): void {\n currentIndexingIndex = 0;\n currentEmbeddingIndex = 0;\n currentModelIndex = 0;\n}\n\n","import fs from 'fs/promises';\nimport ora from 'ora';\nimport chalk from 'chalk';\nimport pLimit from 'p-limit';\nimport { scanCodebase, scanCodebaseWithFrameworks } from './scanner.js';\nimport { chunkFile } from './chunker.js';\nimport { LocalEmbeddings } from '../embeddings/local.js';\nimport { VectorDB } from '../vectordb/lancedb.js';\nimport { configService } from '../config/service.js';\nimport { CodeChunk } from './types.js';\nimport { writeVersionFile } from '../vectordb/version.js';\nimport { isLegacyConfig, isModernConfig } from '../config/schema.js';\nimport { ManifestManager } from './manifest.js';\nimport { detectChanges } from './change-detector.js';\nimport { indexMultipleFiles } from './incremental.js';\nimport { getIndexingMessage, getEmbeddingMessage, getModelLoadingMessage } from '../utils/loading-messages.js';\nimport { EMBEDDING_MICRO_BATCH_SIZE } from '../constants.js';\n\nexport interface IndexingOptions {\n rootDir?: string;\n verbose?: boolean;\n force?: boolean; // Force full reindex, skip incremental\n}\n\ninterface ChunkWithContent {\n chunk: CodeChunk;\n content: string;\n}\n\nexport async function indexCodebase(options: IndexingOptions = {}): Promise<void> {\n const rootDir = options.rootDir ?? process.cwd();\n const spinner = ora('Starting indexing process...').start();\n let updateInterval: NodeJS.Timeout | undefined;\n \n try {\n // 1. Load configuration\n spinner.text = 'Loading configuration...';\n const config = await configService.load(rootDir);\n \n // 1.5. Initialize vector database early (needed for manifest)\n spinner.text = 'Initializing vector database...';\n const vectorDB = new VectorDB(rootDir);\n await vectorDB.initialize();\n \n // 1.6. 
Try incremental indexing if manifest exists and not forced\n if (!options.force) {\n spinner.text = 'Checking for changes...';\n const manifest = new ManifestManager(vectorDB.dbPath);\n const savedManifest = await manifest.load();\n \n if (savedManifest) {\n // Detect changes using mtime\n const changes = await detectChanges(rootDir, vectorDB, config);\n \n if (changes.reason !== 'full') {\n const totalChanges = changes.added.length + changes.modified.length;\n const totalDeleted = changes.deleted.length;\n \n if (totalChanges === 0 && totalDeleted === 0) {\n spinner.succeed('No changes detected - index is up to date!');\n return;\n }\n \n spinner.succeed(\n `Detected changes: ${totalChanges} files to index, ${totalDeleted} to remove (${changes.reason} detection)`\n );\n \n // Initialize embeddings for incremental update\n spinner.start(getModelLoadingMessage());\n const embeddings = new LocalEmbeddings();\n await embeddings.initialize();\n spinner.succeed('Embedding model loaded');\n \n // Handle deletions\n if (totalDeleted > 0) {\n spinner.start(`Removing ${totalDeleted} deleted files...`);\n let removedCount = 0;\n for (const filepath of changes.deleted) {\n try {\n await vectorDB.deleteByFile(filepath);\n await manifest.removeFile(filepath);\n removedCount++;\n } catch (err) {\n spinner.warn(`Failed to remove file \"${filepath}\": ${err instanceof Error ? err.message : String(err)}`);\n }\n }\n spinner.succeed(`Removed ${removedCount}/${totalDeleted} deleted files`);\n }\n \n // Handle additions and modifications\n if (totalChanges > 0) {\n spinner.start(`Reindexing ${totalChanges} changed files...`);\n const filesToIndex = [...changes.added, ...changes.modified];\n const count = await indexMultipleFiles(\n filesToIndex,\n vectorDB,\n embeddings,\n config,\n { verbose: options.verbose }\n );\n \n // Update version file to trigger MCP reconnection\n await writeVersionFile(vectorDB.dbPath);\n \n spinner.succeed(\n `Incremental reindex complete: ${count}/${totalChanges} files indexed successfully`\n );\n }\n \n // Update git state after incremental indexing (for branch switch detection)\n const { isGitAvailable, isGitRepo } = await import('../git/utils.js');\n const { GitStateTracker } = await import('../git/tracker.js');\n const gitAvailable = await isGitAvailable();\n const isRepo = await isGitRepo(rootDir);\n \n if (gitAvailable && isRepo) {\n const gitTracker = new GitStateTracker(rootDir, vectorDB.dbPath);\n await gitTracker.initialize();\n const gitState = gitTracker.getState();\n if (gitState) {\n // Reuse existing manifest instance\n await manifest.updateGitState(gitState);\n }\n }\n \n console.log(chalk.dim('\\nNext step: Run'), chalk.bold('lien serve'), chalk.dim('to start the MCP server'));\n return; // Exit early - incremental index complete!\n }\n \n // If we get here, changes.reason === 'full', so continue with full index below\n spinner.text = 'Full reindex required...';\n }\n } else {\n spinner.text = 'Force flag enabled, performing full reindex...';\n }\n \n // 2. 
Scan for files (framework-aware if frameworks configured)\n spinner.text = 'Scanning codebase...';\n let files: string[];\n \n if (isModernConfig(config) && config.frameworks.length > 0) {\n // Use framework-aware scanning for new configs\n files = await scanCodebaseWithFrameworks(rootDir, config);\n } else if (isLegacyConfig(config)) {\n // Fall back to legacy scanning for old configs\n files = await scanCodebase({\n rootDir,\n includePatterns: config.indexing.include,\n excludePatterns: config.indexing.exclude,\n });\n } else {\n // Modern config with no frameworks - use empty patterns\n files = await scanCodebase({\n rootDir,\n includePatterns: [],\n excludePatterns: [],\n });\n }\n \n if (files.length === 0) {\n spinner.fail('No files found to index');\n return;\n }\n \n spinner.text = `Found ${files.length} files`;\n \n // 3. Initialize embeddings model\n spinner.text = getModelLoadingMessage();\n const embeddings = new LocalEmbeddings();\n await embeddings.initialize();\n spinner.succeed('Embedding model loaded');\n \n // 5. Process files concurrently\n const concurrency = isModernConfig(config) \n ? config.core.concurrency \n : 4;\n const embeddingBatchSize = isModernConfig(config)\n ? config.core.embeddingBatchSize\n : 50;\n // Use smaller batch size to keep UI responsive (process more frequently)\n const vectorDBBatchSize = 100;\n \n spinner.start(`Processing files with ${concurrency}x concurrency...`);\n \n const startTime = Date.now();\n let processedFiles = 0;\n let processedChunks = 0;\n \n // Accumulator for chunks across multiple files\n const chunkAccumulator: ChunkWithContent[] = [];\n const limit = pLimit(concurrency);\n \n // Track successfully indexed files for manifest\n const indexedFileEntries: Array<{ filepath: string; chunkCount: number; mtime: number }> = [];\n \n // Shared state for progress updates (decoupled from actual work)\n const progressState = {\n processedFiles: 0,\n totalFiles: files.length,\n wittyMessage: getIndexingMessage(),\n };\n \n // Start a periodic timer to update the spinner independently\n const SPINNER_UPDATE_INTERVAL_MS = 200; // How often to update spinner\n const MESSAGE_ROTATION_INTERVAL_MS = 8000; // How often to rotate message\n const MESSAGE_ROTATION_TICKS = Math.floor(MESSAGE_ROTATION_INTERVAL_MS / SPINNER_UPDATE_INTERVAL_MS);\n \n let spinnerTick = 0;\n updateInterval = setInterval(() => {\n // Rotate witty message periodically\n spinnerTick++;\n if (spinnerTick >= MESSAGE_ROTATION_TICKS) {\n progressState.wittyMessage = getIndexingMessage();\n spinnerTick = 0; // Reset counter to prevent unbounded growth\n }\n \n spinner.text = `${progressState.processedFiles}/${progressState.totalFiles} files | ${progressState.wittyMessage}`;\n }, SPINNER_UPDATE_INTERVAL_MS);\n \n // Function to process accumulated chunks\n const processAccumulatedChunks = async () => {\n if (chunkAccumulator.length === 0) return;\n \n const toProcess = chunkAccumulator.splice(0, chunkAccumulator.length);\n \n // Process embeddings in smaller batches AND insert incrementally to keep UI responsive\n for (let i = 0; i < toProcess.length; i += embeddingBatchSize) {\n const batch = toProcess.slice(i, Math.min(i + embeddingBatchSize, toProcess.length));\n \n // Update shared state (spinner updates automatically via interval)\n progressState.wittyMessage = getEmbeddingMessage();\n \n // Process embeddings in micro-batches to prevent event loop blocking\n // Transformers.js is CPU-intensive, so we yield control periodically\n const texts = batch.map(item => 
item.content);\n const embeddingVectors: Float32Array[] = [];\n \n for (let j = 0; j < texts.length; j += EMBEDDING_MICRO_BATCH_SIZE) {\n const microBatch = texts.slice(j, Math.min(j + EMBEDDING_MICRO_BATCH_SIZE, texts.length));\n const microResults = await embeddings.embedBatch(microBatch);\n embeddingVectors.push(...microResults);\n \n // Yield to event loop so spinner can update\n await new Promise(resolve => setImmediate(resolve));\n }\n \n processedChunks += batch.length;\n \n // Update state before DB insertion\n progressState.wittyMessage = `Inserting ${batch.length} chunks into vector space...`;\n \n await vectorDB.insertBatch(\n embeddingVectors,\n batch.map(item => item.chunk.metadata),\n texts\n );\n \n // Yield after DB insertion too\n await new Promise(resolve => setImmediate(resolve));\n }\n \n progressState.wittyMessage = getIndexingMessage();\n };\n \n // Process files with concurrency limit\n const filePromises = files.map((file) =>\n limit(async () => {\n try {\n // Get file stats to capture actual modification time\n const stats = await fs.stat(file);\n const content = await fs.readFile(file, 'utf-8');\n const chunkSize = isModernConfig(config)\n ? config.core.chunkSize\n : 75;\n const chunkOverlap = isModernConfig(config)\n ? config.core.chunkOverlap\n : 10;\n const useAST = isModernConfig(config)\n ? config.chunking.useAST\n : true;\n const astFallback = isModernConfig(config)\n ? config.chunking.astFallback\n : 'line-based';\n \n const chunks = chunkFile(file, content, {\n chunkSize,\n chunkOverlap,\n useAST,\n astFallback,\n });\n \n if (chunks.length === 0) {\n processedFiles++;\n progressState.processedFiles = processedFiles;\n return;\n }\n \n // Add chunks to accumulator\n for (const chunk of chunks) {\n chunkAccumulator.push({\n chunk,\n content: chunk.content,\n });\n }\n \n // Track this file for manifest with actual file mtime\n indexedFileEntries.push({\n filepath: file,\n chunkCount: chunks.length,\n mtime: stats.mtimeMs,\n });\n \n processedFiles++;\n progressState.processedFiles = processedFiles;\n \n // Process when batch is large enough (use smaller batch for responsiveness)\n if (chunkAccumulator.length >= vectorDBBatchSize) {\n await processAccumulatedChunks();\n }\n } catch (error) {\n if (options.verbose) {\n console.error(chalk.yellow(`\\n⚠️ Skipping ${file}: ${error}`));\n }\n processedFiles++;\n progressState.processedFiles = processedFiles;\n }\n })\n );\n \n // Wait for all files to be processed\n await Promise.all(filePromises);\n \n // Process remaining chunks\n progressState.wittyMessage = 'Processing final chunks...';\n await processAccumulatedChunks();\n \n // Stop the progress update interval\n clearInterval(updateInterval);\n \n // Save manifest with all indexed files\n spinner.start('Saving index manifest...');\n const manifest = new ManifestManager(vectorDB.dbPath);\n await manifest.updateFiles(\n indexedFileEntries.map(entry => ({\n filepath: entry.filepath,\n lastModified: entry.mtime, // Use actual file mtime for accurate change detection\n chunkCount: entry.chunkCount,\n }))\n );\n \n // Save git state if in a git repo (for branch switch detection)\n const { isGitAvailable, isGitRepo } = await import('../git/utils.js');\n const { GitStateTracker } = await import('../git/tracker.js');\n const gitAvailable = await isGitAvailable();\n const isRepo = await isGitRepo(rootDir);\n \n if (gitAvailable && isRepo) {\n const gitTracker = new GitStateTracker(rootDir, vectorDB.dbPath);\n await gitTracker.initialize();\n const gitState = 
gitTracker.getState();\n if (gitState) {\n await manifest.updateGitState(gitState);\n }\n }\n \n spinner.succeed('Manifest saved');\n \n // Write version file to mark successful completion\n // This allows the MCP server to detect when reindexing is complete\n await writeVersionFile(vectorDB.dbPath);\n \n const totalTime = ((Date.now() - startTime) / 1000).toFixed(1);\n spinner.succeed(\n `Indexed ${processedFiles} files (${processedChunks} chunks) in ${totalTime}s using ${concurrency}x concurrency`\n );\n \n console.log(chalk.dim('\\nNext step: Run'), chalk.bold('lien serve'), chalk.dim('to start the MCP server'));\n } catch (error) {\n // Make sure to clear interval on error too\n if (updateInterval) {\n clearInterval(updateInterval);\n }\n spinner.fail(`Indexing failed: ${error}`);\n throw error;\n }\n}\n\n","import { Command } from 'commander';\nimport { createRequire } from 'module';\nimport { fileURLToPath } from 'url';\nimport { dirname, join } from 'path';\nimport { initCommand } from './init.js';\nimport { statusCommand } from './status.js';\nimport { indexCommand } from './index-cmd.js';\nimport { serveCommand } from './serve.js';\n\n// Get version from package.json dynamically\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\nconst require = createRequire(import.meta.url);\n\nlet packageJson;\ntry {\n packageJson = require(join(__dirname, '../package.json'));\n} catch {\n packageJson = require(join(__dirname, '../../package.json'));\n}\n\nexport const program = new Command();\n\nprogram\n .name('lien')\n .description('Local semantic code search for AI assistants via MCP')\n .version(packageJson.version);\n\nprogram\n .command('init')\n .description('Initialize Lien in the current directory')\n .option('-u, --upgrade', 'Upgrade existing config with new options')\n .option('-y, --yes', 'Skip interactive prompts and use defaults')\n .option('-p, --path <path>', 'Path to initialize (defaults to current directory)')\n .action(initCommand);\n\nprogram\n .command('index')\n .description('Index the codebase for semantic search')\n .option('-f, --force', 'Force full reindex (skip incremental)')\n .option('-w, --watch', 'Watch for changes and re-index automatically')\n .option('-v, --verbose', 'Show detailed logging during indexing')\n .action(indexCommand);\n\nprogram\n .command('serve')\n .description('Start the MCP server for Cursor integration')\n .option('-p, --port <port>', 'Port number (for future use)', '7133')\n .option('--no-watch', 'Disable file watching for this session')\n .option('-w, --watch', '[DEPRECATED] File watching is now enabled by default')\n .option('-r, --root <path>', 'Root directory to serve (defaults to current directory)')\n .action(serveCommand);\n\nprogram\n .command('status')\n .description('Show indexing status and statistics')\n .action(statusCommand);\n\n","import fs from 'fs/promises';\nimport path from 'path';\nimport { fileURLToPath } from 'url';\nimport chalk from 'chalk';\nimport inquirer from 'inquirer';\nimport { defaultConfig, LienConfig, FrameworkInstance, FrameworkConfig } from '../config/schema.js';\nimport { showCompactBanner } from '../utils/banner.js';\nimport { MigrationManager } from '../config/migration-manager.js';\nimport { detectAllFrameworks } from '../frameworks/detector-service.js';\nimport { getFrameworkDetector } from '../frameworks/registry.js';\n\n// ES module equivalent of __dirname\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = 
path.dirname(__filename);\n\nexport interface InitOptions {\n upgrade?: boolean;\n yes?: boolean;\n path?: string;\n}\n\nexport async function initCommand(options: InitOptions = {}) {\n const rootDir = options.path || process.cwd();\n const configPath = path.join(rootDir, '.lien.config.json');\n \n try {\n // Check if config already exists\n let configExists = false;\n try {\n await fs.access(configPath);\n configExists = true;\n } catch {\n // File doesn't exist\n }\n \n // Handle upgrade scenario\n if (configExists && options.upgrade) {\n const migrationManager = new MigrationManager(rootDir);\n await migrationManager.upgradeInteractive();\n return;\n }\n \n // Warn if config exists and not upgrading\n if (configExists && !options.upgrade) {\n console.log(chalk.yellow('⚠️ .lien.config.json already exists'));\n console.log(chalk.dim('Run'), chalk.bold('lien init --upgrade'), chalk.dim('to merge new config options'));\n return;\n }\n \n // Create new config with framework detection\n if (!configExists) {\n await createNewConfig(rootDir, options);\n }\n } catch (error) {\n console.error(chalk.red('Error creating config file:'), error);\n process.exit(1);\n }\n}\n\nasync function createNewConfig(rootDir: string, options: InitOptions) {\n // Show banner for new initialization\n showCompactBanner();\n console.log(chalk.bold('Initializing Lien...\\n'));\n \n // 1. Run framework detection\n console.log(chalk.dim('🔍 Detecting frameworks in'), chalk.bold(rootDir));\n const detections = await detectAllFrameworks(rootDir);\n \n let frameworks: FrameworkInstance[] = [];\n \n if (detections.length === 0) {\n console.log(chalk.yellow('\\n⚠️ No frameworks detected'));\n \n if (!options.yes) {\n const { useGeneric } = await inquirer.prompt([\n {\n type: 'confirm',\n name: 'useGeneric',\n message: 'Create a generic config (index all supported file types)?',\n default: true,\n },\n ]);\n \n if (!useGeneric) {\n console.log(chalk.dim('Aborted.'));\n return;\n }\n }\n \n // Create generic framework\n frameworks.push({\n name: 'generic',\n path: '.',\n enabled: true,\n config: {\n include: ['**/*.{ts,tsx,js,jsx,py,go,rs,java,c,cpp,cs}'],\n exclude: [\n '**/node_modules/**',\n '**/dist/**',\n '**/build/**',\n '**/.git/**',\n '**/coverage/**',\n '**/.next/**',\n '**/.nuxt/**',\n '**/vendor/**',\n ],\n },\n });\n } else {\n // 2. Display detected frameworks\n console.log(chalk.green(`\\n✓ Found ${detections.length} framework(s):\\n`));\n \n for (const det of detections) {\n const pathDisplay = det.path === '.' ? 'root' : det.path;\n console.log(chalk.bold(` ${det.name}`), chalk.dim(`(${det.confidence} confidence)`));\n console.log(chalk.dim(` Location: ${pathDisplay}`));\n \n if (det.evidence.length > 0) {\n det.evidence.forEach((e) => {\n console.log(chalk.dim(` • ${e}`));\n });\n }\n console.log();\n }\n \n // 3. Interactive confirmation\n if (!options.yes) {\n const { confirm } = await inquirer.prompt([\n {\n type: 'confirm',\n name: 'confirm',\n message: 'Configure these frameworks?',\n default: true,\n },\n ]);\n \n if (!confirm) {\n console.log(chalk.dim('Aborted.'));\n return;\n }\n }\n \n // 4. 
Generate configs for each detected framework\n for (const det of detections) {\n const detector = getFrameworkDetector(det.name);\n if (!detector) {\n console.warn(chalk.yellow(`⚠️ No detector found for ${det.name}, skipping`));\n continue;\n }\n \n // Generate default config\n const frameworkConfig = await detector.generateConfig(rootDir, det.path);\n \n // Optional: Ask to customize (only in interactive mode)\n let shouldCustomize = false;\n if (!options.yes) {\n const { customize } = await inquirer.prompt([\n {\n type: 'confirm',\n name: 'customize',\n message: `Customize ${det.name} settings?`,\n default: false,\n },\n ]);\n shouldCustomize = customize;\n }\n \n let finalConfig = frameworkConfig;\n if (shouldCustomize) {\n const customized = await promptForCustomization(det.name, frameworkConfig);\n finalConfig = { ...frameworkConfig, ...customized };\n } else {\n const pathDisplay = det.path === '.' ? 'root' : det.path;\n console.log(chalk.dim(` → Using defaults for ${det.name} at ${pathDisplay}`));\n }\n \n frameworks.push({\n name: det.name,\n path: det.path,\n enabled: true,\n config: finalConfig,\n });\n }\n }\n \n // 5. Ask about Cursor rules installation\n if (!options.yes) {\n const { installCursorRules } = await inquirer.prompt([\n {\n type: 'confirm',\n name: 'installCursorRules',\n message: 'Install recommended Cursor rules?',\n default: true,\n },\n ]);\n \n if (installCursorRules) {\n try {\n const cursorRulesDir = path.join(rootDir, '.cursor');\n await fs.mkdir(cursorRulesDir, { recursive: true });\n \n // Find template - it's in the package root (same dir as package.json)\n // When compiled: everything bundles to dist/index.js, so __dirname is dist/\n // Go up one level from dist/ to reach package root\n const templatePath = path.join(__dirname, '../CURSOR_RULES_TEMPLATE.md');\n \n const rulesPath = path.join(cursorRulesDir, 'rules');\n let targetPath: string;\n let isDirectory = false;\n let isFile = false;\n\n try {\n const stats = await fs.stat(rulesPath);\n isDirectory = stats.isDirectory();\n isFile = stats.isFile();\n } catch {\n // Doesn't exist, that's fine\n }\n\n if (isDirectory) {\n // .cursor/rules is already a directory, create lien.mdc inside it\n targetPath = path.join(rulesPath, 'lien.mdc');\n await fs.copyFile(templatePath, targetPath);\n console.log(chalk.green('✓ Installed Cursor rules as .cursor/rules/lien.mdc'));\n } else if (isFile) {\n // .cursor/rules exists as a file - ask to convert to directory structure\n const { convertToDir } = await inquirer.prompt([\n {\n type: 'confirm',\n name: 'convertToDir',\n message: 'Existing .cursor/rules file found. Convert to directory and preserve your rules?',\n default: true,\n },\n ]);\n\n if (convertToDir) {\n // Convert file to directory structure\n // 1. Read existing rules\n const existingRules = await fs.readFile(rulesPath, 'utf-8');\n // 2. Delete the file\n await fs.unlink(rulesPath);\n // 3. Create rules as a directory\n await fs.mkdir(rulesPath);\n // 4. Save original rules as project.mdc\n await fs.writeFile(path.join(rulesPath, 'project.mdc'), existingRules);\n // 5. 
Add Lien rules as lien.mdc\n await fs.copyFile(templatePath, path.join(rulesPath, 'lien.mdc'));\n console.log(chalk.green('✓ Converted .cursor/rules to directory'));\n console.log(chalk.green(' - Your project rules: .cursor/rules/project.mdc'));\n console.log(chalk.green(' - Lien rules: .cursor/rules/lien.mdc'));\n } else {\n console.log(chalk.dim('Skipped Cursor rules installation (preserving existing file)'));\n }\n } else {\n // .cursor/rules doesn't exist, create it as a directory\n await fs.mkdir(rulesPath, { recursive: true });\n targetPath = path.join(rulesPath, 'lien.mdc');\n await fs.copyFile(templatePath, targetPath);\n console.log(chalk.green('✓ Installed Cursor rules as .cursor/rules/lien.mdc'));\n }\n } catch (error) {\n console.log(chalk.yellow('⚠️ Could not install Cursor rules'));\n console.log(chalk.dim(`Error: ${error instanceof Error ? error.message : 'Unknown error'}`));\n console.log(chalk.dim('You can manually copy CURSOR_RULES_TEMPLATE.md to .cursor/rules/lien.mdc'));\n }\n }\n }\n \n // 6. Build final config\n const config: LienConfig = {\n ...defaultConfig,\n frameworks,\n };\n \n // 7. Write config\n const configPath = path.join(rootDir, '.lien.config.json');\n await fs.writeFile(configPath, JSON.stringify(config, null, 2) + '\\n', 'utf-8');\n \n // 8. Show success message\n console.log(chalk.green('\\n✓ Created .lien.config.json'));\n console.log(chalk.green(`✓ Configured ${frameworks.length} framework(s)`));\n console.log(chalk.dim('\\nNext steps:'));\n console.log(chalk.dim(' 1. Run'), chalk.bold('lien index'), chalk.dim('to index your codebase'));\n console.log(chalk.dim(' 2. Run'), chalk.bold('lien serve'), chalk.dim('to start the MCP server'));\n console.log(chalk.dim(' 3. Configure Cursor to use the MCP server (see README.md)'));\n}\n\nasync function promptForCustomization(frameworkName: string, config: FrameworkConfig): Promise<Partial<FrameworkConfig>> {\n console.log(chalk.bold(`\\nCustomizing ${frameworkName} settings:`));\n \n const answers = await inquirer.prompt([\n {\n type: 'input',\n name: 'include',\n message: 'File patterns to include (comma-separated):',\n default: config.include.join(', '),\n filter: (input: string) => input.split(',').map(s => s.trim()),\n },\n {\n type: 'input',\n name: 'exclude',\n message: 'File patterns to exclude (comma-separated):',\n default: config.exclude.join(', '),\n filter: (input: string) => input.split(',').map(s => s.trim()),\n },\n ]);\n \n return {\n include: answers.include,\n exclude: answers.exclude,\n };\n}\n\n// Removed: upgradeConfig function is now handled by MigrationManager.upgradeInteractive()\n","import figlet from 'figlet';\nimport chalk from 'chalk';\nimport { createRequire } from 'module';\nimport { fileURLToPath } from 'url';\nimport { dirname, join } from 'path';\n\n// Get package.json dynamically\n// In development: src/utils/banner.ts -> ../../package.json\n// In production (bundled): dist/index.js -> ../package.json\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\nconst require = createRequire(import.meta.url);\n\n// Try production path first (dist -> package.json), then dev path\nlet packageJson;\ntry {\n packageJson = require(join(__dirname, '../package.json'));\n} catch {\n packageJson = require(join(__dirname, '../../package.json'));\n}\n\n// Package info\nconst PACKAGE_NAME = packageJson.name;\nconst VERSION = packageJson.version;\n\n/**\n * Wrap text in a box with a footer line\n */\nfunction wrapInBox(text: string, footer: string, padding = 
1): string {\n const lines = text.split('\\n').filter(line => line.trim().length > 0);\n \n // Use only the main content (logo) to determine box width\n const maxLength = Math.max(...lines.map(line => line.length));\n \n const horizontalBorder = '─'.repeat(maxLength + padding * 2);\n const top = `┌${horizontalBorder}┐`;\n const bottom = `└${horizontalBorder}┘`;\n const separator = `├${horizontalBorder}┤`;\n \n const paddedLines = lines.map(line => {\n const padRight = ' '.repeat(maxLength - line.length + padding);\n const padLeft = ' '.repeat(padding);\n return `│${padLeft}${line}${padRight}│`;\n });\n \n // Center the footer line\n const totalPad = maxLength - footer.length;\n const leftPad = Math.floor(totalPad / 2);\n const rightPad = totalPad - leftPad;\n const centeredFooter = ' '.repeat(leftPad) + footer + ' '.repeat(rightPad);\n \n const paddedFooter = `│${' '.repeat(padding)}${centeredFooter}${' '.repeat(padding)}│`;\n \n return [top, ...paddedLines, separator, paddedFooter, bottom].join('\\n');\n}\n\n/**\n * Display the gorgeous ANSI Shadow banner (uses stderr for MCP server)\n */\nexport function showBanner(): void {\n const banner = figlet.textSync('LIEN', {\n font: 'ANSI Shadow',\n horizontalLayout: 'fitted',\n verticalLayout: 'fitted',\n });\n\n const footer = `${PACKAGE_NAME} - v${VERSION}`;\n const boxedBanner = wrapInBox(banner.trim(), footer);\n console.error(chalk.cyan(boxedBanner));\n console.error(); // Empty line\n}\n\n/**\n * Display the gorgeous ANSI Shadow banner (uses stdout for CLI commands)\n */\nexport function showCompactBanner(): void {\n const banner = figlet.textSync('LIEN', {\n font: 'ANSI Shadow',\n horizontalLayout: 'fitted',\n verticalLayout: 'fitted',\n });\n\n const footer = `${PACKAGE_NAME} - v${VERSION}`;\n const boxedBanner = wrapInBox(banner.trim(), footer);\n console.log(chalk.cyan(boxedBanner));\n console.log(); // Empty line\n}\n\n","import fs from 'fs/promises';\nimport path from 'path';\nimport chalk from 'chalk';\nimport { LienConfig, defaultConfig } from './schema.js';\nimport { needsMigration, migrateConfig, migrateConfigFile } from './migration.js';\nimport { deepMergeConfig, detectNewFields } from './merge.js';\nimport { CURRENT_CONFIG_VERSION } from '../constants.js';\n\n/**\n * Result of a migration operation\n */\nexport interface MigrationResult {\n migrated: boolean;\n backupPath?: string;\n config: LienConfig;\n}\n\n/**\n * Centralized migration orchestration service\n * \n * Handles all config migration scenarios:\n * - Auto-migration during config loading\n * - Interactive upgrade via CLI\n * - Migration status checking\n */\nexport class MigrationManager {\n constructor(private readonly rootDir: string = process.cwd()) {}\n \n /**\n * Get the config file path\n */\n private getConfigPath(): string {\n return path.join(this.rootDir, '.lien.config.json');\n }\n \n /**\n * Check if the current config needs migration\n */\n async needsMigration(): Promise<boolean> {\n try {\n const configPath = this.getConfigPath();\n const content = await fs.readFile(configPath, 'utf-8');\n const config = JSON.parse(content);\n return needsMigration(config);\n } catch (error) {\n // If config doesn't exist or can't be read, no migration needed\n return false;\n }\n }\n \n /**\n * Perform silent migration (for auto-migration during load)\n * Returns the migrated config without user interaction\n */\n async autoMigrate(): Promise<LienConfig> {\n const result = await migrateConfigFile(this.rootDir);\n \n if (result.migrated && result.backupPath) {\n 
const backupFilename = path.basename(result.backupPath);\n console.log(`✅ Migration complete! Backup saved as ${backupFilename}`);\n console.log('📝 Your config now uses the framework-based structure.');\n }\n \n return result.config;\n }\n \n /**\n * Perform interactive upgrade (for CLI upgrade command)\n * Provides detailed feedback and handles edge cases\n */\n async upgradeInteractive(): Promise<void> {\n const configPath = this.getConfigPath();\n \n try {\n // 1. Read existing config\n const existingContent = await fs.readFile(configPath, 'utf-8');\n const existingConfig = JSON.parse(existingContent);\n \n // 2. Check if any changes are needed\n const migrationNeeded = needsMigration(existingConfig);\n const newFields = migrationNeeded ? [] : detectNewFields(existingConfig, defaultConfig);\n const hasChanges = migrationNeeded || newFields.length > 0;\n \n if (!hasChanges) {\n console.log(chalk.green('✓ Config is already up to date'));\n console.log(chalk.dim('No changes needed'));\n return;\n }\n \n // 3. Backup existing config (only if changes are needed)\n const backupPath = `${configPath}.backup`;\n await fs.copyFile(configPath, backupPath);\n \n // 4. Perform upgrade\n let upgradedConfig: LienConfig;\n let migrated = false;\n \n if (migrationNeeded) {\n console.log(chalk.blue(`🔄 Migrating config from v0.2.0 to v${CURRENT_CONFIG_VERSION}...`));\n upgradedConfig = migrateConfig(existingConfig);\n migrated = true;\n } else {\n // Just merge with defaults for current version configs\n upgradedConfig = deepMergeConfig(defaultConfig, existingConfig as Partial<LienConfig>);\n \n console.log(chalk.dim('\\nNew options added:'));\n newFields.forEach(field => console.log(chalk.dim(' •'), chalk.bold(field)));\n }\n \n // 5. Write upgraded config\n await fs.writeFile(\n configPath,\n JSON.stringify(upgradedConfig, null, 2) + '\\n',\n 'utf-8'\n );\n \n // 6. 
Show results\n console.log(chalk.green('✓ Config upgraded successfully'));\n console.log(chalk.dim('Backup saved to:'), backupPath);\n \n if (migrated) {\n console.log(chalk.dim('\\n📝 Your config now uses the framework-based structure.'));\n }\n } catch (error) {\n if ((error as NodeJS.ErrnoException).code === 'ENOENT') {\n console.log(chalk.red('Error: No config file found'));\n console.log(chalk.dim('Run'), chalk.bold('lien init'), chalk.dim('to create a config file'));\n return;\n }\n throw error;\n }\n }\n \n /**\n * Perform migration and return result\n * Used when programmatic access to migration result is needed\n */\n async migrate(): Promise<MigrationResult> {\n return migrateConfigFile(this.rootDir);\n }\n}\n\n","import fs from 'fs/promises';\nimport path from 'path';\nimport { DetectionResult, DetectionOptions, defaultDetectionOptions } from './types.js';\nimport { frameworkDetectors } from './registry.js';\n\n/**\n * Detect all frameworks in a monorepo by recursively scanning subdirectories\n * @param rootDir - Absolute path to project root\n * @param options - Detection options (max depth, skip dirs)\n * @returns Array of detected frameworks with their paths\n */\nexport async function detectAllFrameworks(\n rootDir: string,\n options: Partial<DetectionOptions> = {}\n): Promise<DetectionResult[]> {\n const opts = { ...defaultDetectionOptions, ...options };\n const results: DetectionResult[] = [];\n const visited = new Set<string>();\n \n // Detect at root first\n await detectAtPath(rootDir, '.', results, visited);\n \n // Recursively scan subdirectories\n await scanSubdirectories(rootDir, '.', results, visited, 0, opts);\n \n return results;\n}\n\n/**\n * Detect frameworks at a specific path\n */\nasync function detectAtPath(\n rootDir: string,\n relativePath: string,\n results: DetectionResult[],\n visited: Set<string>\n): Promise<void> {\n // Mark as visited\n const fullPath = path.join(rootDir, relativePath);\n if (visited.has(fullPath)) {\n return;\n }\n visited.add(fullPath);\n \n // Run all detectors and collect results\n const detectedAtPath: Array<DetectionResult & { priority: number }> = [];\n \n for (const detector of frameworkDetectors) {\n try {\n const result = await detector.detect(rootDir, relativePath);\n if (result.detected) {\n detectedAtPath.push({\n ...result,\n priority: detector.priority ?? 
0,\n });\n }\n } catch (error) {\n // Log error but continue with other detectors\n console.error(`Error running detector '${detector.name}' at ${relativePath}:`, error);\n }\n }\n \n // Conflict resolution: Allow multiple HIGH-confidence frameworks to coexist\n // This enables hybrid projects (e.g., Shopify + Node.js, Laravel + Node.js)\n if (detectedAtPath.length > 1) {\n // Separate frameworks by confidence level\n const highConfidence = detectedAtPath.filter(d => d.confidence === 'high');\n const mediumConfidence = detectedAtPath.filter(d => d.confidence === 'medium');\n const lowConfidence = detectedAtPath.filter(d => d.confidence === 'low');\n \n if (highConfidence.length > 1) {\n // Multiple HIGH-confidence frameworks -> keep all (hybrid/monorepo behavior)\n // Strip internal priority property before adding to results\n const cleanResults = highConfidence.map(({ priority, ...result }) => result);\n results.push(...cleanResults);\n const names = highConfidence.map(d => d.name).join(' + ');\n console.log(` → Detected hybrid project: ${names}`);\n \n // Log skipped medium/low confidence detections\n if (mediumConfidence.length > 0 || lowConfidence.length > 0) {\n const skippedNames = [...mediumConfidence, ...lowConfidence].map(d => d.name).join(', ');\n console.log(` → Skipping lower confidence detections: ${skippedNames}`);\n }\n } else if (highConfidence.length === 1) {\n // Only one HIGH-confidence framework\n const { priority, ...result } = highConfidence[0];\n results.push(result);\n \n // Log skipped medium/low confidence detections\n if (mediumConfidence.length > 0 || lowConfidence.length > 0) {\n const skippedNames = [...mediumConfidence, ...lowConfidence].map(d => d.name).join(', ');\n console.log(` → Skipping lower confidence detections: ${skippedNames}`);\n }\n } else if (mediumConfidence.length > 0) {\n // No HIGH confidence, but have MEDIUM -> use priority system\n mediumConfidence.sort((a, b) => b.priority - a.priority);\n const { priority, ...winner } = mediumConfidence[0];\n results.push(winner);\n \n // Skipped = remaining medium + all low confidence\n const skipped = [...mediumConfidence.slice(1), ...lowConfidence];\n if (skipped.length > 0) {\n const skippedNames = skipped.map(d => d.name).join(', ');\n console.log(` → Skipping ${skippedNames} at ${relativePath} (${winner.name} takes precedence)`);\n }\n } else if (lowConfidence.length > 0) {\n // Only LOW confidence -> use priority system\n lowConfidence.sort((a, b) => b.priority - a.priority);\n const { priority, ...winner } = lowConfidence[0];\n results.push(winner);\n \n // Skipped = remaining low confidence\n const skipped = lowConfidence.slice(1);\n if (skipped.length > 0) {\n const skippedNames = skipped.map(d => d.name).join(', ');\n console.log(` → Skipping ${skippedNames} at ${relativePath} (${winner.name} takes precedence)`);\n }\n }\n } else if (detectedAtPath.length === 1) {\n const { priority, ...result } = detectedAtPath[0];\n results.push(result);\n }\n}\n\n/**\n * Recursively scan subdirectories for frameworks\n */\nasync function scanSubdirectories(\n rootDir: string,\n relativePath: string,\n results: DetectionResult[],\n visited: Set<string>,\n depth: number,\n options: DetectionOptions\n): Promise<void> {\n // Check depth limit\n if (depth >= options.maxDepth) {\n return;\n }\n \n const fullPath = path.join(rootDir, relativePath);\n \n try {\n const entries = await fs.readdir(fullPath, { withFileTypes: true });\n \n // Process only directories\n const dirs = entries.filter(e => 
e.isDirectory());\n \n for (const dir of dirs) {\n // Skip directories in the skip list\n if (options.skipDirs.includes(dir.name)) {\n continue;\n }\n \n // Skip hidden directories (except .git, .github which are already in skipDirs)\n if (dir.name.startsWith('.')) {\n continue;\n }\n \n const subPath = relativePath === '.' \n ? dir.name \n : path.join(relativePath, dir.name);\n \n // Detect at this subdirectory\n await detectAtPath(rootDir, subPath, results, visited);\n \n // Recurse deeper\n await scanSubdirectories(rootDir, subPath, results, visited, depth + 1, options);\n }\n } catch (error) {\n // Silently skip directories we can't read (permission errors, etc.)\n return;\n }\n}\n\n/**\n * Get a human-readable summary of detected frameworks\n */\nexport function getDetectionSummary(results: DetectionResult[]): string {\n if (results.length === 0) {\n return 'No frameworks detected';\n }\n \n const lines: string[] = [];\n \n for (const result of results) {\n const pathDisplay = result.path === '.' ? 'root' : result.path;\n lines.push(`${result.name} at ${pathDisplay} (${result.confidence} confidence)`);\n \n if (result.evidence.length > 0) {\n result.evidence.forEach(e => {\n lines.push(` - ${e}`);\n });\n }\n }\n \n return lines.join('\\n');\n}\n\n","import { FrameworkConfig } from '../config/schema.js';\n\n/**\n * Result of framework detection\n */\nexport interface DetectionResult {\n detected: boolean;\n name: string; // 'nodejs', 'laravel'\n path: string; // Relative path from root: '.', 'packages/cli', 'cognito-backend'\n confidence: 'high' | 'medium' | 'low';\n evidence: string[]; // Human-readable evidence (e.g., \"Found package.json with jest\")\n version?: string; // Framework/language version if detectable\n}\n\n/**\n * Interface for framework detectors\n */\nexport interface FrameworkDetector {\n name: string; // Unique framework identifier\n \n /**\n * Priority for conflict resolution (higher = takes precedence)\n * - 100: Specific frameworks (Laravel, Rails, Django)\n * - 50: Generic frameworks (Node.js, Python)\n * - 0: Fallback/generic\n */\n priority?: number;\n \n /**\n * Detect if this framework exists at the given path\n * @param rootDir - Absolute path to project root\n * @param relativePath - Relative path from root to check (e.g., '.' 
or 'packages/cli')\n * @returns Detection result with evidence\n */\n detect(rootDir: string, relativePath: string): Promise<DetectionResult>;\n \n /**\n * Generate default configuration for this framework\n * @param rootDir - Absolute path to project root\n * @param relativePath - Relative path where framework was detected\n * @returns Framework-specific configuration\n */\n generateConfig(rootDir: string, relativePath: string): Promise<FrameworkConfig>;\n}\n\n/**\n * Options for framework detection\n */\nexport interface DetectionOptions {\n maxDepth: number; // Maximum directory depth to scan\n skipDirs: string[]; // Directories to skip (node_modules, vendor, etc.)\n}\n\n/**\n * Default detection options\n */\nexport const defaultDetectionOptions: DetectionOptions = {\n maxDepth: 3,\n skipDirs: [\n 'node_modules',\n 'vendor',\n 'dist',\n 'build',\n '.next',\n '.nuxt',\n 'coverage',\n '.git',\n '.idea',\n '.vscode',\n 'tmp',\n 'temp',\n ],\n};\n\n","import fs from 'fs/promises';\nimport path from 'path';\nimport { FrameworkDetector, DetectionResult } from '../types.js';\nimport { generateNodeJsConfig } from './config.js';\n\n/**\n * Node.js/TypeScript/JavaScript framework detector\n */\nexport const nodejsDetector: FrameworkDetector = {\n name: 'nodejs',\n priority: 50, // Generic, yields to specific frameworks like Laravel\n \n async detect(rootDir: string, relativePath: string): Promise<DetectionResult> {\n const fullPath = path.join(rootDir, relativePath);\n const result: DetectionResult = {\n detected: false,\n name: 'nodejs',\n path: relativePath,\n confidence: 'low',\n evidence: [],\n };\n \n // Check for package.json\n const packageJsonPath = path.join(fullPath, 'package.json');\n let packageJson: any = null;\n \n try {\n const content = await fs.readFile(packageJsonPath, 'utf-8');\n packageJson = JSON.parse(content);\n result.evidence.push('Found package.json');\n } catch {\n // No package.json, not a Node.js project\n return result;\n }\n \n // At this point, we know it's a Node.js project\n result.detected = true;\n result.confidence = 'high';\n \n // Check for TypeScript\n if (packageJson.devDependencies?.typescript || packageJson.dependencies?.typescript) {\n result.evidence.push('TypeScript detected');\n }\n \n // Check for testing frameworks\n const testFrameworks = [\n { name: 'jest', display: 'Jest' },\n { name: 'vitest', display: 'Vitest' },\n { name: 'mocha', display: 'Mocha' },\n { name: 'ava', display: 'AVA' },\n { name: '@playwright/test', display: 'Playwright' },\n ];\n \n for (const framework of testFrameworks) {\n if (\n packageJson.devDependencies?.[framework.name] || \n packageJson.dependencies?.[framework.name]\n ) {\n result.evidence.push(`${framework.display} test framework detected`);\n break; // Only mention first test framework found\n }\n }\n \n // Check for common frameworks/libraries\n const frameworks = [\n { name: 'next', display: 'Next.js' },\n { name: 'react', display: 'React' },\n { name: 'vue', display: 'Vue' },\n { name: 'express', display: 'Express' },\n { name: '@nestjs/core', display: 'NestJS' },\n ];\n \n for (const fw of frameworks) {\n if (packageJson.dependencies?.[fw.name]) {\n result.evidence.push(`${fw.display} detected`);\n break; // Only mention first framework found\n }\n }\n \n // Try to detect version from package.json engines or node version\n if (packageJson.engines?.node) {\n result.version = packageJson.engines.node;\n }\n \n return result;\n },\n \n async generateConfig(rootDir: string, relativePath: string) {\n return 
generateNodeJsConfig(rootDir, relativePath);\n },\n};\n\n","import { FrameworkConfig } from '../../config/schema.js';\n\n/**\n * Generate Node.js framework configuration\n */\nexport async function generateNodeJsConfig(\n _rootDir: string,\n _relativePath: string\n): Promise<FrameworkConfig> {\n return {\n include: [\n // Broader patterns to catch all common project structures\n // (frontend/, src/, lib/, app/, components/, etc.)\n '**/*.ts',\n '**/*.tsx',\n '**/*.js',\n '**/*.jsx',\n '**/*.vue',\n '**/*.mjs',\n '**/*.cjs',\n '**/*.md',\n '**/*.mdx',\n ],\n exclude: [\n 'node_modules/**',\n 'dist/**',\n 'build/**',\n 'coverage/**',\n '.next/**',\n '.nuxt/**',\n '.vite/**',\n '.lien/**',\n 'out/**',\n '*.min.js',\n '*.min.css',\n '*.bundle.js',\n \n // Test artifacts (source files are indexed, but not output)\n 'playwright-report/**',\n 'test-results/**',\n \n // Build/generated artifacts\n '__generated__/**',\n \n // Common build/cache directories\n '.cache/**',\n '.turbo/**',\n '.vercel/**',\n '.netlify/**',\n ],\n };\n}\n\n","import fs from 'fs/promises';\nimport path from 'path';\nimport { FrameworkDetector, DetectionResult } from '../types.js';\nimport { generateLaravelConfig } from './config.js';\n\n/**\n * Laravel/PHP framework detector\n */\nexport const laravelDetector: FrameworkDetector = {\n name: 'laravel',\n priority: 100, // Laravel takes precedence over Node.js\n \n async detect(rootDir: string, relativePath: string): Promise<DetectionResult> {\n const fullPath = path.join(rootDir, relativePath);\n const result: DetectionResult = {\n detected: false,\n name: 'laravel',\n path: relativePath,\n confidence: 'low',\n evidence: [],\n };\n \n // Check for composer.json with Laravel\n const composerJsonPath = path.join(fullPath, 'composer.json');\n let composerJson: any = null;\n \n try {\n const content = await fs.readFile(composerJsonPath, 'utf-8');\n composerJson = JSON.parse(content);\n result.evidence.push('Found composer.json');\n } catch {\n // No composer.json, not a Laravel project\n return result;\n }\n \n // Check if Laravel framework is in dependencies\n const hasLaravel = \n composerJson.require?.['laravel/framework'] ||\n composerJson['require-dev']?.['laravel/framework'];\n \n if (!hasLaravel) {\n // Has composer.json but not Laravel\n return result;\n }\n \n result.evidence.push('Laravel framework detected in composer.json');\n \n // Check for artisan file (strong indicator of Laravel)\n const artisanPath = path.join(fullPath, 'artisan');\n try {\n await fs.access(artisanPath);\n result.evidence.push('Found artisan file');\n result.confidence = 'high';\n } catch {\n result.confidence = 'medium';\n }\n \n // Check for typical Laravel directory structure\n const laravelDirs = ['app', 'routes', 'config', 'database'];\n let foundDirs = 0;\n \n for (const dir of laravelDirs) {\n try {\n const dirPath = path.join(fullPath, dir);\n const stats = await fs.stat(dirPath);\n if (stats.isDirectory()) {\n foundDirs++;\n }\n } catch {\n // Directory doesn't exist\n }\n }\n \n if (foundDirs >= 2) {\n result.evidence.push(`Laravel directory structure detected (${foundDirs}/${laravelDirs.length} dirs)`);\n result.confidence = 'high';\n }\n \n // Check for test directories\n const testDirsToCheck = [\n path.join(fullPath, 'tests', 'Feature'),\n path.join(fullPath, 'tests', 'Unit'),\n ];\n \n for (const testDir of testDirsToCheck) {\n try {\n const stats = await fs.stat(testDir);\n if (stats.isDirectory()) {\n const dirName = path.basename(path.dirname(testDir)) + '/' + 
path.basename(testDir);\n result.evidence.push(`Found ${dirName} test directory`);\n }\n } catch {\n // Test directory doesn't exist\n }\n }\n \n // Extract Laravel version if available\n if (composerJson.require?.['laravel/framework']) {\n result.version = composerJson.require['laravel/framework'];\n }\n \n result.detected = true;\n return result;\n },\n \n async generateConfig(rootDir: string, relativePath: string) {\n return generateLaravelConfig(rootDir, relativePath);\n },\n};\n\n","import { FrameworkConfig } from '../../config/schema.js';\n\n/**\n * Generate Laravel framework configuration\n */\nexport async function generateLaravelConfig(\n _rootDir: string,\n _relativePath: string\n): Promise<FrameworkConfig> {\n return {\n include: [\n // PHP backend\n 'app/**/*.php',\n 'routes/**/*.php',\n 'config/**/*.php',\n 'database/**/*.php',\n 'resources/**/*.php',\n 'tests/**/*.php',\n '*.php',\n // Frontend assets (Vue/React/Inertia) - Broadened for flexibility\n '**/*.js',\n '**/*.ts',\n '**/*.jsx',\n '**/*.tsx',\n '**/*.vue',\n // Blade templates\n 'resources/views/**/*.blade.php',\n // Documentation\n '**/*.md',\n '**/*.mdx',\n 'docs/**/*.md',\n 'README.md',\n 'CHANGELOG.md',\n ],\n exclude: [\n 'vendor/**',\n 'storage/**',\n 'bootstrap/cache/**',\n 'public/**',\n 'node_modules/**',\n 'dist/**',\n 'build/**',\n \n // Test artifacts (source files are indexed, but not output)\n 'playwright-report/**',\n 'test-results/**',\n 'coverage/**',\n \n // Build/generated artifacts\n '__generated__/**',\n \n // Frontend build outputs\n '.vite/**',\n '.nuxt/**',\n '.next/**',\n ],\n };\n}\n\n","import fs from 'fs/promises';\nimport path from 'path';\nimport { FrameworkDetector, DetectionResult } from '../types.js';\nimport { generateShopifyConfig } from './config.js';\n\n/**\n * Shopify Liquid theme framework detector\n */\nexport const shopifyDetector: FrameworkDetector = {\n name: 'shopify',\n priority: 100, // High priority (same as Laravel)\n \n async detect(rootDir: string, relativePath: string): Promise<DetectionResult> {\n const fullPath = path.join(rootDir, relativePath);\n const result: DetectionResult = {\n detected: false,\n name: 'shopify',\n path: relativePath,\n confidence: 'low',\n evidence: [],\n };\n \n // 1. Check for config/settings_schema.json (STRONGEST signal)\n const settingsSchemaPath = path.join(fullPath, 'config', 'settings_schema.json');\n let hasSettingsSchema = false;\n \n try {\n await fs.access(settingsSchemaPath);\n hasSettingsSchema = true;\n result.evidence.push('Found config/settings_schema.json');\n } catch {\n // Not present, continue checking other markers\n }\n \n // 2. Check for layout/theme.liquid\n const themeLayoutPath = path.join(fullPath, 'layout', 'theme.liquid');\n let hasThemeLayout = false;\n \n try {\n await fs.access(themeLayoutPath);\n hasThemeLayout = true;\n result.evidence.push('Found layout/theme.liquid');\n } catch {\n // Not present\n }\n \n // 3. Check for typical Shopify directories\n const shopifyDirs = ['sections', 'snippets', 'templates', 'locales'];\n let foundDirs = 0;\n \n for (const dir of shopifyDirs) {\n try {\n const dirPath = path.join(fullPath, dir);\n const stats = await fs.stat(dirPath);\n if (stats.isDirectory()) {\n foundDirs++;\n }\n } catch {\n // Directory doesn't exist\n }\n }\n \n if (foundDirs >= 2) {\n result.evidence.push(`Shopify directory structure detected (${foundDirs}/${shopifyDirs.length} dirs)`);\n }\n \n // 4. 
Check for shopify.theme.toml (Shopify CLI)\n try {\n const tomlPath = path.join(fullPath, 'shopify.theme.toml');\n await fs.access(tomlPath);\n result.evidence.push('Found shopify.theme.toml');\n } catch {\n // Optional file\n }\n \n // 5. Check for .shopifyignore\n try {\n const ignorePath = path.join(fullPath, '.shopifyignore');\n await fs.access(ignorePath);\n result.evidence.push('Found .shopifyignore');\n } catch {\n // Optional file\n }\n \n // Determine detection confidence with early returns\n // High: Has settings_schema.json + 2+ directories\n if (hasSettingsSchema && foundDirs >= 2) {\n result.detected = true;\n result.confidence = 'high';\n return result;\n }\n \n // Medium: Has settings_schema alone, OR has theme.liquid + 1+ directory\n if (hasSettingsSchema || (hasThemeLayout && foundDirs >= 1)) {\n result.detected = true;\n result.confidence = 'medium';\n return result;\n }\n \n // Medium: Has 3+ typical directories but no strong markers\n if (foundDirs >= 3) {\n result.detected = true;\n result.confidence = 'medium';\n return result;\n }\n \n // Not detected\n return result;\n },\n \n async generateConfig(rootDir: string, relativePath: string) {\n return generateShopifyConfig(rootDir, relativePath);\n },\n};\n\n","import { FrameworkConfig } from '../../config/schema.js';\n\n/**\n * Generate Shopify theme framework configuration\n */\nexport async function generateShopifyConfig(\n _rootDir: string,\n _relativePath: string\n): Promise<FrameworkConfig> {\n return {\n include: [\n // Core Liquid templates\n 'layout/**/*.liquid',\n 'sections/**/*.liquid',\n 'snippets/**/*.liquid',\n 'templates/**/*.liquid', // Matches any nesting level (e.g., templates/customers/account.liquid)\n 'templates/**/*.json', // JSON template definitions (Shopify 2.0+)\n \n // Theme editor blocks (Online Store 2.0)\n 'blocks/**/*.liquid',\n \n // Assets (CSS, JS with optional Liquid templating)\n 'assets/**/*.js',\n 'assets/**/*.js.liquid',\n 'assets/**/*.css',\n 'assets/**/*.css.liquid',\n 'assets/**/*.scss',\n 'assets/**/*.scss.liquid',\n \n // Configuration files\n 'config/*.json',\n \n // Locales (i18n)\n 'locales/*.json',\n \n // Documentation\n '*.md',\n 'docs/**/*.md',\n \n // Shopify-specific config files\n 'shopify.theme.toml',\n '.shopifyignore',\n ],\n exclude: [\n 'node_modules/**',\n 'dist/**',\n 'build/**',\n '.git/**',\n \n // Playwright/testing artifacts\n 'playwright-report/**',\n 'test-results/**',\n \n // Build/generated artifacts\n '__generated__/**',\n \n // Common frontend build outputs\n '.vite/**',\n '.nuxt/**',\n '.next/**',\n ],\n };\n}\n\n","import { FrameworkDetector } from './types.js';\nimport { nodejsDetector } from './nodejs/detector.js';\nimport { laravelDetector } from './laravel/detector.js';\nimport { shopifyDetector } from './shopify/detector.js';\n\n/**\n * Registry of all available framework detectors\n * Frameworks will be added as they are implemented\n */\nexport const frameworkDetectors: FrameworkDetector[] = [\n nodejsDetector,\n laravelDetector,\n shopifyDetector,\n];\n\n/**\n * Register a framework detector\n */\nexport function registerFramework(detector: FrameworkDetector): void {\n // Check if already registered\n const existing = frameworkDetectors.find(d => d.name === detector.name);\n if (existing) {\n console.warn(`Framework detector '${detector.name}' is already registered, skipping`);\n return;\n }\n \n frameworkDetectors.push(detector);\n}\n\n/**\n * Get a framework detector by name\n */\nexport function getFrameworkDetector(name: string): 
FrameworkDetector | undefined {\n return frameworkDetectors.find(d => d.name === name);\n}\n\n","import chalk from 'chalk';\nimport fs from 'fs/promises';\nimport path from 'path';\nimport os from 'os';\nimport crypto from 'crypto';\nimport { configService } from '../config/service.js';\nimport { isGitRepo, getCurrentBranch, getCurrentCommit } from '../git/utils.js';\nimport { readVersionFile } from '../vectordb/version.js';\nimport { showCompactBanner } from '../utils/banner.js';\nimport { isModernConfig } from '../config/schema.js';\n\nexport async function statusCommand() {\n const rootDir = process.cwd();\n const projectName = path.basename(rootDir);\n \n // Use same hashing logic as VectorDB to show correct path\n const pathHash = crypto\n .createHash('md5')\n .update(rootDir)\n .digest('hex')\n .substring(0, 8);\n \n const indexPath = path.join(os.homedir(), '.lien', 'indices', `${projectName}-${pathHash}`);\n \n showCompactBanner();\n console.log(chalk.bold('Status\\n'));\n \n // Check if config exists\n const hasConfig = await configService.exists(rootDir);\n console.log(chalk.dim('Configuration:'), hasConfig ? chalk.green('✓ Found') : chalk.red('✗ Not initialized'));\n \n if (!hasConfig) {\n console.log(chalk.yellow('\\nRun'), chalk.bold('lien init'), chalk.yellow('to initialize'));\n return;\n }\n \n // Check if index exists\n try {\n const stats = await fs.stat(indexPath);\n console.log(chalk.dim('Index location:'), indexPath);\n console.log(chalk.dim('Index status:'), chalk.green('✓ Exists'));\n \n // Try to get directory size\n try {\n const files = await fs.readdir(indexPath, { recursive: true });\n console.log(chalk.dim('Index files:'), files.length);\n } catch (e) {\n // Ignore\n }\n \n console.log(chalk.dim('Last modified:'), stats.mtime.toLocaleString());\n \n // Show version file info\n try {\n const version = await readVersionFile(indexPath);\n if (version > 0) {\n const versionDate = new Date(version);\n console.log(chalk.dim('Last reindex:'), versionDate.toLocaleString());\n }\n } catch {\n // Ignore\n }\n } catch (error) {\n console.log(chalk.dim('Index status:'), chalk.yellow('✗ Not indexed'));\n console.log(chalk.yellow('\\nRun'), chalk.bold('lien index'), chalk.yellow('to index your codebase'));\n }\n \n // Load and show configuration settings\n try {\n const config = await configService.load(rootDir);\n \n console.log(chalk.bold('\\nFeatures:'));\n \n // Git detection status\n const isRepo = await isGitRepo(rootDir);\n if (config.gitDetection.enabled && isRepo) {\n console.log(chalk.dim('Git detection:'), chalk.green('✓ Enabled'));\n console.log(chalk.dim(' Poll interval:'), `${config.gitDetection.pollIntervalMs / 1000}s`);\n \n // Show current git state\n try {\n const branch = await getCurrentBranch(rootDir);\n const commit = await getCurrentCommit(rootDir);\n console.log(chalk.dim(' Current branch:'), branch);\n console.log(chalk.dim(' Current commit:'), commit.substring(0, 8));\n \n // Check if git state file exists\n const gitStateFile = path.join(indexPath, '.git-state.json');\n try {\n const gitStateContent = await fs.readFile(gitStateFile, 'utf-8');\n const gitState = JSON.parse(gitStateContent);\n if (gitState.branch !== branch || gitState.commit !== commit) {\n console.log(chalk.yellow(' ⚠️ Git state changed - will reindex on next serve'));\n }\n } catch {\n // Git state file doesn't exist yet\n }\n } catch {\n // Ignore git command errors\n }\n } else if (config.gitDetection.enabled && !isRepo) {\n console.log(chalk.dim('Git detection:'), 
chalk.yellow('Enabled (not a git repo)'));\n } else {\n console.log(chalk.dim('Git detection:'), chalk.gray('Disabled'));\n }\n \n // File watching status\n if (config.fileWatching.enabled) {\n console.log(chalk.dim('File watching:'), chalk.green('✓ Enabled'));\n console.log(chalk.dim(' Debounce:'), `${config.fileWatching.debounceMs}ms`);\n } else {\n console.log(chalk.dim('File watching:'), chalk.gray('Disabled'));\n console.log(chalk.dim(' Enable with:'), chalk.bold('lien serve --watch'));\n }\n \n // Indexing settings\n console.log(chalk.bold('\\nIndexing Settings:'));\n if (isModernConfig(config)) {\n console.log(chalk.dim('Concurrency:'), config.core.concurrency);\n console.log(chalk.dim('Batch size:'), config.core.embeddingBatchSize);\n console.log(chalk.dim('Chunk size:'), config.core.chunkSize);\n console.log(chalk.dim('Chunk overlap:'), config.core.chunkOverlap);\n }\n \n } catch (error) {\n console.log(chalk.yellow('\\nWarning: Could not load configuration'));\n }\n}\n\n","import chalk from 'chalk';\nimport { indexCodebase } from '../indexer/index.js';\nimport { showCompactBanner } from '../utils/banner.js';\n\nexport async function indexCommand(options: { watch?: boolean; verbose?: boolean; force?: boolean }) {\n showCompactBanner();\n \n try {\n // If force flag is set, clear the index and manifest first (clean slate)\n if (options.force) {\n const { VectorDB } = await import('../vectordb/lancedb.js');\n const { ManifestManager } = await import('../indexer/manifest.js');\n \n console.log(chalk.yellow('Clearing existing index and manifest...'));\n const vectorDB = new VectorDB(process.cwd());\n await vectorDB.initialize();\n await vectorDB.clear();\n \n // Also clear manifest\n const manifest = new ManifestManager(vectorDB.dbPath);\n await manifest.clear();\n \n console.log(chalk.green('✓ Index and manifest cleared\\n'));\n }\n \n await indexCodebase({\n rootDir: process.cwd(),\n verbose: options.verbose || false,\n force: options.force || false,\n });\n \n if (options.watch) {\n console.log(chalk.yellow('\\n⚠️ Watch mode not yet implemented'));\n // TODO: Implement file watching with chokidar\n }\n } catch (error) {\n console.error(chalk.red('Error during indexing:'), error);\n process.exit(1);\n }\n}\n\n","import chalk from 'chalk';\nimport fs from 'fs/promises';\nimport path from 'path';\nimport { startMCPServer } from '../mcp/server.js';\nimport { showBanner } from '../utils/banner.js';\n\nexport async function serveCommand(options: { port?: string; watch?: boolean; noWatch?: boolean; root?: string }) {\n const rootDir = options.root ? 
path.resolve(options.root) : process.cwd();\n \n try {\n // Validate root directory if --root was specified\n if (options.root) {\n try {\n const stats = await fs.stat(rootDir);\n if (!stats.isDirectory()) {\n console.error(chalk.red(`Error: --root path is not a directory: ${rootDir}`));\n process.exit(1);\n }\n } catch (error) {\n if ((error as NodeJS.ErrnoException).code === 'ENOENT') {\n console.error(chalk.red(`Error: --root directory does not exist: ${rootDir}`));\n } else if ((error as NodeJS.ErrnoException).code === 'EACCES') {\n console.error(chalk.red(`Error: --root directory is not accessible: ${rootDir}`));\n } else {\n console.error(chalk.red(`Error: Failed to access --root directory: ${rootDir}`));\n console.error(chalk.dim((error as Error).message));\n }\n process.exit(1);\n }\n }\n \n // Log to stderr since stdout is for MCP protocol\n showBanner();\n console.error(chalk.bold('Starting MCP server...\\n'));\n \n if (options.root) {\n console.error(chalk.dim(`Serving from: ${rootDir}\\n`));\n }\n \n // Handle deprecated --watch flag\n if (options.watch) {\n console.error(chalk.yellow('⚠️ --watch flag is deprecated (file watching is now default)'));\n console.error(chalk.dim(' Use --no-watch to disable file watching\\n'));\n }\n \n // Determine file watching state\n // Priority: --no-watch > --watch (deprecated) > config default\n const watch = options.noWatch ? false : options.watch ? true : undefined;\n \n await startMCPServer({\n rootDir,\n verbose: true,\n watch,\n });\n } catch (error) {\n console.error(chalk.red('Failed to start MCP server:'), error);\n process.exit(1);\n }\n}\n\n","import { Server } from '@modelcontextprotocol/sdk/server/index.js';\nimport { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';\nimport {\n CallToolRequestSchema,\n ListToolsRequestSchema,\n} from '@modelcontextprotocol/sdk/types.js';\nimport { createRequire } from 'module';\nimport { fileURLToPath } from 'url';\nimport { dirname, join } from 'path';\nimport { tools } from './tools.js';\nimport { VectorDB } from '../vectordb/lancedb.js';\nimport { LocalEmbeddings } from '../embeddings/local.js';\nimport { GitStateTracker } from '../git/tracker.js';\nimport { indexMultipleFiles, indexSingleFile } from '../indexer/incremental.js';\nimport { configService } from '../config/service.js';\nimport { ManifestManager } from '../indexer/manifest.js';\nimport { isGitAvailable, isGitRepo } from '../git/utils.js';\nimport { FileWatcher } from '../watcher/index.js';\nimport { VERSION_CHECK_INTERVAL_MS } from '../constants.js';\nimport { wrapToolHandler } from './utils/tool-wrapper.js';\nimport {\n SemanticSearchSchema,\n FindSimilarSchema,\n GetFileContextSchema,\n ListFunctionsSchema,\n} from './schemas/index.js';\nimport { LienError, LienErrorCode } from '../errors/index.js';\n\n// Get version from package.json dynamically\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\nconst require = createRequire(import.meta.url);\n\nlet packageJson: { name: string; version: string };\ntry {\n packageJson = require(join(__dirname, '../package.json'));\n} catch {\n packageJson = require(join(__dirname, '../../package.json'));\n}\n\nexport interface MCPServerOptions {\n rootDir: string;\n verbose?: boolean;\n watch?: boolean;\n}\n\nexport async function startMCPServer(options: MCPServerOptions): Promise<void> {\n const { rootDir, verbose, watch } = options;\n \n // Log to stderr (stdout is reserved for MCP protocol)\n const log = (message: string) => {\n 
if (verbose) {\n console.error(`[Lien MCP] ${message}`);\n }\n };\n \n log('Initializing MCP server...');\n \n // Initialize embeddings and vector DB\n const embeddings = new LocalEmbeddings();\n const vectorDB = new VectorDB(rootDir);\n \n try {\n log('Loading embedding model...');\n await embeddings.initialize();\n \n log('Loading vector database...');\n await vectorDB.initialize();\n \n log('Embeddings and vector DB ready');\n } catch (error) {\n console.error(`Failed to initialize: ${error}`);\n process.exit(1);\n }\n \n // Create MCP server\n const server = new Server(\n {\n name: 'lien',\n version: packageJson.version,\n },\n {\n capabilities: {\n tools: {},\n },\n }\n );\n \n // Register tool list handler\n server.setRequestHandler(ListToolsRequestSchema, async () => ({\n tools,\n }));\n \n // Helper function to check version and reconnect if needed\n const checkAndReconnect = async () => {\n try {\n const versionChanged = await vectorDB.checkVersion();\n if (versionChanged) {\n log('Index version changed, reconnecting to database...');\n await vectorDB.reconnect();\n log('Reconnected to updated index');\n }\n } catch (error) {\n // Log but don't throw - fall back to existing connection\n log(`Version check failed: ${error}`);\n }\n };\n \n // Helper to get current index metadata for responses\n const getIndexMetadata = () => ({\n indexVersion: vectorDB.getCurrentVersion(),\n indexDate: vectorDB.getVersionDate(),\n });\n \n // Start background polling for version changes (every 2 seconds)\n // This ensures we reconnect as soon as possible after reindex, even if no tool calls are made\n const versionCheckInterval = setInterval(async () => {\n await checkAndReconnect();\n }, VERSION_CHECK_INTERVAL_MS);\n \n // Register tool call handler\n server.setRequestHandler(CallToolRequestSchema, async (request) => {\n const { name, arguments: args } = request.params;\n \n log(`Handling tool call: ${name}`);\n \n try {\n switch (name) {\n case 'semantic_search':\n return await wrapToolHandler(\n SemanticSearchSchema,\n async (validatedArgs) => {\n log(`Searching for: \"${validatedArgs.query}\"`);\n \n // Check if index has been updated and reconnect if needed\n await checkAndReconnect();\n \n const queryEmbedding = await embeddings.embed(validatedArgs.query);\n const results = await vectorDB.search(queryEmbedding, validatedArgs.limit, validatedArgs.query);\n \n log(`Found ${results.length} results`);\n \n return {\n indexInfo: getIndexMetadata(),\n results,\n };\n }\n )(args);\n \n case 'find_similar':\n return await wrapToolHandler(\n FindSimilarSchema,\n async (validatedArgs) => {\n log(`Finding similar code...`);\n \n // Check if index has been updated and reconnect if needed\n await checkAndReconnect();\n \n const codeEmbedding = await embeddings.embed(validatedArgs.code);\n // Pass code as query for relevance boosting\n const results = await vectorDB.search(codeEmbedding, validatedArgs.limit, validatedArgs.code);\n \n log(`Found ${results.length} similar chunks`);\n \n return {\n indexInfo: getIndexMetadata(),\n results,\n };\n }\n )(args);\n \n case 'get_file_context':\n return await wrapToolHandler(\n GetFileContextSchema,\n async (validatedArgs) => {\n log(`Getting context for: ${validatedArgs.filepath}`);\n \n // Check if index has been updated and reconnect if needed\n await checkAndReconnect();\n \n // Search for chunks from this file by embedding the filepath\n // This is a simple approach; could be improved with metadata filtering\n const fileEmbedding = await 
embeddings.embed(validatedArgs.filepath);\n const allResults = await vectorDB.search(fileEmbedding, 50, validatedArgs.filepath);\n \n // Filter results to only include chunks from the target file\n const fileChunks = allResults.filter(r => \n r.metadata.file.includes(validatedArgs.filepath) || validatedArgs.filepath.includes(r.metadata.file)\n );\n \n let results = fileChunks;\n \n if (validatedArgs.includeRelated && fileChunks.length > 0) {\n // Get related chunks by searching with the first chunk's content\n const relatedEmbedding = await embeddings.embed(fileChunks[0].content);\n const related = await vectorDB.search(relatedEmbedding, 5, fileChunks[0].content);\n \n // Add related chunks that aren't from the same file\n const relatedOtherFiles = related.filter(r => \n !r.metadata.file.includes(validatedArgs.filepath) && !validatedArgs.filepath.includes(r.metadata.file)\n );\n \n results = [...fileChunks, ...relatedOtherFiles];\n }\n \n log(`Found ${results.length} chunks`);\n \n return {\n indexInfo: getIndexMetadata(),\n file: validatedArgs.filepath,\n chunks: results,\n };\n }\n )(args);\n \n case 'list_functions':\n return await wrapToolHandler(\n ListFunctionsSchema,\n async (validatedArgs) => {\n log('Listing functions with symbol metadata...');\n \n // Check if index has been updated and reconnect if needed\n await checkAndReconnect();\n \n let results;\n let usedMethod = 'symbols';\n \n try {\n // Try using symbol-based query first (v0.5.0+)\n results = await vectorDB.querySymbols({\n language: validatedArgs.language,\n pattern: validatedArgs.pattern,\n limit: 50,\n });\n \n // If no results and pattern was provided, it might be an old index\n // Fall back to content scanning\n if (results.length === 0 && (validatedArgs.language || validatedArgs.pattern)) {\n log('No symbol results, falling back to content scan...');\n results = await vectorDB.scanWithFilter({\n language: validatedArgs.language,\n pattern: validatedArgs.pattern,\n limit: 50,\n });\n usedMethod = 'content';\n }\n } catch (error) {\n // If querySymbols fails (e.g., old index without symbol fields), fall back\n log(`Symbol query failed, falling back to content scan: ${error}`);\n results = await vectorDB.scanWithFilter({\n language: validatedArgs.language,\n pattern: validatedArgs.pattern,\n limit: 50,\n });\n usedMethod = 'content';\n }\n \n log(`Found ${results.length} matches using ${usedMethod} method`);\n \n return {\n indexInfo: getIndexMetadata(),\n method: usedMethod,\n results,\n note: usedMethod === 'content' \n ? 'Using content search. Run \"lien reindex\" to enable faster symbol-based queries.'\n : undefined,\n };\n }\n )(args);\n \n default:\n throw new LienError(\n `Unknown tool: ${name}`,\n LienErrorCode.INVALID_INPUT,\n { requestedTool: name, availableTools: tools.map(t => t.name) },\n 'medium',\n false,\n false\n );\n }\n } catch (error) {\n // Handle errors at the switch level (e.g., unknown tool)\n if (error instanceof LienError) {\n return {\n isError: true,\n content: [{\n type: 'text' as const,\n text: JSON.stringify(error.toJSON(), null, 2),\n }],\n };\n }\n \n // Unexpected error\n console.error(`Unexpected error handling tool call ${name}:`, error);\n return {\n isError: true,\n content: [{\n type: 'text' as const,\n text: JSON.stringify({\n error: error instanceof Error ? 
error.message : 'Unknown error',\n code: LienErrorCode.INTERNAL_ERROR,\n tool: name,\n }, null, 2),\n }],\n };\n }\n });\n \n // Load configuration for auto-indexing, git detection, and file watching\n const config = await configService.load(rootDir);\n \n // Check if this is the first run (no data in index) and auto-index if needed\n const hasIndex = await vectorDB.hasData();\n \n if (!hasIndex && config.mcp.autoIndexOnFirstRun) {\n log('📦 No index found - running initial indexing...');\n log('⏱️ This may take 5-20 minutes depending on project size');\n \n try {\n // Import indexCodebase function\n const { indexCodebase } = await import('../indexer/index.js');\n await indexCodebase({ rootDir, verbose: true });\n log('✅ Initial indexing complete!');\n } catch (error) {\n log(`⚠️ Initial indexing failed: ${error}`);\n log('You can manually run: lien index');\n // Don't exit - server can still start, just won't have data\n }\n } else if (!hasIndex) {\n log('⚠️ No index found. Auto-indexing is disabled in config.');\n log('Run \"lien index\" to index your codebase.');\n }\n \n // Initialize git detection if enabled\n let gitTracker: GitStateTracker | null = null;\n let gitPollInterval: NodeJS.Timeout | null = null;\n let fileWatcher: FileWatcher | null = null;\n \n if (config.gitDetection.enabled) {\n const gitAvailable = await isGitAvailable();\n const isRepo = await isGitRepo(rootDir);\n \n if (gitAvailable && isRepo) {\n log('✓ Detected git repository');\n gitTracker = new GitStateTracker(rootDir, vectorDB.dbPath);\n \n // Check for git changes on startup\n try {\n log('Checking for git changes...');\n const changedFiles = await gitTracker.initialize();\n \n if (changedFiles && changedFiles.length > 0) {\n log(`🌿 Git changes detected: ${changedFiles.length} files changed`);\n log('Reindexing changed files...');\n \n const count = await indexMultipleFiles(\n changedFiles,\n vectorDB,\n embeddings,\n config,\n { verbose }\n );\n \n log(`✓ Reindexed ${count} files`);\n } else {\n log('✓ Index is up to date with git state');\n }\n } catch (error) {\n log(`Warning: Failed to check git state on startup: ${error}`);\n }\n \n // Start background polling for git changes\n log(`✓ Git detection enabled (checking every ${config.gitDetection.pollIntervalMs / 1000}s)`);\n \n gitPollInterval = setInterval(async () => {\n try {\n const changedFiles = await gitTracker!.detectChanges();\n \n if (changedFiles && changedFiles.length > 0) {\n log(`🌿 Git change detected: ${changedFiles.length} files changed`);\n log('Reindexing in background...');\n \n // Don't await - run in background\n indexMultipleFiles(\n changedFiles,\n vectorDB,\n embeddings,\n config,\n { verbose }\n ).then(count => {\n log(`✓ Background reindex complete: ${count} files`);\n }).catch(error => {\n log(`Warning: Background reindex failed: ${error}`);\n });\n }\n } catch (error) {\n log(`Warning: Git detection check failed: ${error}`);\n }\n }, config.gitDetection.pollIntervalMs);\n } else {\n if (!gitAvailable) {\n log('Git not available - git detection disabled');\n } else if (!isRepo) {\n log('Not a git repository - git detection disabled');\n }\n }\n } else {\n log('Git detection disabled by configuration');\n }\n \n // Initialize file watching if enabled\n // Priority: CLI flag if explicitly set (true/false), otherwise use config default\n const fileWatchingEnabled = watch !== undefined ? 
watch : config.fileWatching.enabled;\n \n if (fileWatchingEnabled) {\n log('👀 Starting file watcher...');\n fileWatcher = new FileWatcher(rootDir, config);\n \n try {\n await fileWatcher.start(async (event) => {\n const { type, filepath } = event;\n \n if (type === 'unlink') {\n // File deleted\n log(`🗑️ File deleted: ${filepath}`);\n try {\n await vectorDB.deleteByFile(filepath);\n \n // Update manifest\n const manifest = new ManifestManager(vectorDB.dbPath);\n await manifest.removeFile(filepath);\n \n log(`✓ Removed ${filepath} from index`);\n } catch (error) {\n log(`Warning: Failed to remove ${filepath}: ${error}`);\n }\n } else {\n // File added or changed\n const action = type === 'add' ? 'added' : 'changed';\n log(`📝 File ${action}: ${filepath}`);\n \n // Reindex in background\n indexSingleFile(filepath, vectorDB, embeddings, config, { verbose })\n .catch((error) => {\n log(`Warning: Failed to reindex ${filepath}: ${error}`);\n });\n }\n });\n \n const watchedCount = fileWatcher.getWatchedFiles().length;\n log(`✓ File watching enabled (watching ${watchedCount} files)`);\n } catch (error) {\n log(`Warning: Failed to start file watcher: ${error}`);\n fileWatcher = null;\n }\n }\n \n // Handle shutdown gracefully\n const cleanup = async () => {\n log('Shutting down MCP server...');\n clearInterval(versionCheckInterval);\n if (gitPollInterval) {\n clearInterval(gitPollInterval);\n }\n if (fileWatcher) {\n await fileWatcher.stop();\n }\n process.exit(0);\n };\n \n // Listen for termination signals\n process.on('SIGINT', cleanup);\n process.on('SIGTERM', cleanup);\n \n // Connect to stdio transport\n const transport = new StdioServerTransport();\n \n // Use SDK's transport callbacks for parent process detection\n // This avoids conflicts with the transport's stdin management\n transport.onclose = () => {\n log('Transport closed, parent process likely terminated');\n cleanup().catch(() => process.exit(0));\n };\n \n transport.onerror = (error) => {\n log(`Transport error: ${error}`);\n // Transport will close after error, onclose will handle cleanup\n };\n \n await server.connect(transport);\n \n log('MCP server started and listening on stdio');\n}\n\n","import { z } from 'zod';\nimport { zodToJsonSchema } from 'zod-to-json-schema';\n\n/**\n * Convert a Zod schema to an MCP tool schema.\n * \n * This utility generates JSON Schema from Zod schemas for use in MCP tool definitions.\n * The resulting schema includes all validation rules and descriptions from the Zod schema.\n * \n * @param zodSchema - The Zod schema to convert\n * @param name - The tool name\n * @param description - The tool description\n * @returns MCP-compatible tool schema object\n * \n * @example\n * ```typescript\n * const SearchSchema = z.object({\n * query: z.string().min(3).describe(\"Search query\"),\n * limit: z.number().default(5)\n * });\n * \n * const tool = toMCPToolSchema(\n * SearchSchema,\n * 'semantic_search',\n * 'Search the codebase semantically'\n * );\n * ```\n */\nexport function toMCPToolSchema(\n zodSchema: z.ZodSchema,\n name: string,\n description: string\n) {\n return {\n name,\n description,\n inputSchema: zodToJsonSchema(zodSchema, {\n target: 'jsonSchema7',\n $refStrategy: 'none',\n }),\n };\n}\n\n","import { z } from 'zod';\n\n/**\n * Schema for semantic search tool input.\n * \n * Validates query strings and result limits for semantic code search.\n * Includes rich descriptions to guide AI assistants on proper usage.\n */\nexport const SemanticSearchSchema = z.object({\n query: z.string()\n 
.min(3, \"Query must be at least 3 characters\")\n .max(500, \"Query too long (max 500 characters)\")\n .describe(\n \"Natural language description of what you're looking for.\\n\\n\" +\n \"Use full sentences describing functionality, not exact names.\\n\\n\" +\n \"Good examples:\\n\" +\n \" - 'handles user authentication'\\n\" +\n \" - 'validates email format'\\n\" +\n \" - 'processes payment transactions'\\n\\n\" +\n \"Bad examples:\\n\" +\n \" - 'auth' (too vague)\\n\" +\n \" - 'validateEmail' (use grep for exact names)\"\n ),\n \n limit: z.number()\n .int()\n .min(1, \"Limit must be at least 1\")\n .max(50, \"Limit cannot exceed 50\")\n .default(5)\n .describe(\n \"Number of results to return.\\n\\n\" +\n \"Default: 5\\n\" +\n \"Increase to 10-15 for broad exploration.\"\n ),\n});\n\n/**\n * Inferred TypeScript type for semantic search input\n */\nexport type SemanticSearchInput = z.infer<typeof SemanticSearchSchema>;\n\n","import { z } from 'zod';\n\n/**\n * Schema for find_similar tool input.\n * \n * Validates code snippets and result limits for similarity search.\n */\nexport const FindSimilarSchema = z.object({\n code: z.string()\n .min(10, \"Code snippet must be at least 10 characters\")\n .describe(\n \"Code snippet to find similar implementations for.\\n\\n\" +\n \"Provide a representative code sample that demonstrates the pattern \" +\n \"you want to find similar examples of in the codebase.\"\n ),\n \n limit: z.number()\n .int()\n .min(1, \"Limit must be at least 1\")\n .max(20, \"Limit cannot exceed 20\")\n .default(5)\n .describe(\n \"Number of similar code blocks to return.\\n\\n\" +\n \"Default: 5\"\n ),\n});\n\n/**\n * Inferred TypeScript type for find similar input\n */\nexport type FindSimilarInput = z.infer<typeof FindSimilarSchema>;\n\n","import { z } from 'zod';\n\n/**\n * Schema for get_file_context tool input.\n * \n * Validates file paths and context options for retrieving file-specific code chunks.\n */\nexport const GetFileContextSchema = z.object({\n filepath: z.string()\n .min(1, \"Filepath cannot be empty\")\n .describe(\n \"Relative path to file from workspace root.\\n\\n\" +\n \"Example: 'src/components/Button.tsx'\"\n ),\n \n includeRelated: z.boolean()\n .default(true)\n .describe(\n \"Include semantically related chunks from nearby code.\\n\\n\" +\n \"Default: true\\n\\n\" +\n \"When enabled, also returns related code from other files that are \" +\n \"semantically similar to the target file's contents.\"\n ),\n});\n\n/**\n * Inferred TypeScript type for file context input\n */\nexport type GetFileContextInput = z.infer<typeof GetFileContextSchema>;\n\n","import { z } from 'zod';\n\n/**\n * Schema for list_functions tool input.\n * \n * Validates pattern and language filters for symbol listing.\n */\nexport const ListFunctionsSchema = z.object({\n pattern: z.string()\n .optional()\n .describe(\n \"Regex pattern to match symbol names.\\n\\n\" +\n \"Examples:\\n\" +\n \" - '.*Controller.*' to find all Controllers\\n\" +\n \" - 'handle.*' to find handlers\\n\" +\n \" - '.*Service$' to find Services\\n\\n\" +\n \"If omitted, returns all symbols.\"\n ),\n \n language: z.string()\n .optional()\n .describe(\n \"Filter by programming language.\\n\\n\" +\n \"Examples: 'typescript', 'python', 'javascript', 'php'\\n\\n\" +\n \"If omitted, searches all languages.\"\n ),\n});\n\n/**\n * Inferred TypeScript type for list functions input\n */\nexport type ListFunctionsInput = z.infer<typeof ListFunctionsSchema>;\n\n","import { toMCPToolSchema } from 
'./utils/zod-to-json-schema.js';\nimport {\n SemanticSearchSchema,\n FindSimilarSchema,\n GetFileContextSchema,\n ListFunctionsSchema,\n} from './schemas/index.js';\n\n/**\n * MCP tool definitions with Zod-generated schemas.\n * \n * All schemas are automatically generated from Zod definitions,\n * providing type safety and rich validation at runtime.\n */\nexport const tools = [\n toMCPToolSchema(\n SemanticSearchSchema,\n 'semantic_search',\n 'Search the codebase semantically for relevant code using natural language. Results include a relevance category (highly_relevant, relevant, loosely_related, not_relevant) based on semantic similarity.'\n ),\n toMCPToolSchema(\n FindSimilarSchema,\n 'find_similar',\n 'Find code similar to a given code snippet. Results include a relevance category (highly_relevant, relevant, loosely_related, not_relevant) based on semantic similarity.'\n ),\n toMCPToolSchema(\n GetFileContextSchema,\n 'get_file_context',\n 'Get all chunks and related context for a specific file. Results include a relevance category (highly_relevant, relevant, loosely_related, not_relevant) based on semantic similarity.'\n ),\n toMCPToolSchema(\n ListFunctionsSchema,\n 'list_functions',\n 'List functions, classes, and interfaces by name pattern and language'\n ),\n];\n\n","import chokidar from 'chokidar';\nimport { LienConfig, LegacyLienConfig, isLegacyConfig, isModernConfig } from '../config/schema.js';\n\nexport interface FileChangeEvent {\n type: 'add' | 'change' | 'unlink';\n filepath: string;\n}\n\nexport type FileChangeHandler = (event: FileChangeEvent) => void | Promise<void>;\n\n/**\n * File watcher service that monitors code files for changes.\n * Uses chokidar for robust file watching with debouncing support.\n */\nexport class FileWatcher {\n private watcher: chokidar.FSWatcher | null = null;\n private debounceTimers: Map<string, NodeJS.Timeout> = new Map();\n private config: LienConfig | LegacyLienConfig;\n private rootDir: string;\n private onChangeHandler: FileChangeHandler | null = null;\n \n constructor(rootDir: string, config: LienConfig | LegacyLienConfig) {\n this.rootDir = rootDir;\n this.config = config;\n }\n \n /**\n * Starts watching files for changes.\n * \n * @param handler - Callback function called when files change\n */\n async start(handler: FileChangeHandler): Promise<void> {\n if (this.watcher) {\n throw new Error('File watcher is already running');\n }\n \n this.onChangeHandler = handler;\n \n // Get watch patterns based on config type\n let includePatterns: string[];\n let excludePatterns: string[];\n \n if (isLegacyConfig(this.config)) {\n includePatterns = this.config.indexing.include;\n excludePatterns = this.config.indexing.exclude;\n } else if (isModernConfig(this.config)) {\n // For modern configs, aggregate patterns from all frameworks\n includePatterns = this.config.frameworks.flatMap(f => f.config.include);\n excludePatterns = this.config.frameworks.flatMap(f => f.config.exclude);\n } else {\n includePatterns = ['**/*'];\n excludePatterns = [];\n }\n \n // Configure chokidar\n this.watcher = chokidar.watch(includePatterns, {\n cwd: this.rootDir,\n ignored: excludePatterns,\n persistent: true,\n ignoreInitial: true, // Don't trigger for existing files\n awaitWriteFinish: {\n stabilityThreshold: 500, // Wait 500ms for file to stop changing\n pollInterval: 100,\n },\n // Performance optimizations\n usePolling: false,\n interval: 100,\n binaryInterval: 300,\n });\n \n // Register event handlers with debouncing\n this.watcher\n .on('add', 
(filepath) => this.handleChange('add', filepath))\n .on('change', (filepath) => this.handleChange('change', filepath))\n .on('unlink', (filepath) => this.handleChange('unlink', filepath))\n .on('error', (error) => {\n console.error(`[Lien] File watcher error: ${error}`);\n });\n \n // Wait for watcher to be ready\n await new Promise<void>((resolve) => {\n this.watcher!.on('ready', () => {\n resolve();\n });\n });\n }\n \n /**\n * Handles a file change event with debouncing.\n * Debouncing prevents rapid reindexing when files are saved multiple times quickly.\n */\n private handleChange(type: 'add' | 'change' | 'unlink', filepath: string): void {\n // Clear existing debounce timer for this file\n const existingTimer = this.debounceTimers.get(filepath);\n if (existingTimer) {\n clearTimeout(existingTimer);\n }\n \n // Set new debounce timer\n const timer = setTimeout(() => {\n this.debounceTimers.delete(filepath);\n \n // Call handler\n if (this.onChangeHandler) {\n const absolutePath = filepath.startsWith('/')\n ? filepath\n : `${this.rootDir}/${filepath}`;\n \n try {\n const result = this.onChangeHandler({\n type,\n filepath: absolutePath,\n });\n \n // Handle async handlers\n if (result instanceof Promise) {\n result.catch((error) => {\n console.error(`[Lien] Error handling file change: ${error}`);\n });\n }\n } catch (error) {\n console.error(`[Lien] Error handling file change: ${error}`);\n }\n }\n }, this.config.fileWatching.debounceMs);\n \n this.debounceTimers.set(filepath, timer);\n }\n \n /**\n * Stops the file watcher and cleans up resources.\n */\n async stop(): Promise<void> {\n if (!this.watcher) {\n return;\n }\n \n // Clear all pending debounce timers\n for (const timer of this.debounceTimers.values()) {\n clearTimeout(timer);\n }\n this.debounceTimers.clear();\n \n // Close watcher\n await this.watcher.close();\n this.watcher = null;\n this.onChangeHandler = null;\n }\n \n /**\n * Gets the list of files currently being watched.\n */\n getWatchedFiles(): string[] {\n if (!this.watcher) {\n return [];\n }\n \n const watched = this.watcher.getWatched();\n const files: string[] = [];\n \n for (const [dir, filenames] of Object.entries(watched)) {\n for (const filename of filenames) {\n files.push(`${dir}/${filename}`);\n }\n }\n \n return files;\n }\n \n /**\n * Checks if the watcher is currently running.\n */\n isRunning(): boolean {\n return this.watcher !== null;\n }\n}\n\n","import { ZodSchema, ZodError } from 'zod';\nimport { LienError, LienErrorCode } from '../../errors/index.js';\n\n/**\n * Wrap a tool handler with Zod validation and error handling.\n * \n * This utility provides automatic:\n * - Input validation using Zod schemas\n * - Type-safe handler execution with inferred types\n * - Consistent error formatting for validation, Lien, and unexpected errors\n * - MCP-compatible response structure\n * \n * @param schema - Zod schema to validate tool inputs against\n * @param handler - Tool handler function that receives validated inputs\n * @returns Wrapped handler that validates inputs and handles errors\n * \n * @example\n * ```typescript\n * const SearchSchema = z.object({\n * query: z.string().min(3),\n * limit: z.number().default(5)\n * });\n * \n * const searchHandler = wrapToolHandler(\n * SearchSchema,\n * async (args) => {\n * // args is fully typed: { query: string; limit: number }\n * const results = await search(args.query, args.limit);\n * return { results };\n * }\n * );\n * \n * // Use in MCP server\n * return await 
searchHandler(request.params.arguments);\n * ```\n */\nexport function wrapToolHandler<T>(\n schema: ZodSchema<T>,\n handler: (validated: T) => Promise<any>\n) {\n return async (args: unknown) => {\n try {\n // Validate input with Zod\n const validated = schema.parse(args);\n \n // Execute handler with validated, typed input\n const result = await handler(validated);\n \n // Return MCP-compatible success response\n return {\n content: [{\n type: 'text' as const,\n text: JSON.stringify(result, null, 2),\n }],\n };\n \n } catch (error) {\n // Handle Zod validation errors\n if (error instanceof ZodError) {\n return {\n isError: true,\n content: [{\n type: 'text' as const,\n text: JSON.stringify({\n error: 'Invalid parameters',\n code: LienErrorCode.INVALID_INPUT,\n details: error.errors.map(e => ({\n field: e.path.join('.'),\n message: e.message,\n })),\n }, null, 2),\n }],\n };\n }\n \n // Handle known Lien errors\n if (error instanceof LienError) {\n return {\n isError: true,\n content: [{\n type: 'text' as const,\n text: JSON.stringify(error.toJSON(), null, 2),\n }],\n };\n }\n \n // Handle unexpected errors\n console.error('Unexpected error in tool handler:', error);\n return {\n isError: true,\n content: [{\n type: 'text' as const,\n text: JSON.stringify({\n error: error instanceof Error ? error.message : 'Unknown error',\n code: LienErrorCode.INTERNAL_ERROR,\n }, null, 2),\n }],\n };\n }\n };\n}\n\n","import { program } from './cli/index.js';\n\nprogram.parse();\n\n"],"mappings":";;;;;;;;;;;;AAAA,SAAS,qBAAqB;AAC9B,SAAS,qBAAqB;AAC9B,SAAS,SAAS,YAAY;AAoCvB,SAAS,oBAA4B;AAC1C,SAAO,YAAY;AACrB;AAxCA,IAeM,YACA,WACAA,UAEF;AAnBJ;AAAA;AAAA;AAeA,IAAM,aAAa,cAAc,YAAY,GAAG;AAChD,IAAM,YAAY,QAAQ,UAAU;AACpC,IAAMA,WAAU,cAAc,YAAY,GAAG;AAI7C,QAAI;AAEF,oBAAcA,SAAQ,KAAK,WAAW,iBAAiB,CAAC;AAAA,IAC1D,QAAQ;AACN,UAAI;AAEF,sBAAcA,SAAQ,KAAK,WAAW,oBAAoB,CAAC;AAAA,MAC7D,QAAQ;AAEN,gBAAQ,KAAK,qEAAqE;AAClF,sBAAc,EAAE,SAAS,gBAAgB;AAAA,MAC3C;AAAA,IACF;AAAA;AAAA;;;ACjCA,IASa,oBACA,uBAGA,qBACA,8BAKA,4BAIA,0BAEA,0BAGA,sBACA,yBAGA,cACA,2BAGA,8BAGA,qBAIA,wBASA;AApDb;AAAA;AAAA;AAMA;AAGO,IAAM,qBAAqB;AAC3B,IAAM,wBAAwB;AAG9B,IAAM,sBAAsB;AAC5B,IAAM,+BAA+B;AAKrC,IAAM,6BAA6B;AAInC,IAAM,2BAA2B;AAEjC,IAAM,2BAA2B;AAGjC,IAAM,uBAAuB;AAC7B,IAAM,0BAA0B;AAGhC,IAAM,eAAe;AACrB,IAAM,4BAA4B;AAGlC,IAAM,+BAA+B;AAGrC,IAAM,sBAAsB;AAI5B,IAAM,yBAAyB,kBAAkB;AASjD,IAAM,uBAAuB;AAAA;AAAA;;;AC0C7B,SAAS,eACd,QAC4B;AAC5B,SAAO,cAAc,UAAU,EAAE,gBAAgB;AACnD;AAOO,SAAS,eACd,QACsB;AACtB,SAAO,gBAAgB;AACzB;AA7GA,IAmHa;AAnHb;AAAA;AAAA;AAAA;AAmHO,IAAM,gBAA4B;AAAA,MACvC,SAAS;AAAA,MACT,MAAM;AAAA,QACJ,WAAW;AAAA,QACX,cAAc;AAAA,QACd,aAAa;AAAA,QACb,oBAAoB;AAAA,MACtB;AAAA,MACA,UAAU;AAAA,QACR,QAAQ;AAAA;AAAA,QACR,aAAa;AAAA;AAAA,MACf;AAAA,MACA,KAAK;AAAA,QACH,MAAM;AAAA,QACN,WAAW;AAAA,QACX,qBAAqB;AAAA,MACvB;AAAA,MACA,cAAc;AAAA,QACZ,SAAS;AAAA,QACT,gBAAgB;AAAA,MAClB;AAAA,MACA,cAAc;AAAA,QACZ,SAAS;AAAA;AAAA,QACT,YAAY;AAAA,MACd;AAAA,MACA,YAAY,CAAC;AAAA;AAAA,IACf;AAAA;AAAA;;;AC7IA,OAAO,QAAQ;AACf,OAAO,UAAU;AAOV,SAAS,eAAe,QAAsB;AAMnD,MAAI,CAAC,QAAQ;AACX,WAAO;AAAA,EACT;AAGA,MAAI,OAAO,eAAe,UAAa,CAAC,OAAO,UAAU;AACvD,WAAO;AAAA,EACT;AAGA,MAAI,OAAO,eAAe,UAAa,OAAO,aAAa,QAAW;AACpE,WAAO;AAAA,EACT;AAGA,MAAI,OAAO,aAAa,QAAW;AACjC,WAAO;AAAA,EACT;AAGA,MAAI,OAAO,WAAW,OAAO,QAAQ,WAAW,KAAK,GAAG;AACtD,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAKO,SAAS,cAAc,WAA+D;AAE3F,QAAM,YAAwB;AAAA,IAC5B,SAAS;AAAA,IACT,MAAM;AAAA,MACJ,WAAY,UAAkB,UAAU,aAAc,UAAkB,MAAM,aAAa,cAAc,KAAK;AAAA,MAC9G,cAAe,UAAkB,UAAU,gBAAiB,UAAkB,MAAM,gBAAgB,cAAc,KAAK;AAAA,MACvH,aAAc,UAAkB,UAAU,eAAgB,UAAkB,MAAM,eAAe,cAAc,KAAK;AAAA,MACpH,oBAAqB,UAAkB,UAAU,sBAAuB,
UAAkB,MAAM,sBAAsB,cAAc,KAAK;AAAA,IAC3I;AAAA,IACA,UAAU;AAAA,MACR,QAAS,UAAkB,UAAU,UAAU,cAAc,SAAS;AAAA,MACtE,aAAc,UAAkB,UAAU,eAAe,cAAc,SAAS;AAAA,IAClF;AAAA,IACA,KAAK;AAAA,MACH,MAAM,UAAU,KAAK,QAAQ,cAAc,IAAI;AAAA,MAC/C,WAAW,UAAU,KAAK,aAAa,cAAc,IAAI;AAAA,MACzD,qBAAqB,UAAU,KAAK,uBAAuB,cAAc,IAAI;AAAA,IAC/E;AAAA,IACA,cAAc;AAAA,MACZ,SAAS,UAAU,cAAc,WAAW,cAAc,aAAa;AAAA,MACvE,gBAAgB,UAAU,cAAc,kBAAkB,cAAc,aAAa;AAAA,IACvF;AAAA,IACA,cAAc;AAAA,MACZ,SAAS,UAAU,cAAc,WAAW,cAAc,aAAa;AAAA,MACvE,YAAY,UAAU,cAAc,cAAc,cAAc,aAAa;AAAA,IAC/E;AAAA,IACA,YAAa,UAAkB,cAAc,CAAC;AAAA,EAChD;AAGA,MAAK,UAAkB,YAAY,UAAU,WAAW,WAAW,GAAG;AACpE,UAAM,mBAAsC;AAAA,MAC1C,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,QAAQ;AAAA,QACN,SAAU,UAAkB,SAAS,WAAW,CAAC,6CAA6C;AAAA,QAC9F,SAAU,UAAkB,SAAS,WAAW;AAAA,UAC9C;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,cAAU,WAAW,KAAK,gBAAgB;AAAA,EAC5C,WAAW,UAAU,WAAW,WAAW,GAAG;AAE5C,UAAM,mBAAsC;AAAA,MAC1C,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,QAAQ;AAAA,QACN,SAAS,CAAC,6CAA6C;AAAA,QACvD,SAAS;AAAA,UACP;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,cAAU,WAAW,KAAK,gBAAgB;AAAA,EAC5C;AAEA,SAAO;AACT;AAKA,eAAsB,kBAAkB,UAAkB,QAAQ,IAAI,GAInE;AACD,QAAM,aAAa,KAAK,KAAK,SAAS,mBAAmB;AAEzD,MAAI;AAEF,UAAM,gBAAgB,MAAM,GAAG,SAAS,YAAY,OAAO;AAC3D,UAAM,YAAY,KAAK,MAAM,aAAa;AAG1C,QAAI,CAAC,eAAe,SAAS,GAAG;AAC9B,aAAO;AAAA,QACL,UAAU;AAAA,QACV,QAAQ;AAAA,MACV;AAAA,IACF;AAGA,UAAM,YAAY,cAAc,SAAS;AAGzC,UAAM,aAAa,GAAG,UAAU;AAChC,UAAM,GAAG,SAAS,YAAY,UAAU;AAGxC,UAAM,GAAG,UAAU,YAAY,KAAK,UAAU,WAAW,MAAM,CAAC,IAAI,MAAM,OAAO;AAEjF,WAAO;AAAA,MACL,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV;AAAA,EACF,SAAS,OAAO;AAEd,QAAK,MAAgC,SAAS,UAAU;AACtD,aAAO;AAAA,QACL,UAAU;AAAA,QACV,QAAQ;AAAA,MACV;AAAA,IACF;AACA,UAAM;AAAA,EACR;AACF;AA3KA;AAAA;AAAA;AAEA;AACA;AAAA;AAAA;;;ACOO,SAAS,gBAAgB,UAAsB,MAAuC;AAC3F,SAAO;AAAA,IACL,SAAS,KAAK,WAAW,SAAS;AAAA,IAClC,MAAM;AAAA,MACJ,GAAG,SAAS;AAAA,MACZ,GAAG,KAAK;AAAA,IACV;AAAA,IACA,UAAU;AAAA,MACR,GAAG,SAAS;AAAA,MACZ,GAAG,KAAK;AAAA,IACV;AAAA,IACA,KAAK;AAAA,MACH,GAAG,SAAS;AAAA,MACZ,GAAG,KAAK;AAAA,IACV;AAAA,IACA,cAAc;AAAA,MACZ,GAAG,SAAS;AAAA,MACZ,GAAG,KAAK;AAAA,IACV;AAAA,IACA,cAAc;AAAA,MACZ,GAAG,SAAS;AAAA,MACZ,GAAG,KAAK;AAAA,IACV;AAAA,IACA,YAAY,KAAK,cAAc,SAAS;AAAA,EAC1C;AACF;AAUO,SAAS,gBAAgB,QAA6B,OAAsC;AACjG,QAAM,YAAsB,CAAC;AAG7B,aAAW,OAAO,OAAO,KAAK,KAAK,GAAG;AACpC,QAAI,EAAE,OAAO,SAAS;AACpB,gBAAU,KAAK,GAAG;AAClB;AAAA,IACF;AAGA,QAAI,OAAO,MAAM,GAAG,MAAM,YAAY,MAAM,GAAG,MAAM,QAAQ,CAAC,MAAM,QAAQ,MAAM,GAAG,CAAC,GAAG;AACvF,YAAM,gBAAiB,OAAO,GAAG,KAA6B,CAAC;AAC/D,YAAM,eAAe,MAAM,GAAG;AAE9B,iBAAW,aAAa,OAAO,KAAK,YAAY,GAAG;AACjD,YAAI,EAAE,aAAa,gBAAgB;AACjC,oBAAU,KAAK,GAAG,GAAG,IAAI,SAAS,EAAE;AAAA,QACtC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AArEA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;AC8GO,SAAS,UACd,OACA,SACA,mBACW;AACX,QAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,QAAM,QAAQ,iBAAiB,QAAQ,MAAM,QAAQ;AAErD,QAAM,eAAe,IAAI;AAAA,IACvB,GAAG,OAAO,KAAK,OAAO;AAAA;AAAA,IAEtB;AAAA,EACF;AAGA,MAAI,OAAO;AACT,iBAAa,QAAQ,GAAG,aAAa,KAAK;AAAA;AAAA;AAAA,EAAmB,KAAK;AAAA,EACpE;AAEA,SAAO;AACT;AAlIA,IAaa,WAiDA,aAwBA,gBAUA;AAhGb;AAAA;AAAA;AAAA;AAGA;AAUO,IAAM,YAAN,cAAwB,MAAM;AAAA,MACnC,YACE,SACgB,MACA,SACA,WAA0B,UAC1B,cAAuB,MACvB,YAAqB,OACrC;AACA,cAAM,OAAO;AANG;AACA;AACA;AACA;AACA;AAGhB,aAAK,OAAO;AAGZ,YAAI,MAAM,mBAAmB;AAC3B,gBAAM,kBAAkB,MAAM,KAAK,WAAW;AAAA,QAChD;AAAA,MACF;AAAA;AAAA;AAAA;AAAA,MAKA,SAAS;AACP,eAAO;AAAA,UACL,OAAO,KAAK;AAAA,UACZ,MAAM,KAAK;AAAA,UACX,UAAU,KAAK;AAAA,UACf,aAAa,KAAK;AAAA,UAClB,SAAS,KAAK;AAAA,QAChB;AAAA,MACF;AAAA;A
AAA;AAAA;AAAA,MAKA,cAAuB;AACrB,eAAO,KAAK;AAAA,MACd;AAAA;AAAA;AAAA;AAAA,MAKA,gBAAyB;AACvB,eAAO,KAAK;AAAA,MACd;AAAA,IACF;AAKO,IAAM,cAAN,cAA0B,UAAU;AAAA,MACzC,YAAY,SAAiB,SAAmC;AAC9D,cAAM,gDAAuC,SAAS,UAAU,MAAM,KAAK;AAC3E,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAmBO,IAAM,iBAAN,cAA6B,UAAU;AAAA,MAC5C,YAAY,SAAiB,SAAmC;AAC9D,cAAM,0EAAoD,SAAS,QAAQ,MAAM,IAAI;AACrF,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAKO,IAAM,gBAAN,cAA4B,UAAU;AAAA,MAC3C,YAAY,SAAiB,SAAmC;AAC9D,cAAM,gDAAuC,SAAS,QAAQ,MAAM,IAAI;AACxE,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAAA;AAAA;;;ACrGA,OAAOC,SAAQ;AACf,OAAOC,WAAU;AADjB,IAgCa,eAujBA;AAvlBb;AAAA;AAAA;AAEA;AACA;AACA;AACA;AA2BO,IAAM,gBAAN,MAAM,eAAc;AAAA,MACzB,OAAwB,kBAAkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAU1C,MAAM,KAAK,UAAkB,QAAQ,IAAI,GAAwB;AAC/D,cAAM,aAAa,KAAK,cAAc,OAAO;AAE7C,YAAI;AACF,gBAAM,gBAAgB,MAAMD,IAAG,SAAS,YAAY,OAAO;AAC3D,gBAAM,aAAa,KAAK,MAAM,aAAa;AAG3C,cAAI,KAAK,eAAe,UAAU,GAAG;AACnC,oBAAQ,IAAI,qDAA8C;AAE1D,kBAAM,SAAS,MAAM,KAAK,QAAQ,OAAO;AAEzC,gBAAI,OAAO,YAAY,OAAO,YAAY;AACxC,oBAAM,iBAAiBC,MAAK,SAAS,OAAO,UAAU;AACtD,sBAAQ,IAAI,8CAAyC,cAAc,EAAE;AACrE,sBAAQ,IAAI,+DAAwD;AAAA,YACtE;AAEA,mBAAO,OAAO;AAAA,UAChB;AAGA,gBAAM,eAAe,gBAAgB,eAAe,UAAiC;AAGrF,gBAAM,aAAa,KAAK,SAAS,YAAY;AAC7C,cAAI,CAAC,WAAW,OAAO;AACrB,kBAAM,IAAI;AAAA,cACR;AAAA,EAA2B,WAAW,OAAO,KAAK,IAAI,CAAC;AAAA,cACvD,EAAE,QAAQ,WAAW,QAAQ,UAAU,WAAW,SAAS;AAAA,YAC7D;AAAA,UACF;AAGA,cAAI,WAAW,SAAS,SAAS,GAAG;AAClC,oBAAQ,KAAK,uCAA6B;AAC1C,uBAAW,SAAS,QAAQ,aAAW,QAAQ,KAAK,MAAM,OAAO,EAAE,CAAC;AAAA,UACtE;AAEA,iBAAO;AAAA,QACT,SAAS,OAAO;AACd,cAAK,MAAgC,SAAS,UAAU;AAEtD,mBAAO;AAAA,UACT;AAEA,cAAI,iBAAiB,aAAa;AAChC,kBAAM;AAAA,UACR;AAEA,cAAI,iBAAiB,aAAa;AAChC,kBAAM,IAAI;AAAA,cACR;AAAA,cACA,EAAE,MAAM,YAAY,eAAe,MAAM,QAAQ;AAAA,YACnD;AAAA,UACF;AAEA,gBAAM,UAAU,OAAO,gCAAgC,EAAE,MAAM,WAAW,CAAC;AAAA,QAC7E;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAUA,MAAM,KAAK,SAAiB,QAAmC;AAC7D,cAAM,aAAa,KAAK,cAAc,OAAO;AAG7C,cAAM,aAAa,KAAK,SAAS,MAAM;AACvC,YAAI,CAAC,WAAW,OAAO;AACrB,gBAAM,IAAI;AAAA,YACR;AAAA,EAAuC,WAAW,OAAO,KAAK,IAAI,CAAC;AAAA,YACnE,EAAE,QAAQ,WAAW,OAAO;AAAA,UAC9B;AAAA,QACF;AAEA,YAAI;AACF,gBAAM,aAAa,KAAK,UAAU,QAAQ,MAAM,CAAC,IAAI;AACrD,gBAAMD,IAAG,UAAU,YAAY,YAAY,OAAO;AAAA,QACpD,SAAS,OAAO;AACd,gBAAM,UAAU,OAAO,gCAAgC,EAAE,MAAM,WAAW,CAAC;AAAA,QAC7E;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,MAAM,OAAO,UAAkB,QAAQ,IAAI,GAAqB;AAC9D,cAAM,aAAa,KAAK,cAAc,OAAO;AAC7C,YAAI;AACF,gBAAMA,IAAG,OAAO,UAAU;AAC1B,iBAAO;AAAA,QACT,QAAQ;AACN,iBAAO;AAAA,QACT;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAUA,MAAM,QAAQ,UAAkB,QAAQ,IAAI,GAA6B;AACvE,cAAM,aAAa,KAAK,cAAc,OAAO;AAE7C,YAAI;AAEF,gBAAM,gBAAgB,MAAMA,IAAG,SAAS,YAAY,OAAO;AAC3D,gBAAM,YAAY,KAAK,MAAM,aAAa;AAG1C,cAAI,CAAC,KAAK,eAAe,SAAS,GAAG;AACnC,mBAAO;AAAA,cACL,UAAU;AAAA,cACV,QAAQ;AAAA,YACV;AAAA,UACF;AAGA,gBAAM,YAAY,cAAiB,SAAS;AAG5C,gBAAM,aAAa,KAAK,SAAS,SAAS;AAC1C,cAAI,CAAC,WAAW,OAAO;AACrB,kBAAM,IAAI;AAAA,cACR;AAAA,EAA8C,WAAW,OAAO,KAAK,IAAI,CAAC;AAAA,cAC1E,EAAE,QAAQ,WAAW,OAAO;AAAA,YAC9B;AAAA,UACF;AAGA,gBAAM,aAAa,GAAG,UAAU;AAChC,gBAAMA,IAAG,SAAS,YAAY,UAAU;AAGxC,gBAAM,KAAK,KAAK,SAAS,SAAS;AAElC,iBAAO;AAAA,YACL,UAAU;AAAA,YACV;AAAA,YACA,QAAQ;AAAA,UACV;AAAA,QACF,SAAS,OAAO;AACd,cAAK,MAAgC,SAAS,UAAU;AACtD,mBAAO;AAAA,cACL,UAAU;AAAA,cACV,QAAQ;AAAA,YACV;AAAA,UACF;AAEA,cAAI,iBAAiB,aAAa;AAChC,kBAAM;AAAA,UACR;AAEA,gBAAM,UAAU,OAAO,kCAAkC,EAAE,MAAM,WAAW,CAAC;AAAA,QAC/E;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,eAAe,QAA0B;AACvC,eAAO,eAAoB,MAAM;AAAA,MACnC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MASA,SAAS,QAAmC;AAC1C,cAAM,SAAmB,CAAC;AAC1B,cAAM,WAAqB,CAAC;AAG5B,YAAI,CAAC,UAAU,OAAO,WAAW,UAAU;AACzC,iBAAO;AAAA,YACL,OAAO;AAAA,YACP,QAA
Q,CAAC,iCAAiC;AAAA,YAC1C,UAAU,CAAC;AAAA,UACb;AAAA,QACF;AAEA,cAAM,MAAM;AAGZ,YAAI,CAAC,IAAI,SAAS;AAChB,iBAAO,KAAK,iCAAiC;AAAA,QAC/C;AAGA,YAAI,eAAe,GAAoC,GAAG;AACxD,eAAK,qBAAqB,KAAmB,QAAQ,QAAQ;AAAA,QAC/D,WAAW,eAAe,GAAoC,GAAG;AAC/D,eAAK,qBAAqB,KAAyB,QAAQ,QAAQ;AAAA,QACrE,OAAO;AACL,iBAAO,KAAK,wFAAwF;AAAA,QACtG;AAEA,eAAO;AAAA,UACL,OAAO,OAAO,WAAW;AAAA,UACzB;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MASA,gBAAgB,QAA+C;AAC7D,cAAM,SAAmB,CAAC;AAC1B,cAAM,WAAqB,CAAC;AAG5B,YAAI,OAAO,MAAM;AACf,eAAK,mBAAmB,OAAO,MAAM,QAAQ,QAAQ;AAAA,QACvD;AAGA,YAAI,OAAO,KAAK;AACd,eAAK,kBAAkB,OAAO,KAAK,QAAQ,QAAQ;AAAA,QACrD;AAGA,YAAI,OAAO,cAAc;AACvB,eAAK,2BAA2B,OAAO,cAAc,QAAQ,QAAQ;AAAA,QACvE;AAGA,YAAI,OAAO,cAAc;AACvB,eAAK,2BAA2B,OAAO,cAAc,QAAQ,QAAQ;AAAA,QACvE;AAGA,YAAI,OAAO,YAAY;AACrB,eAAK,mBAAmB,OAAO,YAAY,QAAQ,QAAQ;AAAA,QAC7D;AAEA,eAAO;AAAA,UACL,OAAO,OAAO,WAAW;AAAA,UACzB;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA;AAAA;AAAA;AAAA,MAKQ,cAAc,SAAyB;AAC7C,eAAOC,MAAK,KAAK,SAAS,eAAc,eAAe;AAAA,MACzD;AAAA;AAAA;AAAA;AAAA,MAKQ,qBACN,QACA,QACA,UACM;AAEN,YAAI,CAAC,OAAO,MAAM;AAChB,iBAAO,KAAK,8BAA8B;AAC1C;AAAA,QACF;AACA,aAAK,mBAAmB,OAAO,MAAM,QAAQ,QAAQ;AAGrD,YAAI,CAAC,OAAO,KAAK;AACf,iBAAO,KAAK,6BAA6B;AACzC;AAAA,QACF;AACA,aAAK,kBAAkB,OAAO,KAAK,QAAQ,QAAQ;AAGnD,YAAI,CAAC,OAAO,cAAc;AACxB,iBAAO,KAAK,sCAAsC;AAClD;AAAA,QACF;AACA,aAAK,2BAA2B,OAAO,cAAc,QAAQ,QAAQ;AAGrE,YAAI,CAAC,OAAO,cAAc;AACxB,iBAAO,KAAK,sCAAsC;AAClD;AAAA,QACF;AACA,aAAK,2BAA2B,OAAO,cAAc,QAAQ,QAAQ;AAGrE,YAAI,CAAC,OAAO,YAAY;AACtB,iBAAO,KAAK,oCAAoC;AAChD;AAAA,QACF;AACA,aAAK,mBAAmB,OAAO,YAAY,QAAQ,QAAQ;AAAA,MAC7D;AAAA;AAAA;AAAA;AAAA,MAKQ,qBACN,QACA,QACA,UACM;AACN,iBAAS,KAAK,sFAAsF;AAGpG,YAAI,CAAC,OAAO,UAAU;AACpB,iBAAO,KAAK,kCAAkC;AAC9C;AAAA,QACF;AAEA,cAAM,EAAE,SAAS,IAAI;AAErB,YAAI,OAAO,SAAS,cAAc,YAAY,SAAS,aAAa,GAAG;AACrE,iBAAO,KAAK,8CAA8C;AAAA,QAC5D;AAEA,YAAI,OAAO,SAAS,iBAAiB,YAAY,SAAS,eAAe,GAAG;AAC1E,iBAAO,KAAK,qDAAqD;AAAA,QACnE;AAEA,YAAI,OAAO,SAAS,gBAAgB,YAAY,SAAS,cAAc,KAAK,SAAS,cAAc,IAAI;AACrG,iBAAO,KAAK,+CAA+C;AAAA,QAC7D;AAEA,YAAI,OAAO,SAAS,uBAAuB,YAAY,SAAS,sBAAsB,GAAG;AACvF,iBAAO,KAAK,uDAAuD;AAAA,QACrE;AAGA,YAAI,OAAO,KAAK;AACd,eAAK,kBAAkB,OAAO,KAAK,QAAQ,QAAQ;AAAA,QACrD;AAAA,MACF;AAAA;AAAA;AAAA;AAAA,MAKQ,mBACN,MACA,QACA,UACM;AACN,YAAI,KAAK,cAAc,QAAW;AAChC,cAAI,OAAO,KAAK,cAAc,YAAY,KAAK,aAAa,GAAG;AAC7D,mBAAO,KAAK,0CAA0C;AAAA,UACxD,WAAW,KAAK,YAAY,IAAI;AAC9B,qBAAS,KAAK,kFAAkF;AAAA,UAClG,WAAW,KAAK,YAAY,KAAK;AAC/B,qBAAS,KAAK,wEAAwE;AAAA,UACxF;AAAA,QACF;AAEA,YAAI,KAAK,iBAAiB,QAAW;AACnC,cAAI,OAAO,KAAK,iBAAiB,YAAY,KAAK,eAAe,GAAG;AAClE,mBAAO,KAAK,iDAAiD;AAAA,UAC/D;AAAA,QACF;AAEA,YAAI,KAAK,gBAAgB,QAAW;AAClC,cAAI,OAAO,KAAK,gBAAgB,YAAY,KAAK,cAAc,KAAK,KAAK,cAAc,IAAI;AACzF,mBAAO,KAAK,2CAA2C;AAAA,UACzD;AAAA,QACF;AAEA,YAAI,KAAK,uBAAuB,QAAW;AACzC,cAAI,OAAO,KAAK,uBAAuB,YAAY,KAAK,sBAAsB,GAAG;AAC/E,mBAAO,KAAK,mDAAmD;AAAA,UACjE,WAAW,KAAK,qBAAqB,KAAK;AACxC,qBAAS,KAAK,4EAA4E;AAAA,UAC5F;AAAA,QACF;AAAA,MACF;AAAA;AAAA;AAAA;AAAA,MAKQ,kBACN,KACA,QACA,WACM;AACN,YAAI,IAAI,SAAS,QAAW;AAC1B,cAAI,OAAO,IAAI,SAAS,YAAY,IAAI,OAAO,QAAQ,IAAI,OAAO,OAAO;AACvE,mBAAO,KAAK,yCAAyC;AAAA,UACvD;AAAA,QACF;AAEA,YAAI,IAAI,cAAc,QAAW;AAC/B,cAAI,IAAI,cAAc,WAAW,IAAI,cAAc,UAAU;AAC3D,mBAAO,KAAK,kDAAkD;AAAA,UAChE;AAAA,QACF;AAEA,YAAI,IAAI,wBAAwB,QAAW;AACzC,cAAI,OAAO,IAAI,wBAAwB,WAAW;AAChD,mBAAO,KAAK,2CAA2C;AAAA,UACzD;AAAA,QACF;AAAA,MACF;AAAA;AAAA;AAAA;AAAA,MAKQ,2BACN,cACA,QACA,WACM;AACN,YAAI,aAAa,YAAY,QAAW;AACtC,cAAI,OAAO,aAAa,YAAY,WAAW;AAC7C,mBAAO,KAAK,wCAAwC;AAAA,UACtD;AAAA,QACF;AAEA,YAAI,aAAa,mBAAmB,QAAW;AAC7C,cAAI,OAAO,aAAa,mBAAmB,YAAY,aAAa,iBAAiB,KAAK;AACxF,mBAAO,KAAK,oDAAoD;AAAA,UAClE,WAAW,aAAa,iBAAiB,KAAM;AAC7C,sBAAU,KAA
K,8EAA8E;AAAA,UAC/F;AAAA,QACF;AAAA,MACF;AAAA;AAAA;AAAA;AAAA,MAKQ,2BACN,cACA,QACA,UACM;AACN,YAAI,aAAa,YAAY,QAAW;AACtC,cAAI,OAAO,aAAa,YAAY,WAAW;AAC7C,mBAAO,KAAK,wCAAwC;AAAA,UACtD;AAAA,QACF;AAEA,YAAI,aAAa,eAAe,QAAW;AACzC,cAAI,OAAO,aAAa,eAAe,YAAY,aAAa,aAAa,GAAG;AAC9E,mBAAO,KAAK,uDAAuD;AAAA,UACrE,WAAW,aAAa,aAAa,KAAK;AACxC,qBAAS,KAAK,qFAAqF;AAAA,UACrG;AAAA,QACF;AAAA,MACF;AAAA;AAAA;AAAA;AAAA,MAKQ,mBACN,YACA,QACA,UACM;AACN,YAAI,CAAC,MAAM,QAAQ,UAAU,GAAG;AAC9B,iBAAO,KAAK,6BAA6B;AACzC;AAAA,QACF;AAEA,mBAAW,QAAQ,CAAC,WAAW,UAAU;AACvC,cAAI,CAAC,aAAa,OAAO,cAAc,UAAU;AAC/C,mBAAO,KAAK,cAAc,KAAK,qBAAqB;AACpD;AAAA,UACF;AAEA,gBAAM,KAAK;AAGX,cAAI,CAAC,GAAG,MAAM;AACZ,mBAAO,KAAK,cAAc,KAAK,gCAAgC;AAAA,UACjE;AAEA,cAAI,GAAG,SAAS,QAAW;AACzB,mBAAO,KAAK,cAAc,KAAK,gCAAgC;AAAA,UACjE,WAAW,OAAO,GAAG,SAAS,UAAU;AACtC,mBAAO,KAAK,cAAc,KAAK,yBAAyB;AAAA,UAC1D,WAAWA,MAAK,WAAW,GAAG,IAAI,GAAG;AACnC,mBAAO,KAAK,cAAc,KAAK,iCAAiC,GAAG,IAAI,EAAE;AAAA,UAC3E;AAEA,cAAI,GAAG,YAAY,QAAW;AAC5B,mBAAO,KAAK,cAAc,KAAK,mCAAmC;AAAA,UACpE,WAAW,OAAO,GAAG,YAAY,WAAW;AAC1C,mBAAO,KAAK,cAAc,KAAK,6BAA6B;AAAA,UAC9D;AAEA,cAAI,CAAC,GAAG,QAAQ;AACd,mBAAO,KAAK,cAAc,KAAK,kCAAkC;AAAA,UACnE,OAAO;AACL,iBAAK,wBAAwB,GAAG,QAAQ,cAAc,KAAK,YAAY,QAAQ,QAAQ;AAAA,UACzF;AAAA,QACF,CAAC;AAAA,MACH;AAAA;AAAA;AAAA;AAAA,MAKQ,wBACN,QACA,QACA,QACA,WACM;AACN,YAAI,CAAC,UAAU,OAAO,WAAW,UAAU;AACzC,iBAAO,KAAK,GAAG,MAAM,oBAAoB;AACzC;AAAA,QACF;AAGA,YAAI,CAAC,MAAM,QAAQ,OAAO,OAAO,GAAG;AAClC,iBAAO,KAAK,GAAG,MAAM,2BAA2B;AAAA,QAClD,OAAO;AACL,iBAAO,QAAQ,QAAQ,CAAC,SAAkB,MAAc;AACtD,gBAAI,OAAO,YAAY,UAAU;AAC/B,qBAAO,KAAK,GAAG,MAAM,YAAY,CAAC,oBAAoB;AAAA,YACxD;AAAA,UACF,CAAC;AAAA,QACH;AAGA,YAAI,CAAC,MAAM,QAAQ,OAAO,OAAO,GAAG;AAClC,iBAAO,KAAK,GAAG,MAAM,2BAA2B;AAAA,QAClD,OAAO;AACL,iBAAO,QAAQ,QAAQ,CAAC,SAAkB,MAAc;AACtD,gBAAI,OAAO,YAAY,UAAU;AAC/B,qBAAO,KAAK,GAAG,MAAM,YAAY,CAAC,oBAAoB;AAAA,YACxD;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAGO,IAAM,gBAAgB,IAAI,cAAc;AAAA;AAAA;;;ACvlB/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAAS,YAAY;AACrB,SAAS,iBAAiB;AAC1B,OAAOC,SAAQ;AACf,OAAOC,WAAU;AAUjB,eAAsB,UAAU,SAAmC;AACjE,MAAI;AACF,UAAM,SAASA,MAAK,KAAK,SAAS,MAAM;AACxC,UAAMD,IAAG,OAAO,MAAM;AACtB,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AASA,eAAsB,iBAAiB,SAAkC;AACvE,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAM,UAAU,mCAAmC;AAAA,MACpE,KAAK;AAAA,MACL,SAAS;AAAA;AAAA,IACX,CAAC;AACD,WAAO,OAAO,KAAK;AAAA,EACrB,SAAS,OAAO;AACd,UAAM,IAAI,MAAM,iCAAiC,KAAK,EAAE;AAAA,EAC1D;AACF;AASA,eAAsB,iBAAiB,SAAkC;AACvE,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAM,UAAU,sBAAsB;AAAA,MACvD,KAAK;AAAA,MACL,SAAS;AAAA,IACX,CAAC;AACD,WAAO,OAAO,KAAK;AAAA,EACrB,SAAS,OAAO;AACd,UAAM,IAAI,MAAM,iCAAiC,KAAK,EAAE;AAAA,EAC1D;AACF;AAWA,eAAsB,gBACpB,SACA,SACA,OACmB;AACnB,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAM;AAAA,MACvB,wBAAwB,OAAO,MAAM,KAAK;AAAA,MAC1C;AAAA,QACE,KAAK;AAAA,QACL,SAAS;AAAA;AAAA,MACX;AAAA,IACF;AAEA,UAAM,QAAQ,OACX,KAAK,EACL,MAAM,IAAI,EACV,OAAO,OAAO,EACd,IAAI,UAAQC,MAAK,KAAK,SAAS,IAAI,CAAC;AAEvC,WAAO;AAAA,EACT,SAAS,OAAO;AACd,UAAM,IAAI,MAAM,gCAAgC,KAAK,EAAE;AAAA,EACzD;AACF;AAUA,eAAsB,wBACpB,SACA,WACmB;AACnB,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAM;AAAA,MACvB,+CAA+C,SAAS;AAAA,MACxD;AAAA,QACE,KAAK;AAAA,QACL,SAAS;AAAA,MACX;AAAA,IACF;AAEA,UAAM,QAAQ,OACX,KAAK,EACL,MAAM,IAAI,EACV,OAAO,OAAO,EACd,IAAI,UAAQA,MAAK,KAAK,SAAS,IAAI,CAAC;AAEvC,WAAO;AAAA,EACT,SAAS,OAAO;AACd,UAAM,IAAI,MAAM,0CAA0C,KAAK,EAAE;AAAA,EACnE;AACF;AAYA,eAAsB,8BACpB,SACA,YACA,UACmB;AACnB,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAM;AAAA,MACvB,wBAAwB,UAAU,IAAI,QAAQ;AAAA,MAC9C;AAAA,QACE,KAAK;AAAA,QACL,SAAS;AAAA,MACX;AAAA,IACF;AAEA,UAAM,QAAQ,OACX,KAAK,EACL,MAAM,IAAI,EACV,OAAO,OAAO,EACd,IAAI,UAAQA,MAAK,KAAK,SAAS,IAAI,CAAC;AAEvC,WA
AO;AAAA,EACT,SAAS,OAAO;AACd,UAAM,IAAI,MAAM,gDAAgD,KAAK,EAAE;AAAA,EACzE;AACF;AAOA,eAAsB,iBAAmC;AACvD,MAAI;AACF,UAAM,UAAU,iBAAiB,EAAE,SAAS,IAAK,CAAC;AAClD,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAjLA,IAKM;AALN;AAAA;AAAA;AAKA,IAAM,YAAY,UAAU,IAAI;AAAA;AAAA;;;ACLhC,OAAOC,UAAQ;AACf,OAAOC,YAAU;AAUjB,eAAsB,iBAAiB,WAAkC;AACvE,MAAI;AACF,UAAM,kBAAkBA,OAAK,KAAK,WAAW,YAAY;AACzD,UAAM,YAAY,KAAK,IAAI,EAAE,SAAS;AACtC,UAAMD,KAAG,UAAU,iBAAiB,WAAW,OAAO;AAAA,EACxD,SAAS,OAAO;AAEd,YAAQ,MAAM,0CAA0C,KAAK,EAAE;AAAA,EACjE;AACF;AASA,eAAsB,gBAAgB,WAAoC;AACxE,MAAI;AACF,UAAM,kBAAkBC,OAAK,KAAK,WAAW,YAAY;AACzD,UAAM,UAAU,MAAMD,KAAG,SAAS,iBAAiB,OAAO;AAC1D,UAAM,YAAY,SAAS,QAAQ,KAAK,GAAG,EAAE;AAC7C,WAAO,MAAM,SAAS,IAAI,IAAI;AAAA,EAChC,SAAS,OAAO;AAEd,WAAO;AAAA,EACT;AACF;AAvCA,IAGM;AAHN,IAAAE,gBAAA;AAAA;AAAA;AAGA,IAAM,eAAe;AAAA;AAAA;;;ACHrB,SAAS,YAAY;AACrB,OAAO,YAAY;AACnB,OAAOC,UAAQ;AACf,OAAOC,YAAU;AAUjB,eAAsB,2BACpB,SACA,QACmB;AACnB,QAAM,WAAqB,CAAC;AAG5B,aAAW,aAAa,OAAO,YAAY;AACzC,QAAI,CAAC,UAAU,SAAS;AACtB;AAAA,IACF;AAEA,UAAM,iBAAiB,MAAM,cAAc,SAAS,SAAS;AAC7D,aAAS,KAAK,GAAG,cAAc;AAAA,EACjC;AAEA,SAAO;AACT;AAKA,eAAe,cACb,SACA,WACmB;AACnB,QAAM,gBAAgBA,OAAK,KAAK,SAAS,UAAU,IAAI;AAGvD,QAAM,gBAAgBA,OAAK,KAAK,eAAe,YAAY;AAC3D,MAAI,KAAK,OAAO;AAEhB,MAAI;AACF,UAAM,mBAAmB,MAAMD,KAAG,SAAS,eAAe,OAAO;AACjE,SAAK,OAAO,EAAE,IAAI,gBAAgB;AAAA,EACpC,SAAS,GAAG;AAEV,UAAM,oBAAoBC,OAAK,KAAK,SAAS,YAAY;AACzD,QAAI;AACF,YAAM,mBAAmB,MAAMD,KAAG,SAAS,mBAAmB,OAAO;AACrE,WAAK,OAAO,EAAE,IAAI,gBAAgB;AAAA,IACpC,SAASE,IAAG;AAAA,IAEZ;AAAA,EACF;AAGA,KAAG,IAAI;AAAA,IACL,GAAG,UAAU,OAAO;AAAA,IACpB;AAAA,EACF,CAAC;AAGD,QAAM,WAAqB,CAAC;AAE5B,aAAW,WAAW,UAAU,OAAO,SAAS;AAC9C,UAAM,QAAQ,MAAM,KAAK,SAAS;AAAA,MAChC,KAAK;AAAA,MACL,UAAU;AAAA;AAAA,MACV,OAAO;AAAA,MACP,QAAQ,UAAU,OAAO;AAAA,IAC3B,CAAC;AACD,aAAS,KAAK,GAAG,KAAK;AAAA,EACxB;AAGA,QAAM,cAAc,MAAM,KAAK,IAAI,IAAI,QAAQ,CAAC;AAGhD,SAAO,YACJ,OAAO,UAAQ,CAAC,GAAG,QAAQ,IAAI,CAAC,EAChC,IAAI,UAAQ;AAEX,WAAO,UAAU,SAAS,MACtB,OACAD,OAAK,KAAK,UAAU,MAAM,IAAI;AAAA,EACpC,CAAC;AACL;AAMA,eAAsB,aAAa,SAAyC;AAC1E,QAAM,EAAE,SAAS,kBAAkB,CAAC,GAAG,kBAAkB,CAAC,EAAE,IAAI;AAGhE,QAAM,gBAAgBA,OAAK,KAAK,SAAS,YAAY;AACrD,MAAI,KAAK,OAAO;AAEhB,MAAI;AACF,UAAM,mBAAmB,MAAMD,KAAG,SAAS,eAAe,OAAO;AACjE,SAAK,OAAO,EAAE,IAAI,gBAAgB;AAAA,EACpC,SAAS,GAAG;AAAA,EAEZ;AAGA,KAAG,IAAI;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,GAAG;AAAA,EACL,CAAC;AAGD,QAAM,WAAW,gBAAgB,SAAS,IACtC,kBACA,CAAC,mDAAmD;AAGxD,QAAM,WAAqB,CAAC;AAE5B,aAAW,WAAW,UAAU;AAC9B,UAAM,QAAQ,MAAM,KAAK,SAAS;AAAA,MAChC,KAAK;AAAA,MACL,UAAU;AAAA,MACV,OAAO;AAAA,MACP,QAAQ,CAAC,mBAAmB,SAAS;AAAA,IACvC,CAAC;AACD,aAAS,KAAK,GAAG,KAAK;AAAA,EACxB;AAGA,QAAM,cAAc,MAAM,KAAK,IAAI,IAAI,QAAQ,CAAC;AAGhD,SAAO,YAAY,OAAO,UAAQ;AAChC,UAAM,eAAeC,OAAK,SAAS,SAAS,IAAI;AAChD,WAAO,CAAC,GAAG,QAAQ,YAAY;AAAA,EACjC,CAAC;AACH;AAEO,SAAS,eAAe,UAA0B;AACvD,QAAM,MAAMA,OAAK,QAAQ,QAAQ,EAAE,YAAY;AAE/C,QAAM,cAAsC;AAAA,IAC1C,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,UAAU;AAAA,IACV,OAAO;AAAA,IACP,OAAO;AAAA,IACP,UAAU;AAAA,IACV,WAAW;AAAA,IACX,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,aAAa;AAAA,EACf;AAEA,SAAO,YAAY,GAAG,KAAK;AAC7B;AAxLA;AAAA;AAAA;AAAA;AAAA;;;ACkBO,SAAS,eACd,SACA,UACkB;AAClB,QAAM,UAA4B;AAAA,IAChC,WAAW,CAAC;AAAA,IACZ,SAAS,CAAC;AAAA,IACV,YAAY,CAAC;AAAA,EACf;AAEA,QAAM,iBAAiB,SAAS,YAAY;AAE5C,UAAQ,gBAAgB;AAAA,IACtB,KAAK;AAAA,IACL,KAAK;AACH,cAAQ,YAAY,mBAAmB,OAAO;AAC9C,cAAQ,UAAU,iBAAiB,OAAO;AAC1
C,cAAQ,aAAa,oBAAoB,OAAO;AAChD;AAAA,IAEF,KAAK;AAAA,IACL,KAAK;AACH,cAAQ,YAAY,mBAAmB,OAAO;AAC9C,cAAQ,UAAU,iBAAiB,OAAO;AAC1C;AAAA,IAEF,KAAK;AAAA,IACL,KAAK;AACH,cAAQ,YAAY,uBAAuB,OAAO;AAClD,cAAQ,UAAU,qBAAqB,OAAO;AAC9C;AAAA,IAEF,KAAK;AACH,cAAQ,YAAY,oBAAoB,OAAO;AAC/C,cAAQ,UAAU,kBAAkB,OAAO;AAC3C,cAAQ,aAAa,qBAAqB,OAAO;AACjD;AAAA,IAEF,KAAK;AAEH,cAAQ,YAAY,oBAAoB,OAAO;AAC/C,cAAQ,UAAU,qBAAqB,OAAO;AAC9C;AAAA,IAEF,KAAK;AACH,cAAQ,YAAY,mBAAmB,OAAO;AAC9C,cAAQ,aAAa,oBAAoB,OAAO;AAChD;AAAA,IAEF,KAAK;AACH,cAAQ,YAAY,qBAAqB,OAAO;AAChD,cAAQ,UAAU,mBAAmB,OAAO;AAC5C,cAAQ,aAAa,sBAAsB,OAAO;AAClD;AAAA,IAEF,KAAK;AAAA,IACL,KAAK;AACH,cAAQ,YAAY,uBAAuB,OAAO;AAClD,cAAQ,UAAU,qBAAqB,OAAO;AAC9C,cAAQ,aAAa,wBAAwB,OAAO;AACpD;AAAA,IAEF,KAAK;AAAA,IACL,KAAK;AACH,cAAQ,YAAY,qBAAqB,OAAO;AAChD,cAAQ,UAAU,mBAAmB,OAAO;AAC5C;AAAA,IAEF,KAAK;AAAA,IACL,KAAK;AACH,cAAQ,YAAY,qBAAqB,OAAO;AAChD;AAAA,EACJ;AAEA,SAAO;AACT;AAGA,SAAS,mBAAmB,SAA2B;AACrD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,kBAAkB,QAAQ,SAAS,qCAAqC;AAC9E,aAAW,SAAS,iBAAiB;AACnC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAGA,QAAM,eAAe,QAAQ,SAAS,8DAA8D;AACpG,aAAW,SAAS,cAAc;AAChC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAGA,QAAM,gBAAgB,QAAQ,SAAS,wCAAwC;AAC/E,aAAW,SAAS,eAAe;AAEjC,QAAI,CAAC,CAAC,MAAM,OAAO,SAAS,UAAU,OAAO,EAAE,SAAS,MAAM,CAAC,CAAC,GAAG;AACjE,YAAM,IAAI,MAAM,CAAC,CAAC;AAAA,IACpB;AAAA,EACF;AAGA,QAAM,gBAAgB,QAAQ,SAAS,8CAA8C;AACrF,aAAW,SAAS,eAAe;AACjC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,mBAAmB,SAA2B;AACrD,SAAO,mBAAmB,OAAO;AACnC;AAEA,SAAS,iBAAiB,SAA2B;AACnD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,eAAe,QAAQ,SAAS,8CAA8C;AACpF,aAAW,SAAS,cAAc;AAChC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,iBAAiB,SAA2B;AACnD,SAAO,iBAAiB,OAAO;AACjC;AAEA,SAAS,oBAAoB,SAA2B;AACtD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,mBAAmB,QAAQ,SAAS,kCAAkC;AAC5E,aAAW,SAAS,kBAAkB;AACpC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAGA,QAAM,cAAc,QAAQ,SAAS,iCAAiC;AACtE,aAAW,SAAS,aAAa;AAC/B,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAGA,SAAS,uBAAuB,SAA2B;AACzD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,kBAAkB,QAAQ,SAAS,mBAAmB;AAC5D,aAAW,SAAS,iBAAiB;AACnC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAGA,QAAM,eAAe,QAAQ,SAAS,2BAA2B;AACjE,aAAW,SAAS,cAAc;AAChC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,qBAAqB,SAA2B;AACvD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,eAAe,QAAQ,SAAS,2BAA2B;AACjE,aAAW,SAAS,cAAc;AAChC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAGA,SAAS,oBAAoB,SAA2B;AACtD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,kBAAkB,QAAQ,SAAS,wDAAwD;AACjG,aAAW,SAAS,iBAAiB;AACnC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,kBAAkB,SAA2B;AACpD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,eAAe,QAAQ,SAAS,gCAAgC;AACtE,aAAW,SAAS,cAAc;AAChC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,qBAAqB,SAA2B;AACvD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,mBAAmB,QAAQ,SAAS,oBAAoB;AAC9D,aAAW,SAAS,kBAAkB;AACpC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAGA,QAAM,eAAe,QAAQ,SAAS,gBAAgB;AACtD,aAAW,SAAS,cAAc;AAChC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAGA,SAAS,mBAAmB,SAA2B;AACrD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,kBAAkB,QAAQ,SAAS,4CAA4C;AACrF,aAAW,SAAS,iBAAiB;AACnC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,oBAAoB,SAA2B;AACtD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,mBAAmB,QAAQ,SAAS,gCAAgC;AAC1E,aAAW,SAAS,kBAAkB;AACpC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAGA,QAAM,gBAAgB,QAAQ,SAAS,6BAA6B;AACpE,aAAW,SAAS,eAAe;AACjC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAGA,SAAS,qBAAqB,SAA2B;AAC
vD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,gBAAgB,QAAQ,SAAS,gFAAgF;AACvH,aAAW,SAAS,eAAe;AACjC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,mBAAmB,SAA2B;AACrD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,eAAe,QAAQ,SAAS,8CAA8C;AACpF,aAAW,SAAS,cAAc;AAChC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,sBAAsB,SAA2B;AACxD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,mBAAmB,QAAQ,SAAS,kCAAkC;AAC5E,aAAW,SAAS,kBAAkB;AACpC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAGA,SAAS,uBAAuB,SAA2B;AACzD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,gBAAgB,QAAQ,SAAS,sGAAsG;AAC7I,aAAW,SAAS,eAAe;AACjC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,qBAAqB,SAA2B;AACvD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,eAAe,QAAQ,SAAS,uDAAuD;AAC7F,aAAW,SAAS,cAAc;AAChC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,wBAAwB,SAA2B;AAC1D,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,mBAAmB,QAAQ,SAAS,2CAA2C;AACrF,aAAW,SAAS,kBAAkB;AACpC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAGA,SAAS,qBAAqB,SAA2B;AACvD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,gBAAgB,QAAQ,SAAS,yBAAyB;AAChE,aAAW,SAAS,eAAe;AACjC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAEA,SAAS,mBAAmB,SAA2B;AACrD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,eAAe,QAAQ,SAAS,gBAAgB;AACtD,aAAW,SAAS,cAAc;AAChC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAGA,QAAM,gBAAgB,QAAQ,SAAS,iBAAiB;AACxD,aAAW,SAAS,eAAe;AACjC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAGA,SAAS,qBAAqB,SAA2B;AACvD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,kBAAkB,QAAQ,SAAS,6BAA6B;AACtE,aAAW,SAAS,iBAAiB;AACnC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAGA,QAAM,gBAAgB,QAAQ,SAAS,4BAA4B;AACnE,aAAW,SAAS,eAAe;AACjC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAGA,QAAM,eAAe,QAAQ,SAAS,2BAA2B;AACjE,aAAW,SAAS,cAAc;AAChC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAGA,SAAS,oBAAoB,SAA2B;AACtD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,cAAc,QAAQ,MAAM,mCAAmC;AACrE,MAAI,CAAC,YAAa,QAAO,CAAC;AAE1B,QAAM,gBAAgB,YAAY,CAAC;AAGnC,QAAM,qBAAqB,cAAc,SAAS,iCAAiC;AACnF,aAAW,SAAS,oBAAoB;AACtC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAGA,QAAM,gBAAgB,cAAc,SAAS,wBAAwB;AACrE,aAAW,SAAS,eAAe;AACjC,UAAM,IAAI,MAAM,CAAC,CAAC;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAGA,SAAS,qBAAqB,SAA2B;AACvD,QAAM,QAAQ,oBAAI,IAAY;AAG9B,QAAM,cAAc,QAAQ,MAAM,mCAAmC;AACrE,MAAI,CAAC,YAAa,QAAO,CAAC;AAE1B,QAAM,gBAAgB,YAAY,CAAC;AAGnC,QAAM,YAAY,cAAc,MAAM,uBAAuB;AAC7D,MAAI,WAAW;AACb,UAAM,IAAI,UAAU,CAAC,CAAC;AAAA,EACxB;AAGA,QAAM,uBAAuB,cAAc,MAAM,sBAAsB;AACvE,MAAI,sBAAsB;AACxB,UAAM,IAAI,cAAc;AAAA,EAC1B;AAEA,SAAO,MAAM,KAAK,KAAK;AACzB;AAlcA;AAAA;AAAA;AAAA;AAAA;;;ACAA,OAAO,YAAY;AACnB,OAAO,gBAAgB;AACvB,OAAO,gBAAgB;AACvB,OAAO,eAAe;AACtB,SAAS,eAAe;AA0BxB,SAAS,UAAU,UAAqC;AACtD,MAAI,CAAC,YAAY,IAAI,QAAQ,GAAG;AAC9B,UAAM,SAAS,IAAI,OAAO;AAC1B,UAAM,UAAU,eAAe,QAAQ;AAEvC,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI,MAAM,sCAAsC,QAAQ,EAAE;AAAA,IAClE;AAEA,WAAO,YAAY,OAAO;AAC1B,gBAAY,IAAI,UAAU,MAAM;AAAA,EAClC;AAEA,SAAO,YAAY,IAAI,QAAQ;AACjC;AAMO,SAASE,gBAAe,UAA4C;AAGzE,QAAM,MAAM,QAAQ,QAAQ,EAAE,MAAM,CAAC,EAAE,YAAY;AAEnD,UAAQ,KAAK;AAAA,IACX,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;AAKO,SAAS,eAAe,UAA2B;AACxD,SAAOA,gBAAe,QAAQ,MAAM;AACtC;AAaO,SAAS,SAAS,SAAiB,UAA6C;AACrF,MAAI;AACF,UAAM,SAAS,UAAU,QAAQ;AACjC,UAAM,OAAO,OAAO,MAAM,OAAO;AAGjC,QAAI,KAAK,SAAS,UAAU;AAC1B,aAAO;AAAA,QACL;AAAA,QACA,OAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO,EAAE,KAAK;AAAA,EAChB,SAAS,OAAO;AACd,WAAO;AAAA,MACL,MAAM;AAAA,MACN,OAAO,iBAAiB,QAAQ,
MAAM,UAAU;AAAA,IAClD;AAAA,EACF;AACF;AA7GA,IAUM,aAWA;AArBN;AAAA;AAAA;AAUA,IAAM,cAAc,oBAAI,IAA+B;AAWvD,IAAM,iBAAgE;AAAA,MACpE,YAAY,WAAW;AAAA,MACvB,YAAY;AAAA,MACZ,KAAK,UAAU;AAAA;AAAA,IACjB;AAAA;AAAA;;;ACVA,SAAS,oBACP,MACA,SACA,aACmB;AACjB,QAAM,WAAW,KAAK,kBAAkB,MAAM;AAC9C,MAAI,CAAC,SAAU,QAAO;AAEtB,SAAO;AAAA,IACL,MAAM,SAAS;AAAA,IACf,MAAM,cAAc,WAAW;AAAA,IAC/B,WAAW,KAAK,cAAc,MAAM;AAAA,IACpC,SAAS,KAAK,YAAY,MAAM;AAAA,IAChC;AAAA,IACA,WAAW,iBAAiB,MAAM,OAAO;AAAA,IACzC,YAAY,kBAAkB,MAAM,OAAO;AAAA,IAC3C,YAAY,kBAAkB,MAAM,OAAO;AAAA,IAC3C,YAAY,oBAAoB,IAAI;AAAA,EACtC;AACF;AAKF,SAAS,yBACP,MACA,SACA,aACmB;AAEjB,QAAM,SAAS,KAAK;AACpB,MAAI,OAAO;AAEX,MAAI,QAAQ,SAAS,uBAAuB;AAC1C,UAAM,WAAW,OAAO,kBAAkB,MAAM;AAChD,WAAO,UAAU,QAAQ;AAAA,EAC3B;AAEA,SAAO;AAAA,IACL;AAAA,IACA,MAAM,cAAc,WAAW;AAAA,IAC/B,WAAW,KAAK,cAAc,MAAM;AAAA,IACpC,SAAS,KAAK,YAAY,MAAM;AAAA,IAChC;AAAA,IACA,WAAW,iBAAiB,MAAM,OAAO;AAAA,IACzC,YAAY,kBAAkB,MAAM,OAAO;AAAA,IAC3C,YAAY,oBAAoB,IAAI;AAAA,EACtC;AACF;AAKF,SAAS,kBACP,MACA,SACA,aACmB;AACjB,QAAM,WAAW,KAAK,kBAAkB,MAAM;AAC9C,MAAI,CAAC,SAAU,QAAO;AAEtB,SAAO;AAAA,IACL,MAAM,SAAS;AAAA,IACf,MAAM;AAAA,IACN,WAAW,KAAK,cAAc,MAAM;AAAA,IACpC,SAAS,KAAK,YAAY,MAAM;AAAA,IAChC;AAAA,IACA,WAAW,iBAAiB,MAAM,OAAO;AAAA,IACzC,YAAY,kBAAkB,MAAM,OAAO;AAAA,IAC3C,YAAY,kBAAkB,MAAM,OAAO;AAAA,IAC3C,YAAY,oBAAoB,IAAI;AAAA,EACtC;AACF;AAKF,SAAS,iBACP,MACA,UACA,cACmB;AACjB,QAAM,WAAW,KAAK,kBAAkB,MAAM;AAC9C,MAAI,CAAC,SAAU,QAAO;AAEtB,SAAO;AAAA,IACL,MAAM,SAAS;AAAA,IACf,MAAM;AAAA,IACN,WAAW,KAAK,cAAc,MAAM;AAAA,IACpC,SAAS,KAAK,YAAY,MAAM;AAAA,IAChC,WAAW,SAAS,SAAS,IAAI;AAAA,EACnC;AACF;AAKF,SAAS,qBACP,MACA,UACA,cACmB;AACjB,QAAM,WAAW,KAAK,kBAAkB,MAAM;AAC9C,MAAI,CAAC,SAAU,QAAO;AAEtB,SAAO;AAAA,IACL,MAAM,SAAS;AAAA,IACf,MAAM;AAAA,IACN,WAAW,KAAK,cAAc,MAAM;AAAA,IACpC,SAAS,KAAK,YAAY,MAAM;AAAA,IAChC,WAAW,aAAa,SAAS,IAAI;AAAA,EACvC;AACF;AA4BK,SAAS,kBACd,MACA,SACA,aACmB;AACnB,QAAM,YAAY,iBAAiB,KAAK,IAAI;AAC5C,SAAO,YAAY,UAAU,MAAM,SAAS,WAAW,IAAI;AAC7D;AAKA,SAAS,iBAAiB,MAAyB,SAAyB;AAE1E,QAAM,YAAY,KAAK,cAAc;AACrC,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,MAAI,YAAY,MAAM,SAAS,KAAK;AAGpC,MAAI,cAAc;AAClB,SAAO,cAAc,KAAK,YAAY,OAAO,CAAC,UAAU,SAAS,GAAG,KAAK,CAAC,UAAU,SAAS,IAAI,GAAG;AAClG;AACA,iBAAa,OAAO,MAAM,WAAW,KAAK;AAAA,EAC5C;AAGA,cAAY,UAAU,MAAM,GAAG,EAAE,CAAC,EAAE,MAAM,IAAI,EAAE,CAAC,EAAE,KAAK;AAGxD,MAAI,UAAU,SAAS,KAAK;AAC1B,gBAAY,UAAU,UAAU,GAAG,GAAG,IAAI;AAAA,EAC5C;AAEA,SAAO;AACT;AAQA,SAAS,kBAAkB,MAAyB,UAA4B;AAC9E,QAAM,aAAuB,CAAC;AAG9B,QAAM,aAAa,KAAK,kBAAkB,YAAY;AACtD,MAAI,CAAC,WAAY,QAAO;AAGxB,WAAS,IAAI,GAAG,IAAI,WAAW,iBAAiB,KAAK;AACnD,UAAM,QAAQ,WAAW,WAAW,CAAC;AACrC,QAAI,OAAO;AACT,iBAAW,KAAK,MAAM,IAAI;AAAA,IAC5B;AAAA,EACF;AAEA,SAAO;AACT;AAQA,SAAS,kBAAkB,MAAyB,UAAsC;AACxF,QAAM,iBAAiB,KAAK,kBAAkB,aAAa;AAC3D,MAAI,CAAC,eAAgB,QAAO;AAE5B,SAAO,eAAe;AACxB;AAQO,SAAS,oBAAoB,MAAiC;AACnE,MAAI,aAAa;AAEjB,QAAM,iBAAiB;AAAA;AAAA,IAErB;AAAA,IACA;AAAA,IACA;AAAA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA;AAAA;AAAA,IAGA;AAAA;AAAA,EACF;AAEA,WAAS,SAAS,GAAsB;AACtC,QAAI,eAAe,SAAS,EAAE,IAAI,GAAG;AAEnC,UAAI,EAAE,SAAS,qBAAqB;AAClC,cAAM,WAAW,EAAE,kBAAkB,UAAU;AAC/C,YAAI,aAAa,SAAS,SAAS,QAAQ,SAAS,SAAS,OAAO;AAClE;AAAA,QACF;AAAA,MACF,OAAO;AACL;AAAA,MACF;AAAA,IACF;AAGA,aAAS,IAAI,GAAG,IAAI,EAAE,iBAAiB,KAAK;AAC1C,YAAM,QAAQ,EAAE,WAAW,CAAC;AAC5B,UAAI,MAAO,UAAS,KAAK;AAAA,IAC3B;AAAA,EACF;AAEA,WAAS,IAAI;AACb,SAAO;AACT;AAKO,SAAS,eAAe,UAAuC;AACpE,QAAM,UAAoB,CAAC;AAE3B,WAAS,SAAS,MAAyB;AACzC,QAAI,KAAK,SAAS,oBAAoB;AAEpC,YAAM,aAAa,KAAK,kBAAkB,QAAQ;AAClD,UAAI,YAAY;AACd,cAAM,aAAa,WAAW,KAAK,QAAQ,SAAS,EAAE;AACtD,gBAAQ,KAAK,UAAU;AAAA,MACzB;AAAA,IACF;AAGA,QAAI,SAAS,UAAU;AACrB,eAAS,IAAI,GAAG,IAAI,KAAK,iBA
AiB,KAAK;AAC7C,cAAM,QAAQ,KAAK,WAAW,CAAC;AAC/B,YAAI,MAAO,UAAS,KAAK;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AAEA,WAAS,QAAQ;AACjB,SAAO;AACT;AAjTA,IAoIM;AApIN;AAAA;AAAA;AAoIA,IAAM,mBAAoD;AAAA;AAAA,MAExD,wBAAwB;AAAA,MACxB,YAAY;AAAA,MACZ,kBAAkB;AAAA,MAClB,uBAAuB;AAAA,MACvB,qBAAqB;AAAA,MACrB,qBAAqB;AAAA,MACrB,yBAAyB;AAAA;AAAA,MAGzB,uBAAuB;AAAA;AAAA,MACvB,sBAAsB;AAAA;AAAA,IACxB;AAAA;AAAA;;;ACjJA,IASa,qBA2FA;AApGb;AAAA;AAAA;AASO,IAAM,sBAAN,MAAuD;AAAA,MAC5D,kBAAkB;AAAA,QAChB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA;AAAA,QACA;AAAA;AAAA,MACF;AAAA,MAEA,iBAAiB;AAAA,QACf;AAAA;AAAA,MACF;AAAA,MAEA,mBAAmB;AAAA,QACjB;AAAA;AAAA,QACA;AAAA;AAAA,MACF;AAAA,MAEA,gBAAgB;AAAA,QACd;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,MAEA,sBAAsB,MAAkC;AACtD,eAAO,KAAK,eAAe,SAAS,KAAK,IAAI;AAAA,MAC/C;AAAA,MAEA,0BAA0B,MAAkC;AAC1D,eAAO,KAAK,iBAAiB,SAAS,KAAK,IAAI;AAAA,MACjD;AAAA,MAEA,iBAAiB,MAAmD;AAClE,YAAI,KAAK,SAAS,qBAAqB;AACrC,iBAAO,KAAK,kBAAkB,MAAM;AAAA,QACtC;AACA,eAAO;AAAA,MACT;AAAA,MAEA,uBAAuB,MAAkC;AACvD,eAAO,KAAK,SAAS,aACd,KAAK,SAAS,sBACd,KAAK,SAAS;AAAA,MACvB;AAAA,MAEA,wBAAwB,MAA6C;AACnE,YAAI,UAAU,KAAK;AACnB,eAAO,SAAS;AACd,cAAI,QAAQ,SAAS,qBAAqB;AACxC,kBAAM,WAAW,QAAQ,kBAAkB,MAAM;AACjD,mBAAO,UAAU;AAAA,UACnB;AACA,oBAAU,QAAQ;AAAA,QACpB;AACA,eAAO;AAAA,MACT;AAAA;AAAA;AAAA;AAAA,MAKA,0BAA0B,MAAkD;AAC1E,cAAMC,UAAS,CAAC,GAAsB,UAA4C;AAChF,cAAI,QAAQ,EAAG,QAAO;AAEtB,cAAI,KAAK,cAAc,SAAS,EAAE,IAAI,GAAG;AACvC,mBAAO;AAAA,UACT;AAEA,mBAAS,IAAI,GAAG,IAAI,EAAE,YAAY,KAAK;AACrC,kBAAM,QAAQ,EAAE,MAAM,CAAC;AACvB,gBAAI,OAAO;AACT,oBAAM,SAASA,QAAO,OAAO,QAAQ,CAAC;AACtC,kBAAI,OAAQ,QAAO;AAAA,YACrB;AAAA,UACF;AAEA,iBAAO;AAAA,QACT;AAEA,cAAM,eAAeA,QAAO,MAAM,CAAC;AACnC,eAAO;AAAA,UACL,aAAa,iBAAiB;AAAA,UAC9B;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAKO,IAAM,sBAAN,cAAkC,oBAAoB;AAAA,IAAC;AAAA;AAAA;;;ACpG9D,IASa;AATb;AAAA;AAAA;AASO,IAAM,eAAN,MAAgD;AAAA,MACrD,kBAAkB;AAAA,QAChB;AAAA;AAAA,QACA;AAAA;AAAA,MACF;AAAA,MAEA,iBAAiB;AAAA,QACf;AAAA;AAAA,QACA;AAAA;AAAA,QACA;AAAA;AAAA,MACF;AAAA,MAEA,mBAAmB;AAAA;AAAA;AAAA,MAGnB;AAAA,MAEA,gBAAgB;AAAA,QACd;AAAA,QACA;AAAA,MACF;AAAA,MAEA,sBAAsB,MAAkC;AACtD,eAAO,KAAK,eAAe,SAAS,KAAK,IAAI;AAAA,MAC/C;AAAA,MAEA,0BAA0B,OAAmC;AAG3D,eAAO;AAAA,MACT;AAAA,MAEA,iBAAiB,MAAmD;AAClE,YAAI,KAAK,SAAS,uBACd,KAAK,SAAS,uBACd,KAAK,SAAS,yBAAyB;AAEzC,iBAAO,KAAK,kBAAkB,MAAM;AAAA,QACtC;AACA,eAAO;AAAA,MACT;AAAA,MAEA,uBAAuB,MAAkC;AACvD,eAAO,KAAK,SAAS;AAAA,QACd,KAAK,SAAS;AAAA,QACd,KAAK,SAAS;AAAA,MACvB;AAAA,MAEA,wBAAwB,MAA6C;AACnE,YAAI,UAAU,KAAK;AACnB,eAAO,SAAS;AACd,cAAI,QAAQ,SAAS,uBACjB,QAAQ,SAAS,qBAAqB;AACxC,kBAAM,WAAW,QAAQ,kBAAkB,MAAM;AACjD,mBAAO,UAAU;AAAA,UACnB;AACA,oBAAU,QAAQ;AAAA,QACpB;AACA,eAAO;AAAA,MACT;AAAA,MAEA,0BAA0B,OAAmD;AAE3E,eAAO;AAAA,UACL,aAAa;AAAA,UACb,cAAc;AAAA,QAChB;AAAA,MACF;AAAA,IACF;AAAA;AAAA;;;AChDO,SAAS,aAAa,UAAgD;AAC3E,QAAM,YAAY,kBAAkB,QAAQ;AAE5C,MAAI,CAAC,WAAW;AACd,UAAM,IAAI,MAAM,wCAAwC,QAAQ,EAAE;AAAA,EACpE;AAEA,SAAO;AACT;AArCA,IAgBM;AAhBN;AAAA;AAAA;AAEA;AACA;AAaA,IAAM,oBAAkE;AAAA,MACtE,YAAY,IAAI,oBAAoB;AAAA,MACpC,YAAY,IAAI,oBAAoB;AAAA,MACpC,KAAK,IAAI,aAAa;AAAA,IACxB;AAAA;AAAA;;;ACQO,SAAS,WACd,UACA,SACA,UAA2B,CAAC,GAChB;AACZ,QAAM,EAAE,eAAe,EAAE,IAAI;AAG7B,QAAM,WAAWC,gBAAe,QAAQ;AACxC,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,kCAAkC,QAAQ,EAAE;AAAA,EAC9D;AAGA,QAAM,cAAc,SAAS,SAAS,QAAQ;AAG9C,MAAI,CAAC,YAAY,MAAM;AACrB,UAAM,IAAI,MAAM,mBAAmB,QAAQ,KAAK,YAAY,KAAK,EAAE;AAAA,EACrE;AAEA,QAAM,SAAqB,CAAC;AAC5B,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,QAAM,WAAW,YAAY,KAAK;AAGlC,QAAM,YAAY,aAAa,QAAQ;AAGvC,QAAM,cAAc,eAAe,QAAQ;AAG3C,QAAM,gBAAgB,kBAAkB,UAAU,SAAS;AAE3D,aAAW,QAAQ,eAAe;AAEhC,QAAI,aAAa;AACjB,QAAI,UAAU,0BAA0B,IAAI,GAAG;AAC7C,YAAM,WAAW,UAAU,0BAA0B,IAAI;AACzD,UAAI,SAAS,cAAc;AACzB,qBAAa,SAAS
;AAAA,MACxB;AAAA,IACF;AAGA,UAAM,kBAAkB,UAAU,wBAAwB,UAAU;AAEpE,UAAM,aAAa,kBAAkB,YAAY,SAAS,eAAe;AAGzE,UAAM,cAAc,eAAe,MAAM,KAAK;AAK9C,WAAO,KAAK,YAAY,UAAU,MAAM,aAAa,YAAY,aAAa,QAAQ,CAAC;AAAA,EACzF;AAGA,QAAM,gBAAgB,cAAc,IAAI,QAAM;AAAA,IAC5C,OAAO,EAAE,cAAc;AAAA,IACvB,KAAK,EAAE,YAAY;AAAA,EACrB,EAAE;AAEF,QAAM,kBAAkB;AAAA,IACtB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,SAAO,KAAK,GAAG,eAAe;AAG9B,SAAO,KAAK,CAAC,GAAG,MAAM,EAAE,SAAS,YAAY,EAAE,SAAS,SAAS;AAEjE,SAAO;AACT;AAaA,SAAS,kBACP,UACA,WACqB;AACrB,QAAM,QAA6B,CAAC;AAEpC,WAAS,SAAS,MAAyB,OAAe;AAExD,QAAI,UAAU,0BAA0B,IAAI,KAAK,UAAU,GAAG;AAC5D,YAAM,WAAW,UAAU,0BAA0B,IAAI;AACzD,UAAI,SAAS,aAAa;AACxB,cAAM,KAAK,IAAI;AACf;AAAA,MACF;AAAA,IACF;AAGA,QAAI,SAAS,KAAK,UAAU,gBAAgB,SAAS,KAAK,IAAI,GAAG;AAC/D,YAAM,KAAK,IAAI;AACf;AAAA,IACF;AAGA,QAAI,UAAU,sBAAsB,IAAI,GAAG;AACzC,YAAM,OAAO,UAAU,iBAAiB,IAAI;AAC5C,UAAI,MAAM;AACR,iBAAS,MAAM,QAAQ,CAAC;AAAA,MAC1B;AACA;AAAA,IACF;AAGA,QAAI,UAAU,uBAAuB,IAAI,GAAG;AAC1C,eAAS,IAAI,GAAG,IAAI,KAAK,iBAAiB,KAAK;AAC7C,cAAM,QAAQ,KAAK,WAAW,CAAC;AAC/B,YAAI,MAAO,UAAS,OAAO,KAAK;AAAA,MAClC;AAAA,IACF;AAAA,EACF;AAEA,WAAS,UAAU,CAAC;AACpB,SAAO;AACT;AAKA,SAAS,eAAe,MAAyB,OAAyB;AACxE,QAAM,YAAY,KAAK,cAAc;AACrC,QAAM,UAAU,KAAK,YAAY;AAEjC,SAAO,MAAM,MAAM,WAAW,UAAU,CAAC,EAAE,KAAK,IAAI;AACtD;AAKA,SAAS,YACP,UACA,MACA,SACA,YACA,SACA,UACU;AAEV,QAAM,UAAU;AAAA,IACd,WAAW,CAAC;AAAA,IACZ,SAAS,CAAC;AAAA,IACV,YAAY,CAAC;AAAA,EACf;AAEA,MAAI,YAAY,MAAM;AAEpB,QAAI,WAAW,SAAS,cAAc,WAAW,SAAS,UAAU;AAClE,cAAQ,UAAU,KAAK,WAAW,IAAI;AAAA,IACxC,WAAW,WAAW,SAAS,SAAS;AACtC,cAAQ,QAAQ,KAAK,WAAW,IAAI;AAAA,IACtC,WAAW,WAAW,SAAS,aAAa;AAC1C,cAAQ,WAAW,KAAK,WAAW,IAAI;AAAA,IACzC;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA,UAAU;AAAA,MACR,MAAM;AAAA,MACN,WAAW,KAAK,cAAc,MAAM;AAAA,MACpC,SAAS,KAAK,YAAY,MAAM;AAAA,MAChC,MAAM,cAAc,OAAO,UAAW,WAAW,SAAS,UAAU,UAAU;AAAA,MAC9E;AAAA;AAAA,MAEA;AAAA;AAAA,MAEA,YAAY,YAAY;AAAA,MACxB,YAAY,YAAY;AAAA,MACxB,aAAa,YAAY;AAAA,MACzB,YAAY,YAAY;AAAA,MACxB,YAAY,YAAY;AAAA,MACxB,WAAW,YAAY;AAAA,MACvB;AAAA,IACF;AAAA,EACF;AACF;AAaA,SAAS,oBACP,eACA,YACa;AACb,QAAM,kBAA+B,CAAC;AACtC,MAAI,eAAe;AAGnB,QAAM,eAAe,CAAC,GAAG,aAAa,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAExE,aAAW,SAAS,cAAc;AAChC,QAAI,eAAe,MAAM,OAAO;AAE9B,sBAAgB,KAAK;AAAA,QACnB,OAAO;AAAA,QACP,KAAK,MAAM,QAAQ;AAAA,MACrB,CAAC;AAAA,IACH;AACA,mBAAe,MAAM,MAAM;AAAA,EAC7B;AAGA,MAAI,eAAe,YAAY;AAC7B,oBAAgB,KAAK;AAAA,MACnB,OAAO;AAAA,MACP,KAAK,aAAa;AAAA,IACpB,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAKA,SAAS,qBACP,OACA,OACA,UACA,UACA,SACU;AACV,QAAM,iBAAiB,MAAM,MAAM,MAAM,OAAO,MAAM,MAAM,CAAC;AAC7D,QAAM,UAAU,eAAe,KAAK,IAAI,EAAE,KAAK;AAE/C,SAAO;AAAA,IACL;AAAA,IACA,UAAU;AAAA,MACR,MAAM;AAAA,MACN,WAAW,MAAM,QAAQ;AAAA,MACzB,SAAS,MAAM,MAAM;AAAA,MACrB,MAAM;AAAA,MACN;AAAA;AAAA,MAEA,SAAS,EAAE,WAAW,CAAC,GAAG,SAAS,CAAC,GAAG,YAAY,CAAC,EAAE;AAAA,MACtD;AAAA,IACF;AAAA,EACF;AACF;AAKA,SAAS,aAAa,OAAiB,cAA+B;AACpE,QAAM,YAAY,MAAM,SAAS,UAAU,MAAM,SAAS,YAAY;AACtE,SAAO,MAAM,QAAQ,SAAS,KAAK,aAAa;AAClD;AAMA,SAAS,qBACP,OACA,eACA,UACA,cACA,SACA,UACY;AACZ,QAAM,kBAAkB,oBAAoB,eAAe,MAAM,MAAM;AAEvE,SAAO,gBACJ,IAAI,WAAS,qBAAqB,OAAO,OAAO,UAAU,UAAU,OAAO,CAAC,EAC5E,OAAO,WAAS,aAAa,OAAO,YAAY,CAAC;AACtD;AAKO,SAAS,aAAa,UAA2B;AACtD,SAAO,eAAe,QAAQ;AAChC;AAxUA;AAAA;AAAA;AAEA;AACA;AACA;AAAA;AAAA;;;AC4BA,SAAS,kBAAkB,eAA2C;AACpE,MAAI;AAGF,QAAI,cAAc,cACf,QAAQ,2BAA2B,EAAE,EACrC,QAAQ,8BAA8B,EAAE,EACxC,KAAK;AAGR,UAAM,SAAS,KAAK,MAAM,WAAW;AAErC,WAAO,OAAO,OAAO,SAAS,WAAW,OAAO,OAAO;AAAA,EACzD,SAAS,OAAO;AAAA,EAGhB;AACA,SAAO;AACT;AASA,SAAS,eAAe,SAAyB;AAE/C,SAAO,QAAQ,QAAQ,8DAA8D,EAAE;AACzF;AAqBA,SAAS,kBAAkB,wBAA0C;AACnE,QAAM,eAAe,oBAAI,IAAY;AAIrC,QAAM,gBAAgB;AACtB,MAAI;AAEJ,UAAQ,QAAQ,cAAc,KAA
K,sBAAsB,OAAO,MAAM;AACpE,iBAAa,IAAI,MAAM,CAAC,CAAC;AAAA,EAC3B;AAGA,QAAM,iBAAiB;AAEvB,UAAQ,QAAQ,eAAe,KAAK,sBAAsB,OAAO,MAAM;AACrE,iBAAa,IAAI,MAAM,CAAC,CAAC;AAAA,EAC3B;AAGA,QAAM,iBAAiB;AAEvB,UAAQ,QAAQ,eAAe,KAAK,sBAAsB,OAAO,MAAM;AACrE,iBAAa,IAAI,MAAM,CAAC,CAAC;AAAA,EAC3B;AAEA,SAAO,MAAM,KAAK,YAAY;AAChC;AAWA,SAAS,iBAAiB,SAAgC;AACxD,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,QAAM,SAAwB,CAAC;AAI/B,QAAM,gBAAgB;AAAA,IACpB,EAAE,MAAM,UAAmB,OAAO,0BAA0B,KAAK,4BAA4B;AAAA,IAC7F,EAAE,MAAM,SAAkB,OAAO,yBAAyB,KAAK,2BAA2B;AAAA,IAC1F,EAAE,MAAM,cAAuB,OAAO,8BAA8B,KAAK,gCAAgC;AAAA,EAC3G;AAEA,aAAW,WAAW,eAAe;AACnC,QAAI,cAAc;AAElB,WAAO,cAAc,MAAM,QAAQ;AAEjC,YAAM,WAAW,MAAM;AAAA,QAAU,CAAC,MAAM,QACtC,OAAO,eAAe,QAAQ,MAAM,KAAK,IAAI;AAAA,MAC/C;AAEA,UAAI,aAAa,GAAI;AAGrB,YAAM,SAAS,MAAM;AAAA,QAAU,CAAC,MAAM,QACpC,OAAO,YAAY,QAAQ,IAAI,KAAK,IAAI;AAAA,MAC1C;AAEA,UAAI,WAAW,IAAI;AAEjB;AAAA,MACF;AAGA,YAAM,eAAe,MAAM,MAAM,UAAU,SAAS,CAAC,EAAE,KAAK,IAAI;AAEhE,aAAO,KAAK;AAAA,QACV,MAAM,QAAQ;AAAA,QACd,WAAW;AAAA,QACX,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAED,oBAAc,SAAS;AAAA,IACzB;AAAA,EACF;AAEA,SAAO,OAAO,KAAK,CAAC,GAAG,MAAM,EAAE,YAAY,EAAE,SAAS;AACxD;AAYO,SAAS,gBACd,UACA,SACA,YAAoB,IACpB,eAAuB,IACV;AACb,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,QAAM,SAAS,iBAAiB,OAAO;AACvC,QAAM,SAAsB,CAAC;AAG7B,QAAM,yBAAyB,eAAe,OAAO;AACrD,QAAM,uBAAuB,uBAAuB,MAAM,IAAI;AAG9D,QAAM,eAAe,oBAAI,IAAY;AAGrC,aAAW,SAAS,QAAQ;AAE1B,aAAS,IAAI,MAAM,WAAW,KAAK,MAAM,SAAS,KAAK;AACrD,mBAAa,IAAI,CAAC;AAAA,IACpB;AAGA,QAAI;AACJ,QAAI,MAAM,SAAS,UAAU;AAC3B,mBAAa,kBAAkB,MAAM,OAAO;AAAA,IAC9C;AAGA,UAAM,8BAA8B,qBACjC,MAAM,MAAM,WAAW,MAAM,UAAU,CAAC,EACxC,KAAK,IAAI;AACZ,UAAM,UAAU,kBAAkB,2BAA2B;AAE7D,UAAM,iBAAiB,MAAM,UAAU,MAAM,YAAY;AACzD,UAAM,eAAe,YAAY;AAGjC,QAAI,kBAAkB,cAAc;AAClC,aAAO,KAAK;AAAA,QACV,SAAS,MAAM;AAAA,QACf,UAAU;AAAA,UACR,MAAM;AAAA,UACN,WAAW,MAAM,YAAY;AAAA;AAAA,UAC7B,SAAS,MAAM,UAAU;AAAA,UACzB,UAAU;AAAA,UACV,MAAM;AAAA,UACN;AAAA,UACA,YAAY,MAAM;AAAA,UAClB,SAAS,QAAQ,SAAS,IAAI,UAAU;AAAA,QAC1C;AAAA,MACF,CAAC;AAAA,IACH,OAAO;AAEL,YAAM,aAAa,MAAM,QAAQ,MAAM,IAAI;AAE3C,eAAS,SAAS,GAAG,SAAS,WAAW,QAAQ,UAAU,YAAY,cAAc;AACnF,cAAM,YAAY,KAAK,IAAI,SAAS,WAAW,WAAW,MAAM;AAChE,cAAM,eAAe,WAAW,MAAM,QAAQ,SAAS,EAAE,KAAK,IAAI;AAElE,YAAI,aAAa,KAAK,EAAE,SAAS,GAAG;AAClC,iBAAO,KAAK;AAAA,YACV,SAAS;AAAA,YACT,UAAU;AAAA,cACR,MAAM;AAAA,cACN,WAAW,MAAM,YAAY,SAAS;AAAA;AAAA,cACtC,SAAS,MAAM,YAAY;AAAA;AAAA,cAC3B,UAAU;AAAA,cACV,MAAM;AAAA,cACN;AAAA;AAAA,cACA,YAAY,MAAM;AAAA,cAClB,SAAS,QAAQ,SAAS,IAAI,UAAU;AAAA,YAC1C;AAAA,UACF,CAAC;AAAA,QACH;AAEA,YAAI,aAAa,WAAW,OAAQ;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AAGA,MAAI,eAAyB,CAAC;AAC9B,MAAI,iBAAiB;AAErB,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AAErC,QAAI,aAAa,IAAI,CAAC,GAAG;AAEvB,UAAI,aAAa,SAAS,GAAG;AAC3B,cAAM,eAAe,aAAa,KAAK,IAAI;AAG3C,YAAI,aAAa,KAAK,EAAE,SAAS,GAAG;AAElC,gBAAM,eAAe,qBAAqB,MAAM,gBAAgB,CAAC,EAAE,KAAK,IAAI;AAC5E,gBAAM,UAAU,kBAAkB,YAAY;AAE9C,iBAAO,KAAK;AAAA,YACV,SAAS;AAAA,YACT,UAAU;AAAA,cACR,MAAM;AAAA,cACN,WAAW,iBAAiB;AAAA,cAC5B,SAAS;AAAA,cACT,UAAU;AAAA,cACV,MAAM;AAAA,cACN,SAAS,QAAQ,SAAS,IAAI,UAAU;AAAA,YAC1C;AAAA,UACF,CAAC;AAAA,QACH;AACA,uBAAe,CAAC;AAAA,MAClB;AACA;AAAA,IACF;AAGA,QAAI,aAAa,WAAW,GAAG;AAC7B,uBAAiB;AAAA,IACnB;AAEA,iBAAa,KAAK,MAAM,CAAC,CAAC;AAG1B,QAAI,aAAa,UAAU,WAAW;AACpC,YAAM,eAAe,aAAa,KAAK,IAAI;AAG3C,UAAI,aAAa,KAAK,EAAE,SAAS,GAAG;AAElC,cAAM,eAAe,qBAAqB,MAAM,gBAAgB,IAAI,CAAC,EAAE,KAAK,IAAI;AAChF,cAAM,UAAU,kBAAkB,YAAY;AAE9C,eAAO,KAAK;AAAA,UACV,SAAS;AAAA,UACT,UAAU;AAAA,YACR,MAAM;AAAA,YACN,WAAW,iBAAiB;AAAA,YAC5B,SAAS,IAAI;AAAA,YACb,UAAU;AAAA,YACV,MAAM;AAAA,YACN,SAAS,QAAQ,SAAS,IAAI,UAAU;AAAA,UAC1C;AAAA,QACF,CAAC;AAAA,MACH;AAGA,qBAAe,aAAa,MAAM,CAAC,YAAY;AAC/C,uBAAiB,KAAK,IAAI,GAAG,IAAI,IAAI,YAAY;AAAA,IA
CnD;AAAA,EACF;AAGA,MAAI,aAAa,SAAS,GAAG;AAC3B,UAAM,eAAe,aAAa,KAAK,IAAI;AAG3C,QAAI,aAAa,KAAK,EAAE,WAAW,GAAG;AACpC,aAAO,OAAO,KAAK,CAAC,GAAG,MAAM,EAAE,SAAS,YAAY,EAAE,SAAS,SAAS;AAAA,IAC1E;AAGA,UAAM,eAAe,qBAAqB,MAAM,gBAAgB,MAAM,MAAM,EAAE,KAAK,IAAI;AACvF,UAAM,UAAU,kBAAkB,YAAY;AAE9C,WAAO,KAAK;AAAA,MACV,SAAS;AAAA,MACT,UAAU;AAAA,QACR,MAAM;AAAA,QACN,WAAW,iBAAiB;AAAA,QAC5B,SAAS,MAAM;AAAA,QACf,UAAU;AAAA,QACV,MAAM;AAAA,QACN,SAAS,QAAQ,SAAS,IAAI,UAAU;AAAA,MAC1C;AAAA,IACF,CAAC;AAAA,EACH;AAGA,SAAO,OAAO,KAAK,CAAC,GAAG,MAAM,EAAE,SAAS,YAAY,EAAE,SAAS,SAAS;AAC1E;AA1WA;AAAA;AAAA;AAAA;AAAA;;;ACuBA,SAAS,yBAAyB,aAA+B;AAC/D,MAAI;AACF,UAAM,WAAW,KAAK,MAAM,WAAW;AACvC,UAAM,eAAe,oBAAI,IAAY;AAGrC,QAAI,SAAS,YAAY,OAAO,SAAS,aAAa,UAAU;AAC9D,iBAAW,WAAW,OAAO,OAAO,SAAS,QAAQ,GAAG;AACtD,YACE,OAAO,YAAY,YACnB,YAAY,QACZ,UAAU,WACV,OAAO,QAAQ,SAAS,UACxB;AACA,uBAAa,IAAI,QAAQ,IAAI;AAAA,QAC/B;AAAA,MACF;AAAA,IACF;AAEA,WAAO,MAAM,KAAK,YAAY;AAAA,EAChC,SAAS,OAAO;AAEd,YAAQ,KAAK,yCAAyC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,EAAE;AAC9G,WAAO,CAAC;AAAA,EACV;AACF;AAQA,SAAS,oBAAoB,UAAsC;AAEjE,QAAM,QAAQ,SAAS,MAAM,wBAAwB;AACrD,SAAO,QAAQ,MAAM,CAAC,IAAI;AAC5B;AAQO,SAAS,kBACd,UACA,SACa;AAEb,MAAI,QAAQ,KAAK,EAAE,WAAW,GAAG;AAC/B,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,QAAM,eAAe,oBAAoB,QAAQ;AACjD,QAAM,oBAAoB,yBAAyB,OAAO;AAE1D,SAAO,CAAC;AAAA,IACN;AAAA,IACA,UAAU;AAAA,MACR,MAAM;AAAA,MACN,WAAW;AAAA,MACX,SAAS,MAAM;AAAA,MACf,UAAU;AAAA,MACV,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,YAAY;AAAA,MACZ,SAAS,kBAAkB,SAAS,IAAI,oBAAoB;AAAA,IAC9D;AAAA,EACF,CAAC;AACH;AA9FA;AAAA;AAAA;AAAA;AAAA;;;ACcO,SAAS,UACd,UACA,SACA,UAAwB,CAAC,GACZ;AACb,QAAM,EAAE,YAAY,IAAI,eAAe,IAAI,SAAS,MAAM,cAAc,aAAa,IAAI;AAGzF,MAAI,SAAS,SAAS,SAAS,GAAG;AAChC,WAAO,gBAAgB,UAAU,SAAS,WAAW,YAAY;AAAA,EACnE;AAMA,MAAI,SAAS,SAAS,OAAO,KAAK,sBAAsB,KAAK,QAAQ,GAAG;AACtE,WAAO,kBAAkB,UAAU,OAAO;AAAA,EAC5C;AAGA,MAAI,UAAU,aAAa,QAAQ,GAAG;AACpC,QAAI;AACF,aAAO,WAAW,UAAU,SAAS;AAAA,QACnC,cAAc,KAAK,MAAM,YAAY,EAAE;AAAA,MACzC,CAAC;AAAA,IACH,SAAS,OAAO;AAEd,UAAI,gBAAgB,SAAS;AAE3B,cAAM,IAAI,MAAM,2BAA2B,QAAQ,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,EAAE;AAAA,MAClH;AAEA,cAAQ,KAAK,2BAA2B,QAAQ,iCAAiC,KAAK;AAAA,IACxF;AAAA,EACF;AAGA,SAAO,aAAa,UAAU,SAAS,WAAW,YAAY;AAChE;AAKA,SAAS,aACP,UACA,SACA,WACA,cACa;AACb,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,QAAM,SAAsB,CAAC;AAC7B,QAAM,WAAW,eAAe,QAAQ;AAGxC,MAAI,MAAM,WAAW,KAAM,MAAM,WAAW,KAAK,MAAM,CAAC,EAAE,KAAK,MAAM,IAAK;AACxE,WAAO;AAAA,EACT;AAGA,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,YAAY,cAAc;AAC/D,UAAM,UAAU,KAAK,IAAI,IAAI,WAAW,MAAM,MAAM;AACpD,UAAM,aAAa,MAAM,MAAM,GAAG,OAAO;AACzC,UAAM,eAAe,WAAW,KAAK,IAAI;AAGzC,QAAI,aAAa,KAAK,EAAE,WAAW,GAAG;AACpC;AAAA,IACF;AAGA,UAAM,UAAU,eAAe,cAAc,QAAQ;AAErD,WAAO,KAAK;AAAA,MACV,SAAS;AAAA,MACT,UAAU;AAAA,QACR,MAAM;AAAA,QACN,WAAW,IAAI;AAAA,QACf;AAAA,QACA,MAAM;AAAA;AAAA,QACN;AAAA,QACA;AAAA,MACF;AAAA,IACF,CAAC;AAGD,QAAI,WAAW,MAAM,QAAQ;AAC3B;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AA1GA,IAAAC,gBAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AAAA;AAAA;;;ACLA,SAAS,UAAU,WAA2C;AAA9D,IASa;AATb;AAAA;AAAA;AAEA;AACA;AAGA,QAAI,oBAAoB;AACxB,QAAI,mBAAmB;AAEhB,IAAM,kBAAN,MAAkD;AAAA,MAC/C,YAA8C;AAAA,MACrC,YAAY;AAAA,MACrB,cAAoC;AAAA,MAE5C,MAAM,aAA4B;AAEhC,YAAI,KAAK,aAAa;AACpB,iBAAO,KAAK;AAAA,QACd;AAEA,YAAI,KAAK,WAAW;AAClB;AAAA,QACF;AAEA,aAAK,eAAe,YAAY;AAC9B,cAAI;AAEF,iBAAK,YAAY,MAAM,SAAS,sBAAsB,KAAK,SAAS;AAAA,UACtE,SAAS,OAAgB;AACvB,iBAAK,cAAc;AACnB,kBAAM,UAAU,OAAO,sCAAsC;AAAA,UAC/D;AAAA,QACF,GAAG;AAEH,eAAO,KAAK;AAAA,MACd;AAAA,MAEA,MAAM,MAAM,MAAqC;AAC/C,cAAM,KAAK,WAAW;AAEtB,YAAI,CAAC,KAAK,WAAW;AACnB,gBAAM,IAAI,eAAe,iCAAiC;AAAA,QAC5D;AAEA,YAAI;AACF,gBAAM,SAAS,MAAM,KAAK,UAAU,MAAM;AAAA,YACxC,SAAS;AAAA,YACT,WAAW;AAAA,UAC
b,CAAC;AAED,iBAAO,OAAO;AAAA,QAChB,SAAS,OAAgB;AACvB,gBAAM,UAAU,OAAO,gCAAgC,EAAE,YAAY,KAAK,OAAO,CAAC;AAAA,QACpF;AAAA,MACF;AAAA,MAEA,MAAM,WAAW,OAA0C;AACzD,cAAM,KAAK,WAAW;AAEtB,YAAI,CAAC,KAAK,WAAW;AACnB,gBAAM,IAAI,eAAe,iCAAiC;AAAA,QAC5D;AAEA,YAAI;AAGF,gBAAM,UAAU,MAAM,QAAQ;AAAA,YAC5B,MAAM,IAAI,UAAQ,KAAK,MAAM,IAAI,CAAC;AAAA,UACpC;AACA,iBAAO;AAAA,QACT,SAAS,OAAgB;AACvB,gBAAM,UAAU,OAAO,uCAAuC,EAAE,WAAW,MAAM,OAAO,CAAC;AAAA,QAC3F;AAAA,MACF;AAAA,IACF;AAAA;AAAA;;;AC1EA,IAQa;AARb;AAAA;AAAA;AAAA;AAQO,IAAM,sBAAsB;AAAA;AAAA;;;ACM5B,SAAS,mBAAmB,OAAkC;AACnE,MAAI,QAAQ,EAAK,QAAO;AACxB,MAAI,QAAQ,IAAK,QAAO;AACxB,MAAI,QAAQ,IAAK,QAAO;AACxB,SAAO;AACT;AAnBA;AAAA;AAAA;AAAA;AAAA;;;AC8CO,SAAS,oBAAoB,OAA4B;AAC9D,QAAM,QAAQ,MAAM,YAAY,EAAE,KAAK;AAIvC,MACE,MAAM,MAAM,sCAAsC,KAClD,MAAM,MAAM,eAAe,KAC3B,MAAM,MAAM,WAAW,GACvB;AACA,WAAO;AAAA,EACT;AAIA,MACE,MAAM,MAAM,wBAAwB,KACpC,MAAM,MAAM,sBAAsB,KAClC,MAAM,MAAM,YAAY,KACxB,MAAM,MAAM,eAAe,KAC3B,MAAM,MAAM,qCAAqC,GACjD;AACA,WAAO;AAAA,EACT;AAIA,MACE,MAAM,MAAM,iDAAiD,KAC7D,MAAM,MAAM,qBAAqB,KACjC,MAAM,MAAM,qBAAqB,GACjC;AACA,WAAO;AAAA,EACT;AAIA,SAAO;AACT;AApFA;AAAA;AAAA;AAAA;AAAA;;;ACAA,OAAOC,YAAU;AA4CjB,SAAS,oBAAoB,UAA2B;AACtD,QAAM,QAAQ,SAAS,YAAY;AACnC,QAAM,WAAWA,OAAK,SAAS,QAAQ,EAAE,YAAY;AAErD,MAAI,SAAS,WAAW,QAAQ,EAAG,QAAO;AAC1C,MAAI,SAAS,WAAW,WAAW,EAAG,QAAO;AAC7C,MAAI,SAAS,SAAS,KAAK,KAAK,SAAS,SAAS,MAAM,KAAK,SAAS,SAAS,WAAW,GAAG;AAC3F,WAAO;AAAA,EACT;AACA,MACE,MAAM,SAAS,QAAQ,KACvB,MAAM,SAAS,iBAAiB,KAChC,MAAM,SAAS,QAAQ,KACvB,MAAM,SAAS,WAAW,GAC1B;AACA,WAAO;AAAA,EACT;AACA,MACE,MAAM,SAAS,cAAc,KAC7B,MAAM,SAAS,UAAU,KACzB,MAAM,SAAS,QAAQ,GACvB;AACA,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAKA,SAAS,WAAW,UAA2B;AAC7C,QAAM,QAAQ,SAAS,YAAY;AAEnC,MACE,MAAM,SAAS,QAAQ,KACvB,MAAM,SAAS,SAAS,KACxB,MAAM,SAAS,aAAa,GAC5B;AACA,WAAO;AAAA,EACT;AAEA,MACE,MAAM,SAAS,QAAQ,KACvB,MAAM,SAAS,QAAQ,KACvB,MAAM,SAAS,QAAQ,KACvB,MAAM,SAAS,QAAQ,GACvB;AACA,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAKA,SAAS,cAAc,UAA2B;AAChD,QAAM,QAAQ,SAAS,YAAY;AAEnC,MACE,MAAM,SAAS,SAAS,KACxB,MAAM,SAAS,aAAa,KAC5B,MAAM,SAAS,WAAW,KAC1B,MAAM,SAAS,OAAO,GACtB;AACA,WAAO;AAAA,EACT;AAEA,MACE,MAAM,SAAS,QAAQ,KACvB,MAAM,SAAS,UAAU,KACzB,MAAM,SAAS,QAAQ,KACvB,MAAM,SAAS,UAAU,GACzB;AACA,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAKA,SAAS,mBACP,OACA,UACA,WACQ;AACR,QAAM,cAAc,MAAM,YAAY,EAAE,MAAM,KAAK;AACnD,QAAM,eAAe,SAAS,YAAY,EAAE,MAAM,GAAG;AAErD,MAAI,cAAc;AAElB,aAAW,SAAS,aAAa;AAC/B,QAAI,MAAM,UAAU,EAAG;AACvB,QAAI,aAAa,KAAK,SAAO,IAAI,SAAS,KAAK,CAAC,GAAG;AACjD,qBAAe;AAAA,IACjB;AAAA,EACF;AAEA,SAAO,YAAY;AACrB;AAKA,SAAS,uBACP,OACA,UACA,WACQ;AACR,QAAM,WAAWA,OAAK,SAAS,UAAUA,OAAK,QAAQ,QAAQ,CAAC,EAAE,YAAY;AAC7E,QAAM,cAAc,MAAM,YAAY,EAAE,MAAM,KAAK;AAEnD,MAAI,cAAc;AAElB,aAAW,SAAS,aAAa;AAC/B,QAAI,MAAM,UAAU,EAAG;AAEvB,QAAI,aAAa,OAAO;AACtB,qBAAe;AAAA,IACjB,WAAW,SAAS,SAAS,KAAK,GAAG;AACnC,qBAAe;AAAA,IACjB;AAAA,EACF;AAEA,SAAO,YAAY;AACrB;AAKA,SAAS,uBACP,OACA,UACA,WACQ;AACR,MAAI,QAAQ;AAEZ,QAAM,WAAWA,OAAK,SAAS,UAAUA,OAAK,QAAQ,QAAQ,CAAC,EAAE,YAAY;AAC7E,QAAM,cAAc,MAAM,YAAY,EAAE,MAAM,KAAK;AAEnD,aAAW,SAAS,aAAa;AAC/B,QAAI,MAAM,UAAU,EAAG;AAEvB,QAAI,aAAa,OAAO;AACtB,eAAS;AAAA,IACX,WAAW,SAAS,SAAS,KAAK,GAAG;AACnC,eAAS;AAAA,IACX;AAAA,EACF;AAEA,UAAQ,mBAAmB,OAAO,UAAU,KAAK;AAEjD,MAAI,WAAW,QAAQ,GAAG;AACxB,aAAS;AAAA,EACX;AAEA,SAAO;AACT;AAKA,SAAS,yBACP,OACA,UACA,WACQ;AACR,MAAI,QAAQ;AAEZ,MAAI,oBAAoB,QAAQ,GAAG;AACjC,aAAS;AAET,UAAM,QAAQ,SAAS,YAAY;AACnC,QACE,MAAM,SAAS,cAAc,KAC7B,MAAM,SAAS,UAAU,KACzB,MAAM,SAAS,MAAM,GACrB;AACA,eAAS;AAAA,IACX;AAAA,EACF;AAEA,MAAI,cAAc,QAAQ,GAAG;AAC3B,aAAS;AAAA,EACX;AAEA,QAAM,WAAWA,OAAK,SAAS,UAAUA,OAAK,QAAQ,QAAQ,CAAC,EAAE,YAAY;AAC7E,QAAM,cAAc,MAAM,YAAY,EAAE,MAAM,KAAK;AAEnD,aAAW,SAAS,aAAa;AAC/B,QAAI,MAAM,UAAU,EAAG;
AACvB,QAAI,SAAS,SAAS,KAAK,GAAG;AAC5B,eAAS;AAAA,IACX;AAAA,EACF;AAEA,QAAM,eAAe,SAAS,YAAY,EAAE,MAAMA,OAAK,GAAG;AAC1D,aAAW,SAAS,aAAa;AAC/B,QAAI,MAAM,UAAU,EAAG;AAEvB,eAAW,WAAW,cAAc;AAClC,UAAI,QAAQ,SAAS,KAAK,GAAG;AAC3B,iBAAS;AACT;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,6BACP,OACA,UACA,WACQ;AACR,MAAI,QAAQ;AAEZ,UAAQ,uBAAuB,OAAO,UAAU,KAAK;AACrD,UAAQ,mBAAmB,OAAO,UAAU,KAAK;AAEjD,MAAI,WAAW,QAAQ,GAAG;AACxB,aAAS;AAAA,EACX;AAEA,SAAO;AACT;AAKA,SAAS,uBACP,OACA,UACA,WACQ;AACR,MAAI,CAAC,OAAO;AACV,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,oBAAoB,KAAK;AAExC,UAAQ,QAAQ;AAAA,IACd;AACE,aAAO,uBAAuB,OAAO,UAAU,SAAS;AAAA,IAE1D;AACE,aAAO,yBAAyB,OAAO,UAAU,SAAS;AAAA,IAE5D;AACE,aAAO,6BAA6B,OAAO,UAAU,SAAS;AAAA,IAEhE;AACE,aAAO,6BAA6B,OAAO,UAAU,SAAS;AAAA,EAClE;AACF;AAKA,SAAS,uBACP,GACA,OACc;AACd,QAAM,YAAY,EAAE,aAAa;AACjC,QAAM,eAAe,uBAAuB,OAAO,EAAE,MAAM,SAAS;AAEpE,SAAO;AAAA,IACL,SAAS,EAAE;AAAA,IACX,UAAU;AAAA,MACR,MAAM,EAAE;AAAA,MACR,WAAW,EAAE;AAAA,MACb,SAAS,EAAE;AAAA,MACX,MAAM,EAAE;AAAA,MACR,UAAU,EAAE;AAAA;AAAA,MAEZ,YAAY,EAAE,cAAc;AAAA,MAC5B,YAAY,EAAE;AAAA,MACd,aAAa,EAAE,eAAe;AAAA,MAC9B,YAAY,EAAE,cAAc;AAAA,MAC5B,YAAa,EAAE,cAAc,EAAE,WAAW,SAAS,KAAK,EAAE,WAAW,CAAC,MAAM,KAAM,EAAE,aAAa;AAAA,MACjG,WAAW,EAAE,aAAa;AAAA,MAC1B,SAAU,EAAE,WAAW,EAAE,QAAQ,SAAS,KAAK,EAAE,QAAQ,CAAC,MAAM,KAAM,EAAE,UAAU;AAAA,IACpF;AAAA,IACA,OAAO;AAAA,IACP,WAAW,mBAAmB,YAAY;AAAA,EAC5C;AACF;AAKA,eAAsB,OACpB,OACA,aACA,QAAgB,GAChB,OACyB;AACzB,MAAI,CAAC,OAAO;AACV,UAAM,IAAI,cAAc,iCAAiC;AAAA,EAC3D;AAEA,MAAI;AACF,UAAM,UAAU,MAAM,MACnB,OAAO,MAAM,KAAK,WAAW,CAAC,EAC9B,MAAM,QAAQ,EAAE,EAChB,QAAQ;AAEX,UAAM,WAAY,QACf;AAAA,MAAO,CAAC,MACP,EAAE,WACF,EAAE,QAAQ,KAAK,EAAE,SAAS,KAC1B,EAAE,QACF,EAAE,KAAK,SAAS;AAAA,IAClB,EACC,IAAI,CAAC,MAAgB,uBAAuB,GAAG,KAAK,CAAC,EACrD,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK,EAChC,MAAM,GAAG,KAAK;AAEjB,WAAO;AAAA,EACT,SAAS,OAAO;AACd,UAAM,WAAW,OAAO,KAAK;AAG7B,QAAI,SAAS,SAAS,YAAY,KAAK,SAAS,SAAS,QAAQ,GAAG;AAClE,YAAM,IAAI;AAAA,QACR;AAAA,QACA,EAAE,eAAe,MAAM;AAAA,MACzB;AAAA,IACF;AAEA,UAAM,UAAU,OAAO,kCAAkC;AAAA,EAC3D;AACF;AAKA,eAAsB,eACpB,OACA,SAKyB;AACzB,MAAI,CAAC,OAAO;AACV,UAAM,IAAI,cAAc,iCAAiC;AAAA,EAC3D;AAEA,QAAM,EAAE,UAAU,SAAS,QAAQ,IAAI,IAAI;AAE3C,MAAI;AACF,UAAM,aAAa,MAAM,mBAAmB,EAAE,KAAK,CAAC;AACpD,UAAM,QAAQ,MAAM,OAAO,UAAU,EAClC,MAAM,YAAY,EAClB,MAAM,KAAK,IAAI,QAAQ,GAAG,GAAG,CAAC;AAEjC,UAAM,UAAU,MAAM,MAAM,QAAQ;AAEpC,QAAI,WAAY,QAAkC;AAAA,MAAO,CAAC,MACxD,EAAE,WACF,EAAE,QAAQ,KAAK,EAAE,SAAS,KAC1B,EAAE,QACF,EAAE,KAAK,SAAS;AAAA,IAClB;AAEA,QAAI,UAAU;AACZ,iBAAW,SAAS;AAAA,QAAO,CAAC,MAC1B,EAAE,YAAY,EAAE,SAAS,YAAY,MAAM,SAAS,YAAY;AAAA,MAClE;AAAA,IACF;AAEA,QAAI,SAAS;AACX,YAAM,QAAQ,IAAI,OAAO,SAAS,GAAG;AACrC,iBAAW,SAAS;AAAA,QAAO,CAAC,MAC1B,MAAM,KAAK,EAAE,OAAO,KAAK,MAAM,KAAK,EAAE,IAAI;AAAA,MAC5C;AAAA,IACF;AAEA,WAAO,SAAS,MAAM,GAAG,KAAK,EAAE,IAAI,CAAC,MAAgB;AACnD,YAAM,QAAQ;AACd,aAAO;AAAA,QACL,SAAS,EAAE;AAAA,QACX,UAAU;AAAA,UACR,MAAM,EAAE;AAAA,UACR,WAAW,EAAE;AAAA,UACb,SAAS,EAAE;AAAA,UACX,MAAM,EAAE;AAAA,UACR,UAAU,EAAE;AAAA;AAAA,UAEZ,YAAY,EAAE,cAAc;AAAA,UAC5B,YAAY,EAAE;AAAA,UACd,aAAa,EAAE,eAAe;AAAA,UAC9B,YAAY,EAAE,cAAc;AAAA,UAC5B,YAAa,EAAE,cAAc,EAAE,WAAW,SAAS,KAAK,EAAE,WAAW,CAAC,MAAM,KAAM,EAAE,aAAa;AAAA,UACjG,WAAW,EAAE,aAAa;AAAA,UAC1B,SAAU,EAAE,WAAW,EAAE,QAAQ,SAAS,KAAK,EAAE,QAAQ,CAAC,MAAM,KAAM,EAAE,UAAU;AAAA,QACpF;AAAA,QACA;AAAA,QACA,WAAW,mBAAmB,KAAK;AAAA,MACrC;AAAA,IACF,CAAC;AAAA,EACH,SAAS,OAAO;AACd,UAAM,UAAU,OAAO,4BAA4B;AAAA,EACrD;AACF;AAKA,SAAS,kBACP,QACA,YACA,SACS;AAET,MAAI,OAAO,YAAY;AACrB,QAAI,eAAe,YAAY;AAC7B,aAAO,OAAO,eAAe,cAAc,OAAO,eAAe;AAAA,IACnE,WAAW,eAAe,SAAS;AACjC,aAAO,OAAO,eAAe;AAAA,IAC/B,WAAW,eAAe,aAAa;AACrC,aAAO,OAAO,eAAe;AAAA,IAC/B;AACA,WAAO;AAAA,EACT;AAGA,SAAO
,QAAQ,SAAS,KAAK,QAAQ,KAAK,CAAC,MAAc,EAAE,SAAS,KAAK,MAAM,EAAE;AACnF;AAKA,eAAsB,aACpB,OACA,SAMyB;AACzB,MAAI,CAAC,OAAO;AACV,UAAM,IAAI,cAAc,iCAAiC;AAAA,EAC3D;AAEA,QAAM,EAAE,UAAU,SAAS,YAAY,QAAQ,GAAG,IAAI;AAEtD,MAAI;AACF,UAAM,aAAa,MAAM,mBAAmB,EAAE,KAAK,CAAC;AACpD,UAAM,QAAQ,MAAM,OAAO,UAAU,EAClC,MAAM,YAAY,EAClB,MAAM,KAAK,IAAI,QAAQ,IAAI,GAAG,CAAC;AAElC,UAAM,UAAU,MAAM,MAAM,QAAQ;AAEpC,QAAI,WAAY,QAAkC,OAAO,CAAC,MAAgB;AACxE,UAAI,CAAC,EAAE,WAAW,EAAE,QAAQ,KAAK,EAAE,WAAW,GAAG;AAC/C,eAAO;AAAA,MACT;AACA,UAAI,CAAC,EAAE,QAAQ,EAAE,KAAK,WAAW,GAAG;AAClC,eAAO;AAAA,MACT;AAEA,UAAI,aAAa,CAAC,EAAE,YAAY,EAAE,SAAS,YAAY,MAAM,SAAS,YAAY,IAAI;AACpF,eAAO;AAAA,MACT;AAEA,YAAM,UAAU,eAAe,aAAc,EAAE,iBAAiB,CAAC,IAClD,eAAe,UAAW,EAAE,cAAc,CAAC,IAC3C,eAAe,cAAe,EAAE,kBAAkB,CAAC,IACnD,CAAC,GAAI,EAAE,iBAAiB,CAAC,GAAI,GAAI,EAAE,cAAc,CAAC,GAAI,GAAI,EAAE,kBAAkB,CAAC,CAAE;AAEhG,YAAM,gBAAgB,EAAE,cAAc;AAEtC,UAAI,QAAQ,WAAW,KAAK,CAAC,eAAe;AAC1C,eAAO;AAAA,MACT;AAEA,UAAI,SAAS;AACX,cAAM,QAAQ,IAAI,OAAO,SAAS,GAAG;AACrC,cAAM,oBAAoB,QAAQ,KAAK,CAAC,MAAc,MAAM,KAAK,CAAC,CAAC;AACnE,cAAM,mBAAmB,MAAM,KAAK,aAAa;AACjD,cAAM,cAAc,qBAAqB;AAEzC,YAAI,CAAC,YAAa,QAAO;AAEzB,YAAI,YAAY;AACd,iBAAO,kBAAkB,GAAG,YAAY,OAAO;AAAA,QACjD;AAEA,eAAO;AAAA,MACT;AAEA,UAAI,YAAY;AACd,eAAO,kBAAkB,GAAG,YAAY,OAAO;AAAA,MACjD;AAEA,aAAO;AAAA,IACT,CAAC;AAED,WAAO,SAAS,MAAM,GAAG,KAAK,EAAE,IAAI,CAAC,MAAgB;AACnD,YAAM,QAAQ;AACd,aAAO;AAAA,QACL,SAAS,EAAE;AAAA,QACX,UAAU;AAAA,UACR,MAAM,EAAE;AAAA,UACR,WAAW,EAAE;AAAA,UACb,SAAS,EAAE;AAAA,UACX,MAAM,EAAE;AAAA,UACR,UAAU,EAAE;AAAA,UACZ,SAAS;AAAA,YACP,WAAY,EAAE,iBAAiB,EAAE,cAAc,SAAS,KAAK,EAAE,cAAc,CAAC,MAAM,KAAM,EAAE,gBAAgB,CAAC;AAAA,YAC7G,SAAU,EAAE,cAAc,EAAE,WAAW,SAAS,KAAK,EAAE,WAAW,CAAC,MAAM,KAAM,EAAE,aAAa,CAAC;AAAA,YAC/F,YAAa,EAAE,kBAAkB,EAAE,eAAe,SAAS,KAAK,EAAE,eAAe,CAAC,MAAM,KAAM,EAAE,iBAAiB,CAAC;AAAA,UACpH;AAAA;AAAA,UAEA,YAAY,EAAE,cAAc;AAAA,UAC5B,YAAY,EAAE;AAAA,UACd,aAAa,EAAE,eAAe;AAAA,UAC9B,YAAY,EAAE,cAAc;AAAA,UAC5B,YAAa,EAAE,cAAc,EAAE,WAAW,SAAS,KAAK,EAAE,WAAW,CAAC,MAAM,KAAM,EAAE,aAAa;AAAA,UACjG,WAAW,EAAE,aAAa;AAAA,UAC1B,SAAU,EAAE,WAAW,EAAE,QAAQ,SAAS,KAAK,EAAE,QAAQ,CAAC,MAAM,KAAM,EAAE,UAAU;AAAA,QACpF;AAAA,QACA;AAAA,QACA,WAAW,mBAAmB,KAAK;AAAA,MACrC;AAAA,IACF,CAAC;AAAA,EACH,SAAS,OAAO;AACd,UAAM,UAAU,OAAO,yBAAyB;AAAA,EAClD;AACF;AAxkBA;AAAA;AAAA;AAEA;AACA;AACA;AACA;AAAA;AAAA;;;ACaA,eAAsB,YACpB,IACA,OACA,WACA,SACA,WACA,UAC8B;AAC9B,MAAI,CAAC,IAAI;AACP,UAAM,IAAI,cAAc,iCAAiC;AAAA,EAC3D;AAEA,MAAI,QAAQ,WAAW,UAAU,UAAU,QAAQ,WAAW,SAAS,QAAQ;AAC7E,UAAM,IAAI,cAAc,qEAAqE;AAAA,MAC3F,eAAe,QAAQ;AAAA,MACvB,iBAAiB,UAAU;AAAA,MAC3B,gBAAgB,SAAS;AAAA,IAC3B,CAAC;AAAA,EACH;AAGA,MAAI,QAAQ,WAAW,GAAG;AACxB,WAAO;AAAA,EACT;AAGA,MAAI,QAAQ,SAAS,0BAA0B;AAC7C,QAAI,eAAe;AACnB,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK,0BAA0B;AACjE,YAAM,eAAe,QAAQ,MAAM,GAAG,KAAK,IAAI,IAAI,0BAA0B,QAAQ,MAAM,CAAC;AAC5F,YAAM,gBAAgB,UAAU,MAAM,GAAG,KAAK,IAAI,IAAI,0BAA0B,QAAQ,MAAM,CAAC;AAC/F,YAAM,gBAAgB,SAAS,MAAM,GAAG,KAAK,IAAI,IAAI,0BAA0B,QAAQ,MAAM,CAAC;AAE9F,qBAAe,MAAM,oBAAoB,IAAI,cAAc,WAAW,cAAc,eAAe,aAAa;AAAA,IAClH;AACA,QAAI,CAAC,cAAc;AACjB,YAAM,IAAI,cAAc,4CAA4C;AAAA,IACtE;AACA,WAAO;AAAA,EACT,OAAO;AACL,WAAO,oBAAoB,IAAI,OAAO,WAAW,SAAS,WAAW,QAAQ;AAAA,EAC/E;AACF;AAMA,eAAe,oBACb,IACA,OACA,WACA,SACA,WACA,UACuB;AAOvB,QAAM,QAA0B,CAAC,EAAE,SAAS,WAAW,SAAS,CAAC;AACjE,QAAM,gBAAkC,CAAC;AACzC,MAAI,eAAe;AAGnB,SAAO,MAAM,SAAS,GAAG;AACvB,UAAM,QAAQ,MAAM,MAAM;AAC1B,QAAI,CAAC,MAAO;AAEZ,QAAI;AACF,YAAM,UAAU,MAAM,QAAQ,IAAI,CAAC,QAAQ,OAAO;AAAA,QAChD,QAAQ,MAAM,KAAK,MAAM;AAAA,QACzB,SAAS,MAAM,SAAS,CAAC;AAAA,QACzB,MAAM,MAAM,UAAU,CAAC,EAAE;AAAA,QACzB,WAAW,MAAM,UAAU,CAAC,EAAE;AAAA,QAC9B,SAAS,MAAM,UAAU,CAAC,EAAE;AAAA,QAC5B,MAAM,MAAM,UAAU,CAAC,EAAE;AA
AA,QACzB,UAAU,MAAM,UAAU,CAAC,EAAE;AAAA;AAAA,QAE7B,eAAgB,MAAM,UAAU,CAAC,EAAE,SAAS,aAAa,MAAM,UAAU,CAAC,EAAE,QAAQ,UAAU,SAAS,IAAK,MAAM,UAAU,CAAC,EAAE,QAAQ,YAAY,CAAC,EAAE;AAAA,QACtJ,YAAa,MAAM,UAAU,CAAC,EAAE,SAAS,WAAW,MAAM,UAAU,CAAC,EAAE,QAAQ,QAAQ,SAAS,IAAK,MAAM,UAAU,CAAC,EAAE,QAAQ,UAAU,CAAC,EAAE;AAAA,QAC7I,gBAAiB,MAAM,UAAU,CAAC,EAAE,SAAS,cAAc,MAAM,UAAU,CAAC,EAAE,QAAQ,WAAW,SAAS,IAAK,MAAM,UAAU,CAAC,EAAE,QAAQ,aAAa,CAAC,EAAE;AAAA;AAAA,QAE1J,YAAY,MAAM,UAAU,CAAC,EAAE,cAAc;AAAA,QAC7C,YAAY,MAAM,UAAU,CAAC,EAAE,cAAc;AAAA,QAC7C,aAAa,MAAM,UAAU,CAAC,EAAE,eAAe;AAAA,QAC/C,YAAY,MAAM,UAAU,CAAC,EAAE,cAAc;AAAA,QAC7C,YAAa,MAAM,UAAU,CAAC,EAAE,cAAc,MAAM,UAAU,CAAC,EAAE,WAAW,SAAS,IAAK,MAAM,UAAU,CAAC,EAAE,aAAa,CAAC,EAAE;AAAA,QAC7H,WAAW,MAAM,UAAU,CAAC,EAAE,aAAa;AAAA,QAC3C,SAAU,MAAM,UAAU,CAAC,EAAE,WAAW,MAAM,UAAU,CAAC,EAAE,QAAQ,SAAS,IAAK,MAAM,UAAU,CAAC,EAAE,UAAU,CAAC,EAAE;AAAA,MACnH,EAAE;AAGF,UAAI,CAAC,cAAc;AACjB,uBAAe,MAAM,GAAG,YAAY,WAAW,OAAO;AAAA,MACxD,OAAO;AACL,cAAM,aAAa,IAAI,OAAO;AAAA,MAChC;AAAA,IACF,SAAS,OAAO;AAEd,UAAI,MAAM,QAAQ,SAAS,0BAA0B;AACnD,cAAM,OAAO,KAAK,MAAM,MAAM,QAAQ,SAAS,CAAC;AAGhD,cAAM,KAAK;AAAA,UACT,SAAS,MAAM,QAAQ,MAAM,GAAG,IAAI;AAAA,UACpC,WAAW,MAAM,UAAU,MAAM,GAAG,IAAI;AAAA,UACxC,UAAU,MAAM,SAAS,MAAM,GAAG,IAAI;AAAA,QACxC,CAAC;AACD,cAAM,KAAK;AAAA,UACT,SAAS,MAAM,QAAQ,MAAM,IAAI;AAAA,UACjC,WAAW,MAAM,UAAU,MAAM,IAAI;AAAA,UACrC,UAAU,MAAM,SAAS,MAAM,IAAI;AAAA,QACrC,CAAC;AAAA,MACH,OAAO;AAEL,sBAAc,KAAK,KAAK;AAAA,MAC1B;AAAA,IACF;AAAA,EACF;AAGA,MAAI,cAAc,SAAS,GAAG;AAC5B,UAAM,cAAc,cAAc,OAAO,CAAC,KAAK,UAAU,MAAM,MAAM,QAAQ,QAAQ,CAAC;AACtF,UAAM,IAAI;AAAA,MACR,oBAAoB,WAAW;AAAA,MAC/B;AAAA,QACE,eAAe,cAAc;AAAA,QAC7B,cAAc;AAAA,QACd,YAAY,cAAc,CAAC,EAAE,UAAU,CAAC,EAAE;AAAA,MAC5C;AAAA,IACF;AAAA,EACF;AAEA,MAAI,CAAC,cAAc;AACjB,UAAM,IAAI,cAAc,4CAA4C;AAAA,EACtE;AACA,SAAO;AACT;AA9JA;AAAA;AAAA;AACA;AACA;AAAA;AAAA;;;ACYA,eAAsB,MACpB,IACA,OACA,WACe;AACf,MAAI,CAAC,IAAI;AACP,UAAM,IAAI,cAAc,iCAAiC;AAAA,EAC3D;AAEA,MAAI;AAEF,QAAI,OAAO;AACT,YAAM,GAAG,UAAU,SAAS;AAAA,IAC9B;AAAA,EACF,SAAS,OAAO;AACd,UAAM,UAAU,OAAO,iCAAiC;AAAA,EAC1D;AACF;AAKA,eAAsB,aACpB,OACA,UACe;AACf,MAAI,CAAC,OAAO;AACV,UAAM,IAAI,cAAc,iCAAiC;AAAA,EAC3D;AAEA,MAAI;AACF,UAAM,MAAM,OAAO,WAAW,QAAQ,GAAG;AAAA,EAC3C,SAAS,OAAO;AACd,UAAM,UAAU,OAAO,4CAA4C;AAAA,EACrE;AACF;AAKA,eAAsB,WACpB,IACA,OACA,WACA,QACA,UACA,SACA,WACA,UACuB;AACvB,MAAI,CAAC,OAAO;AACV,UAAM,IAAI,cAAc,iCAAiC;AAAA,EAC3D;AAEA,MAAI;AAEF,UAAM,aAAa,OAAO,QAAQ;AAGlC,QAAI,eAAe;AACnB,QAAI,QAAQ,SAAS,GAAG;AACtB,qBAAe,MAAM,YAAY,IAAI,OAAO,WAAW,SAAS,WAAW,QAAQ;AACnF,UAAI,CAAC,cAAc;AACjB,cAAM,IAAI,cAAc,wCAAwC;AAAA,MAClE;AAAA,IACF;AAGA,UAAM,iBAAiB,MAAM;AAE7B,WAAO;AAAA,EACT,SAAS,OAAO;AACd,UAAM,UAAU,OAAO,0CAA0C;AAAA,EACnE;AACF;AAxFA;AAAA;AAAA;AACA;AACA,IAAAC;AACA;AAAA;AAAA;;;ACHA;AAAA;AAAA;AAAA;AAAA,YAAY,aAAa;AACzB,OAAOC,YAAU;AACjB,OAAOC,SAAQ;AACf,OAAOC,aAAY;AAHnB,IAgBa;AAhBb;AAAA;AAAA;AAMA;AACA,IAAAC;AACA;AACA;AACA;AACA;AAKO,IAAM,WAAN,MAAM,UAAsC;AAAA,MACzC,KAA+B;AAAA,MAC/B,QAA6B;AAAA,MACrB;AAAA,MACC,YAAY;AAAA,MACrB,mBAA2B;AAAA,MAC3B,iBAAyB;AAAA,MAEjC,YAAY,aAAqB;AAE/B,cAAM,cAAcH,OAAK,SAAS,WAAW;AAG7C,cAAM,WAAWE,QACd,WAAW,KAAK,EAChB,OAAO,WAAW,EAClB,OAAO,KAAK,EACZ,UAAU,GAAG,CAAC;AAEjB,aAAK,SAASF,OAAK;AAAA,UACjBC,IAAG,QAAQ;AAAA,UACX;AAAA,UACA;AAAA,UACA,GAAG,WAAW,IAAI,QAAQ;AAAA,QAC5B;AAAA,MACF;AAAA,MAEA,MAAM,aAA4B;AAChC,YAAI;AACF,eAAK,KAAK,MAAc,gBAAQ,KAAK,MAAM;AAE3C,cAAI;AACF,iBAAK,QAAQ,MAAM,KAAK,GAAG,UAAU,KAAK,SAAS;AAAA,UACrD,QAAQ;AAEN,iBAAK,QAAQ;AAAA,UACf;AAGA,cAAI;AACF,iBAAK,iBAAiB,MAAM,gBAAgB,KAAK,MAAM;AAAA,UACzD,QAAQ;AAEN,iBAAK,iBAAiB;AAAA,UACxB;AAAA,QACF,SAAS,OAAgB;AACvB,gBAAM,UAAU,OAAO,wCAAwC,EAAE,QAAQ,KAAK,OAAO,CAAC;AAAA,QACxF;AAAA,MACF;AAAA,M
AEA,MAAM,YACJ,SACA,WACA,UACe;AACf,YAAI,CAAC,KAAK,IAAI;AACZ,gBAAM,IAAI,cAAc,iCAAiC;AAAA,QAC3D;AAGA,aAAK,QAAQ,MAAe;AAAA,UAC1B,KAAK;AAAA,UACL,KAAK;AAAA,UACL,KAAK;AAAA,UACL;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,MAAM,OACJ,aACA,QAAgB,GAChB,OACyB;AACzB,YAAI,CAAC,KAAK,OAAO;AACf,gBAAM,IAAI,cAAc,iCAAiC;AAAA,QAC3D;AAEA,YAAI;AACF,iBAAO,MAAe,OAAO,KAAK,OAAO,aAAa,OAAO,KAAK;AAAA,QACpE,SAAS,OAAO;AACd,gBAAM,WAAW,OAAO,KAAK;AAG7B,cAAI,SAAS,SAAS,YAAY,KAAK,SAAS,SAAS,QAAQ,GAAG;AAElE,gBAAI;AACF,oBAAM,KAAK,WAAW;AACtB,kBAAI,CAAC,KAAK,OAAO;AACf,sBAAM,IAAI,cAAc,oDAAoD;AAAA,cAC9E;AACA,qBAAO,MAAe,OAAO,KAAK,OAAO,aAAa,OAAO,KAAK;AAAA,YACpE,SAAS,YAAqB;AAC5B,oBAAM,IAAI;AAAA,gBACR;AAAA,gBACA,EAAE,eAAe,WAAW;AAAA,cAC9B;AAAA,YACF;AAAA,UACF;AAEA,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,MAEA,MAAM,eAAe,SAIO;AAC1B,YAAI,CAAC,KAAK,OAAO;AACf,gBAAM,IAAI,cAAc,iCAAiC;AAAA,QAC3D;AACA,eAAgB,eAAe,KAAK,OAAO,OAAO;AAAA,MACpD;AAAA,MAEA,MAAM,aAAa,SAKS;AAC1B,YAAI,CAAC,KAAK,OAAO;AACf,gBAAM,IAAI,cAAc,iCAAiC;AAAA,QAC3D;AACA,eAAgB,aAAa,KAAK,OAAO,OAAO;AAAA,MAClD;AAAA,MAEA,MAAM,QAAuB;AAC3B,YAAI,CAAC,KAAK,IAAI;AACZ,gBAAM,IAAI,cAAc,iCAAiC;AAAA,QAC3D;AACA,cAAqB,MAAM,KAAK,IAAI,KAAK,OAAO,KAAK,SAAS;AAC9D,aAAK,QAAQ;AAAA,MACf;AAAA,MAEA,MAAM,aAAa,UAAiC;AAClD,YAAI,CAAC,KAAK,OAAO;AACf,gBAAM,IAAI,cAAc,iCAAiC;AAAA,QAC3D;AACA,cAAqB,aAAa,KAAK,OAAO,QAAQ;AAAA,MACxD;AAAA,MAEA,MAAM,WACJ,UACA,SACA,WACA,UACe;AACf,YAAI,CAAC,KAAK,IAAI;AACZ,gBAAM,IAAI,cAAc,4CAA4C;AAAA,QACtE;AACA,YAAI,CAAC,KAAK,OAAO;AACf,gBAAM,IAAI,cAAc,uCAAuC;AAAA,QACjE;AACA,aAAK,QAAQ,MAAqB;AAAA,UAChC,KAAK;AAAA,UACL,KAAK;AAAA,UACL,KAAK;AAAA,UACL,KAAK;AAAA,UACL;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,MAAM,eAAiC;AACrC,cAAM,MAAM,KAAK,IAAI;AAGrB,YAAI,MAAM,KAAK,mBAAmB,KAAM;AACtC,iBAAO;AAAA,QACT;AAEA,aAAK,mBAAmB;AAExB,YAAI;AACF,gBAAM,UAAU,MAAM,gBAAgB,KAAK,MAAM;AAEjD,cAAI,UAAU,KAAK,gBAAgB;AACjC,iBAAK,iBAAiB;AACtB,mBAAO;AAAA,UACT;AAEA,iBAAO;AAAA,QACT,SAAS,OAAO;AAEd,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,MAEA,MAAM,YAA2B;AAC/B,YAAI;AAEF,eAAK,QAAQ;AACb,eAAK,KAAK;AAGV,gBAAM,KAAK,WAAW;AAAA,QACxB,SAAS,OAAO;AACd,gBAAM,UAAU,OAAO,wCAAwC;AAAA,QACjE;AAAA,MACF;AAAA,MAEA,oBAA4B;AAC1B,eAAO,KAAK;AAAA,MACd;AAAA,MAEA,iBAAyB;AACvB,YAAI,KAAK,mBAAmB,GAAG;AAC7B,iBAAO;AAAA,QACT;AACA,eAAO,IAAI,KAAK,KAAK,cAAc,EAAE,eAAe;AAAA,MACtD;AAAA,MAEA,MAAM,UAA4B;AAChC,YAAI,CAAC,KAAK,OAAO;AACf,iBAAO;AAAA,QACT;AAEA,YAAI;AACF,gBAAM,QAAQ,MAAM,KAAK,MAAM,UAAU;AAEzC,cAAI,UAAU,GAAG;AACf,mBAAO;AAAA,UACT;AAGA,gBAAM,SAAS,MAAM,KAAK,MACvB,OAAO,MAAM,mBAAmB,EAAE,KAAK,CAAC,CAAC,EACzC,MAAM,KAAK,IAAI,OAAO,CAAC,CAAC,EACxB,QAAQ;AAEX,gBAAM,cAAe,OAA4B;AAAA,YAAK,CAAC,MACrD,EAAE,WACF,EAAE,QAAQ,KAAK,EAAE,SAAS;AAAA,UAC5B;AAEA,iBAAO;AAAA,QACT,QAAQ;AAEN,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,MAEA,aAAa,KAAK,aAAwC;AACxD,cAAM,KAAK,IAAI,UAAS,WAAW;AACnC,cAAM,GAAG,WAAW;AACpB,eAAO;AAAA,MACT;AAAA,IACF;AAAA;AAAA;;;AC3QA;AAAA;AAAA;AAAA;AAAA,OAAOG,UAAQ;AACf,OAAOC,YAAU;AADjB,IAMM,eA8BO;AApCb;AAAA;AAAA;AAEA;AAEA;AAEA,IAAM,gBAAgB;AA8Bf,IAAM,kBAAN,MAAsB;AAAA,MACnB;AAAA,MACA;AAAA;AAAA;AAAA;AAAA;AAAA,MAMA,aAAa,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,MAMrC,YAAY,WAAmB;AAC7B,aAAK,YAAY;AACjB,aAAK,eAAeA,OAAK,KAAK,WAAW,aAAa;AAAA,MACxD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAWA,MAAM,OAAsC;AAC1C,YAAI;AACF,gBAAM,UAAU,MAAMD,KAAG,SAAS,KAAK,cAAc,OAAO;AAC5D,gBAAM,WAAW,KAAK,MAAM,OAAO;AAGnC,cAAI,SAAS,kBAAkB,sBAAsB;AACnD,oBAAQ;AAAA,cACN,wBAAwB,SAAS,aAAa,kCAAkC,oBAAoB;AAAA,YACtG;AACA,oBAAQ,MAAM,iDAAiD;AAG/D,kBAAM,KAAK,MAAM;AACjB,mBAAO;AAAA,UACT;AAEA,iBAAO;AAAA,QACT,SAAS,OAAO;AAEd,cAAK,MAAgC,SAAS,UAAU;AACtD,mBAAO;AAAA,UACT;AAGA,kBAAQ,MAAM,4CAA4C,KAAK,EAAE;AACjE,iBAAO;AAAA,QACT;AAAA,MACF;AAAA;AAAA;AAAA;
AAAA;AAAA;AAAA;AAAA,MAQA,MAAM,KAAK,UAAwC;AACjD,YAAI;AAEF,gBAAMA,KAAG,MAAM,KAAK,WAAW,EAAE,WAAW,KAAK,CAAC;AAGlD,gBAAM,iBAAgC;AAAA,YACpC,GAAG;AAAA,YACH,eAAe;AAAA,YACf,aAAa,kBAAkB;AAAA,YAC/B,aAAa,KAAK,IAAI;AAAA,UACxB;AAEA,gBAAM,UAAU,KAAK,UAAU,gBAAgB,MAAM,CAAC;AACtD,gBAAMA,KAAG,UAAU,KAAK,cAAc,SAAS,OAAO;AAAA,QACxD,SAAS,OAAO;AAEd,kBAAQ,MAAM,4CAA4C,KAAK,EAAE;AAAA,QACnE;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MASA,MAAM,WAAW,UAAkB,OAAiC;AAElE,aAAK,aAAa,KAAK,WAAW,KAAK,YAAY;AACjD,gBAAM,WAAW,MAAM,KAAK,KAAK,KAAK,KAAK,YAAY;AACvD,mBAAS,MAAM,QAAQ,IAAI;AAC3B,gBAAM,KAAK,KAAK,QAAQ;AAAA,QAC1B,CAAC,EAAE,MAAM,WAAS;AAChB,kBAAQ,MAAM,wCAAwC,QAAQ,KAAK,KAAK,EAAE;AAE1E,iBAAO;AAAA,QACT,CAAC;AAGD,cAAM,KAAK;AAAA,MACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAWA,MAAM,WAAW,UAAiC;AAEhD,aAAK,aAAa,KAAK,WAAW,KAAK,YAAY;AACjD,gBAAM,WAAW,MAAM,KAAK,KAAK;AACjC,cAAI,CAAC,UAAU;AAEb;AAAA,UACF;AAEA,iBAAO,SAAS,MAAM,QAAQ;AAC9B,gBAAM,KAAK,KAAK,QAAQ;AAAA,QAC1B,CAAC,EAAE,MAAM,WAAS;AAChB,kBAAQ,MAAM,8CAA8C,QAAQ,KAAK,KAAK,EAAE;AAEhF,iBAAO;AAAA,QACT,CAAC;AAGD,cAAM,KAAK;AAAA,MACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,MAAM,YAAY,SAAqC;AAErD,aAAK,aAAa,KAAK,WAAW,KAAK,YAAY;AACjD,gBAAM,WAAW,MAAM,KAAK,KAAK,KAAK,KAAK,YAAY;AAEvD,qBAAW,SAAS,SAAS;AAC3B,qBAAS,MAAM,MAAM,QAAQ,IAAI;AAAA,UACnC;AAEA,gBAAM,KAAK,KAAK,QAAQ;AAAA,QAC1B,CAAC,EAAE,MAAM,WAAS;AAChB,kBAAQ,MAAM,wCAAwC,QAAQ,MAAM,WAAW,KAAK,EAAE;AAEtF,iBAAO;AAAA,QACT,CAAC;AAGD,cAAM,KAAK;AAAA,MACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,MAAM,eAAe,UAAmC;AAEtD,aAAK,aAAa,KAAK,WAAW,KAAK,YAAY;AACjD,gBAAM,WAAW,MAAM,KAAK,KAAK,KAAK,KAAK,YAAY;AAEvD,mBAAS,WAAW;AACpB,gBAAM,KAAK,KAAK,QAAQ;AAAA,QAC1B,CAAC,EAAE,MAAM,WAAS;AAChB,kBAAQ,MAAM,kDAAkD,KAAK,EAAE;AAEvE,iBAAO;AAAA,QACT,CAAC;AAGD,cAAM,KAAK;AAAA,MACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA,MAAM,kBAAqC;AACzC,cAAM,WAAW,MAAM,KAAK,KAAK;AACjC,YAAI,CAAC,SAAU,QAAO,CAAC;AAEvB,eAAO,OAAO,KAAK,SAAS,KAAK;AAAA,MACnC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,MAAM,gBAAgB,cAAsD;AAC1E,cAAM,WAAW,MAAM,KAAK,KAAK;AACjC,YAAI,CAAC,UAAU;AAEb,iBAAO,MAAM,KAAK,aAAa,KAAK,CAAC;AAAA,QACvC;AAEA,cAAM,eAAyB,CAAC;AAEhC,mBAAW,CAAC,UAAU,KAAK,KAAK,cAAc;AAC5C,gBAAM,QAAQ,SAAS,MAAM,QAAQ;AAErC,cAAI,CAAC,OAAO;AAEV,yBAAa,KAAK,QAAQ;AAAA,UAC5B,WAAW,MAAM,eAAe,OAAO;AAErC,yBAAa,KAAK,QAAQ;AAAA,UAC5B;AAAA,QACF;AAEA,eAAO;AAAA,MACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MASA,MAAM,gBAAgB,cAA8C;AAClE,cAAM,WAAW,MAAM,KAAK,KAAK;AACjC,YAAI,CAAC,SAAU,QAAO,CAAC;AAEvB,cAAM,eAAyB,CAAC;AAEhC,mBAAW,YAAY,OAAO,KAAK,SAAS,KAAK,GAAG;AAClD,cAAI,CAAC,aAAa,IAAI,QAAQ,GAAG;AAC/B,yBAAa,KAAK,QAAQ;AAAA,UAC5B;AAAA,QACF;AAEA,eAAO;AAAA,MACT;AAAA;AAAA;AAAA;AAAA,MAKA,MAAM,QAAuB;AAC3B,YAAI;AACF,gBAAMA,KAAG,OAAO,KAAK,YAAY;AAAA,QACnC,SAAS,OAAO;AAEd,cAAK,MAAgC,SAAS,UAAU;AACtD,oBAAQ,MAAM,6CAA6C,KAAK,EAAE;AAAA,UACpE;AAAA,QACF;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOQ,cAA6B;AACnC,eAAO;AAAA,UACL,eAAe;AAAA,UACf,aAAa,kBAAkB;AAAA,UAC/B,aAAa,KAAK,IAAI;AAAA,UACtB,OAAO,CAAC;AAAA,QACV;AAAA,MACF;AAAA,IACF;AAAA;AAAA;;;AC1TA;AAAA;AAAA;AAAA;AAAA,OAAOE,UAAQ;AACf,OAAOC,YAAU;AADjB,IAoBa;AApBb;AAAA;AAAA;AAEA;AAkBO,IAAM,kBAAN,MAAsB;AAAA,MACnB;AAAA,MACA;AAAA,MACA,eAAgC;AAAA,MAExC,YAAY,SAAiB,WAAmB;AAC9C,aAAK,UAAU;AACf,aAAK,YAAYA,OAAK,KAAK,WAAW,iBAAiB;AAAA,MACzD;AAAA;AAAA;AAAA;AAAA;AAAA,MAMA,MAAc,YAAsC;AAClD,YAAI;AACF,gBAAM,UAAU,MAAMD,KAAG,SAAS,KAAK,WAAW,OAAO;AACzD,iBAAO,KAAK,MAAM,OAAO;AAAA,QAC3B,QAAQ;AAEN,iBAAO;AAAA,QACT;AAAA,MACF;AAAA;AAAA;AAAA;AAAA,MAKA,MAAc,UAAU,OAAgC;AACtD,YAAI;AACF,gBAAM,UAAU,KAAK,UAAU,OAAO,MAAM,CAAC;AAC7C,gBAAMA,KAAG,UAAU,KAAK,WAAW,SAAS,OAAO;AAAA,QACrD,SAAS,OAAO;AAEd,kBAAQ,MAAM,6CAA6C,KAAK,EAAE;AAAA,QACpE;AAAA,MACF;AAAA;AAAA;AAAA;AAAA
;AAAA;AAAA;AAAA,MAQA,MAAc,qBAAwC;AACpD,cAAM,SAAS,MAAM,iBAAiB,KAAK,OAAO;AAClD,cAAM,SAAS,MAAM,iBAAiB,KAAK,OAAO;AAElD,eAAO;AAAA,UACL;AAAA,UACA;AAAA,UACA,WAAW,KAAK,IAAI;AAAA,QACtB;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,MAAM,aAAuC;AAE3C,cAAM,SAAS,MAAM,UAAU,KAAK,OAAO;AAC3C,YAAI,CAAC,QAAQ;AACX,iBAAO;AAAA,QACT;AAEA,YAAI;AAEF,eAAK,eAAe,MAAM,KAAK,mBAAmB;AAGlD,gBAAM,gBAAgB,MAAM,KAAK,UAAU;AAE3C,cAAI,CAAC,eAAe;AAElB,kBAAM,KAAK,UAAU,KAAK,YAAY;AACtC,mBAAO;AAAA,UACT;AAGA,gBAAM,gBAAgB,cAAc,WAAW,KAAK,aAAa;AACjE,gBAAM,gBAAgB,cAAc,WAAW,KAAK,aAAa;AAEjE,cAAI,CAAC,iBAAiB,CAAC,eAAe;AAEpC,mBAAO;AAAA,UACT;AAGA,cAAI,eAAyB,CAAC;AAE9B,cAAI,eAAe;AAEjB,gBAAI;AACF,6BAAe,MAAM;AAAA,gBACnB,KAAK;AAAA,gBACL,cAAc;AAAA,gBACd,KAAK,aAAa;AAAA,cACpB;AAAA,YACF,SAAS,OAAO;AAEd,sBAAQ,MAAM,iDAAiD,KAAK,EAAE;AACtE,6BAAe,MAAM;AAAA,gBACnB,KAAK;AAAA,gBACL,cAAc;AAAA,gBACd,KAAK,aAAa;AAAA,cACpB;AAAA,YACF;AAAA,UACF,WAAW,eAAe;AAExB,2BAAe,MAAM;AAAA,cACnB,KAAK;AAAA,cACL,cAAc;AAAA,cACd,KAAK,aAAa;AAAA,YACpB;AAAA,UACF;AAGA,gBAAM,KAAK,UAAU,KAAK,YAAY;AAEtC,iBAAO;AAAA,QACT,SAAS,OAAO;AACd,kBAAQ,MAAM,4CAA4C,KAAK,EAAE;AACjE,iBAAO;AAAA,QACT;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,MAAM,gBAA0C;AAE9C,cAAM,SAAS,MAAM,UAAU,KAAK,OAAO;AAC3C,YAAI,CAAC,QAAQ;AACX,iBAAO;AAAA,QACT;AAEA,YAAI;AAEF,gBAAM,WAAW,MAAM,KAAK,mBAAmB;AAG/C,cAAI,CAAC,KAAK,cAAc;AACtB,iBAAK,eAAe;AACpB,kBAAM,KAAK,UAAU,QAAQ;AAC7B,mBAAO;AAAA,UACT;AAGA,gBAAM,gBAAgB,KAAK,aAAa,WAAW,SAAS;AAC5D,gBAAM,gBAAgB,KAAK,aAAa,WAAW,SAAS;AAE5D,cAAI,CAAC,iBAAiB,CAAC,eAAe;AAEpC,mBAAO;AAAA,UACT;AAGA,cAAI,eAAyB,CAAC;AAE9B,cAAI,eAAe;AAEjB,gBAAI;AACF,6BAAe,MAAM;AAAA,gBACnB,KAAK;AAAA,gBACL,KAAK,aAAa;AAAA,gBAClB,SAAS;AAAA,cACX;AAAA,YACF,SAAS,OAAO;AAEd,sBAAQ,MAAM,iDAAiD,KAAK,EAAE;AACtE,6BAAe,MAAM;AAAA,gBACnB,KAAK;AAAA,gBACL,KAAK,aAAa;AAAA,gBAClB,SAAS;AAAA,cACX;AAAA,YACF;AAAA,UACF,WAAW,eAAe;AAExB,2BAAe,MAAM;AAAA,cACnB,KAAK;AAAA,cACL,KAAK,aAAa;AAAA,cAClB,SAAS;AAAA,YACX;AAAA,UACF;AAGA,eAAK,eAAe;AACpB,gBAAM,KAAK,UAAU,QAAQ;AAE7B,iBAAO;AAAA,QACT,SAAS,OAAO;AACd,kBAAQ,MAAM,wCAAwC,KAAK,EAAE;AAC7D,iBAAO;AAAA,QACT;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA,MAMA,WAA4B;AAC1B,eAAO,KAAK;AAAA,MACd;AAAA;AAAA;AAAA;AAAA;AAAA,MAMA,MAAM,cAA6B;AACjC,YAAI;AACF,eAAK,eAAe,MAAM,KAAK,mBAAmB;AAClD,gBAAM,KAAK,UAAU,KAAK,YAAY;AAAA,QACxC,SAAS,OAAO;AACd,kBAAQ,MAAM,sCAAsC,KAAK,EAAE;AAAA,QAC7D;AAAA,MACF;AAAA,IACF;AAAA;AAAA;;;ACjPA,OAAOE,UAAQ;AA2Bf,eAAsB,cACpB,SACA,UACA,QACgC;AAChC,QAAM,WAAW,IAAI,gBAAgB,SAAS,MAAM;AACpD,QAAM,gBAAgB,MAAM,SAAS,KAAK;AAG1C,MAAI,CAAC,eAAe;AAClB,UAAM,WAAW,MAAM,YAAY,SAAS,MAAM;AAClD,WAAO;AAAA,MACL,OAAO;AAAA,MACP,UAAU,CAAC;AAAA,MACX,SAAS,CAAC;AAAA,MACV,QAAQ;AAAA,IACV;AAAA,EACF;AAIA,QAAM,eAAe,MAAM,eAAe;AAC1C,QAAM,SAAS,MAAM,UAAU,OAAO;AAEtC,MAAI,gBAAgB,UAAU,cAAc,UAAU;AACpD,UAAM,aAAa,IAAI,gBAAgB,SAAS,SAAS,MAAM;AAC/D,UAAM,WAAW,WAAW;AAE5B,UAAM,eAAe,WAAW,SAAS;AAGzC,QAAI,iBACC,aAAa,WAAW,cAAc,SAAS,UAC/C,aAAa,WAAW,cAAc,SAAS,SAAS;AAE3D,UAAI;AAEF,cAAM,oBAAoB,MAAM;AAAA,UAC9B;AAAA,UACA,cAAc,SAAS;AAAA,UACvB,aAAa;AAAA,QACf;AACA,cAAM,kBAAkB,IAAI,IAAI,iBAAiB;AAGjD,cAAM,WAAW,MAAM,YAAY,SAAS,MAAM;AAClD,cAAM,iBAAiB,IAAI,IAAI,QAAQ;AAEvC,cAAM,QAAkB,CAAC;AACzB,cAAM,WAAqB,CAAC;AAC5B,cAAM,UAAoB,CAAC;AAG3B,mBAAW,YAAY,mBAAmB;AACxC,cAAI,eAAe,IAAI,QAAQ,GAAG;AAEhC,gBAAI,cAAc,MAAM,QAAQ,GAAG;AACjC,uBAAS,KAAK,QAAQ;AAAA,YACxB,OAAO;AACL,oBAAM,KAAK,QAAQ;AAAA,YACrB;AAAA,UACF;AAAA,QAEF;AAGA,mBAAW,YAAY,UAAU;AAC/B,cAAI,CAAC,cAAc,MAAM,QAAQ,KAAK,CAAC,gBAAgB,IAAI,QAAQ,GAAG;AACpE,kBAAM,KAAK,QAAQ;AAAA,UACrB;AAAA,QACF;AAGA,mBAAW,YAAY,OAAO,KAAK,cAAc,KAAK,GAAG;AACvD,cAAI,CAAC,eAAe,IAAI,QAAQ,GAAG;AACjC,oBAAQ,KAAK,QAAQ;AAAA,UACvB;AAAA,QACF;AAEA,eAAO;AAAA,UACL;AAAA,UACA;AAAA,UACA;
AAAA,UACA,QAAQ;AAAA,QACV;AAAA,MACF,SAAS,OAAO;AAEd,gBAAQ,KAAK,yDAAyD,KAAK,EAAE;AAC7E,cAAM,WAAW,MAAM,YAAY,SAAS,MAAM;AAClD,cAAM,iBAAiB,IAAI,IAAI,QAAQ;AAEvC,cAAM,UAAoB,CAAC;AAC3B,mBAAW,YAAY,OAAO,KAAK,cAAc,KAAK,GAAG;AACvD,cAAI,CAAC,eAAe,IAAI,QAAQ,GAAG;AACjC,oBAAQ,KAAK,QAAQ;AAAA,UACvB;AAAA,QACF;AAEA,eAAO;AAAA,UACL,OAAO;AAAA,UACP,UAAU,CAAC;AAAA,UACX;AAAA,UACA,QAAQ;AAAA,QACV;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,SAAO,MAAM,oBAAoB,SAAS,eAAe,MAAM;AACjE;AAKA,eAAe,YACb,SACA,QACmB;AACnB,MAAI,eAAe,MAAM,KAAK,OAAO,WAAW,SAAS,GAAG;AAC1D,WAAO,MAAM,2BAA2B,SAAS,MAAM;AAAA,EACzD,WAAW,eAAe,MAAM,GAAG;AACjC,WAAO,MAAM,aAAa;AAAA,MACxB;AAAA,MACA,iBAAiB,OAAO,SAAS;AAAA,MACjC,iBAAiB,OAAO,SAAS;AAAA,IACnC,CAAC;AAAA,EACH,OAAO;AACL,WAAO,MAAM,aAAa;AAAA,MACxB;AAAA,MACA,iBAAiB,CAAC;AAAA,MAClB,iBAAiB,CAAC;AAAA,IACpB,CAAC;AAAA,EACH;AACF;AAKA,eAAe,oBACb,SACA,eACA,QACgC;AAChC,QAAM,QAAkB,CAAC;AACzB,QAAM,WAAqB,CAAC;AAC5B,QAAM,UAAoB,CAAC;AAG3B,QAAM,eAAe,MAAM,YAAY,SAAS,MAAM;AACtD,QAAM,iBAAiB,IAAI,IAAI,YAAY;AAG3C,QAAM,YAAY,oBAAI,IAAoB;AAE1C,aAAW,YAAY,cAAc;AACnC,QAAI;AACF,YAAM,QAAQ,MAAMA,KAAG,KAAK,QAAQ;AACpC,gBAAU,IAAI,UAAU,MAAM,OAAO;AAAA,IACvC,QAAQ;AAEN;AAAA,IACF;AAAA,EACF;AAGA,aAAW,CAAC,UAAU,KAAK,KAAK,WAAW;AACzC,UAAM,QAAQ,cAAc,MAAM,QAAQ;AAE1C,QAAI,CAAC,OAAO;AAEV,YAAM,KAAK,QAAQ;AAAA,IACrB,WAAW,MAAM,eAAe,OAAO;AAErC,eAAS,KAAK,QAAQ;AAAA,IACxB;AAAA,EACF;AAGA,aAAW,YAAY,OAAO,KAAK,cAAc,KAAK,GAAG;AACvD,QAAI,CAAC,eAAe,IAAI,QAAQ,GAAG;AACjC,cAAQ,KAAK,QAAQ;AAAA,IACvB;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,EACV;AACF;AA1NA;AAAA;AAAA;AAEA;AACA;AACA;AACA;AACA;AAAA;AAAA;;;ACNA,OAAOC,UAAQ;AAoCf,eAAe,mBACb,UACA,SACA,YACA,QACA,SACmC;AAEnC,QAAM,YAAY,eAAe,MAAM,IACnC,OAAO,KAAK,YACX,eAAe,MAAM,IAAI,OAAO,SAAS,YAAY;AAC1D,QAAM,eAAe,eAAe,MAAM,IACtC,OAAO,KAAK,eACX,eAAe,MAAM,IAAI,OAAO,SAAS,eAAe;AAC7D,QAAM,SAAS,eAAe,MAAM,IAChC,OAAO,SAAS,SAChB;AACJ,QAAM,cAAc,eAAe,MAAM,IACrC,OAAO,SAAS,cAChB;AAGJ,QAAM,SAAS,UAAU,UAAU,SAAS;AAAA,IAC1C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,OAAO,WAAW,GAAG;AAEvB,QAAI,SAAS;AACX,cAAQ,MAAM,sBAAsB,QAAQ,EAAE;AAAA,IAChD;AACA,WAAO;AAAA,EACT;AAIA,QAAM,QAAQ,OAAO,IAAI,OAAK,EAAE,OAAO;AACvC,QAAM,UAA0B,CAAC;AAEjC,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,4BAA4B;AACjE,UAAM,aAAa,MAAM,MAAM,GAAG,KAAK,IAAI,IAAI,4BAA4B,MAAM,MAAM,CAAC;AACxF,UAAM,eAAe,MAAM,WAAW,WAAW,UAAU;AAC3D,YAAQ,KAAK,GAAG,YAAY;AAG5B,QAAI,MAAM,SAAS,4BAA4B;AAC7C,YAAM,IAAI,QAAQ,aAAW,aAAa,OAAO,CAAC;AAAA,IACpD;AAAA,EACF;AAEA,SAAO;AAAA,IACL,YAAY,OAAO;AAAA,IACnB;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAaA,eAAsB,gBACpB,UACA,UACA,YACA,QACA,UAAmC,CAAC,GACrB;AACf,QAAM,EAAE,QAAQ,IAAI;AAEpB,MAAI;AAEF,QAAI;AACF,YAAMA,KAAG,OAAO,QAAQ;AAAA,IAC1B,QAAQ;AAEN,UAAI,SAAS;AACX,gBAAQ,MAAM,wBAAwB,QAAQ,EAAE;AAAA,MAClD;AACA,YAAM,SAAS,aAAa,QAAQ;AAEpC,YAAMC,YAAW,IAAI,gBAAgB,SAAS,MAAM;AACpD,YAAMA,UAAS,WAAW,QAAQ;AAClC;AAAA,IACF;AAGA,UAAM,UAAU,MAAMD,KAAG,SAAS,UAAU,OAAO;AAGnD,UAAM,SAAS,MAAM,mBAAmB,UAAU,SAAS,YAAY,QAAQ,WAAW,KAAK;AAG/F,UAAM,QAAQ,MAAMA,KAAG,KAAK,QAAQ;AACpC,UAAM,WAAW,IAAI,gBAAgB,SAAS,MAAM;AAEpD,QAAI,WAAW,MAAM;AAEnB,YAAM,SAAS,aAAa,QAAQ;AACpC,YAAM,SAAS,WAAW,UAAU;AAAA,QAClC;AAAA,QACA,cAAc,MAAM;AAAA,QACpB,YAAY;AAAA,MACd,CAAC;AACD;AAAA,IACF;AAGA,UAAM,SAAS;AAAA,MACb;AAAA,MACA,OAAO;AAAA,MACP,OAAO,OAAO,IAAI,OAAK,EAAE,QAAQ;AAAA,MACjC,OAAO;AAAA,IACT;AAGA,UAAM,SAAS,WAAW,UAAU;AAAA,MAClC;AAAA,MACA,cAAc,MAAM;AAAA,MACpB,YAAY,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,SAAS;AACX,cAAQ,MAAM,yBAAoB,QAAQ,KAAK,OAAO,UAAU,UAAU;AAAA,IAC5E;AAAA,EACF,SAAS,OAAO;AAEd,YAAQ,MAAM,wCAA8B,QAAQ,KAAK,KAAK,EAAE;AAAA,EAClE;AACF;AAgBA,eAAsB,mBACpB,WACA,UACA,YACA,QACA,UAAmC,CAAC,GACnB;AACjB,QAAM,EAAE,QAAQ,IAAI;AACpB,MAAI,iBAAiB;AAGrB,Q
AAM,kBAAkF,CAAC;AAGzF,aAAW,YAAY,WAAW;AAEhC,QAAI;AACJ,QAAI;AACJ,QAAI;AACF,YAAM,QAAQ,MAAMA,KAAG,KAAK,QAAQ;AACpC,kBAAY,MAAM;AAClB,gBAAU,MAAMA,KAAG,SAAS,UAAU,OAAO;AAAA,IAC/C,SAAS,OAAO;AAEd,UAAI,SAAS;AACX,gBAAQ,MAAM,6BAA6B,QAAQ,EAAE;AAAA,MACvD;AACA,UAAI;AACF,cAAM,SAAS,aAAa,QAAQ;AACpC,cAAM,WAAW,IAAI,gBAAgB,SAAS,MAAM;AACpD,cAAM,SAAS,WAAW,QAAQ;AAAA,MACpC,SAASE,QAAO;AAEd,YAAI,SAAS;AACX,kBAAQ,MAAM,gBAAgB,QAAQ,eAAe;AAAA,QACvD;AAAA,MACF;AAEA;AACA;AAAA,IACF;AAEA,QAAI;AAEF,YAAM,SAAS,MAAM,mBAAmB,UAAU,SAAS,YAAY,QAAQ,WAAW,KAAK;AAE/F,UAAI,WAAW,MAAM;AAEnB,YAAI;AACF,gBAAM,SAAS,aAAa,QAAQ;AAAA,QACtC,SAAS,OAAO;AAAA,QAEhB;AAGA,cAAM,WAAW,IAAI,gBAAgB,SAAS,MAAM;AACpD,cAAM,SAAS,WAAW,UAAU;AAAA,UAClC;AAAA,UACA,cAAc;AAAA,UACd,YAAY;AAAA,QACd,CAAC;AAGD;AACA;AAAA,MACF;AAGA,UAAI;AACF,cAAM,SAAS,aAAa,QAAQ;AAAA,MACtC,SAAS,OAAO;AAAA,MAEhB;AAGA,YAAM,SAAS;AAAA,QACb,OAAO;AAAA,QACP,OAAO,OAAO,IAAI,OAAK,EAAE,QAAQ;AAAA,QACjC,OAAO;AAAA,MACT;AAGA,sBAAgB,KAAK;AAAA,QACnB;AAAA,QACA,YAAY,OAAO;AAAA,QACnB,OAAO;AAAA,MACT,CAAC;AAED,UAAI,SAAS;AACX,gBAAQ,MAAM,yBAAoB,QAAQ,KAAK,OAAO,UAAU,UAAU;AAAA,MAC5E;AAEA;AAAA,IACF,SAAS,OAAO;AAEd,cAAQ,MAAM,wCAA8B,QAAQ,KAAK,KAAK,EAAE;AAAA,IAChE;AAAA,EACF;AAGF,MAAI,gBAAgB,SAAS,GAAG;AAC9B,UAAM,WAAW,IAAI,gBAAgB,SAAS,MAAM;AACpD,UAAM,SAAS;AAAA,MACb,gBAAgB,IAAI,YAAU;AAAA,QAC5B,UAAU,MAAM;AAAA,QAChB,cAAc,MAAM;AAAA;AAAA,QACpB,YAAY,MAAM;AAAA,MACpB,EAAE;AAAA,IACJ;AAAA,EACF;AAEA,SAAO;AACT;AAhTA;AAAA;AAAA;AACA,IAAAC;AAGA;AACA;AACA;AAAA;AAAA;;;ACkEO,SAAS,qBAA6B;AAC3C,QAAM,UAAU,kBAAkB,uBAAuB,kBAAkB,MAAM;AACjF;AACA,SAAO;AACT;AAMO,SAAS,sBAA8B;AAC5C,QAAM,UAAU,mBAAmB,wBAAwB,mBAAmB,MAAM;AACpF;AACA,SAAO;AACT;AAMO,SAAS,yBAAiC;AAC/C,QAAM,UAAU,uBAAuB,oBAAoB,uBAAuB,MAAM;AACxF;AACA,SAAO;AACT;AAhGA,IAKM,mBA4BA,oBAkBA,wBAaF,sBACA,uBACA;AAlEJ;AAAA;AAAA;AAKA,IAAM,oBAAoB;AAAA,MACxB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,IAAM,qBAAqB;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,IAAM,yBAAyB;AAAA,MAC7B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,IAAI,uBAAuB;AAC3B,IAAI,wBAAwB;AAC5B,IAAI,oBAAoB;AAAA;AAAA;;;AClExB;AAAA;AAAA;AAAA;AAAA,OAAOC,UAAQ;AACf,OAAO,SAAS;AAChB,OAAOC,YAAW;AAClB,OAAO,YAAY;AA0BnB,eAAsB,cAAc,UAA2B,CAAC,GAAkB;AAChF,QAAM,UAAU,QAAQ,WAAW,QAAQ,IAAI;AAC/C,QAAM,UAAU,IAAI,8BAA8B,EAAE,MAAM;AAC1D,MAAI;AAEJ,MAAI;AAEF,YAAQ,OAAO;AACf,UAAM,SAAS,MAAM,cAAc,KAAK,OAAO;AAG/C,YAAQ,OAAO;AACf,UAAM,WAAW,IAAI,SAAS,OAAO;AACrC,UAAM,SAAS,WAAW;AAG1B,QAAI,CAAC,QAAQ,OAAO;AAClB,cAAQ,OAAO;AACf,YAAMC,YAAW,IAAI,gBAAgB,SAAS,MAAM;AACpD,YAAM,gBAAgB,MAAMA,UAAS,KAAK;AAE1C,UAAI,eAAe;AAEjB,cAAM,UAAU,MAAM,cAAc,SAAS,UAAU,MAAM;AAE7D,YAAI,QAAQ,WAAW,QAAQ;AAC7B,gBAAM,eAAe,QAAQ,MAAM,SAAS,QAAQ,SAAS;AAC7D,gBAAM,eAAe,QAAQ,QAAQ;AAErC,cAAI,iBAAiB,KAAK,iBAAiB,GAAG;AAC5C,oBAAQ,QAAQ,4CAA4C;AAC5D;AAAA,UACF;AAEA,kBAAQ;AAAA,YACN,qBAAqB,YAAY,oBAAoB,YAAY,eAAe,QAAQ,MAAM;AAAA,UAChG;AAGA,kBAAQ,MAAM,uBAAuB,CAAC;AACtC,gBAAMC,cAAa,IAAI,gBAAgB;AACvC,gBAAMA,YAAW,WAAW;AAC5B,kBAAQ,QAAQ,wBAAwB;AAGxC,cAAI,eAAe,GAAG;AACpB,oBAAQ,MAAM,YAAY,YAAY,mBAAmB;AACzD,gBAAI,eAAe;AACnB,uBAAW,YAAY,QAAQ,SAAS;AACtC,kBAAI;AACF,sBAAM,SAAS,aAAa,QAAQ;AACpC,sBAAMD,UAAS,WAAW,QAAQ;AAClC;AAAA,cACF,SAAS,KAAK;AACZ,wBAAQ,KAAK,0BAA0B,QAAQ,MAAM,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AAAA,cACzG;AAAA,YACF;AACA,oBAAQ,QAAQ,WAAW,YAAY,IAAI,YAAY,gBAAg
B;AAAA,UACzE;AAGA,cAAI,eAAe,GAAG;AACpB,oBAAQ,MAAM,cAAc,YAAY,mBAAmB;AAC3D,kBAAM,eAAe,CAAC,GAAG,QAAQ,OAAO,GAAG,QAAQ,QAAQ;AAC3D,kBAAM,QAAQ,MAAM;AAAA,cAClB;AAAA,cACA;AAAA,cACAC;AAAA,cACA;AAAA,cACA,EAAE,SAAS,QAAQ,QAAQ;AAAA,YAC7B;AAGA,kBAAM,iBAAiB,SAAS,MAAM;AAEtC,oBAAQ;AAAA,cACN,iCAAiC,KAAK,IAAI,YAAY;AAAA,YACxD;AAAA,UACF;AAGA,gBAAM,EAAE,gBAAAC,iBAAgB,WAAAC,WAAU,IAAI,MAAM;AAC5C,gBAAM,EAAE,iBAAAC,iBAAgB,IAAI,MAAM;AAClC,gBAAMC,gBAAe,MAAMH,gBAAe;AAC1C,gBAAMI,UAAS,MAAMH,WAAU,OAAO;AAEtC,cAAIE,iBAAgBC,SAAQ;AAC1B,kBAAM,aAAa,IAAIF,iBAAgB,SAAS,SAAS,MAAM;AAC/D,kBAAM,WAAW,WAAW;AAC5B,kBAAM,WAAW,WAAW,SAAS;AACrC,gBAAI,UAAU;AAEZ,oBAAMJ,UAAS,eAAe,QAAQ;AAAA,YACxC;AAAA,UACF;AAEA,kBAAQ,IAAID,OAAM,IAAI,kBAAkB,GAAGA,OAAM,KAAK,YAAY,GAAGA,OAAM,IAAI,yBAAyB,CAAC;AACzG;AAAA,QACF;AAGA,gBAAQ,OAAO;AAAA,MACjB;AAAA,IACF,OAAO;AACL,cAAQ,OAAO;AAAA,IACjB;AAGA,YAAQ,OAAO;AACf,QAAI;AAEJ,QAAI,eAAe,MAAM,KAAK,OAAO,WAAW,SAAS,GAAG;AAE1D,cAAQ,MAAM,2BAA2B,SAAS,MAAM;AAAA,IAC1D,WAAW,eAAe,MAAM,GAAG;AAEjC,cAAQ,MAAM,aAAa;AAAA,QACzB;AAAA,QACA,iBAAiB,OAAO,SAAS;AAAA,QACjC,iBAAiB,OAAO,SAAS;AAAA,MACnC,CAAC;AAAA,IACH,OAAO;AAEL,cAAQ,MAAM,aAAa;AAAA,QACzB;AAAA,QACA,iBAAiB,CAAC;AAAA,QAClB,iBAAiB,CAAC;AAAA,MACpB,CAAC;AAAA,IACH;AAEA,QAAI,MAAM,WAAW,GAAG;AACtB,cAAQ,KAAK,yBAAyB;AACtC;AAAA,IACF;AAEA,YAAQ,OAAO,SAAS,MAAM,MAAM;AAGpC,YAAQ,OAAO,uBAAuB;AACtC,UAAM,aAAa,IAAI,gBAAgB;AACvC,UAAM,WAAW,WAAW;AAC5B,YAAQ,QAAQ,wBAAwB;AAGxC,UAAM,cAAc,eAAe,MAAM,IACrC,OAAO,KAAK,cACZ;AACJ,UAAM,qBAAqB,eAAe,MAAM,IAC5C,OAAO,KAAK,qBACZ;AAEJ,UAAM,oBAAoB;AAE1B,YAAQ,MAAM,yBAAyB,WAAW,kBAAkB;AAEpE,UAAM,YAAY,KAAK,IAAI;AAC3B,QAAI,iBAAiB;AACrB,QAAI,kBAAkB;AAGtB,UAAM,mBAAuC,CAAC;AAC9C,UAAM,QAAQ,OAAO,WAAW;AAGhC,UAAM,qBAAqF,CAAC;AAG5F,UAAM,gBAAgB;AAAA,MACpB,gBAAgB;AAAA,MAChB,YAAY,MAAM;AAAA,MAClB,cAAc,mBAAmB;AAAA,IACnC;AAGA,UAAM,6BAA6B;AACnC,UAAM,+BAA+B;AACrC,UAAM,yBAAyB,KAAK,MAAM,+BAA+B,0BAA0B;AAEnG,QAAI,cAAc;AAClB,qBAAiB,YAAY,MAAM;AAEjC;AACA,UAAI,eAAe,wBAAwB;AACzC,sBAAc,eAAe,mBAAmB;AAChD,sBAAc;AAAA,MAChB;AAEA,cAAQ,OAAO,GAAG,cAAc,cAAc,IAAI,cAAc,UAAU,YAAY,cAAc,YAAY;AAAA,IAClH,GAAG,0BAA0B;AAG7B,UAAM,2BAA2B,YAAY;AAC3C,UAAI,iBAAiB,WAAW,EAAG;AAEnC,YAAM,YAAY,iBAAiB,OAAO,GAAG,iBAAiB,MAAM;AAGpE,eAAS,IAAI,GAAG,IAAI,UAAU,QAAQ,KAAK,oBAAoB;AAC7D,cAAM,QAAQ,UAAU,MAAM,GAAG,KAAK,IAAI,IAAI,oBAAoB,UAAU,MAAM,CAAC;AAGnF,sBAAc,eAAe,oBAAoB;AAIjD,cAAM,QAAQ,MAAM,IAAI,UAAQ,KAAK,OAAO;AAC5C,cAAM,mBAAmC,CAAC;AAE1C,iBAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,4BAA4B;AACjE,gBAAM,aAAa,MAAM,MAAM,GAAG,KAAK,IAAI,IAAI,4BAA4B,MAAM,MAAM,CAAC;AACxF,gBAAM,eAAe,MAAM,WAAW,WAAW,UAAU;AAC3D,2BAAiB,KAAK,GAAG,YAAY;AAGrC,gBAAM,IAAI,QAAQ,aAAW,aAAa,OAAO,CAAC;AAAA,QACpD;AAEA,2BAAmB,MAAM;AAGzB,sBAAc,eAAe,aAAa,MAAM,MAAM;AAEtD,cAAM,SAAS;AAAA,UACb;AAAA,UACA,MAAM,IAAI,UAAQ,KAAK,MAAM,QAAQ;AAAA,UACrC;AAAA,QACF;AAGA,cAAM,IAAI,QAAQ,aAAW,aAAa,OAAO,CAAC;AAAA,MACpD;AAEA,oBAAc,eAAe,mBAAmB;AAAA,IAClD;AAGA,UAAM,eAAe,MAAM;AAAA,MAAI,CAAC,SAC9B,MAAM,YAAY;AAChB,YAAI;AAEF,gBAAM,QAAQ,MAAMD,KAAG,KAAK,IAAI;AAChC,gBAAM,UAAU,MAAMA,KAAG,SAAS,MAAM,OAAO;AAC/C,gBAAM,YAAY,eAAe,MAAM,IACnC,OAAO,KAAK,YACZ;AACJ,gBAAM,eAAe,eAAe,MAAM,IACtC,OAAO,KAAK,eACZ;AACJ,gBAAM,SAAS,eAAe,MAAM,IAChC,OAAO,SAAS,SAChB;AACJ,gBAAM,cAAc,eAAe,MAAM,IACrC,OAAO,SAAS,cAChB;AAEJ,gBAAM,SAAS,UAAU,MAAM,SAAS;AAAA,YACtC;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAED,cAAI,OAAO,WAAW,GAAG;AACvB;AACA,0BAAc,iBAAiB;AAC/B;AAAA,UACF;AAGA,qBAAW,SAAS,QAAQ;AAC1B,6BAAiB,KAAK;AAAA,cACpB;AAAA,cACA,SAAS,MAAM;AAAA,YACjB,CAAC;AAAA,UACH;AAGA,6BAAmB,KAAK;AAAA,YACtB,UAAU;AAAA,YACV,YAAY,OAAO;AAAA,YACnB,OAAO,MAAM;AAAA,UACf,CAAC;AAED;AACA,wBAAc,iBAAiB;AAG/B,cAAI,iBAAiB,UAAU,mBAAmB;AAChD,kBAAM,yBAAyB;AAAA,UACjC;AAAA,QACF,SAAS,OAAO;AACd,cAAI,
QAAQ,SAAS;AACnB,oBAAQ,MAAMC,OAAM,OAAO;AAAA,yBAAkB,IAAI,KAAK,KAAK,EAAE,CAAC;AAAA,UAChE;AACA;AACA,wBAAc,iBAAiB;AAAA,QACjC;AAAA,MACF,CAAC;AAAA,IACH;AAGA,UAAM,QAAQ,IAAI,YAAY;AAG9B,kBAAc,eAAe;AAC7B,UAAM,yBAAyB;AAG/B,kBAAc,cAAc;AAG5B,YAAQ,MAAM,0BAA0B;AACxC,UAAM,WAAW,IAAI,gBAAgB,SAAS,MAAM;AACpD,UAAM,SAAS;AAAA,MACb,mBAAmB,IAAI,YAAU;AAAA,QAC/B,UAAU,MAAM;AAAA,QAChB,cAAc,MAAM;AAAA;AAAA,QACpB,YAAY,MAAM;AAAA,MACpB,EAAE;AAAA,IACJ;AAGA,UAAM,EAAE,gBAAAG,iBAAgB,WAAAC,WAAU,IAAI,MAAM;AAC5C,UAAM,EAAE,iBAAAC,iBAAgB,IAAI,MAAM;AAClC,UAAM,eAAe,MAAMF,gBAAe;AAC1C,UAAM,SAAS,MAAMC,WAAU,OAAO;AAEtC,QAAI,gBAAgB,QAAQ;AAC1B,YAAM,aAAa,IAAIC,iBAAgB,SAAS,SAAS,MAAM;AAC/D,YAAM,WAAW,WAAW;AAC5B,YAAM,WAAW,WAAW,SAAS;AACrC,UAAI,UAAU;AACZ,cAAM,SAAS,eAAe,QAAQ;AAAA,MACxC;AAAA,IACF;AAEA,YAAQ,QAAQ,gBAAgB;AAIhC,UAAM,iBAAiB,SAAS,MAAM;AAEtC,UAAM,cAAc,KAAK,IAAI,IAAI,aAAa,KAAM,QAAQ,CAAC;AAC7D,YAAQ;AAAA,MACN,WAAW,cAAc,WAAW,eAAe,eAAe,SAAS,WAAW,WAAW;AAAA,IACnG;AAEA,YAAQ,IAAIL,OAAM,IAAI,kBAAkB,GAAGA,OAAM,KAAK,YAAY,GAAGA,OAAM,IAAI,yBAAyB,CAAC;AAAA,EAC3G,SAAS,OAAO;AAEd,QAAI,gBAAgB;AAClB,oBAAc,cAAc;AAAA,IAC9B;AACA,YAAQ,KAAK,oBAAoB,KAAK,EAAE;AACxC,UAAM;AAAA,EACR;AACF;AAjYA;AAAA;AAAA;AAIA;AACA,IAAAQ;AACA;AACA;AACA;AAEA,IAAAC;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AAAA;;;AChBA,SAAS,eAAe;AACxB,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,WAAAC,UAAS,QAAAC,aAAY;;;ACE9B;AALA,OAAOC,SAAQ;AACf,OAAOC,WAAU;AACjB,SAAS,iBAAAC,sBAAqB;AAC9B,OAAOC,YAAW;AAClB,OAAO,cAAc;;;ACJrB,OAAO,YAAY;AACnB,OAAO,WAAW;AAClB,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,WAAAC,UAAS,QAAAC,aAAY;AAK9B,IAAMC,cAAaH,eAAc,YAAY,GAAG;AAChD,IAAMI,aAAYH,SAAQE,WAAU;AACpC,IAAME,WAAUN,eAAc,YAAY,GAAG;AAG7C,IAAIO;AACJ,IAAI;AACF,EAAAA,eAAcD,SAAQH,MAAKE,YAAW,iBAAiB,CAAC;AAC1D,QAAQ;AACN,EAAAE,eAAcD,SAAQH,MAAKE,YAAW,oBAAoB,CAAC;AAC7D;AAGA,IAAM,eAAeE,aAAY;AACjC,IAAM,UAAUA,aAAY;AAK5B,SAAS,UAAU,MAAc,QAAgB,UAAU,GAAW;AACpE,QAAM,QAAQ,KAAK,MAAM,IAAI,EAAE,OAAO,UAAQ,KAAK,KAAK,EAAE,SAAS,CAAC;AAGpE,QAAM,YAAY,KAAK,IAAI,GAAG,MAAM,IAAI,UAAQ,KAAK,MAAM,CAAC;AAE5D,QAAM,mBAAmB,SAAI,OAAO,YAAY,UAAU,CAAC;AAC3D,QAAM,MAAM,SAAI,gBAAgB;AAChC,QAAM,SAAS,SAAI,gBAAgB;AACnC,QAAM,YAAY,SAAI,gBAAgB;AAEtC,QAAM,cAAc,MAAM,IAAI,UAAQ;AACpC,UAAM,WAAW,IAAI,OAAO,YAAY,KAAK,SAAS,OAAO;AAC7D,UAAM,UAAU,IAAI,OAAO,OAAO;AAClC,WAAO,SAAI,OAAO,GAAG,IAAI,GAAG,QAAQ;AAAA,EACtC,CAAC;AAGD,QAAM,WAAW,YAAY,OAAO;AACpC,QAAM,UAAU,KAAK,MAAM,WAAW,CAAC;AACvC,QAAM,WAAW,WAAW;AAC5B,QAAM,iBAAiB,IAAI,OAAO,OAAO,IAAI,SAAS,IAAI,OAAO,QAAQ;AAEzE,QAAM,eAAe,SAAI,IAAI,OAAO,OAAO,CAAC,GAAG,cAAc,GAAG,IAAI,OAAO,OAAO,CAAC;AAEnF,SAAO,CAAC,KAAK,GAAG,aAAa,WAAW,cAAc,MAAM,EAAE,KAAK,IAAI;AACzE;AAKO,SAAS,aAAmB;AACjC,QAAM,SAAS,OAAO,SAAS,QAAQ;AAAA,IACrC,MAAM;AAAA,IACN,kBAAkB;AAAA,IAClB,gBAAgB;AAAA,EAClB,CAAC;AAED,QAAM,SAAS,GAAG,YAAY,OAAO,OAAO;AAC5C,QAAM,cAAc,UAAU,OAAO,KAAK,GAAG,MAAM;AACnD,UAAQ,MAAM,MAAM,KAAK,WAAW,CAAC;AACrC,UAAQ,MAAM;AAChB;AAKO,SAAS,oBAA0B;AACxC,QAAM,SAAS,OAAO,SAAS,QAAQ;AAAA,IACrC,MAAM;AAAA,IACN,kBAAkB;AAAA,IAClB,gBAAgB;AAAA,EAClB,CAAC;AAED,QAAM,SAAS,GAAG,YAAY,OAAO,OAAO;AAC5C,QAAM,cAAc,UAAU,OAAO,KAAK,GAAG,MAAM;AACnD,UAAQ,IAAI,MAAM,KAAK,WAAW,CAAC;AACnC,UAAQ,IAAI;AACd;;;ACnFA;AACA;AACA;AACA;AANA,OAAOC,SAAQ;AACf,OAAOC,WAAU;AACjB,OAAOC,YAAW;AAuBX,IAAM,mBAAN,MAAuB;AAAA,EAC5B,YAA6B,UAAkB,QAAQ,IAAI,GAAG;AAAjC;AAAA,EAAkC;AAAA;AAAA;AAAA;AAAA,EAKvD,gBAAwB;AAC9B,WAAOD,MAAK,KAAK,KAAK,SAAS,mBAAmB;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBAAmC;AACvC,QAAI;AACF,YAAM,aAAa,KAAK,cAAc;AACtC,YAAM,UAAU,MAAMD,IAAG,SAAS,YAAY,OAAO;AACrD,YAAM,SAAS,KAAK,MAAM,OAAO;AACjC,aAAO,eAAe,MAAM;AAAA,IAC9B,SAAS,OAAO;AAEd,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,cAAmC;AACvC,UAAM,SAAS
,MAAM,kBAAkB,KAAK,OAAO;AAEnD,QAAI,OAAO,YAAY,OAAO,YAAY;AACxC,YAAM,iBAAiBC,MAAK,SAAS,OAAO,UAAU;AACtD,cAAQ,IAAI,8CAAyC,cAAc,EAAE;AACrE,cAAQ,IAAI,+DAAwD;AAAA,IACtE;AAEA,WAAO,OAAO;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,qBAAoC;AACxC,UAAM,aAAa,KAAK,cAAc;AAEtC,QAAI;AAEF,YAAM,kBAAkB,MAAMD,IAAG,SAAS,YAAY,OAAO;AAC7D,YAAM,iBAAiB,KAAK,MAAM,eAAe;AAGjD,YAAM,kBAAkB,eAAe,cAAc;AACrD,YAAM,YAAY,kBAAkB,CAAC,IAAI,gBAAgB,gBAAgB,aAAa;AACtF,YAAM,aAAa,mBAAmB,UAAU,SAAS;AAEzD,UAAI,CAAC,YAAY;AACf,gBAAQ,IAAIE,OAAM,MAAM,qCAAgC,CAAC;AACzD,gBAAQ,IAAIA,OAAM,IAAI,mBAAmB,CAAC;AAC1C;AAAA,MACF;AAGA,YAAM,aAAa,GAAG,UAAU;AAChC,YAAMF,IAAG,SAAS,YAAY,UAAU;AAGxC,UAAI;AACJ,UAAI,WAAW;AAEf,UAAI,iBAAiB;AACnB,gBAAQ,IAAIE,OAAM,KAAK,8CAAuC,sBAAsB,KAAK,CAAC;AAC1F,yBAAiB,cAAc,cAAc;AAC7C,mBAAW;AAAA,MACb,OAAO;AAEL,yBAAiB,gBAAgB,eAAe,cAAqC;AAErF,gBAAQ,IAAIA,OAAM,IAAI,sBAAsB,CAAC;AAC7C,kBAAU,QAAQ,WAAS,QAAQ,IAAIA,OAAM,IAAI,UAAK,GAAGA,OAAM,KAAK,KAAK,CAAC,CAAC;AAAA,MAC7E;AAGA,YAAMF,IAAG;AAAA,QACP;AAAA,QACA,KAAK,UAAU,gBAAgB,MAAM,CAAC,IAAI;AAAA,QAC1C;AAAA,MACF;AAGA,cAAQ,IAAIE,OAAM,MAAM,qCAAgC,CAAC;AACzD,cAAQ,IAAIA,OAAM,IAAI,kBAAkB,GAAG,UAAU;AAErD,UAAI,UAAU;AACZ,gBAAQ,IAAIA,OAAM,IAAI,iEAA0D,CAAC;AAAA,MACnF;AAAA,IACF,SAAS,OAAO;AACd,UAAK,MAAgC,SAAS,UAAU;AACtD,gBAAQ,IAAIA,OAAM,IAAI,6BAA6B,CAAC;AACpD,gBAAQ,IAAIA,OAAM,IAAI,KAAK,GAAGA,OAAM,KAAK,WAAW,GAAGA,OAAM,IAAI,yBAAyB,CAAC;AAC3F;AAAA,MACF;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,UAAoC;AACxC,WAAO,kBAAkB,KAAK,OAAO;AAAA,EACvC;AACF;;;AC5IA,OAAOC,SAAQ;AACf,OAAOC,WAAU;;;ACuDV,IAAM,0BAA4C;AAAA,EACvD,UAAU;AAAA,EACV,UAAU;AAAA,IACR;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;ACxEA,OAAOC,SAAQ;AACf,OAAOC,WAAU;;;ACIjB,eAAsB,qBACpB,UACA,eAC0B;AAC1B,SAAO;AAAA,IACL,SAAS;AAAA;AAAA;AAAA,MAGP;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,SAAS;AAAA,MACP;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAGA;AAAA,MACA;AAAA;AAAA,MAGA;AAAA;AAAA,MAGA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;;;AD3CO,IAAM,iBAAoC;AAAA,EAC/C,MAAM;AAAA,EACN,UAAU;AAAA;AAAA,EAEV,MAAM,OAAO,SAAiB,cAAgD;AAC5E,UAAM,WAAWC,MAAK,KAAK,SAAS,YAAY;AAChD,UAAM,SAA0B;AAAA,MAC9B,UAAU;AAAA,MACV,MAAM;AAAA,MACN,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,UAAU,CAAC;AAAA,IACb;AAGA,UAAM,kBAAkBA,MAAK,KAAK,UAAU,cAAc;AAC1D,QAAIC,eAAmB;AAEvB,QAAI;AACF,YAAM,UAAU,MAAMC,IAAG,SAAS,iBAAiB,OAAO;AAC1D,MAAAD,eAAc,KAAK,MAAM,OAAO;AAChC,aAAO,SAAS,KAAK,oBAAoB;AAAA,IAC3C,QAAQ;AAEN,aAAO;AAAA,IACT;AAGA,WAAO,WAAW;AAClB,WAAO,aAAa;AAGpB,QAAIA,aAAY,iBAAiB,cAAcA,aAAY,cAAc,YAAY;AACnF,aAAO,SAAS,KAAK,qBAAqB;AAAA,IAC5C;AAGA,UAAM,iBAAiB;AAAA,MACrB,EAAE,MAAM,QAAQ,SAAS,OAAO;AAAA,MAChC,EAAE,MAAM,UAAU,SAAS,SAAS;AAAA,MACpC,EAAE,MAAM,SAAS,SAAS,QAAQ;AAAA,MAClC,EAAE,MAAM,OAAO,SAAS,MAAM;AAAA,MAC9B,EAAE,MAAM,oBAAoB,SAAS,aAAa;AAAA,IACpD;AAEA,eAAW,aAAa,gBAAgB;AACtC,UACEA,aAAY,kBAAkB,UAAU,IAAI,KAC5CA,aAAY,eAAe,UAAU,IAAI,GACzC;AACA,eAAO,SAAS,KAAK,GAAG,UAAU,OAAO,0BAA0B;AACnE;AAAA,MACF;AAAA,IACF;AAGA,UAAM,aAAa;AAAA,MACjB,EAAE,MAAM,QAAQ,SAAS,UAAU;AAAA,MACnC,EAAE,MAAM,SAAS,SAAS,QAAQ;AAAA,MAClC,EAAE,MAAM,OAAO,SAAS,MAAM;AAAA,MAC9B,EAAE,MAAM,WAAW,SAAS,UAAU;AAAA,MACtC,EAAE,MAAM,gBAAgB,SAAS,SAAS;AAAA,IAC5C;AAEA,eAAW,MAAM,YAAY;AAC3B,UAAIA,aAAY,eAAe,GAAG,IAAI,GAAG;AACvC,eAAO,SAAS,KAAK,GAAG,GAAG,OAAO,WAAW;AAC7C;AAAA,MACF;AAAA,IACF;AAGA,QAAIA,aAAY,SAAS,MAAM;AAC7B,aAAO,UAAUA,aAAY,QAAQ;AAAA,IACvC;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,eAAe,SAAiB,cAAsB;AAC1D,WAAO,qBAAqB,SAAS,YAAY;AAAA,EACnD;AACF;;;AE1
FA,OAAOE,SAAQ;AACf,OAAOC,WAAU;;;ACIjB,eAAsB,sBACpB,UACA,eAC0B;AAC1B,SAAO;AAAA,IACL,SAAS;AAAA;AAAA,MAEP;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAEA;AAAA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,SAAS;AAAA,MACP;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAGA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAGA;AAAA;AAAA,MAGA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;;;ADjDO,IAAM,kBAAqC;AAAA,EAChD,MAAM;AAAA,EACN,UAAU;AAAA;AAAA,EAEV,MAAM,OAAO,SAAiB,cAAgD;AAC5E,UAAM,WAAWC,MAAK,KAAK,SAAS,YAAY;AAChD,UAAM,SAA0B;AAAA,MAC9B,UAAU;AAAA,MACV,MAAM;AAAA,MACN,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,UAAU,CAAC;AAAA,IACb;AAGA,UAAM,mBAAmBA,MAAK,KAAK,UAAU,eAAe;AAC5D,QAAI,eAAoB;AAExB,QAAI;AACF,YAAM,UAAU,MAAMC,IAAG,SAAS,kBAAkB,OAAO;AAC3D,qBAAe,KAAK,MAAM,OAAO;AACjC,aAAO,SAAS,KAAK,qBAAqB;AAAA,IAC5C,QAAQ;AAEN,aAAO;AAAA,IACT;AAGA,UAAM,aACJ,aAAa,UAAU,mBAAmB,KAC1C,aAAa,aAAa,IAAI,mBAAmB;AAEnD,QAAI,CAAC,YAAY;AAEf,aAAO;AAAA,IACT;AAEA,WAAO,SAAS,KAAK,6CAA6C;AAGlE,UAAM,cAAcD,MAAK,KAAK,UAAU,SAAS;AACjD,QAAI;AACF,YAAMC,IAAG,OAAO,WAAW;AAC3B,aAAO,SAAS,KAAK,oBAAoB;AACzC,aAAO,aAAa;AAAA,IACtB,QAAQ;AACN,aAAO,aAAa;AAAA,IACtB;AAGA,UAAM,cAAc,CAAC,OAAO,UAAU,UAAU,UAAU;AAC1D,QAAI,YAAY;AAEhB,eAAW,OAAO,aAAa;AAC7B,UAAI;AACF,cAAM,UAAUD,MAAK,KAAK,UAAU,GAAG;AACvC,cAAM,QAAQ,MAAMC,IAAG,KAAK,OAAO;AACnC,YAAI,MAAM,YAAY,GAAG;AACvB;AAAA,QACF;AAAA,MACF,QAAQ;AAAA,MAER;AAAA,IACF;AAEA,QAAI,aAAa,GAAG;AAClB,aAAO,SAAS,KAAK,yCAAyC,SAAS,IAAI,YAAY,MAAM,QAAQ;AACrG,aAAO,aAAa;AAAA,IACtB;AAGA,UAAM,kBAAkB;AAAA,MACtBD,MAAK,KAAK,UAAU,SAAS,SAAS;AAAA,MACtCA,MAAK,KAAK,UAAU,SAAS,MAAM;AAAA,IACrC;AAEA,eAAW,WAAW,iBAAiB;AACrC,UAAI;AACF,cAAM,QAAQ,MAAMC,IAAG,KAAK,OAAO;AACnC,YAAI,MAAM,YAAY,GAAG;AACvB,gBAAM,UAAUD,MAAK,SAASA,MAAK,QAAQ,OAAO,CAAC,IAAI,MAAMA,MAAK,SAAS,OAAO;AAClF,iBAAO,SAAS,KAAK,SAAS,OAAO,iBAAiB;AAAA,QACxD;AAAA,MACF,QAAQ;AAAA,MAER;AAAA,IACF;AAGA,QAAI,aAAa,UAAU,mBAAmB,GAAG;AAC/C,aAAO,UAAU,aAAa,QAAQ,mBAAmB;AAAA,IAC3D;AAEA,WAAO,WAAW;AAClB,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,eAAe,SAAiB,cAAsB;AAC1D,WAAO,sBAAsB,SAAS,YAAY;AAAA,EACpD;AACF;;;AE5GA,OAAOE,SAAQ;AACf,OAAOC,WAAU;;;ACIjB,eAAsB,sBACpB,UACA,eAC0B;AAC1B,SAAO;AAAA,IACL,SAAS;AAAA;AAAA,MAEP;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MACA;AAAA;AAAA;AAAA,MAGA;AAAA;AAAA,MAGA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAGA;AAAA;AAAA,MAGA;AAAA;AAAA,MAGA;AAAA,MACA;AAAA;AAAA,MAGA;AAAA,MACA;AAAA,IACF;AAAA,IACA,SAAS;AAAA,MACP;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAGA;AAAA,MACA;AAAA;AAAA,MAGA;AAAA;AAAA,MAGA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;;;ADtDO,IAAM,kBAAqC;AAAA,EAChD,MAAM;AAAA,EACN,UAAU;AAAA;AAAA,EAEV,MAAM,OAAO,SAAiB,cAAgD;AAC5E,UAAM,WAAWC,MAAK,KAAK,SAAS,YAAY;AAChD,UAAM,SAA0B;AAAA,MAC9B,UAAU;AAAA,MACV,MAAM;AAAA,MACN,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,UAAU,CAAC;AAAA,IACb;AAGA,UAAM,qBAAqBA,MAAK,KAAK,UAAU,UAAU,sBAAsB;AAC/E,QAAI,oBAAoB;AAExB,QAAI;AACF,YAAMC,IAAG,OAAO,kBAAkB;AAClC,0BAAoB;AACpB,aAAO,SAAS,KAAK,mCAAmC;AAAA,IAC1D,QAAQ;AAAA,IAER;AAGA,UAAM,kBAAkBD,MAAK,KAAK,UAAU,UAAU,cAAc;AACpE,QAAI,iBAAiB;AAErB,QAAI;AACF,YAAMC,IAAG,OAAO,eAAe;AAC/B,uBAAiB;AACjB,aAAO,SAAS,KAAK,2BAA2B;AAAA,IAClD,QAAQ;AAAA,IAER;AAGA,UAAM,cAAc,CAAC,YAAY,YAAY,aAAa,SAAS;AACnE,QAAI,YAAY;AAEhB,eAAW,OAAO,aAAa;AAC7B,UAAI;AACF,cAAM,UAAUD,MAAK,KAAK,UAAU,GAAG;AACvC,cAAM,QAAQ,MAAMC,IAAG,KAAK,OAAO;AACnC,YAAI,MAAM,YAAY,GAAG;AACvB;AAAA,QACF;AAAA,MACF,QAAQ;AAAA,MAER;AAAA,IACF;AAEA,QAAI,aAAa,GAAG;AAClB,aAAO,SAAS,KAAK,yCAAyC,SAAS,IAAI,YAAY,MAAM,QAAQ;AAAA,IACvG;AAGA,QAAI;AACF,YAAM,WAAWD,MAAK,KAAK,UAAU,oBAAoB;AACzD,YAAMC,IAAG,OAAO,QAAQ;AACxB,aAAO,
SAAS,KAAK,0BAA0B;AAAA,IACjD,QAAQ;AAAA,IAER;AAGA,QAAI;AACF,YAAM,aAAaD,MAAK,KAAK,UAAU,gBAAgB;AACvD,YAAMC,IAAG,OAAO,UAAU;AAC1B,aAAO,SAAS,KAAK,sBAAsB;AAAA,IAC7C,QAAQ;AAAA,IAER;AAIA,QAAI,qBAAqB,aAAa,GAAG;AACvC,aAAO,WAAW;AAClB,aAAO,aAAa;AACpB,aAAO;AAAA,IACT;AAGA,QAAI,qBAAsB,kBAAkB,aAAa,GAAI;AAC3D,aAAO,WAAW;AAClB,aAAO,aAAa;AACpB,aAAO;AAAA,IACT;AAGA,QAAI,aAAa,GAAG;AAClB,aAAO,WAAW;AAClB,aAAO,aAAa;AACpB,aAAO;AAAA,IACT;AAGA,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,eAAe,SAAiB,cAAsB;AAC1D,WAAO,sBAAsB,SAAS,YAAY;AAAA,EACpD;AACF;;;AExGO,IAAM,qBAA0C;AAAA,EACrD;AAAA,EACA;AAAA,EACA;AACF;AAmBO,SAAS,qBAAqB,MAA6C;AAChF,SAAO,mBAAmB,KAAK,OAAK,EAAE,SAAS,IAAI;AACrD;;;ARvBA,eAAsB,oBACpB,SACA,UAAqC,CAAC,GACV;AAC5B,QAAM,OAAO,EAAE,GAAG,yBAAyB,GAAG,QAAQ;AACtD,QAAM,UAA6B,CAAC;AACpC,QAAM,UAAU,oBAAI,IAAY;AAGhC,QAAM,aAAa,SAAS,KAAK,SAAS,OAAO;AAGjD,QAAM,mBAAmB,SAAS,KAAK,SAAS,SAAS,GAAG,IAAI;AAEhE,SAAO;AACT;AAKA,eAAe,aACb,SACA,cACA,SACA,SACe;AAEf,QAAM,WAAWC,MAAK,KAAK,SAAS,YAAY;AAChD,MAAI,QAAQ,IAAI,QAAQ,GAAG;AACzB;AAAA,EACF;AACA,UAAQ,IAAI,QAAQ;AAGpB,QAAM,iBAAgE,CAAC;AAEvE,aAAW,YAAY,oBAAoB;AACzC,QAAI;AACF,YAAM,SAAS,MAAM,SAAS,OAAO,SAAS,YAAY;AAC1D,UAAI,OAAO,UAAU;AACnB,uBAAe,KAAK;AAAA,UAClB,GAAG;AAAA,UACH,UAAU,SAAS,YAAY;AAAA,QACjC,CAAC;AAAA,MACH;AAAA,IACF,SAAS,OAAO;AAEd,cAAQ,MAAM,2BAA2B,SAAS,IAAI,QAAQ,YAAY,KAAK,KAAK;AAAA,IACtF;AAAA,EACF;AAIA,MAAI,eAAe,SAAS,GAAG;AAE7B,UAAM,iBAAiB,eAAe,OAAO,OAAK,EAAE,eAAe,MAAM;AACzE,UAAM,mBAAmB,eAAe,OAAO,OAAK,EAAE,eAAe,QAAQ;AAC7E,UAAM,gBAAgB,eAAe,OAAO,OAAK,EAAE,eAAe,KAAK;AAEvE,QAAI,eAAe,SAAS,GAAG;AAG7B,YAAM,eAAe,eAAe,IAAI,CAAC,EAAE,UAAU,GAAG,OAAO,MAAM,MAAM;AAC3E,cAAQ,KAAK,GAAG,YAAY;AAC5B,YAAM,QAAQ,eAAe,IAAI,OAAK,EAAE,IAAI,EAAE,KAAK,KAAK;AACxD,cAAQ,IAAI,qCAAgC,KAAK,EAAE;AAGnD,UAAI,iBAAiB,SAAS,KAAK,cAAc,SAAS,GAAG;AAC3D,cAAM,eAAe,CAAC,GAAG,kBAAkB,GAAG,aAAa,EAAE,IAAI,OAAK,EAAE,IAAI,EAAE,KAAK,IAAI;AACvF,gBAAQ,IAAI,kDAA6C,YAAY,EAAE;AAAA,MACzE;AAAA,IACF,WAAW,eAAe,WAAW,GAAG;AAEtC,YAAM,EAAE,UAAU,GAAG,OAAO,IAAI,eAAe,CAAC;AAChD,cAAQ,KAAK,MAAM;AAGnB,UAAI,iBAAiB,SAAS,KAAK,cAAc,SAAS,GAAG;AAC3D,cAAM,eAAe,CAAC,GAAG,kBAAkB,GAAG,aAAa,EAAE,IAAI,OAAK,EAAE,IAAI,EAAE,KAAK,IAAI;AACvF,gBAAQ,IAAI,kDAA6C,YAAY,EAAE;AAAA,MACzE;AAAA,IACF,WAAW,iBAAiB,SAAS,GAAG;AAEtC,uBAAiB,KAAK,CAAC,GAAG,MAAM,EAAE,WAAW,EAAE,QAAQ;AACvD,YAAM,EAAE,UAAU,GAAG,OAAO,IAAI,iBAAiB,CAAC;AAClD,cAAQ,KAAK,MAAM;AAGnB,YAAM,UAAU,CAAC,GAAG,iBAAiB,MAAM,CAAC,GAAG,GAAG,aAAa;AAC/D,UAAI,QAAQ,SAAS,GAAG;AACtB,cAAM,eAAe,QAAQ,IAAI,OAAK,EAAE,IAAI,EAAE,KAAK,IAAI;AACvD,gBAAQ,IAAI,qBAAgB,YAAY,OAAO,YAAY,KAAK,OAAO,IAAI,oBAAoB;AAAA,MACjG;AAAA,IACF,WAAW,cAAc,SAAS,GAAG;AAEnC,oBAAc,KAAK,CAAC,GAAG,MAAM,EAAE,WAAW,EAAE,QAAQ;AACpD,YAAM,EAAE,UAAU,GAAG,OAAO,IAAI,cAAc,CAAC;AACjD,cAAQ,KAAK,MAAM;AAGjB,YAAM,UAAU,cAAc,MAAM,CAAC;AACvC,UAAI,QAAQ,SAAS,GAAG;AACtB,cAAM,eAAe,QAAQ,IAAI,OAAK,EAAE,IAAI,EAAE,KAAK,IAAI;AACvD,gBAAQ,IAAI,qBAAgB,YAAY,OAAO,YAAY,KAAK,OAAO,IAAI,oBAAoB;AAAA,MAC/F;AAAA,IACF;AAAA,EACF,WAAW,eAAe,WAAW,GAAG;AACtC,UAAM,EAAE,UAAU,GAAG,OAAO,IAAI,eAAe,CAAC;AAChD,YAAQ,KAAK,MAAM;AAAA,EACrB;AACF;AAKA,eAAe,mBACb,SACA,cACA,SACA,SACA,OACA,SACe;AAEf,MAAI,SAAS,QAAQ,UAAU;AAC7B;AAAA,EACF;AAEA,QAAM,WAAWA,MAAK,KAAK,SAAS,YAAY;AAEhD,MAAI;AACF,UAAM,UAAU,MAAMC,IAAG,QAAQ,UAAU,EAAE,eAAe,KAAK,CAAC;AAGlE,UAAM,OAAO,QAAQ,OAAO,OAAK,EAAE,YAAY,CAAC;AAEhD,eAAW,OAAO,MAAM;AAEtB,UAAI,QAAQ,SAAS,SAAS,IAAI,IAAI,GAAG;AACvC;AAAA,MACF;AAGA,UAAI,IAAI,KAAK,WAAW,GAAG,GAAG;AAC5B;AAAA,MACF;AAEA,YAAM,UAAU,iBAAiB,MAC7B,IAAI,OACJD,MAAK,KAAK,cAAc,IAAI,IAAI;AAGpC,YAAM,aAAa,SAAS,SAAS,SAAS,OAAO;AAGrD,YAAM,mBAAmB,SAAS,SAAS,SAAS,SAAS,QAAQ,GAAG,OAAO;AAAA,IACjF;AAAA,EACF,SAAS,OAAO;AAEd;AAAA,EACF;AACF;;;AHjKA,IAAME,cAAaC,eAAc,YAAY,GAAG;AAChD,IAAMC,aA
AYC,MAAK,QAAQH,WAAU;AAQzC,eAAsB,YAAY,UAAuB,CAAC,GAAG;AAC3D,QAAM,UAAU,QAAQ,QAAQ,QAAQ,IAAI;AAC5C,QAAM,aAAaG,MAAK,KAAK,SAAS,mBAAmB;AAEzD,MAAI;AAEF,QAAI,eAAe;AACnB,QAAI;AACF,YAAMC,IAAG,OAAO,UAAU;AAC1B,qBAAe;AAAA,IACjB,QAAQ;AAAA,IAER;AAGA,QAAI,gBAAgB,QAAQ,SAAS;AACnC,YAAM,mBAAmB,IAAI,iBAAiB,OAAO;AACrD,YAAM,iBAAiB,mBAAmB;AAC1C;AAAA,IACF;AAGA,QAAI,gBAAgB,CAAC,QAAQ,SAAS;AACpC,cAAQ,IAAIC,OAAM,OAAO,gDAAsC,CAAC;AAChE,cAAQ,IAAIA,OAAM,IAAI,KAAK,GAAGA,OAAM,KAAK,qBAAqB,GAAGA,OAAM,IAAI,6BAA6B,CAAC;AACzG;AAAA,IACF;AAGA,QAAI,CAAC,cAAc;AACjB,YAAM,gBAAgB,SAAS,OAAO;AAAA,IACxC;AAAA,EACF,SAAS,OAAO;AACd,YAAQ,MAAMA,OAAM,IAAI,6BAA6B,GAAG,KAAK;AAC7D,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;AAEA,eAAe,gBAAgB,SAAiB,SAAsB;AAEpE,oBAAkB;AAClB,UAAQ,IAAIA,OAAM,KAAK,wBAAwB,CAAC;AAGhD,UAAQ,IAAIA,OAAM,IAAI,mCAA4B,GAAGA,OAAM,KAAK,OAAO,CAAC;AACxE,QAAM,aAAa,MAAM,oBAAoB,OAAO;AAEpD,MAAI,aAAkC,CAAC;AAEvC,MAAI,WAAW,WAAW,GAAG;AAC3B,YAAQ,IAAIA,OAAM,OAAO,wCAA8B,CAAC;AAExD,QAAI,CAAC,QAAQ,KAAK;AAChB,YAAM,EAAE,WAAW,IAAI,MAAM,SAAS,OAAO;AAAA,QAC3C;AAAA,UACE,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX;AAAA,MACF,CAAC;AAED,UAAI,CAAC,YAAY;AACf,gBAAQ,IAAIA,OAAM,IAAI,UAAU,CAAC;AACjC;AAAA,MACF;AAAA,IACF;AAGA,eAAW,KAAK;AAAA,MACd,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,QAAQ;AAAA,QACN,SAAS,CAAC,6CAA6C;AAAA,QACvD,SAAS;AAAA,UACP;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH,OAAO;AAEL,YAAQ,IAAIA,OAAM,MAAM;AAAA,eAAa,WAAW,MAAM;AAAA,CAAkB,CAAC;AAEzE,eAAW,OAAO,YAAY;AAC5B,YAAM,cAAc,IAAI,SAAS,MAAM,SAAS,IAAI;AACpD,cAAQ,IAAIA,OAAM,KAAK,KAAK,IAAI,IAAI,EAAE,GAAGA,OAAM,IAAI,IAAI,IAAI,UAAU,cAAc,CAAC;AACpF,cAAQ,IAAIA,OAAM,IAAI,iBAAiB,WAAW,EAAE,CAAC;AAErD,UAAI,IAAI,SAAS,SAAS,GAAG;AAC3B,YAAI,SAAS,QAAQ,CAAC,MAAM;AAC1B,kBAAQ,IAAIA,OAAM,IAAI,cAAS,CAAC,EAAE,CAAC;AAAA,QACrC,CAAC;AAAA,MACH;AACA,cAAQ,IAAI;AAAA,IACd;AAGA,QAAI,CAAC,QAAQ,KAAK;AAChB,YAAM,EAAE,QAAQ,IAAI,MAAM,SAAS,OAAO;AAAA,QACxC;AAAA,UACE,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX;AAAA,MACF,CAAC;AAED,UAAI,CAAC,SAAS;AACZ,gBAAQ,IAAIA,OAAM,IAAI,UAAU,CAAC;AACjC;AAAA,MACF;AAAA,IACF;AAGA,eAAW,OAAO,YAAY;AAC5B,YAAM,WAAW,qBAAqB,IAAI,IAAI;AAC9C,UAAI,CAAC,UAAU;AACb,gBAAQ,KAAKA,OAAM,OAAO,uCAA6B,IAAI,IAAI,YAAY,CAAC;AAC5E;AAAA,MACF;AAGA,YAAM,kBAAkB,MAAM,SAAS,eAAe,SAAS,IAAI,IAAI;AAGvE,UAAI,kBAAkB;AACtB,UAAI,CAAC,QAAQ,KAAK;AAChB,cAAM,EAAE,UAAU,IAAI,MAAM,SAAS,OAAO;AAAA,UAC1C;AAAA,YACE,MAAM;AAAA,YACN,MAAM;AAAA,YACN,SAAS,aAAa,IAAI,IAAI;AAAA,YAC9B,SAAS;AAAA,UACX;AAAA,QACF,CAAC;AACD,0BAAkB;AAAA,MACpB;AAEA,UAAI,cAAc;AAClB,UAAI,iBAAiB;AACnB,cAAM,aAAa,MAAM,uBAAuB,IAAI,MAAM,eAAe;AACzE,sBAAc,EAAE,GAAG,iBAAiB,GAAG,WAAW;AAAA,MACpD,OAAO;AACL,cAAM,cAAc,IAAI,SAAS,MAAM,SAAS,IAAI;AACpD,gBAAQ,IAAIA,OAAM,IAAI,+BAA0B,IAAI,IAAI,OAAO,WAAW,EAAE,CAAC;AAAA,MAC/E;AAEA,iBAAW,KAAK;AAAA,QACd,MAAM,IAAI;AAAA,QACV,MAAM,IAAI;AAAA,QACV,SAAS;AAAA,QACT,QAAQ;AAAA,MACV,CAAC;AAAA,IACH;AAAA,EACF;AAGA,MAAI,CAAC,QAAQ,KAAK;AAChB,UAAM,EAAE,mBAAmB,IAAI,MAAM,SAAS,OAAO;AAAA,MACnD;AAAA,QACE,MAAM;AAAA,QACN,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX;AAAA,IACF,CAAC;AAED,QAAI,oBAAoB;AACtB,UAAI;AACF,cAAM,iBAAiBF,MAAK,KAAK,SAAS,SAAS;AACnD,cAAMC,IAAG,MAAM,gBAAgB,EAAE,WAAW,KAAK,CAAC;AAKlD,cAAM,eAAeD,MAAK,KAAKD,YAAW,6BAA6B;AAEvE,cAAM,YAAYC,MAAK,KAAK,gBAAgB,OAAO;AACnD,YAAI;AACJ,YAAI,cAAc;AAClB,YAAI,SAAS;AAEb,YAAI;AACF,gBAAM,QAAQ,MAAMC,IAAG,KAAK,SAAS;AACrC,wBAAc,MAAM,YAAY;AAChC,mBAAS,MAAM,OAAO;AAAA,QACxB,QAAQ;AAAA,QAER;AAEA,YAAI,aAAa;AAEf,uBAAaD,MAAK,KAAK,WAAW,UAAU;AAC5C,gBAAMC,IAAG,SAAS,cAAc,UAAU;AAC1C,kBAAQ,IAAIC,OAAM,MAAM,yDAAoD,CAAC;AAAA,QAC/E,WAAW,QAAQ;AAEjB,gBAAM,EAAE,aAAa,IAAI,MAAM,SAA
S,OAAO;AAAA,YAC7C;AAAA,cACE,MAAM;AAAA,cACN,MAAM;AAAA,cACN,SAAS;AAAA,cACT,SAAS;AAAA,YACX;AAAA,UACF,CAAC;AAED,cAAI,cAAc;AAGhB,kBAAM,gBAAgB,MAAMD,IAAG,SAAS,WAAW,OAAO;AAE1D,kBAAMA,IAAG,OAAO,SAAS;AAEzB,kBAAMA,IAAG,MAAM,SAAS;AAExB,kBAAMA,IAAG,UAAUD,MAAK,KAAK,WAAW,aAAa,GAAG,aAAa;AAErE,kBAAMC,IAAG,SAAS,cAAcD,MAAK,KAAK,WAAW,UAAU,CAAC;AAChE,oBAAQ,IAAIE,OAAM,MAAM,6CAAwC,CAAC;AACjE,oBAAQ,IAAIA,OAAM,MAAM,mDAAmD,CAAC;AAC5E,oBAAQ,IAAIA,OAAM,MAAM,wCAAwC,CAAC;AAAA,UACnE,OAAO;AACL,oBAAQ,IAAIA,OAAM,IAAI,8DAA8D,CAAC;AAAA,UACvF;AAAA,QACF,OAAO;AAEL,gBAAMD,IAAG,MAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAC7C,uBAAaD,MAAK,KAAK,WAAW,UAAU;AAC5C,gBAAMC,IAAG,SAAS,cAAc,UAAU;AAC1C,kBAAQ,IAAIC,OAAM,MAAM,yDAAoD,CAAC;AAAA,QAC/E;AAAA,MACF,SAAS,OAAO;AACd,gBAAQ,IAAIA,OAAM,OAAO,8CAAoC,CAAC;AAC9D,gBAAQ,IAAIA,OAAM,IAAI,UAAU,iBAAiB,QAAQ,MAAM,UAAU,eAAe,EAAE,CAAC;AAC3F,gBAAQ,IAAIA,OAAM,IAAI,0EAA0E,CAAC;AAAA,MACnG;AAAA,IACF;AAAA,EACF;AAGA,QAAM,SAAqB;AAAA,IACzB,GAAG;AAAA,IACH;AAAA,EACF;AAGA,QAAM,aAAaF,MAAK,KAAK,SAAS,mBAAmB;AACzD,QAAMC,IAAG,UAAU,YAAY,KAAK,UAAU,QAAQ,MAAM,CAAC,IAAI,MAAM,OAAO;AAG9E,UAAQ,IAAIC,OAAM,MAAM,oCAA+B,CAAC;AACxD,UAAQ,IAAIA,OAAM,MAAM,qBAAgB,WAAW,MAAM,eAAe,CAAC;AACzE,UAAQ,IAAIA,OAAM,IAAI,eAAe,CAAC;AACtC,UAAQ,IAAIA,OAAM,IAAI,UAAU,GAAGA,OAAM,KAAK,YAAY,GAAGA,OAAM,IAAI,wBAAwB,CAAC;AAChG,UAAQ,IAAIA,OAAM,IAAI,UAAU,GAAGA,OAAM,KAAK,YAAY,GAAGA,OAAM,IAAI,yBAAyB,CAAC;AACjG,UAAQ,IAAIA,OAAM,IAAI,6DAA6D,CAAC;AACtF;AAEA,eAAe,uBAAuB,eAAuB,QAA4D;AACvH,UAAQ,IAAIA,OAAM,KAAK;AAAA,cAAiB,aAAa,YAAY,CAAC;AAElE,QAAM,UAAU,MAAM,SAAS,OAAO;AAAA,IACpC;AAAA,MACE,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,SAAS,OAAO,QAAQ,KAAK,IAAI;AAAA,MACjC,QAAQ,CAAC,UAAkB,MAAM,MAAM,GAAG,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC;AAAA,IAC/D;AAAA,IACA;AAAA,MACE,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,SAAS,OAAO,QAAQ,KAAK,IAAI;AAAA,MACjC,QAAQ,CAAC,UAAkB,MAAM,MAAM,GAAG,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC;AAAA,IAC/D;AAAA,EACF,CAAC;AAED,SAAO;AAAA,IACL,SAAS,QAAQ;AAAA,IACjB,SAAS,QAAQ;AAAA,EACnB;AACF;;;AYlTA;AACA;AACAC;AAPA,OAAOC,YAAW;AAClB,OAAOC,UAAQ;AACf,OAAOC,YAAU;AACjB,OAAO,QAAQ;AACf,OAAO,YAAY;AAKnB;AAEA,eAAsB,gBAAgB;AACpC,QAAM,UAAU,QAAQ,IAAI;AAC5B,QAAM,cAAcC,OAAK,SAAS,OAAO;AAGzC,QAAM,WAAW,OACd,WAAW,KAAK,EAChB,OAAO,OAAO,EACd,OAAO,KAAK,EACZ,UAAU,GAAG,CAAC;AAEjB,QAAM,YAAYA,OAAK,KAAK,GAAG,QAAQ,GAAG,SAAS,WAAW,GAAG,WAAW,IAAI,QAAQ,EAAE;AAE1F,oBAAkB;AAClB,UAAQ,IAAIC,OAAM,KAAK,UAAU,CAAC;AAGlC,QAAM,YAAY,MAAM,cAAc,OAAO,OAAO;AACpD,UAAQ,IAAIA,OAAM,IAAI,gBAAgB,GAAG,YAAYA,OAAM,MAAM,cAAS,IAAIA,OAAM,IAAI,wBAAmB,CAAC;AAE5G,MAAI,CAAC,WAAW;AACd,YAAQ,IAAIA,OAAM,OAAO,OAAO,GAAGA,OAAM,KAAK,WAAW,GAAGA,OAAM,OAAO,eAAe,CAAC;AACzF;AAAA,EACF;AAGA,MAAI;AACF,UAAM,QAAQ,MAAMC,KAAG,KAAK,SAAS;AACrC,YAAQ,IAAID,OAAM,IAAI,iBAAiB,GAAG,SAAS;AACnD,YAAQ,IAAIA,OAAM,IAAI,eAAe,GAAGA,OAAM,MAAM,eAAU,CAAC;AAG/D,QAAI;AACF,YAAM,QAAQ,MAAMC,KAAG,QAAQ,WAAW,EAAE,WAAW,KAAK,CAAC;AAC7D,cAAQ,IAAID,OAAM,IAAI,cAAc,GAAG,MAAM,MAAM;AAAA,IACrD,SAAS,GAAG;AAAA,IAEZ;AAEA,YAAQ,IAAIA,OAAM,IAAI,gBAAgB,GAAG,MAAM,MAAM,eAAe,CAAC;AAGrE,QAAI;AACF,YAAM,UAAU,MAAM,gBAAgB,SAAS;AAC/C,UAAI,UAAU,GAAG;AACf,cAAM,cAAc,IAAI,KAAK,OAAO;AACpC,gBAAQ,IAAIA,OAAM,IAAI,eAAe,GAAG,YAAY,eAAe,CAAC;AAAA,MACtE;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF,SAAS,OAAO;AACd,YAAQ,IAAIA,OAAM,IAAI,eAAe,GAAGA,OAAM,OAAO,oBAAe,CAAC;AACrE,YAAQ,IAAIA,OAAM,OAAO,OAAO,GAAGA,OAAM,KAAK,YAAY,GAAGA,OAAM,OAAO,wBAAwB,CAAC;AAAA,EACrG;AAGA,MAAI;AACF,UAAM,SAAS,MAAM,cAAc,KAAK,OAAO;AAE/C,YAAQ,IAAIA,OAAM,KAAK,aAAa,CAAC;AAGrC,UAAM,SAAS,MAAM,UAAU,OAAO;AACtC,QAAI,OAAO,aAAa,WAAW,QAAQ;AACzC,cAAQ,IAAIA,OAAM,IAAI,gBAAgB,GAAGA,OAAM,MAAM,gBAAW,CAAC;AACjE,cAAQ,IAAIA,OAAM,IAAI,kBAAkB,GAAG,GAAG,OAAO,aAAa,iBAAiB,GAAI,GAAG;AAG1F,UAAI;AACF,cA
AM,SAAS,MAAM,iBAAiB,OAAO;AAC7C,cAAM,SAAS,MAAM,iBAAiB,OAAO;AAC7C,gBAAQ,IAAIA,OAAM,IAAI,mBAAmB,GAAG,MAAM;AAClD,gBAAQ,IAAIA,OAAM,IAAI,mBAAmB,GAAG,OAAO,UAAU,GAAG,CAAC,CAAC;AAGlE,cAAM,eAAeD,OAAK,KAAK,WAAW,iBAAiB;AAC3D,YAAI;AACF,gBAAM,kBAAkB,MAAME,KAAG,SAAS,cAAc,OAAO;AAC/D,gBAAM,WAAW,KAAK,MAAM,eAAe;AAC3C,cAAI,SAAS,WAAW,UAAU,SAAS,WAAW,QAAQ;AAC5D,oBAAQ,IAAID,OAAM,OAAO,gEAAsD,CAAC;AAAA,UAClF;AAAA,QACF,QAAQ;AAAA,QAER;AAAA,MACF,QAAQ;AAAA,MAER;AAAA,IACF,WAAW,OAAO,aAAa,WAAW,CAAC,QAAQ;AACjD,cAAQ,IAAIA,OAAM,IAAI,gBAAgB,GAAGA,OAAM,OAAO,0BAA0B,CAAC;AAAA,IACnF,OAAO;AACL,cAAQ,IAAIA,OAAM,IAAI,gBAAgB,GAAGA,OAAM,KAAK,UAAU,CAAC;AAAA,IACjE;AAGA,QAAI,OAAO,aAAa,SAAS;AAC/B,cAAQ,IAAIA,OAAM,IAAI,gBAAgB,GAAGA,OAAM,MAAM,gBAAW,CAAC;AACjE,cAAQ,IAAIA,OAAM,IAAI,aAAa,GAAG,GAAG,OAAO,aAAa,UAAU,IAAI;AAAA,IAC7E,OAAO;AACL,cAAQ,IAAIA,OAAM,IAAI,gBAAgB,GAAGA,OAAM,KAAK,UAAU,CAAC;AAC/D,cAAQ,IAAIA,OAAM,IAAI,gBAAgB,GAAGA,OAAM,KAAK,oBAAoB,CAAC;AAAA,IAC3E;AAGA,YAAQ,IAAIA,OAAM,KAAK,sBAAsB,CAAC;AAC9C,QAAI,eAAe,MAAM,GAAG;AAC1B,cAAQ,IAAIA,OAAM,IAAI,cAAc,GAAG,OAAO,KAAK,WAAW;AAC9D,cAAQ,IAAIA,OAAM,IAAI,aAAa,GAAG,OAAO,KAAK,kBAAkB;AACpE,cAAQ,IAAIA,OAAM,IAAI,aAAa,GAAG,OAAO,KAAK,SAAS;AAC3D,cAAQ,IAAIA,OAAM,IAAI,gBAAgB,GAAG,OAAO,KAAK,YAAY;AAAA,IACnE;AAAA,EAEF,SAAS,OAAO;AACd,YAAQ,IAAIA,OAAM,OAAO,yCAAyC,CAAC;AAAA,EACrE;AACF;;;AC9HA;AADA,OAAOE,YAAW;AAIlB,eAAsB,aAAa,SAAkE;AACnG,oBAAkB;AAElB,MAAI;AAEF,QAAI,QAAQ,OAAO;AACjB,YAAM,EAAE,UAAAC,UAAS,IAAI,MAAM;AAC3B,YAAM,EAAE,iBAAAC,iBAAgB,IAAI,MAAM;AAElC,cAAQ,IAAIC,OAAM,OAAO,yCAAyC,CAAC;AACnE,YAAM,WAAW,IAAIF,UAAS,QAAQ,IAAI,CAAC;AAC3C,YAAM,SAAS,WAAW;AAC1B,YAAM,SAAS,MAAM;AAGrB,YAAM,WAAW,IAAIC,iBAAgB,SAAS,MAAM;AACpD,YAAM,SAAS,MAAM;AAErB,cAAQ,IAAIC,OAAM,MAAM,qCAAgC,CAAC;AAAA,IAC3D;AAEA,UAAM,cAAc;AAAA,MAClB,SAAS,QAAQ,IAAI;AAAA,MACrB,SAAS,QAAQ,WAAW;AAAA,MAC5B,OAAO,QAAQ,SAAS;AAAA,IAC1B,CAAC;AAED,QAAI,QAAQ,OAAO;AACjB,cAAQ,IAAIA,OAAM,OAAO,gDAAsC,CAAC;AAAA,IAElE;AAAA,EACF,SAAS,OAAO;AACd,YAAQ,MAAMA,OAAM,IAAI,wBAAwB,GAAG,KAAK;AACxD,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;;;ACvCA,OAAOC,YAAW;AAClB,OAAOC,UAAQ;AACf,OAAOC,YAAU;;;ACFjB,SAAS,cAAc;AACvB,SAAS,4BAA4B;AACrC;AAAA,EACE;AAAA,EACA;AAAA,OACK;AACP,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,WAAAC,UAAS,QAAAC,aAAY;;;ACP9B,SAAS,uBAAuB;AA2BzB,SAAS,gBACd,WACA,MACA,aACA;AACA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,aAAa,gBAAgB,WAAW;AAAA,MACtC,QAAQ;AAAA,MACR,cAAc;AAAA,IAChB,CAAC;AAAA,EACH;AACF;;;ACzCA,SAAS,SAAS;AAQX,IAAM,uBAAuB,EAAE,OAAO;AAAA,EAC3C,OAAO,EAAE,OAAO,EACb,IAAI,GAAG,qCAAqC,EAC5C,IAAI,KAAK,qCAAqC,EAC9C;AAAA,IACC;AAAA,EASF;AAAA,EAEF,OAAO,EAAE,OAAO,EACb,IAAI,EACJ,IAAI,GAAG,0BAA0B,EACjC,IAAI,IAAI,wBAAwB,EAChC,QAAQ,CAAC,EACT;AAAA,IACC;AAAA,EAGF;AACJ,CAAC;;;AClCD,SAAS,KAAAC,UAAS;AAOX,IAAM,oBAAoBA,GAAE,OAAO;AAAA,EACxC,MAAMA,GAAE,OAAO,EACZ,IAAI,IAAI,6CAA6C,EACrD;AAAA,IACC;AAAA,EAGF;AAAA,EAEF,OAAOA,GAAE,OAAO,EACb,IAAI,EACJ,IAAI,GAAG,0BAA0B,EACjC,IAAI,IAAI,wBAAwB,EAChC,QAAQ,CAAC,EACT;AAAA,IACC;AAAA,EAEF;AACJ,CAAC;;;ACzBD,SAAS,KAAAC,UAAS;AAOX,IAAM,uBAAuBA,GAAE,OAAO;AAAA,EAC3C,UAAUA,GAAE,OAAO,EAChB,IAAI,GAAG,0BAA0B,EACjC;AAAA,IACC;AAAA,EAEF;AAAA,EAEF,gBAAgBA,GAAE,QAAQ,EACvB,QAAQ,IAAI,EACZ;AAAA,IACC;AAAA,EAIF;AACJ,CAAC;;;ACvBD,SAAS,KAAAC,UAAS;AAOX,IAAM,sBAAsBA,GAAE,OAAO;AAAA,EAC1C,SAASA,GAAE,OAAO,EACf,SAAS,EACT;AAAA,IACC;AAAA,EAMF;AAAA,EAEF,UAAUA,GAAE,OAAO,EAChB,SAAS,EACT;AAAA,IACC;AAAA,EAGF;AACJ,CAAC;;;ACZM,IAAM,QAAQ;AAAA,EACnB;AAAA,IACE;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;ANzBA;AACA;AACA;AACA;AACA;AACA;AACA;;;AOfA;AADA,OAAO,cAAc;AAcd,IAA
M,cAAN,MAAkB;AAAA,EACf,UAAqC;AAAA,EACrC,iBAA8C,oBAAI,IAAI;AAAA,EACtD;AAAA,EACA;AAAA,EACA,kBAA4C;AAAA,EAEpD,YAAY,SAAiB,QAAuC;AAClE,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,MAAM,SAA2C;AACrD,QAAI,KAAK,SAAS;AAChB,YAAM,IAAI,MAAM,iCAAiC;AAAA,IACnD;AAEA,SAAK,kBAAkB;AAGvB,QAAI;AACJ,QAAI;AAEJ,QAAI,eAAe,KAAK,MAAM,GAAG;AAC/B,wBAAkB,KAAK,OAAO,SAAS;AACvC,wBAAkB,KAAK,OAAO,SAAS;AAAA,IACzC,WAAW,eAAe,KAAK,MAAM,GAAG;AAEtC,wBAAkB,KAAK,OAAO,WAAW,QAAQ,OAAK,EAAE,OAAO,OAAO;AACtE,wBAAkB,KAAK,OAAO,WAAW,QAAQ,OAAK,EAAE,OAAO,OAAO;AAAA,IACxE,OAAO;AACL,wBAAkB,CAAC,MAAM;AACzB,wBAAkB,CAAC;AAAA,IACrB;AAGA,SAAK,UAAU,SAAS,MAAM,iBAAiB;AAAA,MAC7C,KAAK,KAAK;AAAA,MACV,SAAS;AAAA,MACT,YAAY;AAAA,MACZ,eAAe;AAAA;AAAA,MACf,kBAAkB;AAAA,QAChB,oBAAoB;AAAA;AAAA,QACpB,cAAc;AAAA,MAChB;AAAA;AAAA,MAEA,YAAY;AAAA,MACZ,UAAU;AAAA,MACV,gBAAgB;AAAA,IAClB,CAAC;AAGD,SAAK,QACF,GAAG,OAAO,CAAC,aAAa,KAAK,aAAa,OAAO,QAAQ,CAAC,EAC1D,GAAG,UAAU,CAAC,aAAa,KAAK,aAAa,UAAU,QAAQ,CAAC,EAChE,GAAG,UAAU,CAAC,aAAa,KAAK,aAAa,UAAU,QAAQ,CAAC,EAChE,GAAG,SAAS,CAAC,UAAU;AACtB,cAAQ,MAAM,8BAA8B,KAAK,EAAE;AAAA,IACrD,CAAC;AAGH,UAAM,IAAI,QAAc,CAAC,YAAY;AACnC,WAAK,QAAS,GAAG,SAAS,MAAM;AAC9B,gBAAQ;AAAA,MACV,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,aAAa,MAAmC,UAAwB;AAE9E,UAAM,gBAAgB,KAAK,eAAe,IAAI,QAAQ;AACtD,QAAI,eAAe;AACjB,mBAAa,aAAa;AAAA,IAC5B;AAGA,UAAM,QAAQ,WAAW,MAAM;AAC7B,WAAK,eAAe,OAAO,QAAQ;AAGnC,UAAI,KAAK,iBAAiB;AACxB,cAAM,eAAe,SAAS,WAAW,GAAG,IACxC,WACA,GAAG,KAAK,OAAO,IAAI,QAAQ;AAE/B,YAAI;AACF,gBAAM,SAAS,KAAK,gBAAgB;AAAA,YAClC;AAAA,YACA,UAAU;AAAA,UACZ,CAAC;AAGD,cAAI,kBAAkB,SAAS;AAC7B,mBAAO,MAAM,CAAC,UAAU;AACtB,sBAAQ,MAAM,sCAAsC,KAAK,EAAE;AAAA,YAC7D,CAAC;AAAA,UACH;AAAA,QACF,SAAS,OAAO;AACd,kBAAQ,MAAM,sCAAsC,KAAK,EAAE;AAAA,QAC7D;AAAA,MACF;AAAA,IACF,GAAG,KAAK,OAAO,aAAa,UAAU;AAEtC,SAAK,eAAe,IAAI,UAAU,KAAK;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAsB;AAC1B,QAAI,CAAC,KAAK,SAAS;AACjB;AAAA,IACF;AAGA,eAAW,SAAS,KAAK,eAAe,OAAO,GAAG;AAChD,mBAAa,KAAK;AAAA,IACpB;AACA,SAAK,eAAe,MAAM;AAG1B,UAAM,KAAK,QAAQ,MAAM;AACzB,SAAK,UAAU;AACf,SAAK,kBAAkB;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAKA,kBAA4B;AAC1B,QAAI,CAAC,KAAK,SAAS;AACjB,aAAO,CAAC;AAAA,IACV;AAEA,UAAM,UAAU,KAAK,QAAQ,WAAW;AACxC,UAAM,QAAkB,CAAC;AAEzB,eAAW,CAAC,KAAK,SAAS,KAAK,OAAO,QAAQ,OAAO,GAAG;AACtD,iBAAW,YAAY,WAAW;AAChC,cAAM,KAAK,GAAG,GAAG,IAAI,QAAQ,EAAE;AAAA,MACjC;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,YAAqB;AACnB,WAAO,KAAK,YAAY;AAAA,EAC1B;AACF;;;AP7JA;;;AQjBA;AADA,SAAoB,gBAAgB;AAoC7B,SAAS,gBACd,QACA,SACA;AACA,SAAO,OAAO,SAAkB;AAC9B,QAAI;AAEF,YAAM,YAAY,OAAO,MAAM,IAAI;AAGnC,YAAM,SAAS,MAAM,QAAQ,SAAS;AAGtC,aAAO;AAAA,QACL,SAAS,CAAC;AAAA,UACR,MAAM;AAAA,UACN,MAAM,KAAK,UAAU,QAAQ,MAAM,CAAC;AAAA,QACtC,CAAC;AAAA,MACH;AAAA,IAEF,SAAS,OAAO;AAEd,UAAI,iBAAiB,UAAU;AAC7B,eAAO;AAAA,UACL,SAAS;AAAA,UACT,SAAS,CAAC;AAAA,YACR,MAAM;AAAA,YACN,MAAM,KAAK,UAAU;AAAA,cACnB,OAAO;AAAA,cACP;AAAA,cACA,SAAS,MAAM,OAAO,IAAI,QAAM;AAAA,gBAC9B,OAAO,EAAE,KAAK,KAAK,GAAG;AAAA,gBACtB,SAAS,EAAE;AAAA,cACb,EAAE;AAAA,YACJ,GAAG,MAAM,CAAC;AAAA,UACZ,CAAC;AAAA,QACH;AAAA,MACF;AAGA,UAAI,iBAAiB,WAAW;AAC9B,eAAO;AAAA,UACL,SAAS;AAAA,UACT,SAAS,CAAC;AAAA,YACR,MAAM;AAAA,YACN,MAAM,KAAK,UAAU,MAAM,OAAO,GAAG,MAAM,CAAC;AAAA,UAC9C,CAAC;AAAA,QACH;AAAA,MACF;AAGA,cAAQ,MAAM,qCAAqC,KAAK;AACxD,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,CAAC;AAAA,UACR,MAAM;AAAA,UACN,MAAM,KAAK,UAAU;AAAA,YACnB,OAAO,iBAAiB,QAAQ,MAAM,UAAU;AAAA,YAChD;AAAA,UACF,GAAG,MAAM,CAAC;AAAA,QACZ,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;;;AR1EA;AAGA,IAAMC,cAAaC,eAAc,YAAY,GAAG;AAChD,IAAMC,aAAYC,SAAQH,WAAU;AACpC,IAAMI,WAAUC,eAAc,YAAY,GAAG;AAE7C,IAAIC;AACJ,IAAI;AACF,EAAAA,eAAcF,SAAQG,MAAKL,YAAW,iBA
AiB,CAAC;AAC1D,QAAQ;AACN,EAAAI,eAAcF,SAAQG,MAAKL,YAAW,oBAAoB,CAAC;AAC7D;AAQA,eAAsB,eAAe,SAA0C;AAC7E,QAAM,EAAE,SAAS,SAAS,MAAM,IAAI;AAGpC,QAAM,MAAM,CAAC,YAAoB;AAC/B,QAAI,SAAS;AACX,cAAQ,MAAM,cAAc,OAAO,EAAE;AAAA,IACvC;AAAA,EACF;AAEA,MAAI,4BAA4B;AAGhC,QAAM,aAAa,IAAI,gBAAgB;AACvC,QAAM,WAAW,IAAI,SAAS,OAAO;AAErC,MAAI;AACF,QAAI,4BAA4B;AAChC,UAAM,WAAW,WAAW;AAE5B,QAAI,4BAA4B;AAChC,UAAM,SAAS,WAAW;AAE1B,QAAI,gCAAgC;AAAA,EACtC,SAAS,OAAO;AACd,YAAQ,MAAM,yBAAyB,KAAK,EAAE;AAC9C,YAAQ,KAAK,CAAC;AAAA,EAChB;AAGA,QAAM,SAAS,IAAI;AAAA,IACjB;AAAA,MACE,MAAM;AAAA,MACN,SAASI,aAAY;AAAA,IACvB;AAAA,IACA;AAAA,MACE,cAAc;AAAA,QACZ,OAAO,CAAC;AAAA,MACV;AAAA,IACF;AAAA,EACF;AAGA,SAAO,kBAAkB,wBAAwB,aAAa;AAAA,IAC5D;AAAA,EACF,EAAE;AAGF,QAAM,oBAAoB,YAAY;AACpC,QAAI;AACF,YAAM,iBAAiB,MAAM,SAAS,aAAa;AACnD,UAAI,gBAAgB;AAClB,YAAI,oDAAoD;AACxD,cAAM,SAAS,UAAU;AACzB,YAAI,8BAA8B;AAAA,MACpC;AAAA,IACF,SAAS,OAAO;AAEd,UAAI,yBAAyB,KAAK,EAAE;AAAA,IACtC;AAAA,EACF;AAGA,QAAM,mBAAmB,OAAO;AAAA,IAC9B,cAAc,SAAS,kBAAkB;AAAA,IACzC,WAAW,SAAS,eAAe;AAAA,EACrC;AAIA,QAAM,uBAAuB,YAAY,YAAY;AACnD,UAAM,kBAAkB;AAAA,EAC1B,GAAG,yBAAyB;AAG5B,SAAO,kBAAkB,uBAAuB,OAAO,YAAY;AACjE,UAAM,EAAE,MAAM,WAAW,KAAK,IAAI,QAAQ;AAE1C,QAAI,uBAAuB,IAAI,EAAE;AAEjC,QAAI;AACF,cAAQ,MAAM;AAAA,QACd,KAAK;AACH,iBAAO,MAAM;AAAA,YACX;AAAA,YACA,OAAO,kBAAkB;AACvB,kBAAI,mBAAmB,cAAc,KAAK,GAAG;AAG7C,oBAAM,kBAAkB;AAExB,oBAAM,iBAAiB,MAAM,WAAW,MAAM,cAAc,KAAK;AACjE,oBAAM,UAAU,MAAM,SAAS,OAAO,gBAAgB,cAAc,OAAO,cAAc,KAAK;AAE9F,kBAAI,SAAS,QAAQ,MAAM,UAAU;AAErC,qBAAO;AAAA,gBACL,WAAW,iBAAiB;AAAA,gBAC5B;AAAA,cACF;AAAA,YACF;AAAA,UACF,EAAE,IAAI;AAAA,QAER,KAAK;AACH,iBAAO,MAAM;AAAA,YACX;AAAA,YACA,OAAO,kBAAkB;AACvB,kBAAI,yBAAyB;AAG7B,oBAAM,kBAAkB;AAExB,oBAAM,gBAAgB,MAAM,WAAW,MAAM,cAAc,IAAI;AAE/D,oBAAM,UAAU,MAAM,SAAS,OAAO,eAAe,cAAc,OAAO,cAAc,IAAI;AAE5F,kBAAI,SAAS,QAAQ,MAAM,iBAAiB;AAE5C,qBAAO;AAAA,gBACL,WAAW,iBAAiB;AAAA,gBAC5B;AAAA,cACF;AAAA,YACF;AAAA,UACF,EAAE,IAAI;AAAA,QAER,KAAK;AACH,iBAAO,MAAM;AAAA,YACX;AAAA,YACA,OAAO,kBAAkB;AACvB,kBAAI,wBAAwB,cAAc,QAAQ,EAAE;AAGpD,oBAAM,kBAAkB;AAIxB,oBAAM,gBAAgB,MAAM,WAAW,MAAM,cAAc,QAAQ;AACnE,oBAAM,aAAa,MAAM,SAAS,OAAO,eAAe,IAAI,cAAc,QAAQ;AAGlF,oBAAM,aAAa,WAAW;AAAA,gBAAO,OACnC,EAAE,SAAS,KAAK,SAAS,cAAc,QAAQ,KAAK,cAAc,SAAS,SAAS,EAAE,SAAS,IAAI;AAAA,cACrG;AAEA,kBAAI,UAAU;AAEd,kBAAI,cAAc,kBAAkB,WAAW,SAAS,GAAG;AAEzD,sBAAM,mBAAmB,MAAM,WAAW,MAAM,WAAW,CAAC,EAAE,OAAO;AACrE,sBAAM,UAAU,MAAM,SAAS,OAAO,kBAAkB,GAAG,WAAW,CAAC,EAAE,OAAO;AAGhF,sBAAM,oBAAoB,QAAQ;AAAA,kBAAO,OACvC,CAAC,EAAE,SAAS,KAAK,SAAS,cAAc,QAAQ,KAAK,CAAC,cAAc,SAAS,SAAS,EAAE,SAAS,IAAI;AAAA,gBACvG;AAEA,0BAAU,CAAC,GAAG,YAAY,GAAG,iBAAiB;AAAA,cAChD;AAEA,kBAAI,SAAS,QAAQ,MAAM,SAAS;AAEpC,qBAAO;AAAA,gBACL,WAAW,iBAAiB;AAAA,gBAC5B,MAAM,cAAc;AAAA,gBACpB,QAAQ;AAAA,cACV;AAAA,YACF;AAAA,UACF,EAAE,IAAI;AAAA,QAER,KAAK;AACH,iBAAO,MAAM;AAAA,YACX;AAAA,YACA,OAAO,kBAAkB;AACvB,kBAAI,2CAA2C;AAG/C,oBAAM,kBAAkB;AAExB,kBAAI;AACJ,kBAAI,aAAa;AAEjB,kBAAI;AAEF,0BAAU,MAAM,SAAS,aAAa;AAAA,kBACpC,UAAU,cAAc;AAAA,kBACxB,SAAS,cAAc;AAAA,kBACvB,OAAO;AAAA,gBACT,CAAC;AAID,oBAAI,QAAQ,WAAW,MAAM,cAAc,YAAY,cAAc,UAAU;AAC7E,sBAAI,oDAAoD;AACxD,4BAAU,MAAM,SAAS,eAAe;AAAA,oBACtC,UAAU,cAAc;AAAA,oBACxB,SAAS,cAAc;AAAA,oBACvB,OAAO;AAAA,kBACT,CAAC;AACD,+BAAa;AAAA,gBACf;AAAA,cACF,SAAS,OAAO;AAEd,oBAAI,sDAAsD,KAAK,EAAE;AACjE,0BAAU,MAAM,SAAS,eAAe;AAAA,kBACtC,UAAU,cAAc;AAAA,kBACxB,SAAS,cAAc;AAAA,kBACvB,OAAO;AAAA,gBACT,CAAC;AACD,6BAAa;AAAA,cACf;AAEA,kBAAI,SAAS,QAAQ,MAAM,kBAAkB,UAAU,SAAS;AAEhE,qBAAO;AAAA,gBACL,WAAW,iBAAiB;AAAA,gBAC5B,QAAQ;AAAA,gBACR;AAAA,gBACA,MAAM,eAAe,YACjB,oFACA;AAAA,cACN;AAAA,YACF;AAAA,UACF,EAAE,IAAI;AAAA,QAER;AACE,gBAAM,IAAI;AAAA,YACR,iBAAiB,IAAI;AAAA;AAAA,YAErB,EAAE,eAAe,MAAM,gBAA
gB,MAAM,IAAI,OAAK,EAAE,IAAI,EAAE;AAAA,YAC9D;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AAEd,UAAI,iBAAiB,WAAW;AAC9B,eAAO;AAAA,UACL,SAAS;AAAA,UACT,SAAS,CAAC;AAAA,YACR,MAAM;AAAA,YACN,MAAM,KAAK,UAAU,MAAM,OAAO,GAAG,MAAM,CAAC;AAAA,UAC9C,CAAC;AAAA,QACH;AAAA,MACF;AAGA,cAAQ,MAAM,uCAAuC,IAAI,KAAK,KAAK;AACnE,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,CAAC;AAAA,UACR,MAAM;AAAA,UACN,MAAM,KAAK,UAAU;AAAA,YACnB,OAAO,iBAAiB,QAAQ,MAAM,UAAU;AAAA,YAChD;AAAA,YACA,MAAM;AAAA,UACR,GAAG,MAAM,CAAC;AAAA,QACZ,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF,CAAC;AAGD,QAAM,SAAS,MAAM,cAAc,KAAK,OAAO;AAG/C,QAAM,WAAW,MAAM,SAAS,QAAQ;AAExC,MAAI,CAAC,YAAY,OAAO,IAAI,qBAAqB;AAC/C,QAAI,wDAAiD;AACrD,QAAI,oEAA0D;AAE9D,QAAI;AAEF,YAAM,EAAE,eAAAE,eAAc,IAAI,MAAM;AAChC,YAAMA,eAAc,EAAE,SAAS,SAAS,KAAK,CAAC;AAC9C,UAAI,mCAA8B;AAAA,IACpC,SAAS,OAAO;AACd,UAAI,0CAAgC,KAAK,EAAE;AAC3C,UAAI,kCAAkC;AAAA,IAExC;AAAA,EACF,WAAW,CAAC,UAAU;AACpB,QAAI,oEAA0D;AAC9D,QAAI,0CAA0C;AAAA,EAChD;AAGA,MAAI,aAAqC;AACzC,MAAI,kBAAyC;AAC7C,MAAI,cAAkC;AAEtC,MAAI,OAAO,aAAa,SAAS;AAC/B,UAAM,eAAe,MAAM,eAAe;AAC1C,UAAM,SAAS,MAAM,UAAU,OAAO;AAEtC,QAAI,gBAAgB,QAAQ;AAC1B,UAAI,gCAA2B;AAC/B,mBAAa,IAAI,gBAAgB,SAAS,SAAS,MAAM;AAGzD,UAAI;AACF,YAAI,6BAA6B;AACjC,cAAM,eAAe,MAAM,WAAW,WAAW;AAEjD,YAAI,gBAAgB,aAAa,SAAS,GAAG;AAC3C,cAAI,mCAA4B,aAAa,MAAM,gBAAgB;AACnE,cAAI,6BAA6B;AAEjC,gBAAM,QAAQ,MAAM;AAAA,YAClB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA,EAAE,QAAQ;AAAA,UACZ;AAEA,cAAI,oBAAe,KAAK,QAAQ;AAAA,QAClC,OAAO;AACL,cAAI,2CAAsC;AAAA,QAC5C;AAAA,MACF,SAAS,OAAO;AACd,YAAI,kDAAkD,KAAK,EAAE;AAAA,MAC/D;AAGA,UAAI,gDAA2C,OAAO,aAAa,iBAAiB,GAAI,IAAI;AAE5F,wBAAkB,YAAY,YAAY;AACxC,YAAI;AACF,gBAAM,eAAe,MAAM,WAAY,cAAc;AAErD,cAAI,gBAAgB,aAAa,SAAS,GAAG;AAC3C,gBAAI,kCAA2B,aAAa,MAAM,gBAAgB;AAClE,gBAAI,6BAA6B;AAGjC;AAAA,cACE;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,cACA,EAAE,QAAQ;AAAA,YACZ,EAAE,KAAK,WAAS;AACd,kBAAI,uCAAkC,KAAK,QAAQ;AAAA,YACrD,CAAC,EAAE,MAAM,WAAS;AAChB,kBAAI,uCAAuC,KAAK,EAAE;AAAA,YACpD,CAAC;AAAA,UACH;AAAA,QACF,SAAS,OAAO;AACd,cAAI,wCAAwC,KAAK,EAAE;AAAA,QACrD;AAAA,MACF,GAAG,OAAO,aAAa,cAAc;AAAA,IACvC,OAAO;AACL,UAAI,CAAC,cAAc;AACjB,YAAI,4CAA4C;AAAA,MAClD,WAAW,CAAC,QAAQ;AAClB,YAAI,+CAA+C;AAAA,MACrD;AAAA,IACF;AAAA,EACF,OAAO;AACL,QAAI,yCAAyC;AAAA,EAC/C;AAIA,QAAM,sBAAsB,UAAU,SAAY,QAAQ,OAAO,aAAa;AAE9E,MAAI,qBAAqB;AACvB,QAAI,oCAA6B;AACjC,kBAAc,IAAI,YAAY,SAAS,MAAM;AAE7C,QAAI;AACF,YAAM,YAAY,MAAM,OAAO,UAAU;AACvC,cAAM,EAAE,MAAM,SAAS,IAAI;AAE3B,YAAI,SAAS,UAAU;AAErB,cAAI,kCAAsB,QAAQ,EAAE;AACpC,cAAI;AACF,kBAAM,SAAS,aAAa,QAAQ;AAGpC,kBAAM,WAAW,IAAI,gBAAgB,SAAS,MAAM;AACpD,kBAAM,SAAS,WAAW,QAAQ;AAElC,gBAAI,kBAAa,QAAQ,aAAa;AAAA,UACxC,SAAS,OAAO;AACd,gBAAI,6BAA6B,QAAQ,KAAK,KAAK,EAAE;AAAA,UACvD;AAAA,QACF,OAAO;AAEL,gBAAM,SAAS,SAAS,QAAQ,UAAU;AAC1C,cAAI,kBAAW,MAAM,KAAK,QAAQ,EAAE;AAGpC,0BAAgB,UAAU,UAAU,YAAY,QAAQ,EAAE,QAAQ,CAAC,EAChE,MAAM,CAAC,UAAU;AAChB,gBAAI,8BAA8B,QAAQ,KAAK,KAAK,EAAE;AAAA,UACxD,CAAC;AAAA,QACL;AAAA,MACF,CAAC;AAED,YAAM,eAAe,YAAY,gBAAgB,EAAE;AACnD,UAAI,0CAAqC,YAAY,SAAS;AAAA,IAChE,SAAS,OAAO;AACd,UAAI,0CAA0C,KAAK,EAAE;AACrD,oBAAc;AAAA,IAChB;AAAA,EACF;AAGA,QAAM,UAAU,YAAY;AAC1B,QAAI,6BAA6B;AACjC,kBAAc,oBAAoB;AAClC,QAAI,iBAAiB;AACnB,oBAAc,eAAe;AAAA,IAC/B;AACA,QAAI,aAAa;AACf,YAAM,YAAY,KAAK;AAAA,IACzB;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAGA,UAAQ,GAAG,UAAU,OAAO;AAC5B,UAAQ,GAAG,WAAW,OAAO;AAG7B,QAAM,YAAY,IAAI,qBAAqB;AAI3C,YAAU,UAAU,MAAM;AACxB,QAAI,oDAAoD;AACxD,YAAQ,EAAE,MAAM,MAAM,QAAQ,KAAK,CAAC,CAAC;AAAA,EACvC;AAEA,YAAU,UAAU,CAAC,UAAU;AAC7B,QAAI,oBAAoB,KAAK,EAAE;AAAA,EAEjC;AAEA,QAAM,OAAO,QAAQ,SAAS;AAE9B,MAAI,2CAA2C;AACjD;;;ADteA,eAAsB,aAAa,SAA+E;AAChH,QAAM,UAAU,QAAQ,OAAOC,OAAK,QAAQ,QAAQ,IAAI,IAAI,QAAQ,IAAI;AAExE,MAAI;AAEF,QAAI,QAAQ,MAAM;A
AChB,UAAI;AACF,cAAM,QAAQ,MAAMC,KAAG,KAAK,OAAO;AACnC,YAAI,CAAC,MAAM,YAAY,GAAG;AACxB,kBAAQ,MAAMC,OAAM,IAAI,0CAA0C,OAAO,EAAE,CAAC;AAC5E,kBAAQ,KAAK,CAAC;AAAA,QAChB;AAAA,MACF,SAAS,OAAO;AACd,YAAK,MAAgC,SAAS,UAAU;AACtD,kBAAQ,MAAMA,OAAM,IAAI,2CAA2C,OAAO,EAAE,CAAC;AAAA,QAC/E,WAAY,MAAgC,SAAS,UAAU;AAC7D,kBAAQ,MAAMA,OAAM,IAAI,8CAA8C,OAAO,EAAE,CAAC;AAAA,QAClF,OAAO;AACL,kBAAQ,MAAMA,OAAM,IAAI,6CAA6C,OAAO,EAAE,CAAC;AAC/E,kBAAQ,MAAMA,OAAM,IAAK,MAAgB,OAAO,CAAC;AAAA,QACnD;AACA,gBAAQ,KAAK,CAAC;AAAA,MAChB;AAAA,IACF;AAGA,eAAW;AACX,YAAQ,MAAMA,OAAM,KAAK,0BAA0B,CAAC;AAEpD,QAAI,QAAQ,MAAM;AAChB,cAAQ,MAAMA,OAAM,IAAI,iBAAiB,OAAO;AAAA,CAAI,CAAC;AAAA,IACvD;AAGA,QAAI,QAAQ,OAAO;AACjB,cAAQ,MAAMA,OAAM,OAAO,yEAA+D,CAAC;AAC3F,cAAQ,MAAMA,OAAM,IAAI,+CAA+C,CAAC;AAAA,IAC1E;AAIA,UAAM,QAAQ,QAAQ,UAAU,QAAQ,QAAQ,QAAQ,OAAO;AAE/D,UAAM,eAAe;AAAA,MACnB;AAAA,MACA,SAAS;AAAA,MACT;AAAA,IACF,CAAC;AAAA,EACH,SAAS,OAAO;AACd,YAAQ,MAAMA,OAAM,IAAI,6BAA6B,GAAG,KAAK;AAC7D,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;;;AfhDA,IAAMC,cAAaC,eAAc,YAAY,GAAG;AAChD,IAAMC,aAAYC,SAAQH,WAAU;AACpC,IAAMI,WAAUC,eAAc,YAAY,GAAG;AAE7C,IAAIC;AACJ,IAAI;AACF,EAAAA,eAAcF,SAAQG,MAAKL,YAAW,iBAAiB,CAAC;AAC1D,QAAQ;AACN,EAAAI,eAAcF,SAAQG,MAAKL,YAAW,oBAAoB,CAAC;AAC7D;AAEO,IAAM,UAAU,IAAI,QAAQ;AAEnC,QACG,KAAK,MAAM,EACX,YAAY,sDAAsD,EAClE,QAAQI,aAAY,OAAO;AAE9B,QACG,QAAQ,MAAM,EACd,YAAY,0CAA0C,EACtD,OAAO,iBAAiB,0CAA0C,EAClE,OAAO,aAAa,2CAA2C,EAC/D,OAAO,qBAAqB,oDAAoD,EAChF,OAAO,WAAW;AAErB,QACG,QAAQ,OAAO,EACf,YAAY,wCAAwC,EACpD,OAAO,eAAe,uCAAuC,EAC7D,OAAO,eAAe,8CAA8C,EACpE,OAAO,iBAAiB,uCAAuC,EAC/D,OAAO,YAAY;AAEtB,QACG,QAAQ,OAAO,EACf,YAAY,6CAA6C,EACzD,OAAO,qBAAqB,gCAAgC,MAAM,EAClE,OAAO,cAAc,wCAAwC,EAC7D,OAAO,eAAe,sDAAsD,EAC5E,OAAO,qBAAqB,yDAAyD,EACrF,OAAO,YAAY;AAEtB,QACG,QAAQ,QAAQ,EAChB,YAAY,qCAAqC,EACjD,OAAO,aAAa;;;AyBtDvB,QAAQ,MAAM;","names":["require","fs","path","fs","path","fs","path","init_version","fs","path","e","detectLanguage","search","detectLanguage","init_chunker","path","init_version","path","os","crypto","init_version","fs","path","fs","path","fs","fs","manifest","error","init_chunker","fs","chalk","manifest","embeddings","isGitAvailable","isGitRepo","GitStateTracker","gitAvailable","isRepo","init_chunker","init_version","createRequire","fileURLToPath","dirname","join","fs","path","fileURLToPath","chalk","createRequire","fileURLToPath","dirname","join","__filename","__dirname","require","packageJson","fs","path","chalk","fs","path","fs","path","path","packageJson","fs","fs","path","path","fs","fs","path","path","fs","path","fs","__filename","fileURLToPath","__dirname","path","fs","chalk","init_version","chalk","fs","path","path","chalk","fs","chalk","VectorDB","ManifestManager","chalk","chalk","fs","path","createRequire","fileURLToPath","dirname","join","z","z","z","__filename","fileURLToPath","__dirname","dirname","require","createRequire","packageJson","join","indexCodebase","path","fs","chalk","__filename","fileURLToPath","__dirname","dirname","require","createRequire","packageJson","join"]}