raggrep 0.1.0

Files changed (49)
  1. package/LICENSE +22 -0
  2. package/README.md +15 -0
  3. package/dist/application/index.d.ts +7 -0
  4. package/dist/application/usecases/cleanupIndex.d.ts +54 -0
  5. package/dist/application/usecases/index.d.ts +9 -0
  6. package/dist/application/usecases/indexDirectory.d.ts +54 -0
  7. package/dist/application/usecases/searchIndex.d.ts +48 -0
  8. package/dist/cli/main.d.ts +1 -0
  9. package/dist/cli/main.js +1596 -0
  10. package/dist/cli/main.js.map +22 -0
  11. package/dist/composition.d.ts +52 -0
  12. package/dist/domain/entities/chunk.d.ts +41 -0
  13. package/dist/domain/entities/config.d.ts +43 -0
  14. package/dist/domain/entities/fileIndex.d.ts +58 -0
  15. package/dist/domain/entities/fileSummary.d.ts +61 -0
  16. package/dist/domain/entities/index.d.ts +14 -0
  17. package/dist/domain/entities/searchResult.d.ts +36 -0
  18. package/dist/domain/index.d.ts +11 -0
  19. package/dist/domain/ports/embedding.d.ts +60 -0
  20. package/dist/domain/ports/filesystem.d.ts +78 -0
  21. package/dist/domain/ports/index.d.ts +10 -0
  22. package/dist/domain/ports/storage.d.ts +79 -0
  23. package/dist/domain/services/bm25.d.ts +82 -0
  24. package/dist/domain/services/bm25.test.d.ts +4 -0
  25. package/dist/domain/services/index.d.ts +8 -0
  26. package/dist/domain/services/keywords.d.ts +27 -0
  27. package/dist/index.d.ts +98 -0
  28. package/dist/index.js +1378 -0
  29. package/dist/index.js.map +22 -0
  30. package/dist/indexer/index.d.ts +33 -0
  31. package/dist/infrastructure/embeddings/index.d.ts +4 -0
  32. package/dist/infrastructure/embeddings/transformersEmbedding.d.ts +34 -0
  33. package/dist/infrastructure/filesystem/index.d.ts +4 -0
  34. package/dist/infrastructure/filesystem/nodeFileSystem.d.ts +28 -0
  35. package/dist/infrastructure/index.d.ts +9 -0
  36. package/dist/infrastructure/storage/fileIndexStorage.d.ts +68 -0
  37. package/dist/infrastructure/storage/index.d.ts +4 -0
  38. package/dist/modules/registry.d.ts +3 -0
  39. package/dist/modules/semantic/index.d.ts +55 -0
  40. package/dist/modules/semantic/parseCode.d.ts +44 -0
  41. package/dist/modules/semantic/parseCode.test.d.ts +4 -0
  42. package/dist/search/index.d.ts +11 -0
  43. package/dist/types.d.ts +84 -0
  44. package/dist/utils/bm25.d.ts +9 -0
  45. package/dist/utils/config.d.ts +45 -0
  46. package/dist/utils/embeddings.d.ts +46 -0
  47. package/dist/utils/embeddings.test.d.ts +4 -0
  48. package/dist/utils/tieredIndex.d.ts +100 -0
  49. package/package.json +66 -0
package/dist/cli/main.js.map
@@ -0,0 +1,22 @@
+ {
+ "version": 3,
+ "sources": ["../../src/utils/embeddings.ts", "../../src/domain/entities/config.ts", "../../src/domain/entities/index.ts", "../../src/utils/config.ts", "../../src/domain/services/bm25.ts", "../../src/modules/semantic/parseCode.ts", "../../src/domain/services/keywords.ts", "../../src/utils/tieredIndex.ts", "../../src/modules/semantic/index.ts", "../../src/modules/registry.ts", "../../src/indexer/index.ts", "../../src/search/index.ts", "../../src/cli/main.ts"],
+ "sourcesContent": [
+ "// Local embedding provider using Transformers.js\n// Models are automatically downloaded and cached on first use\n\nimport { pipeline, env, type FeatureExtractionPipeline } from '@xenova/transformers';\nimport * as path from 'path';\nimport * as os from 'os';\n\n// ============================================================================\n// Configuration\n// ============================================================================\n\n// Configure cache directory for models\n// Uses ~/.cache/raggrep/models by default\nconst CACHE_DIR = path.join(os.homedir(), '.cache', 'raggrep', 'models');\n\n// Set the cache directory for transformers.js\nenv.cacheDir = CACHE_DIR;\n\n// Disable local model check (always try to use cache first, then download)\nenv.allowLocalModels = true;\n\n// Available embedding models (smaller = faster, larger = better quality)\nexport const EMBEDDING_MODELS = {\n // Default: Good balance of speed and quality (~33M params, 384 dimensions)\n 'all-MiniLM-L6-v2': 'Xenova/all-MiniLM-L6-v2',\n \n // Higher quality, slightly slower (~33M params, 384 dimensions)\n 'all-MiniLM-L12-v2': 'Xenova/all-MiniLM-L12-v2',\n \n // BGE small - good for code (~33M params, 384 dimensions)\n 'bge-small-en-v1.5': 'Xenova/bge-small-en-v1.5',\n \n // Even smaller/faster option (~22M params, 384 dimensions)\n 'paraphrase-MiniLM-L3-v2': 'Xenova/paraphrase-MiniLM-L3-v2',\n} as const;\n\nexport type EmbeddingModelName = keyof typeof EMBEDDING_MODELS;\n\n// ============================================================================\n// Embedding Provider\n// ============================================================================\n\nlet embeddingPipeline: FeatureExtractionPipeline | null = null;\nlet currentModelName: string | null = null;\nlet isInitializing = false;\nlet initPromise: Promise<void> | null = null;\n\nexport interface EmbeddingConfig {\n model: EmbeddingModelName;\n /** Show progress during model download */\n showProgress?: boolean;\n}\n\nconst DEFAULT_CONFIG: EmbeddingConfig = {\n model: 'all-MiniLM-L6-v2',\n showProgress: true,\n};\n\nlet currentConfig: EmbeddingConfig = { ...DEFAULT_CONFIG };\n\n/**\n * Configure the embedding model\n */\nexport function configureEmbeddings(config: Partial<EmbeddingConfig>): void {\n const newConfig = { ...currentConfig, ...config };\n \n // If model changed, reset pipeline\n if (newConfig.model !== currentConfig.model) {\n embeddingPipeline = null;\n currentModelName = null;\n }\n \n currentConfig = newConfig;\n}\n\n/**\n * Initialize the embedding pipeline (downloads model if needed)\n */\nasync function initializePipeline(): Promise<void> {\n if (embeddingPipeline && currentModelName === currentConfig.model) {\n return;\n }\n \n // Prevent multiple simultaneous initializations\n if (isInitializing && initPromise) {\n return initPromise;\n }\n \n isInitializing = true;\n \n initPromise = (async () => {\n const modelId = EMBEDDING_MODELS[currentConfig.model];\n \n if (currentConfig.showProgress) {\n console.log(`\\n Loading embedding model: ${currentConfig.model}`);\n console.log(` Cache: ${CACHE_DIR}`);\n }\n \n try {\n // Create the feature extraction pipeline\n // This will download the model on first run\n embeddingPipeline = await pipeline('feature-extraction', modelId, {\n progress_callback: currentConfig.showProgress \n ? (progress: { status: string; file?: string; progress?: number; loaded?: number; total?: number }) => {\n if (progress.status === 'progress' && progress.file) {\n const pct = progress.progress ? 
Math.round(progress.progress) : 0;\n process.stdout.write(`\\r Downloading ${progress.file}: ${pct}% `);\n } else if (progress.status === 'done' && progress.file) {\n process.stdout.write(`\\r Downloaded ${progress.file} \\n`);\n } else if (progress.status === 'ready') {\n // Model is ready\n }\n }\n : undefined,\n });\n \n currentModelName = currentConfig.model;\n \n if (currentConfig.showProgress) {\n console.log(` Model ready.\\n`);\n }\n } catch (error) {\n embeddingPipeline = null;\n currentModelName = null;\n throw new Error(`Failed to load embedding model: ${error}`);\n } finally {\n isInitializing = false;\n initPromise = null;\n }\n })();\n \n return initPromise;\n}\n\n/**\n * Get embedding for a single text\n */\nexport async function getEmbedding(text: string): Promise<number[]> {\n await initializePipeline();\n \n if (!embeddingPipeline) {\n throw new Error('Embedding pipeline not initialized');\n }\n \n // Get embeddings using mean pooling\n const output = await embeddingPipeline(text, {\n pooling: 'mean',\n normalize: true,\n });\n \n // Convert to array\n return Array.from(output.data as Float32Array);\n}\n\n/** Maximum number of texts to process in a single batch */\nconst BATCH_SIZE = 32;\n\n/**\n * Get embeddings for multiple texts (batched for efficiency)\n * \n * Processes texts in batches of BATCH_SIZE for better performance\n * while avoiding memory issues with very large batches.\n * \n * @param texts - Array of texts to embed\n * @returns Array of embedding vectors\n */\nexport async function getEmbeddings(texts: string[]): Promise<number[][]> {\n if (texts.length === 0) return [];\n \n await initializePipeline();\n \n if (!embeddingPipeline) {\n throw new Error('Embedding pipeline not initialized');\n }\n \n const results: number[][] = [];\n \n // Process in batches for efficiency\n for (let i = 0; i < texts.length; i += BATCH_SIZE) {\n const batch = texts.slice(i, i + BATCH_SIZE);\n \n // Process batch - transformers.js handles array inputs\n const outputs = await Promise.all(\n batch.map(async (text) => {\n const output = await embeddingPipeline!(text, {\n pooling: 'mean',\n normalize: true,\n });\n return Array.from(output.data as Float32Array);\n })\n );\n \n results.push(...outputs);\n }\n \n return results;\n}\n\n// ============================================================================\n// Vector Math\n// ============================================================================\n\n/**\n * Calculate cosine similarity between two vectors\n */\nexport function cosineSimilarity(a: number[], b: number[]): number {\n if (a.length !== b.length) {\n throw new Error('Vectors must have the same length');\n }\n\n let dotProduct = 0;\n let normA = 0;\n let normB = 0;\n\n for (let i = 0; i < a.length; i++) {\n dotProduct += a[i] * b[i];\n normA += a[i] * a[i];\n normB += b[i] * b[i];\n }\n\n if (normA === 0 || normB === 0) return 0;\n\n return dotProduct / (Math.sqrt(normA) * Math.sqrt(normB));\n}\n\n/**\n * Get current embedding configuration\n */\nexport function getEmbeddingConfig(): EmbeddingConfig {\n return { ...currentConfig };\n}\n\n/**\n * Get the cache directory path\n */\nexport function getCacheDir(): string {\n return CACHE_DIR;\n}\n\n/**\n * Check if a model is already cached\n */\nexport async function isModelCached(model: EmbeddingModelName = currentConfig.model): Promise<boolean> {\n const modelId = EMBEDDING_MODELS[model];\n const modelPath = path.join(CACHE_DIR, modelId.replace('/', '--'));\n \n try {\n const fs = await import('fs/promises');\n 
await fs.access(modelPath);\n return true;\n } catch {\n return false;\n }\n}\n",
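A minimal usage sketch of the embedding helpers in the entry above; the relative import path and the example strings are illustrative, and an ES-module (top-level await) context is assumed:

    // Embed two related snippets and compare them.
    import { configureEmbeddings, getEmbedding, cosineSimilarity } from './utils/embeddings';

    configureEmbeddings({ model: 'all-MiniLM-L6-v2', showProgress: false });
    const a = await getEmbedding('split a source file into chunks');
    const b = await getEmbedding('parse code into blocks');
    // Outputs are mean-pooled and normalized, so cosine similarity stays in a sane range.
    console.log(cosineSimilarity(a, b).toFixed(3));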
+ "/**\n * Config Entity\n * \n * Configuration for RAGgrep indexing and search operations.\n */\n\n/**\n * Configuration for a specific index module.\n */\nexport interface ModuleConfig {\n /** Unique module identifier */\n id: string;\n \n /** Whether the module is enabled */\n enabled: boolean;\n \n /** Module-specific options */\n options?: Record<string, unknown>;\n}\n\n/**\n * Main RAGgrep configuration.\n */\nexport interface Config {\n /** RAGgrep version */\n version: string;\n \n /** Directory name for index storage (default: '.raggrep') */\n indexDir: string;\n \n /** File extensions to index (e.g., ['.ts', '.tsx', '.js']) */\n extensions: string[];\n \n /** Paths to ignore during indexing */\n ignorePaths: string[];\n \n /** Enabled modules and their configurations */\n modules: ModuleConfig[];\n}\n\n/**\n * Default paths to ignore during indexing.\n */\nexport const DEFAULT_IGNORE_PATHS = [\n // Package managers & dependencies\n 'node_modules',\n '.pnpm-store',\n '.yarn',\n 'vendor',\n \n // Version control\n '.git',\n \n // Build outputs\n 'dist',\n 'build',\n 'out',\n '.output',\n 'target',\n \n // Framework-specific build outputs\n '.next',\n '.nuxt',\n '.svelte-kit',\n '.vercel',\n '.netlify',\n \n // Caches\n '.cache',\n '.turbo',\n '.parcel-cache',\n '.eslintcache',\n \n // Test & coverage\n 'coverage',\n '.nyc_output',\n \n // Python\n '__pycache__',\n '.venv',\n 'venv',\n '.pytest_cache',\n '*.egg-info',\n \n // IDE & editor\n '.idea',\n \n // RAGgrep index\n '.raggrep',\n];\n\n/**\n * Default file extensions to index.\n */\nexport const DEFAULT_EXTENSIONS = [\n '.ts', '.tsx', '.js', '.jsx',\n '.py', '.go', '.rs', '.java', '.md',\n];\n\n/**\n * Create a default configuration.\n */\nexport function createDefaultConfig(): Config {\n return {\n version: '0.1.0',\n indexDir: '.raggrep',\n extensions: DEFAULT_EXTENSIONS,\n ignorePaths: DEFAULT_IGNORE_PATHS,\n modules: [\n {\n id: 'semantic',\n enabled: true,\n options: {\n embeddingModel: 'all-MiniLM-L6-v2',\n },\n },\n ],\n };\n}\n\n",
+ "/**\n * Domain Entities\n *\n * Core business objects with no external dependencies.\n * These represent the fundamental concepts in the RAGgrep domain.\n */\n\n// Chunk - The fundamental unit of indexing\nexport type { Chunk, ChunkType } from \"./chunk\";\nexport { createChunkId } from \"./chunk\";\n\n// FileIndex - Tier 2 full index data\nexport type {\n FileIndex,\n FileManifestEntry,\n ModuleManifest,\n GlobalManifest,\n} from \"./fileIndex\";\n\n// FileSummary - Symbolic index (lightweight summaries)\nexport type {\n FileSummary,\n SymbolicIndexMeta,\n Tier1Manifest,\n} from \"./fileSummary\";\n\n// SearchResult - Query results\nexport type { SearchResult, SearchOptions } from \"./searchResult\";\nexport { DEFAULT_SEARCH_OPTIONS } from \"./searchResult\";\n\n// Config - Application configuration\nexport type { Config, ModuleConfig } from \"./config\";\nexport {\n DEFAULT_IGNORE_PATHS,\n DEFAULT_EXTENSIONS,\n createDefaultConfig,\n} from \"./config\";\n",
+ "/**\n * Configuration utilities\n * \n * Provides functions for loading, saving, and managing RAGgrep configuration.\n */\n\nimport * as path from 'path';\nimport * as fs from 'fs/promises';\nimport type { Config, ModuleConfig } from '../domain/entities';\nimport { createDefaultConfig } from '../domain/entities';\nimport { EmbeddingConfig, EmbeddingModelName, EMBEDDING_MODELS } from './embeddings';\n\n/** Default configuration instance */\nexport const DEFAULT_CONFIG: Config = createDefaultConfig();\n\n/**\n * Get the root .raggrep directory path\n */\nexport function getRaggrepDir(rootDir: string, config: Config = DEFAULT_CONFIG): string {\n return path.join(rootDir, config.indexDir);\n}\n\n/**\n * Get the index data directory for a specific module\n */\nexport function getModuleIndexPath(rootDir: string, moduleId: string, config: Config = DEFAULT_CONFIG): string {\n return path.join(rootDir, config.indexDir, 'index', moduleId);\n}\n\n/**\n * Get the manifest path for a specific module\n */\nexport function getModuleManifestPath(rootDir: string, moduleId: string, config: Config = DEFAULT_CONFIG): string {\n return path.join(rootDir, config.indexDir, 'index', moduleId, 'manifest.json');\n}\n\n/**\n * Get the global manifest path\n */\nexport function getGlobalManifestPath(rootDir: string, config: Config = DEFAULT_CONFIG): string {\n return path.join(rootDir, config.indexDir, 'manifest.json');\n}\n\n/**\n * Get the config file path\n */\nexport function getConfigPath(rootDir: string, config: Config = DEFAULT_CONFIG): string {\n return path.join(rootDir, config.indexDir, 'config.json');\n}\n\n/**\n * Load config from file or return default\n */\nexport async function loadConfig(rootDir: string): Promise<Config> {\n const configPath = getConfigPath(rootDir, DEFAULT_CONFIG);\n \n try {\n const content = await fs.readFile(configPath, 'utf-8');\n const savedConfig = JSON.parse(content) as Partial<Config>;\n return { ...DEFAULT_CONFIG, ...savedConfig };\n } catch {\n return DEFAULT_CONFIG;\n }\n}\n\n/**\n * Save config to file\n */\nexport async function saveConfig(rootDir: string, config: Config): Promise<void> {\n const configPath = getConfigPath(rootDir, config);\n await fs.mkdir(path.dirname(configPath), { recursive: true });\n await fs.writeFile(configPath, JSON.stringify(config, null, 2));\n}\n\n/**\n * Get module config by ID\n */\nexport function getModuleConfig(config: Config, moduleId: string): ModuleConfig | undefined {\n return config.modules.find(m => m.id === moduleId);\n}\n\n/**\n * Extract embedding config from module options\n */\nexport function getEmbeddingConfigFromModule(moduleConfig: ModuleConfig): EmbeddingConfig {\n const options = moduleConfig.options || {};\n const modelName = (options.embeddingModel as string) || 'all-MiniLM-L6-v2';\n \n // Validate model name\n if (!(modelName in EMBEDDING_MODELS)) {\n console.warn(`Unknown embedding model: ${modelName}, falling back to all-MiniLM-L6-v2`);\n return { model: 'all-MiniLM-L6-v2' };\n }\n \n return {\n model: modelName as EmbeddingModelName,\n showProgress: options.showProgress !== false,\n };\n}\n",
+ "/**\n * BM25 (Best Matching 25) Implementation\n * \n * A ranking function for keyword-based search. This is a pure domain service\n * with no external dependencies - just algorithms operating on data.\n * \n * BM25 estimates relevance of documents to a search query using term frequency\n * and inverse document frequency with length normalization.\n */\n\n/**\n * BM25 parameters\n * - k1: Term frequency saturation (typical: 1.2-2.0)\n * - b: Length normalization (typical: 0.75)\n */\nconst BM25_K1 = 1.5;\nconst BM25_B = 0.75;\n\n/**\n * Tokenize text into normalized terms.\n * \n * @param text - Text to tokenize\n * @returns Array of lowercase tokens\n */\nexport function tokenize(text: string): string[] {\n return text\n .toLowerCase()\n .replace(/[^\\w\\s]/g, ' ')\n .split(/\\s+/)\n .filter(token => token.length > 1);\n}\n\n/**\n * Calculate term frequency (TF) for a term in a document.\n */\nfunction termFrequency(term: string, tokens: string[]): number {\n return tokens.filter(t => t === term).length;\n}\n\n/**\n * Document data for BM25 scoring.\n */\nexport interface BM25Document {\n id: string;\n content: string;\n /** Pre-computed tokens (optional, computed from content if not provided) */\n tokens?: string[];\n}\n\n/**\n * BM25 search result.\n */\nexport interface BM25Result {\n id: string;\n score: number;\n}\n\n/**\n * BM25 search index.\n * \n * This is a pure in-memory data structure with no I/O operations.\n * Build the index by adding documents, then search against it.\n */\nexport class BM25Index {\n private documents: Map<string, { content: string; tokens: string[] }> = new Map();\n private avgDocLength: number = 0;\n private documentFrequencies: Map<string, number> = new Map();\n private totalDocs: number = 0;\n\n /**\n * Add documents to the index.\n * \n * @param documents - Array of documents to index\n */\n addDocuments(documents: BM25Document[]): void {\n let totalLength = this.avgDocLength * this.totalDocs;\n\n for (const doc of documents) {\n const tokens = doc.tokens ?? tokenize(doc.content);\n this.documents.set(doc.id, { content: doc.content, tokens });\n totalLength += tokens.length;\n this.totalDocs++;\n\n // Update document frequencies\n const uniqueTerms = new Set(tokens);\n for (const term of uniqueTerms) {\n const count = this.documentFrequencies.get(term) || 0;\n this.documentFrequencies.set(term, count + 1);\n }\n }\n\n this.avgDocLength = this.totalDocs > 0 ? 
totalLength / this.totalDocs : 0;\n }\n\n /**\n * Calculate IDF (Inverse Document Frequency) for a term.\n */\n private idf(term: string): number {\n const docFreq = this.documentFrequencies.get(term) || 0;\n if (docFreq === 0) return 0;\n \n // Standard IDF formula with smoothing\n return Math.log(1 + (this.totalDocs - docFreq + 0.5) / (docFreq + 0.5));\n }\n\n /**\n * Calculate BM25 score for a document given query terms.\n */\n private score(tokens: string[], queryTerms: string[]): number {\n const docLength = tokens.length;\n let score = 0;\n\n for (const term of queryTerms) {\n const tf = termFrequency(term, tokens);\n if (tf === 0) continue;\n\n const idfScore = this.idf(term);\n \n // BM25 formula\n const numerator = tf * (BM25_K1 + 1);\n const denominator = tf + BM25_K1 * (1 - BM25_B + BM25_B * (docLength / this.avgDocLength));\n \n score += idfScore * (numerator / denominator);\n }\n\n return score;\n }\n\n /**\n * Search the index with a query.\n * \n * @param query - Search query\n * @param topK - Maximum number of results to return\n * @returns Sorted array of results (highest score first)\n */\n search(query: string, topK: number = 10): BM25Result[] {\n const queryTerms = tokenize(query);\n if (queryTerms.length === 0) return [];\n\n const results: BM25Result[] = [];\n\n for (const [id, { tokens }] of this.documents) {\n const score = this.score(tokens, queryTerms);\n if (score > 0) {\n results.push({ id, score });\n }\n }\n\n results.sort((a, b) => b.score - a.score);\n return results.slice(0, topK);\n }\n\n /**\n * Get the number of indexed documents.\n */\n get size(): number {\n return this.totalDocs;\n }\n\n /**\n * Clear the index.\n */\n clear(): void {\n this.documents.clear();\n this.documentFrequencies.clear();\n this.avgDocLength = 0;\n this.totalDocs = 0;\n }\n}\n\n/**\n * Normalize a raw score to 0-1 range using sigmoid function.\n * \n * @param score - Raw score\n * @param midpoint - Score at which output is 0.5\n * @returns Normalized score between 0 and 1\n */\nexport function normalizeScore(score: number, midpoint: number = 5): number {\n return 1 / (1 + Math.exp(-score / midpoint + 1));\n}\n\n",
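A minimal sketch of the BM25 index above on three tiny documents; the import path is illustrative:

    import { BM25Index } from './domain/services/bm25';

    const index = new BM25Index();
    index.addDocuments([
      { id: 'a', content: 'parse typescript source into chunks' },
      { id: 'b', content: 'load configuration from disk' },
      { id: 'c', content: 'parse json configuration' },
    ]);
    // Per-term score: idf(term) * tf * (k1 + 1) / (tf + k1 * (1 - b + b * docLen / avgDocLen))
    index.search('parse configuration', 2); // 'c' matches both terms, so it ranks first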
+ "/**\n * TypeScript/JavaScript Code Parser\n * \n * Uses the TypeScript Compiler API for accurate AST-based parsing.\n * Extracts semantic chunks: functions, classes, interfaces, types, enums.\n */\n\nimport * as ts from 'typescript';\n\n/**\n * Chunk types that can be extracted from code\n */\nexport type ChunkType = \n | 'function' \n | 'class' \n | 'interface' \n | 'type' \n | 'enum'\n | 'variable'\n | 'block' \n | 'file';\n\n/**\n * Represents a parsed chunk of code with location information\n */\nexport interface ParsedChunk {\n /** The source code content */\n content: string;\n /** 1-based start line number */\n startLine: number;\n /** 1-based end line number */\n endLine: number;\n /** The type of code construct */\n type: ChunkType;\n /** Name of the construct (function name, class name, etc.) */\n name?: string;\n /** Whether this is exported */\n isExported?: boolean;\n /** JSDoc comment if present */\n jsDoc?: string;\n}\n\n/**\n * Parse code into semantic chunks based on file extension\n * @param content - The source code content\n * @param filepath - The file path (used to determine language)\n * @returns Array of parsed chunks\n */\nexport function parseCode(content: string, filepath: string): ParsedChunk[] {\n const ext = filepath.split('.').pop()?.toLowerCase();\n\n // For TypeScript/JavaScript files, use the TypeScript parser\n if (['ts', 'tsx', 'js', 'jsx', 'mts', 'cts', 'mjs', 'cjs'].includes(ext || '')) {\n return parseTypeScript(content, filepath);\n }\n\n // For other files, use simple line-based chunking\n return parseGenericCode(content);\n}\n\n/**\n * Parse TypeScript/JavaScript code using the TypeScript Compiler API\n * @param content - The source code content\n * @param filepath - The file path\n * @returns Array of parsed chunks\n */\nfunction parseTypeScript(content: string, filepath: string): ParsedChunk[] {\n const chunks: ParsedChunk[] = [];\n const lines = content.split('\\n');\n\n // Create a source file from the content\n const sourceFile = ts.createSourceFile(\n filepath,\n content,\n ts.ScriptTarget.Latest,\n true, // setParentNodes\n filepath.endsWith('.tsx') || filepath.endsWith('.jsx') \n ? ts.ScriptKind.TSX \n : ts.ScriptKind.TS\n );\n\n /**\n * Get line numbers for a node (1-based)\n */\n function getLineNumbers(node: ts.Node): { startLine: number; endLine: number } {\n const start = sourceFile.getLineAndCharacterOfPosition(node.getStart());\n const end = sourceFile.getLineAndCharacterOfPosition(node.getEnd());\n return {\n startLine: start.line + 1,\n endLine: end.line + 1,\n };\n }\n\n /**\n * Get the source text for a node\n */\n function getNodeText(node: ts.Node): string {\n return node.getText(sourceFile);\n }\n\n /**\n * Check if a node has export modifier\n */\n function isExported(node: ts.Node): boolean {\n if (!ts.canHaveModifiers(node)) return false;\n const modifiers = ts.getModifiers(node);\n return modifiers?.some(m => m.kind === ts.SyntaxKind.ExportKeyword) ?? 
false;\n }\n\n /**\n * Get JSDoc comment for a node\n */\n function getJSDoc(node: ts.Node): string | undefined {\n const jsDocNodes = ts.getJSDocCommentsAndTags(node);\n if (jsDocNodes.length === 0) return undefined;\n \n return jsDocNodes\n .map(doc => doc.getText(sourceFile))\n .join('\\n');\n }\n\n /**\n * Get function name from various function declarations\n */\n function getFunctionName(node: ts.Node): string | undefined {\n if (ts.isFunctionDeclaration(node) && node.name) {\n return node.name.text;\n }\n if (ts.isMethodDeclaration(node) && ts.isIdentifier(node.name)) {\n return node.name.text;\n }\n if (ts.isVariableDeclaration(node) && ts.isIdentifier(node.name)) {\n return node.name.text;\n }\n return undefined;\n }\n\n /**\n * Visit nodes recursively to extract chunks\n */\n function visit(node: ts.Node): void {\n const { startLine, endLine } = getLineNumbers(node);\n\n // Function declarations (including async)\n if (ts.isFunctionDeclaration(node) && node.name) {\n chunks.push({\n content: getNodeText(node),\n startLine,\n endLine,\n type: 'function',\n name: node.name.text,\n isExported: isExported(node),\n jsDoc: getJSDoc(node),\n });\n return; // Don't recurse into function body\n }\n\n // Arrow functions and function expressions assigned to variables\n if (ts.isVariableStatement(node)) {\n for (const decl of node.declarationList.declarations) {\n if (decl.initializer && \n (ts.isArrowFunction(decl.initializer) || ts.isFunctionExpression(decl.initializer))) {\n const name = ts.isIdentifier(decl.name) ? decl.name.text : undefined;\n chunks.push({\n content: getNodeText(node),\n startLine,\n endLine,\n type: 'function',\n name,\n isExported: isExported(node),\n jsDoc: getJSDoc(node),\n });\n return;\n }\n }\n }\n\n // Class declarations\n if (ts.isClassDeclaration(node) && node.name) {\n chunks.push({\n content: getNodeText(node),\n startLine,\n endLine,\n type: 'class',\n name: node.name.text,\n isExported: isExported(node),\n jsDoc: getJSDoc(node),\n });\n return; // Don't recurse into class body (it's included in the chunk)\n }\n\n // Interface declarations\n if (ts.isInterfaceDeclaration(node)) {\n chunks.push({\n content: getNodeText(node),\n startLine,\n endLine,\n type: 'interface',\n name: node.name.text,\n isExported: isExported(node),\n jsDoc: getJSDoc(node),\n });\n return;\n }\n\n // Type alias declarations\n if (ts.isTypeAliasDeclaration(node)) {\n chunks.push({\n content: getNodeText(node),\n startLine,\n endLine,\n type: 'type',\n name: node.name.text,\n isExported: isExported(node),\n jsDoc: getJSDoc(node),\n });\n return;\n }\n\n // Enum declarations\n if (ts.isEnumDeclaration(node)) {\n chunks.push({\n content: getNodeText(node),\n startLine,\n endLine,\n type: 'enum',\n name: node.name.text,\n isExported: isExported(node),\n jsDoc: getJSDoc(node),\n });\n return;\n }\n\n // Exported variable declarations (constants)\n if (ts.isVariableStatement(node) && isExported(node)) {\n for (const decl of node.declarationList.declarations) {\n // Skip if it's a function (already handled above)\n if (decl.initializer && \n (ts.isArrowFunction(decl.initializer) || ts.isFunctionExpression(decl.initializer))) {\n continue;\n }\n const name = ts.isIdentifier(decl.name) ? 
decl.name.text : undefined;\n chunks.push({\n content: getNodeText(node),\n startLine,\n endLine,\n type: 'variable',\n name,\n isExported: true,\n jsDoc: getJSDoc(node),\n });\n }\n return;\n }\n\n // Recurse into children\n ts.forEachChild(node, visit);\n }\n\n // Start visiting from the root\n ts.forEachChild(sourceFile, visit);\n\n // If no semantic chunks found, fall back to block-based chunking\n if (chunks.length === 0) {\n return parseGenericCode(content);\n }\n\n return chunks;\n}\n\n/**\n * Parse generic code using line-based chunking\n * Used for non-TypeScript/JavaScript files or as fallback\n * @param content - The source code content\n * @returns Array of parsed chunks\n */\nfunction parseGenericCode(content: string): ParsedChunk[] {\n const chunks: ParsedChunk[] = [];\n const lines = content.split('\\n');\n const CHUNK_SIZE = 30; // lines per chunk\n const OVERLAP = 5; // overlap between chunks\n\n // If file is small, treat as single chunk\n if (lines.length <= CHUNK_SIZE) {\n return [\n {\n content: content,\n startLine: 1,\n endLine: lines.length,\n type: 'file',\n },\n ];\n }\n\n // Split into overlapping chunks\n for (let i = 0; i < lines.length; i += CHUNK_SIZE - OVERLAP) {\n const endIdx = Math.min(i + CHUNK_SIZE, lines.length);\n chunks.push({\n content: lines.slice(i, endIdx).join('\\n'),\n startLine: i + 1,\n endLine: endIdx,\n type: 'block',\n });\n\n if (endIdx >= lines.length) break;\n }\n\n return chunks;\n}\n\n/**\n * Generate a unique chunk ID from filepath and line numbers\n * @param filepath - The source file path\n * @param startLine - Start line number\n * @param endLine - End line number\n * @returns Unique chunk identifier\n */\nexport function generateChunkId(filepath: string, startLine: number, endLine: number): string {\n const safePath = filepath.replace(/[/\\\\]/g, '-').replace(/\\./g, '_');\n return `${safePath}-${startLine}-${endLine}`;\n}\n",
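A sketch of the parser above on a two-line snippet; the import path is illustrative:

    import { parseCode } from './modules/semantic/parseCode';

    const src = '/** Adds two numbers. */\nexport function add(a: number, b: number) { return a + b; }';
    const chunks = parseCode(src, 'math.ts');
    // One chunk: { type: 'function', name: 'add', isExported: true, startLine: 2, endLine: 2, jsDoc: '...' }
    // (getStart() skips leading JSDoc trivia, so startLine is 2, not 1.)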
+ "/**\n * Keyword Extraction Service\n * \n * Pure domain service for extracting keywords from code.\n * No external dependencies - operates only on string data.\n */\n\n/**\n * Common programming keywords to exclude from keyword extraction.\n * These appear in almost every code file and don't add search value.\n */\nexport const COMMON_KEYWORDS = new Set([\n // JavaScript/TypeScript\n 'const', 'let', 'var', 'function', 'class', 'interface', 'type', 'enum',\n 'export', 'import', 'from', 'return', 'async', 'await', 'new', 'this',\n 'true', 'false', 'null', 'undefined', 'if', 'else', 'for', 'while',\n 'switch', 'case', 'break', 'continue', 'try', 'catch', 'finally',\n 'throw', 'typeof', 'instanceof', 'void', 'delete', 'in', 'of',\n 'string', 'number', 'boolean', 'any', 'unknown', 'never', 'object',\n 'public', 'private', 'protected', 'static', 'readonly', 'abstract',\n 'implements', 'extends', 'super', 'get', 'set', 'constructor',\n // Common words\n 'the', 'and', 'for', 'not', 'with', 'are', 'was', 'has', 'have',\n]);\n\n/**\n * Extract keywords from code content and optional name.\n * \n * @param content - Code content to extract keywords from\n * @param name - Optional name (function name, class name, etc.)\n * @param maxKeywords - Maximum keywords to return (default: 50)\n * @returns Array of unique lowercase keywords\n */\nexport function extractKeywords(\n content: string, \n name?: string,\n maxKeywords: number = 50\n): string[] {\n const keywords = new Set<string>();\n \n // Add the name if present\n if (name) {\n keywords.add(name.toLowerCase());\n \n // Also add camelCase parts (e.g., \"getUserById\" → [\"get\", \"user\", \"by\", \"id\"])\n const parts = name.split(/(?=[A-Z])/).map(p => p.toLowerCase());\n parts.forEach(p => p.length > 2 && keywords.add(p));\n }\n \n // Extract identifiers from content\n const identifierRegex = /\\b([a-zA-Z_][a-zA-Z0-9_]{2,})\\b/g;\n let match;\n \n while ((match = identifierRegex.exec(content)) !== null) {\n const word = match[1].toLowerCase();\n \n // Skip common keywords and very short words\n if (!COMMON_KEYWORDS.has(word) && word.length > 2) {\n keywords.add(word);\n }\n }\n \n return Array.from(keywords).slice(0, maxKeywords);\n}\n\n/**\n * Extract keywords from a file path.\n * \n * @param filepath - File path to extract keywords from\n * @returns Array of keywords from path segments\n */\nexport function extractPathKeywords(filepath: string): string[] {\n return filepath\n .split(/[/\\\\.]/)\n .filter(p => p.length > 2 && !COMMON_KEYWORDS.has(p.toLowerCase()))\n .map(p => p.toLowerCase());\n}\n\n",
+ "/**\n * Symbolic Index System\n * \n * Provides fast keyword-based filtering using BM25 before semantic search.\n * \n * Structure:\n * .raggrep/index/<module>/symbolic/\n * ├── _meta.json (BM25 statistics)\n * └── <filepath>.json (per-file summaries)\n * \n * This approach scales well because:\n * - Each file summary is stored separately\n * - BM25 metadata is small and loads quickly\n * - Summaries are loaded on-demand during search\n */\n\nimport * as fs from 'fs/promises';\nimport * as path from 'path';\nimport { BM25Index } from '../domain/services/bm25';\nimport { extractKeywords, extractPathKeywords } from '../domain/services/keywords';\nimport type { FileSummary, SymbolicIndexMeta } from '../domain/entities';\n\n// Re-export for backwards compatibility\nexport type { FileSummary, SymbolicIndexMeta } from '../domain/entities';\nexport { extractKeywords } from '../domain/services/keywords';\n\n/** @deprecated Use SymbolicIndexMeta */\nexport type Tier1Manifest = SymbolicIndexMeta & { files: Record<string, FileSummary> };\n\n/**\n * Symbolic Index Manager\n * \n * Manages the keyword-based index for fast file filtering.\n */\nexport class SymbolicIndex {\n private meta: SymbolicIndexMeta | null = null;\n private fileSummaries: Map<string, FileSummary> = new Map();\n private bm25Index: BM25Index | null = null;\n private symbolicPath: string;\n private moduleId: string;\n\n constructor(indexDir: string, moduleId: string) {\n this.symbolicPath = path.join(indexDir, 'index', moduleId, 'symbolic');\n this.moduleId = moduleId;\n }\n\n /**\n * Initialize or load the symbolic index\n */\n async initialize(): Promise<void> {\n try {\n await this.load();\n } catch {\n // Create empty metadata\n this.meta = {\n version: '1.0.0',\n lastUpdated: new Date().toISOString(),\n moduleId: this.moduleId,\n fileCount: 0,\n bm25Data: {\n avgDocLength: 0,\n documentFrequencies: {},\n totalDocs: 0,\n },\n };\n this.bm25Index = new BM25Index();\n }\n }\n\n /**\n * Add or update a file summary\n */\n addFile(summary: FileSummary): void {\n this.fileSummaries.set(summary.filepath, summary);\n }\n\n /**\n * Remove a file from the index\n */\n removeFile(filepath: string): boolean {\n return this.fileSummaries.delete(filepath);\n }\n\n /**\n * Build BM25 index from file summaries\n */\n buildBM25Index(): void {\n this.bm25Index = new BM25Index();\n \n // Add each file's keywords as a document\n for (const [filepath, summary] of this.fileSummaries) {\n const content = [\n ...summary.keywords,\n ...summary.exports,\n ...extractPathKeywords(filepath),\n ].join(' ');\n \n this.bm25Index.addDocuments([{ id: filepath, content }]);\n }\n \n // Update metadata\n if (this.meta) {\n this.meta.fileCount = this.fileSummaries.size;\n this.meta.bm25Data.totalDocs = this.fileSummaries.size;\n }\n }\n\n /**\n * Find candidate files using BM25 keyword search\n */\n findCandidates(query: string, maxCandidates: number = 20): string[] {\n if (!this.bm25Index) {\n return Array.from(this.fileSummaries.keys());\n }\n \n const results = this.bm25Index.search(query, maxCandidates);\n return results.map(r => r.id);\n }\n\n /**\n * Get all file paths in the index\n */\n getAllFiles(): string[] {\n return Array.from(this.fileSummaries.keys());\n }\n\n /**\n * Get summary for a specific file\n */\n getFileSummary(filepath: string): FileSummary | undefined {\n return this.fileSummaries.get(filepath);\n }\n\n /**\n * Save the index to disk (per-file structure)\n */\n async save(): Promise<void> {\n if (!this.meta) throw new Error('Index not 
initialized');\n \n // Update metadata\n this.meta.lastUpdated = new Date().toISOString();\n this.meta.fileCount = this.fileSummaries.size;\n \n // Ensure symbolic directory exists\n await fs.mkdir(this.symbolicPath, { recursive: true });\n \n // Save metadata\n const metaPath = path.join(this.symbolicPath, '_meta.json');\n await fs.writeFile(metaPath, JSON.stringify(this.meta, null, 2));\n \n // Save each file summary\n for (const [filepath, summary] of this.fileSummaries) {\n const summaryPath = this.getFileSummaryPath(filepath);\n await fs.mkdir(path.dirname(summaryPath), { recursive: true });\n await fs.writeFile(summaryPath, JSON.stringify(summary, null, 2));\n }\n }\n\n /**\n * Load the index from disk\n */\n async load(): Promise<void> {\n // Load metadata\n const metaPath = path.join(this.symbolicPath, '_meta.json');\n const metaContent = await fs.readFile(metaPath, 'utf-8');\n this.meta = JSON.parse(metaContent);\n \n // Load all file summaries by walking the symbolic directory\n this.fileSummaries.clear();\n await this.loadFileSummariesRecursive(this.symbolicPath);\n \n // Rebuild BM25 index\n this.buildBM25Index();\n }\n\n /**\n * Recursively load file summaries from the symbolic directory\n */\n private async loadFileSummariesRecursive(dir: string): Promise<void> {\n try {\n const entries = await fs.readdir(dir, { withFileTypes: true });\n \n for (const entry of entries) {\n const fullPath = path.join(dir, entry.name);\n \n if (entry.isDirectory()) {\n await this.loadFileSummariesRecursive(fullPath);\n } else if (entry.name.endsWith('.json') && entry.name !== '_meta.json') {\n try {\n const content = await fs.readFile(fullPath, 'utf-8');\n const summary = JSON.parse(content) as FileSummary;\n if (summary.filepath) {\n this.fileSummaries.set(summary.filepath, summary);\n }\n } catch {\n // Skip invalid files\n }\n }\n }\n } catch {\n // Directory doesn't exist yet\n }\n }\n\n /**\n * Get the path for a file summary\n */\n private getFileSummaryPath(filepath: string): string {\n const jsonPath = filepath.replace(/\\.[^.]+$/, '.json');\n return path.join(this.symbolicPath, jsonPath);\n }\n\n /**\n * Delete a file summary from disk\n */\n async deleteFileSummary(filepath: string): Promise<void> {\n try {\n await fs.unlink(this.getFileSummaryPath(filepath));\n } catch {\n // Ignore if file doesn't exist\n }\n this.fileSummaries.delete(filepath);\n }\n\n /**\n * Check if the index exists on disk\n */\n async exists(): Promise<boolean> {\n try {\n const metaPath = path.join(this.symbolicPath, '_meta.json');\n await fs.access(metaPath);\n return true;\n } catch {\n return false;\n }\n }\n\n /**\n * Get the number of indexed files\n */\n get size(): number {\n return this.fileSummaries.size;\n }\n\n /**\n * Clear the index\n */\n clear(): void {\n this.fileSummaries.clear();\n if (this.meta) {\n this.meta.fileCount = 0;\n this.meta.bm25Data = {\n avgDocLength: 0,\n documentFrequencies: {},\n totalDocs: 0,\n };\n }\n this.bm25Index = new BM25Index();\n }\n}\n\n// ============================================================================\n// Backwards compatibility aliases\n// ============================================================================\n\n/** @deprecated Use SymbolicIndex instead */\nexport const Tier1Index = SymbolicIndex;\n\n/** @deprecated Use SymbolicIndex instead */\nexport function getTier1Path(rootDir: string, moduleId: string, indexDir: string = '.raggrep'): string {\n return path.join(rootDir, indexDir, 'index', moduleId, 'tier1.json');\n}\n\nexport function 
getSymbolicPath(rootDir: string, moduleId: string, indexDir: string = '.raggrep'): string {\n return path.join(rootDir, indexDir, 'index', moduleId, 'symbolic');\n}\n",
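A sketch of building, querying, and persisting the symbolic index above; '/repo/.raggrep' and the file summary values are hypothetical:

    import { SymbolicIndex } from './utils/tieredIndex';

    async function demo() {
      const idx = new SymbolicIndex('/repo/.raggrep', 'semantic');
      await idx.initialize();
      idx.addFile({
        filepath: 'src/auth/login.ts',
        chunkCount: 3,
        chunkTypes: ['function'],
        keywords: ['login', 'session', 'token'],
        exports: ['login'],
        lastModified: new Date().toISOString(),
      });
      idx.buildBM25Index();
      idx.findCandidates('session token'); // -> ['src/auth/login.ts']
      await idx.save(); // writes _meta.json plus one summary JSON per file
    }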
+ "/**\n * Semantic Index Module\n * \n * Uses local text embeddings for natural language code search.\n * Implements a tiered index system:\n * - Tier 1: Lightweight file summaries with keywords for fast filtering\n * - Tier 2: Full chunk embeddings for semantic similarity\n * \n * This approach keeps the filesystem-based design while enabling\n * efficient search by only loading relevant files.\n */\n\nimport * as path from 'path';\nimport {\n IndexModule,\n IndexContext,\n SearchContext,\n SearchOptions,\n FileIndex,\n SearchResult,\n Chunk,\n ModuleConfig,\n ChunkType,\n} from '../../types';\nimport {\n getEmbeddings,\n getEmbedding,\n cosineSimilarity,\n configureEmbeddings,\n EmbeddingConfig,\n getEmbeddingConfig,\n} from '../../utils/embeddings';\nimport { BM25Index, normalizeScore } from '../../utils/bm25';\nimport { getEmbeddingConfigFromModule, getRaggrepDir } from '../../utils/config';\nimport { parseCode, generateChunkId } from './parseCode';\nimport { SymbolicIndex, FileSummary, extractKeywords } from '../../utils/tieredIndex';\n\n/** Default minimum similarity score for search results */\nexport const DEFAULT_MIN_SCORE = 0.15;\n\n/** Default number of results to return */\nexport const DEFAULT_TOP_K = 10;\n\n/** Weight for semantic similarity in hybrid scoring (0-1) */\nconst SEMANTIC_WEIGHT = 0.7;\n\n/** Weight for BM25 keyword matching in hybrid scoring (0-1) */\nconst BM25_WEIGHT = 0.3;\n\n/**\n * Module-specific data stored alongside file index\n */\nexport interface SemanticModuleData {\n embeddings: number[][];\n /** Store the model used for these embeddings for compatibility checking */\n embeddingModel: string;\n [key: string]: unknown; // Index signature for compatibility with Record<string, unknown>\n}\n\n/** Number of candidate files to retrieve from Tier 1 before loading Tier 2 */\nconst TIER1_CANDIDATE_MULTIPLIER = 3;\n\nexport class SemanticModule implements IndexModule {\n readonly id = 'semantic';\n readonly name = 'Semantic Search';\n readonly description = 'Natural language code search using local text embeddings';\n readonly version = '1.0.0';\n\n private embeddingConfig: EmbeddingConfig | null = null;\n private symbolicIndex: SymbolicIndex | null = null;\n private pendingSummaries: Map<string, FileSummary> = new Map();\n private rootDir: string = '';\n\n async initialize(config: ModuleConfig): Promise<void> {\n // Extract embedding config from module options\n this.embeddingConfig = getEmbeddingConfigFromModule(config);\n \n // Configure the embedding provider\n configureEmbeddings(this.embeddingConfig);\n \n // Clear pending summaries for fresh indexing\n this.pendingSummaries.clear();\n }\n\n async indexFile(\n filepath: string,\n content: string,\n ctx: IndexContext\n ): Promise<FileIndex | null> {\n // Store rootDir for finalize\n this.rootDir = ctx.rootDir;\n \n // Parse code into chunks\n const parsedChunks = parseCode(content, filepath);\n\n if (parsedChunks.length === 0) {\n return null;\n }\n\n // Generate embeddings for all chunks\n const chunkContents = parsedChunks.map((c) => c.content);\n const embeddings = await getEmbeddings(chunkContents);\n\n // Create chunks with all metadata\n const chunks: Chunk[] = parsedChunks.map((pc) => ({\n id: generateChunkId(filepath, pc.startLine, pc.endLine),\n content: pc.content,\n startLine: pc.startLine,\n endLine: pc.endLine,\n type: pc.type,\n name: pc.name,\n isExported: pc.isExported,\n jsDoc: pc.jsDoc,\n }));\n\n // Extract references (imports)\n const references = this.extractReferences(content, 
filepath);\n\n const stats = await ctx.getFileStats(filepath);\n const currentConfig = getEmbeddingConfig();\n\n const moduleData: SemanticModuleData = {\n embeddings,\n embeddingModel: currentConfig.model,\n };\n\n // Build Tier 1 summary for this file\n const chunkTypes = [...new Set(parsedChunks.map(pc => pc.type))] as ChunkType[];\n const exports = parsedChunks\n .filter(pc => pc.isExported && pc.name)\n .map(pc => pc.name!);\n \n // Extract keywords from all chunks\n const allKeywords = new Set<string>();\n for (const pc of parsedChunks) {\n const keywords = extractKeywords(pc.content, pc.name);\n keywords.forEach(k => allKeywords.add(k));\n }\n\n const fileSummary: FileSummary = {\n filepath,\n chunkCount: chunks.length,\n chunkTypes,\n keywords: Array.from(allKeywords),\n exports,\n lastModified: stats.lastModified,\n };\n \n // Store summary for finalize\n this.pendingSummaries.set(filepath, fileSummary);\n\n return {\n filepath,\n lastModified: stats.lastModified,\n chunks,\n moduleData,\n references,\n };\n }\n\n /**\n * Finalize indexing by building and saving the symbolic index\n */\n async finalize(ctx: IndexContext): Promise<void> {\n const indexDir = getRaggrepDir(ctx.rootDir, ctx.config);\n \n // Initialize symbolic index\n this.symbolicIndex = new SymbolicIndex(indexDir, this.id);\n await this.symbolicIndex.initialize();\n \n // Add all pending summaries\n for (const [filepath, summary] of this.pendingSummaries) {\n this.symbolicIndex.addFile(summary);\n }\n \n // Build BM25 index from summaries\n this.symbolicIndex.buildBM25Index();\n \n // Save to disk (creates symbolic/ folder with per-file summaries)\n await this.symbolicIndex.save();\n \n console.log(` Symbolic index built with ${this.pendingSummaries.size} file summaries`);\n \n // Clear pending summaries\n this.pendingSummaries.clear();\n }\n\n /**\n * Search the semantic index for chunks matching the query.\n * \n * Uses a tiered approach for efficient search:\n * 1. Tier 1: Use BM25 on file summaries to find candidate files\n * 2. 
Tier 2: Load only candidate files and compute semantic similarity\n * \n * @param query - Natural language search query\n * @param ctx - Search context with index access\n * @param options - Search options (topK, minScore, filePatterns)\n * @returns Array of search results sorted by relevance\n */\n async search(\n query: string,\n ctx: SearchContext,\n options: SearchOptions = {}\n ): Promise<SearchResult[]> {\n const { topK = DEFAULT_TOP_K, minScore = DEFAULT_MIN_SCORE, filePatterns } = options;\n\n // Load symbolic index for candidate filtering\n const indexDir = getRaggrepDir(ctx.rootDir, ctx.config);\n const symbolicIndex = new SymbolicIndex(indexDir, this.id);\n \n let candidateFiles: string[];\n \n try {\n await symbolicIndex.initialize();\n \n // Use BM25 keyword search on symbolic index to find candidate files\n const maxCandidates = topK * TIER1_CANDIDATE_MULTIPLIER;\n candidateFiles = symbolicIndex.findCandidates(query, maxCandidates);\n \n // If no candidates found via BM25, fall back to all files\n if (candidateFiles.length === 0) {\n candidateFiles = symbolicIndex.getAllFiles();\n }\n } catch {\n // Symbolic index doesn't exist, fall back to loading all files\n candidateFiles = await ctx.listIndexedFiles();\n }\n\n // Apply file pattern filter\n if (filePatterns && filePatterns.length > 0) {\n candidateFiles = candidateFiles.filter(filepath => {\n return filePatterns.some(pattern => {\n if (pattern.startsWith('*.')) {\n const ext = pattern.slice(1);\n return filepath.endsWith(ext);\n }\n return filepath.includes(pattern);\n });\n });\n }\n\n // Get query embedding for semantic search\n const queryEmbedding = await getEmbedding(query);\n\n // Tier 2: Load only candidate files and compute scores\n const bm25Index = new BM25Index();\n const allChunksData: Array<{\n filepath: string;\n chunk: Chunk;\n embedding: number[];\n }> = [];\n\n for (const filepath of candidateFiles) {\n const fileIndex = await ctx.loadFileIndex(filepath);\n if (!fileIndex) continue;\n\n const moduleData = fileIndex.moduleData as unknown as SemanticModuleData;\n if (!moduleData?.embeddings) continue;\n\n for (let i = 0; i < fileIndex.chunks.length; i++) {\n const chunk = fileIndex.chunks[i];\n const embedding = moduleData.embeddings[i];\n\n if (!embedding) continue;\n\n allChunksData.push({\n filepath: fileIndex.filepath,\n chunk,\n embedding,\n });\n\n // Add to BM25 index for chunk-level keyword matching\n bm25Index.addDocuments([{ id: chunk.id, content: chunk.content }]);\n }\n }\n\n // Perform BM25 search at chunk level\n const bm25Results = bm25Index.search(query, topK * 3);\n const bm25Scores = new Map<string, number>();\n \n for (const result of bm25Results) {\n bm25Scores.set(result.id, normalizeScore(result.score, 3));\n }\n\n // Calculate hybrid scores for all chunks\n const results: SearchResult[] = [];\n\n for (const { filepath, chunk, embedding } of allChunksData) {\n const semanticScore = cosineSimilarity(queryEmbedding, embedding);\n const bm25Score = bm25Scores.get(chunk.id) || 0;\n \n // Hybrid score: weighted combination of semantic and BM25\n const hybridScore = (SEMANTIC_WEIGHT * semanticScore) + (BM25_WEIGHT * bm25Score);\n\n if (hybridScore >= minScore || bm25Score > 0.3) {\n results.push({\n filepath,\n chunk,\n score: hybridScore,\n moduleId: this.id,\n context: {\n semanticScore,\n bm25Score,\n },\n });\n }\n }\n\n // Sort by score descending and take top K\n results.sort((a, b) => b.score - a.score);\n return results.slice(0, topK);\n }\n\n private extractReferences(content: 
string, filepath: string): string[] {\n const references: string[] = [];\n\n // Extract import statements\n const importRegex = /import\\s+.*?\\s+from\\s+['\"]([^'\"]+)['\"]/g;\n const requireRegex = /require\\s*\\(\\s*['\"]([^'\"]+)['\"]\\s*\\)/g;\n\n let match;\n while ((match = importRegex.exec(content)) !== null) {\n const importPath = match[1];\n if (importPath.startsWith('.')) {\n const dir = path.dirname(filepath);\n const resolved = path.normalize(path.join(dir, importPath));\n references.push(resolved);\n }\n }\n\n while ((match = requireRegex.exec(content)) !== null) {\n const importPath = match[1];\n if (importPath.startsWith('.')) {\n const dir = path.dirname(filepath);\n const resolved = path.normalize(path.join(dir, importPath));\n references.push(resolved);\n }\n }\n\n return references;\n }\n}\n",
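The hybrid score used by the module above, worked on example values (the two scores are made up for illustration):

    // hybridScore = SEMANTIC_WEIGHT * semanticScore + BM25_WEIGHT * bm25Score
    const semanticScore = 0.62; // cosine similarity (example value)
    const bm25Score = 0.40;     // normalized BM25 (example value)
    const hybridScore = 0.7 * semanticScore + 0.3 * bm25Score; // 0.554
    // A chunk is kept if hybridScore >= minScore (default 0.15) or bm25Score > 0.3.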
+ "// Module registry - manages available index modules\nimport { IndexModule, ModuleRegistry, Config } from '../types';\n\nclass ModuleRegistryImpl implements ModuleRegistry {\n private modules = new Map<string, IndexModule>();\n\n register(module: IndexModule): void {\n if (this.modules.has(module.id)) {\n console.warn(`Module '${module.id}' is already registered, overwriting...`);\n }\n this.modules.set(module.id, module);\n }\n\n get(id: string): IndexModule | undefined {\n return this.modules.get(id);\n }\n\n list(): IndexModule[] {\n return Array.from(this.modules.values());\n }\n\n getEnabled(config: Config): IndexModule[] {\n const enabledIds = new Set(\n config.modules\n .filter(m => m.enabled)\n .map(m => m.id)\n );\n \n return this.list().filter(m => enabledIds.has(m.id));\n }\n}\n\n// Global singleton registry\nexport const registry: ModuleRegistry = new ModuleRegistryImpl();\n\n// Auto-register built-in modules\nexport async function registerBuiltInModules(): Promise<void> {\n // Dynamic import to avoid circular dependencies\n const { SemanticModule } = await import('./semantic');\n registry.register(new SemanticModule());\n}\n",
+ "// Main indexer - coordinates modules for indexing files\nimport { glob } from 'glob';\nimport * as fs from 'fs/promises';\nimport * as path from 'path';\nimport {\n Config,\n IndexContext,\n IndexModule,\n ModuleManifest,\n GlobalManifest,\n FileIndex,\n} from '../types';\nimport {\n DEFAULT_CONFIG,\n loadConfig,\n getModuleIndexPath,\n getModuleManifestPath,\n getGlobalManifestPath,\n getModuleConfig,\n} from '../utils/config';\nimport { registry, registerBuiltInModules } from '../modules/registry';\nimport { EmbeddingModelName } from '../utils/embeddings';\n\nexport interface IndexResult {\n moduleId: string;\n indexed: number;\n skipped: number;\n errors: number;\n}\n\nexport interface IndexOptions {\n /** Override the embedding model (semantic module) */\n model?: EmbeddingModelName;\n /** Show detailed progress */\n verbose?: boolean;\n}\n\nexport interface CleanupResult {\n moduleId: string;\n /** Number of stale entries removed */\n removed: number;\n /** Number of valid entries kept */\n kept: number;\n}\n\n/**\n * Index a directory using all enabled modules\n */\nexport async function indexDirectory(rootDir: string, options: IndexOptions = {}): Promise<IndexResult[]> {\n const verbose = options.verbose ?? false;\n \n // Ensure absolute path\n rootDir = path.resolve(rootDir);\n \n console.log(`Indexing directory: ${rootDir}`);\n\n // Load config\n const config = await loadConfig(rootDir);\n\n // Register built-in modules\n await registerBuiltInModules();\n\n // Get enabled modules\n const enabledModules = registry.getEnabled(config);\n\n if (enabledModules.length === 0) {\n console.log('No modules enabled. Check your configuration.');\n return [];\n }\n\n console.log(`Enabled modules: ${enabledModules.map((m) => m.id).join(', ')}`);\n\n // Get all files matching extensions\n const files = await findFiles(rootDir, config);\n console.log(`Found ${files.length} files to index`);\n\n // Index with each module\n const results: IndexResult[] = [];\n\n for (const module of enabledModules) {\n console.log(`\\n[${module.name}] Starting indexing...`);\n\n // Initialize module if needed\n const moduleConfig = getModuleConfig(config, module.id);\n if (module.initialize && moduleConfig) {\n // Apply CLI overrides to module config\n const configWithOverrides = { ...moduleConfig };\n if (options.model && module.id === 'semantic') {\n configWithOverrides.options = {\n ...configWithOverrides.options,\n embeddingModel: options.model,\n };\n }\n await module.initialize(configWithOverrides);\n }\n\n const result = await indexWithModule(rootDir, files, module, config, verbose);\n results.push(result);\n\n // Call finalize to build secondary indexes (Tier 1, BM25, etc.)\n if (module.finalize) {\n console.log(`[${module.name}] Building secondary indexes...`);\n const ctx: IndexContext = {\n rootDir,\n config,\n readFile: async (filepath: string) => {\n const fullPath = path.isAbsolute(filepath) ? filepath : path.join(rootDir, filepath);\n return fs.readFile(fullPath, 'utf-8');\n },\n getFileStats: async (filepath: string) => {\n const fullPath = path.isAbsolute(filepath) ? 
filepath : path.join(rootDir, filepath);\n const stats = await fs.stat(fullPath);\n return { lastModified: stats.mtime.toISOString() };\n },\n };\n await module.finalize(ctx);\n }\n\n console.log(`[${module.name}] Complete: ${result.indexed} indexed, ${result.skipped} skipped, ${result.errors} errors`);\n }\n\n // Update global manifest\n await updateGlobalManifest(rootDir, enabledModules, config);\n\n return results;\n}\n\n/**\n * Index files with a specific module\n */\nasync function indexWithModule(\n rootDir: string,\n files: string[],\n module: IndexModule,\n config: Config,\n verbose: boolean\n): Promise<IndexResult> {\n const result: IndexResult = {\n moduleId: module.id,\n indexed: 0,\n skipped: 0,\n errors: 0,\n };\n\n // Load existing manifest for this module\n const manifest = await loadModuleManifest(rootDir, module.id, config);\n\n // Create index context\n const ctx: IndexContext = {\n rootDir,\n config,\n readFile: async (filepath: string) => {\n const fullPath = path.isAbsolute(filepath) ? filepath : path.join(rootDir, filepath);\n return fs.readFile(fullPath, 'utf-8');\n },\n getFileStats: async (filepath: string) => {\n const fullPath = path.isAbsolute(filepath) ? filepath : path.join(rootDir, filepath);\n const stats = await fs.stat(fullPath);\n return { lastModified: stats.mtime.toISOString() };\n },\n };\n\n // Process each file\n for (const filepath of files) {\n const relativePath = path.relative(rootDir, filepath);\n \n try {\n const stats = await fs.stat(filepath);\n const lastModified = stats.mtime.toISOString();\n\n // Check if file needs re-indexing\n const existingEntry = manifest.files[relativePath];\n if (existingEntry && existingEntry.lastModified === lastModified) {\n if (verbose) {\n console.log(` Skipped ${relativePath} (unchanged)`);\n }\n result.skipped++;\n continue;\n }\n\n // Read and index file\n const content = await fs.readFile(filepath, 'utf-8');\n \n if (verbose) {\n console.log(` Processing ${relativePath}...`);\n }\n const fileIndex = await module.indexFile(relativePath, content, ctx);\n\n if (!fileIndex) {\n if (verbose) {\n console.log(` Skipped ${relativePath} (no chunks)`);\n }\n result.skipped++;\n continue;\n }\n\n // Write index file\n await writeFileIndex(rootDir, module.id, relativePath, fileIndex, config);\n\n // Update manifest\n manifest.files[relativePath] = {\n lastModified,\n chunkCount: fileIndex.chunks.length,\n };\n\n result.indexed++;\n } catch (error) {\n console.error(` Error indexing ${relativePath}:`, error);\n result.errors++;\n }\n }\n\n // Update manifest\n manifest.lastUpdated = new Date().toISOString();\n await writeModuleManifest(rootDir, module.id, manifest, config);\n\n return result;\n}\n\nasync function findFiles(rootDir: string, config: Config): Promise<string[]> {\n const patterns = config.extensions.map((ext) => `**/*${ext}`);\n const ignorePatterns = config.ignorePaths.map((p) => `**/${p}/**`);\n\n const files: string[] = [];\n for (const pattern of patterns) {\n const matches = await glob(pattern, {\n cwd: rootDir,\n absolute: true,\n ignore: ignorePatterns,\n });\n files.push(...matches);\n }\n\n return [...new Set(files)]; // Remove duplicates\n}\n\nasync function loadModuleManifest(\n rootDir: string,\n moduleId: string,\n config: Config\n): Promise<ModuleManifest> {\n const manifestPath = getModuleManifestPath(rootDir, moduleId, config);\n\n try {\n const content = await fs.readFile(manifestPath, 'utf-8');\n return JSON.parse(content);\n } catch {\n return {\n moduleId,\n version: '1.0.0',\n 
lastUpdated: new Date().toISOString(),\n files: {},\n };\n }\n}\n\nasync function writeModuleManifest(\n rootDir: string,\n moduleId: string,\n manifest: ModuleManifest,\n config: Config\n): Promise<void> {\n const manifestPath = getModuleManifestPath(rootDir, moduleId, config);\n await fs.mkdir(path.dirname(manifestPath), { recursive: true });\n await fs.writeFile(manifestPath, JSON.stringify(manifest, null, 2));\n}\n\nasync function writeFileIndex(\n rootDir: string,\n moduleId: string,\n filepath: string,\n fileIndex: FileIndex,\n config: Config\n): Promise<void> {\n const indexPath = getModuleIndexPath(rootDir, moduleId, config);\n const indexFilePath = path.join(indexPath, filepath.replace(/\\.[^.]+$/, '.json'));\n\n await fs.mkdir(path.dirname(indexFilePath), { recursive: true });\n await fs.writeFile(indexFilePath, JSON.stringify(fileIndex, null, 2));\n}\n\nasync function updateGlobalManifest(\n rootDir: string,\n modules: IndexModule[],\n config: Config\n): Promise<void> {\n const manifestPath = getGlobalManifestPath(rootDir, config);\n\n const manifest: GlobalManifest = {\n version: config.version,\n lastUpdated: new Date().toISOString(),\n modules: modules.map((m) => m.id),\n };\n\n await fs.mkdir(path.dirname(manifestPath), { recursive: true });\n await fs.writeFile(manifestPath, JSON.stringify(manifest, null, 2));\n}\n\n/**\n * Clean up stale index entries for files that no longer exist\n * @param rootDir - Root directory of the project\n * @param options - Cleanup options\n * @returns Array of cleanup results per module\n */\nexport async function cleanupIndex(\n rootDir: string, \n options: { verbose?: boolean } = {}\n): Promise<CleanupResult[]> {\n const verbose = options.verbose ?? false;\n \n // Ensure absolute path\n rootDir = path.resolve(rootDir);\n \n console.log(`Cleaning up index in: ${rootDir}`);\n\n // Load config\n const config = await loadConfig(rootDir);\n\n // Register built-in modules\n await registerBuiltInModules();\n\n // Get enabled modules\n const enabledModules = registry.getEnabled(config);\n\n if (enabledModules.length === 0) {\n console.log('No modules enabled.');\n return [];\n }\n\n const results: CleanupResult[] = [];\n\n for (const module of enabledModules) {\n console.log(`\\n[${module.name}] Checking for stale entries...`);\n \n const result = await cleanupModuleIndex(rootDir, module.id, config, verbose);\n results.push(result);\n \n console.log(`[${module.name}] Removed ${result.removed} stale entries, kept ${result.kept} valid entries`);\n }\n\n return results;\n}\n\n/**\n * Clean up stale index entries for a specific module\n */\nasync function cleanupModuleIndex(\n rootDir: string,\n moduleId: string,\n config: Config,\n verbose: boolean\n): Promise<CleanupResult> {\n const result: CleanupResult = {\n moduleId,\n removed: 0,\n kept: 0,\n };\n\n // Load manifest\n const manifest = await loadModuleManifest(rootDir, moduleId, config);\n const indexPath = getModuleIndexPath(rootDir, moduleId, config);\n \n const filesToRemove: string[] = [];\n const updatedFiles: ModuleManifest['files'] = {};\n\n // Check each indexed file\n for (const [filepath, entry] of Object.entries(manifest.files)) {\n const fullPath = path.join(rootDir, filepath);\n \n try {\n await fs.access(fullPath);\n // File exists, keep it\n updatedFiles[filepath] = entry;\n result.kept++;\n } catch {\n // File doesn't exist, mark for removal\n filesToRemove.push(filepath);\n result.removed++;\n \n if (verbose) {\n console.log(` Removing stale entry: ${filepath}`);\n }\n }\n }\n\n 
// Remove stale index files\n for (const filepath of filesToRemove) {\n const indexFilePath = path.join(indexPath, filepath.replace(/\\.[^.]+$/, '.json'));\n try {\n await fs.unlink(indexFilePath);\n } catch {\n // Index file may not exist, that's okay\n }\n }\n\n // Update manifest with only valid files\n manifest.files = updatedFiles;\n manifest.lastUpdated = new Date().toISOString();\n await writeModuleManifest(rootDir, moduleId, manifest, config);\n\n // Clean up empty directories in the index\n await cleanupEmptyDirectories(indexPath);\n\n return result;\n}\n\n/**\n * Recursively remove empty directories\n */\nasync function cleanupEmptyDirectories(dir: string): Promise<boolean> {\n try {\n const entries = await fs.readdir(dir, { withFileTypes: true });\n \n // Process subdirectories first\n for (const entry of entries) {\n if (entry.isDirectory()) {\n const subDir = path.join(dir, entry.name);\n await cleanupEmptyDirectories(subDir);\n }\n }\n \n // Check if directory is now empty (re-read after potential subdirectory removal)\n const remainingEntries = await fs.readdir(dir);\n \n // Don't remove the root index directory or manifest files\n if (remainingEntries.length === 0) {\n await fs.rmdir(dir);\n return true;\n }\n \n return false;\n } catch {\n return false;\n }\n}\n",
16
+ "// Search module - queries across all enabled modules\nimport * as fs from 'fs/promises';\nimport * as path from 'path';\nimport {\n Config,\n SearchContext,\n SearchOptions,\n SearchResult,\n FileIndex,\n IndexModule,\n GlobalManifest,\n} from '../types';\nimport {\n loadConfig,\n getModuleIndexPath,\n getGlobalManifestPath,\n getModuleConfig,\n} from '../utils/config';\nimport { registry, registerBuiltInModules } from '../modules/registry';\n\n/**\n * Search across all enabled modules\n */\nexport async function search(\n rootDir: string,\n query: string,\n options: SearchOptions = {}\n): Promise<SearchResult[]> {\n // Ensure absolute path\n rootDir = path.resolve(rootDir);\n\n console.log(`Searching for: \"${query}\"`);\n\n // Load config\n const config = await loadConfig(rootDir);\n\n // Register built-in modules\n await registerBuiltInModules();\n\n // Check which modules have indexes\n const globalManifest = await loadGlobalManifest(rootDir, config);\n \n if (!globalManifest || globalManifest.modules.length === 0) {\n console.log('No index found. Run \"bun run index\" first.');\n return [];\n }\n\n // Get modules that are both enabled and have indexes\n const modulesToSearch: IndexModule[] = [];\n \n for (const moduleId of globalManifest.modules) {\n const module = registry.get(moduleId);\n const moduleConfig = getModuleConfig(config, moduleId);\n \n if (module && moduleConfig?.enabled) {\n // Initialize module if needed\n if (module.initialize) {\n await module.initialize(moduleConfig);\n }\n modulesToSearch.push(module);\n }\n }\n\n if (modulesToSearch.length === 0) {\n console.log('No enabled modules with indexes found.');\n return [];\n }\n\n // Search with each module and aggregate results\n const allResults: SearchResult[] = [];\n\n for (const module of modulesToSearch) {\n const ctx = createSearchContext(rootDir, module.id, config);\n const moduleResults = await module.search(query, ctx, options);\n allResults.push(...moduleResults);\n }\n\n // Sort all results by score\n allResults.sort((a, b) => b.score - a.score);\n\n // Return top K\n const topK = options.topK ?? 10;\n return allResults.slice(0, topK);\n}\n\n/**\n * Create a search context for a specific module\n */\nfunction createSearchContext(\n rootDir: string,\n moduleId: string,\n config: Config\n): SearchContext {\n const indexPath = getModuleIndexPath(rootDir, moduleId, config);\n\n return {\n rootDir,\n config,\n \n loadFileIndex: async (filepath: string): Promise<FileIndex | null> => {\n // filepath may or may not have an extension\n // If it has an extension, replace it with .json; otherwise append .json\n const hasExtension = /\\.[^./]+$/.test(filepath);\n const indexFilePath = hasExtension\n ? 
path.join(indexPath, filepath.replace(/\\.[^.]+$/, '.json'))\n : path.join(indexPath, filepath + '.json');\n \n try {\n const content = await fs.readFile(indexFilePath, 'utf-8');\n return JSON.parse(content);\n } catch {\n return null;\n }\n },\n \n listIndexedFiles: async (): Promise<string[]> => {\n const files: string[] = [];\n await traverseDirectory(indexPath, files, indexPath);\n \n // Convert index file paths back to source file paths\n return files\n .filter(f => f.endsWith('.json') && !f.endsWith('manifest.json'))\n .map(f => {\n const relative = path.relative(indexPath, f);\n // Convert .json back to original extension (we'll handle this generically)\n return relative.replace(/\\.json$/, '');\n });\n },\n };\n}\n\nasync function traverseDirectory(dir: string, files: string[], basePath: string): Promise<void> {\n try {\n const entries = await fs.readdir(dir, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = path.join(dir, entry.name);\n\n if (entry.isDirectory()) {\n await traverseDirectory(fullPath, files, basePath);\n } else if (entry.isFile()) {\n files.push(fullPath);\n }\n }\n } catch {\n // Directory doesn't exist\n }\n}\n\nasync function loadGlobalManifest(rootDir: string, config: Config): Promise<GlobalManifest | null> {\n const manifestPath = getGlobalManifestPath(rootDir, config);\n\n try {\n const content = await fs.readFile(manifestPath, 'utf-8');\n return JSON.parse(content);\n } catch {\n return null;\n }\n}\n\n/**\n * Format search results for display\n * @param results - Array of search results to format\n * @returns Formatted string for console output\n */\nexport function formatSearchResults(results: SearchResult[]): string {\n if (results.length === 0) {\n return 'No results found.';\n }\n\n let output = `Found ${results.length} results:\\n\\n`;\n\n for (let i = 0; i < results.length; i++) {\n const result = results[i];\n const { chunk } = result;\n \n // Format location with optional name\n const location = `${result.filepath}:${chunk.startLine}-${chunk.endLine}`;\n const nameInfo = chunk.name ? ` (${chunk.name})` : '';\n \n output += `${i + 1}. ${location}${nameInfo}\\n`;\n output += ` Score: ${(result.score * 100).toFixed(1)}% | Type: ${chunk.type}`;\n \n // Add export indicator\n if (chunk.isExported) {\n output += ' | exported';\n }\n output += '\\n';\n\n // Show preview (first 3 lines)\n const lines = chunk.content.split('\\n').slice(0, 3);\n for (const line of lines) {\n const trimmedLine = line.substring(0, 80);\n output += ` ${trimmedLine}${line.length > 80 ? '...' : ''}\\n`;\n }\n\n output += '\\n';\n }\n\n return output;\n}\n",
17
+ "// Main CLI entry point for raggrep\n\nimport { EMBEDDING_MODELS, getCacheDir, type EmbeddingModelName } from '../utils/embeddings';\n\nconst args = process.argv.slice(2);\nconst command = args[0];\n\n/**\n * Parsed CLI flags from command line arguments\n */\ninterface ParsedFlags {\n /** Embedding model to use */\n model?: EmbeddingModelName;\n /** Number of results to return */\n topK?: number;\n /** Minimum similarity score threshold (0-1) */\n minScore?: number;\n /** File extension filter (e.g., 'ts', 'tsx') */\n fileType?: string;\n /** Show help message */\n help: boolean;\n /** Show detailed progress */\n verbose: boolean;\n /** Remaining positional arguments */\n remaining: string[];\n}\n\n/**\n * Parse CLI flags from command line arguments\n * @param args - Array of command line arguments (excluding command name)\n * @returns Parsed flags object\n */\nfunction parseFlags(args: string[]): ParsedFlags {\n const flags: ParsedFlags = {\n help: false,\n verbose: false,\n remaining: [],\n };\n \n for (let i = 0; i < args.length; i++) {\n const arg = args[i];\n \n if (arg === '--help' || arg === '-h') {\n flags.help = true;\n } else if (arg === '--verbose' || arg === '-v') {\n flags.verbose = true;\n } else if (arg === '--model' || arg === '-m') {\n const modelName = args[++i];\n if (modelName && modelName in EMBEDDING_MODELS) {\n flags.model = modelName as EmbeddingModelName;\n } else {\n console.error(`Invalid model: ${modelName}`);\n console.error(`Available models: ${Object.keys(EMBEDDING_MODELS).join(', ')}`);\n process.exit(1);\n }\n } else if (arg === '--top' || arg === '-k') {\n const k = parseInt(args[++i], 10);\n if (!isNaN(k) && k > 0) {\n flags.topK = k;\n }\n } else if (arg === '--min-score' || arg === '-s') {\n const score = parseFloat(args[++i]);\n if (!isNaN(score) && score >= 0 && score <= 1) {\n flags.minScore = score;\n } else {\n console.error(`Invalid min-score: ${args[i]}. Must be a number between 0 and 1.`);\n process.exit(1);\n }\n } else if (arg === '--type' || arg === '-t') {\n const type = args[++i];\n if (type) {\n // Normalize: remove leading dot if present\n flags.fileType = type.startsWith('.') ? 
type.slice(1) : type;\n } else {\n console.error('--type requires a file extension (e.g., ts, tsx, js)');\n process.exit(1);\n }\n } else if (!arg.startsWith('-')) {\n flags.remaining.push(arg);\n }\n }\n \n return flags;\n}\n\nasync function main() {\n const flags = parseFlags(args.slice(1)); // Skip the command itself\n \n switch (command) {\n case 'index': {\n if (flags.help) {\n const models = Object.keys(EMBEDDING_MODELS).join(', ');\n console.log(`\nraggrep index - Index the current directory for semantic search\n\nUsage:\n raggrep index [options]\n\nOptions:\n -m, --model <name> Embedding model to use (default: all-MiniLM-L6-v2)\n -v, --verbose Show detailed progress\n -h, --help Show this help message\n\nAvailable Models:\n ${models}\n\nModel Cache: ${getCacheDir()}\n\nExamples:\n raggrep index\n raggrep index --model bge-small-en-v1.5\n raggrep index --verbose\n`);\n process.exit(0);\n }\n\n const { indexDirectory } = await import('../indexer');\n console.log('RAGgrep Indexer');\n console.log('================\\n');\n try {\n const results = await indexDirectory(process.cwd(), { \n model: flags.model,\n verbose: flags.verbose,\n });\n console.log('\\n================');\n console.log('Summary:');\n for (const result of results) {\n console.log(` ${result.moduleId}: ${result.indexed} indexed, ${result.skipped} skipped, ${result.errors} errors`);\n }\n } catch (error) {\n console.error('Error during indexing:', error);\n process.exit(1);\n }\n break;\n }\n\n case 'query': {\n if (flags.help) {\n console.log(`\nraggrep query - Search the indexed codebase\n\nUsage:\n raggrep query <search query> [options]\n\nOptions:\n -k, --top <n> Number of results to return (default: 10)\n -s, --min-score <n> Minimum similarity score 0-1 (default: 0.15)\n -t, --type <ext> Filter by file extension (e.g., ts, tsx, js)\n -h, --help Show this help message\n\nExamples:\n raggrep query \"user authentication\"\n raggrep query \"handle errors\" --top 5\n raggrep query \"database\" --min-score 0.1\n raggrep query \"interface\" --type ts\n`);\n process.exit(0);\n }\n\n const { search, formatSearchResults } = await import('../search');\n const query = flags.remaining[0];\n \n if (!query) {\n console.error('Usage: raggrep query <search query>');\n console.error('Run \"raggrep query --help\" for more information.');\n process.exit(1);\n }\n \n console.log('RAGgrep Search');\n console.log('==============\\n');\n try {\n // Build file patterns if type filter specified\n const filePatterns = flags.fileType ? [`*.${flags.fileType}`] : undefined;\n \n const results = await search(process.cwd(), query, { \n topK: flags.topK ?? 
10,\n minScore: flags.minScore,\n filePatterns,\n });\n console.log(formatSearchResults(results));\n } catch (error) {\n console.error('Error during search:', error);\n process.exit(1);\n }\n break;\n }\n\n case 'cleanup': {\n if (flags.help) {\n console.log(`\nraggrep cleanup - Remove stale index entries for deleted files\n\nUsage:\n raggrep cleanup [options]\n\nOptions:\n -v, --verbose Show detailed progress\n -h, --help Show this help message\n\nDescription:\n Scans the index and removes entries for files that no longer exist.\n Run this command after deleting files to clean up the index.\n\nExamples:\n raggrep cleanup\n raggrep cleanup --verbose\n`);\n process.exit(0);\n }\n\n const { cleanupIndex } = await import('../indexer');\n console.log('RAGgrep Cleanup');\n console.log('===============\\n');\n try {\n const results = await cleanupIndex(process.cwd(), { \n verbose: flags.verbose,\n });\n console.log('\\n===============');\n console.log('Summary:');\n for (const result of results) {\n console.log(` ${result.moduleId}: ${result.removed} removed, ${result.kept} kept`);\n }\n } catch (error) {\n console.error('Error during cleanup:', error);\n process.exit(1);\n }\n break;\n }\n\n default:\n console.log(`\nraggrep - Local filesystem-based RAG system for codebases\n\nUsage:\n raggrep <command> [options]\n\nCommands:\n index Index the current directory\n query Search the indexed codebase\n cleanup Remove stale index entries for deleted files\n\nOptions:\n -h, --help Show help for a command\n\nExamples:\n raggrep index\n raggrep index --model bge-small-en-v1.5\n raggrep query \"user login\"\n raggrep query \"handle errors\" --top 5\n raggrep cleanup\n\nRun 'raggrep <command> --help' for more information.\n`);\n if (command && command !== '--help' && command !== '-h') {\n console.error(`Unknown command: ${command}`);\n process.exit(1);\n }\n }\n}\n\nmain();\n"
18
+ ],
19
+ "mappings": ";;;;;;;;;;;;;AAGA;AACA;AACA;AA0DO,SAAS,mBAAmB,CAAC,QAAwC;AAAA,EAC1E,MAAM,YAAY,KAAK,kBAAkB,OAAO;AAAA,EAGhD,IAAI,UAAU,UAAU,cAAc,OAAO;AAAA,IAC3C,oBAAoB;AAAA,IACpB,mBAAmB;AAAA,EACrB;AAAA,EAEA,gBAAgB;AAAA;AAMlB,eAAe,kBAAkB,GAAkB;AAAA,EACjD,IAAI,qBAAqB,qBAAqB,cAAc,OAAO;AAAA,IACjE;AAAA,EACF;AAAA,EAGA,IAAI,kBAAkB,aAAa;AAAA,IACjC,OAAO;AAAA,EACT;AAAA,EAEA,iBAAiB;AAAA,EAEjB,eAAe,YAAY;AAAA,IACzB,MAAM,UAAU,iBAAiB,cAAc;AAAA,IAE/C,IAAI,cAAc,cAAc;AAAA,MAC9B,QAAQ,IAAI;AAAA,6BAAgC,cAAc,OAAO;AAAA,MACjE,QAAQ,IAAI,YAAY,WAAW;AAAA,IACrC;AAAA,IAEA,IAAI;AAAA,MAGF,oBAAoB,MAAM,SAAS,sBAAsB,SAAS;AAAA,QAChE,mBAAmB,cAAc,eAC7B,CAAC,aAAoG;AAAA,UACnG,IAAI,SAAS,WAAW,cAAc,SAAS,MAAM;AAAA,YACnD,MAAM,MAAM,SAAS,WAAW,KAAK,MAAM,SAAS,QAAQ,IAAI;AAAA,YAChE,QAAQ,OAAO,MAAM,mBAAmB,SAAS,SAAS,SAAS;AAAA,UACrE,EAAO,SAAI,SAAS,WAAW,UAAU,SAAS,MAAM;AAAA,YACtD,QAAQ,OAAO,MAAM,kBAAkB,SAAS;AAAA,CAAsB;AAAA,UACxE,EAAO,SAAI,SAAS,WAAW,SAAS,CAExC;AAAA,YAEF;AAAA,MACN,CAAC;AAAA,MAED,mBAAmB,cAAc;AAAA,MAEjC,IAAI,cAAc,cAAc;AAAA,QAC9B,QAAQ,IAAI;AAAA,CAAkB;AAAA,MAChC;AAAA,MACA,OAAO,OAAO;AAAA,MACd,oBAAoB;AAAA,MACpB,mBAAmB;AAAA,MACnB,MAAM,IAAI,MAAM,mCAAmC,OAAO;AAAA,cAC1D;AAAA,MACA,iBAAiB;AAAA,MACjB,cAAc;AAAA;AAAA,KAEf;AAAA,EAEH,OAAO;AAAA;AAMT,eAAsB,YAAY,CAAC,MAAiC;AAAA,EAClE,MAAM,mBAAmB;AAAA,EAEzB,IAAI,CAAC,mBAAmB;AAAA,IACtB,MAAM,IAAI,MAAM,oCAAoC;AAAA,EACtD;AAAA,EAGA,MAAM,SAAS,MAAM,kBAAkB,MAAM;AAAA,IAC3C,SAAS;AAAA,IACT,WAAW;AAAA,EACb,CAAC;AAAA,EAGD,OAAO,MAAM,KAAK,OAAO,IAAoB;AAAA;AAe/C,eAAsB,aAAa,CAAC,OAAsC;AAAA,EACxE,IAAI,MAAM,WAAW;AAAA,IAAG,OAAO,CAAC;AAAA,EAEhC,MAAM,mBAAmB;AAAA,EAEzB,IAAI,CAAC,mBAAmB;AAAA,IACtB,MAAM,IAAI,MAAM,oCAAoC;AAAA,EACtD;AAAA,EAEA,MAAM,UAAsB,CAAC;AAAA,EAG7B,SAAS,IAAI,EAAG,IAAI,MAAM,QAAQ,KAAK,YAAY;AAAA,IACjD,MAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,UAAU;AAAA,IAG3C,MAAM,UAAU,MAAM,QAAQ,IAC5B,MAAM,IAAI,OAAO,SAAS;AAAA,MACxB,MAAM,SAAS,MAAM,kBAAmB,MAAM;AAAA,QAC5C,SAAS;AAAA,QACT,WAAW;AAAA,MACb,CAAC;AAAA,MACD,OAAO,MAAM,KAAK,OAAO,IAAoB;AAAA,KAC9C,CACH;AAAA,IAEA,QAAQ,KAAK,GAAG,OAAO;AAAA,EACzB;AAAA,EAEA,OAAO;AAAA;AAUF,SAAS,gBAAgB,CAAC,GAAa,GAAqB;AAAA,EACjE,IAAI,EAAE,WAAW,EAAE,QAAQ;AAAA,IACzB,MAAM,IAAI,MAAM,mCAAmC;AAAA,EACrD;AAAA,EAEA,IAAI,aAAa;AAAA,EACjB,IAAI,QAAQ;AAAA,EACZ,IAAI,QAAQ;AAAA,EAEZ,SAAS,IAAI,EAAG,IAAI,EAAE,QAAQ,KAAK;AAAA,IACjC,cAAc,EAAE,KAAK,EAAE;AAAA,IACvB,SAAS,EAAE,KAAK,EAAE;AAAA,IAClB,SAAS,EAAE,KAAK,EAAE;AAAA,EACpB;AAAA,EAEA,IAAI,UAAU,KAAK,UAAU;AAAA,IAAG,OAAO;AAAA,EAEvC,OAAO,cAAc,KAAK,KAAK,KAAK,IAAI,KAAK,KAAK,KAAK;AAAA;AAMlD,SAAS,kBAAkB,GAAoB;AAAA,EACpD,OAAO,KAAK,cAAc;AAAA;AAMrB,SAAS,WAAW,GAAW;AAAA,EACpC,OAAO;AAAA;AAAA,IA/NH,WASO,kBAoBT,oBAAsD,MACtD,mBAAkC,MAClC,iBAAiB,OACjB,cAAoC,MAQlC,gBAKF,eAiGE,aAAa;AAAA;AAAA,EA9Ib,YAAiB,UAAQ,WAAQ,GAAG,UAAU,WAAW,QAAQ;AAAA,EAGvE,IAAI,WAAW;AAAA,EAGf,IAAI,mBAAmB;AAAA,EAGV,mBAAmB;AAAA,IAE9B,oBAAoB;AAAA,IAGpB,qBAAqB;AAAA,IAGrB,qBAAqB;AAAA,IAGrB,2BAA2B;AAAA,EAC7B;AAAA,EAmBM,iBAAkC;AAAA,IACtC,OAAO;AAAA,IACP,cAAc;AAAA,EAChB;AAAA,EAEI,gBAAiC,KAAK,eAAe;AAAA;;;;;AC4ClD,SAAS,mBAAmB,GAAW;AAAA,EAC5C,OAAO;AAAA,IACL,SAAS;AAAA,IACT,UAAU;AAAA,IACV,YAAY;AAAA,IACZ,aAAa;AAAA,IACb,SAAS;AAAA,MACP;AAAA,QACE,IAAI;AAAA,QACJ,SAAS;AAAA,QACT,SAAS;AAAA,UACP,gBAAgB;AAAA,QAClB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA,IA1EW,sBAmDA;AAAA;AAAA,EAnDA,uBAAuB;AAAA,IAElC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IAGA;AAAA,IAGA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IAGA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IAGA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IAGA;AAAA,IACA;AAAA,IAGA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IAGA;AAAA,IAGA;AAAA,EACF;AAAA,EAKa,qBAAqB;AAAA,IAChC;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAO;AAAA,IACtB;AAAA,IAAO;AAAA,IAAO;AA
AA,IAAO;AAAA,IAAS;AAAA,EAChC;AAAA;;;;ECrEA;AAAA,EAIA;AAAA;;;AC1BA;AACA;AAWO,SAAS,aAAa,CAAC,SAAiB,SAAiB,iBAAwB;AAAA,EACtF,OAAY,WAAK,SAAS,OAAO,QAAQ;AAAA;AAMpC,SAAS,kBAAkB,CAAC,SAAiB,UAAkB,SAAiB,iBAAwB;AAAA,EAC7G,OAAY,WAAK,SAAS,OAAO,UAAU,SAAS,QAAQ;AAAA;AAMvD,SAAS,qBAAqB,CAAC,SAAiB,UAAkB,SAAiB,iBAAwB;AAAA,EAChH,OAAY,WAAK,SAAS,OAAO,UAAU,SAAS,UAAU,eAAe;AAAA;AAMxE,SAAS,qBAAqB,CAAC,SAAiB,SAAiB,iBAAwB;AAAA,EAC9F,OAAY,WAAK,SAAS,OAAO,UAAU,eAAe;AAAA;AAMrD,SAAS,aAAa,CAAC,SAAiB,SAAiB,iBAAwB;AAAA,EACtF,OAAY,WAAK,SAAS,OAAO,UAAU,aAAa;AAAA;AAM1D,eAAsB,UAAU,CAAC,SAAkC;AAAA,EACjE,MAAM,aAAa,cAAc,SAAS,eAAc;AAAA,EAExD,IAAI;AAAA,IACF,MAAM,UAAU,MAAS,YAAS,YAAY,OAAO;AAAA,IACrD,MAAM,cAAc,KAAK,MAAM,OAAO;AAAA,IACtC,OAAO,KAAK,oBAAmB,YAAY;AAAA,IAC3C,MAAM;AAAA,IACN,OAAO;AAAA;AAAA;AAgBJ,SAAS,eAAe,CAAC,QAAgB,UAA4C;AAAA,EAC1F,OAAO,OAAO,QAAQ,KAAK,OAAK,EAAE,OAAO,QAAQ;AAAA;AAM5C,SAAS,4BAA4B,CAAC,cAA6C;AAAA,EACxF,MAAM,UAAU,aAAa,WAAW,CAAC;AAAA,EACzC,MAAM,YAAa,QAAQ,kBAA6B;AAAA,EAGxD,IAAI,EAAE,aAAa,mBAAmB;AAAA,IACpC,QAAQ,KAAK,4BAA4B,6CAA6C;AAAA,IACtF,OAAO,EAAE,OAAO,mBAAmB;AAAA,EACrC;AAAA,EAEA,OAAO;AAAA,IACL,OAAO;AAAA,IACP,cAAc,QAAQ,iBAAiB;AAAA,EACzC;AAAA;AAAA,IApFW;AAAA;AAAA,EAJb;AAAA,EACA;AAAA,EAGa,kBAAyB,oBAAoB;AAAA;;;ACWnD,SAAS,QAAQ,CAAC,MAAwB;AAAA,EAC/C,OAAO,KACJ,YAAY,EACZ,QAAQ,YAAY,GAAG,EACvB,MAAM,KAAK,EACX,OAAO,WAAS,MAAM,SAAS,CAAC;AAAA;AAMrC,SAAS,aAAa,CAAC,MAAc,QAA0B;AAAA,EAC7D,OAAO,OAAO,OAAO,OAAK,MAAM,IAAI,EAAE;AAAA;AAAA;AA2BjC,MAAM,UAAU;AAAA,EACb,YAAgE,IAAI;AAAA,EACpE,eAAuB;AAAA,EACvB,sBAA2C,IAAI;AAAA,EAC/C,YAAoB;AAAA,EAO5B,YAAY,CAAC,WAAiC;AAAA,IAC5C,IAAI,cAAc,KAAK,eAAe,KAAK;AAAA,IAE3C,WAAW,OAAO,WAAW;AAAA,MAC3B,MAAM,SAAS,IAAI,UAAU,SAAS,IAAI,OAAO;AAAA,MACjD,KAAK,UAAU,IAAI,IAAI,IAAI,EAAE,SAAS,IAAI,SAAS,OAAO,CAAC;AAAA,MAC3D,eAAe,OAAO;AAAA,MACtB,KAAK;AAAA,MAGL,MAAM,cAAc,IAAI,IAAI,MAAM;AAAA,MAClC,WAAW,QAAQ,aAAa;AAAA,QAC9B,MAAM,QAAQ,KAAK,oBAAoB,IAAI,IAAI,KAAK;AAAA,QACpD,KAAK,oBAAoB,IAAI,MAAM,QAAQ,CAAC;AAAA,MAC9C;AAAA,IACF;AAAA,IAEA,KAAK,eAAe,KAAK,YAAY,IAAI,cAAc,KAAK,YAAY;AAAA;AAAA,EAMlE,GAAG,CAAC,MAAsB;AAAA,IAChC,MAAM,UAAU,KAAK,oBAAoB,IAAI,IAAI,KAAK;AAAA,IACtD,IAAI,YAAY;AAAA,MAAG,OAAO;AAAA,IAG1B,OAAO,KAAK,IAAI,KAAK,KAAK,YAAY,UAAU,QAAQ,UAAU,IAAI;AAAA;AAAA,EAMhE,KAAK,CAAC,QAAkB,YAA8B;AAAA,IAC5D,MAAM,YAAY,OAAO;AAAA,IACzB,IAAI,QAAQ;AAAA,IAEZ,WAAW,QAAQ,YAAY;AAAA,MAC7B,MAAM,KAAK,cAAc,MAAM,MAAM;AAAA,MACrC,IAAI,OAAO;AAAA,QAAG;AAAA,MAEd,MAAM,WAAW,KAAK,IAAI,IAAI;AAAA,MAG9B,MAAM,YAAY,MAAM,UAAU;AAAA,MAClC,MAAM,cAAc,KAAK,WAAW,IAAI,SAAS,UAAU,YAAY,KAAK;AAAA,MAE5E,SAAS,YAAY,YAAY;AAAA,IACnC;AAAA,IAEA,OAAO;AAAA;AAAA,EAUT,MAAM,CAAC,OAAe,OAAe,IAAkB;AAAA,IACrD,MAAM,aAAa,SAAS,KAAK;AAAA,IACjC,IAAI,WAAW,WAAW;AAAA,MAAG,OAAO,CAAC;AAAA,IAErC,MAAM,UAAwB,CAAC;AAAA,IAE/B,YAAY,MAAM,aAAa,KAAK,WAAW;AAAA,MAC7C,MAAM,QAAQ,KAAK,MAAM,QAAQ,UAAU;AAAA,MAC3C,IAAI,QAAQ,GAAG;AAAA,QACb,QAAQ,KAAK,EAAE,IAAI,MAAM,CAAC;AAAA,MAC5B;AAAA,IACF;AAAA,IAEA,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAAA,IACxC,OAAO,QAAQ,MAAM,GAAG,IAAI;AAAA;AAAA,MAM1B,IAAI,GAAW;AAAA,IACjB,OAAO,KAAK;AAAA;AAAA,EAMd,KAAK,GAAS;AAAA,IACZ,KAAK,UAAU,MAAM;AAAA,IACrB,KAAK,oBAAoB,MAAM;AAAA,IAC/B,KAAK,eAAe;AAAA,IACpB,KAAK,YAAY;AAAA;AAErB;AASO,SAAS,cAAc,CAAC,OAAe,WAAmB,GAAW;AAAA,EAC1E,OAAO,KAAK,IAAI,KAAK,IAAI,CAAC,QAAQ,WAAW,CAAC;AAAA;AAAA,IAnK1C,UAAU,KACV,SAAS;;;;;;ACTf;AAyCO,SAAS,SAAS,CAAC,SAAiB,UAAiC;AAAA,EAC1E,MAAM,MAAM,SAAS,MAAM,GAAG,EAAE,IAAI,GAAG,YAAY;AAAA,EAGnD,IAAI,CAAC,MAAM,OAAO,MAAM,OAAO,OAAO,OAAO,OAAO,KAAK,EAAE,SAAS,OAAO,EAAE,GAAG;AAAA,IAC9E,OAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAAA,EAGA,OAAO,iBAAiB,OAAO;AAAA;AASjC,SAAS,eAAe,CAAC,SAAiB,UAAiC;AAAA,EACzE,MAAM,SAAwB,CAAC;AAAA,EAC/B,MAAM,QAAQ,QAAQ,MAAM;AAAA,CAA
I;AAAA,EAGhC,MAAM,aAAgB,oBACpB,UACA,SACG,gBAAa,QAChB,MACA,SAAS,SAAS,MAAM,KAAK,SAAS,SAAS,MAAM,IAC9C,cAAW,MACX,cAAW,EACpB;AAAA,EAKA,SAAS,cAAc,CAAC,MAAuD;AAAA,IAC7E,MAAM,QAAQ,WAAW,8BAA8B,KAAK,SAAS,CAAC;AAAA,IACtE,MAAM,MAAM,WAAW,8BAA8B,KAAK,OAAO,CAAC;AAAA,IAClE,OAAO;AAAA,MACL,WAAW,MAAM,OAAO;AAAA,MACxB,SAAS,IAAI,OAAO;AAAA,IACtB;AAAA;AAAA,EAMF,SAAS,WAAW,CAAC,MAAuB;AAAA,IAC1C,OAAO,KAAK,QAAQ,UAAU;AAAA;AAAA,EAMhC,SAAS,UAAU,CAAC,MAAwB;AAAA,IAC1C,IAAI,CAAI,oBAAiB,IAAI;AAAA,MAAG,OAAO;AAAA,IACvC,MAAM,YAAe,gBAAa,IAAI;AAAA,IACtC,OAAO,WAAW,KAAK,OAAK,EAAE,SAAY,cAAW,aAAa,KAAK;AAAA;AAAA,EAMzE,SAAS,QAAQ,CAAC,MAAmC;AAAA,IACnD,MAAM,aAAgB,2BAAwB,IAAI;AAAA,IAClD,IAAI,WAAW,WAAW;AAAA,MAAG;AAAA,IAE7B,OAAO,WACJ,IAAI,SAAO,IAAI,QAAQ,UAAU,CAAC,EAClC,KAAK;AAAA,CAAI;AAAA;AAAA,EAMd,SAAS,eAAe,CAAC,MAAmC;AAAA,IAC1D,IAAO,yBAAsB,IAAI,KAAK,KAAK,MAAM;AAAA,MAC/C,OAAO,KAAK,KAAK;AAAA,IACnB;AAAA,IACA,IAAO,uBAAoB,IAAI,KAAQ,gBAAa,KAAK,IAAI,GAAG;AAAA,MAC9D,OAAO,KAAK,KAAK;AAAA,IACnB;AAAA,IACA,IAAO,yBAAsB,IAAI,KAAQ,gBAAa,KAAK,IAAI,GAAG;AAAA,MAChE,OAAO,KAAK,KAAK;AAAA,IACnB;AAAA,IACA;AAAA;AAAA,EAMF,SAAS,KAAK,CAAC,MAAqB;AAAA,IAClC,QAAQ,WAAW,YAAY,eAAe,IAAI;AAAA,IAGlD,IAAO,yBAAsB,IAAI,KAAK,KAAK,MAAM;AAAA,MAC/C,OAAO,KAAK;AAAA,QACV,SAAS,YAAY,IAAI;AAAA,QACzB;AAAA,QACA;AAAA,QACA,MAAM;AAAA,QACN,MAAM,KAAK,KAAK;AAAA,QAChB,YAAY,WAAW,IAAI;AAAA,QAC3B,OAAO,SAAS,IAAI;AAAA,MACtB,CAAC;AAAA,MACD;AAAA,IACF;AAAA,IAGA,IAAO,uBAAoB,IAAI,GAAG;AAAA,MAChC,WAAW,QAAQ,KAAK,gBAAgB,cAAc;AAAA,QACpD,IAAI,KAAK,gBACD,mBAAgB,KAAK,WAAW,KAAQ,wBAAqB,KAAK,WAAW,IAAI;AAAA,UACvF,MAAM,OAAU,gBAAa,KAAK,IAAI,IAAI,KAAK,KAAK,OAAO;AAAA,UAC3D,OAAO,KAAK;AAAA,YACV,SAAS,YAAY,IAAI;AAAA,YACzB;AAAA,YACA;AAAA,YACA,MAAM;AAAA,YACN;AAAA,YACA,YAAY,WAAW,IAAI;AAAA,YAC3B,OAAO,SAAS,IAAI;AAAA,UACtB,CAAC;AAAA,UACD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,IAGA,IAAO,sBAAmB,IAAI,KAAK,KAAK,MAAM;AAAA,MAC5C,OAAO,KAAK;AAAA,QACV,SAAS,YAAY,IAAI;AAAA,QACzB;AAAA,QACA;AAAA,QACA,MAAM;AAAA,QACN,MAAM,KAAK,KAAK;AAAA,QAChB,YAAY,WAAW,IAAI;AAAA,QAC3B,OAAO,SAAS,IAAI;AAAA,MACtB,CAAC;AAAA,MACD;AAAA,IACF;AAAA,IAGA,IAAO,0BAAuB,IAAI,GAAG;AAAA,MACnC,OAAO,KAAK;AAAA,QACV,SAAS,YAAY,IAAI;AAAA,QACzB;AAAA,QACA;AAAA,QACA,MAAM;AAAA,QACN,MAAM,KAAK,KAAK;AAAA,QAChB,YAAY,WAAW,IAAI;AAAA,QAC3B,OAAO,SAAS,IAAI;AAAA,MACtB,CAAC;AAAA,MACD;AAAA,IACF;AAAA,IAGA,IAAO,0BAAuB,IAAI,GAAG;AAAA,MACnC,OAAO,KAAK;AAAA,QACV,SAAS,YAAY,IAAI;AAAA,QACzB;AAAA,QACA;AAAA,QACA,MAAM;AAAA,QACN,MAAM,KAAK,KAAK;AAAA,QAChB,YAAY,WAAW,IAAI;AAAA,QAC3B,OAAO,SAAS,IAAI;AAAA,MACtB,CAAC;AAAA,MACD;AAAA,IACF;AAAA,IAGA,IAAO,qBAAkB,IAAI,GAAG;AAAA,MAC9B,OAAO,KAAK;AAAA,QACV,SAAS,YAAY,IAAI;AAAA,QACzB;AAAA,QACA;AAAA,QACA,MAAM;AAAA,QACN,MAAM,KAAK,KAAK;AAAA,QAChB,YAAY,WAAW,IAAI;AAAA,QAC3B,OAAO,SAAS,IAAI;AAAA,MACtB,CAAC;AAAA,MACD;AAAA,IACF;AAAA,IAGA,IAAO,uBAAoB,IAAI,KAAK,WAAW,IAAI,GAAG;AAAA,MACpD,WAAW,QAAQ,KAAK,gBAAgB,cAAc;AAAA,QAEpD,IAAI,KAAK,gBACD,mBAAgB,KAAK,WAAW,KAAQ,wBAAqB,KAAK,WAAW,IAAI;AAAA,UACvF;AAAA,QACF;AAAA,QACA,MAAM,OAAU,gBAAa,KAAK,IAAI,IAAI,KAAK,KAAK,OAAO;AAAA,QAC3D,OAAO,KAAK;AAAA,UACV,SAAS,YAAY,IAAI;AAAA,UACzB;AAAA,UACA;AAAA,UACA,MAAM;AAAA,UACN;AAAA,UACA,YAAY;AAAA,UACZ,OAAO,SAAS,IAAI;AAAA,QACtB,CAAC;AAAA,MACH;AAAA,MACA;AAAA,IACF;AAAA,IAGG,gBAAa,MAAM,KAAK;AAAA;AAAA,EAI1B,gBAAa,YAAY,KAAK;AAAA,EAGjC,IAAI,OAAO,WAAW,GAAG;AAAA,IACvB,OAAO,iBAAiB,OAAO;AAAA,EACjC;AAAA,EAEA,OAAO;AAAA;AAST,SAAS,gBAAgB,CAAC,SAAgC;AAAA,EACxD,MAAM,SAAwB,CAAC;AAAA,EAC/B,MAAM,QAAQ,QAAQ,MAAM;AAAA,CAAI;AAAA,EAChC,MAAM,aAAa;AAAA,EACnB,MAAM,UAAU;AAAA,EAGhB,IAAI,MAAM,UAAU,YAAY;AAAA,IAC9B,OAAO;AAAA,MACL;AAAA,QACE;AAAA,QACA,WAAW;AAAA,QACX,SAAS,MAAM;AAAA,QACf,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA,EAGA,SAAS,IAAI,EAAG,IAAI,MAAM,Q
AAQ,KAAK,aAAa,SAAS;AAAA,IAC3D,MAAM,SAAS,KAAK,IAAI,IAAI,YAAY,MAAM,MAAM;AAAA,IACpD,OAAO,KAAK;AAAA,MACV,SAAS,MAAM,MAAM,GAAG,MAAM,EAAE,KAAK;AAAA,CAAI;AAAA,MACzC,WAAW,IAAI;AAAA,MACf,SAAS;AAAA,MACT,MAAM;AAAA,IACR,CAAC;AAAA,IAED,IAAI,UAAU,MAAM;AAAA,MAAQ;AAAA,EAC9B;AAAA,EAEA,OAAO;AAAA;AAUF,SAAS,eAAe,CAAC,UAAkB,WAAmB,SAAyB;AAAA,EAC5F,MAAM,WAAW,SAAS,QAAQ,UAAU,GAAG,EAAE,QAAQ,OAAO,GAAG;AAAA,EACnE,OAAO,GAAG,YAAY,aAAa;AAAA;AAAA;;;AC9R9B,SAAS,eAAe,CAC7B,SACA,MACA,cAAsB,IACZ;AAAA,EACV,MAAM,WAAW,IAAI;AAAA,EAGrB,IAAI,MAAM;AAAA,IACR,SAAS,IAAI,KAAK,YAAY,CAAC;AAAA,IAG/B,MAAM,QAAQ,KAAK,MAAM,WAAW,EAAE,IAAI,OAAK,EAAE,YAAY,CAAC;AAAA,IAC9D,MAAM,QAAQ,OAAK,EAAE,SAAS,KAAK,SAAS,IAAI,CAAC,CAAC;AAAA,EACpD;AAAA,EAGA,MAAM,kBAAkB;AAAA,EACxB,IAAI;AAAA,EAEJ,QAAQ,QAAQ,gBAAgB,KAAK,OAAO,OAAO,MAAM;AAAA,IACvD,MAAM,OAAO,MAAM,GAAG,YAAY;AAAA,IAGlC,IAAI,CAAC,gBAAgB,IAAI,IAAI,KAAK,KAAK,SAAS,GAAG;AAAA,MACjD,SAAS,IAAI,IAAI;AAAA,IACnB;AAAA,EACF;AAAA,EAEA,OAAO,MAAM,KAAK,QAAQ,EAAE,MAAM,GAAG,WAAW;AAAA;AAS3C,SAAS,mBAAmB,CAAC,UAA4B;AAAA,EAC9D,OAAO,SACJ,MAAM,QAAQ,EACd,OAAO,OAAK,EAAE,SAAS,KAAK,CAAC,gBAAgB,IAAI,EAAE,YAAY,CAAC,CAAC,EACjE,IAAI,OAAK,EAAE,YAAY,CAAC;AAAA;AAAA,IAhEhB;AAAA;AAAA,oBAAkB,IAAI,IAAI;AAAA,IAErC;AAAA,IAAS;AAAA,IAAO;AAAA,IAAO;AAAA,IAAY;AAAA,IAAS;AAAA,IAAa;AAAA,IAAQ;AAAA,IACjE;AAAA,IAAU;AAAA,IAAU;AAAA,IAAQ;AAAA,IAAU;AAAA,IAAS;AAAA,IAAS;AAAA,IAAO;AAAA,IAC/D;AAAA,IAAQ;AAAA,IAAS;AAAA,IAAQ;AAAA,IAAa;AAAA,IAAM;AAAA,IAAQ;AAAA,IAAO;AAAA,IAC3D;AAAA,IAAU;AAAA,IAAQ;AAAA,IAAS;AAAA,IAAY;AAAA,IAAO;AAAA,IAAS;AAAA,IACvD;AAAA,IAAS;AAAA,IAAU;AAAA,IAAc;AAAA,IAAQ;AAAA,IAAU;AAAA,IAAM;AAAA,IACzD;AAAA,IAAU;AAAA,IAAU;AAAA,IAAW;AAAA,IAAO;AAAA,IAAW;AAAA,IAAS;AAAA,IAC1D;AAAA,IAAU;AAAA,IAAW;AAAA,IAAa;AAAA,IAAU;AAAA,IAAY;AAAA,IACxD;AAAA,IAAc;AAAA,IAAW;AAAA,IAAS;AAAA,IAAO;AAAA,IAAO;AAAA,IAEhD;AAAA,IAAO;AAAA,IAAO;AAAA,IAAO;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAO;AAAA,IAAO;AAAA,IAAO;AAAA,EAC3D,CAAC;AAAA;;;ACPD;AACA;AAAA;AAiBO,MAAM,cAAc;AAAA,EACjB,OAAiC;AAAA,EACjC,gBAA0C,IAAI;AAAA,EAC9C,YAA8B;AAAA,EAC9B;AAAA,EACA;AAAA,EAER,WAAW,CAAC,UAAkB,UAAkB;AAAA,IAC9C,KAAK,eAAoB,WAAK,UAAU,SAAS,UAAU,UAAU;AAAA,IACrE,KAAK,WAAW;AAAA;AAAA,OAMZ,WAAU,GAAkB;AAAA,IAChC,IAAI;AAAA,MACF,MAAM,KAAK,KAAK;AAAA,MAChB,MAAM;AAAA,MAEN,KAAK,OAAO;AAAA,QACV,SAAS;AAAA,QACT,aAAa,IAAI,KAAK,EAAE,YAAY;AAAA,QACpC,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,UAAU;AAAA,UACR,cAAc;AAAA,UACd,qBAAqB,CAAC;AAAA,UACtB,WAAW;AAAA,QACb;AAAA,MACF;AAAA,MACA,KAAK,YAAY,IAAI;AAAA;AAAA;AAAA,EAOzB,OAAO,CAAC,SAA4B;AAAA,IAClC,KAAK,cAAc,IAAI,QAAQ,UAAU,OAAO;AAAA;AAAA,EAMlD,UAAU,CAAC,UAA2B;AAAA,IACpC,OAAO,KAAK,cAAc,OAAO,QAAQ;AAAA;AAAA,EAM3C,cAAc,GAAS;AAAA,IACrB,KAAK,YAAY,IAAI;AAAA,IAGrB,YAAY,UAAU,YAAY,KAAK,eAAe;AAAA,MACpD,MAAM,UAAU;AAAA,QACd,GAAG,QAAQ;AAAA,QACX,GAAG,QAAQ;AAAA,QACX,GAAG,oBAAoB,QAAQ;AAAA,MACjC,EAAE,KAAK,GAAG;AAAA,MAEV,KAAK,UAAU,aAAa,CAAC,EAAE,IAAI,UAAU,QAAQ,CAAC,CAAC;AAAA,IACzD;AAAA,IAGA,IAAI,KAAK,MAAM;AAAA,MACb,KAAK,KAAK,YAAY,KAAK,cAAc;AAAA,MACzC,KAAK,KAAK,SAAS,YAAY,KAAK,cAAc;AAAA,IACpD;AAAA;AAAA,EAMF,cAAc,CAAC,OAAe,gBAAwB,IAAc;AAAA,IAClE,IAAI,CAAC,KAAK,WAAW;AAAA,MACnB,OAAO,MAAM,KAAK,KAAK,cAAc,KAAK,CAAC;AAAA,IAC7C;AAAA,IAEA,MAAM,UAAU,KAAK,UAAU,OAAO,OAAO,aAAa;AAAA,IAC1D,OAAO,QAAQ,IAAI,OAAK,EAAE,EAAE;AAAA;AAAA,EAM9B,WAAW,GAAa;AAAA,IACtB,OAAO,MAAM,KAAK,KAAK,cAAc,KAAK,CAAC;AAAA;AAAA,EAM7C,cAAc,CAAC,UAA2C;AAAA,IACxD,OAAO,KAAK,cAAc,IAAI,QAAQ;AAAA;AAAA,OAMlC,KAAI,GAAkB;AAAA,IAC1B,IAAI,CAAC,KAAK;AAAA,MAAM,MAAM,IAAI,MAAM,uBAAuB;AAAA,IAGvD,KAAK,KAAK,cAAc,IAAI,KAAK,EAAE,YAAY;AAAA,IAC/C,KAAK,KAAK,YAAY,KAAK,cAAc;AAAA,IAGzC,MAAS,UAAM,KAAK,cAAc,EAAE,WAAW,KAAK,CAAC;AAAA,IAGrD,MAAM,WAAgB,WAAK,KAAK,cAAc,YAAY;AAAA,IAC1D,MAAS,cAAU,UAAU,KAAK,UAAU,KAA
K,MAAM,MAAM,CAAC,CAAC;AAAA,IAG/D,YAAY,UAAU,YAAY,KAAK,eAAe;AAAA,MACpD,MAAM,cAAc,KAAK,mBAAmB,QAAQ;AAAA,MACpD,MAAS,UAAW,cAAQ,WAAW,GAAG,EAAE,WAAW,KAAK,CAAC;AAAA,MAC7D,MAAS,cAAU,aAAa,KAAK,UAAU,SAAS,MAAM,CAAC,CAAC;AAAA,IAClE;AAAA;AAAA,OAMI,KAAI,GAAkB;AAAA,IAE1B,MAAM,WAAgB,WAAK,KAAK,cAAc,YAAY;AAAA,IAC1D,MAAM,cAAc,MAAS,aAAS,UAAU,OAAO;AAAA,IACvD,KAAK,OAAO,KAAK,MAAM,WAAW;AAAA,IAGlC,KAAK,cAAc,MAAM;AAAA,IACzB,MAAM,KAAK,2BAA2B,KAAK,YAAY;AAAA,IAGvD,KAAK,eAAe;AAAA;AAAA,OAMR,2BAA0B,CAAC,KAA4B;AAAA,IACnE,IAAI;AAAA,MACF,MAAM,UAAU,MAAS,YAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAAA,MAE7D,WAAW,SAAS,SAAS;AAAA,QAC3B,MAAM,WAAgB,WAAK,KAAK,MAAM,IAAI;AAAA,QAE1C,IAAI,MAAM,YAAY,GAAG;AAAA,UACvB,MAAM,KAAK,2BAA2B,QAAQ;AAAA,QAChD,EAAO,SAAI,MAAM,KAAK,SAAS,OAAO,KAAK,MAAM,SAAS,cAAc;AAAA,UACtE,IAAI;AAAA,YACF,MAAM,UAAU,MAAS,aAAS,UAAU,OAAO;AAAA,YACnD,MAAM,UAAU,KAAK,MAAM,OAAO;AAAA,YAClC,IAAI,QAAQ,UAAU;AAAA,cACpB,KAAK,cAAc,IAAI,QAAQ,UAAU,OAAO;AAAA,YAClD;AAAA,YACA,MAAM;AAAA,QAGV;AAAA,MACF;AAAA,MACA,MAAM;AAAA;AAAA,EAQF,kBAAkB,CAAC,UAA0B;AAAA,IACnD,MAAM,WAAW,SAAS,QAAQ,YAAY,OAAO;AAAA,IACrD,OAAY,WAAK,KAAK,cAAc,QAAQ;AAAA;AAAA,OAMxC,kBAAiB,CAAC,UAAiC;AAAA,IACvD,IAAI;AAAA,MACF,MAAS,WAAO,KAAK,mBAAmB,QAAQ,CAAC;AAAA,MACjD,MAAM;AAAA,IAGR,KAAK,cAAc,OAAO,QAAQ;AAAA;AAAA,OAM9B,OAAM,GAAqB;AAAA,IAC/B,IAAI;AAAA,MACF,MAAM,WAAgB,WAAK,KAAK,cAAc,YAAY;AAAA,MAC1D,MAAS,WAAO,QAAQ;AAAA,MACxB,OAAO;AAAA,MACP,MAAM;AAAA,MACN,OAAO;AAAA;AAAA;AAAA,MAOP,IAAI,GAAW;AAAA,IACjB,OAAO,KAAK,cAAc;AAAA;AAAA,EAM5B,KAAK,GAAS;AAAA,IACZ,KAAK,cAAc,MAAM;AAAA,IACzB,IAAI,KAAK,MAAM;AAAA,MACb,KAAK,KAAK,YAAY;AAAA,MACtB,KAAK,KAAK,WAAW;AAAA,QACnB,cAAc;AAAA,QACd,qBAAqB,CAAC;AAAA,QACtB,WAAW;AAAA,MACb;AAAA,IACF;AAAA,IACA,KAAK,YAAY,IAAI;AAAA;AAEzB;AAAA;AAAA,EAhPA;AAAA,EAKA;AAAA;;;;;;;;;ACZA;AAAA;AAkDO,MAAM,eAAsC;AAAA,EACxC,KAAK;AAAA,EACL,OAAO;AAAA,EACP,cAAc;AAAA,EACd,UAAU;AAAA,EAEX,kBAA0C;AAAA,EAC1C,gBAAsC;AAAA,EACtC,mBAA6C,IAAI;AAAA,EACjD,UAAkB;AAAA,OAEpB,WAAU,CAAC,QAAqC;AAAA,IAEpD,KAAK,kBAAkB,6BAA6B,MAAM;AAAA,IAG1D,oBAAoB,KAAK,eAAe;AAAA,IAGxC,KAAK,iBAAiB,MAAM;AAAA;AAAA,OAGxB,UAAS,CACb,UACA,SACA,KAC2B;AAAA,IAE3B,KAAK,UAAU,IAAI;AAAA,IAGnB,MAAM,eAAe,UAAU,SAAS,QAAQ;AAAA,IAEhD,IAAI,aAAa,WAAW,GAAG;AAAA,MAC7B,OAAO;AAAA,IACT;AAAA,IAGA,MAAM,gBAAgB,aAAa,IAAI,CAAC,MAAM,EAAE,OAAO;AAAA,IACvD,MAAM,aAAa,MAAM,cAAc,aAAa;AAAA,IAGpD,MAAM,SAAkB,aAAa,IAAI,CAAC,QAAQ;AAAA,MAChD,IAAI,gBAAgB,UAAU,GAAG,WAAW,GAAG,OAAO;AAAA,MACtD,SAAS,GAAG;AAAA,MACZ,WAAW,GAAG;AAAA,MACd,SAAS,GAAG;AAAA,MACZ,MAAM,GAAG;AAAA,MACT,MAAM,GAAG;AAAA,MACT,YAAY,GAAG;AAAA,MACf,OAAO,GAAG;AAAA,IACZ,EAAE;AAAA,IAGF,MAAM,aAAa,KAAK,kBAAkB,SAAS,QAAQ;AAAA,IAE3D,MAAM,QAAQ,MAAM,IAAI,aAAa,QAAQ;AAAA,IAC7C,MAAM,iBAAgB,mBAAmB;AAAA,IAEzC,MAAM,aAAiC;AAAA,MACrC;AAAA,MACA,gBAAgB,eAAc;AAAA,IAChC;AAAA,IAGA,MAAM,aAAa,CAAC,GAAG,IAAI,IAAI,aAAa,IAAI,QAAM,GAAG,IAAI,CAAC,CAAC;AAAA,IAC/D,MAAM,UAAU,aACb,OAAO,QAAM,GAAG,cAAc,GAAG,IAAI,EACrC,IAAI,QAAM,GAAG,IAAK;AAAA,IAGrB,MAAM,cAAc,IAAI;AAAA,IACxB,WAAW,MAAM,cAAc;AAAA,MAC7B,MAAM,WAAW,gBAAgB,GAAG,SAAS,GAAG,IAAI;AAAA,MACpD,SAAS,QAAQ,OAAK,YAAY,IAAI,CAAC,CAAC;AAAA,IAC1C;AAAA,IAEA,MAAM,cAA2B;AAAA,MAC/B;AAAA,MACA,YAAY,OAAO;AAAA,MACnB;AAAA,MACA,UAAU,MAAM,KAAK,WAAW;AAAA,MAChC;AAAA,MACA,cAAc,MAAM;AAAA,IACtB;AAAA,IAGA,KAAK,iBAAiB,IAAI,UAAU,WAAW;AAAA,IAE/C,OAAO;AAAA,MACL;AAAA,MACA,cAAc,MAAM;AAAA,MACpB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA;AAAA,OAMI,SAAQ,CAAC,KAAkC;AAAA,IAC/C,MAAM,WAAW,cAAc,IAAI,SAAS,IAAI,MAAM;AAAA,IAGtD,KAAK,gBAAgB,IAAI,cAAc,UAAU,KAAK,EAAE;AAAA,IACxD,MAAM,KAAK,cAAc,WAAW;AAAA,IAGpC,YAAY,UAAU,YAAY,KAAK,kBAAkB;AAAA,MACvD,KAAK,cAAc,QAAQ,OAAO;AAAA,IACpC;AAAA,IAGA,KAAK,cAAc,eAAe;AAAA,IAGlC,MAAM,KAAK,cAAc,KAAK;AAAA,IAE9B,QAAQ,IAAI,+BAA+B,KAAK
,iBAAiB,qBAAqB;AAAA,IAGtF,KAAK,iBAAiB,MAAM;AAAA;AAAA,OAexB,OAAM,CACV,OACA,KACA,UAAyB,CAAC,GACD;AAAA,IACzB,QAAQ,OAAO,eAAe,WAAW,mBAAmB,iBAAiB;AAAA,IAG7E,MAAM,WAAW,cAAc,IAAI,SAAS,IAAI,MAAM;AAAA,IACtD,MAAM,gBAAgB,IAAI,cAAc,UAAU,KAAK,EAAE;AAAA,IAEzD,IAAI;AAAA,IAEJ,IAAI;AAAA,MACF,MAAM,cAAc,WAAW;AAAA,MAG/B,MAAM,gBAAgB,OAAO;AAAA,MAC7B,iBAAiB,cAAc,eAAe,OAAO,aAAa;AAAA,MAGlE,IAAI,eAAe,WAAW,GAAG;AAAA,QAC/B,iBAAiB,cAAc,YAAY;AAAA,MAC7C;AAAA,MACA,MAAM;AAAA,MAEN,iBAAiB,MAAM,IAAI,iBAAiB;AAAA;AAAA,IAI9C,IAAI,gBAAgB,aAAa,SAAS,GAAG;AAAA,MAC3C,iBAAiB,eAAe,OAAO,cAAY;AAAA,QACjD,OAAO,aAAa,KAAK,aAAW;AAAA,UAClC,IAAI,QAAQ,WAAW,IAAI,GAAG;AAAA,YAC5B,MAAM,MAAM,QAAQ,MAAM,CAAC;AAAA,YAC3B,OAAO,SAAS,SAAS,GAAG;AAAA,UAC9B;AAAA,UACA,OAAO,SAAS,SAAS,OAAO;AAAA,SACjC;AAAA,OACF;AAAA,IACH;AAAA,IAGA,MAAM,iBAAiB,MAAM,aAAa,KAAK;AAAA,IAG/C,MAAM,YAAY,IAAI;AAAA,IACtB,MAAM,gBAID,CAAC;AAAA,IAEN,WAAW,YAAY,gBAAgB;AAAA,MACrC,MAAM,YAAY,MAAM,IAAI,cAAc,QAAQ;AAAA,MAClD,IAAI,CAAC;AAAA,QAAW;AAAA,MAEhB,MAAM,aAAa,UAAU;AAAA,MAC7B,IAAI,CAAC,YAAY;AAAA,QAAY;AAAA,MAE7B,SAAS,IAAI,EAAG,IAAI,UAAU,OAAO,QAAQ,KAAK;AAAA,QAChD,MAAM,QAAQ,UAAU,OAAO;AAAA,QAC/B,MAAM,YAAY,WAAW,WAAW;AAAA,QAExC,IAAI,CAAC;AAAA,UAAW;AAAA,QAEhB,cAAc,KAAK;AAAA,UACjB,UAAU,UAAU;AAAA,UACpB;AAAA,UACA;AAAA,QACF,CAAC;AAAA,QAGD,UAAU,aAAa,CAAC,EAAE,IAAI,MAAM,IAAI,SAAS,MAAM,QAAQ,CAAC,CAAC;AAAA,MACnE;AAAA,IACF;AAAA,IAGA,MAAM,cAAc,UAAU,OAAO,OAAO,OAAO,CAAC;AAAA,IACpD,MAAM,aAAa,IAAI;AAAA,IAEvB,WAAW,UAAU,aAAa;AAAA,MAChC,WAAW,IAAI,OAAO,IAAI,eAAe,OAAO,OAAO,CAAC,CAAC;AAAA,IAC3D;AAAA,IAGA,MAAM,UAA0B,CAAC;AAAA,IAEjC,aAAa,UAAU,OAAO,eAAe,eAAe;AAAA,MAC1D,MAAM,gBAAgB,iBAAiB,gBAAgB,SAAS;AAAA,MAChE,MAAM,YAAY,WAAW,IAAI,MAAM,EAAE,KAAK;AAAA,MAG9C,MAAM,cAAe,kBAAkB,gBAAkB,cAAc;AAAA,MAEvE,IAAI,eAAe,YAAY,YAAY,KAAK;AAAA,QAC9C,QAAQ,KAAK;AAAA,UACX;AAAA,UACA;AAAA,UACA,OAAO;AAAA,UACP,UAAU,KAAK;AAAA,UACf,SAAS;AAAA,YACP;AAAA,YACA;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,IAGA,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAAA,IACxC,OAAO,QAAQ,MAAM,GAAG,IAAI;AAAA;AAAA,EAGtB,iBAAiB,CAAC,SAAiB,UAA4B;AAAA,IACrE,MAAM,aAAuB,CAAC;AAAA,IAG9B,MAAM,cAAc;AAAA,IACpB,MAAM,eAAe;AAAA,IAErB,IAAI;AAAA,IACJ,QAAQ,QAAQ,YAAY,KAAK,OAAO,OAAO,MAAM;AAAA,MACnD,MAAM,aAAa,MAAM;AAAA,MACzB,IAAI,WAAW,WAAW,GAAG,GAAG;AAAA,QAC9B,MAAM,MAAW,cAAQ,QAAQ;AAAA,QACjC,MAAM,WAAgB,gBAAe,WAAK,KAAK,UAAU,CAAC;AAAA,QAC1D,WAAW,KAAK,QAAQ;AAAA,MAC1B;AAAA,IACF;AAAA,IAEA,QAAQ,QAAQ,aAAa,KAAK,OAAO,OAAO,MAAM;AAAA,MACpD,MAAM,aAAa,MAAM;AAAA,MACzB,IAAI,WAAW,WAAW,GAAG,GAAG;AAAA,QAC9B,MAAM,MAAW,cAAQ,QAAQ;AAAA,QACjC,MAAM,WAAgB,gBAAe,WAAK,KAAK,UAAU,CAAC;AAAA,QAC1D,WAAW,KAAK,QAAQ;AAAA,MAC1B;AAAA,IACF;AAAA,IAEA,OAAO;AAAA;AAEX;AAAA,IA/Sa,oBAAoB,MAGpB,gBAAgB,IAGvB,kBAAkB,KAGlB,cAAc,KAad,6BAA6B;AAAA;AAAA,EApCnC;AAAA,EAQA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;;;AChCA,MAAM,mBAA6C;AAAA,EACzC,UAAU,IAAI;AAAA,EAEtB,QAAQ,CAAC,QAA2B;AAAA,IAClC,IAAI,KAAK,QAAQ,IAAI,OAAO,EAAE,GAAG;AAAA,MAC/B,QAAQ,KAAK,WAAW,OAAO,2CAA2C;AAAA,IAC5E;AAAA,IACA,KAAK,QAAQ,IAAI,OAAO,IAAI,MAAM;AAAA;AAAA,EAGpC,GAAG,CAAC,IAAqC;AAAA,IACvC,OAAO,KAAK,QAAQ,IAAI,EAAE;AAAA;AAAA,EAG5B,IAAI,GAAkB;AAAA,IACpB,OAAO,MAAM,KAAK,KAAK,QAAQ,OAAO,CAAC;AAAA;AAAA,EAGzC,UAAU,CAAC,QAA+B;AAAA,IACxC,MAAM,aAAa,IAAI,IACrB,OAAO,QACJ,OAAO,OAAK,EAAE,OAAO,EACrB,IAAI,OAAK,EAAE,EAAE,CAClB;AAAA,IAEA,OAAO,KAAK,KAAK,EAAE,OAAO,OAAK,WAAW,IAAI,EAAE,EAAE,CAAC;AAAA;AAEvD;AAMA,eAAsB,sBAAsB,GAAkB;AAAA,EAE5D,QAAQ,oCAAmB;AAAA,EAC3B,SAAS,SAAS,IAAI,eAAgB;AAAA;AAAA,IAN3B;AAAA;AAAA,aAA2B,IAAI;AAAA;;;;;;;;AChC5C;AACA;AACA;AA6CA,eAAsB,cAAc,CAAC,SAAiB,UAAwB,CAAC,GAA2B;AAAA,EACxG,MAAM,UAAU,QAAQ,WAAW;AAAA,EAGnC,UAAe,cAAQ,OAAO;AAAA,EAE9B,QAAQ,IAAI,uBAAuB,SAAS;AAAA,EAG5C,MAAM,SAAS,MAAM,
WAAW,OAAO;AAAA,EAGvC,MAAM,uBAAuB;AAAA,EAG7B,MAAM,iBAAiB,SAAS,WAAW,MAAM;AAAA,EAEjD,IAAI,eAAe,WAAW,GAAG;AAAA,IAC/B,QAAQ,IAAI,+CAA+C;AAAA,IAC3D,OAAO,CAAC;AAAA,EACV;AAAA,EAEA,QAAQ,IAAI,oBAAoB,eAAe,IAAI,CAAC,MAAM,EAAE,EAAE,EAAE,KAAK,IAAI,GAAG;AAAA,EAG5E,MAAM,QAAQ,MAAM,UAAU,SAAS,MAAM;AAAA,EAC7C,QAAQ,IAAI,SAAS,MAAM,uBAAuB;AAAA,EAGlD,MAAM,UAAyB,CAAC;AAAA,EAEhC,WAAW,UAAU,gBAAgB;AAAA,IACnC,QAAQ,IAAI;AAAA,GAAM,OAAO,4BAA4B;AAAA,IAGrD,MAAM,eAAe,gBAAgB,QAAQ,OAAO,EAAE;AAAA,IACtD,IAAI,OAAO,cAAc,cAAc;AAAA,MAErC,MAAM,sBAAsB,KAAK,aAAa;AAAA,MAC9C,IAAI,QAAQ,SAAS,OAAO,OAAO,YAAY;AAAA,QAC7C,oBAAoB,UAAU;AAAA,aACzB,oBAAoB;AAAA,UACvB,gBAAgB,QAAQ;AAAA,QAC1B;AAAA,MACF;AAAA,MACA,MAAM,OAAO,WAAW,mBAAmB;AAAA,IAC7C;AAAA,IAEA,MAAM,SAAS,MAAM,gBAAgB,SAAS,OAAO,QAAQ,QAAQ,OAAO;AAAA,IAC5E,QAAQ,KAAK,MAAM;AAAA,IAGnB,IAAI,OAAO,UAAU;AAAA,MACnB,QAAQ,IAAI,IAAI,OAAO,qCAAqC;AAAA,MAC5D,MAAM,MAAoB;AAAA,QACxB;AAAA,QACA;AAAA,QACA,UAAU,OAAO,aAAqB;AAAA,UACpC,MAAM,WAAgB,iBAAW,QAAQ,IAAI,WAAgB,WAAK,SAAS,QAAQ;AAAA,UACnF,OAAU,aAAS,UAAU,OAAO;AAAA;AAAA,QAEtC,cAAc,OAAO,aAAqB;AAAA,UACxC,MAAM,WAAgB,iBAAW,QAAQ,IAAI,WAAgB,WAAK,SAAS,QAAQ;AAAA,UACnF,MAAM,QAAQ,MAAS,SAAK,QAAQ;AAAA,UACpC,OAAO,EAAE,cAAc,MAAM,MAAM,YAAY,EAAE;AAAA;AAAA,MAErD;AAAA,MACA,MAAM,OAAO,SAAS,GAAG;AAAA,IAC3B;AAAA,IAEA,QAAQ,IAAI,IAAI,OAAO,mBAAmB,OAAO,oBAAoB,OAAO,oBAAoB,OAAO,eAAe;AAAA,EACxH;AAAA,EAGA,MAAM,qBAAqB,SAAS,gBAAgB,MAAM;AAAA,EAE1D,OAAO;AAAA;AAMT,eAAe,eAAe,CAC5B,SACA,OACA,QACA,QACA,SACsB;AAAA,EACtB,MAAM,SAAsB;AAAA,IAC1B,UAAU,OAAO;AAAA,IACjB,SAAS;AAAA,IACT,SAAS;AAAA,IACT,QAAQ;AAAA,EACV;AAAA,EAGA,MAAM,WAAW,MAAM,mBAAmB,SAAS,OAAO,IAAI,MAAM;AAAA,EAGpE,MAAM,MAAoB;AAAA,IACxB;AAAA,IACA;AAAA,IACA,UAAU,OAAO,aAAqB;AAAA,MACpC,MAAM,WAAgB,iBAAW,QAAQ,IAAI,WAAgB,WAAK,SAAS,QAAQ;AAAA,MACnF,OAAU,aAAS,UAAU,OAAO;AAAA;AAAA,IAEtC,cAAc,OAAO,aAAqB;AAAA,MACxC,MAAM,WAAgB,iBAAW,QAAQ,IAAI,WAAgB,WAAK,SAAS,QAAQ;AAAA,MACnF,MAAM,QAAQ,MAAS,SAAK,QAAQ;AAAA,MACpC,OAAO,EAAE,cAAc,MAAM,MAAM,YAAY,EAAE;AAAA;AAAA,EAErD;AAAA,EAGA,WAAW,YAAY,OAAO;AAAA,IAC5B,MAAM,eAAoB,eAAS,SAAS,QAAQ;AAAA,IAEpD,IAAI;AAAA,MACF,MAAM,QAAQ,MAAS,SAAK,QAAQ;AAAA,MACpC,MAAM,eAAe,MAAM,MAAM,YAAY;AAAA,MAG7C,MAAM,gBAAgB,SAAS,MAAM;AAAA,MACrC,IAAI,iBAAiB,cAAc,iBAAiB,cAAc;AAAA,QAChE,IAAI,SAAS;AAAA,UACX,QAAQ,IAAI,aAAa,0BAA0B;AAAA,QACrD;AAAA,QACA,OAAO;AAAA,QACP;AAAA,MACF;AAAA,MAGA,MAAM,UAAU,MAAS,aAAS,UAAU,OAAO;AAAA,MAEnD,IAAI,SAAS;AAAA,QACX,QAAQ,IAAI,gBAAgB,iBAAiB;AAAA,MAC/C;AAAA,MACA,MAAM,YAAY,MAAM,OAAO,UAAU,cAAc,SAAS,GAAG;AAAA,MAEnE,IAAI,CAAC,WAAW;AAAA,QACd,IAAI,SAAS;AAAA,UACX,QAAQ,IAAI,aAAa,0BAA0B;AAAA,QACrD;AAAA,QACA,OAAO;AAAA,QACP;AAAA,MACF;AAAA,MAGA,MAAM,eAAe,SAAS,OAAO,IAAI,cAAc,WAAW,MAAM;AAAA,MAGxE,SAAS,MAAM,gBAAgB;AAAA,QAC7B;AAAA,QACA,YAAY,UAAU,OAAO;AAAA,MAC/B;AAAA,MAEA,OAAO;AAAA,MACP,OAAO,OAAO;AAAA,MACd,QAAQ,MAAM,oBAAoB,iBAAiB,KAAK;AAAA,MACxD,OAAO;AAAA;AAAA,EAEX;AAAA,EAGA,SAAS,cAAc,IAAI,KAAK,EAAE,YAAY;AAAA,EAC9C,MAAM,oBAAoB,SAAS,OAAO,IAAI,UAAU,MAAM;AAAA,EAE9D,OAAO;AAAA;AAGT,eAAe,SAAS,CAAC,SAAiB,QAAmC;AAAA,EAC3E,MAAM,WAAW,OAAO,WAAW,IAAI,CAAC,QAAQ,OAAO,KAAK;AAAA,EAC5D,MAAM,iBAAiB,OAAO,YAAY,IAAI,CAAC,MAAM,MAAM,MAAM;AAAA,EAEjE,MAAM,QAAkB,CAAC;AAAA,EACzB,WAAW,WAAW,UAAU;AAAA,IAC9B,MAAM,UAAU,MAAM,KAAK,SAAS;AAAA,MAClC,KAAK;AAAA,MACL,UAAU;AAAA,MACV,QAAQ;AAAA,IACV,CAAC;AAAA,IACD,MAAM,KAAK,GAAG,OAAO;AAAA,EACvB;AAAA,EAEA,OAAO,CAAC,GAAG,IAAI,IAAI,KAAK,CAAC;AAAA;AAG3B,eAAe,kBAAkB,CAC/B,SACA,UACA,QACyB;AAAA,EACzB,MAAM,eAAe,sBAAsB,SAAS,UAAU,MAAM;AAAA,EAEpE,IAAI;AAAA,IACF,MAAM,UAAU,MAAS,aAAS,cAAc,OAAO;AAAA,IACvD,OAAO,KAAK,MAAM,OAAO;AAAA,IACzB,MAAM;AAAA,IACN,OAAO;AAAA,MACL;AAAA,MACA,SAAS;AAAA,MACT,aAAa,IAAI,KAAK,EAAE,YAAY;AAAA,MACpC,OAAO,CAAC;AAAA,IACV;AAAA;AAAA;AAIJ,eAAe,m
BAAmB,CAChC,SACA,UACA,UACA,QACe;AAAA,EACf,MAAM,eAAe,sBAAsB,SAAS,UAAU,MAAM;AAAA,EACpE,MAAS,UAAW,cAAQ,YAAY,GAAG,EAAE,WAAW,KAAK,CAAC;AAAA,EAC9D,MAAS,cAAU,cAAc,KAAK,UAAU,UAAU,MAAM,CAAC,CAAC;AAAA;AAGpE,eAAe,cAAc,CAC3B,SACA,UACA,UACA,WACA,QACe;AAAA,EACf,MAAM,YAAY,mBAAmB,SAAS,UAAU,MAAM;AAAA,EAC9D,MAAM,gBAAqB,WAAK,WAAW,SAAS,QAAQ,YAAY,OAAO,CAAC;AAAA,EAEhF,MAAS,UAAW,cAAQ,aAAa,GAAG,EAAE,WAAW,KAAK,CAAC;AAAA,EAC/D,MAAS,cAAU,eAAe,KAAK,UAAU,WAAW,MAAM,CAAC,CAAC;AAAA;AAGtE,eAAe,oBAAoB,CACjC,SACA,SACA,QACe;AAAA,EACf,MAAM,eAAe,sBAAsB,SAAS,MAAM;AAAA,EAE1D,MAAM,WAA2B;AAAA,IAC/B,SAAS,OAAO;AAAA,IAChB,aAAa,IAAI,KAAK,EAAE,YAAY;AAAA,IACpC,SAAS,QAAQ,IAAI,CAAC,MAAM,EAAE,EAAE;AAAA,EAClC;AAAA,EAEA,MAAS,UAAW,cAAQ,YAAY,GAAG,EAAE,WAAW,KAAK,CAAC;AAAA,EAC9D,MAAS,cAAU,cAAc,KAAK,UAAU,UAAU,MAAM,CAAC,CAAC;AAAA;AASpE,eAAsB,YAAY,CAChC,SACA,UAAiC,CAAC,GACR;AAAA,EAC1B,MAAM,UAAU,QAAQ,WAAW;AAAA,EAGnC,UAAe,cAAQ,OAAO;AAAA,EAE9B,QAAQ,IAAI,yBAAyB,SAAS;AAAA,EAG9C,MAAM,SAAS,MAAM,WAAW,OAAO;AAAA,EAGvC,MAAM,uBAAuB;AAAA,EAG7B,MAAM,iBAAiB,SAAS,WAAW,MAAM;AAAA,EAEjD,IAAI,eAAe,WAAW,GAAG;AAAA,IAC/B,QAAQ,IAAI,qBAAqB;AAAA,IACjC,OAAO,CAAC;AAAA,EACV;AAAA,EAEA,MAAM,UAA2B,CAAC;AAAA,EAElC,WAAW,UAAU,gBAAgB;AAAA,IACnC,QAAQ,IAAI;AAAA,GAAM,OAAO,qCAAqC;AAAA,IAE9D,MAAM,SAAS,MAAM,mBAAmB,SAAS,OAAO,IAAI,QAAQ,OAAO;AAAA,IAC3E,QAAQ,KAAK,MAAM;AAAA,IAEnB,QAAQ,IAAI,IAAI,OAAO,iBAAiB,OAAO,+BAA+B,OAAO,oBAAoB;AAAA,EAC3G;AAAA,EAEA,OAAO;AAAA;AAMT,eAAe,kBAAkB,CAC/B,SACA,UACA,QACA,SACwB;AAAA,EACxB,MAAM,SAAwB;AAAA,IAC5B;AAAA,IACA,SAAS;AAAA,IACT,MAAM;AAAA,EACR;AAAA,EAGA,MAAM,WAAW,MAAM,mBAAmB,SAAS,UAAU,MAAM;AAAA,EACnE,MAAM,YAAY,mBAAmB,SAAS,UAAU,MAAM;AAAA,EAE9D,MAAM,gBAA0B,CAAC;AAAA,EACjC,MAAM,eAAwC,CAAC;AAAA,EAG/C,YAAY,UAAU,UAAU,OAAO,QAAQ,SAAS,KAAK,GAAG;AAAA,IAC9D,MAAM,WAAgB,WAAK,SAAS,QAAQ;AAAA,IAE5C,IAAI;AAAA,MACF,MAAS,WAAO,QAAQ;AAAA,MAExB,aAAa,YAAY;AAAA,MACzB,OAAO;AAAA,MACP,MAAM;AAAA,MAEN,cAAc,KAAK,QAAQ;AAAA,MAC3B,OAAO;AAAA,MAEP,IAAI,SAAS;AAAA,QACX,QAAQ,IAAI,2BAA2B,UAAU;AAAA,MACnD;AAAA;AAAA,EAEJ;AAAA,EAGA,WAAW,YAAY,eAAe;AAAA,IACpC,MAAM,gBAAqB,WAAK,WAAW,SAAS,QAAQ,YAAY,OAAO,CAAC;AAAA,IAChF,IAAI;AAAA,MACF,MAAS,WAAO,aAAa;AAAA,MAC7B,MAAM;AAAA,EAGV;AAAA,EAGA,SAAS,QAAQ;AAAA,EACjB,SAAS,cAAc,IAAI,KAAK,EAAE,YAAY;AAAA,EAC9C,MAAM,oBAAoB,SAAS,UAAU,UAAU,MAAM;AAAA,EAG7D,MAAM,wBAAwB,SAAS;AAAA,EAEvC,OAAO;AAAA;AAMT,eAAe,uBAAuB,CAAC,KAA+B;AAAA,EACpE,IAAI;AAAA,IACF,MAAM,UAAU,MAAS,YAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAAA,IAG7D,WAAW,SAAS,SAAS;AAAA,MAC3B,IAAI,MAAM,YAAY,GAAG;AAAA,QACvB,MAAM,SAAc,WAAK,KAAK,MAAM,IAAI;AAAA,QACxC,MAAM,wBAAwB,MAAM;AAAA,MACtC;AAAA,IACF;AAAA,IAGA,MAAM,mBAAmB,MAAS,YAAQ,GAAG;AAAA,IAG7C,IAAI,iBAAiB,WAAW,GAAG;AAAA,MACjC,MAAS,UAAM,GAAG;AAAA,MAClB,OAAO;AAAA,IACT;AAAA,IAEA,OAAO;AAAA,IACP,MAAM;AAAA,IACN,OAAO;AAAA;AAAA;AAAA;AAAA,EApaX;AAAA,EAQA;AAAA;;;;;;;;ACnBA;AACA;AAqBA,eAAsB,MAAM,CAC1B,SACA,OACA,UAAyB,CAAC,GACD;AAAA,EAEzB,UAAe,cAAQ,OAAO;AAAA,EAE9B,QAAQ,IAAI,mBAAmB,QAAQ;AAAA,EAGvC,MAAM,SAAS,MAAM,WAAW,OAAO;AAAA,EAGvC,MAAM,uBAAuB;AAAA,EAG7B,MAAM,iBAAiB,MAAM,mBAAmB,SAAS,MAAM;AAAA,EAE/D,IAAI,CAAC,kBAAkB,eAAe,QAAQ,WAAW,GAAG;AAAA,IAC1D,QAAQ,IAAI,4CAA4C;AAAA,IACxD,OAAO,CAAC;AAAA,EACV;AAAA,EAGA,MAAM,kBAAiC,CAAC;AAAA,EAExC,WAAW,YAAY,eAAe,SAAS;AAAA,IAC7C,MAAM,SAAS,SAAS,IAAI,QAAQ;AAAA,IACpC,MAAM,eAAe,gBAAgB,QAAQ,QAAQ;AAAA,IAErD,IAAI,UAAU,cAAc,SAAS;AAAA,MAEnC,IAAI,OAAO,YAAY;AAAA,QACrB,MAAM,OAAO,WAAW,YAAY;AAAA,MACtC;AAAA,MACA,gBAAgB,KAAK,MAAM;AAAA,IAC7B;AAAA,EACF;AAAA,EAEA,IAAI,gBAAgB,WAAW,GAAG;AAAA,IAChC,QAAQ,IAAI,wCAAwC;AAAA,IACpD,OAAO,CAAC;AAAA,EACV;AAAA,EAGA,MAAM,aAA6B,CAAC;AAAA,EAEpC,WAAW,UAAU,iBAAiB;AAAA,IACpC,MAAM,MAAM,oBAAoB,SAAS,OAAO,IAAI,MAAM;AAAA,IAC1D,MAAM,gBAAgB,MAAM,OAAO,OAAO,OAAO,KAAK,O
AAO;AAAA,IAC7D,WAAW,KAAK,GAAG,aAAa;AAAA,EAClC;AAAA,EAGA,WAAW,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAAA,EAG3C,MAAM,OAAO,QAAQ,QAAQ;AAAA,EAC7B,OAAO,WAAW,MAAM,GAAG,IAAI;AAAA;AAMjC,SAAS,mBAAmB,CAC1B,SACA,UACA,QACe;AAAA,EACf,MAAM,YAAY,mBAAmB,SAAS,UAAU,MAAM;AAAA,EAE9D,OAAO;AAAA,IACL;AAAA,IACA;AAAA,IAEA,eAAe,OAAO,aAAgD;AAAA,MAGpE,MAAM,eAAe,YAAY,KAAK,QAAQ;AAAA,MAC9C,MAAM,gBAAgB,eACb,WAAK,WAAW,SAAS,QAAQ,YAAY,OAAO,CAAC,IACrD,WAAK,WAAW,WAAW,OAAO;AAAA,MAE3C,IAAI;AAAA,QACF,MAAM,UAAU,MAAS,aAAS,eAAe,OAAO;AAAA,QACxD,OAAO,KAAK,MAAM,OAAO;AAAA,QACzB,MAAM;AAAA,QACN,OAAO;AAAA;AAAA;AAAA,IAIX,kBAAkB,YAA+B;AAAA,MAC/C,MAAM,QAAkB,CAAC;AAAA,MACzB,MAAM,kBAAkB,WAAW,OAAO,SAAS;AAAA,MAGnD,OAAO,MACJ,OAAO,OAAK,EAAE,SAAS,OAAO,KAAK,CAAC,EAAE,SAAS,eAAe,CAAC,EAC/D,IAAI,OAAK;AAAA,QACR,MAAM,YAAgB,eAAS,WAAW,CAAC;AAAA,QAE3C,OAAO,UAAS,QAAQ,WAAW,EAAE;AAAA,OACtC;AAAA;AAAA,EAEP;AAAA;AAGF,eAAe,iBAAiB,CAAC,KAAa,OAAiB,UAAiC;AAAA,EAC9F,IAAI;AAAA,IACF,MAAM,UAAU,MAAS,YAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAAA,IAE7D,WAAW,SAAS,SAAS;AAAA,MAC3B,MAAM,WAAgB,WAAK,KAAK,MAAM,IAAI;AAAA,MAE1C,IAAI,MAAM,YAAY,GAAG;AAAA,QACvB,MAAM,kBAAkB,UAAU,OAAO,QAAQ;AAAA,MACnD,EAAO,SAAI,MAAM,OAAO,GAAG;AAAA,QACzB,MAAM,KAAK,QAAQ;AAAA,MACrB;AAAA,IACF;AAAA,IACA,MAAM;AAAA;AAKV,eAAe,kBAAkB,CAAC,SAAiB,QAAgD;AAAA,EACjG,MAAM,eAAe,sBAAsB,SAAS,MAAM;AAAA,EAE1D,IAAI;AAAA,IACF,MAAM,UAAU,MAAS,aAAS,cAAc,OAAO;AAAA,IACvD,OAAO,KAAK,MAAM,OAAO;AAAA,IACzB,MAAM;AAAA,IACN,OAAO;AAAA;AAAA;AASJ,SAAS,mBAAmB,CAAC,SAAiC;AAAA,EACnE,IAAI,QAAQ,WAAW,GAAG;AAAA,IACxB,OAAO;AAAA,EACT;AAAA,EAEA,IAAI,SAAS,SAAS,QAAQ;AAAA;AAAA;AAAA,EAE9B,SAAS,IAAI,EAAG,IAAI,QAAQ,QAAQ,KAAK;AAAA,IACvC,MAAM,SAAS,QAAQ;AAAA,IACvB,QAAQ,UAAU;AAAA,IAGlB,MAAM,WAAW,GAAG,OAAO,YAAY,MAAM,aAAa,MAAM;AAAA,IAChE,MAAM,WAAW,MAAM,OAAO,KAAK,MAAM,UAAU;AAAA,IAEnD,UAAU,GAAG,IAAI,MAAM,WAAW;AAAA;AAAA,IAClC,UAAU,cAAc,OAAO,QAAQ,KAAK,QAAQ,CAAC,cAAc,MAAM;AAAA,IAGzE,IAAI,MAAM,YAAY;AAAA,MACpB,UAAU;AAAA,IACZ;AAAA,IACA,UAAU;AAAA;AAAA,IAGV,MAAM,QAAQ,MAAM,QAAQ,MAAM;AAAA,CAAI,EAAE,MAAM,GAAG,CAAC;AAAA,IAClD,WAAW,QAAQ,OAAO;AAAA,MACxB,MAAM,cAAc,KAAK,UAAU,GAAG,EAAE;AAAA,MACxC,UAAU,SAAS,cAAc,KAAK,SAAS,KAAK,QAAQ;AAAA;AAAA,IAC9D;AAAA,IAEA,UAAU;AAAA;AAAA,EACZ;AAAA,EAEA,OAAO;AAAA;AAAA;AAAA,EA3LT;AAAA,EAMA;AAAA;;;AChBA;AAEA,IAAM,OAAO,QAAQ,KAAK,MAAM,CAAC;AACjC,IAAM,UAAU,KAAK;AA2BrB,SAAS,UAAU,CAAC,OAA6B;AAAA,EAC/C,MAAM,QAAqB;AAAA,IACzB,MAAM;AAAA,IACN,SAAS;AAAA,IACT,WAAW,CAAC;AAAA,EACd;AAAA,EAEA,SAAS,IAAI,EAAG,IAAI,MAAK,QAAQ,KAAK;AAAA,IACpC,MAAM,MAAM,MAAK;AAAA,IAEjB,IAAI,QAAQ,YAAY,QAAQ,MAAM;AAAA,MACpC,MAAM,OAAO;AAAA,IACf,EAAO,SAAI,QAAQ,eAAe,QAAQ,MAAM;AAAA,MAC9C,MAAM,UAAU;AAAA,IAClB,EAAO,SAAI,QAAQ,aAAa,QAAQ,MAAM;AAAA,MAC5C,MAAM,YAAY,MAAK,EAAE;AAAA,MACzB,IAAI,aAAa,aAAa,kBAAkB;AAAA,QAC9C,MAAM,QAAQ;AAAA,MAChB,EAAO;AAAA,QACL,QAAQ,MAAM,kBAAkB,WAAW;AAAA,QAC3C,QAAQ,MAAM,qBAAqB,OAAO,KAAK,gBAAgB,EAAE,KAAK,IAAI,GAAG;AAAA,QAC7E,QAAQ,KAAK,CAAC;AAAA;AAAA,IAElB,EAAO,SAAI,QAAQ,WAAW,QAAQ,MAAM;AAAA,MAC1C,MAAM,IAAI,SAAS,MAAK,EAAE,IAAI,EAAE;AAAA,MAChC,IAAI,CAAC,MAAM,CAAC,KAAK,IAAI,GAAG;AAAA,QACtB,MAAM,OAAO;AAAA,MACf;AAAA,IACF,EAAO,SAAI,QAAQ,iBAAiB,QAAQ,MAAM;AAAA,MAChD,MAAM,QAAQ,WAAW,MAAK,EAAE,EAAE;AAAA,MAClC,IAAI,CAAC,MAAM,KAAK,KAAK,SAAS,KAAK,SAAS,GAAG;AAAA,QAC7C,MAAM,WAAW;AAAA,MACnB,EAAO;AAAA,QACL,QAAQ,MAAM,sBAAsB,MAAK,uCAAuC;AAAA,QAChF,QAAQ,KAAK,CAAC;AAAA;AAAA,IAElB,EAAO,SAAI,QAAQ,YAAY,QAAQ,MAAM;AAAA,MAC3C,MAAM,OAAO,MAAK,EAAE;AAAA,MACpB,IAAI,MAAM;AAAA,QAER,MAAM,WAAW,KAAK,WAAW,GAAG,IAAI,KAAK,MAAM,CAAC,IAAI;AAAA,MAC1D,EAAO;AAAA,QACL,QAAQ,MAAM,sDAAsD;AAAA,QACpE,QAAQ,KAAK,CAAC;AAAA;AAAA,IAElB,EAAO,SAAI,CAAC,IAAI,WAAW,GAAG,GAAG;AAAA,MAC/B,MAAM,UAAU,KAAK,GAAG;AAAA,IAC1B;AAAA,EACF;AAAA,EAEA,OAAO;AAAA;AAGT,
eAAe,IAAI,GAAG;AAAA,EACpB,MAAM,QAAQ,WAAW,KAAK,MAAM,CAAC,CAAC;AAAA,EAEtC,QAAQ;AAAA,SACD,SAAS;AAAA,MACZ,IAAI,MAAM,MAAM;AAAA,QACd,MAAM,SAAS,OAAO,KAAK,gBAAgB,EAAE,KAAK,IAAI;AAAA,QACtD,QAAQ,IAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAYhB;AAAA;AAAA,eAEW,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,CAM1B;AAAA,QACO,QAAQ,KAAK,CAAC;AAAA,MAChB;AAAA,MAEA,QAAQ,oCAAmB;AAAA,MAC3B,QAAQ,IAAI,iBAAiB;AAAA,MAC7B,QAAQ,IAAI;AAAA,CAAoB;AAAA,MAChC,IAAI;AAAA,QACF,MAAM,UAAU,MAAM,gBAAe,QAAQ,IAAI,GAAG;AAAA,UAClD,OAAO,MAAM;AAAA,UACb,SAAS,MAAM;AAAA,QACjB,CAAC;AAAA,QACD,QAAQ,IAAI;AAAA,iBAAoB;AAAA,QAChC,QAAQ,IAAI,UAAU;AAAA,QACtB,WAAW,UAAU,SAAS;AAAA,UAC5B,QAAQ,IAAI,KAAK,OAAO,aAAa,OAAO,oBAAoB,OAAO,oBAAoB,OAAO,eAAe;AAAA,QACnH;AAAA,QACA,OAAO,OAAO;AAAA,QACd,QAAQ,MAAM,0BAA0B,KAAK;AAAA,QAC7C,QAAQ,KAAK,CAAC;AAAA;AAAA,MAEhB;AAAA,IACF;AAAA,SAEK,SAAS;AAAA,MACZ,IAAI,MAAM,MAAM;AAAA,QACd,QAAQ,IAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,CAiBnB;AAAA,QACO,QAAQ,KAAK,CAAC;AAAA,MAChB;AAAA,MAEA,QAAQ,iBAAQ,8CAAwB;AAAA,MACxC,MAAM,QAAQ,MAAM,UAAU;AAAA,MAE9B,IAAI,CAAC,OAAO;AAAA,QACV,QAAQ,MAAM,qCAAqC;AAAA,QACnD,QAAQ,MAAM,kDAAkD;AAAA,QAChE,QAAQ,KAAK,CAAC;AAAA,MAChB;AAAA,MAEA,QAAQ,IAAI,gBAAgB;AAAA,MAC5B,QAAQ,IAAI;AAAA,CAAkB;AAAA,MAC9B,IAAI;AAAA,QAEF,MAAM,eAAe,MAAM,WAAW,CAAC,KAAK,MAAM,UAAU,IAAI;AAAA,QAEhE,MAAM,UAAU,MAAM,QAAO,QAAQ,IAAI,GAAG,OAAO;AAAA,UACjD,MAAM,MAAM,QAAQ;AAAA,UACpB,UAAU,MAAM;AAAA,UAChB;AAAA,QACF,CAAC;AAAA,QACD,QAAQ,IAAI,qBAAoB,OAAO,CAAC;AAAA,QACxC,OAAO,OAAO;AAAA,QACd,QAAQ,MAAM,wBAAwB,KAAK;AAAA,QAC3C,QAAQ,KAAK,CAAC;AAAA;AAAA,MAEhB;AAAA,IACF;AAAA,SAEK,WAAW;AAAA,MACd,IAAI,MAAM,MAAM;AAAA,QACd,QAAQ,IAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,CAiBnB;AAAA,QACO,QAAQ,KAAK,CAAC;AAAA,MAChB;AAAA,MAEA,QAAQ,gCAAiB;AAAA,MACzB,QAAQ,IAAI,iBAAiB;AAAA,MAC7B,QAAQ,IAAI;AAAA,CAAmB;AAAA,MAC/B,IAAI;AAAA,QACF,MAAM,UAAU,MAAM,cAAa,QAAQ,IAAI,GAAG;AAAA,UAChD,SAAS,MAAM;AAAA,QACjB,CAAC;AAAA,QACD,QAAQ,IAAI;AAAA,gBAAmB;AAAA,QAC/B,QAAQ,IAAI,UAAU;AAAA,QACtB,WAAW,UAAU,SAAS;AAAA,UAC5B,QAAQ,IAAI,KAAK,OAAO,aAAa,OAAO,oBAAoB,OAAO,WAAW;AAAA,QACpF;AAAA,QACA,OAAO,OAAO;AAAA,QACd,QAAQ,MAAM,yBAAyB,KAAK;AAAA,QAC5C,QAAQ,KAAK,CAAC;AAAA;AAAA,MAEhB;AAAA,IACF;AAAA;AAAA,MAGE,QAAQ,IAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,CAsBjB;AAAA,MACK,IAAI,WAAW,YAAY,YAAY,YAAY,MAAM;AAAA,QACvD,QAAQ,MAAM,oBAAoB,SAAS;AAAA,QAC3C,QAAQ,KAAK,CAAC;AAAA,MAChB;AAAA;AAAA;AAIN,KAAK;",
20
+ "debugId": "8E6C629D8A1BA14F64756E2164756E21",
21
+ "names": []
22
+ }
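
The embedded sources above define the package's three top-level operations: indexDirectory, search (with formatSearchResults), and cleanupIndex. Below is a minimal sketch of programmatic use, assuming the package root re-exports these functions (dist/index.d.ts in the file list suggests a public API, but its contents are not shown in this hunk); the option names come from the IndexOptions and SearchOptions shapes in the sources above.

    import { indexDirectory, search, formatSearchResults } from 'raggrep';

    async function demo(): Promise<void> {
      // Index the working directory; 'verbose' and 'model' mirror the CLI's
      // --verbose/--model flags (model applies to the semantic module only).
      const indexResults = await indexDirectory(process.cwd(), { verbose: true });
      for (const r of indexResults) {
        console.log(`${r.moduleId}: ${r.indexed} indexed, ${r.skipped} skipped, ${r.errors} errors`);
      }

      // Query the index; topK and minScore mirror --top/--min-score.
      const hits = await search(process.cwd(), 'user authentication', { topK: 5 });
      console.log(formatSearchResults(hits));
    }

    demo();
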
@@ -0,0 +1,52 @@
1
+ /**
2
+ * Composition Root
3
+ *
4
+ * This is the single place where all dependencies are wired together.
5
+ * The composition root creates concrete implementations and injects them
6
+ * into use cases and services.
7
+ *
8
+ * This is the only file that knows about concrete implementations.
9
+ * Everything else depends only on interfaces (ports).
10
+ */
11
+ import type { Config } from './domain/entities';
12
+ import type { FileSystem } from './domain/ports';
13
+ import type { IndexModule, IndexContext, SearchContext } from './types';
14
+ import { FileIndexStorage } from './infrastructure/storage';
15
+ /**
16
+ * Container for all application services.
17
+ * Created once and passed to use cases.
18
+ */
19
+ export interface ServiceContainer {
20
+ fileSystem: FileSystem;
21
+ storage: FileIndexStorage;
22
+ getEnabledModules: (config: Config) => IndexModule[];
23
+ getModule: (moduleId: string) => IndexModule | undefined;
24
+ initializeModule: (module: IndexModule, config: Config) => Promise<void>;
25
+ }
26
+ /**
27
+ * Create a service container for a specific project directory.
28
+ */
29
+ export declare function createServiceContainer(rootDir: string): Promise<ServiceContainer>;
30
+ import type { IndexDirectoryDependencies } from './application/usecases/indexDirectory';
31
+ import type { SearchIndexDependencies } from './application/usecases/searchIndex';
32
+ import type { CleanupIndexDependencies } from './application/usecases/cleanupIndex';
33
+ /**
34
+ * Create dependencies for the indexDirectory use case.
35
+ */
36
+ export declare function createIndexDependencies(container: ServiceContainer): IndexDirectoryDependencies;
37
+ /**
38
+ * Create dependencies for the searchIndex use case.
39
+ */
40
+ export declare function createSearchDependencies(container: ServiceContainer): SearchIndexDependencies;
41
+ /**
42
+ * Create dependencies for the cleanupIndex use case.
43
+ */
44
+ export declare function createCleanupDependencies(container: ServiceContainer): CleanupIndexDependencies;
45
+ /**
46
+ * Create an IndexContext for a module.
47
+ */
48
+ export declare function createIndexContext(rootDir: string, config: Config, fileSystem: FileSystem): IndexContext;
49
+ /**
50
+ * Create a SearchContext for a module.
51
+ */
52
+ export declare function createSearchContext(rootDir: string, moduleId: string, config: Config, storage: FileIndexStorage): SearchContext;
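
The declarations above describe the dependency-injection entry points. A minimal sketch of the intended wiring, based only on these signatures; the use-case functions themselves live under application/usecases (listed in the file summary) and their call shapes are not shown here, so anything beyond building the dependencies is left as a comment.

    import { createServiceContainer, createIndexDependencies } from './composition';

    async function wire(rootDir: string) {
      // One container per project directory; use cases receive only the
      // dependencies they declare, never concrete implementations.
      const container = await createServiceContainer(rootDir);
      // Hypothetical next step: pass the result to the indexDirectory use
      // case, whose exact signature is defined in
      // application/usecases/indexDirectory and not visible in this hunk.
      return createIndexDependencies(container);
    }
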
@@ -0,0 +1,41 @@
1
+ /**
2
+ * Chunk Entity
3
+ *
4
+ * Represents a semantic unit of code that can be indexed and searched.
5
+ * This is a core domain entity with no external dependencies.
6
+ */
7
+ /**
8
+ * Types of code chunks that can be extracted from source files.
9
+ */
10
+ export type ChunkType = 'function' | 'class' | 'interface' | 'type' | 'enum' | 'variable' | 'block' | 'file';
11
+ /**
12
+ * A chunk of code or text that has been parsed and can be indexed.
13
+ *
14
+ * Chunks are the fundamental unit of indexing in RAGgrep. Each chunk
15
+ * represents a meaningful code construct (function, class, etc.) that
16
+ * can be independently searched and retrieved.
17
+ */
18
+ export interface Chunk {
19
+ /** Unique identifier for this chunk (typically filepath + line range) */
20
+ id: string;
21
+ /** The source code content */
22
+ content: string;
23
+ /** 1-based start line number in the source file */
24
+ startLine: number;
25
+ /** 1-based end line number in the source file */
26
+ endLine: number;
27
+ /** The type of code construct */
28
+ type: ChunkType;
29
+ /** Name of the construct (function name, class name, etc.) */
30
+ name?: string;
31
+ /** Whether this chunk is exported from its module */
32
+ isExported?: boolean;
33
+ /** JSDoc comment if present */
34
+ jsDoc?: string;
35
+ /** Additional metadata for extensibility */
36
+ metadata?: Record<string, unknown>;
37
+ }
38
+ /**
39
+ * Generate a unique chunk ID from filepath and line numbers.
40
+ */
41
+ export declare function createChunkId(filepath: string, startLine: number, endLine: number): string;
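
For concreteness, a chunk value that satisfies this interface; the field values are invented, and only the shape comes from the declarations above.

    import { createChunkId, type Chunk } from './chunk';

    const chunk: Chunk = {
      // The ID is derived from filepath and line range; treat it as opaque.
      id: createChunkId('src/auth.ts', 10, 24),
      content: 'export function login() { /* ... */ }',
      startLine: 10, // 1-based, per the doc comments
      endLine: 24,
      type: 'function',
      name: 'login',
      isExported: true,
    };
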
@@ -0,0 +1,43 @@
1
+ /**
2
+ * Config Entity
3
+ *
4
+ * Configuration for RAGgrep indexing and search operations.
5
+ */
6
+ /**
7
+ * Configuration for a specific index module.
8
+ */
9
+ export interface ModuleConfig {
10
+ /** Unique module identifier */
11
+ id: string;
12
+ /** Whether the module is enabled */
13
+ enabled: boolean;
14
+ /** Module-specific options */
15
+ options?: Record<string, unknown>;
16
+ }
17
+ /**
18
+ * Main RAGgrep configuration.
19
+ */
20
+ export interface Config {
21
+ /** RAGgrep version */
22
+ version: string;
23
+ /** Directory name for index storage (default: '.raggrep') */
24
+ indexDir: string;
25
+ /** File extensions to index (e.g., ['.ts', '.tsx', '.js']) */
26
+ extensions: string[];
27
+ /** Paths to ignore during indexing */
28
+ ignorePaths: string[];
29
+ /** Enabled modules and their configurations */
30
+ modules: ModuleConfig[];
31
+ }
32
+ /**
33
+ * Default paths to ignore during indexing.
34
+ */
35
+ export declare const DEFAULT_IGNORE_PATHS: string[];
36
+ /**
37
+ * Default file extensions to index.
38
+ */
39
+ export declare const DEFAULT_EXTENSIONS: string[];
40
+ /**
41
+ * Create a default configuration.
42
+ */
43
+ export declare function createDefaultConfig(): Config;
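
A configuration value matching this shape; apart from indexDir's documented '.raggrep' default, the 'semantic' module id targeted by the CLI's --model override, and the embeddingModel option name set by the indexer, the values are illustrative rather than copied from createDefaultConfig.

    import type { Config } from './config';

    const config: Config = {
      version: '0.1.0',
      indexDir: '.raggrep', // documented default
      extensions: ['.ts', '.tsx', '.js'],
      ignorePaths: ['node_modules', 'dist'],
      modules: [
        {
          id: 'semantic', // built-in module referenced by the CLI's --model flag
          enabled: true,
          options: { embeddingModel: 'all-MiniLM-L6-v2' },
        },
      ],
    };
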
@@ -0,0 +1,58 @@
1
+ /**
2
+ * FileIndex Entity
3
+ *
4
+ * Represents the indexed data for a single source file.
5
+ * This is a Tier 2 index structure containing full chunk data and embeddings.
6
+ */
7
+ import type { Chunk } from './chunk';
8
+ /**
9
+ * Indexed data for a single file (Tier 2 index).
10
+ *
11
+ * Contains all chunks extracted from the file along with
12
+ * module-specific data like embeddings.
13
+ */
14
+ export interface FileIndex {
15
+ /** Relative path to the source file */
16
+ filepath: string;
17
+ /** ISO timestamp of when the file was last modified */
18
+ lastModified: string;
19
+ /** Chunks extracted from the file */
20
+ chunks: Chunk[];
21
+ /** Module-specific indexed data (e.g., embeddings, symbol tables) */
22
+ moduleData: Record<string, unknown>;
23
+ /** References to other files (imports, requires) */
24
+ references?: string[];
25
+ }
26
+ /**
27
+ * Manifest entry for a single indexed file.
28
+ */
29
+ export interface FileManifestEntry {
30
+ /** ISO timestamp of when the file was last modified */
31
+ lastModified: string;
32
+ /** Number of chunks in the file */
33
+ chunkCount: number;
34
+ }
35
+ /**
36
+ * Manifest tracking all indexed files for a specific module.
37
+ */
38
+ export interface ModuleManifest {
39
+ /** Module identifier */
40
+ moduleId: string;
41
+ /** Module version (for compatibility checking) */
42
+ version: string;
43
+ /** ISO timestamp of last update */
44
+ lastUpdated: string;
45
+ /** Map of filepath to manifest entry */
46
+ files: Record<string, FileManifestEntry>;
47
+ }
48
+ /**
49
+ * Global manifest tracking all active modules.
50
+ */
51
+ export interface GlobalManifest {
52
+ /** RAGgrep version */
53
+ version: string;
54
+ /** ISO timestamp of last update */
55
+ lastUpdated: string;
56
+ /** List of active module IDs */
57
+ modules: string[];
58
+ }
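The per-file `lastModified` timestamps make incremental indexing cheap: compare a file's current mtime against its manifest entry and skip unchanged files. A sketch of that check (the exact comparison strategy is an assumption; the types mirror the declarations above):

```ts
interface FileManifestEntry {
  lastModified: string;
  chunkCount: number;
}
interface ModuleManifest {
  moduleId: string;
  version: string;
  lastUpdated: string;
  files: Record<string, FileManifestEntry>;
}

// Re-index a file when it has no manifest entry or its timestamp changed.
function needsReindex(manifest: ModuleManifest, filepath: string, mtime: Date): boolean {
  const entry = manifest.files[filepath];
  if (!entry) return true;                           // never indexed
  return entry.lastModified !== mtime.toISOString(); // stale if timestamps differ
}
```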
@@ -0,0 +1,61 @@
1
+ /**
2
+ * FileSummary Entity
3
+ *
4
+ * Lightweight file summary for the symbolic index.
5
+ * Used for fast keyword-based filtering before loading full file indexes.
6
+ *
7
+ * Stored as individual files in: .raggrep/index/<module>/symbolic/<filepath>.json
8
+ */
9
+ import type { ChunkType } from "./chunk";
10
+ /**
11
+ * Lightweight file summary for fast filtering.
12
+ *
13
+ * Contains just enough information to decide if a file
14
+ * is a candidate for more detailed semantic search.
15
+ */
16
+ export interface FileSummary {
17
+ /** Relative path to the source file */
18
+ filepath: string;
19
+ /** Number of chunks in this file */
20
+ chunkCount: number;
21
+ /** Types of chunks present (function, class, interface, etc.) */
22
+ chunkTypes: ChunkType[];
23
+ /** Extracted keywords from chunk names and content */
24
+ keywords: string[];
25
+ /** Names of exported symbols */
26
+ exports: string[];
27
+ /** ISO timestamp of when the file was last modified */
28
+ lastModified: string;
29
+ }
30
+ /**
31
+ * Metadata for the symbolic index.
32
+ * Stored in: .raggrep/index/<module>/symbolic/_meta.json
33
+ *
34
+ * Contains global BM25 statistics needed for keyword search.
35
+ * Individual FileSummary files are stored separately for scalability.
36
+ */
37
+ export interface SymbolicIndexMeta {
38
+ /** Schema version */
39
+ version: string;
40
+ /** ISO timestamp of last update */
41
+ lastUpdated: string;
42
+ /** Module ID this index belongs to */
43
+ moduleId: string;
44
+ /** Number of indexed files */
45
+ fileCount: number;
46
+ /** Pre-computed BM25 data for keyword search */
47
+ bm25Data: {
48
+ /** Average document length */
49
+ avgDocLength: number;
50
+ /** Document frequencies for each term */
51
+ documentFrequencies: Record<string, number>;
52
+ /** Total number of documents */
53
+ totalDocs: number;
54
+ };
55
+ }
56
+ /**
57
+ * @deprecated Use SymbolicIndexMeta instead. Kept for backwards compatibility.
58
+ */
59
+ export type Tier1Manifest = SymbolicIndexMeta & {
60
+ files: Record<string, FileSummary>;
61
+ };
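`bm25Data` holds exactly the corpus-level statistics BM25 needs (document frequencies, average document length, document count), so keyword scoring can run against the lightweight symbolic index without loading any Tier 2 file indexes. A standard BM25 scorer over that shape; the k1/b constants are conventional defaults, not values confirmed by this diff:

```ts
// Shape mirrors the bm25Data field of SymbolicIndexMeta above.
interface BM25Data {
  avgDocLength: number;
  documentFrequencies: Record<string, number>;
  totalDocs: number;
}

// Standard BM25; k1 = 1.2 and b = 0.75 are the usual defaults (assumed here).
function bm25Score(
  queryTerms: string[],
  termFreqs: Record<string, number>, // term frequencies within one document
  docLength: number,
  data: BM25Data,
  k1 = 1.2,
  b = 0.75
): number {
  let score = 0;
  for (const term of queryTerms) {
    const tf = termFreqs[term] ?? 0;
    if (tf === 0) continue;
    const df = data.documentFrequencies[term] ?? 0;
    // IDF with the usual +0.5 smoothing to avoid division by zero
    const idf = Math.log(1 + (data.totalDocs - df + 0.5) / (df + 0.5));
    // Term-frequency saturation, normalized by document length
    const norm =
      (tf * (k1 + 1)) /
      (tf + k1 * (1 - b + (b * docLength) / data.avgDocLength));
    score += idf * norm;
  }
  return score;
}
```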
@@ -0,0 +1,14 @@
1
+ /**
2
+ * Domain Entities
3
+ *
4
+ * Core business objects with no external dependencies.
5
+ * These represent the fundamental concepts in the RAGgrep domain.
6
+ */
7
+ export type { Chunk, ChunkType } from "./chunk";
8
+ export { createChunkId } from "./chunk";
9
+ export type { FileIndex, FileManifestEntry, ModuleManifest, GlobalManifest } from "./fileIndex";
10
+ export type { FileSummary, SymbolicIndexMeta, Tier1Manifest } from "./fileSummary";
11
+ export type { SearchResult, SearchOptions } from "./searchResult";
12
+ export { DEFAULT_SEARCH_OPTIONS } from "./searchResult";
13
+ export type { Config, ModuleConfig } from "./config";
14
+ export { DEFAULT_IGNORE_PATHS, DEFAULT_EXTENSIONS, createDefaultConfig } from "./config";
@@ -0,0 +1,36 @@
1
+ /**
2
+ * SearchResult Entity
3
+ *
4
+ * Represents a single result from a search query.
5
+ */
6
+ import type { Chunk } from './chunk';
7
+ /**
8
+ * A search result with relevance score and source information.
9
+ */
10
+ export interface SearchResult {
11
+ /** Path to the file containing the result */
12
+ filepath: string;
13
+ /** The matching chunk */
14
+ chunk: Chunk;
15
+ /** Relevance score (0-1, higher is better) */
16
+ score: number;
17
+ /** ID of the module that produced this result */
18
+ moduleId: string;
19
+ /** Additional context from the search (e.g., semantic vs keyword scores) */
20
+ context?: Record<string, unknown>;
21
+ }
22
+ /**
23
+ * Options for search operations.
24
+ */
25
+ export interface SearchOptions {
26
+ /** Maximum number of results to return (default: 10) */
27
+ topK?: number;
28
+ /** Minimum similarity score threshold 0-1 (default: 0.15) */
29
+ minScore?: number;
30
+ /** Filter to specific file patterns (e.g., ['*.ts', '*.tsx']) */
31
+ filePatterns?: string[];
32
+ }
33
+ /**
34
+ * Default search options.
35
+ */
36
+ export declare const DEFAULT_SEARCH_OPTIONS: Required<SearchOptions>;
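The documented defaults (topK 10, minScore 0.15) suggest a merge-then-filter step when applying SearchOptions to raw results. A sketch of that step; only the two default values come from the comments above, while the merge helper and the empty filePatterns default are assumptions:

```ts
// Types mirror the SearchOptions declaration above.
interface SearchOptions {
  topK?: number;
  minScore?: number;
  filePatterns?: string[];
}

// topK and minScore come from the doc comments; [] for filePatterns is assumed.
const DEFAULTS: Required<SearchOptions> = { topK: 10, minScore: 0.15, filePatterns: [] };

// Merge caller options over defaults, drop low-scoring results,
// and keep only the topK best.
function applyOptions<T extends { score: number }>(results: T[], opts: SearchOptions = {}): T[] {
  const { topK, minScore } = { ...DEFAULTS, ...opts };
  return results
    .filter((r) => r.score >= minScore)
    .sort((a, b) => b.score - a.score)
    .slice(0, topK);
}
```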
@@ -0,0 +1,11 @@
1
+ /**
2
+ * Domain Layer
3
+ *
4
+ * Contains the core business logic of RAGgrep:
5
+ * - Entities: Core data structures
6
+ * - Ports: Interfaces for external dependencies
7
+ * - Services: Pure business logic and algorithms
8
+ */
9
+ export * from './entities';
10
+ export * from './ports';
11
+ export * from './services';