@chatbot-packages/rag 0.1.0 → 0.2.0
- package/dist/chunking/index.d.ts +0 -0
- package/dist/chunking/index.js +0 -0
- package/dist/chunking/index.js.map +0 -0
- package/dist/embeddings/index.d.ts +54 -1
- package/dist/embeddings/index.js +155 -3
- package/dist/embeddings/index.js.map +1 -1
- package/dist/extractors/index.d.ts +0 -0
- package/dist/extractors/index.js +0 -0
- package/dist/extractors/index.js.map +0 -0
- package/dist/index.d.ts +0 -0
- package/dist/index.js +153 -3
- package/dist/index.js.map +1 -1
- package/dist/retrieval/index.d.ts +0 -0
- package/dist/retrieval/index.js +0 -0
- package/dist/retrieval/index.js.map +0 -0
- package/dist/types-CjnplPJD.d.ts +0 -0
- package/dist/vectorstore/index.d.ts +0 -0
- package/dist/vectorstore/index.js +0 -0
- package/dist/vectorstore/index.js.map +0 -0
- package/package.json +16 -16
package/dist/chunking/index.d.ts
CHANGED
File without changes

package/dist/chunking/index.js
CHANGED
File without changes

package/dist/chunking/index.js.map
CHANGED
File without changes

package/dist/embeddings/index.d.ts
CHANGED
@@ -89,6 +89,59 @@ declare const OPENAI_MODELS: {
     ADA_002: string;
 };
 
+/**
+ * HuggingFace Inference API Embeddings
+ *
+ * Uses HuggingFace's free Inference API for embeddings.
+ * - Free tier: 1,000 requests/day
+ * - GPU-powered (faster than local)
+ * - No model download needed
+ */
+
+interface HuggingFaceEmbeddingOptions {
+    /** HuggingFace API token (get free at hf.co/settings/tokens) */
+    apiToken: string;
+    /** Model name (default: BAAI/bge-base-en-v1.5) */
+    model?: string;
+    /** Batch size for processing (default: 32) */
+    batchSize?: number;
+    /** Request timeout in ms (default: 30000) */
+    timeout?: number;
+    /** Number of retries (default: 3) */
+    retries?: number;
+}
+declare class HuggingFaceEmbeddingBackend implements EmbeddingBackend {
+    private apiToken;
+    private model;
+    private batchSize;
+    private timeout;
+    private retries;
+    private apiUrl;
+    private dimensions;
+    private static MODEL_DIMENSIONS;
+    constructor(options: HuggingFaceEmbeddingOptions);
+    private queryAPI;
+    embed(text: string): Promise<EmbeddingResult>;
+    embedBatch(texts: string[]): Promise<EmbeddingResult[]>;
+    getDimensions(): number;
+    getModel(): string;
+}
+/**
+ * Available HuggingFace models
+ */
+declare const HUGGINGFACE_MODELS: {
+    /** BGE Large - Best quality (1024 dims) */
+    BGE_LARGE: string;
+    /** BGE Base - Good balance (768 dims) */
+    BGE_BASE: string;
+    /** BGE Small - Fastest (384 dims) */
+    BGE_SMALL: string;
+    /** MiniLM L6 - Very fast (384 dims) */
+    MINILM_L6: string;
+    /** MPNet - High quality (768 dims) */
+    MPNET: string;
+};
+
 /**
  * Embedding Backends
  *
@@ -100,4 +153,4 @@ declare const OPENAI_MODELS: {
  */
 declare function createEmbeddingBackend(options: EmbeddingOptions): EmbeddingBackend;
 
-export { LOCAL_MODELS, LocalEmbeddingBackend, type LocalEmbeddingOptions, OPENAI_MODELS, OpenAIEmbeddingBackend, type OpenAIEmbeddingOptions, createEmbeddingBackend };
+export { HUGGINGFACE_MODELS, HuggingFaceEmbeddingBackend, type HuggingFaceEmbeddingOptions, LOCAL_MODELS, LocalEmbeddingBackend, type LocalEmbeddingOptions, OPENAI_MODELS, OpenAIEmbeddingBackend, type OpenAIEmbeddingOptions, createEmbeddingBackend };
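For orientation, here is how the new surface in these typings could be used; a minimal sketch, assuming the package root re-exports the embeddings module and that a token arrives via a hypothetical HF_TOKEN environment variable (neither assumption comes from this diff):

import { HuggingFaceEmbeddingBackend, HUGGINGFACE_MODELS } from '@chatbot-packages/rag';

// apiToken is required by HuggingFaceEmbeddingOptions; HF_TOKEN is a stand-in name.
const backend = new HuggingFaceEmbeddingBackend({
  apiToken: process.env.HF_TOKEN ?? '',
  model: HUGGINGFACE_MODELS.BGE_SMALL, // 384-dim vectors per the comments above
});

// embed() resolves to an EmbeddingResult; getDimensions() reflects the chosen model.
const result = await backend.embed('What is retrieval-augmented generation?');
console.log(backend.getModel(), backend.getDimensions(), result.embedding.length);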
package/dist/embeddings/index.js
CHANGED

@@ -158,12 +158,158 @@ var OPENAI_MODELS = {
   ADA_002: "text-embedding-ada-002"
 };
 
+// src/embeddings/huggingface.ts
+var HuggingFaceEmbeddingBackend = class _HuggingFaceEmbeddingBackend {
+  apiToken;
+  model;
+  batchSize;
+  timeout;
+  retries;
+  apiUrl;
+  dimensions;
+  // Model dimension map
+  static MODEL_DIMENSIONS = {
+    "BAAI/bge-large-en-v1.5": 1024,
+    "BAAI/bge-base-en-v1.5": 768,
+    "BAAI/bge-small-en-v1.5": 384,
+    "sentence-transformers/all-MiniLM-L6-v2": 384,
+    "sentence-transformers/all-mpnet-base-v2": 768
+  };
+  constructor(options) {
+    this.apiToken = options.apiToken;
+    this.model = options.model || "BAAI/bge-base-en-v1.5";
+    this.batchSize = options.batchSize || 32;
+    this.timeout = options.timeout || 3e4;
+    this.retries = options.retries || 3;
+    this.apiUrl = `https://router.huggingface.co/hf-inference/models/${this.model}`;
+    this.dimensions = _HuggingFaceEmbeddingBackend.MODEL_DIMENSIONS[this.model] || 768;
+  }
+  async queryAPI(texts, attempt = 0) {
+    try {
+      const controller = new AbortController();
+      const timeoutId = setTimeout(() => controller.abort(), this.timeout);
+      const response = await fetch(this.apiUrl, {
+        method: "POST",
+        headers: {
+          "Authorization": `Bearer ${this.apiToken}`,
+          "Content-Type": "application/json"
+        },
+        body: JSON.stringify({
+          inputs: texts,
+          options: {
+            wait_for_model: true
+          }
+        }),
+        signal: controller.signal
+      });
+      clearTimeout(timeoutId);
+      if (response.ok) {
+        const data = await response.json();
+        return data;
+      }
+      if (response.status === 503) {
+        if (attempt < this.retries) {
+          const delay = Math.pow(2, attempt) * 1e3;
+          console.log(
+            `[HuggingFace] Model loading, retrying in ${delay}ms (attempt ${attempt + 1}/${this.retries})`
+          );
+          await new Promise((resolve) => setTimeout(resolve, delay));
+          return this.queryAPI(texts, attempt + 1);
+        }
+      } else if (response.status === 429) {
+        console.warn("[HuggingFace] Rate limit exceeded");
+      } else {
+        const errorText = await response.text();
+        console.warn(`[HuggingFace] API error ${response.status}: ${errorText}`);
+      }
+      return null;
+    } catch (error) {
+      if (error.name === "AbortError") {
+        console.warn(`[HuggingFace] Request timeout after ${this.timeout}ms`);
+      } else {
+        console.warn(`[HuggingFace] Request failed:`, error.message);
+      }
+      if (attempt < this.retries) {
+        const delay = Math.pow(2, attempt) * 1e3;
+        console.log(`[HuggingFace] Retrying in ${delay}ms (attempt ${attempt + 1}/${this.retries})`);
+        await new Promise((resolve) => setTimeout(resolve, delay));
+        return this.queryAPI(texts, attempt + 1);
+      }
+      return null;
+    }
+  }
+  async embed(text) {
+    const result = await this.queryAPI([text]);
+    if (result && result.length > 0) {
+      return {
+        embedding: result[0],
+        tokens: Math.ceil(text.length / 4)
+        // Rough estimate
+      };
+    }
+    throw new Error("HuggingFace Inference API failed after retries");
+  }
+  async embedBatch(texts) {
+    const results = [];
+    for (let i = 0; i < texts.length; i += this.batchSize) {
+      const batch = texts.slice(i, i + this.batchSize);
+      const embeddings = await this.queryAPI(batch);
+      if (!embeddings || embeddings.length !== batch.length) {
+        throw new Error(`HuggingFace API failed for batch ${i / this.batchSize + 1}`);
+      }
+      results.push(
+        ...embeddings.map((embedding, idx) => ({
+          embedding,
+          tokens: Math.ceil(batch[idx].length / 4)
+        }))
+      );
+    }
+    return results;
+  }
+  getDimensions() {
+    return this.dimensions;
+  }
+  getModel() {
+    return this.model;
+  }
+};
+var HUGGINGFACE_MODELS = {
+  /** BGE Large - Best quality (1024 dims) */
+  BGE_LARGE: "BAAI/bge-large-en-v1.5",
+  /** BGE Base - Good balance (768 dims) */
+  BGE_BASE: "BAAI/bge-base-en-v1.5",
+  /** BGE Small - Fastest (384 dims) */
+  BGE_SMALL: "BAAI/bge-small-en-v1.5",
+  /** MiniLM L6 - Very fast (384 dims) */
+  MINILM_L6: "sentence-transformers/all-MiniLM-L6-v2",
+  /** MPNet - High quality (768 dims) */
+  MPNET: "sentence-transformers/all-mpnet-base-v2"
+};
+
 // src/embeddings/index.ts
+var MODEL_ALIASES = {
+  // Local models (Xenova)
+  "bge-large": LOCAL_MODELS.BGE_LARGE,
+  "bge-base": LOCAL_MODELS.BGE_BASE,
+  "bge-small": LOCAL_MODELS.BGE_SMALL,
+  "minilm": LOCAL_MODELS.MINILM_L6,
+  "mpnet": LOCAL_MODELS.MPNET,
+  // HuggingFace models (BAAI)
+  "bge-large-hf": HUGGINGFACE_MODELS.BGE_LARGE,
+  "bge-base-hf": HUGGINGFACE_MODELS.BGE_BASE,
+  "bge-small-hf": HUGGINGFACE_MODELS.BGE_SMALL,
+  "minilm-hf": HUGGINGFACE_MODELS.MINILM_L6,
+  "mpnet-hf": HUGGINGFACE_MODELS.MPNET
+};
+function resolveModelName(model) {
+  if (!model) return void 0;
+  return MODEL_ALIASES[model.toLowerCase()] || model;
+}
 function createEmbeddingBackend(options) {
   switch (options.provider) {
     case "local":
       return new LocalEmbeddingBackend({
-        model: options.model,
+        model: resolveModelName(options.model),
         batchSize: options.batchSize
       });
     case "openai":
@@ -177,8 +323,12 @@ function createEmbeddingBackend(options) {
         batchSize: options.batchSize
       });
     case "huggingface":
-      return new LocalEmbeddingBackend({
-        model: options.model || "Xenova/all-MiniLM-L6-v2",
+      if (!options.apiKey) {
+        throw new Error("HuggingFace Inference API requires an API token (get free at hf.co/settings/tokens)");
+      }
+      return new HuggingFaceEmbeddingBackend({
+        apiToken: options.apiKey,
+        model: resolveModelName(options.model) || HUGGINGFACE_MODELS.BGE_BASE,
         batchSize: options.batchSize
       });
     default:
@@ -186,6 +336,8 @@ function createEmbeddingBackend(options) {
   }
 }
 export {
+  HUGGINGFACE_MODELS,
+  HuggingFaceEmbeddingBackend,
   LOCAL_MODELS,
   LocalEmbeddingBackend,
   OPENAI_MODELS,
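Taken together, the factory and alias changes above make provider selection work like this; a sketch under the same assumptions as before (root re-export, hypothetical HF_TOKEN variable; EmbeddingOptions fields beyond those used here are not shown in this diff):

import { createEmbeddingBackend } from '@chatbot-packages/rag';

// 'bge-small-hf' resolves through MODEL_ALIASES to 'BAAI/bge-small-en-v1.5';
// omitting apiKey now throws "HuggingFace Inference API requires an API token ...".
const hf = createEmbeddingBackend({
  provider: 'huggingface',
  apiKey: process.env.HF_TOKEN,
  model: 'bge-small-hf',
});

// The same alias without the '-hf' suffix selects the local Xenova model instead.
const local = createEmbeddingBackend({ provider: 'local', model: 'bge-small' });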
package/dist/embeddings/index.js.map
CHANGED

@@ -1 +1 @@
[Minified source map, one JSON line replaced: "sources" gains "../../src/embeddings/huggingface.ts", "sourcesContent" gains the corresponding TypeScript source (duplicating the compiled code shown above), and "mappings" is regenerated for the new output.]
package/dist/extractors/index.d.ts
CHANGED
File without changes

package/dist/extractors/index.js
CHANGED
File without changes

package/dist/extractors/index.js.map
CHANGED
File without changes

package/dist/index.d.ts
CHANGED
File without changes
package/dist/index.js
CHANGED

@@ -743,12 +743,158 @@ var OPENAI_MODELS = {
   ADA_002: "text-embedding-ada-002"
 };
 
+// src/embeddings/huggingface.ts
+var HuggingFaceEmbeddingBackend = class _HuggingFaceEmbeddingBackend {
+  apiToken;
+  model;
+  batchSize;
+  timeout;
+  retries;
+  apiUrl;
+  dimensions;
+  // Model dimension map
+  static MODEL_DIMENSIONS = {
+    "BAAI/bge-large-en-v1.5": 1024,
+    "BAAI/bge-base-en-v1.5": 768,
+    "BAAI/bge-small-en-v1.5": 384,
+    "sentence-transformers/all-MiniLM-L6-v2": 384,
+    "sentence-transformers/all-mpnet-base-v2": 768
+  };
+  constructor(options) {
+    this.apiToken = options.apiToken;
+    this.model = options.model || "BAAI/bge-base-en-v1.5";
+    this.batchSize = options.batchSize || 32;
+    this.timeout = options.timeout || 3e4;
+    this.retries = options.retries || 3;
+    this.apiUrl = `https://router.huggingface.co/hf-inference/models/${this.model}`;
+    this.dimensions = _HuggingFaceEmbeddingBackend.MODEL_DIMENSIONS[this.model] || 768;
+  }
+  async queryAPI(texts, attempt = 0) {
+    try {
+      const controller = new AbortController();
+      const timeoutId = setTimeout(() => controller.abort(), this.timeout);
+      const response = await fetch(this.apiUrl, {
+        method: "POST",
+        headers: {
+          "Authorization": `Bearer ${this.apiToken}`,
+          "Content-Type": "application/json"
+        },
+        body: JSON.stringify({
+          inputs: texts,
+          options: {
+            wait_for_model: true
+          }
+        }),
+        signal: controller.signal
+      });
+      clearTimeout(timeoutId);
+      if (response.ok) {
+        const data = await response.json();
+        return data;
+      }
+      if (response.status === 503) {
+        if (attempt < this.retries) {
+          const delay = Math.pow(2, attempt) * 1e3;
+          console.log(
+            `[HuggingFace] Model loading, retrying in ${delay}ms (attempt ${attempt + 1}/${this.retries})`
+          );
+          await new Promise((resolve) => setTimeout(resolve, delay));
+          return this.queryAPI(texts, attempt + 1);
+        }
+      } else if (response.status === 429) {
+        console.warn("[HuggingFace] Rate limit exceeded");
+      } else {
+        const errorText = await response.text();
+        console.warn(`[HuggingFace] API error ${response.status}: ${errorText}`);
+      }
+      return null;
+    } catch (error) {
+      if (error.name === "AbortError") {
+        console.warn(`[HuggingFace] Request timeout after ${this.timeout}ms`);
+      } else {
+        console.warn(`[HuggingFace] Request failed:`, error.message);
+      }
+      if (attempt < this.retries) {
+        const delay = Math.pow(2, attempt) * 1e3;
+        console.log(`[HuggingFace] Retrying in ${delay}ms (attempt ${attempt + 1}/${this.retries})`);
+        await new Promise((resolve) => setTimeout(resolve, delay));
+        return this.queryAPI(texts, attempt + 1);
+      }
+      return null;
+    }
+  }
+  async embed(text) {
+    const result = await this.queryAPI([text]);
+    if (result && result.length > 0) {
+      return {
+        embedding: result[0],
+        tokens: Math.ceil(text.length / 4)
+        // Rough estimate
+      };
+    }
+    throw new Error("HuggingFace Inference API failed after retries");
+  }
+  async embedBatch(texts) {
+    const results = [];
+    for (let i = 0; i < texts.length; i += this.batchSize) {
+      const batch = texts.slice(i, i + this.batchSize);
+      const embeddings = await this.queryAPI(batch);
+      if (!embeddings || embeddings.length !== batch.length) {
+        throw new Error(`HuggingFace API failed for batch ${i / this.batchSize + 1}`);
+      }
+      results.push(
+        ...embeddings.map((embedding, idx) => ({
+          embedding,
+          tokens: Math.ceil(batch[idx].length / 4)
+        }))
+      );
+    }
+    return results;
+  }
+  getDimensions() {
+    return this.dimensions;
+  }
+  getModel() {
+    return this.model;
+  }
+};
+var HUGGINGFACE_MODELS = {
+  /** BGE Large - Best quality (1024 dims) */
+  BGE_LARGE: "BAAI/bge-large-en-v1.5",
+  /** BGE Base - Good balance (768 dims) */
+  BGE_BASE: "BAAI/bge-base-en-v1.5",
+  /** BGE Small - Fastest (384 dims) */
+  BGE_SMALL: "BAAI/bge-small-en-v1.5",
+  /** MiniLM L6 - Very fast (384 dims) */
+  MINILM_L6: "sentence-transformers/all-MiniLM-L6-v2",
+  /** MPNet - High quality (768 dims) */
+  MPNET: "sentence-transformers/all-mpnet-base-v2"
+};
+
 // src/embeddings/index.ts
+var MODEL_ALIASES = {
+  // Local models (Xenova)
+  "bge-large": LOCAL_MODELS.BGE_LARGE,
+  "bge-base": LOCAL_MODELS.BGE_BASE,
+  "bge-small": LOCAL_MODELS.BGE_SMALL,
+  "minilm": LOCAL_MODELS.MINILM_L6,
+  "mpnet": LOCAL_MODELS.MPNET,
+  // HuggingFace models (BAAI)
+  "bge-large-hf": HUGGINGFACE_MODELS.BGE_LARGE,
+  "bge-base-hf": HUGGINGFACE_MODELS.BGE_BASE,
+  "bge-small-hf": HUGGINGFACE_MODELS.BGE_SMALL,
+  "minilm-hf": HUGGINGFACE_MODELS.MINILM_L6,
+  "mpnet-hf": HUGGINGFACE_MODELS.MPNET
+};
+function resolveModelName(model) {
+  if (!model) return void 0;
+  return MODEL_ALIASES[model.toLowerCase()] || model;
+}
 function createEmbeddingBackend(options) {
   switch (options.provider) {
     case "local":
       return new LocalEmbeddingBackend({
-        model: options.model,
+        model: resolveModelName(options.model),
         batchSize: options.batchSize
       });
     case "openai":
@@ -762,8 +908,12 @@ function createEmbeddingBackend(options) {
         batchSize: options.batchSize
      });
     case "huggingface":
-      return new LocalEmbeddingBackend({
-        model: options.model || "Xenova/all-MiniLM-L6-v2",
+      if (!options.apiKey) {
+        throw new Error("HuggingFace Inference API requires an API token (get free at hf.co/settings/tokens)");
+      }
+      return new HuggingFaceEmbeddingBackend({
+        apiToken: options.apiKey,
+        model: resolveModelName(options.model) || HUGGINGFACE_MODELS.BGE_BASE,
         batchSize: options.batchSize
       });
     default:
package/dist/index.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/rag-service.ts","../src/extractors/base.ts","../src/extractors/chm.ts","../src/extractors/html.ts","../src/extractors/markdown.ts","../src/extractors/index.ts","../src/chunking/header-aware-chunker.ts","../src/embeddings/local.ts","../src/embeddings/openai.ts","../src/embeddings/index.ts","../src/vectorstore/sqlite.ts","../src/vectorstore/postgres.ts","../src/vectorstore/index.ts","../src/retrieval/hybrid.ts"],"sourcesContent":["/**\n * RAG Service\n *\n * Main service that orchestrates document indexing and question answering.\n */\n\nimport { generateId } from '@chatbot-packages/utils';\nimport { OpenAIProvider, AnthropicProvider, type BaseProvider } from '@chatbot-packages/ai';\nimport { extractDocuments } from './extractors/index.js';\nimport { HeaderAwareChunker } from './chunking/index.js';\nimport { createEmbeddingBackend } from './embeddings/index.js';\nimport { createVectorStore } from './vectorstore/index.js';\nimport { HybridRetriever } from './retrieval/index.js';\nimport type {\n RAGOptions,\n RAGResponse,\n RAGService,\n RetrievalOptions,\n Citation,\n DocumentChunk,\n EmbeddingBackend,\n VectorStore,\n} from './types.js';\n\nexport class RAG implements RAGService {\n private vectorStore: VectorStore;\n private embeddings: EmbeddingBackend;\n private retriever: HybridRetriever;\n private llm: BaseProvider;\n private chunker: HeaderAwareChunker;\n private systemPrompt: string;\n private initialized = false;\n\n constructor(private options: RAGOptions) {\n // Initialize embedding backend\n this.embeddings = createEmbeddingBackend(options.embeddings);\n\n // Initialize vector store\n this.vectorStore = createVectorStore({\n ...options.vectorStore,\n dimensions: this.embeddings.getDimensions(),\n });\n\n // Initialize retriever\n this.retriever = new HybridRetriever({\n vectorStore: this.vectorStore,\n embeddings: this.embeddings,\n defaultTopK: options.retrieval?.topK || 8,\n denseWeight: options.retrieval?.denseWeight,\n sparseWeight: options.retrieval?.sparseWeight,\n rrfK: options.retrieval?.rrfK,\n });\n\n // Initialize chunker\n this.chunker = new HeaderAwareChunker(options.chunking);\n\n // Initialize LLM\n this.llm = this.createLLM(options.llm);\n\n // Set system prompt\n this.systemPrompt =\n options.systemPrompt ||\n `You are a helpful documentation assistant. Answer questions based on the provided context.\nAlways cite your sources using [1], [2], etc. 
format when referencing specific information.\nIf the context doesn't contain enough information to answer, say so clearly.\nBe concise and accurate.`;\n }\n\n private createLLM(config: RAGOptions['llm']): BaseProvider {\n switch (config.provider) {\n case 'openai':\n case 'cerebras':\n case 'groq':\n return new OpenAIProvider({\n apiKey: config.apiKey || process.env.OPENAI_API_KEY || '',\n baseUrl: config.baseUrl,\n model: config.model || 'gpt-4o-mini',\n });\n\n case 'anthropic':\n return new AnthropicProvider({\n apiKey: config.apiKey || process.env.ANTHROPIC_API_KEY || '',\n model: config.model || 'claude-sonnet-4-20250514',\n });\n\n default:\n throw new Error(`Unknown LLM provider: ${config.provider}`);\n }\n }\n\n /**\n * Initialize the RAG service\n */\n async initialize(): Promise<void> {\n if (this.initialized) return;\n await this.vectorStore.initialize();\n this.initialized = true;\n }\n\n /**\n * Index documents from a path\n */\n async index(\n path: string,\n options?: { sourceId?: string }\n ): Promise<{ documentsIndexed: number; chunksCreated: number }> {\n await this.initialize();\n\n const sourceId = options?.sourceId || generateId('source');\n console.log(`[RAG] Indexing documents from: ${path}`);\n\n // Extract documents\n const documents = await extractDocuments(path);\n console.log(`[RAG] Extracted ${documents.length} documents`);\n\n let totalChunks = 0;\n\n for (const doc of documents) {\n const documentId = generateId('doc');\n\n // Chunk the document\n const { chunks, stats } = this.chunker.chunk(doc, documentId);\n console.log(\n `[RAG] Chunked \"${doc.title}\": ${stats.totalChunks} chunks (avg: ${stats.avgChunkSize} chars)`\n );\n\n if (chunks.length === 0) continue;\n\n // Generate embeddings\n console.log(`[RAG] Generating embeddings for ${chunks.length} chunks...`);\n const texts = chunks.map((c) => c.text);\n const embeddingResults = await this.embeddings.embedBatch(texts);\n\n // Add embeddings to chunks\n for (let i = 0; i < chunks.length; i++) {\n chunks[i].embedding = embeddingResults[i].embedding;\n }\n\n // Insert into vector store\n await this.vectorStore.insert(chunks);\n totalChunks += chunks.length;\n }\n\n console.log(`[RAG] Indexing complete: ${documents.length} docs, ${totalChunks} chunks`);\n\n return {\n documentsIndexed: documents.length,\n chunksCreated: totalChunks,\n };\n }\n\n /**\n * Ask a question and get an answer with citations\n */\n async ask(question: string, options?: RetrievalOptions): Promise<RAGResponse> {\n await this.initialize();\n\n const startTime = Date.now();\n\n // Retrieve relevant chunks\n const retrievalStart = Date.now();\n const { results, stats } = await this.retriever.search(question, options);\n const retrievalTime = Date.now() - retrievalStart;\n\n if (results.length === 0) {\n return {\n question,\n answer: \"I couldn't find any relevant information in the documentation to answer your question.\",\n citations: [],\n context: [],\n metadata: {\n totalTime: Date.now() - startTime,\n retrievalTime,\n generationTime: 0,\n cached: false,\n model: this.llm.name || 'unknown',\n },\n };\n }\n\n // Build context from retrieved chunks\n const context = results.map((r) => r.chunk);\n const contextText = this.buildContextText(context);\n\n // Generate answer\n const generationStart = Date.now();\n const answer = await this.generateAnswer(question, contextText);\n const generationTime = Date.now() - generationStart;\n\n // Extract citations from the answer\n const citations = this.extractCitations(answer, context);\n\n 
return {\n question,\n answer,\n citations,\n context,\n metadata: {\n totalTime: Date.now() - startTime,\n retrievalTime,\n generationTime,\n cached: false,\n model: this.llm.name || 'unknown',\n },\n };\n }\n\n /**\n * Build context text from chunks\n */\n private buildContextText(chunks: DocumentChunk[]): string {\n return chunks\n .map((chunk, i) => {\n const header = chunk.metadata.sectionPath || chunk.metadata.headingH1 || 'Document';\n return `[${i + 1}] ${header}\\n${chunk.text}`;\n })\n .join('\\n\\n---\\n\\n');\n }\n\n /**\n * Generate answer using LLM\n */\n private async generateAnswer(question: string, context: string): Promise<string> {\n const prompt = `Based on the following documentation context, answer the user's question.\nCite sources using [1], [2], etc. format when referencing specific information.\n\nCONTEXT:\n${context}\n\nQUESTION: ${question}\n\nANSWER:`;\n\n const response = await this.llm.complete({\n messages: [\n {\n id: generateId('msg'),\n role: 'user',\n content: [{ type: 'text', text: prompt }],\n timestamp: new Date(),\n },\n ],\n systemPrompt: this.systemPrompt,\n maxTokens: 1000,\n });\n\n return response.content;\n }\n\n /**\n * Extract citations from answer text\n */\n private extractCitations(answer: string, context: DocumentChunk[]): Citation[] {\n const citations: Citation[] = [];\n const citationRegex = /\\[(\\d+)\\]/g;\n const matches = answer.matchAll(citationRegex);\n const seenIndexes = new Set<number>();\n\n for (const match of matches) {\n const index = parseInt(match[1], 10);\n if (!seenIndexes.has(index) && index > 0 && index <= context.length) {\n seenIndexes.add(index);\n const chunk = context[index - 1];\n citations.push({\n index,\n chunkId: chunk.id,\n sectionPath: chunk.metadata.sectionPath,\n heading:\n chunk.metadata.headingH3 || chunk.metadata.headingH2 || chunk.metadata.headingH1,\n snippet: chunk.text.substring(0, 200) + (chunk.text.length > 200 ? '...' : ''),\n });\n }\n }\n\n return citations.sort((a, b) => a.index - b.index);\n }\n\n /**\n * Get document count\n */\n async getDocumentCount(): Promise<number> {\n // This is a simplified implementation\n // In production, you'd query the documents table\n return 0;\n }\n\n /**\n * Get chunk count\n */\n async getChunkCount(): Promise<number> {\n // This is a simplified implementation\n return 0;\n }\n\n /**\n * Clear all indexed data\n */\n async clear(): Promise<void> {\n // This would need to be implemented per vector store\n console.log('[RAG] Clear not implemented yet');\n }\n\n /**\n * Close connections\n */\n async close(): Promise<void> {\n await this.vectorStore.close();\n }\n}\n\n/**\n * Create a RAG service instance\n */\nexport async function createRAG(options: RAGOptions): Promise<RAG> {\n const rag = new RAG(options);\n await rag.initialize();\n return rag;\n}\n","/**\n * Base Extractor\n */\n\nimport type { ExtractedDocument, ExtractorOptions, ExtractorType } from '../types.js';\n\nexport abstract class BaseExtractor {\n protected options: ExtractorOptions;\n\n constructor(options: ExtractorOptions) {\n this.options = options;\n }\n\n /** Get the extractor type */\n abstract getType(): ExtractorType;\n\n /** Check if this extractor can handle the given path */\n abstract canHandle(path: string): boolean;\n\n /** Extract documents from the source */\n abstract extract(): Promise<ExtractedDocument[]>;\n\n /** Get file extension */\n protected getExtension(path: string): string {\n const parts = path.split('.');\n return parts.length > 1 ? 
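A minimal end-to-end sketch, assuming the package root re-exports createRAG (field names mirror the embedded rag-service.ts; the authoritative shapes live in src/types.ts, which is not embedded in this map):

import { createRAG } from '@chatbot-packages/rag';

// Sketch only - option names follow the embedded source, values are examples.
const rag = await createRAG({
  embeddings: { provider: 'local', model: 'Xenova/bge-base-en-v1.5' },
  vectorStore: { type: 'sqlite', connectionString: './index.db' },
  retrieval: { topK: 8 },
  chunking: { chunkSize: 512, chunkOverlap: 50 },
  llm: { provider: 'openai', model: 'gpt-4o-mini' },
});

await rag.index('./docs'); // extract -> chunk -> embed -> insert
const res = await rag.ask('How do I configure logging?');
console.log(res.answer, res.citations.map((c) => c.sectionPath));
await rag.close();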
src/extractors/ — BaseExtractor carries the shared helpers (extension parsing, CRLF normalization, whitespace cleanup). CHMExtractor unpacks a .chm archive into a temp directory via 7z (falling back to 7za, and failing with install hints for p7zip-full, brew p7zip, or 7-zip.org), runs every extracted .html/.htm file through HTMLExtractor, tags each result with the source CHM name, and removes the temp directory afterwards. HTMLExtractor loads files with cheerio, strips script/style/nav/footer/header/aside/meta/link tags, takes the title from the title tag, the first h1, or the filename, and converts the DOM with html-to-text so h1–h6 become #-prefixed headings; it also walks directories recursively. MarkdownExtractor reads .md/.markdown files, pulling the title from the first hash-style or underline-style H1, and skips hidden directories and node_modules when scanning. extractDocuments(sourcePath) dispatches on extension for single files (chm, html/htm, md/markdown, otherwise an error) and, for directories, concatenates the HTML and Markdown extractors' results.
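For instance, assuming extractDocuments is re-exported from the package root (the embedded rag-service.ts imports it from ./extractors/index.js):

import { extractDocuments } from '@chatbot-packages/rag';

// Requires 7z on PATH for .chm input, per the extractor notes above.
const docs = await extractDocuments('./manual.chm');
for (const doc of docs) {
  console.log(doc.title, doc.format, `${doc.content.length} chars`);
}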
src/chunking/header-aware-chunker.ts — HeaderAwareChunker (defaults: chunkSize 512, chunkOverlap 50, minChunkSize 100, maxChunkSize 800, splitting on H1/H2) first splits content into sections at markdown headings while tracking an h1 > h2 > h3 sectionPath, then packs paragraphs into chunks: a section within the max budget stays whole, oversized paragraphs are re-split on sentence boundaries, and the tail paragraphs of each finished chunk are carried into the next as overlap. Sizes use a rough estimate of one token per four characters, and every chunk records sectionPath, the heading trail, sourcePath, and its index in ChunkMetadata.
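A short sketch of the chunker on its own (document literal abbreviated; field names follow the embedded source, and minChunkSize is lowered so the tiny demo sections survive):

import { HeaderAwareChunker } from '@chatbot-packages/rag';

const chunker = new HeaderAwareChunker({ chunkSize: 512, chunkOverlap: 50, minChunkSize: 0 });
const { chunks, stats } = chunker.chunk(
  {
    path: 'guide.md',
    title: 'Guide',
    content: '# Install\n\nRun the installer.\n\n## Linux\n\nUse the tarball.',
    format: 'markdown',
    metadata: {},
  },
  'doc_1'
);
// chunks[i].metadata.sectionPath reads like "Install > Linux".
console.log(stats.totalChunks, stats.avgChunkSize);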
src/embeddings/ — LocalEmbeddingBackend lazily imports @xenova/transformers, builds a quantized feature-extraction pipeline, and mean-pools normalized embeddings; BGE models get the query prefix "Represent this sentence for searching relevant passages:". LOCAL_MODELS lists the supported Xenova checkpoints (bge-large/base/small, all-MiniLM-L6/L12, all-mpnet) with dimensions from 384 to 1024. OpenAIEmbeddingBackend posts batches of up to 100 inputs to {baseUrl}/embeddings, sends a dimensions parameter for text-embedding-3-* models, re-sorts the response by index, and surfaces API failures with status and body; OPENAI_MODELS covers text-embedding-3-large (3072 dims), text-embedding-3-small (1536), and ada-002 (1536). In this bundle, createEmbeddingBackend handles providers local and openai (the latter requiring an apiKey) and maps huggingface to the local backend with Xenova/all-MiniLM-L6-v2 as the default model.
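Embedding a single query with the local backend (names per the embedded local.ts; the first call downloads the model):

import { LocalEmbeddingBackend, LOCAL_MODELS } from '@chatbot-packages/rag';

const backend = new LocalEmbeddingBackend({ model: LOCAL_MODELS.BGE_SMALL });
const { embedding, tokens } = await backend.embed('how do I reset my password?');
console.log(backend.getDimensions(), embedding.length); // 384 384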
src/vectorstore/ — SQLiteVectorStore wraps better-sqlite3 in WAL mode, stores embeddings as JSON text next to an FTS5 mirror table kept in sync by insert/delete triggers, answers dense queries by computing cosine similarity in JavaScript over every stored embedding, and answers sparse queries through bm25() after escaping the query into OR-joined quoted terms (FTS failures return an empty result set rather than throwing). PostgresVectorStore targets pgvector: initialize() enables the vector and pg_trgm extensions, creates documents and chunks tables, and builds an HNSW cosine index (m = 16, ef_construction = 64) plus trigram and metadata GIN indexes; dense search orders by the <=> operator, sparse search ranks by similarity(text, $1) over a % match, and upsertDocument/getStats round out the API. Despite a comment in the embedded source about using COPY for bulk insert, inserts actually run row by row inside a BEGIN/COMMIT transaction with ON CONFLICT (id) DO UPDATE. createVectorStore selects sqlite, postgres (connection string required), or memory, the last being SQLite at ':memory:'.
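An in-memory round trip through the SQLite store, assuming it is re-exported from the package root (chunk fields follow the embedded usage):

import { SQLiteVectorStore } from '@chatbot-packages/rag';

const store = new SQLiteVectorStore({ path: ':memory:', dimensions: 3 });
await store.initialize();
await store.insert([
  {
    id: 'c1',
    documentId: 'd1',
    text: 'hello world',
    embedding: [1, 0, 0],
    metadata: { sectionPath: '', sourcePath: 'a.md', chunkIndex: 0 },
    createdAt: new Date(),
  },
]);
const [hit] = await store.denseSearch([1, 0, 0], 1); // cosine score 1
console.log(hit.chunk.id, hit.score);
await store.close();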
src/retrieval/hybrid.ts — HybridRetriever runs dense and sparse searches in parallel over a candidate pool of 3 × topK (capped at 100), then merges them with weighted Reciprocal Rank Fusion: each list contributes weight / (k + rank + 1) per result, with k = 60, dense weight 0.7, and sparse weight 0.3 by default, the weights overridable per query. A failed leg logs a warning and contributes nothing, so retrieval degrades gracefully to whichever search succeeded. DenseRetriever is a simpler embedding-only variant that reports sparseCount 0.
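The fusion rule as a self-contained sketch (a hypothetical standalone function; the package computes this inside HybridRetriever):

// Weighted Reciprocal Rank Fusion: each ranked list contributes
// weight / (k + rank + 1) to a result's combined score.
function rrfFuse(
  dense: string[],
  sparse: string[],
  k = 60,
  wDense = 0.7,
  wSparse = 0.3
): Array<[string, number]> {
  const scores = new Map<string, number>();
  dense.forEach((id, rank) => scores.set(id, (scores.get(id) ?? 0) + wDense / (k + rank + 1)));
  sparse.forEach((id, rank) => scores.set(id, (scores.get(id) ?? 0) + wSparse / (k + rank + 1)));
  return [...scores.entries()].sort((a, b) => b[1] - a[1]);
}

// A chunk ranked first in both lists scores 0.7/61 + 0.3/61, about 0.0164.
console.log(rrfFuse(['a', 'b'], ['b', 'a']));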
The remainder of the line is the map's machine-generated, VLQ-encoded mappings string.
AQ,UAAU,GAAG;AAC9B,2BAAiB;AAAA,YACf,GAAG;AAAA,YACH,IAAI,QAAQ;AAAA,YACZ,aAAa,eAAe,KACxB,GAAG,eAAe,WAAW,MAAM,QAAQ,IAAI,KAC/C,QAAQ;AAAA,UACd;AAAA,QACF;AAEA,oBAAY,KAAK,IAAI;AAAA,MACvB,OAAO;AACL,oBAAY,KAAK,IAAI;AAAA,MACvB;AAAA,IACF;AAGA,QAAI,YAAY,SAAS,GAAG;AAC1B,eAAS,KAAK;AAAA,QACZ,MAAM,YAAY,KAAK,IAAI,EAAE,KAAK;AAAA,QAClC,SAAS,EAAE,GAAG,eAAe;AAAA,MAC/B,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,MAAsD;AAEzE,UAAM,QAAQ,KAAK,MAAM,mBAAmB;AAC5C,QAAI,OAAO;AACT,aAAO;AAAA,QACL,OAAO,MAAM,CAAC,EAAE;AAAA,QAChB,MAAM,MAAM,CAAC,EAAE,KAAK;AAAA,MACtB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aACN,SACA,YACA,YACiB;AACjB,UAAM,EAAE,MAAM,QAAQ,IAAI;AAC1B,UAAM,SAA0B,CAAC;AAGjC,QAAI,KAAK,eAAe,IAAI,KAAK,KAAK,QAAQ,cAAc;AAC1D,UAAI,KAAK,eAAe,IAAI,KAAK,KAAK,QAAQ,cAAc;AAC1D,eAAO,KAAK,KAAK,YAAY,MAAM,SAAS,YAAY,YAAY,CAAC,CAAC;AAAA,MACxE;AACA,aAAO;AAAA,IACT;AAGA,UAAM,aAAa,KAAK,MAAM,OAAO;AACrC,QAAI,eAAyB,CAAC;AAC9B,QAAI,gBAAgB;AAEpB,eAAW,QAAQ,YAAY;AAC7B,YAAM,aAAa,KAAK,eAAe,IAAI;AAG3C,UAAI,aAAa,KAAK,QAAQ,cAAc;AAE1C,YAAI,aAAa,SAAS,GAAG;AAC3B,iBAAO;AAAA,YACL,KAAK;AAAA,cACH,aAAa,KAAK,MAAM;AAAA,cACxB;AAAA,cACA;AAAA,cACA;AAAA,cACA,OAAO;AAAA,YACT;AAAA,UACF;AACA,yBAAe,CAAC;AAChB,0BAAgB;AAAA,QAClB;AAGA,cAAM,iBAAiB,KAAK,oBAAoB,IAAI;AACpD,mBAAW,iBAAiB,gBAAgB;AAC1C,iBAAO;AAAA,YACL,KAAK,YAAY,eAAe,SAAS,YAAY,YAAY,OAAO,MAAM;AAAA,UAChF;AAAA,QACF;AACA;AAAA,MACF;AAGA,UAAI,gBAAgB,aAAa,KAAK,QAAQ,WAAW;AAEvD,YAAI,aAAa,SAAS,GAAG;AAC3B,iBAAO;AAAA,YACL,KAAK;AAAA,cACH,aAAa,KAAK,MAAM;AAAA,cACxB;AAAA,cACA;AAAA,cACA;AAAA,cACA,OAAO;AAAA,YACT;AAAA,UACF;AAGA,gBAAM,cAAc,KAAK,eAAe,YAAY;AACpD,yBAAe,cAAc,CAAC,aAAa,IAAI,IAAI,CAAC,IAAI;AACxD,0BAAgB,KAAK,eAAe,aAAa,KAAK,MAAM,CAAC;AAAA,QAC/D,OAAO;AACL,yBAAe,CAAC,IAAI;AACpB,0BAAgB;AAAA,QAClB;AAAA,MACF,OAAO;AACL,qBAAa,KAAK,IAAI;AACtB,yBAAiB;AAAA,MACnB;AAAA,IACF;AAGA,QAAI,aAAa,SAAS,KAAK,iBAAiB,KAAK,QAAQ,cAAc;AACzE,aAAO;AAAA,QACL,KAAK,YAAY,aAAa,KAAK,MAAM,GAAG,SAAS,YAAY,YAAY,OAAO,MAAM;AAAA,MAC5F;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,oBAAoB,WAA6B;AACvD,UAAM,YAAY,UAAU,MAAM,gBAAgB,KAAK,CAAC,SAAS;AACjE,UAAM,SAAmB,CAAC;AAC1B,QAAI,eAAyB,CAAC;AAC9B,QAAI,gBAAgB;AAEpB,eAAW,YAAY,WAAW;AAChC,YAAM,iBAAiB,KAAK,eAAe,QAAQ;AAEnD,UAAI,gBAAgB,iBAAiB,KAAK,QAAQ,aAAa,aAAa,SAAS,GAAG;AACtF,eAAO,KAAK,aAAa,KAAK,GAAG,EAAE,KAAK,CAAC;AACzC,uBAAe,CAAC;AAChB,wBAAgB;AAAA,MAClB;AAEA,mBAAa,KAAK,SAAS,KAAK,CAAC;AACjC,uBAAiB;AAAA,IACnB;AAEA,QAAI,aAAa,SAAS,GAAG;AAC3B,aAAO,KAAK,aAAa,KAAK,GAAG,EAAE,KAAK,CAAC;AAAA,IAC3C;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,eAAwC;AAC7D,QAAI,CAAC,KAAK,QAAQ,gBAAgB,cAAc,WAAW,GAAG;AAC5D,aAAO;AAAA,IACT;AAGA,UAAM,WAAW,CAAC,GAAG,aAAa,EAAE,QAAQ;AAC5C,UAAM,eAAyB,CAAC;AAChC,QAAI,SAAS;AAEb,eAAW,QAAQ,UAAU;AAC3B,YAAM,aAAa,KAAK,eAAe,IAAI;AAC3C,UAAI,SAAS,aAAa,KAAK,QAAQ,cAAc;AACnD;AAAA,MACF;AACA,mBAAa,QAAQ,IAAI;AACzB,gBAAU;AAAA,IACZ;AAEA,WAAO,aAAa,SAAS,IAAI,aAAa,KAAK,MAAM,IAAI;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA,EAKQ,YACN,MACA,SACA,YACA,YACA,OACe;AACf,UAAM,WAA0B;AAAA,MAC9B,aAAa,QAAQ;AAAA,MACrB,WAAW,QAAQ;AAAA,MACnB,WAAW,QAAQ;AAAA,MACnB,WAAW,QAAQ;AAAA,MACnB;AAAA,MACA,YAAY;AAAA,IACd;AAEA,WAAO;AAAA,MACL,IAAI,WAAW,OAAO;AAAA,MACtB;AAAA,MACA,MAAM,KAAK,KAAK;AAAA,MAChB;AAAA,MACA,WAAW,oBAAI,KAAK;AAAA,IACtB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,MAAsB;AAC3C,WAAO,KAAK,KAAK,KAAK,SAAS,CAAC;AAAA,EAClC;AACF;;;ACvUA,IAAI,WAAgB;AACpB,IAAI,WAAgB;AAEpB,eAAe,aAAa,OAAe;AACzC,MAAI,CAAC,UAAU;AACb,UAAM,eAAe,MAAM,OAAO,sBAAsB;AACxD,eAAW,aAAa;AAAA,EAC1B;AAEA,MAAI,CAAC,UAAU;AACb,YAAQ,IAAI,oCAAoC,KAAK,KAAK;AAC1D,eAAW,MAAM,SAAS,sBAAsB,OAAO;AAAA,MACrD,WAAW;AAAA;AAAA,IACb,CAAC;AACD,YAAQ,IAAI,6CAA6C;AAAA,EAC3D;AAEA,SAAO;AAC
T;AAWO,IAAM,wBAAN,MAAM,uBAAkD;AAAA,EACrD;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGR,OAAe,mBAA2C;AAAA,IACxD,4BAA4B;AAAA,IAC5B,2BAA2B;AAAA,IAC3B,4BAA4B;AAAA,IAC5B,2BAA2B;AAAA,IAC3B,4BAA4B;AAAA,IAC5B,4BAA4B;AAAA,EAC9B;AAAA,EAEA,YAAY,SAAiC;AAE3C,SAAK,QAAQ,SAAS,SAAS;AAC/B,SAAK,YAAY,SAAS,aAAa;AACvC,SAAK,YAAY,SAAS,aAAa;AACvC,SAAK,aAAa,uBAAsB,iBAAiB,KAAK,KAAK,KAAK;AAAA,EAC1E;AAAA,EAEA,MAAM,MAAM,MAAwC;AAClD,UAAMC,YAAW,MAAM,aAAa,KAAK,KAAK;AAG9C,UAAM,gBAAgB,KAAK,MAAM,SAAS,KAAK,IAC3C,4DAA4D,IAAI,KAChE;AAEJ,UAAM,SAAS,MAAMA,UAAS,eAAe;AAAA,MAC3C,SAAS;AAAA,MACT,WAAW,KAAK;AAAA,IAClB,CAAC;AAGD,UAAM,YAAY,MAAM,KAAK,OAAO,IAAoB;AAExD,WAAO;AAAA,MACL;AAAA,MACA,QAAQ,KAAK,KAAK,KAAK,SAAS,CAAC;AAAA;AAAA,IACnC;AAAA,EACF;AAAA,EAEA,MAAM,WAAW,OAA6C;AAC5D,UAAM,UAA6B,CAAC;AAGpC,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,KAAK,WAAW;AACrD,YAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,KAAK,SAAS;AAC/C,YAAM,eAAe,MAAM,QAAQ,IAAI,MAAM,IAAI,CAAC,SAAS,KAAK,MAAM,IAAI,CAAC,CAAC;AAC5E,cAAQ,KAAK,GAAG,YAAY;AAAA,IAC9B;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,gBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAmB;AACjB,WAAO,KAAK;AAAA,EACd;AACF;AAKO,IAAM,eAAe;AAAA;AAAA,EAE1B,WAAW;AAAA;AAAA,EAEX,UAAU;AAAA;AAAA,EAEV,WAAW;AAAA;AAAA,EAEX,WAAW;AAAA;AAAA,EAEX,YAAY;AAAA;AAAA,EAEZ,OAAO;AACT;;;ACtGO,IAAM,yBAAN,MAAM,wBAAmD;AAAA,EACtD;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGR,OAAe,mBAA2C;AAAA,IACxD,0BAA0B;AAAA,IAC1B,0BAA0B;AAAA,IAC1B,0BAA0B;AAAA,EAC5B;AAAA,EAEA,YAAY,SAAiC;AAC3C,SAAK,SAAS,QAAQ;AACtB,SAAK,QAAQ,QAAQ,SAAS;AAC9B,SAAK,aACH,QAAQ,cAAc,wBAAuB,iBAAiB,KAAK,KAAK,KAAK;AAC/E,SAAK,YAAY,QAAQ,aAAa;AACtC,SAAK,UAAU,QAAQ,WAAW;AAAA,EACpC;AAAA,EAEA,MAAM,MAAM,MAAwC;AAClD,UAAM,UAAU,MAAM,KAAK,WAAW,CAAC,IAAI,CAAC;AAC5C,WAAO,QAAQ,CAAC;AAAA,EAClB;AAAA,EAEA,MAAM,WAAW,OAA6C;AAC5D,UAAM,aAAgC,CAAC;AAGvC,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,KAAK,WAAW;AACrD,YAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,KAAK,SAAS;AAC/C,YAAM,eAAe,MAAM,KAAK,QAAQ,KAAK;AAC7C,iBAAW,KAAK,GAAG,YAAY;AAAA,IACjC;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,QAAQ,OAA6C;AACjE,UAAM,OAAgC;AAAA,MACpC,OAAO,KAAK;AAAA,MACZ,OAAO;AAAA,IACT;AAGA,QAAI,KAAK,MAAM,WAAW,mBAAmB,KAAK,KAAK,YAAY;AACjE,WAAK,aAAa,KAAK;AAAA,IACzB;AAEA,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,OAAO,eAAe;AAAA,MACzD,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,eAAe,UAAU,KAAK,MAAM;AAAA,MACtC;AAAA,MACA,MAAM,KAAK,UAAU,IAAI;AAAA,IAC3B,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,QAAQ,MAAM,SAAS,KAAK;AAClC,YAAM,IAAI,MAAM,qBAAqB,SAAS,MAAM,MAAM,KAAK,EAAE;AAAA,IACnE;AAEA,UAAM,OAAQ,MAAM,SAAS,KAAK;AAMlC,UAAM,SAAS,KAAK,KAAK,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAEzD,WAAO,OAAO,IAAI,CAAC,MAAM,OAAO;AAAA,MAC9B,WAAW,KAAK;AAAA,MAChB,QAAQ,KAAK,KAAK,MAAM,CAAC,EAAE,SAAS,CAAC;AAAA;AAAA,IACvC,EAAE;AAAA,EACJ;AAAA,EAEA,gBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAmB;AACjB,WAAO,KAAK;AAAA,EACd;AACF;AAKO,IAAM,gBAAgB;AAAA;AAAA,EAE3B,mBAAmB;AAAA;AAAA,EAEnB,mBAAmB;AAAA;AAAA,EAEnB,SAAS;AACX;;;AC/FO,SAAS,uBAAuB,SAA6C;AAClF,UAAQ,QAAQ,UAAU;AAAA,IACxB,KAAK;AACH,aAAO,IAAI,sBAAsB;AAAA,QAC/B,OAAO,QAAQ;AAAA,QACf,WAAW,QAAQ;AAAA,MACrB,CAAC;AAAA,IAEH,KAAK;AACH,UAAI,CAAC,QAAQ,QAAQ;AACnB,cAAM,IAAI,MAAM,sCAAsC;AAAA,MACxD;AACA,aAAO,IAAI,uBAAuB;AAAA,QAChC,QAAQ,QAAQ;AAAA,QAChB,OAAO,QAAQ;AAAA,QACf,YAAY,QAAQ;AAAA,QACpB,WAAW,QAAQ;AAAA,MACrB,CAAC;AAAA,IAEH,KAAK;AAEH,aAAO,IAAI,sBAAsB;AAAA,QAC/B,OAAO,QAAQ,SAAS;AAAA,QACxB,WAAW,QAAQ;AAAA,MACrB,CAAC;AAAA,IAEH;AACE,YAAM,IAAI,MAAM,+BAA+B,QAAQ,QAAQ,EAAE;AAAA,EACrE;AACF;;;AC9CA,OAAO,cAAc;AAYd,IAAM,oBAAN,MAA+C;AAAA,EAC5C;AAAA,EACA;AAAA,EACA;AAAA,EAER,YAAY,SAAmC;AAC7C,SAAK,KAAK,IAAI,SAAS,QAAQ,QAAQ,UAAU;AACjD,SAAK,YAAY,QAAQ,aAAa;AACtC,SAAK,aAAa,QAAQ;AAG1B,SAAK,GAAG,OAAO,oBAAoB;AAAA,EACrC;AAAA,EAEA,MAAM,aAA4B;AAEhC,S
AAK,GAAG,KAAK;AAAA,mCACkB,KAAK,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAQ5C;AAGD,SAAK,GAAG,KAAK;AAAA,uCACsB,KAAK,SAAS;AAAA,WAC1C,KAAK,SAAS;AAAA,KACpB;AAGD,SAAK,GAAG,KAAK;AAAA,2CAC0B,KAAK,SAAS;AAAA,sCACnB,KAAK,SAAS;AAAA,KAC/C;AAGD,SAAK,GAAG,KAAK;AAAA,qCACoB,KAAK,SAAS,uBAAuB,KAAK,SAAS;AAAA,sBAClE,KAAK,SAAS;AAAA;AAAA,KAE/B;AAED,SAAK,GAAG,KAAK;AAAA,qCACoB,KAAK,SAAS,uBAAuB,KAAK,SAAS;AAAA,sBAClE,KAAK,SAAS,QAAQ,KAAK,SAAS;AAAA;AAAA;AAAA,KAGrD;AAAA,EACH;AAAA,EAEA,MAAM,OAAO,QAAwC;AACnD,UAAM,OAAO,KAAK,GAAG,QAAQ;AAAA,+BACF,KAAK,SAAS;AAAA;AAAA,KAExC;AAED,UAAM,aAAa,KAAK,GAAG,YAAY,CAAC,UAA2B;AACjE,iBAAW,SAAS,OAAO;AACzB,aAAK;AAAA,UACH,MAAM;AAAA,UACN,MAAM;AAAA,UACN,MAAM;AAAA,UACN,MAAM,YAAY,KAAK,UAAU,MAAM,SAAS,IAAI;AAAA,UACpD,KAAK,UAAU,MAAM,QAAQ;AAAA,UAC7B,MAAM,UAAU,YAAY;AAAA,QAC9B;AAAA,MACF;AAAA,IACF,CAAC;AAED,eAAW,MAAM;AAAA,EACnB;AAAA,EAEA,MAAM,YACJ,WACA,MACA,QACyB;AAEzB,QAAI,QAAQ,iBAAiB,KAAK,SAAS;AAC3C,UAAM,SAAoB,CAAC;AAG3B,QAAI,QAAQ,YAAY;AACtB,eAAS;AACT,aAAO,KAAK,OAAO,UAAU;AAAA,IAC/B;AAEA,UAAM,OAAO,KAAK,GAAG,QAAQ,KAAK,EAAE,IAAI,GAAG,MAAM;AAUjD,UAAM,UAA0D,CAAC;AAEjE,eAAW,OAAO,MAAM;AACtB,YAAM,iBAAiB,KAAK,MAAM,IAAI,SAAS;AAC/C,YAAM,QAAQ,KAAK,iBAAiB,WAAW,cAAc;AAE7D,cAAQ,KAAK;AAAA,QACX,OAAO,KAAK,WAAW,GAAG;AAAA,QAC1B;AAAA,MACF,CAAC;AAAA,IACH;AAGA,YAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAExC,WAAO,QAAQ,MAAM,GAAG,IAAI,EAAE,IAAI,CAAC,OAAO;AAAA,MACxC,GAAG;AAAA,MACH,YAAY;AAAA,IACd,EAAE;AAAA,EACJ;AAAA,EAEA,MAAM,aACJ,OACA,MACA,QACyB;AAEzB,QAAI,WAAW;AAAA,yBACM,KAAK,SAAS;AAAA,aAC1B,KAAK,SAAS;AAAA,aACd,KAAK,SAAS;AAAA,cACb,KAAK,SAAS;AAAA;AAExB,UAAM,SAAoB,CAAC,KAAK,eAAe,KAAK,CAAC;AAGrD,QAAI,QAAQ,YAAY;AACtB,kBAAY;AACZ,aAAO,KAAK,OAAO,UAAU;AAAA,IAC/B;AAEA,gBAAY;AACZ,WAAO,KAAK,IAAI;AAEhB,QAAI;AACF,YAAM,OAAO,KAAK,GAAG,QAAQ,QAAQ,EAAE,IAAI,GAAG,MAAM;AAUpD,aAAO,KAAK,IAAI,CAAC,SAAS;AAAA,QACxB,OAAO,KAAK,WAAW,GAAG;AAAA,QAC1B,OAAO,KAAK,IAAI,IAAI,KAAK;AAAA;AAAA,QACzB,YAAY;AAAA,MACd,EAAE;AAAA,IACJ,QAAQ;AAEN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA,EAEA,MAAM,mBAAmB,YAAqC;AAC5D,UAAM,SAAS,KAAK,GACjB,QAAQ,eAAe,KAAK,SAAS,wBAAwB,EAC7D,IAAI,UAAU;AACjB,WAAO,OAAO;AAAA,EAChB;AAAA,EAEA,MAAM,QAAQ,IAA2C;AACvD,UAAM,MAAM,KAAK,GACd,QAAQ,iBAAiB,KAAK,SAAS,eAAe,EACtD,IAAI,EAAE;AAST,WAAO,MAAM,KAAK,WAAW,GAAG,IAAI;AAAA,EACtC;AAAA,EAEA,MAAM,QAAuB;AAC3B,SAAK,GAAG,MAAM;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,GAAa,GAAqB;AACzD,QAAI,EAAE,WAAW,EAAE,QAAQ;AACzB,YAAM,IAAI,MAAM,+BAA+B;AAAA,IACjD;AAEA,QAAI,aAAa;AACjB,QAAI,QAAQ;AACZ,QAAI,QAAQ;AAEZ,aAAS,IAAI,GAAG,IAAI,EAAE,QAAQ,KAAK;AACjC,oBAAc,EAAE,CAAC,IAAI,EAAE,CAAC;AACxB,eAAS,EAAE,CAAC,IAAI,EAAE,CAAC;AACnB,eAAS,EAAE,CAAC,IAAI,EAAE,CAAC;AAAA,IACrB;AAEA,UAAM,cAAc,KAAK,KAAK,KAAK,IAAI,KAAK,KAAK,KAAK;AACtD,WAAO,gBAAgB,IAAI,IAAI,aAAa;AAAA,EAC9C;AAAA;AAAA;AAAA;AAAA,EAKQ,WAAW,KAOD;AAChB,WAAO;AAAA,MACL,IAAI,IAAI;AAAA,MACR,YAAY,IAAI;AAAA,MAChB,MAAM,IAAI;AAAA,MACV,WAAW,IAAI,YAAY,KAAK,MAAM,IAAI,SAAS,IAAI;AAAA,MACvD,UAAU,KAAK,MAAM,IAAI,QAAQ;AAAA,MACjC,WAAW,IAAI,KAAK,IAAI,UAAU;AAAA,IACpC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,OAAuB;AAE5C,UAAM,UAAU,MAAM,QAAQ,uBAAuB,GAAG,EAAE,KAAK;AAC/D,WAAO,QACJ,MAAM,KAAK,EACX,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC,EAC1B,IAAI,CAAC,MAAM,IAAI,CAAC,GAAG,EACnB,KAAK,MAAM;AAAA,EAChB;AACF;;;ACrQA,SAAS,YAA6B;AAe/B,IAAM,sBAAN,MAAiD;AAAA,EAC9C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAER,YAAY,SAAqC;AAC/C,UAAM,aAAyB,QAAQ,cAAc;AAAA,MACnD,kBAAkB,QAAQ;AAAA,MAC1B,KAAK;AAAA,MACL,mBAAmB;AAAA,IACrB;AAEA,SAAK,OAAO,IAAI,KAAK,UAAU;AAC/B,SAAK,YAAY,QAAQ,aAAa;AACtC,SAAK,SAAS,QAAQ,UAAU;AAChC,SAAK,aAAa,QAAQ;AAC1B,SAAK,gBAAgB,GAAG,KAAK,MAAM,IAAI,KAAK,SAAS;AAAA,EACvD;AAAA,EAEA,MAAM,aAA4B;AAChC,UAAM,SAAS,MAAM,KAAK,KAAK,QAA
Q;AAEvC,QAAI;AAEF,YAAM,OAAO,MAAM,uCAAuC;AAC1D,YAAM,OAAO,MAAM,wCAAwC;AAG3D,YAAM,OAAO,MAAM;AAAA,qCACY,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OASzC;AAGD,YAAM,OAAO,MAAM;AAAA,qCACY,KAAK,aAAa;AAAA;AAAA,iDAEN,KAAK,MAAM;AAAA;AAAA,6BAE/B,KAAK,UAAU;AAAA;AAAA;AAAA;AAAA,OAIrC;AAGD,YAAM,OAAO,MAAM;AAAA,yCACgB,KAAK,SAAS;AAAA,aAC1C,KAAK,aAAa;AAAA,OACxB;AAGD,YAAM,OAAO,MAAM;AAAA,yCACgB,KAAK,SAAS;AAAA,aAC1C,KAAK,aAAa;AAAA;AAAA;AAAA,OAGxB;AAGD,YAAM,OAAO,MAAM;AAAA,yCACgB,KAAK,SAAS;AAAA,aAC1C,KAAK,aAAa;AAAA;AAAA,OAExB;AAGD,YAAM,OAAO,MAAM;AAAA,yCACgB,KAAK,SAAS;AAAA,aAC1C,KAAK,aAAa;AAAA;AAAA,OAExB;AAAA,IACH,UAAE;AACA,aAAO,QAAQ;AAAA,IACjB;AAAA,EACF;AAAA,EAEA,MAAM,OAAO,QAAwC;AACnD,QAAI,OAAO,WAAW,EAAG;AAEzB,UAAM,SAAS,MAAM,KAAK,KAAK,QAAQ;AAEvC,QAAI;AACF,YAAM,OAAO,MAAM,OAAO;AAG1B,iBAAW,SAAS,QAAQ;AAC1B,cAAM,YAAY,MAAM,YACpB,IAAI,MAAM,UAAU,KAAK,GAAG,CAAC,MAC7B;AAEJ,cAAM,OAAO;AAAA,UACX;AAAA,wBACc,KAAK,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAOhC;AAAA,YACE,MAAM;AAAA,YACN,MAAM;AAAA,YACN,MAAM;AAAA,YACN;AAAA,YACA,KAAK,UAAU,MAAM,QAAQ;AAAA,YAC7B,MAAM;AAAA,UACR;AAAA,QACF;AAAA,MACF;AAEA,YAAM,OAAO,MAAM,QAAQ;AAAA,IAC7B,SAAS,OAAO;AACd,YAAM,OAAO,MAAM,UAAU;AAC7B,YAAM;AAAA,IACR,UAAE;AACA,aAAO,QAAQ;AAAA,IACjB;AAAA,EACF;AAAA,EAEA,MAAM,YACJ,WACA,MACA,QACyB;AACzB,UAAM,eAAe,IAAI,UAAU,KAAK,GAAG,CAAC;AAE5C,QAAI,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,aAQH,KAAK,aAAa;AAAA;AAAA;AAG3B,UAAM,SAAoB,CAAC,YAAY;AACvC,QAAI,aAAa;AAGjB,QAAI,QAAQ,YAAY;AACtB,eAAS,uBAAuB,UAAU;AAC1C,aAAO,KAAK,OAAO,UAAU;AAC7B;AAAA,IACF;AAEA,QAAI,QAAQ,UAAU;AACpB,eAAS,qBAAqB,UAAU;AACxC,aAAO,KAAK,KAAK,UAAU,OAAO,QAAQ,CAAC;AAC3C;AAAA,IACF;AAEA,aAAS,6CAA6C,UAAU;AAChE,WAAO,KAAK,IAAI;AAEhB,UAAM,SAAS,MAAM,KAAK,KAAK,MAAM,OAAO,MAAM;AAElD,WAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,MAC/B,OAAO,KAAK,WAAW,GAAG;AAAA,MAC1B,OAAO,IAAI;AAAA,MACX,YAAY;AAAA,IACd,EAAE;AAAA,EACJ;AAAA,EAEA,MAAM,aACJ,OACA,MACA,QACyB;AAEzB,QAAI,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,aAQD,KAAK,aAAa;AAAA;AAAA;AAG3B,UAAM,SAAoB,CAAC,KAAK;AAChC,QAAI,aAAa;AAGjB,QAAI,QAAQ,YAAY;AACtB,aAAO,uBAAuB,UAAU;AACxC,aAAO,KAAK,OAAO,UAAU;AAC7B;AAAA,IACF;AAEA,WAAO,+BAA+B,UAAU;AAChD,WAAO,KAAK,IAAI;AAEhB,UAAM,SAAS,MAAM,KAAK,KAAK,MAAM,KAAK,MAAM;AAEhD,WAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,MAC/B,OAAO,KAAK,WAAW,GAAG;AAAA,MAC1B,OAAO,IAAI;AAAA,MACX,YAAY;AAAA,IACd,EAAE;AAAA,EACJ;AAAA,EAEA,MAAM,mBAAmB,YAAqC;AAC5D,UAAM,SAAS,MAAM,KAAK,KAAK;AAAA,MAC7B,eAAe,KAAK,aAAa;AAAA,MACjC,CAAC,UAAU;AAAA,IACb;AACA,WAAO,OAAO,YAAY;AAAA,EAC5B;AAAA,EAEA,MAAM,QAAQ,IAA2C;AACvD,UAAM,SAAS,MAAM,KAAK,KAAK;AAAA,MAC7B,iBAAiB,KAAK,aAAa;AAAA,MACnC,CAAC,EAAE;AAAA,IACL;AACA,WAAO,OAAO,KAAK,SAAS,IAAI,KAAK,WAAW,OAAO,KAAK,CAAC,CAAC,IAAI;AAAA,EACpE;AAAA,EAEA,MAAM,QAAuB;AAC3B,UAAM,KAAK,KAAK,IAAI;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAA2D;AAC/D,UAAM,eAAe,MAAM,KAAK,KAAK;AAAA,MACnC,wBAAwB,KAAK,aAAa;AAAA,IAC5C;AACA,UAAM,aAAa,MAAM,KAAK,KAAK;AAAA,MACjC,wBAAwB,KAAK,MAAM;AAAA,IACrC;AAEA,WAAO;AAAA,MACL,QAAQ,SAAS,aAAa,KAAK,CAAC,EAAE,OAAO,EAAE;AAAA,MAC/C,WAAW,SAAS,WAAW,KAAK,CAAC,EAAE,OAAO,EAAE;AAAA,IAClD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,eAAe,KAMH;AAChB,UAAM,KAAK,KAAK;AAAA,MACd;AAAA,oBACc,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQzB,CAAC,IAAI,IAAI,IAAI,UAAU,IAAI,MAAM,IAAI,OAAO,KAAK,UAAU,IAAI,YAAY,CAAC,CAAC,CAAC;AAAA,IAChF;AAAA,EACF;AAAA,EAEQ,WAAW,KAOD;AAChB,WAAO;AAAA,MACL,IAAI,IAAI;AAAA,MACR,YAAY,IAAI;AAAA,MAChB,MAAM,IAAI;AAAA,MACV,UAAU,IAAI;AAAA,MACd,WAAW,IAAI,KAAK,IAAI,UAAU;AAAA,IACpC;AAAA,EACF;AACF;;;ACzSO,SAAS,kBAAkB,SAA0C;AAC1E,UAAQ,QAAQ,MAAM;AAAA,IACpB,KAAK;AACH,aAAO,IAAI,kBAAkB;AAAA,QAC3B,MAAM,QAAQ,oBAAoB;AAAA,QAClC,WAAW,QAAQ;AAAA,QACnB,YAAY,QAAQ;AAAA,MACt
B,CAAC;AAAA,IAEH,KAAK;AACH,UAAI,CAAC,QAAQ,kBAAkB;AAC7B,cAAM,IAAI,MAAM,yCAAyC;AAAA,MAC3D;AACA,aAAO,IAAI,oBAAoB;AAAA,QAC7B,kBAAkB,QAAQ;AAAA,QAC1B,WAAW,QAAQ;AAAA,QACnB,YAAY,QAAQ;AAAA,MACtB,CAAC;AAAA,IAEH,KAAK;AAEH,aAAO,IAAI,kBAAkB;AAAA,QAC3B,MAAM;AAAA,QACN,WAAW,QAAQ;AAAA,QACnB,YAAY,QAAQ;AAAA,MACtB,CAAC;AAAA,IAEH;AACE,YAAM,IAAI,MAAM,8BAA8B,QAAQ,IAAI,EAAE;AAAA,EAChE;AACF;;;AClBO,IAAM,kBAAN,MAAsB;AAAA,EACnB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAER,YAAY,SAAiC;AAC3C,SAAK,cAAc,QAAQ;AAC3B,SAAK,aAAa,QAAQ;AAC1B,SAAK,cAAc,QAAQ,eAAe;AAC1C,SAAK,cAAc,QAAQ,eAAe;AAC1C,SAAK,eAAe,QAAQ,gBAAgB;AAC5C,SAAK,OAAO,QAAQ,QAAQ;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAO,OAAe,SAAsD;AAChF,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,OAAO,SAAS,QAAQ,KAAK;AACnC,UAAM,SAAS,SAAS;AAGxB,UAAM,cAAc,SAAS,eAAe,KAAK;AACjD,UAAM,eAAe,SAAS,gBAAgB,KAAK;AAGnD,UAAM,aAAa,KAAK,IAAI,OAAO,GAAG,GAAG;AAGzC,UAAM,CAAC,cAAc,aAAa,IAAI,MAAM,QAAQ,IAAI;AAAA,MACtD,KAAK,YAAY,OAAO,YAAY,MAAM;AAAA,MAC1C,KAAK,aAAa,OAAO,YAAY,MAAM;AAAA,IAC7C,CAAC;AAGD,UAAM,eAAe,KAAK;AAAA,MACxB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAGA,UAAM,eAAe,aAAa,MAAM,GAAG,IAAI;AAE/C,WAAO;AAAA,MACL,SAAS;AAAA,MACT,OAAO;AAAA,QACL,YAAY,aAAa;AAAA,QACzB,aAAa,cAAc;AAAA,QAC3B,kBAAkB;AAAA,QAClB,WAAW,KAAK,IAAI,IAAI;AAAA,MAC1B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,YACZ,OACA,MACA,QACyB;AACzB,QAAI;AACF,YAAM,kBAAkB,MAAM,KAAK,WAAW,MAAM,KAAK;AACzD,aAAO,MAAM,KAAK,YAAY,YAAY,gBAAgB,WAAW,MAAM,MAAM;AAAA,IACnF,SAAS,OAAO;AACd,cAAQ,KAAK,wBAAwB,KAAK;AAC1C,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aACZ,OACA,MACA,QACyB;AACzB,QAAI;AACF,aAAO,MAAM,KAAK,YAAY,aAAa,OAAO,MAAM,MAAM;AAAA,IAChE,SAAS,OAAO;AACd,cAAQ,KAAK,yBAAyB,KAAK;AAC3C,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,UACN,cACA,eACA,aACA,cACgB;AAChB,UAAM,IAAI,KAAK;AACf,UAAM,SAAS,oBAAI,IAAqD;AAGxE,iBAAa,QAAQ,CAAC,QAAQ,SAAS;AACrC,YAAM,WAAY,cAAc,KAAM,IAAI,OAAO;AACjD,YAAM,WAAW,OAAO,IAAI,OAAO,MAAM,EAAE;AAE3C,UAAI,UAAU;AACZ,iBAAS,SAAS;AAAA,MACpB,OAAO;AACL,eAAO,IAAI,OAAO,MAAM,IAAI;AAAA,UAC1B,OAAO;AAAA,UACP,QAAQ,EAAE,GAAG,QAAQ,YAAY,SAAS;AAAA,QAC5C,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAGD,kBAAc,QAAQ,CAAC,QAAQ,SAAS;AACtC,YAAM,WAAY,eAAe,KAAM,IAAI,OAAO;AAClD,YAAM,WAAW,OAAO,IAAI,OAAO,MAAM,EAAE;AAE3C,UAAI,UAAU;AACZ,iBAAS,SAAS;AAAA,MACpB,OAAO;AACL,eAAO,IAAI,OAAO,MAAM,IAAI;AAAA,UAC1B,OAAO;AAAA,UACP,QAAQ,EAAE,GAAG,QAAQ,YAAY,SAAS;AAAA,QAC5C,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAGD,UAAM,WAAW,MAAM,KAAK,OAAO,OAAO,CAAC,EACxC,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK,EAChC,IAAI,CAAC,WAAW;AAAA,MACf,GAAG,MAAM;AAAA,MACT,OAAO,MAAM;AAAA,IACf,EAAE;AAEJ,WAAO;AAAA,EACT;AACF;AAKO,IAAM,iBAAN,MAAqB;AAAA,EAClB;AAAA,EACA;AAAA,EACA;AAAA,EAER,YAAY,SAIT;AACD,SAAK,cAAc,QAAQ;AAC3B,SAAK,aAAa,QAAQ;AAC1B,SAAK,cAAc,QAAQ,eAAe;AAAA,EAC5C;AAAA,EAEA,MAAM,OAAO,OAAe,SAAsD;AAChF,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,OAAO,SAAS,QAAQ,KAAK;AAEnC,UAAM,kBAAkB,MAAM,KAAK,WAAW,MAAM,KAAK;AACzD,UAAM,UAAU,MAAM,KAAK,YAAY;AAAA,MACrC,gBAAgB;AAAA,MAChB;AAAA,MACA,SAAS;AAAA,IACX;AAEA,WAAO;AAAA,MACL;AAAA,MACA,OAAO;AAAA,QACL,YAAY,QAAQ;AAAA,QACpB,aAAa;AAAA,QACb,kBAAkB;AAAA,QAClB,WAAW,KAAK,IAAI,IAAI;AAAA,MAC1B;AAAA,IACF;AAAA,EACF;AACF;;;AbjMO,IAAM,MAAN,MAAgC;AAAA,EASrC,YAAoB,SAAqB;AAArB;AAElB,SAAK,aAAa,uBAAuB,QAAQ,UAAU;AAG3D,SAAK,cAAc,kBAAkB;AAAA,MACnC,GAAG,QAAQ;AAAA,MACX,YAAY,KAAK,WAAW,cAAc;AAAA,IAC5C,CAAC;AAGD,SAAK,YAAY,IAAI,gBAAgB;AAAA,MACnC,aAAa,KAAK;AAAA,MAClB,YAAY,KAAK;AAAA,MACjB,aAAa,QAAQ,WAAW,QAAQ;AAAA,MACxC,aAAa,QAAQ,WAAW;AAAA,MAChC,cAAc,QAAQ,WAAW;AAAA,MACjC,MAAM,QAAQ,WAAW;AAAA,IAC3B,CAAC;AAGD,SAAK,UAAU,IAAI,mBAAmB,QAAQ,QAAQ;AAGtD,SAAK,MAAM,KAAK,UAAU,QAAQ,GAAG;AAGrC,SAAK,eACH,QAAQ,gBA
CR;AAAA;AAAA;AAAA;AAAA,EAIJ;AAAA,EAzCQ;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,cAAc;AAAA,EAqCd,UAAU,QAAyC;AACzD,YAAQ,OAAO,UAAU;AAAA,MACvB,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AACH,eAAO,IAAI,eAAe;AAAA,UACxB,QAAQ,OAAO,UAAU,QAAQ,IAAI,kBAAkB;AAAA,UACvD,SAAS,OAAO;AAAA,UAChB,OAAO,OAAO,SAAS;AAAA,QACzB,CAAC;AAAA,MAEH,KAAK;AACH,eAAO,IAAI,kBAAkB;AAAA,UAC3B,QAAQ,OAAO,UAAU,QAAQ,IAAI,qBAAqB;AAAA,UAC1D,OAAO,OAAO,SAAS;AAAA,QACzB,CAAC;AAAA,MAEH;AACE,cAAM,IAAI,MAAM,yBAAyB,OAAO,QAAQ,EAAE;AAAA,IAC9D;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAA4B;AAChC,QAAI,KAAK,YAAa;AACtB,UAAM,KAAK,YAAY,WAAW;AAClC,SAAK,cAAc;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,MACJ,MACA,SAC8D;AAC9D,UAAM,KAAK,WAAW;AAEtB,UAAM,WAAW,SAAS,YAAYC,YAAW,QAAQ;AACzD,YAAQ,IAAI,kCAAkC,IAAI,EAAE;AAGpD,UAAM,YAAY,MAAM,iBAAiB,IAAI;AAC7C,YAAQ,IAAI,mBAAmB,UAAU,MAAM,YAAY;AAE3D,QAAI,cAAc;AAElB,eAAW,OAAO,WAAW;AAC3B,YAAM,aAAaA,YAAW,KAAK;AAGnC,YAAM,EAAE,QAAQ,MAAM,IAAI,KAAK,QAAQ,MAAM,KAAK,UAAU;AAC5D,cAAQ;AAAA,QACN,kBAAkB,IAAI,KAAK,MAAM,MAAM,WAAW,iBAAiB,MAAM,YAAY;AAAA,MACvF;AAEA,UAAI,OAAO,WAAW,EAAG;AAGzB,cAAQ,IAAI,mCAAmC,OAAO,MAAM,YAAY;AACxE,YAAM,QAAQ,OAAO,IAAI,CAAC,MAAM,EAAE,IAAI;AACtC,YAAM,mBAAmB,MAAM,KAAK,WAAW,WAAW,KAAK;AAG/D,eAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,eAAO,CAAC,EAAE,YAAY,iBAAiB,CAAC,EAAE;AAAA,MAC5C;AAGA,YAAM,KAAK,YAAY,OAAO,MAAM;AACpC,qBAAe,OAAO;AAAA,IACxB;AAEA,YAAQ,IAAI,4BAA4B,UAAU,MAAM,UAAU,WAAW,SAAS;AAEtF,WAAO;AAAA,MACL,kBAAkB,UAAU;AAAA,MAC5B,eAAe;AAAA,IACjB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,IAAI,UAAkB,SAAkD;AAC5E,UAAM,KAAK,WAAW;AAEtB,UAAM,YAAY,KAAK,IAAI;AAG3B,UAAM,iBAAiB,KAAK,IAAI;AAChC,UAAM,EAAE,SAAS,MAAM,IAAI,MAAM,KAAK,UAAU,OAAO,UAAU,OAAO;AACxE,UAAM,gBAAgB,KAAK,IAAI,IAAI;AAEnC,QAAI,QAAQ,WAAW,GAAG;AACxB,aAAO;AAAA,QACL;AAAA,QACA,QAAQ;AAAA,QACR,WAAW,CAAC;AAAA,QACZ,SAAS,CAAC;AAAA,QACV,UAAU;AAAA,UACR,WAAW,KAAK,IAAI,IAAI;AAAA,UACxB;AAAA,UACA,gBAAgB;AAAA,UAChB,QAAQ;AAAA,UACR,OAAO,KAAK,IAAI,QAAQ;AAAA,QAC1B;AAAA,MACF;AAAA,IACF;AAGA,UAAM,UAAU,QAAQ,IAAI,CAAC,MAAM,EAAE,KAAK;AAC1C,UAAM,cAAc,KAAK,iBAAiB,OAAO;AAGjD,UAAM,kBAAkB,KAAK,IAAI;AACjC,UAAM,SAAS,MAAM,KAAK,eAAe,UAAU,WAAW;AAC9D,UAAM,iBAAiB,KAAK,IAAI,IAAI;AAGpC,UAAM,YAAY,KAAK,iBAAiB,QAAQ,OAAO;AAEvD,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,UAAU;AAAA,QACR,WAAW,KAAK,IAAI,IAAI;AAAA,QACxB;AAAA,QACA;AAAA,QACA,QAAQ;AAAA,QACR,OAAO,KAAK,IAAI,QAAQ;AAAA,MAC1B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,QAAiC;AACxD,WAAO,OACJ,IAAI,CAAC,OAAO,MAAM;AACjB,YAAM,SAAS,MAAM,SAAS,eAAe,MAAM,SAAS,aAAa;AACzE,aAAO,IAAI,IAAI,CAAC,KAAK,MAAM;AAAA,EAAK,MAAM,IAAI;AAAA,IAC5C,CAAC,EACA,KAAK,aAAa;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,eAAe,UAAkB,SAAkC;AAC/E,UAAM,SAAS;AAAA;AAAA;AAAA;AAAA,EAIjB,OAAO;AAAA;AAAA,YAEG,QAAQ;AAAA;AAAA;AAIhB,UAAM,WAAW,MAAM,KAAK,IAAI,SAAS;AAAA,MACvC,UAAU;AAAA,QACR;AAAA,UACE,IAAIA,YAAW,KAAK;AAAA,UACpB,MAAM;AAAA,UACN,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,OAAO,CAAC;AAAA,UACxC,WAAW,oBAAI,KAAK;AAAA,QACtB;AAAA,MACF;AAAA,MACA,cAAc,KAAK;AAAA,MACnB,WAAW;AAAA,IACb,CAAC;AAED,WAAO,SAAS;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,QAAgB,SAAsC;AAC7E,UAAM,YAAwB,CAAC;AAC/B,UAAM,gBAAgB;AACtB,UAAM,UAAU,OAAO,SAAS,aAAa;AAC7C,UAAM,cAAc,oBAAI,IAAY;AAEpC,eAAW,SAAS,SAAS;AAC3B,YAAM,QAAQ,SAAS,MAAM,CAAC,GAAG,EAAE;AACnC,UAAI,CAAC,YAAY,IAAI,KAAK,KAAK,QAAQ,KAAK,SAAS,QAAQ,QAAQ;AACnE,oBAAY,IAAI,KAAK;AACrB,cAAM,QAAQ,QAAQ,QAAQ,CAAC;AAC/B,kBAAU,KAAK;AAAA,UACb;AAAA,UACA,SAAS,MAAM;AAAA,UACf,aAAa,MAAM,SAAS;AAAA,UAC5B,SACE,MAAM,SAAS,aAAa,MAAM,SAAS,aAAa,MAAM,SAAS;AAAA,UACzE,SAAS,MAAM,KAAK,UAAU,GAAG,GAAG,KAAK,MAAM,KAAK,SAAS,MAAM,QAAQ;AAAA,QAC7E,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO,UAAU,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EA
AE,KAAK;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,mBAAoC;AAGxC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,gBAAiC;AAErC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAuB;AAE3B,YAAQ,IAAI,iCAAiC;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAuB;AAC3B,UAAM,KAAK,YAAY,MAAM;AAAA,EAC/B;AACF;AAKA,eAAsB,UAAU,SAAmC;AACjE,QAAM,MAAM,IAAI,IAAI,OAAO;AAC3B,QAAM,IAAI,WAAW;AACrB,SAAO;AACT;","names":["generateId","readdir","readFile","join","basename","join","readFile","basename","readdir","readFile","readdir","stat","join","basename","stat","readFile","basename","readdir","join","stat","stat","embedder","generateId"]}
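
For orientation: the replacement map that follows embeds the bundle's full TypeScript sources, including a new embeddings/huggingface.ts backend alongside the local and OpenAI ones. Below is a minimal usage sketch of that backend; it assumes the class is re-exported from the package root, and the HF_TOKEN variable and sample query are placeholders, not documented names.

import { HuggingFaceEmbeddingBackend } from '@chatbot-packages/rag';

async function main(): Promise<void> {
  const embeddings = new HuggingFaceEmbeddingBackend({
    apiToken: process.env.HF_TOKEN ?? '',  // required; HF_TOKEN is a placeholder name
    model: 'BAAI/bge-base-en-v1.5',        // the default model in the embedded source
    batchSize: 32,                         // the documented default batch size
  });

  // embed() resolves to an EmbeddingResult carrying the raw vector.
  const { embedding } = await embeddings.embed('How do I index a CHM file?');

  // bge-base maps to 768 dimensions in the backend's MODEL_DIMENSIONS table;
  // models missing from that table also fall back to 768.
  console.log(embeddings.getDimensions(), embedding.length);
}

main().catch(console.error);

Note that the RAG constructor in the embedded rag-service.ts wires getDimensions() into the vector store configuration, so choosing a model with different dimensions (for example BAAI/bge-large-en-v1.5 at 1024) changes the store schema accordingly.
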
@@ -1 +1 @@
+
{"version":3,"sources":["../src/rag-service.ts","../src/extractors/base.ts","../src/extractors/chm.ts","../src/extractors/html.ts","../src/extractors/markdown.ts","../src/extractors/index.ts","../src/chunking/header-aware-chunker.ts","../src/embeddings/local.ts","../src/embeddings/openai.ts","../src/embeddings/huggingface.ts","../src/embeddings/index.ts","../src/vectorstore/sqlite.ts","../src/vectorstore/postgres.ts","../src/vectorstore/index.ts","../src/retrieval/hybrid.ts"],"sourcesContent":["/**\n * RAG Service\n *\n * Main service that orchestrates document indexing and question answering.\n */\n\nimport { generateId } from '@chatbot-packages/utils';\nimport { OpenAIProvider, AnthropicProvider, type BaseProvider } from '@chatbot-packages/ai';\nimport { extractDocuments } from './extractors/index.js';\nimport { HeaderAwareChunker } from './chunking/index.js';\nimport { createEmbeddingBackend } from './embeddings/index.js';\nimport { createVectorStore } from './vectorstore/index.js';\nimport { HybridRetriever } from './retrieval/index.js';\nimport type {\n RAGOptions,\n RAGResponse,\n RAGService,\n RetrievalOptions,\n Citation,\n DocumentChunk,\n EmbeddingBackend,\n VectorStore,\n} from './types.js';\n\nexport class RAG implements RAGService {\n private vectorStore: VectorStore;\n private embeddings: EmbeddingBackend;\n private retriever: HybridRetriever;\n private llm: BaseProvider;\n private chunker: HeaderAwareChunker;\n private systemPrompt: string;\n private initialized = false;\n\n constructor(private options: RAGOptions) {\n // Initialize embedding backend\n this.embeddings = createEmbeddingBackend(options.embeddings);\n\n // Initialize vector store\n this.vectorStore = createVectorStore({\n ...options.vectorStore,\n dimensions: this.embeddings.getDimensions(),\n });\n\n // Initialize retriever\n this.retriever = new HybridRetriever({\n vectorStore: this.vectorStore,\n embeddings: this.embeddings,\n defaultTopK: options.retrieval?.topK || 8,\n denseWeight: options.retrieval?.denseWeight,\n sparseWeight: options.retrieval?.sparseWeight,\n rrfK: options.retrieval?.rrfK,\n });\n\n // Initialize chunker\n this.chunker = new HeaderAwareChunker(options.chunking);\n\n // Initialize LLM\n this.llm = this.createLLM(options.llm);\n\n // Set system prompt\n this.systemPrompt =\n options.systemPrompt ||\n `You are a helpful documentation assistant. Answer questions based on the provided context.\nAlways cite your sources using [1], [2], etc. 
format when referencing specific information.\nIf the context doesn't contain enough information to answer, say so clearly.\nBe concise and accurate.`;\n }\n\n private createLLM(config: RAGOptions['llm']): BaseProvider {\n switch (config.provider) {\n case 'openai':\n case 'cerebras':\n case 'groq':\n return new OpenAIProvider({\n apiKey: config.apiKey || process.env.OPENAI_API_KEY || '',\n baseUrl: config.baseUrl,\n model: config.model || 'gpt-4o-mini',\n });\n\n case 'anthropic':\n return new AnthropicProvider({\n apiKey: config.apiKey || process.env.ANTHROPIC_API_KEY || '',\n model: config.model || 'claude-sonnet-4-20250514',\n });\n\n default:\n throw new Error(`Unknown LLM provider: ${config.provider}`);\n }\n }\n\n /**\n * Initialize the RAG service\n */\n async initialize(): Promise<void> {\n if (this.initialized) return;\n await this.vectorStore.initialize();\n this.initialized = true;\n }\n\n /**\n * Index documents from a path\n */\n async index(\n path: string,\n options?: { sourceId?: string }\n ): Promise<{ documentsIndexed: number; chunksCreated: number }> {\n await this.initialize();\n\n const sourceId = options?.sourceId || generateId('source');\n console.log(`[RAG] Indexing documents from: ${path}`);\n\n // Extract documents\n const documents = await extractDocuments(path);\n console.log(`[RAG] Extracted ${documents.length} documents`);\n\n let totalChunks = 0;\n\n for (const doc of documents) {\n const documentId = generateId('doc');\n\n // Chunk the document\n const { chunks, stats } = this.chunker.chunk(doc, documentId);\n console.log(\n `[RAG] Chunked \"${doc.title}\": ${stats.totalChunks} chunks (avg: ${stats.avgChunkSize} chars)`\n );\n\n if (chunks.length === 0) continue;\n\n // Generate embeddings\n console.log(`[RAG] Generating embeddings for ${chunks.length} chunks...`);\n const texts = chunks.map((c) => c.text);\n const embeddingResults = await this.embeddings.embedBatch(texts);\n\n // Add embeddings to chunks\n for (let i = 0; i < chunks.length; i++) {\n chunks[i].embedding = embeddingResults[i].embedding;\n }\n\n // Insert into vector store\n await this.vectorStore.insert(chunks);\n totalChunks += chunks.length;\n }\n\n console.log(`[RAG] Indexing complete: ${documents.length} docs, ${totalChunks} chunks`);\n\n return {\n documentsIndexed: documents.length,\n chunksCreated: totalChunks,\n };\n }\n\n /**\n * Ask a question and get an answer with citations\n */\n async ask(question: string, options?: RetrievalOptions): Promise<RAGResponse> {\n await this.initialize();\n\n const startTime = Date.now();\n\n // Retrieve relevant chunks\n const retrievalStart = Date.now();\n const { results, stats } = await this.retriever.search(question, options);\n const retrievalTime = Date.now() - retrievalStart;\n\n if (results.length === 0) {\n return {\n question,\n answer: \"I couldn't find any relevant information in the documentation to answer your question.\",\n citations: [],\n context: [],\n metadata: {\n totalTime: Date.now() - startTime,\n retrievalTime,\n generationTime: 0,\n cached: false,\n model: this.llm.name || 'unknown',\n },\n };\n }\n\n // Build context from retrieved chunks\n const context = results.map((r) => r.chunk);\n const contextText = this.buildContextText(context);\n\n // Generate answer\n const generationStart = Date.now();\n const answer = await this.generateAnswer(question, contextText);\n const generationTime = Date.now() - generationStart;\n\n // Extract citations from the answer\n const citations = this.extractCitations(answer, context);\n\n 
return {\n question,\n answer,\n citations,\n context,\n metadata: {\n totalTime: Date.now() - startTime,\n retrievalTime,\n generationTime,\n cached: false,\n model: this.llm.name || 'unknown',\n },\n };\n }\n\n /**\n * Build context text from chunks\n */\n private buildContextText(chunks: DocumentChunk[]): string {\n return chunks\n .map((chunk, i) => {\n const header = chunk.metadata.sectionPath || chunk.metadata.headingH1 || 'Document';\n return `[${i + 1}] ${header}\\n${chunk.text}`;\n })\n .join('\\n\\n---\\n\\n');\n }\n\n /**\n * Generate answer using LLM\n */\n private async generateAnswer(question: string, context: string): Promise<string> {\n const prompt = `Based on the following documentation context, answer the user's question.\nCite sources using [1], [2], etc. format when referencing specific information.\n\nCONTEXT:\n${context}\n\nQUESTION: ${question}\n\nANSWER:`;\n\n const response = await this.llm.complete({\n messages: [\n {\n id: generateId('msg'),\n role: 'user',\n content: [{ type: 'text', text: prompt }],\n timestamp: new Date(),\n },\n ],\n systemPrompt: this.systemPrompt,\n maxTokens: 1000,\n });\n\n return response.content;\n }\n\n /**\n * Extract citations from answer text\n */\n private extractCitations(answer: string, context: DocumentChunk[]): Citation[] {\n const citations: Citation[] = [];\n const citationRegex = /\\[(\\d+)\\]/g;\n const matches = answer.matchAll(citationRegex);\n const seenIndexes = new Set<number>();\n\n for (const match of matches) {\n const index = parseInt(match[1], 10);\n if (!seenIndexes.has(index) && index > 0 && index <= context.length) {\n seenIndexes.add(index);\n const chunk = context[index - 1];\n citations.push({\n index,\n chunkId: chunk.id,\n sectionPath: chunk.metadata.sectionPath,\n heading:\n chunk.metadata.headingH3 || chunk.metadata.headingH2 || chunk.metadata.headingH1,\n snippet: chunk.text.substring(0, 200) + (chunk.text.length > 200 ? '...' : ''),\n });\n }\n }\n\n return citations.sort((a, b) => a.index - b.index);\n }\n\n /**\n * Get document count\n */\n async getDocumentCount(): Promise<number> {\n // This is a simplified implementation\n // In production, you'd query the documents table\n return 0;\n }\n\n /**\n * Get chunk count\n */\n async getChunkCount(): Promise<number> {\n // This is a simplified implementation\n return 0;\n }\n\n /**\n * Clear all indexed data\n */\n async clear(): Promise<void> {\n // This would need to be implemented per vector store\n console.log('[RAG] Clear not implemented yet');\n }\n\n /**\n * Close connections\n */\n async close(): Promise<void> {\n await this.vectorStore.close();\n }\n}\n\n/**\n * Create a RAG service instance\n */\nexport async function createRAG(options: RAGOptions): Promise<RAG> {\n const rag = new RAG(options);\n await rag.initialize();\n return rag;\n}\n","/**\n * Base Extractor\n */\n\nimport type { ExtractedDocument, ExtractorOptions, ExtractorType } from '../types.js';\n\nexport abstract class BaseExtractor {\n protected options: ExtractorOptions;\n\n constructor(options: ExtractorOptions) {\n this.options = options;\n }\n\n /** Get the extractor type */\n abstract getType(): ExtractorType;\n\n /** Check if this extractor can handle the given path */\n abstract canHandle(path: string): boolean;\n\n /** Extract documents from the source */\n abstract extract(): Promise<ExtractedDocument[]>;\n\n /** Get file extension */\n protected getExtension(path: string): string {\n const parts = path.split('.');\n return parts.length > 1 ? 
parts[parts.length - 1].toLowerCase() : '';\n }\n\n /** Normalize line endings */\n protected normalizeLineEndings(text: string): string {\n return text.replace(/\\r\\n/g, '\\n').replace(/\\r/g, '\\n');\n }\n\n /** Clean excessive whitespace */\n protected cleanWhitespace(text: string): string {\n return text\n .replace(/\\n{3,}/g, '\\n\\n')\n .replace(/[ \\t]+/g, ' ')\n .trim();\n }\n}\n","/**\n * CHM (Compiled HTML Help) Extractor\n *\n * Extracts HTML content from .chm files using 7z.\n * CHM files are Microsoft's compiled HTML help format.\n */\n\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\nimport { readdir, readFile, mkdir, rm } from 'fs/promises';\nimport { existsSync } from 'fs';\nimport { join, basename, dirname } from 'path';\nimport { BaseExtractor } from './base.js';\nimport { HTMLExtractor } from './html.js';\nimport type { ExtractedDocument, ExtractorType } from '../types.js';\n\nconst execAsync = promisify(exec);\n\nexport class CHMExtractor extends BaseExtractor {\n private tempDir: string;\n\n constructor(options: { sourcePath: string; outputDir?: string }) {\n super(options);\n this.tempDir = options.outputDir || join(dirname(options.sourcePath), '.chm-extract-temp');\n }\n\n getType(): ExtractorType {\n return 'chm';\n }\n\n canHandle(path: string): boolean {\n return this.getExtension(path) === 'chm';\n }\n\n async extract(): Promise<ExtractedDocument[]> {\n const { sourcePath } = this.options;\n\n if (!existsSync(sourcePath)) {\n throw new Error(`CHM file not found: ${sourcePath}`);\n }\n\n // Create temp directory\n await mkdir(this.tempDir, { recursive: true });\n\n try {\n // Extract CHM using 7z\n await this.extractWithSevenZip(sourcePath, this.tempDir);\n\n // Find and parse all HTML files\n const htmlFiles = await this.findHTMLFiles(this.tempDir);\n const documents: ExtractedDocument[] = [];\n\n for (const htmlFile of htmlFiles) {\n try {\n const content = await readFile(htmlFile, 'utf-8');\n const htmlExtractor = new HTMLExtractor({ sourcePath: htmlFile });\n const extracted = await htmlExtractor.extractFromString(content, htmlFile);\n\n if (extracted.content.trim()) {\n documents.push({\n ...extracted,\n format: 'chm',\n metadata: {\n ...extracted.metadata,\n sourceChm: basename(sourcePath),\n originalPath: htmlFile.replace(this.tempDir, ''),\n },\n });\n }\n } catch (error) {\n // Skip files that can't be parsed\n console.warn(`Failed to parse HTML file: ${htmlFile}`, error);\n }\n }\n\n return documents;\n } finally {\n // Cleanup temp directory\n if (existsSync(this.tempDir)) {\n await rm(this.tempDir, { recursive: true, force: true });\n }\n }\n }\n\n private async extractWithSevenZip(chmPath: string, outputDir: string): Promise<void> {\n try {\n // Try 7z first\n await execAsync(`7z x \"${chmPath}\" -o\"${outputDir}\" -y`, {\n maxBuffer: 50 * 1024 * 1024, // 50MB buffer\n });\n } catch (error) {\n // Try 7za as fallback (common on some systems)\n try {\n await execAsync(`7za x \"${chmPath}\" -o\"${outputDir}\" -y`, {\n maxBuffer: 50 * 1024 * 1024,\n });\n } catch {\n throw new Error(\n `Failed to extract CHM file. 
Please ensure 7z is installed.\\n` +\n ` - On Ubuntu/Debian: sudo apt-get install p7zip-full\\n` +\n ` - On macOS: brew install p7zip\\n` +\n ` - On Windows: Install 7-Zip from https://7-zip.org/\\n` +\n `Original error: ${error}`\n );\n }\n }\n }\n\n private async findHTMLFiles(dir: string): Promise<string[]> {\n const htmlFiles: string[] = [];\n\n const scanDir = async (currentDir: string) => {\n const entries = await readdir(currentDir, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = join(currentDir, entry.name);\n\n if (entry.isDirectory()) {\n // Skip system directories\n if (!entry.name.startsWith('$') && !entry.name.startsWith('#')) {\n await scanDir(fullPath);\n }\n } else if (entry.isFile()) {\n const ext = this.getExtension(entry.name);\n if (ext === 'html' || ext === 'htm') {\n htmlFiles.push(fullPath);\n }\n }\n }\n };\n\n await scanDir(dir);\n return htmlFiles.sort();\n }\n}\n","/**\n * HTML Extractor\n *\n * Extracts text content from HTML files, preserving structure.\n */\n\nimport { readFile, readdir, stat } from 'fs/promises';\nimport { join, basename } from 'path';\nimport * as cheerio from 'cheerio';\nimport { convert } from 'html-to-text';\nimport { BaseExtractor } from './base.js';\nimport type { ExtractedDocument, ExtractorType } from '../types.js';\n\nexport interface HTMLExtractorOptions {\n sourcePath: string;\n /** Whether to preserve headings structure */\n preserveHeadings?: boolean;\n /** Whether to include links */\n includeLinks?: boolean;\n /** Tags to remove */\n removeTags?: string[];\n}\n\nexport class HTMLExtractor extends BaseExtractor {\n private htmlOptions: HTMLExtractorOptions;\n\n constructor(options: HTMLExtractorOptions) {\n super(options);\n this.htmlOptions = {\n preserveHeadings: true,\n includeLinks: false,\n removeTags: ['script', 'style', 'nav', 'footer', 'header', 'aside', 'meta', 'link'],\n ...options,\n };\n }\n\n getType(): ExtractorType {\n return 'html';\n }\n\n canHandle(path: string): boolean {\n const ext = this.getExtension(path);\n return ext === 'html' || ext === 'htm';\n }\n\n async extract(): Promise<ExtractedDocument[]> {\n const { sourcePath } = this.options;\n const stats = await stat(sourcePath);\n\n if (stats.isFile()) {\n const content = await readFile(sourcePath, 'utf-8');\n return [await this.extractFromString(content, sourcePath)];\n }\n\n if (stats.isDirectory()) {\n return this.extractFromDirectory(sourcePath);\n }\n\n throw new Error(`Invalid path: ${sourcePath}`);\n }\n\n async extractFromString(html: string, filePath: string): Promise<ExtractedDocument> {\n const $ = cheerio.load(html);\n\n // Remove unwanted tags\n for (const tag of this.htmlOptions.removeTags || []) {\n $(tag).remove();\n }\n\n // Extract title\n let title = $('title').text().trim();\n if (!title) {\n title = $('h1').first().text().trim();\n }\n if (!title) {\n title = basename(filePath, '.html').replace(/-|_/g, ' ');\n }\n\n // Convert to text with structure preservation\n const text = convert($.html(), {\n wordwrap: false,\n preserveNewlines: true,\n selectors: [\n { selector: 'h1', options: { uppercase: false, prefix: '\\n# ' } },\n { selector: 'h2', options: { uppercase: false, prefix: '\\n## ' } },\n { selector: 'h3', options: { uppercase: false, prefix: '\\n### ' } },\n { selector: 'h4', options: { uppercase: false, prefix: '\\n#### ' } },\n { selector: 'h5', options: { uppercase: false, prefix: '\\n##### ' } },\n { selector: 'h6', options: { uppercase: false, prefix: '\\n###### ' } },\n { selector: 'ul', 
options: { itemPrefix: ' - ' } },\n { selector: 'ol', options: { itemPrefix: ' 1. ' } },\n { selector: 'table', format: 'dataTable' },\n { selector: 'a', options: { ignoreHref: !this.htmlOptions.includeLinks } },\n { selector: 'img', format: 'skip' },\n ],\n });\n\n const content = this.cleanWhitespace(this.normalizeLineEndings(text));\n\n return {\n path: filePath,\n title,\n content,\n format: 'html',\n metadata: {\n originalLength: html.length,\n extractedLength: content.length,\n },\n };\n }\n\n private async extractFromDirectory(dir: string): Promise<ExtractedDocument[]> {\n const documents: ExtractedDocument[] = [];\n\n const scanDir = async (currentDir: string) => {\n const entries = await readdir(currentDir, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = join(currentDir, entry.name);\n\n if (entry.isDirectory()) {\n await scanDir(fullPath);\n } else if (entry.isFile() && this.canHandle(entry.name)) {\n try {\n const content = await readFile(fullPath, 'utf-8');\n const doc = await this.extractFromString(content, fullPath);\n if (doc.content.trim()) {\n documents.push(doc);\n }\n } catch (error) {\n console.warn(`Failed to extract ${fullPath}:`, error);\n }\n }\n }\n };\n\n await scanDir(dir);\n return documents;\n }\n}\n","/**\n * Markdown Extractor\n *\n * Extracts content from Markdown files.\n */\n\nimport { readFile, readdir, stat } from 'fs/promises';\nimport { join, basename } from 'path';\nimport { BaseExtractor } from './base.js';\nimport type { ExtractedDocument, ExtractorType } from '../types.js';\n\nexport class MarkdownExtractor extends BaseExtractor {\n getType(): ExtractorType {\n return 'markdown';\n }\n\n canHandle(path: string): boolean {\n const ext = this.getExtension(path);\n return ext === 'md' || ext === 'markdown';\n }\n\n async extract(): Promise<ExtractedDocument[]> {\n const { sourcePath } = this.options;\n const stats = await stat(sourcePath);\n\n if (stats.isFile()) {\n return [await this.extractFromFile(sourcePath)];\n }\n\n if (stats.isDirectory()) {\n return this.extractFromDirectory(sourcePath);\n }\n\n throw new Error(`Invalid path: ${sourcePath}`);\n }\n\n private async extractFromFile(filePath: string): Promise<ExtractedDocument> {\n const content = await readFile(filePath, 'utf-8');\n const normalizedContent = this.normalizeLineEndings(content);\n\n // Extract title from first H1 or filename\n let title = this.extractTitle(normalizedContent);\n if (!title) {\n title = basename(filePath, '.md').replace(/-|_/g, ' ');\n }\n\n return {\n path: filePath,\n title,\n content: this.cleanWhitespace(normalizedContent),\n format: 'markdown',\n metadata: {\n originalLength: content.length,\n },\n };\n }\n\n private extractTitle(content: string): string | undefined {\n // Look for first H1 heading\n const h1Match = content.match(/^#\\s+(.+)$/m);\n if (h1Match) {\n return h1Match[1].trim();\n }\n\n // Look for underline-style H1\n const underlineMatch = content.match(/^(.+)\\n=+\\s*$/m);\n if (underlineMatch) {\n return underlineMatch[1].trim();\n }\n\n return undefined;\n }\n\n private async extractFromDirectory(dir: string): Promise<ExtractedDocument[]> {\n const documents: ExtractedDocument[] = [];\n\n const scanDir = async (currentDir: string) => {\n const entries = await readdir(currentDir, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = join(currentDir, entry.name);\n\n if (entry.isDirectory()) {\n // Skip hidden and common non-content directories\n if (!entry.name.startsWith('.') && 
entry.name !== 'node_modules') {\n await scanDir(fullPath);\n }\n } else if (entry.isFile() && this.canHandle(entry.name)) {\n try {\n const doc = await this.extractFromFile(fullPath);\n if (doc.content.trim()) {\n documents.push(doc);\n }\n } catch (error) {\n console.warn(`Failed to extract ${fullPath}:`, error);\n }\n }\n }\n };\n\n await scanDir(dir);\n return documents;\n }\n}\n","/**\n * Document Extractors\n *\n * Extract content from various file formats (CHM, HTML, Markdown)\n */\n\nexport { BaseExtractor } from './base.js';\nexport { CHMExtractor } from './chm.js';\nexport { HTMLExtractor, type HTMLExtractorOptions } from './html.js';\nexport { MarkdownExtractor } from './markdown.js';\n\nimport { stat } from 'fs/promises';\nimport { CHMExtractor } from './chm.js';\nimport { HTMLExtractor } from './html.js';\nimport { MarkdownExtractor } from './markdown.js';\nimport type { ExtractedDocument } from '../types.js';\n\n/**\n * Auto-detect and extract documents from a path\n */\nexport async function extractDocuments(sourcePath: string): Promise<ExtractedDocument[]> {\n const stats = await stat(sourcePath);\n const ext = sourcePath.split('.').pop()?.toLowerCase() || '';\n\n // Single file\n if (stats.isFile()) {\n if (ext === 'chm') {\n const extractor = new CHMExtractor({ sourcePath });\n return extractor.extract();\n }\n if (ext === 'html' || ext === 'htm') {\n const extractor = new HTMLExtractor({ sourcePath });\n return extractor.extract();\n }\n if (ext === 'md' || ext === 'markdown') {\n const extractor = new MarkdownExtractor({ sourcePath });\n return extractor.extract();\n }\n throw new Error(`Unsupported file format: ${ext}`);\n }\n\n // Directory - extract all supported files\n if (stats.isDirectory()) {\n const allDocuments: ExtractedDocument[] = [];\n\n // Extract HTML files\n const htmlExtractor = new HTMLExtractor({ sourcePath });\n const htmlDocs = await htmlExtractor.extract();\n allDocuments.push(...htmlDocs);\n\n // Extract Markdown files\n const mdExtractor = new MarkdownExtractor({ sourcePath });\n const mdDocs = await mdExtractor.extract();\n allDocuments.push(...mdDocs);\n\n return allDocuments;\n }\n\n throw new Error(`Invalid path: ${sourcePath}`);\n}\n","/**\n * Header-Aware Chunker\n *\n * Splits documents into chunks while respecting heading boundaries.\n * Ported from the Python implementation in Gen21AIHelpAndQNA.\n */\n\nimport { generateId } from '@chatbot-packages/utils';\nimport type {\n ChunkingOptions,\n ChunkResult,\n DocumentChunk,\n ChunkMetadata,\n ExtractedDocument,\n} from '../types.js';\n\nexport interface HeaderAwareChunkerOptions extends ChunkingOptions {\n /** Document ID to associate chunks with */\n documentId?: string;\n}\n\ninterface HeadingContext {\n h1?: string;\n h2?: string;\n h3?: string;\n sectionPath: string;\n}\n\nexport class HeaderAwareChunker {\n private options: Required<Omit<HeaderAwareChunkerOptions, 'documentId'>>;\n\n constructor(options?: ChunkingOptions) {\n this.options = {\n chunkSize: 512,\n chunkOverlap: 50,\n minChunkSize: 100,\n maxChunkSize: 800,\n respectHeadings: true,\n splitOnHeadings: [1, 2],\n ...options,\n };\n }\n\n /**\n * Chunk a document into smaller pieces\n */\n chunk(document: ExtractedDocument, documentId?: string): ChunkResult {\n const docId = documentId || generateId('doc');\n const { content } = document;\n\n if (!content.trim()) {\n return {\n chunks: [],\n stats: { totalChunks: 0, avgChunkSize: 0, minChunkSize: 0, maxChunkSize: 0 },\n };\n }\n\n const chunks: DocumentChunk[] = [];\n const 
sections = this.splitBySections(content);\n\n for (const section of sections) {\n const sectionChunks = this.chunkSection(section, docId, document.path);\n chunks.push(...sectionChunks);\n }\n\n // Calculate stats\n const sizes = chunks.map((c) => c.text.length);\n const stats = {\n totalChunks: chunks.length,\n avgChunkSize: sizes.length > 0 ? Math.round(sizes.reduce((a, b) => a + b, 0) / sizes.length) : 0,\n minChunkSize: sizes.length > 0 ? Math.min(...sizes) : 0,\n maxChunkSize: sizes.length > 0 ? Math.max(...sizes) : 0,\n };\n\n return { chunks, stats };\n }\n\n /**\n * Split content by heading boundaries\n */\n private splitBySections(content: string): Array<{ text: string; context: HeadingContext }> {\n const lines = content.split('\\n');\n const sections: Array<{ text: string; context: HeadingContext }> = [];\n\n let currentContext: HeadingContext = { sectionPath: '' };\n let currentText: string[] = [];\n\n for (const line of lines) {\n const heading = this.parseHeading(line);\n\n if (heading && this.options.splitOnHeadings.includes(heading.level)) {\n // Save current section\n if (currentText.length > 0) {\n sections.push({\n text: currentText.join('\\n').trim(),\n context: { ...currentContext },\n });\n currentText = [];\n }\n\n // Update context based on heading level\n if (heading.level === 1) {\n currentContext = {\n h1: heading.text,\n sectionPath: heading.text,\n };\n } else if (heading.level === 2) {\n currentContext = {\n ...currentContext,\n h2: heading.text,\n h3: undefined,\n sectionPath: currentContext.h1\n ? `${currentContext.h1} > ${heading.text}`\n : heading.text,\n };\n } else if (heading.level === 3) {\n currentContext = {\n ...currentContext,\n h3: heading.text,\n sectionPath: currentContext.h2\n ? `${currentContext.sectionPath} > ${heading.text}`\n : heading.text,\n };\n }\n\n currentText.push(line);\n } else {\n currentText.push(line);\n }\n }\n\n // Don't forget the last section\n if (currentText.length > 0) {\n sections.push({\n text: currentText.join('\\n').trim(),\n context: { ...currentContext },\n });\n }\n\n return sections;\n }\n\n /**\n * Parse a heading line\n */\n private parseHeading(line: string): { level: number; text: string } | null {\n // Markdown-style headings: # H1, ## H2, ### H3\n const match = line.match(/^(#{1,6})\\s+(.+)$/);\n if (match) {\n return {\n level: match[1].length,\n text: match[2].trim(),\n };\n }\n return null;\n }\n\n /**\n * Chunk a single section\n */\n private chunkSection(\n section: { text: string; context: HeadingContext },\n documentId: string,\n sourcePath: string\n ): DocumentChunk[] {\n const { text, context } = section;\n const chunks: DocumentChunk[] = [];\n\n // If section is small enough, keep as single chunk\n if (this.estimateTokens(text) <= this.options.maxChunkSize) {\n if (this.estimateTokens(text) >= this.options.minChunkSize) {\n chunks.push(this.createChunk(text, context, documentId, sourcePath, 0));\n }\n return chunks;\n }\n\n // Split into paragraphs\n const paragraphs = text.split(/\\n\\n+/);\n let currentChunk: string[] = [];\n let currentTokens = 0;\n\n for (const para of paragraphs) {\n const paraTokens = this.estimateTokens(para);\n\n // If single paragraph is too large, split by sentences\n if (paraTokens > this.options.maxChunkSize) {\n // Save current chunk first\n if (currentChunk.length > 0) {\n chunks.push(\n this.createChunk(\n currentChunk.join('\\n\\n'),\n context,\n documentId,\n sourcePath,\n chunks.length\n )\n );\n currentChunk = [];\n currentTokens = 0;\n }\n\n // Split large 
paragraph by sentences\n const sentenceChunks = this.splitLargeParagraph(para);\n for (const sentenceChunk of sentenceChunks) {\n chunks.push(\n this.createChunk(sentenceChunk, context, documentId, sourcePath, chunks.length)\n );\n }\n continue;\n }\n\n // Check if adding paragraph exceeds chunk size\n if (currentTokens + paraTokens > this.options.chunkSize) {\n // Save current chunk\n if (currentChunk.length > 0) {\n chunks.push(\n this.createChunk(\n currentChunk.join('\\n\\n'),\n context,\n documentId,\n sourcePath,\n chunks.length\n )\n );\n\n // Start new chunk with overlap\n const overlapText = this.getOverlapText(currentChunk);\n currentChunk = overlapText ? [overlapText, para] : [para];\n currentTokens = this.estimateTokens(currentChunk.join('\\n\\n'));\n } else {\n currentChunk = [para];\n currentTokens = paraTokens;\n }\n } else {\n currentChunk.push(para);\n currentTokens += paraTokens;\n }\n }\n\n // Don't forget the last chunk\n if (currentChunk.length > 0 && currentTokens >= this.options.minChunkSize) {\n chunks.push(\n this.createChunk(currentChunk.join('\\n\\n'), context, documentId, sourcePath, chunks.length)\n );\n }\n\n return chunks;\n }\n\n /**\n * Split a large paragraph into sentence-based chunks\n */\n private splitLargeParagraph(paragraph: string): string[] {\n const sentences = paragraph.match(/[^.!?]+[.!?]+/g) || [paragraph];\n const chunks: string[] = [];\n let currentChunk: string[] = [];\n let currentTokens = 0;\n\n for (const sentence of sentences) {\n const sentenceTokens = this.estimateTokens(sentence);\n\n if (currentTokens + sentenceTokens > this.options.chunkSize && currentChunk.length > 0) {\n chunks.push(currentChunk.join(' ').trim());\n currentChunk = [];\n currentTokens = 0;\n }\n\n currentChunk.push(sentence.trim());\n currentTokens += sentenceTokens;\n }\n\n if (currentChunk.length > 0) {\n chunks.push(currentChunk.join(' ').trim());\n }\n\n return chunks;\n }\n\n /**\n * Get overlap text from previous chunk\n */\n private getOverlapText(previousChunk: string[]): string | null {\n if (!this.options.chunkOverlap || previousChunk.length === 0) {\n return null;\n }\n\n // Take last paragraph(s) that fit within overlap size\n const reversed = [...previousChunk].reverse();\n const overlapParts: string[] = [];\n let tokens = 0;\n\n for (const part of reversed) {\n const partTokens = this.estimateTokens(part);\n if (tokens + partTokens > this.options.chunkOverlap) {\n break;\n }\n overlapParts.unshift(part);\n tokens += partTokens;\n }\n\n return overlapParts.length > 0 ? 
overlapParts.join('\\n\\n') : null;\n }\n\n /**\n * Create a chunk object\n */\n private createChunk(\n text: string,\n context: HeadingContext,\n documentId: string,\n sourcePath: string,\n index: number\n ): DocumentChunk {\n const metadata: ChunkMetadata = {\n sectionPath: context.sectionPath,\n headingH1: context.h1,\n headingH2: context.h2,\n headingH3: context.h3,\n sourcePath,\n chunkIndex: index,\n };\n\n return {\n id: generateId('chunk'),\n documentId,\n text: text.trim(),\n metadata,\n createdAt: new Date(),\n };\n }\n\n /**\n * Estimate token count (rough approximation: ~4 chars per token)\n */\n private estimateTokens(text: string): number {\n return Math.ceil(text.length / 4);\n }\n}\n","/**\n * Local Embeddings using @xenova/transformers\n *\n * Runs embedding models locally without API calls.\n * Supports BGE, all-MiniLM, and other sentence-transformer models.\n */\n\nimport type { EmbeddingBackend, EmbeddingResult } from '../types.js';\n\n// Dynamic import for transformers (it's a heavy module)\nlet pipeline: any = null;\nlet embedder: any = null;\n\nasync function loadPipeline(model: string) {\n if (!pipeline) {\n const transformers = await import('@xenova/transformers');\n pipeline = transformers.pipeline;\n }\n\n if (!embedder) {\n console.log(`[LocalEmbeddings] Loading model: ${model}...`);\n embedder = await pipeline('feature-extraction', model, {\n quantized: true, // Use quantized model for faster inference\n });\n console.log(`[LocalEmbeddings] Model loaded successfully`);\n }\n\n return embedder;\n}\n\nexport interface LocalEmbeddingOptions {\n /** Model name (default: 'Xenova/bge-base-en-v1.5') */\n model?: string;\n /** Batch size for processing (default: 32) */\n batchSize?: number;\n /** Whether to normalize embeddings (default: true) */\n normalize?: boolean;\n}\n\nexport class LocalEmbeddingBackend implements EmbeddingBackend {\n private model: string;\n private batchSize: number;\n private normalize: boolean;\n private dimensions: number;\n\n // Model dimension map\n private static MODEL_DIMENSIONS: Record<string, number> = {\n 'Xenova/bge-large-en-v1.5': 1024,\n 'Xenova/bge-base-en-v1.5': 768,\n 'Xenova/bge-small-en-v1.5': 384,\n 'Xenova/all-MiniLM-L6-v2': 384,\n 'Xenova/all-MiniLM-L12-v2': 384,\n 'Xenova/all-mpnet-base-v2': 768,\n };\n\n constructor(options?: LocalEmbeddingOptions) {\n // Use bge-base for balance of quality and speed\n this.model = options?.model || 'Xenova/bge-base-en-v1.5';\n this.batchSize = options?.batchSize || 32;\n this.normalize = options?.normalize ?? true;\n this.dimensions = LocalEmbeddingBackend.MODEL_DIMENSIONS[this.model] || 768;\n }\n\n async embed(text: string): Promise<EmbeddingResult> {\n const embedder = await loadPipeline(this.model);\n\n // For BGE models, add instruction prefix for better retrieval\n const processedText = this.model.includes('bge')\n ? 
`Represent this sentence for searching relevant passages: ${text}`\n : text;\n\n const output = await embedder(processedText, {\n pooling: 'mean',\n normalize: this.normalize,\n });\n\n // Convert to regular array\n const embedding = Array.from(output.data as Float32Array);\n\n return {\n embedding,\n tokens: Math.ceil(text.length / 4), // Rough estimate\n };\n }\n\n async embedBatch(texts: string[]): Promise<EmbeddingResult[]> {\n const results: EmbeddingResult[] = [];\n\n // Process in batches\n for (let i = 0; i < texts.length; i += this.batchSize) {\n const batch = texts.slice(i, i + this.batchSize);\n const batchResults = await Promise.all(batch.map((text) => this.embed(text)));\n results.push(...batchResults);\n }\n\n return results;\n }\n\n getDimensions(): number {\n return this.dimensions;\n }\n\n getModel(): string {\n return this.model;\n }\n}\n\n/**\n * Available local models\n */\nexport const LOCAL_MODELS = {\n /** BGE Large - Best quality, slower (1024 dims) */\n BGE_LARGE: 'Xenova/bge-large-en-v1.5',\n /** BGE Base - Good balance (768 dims) */\n BGE_BASE: 'Xenova/bge-base-en-v1.5',\n /** BGE Small - Fastest (384 dims) */\n BGE_SMALL: 'Xenova/bge-small-en-v1.5',\n /** MiniLM L6 - Very fast (384 dims) */\n MINILM_L6: 'Xenova/all-MiniLM-L6-v2',\n /** MiniLM L12 - Good quality (384 dims) */\n MINILM_L12: 'Xenova/all-MiniLM-L12-v2',\n /** MPNet - High quality (768 dims) */\n MPNET: 'Xenova/all-mpnet-base-v2',\n};\n","/**\n * OpenAI Embeddings\n *\n * Uses OpenAI's embedding API for high-quality embeddings.\n */\n\nimport type { EmbeddingBackend, EmbeddingResult } from '../types.js';\n\nexport interface OpenAIEmbeddingOptions {\n /** OpenAI API key */\n apiKey: string;\n /** Model name (default: 'text-embedding-3-small') */\n model?: string;\n /** Embedding dimensions (for models that support it) */\n dimensions?: number;\n /** Batch size for processing (default: 100) */\n batchSize?: number;\n /** Base URL for API (for compatible APIs) */\n baseUrl?: string;\n}\n\nexport class OpenAIEmbeddingBackend implements EmbeddingBackend {\n private apiKey: string;\n private model: string;\n private dimensions: number;\n private batchSize: number;\n private baseUrl: string;\n\n // Model dimension defaults\n private static MODEL_DIMENSIONS: Record<string, number> = {\n 'text-embedding-3-large': 3072,\n 'text-embedding-3-small': 1536,\n 'text-embedding-ada-002': 1536,\n };\n\n constructor(options: OpenAIEmbeddingOptions) {\n this.apiKey = options.apiKey;\n this.model = options.model || 'text-embedding-3-small';\n this.dimensions =\n options.dimensions || OpenAIEmbeddingBackend.MODEL_DIMENSIONS[this.model] || 1536;\n this.batchSize = options.batchSize || 100;\n this.baseUrl = options.baseUrl || 'https://api.openai.com/v1';\n }\n\n async embed(text: string): Promise<EmbeddingResult> {\n const results = await this.embedBatch([text]);\n return results[0];\n }\n\n async embedBatch(texts: string[]): Promise<EmbeddingResult[]> {\n const allResults: EmbeddingResult[] = [];\n\n // Process in batches (OpenAI has limits)\n for (let i = 0; i < texts.length; i += this.batchSize) {\n const batch = texts.slice(i, i + this.batchSize);\n const batchResults = await this.callAPI(batch);\n allResults.push(...batchResults);\n }\n\n return allResults;\n }\n\n private async callAPI(texts: string[]): Promise<EmbeddingResult[]> {\n const body: Record<string, unknown> = {\n model: this.model,\n input: texts,\n };\n\n // Add dimensions param for text-embedding-3-* models\n if 
(this.model.startsWith('text-embedding-3-') && this.dimensions) {\n body.dimensions = this.dimensions;\n }\n\n const response = await fetch(`${this.baseUrl}/embeddings`, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${this.apiKey}`,\n },\n body: JSON.stringify(body),\n });\n\n if (!response.ok) {\n const error = await response.text();\n throw new Error(`OpenAI API error: ${response.status} - ${error}`);\n }\n\n const data = (await response.json()) as {\n data: Array<{ embedding: number[]; index: number }>;\n usage: { prompt_tokens: number; total_tokens: number };\n };\n\n // Sort by index to maintain order\n const sorted = data.data.sort((a, b) => a.index - b.index);\n\n return sorted.map((item, i) => ({\n embedding: item.embedding,\n tokens: Math.ceil(texts[i].length / 4), // Rough estimate\n }));\n }\n\n getDimensions(): number {\n return this.dimensions;\n }\n\n getModel(): string {\n return this.model;\n }\n}\n\n/**\n * Available OpenAI embedding models\n */\nexport const OPENAI_MODELS = {\n /** text-embedding-3-large - Highest quality (3072 dims, can reduce) */\n EMBEDDING_3_LARGE: 'text-embedding-3-large',\n /** text-embedding-3-small - Good balance (1536 dims, can reduce) */\n EMBEDDING_3_SMALL: 'text-embedding-3-small',\n /** text-embedding-ada-002 - Legacy model (1536 dims) */\n ADA_002: 'text-embedding-ada-002',\n};\n","/**\n * HuggingFace Inference API Embeddings\n *\n * Uses HuggingFace's free Inference API for embeddings.\n * - Free tier: 1,000 requests/day\n * - GPU-powered (faster than local)\n * - No model download needed\n */\n\nimport type { EmbeddingBackend, EmbeddingResult } from '../types.js';\n\nexport interface HuggingFaceEmbeddingOptions {\n /** HuggingFace API token (get free at hf.co/settings/tokens) */\n apiToken: string;\n /** Model name (default: BAAI/bge-base-en-v1.5) */\n model?: string;\n /** Batch size for processing (default: 32) */\n batchSize?: number;\n /** Request timeout in ms (default: 30000) */\n timeout?: number;\n /** Number of retries (default: 3) */\n retries?: number;\n}\n\nexport class HuggingFaceEmbeddingBackend implements EmbeddingBackend {\n private apiToken: string;\n private model: string;\n private batchSize: number;\n private timeout: number;\n private retries: number;\n private apiUrl: string;\n private dimensions: number;\n\n // Model dimension map\n private static MODEL_DIMENSIONS: Record<string, number> = {\n 'BAAI/bge-large-en-v1.5': 1024,\n 'BAAI/bge-base-en-v1.5': 768,\n 'BAAI/bge-small-en-v1.5': 384,\n 'sentence-transformers/all-MiniLM-L6-v2': 384,\n 'sentence-transformers/all-mpnet-base-v2': 768,\n };\n\n constructor(options: HuggingFaceEmbeddingOptions) {\n this.apiToken = options.apiToken;\n this.model = options.model || 'BAAI/bge-base-en-v1.5';\n this.batchSize = options.batchSize || 32;\n this.timeout = options.timeout || 30000;\n this.retries = options.retries || 3;\n\n this.apiUrl = `https://router.huggingface.co/hf-inference/models/${this.model}`;\n this.dimensions = HuggingFaceEmbeddingBackend.MODEL_DIMENSIONS[this.model] || 768;\n }\n\n private async queryAPI(\n texts: string[],\n attempt: number = 0\n ): Promise<number[][] | null> {\n try {\n const controller = new AbortController();\n const timeoutId = setTimeout(() => controller.abort(), this.timeout);\n\n const response = await fetch(this.apiUrl, {\n method: 'POST',\n headers: {\n 'Authorization': `Bearer ${this.apiToken}`,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify({\n inputs: texts,\n 
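// wait_for_model asks the Inference API to queue the request while the model warms up; 503s that still occur are retried with backoff below\n          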
options: {\n wait_for_model: true,\n },\n }),\n signal: controller.signal,\n });\n\n clearTimeout(timeoutId);\n\n if (response.ok) {\n const data = (await response.json()) as number[][];\n return data;\n }\n\n // Handle specific errors\n if (response.status === 503) {\n // Model loading - retry with backoff\n if (attempt < this.retries) {\n const delay = Math.pow(2, attempt) * 1000; // Exponential backoff\n console.log(\n `[HuggingFace] Model loading, retrying in ${delay}ms (attempt ${attempt + 1}/${this.retries})`\n );\n await new Promise((resolve) => setTimeout(resolve, delay));\n return this.queryAPI(texts, attempt + 1);\n }\n } else if (response.status === 429) {\n console.warn('[HuggingFace] Rate limit exceeded');\n } else {\n const errorText = await response.text();\n console.warn(`[HuggingFace] API error ${response.status}: ${errorText}`);\n }\n\n return null;\n } catch (error: any) {\n if (error.name === 'AbortError') {\n console.warn(`[HuggingFace] Request timeout after ${this.timeout}ms`);\n } else {\n console.warn(`[HuggingFace] Request failed:`, error.message);\n }\n\n // Retry on network errors\n if (attempt < this.retries) {\n const delay = Math.pow(2, attempt) * 1000;\n console.log(`[HuggingFace] Retrying in ${delay}ms (attempt ${attempt + 1}/${this.retries})`);\n await new Promise((resolve) => setTimeout(resolve, delay));\n return this.queryAPI(texts, attempt + 1);\n }\n\n return null;\n }\n }\n\n async embed(text: string): Promise<EmbeddingResult> {\n const result = await this.queryAPI([text]);\n\n if (result && result.length > 0) {\n return {\n embedding: result[0],\n tokens: Math.ceil(text.length / 4), // Rough estimate\n };\n }\n\n throw new Error('HuggingFace Inference API failed after retries');\n }\n\n async embedBatch(texts: string[]): Promise<EmbeddingResult[]> {\n const results: EmbeddingResult[] = [];\n\n // Process in batches\n for (let i = 0; i < texts.length; i += this.batchSize) {\n const batch = texts.slice(i, i + this.batchSize);\n const embeddings = await this.queryAPI(batch);\n\n if (!embeddings || embeddings.length !== batch.length) {\n throw new Error(`HuggingFace API failed for batch ${i / this.batchSize + 1}`);\n }\n\n results.push(\n ...embeddings.map((embedding, idx) => ({\n embedding,\n tokens: Math.ceil(batch[idx].length / 4),\n }))\n );\n }\n\n return results;\n }\n\n getDimensions(): number {\n return this.dimensions;\n }\n\n getModel(): string {\n return this.model;\n }\n}\n\n/**\n * Available HuggingFace models\n */\nexport const HUGGINGFACE_MODELS = {\n /** BGE Large - Best quality (1024 dims) */\n BGE_LARGE: 'BAAI/bge-large-en-v1.5',\n /** BGE Base - Good balance (768 dims) */\n BGE_BASE: 'BAAI/bge-base-en-v1.5',\n /** BGE Small - Fastest (384 dims) */\n BGE_SMALL: 'BAAI/bge-small-en-v1.5',\n /** MiniLM L6 - Very fast (384 dims) */\n MINILM_L6: 'sentence-transformers/all-MiniLM-L6-v2',\n /** MPNet - High quality (768 dims) */\n MPNET: 'sentence-transformers/all-mpnet-base-v2',\n};\n","/**\n * Embedding Backends\n *\n * Generate vector embeddings from text using local or cloud models.\n */\n\nexport {\n LocalEmbeddingBackend,\n LOCAL_MODELS,\n type LocalEmbeddingOptions,\n} from './local.js';\n\nexport {\n OpenAIEmbeddingBackend,\n OPENAI_MODELS,\n type OpenAIEmbeddingOptions,\n} from './openai.js';\n\nexport {\n HuggingFaceEmbeddingBackend,\n HUGGINGFACE_MODELS,\n type HuggingFaceEmbeddingOptions,\n} from './huggingface.js';\n\nimport type { EmbeddingBackend, EmbeddingOptions } from '../types.js';\nimport { LocalEmbeddingBackend, 
LOCAL_MODELS } from './local.js';\nimport { OpenAIEmbeddingBackend } from './openai.js';\nimport { HuggingFaceEmbeddingBackend, HUGGINGFACE_MODELS } from './huggingface.js';\n\n// Model name aliases for convenience\nconst MODEL_ALIASES: Record<string, string> = {\n  // Local models (Xenova)\n  'bge-large': LOCAL_MODELS.BGE_LARGE,\n  'bge-base': LOCAL_MODELS.BGE_BASE,\n  'bge-small': LOCAL_MODELS.BGE_SMALL,\n  'minilm': LOCAL_MODELS.MINILM_L6,\n  'mpnet': LOCAL_MODELS.MPNET,\n  // HuggingFace models (BAAI)\n  'bge-large-hf': HUGGINGFACE_MODELS.BGE_LARGE,\n  'bge-base-hf': HUGGINGFACE_MODELS.BGE_BASE,\n  'bge-small-hf': HUGGINGFACE_MODELS.BGE_SMALL,\n  'minilm-hf': HUGGINGFACE_MODELS.MINILM_L6,\n  'mpnet-hf': HUGGINGFACE_MODELS.MPNET,\n};\n\nfunction resolveModelName(model?: string): string | undefined {\n  if (!model) return undefined;\n  return MODEL_ALIASES[model.toLowerCase()] || model;\n}\n\n/**\n * Create an embedding backend based on options\n */\nexport function createEmbeddingBackend(options: EmbeddingOptions): EmbeddingBackend {\n  switch (options.provider) {\n    case 'local':\n      return new LocalEmbeddingBackend({\n        model: resolveModelName(options.model),\n        batchSize: options.batchSize,\n      });\n\n    case 'openai':\n      if (!options.apiKey) {\n        throw new Error('OpenAI embedding requires an API key');\n      }\n      return new OpenAIEmbeddingBackend({\n        apiKey: options.apiKey,\n        model: options.model,\n        dimensions: options.dimensions,\n        batchSize: options.batchSize,\n      });\n\n    case 'huggingface':\n      if (!options.apiKey) {\n        throw new Error('HuggingFace Inference API requires an API token (get free at hf.co/settings/tokens)');\n      }\n      return new HuggingFaceEmbeddingBackend({\n        apiToken: options.apiKey,\n        model: resolveModelName(options.model) || HUGGINGFACE_MODELS.BGE_BASE,\n        batchSize: options.batchSize,\n      });\n\n    default:\n      throw new Error(`Unknown embedding provider: ${options.provider}`);\n  }\n}\n","/**\n * SQLite Vector Store\n *\n * Simple vector store using SQLite with better-sqlite3.\n * Stores embeddings as JSON arrays and performs similarity search in JavaScript.\n * Best for development, small datasets, or when PostgreSQL isn't available.\n */\n\nimport Database from 'better-sqlite3';\nimport type { VectorStore, DocumentChunk, SearchResult } from '../types.js';\n\nexport interface SQLiteVectorStoreOptions {\n  /** Database file path (use ':memory:' for in-memory) */\n  path?: string;\n  /** Table name (default: 'chunks') */\n  tableName?: string;\n  /** Embedding dimensions */\n  dimensions: number;\n}\n\nexport class SQLiteVectorStore implements VectorStore {\n  private db: Database.Database;\n  private tableName: string;\n  private dimensions: number;\n\n  constructor(options: SQLiteVectorStoreOptions) {\n    this.db = new Database(options.path || ':memory:');\n    this.tableName = options.tableName || 'chunks';\n    this.dimensions = options.dimensions;\n\n    // Enable WAL mode for better concurrent access\n    this.db.pragma('journal_mode = WAL');\n    // INSERT OR REPLACE only fires the FTS delete trigger when recursive triggers\n    // are enabled, so turn them on to keep the FTS index in sync on re-inserts\n    this.db.pragma('recursive_triggers = ON');\n  }\n\n  async initialize(): Promise<void> {\n    // Create chunks table\n    this.db.exec(`\n      CREATE TABLE IF NOT EXISTS ${this.tableName} (\n        id TEXT PRIMARY KEY,\n        document_id TEXT NOT NULL,\n        text TEXT NOT NULL,\n        embedding TEXT,\n        metadata TEXT,\n        created_at TEXT DEFAULT CURRENT_TIMESTAMP\n      )\n    `);\n\n    // Create indexes\n    this.db.exec(`\n      CREATE INDEX IF NOT EXISTS idx_${this.tableName}_document_id\n      ON ${this.tableName}(document_id)\n    `);\n\n    // Create FTS5 virtual table for text search\n    this.db.exec(`\n      CREATE VIRTUAL TABLE IF NOT EXISTS ${this.tableName}_fts\n      USING fts5(id, text, 
content='${this.tableName}', content_rowid='rowid')\n `);\n\n // Create triggers to keep FTS in sync\n this.db.exec(`\n CREATE TRIGGER IF NOT EXISTS ${this.tableName}_ai AFTER INSERT ON ${this.tableName} BEGIN\n INSERT INTO ${this.tableName}_fts(rowid, id, text) VALUES (new.rowid, new.id, new.text);\n END\n `);\n\n this.db.exec(`\n CREATE TRIGGER IF NOT EXISTS ${this.tableName}_ad AFTER DELETE ON ${this.tableName} BEGIN\n INSERT INTO ${this.tableName}_fts(${this.tableName}_fts, rowid, id, text)\n VALUES('delete', old.rowid, old.id, old.text);\n END\n `);\n }\n\n async insert(chunks: DocumentChunk[]): Promise<void> {\n const stmt = this.db.prepare(`\n INSERT OR REPLACE INTO ${this.tableName} (id, document_id, text, embedding, metadata, created_at)\n VALUES (?, ?, ?, ?, ?, ?)\n `);\n\n const insertMany = this.db.transaction((items: DocumentChunk[]) => {\n for (const chunk of items) {\n stmt.run(\n chunk.id,\n chunk.documentId,\n chunk.text,\n chunk.embedding ? JSON.stringify(chunk.embedding) : null,\n JSON.stringify(chunk.metadata),\n chunk.createdAt.toISOString()\n );\n }\n });\n\n insertMany(chunks);\n }\n\n async denseSearch(\n embedding: number[],\n topK: number,\n filter?: Record<string, unknown>\n ): Promise<SearchResult[]> {\n // Get all chunks with embeddings\n let query = `SELECT * FROM ${this.tableName} WHERE embedding IS NOT NULL`;\n const params: unknown[] = [];\n\n // Apply filters\n if (filter?.documentId) {\n query += ` AND document_id = ?`;\n params.push(filter.documentId);\n }\n\n const rows = this.db.prepare(query).all(...params) as Array<{\n id: string;\n document_id: string;\n text: string;\n embedding: string;\n metadata: string;\n created_at: string;\n }>;\n\n // Calculate cosine similarity for each chunk\n const results: Array<{ chunk: DocumentChunk; score: number }> = [];\n\n for (const row of rows) {\n const chunkEmbedding = JSON.parse(row.embedding) as number[];\n const score = this.cosineSimilarity(embedding, chunkEmbedding);\n\n results.push({\n chunk: this.rowToChunk(row),\n score,\n });\n }\n\n // Sort by score and take top K\n results.sort((a, b) => b.score - a.score);\n\n return results.slice(0, topK).map((r) => ({\n ...r,\n searchType: 'dense' as const,\n }));\n }\n\n async sparseSearch(\n query: string,\n topK: number,\n filter?: Record<string, unknown>\n ): Promise<SearchResult[]> {\n // Use FTS5 for text search\n let ftsQuery = `\n SELECT c.*, bm25(${this.tableName}_fts) as score\n FROM ${this.tableName}_fts fts\n JOIN ${this.tableName} c ON fts.id = c.id\n WHERE ${this.tableName}_fts MATCH ?\n `;\n const params: unknown[] = [this.escapeFTSQuery(query)];\n\n // Apply filters\n if (filter?.documentId) {\n ftsQuery += ` AND c.document_id = ?`;\n params.push(filter.documentId);\n }\n\n ftsQuery += ` ORDER BY score LIMIT ?`;\n params.push(topK);\n\n try {\n const rows = this.db.prepare(ftsQuery).all(...params) as Array<{\n id: string;\n document_id: string;\n text: string;\n embedding: string | null;\n metadata: string;\n created_at: string;\n score: number;\n }>;\n\n return rows.map((row) => ({\n chunk: this.rowToChunk(row),\n score: Math.abs(row.score), // BM25 returns negative scores\n searchType: 'sparse' as const,\n }));\n } catch {\n // FTS query failed, return empty results\n return [];\n }\n }\n\n async deleteByDocumentId(documentId: string): Promise<number> {\n const result = this.db\n .prepare(`DELETE FROM ${this.tableName} WHERE document_id = ?`)\n .run(documentId);\n return result.changes;\n }\n\n async getById(id: string): Promise<DocumentChunk | 
null> {\n const row = this.db\n .prepare(`SELECT * FROM ${this.tableName} WHERE id = ?`)\n .get(id) as {\n id: string;\n document_id: string;\n text: string;\n embedding: string | null;\n metadata: string;\n created_at: string;\n } | undefined;\n\n return row ? this.rowToChunk(row) : null;\n }\n\n async close(): Promise<void> {\n this.db.close();\n }\n\n /**\n * Calculate cosine similarity between two vectors\n */\n private cosineSimilarity(a: number[], b: number[]): number {\n if (a.length !== b.length) {\n throw new Error('Vectors must have same length');\n }\n\n let dotProduct = 0;\n let normA = 0;\n let normB = 0;\n\n for (let i = 0; i < a.length; i++) {\n dotProduct += a[i] * b[i];\n normA += a[i] * a[i];\n normB += b[i] * b[i];\n }\n\n const denominator = Math.sqrt(normA) * Math.sqrt(normB);\n return denominator === 0 ? 0 : dotProduct / denominator;\n }\n\n /**\n * Convert database row to DocumentChunk\n */\n private rowToChunk(row: {\n id: string;\n document_id: string;\n text: string;\n embedding: string | null;\n metadata: string;\n created_at: string;\n }): DocumentChunk {\n return {\n id: row.id,\n documentId: row.document_id,\n text: row.text,\n embedding: row.embedding ? JSON.parse(row.embedding) : undefined,\n metadata: JSON.parse(row.metadata),\n createdAt: new Date(row.created_at),\n };\n }\n\n /**\n * Escape FTS query to prevent syntax errors\n */\n private escapeFTSQuery(query: string): string {\n // Remove special FTS characters and wrap in quotes for phrase search\n const cleaned = query.replace(/['\"(){}[\\]^~*?:\\\\]/g, ' ').trim();\n return cleaned\n .split(/\\s+/)\n .filter((w) => w.length > 0)\n .map((w) => `\"${w}\"`)\n .join(' OR ');\n }\n}\n","/**\n * PostgreSQL Vector Store with pgvector\n *\n * Production-ready vector store using PostgreSQL with pgvector extension.\n * Supports HNSW indexing for fast similarity search.\n */\n\nimport { Pool, type PoolConfig } from 'pg';\nimport type { VectorStore, DocumentChunk, SearchResult } from '../types.js';\n\nexport interface PostgresVectorStoreOptions {\n /** Connection string or pool config */\n connectionString?: string;\n poolConfig?: PoolConfig;\n /** Table name (default: 'chunks') */\n tableName?: string;\n /** Embedding dimensions */\n dimensions: number;\n /** Schema name (default: 'public') */\n schema?: string;\n}\n\nexport class PostgresVectorStore implements VectorStore {\n private pool: Pool;\n private tableName: string;\n private schema: string;\n private dimensions: number;\n private fullTableName: string;\n\n constructor(options: PostgresVectorStoreOptions) {\n const poolConfig: PoolConfig = options.poolConfig || {\n connectionString: options.connectionString,\n max: 10,\n idleTimeoutMillis: 30000,\n };\n\n this.pool = new Pool(poolConfig);\n this.tableName = options.tableName || 'chunks';\n this.schema = options.schema || 'public';\n this.dimensions = options.dimensions;\n this.fullTableName = `${this.schema}.${this.tableName}`;\n }\n\n async initialize(): Promise<void> {\n const client = await this.pool.connect();\n\n try {\n // Enable required extensions\n await client.query('CREATE EXTENSION IF NOT EXISTS vector');\n await client.query('CREATE EXTENSION IF NOT EXISTS pg_trgm');\n\n // Create documents table\n await client.query(`\n CREATE TABLE IF NOT EXISTS ${this.schema}.documents (\n id TEXT PRIMARY KEY,\n source_id TEXT NOT NULL,\n path TEXT NOT NULL,\n title TEXT NOT NULL,\n metadata JSONB DEFAULT '{}',\n created_at TIMESTAMPTZ DEFAULT NOW(),\n updated_at TIMESTAMPTZ DEFAULT NOW()\n )\n `);\n\n 
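// NOTE: vector(n) fixes the embedding dimensionality when the table is created;\n      // moving to a model with different dimensions later requires a migration\n      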
// Create chunks table\n      await client.query(`\n        CREATE TABLE IF NOT EXISTS ${this.fullTableName} (\n          id TEXT PRIMARY KEY,\n          document_id TEXT NOT NULL REFERENCES ${this.schema}.documents(id) ON DELETE CASCADE,\n          text TEXT NOT NULL,\n          embedding vector(${this.dimensions}),\n          metadata JSONB DEFAULT '{}',\n          created_at TIMESTAMPTZ DEFAULT NOW()\n        )\n      `);\n\n      // Create indexes\n      await client.query(`\n        CREATE INDEX IF NOT EXISTS idx_${this.tableName}_document_id\n        ON ${this.fullTableName}(document_id)\n      `);\n\n      // HNSW index for vector similarity (better than IVFFlat for most cases)\n      await client.query(`\n        CREATE INDEX IF NOT EXISTS idx_${this.tableName}_embedding_hnsw\n        ON ${this.fullTableName}\n        USING hnsw (embedding vector_cosine_ops)\n        WITH (m = 16, ef_construction = 64)\n      `);\n\n      // GIN index for text search\n      await client.query(`\n        CREATE INDEX IF NOT EXISTS idx_${this.tableName}_text_trgm\n        ON ${this.fullTableName}\n        USING gin (text gin_trgm_ops)\n      `);\n\n      // GIN index for metadata\n      await client.query(`\n        CREATE INDEX IF NOT EXISTS idx_${this.tableName}_metadata\n        ON ${this.fullTableName}\n        USING gin (metadata)\n      `);\n    } finally {\n      client.release();\n    }\n  }\n\n  async insert(chunks: DocumentChunk[]): Promise<void> {\n    if (chunks.length === 0) return;\n\n    const client = await this.pool.connect();\n\n    try {\n      await client.query('BEGIN');\n\n      // Insert each row individually inside one transaction; COPY would be faster for very large batches\n      for (const chunk of chunks) {\n        const embedding = chunk.embedding\n          ? `[${chunk.embedding.join(',')}]`\n          : null;\n\n        await client.query(\n          `\n          INSERT INTO ${this.fullTableName} (id, document_id, text, embedding, metadata, created_at)\n          VALUES ($1, $2, $3, $4::vector, $5, $6)\n          ON CONFLICT (id) DO UPDATE SET\n            text = EXCLUDED.text,\n            embedding = EXCLUDED.embedding,\n            metadata = EXCLUDED.metadata\n        `,\n          [\n            chunk.id,\n            chunk.documentId,\n            chunk.text,\n            embedding,\n            JSON.stringify(chunk.metadata),\n            chunk.createdAt,\n          ]\n        );\n      }\n\n      await client.query('COMMIT');\n    } catch (error) {\n      await client.query('ROLLBACK');\n      throw error;\n    } finally {\n      client.release();\n    }\n  }\n\n  async denseSearch(\n    embedding: number[],\n    topK: number,\n    filter?: Record<string, unknown>\n  ): Promise<SearchResult[]> {\n    const embeddingStr = `[${embedding.join(',')}]`;\n\n    let query = `\n      SELECT\n        id,\n        document_id,\n        text,\n        metadata,\n        created_at,\n        1 - (embedding <=> $1::vector) as score\n      FROM ${this.fullTableName}\n      WHERE embedding IS NOT NULL\n    `;\n    const params: unknown[] = [embeddingStr];\n    let paramIndex = 2;\n\n    // Apply filters\n    if (filter?.documentId) {\n      query += ` AND document_id = $${paramIndex}`;\n      params.push(filter.documentId);\n      paramIndex++;\n    }\n\n    if (filter?.metadata) {\n      query += ` AND metadata @> $${paramIndex}`;\n      params.push(JSON.stringify(filter.metadata));\n      paramIndex++;\n    }\n\n    query += ` ORDER BY embedding <=> $1::vector LIMIT $${paramIndex}`;\n    params.push(topK);\n\n    const result = await this.pool.query(query, params);\n\n    return result.rows.map((row) => ({\n      chunk: this.rowToChunk(row),\n      score: row.score,\n      searchType: 'dense' as const,\n    }));\n  }\n\n  async sparseSearch(\n    query: string,\n    topK: number,\n    filter?: Record<string, unknown>\n  ): Promise<SearchResult[]> {\n    // Use pg_trgm similarity as the sparse (lexical) signal; it approximates keyword search rather than true BM25\n    let sql = `\n      SELECT\n        id,\n        document_id,\n        text,\n        metadata,\n        created_at,\n        similarity(text, $1) as score\n      FROM ${this.fullTableName}\n      WHERE text % $1\n    `;\n    const params: unknown[] = [query];\n    let paramIndex = 2;\n\n    // Apply filters\n    if 
(filter?.documentId) {\n sql += ` AND document_id = $${paramIndex}`;\n params.push(filter.documentId);\n paramIndex++;\n }\n\n sql += ` ORDER BY score DESC LIMIT $${paramIndex}`;\n params.push(topK);\n\n const result = await this.pool.query(sql, params);\n\n return result.rows.map((row) => ({\n chunk: this.rowToChunk(row),\n score: row.score,\n searchType: 'sparse' as const,\n }));\n }\n\n async deleteByDocumentId(documentId: string): Promise<number> {\n const result = await this.pool.query(\n `DELETE FROM ${this.fullTableName} WHERE document_id = $1`,\n [documentId]\n );\n return result.rowCount || 0;\n }\n\n async getById(id: string): Promise<DocumentChunk | null> {\n const result = await this.pool.query(\n `SELECT * FROM ${this.fullTableName} WHERE id = $1`,\n [id]\n );\n return result.rows.length > 0 ? this.rowToChunk(result.rows[0]) : null;\n }\n\n async close(): Promise<void> {\n await this.pool.end();\n }\n\n /**\n * Get chunk and document counts\n */\n async getStats(): Promise<{ chunks: number; documents: number }> {\n const chunksResult = await this.pool.query(\n `SELECT COUNT(*) FROM ${this.fullTableName}`\n );\n const docsResult = await this.pool.query(\n `SELECT COUNT(*) FROM ${this.schema}.documents`\n );\n\n return {\n chunks: parseInt(chunksResult.rows[0].count, 10),\n documents: parseInt(docsResult.rows[0].count, 10),\n };\n }\n\n /**\n * Insert or update a document\n */\n async upsertDocument(doc: {\n id: string;\n sourceId: string;\n path: string;\n title: string;\n metadata?: Record<string, unknown>;\n }): Promise<void> {\n await this.pool.query(\n `\n INSERT INTO ${this.schema}.documents (id, source_id, path, title, metadata)\n VALUES ($1, $2, $3, $4, $5)\n ON CONFLICT (id) DO UPDATE SET\n path = EXCLUDED.path,\n title = EXCLUDED.title,\n metadata = EXCLUDED.metadata,\n updated_at = NOW()\n `,\n [doc.id, doc.sourceId, doc.path, doc.title, JSON.stringify(doc.metadata || {})]\n );\n }\n\n private rowToChunk(row: {\n id: string;\n document_id: string;\n text: string;\n embedding?: string;\n metadata: Record<string, unknown>;\n created_at: Date;\n }): DocumentChunk {\n return {\n id: row.id,\n documentId: row.document_id,\n text: row.text,\n metadata: row.metadata as DocumentChunk['metadata'],\n createdAt: new Date(row.created_at),\n };\n }\n}\n","/**\n * Vector Store Implementations\n *\n * Store and search document chunks using vector similarity.\n */\n\nexport { SQLiteVectorStore, type SQLiteVectorStoreOptions } from './sqlite.js';\nexport { PostgresVectorStore, type PostgresVectorStoreOptions } from './postgres.js';\n\nimport type { VectorStore, VectorStoreOptions } from '../types.js';\nimport { SQLiteVectorStore } from './sqlite.js';\nimport { PostgresVectorStore } from './postgres.js';\n\n/**\n * Create a vector store based on options\n */\nexport function createVectorStore(options: VectorStoreOptions): VectorStore {\n switch (options.type) {\n case 'sqlite':\n return new SQLiteVectorStore({\n path: options.connectionString || ':memory:',\n tableName: options.tableName,\n dimensions: options.dimensions,\n });\n\n case 'postgres':\n if (!options.connectionString) {\n throw new Error('PostgreSQL requires a connection string');\n }\n return new PostgresVectorStore({\n connectionString: options.connectionString,\n tableName: options.tableName,\n dimensions: options.dimensions,\n });\n\n case 'memory':\n // Use SQLite in-memory mode\n return new SQLiteVectorStore({\n path: ':memory:',\n tableName: options.tableName,\n dimensions: options.dimensions,\n });\n\n default:\n 
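// Fail loudly on unrecognized store types rather than silently falling back to a default\n      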
throw new Error(`Unknown vector store type: ${options.type}`);\n }\n}\n","/**\n * Hybrid Retrieval with RRF Fusion\n *\n * Combines dense (vector) and sparse (BM25) search results using\n * Reciprocal Rank Fusion for better retrieval quality.\n */\n\nimport type {\n VectorStore,\n EmbeddingBackend,\n SearchResult,\n RetrievalOptions,\n RetrievalResult,\n} from '../types.js';\n\nexport interface HybridRetrieverOptions {\n vectorStore: VectorStore;\n embeddings: EmbeddingBackend;\n /** Default number of results to return */\n defaultTopK?: number;\n /** Dense search weight (0-1, default: 0.7) */\n denseWeight?: number;\n /** Sparse search weight (0-1, default: 0.3) */\n sparseWeight?: number;\n /** RRF k parameter (default: 60) */\n rrfK?: number;\n}\n\nexport class HybridRetriever {\n private vectorStore: VectorStore;\n private embeddings: EmbeddingBackend;\n private defaultTopK: number;\n private denseWeight: number;\n private sparseWeight: number;\n private rrfK: number;\n\n constructor(options: HybridRetrieverOptions) {\n this.vectorStore = options.vectorStore;\n this.embeddings = options.embeddings;\n this.defaultTopK = options.defaultTopK || 10;\n this.denseWeight = options.denseWeight ?? 0.7;\n this.sparseWeight = options.sparseWeight ?? 0.3;\n this.rrfK = options.rrfK || 60;\n }\n\n /**\n * Perform hybrid search combining dense and sparse retrieval\n */\n async search(query: string, options?: RetrievalOptions): Promise<RetrievalResult> {\n const startTime = Date.now();\n const topK = options?.topK || this.defaultTopK;\n const filter = options?.filter;\n\n // Get weights (can be overridden per-query)\n const denseWeight = options?.denseWeight ?? this.denseWeight;\n const sparseWeight = options?.sparseWeight ?? this.sparseWeight;\n\n // Retrieve more candidates than needed for fusion\n const candidateK = Math.min(topK * 3, 100);\n\n // Run dense and sparse searches in parallel\n const [denseResults, sparseResults] = await Promise.all([\n this.denseSearch(query, candidateK, filter),\n this.sparseSearch(query, candidateK, filter),\n ]);\n\n // Apply RRF fusion\n const fusedResults = this.rrfFusion(\n denseResults,\n sparseResults,\n denseWeight,\n sparseWeight\n );\n\n // Take top K results\n const finalResults = fusedResults.slice(0, topK);\n\n return {\n results: finalResults,\n stats: {\n denseCount: denseResults.length,\n sparseCount: sparseResults.length,\n rerankingApplied: false,\n totalTime: Date.now() - startTime,\n },\n };\n }\n\n /**\n * Dense (vector) search\n */\n private async denseSearch(\n query: string,\n topK: number,\n filter?: Record<string, unknown>\n ): Promise<SearchResult[]> {\n try {\n const embeddingResult = await this.embeddings.embed(query);\n return await this.vectorStore.denseSearch(embeddingResult.embedding, topK, filter);\n } catch (error) {\n console.warn('Dense search failed:', error);\n return [];\n }\n }\n\n /**\n * Sparse (BM25/text) search\n */\n private async sparseSearch(\n query: string,\n topK: number,\n filter?: Record<string, unknown>\n ): Promise<SearchResult[]> {\n try {\n return await this.vectorStore.sparseSearch(query, topK, filter);\n } catch (error) {\n console.warn('Sparse search failed:', error);\n return [];\n }\n }\n\n /**\n * Reciprocal Rank Fusion\n *\n * Combines multiple ranked lists into a single ranking.\n * Formula: score = sum(1 / (k + rank_i)) for each list i\n */\n private rrfFusion(\n denseResults: SearchResult[],\n sparseResults: SearchResult[],\n denseWeight: number,\n sparseWeight: number\n ): SearchResult[] {\n const k 
= this.rrfK;\n const scores = new Map<string, { score: number; result: SearchResult }>();\n\n // Add dense results with weighted RRF score\n denseResults.forEach((result, rank) => {\n const rrfScore = (denseWeight * 1) / (k + rank + 1);\n const existing = scores.get(result.chunk.id);\n\n if (existing) {\n existing.score += rrfScore;\n } else {\n scores.set(result.chunk.id, {\n score: rrfScore,\n result: { ...result, searchType: 'hybrid' },\n });\n }\n });\n\n // Add sparse results with weighted RRF score\n sparseResults.forEach((result, rank) => {\n const rrfScore = (sparseWeight * 1) / (k + rank + 1);\n const existing = scores.get(result.chunk.id);\n\n if (existing) {\n existing.score += rrfScore;\n } else {\n scores.set(result.chunk.id, {\n score: rrfScore,\n result: { ...result, searchType: 'hybrid' },\n });\n }\n });\n\n // Sort by combined score\n const combined = Array.from(scores.values())\n .sort((a, b) => b.score - a.score)\n .map((entry) => ({\n ...entry.result,\n score: entry.score,\n }));\n\n return combined;\n }\n}\n\n/**\n * Simple dense-only retriever (no sparse search)\n */\nexport class DenseRetriever {\n private vectorStore: VectorStore;\n private embeddings: EmbeddingBackend;\n private defaultTopK: number;\n\n constructor(options: {\n vectorStore: VectorStore;\n embeddings: EmbeddingBackend;\n defaultTopK?: number;\n }) {\n this.vectorStore = options.vectorStore;\n this.embeddings = options.embeddings;\n this.defaultTopK = options.defaultTopK || 10;\n }\n\n async search(query: string, options?: RetrievalOptions): Promise<RetrievalResult> {\n const startTime = Date.now();\n const topK = options?.topK || this.defaultTopK;\n\n const embeddingResult = await this.embeddings.embed(query);\n const results = await this.vectorStore.denseSearch(\n embeddingResult.embedding,\n topK,\n options?.filter\n );\n\n return {\n results,\n stats: {\n denseCount: results.length,\n sparseCount: 0,\n rerankingApplied: false,\n totalTime: Date.now() - startTime,\n },\n };\n 
}\n}\n"],"mappings":";AAMA,SAAS,cAAAA,mBAAkB;AAC3B,SAAS,gBAAgB,yBAA4C;;;ACD9D,IAAe,gBAAf,MAA6B;AAAA,EACxB;AAAA,EAEV,YAAY,SAA2B;AACrC,SAAK,UAAU;AAAA,EACjB;AAAA;AAAA,EAYU,aAAa,MAAsB;AAC3C,UAAM,QAAQ,KAAK,MAAM,GAAG;AAC5B,WAAO,MAAM,SAAS,IAAI,MAAM,MAAM,SAAS,CAAC,EAAE,YAAY,IAAI;AAAA,EACpE;AAAA;AAAA,EAGU,qBAAqB,MAAsB;AACnD,WAAO,KAAK,QAAQ,SAAS,IAAI,EAAE,QAAQ,OAAO,IAAI;AAAA,EACxD;AAAA;AAAA,EAGU,gBAAgB,MAAsB;AAC9C,WAAO,KACJ,QAAQ,WAAW,MAAM,EACzB,QAAQ,WAAW,GAAG,EACtB,KAAK;AAAA,EACV;AACF;;;ACjCA,SAAS,YAAY;AACrB,SAAS,iBAAiB;AAC1B,SAAS,WAAAC,UAAS,YAAAC,WAAU,OAAO,UAAU;AAC7C,SAAS,kBAAkB;AAC3B,SAAS,QAAAC,OAAM,YAAAC,WAAU,eAAe;;;ACLxC,SAAS,UAAU,SAAS,YAAY;AACxC,SAAS,MAAM,gBAAgB;AAC/B,YAAY,aAAa;AACzB,SAAS,eAAe;AAcjB,IAAM,gBAAN,cAA4B,cAAc;AAAA,EACvC;AAAA,EAER,YAAY,SAA+B;AACzC,UAAM,OAAO;AACb,SAAK,cAAc;AAAA,MACjB,kBAAkB;AAAA,MAClB,cAAc;AAAA,MACd,YAAY,CAAC,UAAU,SAAS,OAAO,UAAU,UAAU,SAAS,QAAQ,MAAM;AAAA,MAClF,GAAG;AAAA,IACL;AAAA,EACF;AAAA,EAEA,UAAyB;AACvB,WAAO;AAAA,EACT;AAAA,EAEA,UAAU,MAAuB;AAC/B,UAAM,MAAM,KAAK,aAAa,IAAI;AAClC,WAAO,QAAQ,UAAU,QAAQ;AAAA,EACnC;AAAA,EAEA,MAAM,UAAwC;AAC5C,UAAM,EAAE,WAAW,IAAI,KAAK;AAC5B,UAAM,QAAQ,MAAM,KAAK,UAAU;AAEnC,QAAI,MAAM,OAAO,GAAG;AAClB,YAAM,UAAU,MAAM,SAAS,YAAY,OAAO;AAClD,aAAO,CAAC,MAAM,KAAK,kBAAkB,SAAS,UAAU,CAAC;AAAA,IAC3D;AAEA,QAAI,MAAM,YAAY,GAAG;AACvB,aAAO,KAAK,qBAAqB,UAAU;AAAA,IAC7C;AAEA,UAAM,IAAI,MAAM,iBAAiB,UAAU,EAAE;AAAA,EAC/C;AAAA,EAEA,MAAM,kBAAkB,MAAc,UAA8C;AAClF,UAAM,IAAY,aAAK,IAAI;AAG3B,eAAW,OAAO,KAAK,YAAY,cAAc,CAAC,GAAG;AACnD,QAAE,GAAG,EAAE,OAAO;AAAA,IAChB;AAGA,QAAI,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK;AACnC,QAAI,CAAC,OAAO;AACV,cAAQ,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK;AAAA,IACtC;AACA,QAAI,CAAC,OAAO;AACV,cAAQ,SAAS,UAAU,OAAO,EAAE,QAAQ,QAAQ,GAAG;AAAA,IACzD;AAGA,UAAM,OAAO,QAAQ,EAAE,KAAK,GAAG;AAAA,MAC7B,UAAU;AAAA,MACV,kBAAkB;AAAA,MAClB,WAAW;AAAA,QACT,EAAE,UAAU,MAAM,SAAS,EAAE,WAAW,OAAO,QAAQ,OAAO,EAAE;AAAA,QAChE,EAAE,UAAU,MAAM,SAAS,EAAE,WAAW,OAAO,QAAQ,QAAQ,EAAE;AAAA,QACjE,EAAE,UAAU,MAAM,SAAS,EAAE,WAAW,OAAO,QAAQ,SAAS,EAAE;AAAA,QAClE,EAAE,UAAU,MAAM,SAAS,EAAE,WAAW,OAAO,QAAQ,UAAU,EAAE;AAAA,QACnE,EAAE,UAAU,MAAM,SAAS,EAAE,WAAW,OAAO,QAAQ,WAAW,EAAE;AAAA,QACpE,EAAE,UAAU,MAAM,SAAS,EAAE,WAAW,OAAO,QAAQ,YAAY,EAAE;AAAA,QACrE,EAAE,UAAU,MAAM,SAAS,EAAE,YAAY,OAAO,EAAE;AAAA,QAClD,EAAE,UAAU,MAAM,SAAS,EAAE,YAAY,QAAQ,EAAE;AAAA,QACnD,EAAE,UAAU,SAAS,QAAQ,YAAY;AAAA,QACzC,EAAE,UAAU,KAAK,SAAS,EAAE,YAAY,CAAC,KAAK,YAAY,aAAa,EAAE;AAAA,QACzE,EAAE,UAAU,OAAO,QAAQ,OAAO;AAAA,MACpC;AAAA,IACF,CAAC;AAED,UAAM,UAAU,KAAK,gBAAgB,KAAK,qBAAqB,IAAI,CAAC;AAEpE,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,MACR,UAAU;AAAA,QACR,gBAAgB,KAAK;AAAA,QACrB,iBAAiB,QAAQ;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAc,qBAAqB,KAA2C;AAC5E,UAAM,YAAiC,CAAC;AAExC,UAAM,UAAU,OAAO,eAAuB;AAC5C,YAAM,UAAU,MAAM,QAAQ,YAAY,EAAE,eAAe,KAAK,CAAC;AAEjE,iBAAW,SAAS,SAAS;AAC3B,cAAM,WAAW,KAAK,YAAY,MAAM,IAAI;AAE5C,YAAI,MAAM,YAAY,GAAG;AACvB,gBAAM,QAAQ,QAAQ;AAAA,QACxB,WAAW,MAAM,OAAO,KAAK,KAAK,UAAU,MAAM,IAAI,GAAG;AACvD,cAAI;AACF,kBAAM,UAAU,MAAM,SAAS,UAAU,OAAO;AAChD,kBAAM,MAAM,MAAM,KAAK,kBAAkB,SAAS,QAAQ;AAC1D,gBAAI,IAAI,QAAQ,KAAK,GAAG;AACtB,wBAAU,KAAK,GAAG;AAAA,YACpB;AAAA,UACF,SAAS,OAAO;AACd,oBAAQ,KAAK,qBAAqB,QAAQ,KAAK,KAAK;AAAA,UACtD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,UAAM,QAAQ,GAAG;AACjB,WAAO;AAAA,EACT;AACF;;;AD3HA,IAAM,YAAY,UAAU,IAAI;AAEzB,IAAM,eAAN,cAA2B,cAAc;AAAA,EACtC;AAAA,EAER,YAAY,SAAqD;AAC/D,UAAM,OAAO;AACb,SAAK,UAAU,QAAQ,aAAaC,MAAK,QAAQ,QAAQ,UAAU,GAAG,mBAAmB;AAAA,EAC3F;AAAA,EAEA,UAAyB;AACvB,WAAO;AAAA,EACT;AAAA,EAEA,UAAU,MAAuB;AAC/B,WAAO,KAAK,aAAa,IAAI,MAAM;AAAA,EACrC;AAAA,EAEA,MAAM,UAAwC;AAC5C,UAAM,EAAE,WAAW,IAAI,KAAK;AAE5B,QAAI,CAAC,WAAW,UAA
U,GAAG;AAC3B,YAAM,IAAI,MAAM,uBAAuB,UAAU,EAAE;AAAA,IACrD;AAGA,UAAM,MAAM,KAAK,SAAS,EAAE,WAAW,KAAK,CAAC;AAE7C,QAAI;AAEF,YAAM,KAAK,oBAAoB,YAAY,KAAK,OAAO;AAGvD,YAAM,YAAY,MAAM,KAAK,cAAc,KAAK,OAAO;AACvD,YAAM,YAAiC,CAAC;AAExC,iBAAW,YAAY,WAAW;AAChC,YAAI;AACF,gBAAM,UAAU,MAAMC,UAAS,UAAU,OAAO;AAChD,gBAAM,gBAAgB,IAAI,cAAc,EAAE,YAAY,SAAS,CAAC;AAChE,gBAAM,YAAY,MAAM,cAAc,kBAAkB,SAAS,QAAQ;AAEzE,cAAI,UAAU,QAAQ,KAAK,GAAG;AAC5B,sBAAU,KAAK;AAAA,cACb,GAAG;AAAA,cACH,QAAQ;AAAA,cACR,UAAU;AAAA,gBACR,GAAG,UAAU;AAAA,gBACb,WAAWC,UAAS,UAAU;AAAA,gBAC9B,cAAc,SAAS,QAAQ,KAAK,SAAS,EAAE;AAAA,cACjD;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,SAAS,OAAO;AAEd,kBAAQ,KAAK,8BAA8B,QAAQ,IAAI,KAAK;AAAA,QAC9D;AAAA,MACF;AAEA,aAAO;AAAA,IACT,UAAE;AAEA,UAAI,WAAW,KAAK,OAAO,GAAG;AAC5B,cAAM,GAAG,KAAK,SAAS,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAc,oBAAoB,SAAiB,WAAkC;AACnF,QAAI;AAEF,YAAM,UAAU,SAAS,OAAO,QAAQ,SAAS,QAAQ;AAAA,QACvD,WAAW,KAAK,OAAO;AAAA;AAAA,MACzB,CAAC;AAAA,IACH,SAAS,OAAO;AAEd,UAAI;AACF,cAAM,UAAU,UAAU,OAAO,QAAQ,SAAS,QAAQ;AAAA,UACxD,WAAW,KAAK,OAAO;AAAA,QACzB,CAAC;AAAA,MACH,QAAQ;AACN,cAAM,IAAI;AAAA,UACR;AAAA;AAAA;AAAA;AAAA,kBAIqB,KAAK;AAAA,QAC5B;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAc,cAAc,KAAgC;AAC1D,UAAM,YAAsB,CAAC;AAE7B,UAAM,UAAU,OAAO,eAAuB;AAC5C,YAAM,UAAU,MAAMC,SAAQ,YAAY,EAAE,eAAe,KAAK,CAAC;AAEjE,iBAAW,SAAS,SAAS;AAC3B,cAAM,WAAWH,MAAK,YAAY,MAAM,IAAI;AAE5C,YAAI,MAAM,YAAY,GAAG;AAEvB,cAAI,CAAC,MAAM,KAAK,WAAW,GAAG,KAAK,CAAC,MAAM,KAAK,WAAW,GAAG,GAAG;AAC9D,kBAAM,QAAQ,QAAQ;AAAA,UACxB;AAAA,QACF,WAAW,MAAM,OAAO,GAAG;AACzB,gBAAM,MAAM,KAAK,aAAa,MAAM,IAAI;AACxC,cAAI,QAAQ,UAAU,QAAQ,OAAO;AACnC,sBAAU,KAAK,QAAQ;AAAA,UACzB;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,UAAM,QAAQ,GAAG;AACjB,WAAO,UAAU,KAAK;AAAA,EACxB;AACF;;;AEhIA,SAAS,YAAAI,WAAU,WAAAC,UAAS,QAAAC,aAAY;AACxC,SAAS,QAAAC,OAAM,YAAAC,iBAAgB;AAIxB,IAAM,oBAAN,cAAgC,cAAc;AAAA,EACnD,UAAyB;AACvB,WAAO;AAAA,EACT;AAAA,EAEA,UAAU,MAAuB;AAC/B,UAAM,MAAM,KAAK,aAAa,IAAI;AAClC,WAAO,QAAQ,QAAQ,QAAQ;AAAA,EACjC;AAAA,EAEA,MAAM,UAAwC;AAC5C,UAAM,EAAE,WAAW,IAAI,KAAK;AAC5B,UAAM,QAAQ,MAAMC,MAAK,UAAU;AAEnC,QAAI,MAAM,OAAO,GAAG;AAClB,aAAO,CAAC,MAAM,KAAK,gBAAgB,UAAU,CAAC;AAAA,IAChD;AAEA,QAAI,MAAM,YAAY,GAAG;AACvB,aAAO,KAAK,qBAAqB,UAAU;AAAA,IAC7C;AAEA,UAAM,IAAI,MAAM,iBAAiB,UAAU,EAAE;AAAA,EAC/C;AAAA,EAEA,MAAc,gBAAgB,UAA8C;AAC1E,UAAM,UAAU,MAAMC,UAAS,UAAU,OAAO;AAChD,UAAM,oBAAoB,KAAK,qBAAqB,OAAO;AAG3D,QAAI,QAAQ,KAAK,aAAa,iBAAiB;AAC/C,QAAI,CAAC,OAAO;AACV,cAAQC,UAAS,UAAU,KAAK,EAAE,QAAQ,QAAQ,GAAG;AAAA,IACvD;AAEA,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA,SAAS,KAAK,gBAAgB,iBAAiB;AAAA,MAC/C,QAAQ;AAAA,MACR,UAAU;AAAA,QACR,gBAAgB,QAAQ;AAAA,MAC1B;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,aAAa,SAAqC;AAExD,UAAM,UAAU,QAAQ,MAAM,aAAa;AAC3C,QAAI,SAAS;AACX,aAAO,QAAQ,CAAC,EAAE,KAAK;AAAA,IACzB;AAGA,UAAM,iBAAiB,QAAQ,MAAM,gBAAgB;AACrD,QAAI,gBAAgB;AAClB,aAAO,eAAe,CAAC,EAAE,KAAK;AAAA,IAChC;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,qBAAqB,KAA2C;AAC5E,UAAM,YAAiC,CAAC;AAExC,UAAM,UAAU,OAAO,eAAuB;AAC5C,YAAM,UAAU,MAAMC,SAAQ,YAAY,EAAE,eAAe,KAAK,CAAC;AAEjE,iBAAW,SAAS,SAAS;AAC3B,cAAM,WAAWC,MAAK,YAAY,MAAM,IAAI;AAE5C,YAAI,MAAM,YAAY,GAAG;AAEvB,cAAI,CAAC,MAAM,KAAK,WAAW,GAAG,KAAK,MAAM,SAAS,gBAAgB;AAChE,kBAAM,QAAQ,QAAQ;AAAA,UACxB;AAAA,QACF,WAAW,MAAM,OAAO,KAAK,KAAK,UAAU,MAAM,IAAI,GAAG;AACvD,cAAI;AACF,kBAAM,MAAM,MAAM,KAAK,gBAAgB,QAAQ;AAC/C,gBAAI,IAAI,QAAQ,KAAK,GAAG;AACtB,wBAAU,KAAK,GAAG;AAAA,YACpB;AAAA,UACF,SAAS,OAAO;AACd,oBAAQ,KAAK,qBAAqB,QAAQ,KAAK,KAAK;AAAA,UACtD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,UAAM,QAAQ,GAAG;AACjB,WAAO;AAAA,EACT;AACF;;;AC5FA,SAAS,QAAAC,aAAY;AASrB,eAAsB,iBAAiB,YAAkD;AACvF,QAAM,QAAQ,MAAMC,MAAK,UAAU;AACnC,QAAM,MAAM,WAAW,MAAM,GAAG,EAAE,IAAI,GAAG,YAAY
,KAAK;AAG1D,MAAI,MAAM,OAAO,GAAG;AAClB,QAAI,QAAQ,OAAO;AACjB,YAAM,YAAY,IAAI,aAAa,EAAE,WAAW,CAAC;AACjD,aAAO,UAAU,QAAQ;AAAA,IAC3B;AACA,QAAI,QAAQ,UAAU,QAAQ,OAAO;AACnC,YAAM,YAAY,IAAI,cAAc,EAAE,WAAW,CAAC;AAClD,aAAO,UAAU,QAAQ;AAAA,IAC3B;AACA,QAAI,QAAQ,QAAQ,QAAQ,YAAY;AACtC,YAAM,YAAY,IAAI,kBAAkB,EAAE,WAAW,CAAC;AACtD,aAAO,UAAU,QAAQ;AAAA,IAC3B;AACA,UAAM,IAAI,MAAM,4BAA4B,GAAG,EAAE;AAAA,EACnD;AAGA,MAAI,MAAM,YAAY,GAAG;AACvB,UAAM,eAAoC,CAAC;AAG3C,UAAM,gBAAgB,IAAI,cAAc,EAAE,WAAW,CAAC;AACtD,UAAM,WAAW,MAAM,cAAc,QAAQ;AAC7C,iBAAa,KAAK,GAAG,QAAQ;AAG7B,UAAM,cAAc,IAAI,kBAAkB,EAAE,WAAW,CAAC;AACxD,UAAM,SAAS,MAAM,YAAY,QAAQ;AACzC,iBAAa,KAAK,GAAG,MAAM;AAE3B,WAAO;AAAA,EACT;AAEA,QAAM,IAAI,MAAM,iBAAiB,UAAU,EAAE;AAC/C;;;ACpDA,SAAS,kBAAkB;AAqBpB,IAAM,qBAAN,MAAyB;AAAA,EACtB;AAAA,EAER,YAAY,SAA2B;AACrC,SAAK,UAAU;AAAA,MACb,WAAW;AAAA,MACX,cAAc;AAAA,MACd,cAAc;AAAA,MACd,cAAc;AAAA,MACd,iBAAiB;AAAA,MACjB,iBAAiB,CAAC,GAAG,CAAC;AAAA,MACtB,GAAG;AAAA,IACL;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAA6B,YAAkC;AACnE,UAAM,QAAQ,cAAc,WAAW,KAAK;AAC5C,UAAM,EAAE,QAAQ,IAAI;AAEpB,QAAI,CAAC,QAAQ,KAAK,GAAG;AACnB,aAAO;AAAA,QACL,QAAQ,CAAC;AAAA,QACT,OAAO,EAAE,aAAa,GAAG,cAAc,GAAG,cAAc,GAAG,cAAc,EAAE;AAAA,MAC7E;AAAA,IACF;AAEA,UAAM,SAA0B,CAAC;AACjC,UAAM,WAAW,KAAK,gBAAgB,OAAO;AAE7C,eAAW,WAAW,UAAU;AAC9B,YAAM,gBAAgB,KAAK,aAAa,SAAS,OAAO,SAAS,IAAI;AACrE,aAAO,KAAK,GAAG,aAAa;AAAA,IAC9B;AAGA,UAAM,QAAQ,OAAO,IAAI,CAAC,MAAM,EAAE,KAAK,MAAM;AAC7C,UAAM,QAAQ;AAAA,MACZ,aAAa,OAAO;AAAA,MACpB,cAAc,MAAM,SAAS,IAAI,KAAK,MAAM,MAAM,OAAO,CAAC,GAAG,MAAM,IAAI,GAAG,CAAC,IAAI,MAAM,MAAM,IAAI;AAAA,MAC/F,cAAc,MAAM,SAAS,IAAI,KAAK,IAAI,GAAG,KAAK,IAAI;AAAA,MACtD,cAAc,MAAM,SAAS,IAAI,KAAK,IAAI,GAAG,KAAK,IAAI;AAAA,IACxD;AAEA,WAAO,EAAE,QAAQ,MAAM;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,SAAmE;AACzF,UAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,UAAM,WAA6D,CAAC;AAEpE,QAAI,iBAAiC,EAAE,aAAa,GAAG;AACvD,QAAI,cAAwB,CAAC;AAE7B,eAAW,QAAQ,OAAO;AACxB,YAAM,UAAU,KAAK,aAAa,IAAI;AAEtC,UAAI,WAAW,KAAK,QAAQ,gBAAgB,SAAS,QAAQ,KAAK,GAAG;AAEnE,YAAI,YAAY,SAAS,GAAG;AAC1B,mBAAS,KAAK;AAAA,YACZ,MAAM,YAAY,KAAK,IAAI,EAAE,KAAK;AAAA,YAClC,SAAS,EAAE,GAAG,eAAe;AAAA,UAC/B,CAAC;AACD,wBAAc,CAAC;AAAA,QACjB;AAGA,YAAI,QAAQ,UAAU,GAAG;AACvB,2BAAiB;AAAA,YACf,IAAI,QAAQ;AAAA,YACZ,aAAa,QAAQ;AAAA,UACvB;AAAA,QACF,WAAW,QAAQ,UAAU,GAAG;AAC9B,2BAAiB;AAAA,YACf,GAAG;AAAA,YACH,IAAI,QAAQ;AAAA,YACZ,IAAI;AAAA,YACJ,aAAa,eAAe,KACxB,GAAG,eAAe,EAAE,MAAM,QAAQ,IAAI,KACtC,QAAQ;AAAA,UACd;AAAA,QACF,WAAW,QAAQ,UAAU,GAAG;AAC9B,2BAAiB;AAAA,YACf,GAAG;AAAA,YACH,IAAI,QAAQ;AAAA,YACZ,aAAa,eAAe,KACxB,GAAG,eAAe,WAAW,MAAM,QAAQ,IAAI,KAC/C,QAAQ;AAAA,UACd;AAAA,QACF;AAEA,oBAAY,KAAK,IAAI;AAAA,MACvB,OAAO;AACL,oBAAY,KAAK,IAAI;AAAA,MACvB;AAAA,IACF;AAGA,QAAI,YAAY,SAAS,GAAG;AAC1B,eAAS,KAAK;AAAA,QACZ,MAAM,YAAY,KAAK,IAAI,EAAE,KAAK;AAAA,QAClC,SAAS,EAAE,GAAG,eAAe;AAAA,MAC/B,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,MAAsD;AAEzE,UAAM,QAAQ,KAAK,MAAM,mBAAmB;AAC5C,QAAI,OAAO;AACT,aAAO;AAAA,QACL,OAAO,MAAM,CAAC,EAAE;AAAA,QAChB,MAAM,MAAM,CAAC,EAAE,KAAK;AAAA,MACtB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aACN,SACA,YACA,YACiB;AACjB,UAAM,EAAE,MAAM,QAAQ,IAAI;AAC1B,UAAM,SAA0B,CAAC;AAGjC,QAAI,KAAK,eAAe,IAAI,KAAK,KAAK,QAAQ,cAAc;AAC1D,UAAI,KAAK,eAAe,IAAI,KAAK,KAAK,QAAQ,cAAc;AAC1D,eAAO,KAAK,KAAK,YAAY,MAAM,SAAS,YAAY,YAAY,CAAC,CAAC;AAAA,MACxE;AACA,aAAO;AAAA,IACT;AAGA,UAAM,aAAa,KAAK,MAAM,OAAO;AACrC,QAAI,eAAyB,CAAC;AAC9B,QAAI,gBAAgB;AAEpB,eAAW,QAAQ,YAAY;AAC7B,YAAM,aAAa,KAAK,eAAe,IAAI;AAG3C,UAAI,aAAa,KAAK,QAAQ,cAAc;AAE1C,YAAI,aAAa,SAAS,GAAG;AAC3B,iBAAO;AAAA,YACL,KAAK;AAAA,cACH,aAAa,KAAK,MAAM;AAAA,cACxB;AAAA,cACA;AAAA,cACA;AAAA,cACA,OAAO;AAAA,YACT;AAAA,UACF;AACA,yBAAe,CAAC;AAChB,0BAAgB
;AAAA,QAClB;AAGA,cAAM,iBAAiB,KAAK,oBAAoB,IAAI;AACpD,mBAAW,iBAAiB,gBAAgB;AAC1C,iBAAO;AAAA,YACL,KAAK,YAAY,eAAe,SAAS,YAAY,YAAY,OAAO,MAAM;AAAA,UAChF;AAAA,QACF;AACA;AAAA,MACF;AAGA,UAAI,gBAAgB,aAAa,KAAK,QAAQ,WAAW;AAEvD,YAAI,aAAa,SAAS,GAAG;AAC3B,iBAAO;AAAA,YACL,KAAK;AAAA,cACH,aAAa,KAAK,MAAM;AAAA,cACxB;AAAA,cACA;AAAA,cACA;AAAA,cACA,OAAO;AAAA,YACT;AAAA,UACF;AAGA,gBAAM,cAAc,KAAK,eAAe,YAAY;AACpD,yBAAe,cAAc,CAAC,aAAa,IAAI,IAAI,CAAC,IAAI;AACxD,0BAAgB,KAAK,eAAe,aAAa,KAAK,MAAM,CAAC;AAAA,QAC/D,OAAO;AACL,yBAAe,CAAC,IAAI;AACpB,0BAAgB;AAAA,QAClB;AAAA,MACF,OAAO;AACL,qBAAa,KAAK,IAAI;AACtB,yBAAiB;AAAA,MACnB;AAAA,IACF;AAGA,QAAI,aAAa,SAAS,KAAK,iBAAiB,KAAK,QAAQ,cAAc;AACzE,aAAO;AAAA,QACL,KAAK,YAAY,aAAa,KAAK,MAAM,GAAG,SAAS,YAAY,YAAY,OAAO,MAAM;AAAA,MAC5F;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,oBAAoB,WAA6B;AACvD,UAAM,YAAY,UAAU,MAAM,gBAAgB,KAAK,CAAC,SAAS;AACjE,UAAM,SAAmB,CAAC;AAC1B,QAAI,eAAyB,CAAC;AAC9B,QAAI,gBAAgB;AAEpB,eAAW,YAAY,WAAW;AAChC,YAAM,iBAAiB,KAAK,eAAe,QAAQ;AAEnD,UAAI,gBAAgB,iBAAiB,KAAK,QAAQ,aAAa,aAAa,SAAS,GAAG;AACtF,eAAO,KAAK,aAAa,KAAK,GAAG,EAAE,KAAK,CAAC;AACzC,uBAAe,CAAC;AAChB,wBAAgB;AAAA,MAClB;AAEA,mBAAa,KAAK,SAAS,KAAK,CAAC;AACjC,uBAAiB;AAAA,IACnB;AAEA,QAAI,aAAa,SAAS,GAAG;AAC3B,aAAO,KAAK,aAAa,KAAK,GAAG,EAAE,KAAK,CAAC;AAAA,IAC3C;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,eAAwC;AAC7D,QAAI,CAAC,KAAK,QAAQ,gBAAgB,cAAc,WAAW,GAAG;AAC5D,aAAO;AAAA,IACT;AAGA,UAAM,WAAW,CAAC,GAAG,aAAa,EAAE,QAAQ;AAC5C,UAAM,eAAyB,CAAC;AAChC,QAAI,SAAS;AAEb,eAAW,QAAQ,UAAU;AAC3B,YAAM,aAAa,KAAK,eAAe,IAAI;AAC3C,UAAI,SAAS,aAAa,KAAK,QAAQ,cAAc;AACnD;AAAA,MACF;AACA,mBAAa,QAAQ,IAAI;AACzB,gBAAU;AAAA,IACZ;AAEA,WAAO,aAAa,SAAS,IAAI,aAAa,KAAK,MAAM,IAAI;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA,EAKQ,YACN,MACA,SACA,YACA,YACA,OACe;AACf,UAAM,WAA0B;AAAA,MAC9B,aAAa,QAAQ;AAAA,MACrB,WAAW,QAAQ;AAAA,MACnB,WAAW,QAAQ;AAAA,MACnB,WAAW,QAAQ;AAAA,MACnB;AAAA,MACA,YAAY;AAAA,IACd;AAEA,WAAO;AAAA,MACL,IAAI,WAAW,OAAO;AAAA,MACtB;AAAA,MACA,MAAM,KAAK,KAAK;AAAA,MAChB;AAAA,MACA,WAAW,oBAAI,KAAK;AAAA,IACtB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,MAAsB;AAC3C,WAAO,KAAK,KAAK,KAAK,SAAS,CAAC;AAAA,EAClC;AACF;;;ACvUA,IAAI,WAAgB;AACpB,IAAI,WAAgB;AAEpB,eAAe,aAAa,OAAe;AACzC,MAAI,CAAC,UAAU;AACb,UAAM,eAAe,MAAM,OAAO,sBAAsB;AACxD,eAAW,aAAa;AAAA,EAC1B;AAEA,MAAI,CAAC,UAAU;AACb,YAAQ,IAAI,oCAAoC,KAAK,KAAK;AAC1D,eAAW,MAAM,SAAS,sBAAsB,OAAO;AAAA,MACrD,WAAW;AAAA;AAAA,IACb,CAAC;AACD,YAAQ,IAAI,6CAA6C;AAAA,EAC3D;AAEA,SAAO;AACT;AAWO,IAAM,wBAAN,MAAM,uBAAkD;AAAA,EACrD;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGR,OAAe,mBAA2C;AAAA,IACxD,4BAA4B;AAAA,IAC5B,2BAA2B;AAAA,IAC3B,4BAA4B;AAAA,IAC5B,2BAA2B;AAAA,IAC3B,4BAA4B;AAAA,IAC5B,4BAA4B;AAAA,EAC9B;AAAA,EAEA,YAAY,SAAiC;AAE3C,SAAK,QAAQ,SAAS,SAAS;AAC/B,SAAK,YAAY,SAAS,aAAa;AACvC,SAAK,YAAY,SAAS,aAAa;AACvC,SAAK,aAAa,uBAAsB,iBAAiB,KAAK,KAAK,KAAK;AAAA,EAC1E;AAAA,EAEA,MAAM,MAAM,MAAwC;AAClD,UAAMC,YAAW,MAAM,aAAa,KAAK,KAAK;AAG9C,UAAM,gBAAgB,KAAK,MAAM,SAAS,KAAK,IAC3C,4DAA4D,IAAI,KAChE;AAEJ,UAAM,SAAS,MAAMA,UAAS,eAAe;AAAA,MAC3C,SAAS;AAAA,MACT,WAAW,KAAK;AAAA,IAClB,CAAC;AAGD,UAAM,YAAY,MAAM,KAAK,OAAO,IAAoB;AAExD,WAAO;AAAA,MACL;AAAA,MACA,QAAQ,KAAK,KAAK,KAAK,SAAS,CAAC;AAAA;AAAA,IACnC;AAAA,EACF;AAAA,EAEA,MAAM,WAAW,OAA6C;AAC5D,UAAM,UAA6B,CAAC;AAGpC,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,KAAK,WAAW;AACrD,YAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,KAAK,SAAS;AAC/C,YAAM,eAAe,MAAM,QAAQ,IAAI,MAAM,IAAI,CAAC,SAAS,KAAK,MAAM,IAAI,CAAC,CAAC;AAC5E,cAAQ,KAAK,GAAG,YAAY;AAAA,IAC9B;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,gBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAmB;AACjB,WAAO,KAAK;AAAA,EACd;AACF;AAKO,IAAM,eAAe;AAAA;AAAA,EAE1B,WAAW;AAAA;AAAA,EAEX,UAAU;AAAA;AAAA,EAEV,WAAW;AAAA;AAAA,EAEX,WAAW;AAAA;AAAA,EAEX,YAAY;AAA
A;AAAA,EAEZ,OAAO;AACT;;;ACtGO,IAAM,yBAAN,MAAM,wBAAmD;AAAA,EACtD;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGR,OAAe,mBAA2C;AAAA,IACxD,0BAA0B;AAAA,IAC1B,0BAA0B;AAAA,IAC1B,0BAA0B;AAAA,EAC5B;AAAA,EAEA,YAAY,SAAiC;AAC3C,SAAK,SAAS,QAAQ;AACtB,SAAK,QAAQ,QAAQ,SAAS;AAC9B,SAAK,aACH,QAAQ,cAAc,wBAAuB,iBAAiB,KAAK,KAAK,KAAK;AAC/E,SAAK,YAAY,QAAQ,aAAa;AACtC,SAAK,UAAU,QAAQ,WAAW;AAAA,EACpC;AAAA,EAEA,MAAM,MAAM,MAAwC;AAClD,UAAM,UAAU,MAAM,KAAK,WAAW,CAAC,IAAI,CAAC;AAC5C,WAAO,QAAQ,CAAC;AAAA,EAClB;AAAA,EAEA,MAAM,WAAW,OAA6C;AAC5D,UAAM,aAAgC,CAAC;AAGvC,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,KAAK,WAAW;AACrD,YAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,KAAK,SAAS;AAC/C,YAAM,eAAe,MAAM,KAAK,QAAQ,KAAK;AAC7C,iBAAW,KAAK,GAAG,YAAY;AAAA,IACjC;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,QAAQ,OAA6C;AACjE,UAAM,OAAgC;AAAA,MACpC,OAAO,KAAK;AAAA,MACZ,OAAO;AAAA,IACT;AAGA,QAAI,KAAK,MAAM,WAAW,mBAAmB,KAAK,KAAK,YAAY;AACjE,WAAK,aAAa,KAAK;AAAA,IACzB;AAEA,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,OAAO,eAAe;AAAA,MACzD,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,eAAe,UAAU,KAAK,MAAM;AAAA,MACtC;AAAA,MACA,MAAM,KAAK,UAAU,IAAI;AAAA,IAC3B,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,QAAQ,MAAM,SAAS,KAAK;AAClC,YAAM,IAAI,MAAM,qBAAqB,SAAS,MAAM,MAAM,KAAK,EAAE;AAAA,IACnE;AAEA,UAAM,OAAQ,MAAM,SAAS,KAAK;AAMlC,UAAM,SAAS,KAAK,KAAK,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAEzD,WAAO,OAAO,IAAI,CAAC,MAAM,OAAO;AAAA,MAC9B,WAAW,KAAK;AAAA,MAChB,QAAQ,KAAK,KAAK,MAAM,CAAC,EAAE,SAAS,CAAC;AAAA;AAAA,IACvC,EAAE;AAAA,EACJ;AAAA,EAEA,gBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAmB;AACjB,WAAO,KAAK;AAAA,EACd;AACF;AAKO,IAAM,gBAAgB;AAAA;AAAA,EAE3B,mBAAmB;AAAA;AAAA,EAEnB,mBAAmB;AAAA;AAAA,EAEnB,SAAS;AACX;;;AChGO,IAAM,8BAAN,MAAM,6BAAwD;AAAA,EAC3D;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGR,OAAe,mBAA2C;AAAA,IACxD,0BAA0B;AAAA,IAC1B,yBAAyB;AAAA,IACzB,0BAA0B;AAAA,IAC1B,0CAA0C;AAAA,IAC1C,2CAA2C;AAAA,EAC7C;AAAA,EAEA,YAAY,SAAsC;AAChD,SAAK,WAAW,QAAQ;AACxB,SAAK,QAAQ,QAAQ,SAAS;AAC9B,SAAK,YAAY,QAAQ,aAAa;AACtC,SAAK,UAAU,QAAQ,WAAW;AAClC,SAAK,UAAU,QAAQ,WAAW;AAElC,SAAK,SAAS,qDAAqD,KAAK,KAAK;AAC7E,SAAK,aAAa,6BAA4B,iBAAiB,KAAK,KAAK,KAAK;AAAA,EAChF;AAAA,EAEA,MAAc,SACZ,OACA,UAAkB,GACU;AAC5B,QAAI;AACF,YAAM,aAAa,IAAI,gBAAgB;AACvC,YAAM,YAAY,WAAW,MAAM,WAAW,MAAM,GAAG,KAAK,OAAO;AAEnE,YAAM,WAAW,MAAM,MAAM,KAAK,QAAQ;AAAA,QACxC,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,iBAAiB,UAAU,KAAK,QAAQ;AAAA,UACxC,gBAAgB;AAAA,QAClB;AAAA,QACA,MAAM,KAAK,UAAU;AAAA,UACnB,QAAQ;AAAA,UACR,SAAS;AAAA,YACP,gBAAgB;AAAA,UAClB;AAAA,QACF,CAAC;AAAA,QACD,QAAQ,WAAW;AAAA,MACrB,CAAC;AAED,mBAAa,SAAS;AAEtB,UAAI,SAAS,IAAI;AACf,cAAM,OAAQ,MAAM,SAAS,KAAK;AAClC,eAAO;AAAA,MACT;AAGA,UAAI,SAAS,WAAW,KAAK;AAE3B,YAAI,UAAU,KAAK,SAAS;AAC1B,gBAAM,QAAQ,KAAK,IAAI,GAAG,OAAO,IAAI;AACrC,kBAAQ;AAAA,YACN,4CAA4C,KAAK,eAAe,UAAU,CAAC,IAAI,KAAK,OAAO;AAAA,UAC7F;AACA,gBAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,KAAK,CAAC;AACzD,iBAAO,KAAK,SAAS,OAAO,UAAU,CAAC;AAAA,QACzC;AAAA,MACF,WAAW,SAAS,WAAW,KAAK;AAClC,gBAAQ,KAAK,mCAAmC;AAAA,MAClD,OAAO;AACL,cAAM,YAAY,MAAM,SAAS,KAAK;AACtC,gBAAQ,KAAK,2BAA2B,SAAS,MAAM,KAAK,SAAS,EAAE;AAAA,MACzE;AAEA,aAAO;AAAA,IACT,SAAS,OAAY;AACnB,UAAI,MAAM,SAAS,cAAc;AAC/B,gBAAQ,KAAK,uCAAuC,KAAK,OAAO,IAAI;AAAA,MACtE,OAAO;AACL,gBAAQ,KAAK,iCAAiC,MAAM,OAAO;AAAA,MAC7D;AAGA,UAAI,UAAU,KAAK,SAAS;AAC1B,cAAM,QAAQ,KAAK,IAAI,GAAG,OAAO,IAAI;AACrC,gBAAQ,IAAI,6BAA6B,KAAK,eAAe,UAAU,CAAC,IAAI,KAAK,OAAO,GAAG;AAC3F,cAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,KAAK,CAAC;AACzD,eAAO,KAAK,SAAS,OAAO,UAAU,CAAC;AAAA,MACzC;AAEA,aAAO;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAM,MAAM,MAAwC;AAClD,UAAM,SAAS,MAAM,KAAK,SAAS,CAAC,IAAI,CAAC;AAEzC,QAAI,UAAU,OAAO,SAAS,GAAG;AAC/B,aAAO;AAAA,QACL,WAAW,OAAO,CAAC;AAAA,QACnB,QAAQ,KAAK,KAAK,K
AAK,SAAS,CAAC;AAAA;AAAA,MACnC;AAAA,IACF;AAEA,UAAM,IAAI,MAAM,gDAAgD;AAAA,EAClE;AAAA,EAEA,MAAM,WAAW,OAA6C;AAC5D,UAAM,UAA6B,CAAC;AAGpC,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,KAAK,WAAW;AACrD,YAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,KAAK,SAAS;AAC/C,YAAM,aAAa,MAAM,KAAK,SAAS,KAAK;AAE5C,UAAI,CAAC,cAAc,WAAW,WAAW,MAAM,QAAQ;AACrD,cAAM,IAAI,MAAM,oCAAoC,IAAI,KAAK,YAAY,CAAC,EAAE;AAAA,MAC9E;AAEA,cAAQ;AAAA,QACN,GAAG,WAAW,IAAI,CAAC,WAAW,SAAS;AAAA,UACrC;AAAA,UACA,QAAQ,KAAK,KAAK,MAAM,GAAG,EAAE,SAAS,CAAC;AAAA,QACzC,EAAE;AAAA,MACJ;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,gBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAmB;AACjB,WAAO,KAAK;AAAA,EACd;AACF;AAKO,IAAM,qBAAqB;AAAA;AAAA,EAEhC,WAAW;AAAA;AAAA,EAEX,UAAU;AAAA;AAAA,EAEV,WAAW;AAAA;AAAA,EAEX,WAAW;AAAA;AAAA,EAEX,OAAO;AACT;;;ACtJA,IAAM,gBAAwC;AAAA;AAAA,EAE5C,aAAa,aAAa;AAAA,EAC1B,YAAY,aAAa;AAAA,EACzB,aAAa,aAAa;AAAA,EAC1B,UAAU,aAAa;AAAA,EACvB,SAAS,aAAa;AAAA;AAAA,EAEtB,gBAAgB,mBAAmB;AAAA,EACnC,eAAe,mBAAmB;AAAA,EAClC,gBAAgB,mBAAmB;AAAA,EACnC,aAAa,mBAAmB;AAAA,EAChC,YAAY,mBAAmB;AACjC;AAEA,SAAS,iBAAiB,OAAoC;AAC5D,MAAI,CAAC,MAAO,QAAO;AACnB,SAAO,cAAc,MAAM,YAAY,CAAC,KAAK;AAC/C;AAKO,SAAS,uBAAuB,SAA6C;AAClF,UAAQ,QAAQ,UAAU;AAAA,IACxB,KAAK;AACH,aAAO,IAAI,sBAAsB;AAAA,QAC/B,OAAO,iBAAiB,QAAQ,KAAK;AAAA,QACrC,WAAW,QAAQ;AAAA,MACrB,CAAC;AAAA,IAEH,KAAK;AACH,UAAI,CAAC,QAAQ,QAAQ;AACnB,cAAM,IAAI,MAAM,sCAAsC;AAAA,MACxD;AACA,aAAO,IAAI,uBAAuB;AAAA,QAChC,QAAQ,QAAQ;AAAA,QAChB,OAAO,QAAQ;AAAA,QACf,YAAY,QAAQ;AAAA,QACpB,WAAW,QAAQ;AAAA,MACrB,CAAC;AAAA,IAEH,KAAK;AACH,UAAI,CAAC,QAAQ,QAAQ;AACnB,cAAM,IAAI,MAAM,qFAAqF;AAAA,MACvG;AACA,aAAO,IAAI,4BAA4B;AAAA,QACrC,UAAU,QAAQ;AAAA,QAClB,OAAO,iBAAiB,QAAQ,KAAK,KAAK,mBAAmB;AAAA,QAC7D,WAAW,QAAQ;AAAA,MACrB,CAAC;AAAA,IAEH;AACE,YAAM,IAAI,MAAM,+BAA+B,QAAQ,QAAQ,EAAE;AAAA,EACrE;AACF;;;AC7EA,OAAO,cAAc;AAYd,IAAM,oBAAN,MAA+C;AAAA,EAC5C;AAAA,EACA;AAAA,EACA;AAAA,EAER,YAAY,SAAmC;AAC7C,SAAK,KAAK,IAAI,SAAS,QAAQ,QAAQ,UAAU;AACjD,SAAK,YAAY,QAAQ,aAAa;AACtC,SAAK,aAAa,QAAQ;AAG1B,SAAK,GAAG,OAAO,oBAAoB;AAAA,EACrC;AAAA,EAEA,MAAM,aAA4B;AAEhC,SAAK,GAAG,KAAK;AAAA,mCACkB,KAAK,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAQ5C;AAGD,SAAK,GAAG,KAAK;AAAA,uCACsB,KAAK,SAAS;AAAA,WAC1C,KAAK,SAAS;AAAA,KACpB;AAGD,SAAK,GAAG,KAAK;AAAA,2CAC0B,KAAK,SAAS;AAAA,sCACnB,KAAK,SAAS;AAAA,KAC/C;AAGD,SAAK,GAAG,KAAK;AAAA,qCACoB,KAAK,SAAS,uBAAuB,KAAK,SAAS;AAAA,sBAClE,KAAK,SAAS;AAAA;AAAA,KAE/B;AAED,SAAK,GAAG,KAAK;AAAA,qCACoB,KAAK,SAAS,uBAAuB,KAAK,SAAS;AAAA,sBAClE,KAAK,SAAS,QAAQ,KAAK,SAAS;AAAA;AAAA;AAAA,KAGrD;AAAA,EACH;AAAA,EAEA,MAAM,OAAO,QAAwC;AACnD,UAAM,OAAO,KAAK,GAAG,QAAQ;AAAA,+BACF,KAAK,SAAS;AAAA;AAAA,KAExC;AAED,UAAM,aAAa,KAAK,GAAG,YAAY,CAAC,UAA2B;AACjE,iBAAW,SAAS,OAAO;AACzB,aAAK;AAAA,UACH,MAAM;AAAA,UACN,MAAM;AAAA,UACN,MAAM;AAAA,UACN,MAAM,YAAY,KAAK,UAAU,MAAM,SAAS,IAAI;AAAA,UACpD,KAAK,UAAU,MAAM,QAAQ;AAAA,UAC7B,MAAM,UAAU,YAAY;AAAA,QAC9B;AAAA,MACF;AAAA,IACF,CAAC;AAED,eAAW,MAAM;AAAA,EACnB;AAAA,EAEA,MAAM,YACJ,WACA,MACA,QACyB;AAEzB,QAAI,QAAQ,iBAAiB,KAAK,SAAS;AAC3C,UAAM,SAAoB,CAAC;AAG3B,QAAI,QAAQ,YAAY;AACtB,eAAS;AACT,aAAO,KAAK,OAAO,UAAU;AAAA,IAC/B;AAEA,UAAM,OAAO,KAAK,GAAG,QAAQ,KAAK,EAAE,IAAI,GAAG,MAAM;AAUjD,UAAM,UAA0D,CAAC;AAEjE,eAAW,OAAO,MAAM;AACtB,YAAM,iBAAiB,KAAK,MAAM,IAAI,SAAS;AAC/C,YAAM,QAAQ,KAAK,iBAAiB,WAAW,cAAc;AAE7D,cAAQ,KAAK;AAAA,QACX,OAAO,KAAK,WAAW,GAAG;AAAA,QAC1B;AAAA,MACF,CAAC;AAAA,IACH;AAGA,YAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAExC,WAAO,QAAQ,MAAM,GAAG,IAAI,EAAE,IAAI,CAAC,OAAO;AAAA,MACxC,GAAG;AAAA,MACH,YAAY;AAAA,IACd,EAAE;AAAA,EACJ;AAAA,EAEA,MAAM,aACJ,OACA,MACA,QACyB;AAEzB,QAAI,WAAW;AAAA,yBACM,KAAK,SAAS;AAAA,aAC1B,KAAK,SAAS;AAAA,aACd,KAAK,SAAS;AAAA,cACb,KAAK,SAAS;AAAA;AAExB,UAAM,SAAoB,CAAC,KAAK,eAAe,KAAK,CAAC;AAGr
(machine-generated source map for dist/index.js — several thousand characters of base64-VLQ "mappings" data omitted; the only human-recoverable change is the regenerated "names" table, which now lists identifiers such as generateId, readdir, readFile, join, basename, stat, and embedder)
|
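For orientation, the omitted data is the mappings/names tail of a standard v3 source map, which tooling reads as plain JSON. Below is a minimal inspection sketch, assuming Node 18+ and an ESM context (the manifest change further down sets "type": "module"); the script name is hypothetical and not part of this package:

// inspect-map.ts — hypothetical helper, not shipped with this package.
// Reads the regenerated source map and prints its identifier table.
import { readFile } from "node:fs/promises";

const map = JSON.parse(await readFile("dist/index.js.map", "utf8"));

console.log(map.version);           // 3 — the source-map spec version
console.log(map.sources.length);    // how many source files the bundle covers
console.log(map.names.slice(0, 5)); // e.g. ["generateId", "readdir", ...]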
package/dist/retrieval/index.d.ts
CHANGED
|
File without changes
|
package/dist/retrieval/index.js
CHANGED
|
File without changes
|
package/dist/retrieval/index.js.map
CHANGED
|
File without changes
|
package/dist/types-CjnplPJD.d.ts
CHANGED
|
File without changes
|
package/dist/vectorstore/index.d.ts
CHANGED
|
File without changes
|
package/dist/vectorstore/index.js
CHANGED
|
File without changes
|
package/dist/vectorstore/index.js.map
CHANGED
|
File without changes
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
 {
   "name": "@chatbot-packages/rag",
-  "version": "0.1.0",
+  "version": "0.2.0",
   "description": "RAG (Retrieval-Augmented Generation) system for documentation Q&A",
   "type": "module",
   "main": "./dist/index.js",
@@ -36,16 +36,25 @@
     "dist",
     "README.md"
   ],
+  "scripts": {
+    "build": "tsup",
+    "dev": "tsup --watch",
+    "typecheck": "tsc --noEmit",
+    "test": "vitest run",
+    "test:watch": "vitest",
+    "lint": "eslint src/",
+    "clean": "rm -rf dist"
+  },
   "dependencies": {
+    "@chatbot-packages/ai": "workspace:*",
+    "@chatbot-packages/types": "workspace:*",
+    "@chatbot-packages/utils": "workspace:*",
     "@xenova/transformers": "^2.17.0",
     "better-sqlite3": "^11.0.0",
     "cheerio": "^1.0.0",
     "html-to-text": "^9.0.5",
     "pg": "^8.12.0",
-    "pgvector": "^0.2.0"
-    "@chatbot-packages/ai": "0.1.0",
-    "@chatbot-packages/types": "0.1.0",
-    "@chatbot-packages/utils": "0.1.0"
+    "pgvector": "^0.2.0"
   },
   "devDependencies": {
     "@types/better-sqlite3": "^7.6.11",
@@ -70,14 +79,5 @@
     "vector-search"
   ],
   "author": "Robi",
-  "license": "MIT"
-
-  "build": "tsup",
-  "dev": "tsup --watch",
-  "typecheck": "tsc --noEmit",
-  "test": "vitest run",
-  "test:watch": "vitest",
-  "lint": "eslint src/",
-  "clean": "rm -rf dist"
-  }
-  }
+  "license": "MIT"
+}
|
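Two things stand out in this manifest change. First, the scripts that previously trailed "license" without an enclosing object are now a proper "scripts" block placed before "dependencies". Second, the three internal @chatbot-packages dependencies move from pinned "0.1.0" versions to the workspace:* protocol. That protocol is meant to be rewritten by pnpm or yarn at publish time into concrete versions; since it appears verbatim in this published manifest, installing 0.2.0 from a public registry would likely fail to resolve those three dependencies (npm rejects the workspace: protocol). A minimal pre-publish guard is sketched below, again assuming Node 18+ and ESM — the filename and the idea of wiring it into a prepublishOnly script are assumptions, not part of this package:

// check-publish.ts — hypothetical guard, not shipped with this package.
// Fails the publish if any dependency still uses the workspace: protocol,
// which public registries cannot resolve.
import { readFile } from "node:fs/promises";
import process from "node:process";

const pkg = JSON.parse(await readFile("package.json", "utf8"));
const offenders = Object.entries(pkg.dependencies ?? {}).filter(([, spec]) =>
  String(spec).startsWith("workspace:"),
);

if (offenders.length > 0) {
  console.error("unresolved workspace specifiers:", offenders);
  process.exit(1);
}

With pnpm, `pnpm publish` performs the rewrite automatically (workspace:* becomes the dependency's current exact version), which is the usual reason to prefer the protocol over hand-pinned internal versions in a monorepo.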