@theglitchking/semantic-pages 0.4.1 → 0.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{chunk-7BYJ7MHY.js → chunk-UTXV7ZIQ.js} +16 -7
- package/dist/chunk-UTXV7ZIQ.js.map +1 -0
- package/dist/cli/index.js +4 -3
- package/dist/cli/index.js.map +1 -1
- package/dist/core/index.d.ts +2 -1
- package/dist/core/index.js +1 -1
- package/dist/mcp/server.d.ts +1 -0
- package/dist/mcp/server.js +2 -2
- package/dist/mcp/server.js.map +1 -1
- package/package.json +1 -1
- package/dist/chunk-7BYJ7MHY.js.map +0 -1
@@ -11,11 +11,16 @@ import { fileURLToPath } from "url";
 var DEFAULT_MODEL = "nomic-ai/nomic-embed-text-v1.5";
 var CACHE_DIR = join(homedir(), ".semantic-pages", "models");
 var DEFAULT_WORKERS = 1;
-var DEFAULT_BATCH_SIZE =
+var DEFAULT_BATCH_SIZE = 8;
+var DEFAULT_QUANTIZED = true;
 var ONNX_MODEL_PATHS = {
   "nomic-ai/nomic-embed-text-v1.5": "onnx/model.onnx",
   "sentence-transformers/all-MiniLM-L6-v2": "onnx/model.onnx"
 };
+var ONNX_QUANTIZED_MODEL_PATHS = {
+  "nomic-ai/nomic-embed-text-v1.5": "onnx/model_quantized.onnx",
+  "sentence-transformers/all-MiniLM-L6-v2": "onnx/model_quantized.onnx"
+};
 async function resolveOnnxRuntime() {
   try {
     const ort = await import("onnxruntime-node");
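The new `ONNX_QUANTIZED_MODEL_PATHS` table parallels the full-precision one, so resolving a model file stays a single lookup keyed by model id, with a conventional fallback subpath for unknown models. A minimal sketch of how the two tables and the Hugging Face URL scheme used in this chunk fit together (`resolveModelUrl` is a hypothetical helper, not part of the package):

```ts
// Tables copied from the diff above.
const ONNX_MODEL_PATHS: Record<string, string> = {
  "nomic-ai/nomic-embed-text-v1.5": "onnx/model.onnx",
  "sentence-transformers/all-MiniLM-L6-v2": "onnx/model.onnx",
};
const ONNX_QUANTIZED_MODEL_PATHS: Record<string, string> = {
  "nomic-ai/nomic-embed-text-v1.5": "onnx/model_quantized.onnx",
  "sentence-transformers/all-MiniLM-L6-v2": "onnx/model_quantized.onnx",
};

// Hypothetical helper mirroring the lookup + URL construction in init().
function resolveModelUrl(model: string, quantized: boolean): string {
  const pathMap = quantized ? ONNX_QUANTIZED_MODEL_PATHS : ONNX_MODEL_PATHS;
  // Unknown models fall back to the conventional onnx/ subpath.
  const subpath =
    pathMap[model] ?? (quantized ? "onnx/model_quantized.onnx" : "onnx/model.onnx");
  return `https://huggingface.co/${model}/resolve/main/${subpath}`;
}

// resolveModelUrl("nomic-ai/nomic-embed-text-v1.5", true)
// → "https://huggingface.co/nomic-ai/nomic-embed-text-v1.5/resolve/main/onnx/model_quantized.onnx"
```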
@@ -42,11 +47,13 @@ var Embedder = class {
   initialized = false;
   numWorkers;
   batchSize;
+  quantized;
   modelPath = "";
-  constructor(model = DEFAULT_MODEL, numWorkers = DEFAULT_WORKERS, batchSize = DEFAULT_BATCH_SIZE) {
+  constructor(model = DEFAULT_MODEL, numWorkers = DEFAULT_WORKERS, batchSize = DEFAULT_BATCH_SIZE, quantized = DEFAULT_QUANTIZED) {
     this.model = model;
     this.numWorkers = numWorkers;
     this.batchSize = batchSize;
+    this.quantized = quantized;
   }
   async init() {
     if (this.initialized) return;
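Because `quantized` is appended as a fourth constructor parameter defaulting to `true`, existing call sites keep working unchanged, while opting out means spelling out the earlier positional arguments. A usage sketch, assuming the defaults shown in this diff (the import specifier is an assumption based on the package's dist layout):

```ts
// Import path assumed; Embedder is defined in the chunk diffed above.
import { Embedder } from "@theglitchking/semantic-pages";

// Quantized int8 model (the new default) — existing call sites need no change.
const embedder = new Embedder();
await embedder.init();

// Opting back into full precision requires passing the earlier positional
// defaults explicitly, since quantized is the fourth parameter.
const fp32 = new Embedder(
  "nomic-ai/nomic-embed-text-v1.5", // DEFAULT_MODEL
  1,                                // DEFAULT_WORKERS
  8,                                // DEFAULT_BATCH_SIZE
  false                             // downloads onnx/model.onnx instead
);
await fp32.init();
```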
@@ -55,12 +62,14 @@ var Embedder = class {
     const { ort, label } = await resolveOnnxRuntime();
     this.ort = ort;
     this.runtimeLabel = label;
-    this.
+    const modelFileName = this.quantized ? "model_quantized.onnx" : "model.onnx";
+    this.modelPath = join(modelDir, modelFileName);
     const modelPath = this.modelPath;
     if (!existsSync(modelPath)) {
-      const
+      const pathMap = this.quantized ? ONNX_QUANTIZED_MODEL_PATHS : ONNX_MODEL_PATHS;
+      const onnxSubpath = pathMap[this.model] ?? (this.quantized ? "onnx/model_quantized.onnx" : "onnx/model.onnx");
       const url = `https://huggingface.co/${this.model}/resolve/main/${onnxSubpath}`;
-      process.stderr.write(`Downloading ONNX model: ${this.model}...
+      process.stderr.write(`Downloading ONNX model: ${this.model} (${this.quantized ? "quantized" : "full precision"})...
 `);
       await downloadFile(url, modelPath);
       process.stderr.write(`Model downloaded to ${modelDir}
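`init()` now derives the cached file name from the flag, so quantized and full-precision models cache side by side in the same model directory rather than overwriting each other. A sketch of the resulting on-disk layout; `cachedModelPath` is hypothetical, and the slash-to-`--` directory naming comes from the embedder source embedded in the new source map:

```ts
import { join } from "node:path";
import { homedir } from "node:os";

// Mirrors CACHE_DIR from the diff above.
const CACHE_DIR = join(homedir(), ".semantic-pages", "models");

// Hypothetical helper: where a given model variant lands on disk.
function cachedModelPath(model: string, quantized: boolean): string {
  const modelDir = join(CACHE_DIR, model.replace(/\//g, "--"));
  return join(modelDir, quantized ? "model_quantized.onnx" : "model.onnx");
}

// cachedModelPath("nomic-ai/nomic-embed-text-v1.5", true)
// → ~/.semantic-pages/models/nomic-ai--nomic-embed-text-v1.5/model_quantized.onnx
```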
@@ -75,7 +84,7 @@ var Embedder = class {
     const test = await this.embed("test");
     this.dimensions = test.length;
     this.initialized = true;
-    process.stderr.write(`Embedder ready (${label} runtime, ${this.dimensions}d, batch_size=${this.batchSize})
+    process.stderr.write(`Embedder ready (${label} runtime, ${this.dimensions}d, batch_size=${this.batchSize}, ${this.quantized ? "quantized" : "fp32"})
 `);
   }
   async embed(text) {
@@ -880,4 +889,4 @@ export {
   TagManager,
   Watcher
 };
-//# sourceMappingURL=chunk-7BYJ7MHY.js.map
+//# sourceMappingURL=chunk-UTXV7ZIQ.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["../src/core/embedder.ts","../src/core/graph.ts","../src/core/vector.ts","../src/core/search-text.ts","../src/core/crud.ts","../src/core/frontmatter.ts","../src/core/watcher.ts"],"sourcesContent":[…],"mappings":"…"}
AG;AAGpC,QAAI,MAAM,QAAQ,KAAK,IAAI,GAAG;AAC5B,WAAK,OAAQ,KAAK,KAAkB,OAAO,CAAC,MAAM,CAAC,KAAK,SAAS,CAAC,CAAC;AAAA,IACrE;AAGA,QAAI,iBAAiB;AACrB,eAAW,OAAO,MAAM;AACtB,YAAM,UAAU,IAAI,OAAO,WAAWC,aAAY,GAAG,CAAC,aAAa,GAAG;AACtE,uBAAiB,eAAe,QAAQ,SAAS,IAAI;AAAA,IACvD;AAEA,UAAM,UAAUD,QAAO,UAAU,gBAAgB,IAAI;AACrD,UAAMH,WAAU,SAAS,SAAS,OAAO;AAAA,EAC3C;AAAA,EAEA,MAAM,gBAAgB,QAAgB,QAAiC;AACrE,UAAM,QAAQ,MAAME,MAAK,WAAW,EAAE,KAAK,KAAK,UAAU,CAAC;AAC3D,QAAI,QAAQ;AAEZ,eAAW,QAAQ,OAAO;AACxB,YAAM,UAAUD,MAAK,KAAK,WAAW,IAAI;AACzC,YAAM,MAAM,MAAMF,UAAS,SAAS,OAAO;AAC3C,YAAM,EAAE,MAAM,QAAQ,IAAII,QAAO,GAAG;AACpC,UAAI,UAAU;AAGd,UAAI,MAAM,QAAQ,KAAK,IAAI,GAAG;AAC5B,cAAM,MAAO,KAAK,KAAkB,QAAQ,MAAM;AAClD,YAAI,OAAO,GAAG;AACZ,UAAC,KAAK,KAAkB,GAAG,IAAI;AAC/B,oBAAU;AAAA,QACZ;AAAA,MACF;AAGA,YAAM,UAAU,IAAI,OAAO,WAAWC,aAAY,MAAM,CAAC,aAAa,GAAG;AACzE,YAAM,iBAAiB,QAAQ,QAAQ,SAAS,MAAM,MAAM,EAAE;AAC9D,UAAI,mBAAmB,QAAS,WAAU;AAE1C,UAAI,SAAS;AACX,cAAM,UAAUD,QAAO,UAAU,gBAAgB,IAAI;AACrD,cAAMH,WAAU,SAAS,SAAS,OAAO;AACzC;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AACF;AAEA,SAASI,aAAY,KAAqB;AACxC,SAAO,IAAI,QAAQ,uBAAuB,MAAM;AAClD;;;ACnIA,SAAS,aAA6B;AACtC,SAAS,oBAAoB;AAOtB,IAAM,UAAN,cAAsB,aAAa;AAAA,EAChC;AAAA,EACA,YAA8B;AAAA,EAC9B;AAAA,EACA,iBAAiB,oBAAI,IAAY;AAAA,EACjC,gBAAsD;AAAA,EACtD,eAAqC;AAAA,EACrC;AAAA,EAER,YAAY,WAAmB,aAAqB,KAAK,aAAsB,OAAO;AACpF,UAAM;AACN,SAAK,YAAY;AACjB,SAAK,aAAa;AAClB,SAAK,aAAa;AAAA,EACpB;AAAA,EAEA,QAAc;AACZ,QAAI,KAAK,UAAW;AAEpB,SAAK,YAAY,MAAM,WAAW;AAAA,MAChC,KAAK,KAAK;AAAA,MACV,eAAe;AAAA,MACf,gBAAgB;AAAA,MAChB,SAAS;AAAA,QACP;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,MACA,GAAI,KAAK,aAAa,EAAE,YAAY,MAAM,UAAU,IAAI,IAAI,CAAC;AAAA,IAC/D,CAAC;AAED,SAAK,eAAe,IAAI,QAAc,CAAC,YAAY;AACjD,WAAK,UAAW,GAAG,SAAS,OAAO;AAAA,IACrC,CAAC;AAED,SAAK,UAAU,GAAG,OAAO,CAAC,SAAS,KAAK,QAAQ,IAAI,CAAC;AACrD,SAAK,UAAU,GAAG,UAAU,CAAC,SAAS,KAAK,QAAQ,IAAI,CAAC;AACxD,SAAK,UAAU,GAAG,UAAU,CAAC,SAAS,KAAK,QAAQ,IAAI,CAAC;AACxD,SAAK,UAAU,GAAG,SAAS,CAAC,QAAQ,KAAK,KAAK,SAAS,GAAG,CAAC;AAAA,EAC7D;AAAA,EAEA,MAAM,QAAuB;AAC3B,QAAI,KAAK,aAAc,OAAM,KAAK;AAAA,EACpC;AAAA,EAEA,OAAa;AACX,QAAI,KAAK,cAAe,cAAa,KAAK,aAAa;AACvD,SAAK,WAAW,MAAM;AACtB,SAAK,YAAY;AACjB,SAAK,eAAe,MAAM;AAAA,EAC5B;AAAA,EAEQ,QAAQ,MAAoB;AAClC,SAAK,eAAe,IAAI,IAAI;AAE5B,QAAI,KAAK,cAAe,cAAa,KAAK,aAAa;AACvD,SAAK,gBAAgB,WAAW,MAAM;AACpC,YAAM,QAAQ,CAAC,GAAG,KAAK,cAAc;AACrC,WAAK,eAAe,MAAM;AAC1B,WAAK,KAAK,WAAW,KAAK;AAAA,IAC5B,GAAG,KAAK,UAAU;AAAA,EACpB;AACF;","names":["readFile","writeFile","join","existsSync","readFile","writeFile","join","existsSync","readFile","writeFile","mkdir","dirname","join","existsSync","readFile","writeFile","join","glob","matter","escapeRegex"]}

package/dist/cli/index.js
CHANGED

@@ -199,7 +199,7 @@ function printToolDetail(name) {
 }
 program.name("semantic-pages").description(
   "Semantic search + knowledge graph MCP server for markdown files\n\n Start MCP server: semantic-pages --notes ./vault\n Show vault stats: semantic-pages --notes ./vault --stats\n Force reindex: semantic-pages --notes ./vault --reindex\n List MCP tools: semantic-pages tools\n Tool details: semantic-pages tools search_semantic"
-).version("0.4.1");
+).version("0.4.2");
 program.command("tools [name]").description("List all MCP tools, or show details for a specific tool").action((name) => {
   if (name) {
     printToolDetail(name);
@@ -208,7 +208,7 @@ program.command("tools [name]").description("List all MCP tools, or show details
   }
   process.exit(0);
 });
-program.command("serve", { isDefault: true }).description("Start the MCP server (default command)").requiredOption("--notes <path>", "Path to markdown notes directory").option("--reindex", "Force full reindex and exit").option("--stats", "Show vault statistics and exit").option("--model <name>", "Embedding model to use", "nomic-ai/nomic-embed-text-v1.5").option("--workers <n>", "Number of worker threads for parallel embedding", parseInt).option("--batch-size <n>", "Texts per ONNX forward pass (default: 32)", parseInt).option("--no-watch", "Disable file watcher").action(async (opts) => {
+program.command("serve", { isDefault: true }).description("Start the MCP server (default command)").requiredOption("--notes <path>", "Path to markdown notes directory").option("--reindex", "Force full reindex and exit").option("--stats", "Show vault statistics and exit").option("--model <name>", "Embedding model to use", "nomic-ai/nomic-embed-text-v1.5").option("--workers <n>", "Number of worker threads for parallel embedding", parseInt).option("--batch-size <n>", "Texts per ONNX forward pass (default: 8)", parseInt).option("--no-quantized", "Use full-precision model instead of quantized (slower, slightly higher quality)").option("--no-watch", "Disable file watcher").action(async (opts) => {
   const notesPath = resolve(opts.notes);
   if (!existsSync(notesPath)) {
     console.error(`Error: notes directory not found: ${notesPath}`);
@@ -232,6 +232,7 @@ program.command("serve", { isDefault: true }).description("Start the MCP server
       model: opts.model,
       workers: opts.workers,
       batchSize: opts.batchSize,
+      quantized: opts.quantized,
       onProgress: (embedded, total) => {
         process.stderr.write(`\rEmbedding ${embedded}/${total} chunks...`);
       }
@@ -241,7 +242,7 @@ program.command("serve", { isDefault: true }).description("Start the MCP server
     process.exit(0);
   }
   const { startServer } = await import("../mcp/server.js");
-  await startServer(notesPath, { watch: opts.watch, model: opts.model, workers: opts.workers, batchSize: opts.batchSize });
+  await startServer(notesPath, { watch: opts.watch, model: opts.model, workers: opts.workers, batchSize: opts.batchSize, quantized: opts.quantized });
 });
 program.parse();
 //# sourceMappingURL=index.js.map
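The new `--no-quantized` flag above leans on Commander's negated-option convention. A minimal sketch of that behavior, assuming only the `commander` dependency the CLI already uses (the `makeCli` helper is illustrative):

```ts
// Commander treats "--no-<name>" as a boolean option named <name> that
// defaults to true and becomes false when the flag is passed, which is
// why the serve action can forward `opts.quantized` directly.
import { Command } from "commander";

const makeCli = () =>
  new Command().option("--no-quantized", "Use full-precision model instead of quantized");

console.log(makeCli().parse(["node", "cli"]).opts().quantized);                   // true
console.log(makeCli().parse(["node", "cli", "--no-quantized"]).opts().quantized); // false
```

This is also why quantized inference stays the default: omitting the flag yields `quantized: true` without any explicit default value in the option declaration.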
package/dist/cli/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/cli/index.ts"],"sourcesContent":["#!/usr/bin/env node\n\nimport { program } from \"commander\";\nimport { resolve } from \"node:path\";\nimport { existsSync } from \"node:fs\";\n\nconst TOOL_HELP: Record<string, { description: string; args: string; examples: string[] }> = {\n // Search\n search_semantic: {\n description: \"Vector similarity search — find notes by meaning, not just keywords\",\n args: '{ \"query\": \"string\", \"limit?\": 10 }',\n examples: [\n '{ \"query\": \"microservices architecture\", \"limit\": 5 }',\n '{ \"query\": \"how to deploy to production\" }',\n ],\n },\n search_text: {\n description: \"Full-text keyword or regex search with optional filters\",\n args: '{ \"pattern\": \"string\", \"regex?\": false, \"caseSensitive?\": false, \"pathGlob?\": \"string\", \"tagFilter?\": [\"string\"], \"limit?\": 20 }',\n examples: [\n '{ \"pattern\": \"RabbitMQ\" }',\n '{ \"pattern\": \"OAuth\\\\\\\\d\", \"regex\": true }',\n '{ \"pattern\": \"deploy\", \"pathGlob\": \"devops/**\", \"tagFilter\": [\"kubernetes\"] }',\n ],\n },\n search_graph: {\n description: \"Graph traversal — find notes connected to a concept via wikilinks and tags\",\n args: '{ \"concept\": \"string\", \"maxDepth?\": 2 }',\n examples: [\n '{ \"concept\": \"microservices\" }',\n '{ \"concept\": \"auth\", \"maxDepth\": 3 }',\n ],\n },\n search_hybrid: {\n description: \"Combined semantic + graph search — vector results re-ranked by graph proximity\",\n args: '{ \"query\": \"string\", \"limit?\": 10 }',\n examples: [\n '{ \"query\": \"event driven architecture\", \"limit\": 5 }',\n ],\n },\n\n // Read\n read_note: {\n description: \"Read the full content of a specific note by path\",\n args: '{ \"path\": \"string\" }',\n examples: [\n '{ \"path\": \"project-overview.md\" }',\n '{ \"path\": \"notes/meeting-2024-01-15.md\" }',\n ],\n },\n read_multiple_notes: {\n description: \"Batch read multiple notes in one call\",\n args: '{ \"paths\": [\"string\"] }',\n examples: [\n '{ \"paths\": [\"overview.md\", \"architecture.md\", \"deployment.md\"] }',\n ],\n },\n list_notes: {\n description: \"List all indexed notes with metadata (title, tags, link count)\",\n args: \"{}\",\n examples: [\"{}\"],\n },\n\n // Write\n create_note: {\n description: \"Create a new markdown note with optional YAML frontmatter\",\n args: '{ \"path\": \"string\", \"content\": \"string\", \"frontmatter?\": {} }',\n examples: [\n '{ \"path\": \"new-guide.md\", \"content\": \"# Guide\\\\n\\\\nContent here.\" }',\n '{ \"path\": \"tagged.md\", \"content\": \"Content.\", \"frontmatter\": { \"title\": \"Tagged Note\", \"tags\": [\"test\"] } }',\n ],\n },\n update_note: {\n description: \"Edit note content — overwrite, append, prepend, or patch by heading\",\n args: '{ \"path\": \"string\", \"content\": \"string\", \"mode\": \"overwrite|append|prepend|patch-by-heading\", \"heading?\": \"string\" }',\n examples: [\n '{ \"path\": \"guide.md\", \"content\": \"New content.\", \"mode\": \"overwrite\" }',\n '{ \"path\": \"guide.md\", \"content\": \"\\\\n## Appendix\\\\nExtra info.\", \"mode\": \"append\" }',\n '{ \"path\": \"guide.md\", \"content\": \"Updated architecture section.\", \"mode\": \"patch-by-heading\", \"heading\": \"Architecture\" }',\n ],\n },\n delete_note: {\n description: \"Delete a note permanently (requires confirm=true)\",\n args: '{ \"path\": \"string\", \"confirm\": true }',\n examples: [\n '{ \"path\": \"old-note.md\", \"confirm\": true }',\n '{ \"path\": \"old-note.md\", \"confirm\": false } // 
returns warning, does not delete',\n ],\n },\n move_note: {\n description: \"Move or rename a note — automatically updates wikilinks across the vault\",\n args: '{ \"from\": \"string\", \"to\": \"string\" }',\n examples: [\n '{ \"from\": \"user-service.md\", \"to\": \"auth-service.md\" }',\n '{ \"from\": \"old/note.md\", \"to\": \"new/location/note.md\" }',\n ],\n },\n\n // Metadata\n get_frontmatter: {\n description: \"Read parsed YAML frontmatter from a note as JSON\",\n args: '{ \"path\": \"string\" }',\n examples: ['{ \"path\": \"project-overview.md\" }'],\n },\n update_frontmatter: {\n description: \"Set or delete YAML frontmatter keys — pass null to delete a key\",\n args: '{ \"path\": \"string\", \"fields\": {} }',\n examples: [\n '{ \"path\": \"note.md\", \"fields\": { \"status\": \"active\", \"priority\": 1 } }',\n '{ \"path\": \"note.md\", \"fields\": { \"deprecated_field\": null } } // deletes the key',\n ],\n },\n manage_tags: {\n description: \"Add, remove, or list tags on a note (frontmatter and inline)\",\n args: '{ \"path\": \"string\", \"action\": \"add|remove|list\", \"tags?\": [\"string\"] }',\n examples: [\n '{ \"path\": \"note.md\", \"action\": \"list\" }',\n '{ \"path\": \"note.md\", \"action\": \"add\", \"tags\": [\"important\", \"reviewed\"] }',\n '{ \"path\": \"note.md\", \"action\": \"remove\", \"tags\": [\"draft\"] }',\n ],\n },\n rename_tag: {\n description: \"Rename a tag across all notes in the vault (frontmatter + inline)\",\n args: '{ \"oldTag\": \"string\", \"newTag\": \"string\" }',\n examples: ['{ \"oldTag\": \"architecture\", \"newTag\": \"arch\" }'],\n },\n\n // Graph\n backlinks: {\n description: \"Find all notes that link TO a given note via [[wikilinks]]\",\n args: '{ \"path\": \"string\" }',\n examples: ['{ \"path\": \"microservices.md\" }'],\n },\n forwardlinks: {\n description: \"Find all notes linked FROM a given note\",\n args: '{ \"path\": \"string\" }',\n examples: ['{ \"path\": \"project-overview.md\" }'],\n },\n graph_path: {\n description: \"Find the shortest path between two notes in the knowledge graph\",\n args: '{ \"from\": \"string\", \"to\": \"string\" }',\n examples: ['{ \"from\": \"project-overview.md\", \"to\": \"user-service.md\" }'],\n },\n graph_statistics: {\n description: \"Knowledge graph stats — most connected nodes, orphans, density\",\n args: \"{}\",\n examples: [\"{}\"],\n },\n\n // System\n get_stats: {\n description: \"Vault and index statistics — note count, chunks, embeddings, graph density\",\n args: \"{}\",\n examples: [\"{}\"],\n },\n reindex: {\n description: \"Force a full reindex of the vault\",\n args: \"{}\",\n examples: [\"{}\"],\n },\n};\n\nconst TOOL_CATEGORIES: Record<string, string[]> = {\n Search: [\"search_semantic\", \"search_text\", \"search_graph\", \"search_hybrid\"],\n Read: [\"read_note\", \"read_multiple_notes\", \"list_notes\"],\n Write: [\"create_note\", \"update_note\", \"delete_note\", \"move_note\"],\n Metadata: [\"get_frontmatter\", \"update_frontmatter\", \"manage_tags\", \"rename_tag\"],\n Graph: [\"backlinks\", \"forwardlinks\", \"graph_path\", \"graph_statistics\"],\n System: [\"get_stats\", \"reindex\"],\n};\n\nfunction printToolList() {\n console.log(\"\\nSemantic Pages — 21 MCP Tools\\n\");\n console.log(\"Usage: These tools are available via MCP when the server is running.\");\n console.log(\" Run `semantic-pages tools <name>` for details on a specific tool.\\n\");\n\n for (const [category, tools] of Object.entries(TOOL_CATEGORIES)) {\n console.log(` ${category}:`);\n for (const 
name of tools) {\n const tool = TOOL_HELP[name];\n console.log(` ${name.padEnd(24)} ${tool.description}`);\n }\n console.log();\n }\n\n console.log(\"Run `semantic-pages tools <tool-name>` for arguments and examples.\");\n}\n\nfunction printToolDetail(name: string) {\n const tool = TOOL_HELP[name];\n if (!tool) {\n console.error(`Unknown tool: ${name}`);\n console.error(`Run \\`semantic-pages tools\\` to see all available tools.`);\n process.exit(1);\n }\n\n console.log(`\\n ${name}`);\n console.log(` ${\"─\".repeat(name.length)}`);\n console.log(` ${tool.description}\\n`);\n console.log(` Arguments:`);\n console.log(` ${tool.args}\\n`);\n console.log(` Examples:`);\n for (const ex of tool.examples) {\n console.log(` ${ex}`);\n }\n console.log();\n}\n\nprogram\n .name(\"semantic-pages\")\n .description(\n \"Semantic search + knowledge graph MCP server for markdown files\\n\\n\" +\n \" Start MCP server: semantic-pages --notes ./vault\\n\" +\n \" Show vault stats: semantic-pages --notes ./vault --stats\\n\" +\n \" Force reindex: semantic-pages --notes ./vault --reindex\\n\" +\n \" List MCP tools: semantic-pages tools\\n\" +\n \" Tool details: semantic-pages tools search_semantic\"\n )\n .version(\"0.4.1\");\n\nprogram\n .command(\"tools [name]\")\n .description(\"List all MCP tools, or show details for a specific tool\")\n .action((name?: string) => {\n if (name) {\n printToolDetail(name);\n } else {\n printToolList();\n }\n process.exit(0);\n });\n\nprogram\n .command(\"serve\", { isDefault: true })\n .description(\"Start the MCP server (default command)\")\n .requiredOption(\"--notes <path>\", \"Path to markdown notes directory\")\n .option(\"--reindex\", \"Force full reindex and exit\")\n .option(\"--stats\", \"Show vault statistics and exit\")\n .option(\"--model <name>\", \"Embedding model to use\", \"nomic-ai/nomic-embed-text-v1.5\")\n .option(\"--workers <n>\", \"Number of worker threads for parallel embedding\", parseInt)\n .option(\"--batch-size <n>\", \"Texts per ONNX forward pass (default: 32)\", parseInt)\n .option(\"--no-watch\", \"Disable file watcher\")\n .action(async (opts) => {\n const notesPath = resolve(opts.notes);\n\n if (!existsSync(notesPath)) {\n console.error(`Error: notes directory not found: ${notesPath}`);\n process.exit(1);\n }\n\n if (opts.stats) {\n const { Indexer } = await import(\"../core/indexer.js\");\n const indexer = new Indexer(notesPath);\n const docs = await indexer.indexAll();\n console.log(`Notes: ${docs.length}`);\n console.log(`Chunks: ${docs.reduce((n: number, d: any) => n + d.chunks.length, 0)}`);\n console.log(`Wikilinks: ${docs.reduce((n: number, d: any) => n + d.wikilinks.length, 0)}`);\n console.log(`Tags: ${new Set(docs.flatMap((d: any) => d.tags)).size} unique`);\n process.exit(0);\n }\n\n if (opts.reindex) {\n const { createServer } = await import(\"../mcp/server.js\");\n await createServer(notesPath, {\n watch: false,\n waitForReady: true,\n model: opts.model,\n workers: opts.workers,\n batchSize: opts.batchSize,\n onProgress: (embedded, total) => {\n process.stderr.write(`\\rEmbedding ${embedded}/${total} chunks...`);\n },\n });\n process.stderr.write(\"\\n\");\n console.log(\"Reindex complete.\");\n process.exit(0);\n }\n\n // Default: start MCP server on stdio\n const { startServer } = await import(\"../mcp/server.js\");\n await startServer(notesPath, { watch: opts.watch, model: opts.model, workers: opts.workers, batchSize: opts.batchSize });\n 
});\n\nprogram.parse();\n"],"mappings":";;;AAEA,SAAS,eAAe;AACxB,SAAS,eAAe;AACxB,SAAS,kBAAkB;AAE3B,IAAM,YAAuF;AAAA;AAAA,EAE3F,iBAAiB;AAAA,IACf,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EACA,aAAa;AAAA,IACX,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EACA,cAAc;AAAA,IACZ,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EACA,eAAe;AAAA,IACb,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAGA,WAAW;AAAA,IACT,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EACA,qBAAqB;AAAA,IACnB,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA,EACA,YAAY;AAAA,IACV,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU,CAAC,IAAI;AAAA,EACjB;AAAA;AAAA,EAGA,aAAa;AAAA,IACX,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EACA,aAAa;AAAA,IACX,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EACA,aAAa;AAAA,IACX,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EACA,WAAW;AAAA,IACT,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAGA,iBAAiB;AAAA,IACf,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU,CAAC,mCAAmC;AAAA,EAChD;AAAA,EACA,oBAAoB;AAAA,IAClB,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EACA,aAAa;AAAA,IACX,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EACA,YAAY;AAAA,IACV,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU,CAAC,gDAAgD;AAAA,EAC7D;AAAA;AAAA,EAGA,WAAW;AAAA,IACT,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU,CAAC,gCAAgC;AAAA,EAC7C;AAAA,EACA,cAAc;AAAA,IACZ,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU,CAAC,mCAAmC;AAAA,EAChD;AAAA,EACA,YAAY;AAAA,IACV,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU,CAAC,4DAA4D;AAAA,EACzE;AAAA,EACA,kBAAkB;AAAA,IAChB,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU,CAAC,IAAI;AAAA,EACjB;AAAA;AAAA,EAGA,WAAW;AAAA,IACT,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU,CAAC,IAAI;AAAA,EACjB;AAAA,EACA,SAAS;AAAA,IACP,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU,CAAC,IAAI;AAAA,EACjB;AACF;AAEA,IAAM,kBAA4C;AAAA,EAChD,QAAQ,CAAC,mBAAmB,eAAe,gBAAgB,eAAe;AAAA,EAC1E,MAAM,CAAC,aAAa,uBAAuB,YAAY;AAAA,EACvD,OAAO,CAAC,eAAe,eAAe,eAAe,WAAW;AAAA,EAChE,UAAU,CAAC,mBAAmB,sBAAsB,eAAe,YAAY;AAAA,EAC/E,OAAO,CAAC,aAAa,gBAAgB,cAAc,kBAAkB;AAAA,EACrE,QAAQ,CAAC,aAAa,SAAS;AACjC;AAEA,SAAS,gBAAgB;AACvB,UAAQ,IAAI,wCAAmC;AAC/C,UAAQ,IAAI,sEAAsE;AAClF,UAAQ,IAAI,4EAA4E;AAExF,aAAW,CAAC,UAAU,KAAK,KAAK,OAAO,QAAQ,eAAe,GAAG;AAC/D,YAAQ,IAAI,KAAK,QAAQ,GAAG;AAC5B,eAAW,QAAQ,OAAO;AACxB,YAAM,OAAO,UAAU,IAAI;AAC3B,cAAQ,IAAI,OAAO,KAAK,OAAO,EAAE,CAAC,IAAI,KAAK,WAAW,EAAE;AAAA,IAC1D;AACA,YAAQ,IAAI;AAAA,EACd;AAEA,UAAQ,IAAI,oEAAoE;AAClF;AAEA,SAAS,gBAAgB,MAAc;AACrC,QAAM,OAAO,UAAU,IAAI;AAC3B,MAAI,CAAC,MAAM;AACT,YAAQ,MAAM,iBAAiB,IAAI,EAAE;AACrC,YAAQ,MAAM,0DAA0D;AACxE,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,UAAQ,IAAI;AAAA,IAAO,IAAI,EAAE;AACzB,UAAQ,IAAI,KAAK,SAAI,OAAO,KAAK,MAAM,CAAC,EAAE;AAC1C,UAAQ,IAAI,KAAK,KAAK,WAAW;AAAA,CAAI;AACrC,UAAQ,IAAI,cAAc;AAC1B,UAAQ,IAAI,OAAO,KAAK,IAAI;AAAA,CAAI;AAChC,UAAQ,IAAI,aAAa;AACzB,aAAW,MAAM,KAAK,UAAU;AAC9B,YAAQ,IAAI,OAAO,EAAE,EAAE;AAAA,EACzB;AACA,UAAQ,IAAI;AACd;AAEA,QACG,KAAK,gBAAgB,EACrB;AAAA,EACC;AAMF,EACC,QAAQ,OAAO;AAElB,QACG,QAAQ,cAAc,EACtB,YAAY,yDAAyD,EACrE,OAAO,CAAC,SAAkB;AACzB,MAAI,MAAM;AACR,oBAAgB,IAAI;AAAA,EACtB,OAAO;AACL,kBAAc;AAAA,EAChB;AACA,UAAQ,KAAK,CAAC;AAChB,CAAC;AAEH,QACG,QAAQ,SAAS,EAAE,WAAW,KAAK,CAAC,EACpC,YAAY,wCAAwC,EACpD,eAAe,kBAAkB,kCAAkC,EACnE,OAAO,aAAa,6BAA6B,EACjD,OAAO,WAAW,gCAAgC,EAClD,OAAO,kBAAkB,0BAA0B,gCAAgC,EACn
F,OAAO,iBAAiB,mDAAmD,QAAQ,EACnF,OAAO,oBAAoB,6CAA6C,QAAQ,EAChF,OAAO,cAAc,sBAAsB,EAC3C,OAAO,OAAO,SAAS;AACtB,QAAM,YAAY,QAAQ,KAAK,KAAK;AAEpC,MAAI,CAAC,WAAW,SAAS,GAAG;AAC1B,YAAQ,MAAM,qCAAqC,SAAS,EAAE;AAC9D,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,MAAI,KAAK,OAAO;AACd,UAAM,EAAE,QAAQ,IAAI,MAAM,OAAO,wBAAoB;AACrD,UAAM,UAAU,IAAI,QAAQ,SAAS;AACrC,UAAM,OAAO,MAAM,QAAQ,SAAS;AACpC,YAAQ,IAAI,UAAU,KAAK,MAAM,EAAE;AACnC,YAAQ,IAAI,WAAW,KAAK,OAAO,CAAC,GAAW,MAAW,IAAI,EAAE,OAAO,QAAQ,CAAC,CAAC,EAAE;AACnF,YAAQ,IAAI,cAAc,KAAK,OAAO,CAAC,GAAW,MAAW,IAAI,EAAE,UAAU,QAAQ,CAAC,CAAC,EAAE;AACzF,YAAQ,IAAI,SAAS,IAAI,IAAI,KAAK,QAAQ,CAAC,MAAW,EAAE,IAAI,CAAC,EAAE,IAAI,SAAS;AAC5E,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,MAAI,KAAK,SAAS;AAChB,UAAM,EAAE,aAAa,IAAI,MAAM,OAAO,kBAAkB;AACxD,UAAM,aAAa,WAAW;AAAA,MAC5B,OAAO;AAAA,MACP,cAAc;AAAA,MACd,OAAO,KAAK;AAAA,MACZ,SAAS,KAAK;AAAA,MACd,WAAW,KAAK;AAAA,MAChB,YAAY,CAAC,UAAU,UAAU;AAC/B,gBAAQ,OAAO,MAAM,eAAe,QAAQ,IAAI,KAAK,YAAY;AAAA,MACnE;AAAA,IACF,CAAC;AACD,YAAQ,OAAO,MAAM,IAAI;AACzB,YAAQ,IAAI,mBAAmB;AAC/B,YAAQ,KAAK,CAAC;AAAA,EAChB;AAGA,QAAM,EAAE,YAAY,IAAI,MAAM,OAAO,kBAAkB;AACvD,QAAM,YAAY,WAAW,EAAE,OAAO,KAAK,OAAO,OAAO,KAAK,OAAO,SAAS,KAAK,SAAS,WAAW,KAAK,UAAU,CAAC;AACzH,CAAC;AAEH,QAAQ,MAAM;","names":[]}
+
{"version":3,"sources":["../../src/cli/index.ts"],"sourcesContent":["#!/usr/bin/env node\n\nimport { program } from \"commander\";\nimport { resolve } from \"node:path\";\nimport { existsSync } from \"node:fs\";\n\nconst TOOL_HELP: Record<string, { description: string; args: string; examples: string[] }> = {\n // Search\n search_semantic: {\n description: \"Vector similarity search — find notes by meaning, not just keywords\",\n args: '{ \"query\": \"string\", \"limit?\": 10 }',\n examples: [\n '{ \"query\": \"microservices architecture\", \"limit\": 5 }',\n '{ \"query\": \"how to deploy to production\" }',\n ],\n },\n search_text: {\n description: \"Full-text keyword or regex search with optional filters\",\n args: '{ \"pattern\": \"string\", \"regex?\": false, \"caseSensitive?\": false, \"pathGlob?\": \"string\", \"tagFilter?\": [\"string\"], \"limit?\": 20 }',\n examples: [\n '{ \"pattern\": \"RabbitMQ\" }',\n '{ \"pattern\": \"OAuth\\\\\\\\d\", \"regex\": true }',\n '{ \"pattern\": \"deploy\", \"pathGlob\": \"devops/**\", \"tagFilter\": [\"kubernetes\"] }',\n ],\n },\n search_graph: {\n description: \"Graph traversal — find notes connected to a concept via wikilinks and tags\",\n args: '{ \"concept\": \"string\", \"maxDepth?\": 2 }',\n examples: [\n '{ \"concept\": \"microservices\" }',\n '{ \"concept\": \"auth\", \"maxDepth\": 3 }',\n ],\n },\n search_hybrid: {\n description: \"Combined semantic + graph search — vector results re-ranked by graph proximity\",\n args: '{ \"query\": \"string\", \"limit?\": 10 }',\n examples: [\n '{ \"query\": \"event driven architecture\", \"limit\": 5 }',\n ],\n },\n\n // Read\n read_note: {\n description: \"Read the full content of a specific note by path\",\n args: '{ \"path\": \"string\" }',\n examples: [\n '{ \"path\": \"project-overview.md\" }',\n '{ \"path\": \"notes/meeting-2024-01-15.md\" }',\n ],\n },\n read_multiple_notes: {\n description: \"Batch read multiple notes in one call\",\n args: '{ \"paths\": [\"string\"] }',\n examples: [\n '{ \"paths\": [\"overview.md\", \"architecture.md\", \"deployment.md\"] }',\n ],\n },\n list_notes: {\n description: \"List all indexed notes with metadata (title, tags, link count)\",\n args: \"{}\",\n examples: [\"{}\"],\n },\n\n // Write\n create_note: {\n description: \"Create a new markdown note with optional YAML frontmatter\",\n args: '{ \"path\": \"string\", \"content\": \"string\", \"frontmatter?\": {} }',\n examples: [\n '{ \"path\": \"new-guide.md\", \"content\": \"# Guide\\\\n\\\\nContent here.\" }',\n '{ \"path\": \"tagged.md\", \"content\": \"Content.\", \"frontmatter\": { \"title\": \"Tagged Note\", \"tags\": [\"test\"] } }',\n ],\n },\n update_note: {\n description: \"Edit note content — overwrite, append, prepend, or patch by heading\",\n args: '{ \"path\": \"string\", \"content\": \"string\", \"mode\": \"overwrite|append|prepend|patch-by-heading\", \"heading?\": \"string\" }',\n examples: [\n '{ \"path\": \"guide.md\", \"content\": \"New content.\", \"mode\": \"overwrite\" }',\n '{ \"path\": \"guide.md\", \"content\": \"\\\\n## Appendix\\\\nExtra info.\", \"mode\": \"append\" }',\n '{ \"path\": \"guide.md\", \"content\": \"Updated architecture section.\", \"mode\": \"patch-by-heading\", \"heading\": \"Architecture\" }',\n ],\n },\n delete_note: {\n description: \"Delete a note permanently (requires confirm=true)\",\n args: '{ \"path\": \"string\", \"confirm\": true }',\n examples: [\n '{ \"path\": \"old-note.md\", \"confirm\": true }',\n '{ \"path\": \"old-note.md\", \"confirm\": false } // 
returns warning, does not delete',\n ],\n },\n move_note: {\n description: \"Move or rename a note — automatically updates wikilinks across the vault\",\n args: '{ \"from\": \"string\", \"to\": \"string\" }',\n examples: [\n '{ \"from\": \"user-service.md\", \"to\": \"auth-service.md\" }',\n '{ \"from\": \"old/note.md\", \"to\": \"new/location/note.md\" }',\n ],\n },\n\n // Metadata\n get_frontmatter: {\n description: \"Read parsed YAML frontmatter from a note as JSON\",\n args: '{ \"path\": \"string\" }',\n examples: ['{ \"path\": \"project-overview.md\" }'],\n },\n update_frontmatter: {\n description: \"Set or delete YAML frontmatter keys — pass null to delete a key\",\n args: '{ \"path\": \"string\", \"fields\": {} }',\n examples: [\n '{ \"path\": \"note.md\", \"fields\": { \"status\": \"active\", \"priority\": 1 } }',\n '{ \"path\": \"note.md\", \"fields\": { \"deprecated_field\": null } } // deletes the key',\n ],\n },\n manage_tags: {\n description: \"Add, remove, or list tags on a note (frontmatter and inline)\",\n args: '{ \"path\": \"string\", \"action\": \"add|remove|list\", \"tags?\": [\"string\"] }',\n examples: [\n '{ \"path\": \"note.md\", \"action\": \"list\" }',\n '{ \"path\": \"note.md\", \"action\": \"add\", \"tags\": [\"important\", \"reviewed\"] }',\n '{ \"path\": \"note.md\", \"action\": \"remove\", \"tags\": [\"draft\"] }',\n ],\n },\n rename_tag: {\n description: \"Rename a tag across all notes in the vault (frontmatter + inline)\",\n args: '{ \"oldTag\": \"string\", \"newTag\": \"string\" }',\n examples: ['{ \"oldTag\": \"architecture\", \"newTag\": \"arch\" }'],\n },\n\n // Graph\n backlinks: {\n description: \"Find all notes that link TO a given note via [[wikilinks]]\",\n args: '{ \"path\": \"string\" }',\n examples: ['{ \"path\": \"microservices.md\" }'],\n },\n forwardlinks: {\n description: \"Find all notes linked FROM a given note\",\n args: '{ \"path\": \"string\" }',\n examples: ['{ \"path\": \"project-overview.md\" }'],\n },\n graph_path: {\n description: \"Find the shortest path between two notes in the knowledge graph\",\n args: '{ \"from\": \"string\", \"to\": \"string\" }',\n examples: ['{ \"from\": \"project-overview.md\", \"to\": \"user-service.md\" }'],\n },\n graph_statistics: {\n description: \"Knowledge graph stats — most connected nodes, orphans, density\",\n args: \"{}\",\n examples: [\"{}\"],\n },\n\n // System\n get_stats: {\n description: \"Vault and index statistics — note count, chunks, embeddings, graph density\",\n args: \"{}\",\n examples: [\"{}\"],\n },\n reindex: {\n description: \"Force a full reindex of the vault\",\n args: \"{}\",\n examples: [\"{}\"],\n },\n};\n\nconst TOOL_CATEGORIES: Record<string, string[]> = {\n Search: [\"search_semantic\", \"search_text\", \"search_graph\", \"search_hybrid\"],\n Read: [\"read_note\", \"read_multiple_notes\", \"list_notes\"],\n Write: [\"create_note\", \"update_note\", \"delete_note\", \"move_note\"],\n Metadata: [\"get_frontmatter\", \"update_frontmatter\", \"manage_tags\", \"rename_tag\"],\n Graph: [\"backlinks\", \"forwardlinks\", \"graph_path\", \"graph_statistics\"],\n System: [\"get_stats\", \"reindex\"],\n};\n\nfunction printToolList() {\n console.log(\"\\nSemantic Pages — 21 MCP Tools\\n\");\n console.log(\"Usage: These tools are available via MCP when the server is running.\");\n console.log(\" Run `semantic-pages tools <name>` for details on a specific tool.\\n\");\n\n for (const [category, tools] of Object.entries(TOOL_CATEGORIES)) {\n console.log(` ${category}:`);\n for (const 
name of tools) {\n const tool = TOOL_HELP[name];\n console.log(` ${name.padEnd(24)} ${tool.description}`);\n }\n console.log();\n }\n\n console.log(\"Run `semantic-pages tools <tool-name>` for arguments and examples.\");\n}\n\nfunction printToolDetail(name: string) {\n const tool = TOOL_HELP[name];\n if (!tool) {\n console.error(`Unknown tool: ${name}`);\n console.error(`Run \\`semantic-pages tools\\` to see all available tools.`);\n process.exit(1);\n }\n\n console.log(`\\n ${name}`);\n console.log(` ${\"─\".repeat(name.length)}`);\n console.log(` ${tool.description}\\n`);\n console.log(` Arguments:`);\n console.log(` ${tool.args}\\n`);\n console.log(` Examples:`);\n for (const ex of tool.examples) {\n console.log(` ${ex}`);\n }\n console.log();\n}\n\nprogram\n .name(\"semantic-pages\")\n .description(\n \"Semantic search + knowledge graph MCP server for markdown files\\n\\n\" +\n \" Start MCP server: semantic-pages --notes ./vault\\n\" +\n \" Show vault stats: semantic-pages --notes ./vault --stats\\n\" +\n \" Force reindex: semantic-pages --notes ./vault --reindex\\n\" +\n \" List MCP tools: semantic-pages tools\\n\" +\n \" Tool details: semantic-pages tools search_semantic\"\n )\n .version(\"0.4.2\");\n\nprogram\n .command(\"tools [name]\")\n .description(\"List all MCP tools, or show details for a specific tool\")\n .action((name?: string) => {\n if (name) {\n printToolDetail(name);\n } else {\n printToolList();\n }\n process.exit(0);\n });\n\nprogram\n .command(\"serve\", { isDefault: true })\n .description(\"Start the MCP server (default command)\")\n .requiredOption(\"--notes <path>\", \"Path to markdown notes directory\")\n .option(\"--reindex\", \"Force full reindex and exit\")\n .option(\"--stats\", \"Show vault statistics and exit\")\n .option(\"--model <name>\", \"Embedding model to use\", \"nomic-ai/nomic-embed-text-v1.5\")\n .option(\"--workers <n>\", \"Number of worker threads for parallel embedding\", parseInt)\n .option(\"--batch-size <n>\", \"Texts per ONNX forward pass (default: 8)\", parseInt)\n .option(\"--no-quantized\", \"Use full-precision model instead of quantized (slower, slightly higher quality)\")\n .option(\"--no-watch\", \"Disable file watcher\")\n .action(async (opts) => {\n const notesPath = resolve(opts.notes);\n\n if (!existsSync(notesPath)) {\n console.error(`Error: notes directory not found: ${notesPath}`);\n process.exit(1);\n }\n\n if (opts.stats) {\n const { Indexer } = await import(\"../core/indexer.js\");\n const indexer = new Indexer(notesPath);\n const docs = await indexer.indexAll();\n console.log(`Notes: ${docs.length}`);\n console.log(`Chunks: ${docs.reduce((n: number, d: any) => n + d.chunks.length, 0)}`);\n console.log(`Wikilinks: ${docs.reduce((n: number, d: any) => n + d.wikilinks.length, 0)}`);\n console.log(`Tags: ${new Set(docs.flatMap((d: any) => d.tags)).size} unique`);\n process.exit(0);\n }\n\n if (opts.reindex) {\n const { createServer } = await import(\"../mcp/server.js\");\n await createServer(notesPath, {\n watch: false,\n waitForReady: true,\n model: opts.model,\n workers: opts.workers,\n batchSize: opts.batchSize,\n quantized: opts.quantized,\n onProgress: (embedded, total) => {\n process.stderr.write(`\\rEmbedding ${embedded}/${total} chunks...`);\n },\n });\n process.stderr.write(\"\\n\");\n console.log(\"Reindex complete.\");\n process.exit(0);\n }\n\n // Default: start MCP server on stdio\n const { startServer } = await import(\"../mcp/server.js\");\n await startServer(notesPath, { watch: opts.watch, model: opts.model, 
workers: opts.workers, batchSize: opts.batchSize, quantized: opts.quantized });\n });\n\nprogram.parse();\n"],"mappings":";;;AAEA,SAAS,eAAe;AACxB,SAAS,eAAe;AACxB,SAAS,kBAAkB;AAE3B,IAAM,YAAuF;AAAA;AAAA,EAE3F,iBAAiB;AAAA,IACf,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EACA,aAAa;AAAA,IACX,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EACA,cAAc;AAAA,IACZ,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EACA,eAAe;AAAA,IACb,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAGA,WAAW;AAAA,IACT,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EACA,qBAAqB;AAAA,IACnB,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA,EACA,YAAY;AAAA,IACV,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU,CAAC,IAAI;AAAA,EACjB;AAAA;AAAA,EAGA,aAAa;AAAA,IACX,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EACA,aAAa;AAAA,IACX,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EACA,aAAa;AAAA,IACX,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EACA,WAAW;AAAA,IACT,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAGA,iBAAiB;AAAA,IACf,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU,CAAC,mCAAmC;AAAA,EAChD;AAAA,EACA,oBAAoB;AAAA,IAClB,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EACA,aAAa;AAAA,IACX,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EACA,YAAY;AAAA,IACV,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU,CAAC,gDAAgD;AAAA,EAC7D;AAAA;AAAA,EAGA,WAAW;AAAA,IACT,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU,CAAC,gCAAgC;AAAA,EAC7C;AAAA,EACA,cAAc;AAAA,IACZ,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU,CAAC,mCAAmC;AAAA,EAChD;AAAA,EACA,YAAY;AAAA,IACV,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU,CAAC,4DAA4D;AAAA,EACzE;AAAA,EACA,kBAAkB;AAAA,IAChB,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU,CAAC,IAAI;AAAA,EACjB;AAAA;AAAA,EAGA,WAAW;AAAA,IACT,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU,CAAC,IAAI;AAAA,EACjB;AAAA,EACA,SAAS;AAAA,IACP,aAAa;AAAA,IACb,MAAM;AAAA,IACN,UAAU,CAAC,IAAI;AAAA,EACjB;AACF;AAEA,IAAM,kBAA4C;AAAA,EAChD,QAAQ,CAAC,mBAAmB,eAAe,gBAAgB,eAAe;AAAA,EAC1E,MAAM,CAAC,aAAa,uBAAuB,YAAY;AAAA,EACvD,OAAO,CAAC,eAAe,eAAe,eAAe,WAAW;AAAA,EAChE,UAAU,CAAC,mBAAmB,sBAAsB,eAAe,YAAY;AAAA,EAC/E,OAAO,CAAC,aAAa,gBAAgB,cAAc,kBAAkB;AAAA,EACrE,QAAQ,CAAC,aAAa,SAAS;AACjC;AAEA,SAAS,gBAAgB;AACvB,UAAQ,IAAI,wCAAmC;AAC/C,UAAQ,IAAI,sEAAsE;AAClF,UAAQ,IAAI,4EAA4E;AAExF,aAAW,CAAC,UAAU,KAAK,KAAK,OAAO,QAAQ,eAAe,GAAG;AAC/D,YAAQ,IAAI,KAAK,QAAQ,GAAG;AAC5B,eAAW,QAAQ,OAAO;AACxB,YAAM,OAAO,UAAU,IAAI;AAC3B,cAAQ,IAAI,OAAO,KAAK,OAAO,EAAE,CAAC,IAAI,KAAK,WAAW,EAAE;AAAA,IAC1D;AACA,YAAQ,IAAI;AAAA,EACd;AAEA,UAAQ,IAAI,oEAAoE;AAClF;AAEA,SAAS,gBAAgB,MAAc;AACrC,QAAM,OAAO,UAAU,IAAI;AAC3B,MAAI,CAAC,MAAM;AACT,YAAQ,MAAM,iBAAiB,IAAI,EAAE;AACrC,YAAQ,MAAM,0DAA0D;AACxE,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,UAAQ,IAAI;AAAA,IAAO,IAAI,EAAE;AACzB,UAAQ,IAAI,KAAK,SAAI,OAAO,KAAK,MAAM,CAAC,EAAE;AAC1C,UAAQ,IAAI,KAAK,KAAK,WAAW;AAAA,CAAI;AACrC,UAAQ,IAAI,cAAc;AAC1B,UAAQ,IAAI,OAAO,KAAK,IAAI;AAAA,CAAI;AAChC,UAAQ,IAAI,aAAa;AACzB,aAAW,MAAM,KAAK,UAAU;AAC9B,YAAQ,IAAI,OAAO,EAAE,EAAE;AAAA,EACzB;AACA,UAAQ,IAAI;AACd;AAEA,QACG,KAAK,gBAAgB,EACrB;AAAA,EACC;AAMF,EACC,QAAQ,OAAO;AAElB,QACG,QAAQ,cAAc,EACtB,YAAY,yDAAyD,EACrE,OAAO,CAAC,SAAkB;AACzB,MAAI,MAAM;AACR,oBAAgB,IAAI;AAAA,EACtB,OAAO;AACL,kBAAc;AAAA,EAChB;AACA,UAAQ,KAAK,CAAC;AAChB,CAAC;AAEH,QACG,QAAQ,SAAS,EAAE,WAAW,KAAK,CAAC,EACpC,YAAY,wCAAwC,EACpD,eAAe,kBAAkB,kCAAkC,
EACnE,OAAO,aAAa,6BAA6B,EACjD,OAAO,WAAW,gCAAgC,EAClD,OAAO,kBAAkB,0BAA0B,gCAAgC,EACnF,OAAO,iBAAiB,mDAAmD,QAAQ,EACnF,OAAO,oBAAoB,4CAA4C,QAAQ,EAC/E,OAAO,kBAAkB,iFAAiF,EAC1G,OAAO,cAAc,sBAAsB,EAC3C,OAAO,OAAO,SAAS;AACtB,QAAM,YAAY,QAAQ,KAAK,KAAK;AAEpC,MAAI,CAAC,WAAW,SAAS,GAAG;AAC1B,YAAQ,MAAM,qCAAqC,SAAS,EAAE;AAC9D,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,MAAI,KAAK,OAAO;AACd,UAAM,EAAE,QAAQ,IAAI,MAAM,OAAO,wBAAoB;AACrD,UAAM,UAAU,IAAI,QAAQ,SAAS;AACrC,UAAM,OAAO,MAAM,QAAQ,SAAS;AACpC,YAAQ,IAAI,UAAU,KAAK,MAAM,EAAE;AACnC,YAAQ,IAAI,WAAW,KAAK,OAAO,CAAC,GAAW,MAAW,IAAI,EAAE,OAAO,QAAQ,CAAC,CAAC,EAAE;AACnF,YAAQ,IAAI,cAAc,KAAK,OAAO,CAAC,GAAW,MAAW,IAAI,EAAE,UAAU,QAAQ,CAAC,CAAC,EAAE;AACzF,YAAQ,IAAI,SAAS,IAAI,IAAI,KAAK,QAAQ,CAAC,MAAW,EAAE,IAAI,CAAC,EAAE,IAAI,SAAS;AAC5E,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,MAAI,KAAK,SAAS;AAChB,UAAM,EAAE,aAAa,IAAI,MAAM,OAAO,kBAAkB;AACxD,UAAM,aAAa,WAAW;AAAA,MAC5B,OAAO;AAAA,MACP,cAAc;AAAA,MACd,OAAO,KAAK;AAAA,MACZ,SAAS,KAAK;AAAA,MACd,WAAW,KAAK;AAAA,MAChB,WAAW,KAAK;AAAA,MAChB,YAAY,CAAC,UAAU,UAAU;AAC/B,gBAAQ,OAAO,MAAM,eAAe,QAAQ,IAAI,KAAK,YAAY;AAAA,MACnE;AAAA,IACF,CAAC;AACD,YAAQ,OAAO,MAAM,IAAI;AACzB,YAAQ,IAAI,mBAAmB;AAC/B,YAAQ,KAAK,CAAC;AAAA,EAChB;AAGA,QAAM,EAAE,YAAY,IAAI,MAAM,OAAO,kBAAkB;AACvD,QAAM,YAAY,WAAW,EAAE,OAAO,KAAK,OAAO,OAAO,KAAK,OAAO,SAAS,KAAK,SAAS,WAAW,KAAK,WAAW,WAAW,KAAK,UAAU,CAAC;AACpJ,CAAC;AAEH,QAAQ,MAAM;","names":[]}
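The `TOOL_HELP` table embedded in the source map above doubles as a reference for calling the tools over MCP. A hedged sketch of one such call using the official MCP SDK; the client wiring is an assumption, while the tool name and argument shape come from the table:

```ts
// Hypothetical MCP client invoking search_semantic against the server
// started by `semantic-pages --notes ./vault`. SDK import paths and
// transport options are assumptions, not part of this package.
import { Client } from "@modelcontextprotocol/sdk/client/index.js";
import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js";

const transport = new StdioClientTransport({
  command: "semantic-pages",
  args: ["--notes", "./vault"],
});
const client = new Client({ name: "example-client", version: "0.0.0" });
await client.connect(transport);

const result = await client.callTool({
  name: "search_semantic",
  arguments: { query: "microservices architecture", limit: 5 },
});
console.log(result.content);
```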
package/dist/core/index.d.ts
CHANGED

@@ -97,8 +97,9 @@ declare class Embedder {
     private initialized;
     private numWorkers;
     private batchSize;
+    private quantized;
     private modelPath;
-    constructor(model?: string, numWorkers?: number, batchSize?: number);
+    constructor(model?: string, numWorkers?: number, batchSize?: number, quantized?: boolean);
     init(): Promise<void>;
     embed(text: string): Promise<Float32Array>;
     private meanPoolAndNormalize;

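A sketch of the widened constructor in use, assuming `Embedder` is reachable from the package's core entry point (the exact specifier may differ from the real export map); the argument values mirror the defaults in this release:

```ts
// The fourth positional argument is the new `quantized` switch; passing
// false requests the full-precision ONNX model. Import path assumed.
import { Embedder } from "@theglitchking/semantic-pages";

const embedder = new Embedder("nomic-ai/nomic-embed-text-v1.5", 1, 8, false);
await embedder.init();                      // resolve the ONNX runtime and model
const vec = await embedder.embed("hello");  // Float32Array, per the declaration above
console.log(vec.length);
```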
package/dist/core/index.js
CHANGED
package/dist/mcp/server.d.ts
CHANGED

@@ -6,6 +6,7 @@ interface ServerOptions {
     model?: string;
     workers?: number;
     batchSize?: number;
+    quantized?: boolean;
     onProgress?: (embedded: number, total: number) => void;
 }
 declare function createServer(notesPath: string, options?: ServerOptions): Promise<McpServer>;

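And a sketch of the extended `ServerOptions` in practice, using only the fields visible in the hunk above; the import specifier is inferred from the dist layout and may not match the package's actual export map:

```ts
// `quantized` is the only new field in 0.4.2; the others match what the
// CLI already forwards. Specifier is an assumption, not a documented API.
import { createServer } from "@theglitchking/semantic-pages/dist/mcp/server.js";

const server = await createServer("./vault", {
  model: "nomic-ai/nomic-embed-text-v1.5",
  workers: 1,
  batchSize: 8,
  quantized: true, // set false to embed with the full-precision model
  onProgress: (embedded, total) => {
    process.stderr.write(`\rEmbedding ${embedded}/${total} chunks...`);
  },
});
```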
package/dist/mcp/server.js
CHANGED

@@ -7,7 +7,7 @@ import {
   TextSearch,
   VectorIndex,
   Watcher
-} from "../chunk-7BYJ7MHY.js";
+} from "../chunk-UTXV7ZIQ.js";
 import {
   Indexer,
   __export
@@ -4066,7 +4066,7 @@ async function createServer(notesPath, options = {}) {
   const indexPath = join(notesPath, ".semantic-pages-index");
   await mkdir(indexPath, { recursive: true });
   const indexer = new Indexer(notesPath);
-  const embedder = new Embedder(options.model, options.workers, options.batchSize);
+  const embedder = new Embedder(options.model, options.workers, options.batchSize, options.quantized);
   const graph = new GraphBuilder();
   const textSearch = new TextSearch();
   const crud = new NoteCrud(notesPath);
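One consequence worth noting: because `quantized` is a trailing optional parameter everywhere it appears, 0.4.1-era call sites remain valid. A self-contained sketch against the declaration from dist/core/index.d.ts:

```ts
// Trailing optional parameters are backward compatible: existing
// three-argument constructions still type-check against 0.4.2.
declare class Embedder {
  constructor(model?: string, numWorkers?: number, batchSize?: number, quantized?: boolean);
}

const legacy = new Embedder("nomic-ai/nomic-embed-text-v1.5", 1, 8);        // 0.4.1-style call, still valid
const fullPrecision = new Embedder("nomic-ai/nomic-embed-text-v1.5", 1, 8, false);
```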