@softerist/heuristic-mcp 3.2.3 → 3.2.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/README.md +387 -376
  2. package/config.jsonc +800 -800
  3. package/features/ann-config.js +102 -110
  4. package/features/clear-cache.js +81 -84
  5. package/features/find-similar-code.js +265 -286
  6. package/features/hybrid-search.js +487 -536
  7. package/features/index-codebase.js +3139 -3270
  8. package/features/lifecycle.js +1011 -1063
  9. package/features/package-version.js +277 -291
  10. package/features/register.js +351 -370
  11. package/features/resources.js +115 -130
  12. package/features/set-workspace.js +214 -240
  13. package/index.js +693 -758
  14. package/lib/cache-ops.js +22 -22
  15. package/lib/cache-utils.js +465 -519
  16. package/lib/cache.js +1749 -1849
  17. package/lib/call-graph.js +396 -396
  18. package/lib/cli.js +232 -226
  19. package/lib/config.js +1483 -1495
  20. package/lib/constants.js +511 -493
  21. package/lib/embed-query-process.js +206 -212
  22. package/lib/embedding-process.js +434 -451
  23. package/lib/embedding-worker.js +862 -934
  24. package/lib/ignore-patterns.js +276 -316
  25. package/lib/json-worker.js +14 -14
  26. package/lib/json-writer.js +302 -310
  27. package/lib/logging.js +116 -127
  28. package/lib/memory-logger.js +13 -13
  29. package/lib/onnx-backend.js +188 -193
  30. package/lib/path-utils.js +18 -23
  31. package/lib/project-detector.js +82 -84
  32. package/lib/server-lifecycle.js +133 -145
  33. package/lib/settings-editor.js +738 -739
  34. package/lib/slice-normalize.js +25 -31
  35. package/lib/tokenizer.js +168 -203
  36. package/lib/utils.js +364 -409
  37. package/lib/vector-store-binary.js +973 -991
  38. package/lib/vector-store-sqlite.js +377 -414
  39. package/lib/workspace-env.js +32 -34
  40. package/mcp_config.json +9 -9
  41. package/package.json +86 -86
  42. package/scripts/clear-cache.js +20 -20
  43. package/scripts/download-model.js +43 -43
  44. package/scripts/mcp-launcher.js +49 -49
  45. package/scripts/postinstall.js +12 -12
  46. package/search-configs.js +36 -36
@@ -1,414 +1,377 @@
1
-
2
-
3
- import Database from 'better-sqlite3';
4
- import path from 'path';
5
- import fs from 'fs/promises';
6
- import {
7
- SQLITE_FILE_RETRY_DELAY_MS,
8
- SQLITE_FILE_RETRY_COUNT,
9
- SQLITE_STORE_VERSION as STORE_VERSION,
10
- } from './constants.js';
11
-
12
- const SQLITE_FILE = 'vectors.sqlite';
13
-
14
- async function retryUnlink(targetPath, retries = SQLITE_FILE_RETRY_COUNT) {
15
- for (let attempt = 0; attempt <= retries; attempt += 1) {
16
- try {
17
- await fs.unlink(targetPath);
18
- return;
19
- } catch (error) {
20
- if (error?.code !== 'ENOENT' && error?.code !== 'EBUSY') {
21
- throw error;
22
- }
23
- if (attempt === retries) {
24
- if (error?.code !== 'ENOENT') {
25
- throw error;
26
- }
27
- return;
28
- }
29
- if (error?.code === 'EBUSY') {
30
- await new Promise((resolve) => setTimeout(resolve, SQLITE_FILE_RETRY_DELAY_MS));
31
- }
32
- }
33
- }
34
- }
35
-
36
- async function bestEffortUnlink(targetPath) {
37
- try {
38
- await retryUnlink(targetPath);
39
- } catch {
40
-
41
- }
42
- }
43
-
44
- async function retryRename(fromPath, toPath, retries = SQLITE_FILE_RETRY_COUNT) {
45
- for (let attempt = 0; attempt <= retries; attempt += 1) {
46
- try {
47
- await fs.rename(fromPath, toPath);
48
- return;
49
- } catch (error) {
50
- if (error?.code !== 'EBUSY') {
51
- throw error;
52
- }
53
- if (attempt === retries) {
54
- throw error;
55
- }
56
- await new Promise((resolve) => setTimeout(resolve, SQLITE_FILE_RETRY_DELAY_MS));
57
- }
58
- }
59
- }
60
-
61
-
62
- export class SqliteVectorStore {
63
- constructor({ db, dim, count }) {
64
- this.db = db;
65
- this.dim = dim;
66
- this.count = count;
67
-
68
-
69
- this._stmtGetChunk = db.prepare(`
70
- SELECT file, startLine, endLine, content, vector FROM chunks WHERE id = ?
71
- `);
72
- this._stmtGetVector = db.prepare(`SELECT vector FROM chunks WHERE id = ?`);
73
- this._stmtGetContent = db.prepare(`SELECT content FROM chunks WHERE id = ?`);
74
- this._stmtGetAllFiles = db.prepare(`SELECT DISTINCT file FROM chunks`);
75
- this._stmtGetChunksForFile = db.prepare(`SELECT id FROM chunks WHERE file = ?`);
76
- }
77
-
78
-
79
- close() {
80
- if (this._closed) return;
81
- this._closed = true;
82
- try {
83
- if (this.db && this.db.open) {
84
- this.db.close();
85
- }
86
- } catch {
87
-
88
- }
89
- }
90
-
91
-
92
- static getPath(cacheDir) {
93
- return path.join(cacheDir, SQLITE_FILE);
94
- }
95
-
96
-
97
- static async load(cacheDir, _options = {}) {
98
- const dbPath = SqliteVectorStore.getPath(cacheDir);
99
-
100
- try {
101
- await fs.access(dbPath);
102
- } catch {
103
- return null;
104
- }
105
-
106
- let db;
107
- try {
108
- db = new Database(dbPath, { readonly: true });
109
- } catch (err) {
110
-
111
- console.warn(`[SQLite] Failed to open database: ${err.message}`);
112
- return null;
113
- }
114
-
115
-
116
- let meta;
117
- try {
118
- meta = db.prepare(`SELECT key, value FROM metadata`).all();
119
- } catch (err) {
120
-
121
- console.warn(`[SQLite] Failed to read metadata: ${err.message}`);
122
- db.close();
123
- return null;
124
- }
125
- const metaMap = new Map(meta.map((r) => [r.key, r.value]));
126
-
127
- const version = parseInt(metaMap.get('version') || '0', 10);
128
- if (version !== STORE_VERSION) {
129
- db.close();
130
- return null;
131
- }
132
-
133
- const dim = parseInt(metaMap.get('dim') || '0', 10);
134
- const count = parseInt(metaMap.get('count') || '0', 10);
135
-
136
- return new SqliteVectorStore({ db, dim, count });
137
- }
138
-
139
-
140
- length() {
141
- return this.count;
142
- }
143
-
144
-
145
- getRecord(index) {
146
- if (index < 0 || index >= this.count) return null;
147
-
148
- const row = this._stmtGetChunk.get(index);
149
- if (!row) return null;
150
-
151
- return {
152
- file: row.file,
153
- startLine: row.startLine,
154
- endLine: row.endLine,
155
- };
156
- }
157
-
158
-
159
- getVector(index) {
160
- if (index < 0 || index >= this.count) return null;
161
-
162
- const row = this._stmtGetVector.get(index);
163
- if (!row || !row.vector) return null;
164
-
165
-
166
- const expectedBytes = this.dim * Float32Array.BYTES_PER_ELEMENT;
167
- if (row.vector.byteLength < expectedBytes) return null;
168
-
169
- const view = new Float32Array(row.vector.buffer, row.vector.byteOffset, this.dim);
170
- return new Float32Array(view);
171
- }
172
-
173
-
174
- getContent(index) {
175
- if (index < 0 || index >= this.count) return null;
176
-
177
- const row = this._stmtGetContent.get(index);
178
- return row ? row.content : null;
179
- }
180
-
181
-
182
- toChunkViews({ includeContent = false, includeVector = true } = {}) {
183
- const views = [];
184
- const stmt = this.db.prepare(`
185
- SELECT id, file, startLine, endLine${includeContent ? ', content' : ''}${includeVector ? ', vector' : ''}
186
- FROM chunks ORDER BY id
187
- `);
188
-
189
- for (const row of stmt.iterate()) {
190
- const view = {
191
- index: row.id,
192
- file: row.file,
193
- startLine: row.startLine,
194
- endLine: row.endLine,
195
- _sqliteIndex: row.id,
196
- };
197
-
198
- if (includeContent) {
199
- view.content = row.content;
200
- }
201
-
202
- if (includeVector && row.vector) {
203
- const expectedBytes = this.dim * Float32Array.BYTES_PER_ELEMENT;
204
- if (row.vector.byteLength >= expectedBytes) {
205
-
206
- const bufferView = new Float32Array(
207
- row.vector.buffer,
208
- row.vector.byteOffset,
209
- this.dim
210
- );
211
- view.vector = new Float32Array(bufferView);
212
- }
213
- }
214
-
215
- views.push(view);
216
- }
217
-
218
- return views;
219
- }
220
-
221
-
222
- getAllFileIndices() {
223
- const fileIndices = new Map();
224
-
225
- const files = this._stmtGetAllFiles.all();
226
- for (const { file } of files) {
227
- const chunks = this._stmtGetChunksForFile.all(file);
228
- fileIndices.set(file, chunks.map((c) => c.id));
229
- }
230
-
231
- return fileIndices;
232
- }
233
-
234
-
235
- static async write(cacheDir, chunks, { getContent, getVector, preRename } = {}) {
236
- if (!chunks || chunks.length === 0) {
237
- return null;
238
- }
239
-
240
- await fs.mkdir(cacheDir, { recursive: true });
241
-
242
- const dbPath = SqliteVectorStore.getPath(cacheDir);
243
- const useTemp = process.platform !== 'win32';
244
- const tempSuffix = `${process.pid}-${Date.now()}-${Math.random().toString(16).slice(2)}`;
245
- const tempPath = `${dbPath}.tmp-${tempSuffix}`;
246
- const writePath = useTemp ? tempPath : dbPath;
247
-
248
- const denseChunks = [];
249
- const denseSourceIndices = [];
250
- for (let i = 0; i < chunks.length; i += 1) {
251
- const chunk = chunks[i];
252
- if (!chunk) continue;
253
- denseChunks.push(chunk);
254
- denseSourceIndices.push(i);
255
- }
256
-
257
- const resolveContent = async (chunk, sourceIndex) => {
258
- if (chunk.content !== undefined && chunk.content !== null) {
259
- return chunk.content;
260
- }
261
- if (typeof getContent === 'function') {
262
- const value = getContent(chunk, sourceIndex);
263
- if (value && typeof value.then === 'function') {
264
- return await value;
265
- }
266
- return value;
267
- }
268
- return null;
269
- };
270
-
271
- const resolveVector = async (chunk, sourceIndex) => {
272
- let vectorSource = chunk.vector;
273
- if (
274
- (vectorSource === undefined || vectorSource === null) &&
275
- typeof getVector === 'function'
276
- ) {
277
- vectorSource = getVector(chunk, sourceIndex);
278
- if (vectorSource && typeof vectorSource.then === 'function') {
279
- vectorSource = await vectorSource;
280
- }
281
- }
282
- if (vectorSource === undefined || vectorSource === null) {
283
- throw new Error(`Missing vector data for sqlite cache write at index ${sourceIndex}`);
284
- }
285
- const vector =
286
- vectorSource instanceof Float32Array
287
- ? vectorSource
288
- : ArrayBuffer.isView(vectorSource)
289
- ? Float32Array.from(vectorSource)
290
- : Float32Array.from(vectorSource);
291
- if (!vector || vector.length === 0) {
292
- throw new Error(`Empty vector data for sqlite cache write at index ${sourceIndex}`);
293
- }
294
- return vector;
295
- };
296
-
297
- const dim =
298
- denseChunks.length > 0
299
- ? (await resolveVector(denseChunks[0], denseSourceIndices[0])).length
300
- : 0;
301
-
302
-
303
- const db = new Database(writePath);
304
-
305
-
306
- db.pragma(`journal_mode = ${useTemp ? 'WAL' : 'DELETE'}`);
307
- db.pragma('synchronous = NORMAL');
308
-
309
-
310
- if (!useTemp) {
311
- db.exec(`
312
- DROP TABLE IF EXISTS metadata;
313
- DROP TABLE IF EXISTS chunks;
314
- `);
315
- }
316
-
317
- db.exec(`
318
- CREATE TABLE metadata (
319
- key TEXT PRIMARY KEY,
320
- value TEXT
321
- );
322
-
323
- CREATE TABLE chunks (
324
- id INTEGER PRIMARY KEY,
325
- file TEXT NOT NULL,
326
- startLine INTEGER NOT NULL,
327
- endLine INTEGER NOT NULL,
328
- content TEXT,
329
- vector BLOB
330
- );
331
-
332
- CREATE INDEX idx_chunks_file ON chunks(file);
333
- `);
334
-
335
-
336
- const insertMeta = db.prepare(`INSERT INTO metadata (key, value) VALUES (?, ?)`);
337
- insertMeta.run('version', String(STORE_VERSION));
338
- insertMeta.run('dim', String(dim));
339
- insertMeta.run('count', String(denseChunks.length));
340
- insertMeta.run('createdAt', new Date().toISOString());
341
-
342
-
343
- const insertChunk = db.prepare(`
344
- INSERT INTO chunks (id, file, startLine, endLine, content, vector)
345
- VALUES (?, ?, ?, ?, ?, ?)
346
- `);
347
-
348
- db.exec('BEGIN');
349
- try {
350
- for (let i = 0; i < denseChunks.length; i += 1) {
351
- const chunk = denseChunks[i];
352
- const sourceIndex = denseSourceIndices[i];
353
- const vector = await resolveVector(chunk, sourceIndex);
354
- if (vector.length !== dim) {
355
- throw new Error('Vector dimension mismatch in sqlite cache write');
356
- }
357
- let content = await resolveContent(chunk, sourceIndex);
358
- if (content === undefined) content = null;
359
- if (content !== null && typeof content !== 'string') {
360
- content = String(content);
361
- }
362
- insertChunk.run(
363
- i,
364
- chunk.file,
365
- chunk.startLine ?? 0,
366
- chunk.endLine ?? 0,
367
- content,
368
- Buffer.from(vector.buffer, vector.byteOffset, vector.byteLength)
369
- );
370
- }
371
- db.exec('COMMIT');
372
- } catch (error) {
373
- try {
374
- db.exec('ROLLBACK');
375
- } catch {
376
-
377
- }
378
- throw error;
379
- }
380
-
381
-
382
- db.exec('ANALYZE');
383
- db.close();
384
- if (process.platform === 'win32') {
385
-
386
- await new Promise((resolve) => setTimeout(resolve, 200));
387
- }
388
-
389
-
390
- if (typeof preRename === 'function') {
391
- await preRename();
392
- }
393
-
394
-
395
- if (useTemp) {
396
- await retryUnlink(dbPath);
397
- await retryRename(tempPath, dbPath);
398
- }
399
-
400
-
401
- if (useTemp) {
402
- await bestEffortUnlink(tempPath + '-wal');
403
- await bestEffortUnlink(tempPath + '-shm');
404
- } else {
405
- await bestEffortUnlink(dbPath + '-wal');
406
- await bestEffortUnlink(dbPath + '-shm');
407
- }
408
-
409
-
410
- return SqliteVectorStore.load(cacheDir);
411
- }
412
- }
413
-
414
- export default SqliteVectorStore;
1
+ import Database from 'better-sqlite3';
2
+ import path from 'path';
3
+ import fs from 'fs/promises';
4
+ import {
5
+ SQLITE_FILE_RETRY_DELAY_MS,
6
+ SQLITE_FILE_RETRY_COUNT,
7
+ SQLITE_STORE_VERSION as STORE_VERSION,
8
+ } from './constants.js';
9
+
10
+ const SQLITE_FILE = 'vectors.sqlite';
11
+
12
+ async function retryUnlink(targetPath, retries = SQLITE_FILE_RETRY_COUNT) {
13
+ for (let attempt = 0; attempt <= retries; attempt += 1) {
14
+ try {
15
+ await fs.unlink(targetPath);
16
+ return;
17
+ } catch (error) {
18
+ if (error?.code !== 'ENOENT' && error?.code !== 'EBUSY') {
19
+ throw error;
20
+ }
21
+ if (attempt === retries) {
22
+ if (error?.code !== 'ENOENT') {
23
+ throw error;
24
+ }
25
+ return;
26
+ }
27
+ if (error?.code === 'EBUSY') {
28
+ await new Promise((resolve) => setTimeout(resolve, SQLITE_FILE_RETRY_DELAY_MS));
29
+ }
30
+ }
31
+ }
32
+ }
33
+
34
+ async function bestEffortUnlink(targetPath) {
35
+ try {
36
+ await retryUnlink(targetPath);
37
+ } catch {}
38
+ }
39
+
40
+ async function retryRename(fromPath, toPath, retries = SQLITE_FILE_RETRY_COUNT) {
41
+ for (let attempt = 0; attempt <= retries; attempt += 1) {
42
+ try {
43
+ await fs.rename(fromPath, toPath);
44
+ return;
45
+ } catch (error) {
46
+ if (error?.code !== 'EBUSY') {
47
+ throw error;
48
+ }
49
+ if (attempt === retries) {
50
+ throw error;
51
+ }
52
+ await new Promise((resolve) => setTimeout(resolve, SQLITE_FILE_RETRY_DELAY_MS));
53
+ }
54
+ }
55
+ }
56
+
57
+ export class SqliteVectorStore {
58
+ constructor({ db, dim, count }) {
59
+ this.db = db;
60
+ this.dim = dim;
61
+ this.count = count;
62
+
63
+ this._stmtGetChunk = db.prepare(`
64
+ SELECT file, startLine, endLine, content, vector FROM chunks WHERE id = ?
65
+ `);
66
+ this._stmtGetVector = db.prepare(`SELECT vector FROM chunks WHERE id = ?`);
67
+ this._stmtGetContent = db.prepare(`SELECT content FROM chunks WHERE id = ?`);
68
+ this._stmtGetAllFiles = db.prepare(`SELECT DISTINCT file FROM chunks`);
69
+ this._stmtGetChunksForFile = db.prepare(`SELECT id FROM chunks WHERE file = ?`);
70
+ }
71
+
72
+ close() {
73
+ if (this._closed) return;
74
+ this._closed = true;
75
+ try {
76
+ if (this.db && this.db.open) {
77
+ this.db.close();
78
+ }
79
+ } catch {}
80
+ }
81
+
82
+ static getPath(cacheDir) {
83
+ return path.join(cacheDir, SQLITE_FILE);
84
+ }
85
+
86
+ static async load(cacheDir, _options = {}) {
87
+ const dbPath = SqliteVectorStore.getPath(cacheDir);
88
+
89
+ try {
90
+ await fs.access(dbPath);
91
+ } catch {
92
+ return null;
93
+ }
94
+
95
+ let db;
96
+ try {
97
+ db = new Database(dbPath, { readonly: true });
98
+ } catch (err) {
99
+ console.warn(`[SQLite] Failed to open database: ${err.message}`);
100
+ return null;
101
+ }
102
+
103
+ let meta;
104
+ try {
105
+ meta = db.prepare(`SELECT key, value FROM metadata`).all();
106
+ } catch (err) {
107
+ console.warn(`[SQLite] Failed to read metadata: ${err.message}`);
108
+ db.close();
109
+ return null;
110
+ }
111
+ const metaMap = new Map(meta.map((r) => [r.key, r.value]));
112
+
113
+ const version = parseInt(metaMap.get('version') || '0', 10);
114
+ if (version !== STORE_VERSION) {
115
+ db.close();
116
+ return null;
117
+ }
118
+
119
+ const dim = parseInt(metaMap.get('dim') || '0', 10);
120
+ const count = parseInt(metaMap.get('count') || '0', 10);
121
+
122
+ return new SqliteVectorStore({ db, dim, count });
123
+ }
124
+
125
+ length() {
126
+ return this.count;
127
+ }
128
+
129
+ getRecord(index) {
130
+ if (index < 0 || index >= this.count) return null;
131
+
132
+ const row = this._stmtGetChunk.get(index);
133
+ if (!row) return null;
134
+
135
+ return {
136
+ file: row.file,
137
+ startLine: row.startLine,
138
+ endLine: row.endLine,
139
+ };
140
+ }
141
+
142
+ getVector(index) {
143
+ if (index < 0 || index >= this.count) return null;
144
+
145
+ const row = this._stmtGetVector.get(index);
146
+ if (!row || !row.vector) return null;
147
+
148
+ const expectedBytes = this.dim * Float32Array.BYTES_PER_ELEMENT;
149
+ if (row.vector.byteLength < expectedBytes) return null;
150
+
151
+ const view = new Float32Array(row.vector.buffer, row.vector.byteOffset, this.dim);
152
+ return new Float32Array(view);
153
+ }
154
+
155
+ getContent(index) {
156
+ if (index < 0 || index >= this.count) return null;
157
+
158
+ const row = this._stmtGetContent.get(index);
159
+ return row ? row.content : null;
160
+ }
161
+
162
+ toChunkViews({ includeContent = false, includeVector = true } = {}) {
163
+ const views = [];
164
+ const stmt = this.db.prepare(`
165
+ SELECT id, file, startLine, endLine${includeContent ? ', content' : ''}${includeVector ? ', vector' : ''}
166
+ FROM chunks ORDER BY id
167
+ `);
168
+
169
+ for (const row of stmt.iterate()) {
170
+ const view = {
171
+ index: row.id,
172
+ file: row.file,
173
+ startLine: row.startLine,
174
+ endLine: row.endLine,
175
+ _sqliteIndex: row.id,
176
+ };
177
+
178
+ if (includeContent) {
179
+ view.content = row.content;
180
+ }
181
+
182
+ if (includeVector && row.vector) {
183
+ const expectedBytes = this.dim * Float32Array.BYTES_PER_ELEMENT;
184
+ if (row.vector.byteLength >= expectedBytes) {
185
+ const bufferView = new Float32Array(row.vector.buffer, row.vector.byteOffset, this.dim);
186
+ view.vector = new Float32Array(bufferView);
187
+ }
188
+ }
189
+
190
+ views.push(view);
191
+ }
192
+
193
+ return views;
194
+ }
195
+
196
+ getAllFileIndices() {
197
+ const fileIndices = new Map();
198
+
199
+ const files = this._stmtGetAllFiles.all();
200
+ for (const { file } of files) {
201
+ const chunks = this._stmtGetChunksForFile.all(file);
202
+ fileIndices.set(
203
+ file,
204
+ chunks.map((c) => c.id)
205
+ );
206
+ }
207
+
208
+ return fileIndices;
209
+ }
210
+
211
+ static async write(cacheDir, chunks, { getContent, getVector, preRename } = {}) {
212
+ if (!chunks || chunks.length === 0) {
213
+ return null;
214
+ }
215
+
216
+ await fs.mkdir(cacheDir, { recursive: true });
217
+
218
+ const dbPath = SqliteVectorStore.getPath(cacheDir);
219
+ const useTemp = process.platform !== 'win32';
220
+ const tempSuffix = `${process.pid}-${Date.now()}-${Math.random().toString(16).slice(2)}`;
221
+ const tempPath = `${dbPath}.tmp-${tempSuffix}`;
222
+ const writePath = useTemp ? tempPath : dbPath;
223
+
224
+ const denseChunks = [];
225
+ const denseSourceIndices = [];
226
+ for (let i = 0; i < chunks.length; i += 1) {
227
+ const chunk = chunks[i];
228
+ if (!chunk) continue;
229
+ denseChunks.push(chunk);
230
+ denseSourceIndices.push(i);
231
+ }
232
+
233
+ const resolveContent = async (chunk, sourceIndex) => {
234
+ if (chunk.content !== undefined && chunk.content !== null) {
235
+ return chunk.content;
236
+ }
237
+ if (typeof getContent === 'function') {
238
+ const value = getContent(chunk, sourceIndex);
239
+ if (value && typeof value.then === 'function') {
240
+ return await value;
241
+ }
242
+ return value;
243
+ }
244
+ return null;
245
+ };
246
+
247
+ const resolveVector = async (chunk, sourceIndex) => {
248
+ let vectorSource = chunk.vector;
249
+ if (
250
+ (vectorSource === undefined || vectorSource === null) &&
251
+ typeof getVector === 'function'
252
+ ) {
253
+ vectorSource = getVector(chunk, sourceIndex);
254
+ if (vectorSource && typeof vectorSource.then === 'function') {
255
+ vectorSource = await vectorSource;
256
+ }
257
+ }
258
+ if (vectorSource === undefined || vectorSource === null) {
259
+ throw new Error(`Missing vector data for sqlite cache write at index ${sourceIndex}`);
260
+ }
261
+ const vector =
262
+ vectorSource instanceof Float32Array
263
+ ? vectorSource
264
+ : ArrayBuffer.isView(vectorSource)
265
+ ? Float32Array.from(vectorSource)
266
+ : Float32Array.from(vectorSource);
267
+ if (!vector || vector.length === 0) {
268
+ throw new Error(`Empty vector data for sqlite cache write at index ${sourceIndex}`);
269
+ }
270
+ return vector;
271
+ };
272
+
273
+ const dim =
274
+ denseChunks.length > 0
275
+ ? (await resolveVector(denseChunks[0], denseSourceIndices[0])).length
276
+ : 0;
277
+
278
+ const db = new Database(writePath);
279
+
280
+ db.pragma(`journal_mode = ${useTemp ? 'WAL' : 'DELETE'}`);
281
+ db.pragma('synchronous = NORMAL');
282
+
283
+ if (!useTemp) {
284
+ db.exec(`
285
+ DROP TABLE IF EXISTS metadata;
286
+ DROP TABLE IF EXISTS chunks;
287
+ `);
288
+ }
289
+
290
+ db.exec(`
291
+ CREATE TABLE metadata (
292
+ key TEXT PRIMARY KEY,
293
+ value TEXT
294
+ );
295
+
296
+ CREATE TABLE chunks (
297
+ id INTEGER PRIMARY KEY,
298
+ file TEXT NOT NULL,
299
+ startLine INTEGER NOT NULL,
300
+ endLine INTEGER NOT NULL,
301
+ content TEXT,
302
+ vector BLOB
303
+ );
304
+
305
+ CREATE INDEX idx_chunks_file ON chunks(file);
306
+ `);
307
+
308
+ const insertMeta = db.prepare(`INSERT INTO metadata (key, value) VALUES (?, ?)`);
309
+ insertMeta.run('version', String(STORE_VERSION));
310
+ insertMeta.run('dim', String(dim));
311
+ insertMeta.run('count', String(denseChunks.length));
312
+ insertMeta.run('createdAt', new Date().toISOString());
313
+
314
+ const insertChunk = db.prepare(`
315
+ INSERT INTO chunks (id, file, startLine, endLine, content, vector)
316
+ VALUES (?, ?, ?, ?, ?, ?)
317
+ `);
318
+
319
+ db.exec('BEGIN');
320
+ try {
321
+ for (let i = 0; i < denseChunks.length; i += 1) {
322
+ const chunk = denseChunks[i];
323
+ const sourceIndex = denseSourceIndices[i];
324
+ const vector = await resolveVector(chunk, sourceIndex);
325
+ if (vector.length !== dim) {
326
+ throw new Error('Vector dimension mismatch in sqlite cache write');
327
+ }
328
+ let content = await resolveContent(chunk, sourceIndex);
329
+ if (content === undefined) content = null;
330
+ if (content !== null && typeof content !== 'string') {
331
+ content = String(content);
332
+ }
333
+ insertChunk.run(
334
+ i,
335
+ chunk.file,
336
+ chunk.startLine ?? 0,
337
+ chunk.endLine ?? 0,
338
+ content,
339
+ Buffer.from(vector.buffer, vector.byteOffset, vector.byteLength)
340
+ );
341
+ }
342
+ db.exec('COMMIT');
343
+ } catch (error) {
344
+ try {
345
+ db.exec('ROLLBACK');
346
+ } catch {}
347
+ throw error;
348
+ }
349
+
350
+ db.exec('ANALYZE');
351
+ db.close();
352
+ if (process.platform === 'win32') {
353
+ await new Promise((resolve) => setTimeout(resolve, 200));
354
+ }
355
+
356
+ if (typeof preRename === 'function') {
357
+ await preRename();
358
+ }
359
+
360
+ if (useTemp) {
361
+ await retryUnlink(dbPath);
362
+ await retryRename(tempPath, dbPath);
363
+ }
364
+
365
+ if (useTemp) {
366
+ await bestEffortUnlink(tempPath + '-wal');
367
+ await bestEffortUnlink(tempPath + '-shm');
368
+ } else {
369
+ await bestEffortUnlink(dbPath + '-wal');
370
+ await bestEffortUnlink(dbPath + '-shm');
371
+ }
372
+
373
+ return SqliteVectorStore.load(cacheDir);
374
+ }
375
+ }
376
+
377
+ export default SqliteVectorStore;