indexer-cli 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +156 -0
- package/bin/indexer-cli.js +97 -0
- package/dist/_temp_test.d.ts +1 -0
- package/dist/_temp_test.js +4 -0
- package/dist/_temp_test.js.map +1 -0
- package/dist/chunking/adaptive.d.ts +15 -0
- package/dist/chunking/adaptive.js +43 -0
- package/dist/chunking/adaptive.js.map +1 -0
- package/dist/chunking/function.d.ts +6 -0
- package/dist/chunking/function.js +96 -0
- package/dist/chunking/function.js.map +1 -0
- package/dist/chunking/index.d.ts +5 -0
- package/dist/chunking/index.js +22 -0
- package/dist/chunking/index.js.map +1 -0
- package/dist/chunking/module.d.ts +6 -0
- package/dist/chunking/module.js +33 -0
- package/dist/chunking/module.js.map +1 -0
- package/dist/chunking/single.d.ts +4 -0
- package/dist/chunking/single.js +19 -0
- package/dist/chunking/single.js.map +1 -0
- package/dist/chunking/types.d.ts +17 -0
- package/dist/chunking/types.js +3 -0
- package/dist/chunking/types.js.map +1 -0
- package/dist/cli/commands/architecture.d.ts +2 -0
- package/dist/cli/commands/architecture.js +162 -0
- package/dist/cli/commands/architecture.js.map +1 -0
- package/dist/cli/commands/context.d.ts +2 -0
- package/dist/cli/commands/context.js +241 -0
- package/dist/cli/commands/context.js.map +1 -0
- package/dist/cli/commands/deps.d.ts +2 -0
- package/dist/cli/commands/deps.js +129 -0
- package/dist/cli/commands/deps.js.map +1 -0
- package/dist/cli/commands/enrich.d.ts +2 -0
- package/dist/cli/commands/ensure-indexed.d.ts +4 -0
- package/dist/cli/commands/ensure-indexed.js +168 -0
- package/dist/cli/commands/ensure-indexed.js.map +1 -0
- package/dist/cli/commands/explain.d.ts +2 -0
- package/dist/cli/commands/explain.js +165 -0
- package/dist/cli/commands/explain.js.map +1 -0
- package/dist/cli/commands/index.d.ts +2 -0
- package/dist/cli/commands/index.js +271 -0
- package/dist/cli/commands/index.js.map +1 -0
- package/dist/cli/commands/init.d.ts +2 -0
- package/dist/cli/commands/init.js +132 -0
- package/dist/cli/commands/init.js.map +1 -0
- package/dist/cli/commands/search.d.ts +2 -0
- package/dist/cli/commands/search.js +206 -0
- package/dist/cli/commands/search.js.map +1 -0
- package/dist/cli/commands/setup.d.ts +2 -0
- package/dist/cli/commands/setup.js +425 -0
- package/dist/cli/commands/setup.js.map +1 -0
- package/dist/cli/commands/skill-template.d.ts +6 -0
- package/dist/cli/commands/skill-template.js +72 -0
- package/dist/cli/commands/skill-template.js.map +1 -0
- package/dist/cli/commands/structure.d.ts +2 -0
- package/dist/cli/commands/structure.js +243 -0
- package/dist/cli/commands/structure.js.map +1 -0
- package/dist/cli/commands/uninstall.d.ts +2 -0
- package/dist/cli/commands/uninstall.js +138 -0
- package/dist/cli/commands/uninstall.js.map +1 -0
- package/dist/cli/entry.d.ts +1 -0
- package/dist/cli/entry.js +55 -0
- package/dist/cli/entry.js.map +1 -0
- package/dist/cli/help-text.d.ts +2 -0
- package/dist/cli/help-text.js +9 -0
- package/dist/cli/help-text.js.map +1 -0
- package/dist/cli/version.d.ts +1 -0
- package/dist/cli/version.js +9 -0
- package/dist/cli/version.js.map +1 -0
- package/dist/core/config.d.ts +21 -0
- package/dist/core/config.js +77 -0
- package/dist/core/config.js.map +1 -0
- package/dist/core/logger.d.ts +19 -0
- package/dist/core/logger.js +116 -0
- package/dist/core/logger.js.map +1 -0
- package/dist/core/types.d.ts +194 -0
- package/dist/core/types.js +5 -0
- package/dist/core/types.js.map +1 -0
- package/dist/core/update-check.d.ts +1 -0
- package/dist/core/update-check.js +61 -0
- package/dist/core/update-check.js.map +1 -0
- package/dist/embedding/ollama.d.ts +29 -0
- package/dist/embedding/ollama.js +264 -0
- package/dist/embedding/ollama.js.map +1 -0
- package/dist/engine/architecture.d.ts +55 -0
- package/dist/engine/architecture.js +359 -0
- package/dist/engine/architecture.js.map +1 -0
- package/dist/engine/dependency-resolver.d.ts +4 -0
- package/dist/engine/dependency-resolver.js +69 -0
- package/dist/engine/dependency-resolver.js.map +1 -0
- package/dist/engine/git.d.ts +11 -0
- package/dist/engine/git.js +246 -0
- package/dist/engine/git.js.map +1 -0
- package/dist/engine/indexer.d.ts +86 -0
- package/dist/engine/indexer.js +933 -0
- package/dist/engine/indexer.js.map +1 -0
- package/dist/engine/scanner.d.ts +1 -0
- package/dist/engine/scanner.js +42 -0
- package/dist/engine/scanner.js.map +1 -0
- package/dist/engine/searcher.d.ts +26 -0
- package/dist/engine/searcher.js +70 -0
- package/dist/engine/searcher.js.map +1 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +4 -0
- package/dist/index.js.map +1 -0
- package/dist/languages/csharp.d.ts +25 -0
- package/dist/languages/csharp.js +311 -0
- package/dist/languages/csharp.js.map +1 -0
- package/dist/languages/gdscript.d.ts +25 -0
- package/dist/languages/gdscript.js +382 -0
- package/dist/languages/gdscript.js.map +1 -0
- package/dist/languages/plugin.d.ts +73 -0
- package/dist/languages/plugin.js +35 -0
- package/dist/languages/plugin.js.map +1 -0
- package/dist/languages/python.d.ts +24 -0
- package/dist/languages/python.js +292 -0
- package/dist/languages/python.js.map +1 -0
- package/dist/languages/ruby.d.ts +25 -0
- package/dist/languages/ruby.js +328 -0
- package/dist/languages/ruby.js.map +1 -0
- package/dist/languages/typescript.d.ts +21 -0
- package/dist/languages/typescript.js +439 -0
- package/dist/languages/typescript.js.map +1 -0
- package/dist/storage/sqlite.d.ts +51 -0
- package/dist/storage/sqlite.js +726 -0
- package/dist/storage/sqlite.js.map +1 -0
- package/dist/storage/vectors.d.ts +39 -0
- package/dist/storage/vectors.js +450 -0
- package/dist/storage/vectors.js.map +1 -0
- package/dist/utils/gitignore.d.ts +4 -0
- package/dist/utils/gitignore.js +85 -0
- package/dist/utils/gitignore.js.map +1 -0
- package/dist/utils/hash.d.ts +1 -0
- package/dist/utils/hash.js +12 -0
- package/dist/utils/hash.js.map +1 -0
- package/dist/utils/token-estimator.d.ts +3 -0
- package/dist/utils/token-estimator.js +13 -0
- package/dist/utils/token-estimator.js.map +1 -0
- package/package.json +54 -0
|
@@ -0,0 +1,726 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.SqliteMetadataStore = void 0;
|
|
7
|
+
const better_sqlite3_1 = __importDefault(require("better-sqlite3"));
|
|
8
|
+
const node_async_hooks_1 = require("node:async_hooks");
|
|
9
|
+
const node_crypto_1 = require("node:crypto");
|
|
10
|
+
const logger_js_1 = require("../core/logger.js");
|
|
11
|
+
const logger = new logger_js_1.SystemLogger("storage-sqlite");
|
|
12
|
+
const txCtx = new node_async_hooks_1.AsyncLocalStorage();
|
|
13
|
+
// To add a new migration:
// 1. Add an entry to this array with the next sequential version number
// 2. Use PRAGMA table_info / IF NOT EXISTS patterns to make migrations idempotent
// 3. Add a test case in tests/unit/storage/sqlite.test.ts
// Each entry is { version, name, up(db) }; entries are consumed by
// runMigrations() (called from initialize()).
const migrations = [
    {
        version: 1,
        name: "add_symbol_metadata_json",
        up: (db) => {
            // Idempotent: only add the column when it does not already exist.
            const columns = db.prepare("PRAGMA table_info(symbols)").all();
            const hasMetadataColumn = columns.some((column) => column.name === "metadata_json");
            if (!hasMetadataColumn) {
                db.exec("ALTER TABLE symbols ADD COLUMN metadata_json TEXT");
            }
        },
    },
];
|
|
30
|
+
class SqliteMetadataStore {
|
|
31
|
+
// Absolute path of the SQLite database file.
dbPath;
// better-sqlite3 Database handle (synchronous API).
db;
// Guards initialize() so schema creation and migrations run at most once.
initialized = false;
constructor(dbPath) {
    this.dbPath = dbPath;
    this.db = new better_sqlite3_1.default(this.dbPath);
    // WAL permits concurrent readers during writes; busy_timeout retries
    // for up to 5s instead of failing immediately when the db is locked.
    this.db.pragma("journal_mode = WAL");
    this.db.pragma("busy_timeout = 5000");
}
|
|
40
|
+
async initialize() {
|
|
41
|
+
if (this.initialized)
|
|
42
|
+
return;
|
|
43
|
+
this.initialized = true;
|
|
44
|
+
logger.info("[SqliteMetadataStore] Initializing database at:", this.dbPath);
|
|
45
|
+
this.db.pragma("foreign_keys = ON");
|
|
46
|
+
this.createSchema();
|
|
47
|
+
await this.runMigrations();
|
|
48
|
+
}
|
|
49
|
+
/** Closes the underlying database handle; the store is unusable afterwards. */
async close() {
    this.db.close();
}
|
|
52
|
+
/**
 * Runs `callback` inside a write transaction, with nesting support.
 * The outermost call opens BEGIN IMMEDIATE; nested calls (detected via
 * AsyncLocalStorage) use uniquely named SAVEPOINTs so an inner failure
 * rolls back only its own work.
 * NOTE(review): better-sqlite3 is synchronous, so awaiting inside the
 * callback keeps the transaction open across event-loop turns; other
 * writers then block on busy_timeout — presumably intended, confirm.
 */
async transaction(callback) {
    const ctx = txCtx.getStore();
    if (ctx) {
        // Nested call: name the savepoint from depth plus a per-context
        // sequence number so sibling savepoints never collide.
        const spName = `sp_${ctx.depth}_${++ctx.spSeq}`;
        ctx.depth += 1;
        this.db.prepare(`SAVEPOINT ${spName}`).run();
        try {
            const result = await callback();
            this.db.prepare(`RELEASE ${spName}`).run();
            return result;
        }
        catch (error) {
            // Undo this level's work, then discard the savepoint itself.
            this.db.prepare(`ROLLBACK TO ${spName}`).run();
            this.db.prepare(`RELEASE ${spName}`).run();
            throw error;
        }
        finally {
            ctx.depth -= 1;
        }
    }
    // Outermost call: expose nesting state to any transaction() calls the
    // callback makes, and take the write lock up front (BEGIN IMMEDIATE).
    return txCtx.run({ depth: 1, spSeq: 0 }, async () => {
        this.db.prepare("BEGIN IMMEDIATE").run();
        try {
            const result = await callback();
            this.db.prepare("COMMIT").run();
            return result;
        }
        catch (error) {
            this.db.prepare("ROLLBACK").run();
            throw error;
        }
    });
}
|
|
85
|
+
async createSnapshot(projectId, meta) {
|
|
86
|
+
return this.transaction(async () => {
|
|
87
|
+
const id = (0, node_crypto_1.randomUUID)();
|
|
88
|
+
const createdAt = Date.now();
|
|
89
|
+
this.db
|
|
90
|
+
.prepare("INSERT INTO snapshots (id, project_id, git_ref, status, created_at, failure_reason) VALUES (?, ?, ?, ?, ?, ?)")
|
|
91
|
+
.run(id, projectId, meta.headCommit ?? "", "indexing", createdAt, null);
|
|
92
|
+
return {
|
|
93
|
+
id,
|
|
94
|
+
projectId,
|
|
95
|
+
status: "indexing",
|
|
96
|
+
createdAt,
|
|
97
|
+
meta: {
|
|
98
|
+
...meta,
|
|
99
|
+
indexedAt: createdAt,
|
|
100
|
+
},
|
|
101
|
+
};
|
|
102
|
+
});
|
|
103
|
+
}
|
|
104
|
+
async getSnapshot(id) {
|
|
105
|
+
const row = this.db
|
|
106
|
+
.prepare("SELECT * FROM snapshots WHERE id = ?")
|
|
107
|
+
.get(id);
|
|
108
|
+
return row ? this.mapSnapshotRow(row) : null;
|
|
109
|
+
}
|
|
110
|
+
async getLatestSnapshot(projectId) {
|
|
111
|
+
const row = this.db
|
|
112
|
+
.prepare("SELECT * FROM snapshots WHERE project_id = ? ORDER BY created_at DESC, id DESC LIMIT 1")
|
|
113
|
+
.get(projectId);
|
|
114
|
+
return row ? this.mapSnapshotRow(row) : null;
|
|
115
|
+
}
|
|
116
|
+
async getLatestCompletedSnapshot(projectId) {
|
|
117
|
+
const row = this.db
|
|
118
|
+
.prepare("SELECT * FROM snapshots WHERE project_id = ? AND status = ? ORDER BY created_at DESC, id DESC LIMIT 1")
|
|
119
|
+
.get(projectId, this.mapToDbStatus("completed"));
|
|
120
|
+
return row ? this.mapSnapshotRow(row) : null;
|
|
121
|
+
}
|
|
122
|
+
async listSnapshots(projectId, options) {
|
|
123
|
+
const limit = options?.limit ?? 25;
|
|
124
|
+
const offset = options?.offset ?? 0;
|
|
125
|
+
const rows = this.db
|
|
126
|
+
.prepare("SELECT * FROM snapshots WHERE project_id = ? ORDER BY created_at DESC, id DESC LIMIT ? OFFSET ?")
|
|
127
|
+
.all(projectId, limit, offset);
|
|
128
|
+
return rows.map((row) => this.mapSnapshotRow(row));
|
|
129
|
+
}
|
|
130
|
+
/** Updates a snapshot's status; `error` (if given) is stored as failure_reason. */
async updateSnapshotStatus(id, status, error) {
    await this.transaction(async () => {
        const stmt = this.db.prepare("UPDATE snapshots SET status = ?, failure_reason = ? WHERE id = ?");
        stmt.run(this.mapToDbStatus(status), error ?? null, id);
    });
}
|
|
137
|
+
async updateSnapshotProgress(id, processedFiles, totalFiles) {
|
|
138
|
+
await this.transaction(async () => {
|
|
139
|
+
this.db
|
|
140
|
+
.prepare("UPDATE snapshots SET processed_files = ?, total_files = ? WHERE id = ?")
|
|
141
|
+
.run(processedFiles, totalFiles, id);
|
|
142
|
+
});
|
|
143
|
+
}
|
|
144
|
+
/**
 * Inserts or updates a file record keyed by (project, snapshot, path).
 * On conflict the content hash, mtime, size, and language are refreshed.
 */
async upsertFile(projectId, file) {
    await this.transaction(async () => {
        this.db
            .prepare(`INSERT INTO files (project_id, sha256, mtime_ms, size, path, snapshot_id, language_id)
VALUES (?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(project_id, snapshot_id, path) DO UPDATE SET
sha256 = excluded.sha256,
mtime_ms = excluded.mtime_ms,
size = excluded.size,
language_id = excluded.language_id`)
            .run(projectId, file.sha256, file.mtimeMs, file.size, file.path, file.snapshotId, file.languageId);
    });
}
|
|
157
|
+
async listFiles(projectId, snapshotId, options) {
|
|
158
|
+
let sql = "SELECT * FROM files WHERE project_id = ? AND snapshot_id = ?";
|
|
159
|
+
const params = [projectId, snapshotId];
|
|
160
|
+
const pathPrefix = options?.pathPrefix?.replace(/\/+$/, "");
|
|
161
|
+
if (pathPrefix) {
|
|
162
|
+
sql += " AND (path = ? OR path LIKE ?)";
|
|
163
|
+
params.push(pathPrefix, `${pathPrefix}/%`);
|
|
164
|
+
}
|
|
165
|
+
sql += " ORDER BY path";
|
|
166
|
+
const rows = this.db.prepare(sql).all(...params);
|
|
167
|
+
return rows.map((row) => ({
|
|
168
|
+
snapshotId: row.snapshot_id,
|
|
169
|
+
path: row.path,
|
|
170
|
+
sha256: row.sha256,
|
|
171
|
+
mtimeMs: row.mtime_ms,
|
|
172
|
+
size: row.size,
|
|
173
|
+
languageId: row.language_id,
|
|
174
|
+
}));
|
|
175
|
+
}
|
|
176
|
+
async getFile(projectId, snapshotId, path) {
|
|
177
|
+
const row = this.db
|
|
178
|
+
.prepare("SELECT * FROM files WHERE project_id = ? AND snapshot_id = ? AND path = ?")
|
|
179
|
+
.get(projectId, snapshotId, path);
|
|
180
|
+
if (!row) {
|
|
181
|
+
return null;
|
|
182
|
+
}
|
|
183
|
+
return {
|
|
184
|
+
snapshotId: row.snapshot_id,
|
|
185
|
+
path: row.path,
|
|
186
|
+
sha256: row.sha256,
|
|
187
|
+
mtimeMs: row.mtime_ms,
|
|
188
|
+
size: row.size,
|
|
189
|
+
languageId: row.language_id,
|
|
190
|
+
};
|
|
191
|
+
}
|
|
192
|
+
/**
 * Atomically replaces all chunk rows for one file in a snapshot:
 * deletes existing rows, then bulk-inserts `chunks`.
 * Uses better-sqlite3's synchronous transaction wrapper (the callback
 * never awaits), unlike the async this.transaction() helper.
 */
async replaceChunks(projectId, snapshotId, filePath, chunks) {
    const transaction = this.db.transaction((nextChunks) => {
        this.db
            .prepare("DELETE FROM chunks WHERE project_id = ? AND snapshot_id = ? AND file_path = ?")
            .run(projectId, snapshotId, filePath);
        if (nextChunks.length === 0) {
            return;
        }
        const insertStmt = this.db.prepare(`INSERT INTO chunks (
project_id,
chunk_id,
file_path,
snapshot_id,
start_line,
end_line,
content_hash,
token_estimate,
chunk_type,
primary_symbol,
has_overlap
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`);
        for (const chunk of nextChunks) {
            // Defaults mirror the read side: chunk_type falls back to
            // "full_file"; has_overlap is stored as 0/1.
            insertStmt.run(projectId, chunk.chunkId, filePath, snapshotId, chunk.startLine, chunk.endLine, chunk.contentHash, chunk.tokenEstimate, chunk.chunkType ?? "full_file", chunk.primarySymbol ?? null, chunk.hasOverlap ? 1 : 0);
        }
    });
    transaction(chunks);
}
|
|
219
|
+
async listChunks(projectId, snapshotId, filePath) {
|
|
220
|
+
let sql = "SELECT * FROM chunks WHERE project_id = ? AND snapshot_id = ?";
|
|
221
|
+
const params = [projectId, snapshotId];
|
|
222
|
+
if (filePath) {
|
|
223
|
+
sql += " AND file_path = ?";
|
|
224
|
+
params.push(filePath);
|
|
225
|
+
}
|
|
226
|
+
sql += " ORDER BY file_path, start_line";
|
|
227
|
+
const rows = this.db.prepare(sql).all(...params);
|
|
228
|
+
return rows.map((row) => ({
|
|
229
|
+
snapshotId: row.snapshot_id,
|
|
230
|
+
chunkId: row.chunk_id,
|
|
231
|
+
filePath: row.file_path,
|
|
232
|
+
startLine: row.start_line,
|
|
233
|
+
endLine: row.end_line,
|
|
234
|
+
contentHash: row.content_hash,
|
|
235
|
+
tokenEstimate: row.token_estimate,
|
|
236
|
+
chunkType: row.chunk_type ?? "full_file",
|
|
237
|
+
primarySymbol: row.primary_symbol ?? undefined,
|
|
238
|
+
hasOverlap: Boolean(row.has_overlap),
|
|
239
|
+
}));
|
|
240
|
+
}
|
|
241
|
+
async getChunk(projectId, snapshotId, chunkId) {
|
|
242
|
+
const row = this.db
|
|
243
|
+
.prepare("SELECT * FROM chunks WHERE project_id = ? AND snapshot_id = ? AND chunk_id = ?")
|
|
244
|
+
.get(projectId, snapshotId, chunkId);
|
|
245
|
+
if (!row) {
|
|
246
|
+
return null;
|
|
247
|
+
}
|
|
248
|
+
return {
|
|
249
|
+
snapshotId: row.snapshot_id,
|
|
250
|
+
chunkId: row.chunk_id,
|
|
251
|
+
filePath: row.file_path,
|
|
252
|
+
startLine: row.start_line,
|
|
253
|
+
endLine: row.end_line,
|
|
254
|
+
contentHash: row.content_hash,
|
|
255
|
+
tokenEstimate: row.token_estimate,
|
|
256
|
+
chunkType: row.chunk_type ?? "full_file",
|
|
257
|
+
primarySymbol: row.primary_symbol ?? undefined,
|
|
258
|
+
hasOverlap: Boolean(row.has_overlap),
|
|
259
|
+
};
|
|
260
|
+
}
|
|
261
|
+
/**
 * Atomically replaces all symbol rows for one file in a snapshot:
 * deletes existing rows, then bulk-inserts `symbols`.
 * `range` and `metadata` are serialized to JSON columns; booleans are
 * stored as 0/1.
 */
async replaceSymbols(projectId, snapshotId, filePath, symbols) {
    const transaction = this.db.transaction((nextSymbols) => {
        this.db
            .prepare("DELETE FROM symbols WHERE project_id = ? AND snapshot_id = ? AND file_path = ?")
            .run(projectId, snapshotId, filePath);
        if (nextSymbols.length === 0) {
            return;
        }
        const insertStmt = this.db.prepare(`INSERT INTO symbols (project_id, id, snapshot_id, file_path, kind, name, container_name, exported, range_json, signature, doc_comment, metadata_json)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`);
        for (const symbol of nextSymbols) {
            insertStmt.run(projectId, symbol.id, snapshotId, filePath, symbol.kind, symbol.name, symbol.containerName ?? null, symbol.exported ? 1 : 0, JSON.stringify(symbol.range), symbol.signature ?? null, symbol.docComment ?? null, symbol.metadata ? JSON.stringify(symbol.metadata) : null);
        }
    });
    transaction(symbols);
}
|
|
277
|
+
async listSymbols(projectId, snapshotId, filePath) {
|
|
278
|
+
let sql = "SELECT * FROM symbols WHERE project_id = ? AND snapshot_id = ?";
|
|
279
|
+
const params = [projectId, snapshotId];
|
|
280
|
+
if (filePath) {
|
|
281
|
+
sql += " AND file_path = ?";
|
|
282
|
+
params.push(filePath);
|
|
283
|
+
}
|
|
284
|
+
sql += " ORDER BY file_path, name";
|
|
285
|
+
const rows = this.db.prepare(sql).all(...params);
|
|
286
|
+
return rows.map((row) => ({
|
|
287
|
+
snapshotId: row.snapshot_id,
|
|
288
|
+
id: row.id,
|
|
289
|
+
filePath: row.file_path,
|
|
290
|
+
kind: row.kind,
|
|
291
|
+
name: row.name,
|
|
292
|
+
containerName: row.container_name ?? undefined,
|
|
293
|
+
exported: row.exported === 1,
|
|
294
|
+
range: JSON.parse(row.range_json),
|
|
295
|
+
signature: row.signature ?? undefined,
|
|
296
|
+
docComment: row.doc_comment ?? undefined,
|
|
297
|
+
metadata: row.metadata_json ? JSON.parse(row.metadata_json) : undefined,
|
|
298
|
+
}));
|
|
299
|
+
}
|
|
300
|
+
async searchSymbols(projectId, snapshotId, namePattern) {
|
|
301
|
+
const rows = this.db
|
|
302
|
+
.prepare("SELECT * FROM symbols WHERE project_id = ? AND snapshot_id = ? AND name LIKE ? ORDER BY name")
|
|
303
|
+
.all(projectId, snapshotId, `%${namePattern}%`);
|
|
304
|
+
return rows.map((row) => ({
|
|
305
|
+
snapshotId: row.snapshot_id,
|
|
306
|
+
id: row.id,
|
|
307
|
+
filePath: row.file_path,
|
|
308
|
+
kind: row.kind,
|
|
309
|
+
name: row.name,
|
|
310
|
+
containerName: row.container_name ?? undefined,
|
|
311
|
+
exported: row.exported === 1,
|
|
312
|
+
range: JSON.parse(row.range_json),
|
|
313
|
+
signature: row.signature ?? undefined,
|
|
314
|
+
docComment: row.doc_comment ?? undefined,
|
|
315
|
+
metadata: row.metadata_json ? JSON.parse(row.metadata_json) : undefined,
|
|
316
|
+
}));
|
|
317
|
+
}
|
|
318
|
+
/**
 * Atomically replaces all outgoing dependency edges for one file in a
 * snapshot: deletes existing rows, then bulk-inserts `dependencies`.
 * dependency_type defaults to "unresolved" when not supplied.
 */
async replaceDependencies(projectId, snapshotId, filePath, dependencies) {
    const transaction = this.db.transaction((nextDependencies) => {
        this.db
            .prepare("DELETE FROM dependencies WHERE project_id = ? AND snapshot_id = ? AND from_path = ?")
            .run(projectId, snapshotId, filePath);
        if (nextDependencies.length === 0) {
            return;
        }
        const insertStmt = this.db.prepare(`INSERT INTO dependencies (project_id, id, snapshot_id, from_path, to_specifier, to_path, kind, dependency_type)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)`);
        for (const dependency of nextDependencies) {
            insertStmt.run(projectId, dependency.id, snapshotId, filePath, dependency.toSpecifier, dependency.toPath ?? null, dependency.kind, dependency.dependencyType ?? "unresolved");
        }
    });
    transaction(dependencies);
}
|
|
334
|
+
async listDependencies(projectId, snapshotId, filePath) {
|
|
335
|
+
let sql = "SELECT * FROM dependencies WHERE project_id = ? AND snapshot_id = ?";
|
|
336
|
+
const params = [projectId, snapshotId];
|
|
337
|
+
if (filePath) {
|
|
338
|
+
sql += " AND from_path = ?";
|
|
339
|
+
params.push(filePath);
|
|
340
|
+
}
|
|
341
|
+
sql += " ORDER BY from_path, to_specifier";
|
|
342
|
+
const rows = this.db.prepare(sql).all(...params);
|
|
343
|
+
return rows.map((row) => ({
|
|
344
|
+
snapshotId: row.snapshot_id,
|
|
345
|
+
id: row.id,
|
|
346
|
+
fromPath: row.from_path,
|
|
347
|
+
toSpecifier: row.to_specifier,
|
|
348
|
+
toPath: row.to_path ?? undefined,
|
|
349
|
+
kind: row.kind,
|
|
350
|
+
dependencyType: row.dependency_type,
|
|
351
|
+
}));
|
|
352
|
+
}
|
|
353
|
+
async getDependents(projectId, snapshotId, targetPath) {
|
|
354
|
+
const rows = this.db
|
|
355
|
+
.prepare("SELECT * FROM dependencies WHERE project_id = ? AND snapshot_id = ? AND to_path = ? ORDER BY from_path")
|
|
356
|
+
.all(projectId, snapshotId, targetPath);
|
|
357
|
+
return rows.map((row) => ({
|
|
358
|
+
snapshotId: row.snapshot_id,
|
|
359
|
+
id: row.id,
|
|
360
|
+
fromPath: row.from_path,
|
|
361
|
+
toSpecifier: row.to_specifier,
|
|
362
|
+
toPath: row.to_path ?? undefined,
|
|
363
|
+
kind: row.kind,
|
|
364
|
+
dependencyType: row.dependency_type,
|
|
365
|
+
}));
|
|
366
|
+
}
|
|
367
|
+
async upsertFileMetrics(projectId, metrics) {
|
|
368
|
+
await this.transaction(async () => {
|
|
369
|
+
this.db
|
|
370
|
+
.prepare(`INSERT INTO file_metrics (project_id, snapshot_id, file_path, metrics_json, updated_at)
|
|
371
|
+
VALUES (?, ?, ?, ?, ?)
|
|
372
|
+
ON CONFLICT(project_id, snapshot_id, file_path) DO UPDATE SET
|
|
373
|
+
metrics_json = excluded.metrics_json,
|
|
374
|
+
updated_at = excluded.updated_at`)
|
|
375
|
+
.run(projectId, metrics.snapshotId, metrics.filePath, JSON.stringify(metrics.metrics), Date.now());
|
|
376
|
+
});
|
|
377
|
+
}
|
|
378
|
+
async getFileMetrics(projectId, snapshotId, filePath) {
|
|
379
|
+
const row = this.db
|
|
380
|
+
.prepare("SELECT metrics_json FROM file_metrics WHERE project_id = ? AND snapshot_id = ? AND file_path = ?")
|
|
381
|
+
.get(projectId, snapshotId, filePath);
|
|
382
|
+
if (!row) {
|
|
383
|
+
return null;
|
|
384
|
+
}
|
|
385
|
+
return {
|
|
386
|
+
snapshotId,
|
|
387
|
+
filePath,
|
|
388
|
+
metrics: this.parseMetrics(row.metrics_json),
|
|
389
|
+
};
|
|
390
|
+
}
|
|
391
|
+
async listFileMetrics(projectId, snapshotId) {
|
|
392
|
+
const rows = this.db
|
|
393
|
+
.prepare("SELECT file_path, metrics_json FROM file_metrics WHERE project_id = ? AND snapshot_id = ?")
|
|
394
|
+
.all(projectId, snapshotId);
|
|
395
|
+
return rows.map((row) => ({
|
|
396
|
+
snapshotId,
|
|
397
|
+
filePath: row.file_path,
|
|
398
|
+
metrics: this.parseMetrics(row.metrics_json),
|
|
399
|
+
}));
|
|
400
|
+
}
|
|
401
|
+
async copyUnchangedFileData(projectId, fromSnapshotId, toSnapshotId, unchangedPaths) {
|
|
402
|
+
if (unchangedPaths.length === 0) {
|
|
403
|
+
return;
|
|
404
|
+
}
|
|
405
|
+
const placeholders = unchangedPaths.map(() => "?").join(",");
|
|
406
|
+
const params = [
|
|
407
|
+
projectId,
|
|
408
|
+
toSnapshotId,
|
|
409
|
+
projectId,
|
|
410
|
+
fromSnapshotId,
|
|
411
|
+
...unchangedPaths,
|
|
412
|
+
];
|
|
413
|
+
const transaction = this.db.transaction(() => {
|
|
414
|
+
this.db
|
|
415
|
+
.prepare(`INSERT INTO files (project_id, sha256, mtime_ms, size, path, snapshot_id, language_id)
|
|
416
|
+
SELECT ?, sha256, mtime_ms, size, path, ?, language_id
|
|
417
|
+
FROM files
|
|
418
|
+
WHERE project_id = ? AND snapshot_id = ? AND path IN (${placeholders})`)
|
|
419
|
+
.run(...params);
|
|
420
|
+
this.db
|
|
421
|
+
.prepare(`INSERT INTO chunks (
|
|
422
|
+
project_id,
|
|
423
|
+
chunk_id,
|
|
424
|
+
file_path,
|
|
425
|
+
snapshot_id,
|
|
426
|
+
start_line,
|
|
427
|
+
end_line,
|
|
428
|
+
content_hash,
|
|
429
|
+
token_estimate,
|
|
430
|
+
chunk_type,
|
|
431
|
+
primary_symbol,
|
|
432
|
+
has_overlap
|
|
433
|
+
)
|
|
434
|
+
SELECT ?, chunk_id, file_path, ?, start_line, end_line, content_hash, token_estimate, chunk_type, primary_symbol, has_overlap
|
|
435
|
+
FROM chunks
|
|
436
|
+
WHERE project_id = ? AND snapshot_id = ? AND file_path IN (${placeholders})`)
|
|
437
|
+
.run(...params);
|
|
438
|
+
this.db
|
|
439
|
+
.prepare(`INSERT INTO symbols (project_id, id, snapshot_id, file_path, kind, name, container_name, exported, range_json, signature, doc_comment, metadata_json)
|
|
440
|
+
SELECT ?, id, ?, file_path, kind, name, container_name, exported, range_json, signature, doc_comment, metadata_json
|
|
441
|
+
FROM symbols
|
|
442
|
+
WHERE project_id = ? AND snapshot_id = ? AND file_path IN (${placeholders})`)
|
|
443
|
+
.run(...params);
|
|
444
|
+
this.db
|
|
445
|
+
.prepare(`INSERT INTO dependencies (project_id, id, snapshot_id, from_path, to_specifier, to_path, kind, dependency_type)
|
|
446
|
+
SELECT ?, id, ?, from_path, to_specifier, to_path, kind, dependency_type
|
|
447
|
+
FROM dependencies
|
|
448
|
+
WHERE project_id = ? AND snapshot_id = ? AND from_path IN (${placeholders})`)
|
|
449
|
+
.run(...params);
|
|
450
|
+
this.db
|
|
451
|
+
.prepare(`INSERT INTO file_metrics (project_id, snapshot_id, file_path, metrics_json, updated_at)
|
|
452
|
+
SELECT ?, ?, file_path, metrics_json, updated_at
|
|
453
|
+
FROM file_metrics
|
|
454
|
+
WHERE project_id = ? AND snapshot_id = ? AND file_path IN (${placeholders})`)
|
|
455
|
+
.run(...params);
|
|
456
|
+
});
|
|
457
|
+
transaction();
|
|
458
|
+
}
|
|
459
|
+
async upsertArtifact(projectId, artifact) {
|
|
460
|
+
await this.transaction(async () => {
|
|
461
|
+
this.db
|
|
462
|
+
.prepare(`INSERT INTO artifacts (project_id, snapshot_id, artifact_type, scope, data_json, updated_at)
|
|
463
|
+
VALUES (?, ?, ?, ?, ?, ?)
|
|
464
|
+
ON CONFLICT(project_id, snapshot_id, artifact_type, scope) DO UPDATE SET
|
|
465
|
+
data_json = excluded.data_json,
|
|
466
|
+
updated_at = excluded.updated_at`)
|
|
467
|
+
.run(projectId, artifact.snapshotId, artifact.artifactType, artifact.scope, artifact.dataJson, Date.now());
|
|
468
|
+
});
|
|
469
|
+
}
|
|
470
|
+
async getArtifact(projectId, snapshotId, artifactType, scope) {
|
|
471
|
+
const row = this.db
|
|
472
|
+
.prepare("SELECT * FROM artifacts WHERE project_id = ? AND snapshot_id = ? AND artifact_type = ? AND scope = ?")
|
|
473
|
+
.get(projectId, snapshotId, artifactType, scope);
|
|
474
|
+
if (!row) {
|
|
475
|
+
return null;
|
|
476
|
+
}
|
|
477
|
+
return {
|
|
478
|
+
projectId: row.project_id,
|
|
479
|
+
snapshotId: row.snapshot_id,
|
|
480
|
+
artifactType: row.artifact_type,
|
|
481
|
+
scope: row.scope,
|
|
482
|
+
dataJson: row.data_json,
|
|
483
|
+
updatedAt: row.updated_at,
|
|
484
|
+
};
|
|
485
|
+
}
|
|
486
|
+
async listArtifacts(projectId, snapshotId, artifactType) {
|
|
487
|
+
let sql = "SELECT * FROM artifacts WHERE project_id = ? AND snapshot_id = ?";
|
|
488
|
+
const params = [projectId, snapshotId];
|
|
489
|
+
if (artifactType) {
|
|
490
|
+
sql += " AND artifact_type = ?";
|
|
491
|
+
params.push(artifactType);
|
|
492
|
+
}
|
|
493
|
+
const rows = this.db.prepare(sql).all(...params);
|
|
494
|
+
return rows.map((row) => ({
|
|
495
|
+
projectId: row.project_id,
|
|
496
|
+
snapshotId: row.snapshot_id,
|
|
497
|
+
artifactType: row.artifact_type,
|
|
498
|
+
scope: row.scope,
|
|
499
|
+
dataJson: row.data_json,
|
|
500
|
+
updatedAt: row.updated_at,
|
|
501
|
+
}));
|
|
502
|
+
}
|
|
503
|
+
/**
 * Deletes snapshot rows for a project, optionally keeping one snapshot
 * and sparing snapshots that appear to be actively indexing.
 * NOTE(review): this statement only touches `snapshots`; it assumes
 * dependent rows (files/chunks/symbols/dependencies/metrics/artifacts)
 * are removed via ON DELETE CASCADE (foreign_keys is enabled in
 * initialize()) — confirm against the schema definitions.
 */
async clearProjectMetadata(projectId, keepSnapshotId, options) {
    await this.transaction(async () => {
        let sql = "DELETE FROM snapshots WHERE project_id = ?";
        const params = [projectId];
        if (keepSnapshotId) {
            sql += " AND id != ?";
            params.push(keepSnapshotId);
        }
        if (options?.preserveActiveIndexing) {
            // Protect "indexing" snapshots created within the last 5 minutes —
            // they may belong to a concurrent process still running.
            sql += " AND (status != 'indexing' OR created_at < ?)";
            params.push(Date.now() - 5 * 60 * 1000);
        }
        this.db.prepare(sql).run(...params);
    });
}
|
|
520
|
+
parseMetrics(metricsJson) {
|
|
521
|
+
try {
|
|
522
|
+
const parsed = JSON.parse(metricsJson || "{}");
|
|
523
|
+
return {
|
|
524
|
+
complexity: parsed.complexity ?? 0,
|
|
525
|
+
maintainability: parsed.maintainability ?? 0,
|
|
526
|
+
churn: parsed.churn ?? 0,
|
|
527
|
+
testCoverage: parsed.testCoverage,
|
|
528
|
+
};
|
|
529
|
+
}
|
|
530
|
+
catch {
|
|
531
|
+
return {
|
|
532
|
+
complexity: 0,
|
|
533
|
+
maintainability: 0,
|
|
534
|
+
churn: 0,
|
|
535
|
+
testCoverage: undefined,
|
|
536
|
+
};
|
|
537
|
+
}
|
|
538
|
+
}
|
|
539
|
+
mapSnapshotRow(row) {
|
|
540
|
+
return {
|
|
541
|
+
id: row.id,
|
|
542
|
+
projectId: row.project_id,
|
|
543
|
+
status: this.mapSnapshotStatus(row.status),
|
|
544
|
+
createdAt: row.created_at,
|
|
545
|
+
meta: {
|
|
546
|
+
headCommit: row.git_ref ?? undefined,
|
|
547
|
+
indexedAt: row.created_at,
|
|
548
|
+
},
|
|
549
|
+
processedFiles: row.processed_files ?? undefined,
|
|
550
|
+
totalFiles: row.total_files ?? undefined,
|
|
551
|
+
error: row.failure_reason ?? undefined,
|
|
552
|
+
};
|
|
553
|
+
}
|
|
554
|
+
    /**
     * Create the complete metadata schema. Idempotent: every CREATE uses
     * IF NOT EXISTS, so it is safe to call on an existing database.
     *
     * Layout: all data tables are keyed by (project_id, snapshot_id, ...) and
     * carry FOREIGN KEY (snapshot_id) REFERENCES snapshots(id) ON DELETE
     * CASCADE, so deleting a snapshot row removes its files, chunks, symbols,
     * dependencies, artifacts, and file_metrics in one statement (relied on by
     * clearProjectMetadata). Secondary indexes support the lookup patterns
     * used elsewhere (symbols by snapshot+name / project+kind / file, files by
     * project+path / snapshot, chunks by file / primary symbol, dependencies
     * by from / to / snapshot).
     *
     * NOTE(review): the leading DROP TABLE IF EXISTS logs removes a table this
     * method never creates — presumably a leftover from an earlier schema;
     * confirm before deleting that statement.
     */
    createSchema() {
        this.db.exec(`
      DROP TABLE IF EXISTS logs;

      CREATE TABLE IF NOT EXISTS schema_migrations (
        version INTEGER PRIMARY KEY,
        applied_at INTEGER NOT NULL
      );

      CREATE TABLE IF NOT EXISTS snapshots (
        id TEXT PRIMARY KEY,
        project_id TEXT NOT NULL,
        git_ref TEXT,
        status TEXT NOT NULL,
        created_at INTEGER NOT NULL,
        failure_reason TEXT,
        processed_files INTEGER DEFAULT 0,
        total_files INTEGER DEFAULT 0
      );

      CREATE TABLE IF NOT EXISTS files (
        project_id TEXT NOT NULL,
        snapshot_id TEXT NOT NULL,
        path TEXT NOT NULL,
        sha256 TEXT NOT NULL,
        mtime_ms INTEGER NOT NULL,
        size INTEGER NOT NULL,
        language_id TEXT NOT NULL,
        PRIMARY KEY (project_id, snapshot_id, path),
        FOREIGN KEY (snapshot_id) REFERENCES snapshots(id) ON DELETE CASCADE
      );

      CREATE TABLE IF NOT EXISTS chunks (
        project_id TEXT NOT NULL,
        chunk_id TEXT NOT NULL,
        file_path TEXT NOT NULL,
        snapshot_id TEXT NOT NULL,
        start_line INTEGER NOT NULL,
        end_line INTEGER NOT NULL,
        content_hash TEXT NOT NULL,
        token_estimate INTEGER NOT NULL,
        chunk_type TEXT DEFAULT 'full_file',
        primary_symbol TEXT,
        has_overlap BOOLEAN DEFAULT 0,
        PRIMARY KEY (project_id, snapshot_id, chunk_id),
        FOREIGN KEY (snapshot_id) REFERENCES snapshots(id) ON DELETE CASCADE
      );

      CREATE TABLE IF NOT EXISTS symbols (
        project_id TEXT NOT NULL,
        id TEXT NOT NULL,
        snapshot_id TEXT NOT NULL,
        file_path TEXT NOT NULL,
        kind TEXT NOT NULL,
        name TEXT NOT NULL,
        container_name TEXT,
        exported INTEGER NOT NULL,
        range_json TEXT NOT NULL,
        signature TEXT,
        doc_comment TEXT,
        metadata_json TEXT,
        PRIMARY KEY (project_id, snapshot_id, id),
        FOREIGN KEY (snapshot_id) REFERENCES snapshots(id) ON DELETE CASCADE
      );

      CREATE TABLE IF NOT EXISTS dependencies (
        project_id TEXT NOT NULL,
        id TEXT NOT NULL,
        snapshot_id TEXT NOT NULL,
        from_path TEXT NOT NULL,
        to_specifier TEXT NOT NULL,
        to_path TEXT,
        kind TEXT NOT NULL,
        dependency_type TEXT DEFAULT 'unresolved',
        PRIMARY KEY (project_id, snapshot_id, id),
        FOREIGN KEY (snapshot_id) REFERENCES snapshots(id) ON DELETE CASCADE
      );

      CREATE TABLE IF NOT EXISTS artifacts (
        project_id TEXT NOT NULL,
        snapshot_id TEXT NOT NULL,
        artifact_type TEXT NOT NULL,
        scope TEXT NOT NULL,
        data_json TEXT NOT NULL,
        updated_at INTEGER NOT NULL,
        PRIMARY KEY (project_id, snapshot_id, artifact_type, scope),
        FOREIGN KEY (snapshot_id) REFERENCES snapshots(id) ON DELETE CASCADE
      );

      CREATE TABLE IF NOT EXISTS file_metrics (
        project_id TEXT NOT NULL,
        snapshot_id TEXT NOT NULL,
        file_path TEXT NOT NULL,
        metrics_json TEXT NOT NULL,
        updated_at INTEGER NOT NULL,
        PRIMARY KEY (project_id, snapshot_id, file_path),
        FOREIGN KEY (snapshot_id) REFERENCES snapshots(id) ON DELETE CASCADE
      );

      CREATE INDEX IF NOT EXISTS idx_symbols_snapshot_name
        ON symbols(snapshot_id, name);
      CREATE INDEX IF NOT EXISTS idx_symbols_project_kind
        ON symbols(project_id, kind);
      CREATE INDEX IF NOT EXISTS idx_symbols_file_path
        ON symbols(file_path);

      CREATE INDEX IF NOT EXISTS idx_files_project_path
        ON files(project_id, path);
      CREATE INDEX IF NOT EXISTS idx_files_snapshot
        ON files(snapshot_id);

      CREATE INDEX IF NOT EXISTS idx_chunks_file_path
        ON chunks(file_path);
      CREATE INDEX IF NOT EXISTS idx_chunks_primary_symbol
        ON chunks(primary_symbol);

      CREATE INDEX IF NOT EXISTS idx_dependencies_from_path
        ON dependencies(from_path);
      CREATE INDEX IF NOT EXISTS idx_dependencies_to_path
        ON dependencies(to_path);
      CREATE INDEX IF NOT EXISTS idx_dependencies_snapshot
        ON dependencies(snapshot_id);
    `);
    }
|
|
678
|
+
async runMigrations() {
|
|
679
|
+
const currentVersion = this.getCurrentSchemaVersion();
|
|
680
|
+
logger.info("[SqliteMetadataStore] Current schema version:", currentVersion);
|
|
681
|
+
for (const migration of migrations) {
|
|
682
|
+
if (migration.version > currentVersion) {
|
|
683
|
+
logger.info(`[SqliteMetadataStore] Running migration ${migration.version}: ${migration.name}`);
|
|
684
|
+
migration.up(this.db);
|
|
685
|
+
this.db
|
|
686
|
+
.prepare("INSERT INTO schema_migrations (version, applied_at) VALUES (?, ?)")
|
|
687
|
+
.run(migration.version, Date.now());
|
|
688
|
+
}
|
|
689
|
+
else {
|
|
690
|
+
logger.info(`[SqliteMetadataStore] Skipping migration ${migration.version}: ${migration.name} (already applied)`);
|
|
691
|
+
}
|
|
692
|
+
}
|
|
693
|
+
}
|
|
694
|
+
getCurrentSchemaVersion() {
|
|
695
|
+
try {
|
|
696
|
+
const row = this.db
|
|
697
|
+
.prepare("SELECT MAX(version) as version FROM schema_migrations")
|
|
698
|
+
.get();
|
|
699
|
+
return row?.version ?? 0;
|
|
700
|
+
}
|
|
701
|
+
catch {
|
|
702
|
+
return 0;
|
|
703
|
+
}
|
|
704
|
+
}
|
|
705
|
+
mapSnapshotStatus(status) {
|
|
706
|
+
const statusMap = {
|
|
707
|
+
pending: "pending",
|
|
708
|
+
indexing: "indexing",
|
|
709
|
+
ready: "completed",
|
|
710
|
+
completed: "completed",
|
|
711
|
+
failed: "failed",
|
|
712
|
+
};
|
|
713
|
+
return statusMap[status] ?? "pending";
|
|
714
|
+
}
|
|
715
|
+
mapToDbStatus(status) {
|
|
716
|
+
const statusMap = {
|
|
717
|
+
pending: "pending",
|
|
718
|
+
indexing: "indexing",
|
|
719
|
+
completed: "ready",
|
|
720
|
+
failed: "failed",
|
|
721
|
+
};
|
|
722
|
+
return statusMap[status];
|
|
723
|
+
}
|
|
724
|
+
}
|
|
725
|
+
// Public CommonJS export of the store class defined above (compiled TS output).
exports.SqliteMetadataStore = SqliteMetadataStore;
//# sourceMappingURL=sqlite.js.map
|