@adhisang/minecraft-modding-mcp 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +11 -0
- package/LICENSE +21 -0
- package/README.md +765 -0
- package/dist/access-widener-parser.d.ts +24 -0
- package/dist/access-widener-parser.js +77 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +4 -0
- package/dist/config.d.ts +27 -0
- package/dist/config.js +178 -0
- package/dist/decompiler/vineflower.d.ts +15 -0
- package/dist/decompiler/vineflower.js +185 -0
- package/dist/errors.d.ts +50 -0
- package/dist/errors.js +49 -0
- package/dist/hash.d.ts +1 -0
- package/dist/hash.js +12 -0
- package/dist/index.d.ts +7 -0
- package/dist/index.js +1447 -0
- package/dist/java-process.d.ts +16 -0
- package/dist/java-process.js +120 -0
- package/dist/logger.d.ts +3 -0
- package/dist/logger.js +21 -0
- package/dist/mapping-pipeline-service.d.ts +18 -0
- package/dist/mapping-pipeline-service.js +60 -0
- package/dist/mapping-service.d.ts +161 -0
- package/dist/mapping-service.js +1706 -0
- package/dist/maven-resolver.d.ts +22 -0
- package/dist/maven-resolver.js +122 -0
- package/dist/minecraft-explorer-service.d.ts +43 -0
- package/dist/minecraft-explorer-service.js +562 -0
- package/dist/mixin-parser.d.ts +34 -0
- package/dist/mixin-parser.js +194 -0
- package/dist/mixin-validator.d.ts +59 -0
- package/dist/mixin-validator.js +274 -0
- package/dist/mod-analyzer.d.ts +23 -0
- package/dist/mod-analyzer.js +346 -0
- package/dist/mod-decompile-service.d.ts +39 -0
- package/dist/mod-decompile-service.js +136 -0
- package/dist/mod-remap-service.d.ts +17 -0
- package/dist/mod-remap-service.js +186 -0
- package/dist/mod-search-service.d.ts +28 -0
- package/dist/mod-search-service.js +174 -0
- package/dist/mojang-tiny-mapping-service.d.ts +13 -0
- package/dist/mojang-tiny-mapping-service.js +351 -0
- package/dist/nbt/java-nbt-codec.d.ts +3 -0
- package/dist/nbt/java-nbt-codec.js +385 -0
- package/dist/nbt/json-patch.d.ts +3 -0
- package/dist/nbt/json-patch.js +352 -0
- package/dist/nbt/pipeline.d.ts +39 -0
- package/dist/nbt/pipeline.js +173 -0
- package/dist/nbt/typed-json.d.ts +10 -0
- package/dist/nbt/typed-json.js +205 -0
- package/dist/nbt/types.d.ts +66 -0
- package/dist/nbt/types.js +2 -0
- package/dist/observability.d.ts +88 -0
- package/dist/observability.js +165 -0
- package/dist/path-converter.d.ts +12 -0
- package/dist/path-converter.js +161 -0
- package/dist/path-resolver.d.ts +19 -0
- package/dist/path-resolver.js +78 -0
- package/dist/registry-service.d.ts +29 -0
- package/dist/registry-service.js +214 -0
- package/dist/repo-downloader.d.ts +15 -0
- package/dist/repo-downloader.js +111 -0
- package/dist/resources.d.ts +3 -0
- package/dist/resources.js +154 -0
- package/dist/search-hit-accumulator.d.ts +38 -0
- package/dist/search-hit-accumulator.js +153 -0
- package/dist/source-jar-reader.d.ts +13 -0
- package/dist/source-jar-reader.js +216 -0
- package/dist/source-resolver.d.ts +14 -0
- package/dist/source-resolver.js +274 -0
- package/dist/source-service.d.ts +404 -0
- package/dist/source-service.js +2881 -0
- package/dist/storage/artifacts-repo.d.ts +45 -0
- package/dist/storage/artifacts-repo.js +209 -0
- package/dist/storage/db.d.ts +14 -0
- package/dist/storage/db.js +132 -0
- package/dist/storage/files-repo.d.ts +78 -0
- package/dist/storage/files-repo.js +437 -0
- package/dist/storage/index-meta-repo.d.ts +35 -0
- package/dist/storage/index-meta-repo.js +97 -0
- package/dist/storage/migrations.d.ts +11 -0
- package/dist/storage/migrations.js +71 -0
- package/dist/storage/schema.d.ts +1 -0
- package/dist/storage/schema.js +160 -0
- package/dist/storage/sqlite.d.ts +20 -0
- package/dist/storage/sqlite.js +111 -0
- package/dist/storage/symbols-repo.d.ts +63 -0
- package/dist/storage/symbols-repo.js +401 -0
- package/dist/symbols/symbol-extractor.d.ts +7 -0
- package/dist/symbols/symbol-extractor.js +64 -0
- package/dist/tiny-remapper-resolver.d.ts +1 -0
- package/dist/tiny-remapper-resolver.js +62 -0
- package/dist/tiny-remapper-service.d.ts +16 -0
- package/dist/tiny-remapper-service.js +73 -0
- package/dist/types.d.ts +120 -0
- package/dist/types.js +2 -0
- package/dist/version-diff-service.d.ts +41 -0
- package/dist/version-diff-service.js +222 -0
- package/dist/version-service.d.ts +70 -0
- package/dist/version-service.js +411 -0
- package/dist/vineflower-resolver.d.ts +1 -0
- package/dist/vineflower-resolver.js +62 -0
- package/dist/workspace-mapping-service.d.ts +18 -0
- package/dist/workspace-mapping-service.js +89 -0
- package/package.json +61 -0
|
@@ -0,0 +1,437 @@
|
|
|
1
|
+
import { createHash } from "node:crypto";
|
|
2
|
+
import { log } from "../logger.js";
|
|
3
|
+
/**
 * Encodes an opaque pagination cursor for keyset listing.
 * The payload is `{ sortKey }` serialized as JSON and base64-encoded so
 * callers cannot (easily) depend on its internal structure.
 */
function buildCursor(sortKey) {
    const payload = JSON.stringify({ sortKey });
    return Buffer.from(payload, "utf8").toString("base64");
}
|
|
6
|
+
/**
 * Decodes a listing cursor produced by buildCursor.
 * Returns undefined for absent, malformed, or structurally invalid cursors;
 * decoding failures are logged (not thrown) so a bad client cursor simply
 * restarts pagination from the beginning.
 */
function parseCursor(cursor) {
    if (!cursor) {
        return undefined;
    }
    try {
        const json = Buffer.from(cursor, "base64").toString("utf8");
        const candidate = JSON.parse(json);
        // Only accept payloads carrying a string sortKey.
        return typeof candidate.sortKey === "string" ? candidate : undefined;
    }
    catch {
        log("warn", "storage.files.invalid_list_cursor", { cursor });
        return undefined;
    }
}
|
|
23
|
+
/**
 * Encodes an opaque search-pagination cursor.
 * Carries the (score, filePath) pair of the last returned hit, which is the
 * sort key of the search ordering (score desc, then path asc).
 */
function buildSearchCursor(score, filePath) {
    const payload = JSON.stringify({ score, filePath });
    return Buffer.from(payload, "utf8").toString("base64");
}
|
|
26
|
+
/**
 * Decodes a search cursor produced by buildSearchCursor.
 * Returns undefined for absent, malformed, or structurally invalid cursors;
 * decoding failures are logged rather than thrown.
 */
function parseSearchCursor(cursor) {
    if (!cursor) {
        return undefined;
    }
    try {
        const json = Buffer.from(cursor, "base64").toString("utf8");
        const candidate = JSON.parse(json);
        const isValid = typeof candidate.score === "number"
            && typeof candidate.filePath === "string";
        return isValid ? candidate : undefined;
    }
    catch {
        log("warn", "storage.files.invalid_search_cursor", { cursor });
        return undefined;
    }
}
|
|
43
|
+
/**
 * Derives the next-page cursor from a page of rows ordered by file_path.
 * Returns undefined for an empty page (no further pagination possible).
 */
function nextCursorFromRows(rows) {
    if (!rows.length) {
        return undefined;
    }
    const tail = rows[rows.length - 1];
    return buildCursor(tail.file_path);
}
|
|
50
|
+
/**
 * Comparator for search hits: higher score first, ties broken by
 * file path in ascending lexicographic order.
 */
function compareSearchOrdering(left, right) {
    const scoreDelta = right.score - left.score;
    if (scoreDelta !== 0 && right.score !== left.score) {
        return scoreDelta;
    }
    if (right.score !== left.score) {
        return scoreDelta;
    }
    return left.filePath.localeCompare(right.filePath);
}
|
|
56
|
+
/**
 * Reports whether a hit sorts strictly after the cursor position in the
 * search ordering (score desc, then filePath asc). Used as a safety-net
 * client-side filter on top of the SQL-level cursor push-down.
 */
function isAfterSearchCursor(hit, cursor) {
    // Lower score => sorts later => after the cursor.
    // Higher score => sorts earlier => before the cursor.
    // Equal score => tie broken by ascending path order.
    return hit.score < cursor.score
        ? true
        : hit.score > cursor.score
            ? false
            : hit.filePath.localeCompare(cursor.filePath) > 0;
}
|
|
65
|
+
/**
 * Builds a short, whitespace-collapsed snippet around the first
 * case-insensitive occurrence of `query` inside `content`.
 * Falls back to the first 120 characters when the query is not found.
 */
function buildPreview(content, query) {
    const needle = query.toLowerCase();
    const haystack = content.toLowerCase();
    const hitIndex = haystack.indexOf(needle);
    if (hitIndex < 0) {
        // No match: preview is simply the head of the file.
        return content.slice(0, 120);
    }
    // Include 24 characters of context on each side of the match.
    const sliceStart = Math.max(0, hitIndex - 24);
    const sliceEnd = Math.min(content.length, hitIndex + needle.length + 24);
    const leadingEllipsis = sliceStart > 0 ? "..." : "";
    const trailingEllipsis = sliceEnd < content.length ? "..." : "";
    const snippet = content.slice(sliceStart, sliceEnd);
    return `${leadingEllipsis}${snippet}${trailingEllipsis}`.replace(/\s+/g, " ").trim();
}
|
|
78
|
+
/**
 * SQLite-backed repository for indexed file contents.
 *
 * Each row of the `files` table stores one file (path, full text, byte size,
 * sha256 hash) keyed by the owning artifact; a parallel `files_fts` FTS table
 * mirrors (artifact_id, file_path, content) for full-text search. Hot
 * statements are prepared once in the constructor; variable-arity batch
 * lookups are cached in `getByPathsStmtCache`.
 *
 * Search results are scored in tiers (see searchFileCandidates):
 *   path-only match = 120, content-only = 100 + rank bonus (0..19),
 *   path + content ("both") = 140 + rank bonus.
 */
export class FilesRepo {
    // better-sqlite3-style handle (prepare/transaction); injected by the caller.
    db;
    // Prepared statements, compiled once and reused for every call.
    deleteStmt;
    insertFilesStmt;
    insertFtsStmt;
    deleteFtsStmt;
    getContentStmt;
    listStmt;
    listRowsStmt;
    searchPathStmt;
    searchFtsStmt;
    // Cache of "WHERE file_path IN (?, ?, ...)" statements keyed by
    // placeholder count; see getFileContentsByPathsStmt().
    getByPathsStmtCache = new Map();
    constructor(db) {
        this.db = db;
        this.deleteStmt = this.db.prepare(`
      DELETE FROM files WHERE artifact_id = ?
    `);
        this.deleteFtsStmt = this.db.prepare(`
      DELETE FROM files_fts WHERE artifact_id = ?
    `);
        this.insertFilesStmt = this.db.prepare(`
      INSERT INTO files (artifact_id, file_path, content, content_bytes, content_hash)
      VALUES (?, ?, ?, ?, ?)
    `);
        this.insertFtsStmt = this.db.prepare(`
      INSERT INTO files_fts (artifact_id, file_path, content)
      VALUES (?, ?, ?)
    `);
        this.getContentStmt = this.db.prepare(`
      SELECT artifact_id, file_path, content, content_bytes, content_hash
      FROM files
      WHERE artifact_id = ? AND file_path = ?
      LIMIT 1
    `);
        // "file_path > ?" implements keyset pagination; the "(? IS NULL OR ...)"
        // pair makes the prefix filter optional within a single statement.
        // NOTE(review): the LIKE here has no ESCAPE clause, so a prefix
        // containing '%' or '_' acts as a wildcard — confirm callers sanitize.
        this.listStmt = this.db.prepare(`
      SELECT artifact_id, file_path
      FROM files
      WHERE artifact_id = ? AND file_path > ? AND (? IS NULL OR file_path LIKE ? || '%')
      ORDER BY file_path ASC
      LIMIT ?
    `);
        this.listRowsStmt = this.db.prepare(`
      SELECT artifact_id, file_path, content, content_bytes, content_hash
      FROM files
      WHERE artifact_id = ? AND file_path > ? AND (? IS NULL OR file_path LIKE ? || '%')
      ORDER BY file_path ASC
      LIMIT ?
    `);
        this.searchPathStmt = this.db.prepare(`
      SELECT file_path
      FROM files
      WHERE artifact_id = ? AND file_path LIKE ? ESCAPE '\\'
      ORDER BY file_path ASC
      LIMIT ?
    `);
        // FTS5 rank orders best matches first (BM25: lower rank = better).
        this.searchFtsStmt = this.db.prepare(`
      SELECT file_path, rank
      FROM files_fts
      WHERE artifact_id = ? AND files_fts MATCH ?
      ORDER BY rank
      LIMIT ?
    `);
    }
    // Removes all file rows and FTS rows for one artifact. Not transactional
    // by itself; replaceFilesForArtifact/deleteFilesForArtifact wrap it.
    clearFilesForArtifact(artifactId) {
        this.deleteStmt.run([artifactId]);
        this.deleteFtsStmt.run([artifactId]);
    }
    // Inserts files plus their FTS mirror rows. A missing contentHash is
    // computed here as sha256 of the content.
    insertFilesForArtifact(artifactId, files) {
        for (const file of files) {
            const contentHash = file.contentHash || createHash("sha256").update(file.content).digest("hex");
            this.insertFilesStmt.run([
                artifactId,
                file.filePath,
                file.content,
                file.contentBytes,
                contentHash
            ]);
            this.insertFtsStmt.run([artifactId, file.filePath, file.content]);
        }
    }
    // Atomically replaces every file of the artifact (delete + insert in one
    // transaction).
    replaceFilesForArtifact(artifactId, files) {
        const transaction = this.db.transaction(() => {
            this.clearFilesForArtifact(artifactId);
            this.insertFilesForArtifact(artifactId, files);
        });
        transaction();
    }
    // Atomically removes every file (and FTS row) of the artifact.
    deleteFilesForArtifact(artifactId) {
        const transaction = this.db.transaction(() => {
            this.clearFilesForArtifact(artifactId);
        });
        transaction();
    }
    // Fetches one file by exact path; undefined when absent.
    getFileContent(artifactId, filePath) {
        const row = this.getContentStmt.get([artifactId, filePath]);
        if (!row) {
            return undefined;
        }
        return {
            artifactId: row.artifact_id,
            filePath: row.file_path,
            content: row.content,
            contentBytes: row.content_bytes,
            contentHash: row.content_hash
        };
    }
    // Cursor-paginated listing of file paths, optionally filtered by prefix.
    // An absent/invalid cursor restarts from "" (before every path).
    listFiles(artifactId, options) {
        const cursor = parseCursor(options.cursor);
        const rows = this.listStmt.all(artifactId, cursor?.sortKey ?? "", options.prefix ?? null, options.prefix ?? "", Math.max(1, options.limit));
        return {
            items: rows.map((row) => row.file_path),
            nextCursor: nextCursorFromRows(rows)
        };
    }
    // Same pagination as listFiles but returns full rows (content included).
    listFileRows(artifactId, options) {
        const cursor = parseCursor(options.cursor);
        const rows = this.listRowsStmt.all(artifactId, cursor?.sortKey ?? "", options.prefix ?? null, options.prefix ?? "", Math.max(1, options.limit));
        return {
            items: rows.map((row) => ({
                artifactId: row.artifact_id,
                filePath: row.file_path,
                content: row.content,
                contentBytes: row.content_bytes,
                contentHash: row.content_hash
            })),
            nextCursor: nextCursorFromRows(rows)
        };
    }
    // Batch fetch by exact paths. Duplicates are collapsed, result order
    // follows the (deduped) input order, and missing paths are silently
    // dropped rather than reported.
    getFileContentsByPaths(artifactId, filePaths) {
        if (filePaths.length === 0) {
            return [];
        }
        const uniquePaths = [...new Set(filePaths)];
        const stmt = this.getFileContentsByPathsStmt(uniquePaths.length);
        const rows = stmt.all(artifactId, ...uniquePaths);
        const byPath = new Map(rows.map((row) => [
            row.file_path,
            {
                artifactId: row.artifact_id,
                filePath: row.file_path,
                content: row.content,
                contentBytes: row.content_bytes,
                contentHash: row.content_hash
            }
        ]));
        return uniquePaths.map((path) => byPath.get(path)).filter((row) => row != null);
    }
    // Core search: merges a LIKE scan over file paths with an FTS MATCH over
    // contents, scores hits into tiers (both=140+, path=120, content=100+),
    // and paginates with a (score, filePath) cursor. The cursor's score is
    // used to skip whole query tiers in SQL where possible.
    searchFileCandidates(artifactId, options) {
        const normalized = options.query.trim();
        if (!normalized) {
            return { items: [], nextCursor: undefined, scannedRows: 0, dbRoundtrips: 0 };
        }
        const cursor = parseSearchCursor(options.cursor);
        const likeQuery = `%${normalized}%`;
        const mode = options.mode ?? "mixed";
        // Cursor-adaptive fetch limit: when no cursor, use a generous limit;
        // with cursor + SQL pushdown, we need far fewer rows.
        const baseFetchLimit = options.fetchLimitOverride
            ?? (cursor ? Math.max(options.limit * 3, 50) : Math.max(options.limit * 5, 200));
        const fetchLimit = baseFetchLimit;
        // When cursor score is below all possible bands, skip both queries
        const cursorExhausted = cursor != null && cursor.score < 100;
        // Skip path query entirely when cursor is within the content-only tier
        const cursorPastPath = cursor != null && cursor.score < 120;
        const includePath = mode !== "text" && !cursorExhausted && !cursorPastPath;
        const includeContent = mode !== "path" && !cursorExhausted;
        // Path query: push cursor into SQL when cursor.score == 120 (within path tier)
        let pathRows;
        if (includePath && cursor && cursor.score === 120) {
            // Cursor is within the path tier — only fetch paths after cursor.filePath
            pathRows = this.db.prepare(`
        SELECT file_path
        FROM files
        WHERE artifact_id = ? AND file_path LIKE ? ESCAPE '\\' AND file_path > ?
        ORDER BY file_path ASC
        LIMIT ?
      `).all(artifactId, likeQuery, cursor.filePath, fetchLimit);
        }
        else if (includePath) {
            pathRows = this.searchPathStmt.all(artifactId, likeQuery, fetchLimit);
        }
        else {
            pathRows = [];
        }
        // Seed the merged result set with path-tier hits (score 120).
        const merged = pathRows.map((row) => ({
            filePath: row.file_path,
            score: 120,
            matchedIn: "path"
        }));
        const mergedByPath = new Map(merged.map((hit) => [hit.filePath, hit]));
        let contentRows = [];
        if (includeContent) {
            try {
                contentRows = this.searchFtsStmt.all(artifactId, normalized, fetchLimit);
            }
            catch (error) {
                // FTS5 rejects some raw user queries as MATCH syntax errors; treat
                // that specific failure as "no content hits" and keep path results.
                const message = error instanceof Error ? error.message : String(error);
                if (!/fts5:\s*syntax error/i.test(message)) {
                    throw error;
                }
                log("warn", "storage.files.fts_syntax_error", {
                    artifactId,
                    query: normalized,
                    message
                });
            }
        }
        for (const row of contentRows) {
            // BM25 rank is negative (lower = better). Clamp -rank to [0, 19] for sub-tier scoring.
            const rankBonus = Math.min(19, Math.max(0, Math.round(-row.rank)));
            const existing = mergedByPath.get(row.file_path);
            if (existing) {
                // Hit in both path and content: promote to the top tier.
                existing.matchedIn = "both";
                existing.score = 140 + rankBonus;
                continue;
            }
            merged.push({
                filePath: row.file_path,
                score: 100 + rankBonus,
                matchedIn: "content"
            });
            mergedByPath.set(row.file_path, merged[merged.length - 1]);
        }
        const ordered = merged.sort(compareSearchOrdering);
        // Safety-net: still apply cursor filter for any rows that slipped through
        const filtered = cursor ? ordered.filter((hit) => isAfterSearchCursor(hit, cursor)) : ordered;
        const page = filtered.slice(0, options.limit);
        const hasMore = filtered.length > page.length;
        const nextCursor = hasMore && page.length > 0
            ? buildSearchCursor(page[page.length - 1].score, page[page.length - 1].filePath)
            : undefined;
        return {
            items: page,
            nextCursor,
            scannedRows: pathRows.length + contentRows.length,
            dbRoundtrips: (includePath ? 1 : 0) + (includeContent ? 1 : 0)
        };
    }
    // Search that also hydrates full file content and a preview snippet for
    // each hit. Hits whose content row vanished between the two queries are
    // dropped.
    searchFilesWithContent(artifactId, options) {
        const page = this.searchFileCandidates(artifactId, options);
        if (page.items.length === 0) {
            return {
                items: [],
                nextCursor: page.nextCursor,
                scannedRows: page.scannedRows,
                dbRoundtrips: page.dbRoundtrips
            };
        }
        const rows = this.getFileContentsByPaths(artifactId, page.items.map((item) => item.filePath));
        const byPath = new Map(rows.map((row) => [row.filePath, row]));
        return {
            items: page.items
                .map((item) => {
                const contentRow = byPath.get(item.filePath);
                if (!contentRow) {
                    return undefined;
                }
                return {
                    filePath: item.filePath,
                    score: item.score,
                    matchedIn: item.matchedIn,
                    preview: buildPreview(contentRow.content, options.query),
                    content: contentRow.content
                };
            })
                .filter((row) => row != null),
            nextCursor: page.nextCursor,
            scannedRows: page.scannedRows + rows.length,
            dbRoundtrips: page.dbRoundtrips + 1
        };
    }
    // Counts FTS content matches for a query. Any failure (including FTS
    // syntax errors) is logged and reported as 0 rather than thrown.
    countTextCandidates(artifactId, query) {
        const normalized = query.trim();
        if (!normalized) {
            return 0;
        }
        try {
            const row = this.db.prepare(`SELECT COUNT(*) AS cnt FROM files_fts WHERE artifact_id = ? AND files_fts MATCH ?`).get(artifactId, normalized);
            return row?.cnt ?? 0;
        }
        catch {
            log("warn", "storage.files.count_text_candidates_failed", {
                artifactId,
                query: normalized
            });
            return 0;
        }
    }
    // Counts files whose path contains the query as a substring (LIKE scan).
    countPathCandidates(artifactId, query) {
        const normalized = query.trim();
        if (!normalized) {
            return 0;
        }
        const likeQuery = `%${normalized}%`;
        const row = this.db.prepare(`SELECT COUNT(*) AS cnt FROM files WHERE artifact_id = ? AND file_path LIKE ? ESCAPE '\\'`).get(artifactId, likeQuery);
        return row?.cnt ?? 0;
    }
    // Finds the lexicographically first path equal to the name or ending in
    // "/<name>". NOTE(review): the name is interpolated into the LIKE pattern
    // unescaped, so '%'/'_' inside fileName act as wildcards — confirm intended.
    findFirstFilePathByName(artifactId, fileName) {
        const normalized = fileName.trim();
        if (!normalized) {
            return undefined;
        }
        const row = this.db
            .prepare(`
        SELECT file_path
        FROM files
        WHERE artifact_id = ?
          AND (file_path = ? OR file_path LIKE ? ESCAPE '\\')
        ORDER BY file_path ASC
        LIMIT 1
      `)
            .get(artifactId, normalized, `%/${normalized}`);
        return row?.file_path;
    }
    // Public search entry point that strips the full content field from each
    // hit, keeping only path/score/matchedIn/preview.
    searchFiles(artifactId, options) {
        const page = this.searchFilesWithContent(artifactId, options);
        return {
            items: page.items.map(({ content: _content, ...rest }) => rest),
            nextCursor: page.nextCursor
        };
    }
    // Sum of content_bytes across all artifacts (cache size accounting).
    totalContentBytes() {
        const row = this.db
            .prepare(`
        SELECT COALESCE(SUM(content_bytes), 0) AS total
        FROM files
      `)
            .get();
        return row?.total ?? 0;
    }
    // Sum of content_bytes for a single artifact.
    contentBytesForArtifact(artifactId) {
        const row = this.db
            .prepare(`
        SELECT COALESCE(SUM(content_bytes), 0) AS total
        FROM files
        WHERE artifact_id = ?
      `)
            .get([artifactId]);
        return row?.total ?? 0;
    }
    // Returns a prepared "IN (?, ?, ...)" statement with exactly pathCount
    // placeholders, caching by count. When the cache reaches 64 entries it is
    // wholesale cleared (simple bound instead of LRU eviction).
    getFileContentsByPathsStmt(pathCount) {
        const normalizedCount = Math.max(1, Math.trunc(pathCount));
        const cached = this.getByPathsStmtCache.get(normalizedCount);
        if (cached) {
            return cached;
        }
        if (this.getByPathsStmtCache.size >= 64) {
            this.getByPathsStmtCache.clear();
        }
        const placeholders = Array.from({ length: normalizedCount }, () => "?").join(", ");
        const stmt = this.db.prepare(`
      SELECT artifact_id, file_path, content, content_bytes, content_hash
      FROM files
      WHERE artifact_id = ? AND file_path IN (${placeholders})
    `);
        this.getByPathsStmtCache.set(normalizedCount, stmt);
        return stmt;
    }
}
|
|
437
|
+
//# sourceMappingURL=files-repo.js.map
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import Database from "./sqlite.js";
type SqliteDatabase = InstanceType<typeof Database>;
/**
 * One artifact's indexing bookkeeping, as read back from the
 * `artifact_index_meta` table (camelCase view of the snake_case columns).
 */
export interface ArtifactIndexMetaRow {
    artifactId: string;
    /** Signature of the indexed artifact — presumably used to detect staleness; confirm against callers. */
    artifactSignature: string;
    indexSchemaVersion: number;
    filesCount: number;
    symbolsCount: number;
    ftsRowsCount: number;
    /** Timestamp string recorded at index time. */
    indexedAt: string;
    indexDurationMs: number;
    /** Last indexing error, if any; absent when the column is NULL. */
    lastError?: string;
}
/**
 * Input for upserting an artifact's index metadata. Field-for-field mirror of
 * ArtifactIndexMetaRow.
 */
export interface UpsertArtifactIndexMetaInput {
    artifactId: string;
    artifactSignature: string;
    indexSchemaVersion: number;
    filesCount: number;
    symbolsCount: number;
    ftsRowsCount: number;
    indexedAt: string;
    indexDurationMs: number;
    lastError?: string;
}
/**
 * Repository over the `artifact_index_meta` table: get/upsert/delete one
 * metadata row per artifact. Statements are prepared once in the constructor.
 */
export declare class IndexMetaRepo {
    private readonly db;
    private readonly getStmt;
    private readonly upsertStmt;
    private readonly deleteStmt;
    constructor(db: SqliteDatabase);
    /** Returns the metadata row for the artifact, or undefined when absent. */
    get(artifactId: string): ArtifactIndexMetaRow | undefined;
    /** Inserts or replaces the metadata row for input.artifactId. */
    upsert(input: UpsertArtifactIndexMetaInput): void;
    /** Removes the metadata row; no-op when absent. */
    delete(artifactId: string): void;
}
export {};
|
|
@@ -0,0 +1,97 @@
|
|
|
1
|
+
/**
 * Maps a raw snake_case `artifact_index_meta` record to the camelCase
 * ArtifactIndexMetaRow shape. A NULL last_error column becomes undefined.
 */
function toRow(record) {
    const {
        artifact_id,
        artifact_signature,
        index_schema_version,
        files_count,
        symbols_count,
        fts_rows_count,
        indexed_at,
        index_duration_ms,
        last_error
    } = record;
    return {
        artifactId: artifact_id,
        artifactSignature: artifact_signature,
        indexSchemaVersion: index_schema_version,
        filesCount: files_count,
        symbolsCount: symbols_count,
        ftsRowsCount: fts_rows_count,
        indexedAt: indexed_at,
        indexDurationMs: index_duration_ms,
        lastError: last_error ?? undefined
    };
}
|
|
14
|
+
/**
 * Repository over the `artifact_index_meta` table: one bookkeeping row per
 * indexed artifact (counts, timing, signature, last error). All statements
 * are prepared once in the constructor; the upsert uses named (@param)
 * bindings, the others positional arrays.
 */
export class IndexMetaRepo {
    // Database handle providing prepare(); injected by the caller.
    db;
    getStmt;
    upsertStmt;
    deleteStmt;
    constructor(db) {
        this.db = db;
        this.getStmt = this.db.prepare(`
      SELECT
        artifact_id,
        artifact_signature,
        index_schema_version,
        files_count,
        symbols_count,
        fts_rows_count,
        indexed_at,
        index_duration_ms,
        last_error
      FROM artifact_index_meta
      WHERE artifact_id = ?
      LIMIT 1
    `);
        // INSERT ... ON CONFLICT upsert keyed on artifact_id; every non-key
        // column is overwritten from the excluded (incoming) row.
        this.upsertStmt = this.db.prepare(`
      INSERT INTO artifact_index_meta (
        artifact_id,
        artifact_signature,
        index_schema_version,
        files_count,
        symbols_count,
        fts_rows_count,
        indexed_at,
        index_duration_ms,
        last_error
      ) VALUES (
        @artifact_id,
        @artifact_signature,
        @index_schema_version,
        @files_count,
        @symbols_count,
        @fts_rows_count,
        @indexed_at,
        @index_duration_ms,
        @last_error
      )
      ON CONFLICT(artifact_id) DO UPDATE SET
        artifact_signature = excluded.artifact_signature,
        index_schema_version = excluded.index_schema_version,
        files_count = excluded.files_count,
        symbols_count = excluded.symbols_count,
        fts_rows_count = excluded.fts_rows_count,
        indexed_at = excluded.indexed_at,
        index_duration_ms = excluded.index_duration_ms,
        last_error = excluded.last_error
    `);
        this.deleteStmt = this.db.prepare(`
      DELETE FROM artifact_index_meta
      WHERE artifact_id = ?
    `);
    }
    // Returns the camelCase metadata row for the artifact, or undefined.
    get(artifactId) {
        const row = this.getStmt.get([artifactId]);
        if (!row) {
            return undefined;
        }
        return toRow(row);
    }
    // Inserts or replaces the row for input.artifactId. The duration is
    // clamped to a non-negative integer; a missing lastError is stored as NULL.
    upsert(input) {
        this.upsertStmt.run({
            artifact_id: input.artifactId,
            artifact_signature: input.artifactSignature,
            index_schema_version: input.indexSchemaVersion,
            files_count: input.filesCount,
            symbols_count: input.symbolsCount,
            fts_rows_count: input.ftsRowsCount,
            indexed_at: input.indexedAt,
            index_duration_ms: Math.max(0, Math.trunc(input.indexDurationMs)),
            last_error: input.lastError ?? null
        });
    }
    // Removes the artifact's row; no-op when it does not exist.
    delete(artifactId) {
        this.deleteStmt.run([artifactId]);
    }
}
|
|
97
|
+
//# sourceMappingURL=index-meta-repo.js.map
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
/**
 * Minimal structural view of a database handle that runMigrations needs:
 * statement preparation (run/get/all) plus a transaction wrapper that returns
 * a callable. Matches the better-sqlite3-style handle used elsewhere in this
 * package without importing it.
 */
type MigrationRunner = {
    prepare: (sql: string) => {
        run: (...params: unknown[]) => unknown;
        get: (...params: unknown[]) => unknown;
        all: (...params: unknown[]) => unknown[];
    };
    transaction<T>(fn: () => T): () => T;
};
/** Highest schema version this build knows how to create/migrate to. */
export declare const LATEST_SCHEMA_VERSION = 1;
/**
 * Applies all pending migrations inside a single transaction and returns the
 * resulting schema version.
 */
export declare function runMigrations(db: MigrationRunner): number;
export {};
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import { createError, ERROR_CODES } from "../errors.js";
|
|
2
|
+
import { SCHEMA_V1_STATEMENTS } from "./schema.js";
|
|
3
|
+
// Highest schema version this build can create or migrate to. A stored
// version above this is rejected by assertSchemaVersionSupported.
export const LATEST_SCHEMA_VERSION = 1;
// Ordered migration list; each entry carries its target version and the SQL
// statements that bring the database up to that version. runMigrations skips
// entries at or below the currently stored version.
const migrations = [
    {
        version: 1,
        statements: SCHEMA_V1_STATEMENTS
    }
];
|
|
10
|
+
/**
 * Reads the stored schema version from cache_meta, creating the table first
 * so the lookup always succeeds. Returns 0 when no version has been recorded.
 * Throws a DB_FAILURE error when the stored value is not parseable as an int.
 */
function selectSchemaVersion(tx) {
    // Bootstrap the key/value metadata table so the SELECT below cannot fail
    // on a fresh database.
    tx.prepare(`CREATE TABLE IF NOT EXISTS cache_meta (
    key TEXT PRIMARY KEY,
    value TEXT NOT NULL
  )`).run();
    const versionRow = tx.prepare(`SELECT value FROM cache_meta WHERE key = ?`).get(["schema_version"]);
    const rawValue = versionRow?.value;
    if (!rawValue) {
        // No row (or empty value) means a brand-new database.
        return 0;
    }
    const numericVersion = Number.parseInt(rawValue, 10);
    if (Number.isFinite(numericVersion)) {
        return numericVersion;
    }
    throw createError({
        code: ERROR_CODES.DB_FAILURE,
        message: `Invalid SQLite schema version '${rawValue}'.`,
        details: {
            reason: "schema_version_invalid",
            schemaVersion: rawValue
        }
    });
}
|
|
32
|
+
/**
 * Guards against opening a database written by a newer build: throws a
 * DB_FAILURE error when the stored version exceeds LATEST_SCHEMA_VERSION.
 */
function assertSchemaVersionSupported(version) {
    // Negated comparison (rather than version <= LATEST) preserves the
    // original's behavior for non-numeric inputs.
    if (!(version > LATEST_SCHEMA_VERSION)) {
        return;
    }
    throw createError({
        code: ERROR_CODES.DB_FAILURE,
        message: `SQLite schema version ${version} exceeds supported version ${LATEST_SCHEMA_VERSION}.`,
        details: {
            reason: "schema_version_unsupported",
            schemaVersion: version,
            latestSchemaVersion: LATEST_SCHEMA_VERSION
        }
    });
}
|
|
45
|
+
function setSchemaVersion(tx, version) {
|
|
46
|
+
tx.prepare(`
|
|
47
|
+
INSERT INTO cache_meta(key, value)
|
|
48
|
+
VALUES (?, ?)
|
|
49
|
+
ON CONFLICT(key) DO UPDATE SET value = excluded.value
|
|
50
|
+
`).run(["schema_version", String(version)]);
|
|
51
|
+
}
|
|
52
|
+
/**
 * Applies every migration newer than the stored schema version, all inside a
 * single transaction, then returns the resulting version number.
 * Rejects databases whose stored version is newer than this build supports.
 */
export function runMigrations(db) {
    const applyPending = db.transaction(() => {
        const startVersion = selectSchemaVersion(db);
        assertSchemaVersionSupported(startVersion);
        let version = startVersion;
        for (const migration of migrations) {
            // Already applied (or superseded) — nothing to do for this entry.
            if (migration.version <= version) {
                continue;
            }
            for (const statement of migration.statements) {
                db.prepare(statement).run();
            }
            // Persist progress after each migration so the stored version
            // always reflects the statements that have run.
            setSchemaVersion(db, migration.version);
            version = migration.version;
        }
        return version;
    });
    return applyPending();
}
|
|
71
|
+
//# sourceMappingURL=migrations.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
/**
 * Ordered DDL statements for schema version 1; executed one by one by
 * runMigrations (each is prepared and run against the database).
 */
export declare const SCHEMA_V1_STATEMENTS: string[];
|