@adhisang/minecraft-modding-mcp 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +11 -0
- package/LICENSE +21 -0
- package/README.md +765 -0
- package/dist/access-widener-parser.d.ts +24 -0
- package/dist/access-widener-parser.js +77 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +4 -0
- package/dist/config.d.ts +27 -0
- package/dist/config.js +178 -0
- package/dist/decompiler/vineflower.d.ts +15 -0
- package/dist/decompiler/vineflower.js +185 -0
- package/dist/errors.d.ts +50 -0
- package/dist/errors.js +49 -0
- package/dist/hash.d.ts +1 -0
- package/dist/hash.js +12 -0
- package/dist/index.d.ts +7 -0
- package/dist/index.js +1447 -0
- package/dist/java-process.d.ts +16 -0
- package/dist/java-process.js +120 -0
- package/dist/logger.d.ts +3 -0
- package/dist/logger.js +21 -0
- package/dist/mapping-pipeline-service.d.ts +18 -0
- package/dist/mapping-pipeline-service.js +60 -0
- package/dist/mapping-service.d.ts +161 -0
- package/dist/mapping-service.js +1706 -0
- package/dist/maven-resolver.d.ts +22 -0
- package/dist/maven-resolver.js +122 -0
- package/dist/minecraft-explorer-service.d.ts +43 -0
- package/dist/minecraft-explorer-service.js +562 -0
- package/dist/mixin-parser.d.ts +34 -0
- package/dist/mixin-parser.js +194 -0
- package/dist/mixin-validator.d.ts +59 -0
- package/dist/mixin-validator.js +274 -0
- package/dist/mod-analyzer.d.ts +23 -0
- package/dist/mod-analyzer.js +346 -0
- package/dist/mod-decompile-service.d.ts +39 -0
- package/dist/mod-decompile-service.js +136 -0
- package/dist/mod-remap-service.d.ts +17 -0
- package/dist/mod-remap-service.js +186 -0
- package/dist/mod-search-service.d.ts +28 -0
- package/dist/mod-search-service.js +174 -0
- package/dist/mojang-tiny-mapping-service.d.ts +13 -0
- package/dist/mojang-tiny-mapping-service.js +351 -0
- package/dist/nbt/java-nbt-codec.d.ts +3 -0
- package/dist/nbt/java-nbt-codec.js +385 -0
- package/dist/nbt/json-patch.d.ts +3 -0
- package/dist/nbt/json-patch.js +352 -0
- package/dist/nbt/pipeline.d.ts +39 -0
- package/dist/nbt/pipeline.js +173 -0
- package/dist/nbt/typed-json.d.ts +10 -0
- package/dist/nbt/typed-json.js +205 -0
- package/dist/nbt/types.d.ts +66 -0
- package/dist/nbt/types.js +2 -0
- package/dist/observability.d.ts +88 -0
- package/dist/observability.js +165 -0
- package/dist/path-converter.d.ts +12 -0
- package/dist/path-converter.js +161 -0
- package/dist/path-resolver.d.ts +19 -0
- package/dist/path-resolver.js +78 -0
- package/dist/registry-service.d.ts +29 -0
- package/dist/registry-service.js +214 -0
- package/dist/repo-downloader.d.ts +15 -0
- package/dist/repo-downloader.js +111 -0
- package/dist/resources.d.ts +3 -0
- package/dist/resources.js +154 -0
- package/dist/search-hit-accumulator.d.ts +38 -0
- package/dist/search-hit-accumulator.js +153 -0
- package/dist/source-jar-reader.d.ts +13 -0
- package/dist/source-jar-reader.js +216 -0
- package/dist/source-resolver.d.ts +14 -0
- package/dist/source-resolver.js +274 -0
- package/dist/source-service.d.ts +404 -0
- package/dist/source-service.js +2881 -0
- package/dist/storage/artifacts-repo.d.ts +45 -0
- package/dist/storage/artifacts-repo.js +209 -0
- package/dist/storage/db.d.ts +14 -0
- package/dist/storage/db.js +132 -0
- package/dist/storage/files-repo.d.ts +78 -0
- package/dist/storage/files-repo.js +437 -0
- package/dist/storage/index-meta-repo.d.ts +35 -0
- package/dist/storage/index-meta-repo.js +97 -0
- package/dist/storage/migrations.d.ts +11 -0
- package/dist/storage/migrations.js +71 -0
- package/dist/storage/schema.d.ts +1 -0
- package/dist/storage/schema.js +160 -0
- package/dist/storage/sqlite.d.ts +20 -0
- package/dist/storage/sqlite.js +111 -0
- package/dist/storage/symbols-repo.d.ts +63 -0
- package/dist/storage/symbols-repo.js +401 -0
- package/dist/symbols/symbol-extractor.d.ts +7 -0
- package/dist/symbols/symbol-extractor.js +64 -0
- package/dist/tiny-remapper-resolver.d.ts +1 -0
- package/dist/tiny-remapper-resolver.js +62 -0
- package/dist/tiny-remapper-service.d.ts +16 -0
- package/dist/tiny-remapper-service.js +73 -0
- package/dist/types.d.ts +120 -0
- package/dist/types.js +2 -0
- package/dist/version-diff-service.d.ts +41 -0
- package/dist/version-diff-service.js +222 -0
- package/dist/version-service.d.ts +70 -0
- package/dist/version-service.js +411 -0
- package/dist/vineflower-resolver.d.ts +1 -0
- package/dist/vineflower-resolver.js +62 -0
- package/dist/workspace-mapping-service.d.ts +18 -0
- package/dist/workspace-mapping-service.js +89 -0
- package/package.json +61 -0
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import Database from "./sqlite.js";
|
|
2
|
+
import type { ArtifactProvenance, ArtifactRow, SourceMapping, SourceOrigin } from "../types.js";
|
|
3
|
+
type SqliteDatabase = InstanceType<typeof Database>;
|
|
4
|
+
/**
 * Input payload for {@link ArtifactsRepo.upsertArtifact}.
 * Optional fields map to nullable SQL columns; omitted values are stored as NULL.
 */
interface UpsertArtifactInput {
    /** Primary key of the artifacts table. */
    artifactId: string;
    /** Where the artifact came from (project-declared union, see ../types.js). */
    origin: SourceOrigin;
    /** Maven-style coordinate, when known — TODO confirm exact format against callers. */
    coordinate?: string;
    version?: string;
    binaryJarPath?: string;
    sourceJarPath?: string;
    repoUrl?: string;
    /** Mapping the caller asked for vs. the mapping actually applied. */
    requestedMapping?: SourceMapping;
    mappingApplied?: SourceMapping;
    /** Serialized to JSON into provenance_json on write. */
    provenance?: ArtifactProvenance;
    /** Serialized to JSON into quality_flags_json on write. */
    qualityFlags?: string[];
    artifactSignature?: string;
    /** Stored as INTEGER 0/1 in SQLite. */
    isDecompiled: boolean;
    /** Used for both created_at and updated_at on insert (created_at survives upserts). */
    timestamp: string;
}
|
|
20
|
+
/**
 * LRU listing row pairing an artifact with its total indexed content size.
 * Artifacts without a size row report 0 (LEFT JOIN + COALESCE in the query).
 */
export interface ArtifactContentBytesRow {
    artifactId: string;
    /** Sum of content bytes recorded in artifact_content_bytes; 0 when absent. */
    totalContentBytes: number;
    /** Last-touched timestamp; ascending order of this field is the LRU order. */
    updatedAt: string;
}
|
|
25
|
+
/**
 * Repository over the SQLite `artifacts` table.
 * All statements are prepared once in the constructor and reused.
 * "LRU" methods order rows by ascending updated_at (oldest first).
 */
export declare class ArtifactsRepo {
    private readonly db;
    private readonly upsertStmt;
    private readonly getStmt;
    private readonly touchStmt;
    private readonly deleteStmt;
    private readonly listStmt;
    private readonly countStmt;
    private readonly totalContentBytesStmt;
    private readonly listLruWithContentBytesStmt;
    constructor(db: SqliteDatabase);
    /** Insert or refresh a row keyed by artifactId (created_at preserved on update). */
    upsertArtifact(input: UpsertArtifactInput): void;
    /** Returns undefined when no row exists. */
    getArtifact(artifactId: string): ArtifactRow | undefined;
    /** Bumps updated_at so the artifact moves to the back of the LRU order. */
    touchArtifact(artifactId: string, timestamp: string): void;
    deleteArtifact(artifactId: string): void;
    /** Oldest-first listing, capped at `limit`. */
    listArtifactsByLru(limit: number): ArtifactRow[];
    countArtifacts(): number;
    /** Sum over artifact_content_bytes; 0 when the table is empty. */
    totalContentBytes(): number;
    /** Oldest-first listing joined with per-artifact content sizes (missing sizes read as 0). */
    listArtifactsByLruWithContentBytes(limit: number): ArtifactContentBytesRow[];
}
export {};
|
|
@@ -0,0 +1,209 @@
|
|
|
1
|
+
import { log } from "../logger.js";
|
|
2
|
+
/**
 * Deserialize the provenance_json column.
 * Returns undefined for NULL/empty input or malformed JSON; malformed input is
 * logged (truncated to 200 chars) instead of throwing, so a bad row cannot
 * break reads.
 */
function parseProvenance(value) {
    if (!value) return undefined;
    try {
        return JSON.parse(value);
    }
    catch {
        log("warn", "storage.artifacts.invalid_provenance_json", {
            value: value.slice(0, 200)
        });
        return undefined;
    }
}
|
|
17
|
+
/**
 * Deserialize the quality_flags_json column into a string array.
 * Non-array JSON, malformed JSON, and NULL/empty input all normalize to [];
 * non-string array entries are dropped. Malformed input is logged (truncated).
 */
function parseQualityFlags(value) {
    if (!value) return [];
    try {
        const parsed = JSON.parse(value);
        return Array.isArray(parsed)
            ? parsed.filter((entry) => typeof entry === "string")
            : [];
    }
    catch {
        log("warn", "storage.artifacts.invalid_quality_flags_json", {
            value: value.slice(0, 200)
        });
        return [];
    }
}
|
|
35
|
+
/**
 * Map a raw snake_case `artifacts` row to the camelCase ArtifactRow shape.
 * SQL NULLs become undefined, JSON columns are deserialized via the tolerant
 * parsers above, and the integer is_decompiled flag becomes a boolean.
 */
function toArtifactRow(record) {
    const orUndefined = (value) => value ?? undefined;
    return {
        artifactId: record.artifact_id,
        origin: record.origin,
        coordinate: orUndefined(record.coordinate),
        version: orUndefined(record.version),
        binaryJarPath: orUndefined(record.binary_jar_path),
        sourceJarPath: orUndefined(record.source_jar_path),
        repoUrl: orUndefined(record.repo_url),
        requestedMapping: orUndefined(record.requested_mapping),
        mappingApplied: orUndefined(record.mapping_applied),
        provenance: parseProvenance(record.provenance_json),
        qualityFlags: parseQualityFlags(record.quality_flags_json),
        artifactSignature: orUndefined(record.artifact_signature),
        isDecompiled: record.is_decompiled === 1,
        createdAt: record.created_at,
        updatedAt: record.updated_at
    };
}
|
|
54
|
+
/** Encode a boolean as the SQLite INTEGER convention (1 = true, 0 = false). */
function toBooleanValue(isDecompiled) {
    if (isDecompiled) {
        return 1;
    }
    return 0;
}
|
|
57
|
+
/**
 * SQLite-backed repository for the `artifacts` table.
 * Every statement is prepared once here and reused for the repo's lifetime.
 * LRU semantics throughout: rows are ordered by ascending updated_at, and
 * touchArtifact bumps updated_at to mark a row as recently used.
 */
export class ArtifactsRepo {
    // Database handle (better-sqlite3-style API via ./sqlite.js wrapper).
    db;
    // Prepared statements, compiled in the constructor.
    upsertStmt;
    getStmt;
    touchStmt;
    deleteStmt;
    listStmt;
    countStmt;
    totalContentBytesStmt;
    listLruWithContentBytesStmt;
    constructor(db) {
        this.db = db;
        // Insert-or-update keyed by artifact_id. Note that created_at is NOT in
        // the DO UPDATE SET list, so the original creation timestamp survives upserts.
        this.upsertStmt = this.db.prepare(`
            INSERT INTO artifacts (
                artifact_id, origin, coordinate, version, binary_jar_path, source_jar_path, repo_url, requested_mapping, mapping_applied, provenance_json, quality_flags_json, artifact_signature, is_decompiled, created_at, updated_at
            ) VALUES (
                @artifact_id, @origin, @coordinate, @version, @binary_jar_path, @source_jar_path, @repo_url, @requested_mapping, @mapping_applied, @provenance_json, @quality_flags_json, @artifact_signature, @is_decompiled, @created_at, @updated_at
            )
            ON CONFLICT(artifact_id) DO UPDATE SET
                origin = excluded.origin,
                coordinate = excluded.coordinate,
                version = excluded.version,
                binary_jar_path = excluded.binary_jar_path,
                source_jar_path = excluded.source_jar_path,
                repo_url = excluded.repo_url,
                requested_mapping = excluded.requested_mapping,
                mapping_applied = excluded.mapping_applied,
                provenance_json = excluded.provenance_json,
                quality_flags_json = excluded.quality_flags_json,
                artifact_signature = excluded.artifact_signature,
                is_decompiled = excluded.is_decompiled,
                updated_at = excluded.updated_at
        `);
        this.getStmt = this.db.prepare(`
            SELECT
                artifact_id,
                origin,
                coordinate,
                version,
                binary_jar_path,
                source_jar_path,
                repo_url,
                requested_mapping,
                mapping_applied,
                provenance_json,
                quality_flags_json,
                artifact_signature,
                is_decompiled,
                created_at,
                updated_at
            FROM artifacts
            WHERE artifact_id = ?
        `);
        // "Touch" bumps updated_at so the row moves to the back of the LRU order.
        this.touchStmt = this.db.prepare(`
            UPDATE artifacts
            SET updated_at = ?
            WHERE artifact_id = ?
        `);
        this.deleteStmt = this.db.prepare(`DELETE FROM artifacts WHERE artifact_id = ?`);
        // Oldest-first (least recently updated) listing.
        this.listStmt = this.db.prepare(`
            SELECT
                artifact_id,
                origin,
                coordinate,
                version,
                binary_jar_path,
                source_jar_path,
                repo_url,
                requested_mapping,
                mapping_applied,
                provenance_json,
                quality_flags_json,
                artifact_signature,
                is_decompiled,
                created_at,
                updated_at
            FROM artifacts
            ORDER BY updated_at ASC
            LIMIT ?
        `);
        this.countStmt = this.db.prepare(`
            SELECT COUNT(*) AS total
            FROM artifacts
        `);
        // COALESCE keeps the result 0 (not NULL) when the table is empty.
        this.totalContentBytesStmt = this.db.prepare(`
            SELECT COALESCE(SUM(total_content_bytes), 0) AS total
            FROM artifact_content_bytes
        `);
        // LEFT JOIN so artifacts with no size row still appear, reported as 0 bytes.
        this.listLruWithContentBytesStmt = this.db.prepare(`
            SELECT
                artifacts.artifact_id,
                COALESCE(artifact_content_bytes.total_content_bytes, 0) AS total_content_bytes,
                artifacts.updated_at
            FROM artifacts
            LEFT JOIN artifact_content_bytes
                ON artifact_content_bytes.artifact_id = artifacts.artifact_id
            ORDER BY artifacts.updated_at ASC
            LIMIT ?
        `);
    }
    /**
     * Insert a new artifact row or refresh an existing one.
     * Optional fields map to NULL; provenance/qualityFlags are JSON-serialized;
     * timestamp is written to both created_at and updated_at (created_at is
     * preserved by the upsert SQL on conflict).
     */
    upsertArtifact(input) {
        this.upsertStmt.run({
            artifact_id: input.artifactId,
            origin: input.origin,
            coordinate: input.coordinate ?? null,
            version: input.version ?? null,
            binary_jar_path: input.binaryJarPath ?? null,
            source_jar_path: input.sourceJarPath ?? null,
            repo_url: input.repoUrl ?? null,
            requested_mapping: input.requestedMapping ?? null,
            mapping_applied: input.mappingApplied ?? null,
            provenance_json: input.provenance ? JSON.stringify(input.provenance) : null,
            quality_flags_json: input.qualityFlags ? JSON.stringify(input.qualityFlags) : null,
            artifact_signature: input.artifactSignature ?? null,
            is_decompiled: toBooleanValue(input.isDecompiled),
            created_at: input.timestamp,
            updated_at: input.timestamp
        });
    }
    /** Fetch one artifact; undefined when no row matches. */
    getArtifact(artifactId) {
        const row = this.getStmt.get([artifactId]);
        if (!row) {
            return undefined;
        }
        return toArtifactRow(row);
    }
    /** Mark an artifact as recently used by rewriting updated_at. */
    touchArtifact(artifactId, timestamp) {
        this.touchStmt.run([timestamp, artifactId]);
    }
    deleteArtifact(artifactId) {
        this.deleteStmt.run([artifactId]);
    }
    // NOTE(review): unlike listArtifactsByLruWithContentBytes below, this does
    // not clamp/truncate `limit` — verify callers always pass a positive integer.
    listArtifactsByLru(limit) {
        return this.listStmt.all([limit]).map(toArtifactRow);
    }
    countArtifacts() {
        const row = this.countStmt.get();
        return row?.total ?? 0;
    }
    /** Total indexed content bytes across all artifacts (0 when none recorded). */
    totalContentBytes() {
        const row = this.totalContentBytesStmt.get();
        return row?.total ?? 0;
    }
    /** LRU listing with per-artifact sizes; limit is clamped to a positive integer. */
    listArtifactsByLruWithContentBytes(limit) {
        const rows = this.listLruWithContentBytesStmt.all([Math.max(1, Math.trunc(limit))]);
        return rows.map((row) => ({
            artifactId: row.artifact_id,
            totalContentBytes: row.total_content_bytes,
            updatedAt: row.updated_at
        }));
    }
}
|
|
209
|
+
//# sourceMappingURL=artifacts-repo.js.map
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import Database from "./sqlite.js";
|
|
2
|
+
import type { Config } from "../types.js";
|
|
3
|
+
/** Concrete instance type of the local SQLite wrapper (see ./sqlite.js). */
type SqliteDatabase = InstanceType<typeof Database>;
/** Minimal structured-logging surface openDatabase needs; a default is built from ../logger.js when omitted. */
type Logger = {
    warn: (message: string, details?: Record<string, unknown>) => void;
    info: (message: string, details?: Record<string, unknown>) => void;
    error: (message: string, details?: Record<string, unknown>) => void;
};
/** Result of opening/initializing the database: the live handle plus the migrated schema version. */
export interface InitializedDatabase {
    db: SqliteDatabase;
    /** Schema version reported by the migration runner after migrations complete. */
    schemaVersion: number;
}
/**
 * Open (or create) the SQLite database at config.sqlitePath, run migrations,
 * and verify integrity. On corruption the implementation backs up the bad file
 * and rebuilds — see db.js for the recovery flow.
 */
export declare function openDatabase(config: Config, logger?: Logger): InitializedDatabase;
export {};
|
|
@@ -0,0 +1,132 @@
|
|
|
1
|
+
import { existsSync, renameSync } from "node:fs";
|
|
2
|
+
import { dirname } from "node:path";
|
|
3
|
+
import { mkdirSync } from "node:fs";
|
|
4
|
+
import Database from "./sqlite.js";
|
|
5
|
+
import { runMigrations } from "./migrations.js";
|
|
6
|
+
import { createError, ERROR_CODES, isAppError } from "../errors.js";
|
|
7
|
+
import { log } from "../logger.js";
|
|
8
|
+
/** Create the containing directory of `path` (and any ancestors) if missing. */
function ensureParentDirectory(path) {
    const parent = dirname(path);
    mkdirSync(parent, { recursive: true });
}
|
|
11
|
+
/**
 * Run PRAGMA integrity_check and throw a DB_FAILURE error unless SQLite
 * reports the single-row "ok" result.
 */
function runIntegrityCheck(db) {
    const row = db.prepare("PRAGMA integrity_check").get();
    const healthy = Boolean(row) && row.integrity_check === "ok";
    if (healthy) {
        return;
    }
    throw createError({
        code: ERROR_CODES.DB_FAILURE,
        message: "SQLite integrity check failed.",
        details: { integrityCheck: row }
    });
}
|
|
21
|
+
/**
 * Move a corrupted database file aside to `<path>.corrupted.<epoch-ms>` and
 * return the backup location. The original path is then free for a rebuild.
 */
function backupCorruptedDb(sqlitePath) {
    const backupPath = [sqlitePath, "corrupted", String(Date.now())].join(".");
    renameSync(sqlitePath, backupPath);
    return backupPath;
}
|
|
26
|
+
/** True when nothing exists at `path` on disk. */
function isMissingPath(path) {
    return existsSync(path) === false;
}
|
|
29
|
+
/**
 * Close a database handle without letting a close failure mask the original
 * error. No-op when the handle was never opened.
 */
function safeCloseDatabase(db) {
    if (db) {
        try {
            db.close();
        }
        catch {
            // best-effort cleanup: swallow close failures
        }
    }
}
|
|
40
|
+
/**
 * True for DB_FAILURE app-errors whose details.reason marks a schema-version
 * problem — those must be rethrown rather than treated as corruption.
 */
function isSchemaVersionMismatchError(error) {
    if (!isAppError(error) || error.code !== ERROR_CODES.DB_FAILURE) {
        return false;
    }
    const reason = error.details?.reason;
    return reason === "schema_version_unsupported" || reason === "schema_version_invalid";
}
|
|
50
|
+
/**
 * Default Logger that forwards warn/info/error to the structured `log`
 * function under the events db.warn / db.info / db.error, merging the
 * message with any extra details.
 */
function buildDefaultLogger() {
    const emit = (level, event) => (message, details) => {
        log(level, event, {
            message,
            ...(details ?? {})
        });
    };
    return {
        warn: emit("warn", "db.warn"),
        info: emit("info", "db.info"),
        error: emit("error", "db.error")
    };
}
|
|
72
|
+
/**
 * Open (or create) the SQLite database at config.sqlitePath, apply pragmas,
 * run migrations, and verify integrity.
 *
 * Failure handling, in order:
 *  1. ERR_IO errors -> logged, rethrown as DB_FAILURE (path unrecoverable).
 *  2. Schema-version mismatch app-errors -> logged and rethrown unchanged.
 *  3. Anything else with an existing file -> treated as corruption: the file
 *     is renamed aside and a fresh database is built in its place.
 *  4. Otherwise -> generic DB_FAILURE.
 */
export function openDatabase(config, logger = buildDefaultLogger()) {
    let db;
    try {
        ensureParentDirectory(config.sqlitePath);
        db = new Database(config.sqlitePath);
        db.pragma("foreign_keys = ON");
        db.pragma("journal_mode = WAL");
        db.pragma("synchronous = NORMAL");
        db.pragma("busy_timeout = 5000");
        const schemaVersion = runMigrations(db);
        runIntegrityCheck(db);
        return { db, schemaVersion };
    }
    catch (caughtError) {
        // Release the half-open handle before any recovery/rethrow.
        safeCloseDatabase(db);
        const errorMessage = caughtError instanceof Error ? caughtError.message : String(caughtError);
        if (caughtError?.code === "ERR_IO") {
            logger.error("Failed to open SQLite database", {
                path: config.sqlitePath,
                reason: errorMessage
            });
            throw createError({
                code: ERROR_CODES.DB_FAILURE,
                message: `Failed to open SQLite database at ${config.sqlitePath}`,
                details: { sqlitePath: config.sqlitePath }
            });
        }
        if (isSchemaVersionMismatchError(caughtError)) {
            // Schema mismatches are deliberate signals from runMigrations; do not
            // treat them as corruption — rethrow as-is.
            logger.error("SQLite schema version mismatch", {
                path: config.sqlitePath,
                reason: errorMessage
            });
            throw caughtError;
        }
        if (!isMissingPath(config.sqlitePath)) {
            // Corruption-recovery path: move the bad file aside, then rebuild.
            const backupPath = backupCorruptedDb(config.sqlitePath);
            // NOTE(review): this warn is emitted before the rebuild actually
            // succeeds; a second failure below propagates raw (no try/catch).
            logger.warn("SQLite database integrity check failed. Recreated database after backup", {
                sqlitePath: config.sqlitePath,
                backupPath
            });
            const rebuilt = new Database(config.sqlitePath);
            rebuilt.pragma("foreign_keys = ON");
            rebuilt.pragma("journal_mode = WAL");
            rebuilt.pragma("synchronous = NORMAL");
            rebuilt.pragma("busy_timeout = 5000");
            const schemaVersion = runMigrations(rebuilt);
            runIntegrityCheck(rebuilt);
            return { db: rebuilt, schemaVersion };
        }
        logger.error("SQLite initialization failed", {
            path: config.sqlitePath,
            reason: errorMessage
        });
        throw createError({
            code: ERROR_CODES.DB_FAILURE,
            message: "Failed to initialize SQLite database.",
            details: { sqlitePath: config.sqlitePath }
        });
    }
}
|
|
132
|
+
//# sourceMappingURL=db.js.map
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
import Database from "./sqlite.js";
|
|
2
|
+
import type { FileRow, PagedResult } from "../types.js";
|
|
3
|
+
type SqliteDatabase = InstanceType<typeof Database>;
|
|
4
|
+
/** One file extracted from an artifact, ready to be written into the files index. */
export interface IndexedFile {
    filePath: string;
    content: string;
    /** Size of `content` — presumably bytes, not chars; confirm against the writer. */
    contentBytes: number;
    contentHash: string;
}
/** Cursor-based pagination options for listFiles. */
export interface ListFilesOptions {
    limit: number;
    /** Opaque continuation token from a previous page. */
    cursor?: string;
    /** Restrict results to paths starting with this prefix. */
    prefix?: string;
}
/** Options shared by the search entry points. */
export interface SearchFilesOptions {
    limit: number;
    query: string;
    cursor?: string;
    /** Where to match: file content ("text"), file path ("path"), or both ("mixed"). */
    mode?: "mixed" | "text" | "path";
    /** Override for the internal candidate-fetch batch size — TODO confirm semantics in files-repo.js. */
    fetchLimitOverride?: number;
}
/** A scored search hit with a content preview. */
export interface SearchFilesResult {
    filePath: string;
    score: number;
    matchedIn: "path" | "content" | "both";
    preview: string;
}
/** A scored candidate without preview/content (cheaper intermediate form). */
export interface SearchFileCandidateResult {
    filePath: string;
    score: number;
    matchedIn: "path" | "content" | "both";
}
/** A search hit that also carries the full file content. */
export interface SearchFilesWithContentResult extends SearchFilesResult {
    content: string;
}
/** Pagination options for listFileRows (same shape as ListFilesOptions). */
export interface ListFileRowsOptions {
    limit: number;
    cursor?: string;
    prefix?: string;
}
|
|
41
|
+
/**
 * Repository over the per-artifact files index, including FTS-backed search.
 * Search methods report `scannedRows`/`dbRoundtrips` telemetry alongside the page.
 */
export declare class FilesRepo {
    private readonly db;
    private readonly deleteStmt;
    private readonly insertFilesStmt;
    private readonly insertFtsStmt;
    private readonly deleteFtsStmt;
    private readonly getContentStmt;
    private readonly listStmt;
    private readonly listRowsStmt;
    private readonly searchPathStmt;
    private readonly searchFtsStmt;
    /** Cache of prepared statements keyed by batch size — see getFileContentsByPathsStmt. */
    private readonly getByPathsStmtCache;
    constructor(db: SqliteDatabase);
    clearFilesForArtifact(artifactId: string): void;
    insertFilesForArtifact(artifactId: string, files: IndexedFile[]): void;
    /** Clear-then-insert; presumably atomic per artifact — verify transaction use in files-repo.js. */
    replaceFilesForArtifact(artifactId: string, files: IndexedFile[]): void;
    deleteFilesForArtifact(artifactId: string): void;
    getFileContent(artifactId: string, filePath: string): FileRow | undefined;
    /** Paths only — cheaper than listFileRows when content is not needed. */
    listFiles(artifactId: string, options: ListFilesOptions): PagedResult<string>;
    listFileRows(artifactId: string, options: ListFileRowsOptions): PagedResult<FileRow>;
    getFileContentsByPaths(artifactId: string, filePaths: string[]): FileRow[];
    /** Scored candidates without content; telemetry fields count index work done. */
    searchFileCandidates(artifactId: string, options: SearchFilesOptions): PagedResult<SearchFileCandidateResult> & {
        scannedRows: number;
        dbRoundtrips: number;
    };
    searchFilesWithContent(artifactId: string, options: SearchFilesOptions): PagedResult<SearchFilesWithContentResult> & {
        scannedRows: number;
        dbRoundtrips: number;
    };
    countTextCandidates(artifactId: string, query: string): number;
    countPathCandidates(artifactId: string, query: string): number;
    /** First path whose basename matches `fileName` — ordering rule lives in the implementation. */
    findFirstFilePathByName(artifactId: string, fileName: string): string | undefined;
    searchFiles(artifactId: string, options: SearchFilesOptions): PagedResult<SearchFilesResult>;
    totalContentBytes(): number;
    contentBytesForArtifact(artifactId: string): number;
    private getFileContentsByPathsStmt;
}
export {};
|