@adhisang/minecraft-modding-mcp 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +11 -0
- package/LICENSE +21 -0
- package/README.md +765 -0
- package/dist/access-widener-parser.d.ts +24 -0
- package/dist/access-widener-parser.js +77 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +4 -0
- package/dist/config.d.ts +27 -0
- package/dist/config.js +178 -0
- package/dist/decompiler/vineflower.d.ts +15 -0
- package/dist/decompiler/vineflower.js +185 -0
- package/dist/errors.d.ts +50 -0
- package/dist/errors.js +49 -0
- package/dist/hash.d.ts +1 -0
- package/dist/hash.js +12 -0
- package/dist/index.d.ts +7 -0
- package/dist/index.js +1447 -0
- package/dist/java-process.d.ts +16 -0
- package/dist/java-process.js +120 -0
- package/dist/logger.d.ts +3 -0
- package/dist/logger.js +21 -0
- package/dist/mapping-pipeline-service.d.ts +18 -0
- package/dist/mapping-pipeline-service.js +60 -0
- package/dist/mapping-service.d.ts +161 -0
- package/dist/mapping-service.js +1706 -0
- package/dist/maven-resolver.d.ts +22 -0
- package/dist/maven-resolver.js +122 -0
- package/dist/minecraft-explorer-service.d.ts +43 -0
- package/dist/minecraft-explorer-service.js +562 -0
- package/dist/mixin-parser.d.ts +34 -0
- package/dist/mixin-parser.js +194 -0
- package/dist/mixin-validator.d.ts +59 -0
- package/dist/mixin-validator.js +274 -0
- package/dist/mod-analyzer.d.ts +23 -0
- package/dist/mod-analyzer.js +346 -0
- package/dist/mod-decompile-service.d.ts +39 -0
- package/dist/mod-decompile-service.js +136 -0
- package/dist/mod-remap-service.d.ts +17 -0
- package/dist/mod-remap-service.js +186 -0
- package/dist/mod-search-service.d.ts +28 -0
- package/dist/mod-search-service.js +174 -0
- package/dist/mojang-tiny-mapping-service.d.ts +13 -0
- package/dist/mojang-tiny-mapping-service.js +351 -0
- package/dist/nbt/java-nbt-codec.d.ts +3 -0
- package/dist/nbt/java-nbt-codec.js +385 -0
- package/dist/nbt/json-patch.d.ts +3 -0
- package/dist/nbt/json-patch.js +352 -0
- package/dist/nbt/pipeline.d.ts +39 -0
- package/dist/nbt/pipeline.js +173 -0
- package/dist/nbt/typed-json.d.ts +10 -0
- package/dist/nbt/typed-json.js +205 -0
- package/dist/nbt/types.d.ts +66 -0
- package/dist/nbt/types.js +2 -0
- package/dist/observability.d.ts +88 -0
- package/dist/observability.js +165 -0
- package/dist/path-converter.d.ts +12 -0
- package/dist/path-converter.js +161 -0
- package/dist/path-resolver.d.ts +19 -0
- package/dist/path-resolver.js +78 -0
- package/dist/registry-service.d.ts +29 -0
- package/dist/registry-service.js +214 -0
- package/dist/repo-downloader.d.ts +15 -0
- package/dist/repo-downloader.js +111 -0
- package/dist/resources.d.ts +3 -0
- package/dist/resources.js +154 -0
- package/dist/search-hit-accumulator.d.ts +38 -0
- package/dist/search-hit-accumulator.js +153 -0
- package/dist/source-jar-reader.d.ts +13 -0
- package/dist/source-jar-reader.js +216 -0
- package/dist/source-resolver.d.ts +14 -0
- package/dist/source-resolver.js +274 -0
- package/dist/source-service.d.ts +404 -0
- package/dist/source-service.js +2881 -0
- package/dist/storage/artifacts-repo.d.ts +45 -0
- package/dist/storage/artifacts-repo.js +209 -0
- package/dist/storage/db.d.ts +14 -0
- package/dist/storage/db.js +132 -0
- package/dist/storage/files-repo.d.ts +78 -0
- package/dist/storage/files-repo.js +437 -0
- package/dist/storage/index-meta-repo.d.ts +35 -0
- package/dist/storage/index-meta-repo.js +97 -0
- package/dist/storage/migrations.d.ts +11 -0
- package/dist/storage/migrations.js +71 -0
- package/dist/storage/schema.d.ts +1 -0
- package/dist/storage/schema.js +160 -0
- package/dist/storage/sqlite.d.ts +20 -0
- package/dist/storage/sqlite.js +111 -0
- package/dist/storage/symbols-repo.d.ts +63 -0
- package/dist/storage/symbols-repo.js +401 -0
- package/dist/symbols/symbol-extractor.d.ts +7 -0
- package/dist/symbols/symbol-extractor.js +64 -0
- package/dist/tiny-remapper-resolver.d.ts +1 -0
- package/dist/tiny-remapper-resolver.js +62 -0
- package/dist/tiny-remapper-service.d.ts +16 -0
- package/dist/tiny-remapper-service.js +73 -0
- package/dist/types.d.ts +120 -0
- package/dist/types.js +2 -0
- package/dist/version-diff-service.d.ts +41 -0
- package/dist/version-diff-service.js +222 -0
- package/dist/version-service.d.ts +70 -0
- package/dist/version-service.js +411 -0
- package/dist/vineflower-resolver.d.ts +1 -0
- package/dist/vineflower-resolver.js +62 -0
- package/dist/workspace-mapping-service.d.ts +18 -0
- package/dist/workspace-mapping-service.js +89 -0
- package/package.json +61 -0
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
import { text, object, error } from "mcp-use/server";
|
|
2
|
+
import { createError, ERROR_CODES, isAppError } from "./errors.js";
|
|
3
|
+
/**
 * Extracts and percent-decodes a URI template parameter.
 * Throws an INVALID_INPUT application error when the parameter is absent,
 * empty, or carries malformed URL encoding.
 */
function decodeTemplateParam(params, key) {
    const raw = params[key];
    const isPresent = typeof raw === "string" && raw.length > 0;
    if (!isPresent) {
        throw createError({
            code: ERROR_CODES.INVALID_INPUT,
            message: `Missing template parameter: ${key}.`,
            details: { key }
        });
    }
    try {
        return decodeURIComponent(raw);
    }
    catch {
        // decodeURIComponent throws URIError on malformed %-sequences.
        throw createError({
            code: ERROR_CODES.INVALID_INPUT,
            message: `${key} contains invalid URL encoding.`,
            details: { key, value: raw }
        });
    }
}
|
|
23
|
+
/**
 * Registers all fixed and template MCP resources on the server, backed by
 * the given source service. Handlers share one error policy: application
 * errors become MCP error results, anything else propagates.
 */
export function registerResources(server, sourceService) {
    // Wraps a resource handler so known application errors are returned as
    // MCP error results while unexpected failures keep propagating.
    const guarded = (handler) => async (...args) => {
        try {
            return await handler(...args);
        }
        catch (e) {
            if (isAppError(e))
                return error(e.message);
            throw e;
        }
    };
    // ── Fixed resources ──────────────────────────────────────────────
    server.resource({
        name: "versions-list",
        uri: "mc://versions/list",
        description: "List all available Minecraft versions with their metadata.",
        mimeType: "application/json"
    }, guarded(async () => object(await sourceService.listVersions())));
    server.resource({
        name: "runtime-metrics",
        uri: "mc://metrics",
        description: "Runtime metrics and performance counters for the MCP server.",
        mimeType: "application/json"
    }, guarded(async () => object(sourceService.getRuntimeMetrics())));
    // ── Template resources ───────────────────────────────────────────
    server.resourceTemplate({
        name: "class-source",
        uriTemplate: "mc://source/{artifactId}/{className}",
        description: "Java source code for a class within a resolved artifact. className may use dot or slash separators.",
        mimeType: "text/x-java"
    }, guarded(async (_uri, params) => {
        const result = await sourceService.getClassSource({
            artifactId: params.artifactId,
            className: decodeTemplateParam(params, "className")
        });
        return text(result.sourceText);
    }));
    server.resourceTemplate({
        name: "artifact-file",
        uriTemplate: "mc://artifact/{artifactId}/files/{filePath}",
        description: "Raw content of a file within a resolved artifact. filePath is the archive-relative path.",
        mimeType: "text/plain"
    }, guarded(async (_uri, params) => {
        const result = await sourceService.getArtifactFile({
            artifactId: params.artifactId,
            filePath: decodeTemplateParam(params, "filePath")
        });
        return text(result.content);
    }));
    server.resourceTemplate({
        name: "find-mapping",
        uriTemplate: "mc://mappings/{version}/{sourceMapping}/{targetMapping}/{kind}/{name}",
        description: "Look up a mapping for a class, field, or method between two naming namespaces.",
        mimeType: "application/json"
    }, guarded(async (_uri, params) => object(await sourceService.findMapping({
        version: params.version,
        kind: params.kind,
        name: decodeTemplateParam(params, "name"),
        sourceMapping: params.sourceMapping,
        targetMapping: params.targetMapping
    }))));
    server.resourceTemplate({
        name: "class-members",
        uriTemplate: "mc://artifact/{artifactId}/members/{className}",
        description: "List constructors, methods, and fields for a class within a resolved artifact.",
        mimeType: "application/json"
    }, guarded(async (_uri, params) => object(await sourceService.getClassMembers({
        artifactId: params.artifactId,
        className: decodeTemplateParam(params, "className")
    }))));
    server.resourceTemplate({
        name: "artifact-metadata",
        uriTemplate: "mc://artifact/{artifactId}",
        description: "Metadata for a previously resolved artifact (origin, coordinate, mapping, provenance).",
        mimeType: "application/json"
    }, guarded(async (_uri, params) => object(sourceService.getArtifact(params.artifactId))));
}
|
|
154
|
+
//# sourceMappingURL=resources.js.map
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
/** Kind of Java declaration a search hit can resolve to. */
export type SymbolKind = "class" | "interface" | "enum" | "record" | "method" | "field";
/** Symbol metadata attached to a hit when the match landed on a declared symbol. */
export type SearchResultSymbol = {
    symbolKind: SymbolKind;
    symbolName: string;
    qualifiedName?: string;
    line: number;
};
/** One ranked search result within a source file. */
export type SearchSourceHit = {
    filePath: string;
    // Higher score ranks earlier (see scoreHitOrder).
    score: number;
    // Which facet of the file produced the match.
    matchedIn: "symbol" | "path" | "content";
    startLine: number;
    endLine: number;
    snippet: string;
    reasonCodes: string[];
    symbol?: SearchResultSymbol;
};
/** Decoded pagination cursor: identifies the last hit of the previous page. */
export interface SearchCursorPayload {
    score: number;
    filePath: string;
    // Empty string when the cursor hit had no symbol.
    symbolName: string;
    line: number;
    contextKey?: string;
}
/** Streaming top-K collector for search hits with cursor-based paging. */
export interface SearchHitAccumulator {
    add(hit: SearchSourceHit): void;
    setTotalApproxOverride(count: number): void;
    finalize(): {
        page: SearchSourceHit[];
        // Present when more results exist beyond this page; encode it as the next cursor.
        nextCursorHit?: SearchSourceHit;
        totalApprox: number;
    };
}
/** Total ordering for hits: score desc, then path, symbol name, line asc. */
export declare function scoreHitOrder(left: SearchSourceHit, right: SearchSourceHit): number;
/** Serializes a hit's ordering key into an opaque base64 cursor string. */
export declare function encodeSearchCursor(hit: SearchSourceHit, contextKey?: string): string;
/** Parses a cursor string; returns undefined for missing or malformed input. */
export declare function decodeSearchCursor(cursor: string | undefined): SearchCursorPayload | undefined;
/** True when `hit` sorts strictly after the position recorded in `cursor`. */
export declare function isAfterSearchCursor(hit: SearchSourceHit, cursor: SearchCursorPayload): boolean;
/** Creates a top-K accumulator for one page of at most `limit` hits after `cursor`. */
export declare function createSearchHitAccumulator(limit: number, cursor: SearchCursorPayload | undefined): SearchHitAccumulator;
|
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
/**
 * Total ordering for search hits: higher score first, then file path,
 * symbol name, and line number ascending as deterministic tie-breakers.
 * Returns a negative value when `left` should sort before `right`.
 */
export function scoreHitOrder(left, right) {
    const byScore = right.score - left.score;
    if (byScore !== 0) {
        return byScore;
    }
    const byPath = left.filePath.localeCompare(right.filePath);
    if (byPath !== 0) {
        return byPath;
    }
    // Hits without a symbol compare as the empty string.
    const bySymbol = (left.symbol?.symbolName ?? "").localeCompare(right.symbol?.symbolName ?? "");
    if (bySymbol !== 0) {
        return bySymbol;
    }
    // Fall back to the snippet's start line when no symbol line exists.
    return (left.symbol?.line ?? left.startLine) - (right.symbol?.line ?? right.startLine);
}
|
|
19
|
+
/**
 * Serializes a hit's ordering key (score, path, symbol, line) plus an
 * optional context key into an opaque base64 JSON cursor.
 */
export function encodeSearchCursor(hit, contextKey) {
    const payload = {
        score: hit.score,
        filePath: hit.filePath,
        symbolName: hit.symbol?.symbolName ?? "",
        line: hit.symbol?.line ?? hit.startLine,
        contextKey
    };
    // JSON.stringify drops contextKey entirely when it is undefined.
    return Buffer.from(JSON.stringify(payload), "utf8").toString("base64");
}
|
|
28
|
+
/**
 * Parses an opaque base64 cursor back into a SearchCursorPayload.
 * Returns undefined for a missing cursor, unparseable input, or a payload
 * whose fields fail shape validation — callers then restart from page one.
 */
export function decodeSearchCursor(cursor) {
    if (!cursor) {
        return undefined;
    }
    try {
        const candidate = JSON.parse(Buffer.from(cursor, "base64").toString("utf8"));
        const wellFormed =
            typeof candidate.score === "number" &&
            typeof candidate.filePath === "string" &&
            typeof candidate.symbolName === "string" &&
            typeof candidate.line === "number" &&
            (candidate.contextKey == null || typeof candidate.contextKey === "string");
        return wellFormed ? candidate : undefined;
    }
    catch {
        // Corrupt base64 or invalid JSON — treat as no cursor.
        return undefined;
    }
}
|
|
47
|
+
/**
 * True when `hit` sorts strictly after the cursor position under the same
 * ordering as scoreHitOrder (score desc; path, symbol, line asc).
 */
export function isAfterSearchCursor(hit, cursor) {
    // Score: lower score sorts later, so it is past the cursor.
    if (hit.score !== cursor.score) {
        return hit.score < cursor.score;
    }
    const byPath = hit.filePath.localeCompare(cursor.filePath);
    if (byPath !== 0) {
        return byPath > 0;
    }
    const bySymbol = (hit.symbol?.symbolName ?? "").localeCompare(cursor.symbolName);
    if (bySymbol !== 0) {
        return bySymbol > 0;
    }
    // Exactly at the cursor line (or before) means not after.
    return (hit.symbol?.line ?? hit.startLine) > cursor.line;
}
|
|
72
|
+
/**
 * Binary min-heap that keeps the top-K highest-quality hits.
 * The root is the worst (lowest-quality) item in the heap.
 * Compare function: positive means `a` is worse than `b` (lower quality).
 */
function heapSiftDown(heap, index, size) {
    let cursor = index;
    for (;;) {
        const leftChild = 2 * cursor + 1;
        const rightChild = leftChild + 1;
        // Pick the worst of the node and its children.
        let worst = cursor;
        if (leftChild < size && scoreHitOrder(heap[leftChild], heap[worst]) > 0) {
            worst = leftChild;
        }
        if (rightChild < size && scoreHitOrder(heap[rightChild], heap[worst]) > 0) {
            worst = rightChild;
        }
        if (worst === cursor) {
            return;
        }
        [heap[cursor], heap[worst]] = [heap[worst], heap[cursor]];
        cursor = worst;
    }
}
|
|
97
|
+
/** Restores the heap property upward from `index` after a push. */
function heapSiftUp(heap, index) {
    let child = index;
    while (child > 0) {
        const parent = (child - 1) >> 1;
        // Keep the worst item at the root: stop once the child is no worse
        // than its parent.
        if (scoreHitOrder(heap[child], heap[parent]) <= 0) {
            return;
        }
        [heap[child], heap[parent]] = [heap[parent], heap[child]];
        child = parent;
    }
}
|
|
110
|
+
/**
 * Creates a streaming top-K accumulator for one page of search results.
 * Hits are fed in via add(); only the best `limit + 1` hits after the
 * cursor are retained (the extra one detects whether a next page exists).
 * The heap's root holds the WORST retained hit, so replacing the root is
 * how a better candidate evicts the weakest entry.
 */
export function createSearchHitAccumulator(limit, cursor) {
    const pageLimit = Math.max(1, limit);
    // Keep one extra hit beyond the page size to detect "has more".
    const keepLimit = pageLimit + 1;
    const heap = [];
    // Count of every hit seen, regardless of cursor position.
    let totalApprox = 0;
    let totalApproxOverride = undefined;
    // Count of hits sorting strictly after the cursor (candidates for this page).
    let totalAfterCursor = 0;
    return {
        add(hit) {
            totalApprox += 1;
            // Hits at or before the cursor belong to earlier pages.
            if (cursor && !isAfterSearchCursor(hit, cursor)) {
                return;
            }
            totalAfterCursor += 1;
            if (heap.length < keepLimit) {
                heap.push(hit);
                heapSiftUp(heap, heap.length - 1);
                return;
            }
            // heap[0] is the worst item in our top-K. If hit is worse or equal, discard.
            if (scoreHitOrder(hit, heap[0]) >= 0) {
                return;
            }
            // Replace root with new hit and restore heap property
            heap[0] = hit;
            heapSiftDown(heap, 0, heap.length);
        },
        setTotalApproxOverride(count) {
            // Clamp to a non-negative integer.
            totalApproxOverride = Math.max(0, Math.trunc(count));
        },
        finalize() {
            // Sort heap contents by scoreHitOrder (best first)
            const sorted = heap.slice().sort(scoreHitOrder);
            const page = sorted.slice(0, pageLimit);
            // More candidates existed than fit on this page.
            const hasMore = totalAfterCursor > page.length;
            return {
                page,
                // The last hit on the page becomes the next cursor anchor.
                nextCursorHit: hasMore && page.length > 0 ? page[page.length - 1] : undefined,
                totalApprox: totalApproxOverride ?? totalApprox
            };
        }
    };
}
|
|
153
|
+
//# sourceMappingURL=search-hit-accumulator.js.map
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
/** One UTF-8 decoded `.java` entry read out of a jar archive. */
export interface JavaEntryText {
    // Archive-relative path of the entry.
    filePath: string;
    content: string;
}
/** Thrown when a jar entry's uncompressed size exceeds the given byte limit. */
export declare class EntryTooLargeError extends Error {
    constructor(entryPath: string, jarPath: string, maxBytes: number);
}
/** Lists every entry name in the jar, including non-Java files. */
export declare function listJarEntries(jarPath: string): Promise<string[]>;
/** Lists `.java` entries whose paths pass the jar-entry security check. */
export declare function listJavaEntries(jarPath: string): Promise<string[]>;
/** Reads one entry and decodes it as strict UTF-8; throws on invalid bytes. */
export declare function readJarEntryAsUtf8(jarPath: string, entryPath: string): Promise<string>;
/** Reads one entry's raw bytes; throws if the entry is missing or disallowed. */
export declare function readJarEntryAsBuffer(jarPath: string, entryPath: string): Promise<Buffer>;
/** Streams all secure `.java` entries as UTF-8 text; oversized entries are skipped. */
export declare function iterateJavaEntriesAsUtf8(jarPath: string, maxBytes?: number): AsyncGenerator<JavaEntryText>;
/** Collects iterateJavaEntriesAsUtf8 into an array. */
export declare function readAllJavaEntriesAsUtf8(jarPath: string, maxBytes?: number): Promise<JavaEntryText[]>;
|
|
@@ -0,0 +1,216 @@
|
|
|
1
|
+
import * as yauzl from "yauzl";
|
|
2
|
+
import { createError, ERROR_CODES } from "./errors.js";
|
|
3
|
+
import { log } from "./logger.js";
|
|
4
|
+
import { isSecureJarEntryPath } from "./path-resolver.js";
|
|
5
|
+
const UTF8_DECODER = new TextDecoder("utf-8", { fatal: true });
|
|
6
|
+
/** Best-effort message extraction: Error instances yield their message, everything else is stringified. */
function toErrorMessage(value) {
    return value instanceof Error ? value.message : String(value);
}
|
|
12
|
+
/**
 * Opens a jar with yauzl in lazy-entry mode and resolves with the ZipFile.
 * autoClose is disabled because callers manage the lifetime explicitly
 * (see withZipFile / iterateJavaEntriesAsUtf8).
 */
function openZipFile(jarPath) {
    const options = { lazyEntries: true, autoClose: false };
    return new Promise((resolve, reject) => {
        yauzl.open(jarPath, options, (error, zipFile) => {
            if (!error && zipFile) {
                resolve(zipFile);
            }
            else {
                reject(new Error(`Failed to read jar "${jarPath}": ${toErrorMessage(error)}`));
            }
        });
    });
}
|
|
26
|
+
/**
 * Opens the jar, runs `action` against the ZipFile, and always closes the
 * handle afterwards — including when `action` throws or rejects.
 */
async function withZipFile(jarPath, action) {
    const zipFile = await openZipFile(jarPath);
    try {
        return await action(zipFile);
    }
    finally {
        // openZipFile disables autoClose, so closing here is mandatory.
        zipFile.close();
    }
}
|
|
35
|
+
/**
 * Advances a lazy-entries ZipFile by one entry. Resolves with the next
 * entry, or undefined when the archive is exhausted. All three listeners
 * are detached before settling so repeated calls never stack handlers.
 */
function readNextEntry(zipFile) {
    return new Promise((resolve, reject) => {
        const detach = () => {
            zipFile.removeListener("entry", handleEntry);
            zipFile.removeListener("end", handleEnd);
            zipFile.removeListener("error", handleError);
        };
        function handleEntry(entry) {
            detach();
            resolve(entry);
        }
        function handleEnd() {
            detach();
            // No more entries in the archive.
            resolve(undefined);
        }
        function handleError(error) {
            detach();
            reject(error);
        }
        zipFile.once("entry", handleEntry);
        zipFile.once("end", handleEnd);
        zipFile.once("error", handleError);
        zipFile.readEntry();
    });
}
|
|
60
|
+
/** Raised when a jar entry's byte count exceeds the caller-supplied limit. */
export class EntryTooLargeError extends Error {
    constructor(entryPath, jarPath, maxBytes) {
        const description = `Entry "${entryPath}" in "${jarPath}" exceeds size limit of ${maxBytes} bytes`;
        super(description);
        // Explicit name so instanceof-free checks and logs stay readable.
        this.name = "EntryTooLargeError";
    }
}
|
|
66
|
+
/**
 * Buffers one jar entry's uncompressed bytes into a single Buffer.
 * When `maxBytes` is given, the stream is destroyed and the promise
 * rejects with EntryTooLargeError as soon as the running total exceeds it.
 * The `settled` flag guards against double-settling: once the size limit
 * trips or an error fires, later data/end events are ignored.
 */
function readEntryStream(zipFile, entry, jarPath, maxBytes) {
    return new Promise((resolve, reject) => {
        zipFile.openReadStream(entry, (error, stream) => {
            if (error || !stream) {
                reject(new Error(`Failed to read entry "${entry.fileName}" from "${jarPath}": ${toErrorMessage(error)}`));
                return;
            }
            // True once the promise has been resolved/rejected.
            let settled = false;
            let totalBytes = 0;
            const chunks = [];
            stream.on("data", (chunk) => {
                if (settled)
                    return;
                // Normalize chunks to Buffers before counting bytes.
                const buf = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
                totalBytes += buf.length;
                if (maxBytes !== undefined && totalBytes > maxBytes) {
                    settled = true;
                    // Stop reading immediately; remaining events are ignored via `settled`.
                    stream.destroy();
                    reject(new EntryTooLargeError(entry.fileName, jarPath, maxBytes));
                    return;
                }
                chunks.push(buf);
            });
            stream.once("error", (streamError) => {
                if (settled)
                    return;
                settled = true;
                reject(new Error(`Failed to read entry "${entry.fileName}" from "${jarPath}": ${toErrorMessage(streamError)}`));
            });
            stream.once("end", () => {
                if (settled)
                    return;
                settled = true;
                resolve(Buffer.concat(chunks));
            });
        });
    });
}
|
|
104
|
+
/**
 * Decodes a buffer as strict UTF-8 (fatal decoder — see UTF8_DECODER).
 * Invalid byte sequences are reported as an INVALID_INPUT application
 * error rather than being silently replaced.
 */
function decodeUtf8OrThrow(contentBuffer, jarPath, entryPath) {
    // Normalize separators once for both the message and the details payload.
    const normalizedEntryPath = entryPath.replaceAll("\\", "/");
    try {
        return UTF8_DECODER.decode(contentBuffer);
    }
    catch {
        throw createError({
            code: ERROR_CODES.INVALID_INPUT,
            message: `Entry "${normalizedEntryPath}" in "${jarPath}" is not valid UTF-8. Only UTF-8 source files are supported.`,
            details: { jarPath, entryPath: normalizedEntryPath }
        });
    }
}
|
|
116
|
+
/** Returns every entry name in the jar, in archive order. */
export async function listJarEntries(jarPath) {
    return withZipFile(jarPath, async (zipFile) => {
        const names = [];
        // readNextEntry resolves undefined once the archive is exhausted.
        for (let entry = await readNextEntry(zipFile); entry; entry = await readNextEntry(zipFile)) {
            names.push(entry.fileName);
        }
        return names;
    });
}
|
|
129
|
+
/** Returns `.java` entry names whose paths pass the security check. */
export async function listJavaEntries(jarPath) {
    const allEntries = await listJarEntries(jarPath);
    const isJavaFile = (name) => name.toLowerCase().endsWith(".java");
    return allEntries.filter((name) => isJavaFile(name) && isSecureJarEntryPath(name));
}
|
|
133
|
+
/** Reads one jar entry and decodes it as strict UTF-8; throws on invalid bytes. */
export async function readJarEntryAsUtf8(jarPath, entryPath) {
    const rawBytes = await readJarEntryAsBuffer(jarPath, entryPath);
    return decodeUtf8OrThrow(rawBytes, jarPath, entryPath);
}
|
|
137
|
+
/**
 * Reads the raw bytes of one jar entry identified by its archive-relative
 * path (backslashes normalized to forward slashes). Throws INVALID_INPUT
 * for paths failing the security check and SOURCE_NOT_FOUND when no
 * matching entry exists.
 */
export async function readJarEntryAsBuffer(jarPath, entryPath) {
    const normalizedTargetPath = entryPath.replaceAll("\\", "/");
    if (!isSecureJarEntryPath(normalizedTargetPath)) {
        throw createError({
            code: ERROR_CODES.INVALID_INPUT,
            message: `Entry path "${normalizedTargetPath}" is not allowed.`,
            details: { jarPath, entryPath: normalizedTargetPath }
        });
    }
    return withZipFile(jarPath, async (zipFile) => {
        // Scan entries in archive order; insecure entry names are never matched.
        for (let entry = await readNextEntry(zipFile); entry; entry = await readNextEntry(zipFile)) {
            if (isSecureJarEntryPath(entry.fileName) && entry.fileName === normalizedTargetPath) {
                return readEntryStream(zipFile, entry, jarPath);
            }
        }
        throw createError({
            code: ERROR_CODES.SOURCE_NOT_FOUND,
            message: `Entry "${normalizedTargetPath}" was not found in "${jarPath}".`,
            details: { jarPath, entryPath: normalizedTargetPath }
        });
    });
}
|
|
166
|
+
/**
 * Async generator yielding each secure `.java` entry as UTF-8 text.
 * Entries exceeding `maxBytes` are logged and skipped (best-effort), while
 * any other read/decode failure aborts the iteration. The zip handle is
 * closed in `finally`, so early consumer termination still releases it.
 */
export async function* iterateJavaEntriesAsUtf8(jarPath, maxBytes) {
    const zipFile = await openZipFile(jarPath);
    try {
        while (true) {
            const entry = await readNextEntry(zipFile);
            if (!entry) {
                // Archive exhausted.
                break;
            }
            if (!entry.fileName.toLowerCase().endsWith(".java")) {
                continue;
            }
            // Skip entries with disallowed paths entirely.
            if (!isSecureJarEntryPath(entry.fileName)) {
                continue;
            }
            let buf;
            try {
                buf = await readEntryStream(zipFile, entry, jarPath, maxBytes);
            }
            catch (err) {
                // Oversized entries are non-fatal: warn and move on.
                if (err instanceof EntryTooLargeError) {
                    log("warn", "source_jar.entry_too_large", {
                        jarPath,
                        entryPath: entry.fileName,
                        maxBytes
                    });
                    continue;
                }
                throw err;
            }
            // Throws on invalid UTF-8 — deliberately fatal for the iteration.
            const content = decodeUtf8OrThrow(buf, jarPath, entry.fileName);
            yield {
                filePath: entry.fileName,
                content
            };
        }
    }
    finally {
        // autoClose is off; close even if the consumer stops iterating early.
        zipFile.close();
    }
}
|
|
206
|
+
/** Drains iterateJavaEntriesAsUtf8 into an array of { filePath, content }. */
export async function readAllJavaEntriesAsUtf8(jarPath, maxBytes) {
    const collected = [];
    for await (const { filePath, content } of iterateJavaEntriesAsUtf8(jarPath, maxBytes)) {
        collected.push({ filePath, content });
    }
    return collected;
}
|
|
216
|
+
//# sourceMappingURL=source-jar-reader.js.map
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import type { Config, ResolvedSourceArtifact, SourceTargetInput } from "./types.js";
|
|
2
|
+
/** Options controlling how a source target is resolved. */
export interface ResolveSourceTargetOptions {
    // Whether falling back to decompilation is permitted when no sources jar exists.
    allowDecompile: boolean;
    // Repository URLs to try first, in order — TODO confirm ordering semantics against the implementation.
    preferredRepos?: string[];
    /** Invoked each time a repository attempt fails and the resolver moves on. */
    onRepoFailover?: (event: {
        // Which artifact the failed attempt was fetching.
        stage: "source" | "binary";
        repoUrl: string;
        // HTTP status when the failure was an HTTP response; absent for network errors.
        statusCode?: number;
        reason: string;
        // 1-based attempt counter out of totalAttempts.
        attempt: number;
        totalAttempts: number;
    }) => void;
}
/** Resolves the requested source target into a concrete artifact. */
export declare function resolveSourceTarget(input: SourceTargetInput, options: ResolveSourceTargetOptions, explicitConfig: Config): Promise<ResolvedSourceArtifact>;
|