@adhisang/minecraft-modding-mcp 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +11 -0
- package/LICENSE +21 -0
- package/README.md +765 -0
- package/dist/access-widener-parser.d.ts +24 -0
- package/dist/access-widener-parser.js +77 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +4 -0
- package/dist/config.d.ts +27 -0
- package/dist/config.js +178 -0
- package/dist/decompiler/vineflower.d.ts +15 -0
- package/dist/decompiler/vineflower.js +185 -0
- package/dist/errors.d.ts +50 -0
- package/dist/errors.js +49 -0
- package/dist/hash.d.ts +1 -0
- package/dist/hash.js +12 -0
- package/dist/index.d.ts +7 -0
- package/dist/index.js +1447 -0
- package/dist/java-process.d.ts +16 -0
- package/dist/java-process.js +120 -0
- package/dist/logger.d.ts +3 -0
- package/dist/logger.js +21 -0
- package/dist/mapping-pipeline-service.d.ts +18 -0
- package/dist/mapping-pipeline-service.js +60 -0
- package/dist/mapping-service.d.ts +161 -0
- package/dist/mapping-service.js +1706 -0
- package/dist/maven-resolver.d.ts +22 -0
- package/dist/maven-resolver.js +122 -0
- package/dist/minecraft-explorer-service.d.ts +43 -0
- package/dist/minecraft-explorer-service.js +562 -0
- package/dist/mixin-parser.d.ts +34 -0
- package/dist/mixin-parser.js +194 -0
- package/dist/mixin-validator.d.ts +59 -0
- package/dist/mixin-validator.js +274 -0
- package/dist/mod-analyzer.d.ts +23 -0
- package/dist/mod-analyzer.js +346 -0
- package/dist/mod-decompile-service.d.ts +39 -0
- package/dist/mod-decompile-service.js +136 -0
- package/dist/mod-remap-service.d.ts +17 -0
- package/dist/mod-remap-service.js +186 -0
- package/dist/mod-search-service.d.ts +28 -0
- package/dist/mod-search-service.js +174 -0
- package/dist/mojang-tiny-mapping-service.d.ts +13 -0
- package/dist/mojang-tiny-mapping-service.js +351 -0
- package/dist/nbt/java-nbt-codec.d.ts +3 -0
- package/dist/nbt/java-nbt-codec.js +385 -0
- package/dist/nbt/json-patch.d.ts +3 -0
- package/dist/nbt/json-patch.js +352 -0
- package/dist/nbt/pipeline.d.ts +39 -0
- package/dist/nbt/pipeline.js +173 -0
- package/dist/nbt/typed-json.d.ts +10 -0
- package/dist/nbt/typed-json.js +205 -0
- package/dist/nbt/types.d.ts +66 -0
- package/dist/nbt/types.js +2 -0
- package/dist/observability.d.ts +88 -0
- package/dist/observability.js +165 -0
- package/dist/path-converter.d.ts +12 -0
- package/dist/path-converter.js +161 -0
- package/dist/path-resolver.d.ts +19 -0
- package/dist/path-resolver.js +78 -0
- package/dist/registry-service.d.ts +29 -0
- package/dist/registry-service.js +214 -0
- package/dist/repo-downloader.d.ts +15 -0
- package/dist/repo-downloader.js +111 -0
- package/dist/resources.d.ts +3 -0
- package/dist/resources.js +154 -0
- package/dist/search-hit-accumulator.d.ts +38 -0
- package/dist/search-hit-accumulator.js +153 -0
- package/dist/source-jar-reader.d.ts +13 -0
- package/dist/source-jar-reader.js +216 -0
- package/dist/source-resolver.d.ts +14 -0
- package/dist/source-resolver.js +274 -0
- package/dist/source-service.d.ts +404 -0
- package/dist/source-service.js +2881 -0
- package/dist/storage/artifacts-repo.d.ts +45 -0
- package/dist/storage/artifacts-repo.js +209 -0
- package/dist/storage/db.d.ts +14 -0
- package/dist/storage/db.js +132 -0
- package/dist/storage/files-repo.d.ts +78 -0
- package/dist/storage/files-repo.js +437 -0
- package/dist/storage/index-meta-repo.d.ts +35 -0
- package/dist/storage/index-meta-repo.js +97 -0
- package/dist/storage/migrations.d.ts +11 -0
- package/dist/storage/migrations.js +71 -0
- package/dist/storage/schema.d.ts +1 -0
- package/dist/storage/schema.js +160 -0
- package/dist/storage/sqlite.d.ts +20 -0
- package/dist/storage/sqlite.js +111 -0
- package/dist/storage/symbols-repo.d.ts +63 -0
- package/dist/storage/symbols-repo.js +401 -0
- package/dist/symbols/symbol-extractor.d.ts +7 -0
- package/dist/symbols/symbol-extractor.js +64 -0
- package/dist/tiny-remapper-resolver.d.ts +1 -0
- package/dist/tiny-remapper-resolver.js +62 -0
- package/dist/tiny-remapper-service.d.ts +16 -0
- package/dist/tiny-remapper-service.js +73 -0
- package/dist/types.d.ts +120 -0
- package/dist/types.js +2 -0
- package/dist/version-diff-service.d.ts +41 -0
- package/dist/version-diff-service.js +222 -0
- package/dist/version-service.d.ts +70 -0
- package/dist/version-service.js +411 -0
- package/dist/vineflower-resolver.d.ts +1 -0
- package/dist/vineflower-resolver.js +62 -0
- package/dist/workspace-mapping-service.d.ts +18 -0
- package/dist/workspace-mapping-service.js +89 -0
- package/package.json +61 -0
|
@@ -0,0 +1,352 @@
|
|
|
1
|
+
import { isDeepStrictEqual } from "node:util";
|
|
2
|
+
import { createError, ERROR_CODES } from "../errors.js";
|
|
3
|
+
import { assertValidTypedNbtDocument, validateTypedNbtDocument } from "./typed-json.js";
|
|
4
|
+
// True when `value` is a plain object: non-null, not an array, and not a
// typed-array/DataView (ArrayBuffer views represent NBT byte/int/long arrays).
function isRecord(value) {
    if (value === null || typeof value !== "object") {
        return false;
    }
    return !(Array.isArray(value) || ArrayBuffer.isView(value));
}
|
|
10
|
+
// Throw a JSON_PATCH_INVALID error built from message/details; never returns.
function invalidPatch(message, details) {
    const error = createError({ code: ERROR_CODES.JSON_PATCH_INVALID, message, details });
    throw error;
}
|
|
17
|
+
// Throw an NBT_UNSUPPORTED_FEATURE error built from message/details; never returns.
function unsupportedFeature(message, details) {
    const error = createError({ code: ERROR_CODES.NBT_UNSUPPORTED_FEATURE, message, details });
    throw error;
}
|
|
24
|
+
// Throw a JSON_PATCH_CONFLICT error built from message/details; never returns.
function patchConflict(message, details) {
    const error = createError({ code: ERROR_CODES.JSON_PATCH_CONFLICT, message, details });
    throw error;
}
|
|
31
|
+
/**
 * Validate a raw JSON Patch document and normalize every operation to the
 * shape { op, path, from, value }. Supported ops are add/remove/replace/test;
 * move/copy raise NBT_UNSUPPORTED_FEATURE and anything else is invalid.
 * Throws via invalidPatch()/unsupportedFeature() on the first bad operation.
 */
function parsePatchOperations(patch) {
    if (!Array.isArray(patch)) {
        invalidPatch("JSON Patch must be an array.");
    }
    const supportedOps = new Set(["add", "remove", "replace", "test"]);
    const valueRequiredOps = new Set(["add", "replace", "test"]);
    const normalized = [];
    for (const [opIndex, raw] of patch.entries()) {
        if (!isRecord(raw)) {
            invalidPatch("JSON Patch operation must be an object.", { opIndex });
        }
        if (typeof raw.op !== "string") {
            invalidPatch('JSON Patch operation requires string field "op".', { opIndex });
        }
        if (typeof raw.path !== "string") {
            invalidPatch('JSON Patch operation requires string field "path".', { opIndex });
        }
        // move/copy get a distinct error code so clients know they may be added later.
        if (raw.op === "move" || raw.op === "copy") {
            unsupportedFeature(`JSON Patch operation "${raw.op}" is not supported in v1.`, {
                opIndex,
                op: raw.op
            });
        }
        if (!supportedOps.has(raw.op)) {
            invalidPatch(`Unsupported JSON Patch operation "${raw.op}".`, {
                opIndex,
                op: raw.op
            });
        }
        if (valueRequiredOps.has(raw.op) && !("value" in raw)) {
            invalidPatch(`JSON Patch operation "${raw.op}" requires "value".`, { opIndex });
        }
        normalized.push({
            op: raw.op,
            path: raw.path,
            from: typeof raw.from === "string" ? raw.from : undefined,
            value: raw.value
        });
    }
    return normalized;
}
|
|
79
|
+
// Decode one RFC 6901 reference token: "~1" -> "/", then "~0" -> "~".
// A "~" not followed by 0 or 1 (including a trailing "~") is an invalid escape.
function decodePointerToken(path, token, opIndex) {
    const badEscape = /~(?:[^01]|$)/;
    if (badEscape.test(token)) {
        invalidPatch("Invalid JSON Pointer escape sequence.", { opIndex, path });
    }
    return token.split("~1").join("/").split("~0").join("~");
}
|
|
88
|
+
// Split an RFC 6901 JSON Pointer into decoded reference tokens.
// "" addresses the document root (empty token list); any other pointer
// must begin with "/".
function parsePointer(path, opIndex) {
    if (path === "") {
        return [];
    }
    if (path[0] !== "/") {
        invalidPatch('JSON Pointer path must be empty or start with "/".', { opIndex, path });
    }
    const rawTokens = path.split("/").slice(1);
    return rawTokens.map((token) => decodePointerToken(path, token, opIndex));
}
|
|
103
|
+
// Own-property check that is safe even for objects with a null prototype
// or a shadowed hasOwnProperty.
function hasOwn(target, key) {
    const { hasOwnProperty } = Object.prototype;
    return hasOwnProperty.call(target, key);
}
|
|
106
|
+
/**
 * Resolve an array pointer token to a numeric index.
 * "-" means "append" and is only legal when options.allowAppend is set
 * (the add operation). Other tokens must be canonical base-10 indices with
 * no leading zeros, in [0, length] when appending is allowed and [0, length)
 * otherwise. Violations throw JSON_PATCH_CONFLICT via patchConflict().
 */
function parseArrayIndex(token, length, options) {
    const conflict = (message) => {
        patchConflict(message, { opIndex: options.opIndex, jsonPointer: options.path });
    };
    if (token === "-") {
        if (!options.allowAppend) {
            conflict('"-" is only allowed for add on arrays.');
        }
        return length;
    }
    if (!/^(0|[1-9][0-9]*)$/.test(token)) {
        conflict("Invalid array index in JSON Pointer.");
    }
    const index = Number.parseInt(token, 10);
    if (options.allowAppend) {
        if (index > length) {
            conflict("Array add index is out of bounds.");
        }
    }
    else if (index >= length) {
        conflict("Array index is out of bounds.");
    }
    return index;
}
|
|
139
|
+
/**
 * Walk all but the last token to find the parent container of the addressed
 * location. Returns { parent, key } where `key` is the final (undecoded-index)
 * token. The empty pointer has no parent and is rejected as a conflict, as is
 * any traversal through a missing key, bad index, or scalar value.
 */
function resolveParent(root, tokens, opIndex, path) {
    if (tokens.length === 0) {
        patchConflict("Operation path does not reference a child location.", {
            opIndex,
            jsonPointer: path
        });
    }
    let cursor = root;
    tokens.slice(0, -1).forEach((token, i) => {
        // Pointer prefix up to and including this token, for error reporting.
        const pointer = `/${tokens.slice(0, i + 1).join("/")}`;
        if (Array.isArray(cursor)) {
            const index = parseArrayIndex(token, cursor.length, {
                allowAppend: false,
                opIndex,
                path: pointer
            });
            cursor = cursor[index];
            return;
        }
        if (!isRecord(cursor)) {
            patchConflict("JSON Pointer traversed into a non-container value.", {
                opIndex,
                jsonPointer: pointer
            });
        }
        if (!hasOwn(cursor, token)) {
            patchConflict("JSON Pointer path does not exist.", {
                opIndex,
                jsonPointer: pointer
            });
        }
        cursor = cursor[token];
    });
    return { parent: cursor, key: tokens[tokens.length - 1] };
}
|
|
175
|
+
/**
 * Read the value addressed by `tokens`, starting at `root`. An empty token
 * list addresses the root itself. Traversal failures (invalid index, missing
 * key, scalar in the middle of the path) throw JSON_PATCH_CONFLICT.
 */
function readValueAtPath(root, tokens, opIndex, path) {
    let cursor = root;
    tokens.forEach((token, i) => {
        // Pointer prefix up to and including this token, for error reporting.
        const pointer = `/${tokens.slice(0, i + 1).join("/")}`;
        if (Array.isArray(cursor)) {
            const index = parseArrayIndex(token, cursor.length, {
                allowAppend: false,
                opIndex,
                path: pointer
            });
            cursor = cursor[index];
            return;
        }
        if (!isRecord(cursor)) {
            patchConflict("JSON Pointer traversed into a non-container value.", {
                opIndex,
                jsonPointer: pointer
            });
        }
        if (!hasOwn(cursor, token)) {
            patchConflict("JSON Pointer path does not exist.", {
                opIndex,
                jsonPointer: pointer
            });
        }
        cursor = cursor[token];
    });
    return cursor;
}
|
|
208
|
+
// Re-validate the whole document after a mutating op; surfaces the first
// typed-NBT violation as a patch conflict pointing at the offending node.
function assertTypedNbtInvariant(root, opIndex, path) {
    const validation = validateTypedNbtDocument(root);
    if (validation.ok) {
        return;
    }
    const { jsonPointer, expectedType, actualType } = validation.issue;
    patchConflict("JSON Patch operation produced invalid typed NBT JSON.", {
        opIndex,
        jsonPointer,
        expectedType,
        actualType,
        path
    });
}
|
|
220
|
+
/**
 * Apply an RFC 6902 JSON Patch (add/remove/replace/test subset) to a typed
 * NBT document. The input document is never mutated: all edits happen on a
 * structuredClone, and every mutating op re-validates the typed-NBT invariant
 * so a failing patch cannot leave a half-valid document behind.
 *
 * Returns { typedJson, meta: { appliedOps, testOps, changed } } where
 * appliedOps counts every operation (tests included), testOps counts only
 * test operations, and changed becomes true once any add/remove/replace ran.
 * Throws JSON_PATCH_INVALID, NBT_UNSUPPORTED_FEATURE, or JSON_PATCH_CONFLICT
 * via the helper throwers above.
 */
export function applyJsonPatch(document, patch) {
    assertValidTypedNbtDocument(document);
    const operations = parsePatchOperations(patch);
    // Deep copy so the caller's document is untouched even if a later op fails.
    let working = structuredClone(document);
    let testOps = 0;
    let changed = false;
    for (let i = 0; i < operations.length; i += 1) {
        const operation = operations[i];
        const tokens = parsePointer(operation.path, i);
        if (operation.op === "test") {
            const actual = readValueAtPath(working, tokens, i, operation.path);
            testOps += 1;
            // Deep structural equality, per RFC 6902 "test" semantics.
            if (!isDeepStrictEqual(actual, operation.value)) {
                patchConflict("JSON Patch test operation failed.", {
                    opIndex: i,
                    jsonPointer: operation.path,
                    expectedType: typeof operation.value,
                    actualType: typeof actual
                });
            }
            continue;
        }
        if (operation.op === "add") {
            // Clone so later ops cannot alias into the caller-supplied patch value.
            const nextValue = structuredClone(operation.value);
            if (tokens.length === 0) {
                // Empty pointer: "add" replaces the whole document root.
                working = nextValue;
            }
            else {
                const { parent, key } = resolveParent(working, tokens, i, operation.path);
                if (Array.isArray(parent)) {
                    // "-" or index <= length: insert, shifting later elements right.
                    const index = parseArrayIndex(key, parent.length, {
                        allowAppend: true,
                        opIndex: i,
                        path: operation.path
                    });
                    parent.splice(index, 0, nextValue);
                }
                else if (isRecord(parent)) {
                    // Object add: create-or-overwrite, per RFC 6902.
                    parent[key] = nextValue;
                }
                else {
                    patchConflict("Add target parent is not a container.", {
                        opIndex: i,
                        jsonPointer: operation.path
                    });
                }
            }
            changed = true;
            assertTypedNbtInvariant(working, i, operation.path);
            continue;
        }
        if (operation.op === "remove") {
            if (tokens.length === 0) {
                // The document must always have a root.
                patchConflict("Cannot remove the typed NBT document root.", {
                    opIndex: i,
                    jsonPointer: operation.path
                });
            }
            const { parent, key } = resolveParent(working, tokens, i, operation.path);
            if (Array.isArray(parent)) {
                const index = parseArrayIndex(key, parent.length, {
                    allowAppend: false,
                    opIndex: i,
                    path: operation.path
                });
                parent.splice(index, 1);
            }
            else if (isRecord(parent)) {
                if (!hasOwn(parent, key)) {
                    patchConflict("Remove target path does not exist.", {
                        opIndex: i,
                        jsonPointer: operation.path
                    });
                }
                delete parent[key];
            }
            else {
                patchConflict("Remove target parent is not a container.", {
                    opIndex: i,
                    jsonPointer: operation.path
                });
            }
            changed = true;
            assertTypedNbtInvariant(working, i, operation.path);
            continue;
        }
        if (operation.op === "replace") {
            const nextValue = structuredClone(operation.value);
            if (tokens.length === 0) {
                working = nextValue;
            }
            else {
                const { parent, key } = resolveParent(working, tokens, i, operation.path);
                if (Array.isArray(parent)) {
                    const index = parseArrayIndex(key, parent.length, {
                        allowAppend: false,
                        opIndex: i,
                        path: operation.path
                    });
                    parent[index] = nextValue;
                }
                else if (isRecord(parent)) {
                    // Unlike add, replace requires the key to already exist.
                    if (!hasOwn(parent, key)) {
                        patchConflict("Replace target path does not exist.", {
                            opIndex: i,
                            jsonPointer: operation.path
                        });
                    }
                    parent[key] = nextValue;
                }
                else {
                    patchConflict("Replace target parent is not a container.", {
                        opIndex: i,
                        jsonPointer: operation.path
                    });
                }
            }
            changed = true;
            assertTypedNbtInvariant(working, i, operation.path);
            continue;
        }
    }
    // Belt-and-braces: validate the final document as a whole before returning.
    assertValidTypedNbtDocument(working);
    return {
        typedJson: working,
        meta: {
            appliedOps: operations.length,
            testOps,
            changed
        }
    };
}
|
|
352
|
+
//# sourceMappingURL=json-patch.js.map
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import { type TypedNbtDocument } from "./typed-json.js";
|
|
2
|
+
import type { ApplyJsonPatchResult } from "./types.js";
|
|
3
|
+
/** How decoding interprets the payload; "auto" sniffs the gzip magic bytes. */
export type DecodeCompression = "none" | "gzip" | "auto";
/** Compression applied when encoding typed JSON back to binary NBT. */
export type EncodeCompression = "none" | "gzip";
/** Byte budgets enforced by the NBT pipeline at each processing stage. */
export interface NbtLimits {
    /** Maximum decoded size of the base64 input. */
    maxInputBytes: number;
    /** Maximum size after gzip inflation (guards against decompression bombs). */
    maxInflatedBytes: number;
    /** Maximum serialized size of any response payload. */
    maxResponseBytes: number;
}
/** Built-in defaults used when callers pass no explicit limits. */
export declare const DEFAULT_NBT_LIMITS: NbtLimits;
/** Input for nbtBase64ToTypedJson. */
export interface NbtToJsonInput {
    /** Base64-encoded binary NBT, optionally gzip-compressed. */
    nbtBase64: string;
    /** Defaults to "auto" when omitted. */
    compression?: DecodeCompression;
}
/** Input for typedJsonToNbtBase64. */
export interface JsonToNbtInput {
    /** Candidate typed NBT document; validated before encoding. */
    typedJson: unknown;
    /** Defaults to "none" when omitted. */
    compression?: EncodeCompression;
}
/** Input for applyNbtJsonPatch. */
export interface ApplyPatchInput {
    /** Candidate typed NBT document; validated before patching. */
    typedJson: unknown;
    /** JSON Patch document (add/remove/replace/test subset). */
    patch: unknown;
}
/** Result of decoding binary NBT into typed JSON. */
export interface NbtToJsonOutput {
    typedJson: TypedNbtDocument;
    meta: {
        /** Compression actually found on the input. */
        compressionDetected: "none" | "gzip";
        /** Decoded (pre-inflation) input size in bytes. */
        inputBytes: number;
    };
}
/** Result of encoding typed JSON into binary NBT. */
export interface JsonToNbtOutput {
    nbtBase64: string;
    meta: {
        /** Raw (pre-base64) output size in bytes. */
        outputBytes: number;
        compressionApplied: EncodeCompression;
    };
}
export declare function nbtBase64ToTypedJson(input: NbtToJsonInput, limits?: NbtLimits): NbtToJsonOutput;
export declare function typedJsonToNbtBase64(input: JsonToNbtInput, limits?: NbtLimits): JsonToNbtOutput;
export declare function applyNbtJsonPatch(input: ApplyPatchInput, limits?: NbtLimits): ApplyJsonPatchResult;
|
|
@@ -0,0 +1,173 @@
|
|
|
1
|
+
import { gunzipSync, gzipSync } from "node:zlib";
|
|
2
|
+
import { createError, ERROR_CODES } from "../errors.js";
|
|
3
|
+
import { decodeJavaNbt, encodeJavaNbt } from "./java-nbt-codec.js";
|
|
4
|
+
import { applyJsonPatch } from "./json-patch.js";
|
|
5
|
+
import { assertValidTypedNbtDocument } from "./typed-json.js";
|
|
6
|
+
// Default size budgets for the NBT pipeline. The limit-exceeded error
// messages below suggest MCP_MAX_NBT_* settings as overrides — presumably
// wired up in config.js; confirm against that module.
export const DEFAULT_NBT_LIMITS = {
    maxInputBytes: 4 * 1024 * 1024, // decoded base64 input (4 MiB)
    maxInflatedBytes: 16 * 1024 * 1024, // after gunzip (16 MiB)
    maxResponseBytes: 8 * 1024 * 1024 // serialized response payload (8 MiB)
};
|
|
11
|
+
// Throw an INVALID_INPUT error built from message/details; never returns.
function invalidInput(message, details) {
    const error = createError({ code: ERROR_CODES.INVALID_INPUT, message, details });
    throw error;
}
|
|
18
|
+
/**
 * Throw a LIMIT_EXCEEDED error describing which pipeline stage overflowed
 * and how the caller can recover (smaller payload or raising the matching
 * MCP_MAX_NBT_* limit). Unknown stages get a generic message.
 */
function limitExceeded(stage, field, actual, limit) {
    const responseAdvice = [
        "Typed NBT JSON response exceeds max response bytes.",
        "Reduce typedJson size or increase MCP_MAX_NBT_RESPONSE_BYTES."
    ];
    const byStage = {
        "decode-input": [
            "Decoded NBT payload exceeds max input bytes.",
            "Use smaller nbtBase64 input or increase MCP_MAX_NBT_INPUT_BYTES."
        ],
        "inflate": [
            "Inflated gzip NBT payload exceeds max inflated bytes.",
            "Use smaller compressed payload or increase MCP_MAX_NBT_INFLATED_BYTES."
        ],
        "decode-output": responseAdvice,
        "patch-output": responseAdvice,
        "encode-output": [
            "NBT base64 response exceeds max response bytes.",
            "Reduce typedJson size or increase MCP_MAX_NBT_RESPONSE_BYTES."
        ]
    };
    const [message, nextAction] = byStage[stage] ?? [
        "NBT payload exceeds configured size limit.",
        "Reduce payload size or increase NBT limits."
    ];
    throw createError({
        code: ERROR_CODES.LIMIT_EXCEEDED,
        message,
        details: { stage, field, actual, limit, nextAction }
    });
}
|
|
49
|
+
// No-op when within budget; otherwise delegate to limitExceeded(), which throws.
function assertByteLimit(stage, field, actual, limit) {
    if (actual <= limit) {
        return;
    }
    limitExceeded(stage, field, actual, limit);
}
|
|
54
|
+
// UTF-8 byte size of the JSON serialization of `value` (response-limit metric).
function jsonUtf8Bytes(value) {
    const serialized = JSON.stringify(value);
    return Buffer.byteLength(serialized, "utf8");
}
|
|
57
|
+
// Decoded size of already-normalized (whitespace-free) base64 without
// allocating: 3 bytes per 4 chars, minus one byte per trailing '='.
function estimatedDecodedBase64Bytes(normalizedBase64) {
    let paddingBytes = 0;
    if (normalizedBase64.endsWith("==")) {
        paddingBytes = 2;
    }
    else if (normalizedBase64.endsWith("=")) {
        paddingBytes = 1;
    }
    return (normalizedBase64.length / 4) * 3 - paddingBytes;
}
|
|
61
|
+
/**
 * Heuristic: does this error come from gunzip hitting maxOutputLength (or
 * Node's absolute buffer cap) rather than from corrupt gzip data? Checks the
 * error code first, then falls back to message sniffing.
 */
function isInflateLimitError(error) {
    if (error == null || typeof error !== "object") {
        return false;
    }
    const candidate = error;
    if (candidate.code === "ERR_BUFFER_TOO_LARGE") {
        return true;
    }
    const { message } = candidate;
    if (typeof message !== "string") {
        return false;
    }
    const lowered = message.toLowerCase();
    return lowered.includes("maxoutputlength") || lowered.includes("cannot create a buffer larger");
}
|
|
75
|
+
// Wrap a gunzip failure in an NBT_PARSE_FAILED error carrying the underlying
// reason plus a recovery hint; never returns.
function inflateParseFailed(error) {
    const reason = error instanceof Error ? error.message : String(error);
    throw createError({
        code: ERROR_CODES.NBT_PARSE_FAILED,
        message: "Failed to inflate gzip NBT payload.",
        details: {
            reason,
            nextAction: "Provide valid gzip-compressed NBT data or set compression to none for uncompressed payloads."
        }
    });
}
|
|
85
|
+
// Normalize the optional decode-side "compression" field; omitted means "auto".
function parseDecodeCompression(value) {
    switch (value) {
        case undefined:
            return "auto";
        case "none":
        case "gzip":
        case "auto":
            return value;
        default:
            invalidInput('Field "compression" must be one of none|gzip|auto.');
    }
}
|
|
94
|
+
// Normalize the optional encode-side "compression" field; omitted means "none".
function parseEncodeCompression(value) {
    switch (value) {
        case undefined:
            return "none";
        case "none":
        case "gzip":
            return value;
        default:
            invalidInput('Field "compression" must be one of none|gzip.');
    }
}
|
|
103
|
+
/**
 * Validate and decode a base64-encoded NBT payload. The input limit is
 * checked twice: on the cheap arithmetic estimate (before allocating the
 * buffer) and again on the actual decoded length.
 */
function decodeBase64Nbt(value, limits) {
    if (typeof value !== "string" || value.trim().length === 0) {
        invalidInput('Field "nbtBase64" must be a non-empty base64 string.');
    }
    // Strip all whitespace so wrapped/pretty-printed base64 is accepted.
    const normalized = value.replace(/\s+/g, "");
    const wellFormed = normalized.length > 0 &&
        normalized.length % 4 === 0 &&
        /^[A-Za-z0-9+/]*={0,2}$/.test(normalized);
    if (!wellFormed) {
        invalidInput('Field "nbtBase64" is not valid base64.');
    }
    assertByteLimit("decode-input", "nbtBase64", estimatedDecodedBase64Bytes(normalized), limits.maxInputBytes);
    const decoded = Buffer.from(normalized, "base64");
    assertByteLimit("decode-input", "nbtBase64", decoded.length, limits.maxInputBytes);
    return decoded;
}
|
|
120
|
+
// Gzip streams always begin with the two magic bytes 0x1f 0x8b.
function isGzipBuffer(buffer) {
    if (buffer.length < 2) {
        return false;
    }
    return buffer[0] === 0x1f && buffer[1] === 0x8b;
}
|
|
123
|
+
/**
 * Decode a base64 (optionally gzipped) binary NBT payload into typed JSON.
 * compression "auto" sniffs the gzip magic bytes; "gzip" forces inflation.
 * Limits are enforced on the decoded input, the inflated stream, and the
 * resulting JSON. Returns { typedJson, meta } where meta reports the detected
 * compression and the decoded (pre-inflation) input byte count.
 */
export function nbtBase64ToTypedJson(input, limits = DEFAULT_NBT_LIMITS) {
    const compression = parseDecodeCompression(input.compression);
    const encoded = decodeBase64Nbt(input.nbtBase64, limits);
    let payload = encoded;
    let compressionDetected = "none";
    if (compression === "gzip" || (compression === "auto" && isGzipBuffer(encoded))) {
        try {
            // maxOutputLength makes zlib abort instead of buffering an
            // unbounded decompression bomb.
            payload = gunzipSync(encoded, { maxOutputLength: limits.maxInflatedBytes });
        }
        catch (error) {
            if (isInflateLimitError(error)) {
                // Exact inflated size is unknown once zlib aborts, so report limit + 1.
                limitExceeded("inflate", "nbtBase64", limits.maxInflatedBytes + 1, limits.maxInflatedBytes);
            }
            // Any other failure means the payload is not valid gzip.
            inflateParseFailed(error);
        }
        compressionDetected = "gzip";
    }
    const typedJson = decodeJavaNbt(payload);
    assertByteLimit("decode-output", "typedJson", jsonUtf8Bytes(typedJson), limits.maxResponseBytes);
    return {
        typedJson,
        meta: {
            compressionDetected,
            inputBytes: encoded.length
        }
    };
}
|
|
150
|
+
/**
 * Encode a typed NBT JSON document back to binary NBT (optionally gzipped)
 * and return it as base64. The base64 string length is checked against
 * limits.maxResponseBytes; meta reports the raw (pre-base64) byte count and
 * the compression actually applied.
 */
export function typedJsonToNbtBase64(input, limits = DEFAULT_NBT_LIMITS) {
    const compression = parseEncodeCompression(input.compression);
    assertValidTypedNbtDocument(input.typedJson);
    const raw = encodeJavaNbt(input.typedJson);
    const output = compression === "gzip" ? gzipSync(raw) : raw;
    const nbtBase64 = output.toString("base64");
    const responseBytes = Buffer.byteLength(nbtBase64, "utf8");
    assertByteLimit("encode-output", "result", responseBytes, limits.maxResponseBytes);
    return {
        nbtBase64,
        meta: {
            outputBytes: output.length,
            compressionApplied: compression
        }
    };
}
|
|
167
|
+
/**
 * Validate `input.typedJson`, apply the JSON Patch to it, and enforce the
 * response-size limit on the patched document before returning the result.
 */
export function applyNbtJsonPatch(input, limits = DEFAULT_NBT_LIMITS) {
    assertValidTypedNbtDocument(input.typedJson);
    const result = applyJsonPatch(input.typedJson, input.patch);
    const responseBytes = jsonUtf8Bytes(result.typedJson);
    assertByteLimit("patch-output", "typedJson", responseBytes, limits.maxResponseBytes);
    return result;
}
|
|
173
|
+
//# sourceMappingURL=pipeline.js.map
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import type { TypedNbtDocument, TypedNbtValidationIssue } from "./types.js";
|
|
2
|
+
/** Outcome of validating a candidate typed NBT document. */
type ValidationResult = {
    ok: true;
} | {
    ok: false;
    /** First violation found; includes the offending JSON Pointer. */
    issue: TypedNbtValidationIssue;
};
/** Non-throwing validation of a candidate typed NBT document. */
export declare function validateTypedNbtDocument(value: unknown): ValidationResult;
/** Throwing variant; narrows `value` to TypedNbtDocument on success. */
export declare function assertValidTypedNbtDocument(value: unknown): asserts value is TypedNbtDocument;
export type { TypedNbtDocument, TypedNbtValidationIssue } from "./types.js";
|