mcbe-leveldb 1.8.0 → 1.9.1-jsonly

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/Changelog.md CHANGED
@@ -1,3 +1,29 @@
1
+ # v1.9.1
2
+
3
+ ## Critical Fixes
4
+
5
+ - Fixed an issue where the Data3D serializer could corrupt the data by serializing it with a number of bits per block by which 32 was not evenly divisible (also made the parser able to parse those corrupted subchunks, though the referenced palette indices in the values arrays will likely be incorrect for those corrupted subchunks).
6
+
7
+ ## Additions
8
+
9
+ - Added the optional `name` property to the `GenericPrismarineJSONNBTSchema` JSON schema.
10
+ - Made many types `exactOptionalPropertyTypes` friendly.
11
+
12
+ # v1.9.0
13
+
14
+ ## Additions
15
+
16
+ - Added the `LevelChunkMetaDataDictionary` NBT schema.
17
+ - Added the `Digest` NBT schema.
18
+
19
+ ## Changes
20
+
21
+ - Changed the custom data structure of the `Digest` content type.
22
+
23
+ ## Fixes
24
+
25
+ - `prettyPrintSNBT` no longer inserts newlines when the `indent` option is set to `0`.
26
+
1
27
  # v1.8.0
2
28
 
3
29
  ## Critical Fixes
package/LevelUtils.d.ts CHANGED
@@ -662,7 +662,7 @@ export declare const entryContentTypeToFormatMap: {
662
662
  */
663
663
  readonly type: "custom";
664
664
  /**
665
- * The format type that results from the {@link entryContentTypeToFormatMap.Entity.parse | parse} method.
665
+ * The format type that results from the {@link entryContentTypeToFormatMap.Digest.parse | parse} method.
666
666
  */
667
667
  readonly resultType: "JSONNBT";
668
668
  /**
@@ -675,21 +675,7 @@ export declare const entryContentTypeToFormatMap: {
675
675
  *
676
676
  * @throws {any} If an error occurs while parsing the data.
677
677
  */
678
- readonly parse: (data: Buffer) => Promise<{
679
- type: "compound";
680
- value: {
681
- entityIds: {
682
- type: "list";
683
- value: {
684
- type: "list";
685
- value: {
686
- type: "int";
687
- value: [id1: number, id2: number];
688
- }[];
689
- };
690
- };
691
- };
692
- }>;
678
+ readonly parse: (data: Buffer) => Promise<NBTSchemas.NBTSchemaTypes.Digest>;
693
679
  /**
694
680
  * The function to serialize the data.
695
681
  *
@@ -697,22 +683,10 @@ export declare const entryContentTypeToFormatMap: {
697
683
  *
698
684
  * @param data The data to serialize.
699
685
  * @returns The serialized data, as a buffer.
686
+ *
687
+ * @throws {any} If an error occurs while serializing the data.
700
688
  */
701
- readonly serialize: (data: {
702
- type: "compound";
703
- value: {
704
- entityIds: {
705
- type: "list";
706
- value: {
707
- type: "list";
708
- value: {
709
- type: "int";
710
- value: [id1: number, id2: number];
711
- }[];
712
- };
713
- };
714
- };
715
- }) => Buffer<ArrayBuffer>;
689
+ readonly serialize: (data: NBTSchemas.NBTSchemaTypes.Digest) => Buffer<ArrayBuffer>;
716
690
  };
717
691
  /**
718
692
  * The data for a map.
@@ -996,136 +970,11 @@ export declare const entryContentTypeToFormatMap: {
996
970
  *
997
971
  * The first 4 bytes represent the number of entries as a 32-bit little-endian integer (it is unknown if it is signed or not).
998
972
  *
999
- * `Array.from(data).reduce((a, b, i, ar)=>[...a, ...(Array.from(Buffer.from("0a000008", "hex")).every((v, ib)=>v === ar[i + ib]) ? [i] : [])], [])`
973
+ * The first 4 bytes are followed by multiple chunks of data, each formatted as the 8-byte hash of the NBT data followed by the NBT compound.
1000
974
  *
1001
- * This seems to be 4 bytes, followed by multiple chunks of data formatted as 8 bytes plus an NBT compound.
1002
975
  * ```
1003
976
  * {BYTEx4}{BYTEx8}{NBTCompound}{BYTEx8}{NBTCompound}{BYTEx8}{NBTCompound}{BYTEx8}{NBTCompound}
1004
977
  * ```
1005
- *
1006
- * ```js
1007
- * const data = (await tabManager.selectedTab.db.get("LevelChunkMetaDataDictionary"));
1008
- * const extractedData = [];
1009
- * for (let i = 12; i < data.length; i += 8) {
1010
- * const parsedData = await require("prismarine-nbt").parse(data.slice(i), "little");
1011
- * extractedData.push([data.slice(i - 8, i), parsedData]);
1012
- * i += parsedData.metadata.size;
1013
- * }
1014
- * ```
1015
- *
1016
- * Example first 4 bytes:
1017
- * ```json
1018
- * {
1019
- * "type": "Buffer",
1020
- * "data": [
1021
- * 109,
1022
- * 0,
1023
- * 0,
1024
- * 0
1025
- * ]
1026
- * }
1027
- * ```
1028
- *
1029
- * Example parsed NBT data chunk:
1030
- * ```json
1031
- * [
1032
- * {
1033
- * "type": "Buffer",
1034
- * "data": [
1035
- * 190,
1036
- * 156,
1037
- * 149,
1038
- * 210,
1039
- * 211,
1040
- * 150,
1041
- * 247,
1042
- * 2
1043
- * ]
1044
- * },
1045
- * {
1046
- * "metadata": {
1047
- * "size": 392,
1048
- * "buffer": {} // ...
1049
- * },
1050
- * "parsed": {
1051
- * "type": "compound",
1052
- * "name": "",
1053
- * "value": {
1054
- * "BiomeBaseGameVersion": {
1055
- * "type": "string",
1056
- * "value": "1.18.0"
1057
- * },
1058
- * "DimensionName": {
1059
- * "type": "string",
1060
- * "value": "Overworld"
1061
- * },
1062
- * "GenerationSeed": {
1063
- * "type": "long",
1064
- * "value": [
1065
- * -555685613,
1066
- * 920043062
1067
- * ]
1068
- * },
1069
- * "GeneratorType": {
1070
- * "type": "int",
1071
- * "value": 1
1072
- * },
1073
- * "LastSavedBaseGameVersion": {
1074
- * "type": "string",
1075
- * "value": "1.21.122"
1076
- * },
1077
- * "LastSavedDimensionHeightRange": {
1078
- * "type": "compound",
1079
- * "value": {
1080
- * "max": {
1081
- * "type": "short",
1082
- * "value": 320
1083
- * },
1084
- * "min": {
1085
- * "type": "short",
1086
- * "value": -64
1087
- * }
1088
- * }
1089
- * },
1090
- * "OriginalBaseGameVersion": {
1091
- * "type": "string",
1092
- * "value": "1.21.71"
1093
- * },
1094
- * "OriginalDimensionHeightRange": {
1095
- * "type": "compound",
1096
- * "value": {
1097
- * "max": {
1098
- * "type": "short",
1099
- * "value": 320
1100
- * },
1101
- * "min": {
1102
- * "type": "short",
1103
- * "value": -64
1104
- * }
1105
- * }
1106
- * },
1107
- * "Overworld1_18HeightExtended": {
1108
- * "type": "short",
1109
- * "value": 1
1110
- * },
1111
- * "SkullFlatteningPerformed": {
1112
- * "type": "short",
1113
- * "value": 1
1114
- * },
1115
- * "UnderwaterLavaLakeFixed": {
1116
- * "type": "short",
1117
- * "value": 1
1118
- * },
1119
- * "WorldGenBelowZeroFixed": {
1120
- * "type": "short",
1121
- * "value": 1
1122
- * }
1123
- * }
1124
- * },
1125
- * "type": "little"
1126
- * }
1127
- * ]
1128
- * ```
1129
978
  */
1130
979
  readonly LevelChunkMetaDataDictionary: {
1131
980
  /**
@@ -1133,18 +982,8 @@ export declare const entryContentTypeToFormatMap: {
1133
982
  */
1134
983
  readonly type: "custom";
1135
984
  readonly resultType: "JSONNBT";
1136
- readonly parse: (data: Buffer) => Promise<{
1137
- type: "compound";
1138
- value: {
1139
- [hashHex: string]: NBT.NBT;
1140
- };
1141
- }>;
1142
- readonly serialize: (data: {
1143
- type: "compound";
1144
- value: {
1145
- [hashHex: string]: NBT.NBT | NBT.Compound;
1146
- };
1147
- }) => Buffer<ArrayBuffer>;
985
+ readonly parse: (data: Buffer) => Promise<NBTSchemas.NBTSchemaTypes.LevelChunkMetaDataDictionary>;
986
+ readonly serialize: (data: NBTSchemas.NBTSchemaTypes.LevelChunkMetaDataDictionary) => Buffer<ArrayBuffer>;
1148
987
  };
1149
988
  /**
1150
989
  * @todo Figure out how to parse this. (It seems that each one just has a value of 1 (`0x31`). It also seems that the data is actually based on the key, which has an id that can be used with the realms API to get the corresponding data.)
package/LevelUtils.js CHANGED
@@ -229,12 +229,37 @@ function readSubchunkPaletteIds(buffer, offset, end) {
229
229
  }
230
230
  const values = new Array(4096);
231
231
  if (bitsPerBlock > 0) {
232
- const blocksPerWord = Math.floor(32 / bitsPerBlock);
232
+ const blocksPerWord = 32 / bitsPerBlock;
233
233
  const wordCount = Math.floor(4095 / blocksPerWord) + 1;
234
234
  const mask = (1 << bitsPerBlock) - 1;
235
+ // console.warn(
236
+ // "blocksPerWord:",
237
+ // blocksPerWord,
238
+ // "wordCount:",
239
+ // wordCount,
240
+ // "bitsPerBlock:",
241
+ // bitsPerBlock,
242
+ // "mask:",
243
+ // mask,
244
+ // "p:",
245
+ // p,
246
+ // "end:",
247
+ // end,
248
+ // "end-p:",
249
+ // end - p,
250
+ // "4*wordCount:",
251
+ // 4 * wordCount,
252
+ // "flags:",
253
+ // flags,
254
+ // "isPersistent:",
255
+ // isPersistent,
256
+ // "offset:",
257
+ // offset
258
+ // );
235
259
  if (end - p < 4 * wordCount) {
236
260
  throw new Error("Subchunk biome error: not enough data for block words.");
237
261
  }
262
+ // const originalP = p;
238
263
  let u = 0;
239
264
  for (let j = 0; j < wordCount; j++) {
240
265
  let temp = buffer[p] | (buffer[p + 1] << 8) | (buffer[p + 2] << 16) | (buffer[p + 3] << 24);
@@ -253,6 +278,15 @@ function readSubchunkPaletteIds(buffer, offset, end) {
253
278
  }
254
279
  const paletteSize = buffer[p] | (buffer[p + 1] << 8) | (buffer[p + 2] << 16) | (buffer[p + 3] << 24);
255
280
  p += 4;
281
+ // UNDONE: This does not actually restore the original data.
282
+ // Attempt to repair corrupted value data from versions <=v1.9.0 of the module.
283
+ // if (blocksPerWord !== Math.floor(blocksPerWord) && end - originalP < 4 * (Math.floor(4095 / Math.floor(blocksPerWord)) + 1)) {
284
+ // for (let u = 0; u < 4096; u++) {
285
+ // const v = values[u]!;
286
+ // const repairedValue: number = Math.floor(v / 2);
287
+ // values[u] = repairedValue;
288
+ // }
289
+ // }
256
290
  return { values, isPersistent, paletteSize, newOffset: p };
257
291
  }
258
292
  else {
@@ -265,7 +299,10 @@ function readSubchunkPaletteIds(buffer, offset, end) {
265
299
  }
266
300
  function writeSubchunkPaletteIds(values, paletteSize) {
267
301
  const blockCount = values.length; // usually 16*16*16 = 4096
268
- const bitsPerBlock = Math.max(1, Math.ceil(Math.log2(paletteSize)));
302
+ const bitsPerBlockOriginal = Math.max(1, Math.ceil(Math.log2(paletteSize)));
303
+ const bitsPerBlockDivisors = [1, 2, 4, 8, 16, 32];
304
+ const bitsPerBlock = bitsPerBlockDivisors.find((d) => d >= bitsPerBlockOriginal) ?? 32;
305
+ // console.log(bitsPerBlock);
269
306
  const wordsPerBlock = Math.ceil((blockCount * bitsPerBlock) / 32);
270
307
  const words = new Uint32Array(wordsPerBlock);
271
308
  // let bitIndex: number = 0;
@@ -1169,7 +1206,7 @@ export const entryContentTypeToFormatMap = {
1169
1206
  */
1170
1207
  type: "custom",
1171
1208
  /**
1172
- * The format type that results from the {@link entryContentTypeToFormatMap.Entity.parse | parse} method.
1209
+ * The format type that results from the {@link entryContentTypeToFormatMap.Digest.parse | parse} method.
1173
1210
  */
1174
1211
  resultType: "JSONNBT",
1175
1212
  /**
@@ -1185,7 +1222,7 @@ export const entryContentTypeToFormatMap = {
1185
1222
  async parse(data) {
1186
1223
  const entityIds = [];
1187
1224
  for (let i = 0; i < data.length; i += 8) {
1188
- entityIds.push({ type: "int", value: [data.readInt32LE(i), data.readInt32LE(i + 4)] });
1225
+ entityIds.push([data.readInt32LE(i), data.readInt32LE(i + 4)]);
1189
1226
  }
1190
1227
  return {
1191
1228
  type: "compound",
@@ -1193,7 +1230,7 @@ export const entryContentTypeToFormatMap = {
1193
1230
  entityIds: {
1194
1231
  type: "list",
1195
1232
  value: {
1196
- type: "list",
1233
+ type: "long",
1197
1234
  value: entityIds,
1198
1235
  },
1199
1236
  },
@@ -1207,12 +1244,14 @@ export const entryContentTypeToFormatMap = {
1207
1244
  *
1208
1245
  * @param data The data to serialize.
1209
1246
  * @returns The serialized data, as a buffer.
1247
+ *
1248
+ * @throws {any} If an error occurs while serializing the data.
1210
1249
  */
1211
1250
  serialize(data) {
1212
1251
  const rawData = data.value.entityIds.value.value.map((entityIds) => {
1213
1252
  const buffer = Buffer.alloc(8);
1214
- buffer.writeInt32LE(entityIds.value[0], 0);
1215
- buffer.writeInt32LE(entityIds.value[1], 4);
1253
+ buffer.writeInt32LE(entityIds[0], 0);
1254
+ buffer.writeInt32LE(entityIds[1], 4);
1216
1255
  return buffer;
1217
1256
  });
1218
1257
  return Buffer.concat(rawData);
@@ -1516,136 +1555,11 @@ export const entryContentTypeToFormatMap = {
1516
1555
  *
1517
1556
  * The first 4 bytes represent the number of entries as a 32-bit little-endian integer (it is unknown if it is signed or not).
1518
1557
  *
1519
- * `Array.from(data).reduce((a, b, i, ar)=>[...a, ...(Array.from(Buffer.from("0a000008", "hex")).every((v, ib)=>v === ar[i + ib]) ? [i] : [])], [])`
1558
+ * The first 4 bytes are followed by multiple chunks of data, each formatted as the 8-byte hash of the NBT data followed by the NBT compound.
1520
1559
  *
1521
- * This seems to be 4 bytes, followed by multiple chunks of data formatted as 8 bytes plus an NBT compound.
1522
1560
  * ```
1523
1561
  * {BYTEx4}{BYTEx8}{NBTCompound}{BYTEx8}{NBTCompound}{BYTEx8}{NBTCompound}{BYTEx8}{NBTCompound}
1524
1562
  * ```
1525
- *
1526
- * ```js
1527
- * const data = (await tabManager.selectedTab.db.get("LevelChunkMetaDataDictionary"));
1528
- * const extractedData = [];
1529
- * for (let i = 12; i < data.length; i += 8) {
1530
- * const parsedData = await require("prismarine-nbt").parse(data.slice(i), "little");
1531
- * extractedData.push([data.slice(i - 8, i), parsedData]);
1532
- * i += parsedData.metadata.size;
1533
- * }
1534
- * ```
1535
- *
1536
- * Example first 4 bytes:
1537
- * ```json
1538
- * {
1539
- * "type": "Buffer",
1540
- * "data": [
1541
- * 109,
1542
- * 0,
1543
- * 0,
1544
- * 0
1545
- * ]
1546
- * }
1547
- * ```
1548
- *
1549
- * Example parsed NBT data chunk:
1550
- * ```json
1551
- * [
1552
- * {
1553
- * "type": "Buffer",
1554
- * "data": [
1555
- * 190,
1556
- * 156,
1557
- * 149,
1558
- * 210,
1559
- * 211,
1560
- * 150,
1561
- * 247,
1562
- * 2
1563
- * ]
1564
- * },
1565
- * {
1566
- * "metadata": {
1567
- * "size": 392,
1568
- * "buffer": {} // ...
1569
- * },
1570
- * "parsed": {
1571
- * "type": "compound",
1572
- * "name": "",
1573
- * "value": {
1574
- * "BiomeBaseGameVersion": {
1575
- * "type": "string",
1576
- * "value": "1.18.0"
1577
- * },
1578
- * "DimensionName": {
1579
- * "type": "string",
1580
- * "value": "Overworld"
1581
- * },
1582
- * "GenerationSeed": {
1583
- * "type": "long",
1584
- * "value": [
1585
- * -555685613,
1586
- * 920043062
1587
- * ]
1588
- * },
1589
- * "GeneratorType": {
1590
- * "type": "int",
1591
- * "value": 1
1592
- * },
1593
- * "LastSavedBaseGameVersion": {
1594
- * "type": "string",
1595
- * "value": "1.21.122"
1596
- * },
1597
- * "LastSavedDimensionHeightRange": {
1598
- * "type": "compound",
1599
- * "value": {
1600
- * "max": {
1601
- * "type": "short",
1602
- * "value": 320
1603
- * },
1604
- * "min": {
1605
- * "type": "short",
1606
- * "value": -64
1607
- * }
1608
- * }
1609
- * },
1610
- * "OriginalBaseGameVersion": {
1611
- * "type": "string",
1612
- * "value": "1.21.71"
1613
- * },
1614
- * "OriginalDimensionHeightRange": {
1615
- * "type": "compound",
1616
- * "value": {
1617
- * "max": {
1618
- * "type": "short",
1619
- * "value": 320
1620
- * },
1621
- * "min": {
1622
- * "type": "short",
1623
- * "value": -64
1624
- * }
1625
- * }
1626
- * },
1627
- * "Overworld1_18HeightExtended": {
1628
- * "type": "short",
1629
- * "value": 1
1630
- * },
1631
- * "SkullFlatteningPerformed": {
1632
- * "type": "short",
1633
- * "value": 1
1634
- * },
1635
- * "UnderwaterLavaLakeFixed": {
1636
- * "type": "short",
1637
- * "value": 1
1638
- * },
1639
- * "WorldGenBelowZeroFixed": {
1640
- * "type": "short",
1641
- * "value": 1
1642
- * }
1643
- * }
1644
- * },
1645
- * "type": "little"
1646
- * }
1647
- * ]
1648
- * ```
1649
1563
  */
1650
1564
  LevelChunkMetaDataDictionary: {
1651
1565
  /**