@rhyster/wow-casc-dbc 1.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs ADDED
@@ -0,0 +1,1850 @@
1
+ import assert from 'node:assert';
2
+ import crypto from 'node:crypto';
3
+ import fs from 'node:fs/promises';
4
+ import path from 'node:path';
5
+ import http from 'node:http';
6
+ import zlib from 'node:zlib';
7
+
8
/**
 * Tiny JSON-file-backed key/value store.
 *
 * Every read/write first awaits the initial load of `dataFile`; a missing or
 * unparsable file simply leaves the store empty.
 */
class Store {
  constructor(dataFile) {
    this.dataFile = dataFile;
    this.data = {};
    // Hydration promise: get()/set() await this so they never observe a
    // half-loaded store. Load/parse failures are swallowed on purpose and
    // yield a fresh, empty store.
    this.promise = fs.readFile(dataFile, "utf-8")
      .then((raw) => {
        this.data = JSON.parse(raw);
      })
      .catch(() => undefined);
  }
  /** Resolve with the value stored under `key`, or undefined when absent. */
  async get(key) {
    await this.promise;
    return this.data[key];
  }
  /** Store `value` under `key` and persist the entire map back to disk. */
  async set(key, value) {
    await this.promise;
    this.data[key] = value;
    await fs.writeFile(this.dataFile, JSON.stringify(this.data), "utf-8");
  }
}
40
+
41
// Client identification and on-disk cache layout.
const USER_AGENT = "node-wow-casc-dbc";
const CACHE_ROOT = path.resolve("cache");
const CACHE_DIRS = {
  build: "builds",
  indexes: "indices",
  data: "data",
  dbd: "dbd"
};
const CACHE_INTEGRITY_FILE = path.resolve(CACHE_ROOT, "integrity.json");
// SHA-256 digests of every cached file, persisted across runs.
const cacheIntegrity = new Store(CACHE_INTEGRITY_FILE);
// CDN paths shard a hex key into two 2-character directory levels,
// e.g. "abcdef..." -> "ab/cd/abcdef...".
const formatCDNKey = (key) => `${key.slice(0, 2)}/${key.slice(2, 4)}/${key}`;
52
// Fetch `url` over HTTP and resolve with the full response body as a Buffer.
// When both `partialOffset` and `partialLength` are given, only that byte
// range is requested (RFC 7233 `Range` header); otherwise the entire resource
// ("bytes=0-") is fetched. 301/302 redirects are followed recursively via the
// Location header.
const requestData = async (url, partialOffset = void 0, partialLength = void 0) => new Promise((resolve, reject) => {
  // FIX: compare against undefined instead of truthiness — a partialOffset of
  // 0 is a valid range start and previously fell back to the full download.
  const isPartial = partialOffset !== void 0 && partialLength !== void 0;
  const options = {
    headers: {
      "User-Agent": USER_AGENT,
      Range: isPartial ? `bytes=${partialOffset.toString()}-${(partialOffset + partialLength - 1).toString()}` : "bytes=0-"
    }
  };
  http.get(url, options, (res) => {
    if (res.statusCode === 301 || res.statusCode === 302) {
      res.resume(); // drain the redirect body so the socket is released
      if (res.headers.location) {
        requestData(res.headers.location, partialOffset, partialLength).then(resolve).catch(reject);
      } else {
        reject(new Error(`Failed to request ${url}, Status Code: ${res.statusCode.toString()}`));
      }
      return;
    }
    if (!res.statusCode || res.statusCode < 200 || res.statusCode > 302) {
      res.resume(); // drain the error body so the socket is released
      reject(new Error(`Failed to request ${url}, Status Code: ${res.statusCode?.toString() ?? "undefined"}`));
      return;
    }
    const chunks = [];
    res.on("data", (chunk) => chunks.push(chunk));
    res.on("end", () => {
      resolve(Buffer.concat(chunks));
    });
  }).on("error", reject);
  // NOTE: http.get() calls req.end() itself; the previous explicit .end() was redundant.
});
79
/**
 * Download `<prefix>/<type>/<sharded key>` trying each CDN prefix in order.
 * Resolves with the first successful response; rejects when every prefix fails.
 */
const downloadFile = (prefixes, type, key, partialOffset = void 0, partialLength = void 0) => {
  // Seed with a rejected promise, then chain one fallback attempt per prefix.
  let attempt = Promise.reject(new Error(""));
  for (const prefix of prefixes) {
    const url = `${prefix}/${type}/${formatCDNKey(key)}`;
    attempt = attempt.catch(() => requestData(url, partialOffset, partialLength));
  }
  return attempt;
};
86
/**
 * Read `file` from the on-disk cache, returning its contents only when its
 * SHA-256 digest matches the recorded integrity value; otherwise undefined.
 */
const getFileCache = async (file) => {
  const expected = await cacheIntegrity.get(file);
  if (!expected) {
    return void 0;
  }
  try {
    const buffer = await fs.readFile(path.resolve(CACHE_ROOT, file));
    const actual = crypto.createHash("sha256").update(buffer).digest("hex");
    if (actual === expected) {
      return buffer;
    }
  } catch {
    // Unreadable cache entry — treat as a cache miss.
  }
  return void 0;
};
100
/**
 * Fetch a "data" file, preferring the on-disk cache.
 *
 * Ranged reads are sliced out of a cached full file when one exists. Fresh
 * downloads are written back to the cache (with a SHA-256 integrity record)
 * only when they are complete files or explicitly named — anonymous partial
 * downloads are never cached.
 */
const getDataFile = async (prefixes, key, type, buildCKey, name, partialOffset = void 0, partialLength = void 0) => {
  // Build-scoped files live under a per-build subdirectory.
  const dir = type === "build" ? path.join(CACHE_DIRS[type], buildCKey) : CACHE_DIRS[type];
  const file = path.join(dir, name || key);
  const cached = await getFileCache(file);
  if (cached) {
    const wantsSlice = name === void 0 && partialOffset !== void 0 && partialLength !== void 0;
    return wantsSlice ? cached.subarray(partialOffset, partialOffset + partialLength) : cached;
  }
  const downloaded = await downloadFile(prefixes, "data", key, partialOffset, partialLength);
  const isFullDownload = partialOffset === void 0 && partialLength === void 0;
  if (isFullDownload || name) {
    await fs.mkdir(path.resolve(CACHE_ROOT, dir), { recursive: true });
    await fs.writeFile(path.resolve(CACHE_ROOT, file), downloaded);
    const hash = crypto.createHash("sha256").update(downloaded).digest("hex");
    await cacheIntegrity.set(file, hash);
  }
  return downloaded;
};
119
/** Download a CDN "config" file and return it decoded as UTF-8 text. */
const getConfigFile = async (prefixes, key) => {
  const raw = await downloadFile(prefixes, "config", key);
  return raw.toString("utf-8");
};
123
/** Fetch the raw versions manifest for `product` from the region's patch service. */
const getProductVersions = async (region, product) => {
  const res = await fetch(`http://${region}.patch.battle.net:1119/${product}/versions`, {
    headers: { "User-Agent": USER_AGENT }
  });
  return res.text();
};
131
/** Fetch the raw CDN list for `product` from the region's patch service. */
const getProductCDNs = async (region, product) => {
  const res = await fetch(`http://${region}.patch.battle.net:1119/${product}/cdns`, {
    headers: { "User-Agent": USER_AGENT }
  });
  return res.text();
};
139
+
140
/**
 * Parse Blizzard's pipe-separated config text into an array of records.
 * The first line is the header row; each "Name!TYPE:..." column becomes the
 * key "Name". Blank lines and "#"-prefixed lines (e.g. "## seqn") are skipped.
 */
const parseProductConfig = (text) => {
  const lines = text.split(/\r?\n/);
  const headers = lines[0].split("|").map((header) => header.split("!")[0].replace(" ", ""));
  return lines.slice(1)
    .filter((line) => line.trim().length !== 0 && !line.startsWith("#"))
    .map((line) => {
      const record = {};
      line.split("|").forEach((value, column) => {
        record[headers[column]] = value;
      });
      return record;
    });
};
const parseProductVersions = (text) => parseProductConfig(text);
const parseProductCDNs = (text) => parseProductConfig(text);
155
+
156
+ const normalizeKey = (key) => key.split("-").map((part, index) => index === 0 ? part : `${part.charAt(0).toUpperCase()}${part.slice(1)}`).join("");
157
+ const parseConfig = (text) => {
158
+ const entries = {};
159
+ text.split(/\r?\n/).filter((line) => line.trim().length !== 0 && !line.startsWith("#")).forEach((line) => {
160
+ const match = line.match(/([^\s]+)\s?=\s?(.*)/);
161
+ assert(match !== null, "Invalid token encountered parsing CDN config");
162
+ const [key, value] = match.slice(1);
163
+ entries[normalizeKey(key)] = value;
164
+ });
165
+ return entries;
166
+ };
167
+ const parseCDNConfig = (text) => parseConfig(text);
168
+ const parseBuildConfig = (text) => parseConfig(text);
169
+
170
// Footer layout for a CASC archive index file. The *_SUB_OFFSET constants are
// relative to (byteLength - checksumSize); the positive offsets are relative
// to the version byte inside the footer.
const VERSION_SUB_OFFSET = -12;
const CHECKSUM_SIZE_SUB_OFFSET = -5;
const BLOCK_SIZE_OFFSET = 3;
const OFFSET_BYTES_OFFSET = 4;
const SIZE_BYTES_OFFSET = 5;
const KEY_SIZE_OFFSET = 6;
const NUM_ELEMENTS_OFFSET = 8;
const CHECKSUM_OFFSET = 12;
// Candidate checksum sizes, probed from largest to smallest.
const CHECKSUM_TRIES = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0];
// Infer the footer checksum size: a candidate matches when the footer's own
// checksum-size byte equals the candidate AND the version byte at the
// corresponding position reads 1. Exactly one candidate must survive,
// otherwise the index is rejected.
const tryArchiveIndexChecksumSize = (buffer, cKey) => {
  const res = CHECKSUM_TRIES.filter(
    (index) => buffer.readUInt8(buffer.byteLength - index + CHECKSUM_SIZE_SUB_OFFSET) === index && buffer.readUInt8(buffer.byteLength - index + VERSION_SUB_OFFSET) === 1
  );
  if (res.length === 1) {
    return res[0];
  }
  throw new Error(`Invalid checksum size: ${res.join(", ")} in ${cKey}`);
};
// Parse an archive index: validate the footer hash (the index's cKey is the
// MD5 of the footer), the footer checksum, the table-of-contents checksum and
// each block's checksum, then walk every block's fixed-size entries to build
// a Map of eKey (hex) -> { key: archive cKey, size, offset }.
const parseArchiveIndex = (buffer, cKey) => {
  const checksumSize = tryArchiveIndexChecksumSize(buffer, cKey);
  const versionOffset = buffer.byteLength - checksumSize + VERSION_SUB_OFFSET;
  const footerOffset = versionOffset - checksumSize;
  // The toc checksum is stored immediately before the version byte.
  const tocChecksum = buffer.toString("hex", footerOffset, versionOffset);
  const version = buffer.readUInt8(versionOffset);
  const blockSizeKB = buffer.readUInt8(versionOffset + BLOCK_SIZE_OFFSET);
  const offsetBytes = buffer.readUInt8(versionOffset + OFFSET_BYTES_OFFSET);
  const sizeBytes = buffer.readUInt8(versionOffset + SIZE_BYTES_OFFSET);
  const keySize = buffer.readUInt8(versionOffset + KEY_SIZE_OFFSET);
  const numElements = buffer.readUInt32LE(versionOffset + NUM_ELEMENTS_OFFSET);
  const footerChecksum = buffer.toString("hex", versionOffset + CHECKSUM_OFFSET);
  assert(version === 1, `Invalid version: ${version.toString()} in ${cKey}`);
  // Each entry is eKey + size + offset; sizes of the numeric fields come from
  // the footer above.
  const entrySize = keySize + offsetBytes + sizeBytes;
  const blockSize = blockSizeKB * 1024;
  // Every block contributes blockSize bytes of entries plus one toc key and
  // one toc checksum, so this division recovers the block count.
  const numBlocks = footerOffset / (blockSize + keySize + checksumSize);
  const tocSize = (keySize + checksumSize) * numBlocks;
  const toc = buffer.subarray(footerOffset - tocSize, footerOffset);
  const footer = buffer.subarray(footerOffset);
  // The footer's own checksum field is zeroed before hashing it.
  const footerCheckBuffer = Buffer.concat([
    buffer.subarray(versionOffset, buffer.byteLength - checksumSize),
    Buffer.alloc(checksumSize)
  ]);
  const hash = crypto.createHash("md5").update(footer).digest("hex");
  assert(hash === cKey, `Invalid footer hash in ${cKey}: expected ${cKey}, got ${hash}`);
  const footerHash = crypto.createHash("md5").update(footerCheckBuffer).digest("hex").slice(0, checksumSize * 2);
  assert(footerHash === footerChecksum, `Invalid footer checksum in ${cKey}: expected ${footerChecksum}, got ${footerHash}`);
  const tocHash = crypto.createHash("md5").update(toc).digest("hex").slice(0, checksumSize * 2);
  assert(tocHash === tocChecksum, `Invalid toc checksum in ${cKey}: expected ${tocChecksum}, got ${tocHash}`);
  const result = /* @__PURE__ */ new Map();
  for (let i = 0; i < numBlocks; i += 1) {
    // The toc stores one "last eKey" per block followed by one checksum per block.
    const lastEkey = toc.toString("hex", i * keySize, (i + 1) * keySize);
    const blockChecksum = toc.toString("hex", numBlocks * keySize + i * checksumSize, numBlocks * keySize + (i + 1) * checksumSize);
    const blockOffset = i * blockSize;
    const blockHash = crypto.createHash("md5").update(buffer.subarray(i * blockSize, (i + 1) * blockSize)).digest("hex").slice(0, checksumSize * 2);
    assert(blockChecksum === blockHash, `Invalid block hash in ${cKey} at ${i.toString()}: expected ${blockChecksum}, got ${blockHash}`);
    // Walk entries until the block's advertised last eKey is seen (or the
    // block is exhausted).
    let length = 0;
    while (length < blockSize) {
      const entryOffset = blockOffset + length * entrySize;
      const eKey = buffer.toString("hex", entryOffset, entryOffset + keySize);
      const size = buffer.readUIntBE(entryOffset + keySize, sizeBytes);
      const offset = buffer.readUIntBE(entryOffset + keySize + sizeBytes, offsetBytes);
      result.set(eKey, { key: cKey, size, offset });
      length += 1;
      if (eKey === lastEkey) {
        break;
      }
    }
  }
  assert(result.size === numElements, `Invalid number of elements: ${result.size.toString()} != ${numElements.toString()} in ${cKey}`);
  return result;
};
240
+
241
+ var __defProp$4 = Object.defineProperty;
242
+ var __defNormalProp$4 = (obj, key, value) => key in obj ? __defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
243
+ var __publicField$4 = (obj, key, value) => {
244
+ __defNormalProp$4(obj, typeof key !== "symbol" ? key + "" : key, value);
245
+ return value;
246
+ };
247
+ class Salsa20 {
248
+ constructor(key, nonce) {
249
+ __publicField$4(this, "fixed");
250
+ __publicField$4(this, "key");
251
+ __publicField$4(this, "nonce");
252
+ __publicField$4(this, "counter", new Uint32Array([0, 0]));
253
+ __publicField$4(this, "state", new Uint32Array(16));
254
+ __publicField$4(this, "block", new Uint8Array(64));
255
+ __publicField$4(this, "position", 0);
256
+ assert(key.length === 32 || key.length === 16, "Salsa20 requires 128-bit or 256-bit key");
257
+ assert(nonce.length === 8, "Salsa20 requires 64-bit nonce");
258
+ this.key = new Uint32Array(8);
259
+ const keyView = new DataView(key.buffer);
260
+ if (key.length === 32) {
261
+ for (let i = 0; i < 8; i += 1) {
262
+ this.key[i] = keyView.getUint32(i * 4, true);
263
+ }
264
+ this.fixed = new Uint32Array([
265
+ 1634760805,
266
+ 857760878,
267
+ 2036477234,
268
+ 1797285236
269
+ ]);
270
+ } else {
271
+ for (let i = 0; i < 4; i += 1) {
272
+ const word = keyView.getUint32(i * 4, true);
273
+ this.key[i] = word;
274
+ this.key[i + 4] = word;
275
+ }
276
+ this.fixed = new Uint32Array([
277
+ 1634760805,
278
+ 824206446,
279
+ 2036477238,
280
+ 1797285236
281
+ ]);
282
+ }
283
+ this.nonce = new Uint32Array(2);
284
+ const nonceView = new DataView(nonce.buffer);
285
+ for (let i = 0; i < 2; i += 1) {
286
+ this.nonce[i] = nonceView.getUint32(i * 4, true);
287
+ }
288
+ this.generateBlock();
289
+ }
290
+ QR(a, b, c, d) {
291
+ let t;
292
+ t = this.state[a] + this.state[d] & 4294967295;
293
+ this.state[b] ^= t << 7 | t >>> 25;
294
+ t = this.state[b] + this.state[a] & 4294967295;
295
+ this.state[c] ^= t << 9 | t >>> 23;
296
+ t = this.state[c] + this.state[b] & 4294967295;
297
+ this.state[d] ^= t << 13 | t >>> 19;
298
+ t = this.state[d] + this.state[c] & 4294967295;
299
+ this.state[a] ^= t << 18 | t >>> 14;
300
+ }
301
+ generateBlock() {
302
+ const init = new Uint32Array([
303
+ this.fixed[0],
304
+ this.key[0],
305
+ this.key[1],
306
+ this.key[2],
307
+ this.key[3],
308
+ this.fixed[1],
309
+ this.nonce[0],
310
+ this.nonce[1],
311
+ this.counter[0],
312
+ this.counter[1],
313
+ this.fixed[2],
314
+ this.key[4],
315
+ this.key[5],
316
+ this.key[6],
317
+ this.key[7],
318
+ this.fixed[3]
319
+ ]);
320
+ this.state = new Uint32Array(init);
321
+ for (let i = 0; i < 20; i += 2) {
322
+ this.QR(0, 4, 8, 12);
323
+ this.QR(5, 9, 13, 1);
324
+ this.QR(10, 14, 2, 6);
325
+ this.QR(15, 3, 7, 11);
326
+ this.QR(0, 1, 2, 3);
327
+ this.QR(5, 6, 7, 4);
328
+ this.QR(10, 11, 8, 9);
329
+ this.QR(15, 12, 13, 14);
330
+ }
331
+ for (let i = 0; i < 16; i += 1) {
332
+ const word = this.state[i] + init[i] & 4294967295;
333
+ this.block[i * 4] = word & 255;
334
+ this.block[i * 4 + 1] = word >>> 8 & 255;
335
+ this.block[i * 4 + 2] = word >>> 16 & 255;
336
+ this.block[i * 4 + 3] = word >>> 24 & 255;
337
+ }
338
+ this.counter[0] = this.counter[0] + 1 & 4294967295;
339
+ if (this.counter[0] === 0) {
340
+ this.counter[1] = this.counter[1] + 1 & 4294967295;
341
+ }
342
+ }
343
+ process(input) {
344
+ const { length } = input;
345
+ const result = new Uint8Array(length);
346
+ for (let i = 0; i < length; i += 1) {
347
+ if (this.position === 64) {
348
+ this.generateBlock();
349
+ this.position = 0;
350
+ }
351
+ result[i] = input[i] ^ this.block[this.position];
352
+ this.position += 1;
353
+ }
354
+ return result;
355
+ }
356
+ }
357
+
358
var __defProp$3 = Object.defineProperty;
var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __publicField$3 = (obj, key, value) => {
  __defNormalProp$3(obj, typeof key !== "symbol" ? key + "" : key, value);
  return value;
};
// "BLTE" as a big-endian uint32.
const BLTE_MAGIC = 1112298565;
// 'S' — the only encryption type handled by processBlock.
const ENC_TYPE_SALSA20 = 83;
const EMPTY_HASH = "00000000000000000000000000000000";
/**
 * Incremental reader for a BLTE-encoded buffer.
 *
 * The constructor parses and validates the BLTE header (the eKey must equal
 * the MD5 of the header, or of the whole buffer in the headerless case) and
 * records the block table. processBytes() then decompresses/decrypts blocks
 * on demand, appending the plaintext to `this.buffer`.
 */
class BLTEReader {
  constructor(buffer, eKey, keys = /* @__PURE__ */ new Map()) {
    // Decoded output accumulated so far.
    __publicField$3(this, "buffer");
    // The raw BLTE input.
    __publicField$3(this, "blte");
    // Parsed block table: { compressedSize, decompressedSize, hash }.
    __publicField$3(this, "blocks", []);
    // keyName (hex, lowercase) -> Salsa20 key bytes, for encrypted blocks.
    __publicField$3(this, "keys");
    // Index of the next block to decode.
    __publicField$3(this, "processedBlock", 0);
    // Byte offset into `blte` of the next block's data.
    __publicField$3(this, "processedOffset", 0);
    this.blte = buffer;
    this.buffer = Buffer.alloc(0);
    this.keys = keys;
    const size = buffer.byteLength;
    assert(size >= 8, `[BLTE]: Invalid size: ${size.toString()} < 8`);
    const magic = buffer.readUInt32BE(0);
    assert(magic === BLTE_MAGIC, `[BLTE]: Invalid magic: ${magic.toString(16).padStart(8, "0")}`);
    const headerSize = buffer.readUInt32BE(4);
    if (headerSize === 0) {
      // Headerless layout: a single block follows the 8-byte magic/size;
      // the eKey is the MD5 of the entire buffer.
      const blteHash2 = crypto.createHash("md5").update(buffer).digest("hex");
      assert(blteHash2 === eKey, `[BLTE]: Invalid hash: expected ${eKey}, got ${blteHash2}`);
      this.blocks.push({
        compressedSize: size - 8,
        // decompressedSize excludes the block's 1-byte type flag as well.
        decompressedSize: size - 9,
        hash: EMPTY_HASH
      });
      this.processedOffset = 8;
      return;
    }
    // Normal layout: the eKey is the MD5 of the header only.
    const blteHash = crypto.createHash("md5").update(buffer.subarray(0, headerSize)).digest("hex");
    assert(blteHash === eKey, `[BLTE]: Invalid hash: expected ${eKey}, got ${blteHash}`);
    assert(size >= 12, `[BLTE]: Invalid size: ${size.toString()} < 12`);
    const flag = buffer.readUInt8(8);
    // 24-bit big-endian block count.
    const numBlocks = buffer.readIntBE(9, 3);
    assert(numBlocks > 0, `[BLTE]: Invalid number of blocks: ${numBlocks.toString()}`);
    assert(flag === 15, `[BLTE]: Invalid flag: ${flag.toString(16).padStart(2, "0")}`);
    // Each table entry is 24 bytes: two uint32 sizes + a 16-byte MD5.
    const blockHeaderSize = numBlocks * 24;
    assert(headerSize === blockHeaderSize + 12, `[BLTE]: Invalid header size: header size ${headerSize.toString()} != block header size ${blockHeaderSize.toString()} + 12`);
    assert(size >= headerSize, `[BLTE]: Invalid size: ${size.toString()} < ${headerSize.toString()}`);
    for (let i = 0; i < numBlocks; i += 1) {
      const offset = 12 + i * 24;
      const compressedSize = buffer.readUInt32BE(offset);
      const decompressedSize = buffer.readUInt32BE(offset + 4);
      const hash = buffer.toString("hex", offset + 8, offset + 24);
      this.blocks.push({
        compressedSize,
        decompressedSize,
        hash
      });
    }
    this.processedOffset = headerSize;
  }
  /**
   * Decode one block. The first byte selects the encoding:
   *   'E' (0x45) encrypted, 'F' (0x46) recursive frame (unsupported),
   *   'N' (0x4E) plain, 'Z' (0x5A) zlib-deflated.
   * Returns the decoded Buffer, or — when allowMissingKey is true and the
   * decryption key is unknown — the missing key's name (hex string).
   */
  processBlock(buffer, index, allowMissingKey) {
    const flag = buffer.readUInt8(0);
    switch (flag) {
      case 69: {
        // Encrypted block: [keyNameLen][keyName][ivLen][iv][encType][payload].
        let offset = 1;
        const keyNameLength = buffer.readUInt8(offset);
        offset += 1;
        const keyNameBE = buffer.toString("hex", offset, offset + keyNameLength);
        offset += keyNameLength;
        const ivLength = buffer.readUInt8(offset);
        offset += 1;
        const ivBuffer = buffer.subarray(offset, offset + ivLength);
        offset += ivLength;
        const encryptType = buffer.readUInt8(offset);
        offset += 1;
        assert(encryptType === ENC_TYPE_SALSA20, `[BLTE]: Invalid encrypt type: ${encryptType.toString(16).padStart(2, "0")}`);
        // Key name is stored big-endian: reverse its bytes to get the lookup key.
        const keyName = [...keyNameBE.matchAll(/.{2}/g)].map((v) => v[0]).reverse().join("").toLowerCase();
        const key = this.keys.get(keyName);
        if (!key) {
          if (allowMissingKey) {
            return keyName;
          }
          throw new Error(`[BLTE]: Missing key: ${keyName}`);
        }
        // Build the 8-byte IV: stored IV bytes XOR-ed with the block index
        // (little-endian); missing trailing bytes become 0.
        const iv = new Uint8Array(8);
        for (let i = 0; i < 8; i += 1) {
          const byte = ivBuffer.byteLength > i ? ivBuffer.readUInt8(i) : void 0;
          iv[i] = byte ? byte ^ index >> 8 * i & 255 : 0;
        }
        const handler = new Salsa20(key, iv);
        const decrypted = handler.process(buffer.subarray(offset));
        // The decrypted payload is itself a BLTE block — recurse to decode it.
        if (allowMissingKey) {
          return this.processBlock(Buffer.from(decrypted.buffer), index, true);
        }
        return this.processBlock(Buffer.from(decrypted.buffer), index, false);
      }
      case 70:
        throw new Error("[BLTE]: Frame (Recursive) block not supported");
      case 78:
        // Plain block: payload follows the flag byte verbatim.
        return buffer.subarray(1);
      case 90:
        // Zlib-compressed block.
        return zlib.inflateSync(buffer.subarray(1));
      default:
        throw new Error(`[BLTE]: Invalid block flag: ${flag.toString(16).padStart(2, "0")}`);
    }
  }
  /**
   * Decode blocks until at least `size` plaintext bytes are available (or all
   * blocks are consumed), appending to `this.buffer`.
   * With allowMissingKey, blocks whose key is unknown are zero-filled and the
   * return value lists them as { offset, size, blockIndex, keyName };
   * otherwise returns undefined and throws on a missing key.
   */
  processBytes(allowMissingKey = false, size = Infinity) {
    const missingKeyBlocks = [];
    while (this.processedBlock < this.blocks.length && size > this.buffer.byteLength) {
      const blockIndex = this.processedBlock;
      const block = this.blocks[blockIndex];
      const blockBuffer = this.blte.subarray(
        this.processedOffset,
        this.processedOffset + block.compressedSize
      );
      // EMPTY_HASH (headerless layout) means there is no per-block checksum.
      if (block.hash !== EMPTY_HASH) {
        const blockHash = crypto.createHash("md5").update(blockBuffer).digest("hex");
        assert(blockHash === block.hash, `[BLTE]: Invalid block hash: expected ${block.hash}, got ${blockHash}`);
      }
      if (allowMissingKey) {
        const buffer = this.processBlock(blockBuffer, blockIndex, allowMissingKey);
        if (buffer instanceof Buffer) {
          assert(
            buffer.byteLength === block.decompressedSize,
            `[BLTE]: Invalid decompressed size: expected ${block.decompressedSize.toString()}, got ${buffer.byteLength.toString()}`
          );
          this.buffer = Buffer.concat([this.buffer, buffer]);
        } else {
          // Key unknown: record the gap and pad the output with zeroes so
          // later offsets stay correct.
          missingKeyBlocks.push({
            offset: this.buffer.byteLength,
            size: block.decompressedSize,
            blockIndex,
            keyName: buffer
          });
          this.buffer = Buffer.concat([
            this.buffer,
            Buffer.alloc(block.decompressedSize)
          ]);
        }
      } else {
        const buffer = this.processBlock(blockBuffer, blockIndex, allowMissingKey);
        assert(
          buffer.byteLength === block.decompressedSize,
          `[BLTE]: Invalid decompressed size: expected ${block.decompressedSize.toString()}, got ${buffer.byteLength.toString()}`
        );
        this.buffer = Buffer.concat([this.buffer, buffer]);
      }
      this.processedBlock += 1;
      this.processedOffset += block.compressedSize;
    }
    return allowMissingKey ? missingKeyBlocks : void 0;
  }
}
510
+
511
// "EN" as a big-endian uint16.
const ENC_MAGIC = 17742;
// Header field offsets within the decoded encoding file.
const MAGIC_OFFSET = 0;
const VERSION_OFFSET = 2;
const HASH_SIZE_CKEY_OFFSET = 3;
const HASH_SIZE_EKEY_OFFSET = 4;
const CKEY_PAGE_SIZE_OFFSET = 5;
const EKEY_PAGE_SIZE_OFFSET = 7;
const CKEY_PAGE_COUNT_OFFSET = 9;
const EKEY_PAGE_COUNT_OFFSET = 13;
const SPEC_BLOCK_SIZE_OFFSET = 18;
const SPEC_BLOCK_OFFSET = 22;
/**
 * Decode (BLTE) and parse a CASC encoding file.
 *
 * Layout: header, ESpec string block (NUL-separated ASCII), cKey page index +
 * pages, then eKey page index + pages. Every page is MD5-checked against its
 * index entry, and the whole decoded file is MD5-checked against `cKey`.
 * Returns { eSpec, cKey2FileSize, cKey2EKey, eKey2ESpecIndex, eKey2FileSize };
 * cKey2EKey maps to a single hex eKey when a content key has exactly one
 * encoding, otherwise to an array of hex eKeys.
 */
const parseEncodingFile = (inputBuffer, eKey, cKey) => {
  const reader = new BLTEReader(inputBuffer, eKey);
  reader.processBytes();
  const { buffer } = reader;
  const encodingHash = crypto.createHash("md5").update(buffer).digest("hex");
  assert(encodingHash === cKey, `Invalid encoding hash: expected ${cKey}, got ${encodingHash}`);
  const magic = buffer.readUInt16BE(MAGIC_OFFSET);
  assert(magic === ENC_MAGIC, `Invalid encoding magic: ${magic.toString(16).padStart(4, "0")}`);
  const version = buffer.readUInt8(VERSION_OFFSET);
  const hashSizeCKey = buffer.readUInt8(HASH_SIZE_CKEY_OFFSET);
  const hashSizeEKey = buffer.readUInt8(HASH_SIZE_EKEY_OFFSET);
  const cKeyPageSizeKB = buffer.readUInt16BE(CKEY_PAGE_SIZE_OFFSET);
  const eKeyPageSizeKB = buffer.readUInt16BE(EKEY_PAGE_SIZE_OFFSET);
  const cKeyPageCount = buffer.readUInt32BE(CKEY_PAGE_COUNT_OFFSET);
  const eKeyPageCount = buffer.readUInt32BE(EKEY_PAGE_COUNT_OFFSET);
  const specBlockSize = buffer.readUInt32BE(SPEC_BLOCK_SIZE_OFFSET);
  assert(version === 1, `Invalid encoding version: ${version.toString()}`);
  // ESpec strings: NUL-terminated ASCII, referenced by index from eKey pages.
  const eSpec = [];
  let eSpecStringStart = SPEC_BLOCK_OFFSET;
  for (let i = SPEC_BLOCK_OFFSET; i < SPEC_BLOCK_OFFSET + specBlockSize; i += 1) {
    if (buffer[i] === 0) {
      eSpec.push(buffer.toString("ascii", eSpecStringStart, i));
      eSpecStringStart = i + 1;
    }
  }
  const cKey2FileSize = /* @__PURE__ */ new Map();
  const cKey2EKey = /* @__PURE__ */ new Map();
  // cKey page index: one (firstCKey, pageMD5) entry per page.
  const cKeyPageIndexOffset = SPEC_BLOCK_OFFSET + specBlockSize;
  const cKeyPageIndexEntrySize = hashSizeCKey + 16;
  const cKeyPageOffset = cKeyPageIndexOffset + cKeyPageIndexEntrySize * cKeyPageCount;
  const cKeyPageSize = cKeyPageSizeKB * 1024;
  for (let i = 0; i < cKeyPageCount; i += 1) {
    const indexOffset = cKeyPageIndexOffset + i * cKeyPageIndexEntrySize;
    const pageOffset = cKeyPageOffset + i * cKeyPageSize;
    const firstCKey = buffer.toString("hex", indexOffset, indexOffset + hashSizeCKey);
    const pageChecksum = buffer.toString("hex", indexOffset + hashSizeCKey, indexOffset + hashSizeCKey + 16);
    const pageBuffer = buffer.subarray(pageOffset, pageOffset + cKeyPageSize);
    const pageHash = crypto.createHash("md5").update(pageBuffer).digest("hex");
    assert(pageHash === pageChecksum, `Invalid ckey page ${i.toString()} checksum: expected ${pageChecksum}, got ${pageHash}`);
    // The first record's cKey sits after the 6-byte (keyCount + fileSize) prefix.
    const pageFirstCKey = pageBuffer.toString("hex", 6, 6 + hashSizeCKey);
    assert(pageFirstCKey === firstCKey, `Invalid ckey page ${i.toString()} first ckey: expected ${firstCKey}, got ${pageFirstCKey}`);
    let pagePointer = 0;
    while (pagePointer < cKeyPageSize) {
      // Record: [keyCount u8][fileSize u40 BE][cKey][eKey x keyCount].
      const keyCount = pageBuffer.readUInt8(pagePointer);
      pagePointer += 1;
      // keyCount 0 marks the end of the page (zero padding).
      if (keyCount === 0) {
        break;
      }
      const fileSize = pageBuffer.readUIntBE(pagePointer, 5);
      pagePointer += 5;
      const fileCKey = pageBuffer.toString("hex", pagePointer, pagePointer + hashSizeCKey);
      pagePointer += hashSizeCKey;
      cKey2FileSize.set(fileCKey, fileSize);
      if (keyCount === 1) {
        const fileEKey = pageBuffer.toString("hex", pagePointer, pagePointer + hashSizeEKey);
        cKey2EKey.set(fileCKey, fileEKey);
        pagePointer += hashSizeEKey;
      } else {
        const fileEKeys = [];
        for (let j = 0; j < keyCount; j += 1) {
          const fileEKey = pageBuffer.toString("hex", pagePointer, pagePointer + hashSizeEKey);
          fileEKeys.push(fileEKey);
          pagePointer += hashSizeEKey;
        }
        cKey2EKey.set(fileCKey, fileEKeys);
      }
    }
  }
  const eKey2ESpecIndex = /* @__PURE__ */ new Map();
  const eKey2FileSize = /* @__PURE__ */ new Map();
  // eKey page index mirrors the cKey one, following the cKey pages.
  const eKeyPageIndexOffset = cKeyPageOffset + cKeyPageSize * cKeyPageCount;
  const eKeyPageIndexEntrySize = hashSizeEKey + 16;
  const eKeyPageOffset = eKeyPageIndexOffset + eKeyPageIndexEntrySize * eKeyPageCount;
  const eKeyPageSize = eKeyPageSizeKB * 1024;
  // Fixed-size record: [eKey][eSpecIndex u32 BE][fileSize u40 BE].
  const eKeyPageEntrySize = hashSizeEKey + 4 + 5;
  for (let i = 0; i < eKeyPageCount; i += 1) {
    const indexOffset = eKeyPageIndexOffset + i * eKeyPageIndexEntrySize;
    const pageOffset = eKeyPageOffset + i * eKeyPageSize;
    const firstEKey = buffer.toString("hex", indexOffset, indexOffset + hashSizeEKey);
    const pageChecksum = buffer.toString("hex", indexOffset + hashSizeEKey, indexOffset + hashSizeEKey + 16);
    const pageBuffer = buffer.subarray(pageOffset, pageOffset + eKeyPageSize);
    const pageHash = crypto.createHash("md5").update(pageBuffer).digest("hex");
    assert(pageHash === pageChecksum, `Invalid ekey page ${i.toString()} checksum: expected ${pageChecksum}, got ${pageHash}`);
    const pageFirstEKey = pageBuffer.toString("hex", 0, hashSizeEKey);
    assert(pageFirstEKey === firstEKey, `Invalid ekey page ${i.toString()} first ekey: expected ${firstEKey}, got ${pageFirstEKey}`);
    let pagePointer = 0;
    while (pagePointer + eKeyPageEntrySize <= eKeyPageSize) {
      const fileEKey = pageBuffer.toString("hex", pagePointer, pagePointer + hashSizeEKey);
      pagePointer += hashSizeEKey;
      const eSpecIndex = pageBuffer.readUInt32BE(pagePointer);
      pagePointer += 4;
      eKey2ESpecIndex.set(fileEKey, eSpecIndex);
      const fileSize = pageBuffer.readUIntBE(pagePointer, 5);
      pagePointer += 5;
      eKey2FileSize.set(fileEKey, fileSize);
    }
  }
  return {
    eSpec,
    cKey2FileSize,
    cKey2EKey,
    eKey2ESpecIndex,
    eKey2FileSize
  };
};
627
+
628
// "TSFM" as a little-endian uint32 — marks the post-8.2 manifest root format.
const MFST_MAGIC = 1296454484;
// Per-record content flag bits stored in the root file.
const ContentFlags = {
  Install: 4,
  LoadOnWindows: 8,
  LoadOnMacOS: 16,
  x86_32: 32,
  x86_64: 64,
  LowViolence: 128,
  DoNotLoad: 256,
  UpdatePlugin: 2048,
  ARM64: 32768,
  Encrypted: 134217728,
  NoNameHash: 268435456,
  UncommonResolution: 536870912,
  Bundle: 1073741824,
  NoCompression: 2147483648
};
// Per-record locale flag bits stored in the root file.
const LocaleFlags = {
  enUS: 2,
  koKR: 4,
  frFR: 16,
  deDE: 32,
  zhCN: 64,
  esES: 128,
  zhTW: 256,
  enGB: 512,
  // enCN: 0x400,
  // enTW: 0x800,
  esMX: 4096,
  ruRU: 8192,
  ptBR: 16384,
  itIT: 32768,
  ptPT: 65536
};
/**
 * Decode (BLTE) and parse a WoW root file into:
 *   - fileDataID2CKey: fileDataID -> [{ cKey, contentFlags, localeFlags }, ...]
 *     (one entry per record group the ID appears in), and
 *   - nameHash2FileDataID: name hash (hex) -> fileDataID.
 * Handles both the MFST (manifest) format and the older pre-MFST layout;
 * the decoded file's MD5 must equal `cKey`.
 */
const parseRootFile = (inputBuffer, eKey, cKey) => {
  const reader = new BLTEReader(inputBuffer, eKey);
  reader.processBytes();
  const { buffer } = reader;
  const rootHash = crypto.createHash("md5").update(buffer).digest("hex");
  assert(rootHash === cKey, `Invalid root hash: expected ${cKey}, got ${rootHash}`);
  const fileDataID2CKey = /* @__PURE__ */ new Map();
  const nameHash2FileDataID = /* @__PURE__ */ new Map();
  const magic = buffer.readUInt32LE(0);
  if (magic === MFST_MAGIC) {
    // MFST header: a small first value (< 100) indicates the newer header
    // carrying its own size; otherwise the legacy 12-byte MFST header.
    const firstEntry = buffer.readUInt32LE(4);
    const newFormat = firstEntry < 100;
    const headerSize = newFormat ? firstEntry : 12;
    const totalFileCount = newFormat ? buffer.readUInt32LE(12) : firstEntry;
    const namedFileCount = newFormat ? buffer.readUInt32LE(16) : buffer.readUInt32LE(8);
    // When some files are unnamed, blocks flagged NoNameHash omit the name
    // hash array entirely.
    const allowNonNamedFiles = totalFileCount !== namedFileCount;
    let pointer = headerSize;
    while (pointer < buffer.byteLength) {
      // Block header: record count + shared content/locale flags.
      const numRecords = buffer.readUInt32LE(pointer);
      const contentFlags = buffer.readUInt32LE(pointer + 4);
      const localeFlags = buffer.readUInt32LE(pointer + 8);
      pointer += 12;
      // fileDataIDs are delta-encoded: each stored value is the gap minus 1.
      const fileDataIDs = [];
      let currFileDataID = -1;
      for (let i = 0; i < numRecords; i += 1) {
        currFileDataID += buffer.readUInt32LE(pointer) + 1;
        fileDataIDs.push(currFileDataID);
        pointer += 4;
      }
      // One 16-byte cKey per record; IDs may repeat across blocks with
      // different flag combinations.
      for (let i = 0; i < numRecords; i += 1) {
        const fileDataID = fileDataIDs[i];
        const fileCKey = buffer.toString("hex", pointer, pointer + 16);
        pointer += 16;
        if (fileDataID2CKey.has(fileDataID)) {
          fileDataID2CKey.get(fileDataID)?.push({
            cKey: fileCKey,
            contentFlags,
            localeFlags
          });
        } else {
          fileDataID2CKey.set(fileDataID, [
            { cKey: fileCKey, contentFlags, localeFlags }
          ]);
        }
      }
      // Name hashes (u64 LE, stored as 16-hex-char strings) are present
      // unless this block opted out via NoNameHash.
      if (!(allowNonNamedFiles && contentFlags & ContentFlags.NoNameHash)) {
        for (let i = 0; i < numRecords; i += 1) {
          const fileDataID = fileDataIDs[i];
          const nameHash = buffer.readBigUInt64LE(pointer).toString(16).padStart(16, "0");
          pointer += 8;
          nameHash2FileDataID.set(nameHash, fileDataID);
        }
      }
    }
  } else {
    // Legacy (pre-MFST) format: no file header; records interleave a 16-byte
    // cKey and an 8-byte name hash.
    let pointer = 0;
    while (pointer < buffer.byteLength) {
      const numRecords = buffer.readUInt32LE(pointer);
      const contentFlags = buffer.readUInt32LE(pointer + 4);
      const localeFlags = buffer.readUInt32LE(pointer + 8);
      pointer += 12;
      // Same delta encoding of fileDataIDs as the MFST branch.
      const fileDataIDs = [];
      let currFileDataID = -1;
      for (let i = 0; i < numRecords; i += 1) {
        currFileDataID += buffer.readUInt32LE(pointer) + 1;
        fileDataIDs.push(currFileDataID);
        pointer += 4;
      }
      for (let i = 0; i < numRecords; i += 1) {
        const fileDataID = fileDataIDs[i];
        const fileCKey = buffer.toString("hex", pointer, pointer + 16);
        const nameHash = buffer.toString("hex", pointer + 16, pointer + 24);
        pointer += 24;
        if (fileDataID2CKey.has(fileDataID)) {
          fileDataID2CKey.get(fileDataID)?.push({
            cKey: fileCKey,
            contentFlags,
            localeFlags
          });
        } else {
          fileDataID2CKey.set(fileDataID, [
            { cKey: fileCKey, contentFlags, localeFlags }
          ]);
        }
        nameHash2FileDataID.set(nameHash, fileDataID);
      }
    }
  }
  return { fileDataID2CKey, nameHash2FileDataID };
};
752
+
753
/**
 * Bob Jenkins' lookup3 `hashlittle2`: hashes `key` one UTF-16 code unit at a
 * time (each unit contributing one "byte" position) and produces two 32-bit
 * hash values.
 *
 * @param {string} key - String to hash.
 * @param {number} [pc=0] - Primary seed.
 * @param {number} [pb=0] - Secondary seed (folded into `c` only).
 * @returns {[number, number]} `[c, b]` as unsigned 32-bit integers.
 */
const hashlittle2 = (key, pc = 0, pb = 0) => {
  // 32-bit left rotation; operands are coerced to int32 by the bit operators.
  const rot = (x, k) => x << k | x >>> 32 - k;
  const { length } = key;
  let cursor = 0;
  // All three accumulators start at 0xdeadbeef + length + seed(s).
  let a = 3735928559 + length + pc | 0;
  let b = 3735928559 + length + pc | 0;
  let c = 3735928559 + length + pc + pb | 0;
  // Main loop: fold 12 characters per iteration while more than 12 remain.
  while (length - cursor > 12) {
    for (let i = 0; i < 4; i += 1) {
      a += key.charCodeAt(cursor + i) << i * 8;
      b += key.charCodeAt(cursor + 4 + i) << i * 8;
      c += key.charCodeAt(cursor + 8 + i) << i * 8;
    }
    // lookup3 mix(): reversible mixing of a, b and c.
    a -= c;
    a ^= rot(c, 4);
    c = c + b | 0;
    b -= a;
    b ^= rot(a, 6);
    a = a + c | 0;
    c -= b;
    c ^= rot(b, 8);
    b = b + a | 0;
    a -= c;
    a ^= rot(c, 16);
    c = c + b | 0;
    b -= a;
    b ^= rot(a, 19);
    a = a + c | 0;
    c -= b;
    c ^= rot(b, 4);
    b = b + a | 0;
    cursor += 12;
  }
  const tail = length - cursor;
  if (tail > 0) {
    // Fold the remaining 1-12 characters: positions 0-3 feed a, 4-7 feed b,
    // 8-11 feed c, each shifted into its byte lane.
    for (let i = 0; i < tail; i += 1) {
      const lane = key.charCodeAt(cursor + i) << i % 4 * 8;
      if (i < 4) {
        a += lane;
      } else if (i < 8) {
        b += lane;
      } else {
        c += lane;
      }
    }
    // lookup3 final(): irreversible finalisation.
    c ^= b;
    c -= rot(b, 14);
    a ^= c;
    a -= rot(c, 11);
    b ^= a;
    b -= rot(a, 25);
    c ^= b;
    c -= rot(b, 16);
    a ^= c;
    a -= rot(c, 4);
    b ^= a;
    b -= rot(a, 14);
    c ^= b;
    c -= rot(b, 24);
  }
  return [c >>> 0, b >>> 0];
};
836
/**
 * Computes the CASC name hash for a file path: the path is upper-cased with
 * forward slashes normalised to backslashes, then hashed with `hashlittle2`
 * and rendered as a fixed-width 16-digit hex string (pc first, pb second).
 *
 * @param {string} name - File path, e.g. `interface/icons/foo.blp`.
 * @returns {string} 16-character lowercase hex name hash.
 */
const getNameHash = (name) => {
  const canonical = name.toUpperCase().replaceAll("/", "\\");
  const [pc, pb] = hashlittle2(canonical);
  const hex = (value) => value.toString(16).padStart(8, "0");
  return `${hex(pc)}${hex(pb)}`;
};
841
+
842
// Bundler helpers emulating class-field assignment semantics.
var __defProp$2 = Object.defineProperty;
var __defNormalProp$2 = (obj, key, value) => {
  // Keys already present (own or inherited) go through [[DefineOwnProperty]]
  // so accessors on the prototype chain are not triggered.
  if (key in obj) {
    return __defProp$2(obj, key, {
      enumerable: true,
      configurable: true,
      writable: true,
      value
    });
  }
  obj[key] = value;
  return value;
};
var __publicField$2 = (obj, key, value) => {
  // Non-symbol keys are coerced to strings, matching class-field behaviour.
  __defNormalProp$2(obj, typeof key === "symbol" ? key : key + "", value);
  return value;
};
848
// ASCII "WDC5" read as a big-endian uint32.
const WDC5_MAGIC = 1464091445;
/**
 * Reads a little-endian bitpacked field out of a DB2 record buffer.
 *
 * @param {Buffer} buffer - Record bytes.
 * @param {number} fieldOffsetBits - Bit offset of the field from the start of the buffer.
 * @param {number} fieldSizeBits - Width of the field in bits.
 * @param {boolean} [signed=false] - Interpret the field as two's complement.
 * @returns {number | bigint} A number when the covered byte span is at most
 * 6 bytes, otherwise a bigint (callers branch on `typeof value === "bigint"`).
 */
const readBitpackedValue = (buffer, fieldOffsetBits, fieldSizeBits, signed = false) => {
  const offsetBytes = fieldOffsetBits >>> 3;
  const bitOffset = fieldOffsetBits & 7;
  // Number of whole bytes the field spans once the leading bit offset is included.
  const sizeBytes = Math.ceil((fieldSizeBits + bitOffset) / 8);
  if (sizeBytes <= 6) {
    const rawValue = buffer.readUIntLE(offsetBytes, sizeBytes);
    // Shift in BigInt space: `rawValue >>> bitOffset` would first coerce the
    // raw value to uint32, truncating 5- and 6-byte spans before the shift.
    const shifted = BigInt(rawValue) >> BigInt(bitOffset);
    return Number(
      signed ? BigInt.asIntN(fieldSizeBits, shifted) : BigInt.asUintN(fieldSizeBits, shifted)
    );
  }
  // Wider fields: assemble the span up to 6 bytes at a time from the high end,
  // then shift and mask in BigInt space.
  let remain = sizeBytes;
  let value = 0n;
  while (remain > 0) {
    const byteLength = Math.min(remain, 6);
    const offset = offsetBytes + remain - byteLength;
    const rawValue = buffer.readUIntLE(offset, byteLength);
    value = value << BigInt(byteLength * 8) | BigInt(rawValue);
    remain -= byteLength;
  }
  return signed ? BigInt.asIntN(fieldSizeBits, value >> BigInt(bitOffset)) : BigInt.asUintN(fieldSizeBits, value >> BigInt(bitOffset));
};
870
/**
 * Parser for Blizzard WDC5 (DB2) client database files.
 *
 * Decodes the fixed header, per-section headers, field storage metadata,
 * pallet/common data and every section's records into:
 * - `rows`: record id -> decoded field values (or a raw buffer for sparse tables),
 * - `relationships`: record id -> foreign (parent) id,
 * - `copyTable`: copy id -> source id.
 *
 * `blocks` lists byte ranges of `buffer` that are zeroed placeholders (e.g.
 * encrypted blocks whose TACT key was unavailable); any section fully covered
 * by such a range is skipped.
 */
class WDCReader {
  constructor(buffer, blocks = []) {
    // Compiled class-field initialisers.
    __publicField$2(this, "tableHash");
    __publicField$2(this, "layoutHash");
    __publicField$2(this, "locale");
    __publicField$2(this, "isNormal");
    __publicField$2(this, "hasRelationshipData");
    __publicField$2(this, "fields");
    __publicField$2(this, "fieldsInfo");
    __publicField$2(this, "rows", /* @__PURE__ */ new Map());
    __publicField$2(this, "relationships", /* @__PURE__ */ new Map());
    __publicField$2(this, "copyTable", /* @__PURE__ */ new Map());
    // Fixed WDC5 header offsets. Bytes between the magic and offset 140 are
    // not read here (presumably version/schema info — not needed below).
    const magic = buffer.readUInt32BE(0);
    const fieldCount = buffer.readUInt32LE(140);
    const recordSize = buffer.readUInt32LE(144);
    const tableHash = buffer.readUInt32LE(152);
    const layoutHash = buffer.readUInt32LE(156);
    const locale = buffer.readUInt32LE(168);
    const flags = buffer.readUInt16LE(172);
    const idIndex = buffer.readUInt16LE(174);
    const fieldStorageInfoSize = buffer.readUInt32LE(188);
    const commonDataSize = buffer.readUInt32LE(192);
    const palletDataSize = buffer.readUInt32LE(196);
    const sectionCount = buffer.readUInt32LE(200);
    assert(magic === WDC5_MAGIC, `Invalid magic: ${magic.toString(16).padStart(8, "0")}`);
    this.tableHash = tableHash;
    this.layoutHash = layoutHash;
    this.locale = locale;
    // Flag bit 0 set => offset-map ("sparse") table layout; bit 1 set =>
    // inline relationship/secondary-key data follows the offset map.
    const isNormal = !(flags & 1);
    const hasRelationshipData = !!(flags & 2);
    this.isNormal = isNormal;
    this.hasRelationshipData = hasRelationshipData;
    // Section headers: 40-byte entries starting at offset 204.
    const sectionHeaders = [];
    const sectionHeadersOffset = 204;
    for (let i = 0; i < sectionCount; i += 1) {
      const sectionHeaderOffset = sectionHeadersOffset + i * 40;
      sectionHeaders.push({
        tactKeyHash: buffer.readBigUInt64LE(sectionHeaderOffset),
        fileOffset: buffer.readUInt32LE(sectionHeaderOffset + 8),
        recordCount: buffer.readUInt32LE(sectionHeaderOffset + 12),
        stringTableSize: buffer.readUInt32LE(sectionHeaderOffset + 16),
        offsetRecordsEnd: buffer.readUInt32LE(sectionHeaderOffset + 20),
        idListSize: buffer.readUInt32LE(sectionHeaderOffset + 24),
        relationshipDataSize: buffer.readUInt32LE(sectionHeaderOffset + 28),
        offsetMapIDCount: buffer.readUInt32LE(sectionHeaderOffset + 32),
        copyTableCount: buffer.readUInt32LE(sectionHeaderOffset + 36)
      });
    }
    // Field structures: 4-byte (int16 size, uint16 position) entries.
    const fields = [];
    const fieldsOffset = 204 + sectionCount * 40;
    for (let i = 0; i < fieldCount; i += 1) {
      const fieldOffset = fieldsOffset + i * 4;
      fields.push({
        size: buffer.readInt16LE(fieldOffset),
        position: buffer.readUInt16LE(fieldOffset + 2)
      });
    }
    this.fields = fields;
    // Field storage info: 24-byte entries describing how each field is packed.
    const fieldsInfo = [];
    const fieldsInfoOffset = fieldsOffset + fieldCount * 4;
    for (let i = 0; i < fieldStorageInfoSize / 24; i += 1) {
      const fieldInfoOffset = fieldsInfoOffset + i * 24;
      const fieldOffsetBits = buffer.readUInt16LE(fieldInfoOffset);
      const fieldSizeBits = buffer.readUInt16LE(fieldInfoOffset + 2);
      const additionalDataSize = buffer.readUInt32LE(fieldInfoOffset + 4);
      const storageType = buffer.readUInt32LE(fieldInfoOffset + 8);
      // arg1..arg3 are interpreted per storage type below.
      const arg1 = buffer.readUInt32LE(fieldInfoOffset + 12);
      const arg2 = buffer.readUInt32LE(fieldInfoOffset + 16);
      const arg3 = buffer.readUInt32LE(fieldInfoOffset + 20);
      switch (storageType) {
        case 0:
          // Plain value stored inline in the record.
          fieldsInfo.push({
            fieldOffsetBits,
            fieldSizeBits,
            additionalDataSize,
            storageType: "none"
          });
          break;
        case 1:
          fieldsInfo.push({
            fieldOffsetBits,
            fieldSizeBits,
            additionalDataSize,
            storageType: "bitpacked",
            bitpackingOffsetBits: arg1,
            bitpackingSizeBits: arg2,
            flags: arg3
          });
          break;
        case 2:
          // Value looked up in the common-data block, with a default.
          fieldsInfo.push({
            fieldOffsetBits,
            fieldSizeBits,
            additionalDataSize,
            storageType: "commonData",
            defaultValue: arg1
          });
          break;
        case 3:
          // Record stores an index into the pallet-data block.
          fieldsInfo.push({
            fieldOffsetBits,
            fieldSizeBits,
            additionalDataSize,
            storageType: "bitpackedIndexed",
            bitpackingOffsetBits: arg1,
            bitpackingSizeBits: arg2
          });
          break;
        case 4:
          // As case 3, but the pallet holds arrays of `arrayCount` values.
          fieldsInfo.push({
            fieldOffsetBits,
            fieldSizeBits,
            additionalDataSize,
            storageType: "bitpackedIndexedArray",
            bitpackingOffsetBits: arg1,
            bitpackingSizeBits: arg2,
            arrayCount: arg3
          });
          break;
        case 5:
          fieldsInfo.push({
            fieldOffsetBits,
            fieldSizeBits,
            additionalDataSize,
            storageType: "bitpackedSigned",
            bitpackingOffsetBits: arg1,
            bitpackingSizeBits: arg2,
            flags: arg3
          });
          break;
        default:
          throw new Error(`Unknown storage type: ${storageType.toString(16).padStart(8, "0")}`);
      }
    }
    this.fieldsInfo = fieldsInfo;
    // Pallet data: uint32 values for each indexed field, keyed by field index.
    const palletData = /* @__PURE__ */ new Map();
    const palletDataOffset = fieldsInfoOffset + fieldStorageInfoSize;
    let palletDataPointer = palletDataOffset;
    for (let i = 0; i < fieldsInfo.length; i += 1) {
      const fieldInfo = fieldsInfo[i];
      if (fieldInfo.storageType === "bitpackedIndexed" || fieldInfo.storageType === "bitpackedIndexedArray") {
        const data = [];
        for (let j = 0; j < fieldInfo.additionalDataSize / 4; j += 1) {
          data.push(buffer.readUInt32LE(palletDataPointer));
          palletDataPointer += 4;
        }
        palletData.set(i, data);
      }
    }
    assert(
      palletDataPointer === palletDataOffset + palletDataSize,
      `Invalid pallet data size: ${(palletDataPointer - palletDataOffset).toString()} != ${palletDataSize.toString()}`
    );
    // Common data: per-field map of record id -> value (uint32 pairs).
    const commonData = /* @__PURE__ */ new Map();
    const commonDataOffset = palletDataPointer;
    let commonDataPointer = commonDataOffset;
    for (let i = 0; i < fieldsInfo.length; i += 1) {
      const fieldInfo = fieldsInfo[i];
      if (fieldInfo.storageType === "commonData") {
        const map = /* @__PURE__ */ new Map();
        for (let j = 0; j < fieldInfo.additionalDataSize / 8; j += 1) {
          map.set(
            buffer.readUInt32LE(commonDataPointer),
            buffer.readUInt32LE(commonDataPointer + 4)
          );
          commonDataPointer += 8;
        }
        commonData.set(i, map);
      }
    }
    assert(
      commonDataPointer === commonDataOffset + commonDataSize,
      `Invalid common data size: ${(commonDataPointer - commonDataOffset).toString()} != ${commonDataSize.toString()}`
    );
    // Encrypted-record id lists, one per section with a non-zero TACT key
    // hash. NOTE(review): collected here (advancing the pointer) but not
    // otherwise used by this reader.
    const encryptedIDs = /* @__PURE__ */ new Map();
    const encryptedRecordsOffset = commonDataPointer;
    let encryptedRecordsPointer = encryptedRecordsOffset;
    for (let i = 0; i < sectionHeaders.length; i += 1) {
      const sectionHeader = sectionHeaders[i];
      if (sectionHeader.tactKeyHash !== 0n) {
        const count = buffer.readUInt32LE(encryptedRecordsPointer);
        encryptedRecordsPointer += 4;
        const data = [];
        for (let j = 0; j < count; j += 1) {
          data.push(buffer.readUInt32LE(encryptedRecordsPointer));
          encryptedRecordsPointer += 4;
        }
        encryptedIDs.set(i, data);
      }
    }
    // String table entries are keyed by their offset accumulated across all
    // sections (stringTableDelta carries the running total).
    const stringTable = /* @__PURE__ */ new Map();
    let stringTableDelta = 0;
    const sectionsOffset = encryptedRecordsPointer;
    let sectionPointer = sectionsOffset;
    const sections = sectionHeaders.map((sectionHeader) => {
      // Sections must be contiguous: the running pointer has to land exactly
      // on each declared file offset.
      assert(
        sectionPointer === sectionHeader.fileOffset,
        `Invalid section offset: ${sectionPointer.toString()} != ${sectionHeader.fileOffset.toString()}`
      );
      // Total byte size of this section: record data (+ string table for
      // normal layout), id list, copy table (8B each), offset map entries
      // (6B) + their ids (4B), and relationship data.
      const sectionSize = (isNormal ? sectionHeader.recordCount * recordSize + sectionHeader.stringTableSize : sectionHeader.offsetRecordsEnd - sectionPointer) + sectionHeader.idListSize + sectionHeader.copyTableCount * 8 + sectionHeader.offsetMapIDCount * 10 + sectionHeader.relationshipDataSize;
      const recordDataSize = isNormal ? recordSize * sectionHeader.recordCount : sectionHeader.offsetRecordsEnd - sectionHeader.fileOffset;
      // A section is unreadable if it falls entirely inside a zeroed block.
      const isZeroed = blocks.some((block) => {
        const sectionStart = sectionHeader.fileOffset;
        const sectionEnd = sectionStart + sectionSize;
        const blockStart = block.offset;
        const blockEnd = blockStart + block.size;
        return sectionStart >= blockStart && sectionEnd <= blockEnd;
      });
      if (isZeroed) {
        // Skip over the section but keep the string-table offsets aligned.
        sectionPointer += sectionSize;
        if (isNormal) {
          stringTableDelta += sectionHeader.stringTableSize;
        }
        return {
          header: sectionHeader,
          isZeroed,
          recordDataSize,
          records: [],
          idList: [],
          offsetMap: [],
          relationshipMap: /* @__PURE__ */ new Map()
        };
      }
      const records = [];
      if (isNormal) {
        // Fixed-size records followed by this section's string table.
        for (let j = 0; j < sectionHeader.recordCount; j += 1) {
          records.push(buffer.subarray(sectionPointer, sectionPointer + recordSize));
          sectionPointer += recordSize;
        }
        // Split the string table on NUL terminators; keys are offsets
        // relative to the start of the first section's string table.
        const stringTableOffset = sectionPointer;
        let stringStartPointer = stringTableOffset;
        while (sectionPointer < stringTableOffset + sectionHeader.stringTableSize) {
          if (buffer[sectionPointer] === 0) {
            if (sectionPointer - stringStartPointer > 0) {
              const string = buffer.toString("utf-8", stringStartPointer, sectionPointer);
              stringTable.set(
                stringStartPointer - stringTableOffset + stringTableDelta,
                string
              );
            }
            stringStartPointer = sectionPointer + 1;
          }
          sectionPointer += 1;
        }
        stringTableDelta += sectionHeader.stringTableSize;
      } else {
        // Sparse layout: records are addressed through the offset map below.
        sectionPointer = sectionHeader.offsetRecordsEnd;
      }
      // Explicit record id list (4 bytes per id), when present.
      const idList = [];
      for (let j = 0; j < sectionHeader.idListSize / 4; j += 1) {
        idList.push(buffer.readUInt32LE(sectionPointer));
        sectionPointer += 4;
      }
      // Copy table: (copy id, source id) pairs shared on the instance.
      for (let j = 0; j < sectionHeader.copyTableCount; j += 1) {
        const dst = buffer.readUInt32LE(sectionPointer);
        const src = buffer.readUInt32LE(sectionPointer + 4);
        this.copyTable.set(dst, src);
        sectionPointer += 8;
      }
      // Offset map: 6-byte (uint32 offset, uint16 size) entries pointing at
      // variable-length sparse records elsewhere in the buffer.
      const offsetMap = [];
      for (let j = 0; j < sectionHeader.offsetMapIDCount; j += 1) {
        const offset = buffer.readUInt32LE(sectionPointer);
        const size = buffer.readUInt16LE(sectionPointer + 4);
        const data = buffer.subarray(offset, offset + size);
        sectionPointer += 6;
        offsetMap.push({
          offset,
          size,
          data
        });
      }
      // Offset-map id list placement depends on the relationship flag: it
      // precedes the relationship data when the flag is set, follows it
      // otherwise. NOTE(review): offsetMapIDList is consumed for pointer
      // advancement only; ids are not attached to the sparse records here.
      const offsetMapIDList = [];
      if (hasRelationshipData) {
        for (let j = 0; j < sectionHeader.offsetMapIDCount; j += 1) {
          offsetMapIDList.push(buffer.readUInt32LE(sectionPointer));
          sectionPointer += 4;
        }
      }
      // Relationship data: entry count plus 8 skipped bytes (presumably
      // min/max id), then (foreign id, record index) pairs.
      const relationshipMap = /* @__PURE__ */ new Map();
      if (sectionHeader.relationshipDataSize > 0) {
        const numEntries = buffer.readUInt32LE(sectionPointer);
        sectionPointer += 12;
        for (let j = 0; j < numEntries; j += 1) {
          const foreignID = buffer.readUInt32LE(sectionPointer);
          const recordIndex = buffer.readUInt32LE(sectionPointer + 4);
          sectionPointer += 8;
          relationshipMap.set(recordIndex, foreignID);
        }
      }
      if (!hasRelationshipData) {
        for (let j = 0; j < sectionHeader.offsetMapIDCount; j += 1) {
          offsetMapIDList.push(buffer.readUInt32LE(sectionPointer));
          sectionPointer += 4;
        }
      }
      return {
        header: sectionHeader,
        isZeroed,
        recordDataSize,
        records,
        idList,
        offsetMap,
        relationshipMap
      };
    });
    // Decode every section's records into this.rows / this.relationships.
    const totalRecordDataSize = sections.reduce((acc, section) => acc + section.recordDataSize, 0);
    sections.forEach((section) => {
      const {
        header,
        isZeroed,
        records,
        idList,
        offsetMap,
        relationshipMap
      } = section;
      // Record data of all sections that precede this one in the file; used
      // to translate inline string offsets into global string-table keys.
      const prevRecordDataSize = sections.filter((s) => s.header.fileOffset < header.fileOffset).reduce((acc, s) => acc + s.recordDataSize, 0);
      if (isZeroed) {
        return;
      }
      for (let recordIndex = 0; recordIndex < header.recordCount; recordIndex += 1) {
        // NOTE(review): a record id of 0 from the id list is falsy, so the
        // `!recordID` fallbacks below could overwrite it — verify id 0 never
        // occurs in practice.
        let recordID = idList.length > 0 ? idList[recordIndex] : void 0;
        const recordBuffer = isNormal ? records[recordIndex] : offsetMap[recordIndex].data;
        if (isNormal) {
          const recordData = fieldsInfo.map((fieldInfo, fieldIndex) => {
            switch (fieldInfo.storageType) {
              case "none": {
                const value = readBitpackedValue(
                  recordBuffer,
                  fieldInfo.fieldOffsetBits,
                  fieldInfo.fieldSizeBits
                );
                // Fields wider than 48 bits come back as bigint and cannot
                // be string offsets or record ids.
                if (typeof value === "bigint") {
                  return {
                    type: "none",
                    data: value
                  };
                }
                if (!recordID && fieldIndex === idIndex) {
                  recordID = value;
                }
                // If the value is a string reference, it is an offset
                // relative to the field's own position; convert it into a
                // key into the global string table (indexed from the end of
                // all record data).
                const fieldOffset = fieldInfo.fieldOffsetBits >>> 3;
                const offset = prevRecordDataSize - totalRecordDataSize + recordSize * recordIndex + fieldOffset + value;
                return {
                  type: "none",
                  data: value,
                  string: stringTable.get(offset)
                };
              }
              case "commonData": {
                // Per-record override from the common-data block, falling
                // back to the field's default value.
                const value = (recordID ? commonData.get(fieldIndex)?.get(recordID) : void 0) ?? fieldInfo.defaultValue;
                return {
                  type: "commonData",
                  data: value
                };
              }
              case "bitpacked":
              case "bitpackedSigned":
              case "bitpackedIndexed":
              case "bitpackedIndexedArray": {
                let value = readBitpackedValue(
                  recordBuffer,
                  fieldInfo.fieldOffsetBits,
                  fieldInfo.fieldSizeBits,
                  fieldInfo.storageType === "bitpackedSigned"
                );
                assert(typeof value === "number", "Bitpacked value must be a number");
                if (fieldInfo.storageType === "bitpackedIndexedArray") {
                  // Value is an index of an `arrayCount`-wide slot in the
                  // field's pallet.
                  const fieldPalletData = palletData.get(fieldIndex);
                  assert(fieldPalletData, `No pallet data for field ${fieldIndex.toString()}`);
                  const data = [];
                  const palletStart = value * fieldInfo.arrayCount;
                  for (let j = 0; j < fieldInfo.arrayCount; j += 1) {
                    data.push(fieldPalletData[palletStart + j]);
                  }
                  return {
                    type: "bitpackedArray",
                    data
                  };
                }
                if (fieldInfo.storageType === "bitpackedIndexed") {
                  // Value is an index into the field's pallet.
                  const fieldPalletData = palletData.get(fieldIndex);
                  assert(fieldPalletData, `No pallet data for field ${fieldIndex.toString()}`);
                  value = fieldPalletData[value];
                }
                if (!recordID && fieldIndex === idIndex) {
                  recordID = value;
                }
                return {
                  type: "bitpacked",
                  data: value
                };
              }
              default:
                throw new Error("Unreachable");
            }
          });
          assert(recordID !== void 0, "No record ID found");
          this.rows.set(recordID, recordData);
          // NOTE(review): a foreign id of 0 is falsy and would be dropped —
          // confirm 0 is not a meaningful parent id.
          const foreignID = relationshipMap.get(recordIndex);
          if (foreignID) {
            this.relationships.set(recordID, foreignID);
          }
        } else {
          // Sparse layout: keep the raw record bytes for callers to decode.
          const recordData = {
            type: "sparse",
            data: recordBuffer
          };
          assert(recordID !== void 0, "No record ID found");
          this.rows.set(recordID, recordData);
          const foreignID = relationshipMap.get(recordIndex);
          if (foreignID) {
            this.relationships.set(recordID, foreignID);
          }
        }
      }
    });
  }
  /** Returns every record id, including ids that exist only via the copy table. */
  getAllIDs() {
    return [...this.rows.keys(), ...this.copyTable.keys()];
  }
  /**
   * Returns the decoded row for `id`, resolving copy-table redirections.
   * NOTE(review): `dst` actually holds the *source* id of the copy; and a
   * source id of 0 would be treated as "no redirect" because it is falsy.
   */
  getRowData(id) {
    const dst = this.copyTable.get(id);
    if (dst) {
      return this.rows.get(dst);
    }
    return this.rows.get(id);
  }
  /** Returns the foreign (parent) id for `id`, resolving copy-table redirections. */
  getRowRelationship(id) {
    const dst = this.copyTable.get(id);
    if (dst) {
      return this.relationships.get(dst);
    }
    return this.relationships.get(id);
  }
}
1305
+
1306
/**
 * Probes each CDN host once over HTTP and returns full URL prefixes ordered
 * by measured round-trip latency (fastest first). Hosts that fail to respond
 * — or exceed the probe timeout — are dropped from the result.
 *
 * @param {string[]} hosts - CDN host names.
 * @param {string} cdnPath - CDN path component appended to each host
 * (renamed from `path` to avoid shadowing the `node:path` import).
 * @returns {Promise<string[]>} Ordered `http://host/path` prefixes.
 */
const resolveCDNHost = async (hosts, cdnPath) => {
  // Abort hung probes so a single dead host cannot stall initialization;
  // allSettled turns the abort into a rejected (and thus filtered) entry.
  const PROBE_TIMEOUT_MS = 10000;
  const latencies = await Promise.allSettled(
    hosts.map(async (host) => {
      const start = Date.now();
      await fetch(`http://${host}/`, { signal: AbortSignal.timeout(PROBE_TIMEOUT_MS) });
      return {
        host,
        latency: Date.now() - start
      };
    })
  );
  const resolved = latencies
    .filter((result) => result.status === "fulfilled")
    .map((result) => result.value)
    .sort((a, b) => a.latency - b.latency);
  return resolved.map((result) => `http://${result.host}/${cdnPath}`);
};
1321
/**
 * Draws an empty in-place progress bar of the form `[      ] 0/N`.
 *
 * @param {number} total - Total number of items to process.
 * @param {number} [screenWidth=80] - Terminal width used to size the bar.
 */
const startProcessBar = (total, screenWidth = 80) => {
  // cursorTo() only exists on TTY streams; skip rendering when stdout is
  // piped or redirected so this helper cannot throw a TypeError.
  if (!process.stdout.isTTY) {
    return;
  }
  const totalText = total.toString();
  // Bar width: screen minus the "current/total" counter and "[ ] /" chrome.
  const barLength = screenWidth - totalText.length * 2 - 4;
  const bar = " ".repeat(barLength);
  process.stdout.write(`[${bar}] ${"0".padStart(totalText.length, " ")}/${totalText}`);
  // Rewind to column 0 so the next update overwrites this line in place.
  process.stdout.cursorTo(0);
};
1328
/**
 * Redraws the in-place progress bar to reflect `current` of `total` items.
 *
 * @param {number} current - Number of completed items.
 * @param {number} total - Total number of items.
 * @param {number} [screenWidth=80] - Terminal width used to size the bar.
 */
const updateProcessBar = (current, total, screenWidth = 80) => {
  // cursorTo() only exists on TTY streams; skip rendering when stdout is
  // piped or redirected so progress updates cannot throw a TypeError.
  if (!process.stdout.isTTY) {
    return;
  }
  const totalText = total.toString();
  const barLength = screenWidth - totalText.length * 2 - 4;
  const bar = "=".repeat(Math.floor(current / total * barLength));
  process.stdout.write(`[${bar.padEnd(barLength, " ")}] ${current.toString().padStart(totalText.length, " ")}/${totalText}`);
  // Rewind to column 0 so the next update overwrites this line in place.
  process.stdout.cursorTo(0);
};
1335
/** Finishes the in-place progress bar by dropping output to a fresh line. */
const endProcessBar = () => {
  process.stdout.write("\n");
};
1338
/**
 * Runs `handler` over `items` with at most `limit` invocations in flight,
 * rendering a progress bar while draining. Resolves with results in input
 * order; rejects on the first handler failure (in-flight tasks still drain).
 *
 * @param {Array} items - Work items.
 * @param {(item: any) => Promise<any>} handler - Async worker per item.
 * @param {number} limit - Maximum concurrency.
 * @returns {Promise<Array>} Results aligned with `items` by index.
 */
const asyncQueue = (items, handler, limit) => {
  if (items.length === 0) {
    return Promise.resolve([]);
  }
  return new Promise((resolve, reject) => {
    const results = [];
    let nextIndex = 0;
    let inFlight = 0;
    let finished = 0;
    const pump = () => {
      // Nothing left to start: resolve once the last in-flight task drains.
      if (nextIndex >= items.length) {
        if (inFlight === 0) {
          endProcessBar();
          resolve(results);
        }
        return;
      }
      const index = nextIndex;
      nextIndex += 1;
      inFlight += 1;
      handler(items[index])
        .then((result) => {
          results[index] = result;
        })
        .catch(reject)
        .finally(() => {
          inFlight -= 1;
          finished += 1;
          updateProcessBar(finished, items.length);
          pump();
        });
    };
    startProcessBar(items.length);
    // Prime up to `limit` workers; each completion pumps the next item.
    for (let slot = 0; slot < limit; slot += 1) {
      pump();
    }
  });
};
1371
// JEDEC binary unit labels, indexed by power of 1024.
const JEDEC = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"];
/**
 * Formats a byte count as a human-readable JEDEC size string, e.g.
 * `1536` -> `"1.5 KB"`. Two decimal places above bytes, values that round
 * up to exactly 1024 roll over to the next unit, and the sign is preserved.
 *
 * @param {number} input - Byte count; `NaN` yields an empty string.
 * @returns {string} Formatted size, e.g. `"1 KB"`.
 */
const formatFileSize = (input) => {
  if (Number.isNaN(input)) {
    return "";
  }
  let size = Number(input);
  const negative = size < 0;
  if (negative) {
    size = -size;
  }
  // Power of 1024, clamped to the available unit labels. (log(0) yields
  // -Infinity, which the lower clamp turns into 0.)
  let unitIndex = Math.floor(Math.log(size) / Math.log(1024));
  if (unitIndex < 0) {
    unitIndex = 0;
  }
  if (unitIndex > 8) {
    unitIndex = 8;
  }
  let amount;
  if (size === 0) {
    amount = 0;
  } else {
    amount = Number((size / 2 ** (unitIndex * 10)).toFixed(unitIndex > 0 ? 2 : 0));
    // Rounding can push the value to exactly 1024; promote it to one unit up.
    if (amount === 1024 && unitIndex < 8) {
      amount = 1;
      unitIndex += 1;
    }
  }
  return `${negative ? -amount : amount} ${JEDEC[unitIndex]}`;
};
1401
+
1402
// Bundler helpers emulating class-field assignment semantics.
var __defProp$1 = Object.defineProperty;
var __defNormalProp$1 = (obj, key, value) => {
  // Keys already present (own or inherited) go through [[DefineOwnProperty]]
  // so accessors on the prototype chain are not triggered.
  if (key in obj) {
    return __defProp$1(obj, key, {
      enumerable: true,
      configurable: true,
      writable: true,
      value
    });
  }
  obj[key] = value;
  return value;
};
var __publicField$1 = (obj, key, value) => {
  // Non-symbol keys are coerced to strings, matching class-field behaviour.
  __defNormalProp$1(obj, typeof key === "symbol" ? key : key + "", value);
  return value;
};
1408
// TypeScript-style numeric enum with reverse lookup:
// LogLevel.error === 0 and LogLevel[0] === "error".
var LogLevel = /* @__PURE__ */ ((target) => {
  const names = ["error", "warn", "info", "debug"];
  names.forEach((name, value) => {
    target[name] = value;
    target[value] = name;
  });
  return target;
})(LogLevel || {});
// Upper-case labels indexed by LogLevel, used as log-line prefixes.
const textLogLevel = ["ERROR", "WARN", "INFO", "DEBUG"];
1416
+ class CASCClient {
1417
+ constructor(region, product, version, logLevel = 2 /* info */) {
1418
+ __publicField$1(this, "region");
1419
+ __publicField$1(this, "product");
1420
+ __publicField$1(this, "version");
1421
+ __publicField$1(this, "name2FileDataID", /* @__PURE__ */ new Map());
1422
+ __publicField$1(this, "keys", /* @__PURE__ */ new Map());
1423
+ __publicField$1(this, "preload");
1424
+ __publicField$1(this, "logLevel");
1425
+ this.region = region;
1426
+ this.product = product;
1427
+ this.version = version;
1428
+ this.logLevel = logLevel;
1429
+ }
1430
+ static async getProductVersion(region, product) {
1431
+ const versionsText = await getProductVersions(region, product);
1432
+ const versions = parseProductVersions(versionsText);
1433
+ return versions.find((version) => version.Region === region);
1434
+ }
1435
+ log(level, message) {
1436
+ if (level <= this.logLevel) {
1437
+ if (level >= 0 /* error */) {
1438
+ console.error(`${( new Date()).toISOString()} [${textLogLevel[level]}]:`, message);
1439
+ } else {
1440
+ console.log(`${( new Date()).toISOString()} [${textLogLevel[level]}]:`, message);
1441
+ }
1442
+ }
1443
+ }
1444
+ async init() {
1445
+ this.log(2 /* info */, "Preloading remote CASC build:");
1446
+ this.log(2 /* info */, this.version);
1447
+ this.log(2 /* info */, "Fetching CDN configuration...");
1448
+ const serverConfigText = await getProductCDNs(this.region, this.product);
1449
+ const serverConfig = parseProductCDNs(serverConfigText).find(
1450
+ (config) => config.Name === this.region
1451
+ );
1452
+ assert(serverConfig, "No server config found");
1453
+ this.log(2 /* info */, "Locating fastest CDN server...");
1454
+ const prefixes = await resolveCDNHost(
1455
+ serverConfig.Hosts.split(" "),
1456
+ serverConfig.Path
1457
+ );
1458
+ this.log(2 /* info */, "Resolved CDN servers:");
1459
+ prefixes.forEach((prefix) => {
1460
+ this.log(2 /* info */, prefix);
1461
+ });
1462
+ this.log(2 /* info */, "Fetching build configurations...");
1463
+ const cdnConfigText = await getConfigFile(prefixes, this.version.CDNConfig);
1464
+ const cdnConfig = parseCDNConfig(cdnConfigText);
1465
+ const buildConfigText = await getConfigFile(prefixes, this.version.BuildConfig);
1466
+ const buildConfig = parseBuildConfig(buildConfigText);
1467
+ this.log(2 /* info */, "Loading archives...");
1468
+ const archiveKeys = cdnConfig.archives.split(" ");
1469
+ const archiveCount = archiveKeys.length;
1470
+ const archiveTotalSize = cdnConfig.archivesIndexSize.split(" ").reduce((a, b) => a + parseInt(b, 10), 0);
1471
+ const archives = new Map(
1472
+ (await asyncQueue(
1473
+ archiveKeys,
1474
+ async (key) => {
1475
+ const fileName = `${key}.index`;
1476
+ const buffer = await getDataFile(prefixes, fileName, "indexes", this.version.BuildConfig);
1477
+ return parseArchiveIndex(buffer, key);
1478
+ },
1479
+ 50
1480
+ )).flatMap((e) => [...e])
1481
+ );
1482
+ this.log(
1483
+ 2 /* info */,
1484
+ `Loaded ${archiveCount.toString()} archives (${archives.size.toString()} entries, ${formatFileSize(archiveTotalSize)})`
1485
+ );
1486
+ this.log(2 /* info */, "Loading encoding table...");
1487
+ const [encodingCKey, encodingEKey] = buildConfig.encoding.split(" ");
1488
+ const encodingBuffer = await getDataFile(prefixes, encodingEKey, "build", this.version.BuildConfig, "encoding");
1489
+ this.log(2 /* info */, `Loaded encoding table (${formatFileSize(encodingBuffer.byteLength)})`);
1490
+ this.log(2 /* info */, "Parsing encoding table...");
1491
+ const encoding = parseEncodingFile(encodingBuffer, encodingEKey, encodingCKey);
1492
+ this.log(2 /* info */, `Parsed encoding table (${encoding.cKey2EKey.size.toString()} entries)`);
1493
+ this.log(2 /* info */, "Loading root table...");
1494
+ const rootCKey = buildConfig.root;
1495
+ const rootEKeys = encoding.cKey2EKey.get(rootCKey);
1496
+ assert(rootEKeys, "Failing to find EKey for root table.");
1497
+ const rootEKey = typeof rootEKeys === "string" ? rootEKeys : rootEKeys[0];
1498
+ const rootBuffer = await getDataFile(prefixes, rootEKey, "build", this.version.BuildConfig, "root");
1499
+ this.log(2 /* info */, `Loaded root table (${formatFileSize(rootBuffer.byteLength)})`);
1500
+ this.log(2 /* info */, "Parsing root file...");
1501
+ const rootFile = parseRootFile(rootBuffer, rootEKey, rootCKey);
1502
+ this.log(2 /* info */, `Parsed root file (${rootFile.fileDataID2CKey.size.toString()} entries, ${rootFile.nameHash2FileDataID.size.toString()} hashes)`);
1503
+ this.preload = {
1504
+ prefixes,
1505
+ archives,
1506
+ encoding,
1507
+ rootFile
1508
+ };
1509
+ }
1510
+ async loadRemoteListFile() {
1511
+ const url = "https://github.com/wowdev/wow-listfile/releases/download/202402031841/community-listfile.csv";
1512
+ const text = await (await fetch(url)).text();
1513
+ const lines = text.split("\n").map((line) => line.trim()).filter((line) => line.length > 0);
1514
+ lines.forEach((line) => {
1515
+ const [fileDataID, name] = line.split(";");
1516
+ this.name2FileDataID.set(name.trim(), parseInt(fileDataID.trim(), 10));
1517
+ });
1518
+ }
1519
+ async loadRemoteTACTKeys() {
1520
+ const url = "https://raw.githubusercontent.com/wowdev/TACTKeys/master/WoW.txt";
1521
+ const text = await (await fetch(url)).text();
1522
+ const lines = text.split("\n").map((line) => line.trim()).filter((line) => line.length > 0);
1523
+ lines.forEach((line) => {
1524
+ const [keyName, keyHex] = line.split(" ");
1525
+ assert(keyName.length === 16, `Invalid keyName length: ${keyName.length.toString()}`);
1526
+ assert(keyHex.length === 32, `Invalid key length: ${keyHex.length.toString()}`);
1527
+ const key = Uint8Array.from(Buffer.from(keyHex, "hex"));
1528
+ this.keys.set(keyName.toLowerCase(), key);
1529
+ });
1530
+ }
1531
+ async loadTACTKeys() {
1532
+ const keysCKeys = this.getContentKeysByFileDataID(1302850);
1533
+ const lookupCKeys = this.getContentKeysByFileDataID(1302851);
1534
+ assert(keysCKeys?.[0], "Failing to find dbfilesclient/tactkey.db2");
1535
+ assert(lookupCKeys?.[0], "Failing to find dbfilesclient/tactkeylookup.db2");
1536
+ const [keysResult, lookupResult] = await Promise.all([
1537
+ this.getFileByContentKey(keysCKeys[0].cKey),
1538
+ this.getFileByContentKey(lookupCKeys[0].cKey)
1539
+ ]);
1540
+ const keysReader = new WDCReader(keysResult.buffer);
1541
+ const lookupReader = new WDCReader(lookupResult.buffer);
1542
+ [...lookupReader.rows.keys()].forEach((keyID) => {
1543
+ const lookupRow = lookupReader.rows.get(keyID);
1544
+ const keyRow = keysReader.rows.get(keyID);
1545
+ if (keyRow) {
1546
+ assert(Array.isArray(lookupRow) && lookupRow[0], `Invalid TACTKeyLookup table row at id ${keyID.toString()}`);
1547
+ assert(Array.isArray(keyRow) && keyRow[0], `Invalid TACTKey table row at id ${keyID.toString()}`);
1548
+ const keyName = lookupRow[0].data.toString(16).padStart(16, "0");
1549
+ const keyHexLE = keyRow[0].data.toString(16).padStart(32, "0");
1550
+ assert(keyName.length === 16, `Invalid keyName length: ${keyName.length.toString()}`);
1551
+ assert(keyHexLE.length === 32, `Invalid key length: ${keyHexLE.length.toString()}`);
1552
+ const keyHex = [...keyHexLE.matchAll(/.{2}/g)].map((v) => v[0]).reverse().join("");
1553
+ const key = Uint8Array.from(Buffer.from(keyHex, "hex"));
1554
+ this.keys.set(keyName.toLowerCase(), key);
1555
+ }
1556
+ });
1557
+ }
1558
+ getFileDataIDByName(name) {
1559
+ assert(this.preload, "Client not initialized");
1560
+ const { rootFile } = this.preload;
1561
+ const { nameHash2FileDataID } = rootFile;
1562
+ const nameHash = getNameHash(name);
1563
+ return nameHash2FileDataID.get(nameHash) ?? this.name2FileDataID.get(name);
1564
+ }
1565
+ getContentKeysByFileDataID(fileDataID) {
1566
+ assert(this.preload, "Client not initialized");
1567
+ const { rootFile } = this.preload;
1568
+ return rootFile.fileDataID2CKey.get(fileDataID);
1569
+ }
1570
  /**
   * Download and BLTE-decode a file identified by its content key (md5 hex).
   *
   * The cKey is translated to an encoding key via the encoding table; the
   * payload is then fetched either as a slice of an archive (when the eKey
   * appears in the archive index) or as a standalone data file.
   *
   * @param {string} cKey - Content key (md5 of the decoded file, hex).
   * @param {boolean} [allowMissingKey=false] - When true, missing TACT
   *   encryption keys do not abort decoding; undecryptable blocks are
   *   reported instead of thrown.
   * @returns {Promise<{type: 'full' | 'partial', buffer: Buffer, blocks?: unknown[]}>}
   *   'full' with the decoded buffer, or 'partial' plus the list of blocks
   *   that could not be decrypted (semantics of `blocks` come from
   *   BLTEReader.processBytes — NOTE(review): defined elsewhere, confirm).
   * @throws {AssertionError} When the client is not initialized, no eKey is
   *   found, or (fully-decoded case) the md5 of the result mismatches cKey.
   */
  async getFileByContentKey(cKey, allowMissingKey = false) {
    assert(this.preload, "Client not initialized");
    const { prefixes, encoding, archives } = this.preload;
    // encoding maps cKey -> one eKey (string) or several (array); any copy works.
    const eKeys = encoding.cKey2EKey.get(cKey);
    assert(eKeys, `Failing to find encoding key for ${cKey}`);
    const eKey = typeof eKeys === "string" ? eKeys : eKeys[0];
    const archive = archives.get(eKey);
    // Archived files are fetched as a byte range of the archive blob;
    // otherwise the eKey itself names a standalone data file on the CDN.
    const blte = archive ? await getDataFile(prefixes, archive.key, "data", this.version.BuildConfig, eKey, archive.offset, archive.size) : await getDataFile(prefixes, eKey, "data", this.version.BuildConfig);
    const reader = new BLTEReader(blte, eKey, this.keys);
    if (!allowMissingKey) {
      // Strict mode: processBytes throws on a missing encryption key.
      reader.processBytes(allowMissingKey);
      return {
        type: "full",
        buffer: reader.buffer
      };
    }
    // Lenient mode: collect the blocks that could not be decrypted.
    const blocks = reader.processBytes(allowMissingKey);
    if (blocks.length === 0) {
      // Everything decoded — verify integrity against the content key,
      // which is by definition the md5 of the decoded file.
      const hash = crypto.createHash("md5").update(reader.buffer).digest("hex");
      assert(hash === cKey, `Invalid hash: expected ${cKey}, got ${hash}`);
      return {
        type: "full",
        buffer: reader.buffer
      };
    }
    // Some blocks stayed encrypted; hash check would necessarily fail, so
    // return the partial buffer along with the undecrypted block list.
    return {
      type: "partial",
      buffer: reader.buffer,
      blocks
    };
  }
1601
+ }
1602
+ __publicField$1(CASCClient, "LocaleFlags", LocaleFlags);
1603
+ __publicField$1(CASCClient, "ContentFlags", ContentFlags);
1604
+ __publicField$1(CASCClient, "LogLevel", LogLevel);
1605
+
1606
// esbuild-style lowered class-field helpers.
var __defProp = Object.defineProperty;
// Define `key` on `obj` with value `value`. If the key is already present
// (own or inherited), go through defineProperty with an enumerable,
// configurable, writable descriptor; otherwise a plain assignment suffices.
var __defNormalProp = (obj, key, value) => {
  if (key in obj) {
    return __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value });
  }
  return obj[key] = value;
};
// Public class-field initializer: coerces non-symbol keys to strings,
// installs the field, and yields the assigned value.
var __publicField = (obj, key, value) => {
  const propKey = typeof key === "symbol" ? key : key + "";
  __defNormalProp(obj, propKey, value);
  return value;
};
1612
// COLUMNS entry of a .dbd definition: base type, optional foreign-key
// reference ("<Table::Column>"), then the column name.
const PATTERN_COLUMN = /^(int|float|locstring|string)(<[^:]+::[^>]+>)?\s([^\s]+)/;
// "LAYOUT <hash>[, <hash>...]" line that opens a version chunk; group 1
// holds the comma-separated layout-hash list.
const PATTERN_LAYOUT = /^LAYOUT\s(.*)/;
// Version-chunk field line: optional "$annotations$" prefix (group 2),
// field name (group 3), optional "<u?bits>" width (groups 5-6) and an
// optional "[n]" array suffix (group 8).
const PATTERN_FIELD = /^(\$([^$]+)\$)?([^<[]+)(<(u|)(\d+)>)?(\[(\d+)\])?$/;
1615
/**
 * Reinterpret an integer of `src` bytes as an integer of `dst` bytes by
 * round-tripping it through a little-endian scratch buffer (so e.g. an
 * unsigned 0xFF read back as a signed byte yields -1).
 *
 * @param {number} value - Source integer value.
 * @param {number} src - Source width in bytes (1-6).
 * @param {boolean} srcSigned - Whether the source is signed.
 * @param {number} dst - Destination width in bytes (1-6).
 * @param {boolean} dstSigned - Whether the destination is signed.
 * @returns {number} The reinterpreted integer.
 */
const castIntegerBySize = (value, src, srcSigned, dst, dstSigned) => {
  // 6 bytes is the widest Buffer.writeIntLE/readIntLE supports.
  const scratch = Buffer.alloc(6);
  if (srcSigned) {
    scratch.writeIntLE(value, 0, src);
  } else {
    scratch.writeUIntLE(value, 0, src);
  }
  if (dstSigned) {
    return scratch.readIntLE(0, dst);
  }
  return scratch.readUIntLE(0, dst);
};
1624
/**
 * Reinterpret the raw bits of an integer as an IEEE-754 single-precision
 * float, rounded to two decimal places.
 *
 * @param {number} value - Integer holding the float's bit pattern.
 * @param {number} src - Source width in bytes (at most 4).
 * @param {boolean} srcSigned - Whether the source integer is signed.
 * @returns {number} The float value, rounded to 2 decimals.
 */
const castFloat = (value, src, srcSigned) => {
  const scratch = Buffer.alloc(4);
  if (srcSigned) {
    scratch.writeIntLE(value, 0, src);
  } else {
    scratch.writeUIntLE(value, 0, src);
  }
  // Round to 2 decimals to hide single-precision noise.
  return Math.round(scratch.readFloatLE(0) * 100) / 100;
};
1634
/**
 * Reinterpret a 64-bit BigInt between signed and unsigned representations
 * by round-tripping its bytes through a little-endian buffer.
 *
 * @param {bigint} value - Source 64-bit value.
 * @param {boolean} srcSigned - Whether the source is signed.
 * @param {boolean} dstSigned - Whether the destination is signed.
 * @returns {bigint} The reinterpreted 64-bit value.
 */
const castBigInt64 = (value, srcSigned, dstSigned) => {
  const scratch = Buffer.alloc(8);
  if (srcSigned) {
    scratch.writeBigInt64LE(value, 0);
  } else {
    scratch.writeBigUInt64LE(value, 0);
  }
  if (dstSigned) {
    return scratch.readBigInt64LE(0);
  }
  return scratch.readBigUInt64LE(0);
};
1643
/**
 * Parses WoWDBDefs .dbd schema definitions and applies them to a WDC
 * (client database) reader, turning raw rows into named-column records.
 *
 * Usage: construct with a WDCReader-like object, then `await init()` to
 * fetch and parse the matching .dbd definition before calling getRowData.
 */
class DBDParser {
  /**
   * @param wdc - Parsed WDC table (provides tableHash, layoutHash, rows,
   *   fields, fieldsInfo, getAllIDs, getRowData, getRowRelationship).
   */
  constructor(wdc) {
    __publicField(this, "wdc");
    // column name -> base type ("int" | "float" | "string" | "locstring")
    __publicField(this, "definitions", /* @__PURE__ */ new Map());
    // Ordered column descriptors for the layout matching this.wdc.layoutHash.
    __publicField(this, "columns", []);
    this.wdc = wdc;
  }
  /**
   * Fetch the WoWDBDefs manifest + .dbd file from GitHub and populate
   * `definitions` and `columns` for this table's layout hash.
   *
   * @throws {AssertionError} When no manifest entry, COLUMNS chunk, version
   *   chunk, or column type can be found.
   */
  async init() {
    const manifestsURL = "https://raw.githubusercontent.com/wowdev/WoWDBDefs/master/manifest.json";
    const manifests = await (await fetch(manifestsURL)).json();
    // Table/layout hashes are compared as 8-digit lowercase hex.
    const tableHashHex = this.wdc.tableHash.toString(16).padStart(8, "0").toLowerCase();
    const manifest = manifests.find((v) => v.tableHash.toLowerCase() === tableHashHex);
    assert(manifest?.tableName, `No manifest found for table hash ${tableHashHex}`);
    const url = `https://raw.githubusercontent.com/wowdev/WoWDBDefs/master/definitions/${manifest.tableName}.dbd`;
    const text = await (await fetch(url)).text();
    const lines = text.split("\n").map((v) => v.trim());
    // Split the file into blank-line-separated chunks, dropping empty ones.
    const chunks = lines.reduce((acc, line) => {
      if (line.length > 0) {
        acc[acc.length - 1].push(line);
      } else {
        acc.push([]);
      }
      return acc;
    }, [[]]).filter((chunk) => chunk.length > 0);
    // First chunk must be the COLUMNS section declaring name -> type.
    const columnsChunk = chunks.shift();
    assert(columnsChunk?.[0] === "COLUMNS", "No column definitions found");
    columnsChunk.shift();
    columnsChunk.forEach((line) => {
      const match = line.match(PATTERN_COLUMN);
      if (match) {
        const [, type, , name] = match;
        // A trailing "?" marks a column as unverified in .dbd; strip it.
        this.definitions.set(name.replace("?", ""), type);
      }
    });
    // Find the version chunk whose LAYOUT line lists our layout hash.
    const layoutHashHex = this.wdc.layoutHash.toString(16).padStart(8, "0").toLowerCase();
    const versionChunk = chunks.find((chunk) => chunk.find((line) => {
      const layoutsMatch = line.match(PATTERN_LAYOUT);
      const layouts = layoutsMatch?.[1].split(",").map((v) => v.trim().toLowerCase());
      return layouts?.includes(layoutHashHex);
    }));
    assert(versionChunk, `No version definition found for layout hash ${layoutHashHex}`);
    versionChunk.forEach((line) => {
      // Skip the chunk's metadata lines; everything else is a field entry.
      if (line.startsWith("LAYOUT") || line.startsWith("BUILD") || line.startsWith("COMMENT")) {
        return;
      }
      const match = line.match(PATTERN_FIELD);
      if (match) {
        const [, , annotationsText, name, , unsigned, sizeText, , arraySizeText] = match;
        const type = this.definitions.get(name);
        assert(type, `No type found for column ${name}`);
        const annotations = annotationsText ? annotationsText.split(",").map((v) => v.trim()) : void 0;
        const size = sizeText ? parseInt(sizeText, 10) : void 0;      // bit width
        const arraySize = arraySizeText ? parseInt(arraySizeText, 10) : void 0;
        const isID = !!annotations?.includes("id");
        const isInline = !annotations?.includes("noninline");
        const isRelation = !!annotations?.includes("relation");
        const isSigned = !unsigned;                                   // "<u32>" => unsigned
        this.columns.push({
          name,
          type,
          isID,
          isInline,
          isRelation,
          isSigned,
          size,
          arraySize
        });
      }
    });
  }
  /** @returns All row IDs present in the underlying WDC table. */
  getAllIDs() {
    return this.wdc.getAllIDs();
  }
  /**
   * Decode one row into a { columnName: value } record using the parsed
   * column schema. Handles both WDC row shapes: an array of typed cells
   * (normal sections) and a raw-buffer row (sparse/offset-map sections —
   * NOTE(review): inferred from the two branches; confirm against WDCReader).
   *
   * @param {number} id - Row ID.
   * @returns {object | undefined} Decoded record, or undefined if absent.
   */
  getRowData(id) {
    const row = this.wdc.getRowData(id);
    if (!row) {
      return void 0;
    }
    const data = {};
    if (Array.isArray(row)) {
      // --- Cell-array rows: one cell per inline field. ---
      let fieldIndex = 0;
      this.columns.forEach((column) => {
        if (column.isID) {
          data[column.name] = id;
          if (column.isInline) {
            fieldIndex += 1;
          }
        } else if (column.isInline) {
          const cell = row[fieldIndex];
          assert(cell, `No value found for column ${column.name}`);
          const fieldInfo = this.wdc.fieldsInfo[fieldIndex];
          const srcSigned = fieldInfo.storageType === "bitpackedSigned";
          // Bytes actually stored for this field; palletized/common storage
          // is read back as 4-byte values.
          const srcSize = fieldInfo.storageType === "none" || fieldInfo.storageType === "bitpacked" || fieldInfo.storageType === "bitpackedSigned" ? Math.ceil(fieldInfo.fieldSizeBits / 8) : 4;
          // Target byte width from the .dbd declared bit width, if any.
          const dstSize = column.size ? Math.ceil(column.size / 8) : void 0;
          if (cell.type === "bitpackedArray") {
            // Cast each packed element individually.
            data[column.name] = cell.data.map((v) => {
              if (column.type === "float") {
                return castFloat(v, srcSize, srcSigned);
              }
              if (dstSize) {
                return castIntegerBySize(
                  v,
                  srcSize,
                  srcSigned,
                  dstSize,
                  column.isSigned
                );
              }
              return v;
            });
          } else if (column.type === "string" || column.type === "locstring") {
            // cell.data is the string ref; 0 means "no string" and the
            // column is simply omitted from the record.
            if (cell.data > 0) {
              assert(cell.type === "none", `Invalid data type for string column ${column.name}`);
              assert(typeof cell.string === "string", `Missing string for string column ${column.name}`);
              data[column.name] = cell.string;
            }
          } else if (column.type === "float") {
            assert(typeof cell.data === "number", `Invalid data type for float column ${column.name}`);
            data[column.name] = castFloat(cell.data, srcSize, srcSigned);
          } else if (typeof cell.data === "number") {
            data[column.name] = castIntegerBySize(
              cell.data,
              srcSize,
              srcSigned,
              dstSize ?? srcSize,
              column.isSigned
            );
          } else {
            // Remaining case: BigInt cell (64-bit field).
            assert(!column.size || column.size === 64, `Unexpected size ${column.size?.toString() ?? ""} for column ${column.name}`);
            if (srcSigned !== column.isSigned) {
              data[column.name] = castBigInt64(
                cell.data,
                srcSigned,
                column.isSigned
              );
            } else {
              data[column.name] = cell.data;
            }
          }
          fieldIndex += 1;
        } else if (column.isRelation) {
          // Non-inline relation value lives in the relationship map.
          const relation = this.wdc.getRowRelationship(id);
          data[column.name] = relation ?? 0;
        }
      });
    } else {
      // --- Raw-buffer rows: walk the buffer manually, field by field. ---
      const buffer = row.data;
      let offset = 0;
      let fieldIndex = 0;
      this.columns.forEach((column) => {
        if (column.isID) {
          data[column.name] = id;
          if (column.isInline) {
            fieldIndex += 1;
          }
        } else if (column.isInline) {
          const values = [];
          if (column.type === "string" || column.type === "locstring") {
            // Strings are inline NUL-terminated UTF-8 sequences.
            const count = column.arraySize ?? 1;
            for (let i = 0; i < count; i += 1) {
              const startOffset = offset;
              while (buffer[offset] !== 0) {
                offset += 1;
              }
              values.push(buffer.toString("utf-8", startOffset, offset));
              offset += 1; // skip the NUL terminator
            }
            data[column.name] = count > 1 ? values : values[0];
          } else {
            const currField = this.wdc.fields[fieldIndex];
            const nextField = this.wdc.fields[fieldIndex + 1];
            // Element byte width: declared .dbd bit width, or derived from
            // the WDC field record (which stores 32 minus the bit width —
            // NOTE(review): inferred from `32 - currField.size`; confirm).
            const size = Math.ceil((column.size ?? 32 - currField.size) / 8);
            let count;
            if (fieldIndex + 1 < this.wdc.fields.length) {
              // Element count from the gap to the next field's position.
              count = (nextField.position - currField.position) / size;
            } else {
              // Last field: arrays consume the rest of the row buffer.
              count = column.arraySize ? (buffer.byteLength - offset) / size : 1;
            }
            for (let i = 0; i < count; i += 1) {
              if (column.type === "float") {
                const value = buffer.readFloatLE(offset);
                // Same 2-decimal rounding as castFloat.
                values.push(Math.round(value * 100) / 100);
                offset += 4;
              } else if (size > 6) {
                // readIntLE maxes out at 6 bytes; 64-bit goes through BigInt.
                assert(size === 8, `Unexpected size ${size.toString()} for column ${column.name}`);
                const value = column.isSigned ? buffer.readBigInt64LE(offset) : buffer.readBigUInt64LE(offset);
                values.push(value);
                offset += size;
              } else {
                const value = column.isSigned ? buffer.readIntLE(offset, size) : buffer.readUIntLE(offset, size);
                values.push(value);
                offset += size;
              }
            }
            data[column.name] = count > 1 ? values : values[0];
          }
          fieldIndex += 1;
        } else if (column.isRelation) {
          const relation = this.wdc.getRowRelationship(id);
          data[column.name] = relation ?? 0;
        }
      });
    }
    return data;
  }
}
1849
+
1850
+ export { CASCClient, DBDParser, WDCReader };