@rhyster/wow-casc-dbc 2.5.1 → 2.6.0

This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
package/dist/index.mjs CHANGED
@@ -2,10 +2,10 @@ import assert from 'node:assert';
  import crypto from 'node:crypto';
  import { mapLimit, retry } from 'async';
  import cliProgress from 'cli-progress';
+ import zlib from 'node:zlib';
  import fs from 'node:fs/promises';
- import path from 'node:path';
  import http from 'node:http';
- import zlib from 'node:zlib';
+ import path from 'node:path';
 
  var __defProp$6 = Object.defineProperty;
  var __defNormalProp$6 = (obj, key, value) => key in obj ? __defProp$6(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
@@ -13,547 +13,684 @@ var __publicField$6 = (obj, key, value) => {
  __defNormalProp$6(obj, typeof key !== "symbol" ? key + "" : key, value);
  return value;
  };
- class Store {
- constructor(dataFile) {
- __publicField$6(this, "data");
- __publicField$6(this, "dataFile");
- __publicField$6(this, "promise");
- this.dataFile = dataFile;
- this.data = {};
- this.promise = new Promise((resolve) => {
- fs.readFile(dataFile, "utf-8").then((file) => {
- this.data = JSON.parse(file);
- resolve();
- }).catch(() => {
- resolve();
- });
- });
- }
- async get(key) {
- await this.promise;
- return this.data[key];
- }
- async set(key, value) {
- await this.promise;
- this.data[key] = value;
- await fs.writeFile(this.dataFile, JSON.stringify(this.data), "utf-8");
+ const ADB_MAGIC = 1481004104;
+ class ADBReader {
+ constructor(buffer) {
+ __publicField$6(this, "build");
+ __publicField$6(this, "entries", []);
+ __publicField$6(this, "tableEntries", /* @__PURE__ */ new Map());
+ const magic = buffer.readUInt32BE(0);
+ assert(magic === ADB_MAGIC, `[ADB]: Invalid magic: ${magic.toString(16).padStart(8, "0")}`);
+ const version = buffer.readUInt32LE(4);
+ assert(version === 9, `[ADB]: Invalid version: ${version.toString()}`);
+ const build = buffer.readUInt32LE(8);
+ this.build = build;
+ let pointer = 44;
+ while (pointer < buffer.byteLength) {
+ const offset = pointer;
+ const entryMagic = buffer.readUInt32BE(offset);
+ assert(entryMagic === ADB_MAGIC, `[ADB]: Invalid entry magic: ${magic.toString(16).padStart(8, "0")}`);
+ const regionID = buffer.readInt32LE(offset + 4);
+ const pushID = buffer.readInt32LE(offset + 8);
+ const uniqueID = buffer.readUInt32LE(offset + 12);
+ const tableHash = buffer.readUInt32LE(offset + 16);
+ const recordID = buffer.readUInt32LE(offset + 20);
+ const dataSize = buffer.readUInt32LE(offset + 24);
+ const recordState = buffer.readUInt32LE(offset + 28);
+ const data = buffer.subarray(offset + 32, offset + 32 + dataSize);
+ const entry = {
+ regionID,
+ pushID,
+ uniqueID,
+ tableHash,
+ recordID,
+ dataSize,
+ recordState,
+ data
+ };
+ this.entries.push(entry);
+ if (!this.tableEntries.has(tableHash)) {
+ this.tableEntries.set(tableHash, []);
+ }
+ this.tableEntries.get(tableHash)?.push(entry);
+ pointer += 32 + dataSize;
+ }
  }
  }
 
- const USER_AGENT = "node-wow-casc-dbc";
- const CACHE_ROOT = path.resolve("cache");
- const CACHE_DIRS = {
- build: "builds",
- indexes: "indices",
- data: "data",
- dbd: "dbd"
+ var __defProp$5 = Object.defineProperty;
+ var __defNormalProp$5 = (obj, key, value) => key in obj ? __defProp$5(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
+ var __publicField$5 = (obj, key, value) => {
+ __defNormalProp$5(obj, typeof key !== "symbol" ? key + "" : key, value);
+ return value;
  };
- const CACHE_INTEGRITY_FILE = path.resolve(CACHE_ROOT, "integrity.json");
- const cacheIntegrity = new Store(CACHE_INTEGRITY_FILE);
- const formatCDNKey = (key) => `${key.substring(0, 2)}/${key.substring(2, 4)}/${key}`;
- const requestData = async (url, {
- partialOffset,
- partialLength,
- showProgress
- } = {}) => new Promise((resolve, reject) => {
- const options = {
- headers: {
- "User-Agent": USER_AGENT,
- Range: partialOffset && partialLength ? `bytes=${partialOffset.toString()}-${(partialOffset + partialLength - 1).toString()}` : "bytes=0-"
- }
- };
- http.get(url, options, (res) => {
- if (res.statusCode === 301 || res.statusCode === 302) {
- if (res.headers.location) {
- requestData(res.headers.location, { partialOffset, partialLength, showProgress }).then(resolve).catch((err) => {
- throw err;
- });
- } else {
- reject(new Error(`Failed to request ${url}, Status Code: ${res.statusCode.toString()}`));
+ class Salsa20 {
+ constructor(key, nonce) {
+ __publicField$5(this, "fixed");
+ __publicField$5(this, "key");
+ __publicField$5(this, "nonce");
+ __publicField$5(this, "counter", new Uint32Array([0, 0]));
+ __publicField$5(this, "state", new Uint32Array(16));
+ __publicField$5(this, "block", new Uint8Array(64));
+ __publicField$5(this, "position", 0);
+ assert(key.length === 32 || key.length === 16, "Salsa20 requires 128-bit or 256-bit key");
+ assert(nonce.length === 8, "Salsa20 requires 64-bit nonce");
+ this.key = new Uint32Array(8);
+ const keyView = new DataView(key.buffer);
+ if (key.length === 32) {
+ for (let i = 0; i < 8; i += 1) {
+ this.key[i] = keyView.getUint32(i * 4, true);
  }
- return;
+ this.fixed = new Uint32Array([
+ 1634760805,
+ 857760878,
+ 2036477234,
+ 1797285236
+ ]);
+ } else {
+ for (let i = 0; i < 4; i += 1) {
+ const word = keyView.getUint32(i * 4, true);
+ this.key[i] = word;
+ this.key[i + 4] = word;
+ }
+ this.fixed = new Uint32Array([
+ 1634760805,
+ 824206446,
+ 2036477238,
+ 1797285236
+ ]);
  }
- if (!res.statusCode || res.statusCode < 200 || res.statusCode > 302) {
- reject(new Error(`Failed to request ${url}, Status Code: ${res.statusCode?.toString() ?? "undefined"}`));
- return;
+ this.nonce = new Uint32Array(2);
+ const nonceView = new DataView(nonce.buffer);
+ for (let i = 0; i < 2; i += 1) {
+ this.nonce[i] = nonceView.getUint32(i * 4, true);
  }
- const lengthText = res.headers["content-length"];
- const length = lengthText ? parseInt(lengthText, 10) : 0;
- const bar = showProgress && !Number.isNaN(length) && length >= 10485760 ? new cliProgress.SingleBar({ etaBuffer: 10240 }, cliProgress.Presets.shades_classic) : void 0;
- bar?.start(length, 0);
- const chunks = [];
- res.on("data", (chunk) => {
- bar?.increment(chunk.length);
- chunks.push(chunk);
- });
- res.on("end", () => {
- bar?.stop();
- resolve(Buffer.concat(chunks));
- });
- res.on("error", (err) => {
- bar?.stop();
- reject(err);
- });
- }).on("error", reject).end();
- });
- const downloadFile = (prefixes, type, key, {
- partialOffset,
- partialLength,
- showProgress,
- showAttemptFail
- } = {}) => {
- const urls = prefixes.map((prefix) => `${prefix}/${type}/${formatCDNKey(key)}`);
- return urls.reduce(
- (prev, url, index) => prev.catch((err) => {
- if (showAttemptFail && index > 0 && err instanceof Error) {
- console.warn(`${( new Date()).toISOString()} [WARN]:`, err.message);
- }
- return requestData(url, { partialOffset, partialLength, showProgress });
- }),
- Promise.reject(new Error(""))
- );
- };
- const getFileCache = async (file) => {
- const integrity = await cacheIntegrity.get(file);
- if (integrity) {
- try {
- const buffer = await fs.readFile(path.resolve(CACHE_ROOT, file));
- const hash = crypto.createHash("sha256").update(buffer).digest("hex");
- if (hash === integrity) {
- return buffer;
+ this.generateBlock();
+ }
+ // eslint-disable-next-line @typescript-eslint/naming-convention
+ QR(a, b, c, d) {
+ let t;
+ t = this.state[a] + this.state[d] & 4294967295;
+ this.state[b] ^= t << 7 | t >>> 25;
+ t = this.state[b] + this.state[a] & 4294967295;
+ this.state[c] ^= t << 9 | t >>> 23;
+ t = this.state[c] + this.state[b] & 4294967295;
+ this.state[d] ^= t << 13 | t >>> 19;
+ t = this.state[d] + this.state[c] & 4294967295;
+ this.state[a] ^= t << 18 | t >>> 14;
+ }
+ generateBlock() {
+ const init = new Uint32Array([
+ this.fixed[0],
+ this.key[0],
+ this.key[1],
+ this.key[2],
+ this.key[3],
+ this.fixed[1],
+ this.nonce[0],
+ this.nonce[1],
+ this.counter[0],
+ this.counter[1],
+ this.fixed[2],
+ this.key[4],
+ this.key[5],
+ this.key[6],
+ this.key[7],
+ this.fixed[3]
+ ]);
+ this.state = new Uint32Array(init);
+ for (let i = 0; i < 20; i += 2) {
+ this.QR(0, 4, 8, 12);
+ this.QR(5, 9, 13, 1);
+ this.QR(10, 14, 2, 6);
+ this.QR(15, 3, 7, 11);
+ this.QR(0, 1, 2, 3);
+ this.QR(5, 6, 7, 4);
+ this.QR(10, 11, 8, 9);
+ this.QR(15, 12, 13, 14);
+ }
+ for (let i = 0; i < 16; i += 1) {
+ const word = this.state[i] + init[i] & 4294967295;
+ this.block[i * 4] = word & 255;
+ this.block[i * 4 + 1] = word >>> 8 & 255;
+ this.block[i * 4 + 2] = word >>> 16 & 255;
+ this.block[i * 4 + 3] = word >>> 24 & 255;
+ }
+ this.counter[0] = this.counter[0] + 1 & 4294967295;
+ if (this.counter[0] === 0) {
+ this.counter[1] = this.counter[1] + 1 & 4294967295;
+ }
+ }
+ process(input) {
+ const { length } = input;
+ const result = new Uint8Array(length);
+ for (let i = 0; i < length; i += 1) {
+ if (this.position === 64) {
+ this.generateBlock();
+ this.position = 0;
  }
- } catch {
+ result[i] = input[i] ^ this.block[this.position];
+ this.position += 1;
  }
+ return result;
  }
- return void 0;
+ }
+
+ var __defProp$4 = Object.defineProperty;
+ var __defNormalProp$4 = (obj, key, value) => key in obj ? __defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
+ var __publicField$4 = (obj, key, value) => {
+ __defNormalProp$4(obj, typeof key !== "symbol" ? key + "" : key, value);
+ return value;
  };
- const getDataFile = async (prefixes, key, type, buildCKey, {
- name,
- partialOffset,
- partialLength,
- showProgress,
- showAttemptFail
- } = {}) => {
- const dir = type === "build" ? path.join(CACHE_DIRS[type], buildCKey) : CACHE_DIRS[type];
- const file = name ? path.join(dir, name) : path.join(dir, key);
- const cacheBuffer = await getFileCache(file);
- if (cacheBuffer) {
- if (name === void 0 && partialOffset !== void 0 && partialLength !== void 0) {
- return cacheBuffer.subarray(partialOffset, partialOffset + partialLength);
- }
- return cacheBuffer;
- }
- const downloadBuffer = await downloadFile(prefixes, "data", key, {
- partialOffset,
- partialLength,
- showProgress,
- showAttemptFail
- });
- if (partialOffset === void 0 && partialLength === void 0 || name) {
- await fs.mkdir(path.resolve(CACHE_ROOT, dir), { recursive: true });
- await fs.writeFile(path.resolve(CACHE_ROOT, file), downloadBuffer);
- const hash = crypto.createHash("sha256").update(downloadBuffer).digest("hex");
- await cacheIntegrity.set(file, hash);
- }
- return downloadBuffer;
- };
- const getConfigFile = async (prefixes, key, {
- showProgress,
- showAttemptFail
- } = {}) => {
- const downloadBuffer = await downloadFile(prefixes, "config", key, { showProgress, showAttemptFail });
- return downloadBuffer.toString("utf-8");
- };
- const getProductVersions = async (region, product) => {
- const url = `http://${region}.patch.battle.net:1119/${product}/versions`;
- const headers = {
- "User-Agent": USER_AGENT
- };
- const res = await fetch(url, { headers });
- return res.text();
- };
- const getProductCDNs = async (region, product) => {
- const url = `http://${region}.patch.battle.net:1119/${product}/cdns`;
- const headers = {
- "User-Agent": USER_AGENT
- };
- const res = await fetch(url, { headers });
- return res.text();
- };
-
- const parseProductConfig = (text) => {
- const lines = text.split(/\r?\n/);
- const headers = lines[0].split("|").map((header) => header.split("!")[0].replace(" ", ""));
- const entries = lines.filter((line, index) => index > 0 && line.trim().length !== 0 && !line.startsWith("#")).map((line) => {
- const node = {};
- const entryFields = line.split("|");
- for (let i = 0, n = entryFields.length; i < n; i += 1) {
- node[headers[i]] = entryFields[i];
+ const BLTE_MAGIC = 1112298565;
+ const ENC_TYPE_SALSA20 = 83;
+ const EMPTY_HASH = "00000000000000000000000000000000";
+ class BLTEReader {
+ constructor(buffer, eKey, keys = /* @__PURE__ */ new Map()) {
+ __publicField$4(this, "buffer");
+ __publicField$4(this, "blte");
+ __publicField$4(this, "blocks", []);
+ __publicField$4(this, "keys");
+ __publicField$4(this, "processedBlock", 0);
+ __publicField$4(this, "processedOffset", 0);
+ this.blte = buffer;
+ this.buffer = Buffer.alloc(0);
+ this.keys = keys;
+ const size = buffer.byteLength;
+ assert(size >= 8, `[BLTE]: Invalid size: ${size.toString()} < 8`);
+ const magic = buffer.readUInt32BE(0);
+ assert(magic === BLTE_MAGIC, `[BLTE]: Invalid magic: ${magic.toString(16).padStart(8, "0")}`);
+ const headerSize = buffer.readUInt32BE(4);
+ if (headerSize === 0) {
+ const blteHash2 = crypto.createHash("md5").update(buffer).digest("hex");
+ assert(blteHash2 === eKey, `[BLTE]: Invalid hash: expected ${eKey}, got ${blteHash2}`);
+ this.blocks.push({
+ compressedSize: size - 8,
+ decompressedSize: size - 9,
+ hash: EMPTY_HASH
+ });
+ this.processedOffset = 8;
+ return;
  }
- return node;
- });
- return entries;
- };
- const parseProductVersions = (text) => parseProductConfig(text);
- const parseProductCDNs = (text) => parseProductConfig(text);
-
- const normalizeKey = (key) => key.split("-").map((part, index) => index === 0 ? part : `${part.charAt(0).toUpperCase()}${part.slice(1)}`).join("");
- const parseConfig = (text) => {
- const entries = {};
- text.split(/\r?\n/).filter((line) => line.trim().length !== 0 && !line.startsWith("#")).forEach((line) => {
- const match = line.match(/([^\s]+)\s?=\s?(.*)/);
- assert(match !== null, "Invalid token encountered parsing CDN config");
- const [key, value] = match.slice(1);
- entries[normalizeKey(key)] = value;
- });
- return entries;
- };
- const parseCDNConfig = (text) => parseConfig(text);
- const parseBuildConfig = (text) => parseConfig(text);
-
- const VERSION_SUB_OFFSET = -12;
- const CHECKSUM_SIZE_SUB_OFFSET = -5;
- const BLOCK_SIZE_OFFSET = 3;
- const OFFSET_BYTES_OFFSET = 4;
- const SIZE_BYTES_OFFSET = 5;
- const KEY_SIZE_OFFSET = 6;
- const NUM_ELEMENTS_OFFSET = 8;
- const CHECKSUM_OFFSET = 12;
- const CHECKSUM_TRIES = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0];
- const tryArchiveIndexChecksumSize = (buffer, cKey) => {
- const res = CHECKSUM_TRIES.filter(
- (index) => buffer.readUInt8(buffer.byteLength - index + CHECKSUM_SIZE_SUB_OFFSET) === index && buffer.readUInt8(buffer.byteLength - index + VERSION_SUB_OFFSET) === 1
- );
- if (res.length === 1) {
- return res[0];
- }
- throw new Error(`Invalid checksum size: ${res.join(", ")} in ${cKey}`);
- };
- const parseArchiveIndex = (buffer, cKey) => {
- const checksumSize = tryArchiveIndexChecksumSize(buffer, cKey);
- const versionOffset = buffer.byteLength - checksumSize + VERSION_SUB_OFFSET;
- const footerOffset = versionOffset - checksumSize;
- const tocChecksum = buffer.toString("hex", footerOffset, versionOffset);
- const version = buffer.readUInt8(versionOffset);
- const blockSizeKB = buffer.readUInt8(versionOffset + BLOCK_SIZE_OFFSET);
- const offsetBytes = buffer.readUInt8(versionOffset + OFFSET_BYTES_OFFSET);
- const sizeBytes = buffer.readUInt8(versionOffset + SIZE_BYTES_OFFSET);
- const keySize = buffer.readUInt8(versionOffset + KEY_SIZE_OFFSET);
- const numElements = buffer.readUInt32LE(versionOffset + NUM_ELEMENTS_OFFSET);
- const footerChecksum = buffer.toString("hex", versionOffset + CHECKSUM_OFFSET);
- assert(version === 1, `Invalid version: ${version.toString()} in ${cKey}`);
- const entrySize = keySize + offsetBytes + sizeBytes;
- const blockSize = blockSizeKB * 1024;
- const numBlocks = footerOffset / (blockSize + keySize + checksumSize);
- const tocSize = (keySize + checksumSize) * numBlocks;
- const toc = buffer.subarray(footerOffset - tocSize, footerOffset);
- const footer = buffer.subarray(footerOffset);
- const footerCheckBuffer = Buffer.concat([
- buffer.subarray(versionOffset, buffer.byteLength - checksumSize),
- Buffer.alloc(checksumSize)
- ]);
- const hash = crypto.createHash("md5").update(footer).digest("hex");
- assert(hash === cKey, `Invalid footer hash in ${cKey}: expected ${cKey}, got ${hash}`);
- const footerHash = crypto.createHash("md5").update(footerCheckBuffer).digest("hex").slice(0, checksumSize * 2);
- assert(footerHash === footerChecksum, `Invalid footer checksum in ${cKey}: expected ${footerChecksum}, got ${footerHash}`);
- const tocHash = crypto.createHash("md5").update(toc).digest("hex").slice(0, checksumSize * 2);
- assert(tocHash === tocChecksum, `Invalid toc checksum in ${cKey}: expected ${tocChecksum}, got ${tocHash}`);
- const result = /* @__PURE__ */ new Map();
- for (let i = 0; i < numBlocks; i += 1) {
- const lastEkey = toc.toString("hex", i * keySize, (i + 1) * keySize);
- const blockChecksum = toc.toString("hex", numBlocks * keySize + i * checksumSize, numBlocks * keySize + (i + 1) * checksumSize);
- const blockOffset = i * blockSize;
- const blockHash = crypto.createHash("md5").update(buffer.subarray(i * blockSize, (i + 1) * blockSize)).digest("hex").slice(0, checksumSize * 2);
- assert(blockChecksum === blockHash, `Invalid block hash in ${cKey} at ${i.toString()}: expected ${blockChecksum}, got ${blockHash}`);
- let length = 0;
- while (length < blockSize) {
- const entryOffset = blockOffset + length * entrySize;
- const eKey = buffer.toString("hex", entryOffset, entryOffset + keySize);
- const size = buffer.readUIntBE(entryOffset + keySize, sizeBytes);
- const offset = buffer.readUIntBE(entryOffset + keySize + sizeBytes, offsetBytes);
- result.set(eKey, { key: cKey, size, offset });
- length += 1;
- if (eKey === lastEkey) {
- break;
- }
+ const blteHash = crypto.createHash("md5").update(buffer.subarray(0, headerSize)).digest("hex");
+ assert(blteHash === eKey, `[BLTE]: Invalid hash: expected ${eKey}, got ${blteHash}`);
+ assert(size >= 12, `[BLTE]: Invalid size: ${size.toString()} < 12`);
+ const flag = buffer.readUInt8(8);
+ const numBlocks = buffer.readIntBE(9, 3);
+ assert(numBlocks > 0, `[BLTE]: Invalid number of blocks: ${numBlocks.toString()}`);
+ assert(flag === 15, `[BLTE]: Invalid flag: ${flag.toString(16).padStart(2, "0")}`);
+ const blockHeaderSize = numBlocks * 24;
+ assert(headerSize === blockHeaderSize + 12, `[BLTE]: Invalid header size: header size ${headerSize.toString()} != block header size ${blockHeaderSize.toString()} + 12`);
+ assert(size >= headerSize, `[BLTE]: Invalid size: ${size.toString()} < ${headerSize.toString()}`);
+ for (let i = 0; i < numBlocks; i += 1) {
+ const offset = 12 + i * 24;
+ const compressedSize = buffer.readUInt32BE(offset);
+ const decompressedSize = buffer.readUInt32BE(offset + 4);
+ const hash = buffer.toString("hex", offset + 8, offset + 24);
+ this.blocks.push({
+ compressedSize,
+ decompressedSize,
+ hash
+ });
  }
+ this.processedOffset = headerSize;
  }
- assert(result.size === numElements, `Invalid number of elements: ${result.size.toString()} != ${numElements.toString()} in ${cKey}`);
- return result;
- };
-
- var __defProp$5 = Object.defineProperty;
- var __defNormalProp$5 = (obj, key, value) => key in obj ? __defProp$5(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
- var __publicField$5 = (obj, key, value) => {
- __defNormalProp$5(obj, typeof key !== "symbol" ? key + "" : key, value);
- return value;
- };
- class Salsa20 {
- constructor(key, nonce) {
- __publicField$5(this, "fixed");
- __publicField$5(this, "key");
- __publicField$5(this, "nonce");
- __publicField$5(this, "counter", new Uint32Array([0, 0]));
- __publicField$5(this, "state", new Uint32Array(16));
- __publicField$5(this, "block", new Uint8Array(64));
- __publicField$5(this, "position", 0);
- assert(key.length === 32 || key.length === 16, "Salsa20 requires 128-bit or 256-bit key");
- assert(nonce.length === 8, "Salsa20 requires 64-bit nonce");
- this.key = new Uint32Array(8);
- const keyView = new DataView(key.buffer);
- if (key.length === 32) {
- for (let i = 0; i < 8; i += 1) {
- this.key[i] = keyView.getUint32(i * 4, true);
- }
- this.fixed = new Uint32Array([
- 1634760805,
- 857760878,
- 2036477234,
- 1797285236
- ]);
- } else {
- for (let i = 0; i < 4; i += 1) {
- const word = keyView.getUint32(i * 4, true);
- this.key[i] = word;
- this.key[i + 4] = word;
+ processBlock(buffer, index, allowMissingKey) {
+ const flag = buffer.readUInt8(0);
+ switch (flag) {
+ case 69: {
+ let offset = 1;
+ const keyNameLength = buffer.readUInt8(offset);
+ offset += 1;
+ const keyNameBE = buffer.toString("hex", offset, offset + keyNameLength);
+ offset += keyNameLength;
+ const ivLength = buffer.readUInt8(offset);
+ offset += 1;
+ const ivBuffer = buffer.subarray(offset, offset + ivLength);
+ offset += ivLength;
+ const encryptType = buffer.readUInt8(offset);
+ offset += 1;
+ assert(encryptType === ENC_TYPE_SALSA20, `[BLTE]: Invalid encrypt type: ${encryptType.toString(16).padStart(2, "0")} at block ${index.toString()}`);
+ const keyName = [...keyNameBE.matchAll(/.{2}/g)].map((v) => v[0]).reverse().join("").toLowerCase();
+ const key = this.keys.get(keyName);
+ if (!key) {
+ if (allowMissingKey) {
+ return keyName;
+ }
+ throw new Error(`[BLTE]: Missing key: ${keyName} at block ${index.toString()}`);
+ }
+ const iv = new Uint8Array(8);
+ for (let i = 0; i < 8; i += 1) {
+ if (i < ivLength) {
+ iv[i] = ivBuffer.readUInt8(i) ^ index >>> 8 * i & 255;
+ } else {
+ iv[i] = 0;
+ }
+ }
+ const handler = new Salsa20(key, iv);
+ const decrypted = handler.process(buffer.subarray(offset));
+ if (allowMissingKey) {
+ return this.processBlock(Buffer.from(decrypted.buffer), index, true);
+ }
+ return this.processBlock(Buffer.from(decrypted.buffer), index, false);
  }
- this.fixed = new Uint32Array([
- 1634760805,
- 824206446,
- 2036477238,
- 1797285236
- ]);
- }
- this.nonce = new Uint32Array(2);
- const nonceView = new DataView(nonce.buffer);
- for (let i = 0; i < 2; i += 1) {
- this.nonce[i] = nonceView.getUint32(i * 4, true);
- }
- this.generateBlock();
- }
- QR(a, b, c, d) {
- let t;
- t = this.state[a] + this.state[d] & 4294967295;
- this.state[b] ^= t << 7 | t >>> 25;
- t = this.state[b] + this.state[a] & 4294967295;
- this.state[c] ^= t << 9 | t >>> 23;
- t = this.state[c] + this.state[b] & 4294967295;
- this.state[d] ^= t << 13 | t >>> 19;
- t = this.state[d] + this.state[c] & 4294967295;
- this.state[a] ^= t << 18 | t >>> 14;
- }
- generateBlock() {
- const init = new Uint32Array([
- this.fixed[0],
- this.key[0],
- this.key[1],
- this.key[2],
- this.key[3],
- this.fixed[1],
- this.nonce[0],
- this.nonce[1],
- this.counter[0],
- this.counter[1],
- this.fixed[2],
- this.key[4],
- this.key[5],
- this.key[6],
- this.key[7],
- this.fixed[3]
- ]);
- this.state = new Uint32Array(init);
- for (let i = 0; i < 20; i += 2) {
- this.QR(0, 4, 8, 12);
- this.QR(5, 9, 13, 1);
- this.QR(10, 14, 2, 6);
- this.QR(15, 3, 7, 11);
- this.QR(0, 1, 2, 3);
- this.QR(5, 6, 7, 4);
- this.QR(10, 11, 8, 9);
- this.QR(15, 12, 13, 14);
- }
- for (let i = 0; i < 16; i += 1) {
- const word = this.state[i] + init[i] & 4294967295;
- this.block[i * 4] = word & 255;
- this.block[i * 4 + 1] = word >>> 8 & 255;
- this.block[i * 4 + 2] = word >>> 16 & 255;
- this.block[i * 4 + 3] = word >>> 24 & 255;
- }
- this.counter[0] = this.counter[0] + 1 & 4294967295;
- if (this.counter[0] === 0) {
- this.counter[1] = this.counter[1] + 1 & 4294967295;
+ case 70:
+ throw new Error(`[BLTE]: Frame (Recursive) block not supported at block ${index.toString()}`);
+ case 78:
+ return buffer.subarray(1);
+ case 90:
+ return zlib.inflateSync(buffer.subarray(1));
+ default:
+ throw new Error(`[BLTE]: Invalid block flag: ${flag.toString(16).padStart(2, "0")} at block ${index.toString()}`);
  }
  }
- process(input) {
- const { length } = input;
- const result = new Uint8Array(length);
- for (let i = 0; i < length; i += 1) {
- if (this.position === 64) {
- this.generateBlock();
- this.position = 0;
+ processBytes(allowMissingKey = false, size = Infinity) {
+ const missingKeyBlocks = [];
+ while (this.processedBlock < this.blocks.length && size > this.buffer.byteLength) {
+ const blockIndex = this.processedBlock;
+ const block = this.blocks[blockIndex];
+ const blockBuffer = this.blte.subarray(
+ this.processedOffset,
+ this.processedOffset + block.compressedSize
+ );
+ if (block.hash !== EMPTY_HASH) {
+ const blockHash = crypto.createHash("md5").update(blockBuffer).digest("hex");
+ assert(blockHash === block.hash, `[BLTE]: Invalid block hash: expected ${block.hash}, got ${blockHash}`);
  }
- result[i] = input[i] ^ this.block[this.position];
- this.position += 1;
+ if (allowMissingKey) {
+ const buffer = this.processBlock(blockBuffer, blockIndex, allowMissingKey);
+ if (buffer instanceof Buffer) {
+ assert(
+ buffer.byteLength === block.decompressedSize,
+ `[BLTE]: Invalid decompressed size: expected ${block.decompressedSize.toString()}, got ${buffer.byteLength.toString()}`
+ );
+ this.buffer = Buffer.concat([this.buffer, buffer]);
+ } else {
+ missingKeyBlocks.push({
+ offset: this.buffer.byteLength,
+ size: block.decompressedSize,
+ blockIndex,
+ keyName: buffer
+ });
+ this.buffer = Buffer.concat([
+ this.buffer,
+ Buffer.alloc(block.decompressedSize)
+ ]);
+ }
+ } else {
+ const buffer = this.processBlock(blockBuffer, blockIndex, allowMissingKey);
+ assert(
+ buffer.byteLength === block.decompressedSize,
+ `[BLTE]: Invalid decompressed size: expected ${block.decompressedSize.toString()}, got ${buffer.byteLength.toString()}`
+ );
+ this.buffer = Buffer.concat([this.buffer, buffer]);
+ }
+ this.processedBlock += 1;
+ this.processedOffset += block.compressedSize;
  }
- return result;
+ return allowMissingKey ? missingKeyBlocks : void 0;
  }
  }
 
- var __defProp$4 = Object.defineProperty;
- var __defNormalProp$4 = (obj, key, value) => key in obj ? __defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
- var __publicField$4 = (obj, key, value) => {
- __defNormalProp$4(obj, typeof key !== "symbol" ? key + "" : key, value);
+ var __defProp$3 = Object.defineProperty;
+ var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
+ var __publicField$3 = (obj, key, value) => {
+ __defNormalProp$3(obj, typeof key !== "symbol" ? key + "" : key, value);
  return value;
  };
- const BLTE_MAGIC = 1112298565;
- const ENC_TYPE_SALSA20 = 83;
- const EMPTY_HASH = "00000000000000000000000000000000";
- class BLTEReader {
- constructor(buffer, eKey, keys = /* @__PURE__ */ new Map()) {
- __publicField$4(this, "buffer");
- __publicField$4(this, "blte");
- __publicField$4(this, "blocks", []);
- __publicField$4(this, "keys");
- __publicField$4(this, "processedBlock", 0);
- __publicField$4(this, "processedOffset", 0);
- this.blte = buffer;
- this.buffer = Buffer.alloc(0);
- this.keys = keys;
- const size = buffer.byteLength;
- assert(size >= 8, `[BLTE]: Invalid size: ${size.toString()} < 8`);
- const magic = buffer.readUInt32BE(0);
- assert(magic === BLTE_MAGIC, `[BLTE]: Invalid magic: ${magic.toString(16).padStart(8, "0")}`);
- const headerSize = buffer.readUInt32BE(4);
- if (headerSize === 0) {
- const blteHash2 = crypto.createHash("md5").update(buffer).digest("hex");
- assert(blteHash2 === eKey, `[BLTE]: Invalid hash: expected ${eKey}, got ${blteHash2}`);
- this.blocks.push({
- compressedSize: size - 8,
- decompressedSize: size - 9,
- hash: EMPTY_HASH
+ class Store {
+ constructor(dataFile) {
+ __publicField$3(this, "data");
+ __publicField$3(this, "dataFile");
+ __publicField$3(this, "promise");
+ this.dataFile = dataFile;
+ this.data = {};
+ this.promise = new Promise((resolve) => {
+ fs.readFile(dataFile, "utf-8").then((file) => {
+ this.data = JSON.parse(file);
+ resolve();
+ }).catch(() => {
+ resolve();
  });
- this.processedOffset = 8;
+ });
+ }
+ async get(key) {
+ await this.promise;
+ return this.data[key];
+ }
+ async set(key, value) {
+ await this.promise;
+ this.data[key] = value;
+ await fs.writeFile(this.dataFile, JSON.stringify(this.data), "utf-8");
+ }
+ }
+
+ const USER_AGENT = "node-wow-casc-dbc";
+ const CACHE_ROOT = path.resolve("cache");
+ const CACHE_DIRS = {
+ build: "builds",
+ indexes: "indices",
+ data: "data",
+ dbd: "dbd"
+ };
+ const CACHE_INTEGRITY_FILE = path.resolve(CACHE_ROOT, "integrity.json");
+ const cacheIntegrity = new Store(CACHE_INTEGRITY_FILE);
+ const formatCDNKey = (key) => `${key.substring(0, 2)}/${key.substring(2, 4)}/${key}`;
+ const requestData = async (url, {
+ partialOffset,
+ partialLength,
+ showProgress
+ } = {}) => new Promise((resolve, reject) => {
+ const options = {
+ headers: {
+ // eslint-disable-next-line @typescript-eslint/naming-convention
+ "User-Agent": USER_AGENT,
+ // eslint-disable-next-line @typescript-eslint/naming-convention
+ Range: partialOffset !== void 0 && partialLength !== void 0 ? `bytes=${partialOffset.toString()}-${(partialOffset + partialLength - 1).toString()}` : "bytes=0-"
+ }
+ };
+ http.get(url, options, (res) => {
+ if (res.statusCode === 301 || res.statusCode === 302) {
+ if (res.headers.location !== void 0) {
+ requestData(res.headers.location, { partialOffset, partialLength, showProgress }).then(resolve).catch((err) => {
+ throw err;
+ });
+ } else {
+ reject(new Error(`Failed to request ${url}, Status Code: ${res.statusCode.toString()}`));
+ }
  return;
  }
- const blteHash = crypto.createHash("md5").update(buffer.subarray(0, headerSize)).digest("hex");
- assert(blteHash === eKey, `[BLTE]: Invalid hash: expected ${eKey}, got ${blteHash}`);
- assert(size >= 12, `[BLTE]: Invalid size: ${size.toString()} < 12`);
- const flag = buffer.readUInt8(8);
- const numBlocks = buffer.readIntBE(9, 3);
- assert(numBlocks > 0, `[BLTE]: Invalid number of blocks: ${numBlocks.toString()}`);
- assert(flag === 15, `[BLTE]: Invalid flag: ${flag.toString(16).padStart(2, "0")}`);
- const blockHeaderSize = numBlocks * 24;
- assert(headerSize === blockHeaderSize + 12, `[BLTE]: Invalid header size: header size ${headerSize.toString()} != block header size ${blockHeaderSize.toString()} + 12`);
- assert(size >= headerSize, `[BLTE]: Invalid size: ${size.toString()} < ${headerSize.toString()}`);
- for (let i = 0; i < numBlocks; i += 1) {
- const offset = 12 + i * 24;
- const compressedSize = buffer.readUInt32BE(offset);
- const decompressedSize = buffer.readUInt32BE(offset + 4);
- const hash = buffer.toString("hex", offset + 8, offset + 24);
- this.blocks.push({
- compressedSize,
- decompressedSize,
- hash
- });
+ if (res.statusCode === void 0 || res.statusCode < 200 || res.statusCode > 302) {
+ reject(new Error(`Failed to request ${url}, Status Code: ${res.statusCode?.toString() ?? "undefined"}`));
+ return;
  }
- this.processedOffset = headerSize;
+ const lengthText = res.headers["content-length"];
+ const length = lengthText !== void 0 ? parseInt(lengthText, 10) : 0;
+ const bar = showProgress === true && !Number.isNaN(length) && length >= 10485760 ? new cliProgress.SingleBar({ etaBuffer: 10240 }, cliProgress.Presets.shades_classic) : void 0;
+ bar?.start(length, 0);
+ const chunks = [];
+ res.on("data", (chunk) => {
+ bar?.increment(chunk.length);
+ chunks.push(chunk);
+ });
+ res.on("end", () => {
+ bar?.stop();
+ resolve(Buffer.concat(chunks));
+ });
+ res.on("error", (err) => {
+ bar?.stop();
+ reject(err);
+ });
+ }).on("error", reject).end();
+ });
+ const downloadFile = (prefixes, type, key, {
+ partialOffset,
+ partialLength,
+ showProgress,
+ showAttemptFail
+ } = {}) => {
+ const urls = prefixes.map((prefix) => `${prefix}/${type}/${formatCDNKey(key)}`);
+ return urls.reduce(
+ (prev, url, index) => prev.catch((err) => {
+ if (showAttemptFail === true && index > 0 && err instanceof Error) {
+ console.warn(`${( new Date()).toISOString()} [WARN]:`, err.message);
+ }
+ return requestData(url, { partialOffset, partialLength, showProgress });
+ }),
+ Promise.reject(new Error(""))
+ );
+ };
+ const getFileCache = async (file) => {
+ const integrity = await cacheIntegrity.get(file);
+ if (integrity !== void 0) {
+ try {
+ const buffer = await fs.readFile(path.resolve(CACHE_ROOT, file));
+ const hash = crypto.createHash("sha256").update(buffer).digest("hex");
+ if (hash === integrity) {
+ return buffer;
+ }
+ } catch {
+ }
+ }
+ return void 0;
+ };
+ const getDataFile = async (prefixes, key, type, buildCKey, {
+ name,
+ partialOffset,
+ partialLength,
+ showProgress,
+ showAttemptFail
+ } = {}) => {
+ const dir = type === "build" ? path.join(CACHE_DIRS[type], buildCKey) : CACHE_DIRS[type];
+ const file = name !== void 0 ? path.join(dir, name) : path.join(dir, key);
+ const cacheBuffer = await getFileCache(file);
+ if (cacheBuffer) {
+ if (name === void 0 && partialOffset !== void 0 && partialLength !== void 0) {
+ return cacheBuffer.subarray(partialOffset, partialOffset + partialLength);
+ }
+ return cacheBuffer;
+ }
+ const downloadBuffer = await downloadFile(prefixes, "data", key, {
+ partialOffset,
+ partialLength,
+ showProgress,
+ showAttemptFail
+ });
+ if (partialOffset === void 0 && partialLength === void 0 || name !== void 0) {
+ await fs.mkdir(path.resolve(CACHE_ROOT, dir), { recursive: true });
+ await fs.writeFile(path.resolve(CACHE_ROOT, file), downloadBuffer);
+ const hash = crypto.createHash("sha256").update(downloadBuffer).digest("hex");
+ await cacheIntegrity.set(file, hash);
+ }
+ return downloadBuffer;
+ };
+ const getConfigFile = async (prefixes, key, {
+ showProgress,
+ showAttemptFail
+ } = {}) => {
+ const downloadBuffer = await downloadFile(prefixes, "config", key, { showProgress, showAttemptFail });
+ return downloadBuffer.toString("utf-8");
+ };
+ const getProductVersions = async (region, product) => {
+ const url = `http://${region}.patch.battle.net:1119/${product}/versions`;
+ const headers = new Headers();
+ headers.set("User-Agent", USER_AGENT);
+ const res = await fetch(url, { headers });
+ return res.text();
+ };
+ const getProductCDNs = async (region, product) => {
+ const url = `http://${region}.patch.battle.net:1119/${product}/cdns`;
+ const headers = new Headers();
+ headers.set("User-Agent", USER_AGENT);
+ const res = await fetch(url, { headers });
+ return res.text();
+ };
+
+ const hashlittle2 = (key, pc = 0, pb = 0) => {
+ const { length } = key;
+ let offset = 0;
+ let a = 3735928559 + length + pc | 0;
+ let b = 3735928559 + length + pc | 0;
+ let c = 3735928559 + length + pc + pb | 0;
+ while (length - offset > 12) {
+ a += key.charCodeAt(offset + 0);
+ a += key.charCodeAt(offset + 1) << 8;
+ a += key.charCodeAt(offset + 2) << 16;
+ a += key.charCodeAt(offset + 3) << 24;
+ b += key.charCodeAt(offset + 4);
+ b += key.charCodeAt(offset + 5) << 8;
+ b += key.charCodeAt(offset + 6) << 16;
+ b += key.charCodeAt(offset + 7) << 24;
+ c += key.charCodeAt(offset + 8);
+ c += key.charCodeAt(offset + 9) << 8;
+ c += key.charCodeAt(offset + 10) << 16;
+ c += key.charCodeAt(offset + 11) << 24;
+ a -= c;
+ a ^= c << 4 | c >>> 28;
+ c = c + b | 0;
+ b -= a;
+ b ^= a << 6 | a >>> 26;
+ a = a + c | 0;
+ c -= b;
+ c ^= b << 8 | b >>> 24;
+ b = b + a | 0;
+ a -= c;
+ a ^= c << 16 | c >>> 16;
+ c = c + b | 0;
+ b -= a;
+ b ^= a << 19 | a >>> 13;
+ a = a + c | 0;
+ c -= b;
+ c ^= b << 4 | b >>> 28;
+ b = b + a | 0;
+ offset += 12;
  }
- processBlock(buffer, index, allowMissingKey) {
- const flag = buffer.readUInt8(0);
- switch (flag) {
- case 69: {
- let offset = 1;
- const keyNameLength = buffer.readUInt8(offset);
- offset += 1;
- const keyNameBE = buffer.toString("hex", offset, offset + keyNameLength);
- offset += keyNameLength;
- const ivLength = buffer.readUInt8(offset);
- offset += 1;
- const ivBuffer = buffer.subarray(offset, offset + ivLength);
- offset += ivLength;
- const encryptType = buffer.readUInt8(offset);
- offset += 1;
- assert(encryptType === ENC_TYPE_SALSA20, `[BLTE]: Invalid encrypt type: ${encryptType.toString(16).padStart(2, "0")} at block ${index.toString()}`);
- const keyName = [...keyNameBE.matchAll(/.{2}/g)].map((v) => v[0]).reverse().join("").toLowerCase();
- const key = this.keys.get(keyName);
- if (!key) {
- if (allowMissingKey) {
- return keyName;
- }
- throw new Error(`[BLTE]: Missing key: ${keyName} at block ${index.toString()}`);
- }
- const iv = new Uint8Array(8);
- for (let i = 0; i < 8; i += 1) {
- if (i < ivLength) {
- iv[i] = ivBuffer.readUInt8(i) ^ index >>> 8 * i & 255;
- } else {
- iv[i] = 0;
- }
- }
- const handler = new Salsa20(key, iv);
- const decrypted = handler.process(buffer.subarray(offset));
- if (allowMissingKey) {
- return this.processBlock(Buffer.from(decrypted.buffer), index, true);
- }
- return this.processBlock(Buffer.from(decrypted.buffer), index, false);
- }
- case 70:
- throw new Error(`[BLTE]: Frame (Recursive) block not supported at block ${index.toString()}`);
- case 78:
- return buffer.subarray(1);
- case 90:
- return zlib.inflateSync(buffer.subarray(1));
- default:
- throw new Error(`[BLTE]: Invalid block flag: ${flag.toString(16).padStart(2, "0")} at block ${index.toString()}`);
+ if (length - offset > 0) {
+ switch (length - offset) {
+ case 12:
+ c += key.charCodeAt(offset + 11) << 24;
+ case 11:
+ c += key.charCodeAt(offset + 10) << 16;
+ case 10:
+ c += key.charCodeAt(offset + 9) << 8;
+ case 9:
+ c += key.charCodeAt(offset + 8);
+ case 8:
+ b += key.charCodeAt(offset + 7) << 24;
+ case 7:
+ b += key.charCodeAt(offset + 6) << 16;
+ case 6:
+ b += key.charCodeAt(offset + 5) << 8;
+ case 5:
+ b += key.charCodeAt(offset + 4);
+ case 4:
+ a += key.charCodeAt(offset + 3) << 24;
+ case 3:
+ a += key.charCodeAt(offset + 2) << 16;
+ case 2:
+ a += key.charCodeAt(offset + 1) << 8;
+ case 1:
+ a += key.charCodeAt(offset + 0);
  }
+ c ^= b;
+ c -= b << 14 | b >>> 18;
+ a ^= c;
+ a -= c << 11 | c >>> 21;
+ b ^= a;
+ b -= a << 25 | a >>> 7;
+ c ^= b;
+ c -= b << 16 | b >>> 16;
+ a ^= c;
+ a -= c << 4 | c >>> 28;
+ b ^= a;
+ b -= a << 14 | a >>> 18;
+ c ^= b;
+ c -= b << 24 | b >>> 8;
  }
- processBytes(allowMissingKey = false, size = Infinity) {
- const missingKeyBlocks = [];
- while (this.processedBlock < this.blocks.length && size > this.buffer.byteLength) {
- const blockIndex = this.processedBlock;
- const block = this.blocks[blockIndex];
- const blockBuffer = this.blte.subarray(
- this.processedOffset,
- this.processedOffset + block.compressedSize
- );
- if (block.hash !== EMPTY_HASH) {
- const blockHash = crypto.createHash("md5").update(blockBuffer).digest("hex");
- assert(blockHash === block.hash, `[BLTE]: Invalid block hash: expected ${block.hash}, got ${blockHash}`);
- }
- if (allowMissingKey) {
- const buffer = this.processBlock(blockBuffer, blockIndex, allowMissingKey);
- if (buffer instanceof Buffer) {
- assert(
- buffer.byteLength === block.decompressedSize,
- `[BLTE]: Invalid decompressed size: expected ${block.decompressedSize.toString()}, got ${buffer.byteLength.toString()}`
- );
- this.buffer = Buffer.concat([this.buffer, buffer]);
- } else {
- missingKeyBlocks.push({
- offset: this.buffer.byteLength,
- size: block.decompressedSize,
- blockIndex,
- keyName: buffer
- });
- this.buffer = Buffer.concat([
- this.buffer,
- Buffer.alloc(block.decompressedSize)
- ]);
- }
- } else {
- const buffer = this.processBlock(blockBuffer, blockIndex, allowMissingKey);
- assert(
- buffer.byteLength === block.decompressedSize,
- `[BLTE]: Invalid decompressed size: expected ${block.decompressedSize.toString()}, got ${buffer.byteLength.toString()}`
- );
- this.buffer = Buffer.concat([this.buffer, buffer]);
+ return [c >>> 0, b >>> 0];
+ };
+ const getNameHash = (name) => {
+ const normalized = name.replace(/\//g, "\\").toUpperCase();
+ const [pc, pb] = hashlittle2(normalized);
+ return `${pc.toString(16).padStart(8, "0")}${pb.toString(16).padStart(8, "0")}`;
+ };
+
+ const VERSION_SUB_OFFSET = -12;
+ const CHECKSUM_SIZE_SUB_OFFSET = -5;
+ const BLOCK_SIZE_OFFSET = 3;
+ const OFFSET_BYTES_OFFSET = 4;
+ const SIZE_BYTES_OFFSET = 5;
+ const KEY_SIZE_OFFSET = 6;
+ const NUM_ELEMENTS_OFFSET = 8;
+ const CHECKSUM_OFFSET = 12;
+ const CHECKSUM_TRIES = [
+ 10,
+ 9,
+ 8,
+ 7,
+ 6,
+ 5,
+ 4,
+ 3,
+ 2,
+ 1,
+ 0
+ ];
+ const tryArchiveIndexChecksumSize = (buffer, cKey) => {
+ const res = CHECKSUM_TRIES.filter(
+ (index) => buffer.readUInt8(buffer.byteLength - index + CHECKSUM_SIZE_SUB_OFFSET) === index && buffer.readUInt8(buffer.byteLength - index + VERSION_SUB_OFFSET) === 1
+ );
+ if (res.length === 1) {
+ return res[0];
+ }
+ throw new Error(`Invalid checksum size: ${res.join(", ")} in ${cKey}`);
+ };
+ const parseArchiveIndex = (buffer, cKey) => {
+ const checksumSize = tryArchiveIndexChecksumSize(buffer, cKey);
+ const versionOffset = buffer.byteLength - checksumSize + VERSION_SUB_OFFSET;
+ const footerOffset = versionOffset - checksumSize;
+ const tocChecksum = buffer.toString("hex", footerOffset, versionOffset);
+ const version = buffer.readUInt8(versionOffset);
+ const blockSizeKB = buffer.readUInt8(versionOffset + BLOCK_SIZE_OFFSET);
+ const offsetBytes = buffer.readUInt8(versionOffset + OFFSET_BYTES_OFFSET);
+ const sizeBytes = buffer.readUInt8(versionOffset + SIZE_BYTES_OFFSET);
+ const keySize = buffer.readUInt8(versionOffset + KEY_SIZE_OFFSET);
+ const numElements = buffer.readUInt32LE(versionOffset + NUM_ELEMENTS_OFFSET);
+ const footerChecksum = buffer.toString("hex", versionOffset + CHECKSUM_OFFSET);
+ assert(version === 1, `Invalid version: ${version.toString()} in ${cKey}`);
+ const entrySize = keySize + offsetBytes + sizeBytes;
+ const blockSize = blockSizeKB * 1024;
+ const numBlocks = footerOffset / (blockSize + keySize + checksumSize);
+ const tocSize = (keySize + checksumSize) * numBlocks;
+ const toc = buffer.subarray(footerOffset - tocSize, footerOffset);
+ const footer = buffer.subarray(footerOffset);
+ const footerCheckBuffer = Buffer.concat([
+ buffer.subarray(versionOffset, buffer.byteLength - checksumSize),
+ Buffer.alloc(checksumSize)
+ ]);
+ const hash = crypto.createHash("md5").update(footer).digest("hex");
+ assert(hash === cKey, `Invalid footer hash in ${cKey}: expected ${cKey}, got ${hash}`);
+ const footerHash = crypto.createHash("md5").update(footerCheckBuffer).digest("hex").slice(0, checksumSize * 2);
+ assert(footerHash === footerChecksum, `Invalid footer checksum in ${cKey}: expected ${footerChecksum}, got ${footerHash}`);
+ const tocHash = crypto.createHash("md5").update(toc).digest("hex").slice(0, checksumSize * 2);
+ assert(tocHash === tocChecksum, `Invalid toc checksum in ${cKey}: expected ${tocChecksum}, got ${tocHash}`);
+ const result = /* @__PURE__ */ new Map();
+ for (let i = 0; i < numBlocks; i += 1) {
+ const lastEkey = toc.toString("hex", i * keySize, (i + 1) * keySize);
+ const blockChecksum = toc.toString("hex", numBlocks * keySize + i * checksumSize, numBlocks * keySize + (i + 1) * checksumSize);
+ const blockOffset = i * blockSize;
+ const blockHash = crypto.createHash("md5").update(buffer.subarray(i * blockSize, (i + 1) * blockSize)).digest("hex").slice(0, checksumSize * 2);
+ assert(blockChecksum === blockHash, `Invalid block hash in ${cKey} at ${i.toString()}: expected ${blockChecksum}, got ${blockHash}`);
+ let length = 0;
+ while (length < blockSize) {
+ const entryOffset = blockOffset + length * entrySize;
+ const eKey = buffer.toString("hex", entryOffset, entryOffset + keySize);
+ const size = buffer.readUIntBE(entryOffset + keySize, sizeBytes);
+ const offset = buffer.readUIntBE(entryOffset + keySize + sizeBytes, offsetBytes);
+ result.set(eKey, { key: cKey, size, offset });
+ length += 1;
+ if (eKey === lastEkey) {
+ break;
  }
- this.processedBlock += 1;
- this.processedOffset += block.compressedSize;
  }
- return allowMissingKey ? missingKeyBlocks : void 0;
  }
- }
+ assert(result.size === numElements, `Invalid number of elements: ${result.size.toString()} != ${numElements.toString()} in ${cKey}`);
+ return result;
+ };
+
+ const normalizeKey = (key) => key.split("-").map((part, index) => index === 0 ? part : `${part.charAt(0).toUpperCase()}${part.slice(1)}`).join("");
+ const parseConfig = (text) => {
+ const entries = {};
+ text.split(/\r?\n/).filter((line) => line.trim().length !== 0 && !line.startsWith("#")).forEach((line) => {
+ const match = /([^\s]+)\s?=\s?(.*)/.exec(line);
+ assert(match !== null, "Invalid token encountered parsing CDN config");
+ const [key, value] = match.slice(1);
+ entries[normalizeKey(key)] = value;
+ });
+ return entries;
+ };
+ const parseCDNConfig = (text) => parseConfig(text);
+ const parseBuildConfig = (text) => parseConfig(text);
 
  const ENC_MAGIC = 17742;
  const MAGIC_OFFSET = 0;
@@ -662,15 +799,31 @@ const parseEncodingFile = (inputBuffer, eKey, cKey) => {
  pagePointer += 5;
  eKey2FileSize.set(fileEKey, fileSize);
  }
- }
- return {
- eSpec,
- cKey2FileSize,
- cKey2EKey,
- eKey2ESpecIndex,
- eKey2FileSize
- };
+ }
+ return {
+ eSpec,
+ cKey2FileSize,
+ cKey2EKey,
+ eKey2ESpecIndex,
+ eKey2FileSize
+ };
+ };
+
+ const parseProductConfig = (text) => {
+ const lines = text.split(/\r?\n/);
+ const headers = lines[0].split("|").map((header) => header.split("!")[0].replace(" ", ""));
+ const entries = lines.filter((line, index) => index > 0 && line.trim().length !== 0 && !line.startsWith("#")).map((line) => {
+ const node = {};
+ const entryFields = line.split("|");
+ for (let i = 0, n = entryFields.length; i < n; i += 1) {
+ node[headers[i]] = entryFields[i];
+ }
+ return node;
+ });
+ return entries;
  };
+ const parseProductVersions = (text) => parseProductConfig(text);
+ const parseProductCDNs = (text) => parseProductConfig(text);
 
  const MFST_MAGIC = 1296454484;
  const ContentFlags = {
@@ -797,99 +950,66 @@ const parseRootFile = (inputBuffer, eKey, cKey) => {
  return { fileDataID2CKey, nameHash2FileDataID };
  };
 
- const hashlittle2 = (key, pc = 0, pb = 0) => {
- const { length } = key;
- let offset = 0;
- let a = 3735928559 + length + pc | 0;
- let b = 3735928559 + length + pc | 0;
- let c = 3735928559 + length + pc + pb | 0;
- while (length - offset > 12) {
- a += key.charCodeAt(offset + 0);
- a += key.charCodeAt(offset + 1) << 8;
- a += key.charCodeAt(offset + 2) << 16;
- a += key.charCodeAt(offset + 3) << 24;
- b += key.charCodeAt(offset + 4);
- b += key.charCodeAt(offset + 5) << 8;
- b += key.charCodeAt(offset + 6) << 16;
- b += key.charCodeAt(offset + 7) << 24;
- c += key.charCodeAt(offset + 8);
- c += key.charCodeAt(offset + 9) << 8;
- c += key.charCodeAt(offset + 10) << 16;
- c += key.charCodeAt(offset + 11) << 24;
- a -= c;
- a ^= c << 4 | c >>> 28;
- c = c + b | 0;
- b -= a;
- b ^= a << 6 | a >>> 26;
- a = a + c | 0;
- c -= b;
- c ^= b << 8 | b >>> 24;
- b = b + a | 0;
- a -= c;
- a ^= c << 16 | c >>> 16;
- c = c + b | 0;
- b -= a;
- b ^= a << 19 | a >>> 13;
- a = a + c | 0;
- c -= b;
- c ^= b << 4 | b >>> 28;
- b = b + a | 0;
- offset += 12;
- }
- if (length - offset > 0) {
- switch (length - offset) {
- case 12:
- c += key.charCodeAt(offset + 11) << 24;
- case 11:
- c += key.charCodeAt(offset + 10) << 16;
- case 10:
- c += key.charCodeAt(offset + 9) << 8;
- case 9:
- c += key.charCodeAt(offset + 8);
- case 8:
- b += key.charCodeAt(offset + 7) << 24;
- case 7:
- b += key.charCodeAt(offset + 6) << 16;
- case 6:
- b += key.charCodeAt(offset + 5) << 8;
- case 5:
- b += key.charCodeAt(offset + 4);
- case 4:
- a += key.charCodeAt(offset + 3) << 24;
- case 3:
- a += key.charCodeAt(offset + 2) << 16;
- case 2:
- a += key.charCodeAt(offset + 1) << 8;
- case 1:
- a += key.charCodeAt(offset + 0);
+ const JEDEC = [
+ "B",
+ "KB",
+ "MB",
+ "GB",
+ "TB",
+ "PB",
+ "EB",
+ "ZB",
+ "YB"
+ ];
+ const formatFileSize = (input) => {
+ if (Number.isNaN(input))
+ return "";
+ let size = Number(input);
+ const isNegative = size < 0;
+ const result = [];
+ if (isNegative)
+ size = -size;
+ let exponent = Math.floor(Math.log(size) / Math.log(1024));
+ if (exponent < 0)
+ exponent = 0;
+ if (exponent > 8)
+ exponent = 8;
+ if (size === 0) {
+ result[0] = 0;
+ result[1] = JEDEC[exponent];
+ } else {
+ const val = size / 2 ** (exponent * 10);
+ result[0] = Number(val.toFixed(exponent > 0 ? 2 : 0));
+ if (result[0] === 1024 && exponent < 8) {
+ result[0] = 1;
+ exponent += 1;
  }
- c ^= b;
- c -= b << 14 | b >>> 18;
- a ^= c;
- a -= c << 11 | c >>> 21;
- b ^= a;
- b -= a << 25 | a >>> 7;
- c ^= b;
- c -= b << 16 | b >>> 16;
- a ^= c;
- a -= c << 4 | c >>> 28;
- b ^= a;
- b -= a << 14 | a >>> 18;
- c ^= b;
- c -= b << 24 | b >>> 8;
+ result[1] = JEDEC[exponent];
  }
- return [c >>> 0, b >>> 0];
+ if (isNegative)
+ result[0] = -result[0];
+ return result.join(" ");
  };
- const getNameHash = (name) => {
- const normalized = name.replace(/\//g, "\\").toUpperCase();
- const [pc, pb] = hashlittle2(normalized);
- return `${pc.toString(16).padStart(8, "0")}${pb.toString(16).padStart(8, "0")}`;
+ const resolveCDNHost = async (hosts, path) => {
+ const latencies = await Promise.allSettled(
+ hosts.map(async (host) => {
+ const start = Date.now();
+ await fetch(`http://${host}/`);
+ const end = Date.now();
+ return {
+ host,
+ latency: end - start
+ };
+ })
+ );
+ const resolved = latencies.filter((result) => result.status === "fulfilled").map((result) => result.value).sort((a, b) => a.latency - b.latency);
+ return resolved.map((result) => `http://${result.host}/${path}`);
  };
 
- var __defProp$3 = Object.defineProperty;
- var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
- var __publicField$3 = (obj, key, value) => {
- __defNormalProp$3(obj, typeof key !== "symbol" ? key + "" : key, value);
+ var __defProp$2 = Object.defineProperty;
+ var __defNormalProp$2 = (obj, key, value) => key in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
+ var __publicField$2 = (obj, key, value) => {
+ __defNormalProp$2(obj, typeof key !== "symbol" ? key + "" : key, value);
  return value;
  };
  const WDC5_MAGIC = 1464091445;
@@ -912,17 +1032,17 @@ const readBitpackedValue = (buffer, fieldOffsetBits, fieldSizeBits, signed = fal
  };
  class WDCReader {
  constructor(buffer, blocks = [], adb) {
- __publicField$3(this, "tableHash");
- __publicField$3(this, "layoutHash");
- __publicField$3(this, "locale");
- __publicField$3(this, "isNormal");
- __publicField$3(this, "hasRelationshipData");
- __publicField$3(this, "fields");
- __publicField$3(this, "fieldsInfo");
- __publicField$3(this, "rows", /* @__PURE__ */ new Map());
- __publicField$3(this, "relationships", /* @__PURE__ */ new Map());
- __publicField$3(this, "copyTable", /* @__PURE__ */ new Map());
- __publicField$3(this, "hotfixes", /* @__PURE__ */ new Map());
+ __publicField$2(this, "tableHash");
+ __publicField$2(this, "layoutHash");
+ __publicField$2(this, "locale");
+ __publicField$2(this, "isNormal");
+ __publicField$2(this, "hasRelationshipData");
+ __publicField$2(this, "fields");
+ __publicField$2(this, "fieldsInfo");
+ __publicField$2(this, "rows", /* @__PURE__ */ new Map());
+ __publicField$2(this, "relationships", /* @__PURE__ */ new Map());
+ __publicField$2(this, "copyTable", /* @__PURE__ */ new Map());
+ __publicField$2(this, "hotfixes", /* @__PURE__ */ new Map());
  const magic = buffer.readUInt32BE(0);
  const fieldCount = buffer.readUInt32LE(140);
  const recordSize = buffer.readUInt32LE(144);
@@ -1248,7 +1368,7 @@ class WDCReader {
1248
1368
  data: value
1249
1369
  };
1250
1370
  }
1251
- if (!recordID && fieldIndex === idIndex) {
1371
+ if (recordID === void 0 && fieldIndex === idIndex) {
1252
1372
  recordID = value;
1253
1373
  }
1254
1374
  const fieldOffset = fieldInfo.fieldOffsetBits >>> 3;
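
The !recordID to recordID === void 0 rewrites in this and the following hunks are the substantive part of the WDCReader changes: a record ID of 0 is falsy but still a real ID, so only an explicit undefined check distinguishes "not read yet" from "read as zero". A quick illustration:

let recordID;                       // not read yet
console.log(!recordID);             // true
console.log(recordID === void 0);   // true

recordID = 0;                       // a valid record ID
console.log(!recordID);             // true  -- the old check would treat it as missing
console.log(recordID === void 0);   // false -- the new check keeps the real value
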
@@ -1260,7 +1380,7 @@ class WDCReader {
1260
1380
  };
1261
1381
  }
1262
1382
  case "commonData": {
1263
- const value = (recordID ? commonData.get(fieldIndex)?.get(recordID) : void 0) ?? fieldInfo.defaultValue;
1383
+ const value = (recordID !== void 0 ? commonData.get(fieldIndex)?.get(recordID) : void 0) ?? fieldInfo.defaultValue;
1264
1384
  return {
1265
1385
  type: "commonData",
1266
1386
  data: value
@@ -1295,7 +1415,7 @@ class WDCReader {
1295
1415
  assert(fieldPalletData, `No pallet data for field ${fieldIndex.toString()}`);
1296
1416
  value = fieldPalletData[value];
1297
1417
  }
1298
- if (!recordID && fieldIndex === idIndex) {
1418
+ if (recordID === void 0 && fieldIndex === idIndex) {
1299
1419
  recordID = value;
1300
1420
  }
1301
1421
  return {
@@ -1310,7 +1430,7 @@ class WDCReader {
1310
1430
  assert(recordID !== void 0, "No record ID found");
1311
1431
  this.rows.set(recordID, recordData);
1312
1432
  const foreignID = relationshipMap.get(recordIndex);
1313
- if (foreignID) {
1433
+ if (foreignID !== void 0) {
1314
1434
  this.relationships.set(recordID, foreignID);
1315
1435
  }
1316
1436
  } else {
@@ -1321,7 +1441,7 @@ class WDCReader {
1321
1441
  assert(recordID !== void 0, "No record ID found");
1322
1442
  this.rows.set(recordID, recordData);
1323
1443
  const foreignID = relationshipMap.get(recordIndex);
1324
- if (foreignID) {
1444
+ if (foreignID !== void 0) {
1325
1445
  this.relationships.set(recordID, foreignID);
1326
1446
  }
1327
1447
  }
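
The same falsy-zero reasoning drives the foreignID and copy-table changes that follow: Map.prototype.get returns undefined for a missing key, but a stored value of 0 is legitimate data, so only a strict undefined comparison preserves such relationships. Minimal sketch:

const relationshipMap = new Map([[3, 0]]); // record index 3 -> foreign ID 0

const foreignID = relationshipMap.get(3);
if (foreignID) {
    // never runs: 0 is falsy
}
if (foreignID !== void 0) {
    // runs: the zero-valued relationship is preserved
}
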
@@ -1365,70 +1485,24 @@ class WDCReader {
1365
1485
  }
1366
1486
  }
1367
1487
  const dst = this.copyTable.get(id);
1368
- if (dst) {
1488
+ if (dst !== void 0) {
1369
1489
  return this.rows.get(dst);
1370
1490
  }
1371
1491
  return this.rows.get(id);
1372
1492
  }
1373
1493
  getRowRelationship(id) {
1374
1494
  const dst = this.copyTable.get(id);
1375
- if (dst) {
1495
+ if (dst !== void 0) {
1376
1496
  return this.relationships.get(dst);
1377
1497
  }
1378
1498
  return this.relationships.get(id);
1379
1499
  }
1380
1500
  }
1381
1501
 
1382
- const resolveCDNHost = async (hosts, path) => {
1383
- const latencies = await Promise.allSettled(
1384
- hosts.map(async (host) => {
1385
- const start = Date.now();
1386
- await fetch(`http://${host}/`);
1387
- const end = Date.now();
1388
- return {
1389
- host,
1390
- latency: end - start
1391
- };
1392
- })
1393
- );
1394
- const resolved = latencies.filter((result) => result.status === "fulfilled").map((result) => result.value).sort((a, b) => a.latency - b.latency);
1395
- return resolved.map((result) => `http://${result.host}/${path}`);
1396
- };
1397
- const JEDEC = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"];
1398
- const formatFileSize = (input) => {
1399
- if (Number.isNaN(input))
1400
- return "";
1401
- let size = Number(input);
1402
- const isNegative = size < 0;
1403
- const result = [];
1404
- if (isNegative)
1405
- size = -size;
1406
- let exponent = Math.floor(Math.log(size) / Math.log(1024));
1407
- if (exponent < 0)
1408
- exponent = 0;
1409
- if (exponent > 8)
1410
- exponent = 8;
1411
- if (size === 0) {
1412
- result[0] = 0;
1413
- result[1] = JEDEC[exponent];
1414
- } else {
1415
- const val = size / 2 ** (exponent * 10);
1416
- result[0] = Number(val.toFixed(exponent > 0 ? 2 : 0));
1417
- if (result[0] === 1024 && exponent < 8) {
1418
- result[0] = 1;
1419
- exponent += 1;
1420
- }
1421
- result[1] = JEDEC[exponent];
1422
- }
1423
- if (isNegative)
1424
- result[0] = -result[0];
1425
- return result.join(" ");
1426
- };
1427
-
1428
- var __defProp$2 = Object.defineProperty;
1429
- var __defNormalProp$2 = (obj, key, value) => key in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
1430
- var __publicField$2 = (obj, key, value) => {
1431
- __defNormalProp$2(obj, typeof key !== "symbol" ? key + "" : key, value);
1502
+ var __defProp$1 = Object.defineProperty;
1503
+ var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
1504
+ var __publicField$1 = (obj, key, value) => {
1505
+ __defNormalProp$1(obj, typeof key !== "symbol" ? key + "" : key, value);
1432
1506
  return value;
1433
1507
  };
1434
1508
  var LogLevel = /* @__PURE__ */ ((LogLevel2) => {
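
Most of this hunk is relocation rather than behavioural change: resolveCDNHost and formatFileSize are removed here and re-added earlier in the file (see the additions at the start of this section), and the __publicField$2 helper block is replaced by what appears to be the bundler's renumbered __publicField$1 variant after module reordering. For reference, the latency-probing idea behind resolveCDNHost as a standalone sketch (host names and the path are placeholders; assumes a runtime with global fetch):

const rankHosts = async (hosts, path) => {
    const probes = await Promise.allSettled(hosts.map(async (host) => {
        const start = Date.now();
        await fetch(`http://${host}/`);             // one-shot round-trip probe
        return { host, latency: Date.now() - start };
    }));
    return probes
        .filter((p) => p.status === "fulfilled")    // drop hosts that failed to answer
        .map((p) => p.value)
        .sort((a, b) => a.latency - b.latency)      // fastest first
        .map((p) => `http://${p.host}/${path}`);
};

// const urls = await rankHosts(["cdn.example.net", "cdn2.example.net"], "some/path");
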
@@ -1438,16 +1512,21 @@ var LogLevel = /* @__PURE__ */ ((LogLevel2) => {
1438
1512
  LogLevel2[LogLevel2["debug"] = 3] = "debug";
1439
1513
  return LogLevel2;
1440
1514
  })(LogLevel || {});
1441
- const textLogLevel = ["ERROR", "WARN", "INFO", "DEBUG"];
1515
+ const textLogLevel = [
1516
+ "ERROR",
1517
+ "WARN",
1518
+ "INFO",
1519
+ "DEBUG"
1520
+ ];
1442
1521
  class CASCClient {
1443
1522
  constructor(region, product, version, logLevel = 2 /* info */) {
1444
- __publicField$2(this, "region");
1445
- __publicField$2(this, "product");
1446
- __publicField$2(this, "version");
1447
- __publicField$2(this, "name2FileDataID", /* @__PURE__ */ new Map());
1448
- __publicField$2(this, "keys", /* @__PURE__ */ new Map());
1449
- __publicField$2(this, "preload");
1450
- __publicField$2(this, "logLevel");
1523
+ __publicField$1(this, "region");
1524
+ __publicField$1(this, "product");
1525
+ __publicField$1(this, "version");
1526
+ __publicField$1(this, "name2FileDataID", /* @__PURE__ */ new Map());
1527
+ __publicField$1(this, "keys", /* @__PURE__ */ new Map());
1528
+ __publicField$1(this, "preload");
1529
+ __publicField$1(this, "logLevel");
1451
1530
  this.region = region;
1452
1531
  this.product = product;
1453
1532
  this.version = version;
@@ -1463,7 +1542,7 @@ class CASCClient {
1463
1542
  if (level <= 0 /* error */) {
1464
1543
  console.error(`${( new Date()).toISOString()} [${textLogLevel[level]}]:`, message);
1465
1544
  } else {
1466
- console.log(`${( new Date()).toISOString()} [${textLogLevel[level]}]:`, message);
1545
+ console.info(`${( new Date()).toISOString()} [${textLogLevel[level]}]:`, message);
1467
1546
  }
1468
1547
  }
1469
1548
  }
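
The one visible change in this hunk is console.log to console.info for non-error messages; the error path keeps console.error. The gate on the configured logLevel sits outside this hunk, but the overall pattern is roughly the following sketch (names are illustrative, not the package's API):

const LEVELS = ["ERROR", "WARN", "INFO", "DEBUG"];

const makeLogger = (maxLevel) => (level, message) => {
    if (level > maxLevel) return;   // suppressed by configured verbosity
    const line = `${new Date().toISOString()} [${LEVELS[level]}]: ${message}`;
    if (level === 0) console.error(line);
    else console.info(line);
};

const log = makeLogger(2);          // "info" verbosity
log(0, "root table missing");       // printed via console.error
log(3, "cache hit");                // dropped: debug > info
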
@@ -1542,7 +1621,7 @@ class CASCClient {
1542
1621
  this.log(2 /* info */, "Loading root table...");
1543
1622
  const rootCKey = buildConfig.root;
1544
1623
  const rootEKeys = encoding.cKey2EKey.get(rootCKey);
1545
- assert(rootEKeys, "Failing to find EKey for root table.");
1624
+ assert(rootEKeys !== void 0, "Failing to find EKey for root table.");
1546
1625
  const rootEKey = typeof rootEKeys === "string" ? rootEKeys : rootEKeys[0];
1547
1626
  const rootBuffer = await getDataFile(prefixes, rootEKey, "build", this.version.BuildConfig, {
1548
1627
  name: "root",
@@ -1596,8 +1675,8 @@ class CASCClient {
1596
1675
  const lookupRow = lookupReader.rows.get(keyID);
1597
1676
  const keyRow = keysReader.rows.get(keyID);
1598
1677
  if (keyRow) {
1599
- assert(Array.isArray(lookupRow) && lookupRow[0], `Invalid TACTKeyLookup table row at id ${keyID.toString()}`);
1600
- assert(Array.isArray(keyRow) && keyRow[0], `Invalid TACTKey table row at id ${keyID.toString()}`);
1678
+ assert(Array.isArray(lookupRow) && lookupRow.length > 0, `Invalid TACTKeyLookup table row at id ${keyID.toString()}`);
1679
+ assert(Array.isArray(keyRow) && keyRow.length > 0, `Invalid TACTKey table row at id ${keyID.toString()}`);
1601
1680
  const keyName = lookupRow[0].data.toString(16).padStart(16, "0");
1602
1681
  const keyHexLE = keyRow[0].data.toString(16).padStart(32, "0");
1603
1682
  assert(keyName.length === 16, `Invalid keyName length: ${keyName.length.toString()}`);
@@ -1649,7 +1728,7 @@ class CASCClient {
1649
1728
  assert(this.preload, "Client not initialized");
1650
1729
  const { prefixes, encoding, archives } = this.preload;
1651
1730
  const eKeys = encoding.cKey2EKey.get(cKey);
1652
- assert(eKeys, `Failing to find encoding key for ${cKey}`);
1731
+ assert(eKeys !== void 0, `Failing to find encoding key for ${cKey}`);
1653
1732
  const eKey = typeof eKeys === "string" ? eKeys : eKeys[0];
1654
1733
  const archive = archives.get(eKey);
1655
1734
  const blte = archive ? await getDataFile(prefixes, archive.key, "data", this.version.BuildConfig, {
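
Both asserts in the last two hunks guard the content-key (CKey) to encoding-key (EKey) lookup. The encoding table can map one CKey to a single EKey string or to an array of candidates, and the client always takes the first; the stricter !== void 0 test just avoids ever treating a present-but-falsy value as a miss. A sketch of that normalization with made-up keys:

const cKey2EKey = new Map([
    ["aaaa0000", "bbbb1111"],                   // single encoding key
    ["cccc2222", ["dddd3333", "eeee4444"]],     // several candidates
]);

const firstEKey = (cKey) => {
    const eKeys = cKey2EKey.get(cKey);
    if (eKeys === undefined) throw new Error(`No encoding key for ${cKey}`);
    return typeof eKeys === "string" ? eKeys : eKeys[0];
};

console.log(firstEKey("cccc2222")); // "dddd3333"
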
@@ -1690,14 +1769,17 @@ class CASCClient {
1690
1769
  };
1691
1770
  }
1692
1771
  }
1693
- __publicField$2(CASCClient, "LocaleFlags", LocaleFlags);
1694
- __publicField$2(CASCClient, "ContentFlags", ContentFlags);
1695
- __publicField$2(CASCClient, "LogLevel", LogLevel);
1772
+ // eslint-disable-next-line @typescript-eslint/naming-convention
1773
+ __publicField$1(CASCClient, "LocaleFlags", LocaleFlags);
1774
+ // eslint-disable-next-line @typescript-eslint/naming-convention
1775
+ __publicField$1(CASCClient, "ContentFlags", ContentFlags);
1776
+ // eslint-disable-next-line @typescript-eslint/naming-convention
1777
+ __publicField$1(CASCClient, "LogLevel", LogLevel);
1696
1778
 
1697
- var __defProp$1 = Object.defineProperty;
1698
- var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
1699
- var __publicField$1 = (obj, key, value) => {
1700
- __defNormalProp$1(obj, typeof key !== "symbol" ? key + "" : key, value);
1779
+ var __defProp = Object.defineProperty;
1780
+ var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
1781
+ var __publicField = (obj, key, value) => {
1782
+ __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
1701
1783
  return value;
1702
1784
  };
1703
1785
  const PATTERN_COLUMN = /^(int|float|locstring|string)(<[^:]+::[^>]+>)?\s([^\s]+)/;
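
Here only comments and helper suffixes change: eslint-disable lines for @typescript-eslint/naming-convention are added above the static fields, and the class-field helpers are renumbered; the statics themselves are untouched. Consumers can keep reading the enums straight off the class, e.g. (the import path is simply the package name; the values follow from the LogLevel enum shown above):

import { CASCClient } from "@rhyster/wow-casc-dbc";

console.log(CASCClient.LogLevel.debug);  // 3
console.log(CASCClient.LogLevel[3]);     // "debug" (reverse mapping from the enum IIFE)
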
@@ -1742,10 +1824,10 @@ const getCastBuffer = (value, srcSize, dstSize) => {
1742
1824
  };
1743
1825
  class DBDParser {
1744
1826
  constructor(wdc) {
1745
- __publicField$1(this, "wdc");
1746
- __publicField$1(this, "definitions", /* @__PURE__ */ new Map());
1747
- __publicField$1(this, "columns", []);
1748
- __publicField$1(this, "cache", /* @__PURE__ */ new Map());
1827
+ __publicField(this, "wdc");
1828
+ __publicField(this, "definitions", /* @__PURE__ */ new Map());
1829
+ __publicField(this, "columns", []);
1830
+ __publicField(this, "cache", /* @__PURE__ */ new Map());
1749
1831
  this.wdc = wdc;
1750
1832
  }
1751
1833
  async init() {
@@ -1753,7 +1835,7 @@ class DBDParser {
1753
1835
  const manifests = await (await fetch(manifestsURL)).json();
1754
1836
  const tableHashHex = this.wdc.tableHash.toString(16).padStart(8, "0").toLowerCase();
1755
1837
  const manifest = manifests.find((v) => v.tableHash.toLowerCase() === tableHashHex);
1756
- assert(manifest?.tableName, `No manifest found for table hash ${tableHashHex}`);
1838
+ assert(manifest?.tableName !== void 0, `No manifest found for table hash ${tableHashHex}`);
1757
1839
  const url = `https://raw.githubusercontent.com/wowdev/WoWDBDefs/master/definitions/${manifest.tableName}.dbd`;
1758
1840
  const text = await (await fetch(url)).text();
1759
1841
  const lines = text.split("\n").map((v) => v.trim());
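
DBDParser.init resolves the table name from a WoWDBDefs manifest by the WDC table hash, then pulls the matching .dbd definition from GitHub and splits it into blank-line-separated chunks. A trimmed sketch of just the download-and-inspect step, reusing the PATTERN_COLUMN regex from this file ("Map" is only an example table name):

const PATTERN = /^(int|float|locstring|string)(<[^:]+::[^>]+>)?\s([^\s]+)/;
const url = "https://raw.githubusercontent.com/wowdev/WoWDBDefs/master/definitions/Map.dbd";
const text = await (await fetch(url)).text();

const columns = text
    .split("\n")
    .map((line) => PATTERN.exec(line.trim()))
    .filter((match) => match !== null)
    .map((match) => match[3].replace("?", ""));  // strip the "unverified" marker

console.log(columns.slice(0, 5));
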
@@ -1769,7 +1851,7 @@ class DBDParser {
1769
1851
  assert(columnsChunk?.[0] === "COLUMNS", "No column definitions found");
1770
1852
  columnsChunk.shift();
1771
1853
  columnsChunk.forEach((line) => {
1772
- const match = line.match(PATTERN_COLUMN);
1854
+ const match = PATTERN_COLUMN.exec(line);
1773
1855
  if (match) {
1774
1856
  const [, type, , name] = match;
1775
1857
  this.definitions.set(name.replace("?", ""), type);
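
Swapping String#match for RegExp#exec here (and for the layout and field patterns below) is behaviour-preserving: none of these regexes carry the g flag, and for a non-global regex the two calls yield the same match array. Quick check:

const PATTERN_COLUMN = /^(int|float|locstring|string)(<[^:]+::[^>]+>)?\s([^\s]+)/;
const line = "int<Map::ID> MapID";

const viaMatch = line.match(PATTERN_COLUMN);
const viaExec = PATTERN_COLUMN.exec(line);
console.log(viaMatch[3], viaExec[3]);                              // MapID MapID
console.log(JSON.stringify(viaMatch) === JSON.stringify(viaExec)); // true
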
@@ -1777,7 +1859,7 @@ class DBDParser {
1777
1859
  });
1778
1860
  const layoutHashHex = this.wdc.layoutHash.toString(16).padStart(8, "0").toLowerCase();
1779
1861
  const versionChunk = chunks.find((chunk) => chunk.find((line) => {
1780
- const layoutsMatch = line.match(PATTERN_LAYOUT);
1862
+ const layoutsMatch = PATTERN_LAYOUT.exec(line);
1781
1863
  const layouts = layoutsMatch?.[1].split(",").map((v) => v.trim().toLowerCase());
1782
1864
  return layouts?.includes(layoutHashHex);
1783
1865
  }));
@@ -1786,17 +1868,27 @@ class DBDParser {
1786
1868
  if (line.startsWith("LAYOUT") || line.startsWith("BUILD") || line.startsWith("COMMENT")) {
1787
1869
  return;
1788
1870
  }
1789
- const match = line.match(PATTERN_FIELD);
1871
+ const match = PATTERN_FIELD.exec(line);
1790
1872
  if (match) {
1791
- const [, , annotationsText, name, , unsigned, sizeText, , arraySizeText] = match;
1873
+ const [
1874
+ ,
1875
+ ,
1876
+ annotationsText,
1877
+ name,
1878
+ ,
1879
+ unsigned,
1880
+ sizeText,
1881
+ ,
1882
+ arraySizeText
1883
+ ] = match;
1792
1884
  const type = this.definitions.get(name);
1793
- assert(type, `No type found for column ${name}`);
1794
- const annotations = annotationsText ? annotationsText.split(",").map((v) => v.trim()) : void 0;
1885
+ assert(type !== void 0, `No type found for column ${name}`);
1886
+ const annotations = annotationsText ? annotationsText.split(",").map((v) => v.trim()) : [];
1795
1887
  const size = sizeText ? parseInt(sizeText, 10) : void 0;
1796
1888
  const arraySize = arraySizeText ? parseInt(arraySizeText, 10) : void 0;
1797
- const isID = !!annotations?.includes("id");
1798
- const isInline = !annotations?.includes("noninline");
1799
- const isRelation = !!annotations?.includes("relation");
1889
+ const isID = !!annotations.includes("id");
1890
+ const isInline = !annotations.includes("noninline");
1891
+ const isRelation = !!annotations.includes("relation");
1800
1892
  const isSigned = !unsigned;
1801
1893
  this.columns.push({
1802
1894
  name,
@@ -1837,18 +1929,18 @@ class DBDParser {
1837
1929
  fieldIndex += 1;
1838
1930
  }
1839
1931
  } else if (column.isInline) {
1932
+ assert(row.length > fieldIndex, `No value found for column ${column.name}`);
1840
1933
  const cell = row[fieldIndex];
1841
- assert(cell, `No value found for column ${column.name}`);
1842
1934
  const fieldInfo = this.wdc.fieldsInfo[fieldIndex];
1843
1935
  const srcSigned = fieldInfo.storageType === "bitpackedSigned";
1844
1936
  const srcSize = fieldInfo.storageType === "none" || fieldInfo.storageType === "bitpacked" || fieldInfo.storageType === "bitpackedSigned" ? Math.ceil(fieldInfo.fieldSizeBits / 8) : 4;
1845
- const dstSize = column.size ? Math.ceil(column.size / 8) : void 0;
1937
+ const dstSize = column.size !== void 0 ? Math.ceil(column.size / 8) : void 0;
1846
1938
  if (cell.type === "bitpackedArray") {
1847
1939
  data[column.name] = cell.data.map((v) => {
1848
1940
  if (column.type === "float") {
1849
1941
  return castFloat(v, srcSize, srcSigned);
1850
1942
  }
1851
- if (dstSize) {
1943
+ if (dstSize !== void 0) {
1852
1944
  return castIntegerBySize(
1853
1945
  v,
1854
1946
  srcSize,
@@ -1866,7 +1958,7 @@ class DBDParser {
1866
1958
  data[column.name] = cell.string;
1867
1959
  }
1868
1960
  } else if (column.type === "float") {
1869
- if (column.arraySize) {
1961
+ if (column.arraySize !== void 0) {
1870
1962
  const castBuffer = getCastBuffer(
1871
1963
  typeof cell.data === "number" ? BigInt(cell.data) : cell.data,
1872
1964
  srcSize,
@@ -1883,8 +1975,8 @@ class DBDParser {
1883
1975
  data[column.name] = castFloat(cell.data, srcSize, srcSigned);
1884
1976
  }
1885
1977
  } else if (column.type === "int") {
1886
- if (column.arraySize) {
1887
- assert(dstSize, `Missing size for int array column ${column.name}`);
1978
+ if (column.arraySize !== void 0) {
1979
+ assert(dstSize !== void 0, `Missing size for int array column ${column.name}`);
1888
1980
  const castBuffer = getCastBuffer(
1889
1981
  typeof cell.data === "number" ? BigInt(cell.data) : cell.data,
1890
1982
  srcSize,
@@ -1912,7 +2004,7 @@ class DBDParser {
1912
2004
  column.isSigned
1913
2005
  );
1914
2006
  } else {
1915
- assert(!column.size || column.size === 64, `Unexpected size ${column.size?.toString() ?? ""} for column ${column.name}`);
2007
+ assert(column.size === void 0 || column.size === 64, `Unexpected size ${column.size?.toString() ?? ""} for column ${column.name}`);
1916
2008
  if (srcSigned !== column.isSigned) {
1917
2009
  data[column.name] = castBigInt64(
1918
2010
  cell.data,
@@ -1966,7 +2058,7 @@ class DBDParser {
1966
2058
  if (fieldIndex + 1 < this.wdc.fields.length) {
1967
2059
  count = Math.max((nextField.position - currField.position) / size, 1);
1968
2060
  } else {
1969
- count = column.arraySize ? (buffer.byteLength - offset) / size : 1;
2061
+ count = column.arraySize !== void 0 ? (buffer.byteLength - offset) / size : 1;
1970
2062
  }
1971
2063
  for (let i = 0; i < count; i += 1) {
1972
2064
  if (column.type === "float") {
@@ -1998,55 +2090,4 @@ class DBDParser {
1998
2090
  }
1999
2091
  }
2000
2092
 
2001
- var __defProp = Object.defineProperty;
2002
- var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
2003
- var __publicField = (obj, key, value) => {
2004
- __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
2005
- return value;
2006
- };
2007
- const ADB_MAGIC = 1481004104;
2008
- class ADBReader {
2009
- constructor(buffer) {
2010
- __publicField(this, "build");
2011
- __publicField(this, "entries", []);
2012
- __publicField(this, "tableEntries", /* @__PURE__ */ new Map());
2013
- const magic = buffer.readUInt32BE(0);
2014
- assert(magic === ADB_MAGIC, `[ADB]: Invalid magic: ${magic.toString(16).padStart(8, "0")}`);
2015
- const version = buffer.readUInt32LE(4);
2016
- assert(version === 9, `[ADB]: Invalid version: ${version.toString()}`);
2017
- const build = buffer.readUInt32LE(8);
2018
- this.build = build;
2019
- let pointer = 44;
2020
- while (pointer < buffer.byteLength) {
2021
- const offset = pointer;
2022
- const entryMagic = buffer.readUInt32BE(offset);
2023
- assert(entryMagic === ADB_MAGIC, `[ADB]: Invalid entry magic: ${magic.toString(16).padStart(8, "0")}`);
2024
- const regionID = buffer.readInt32LE(offset + 4);
2025
- const pushID = buffer.readInt32LE(offset + 8);
2026
- const uniqueID = buffer.readUInt32LE(offset + 12);
2027
- const tableHash = buffer.readUInt32LE(offset + 16);
2028
- const recordID = buffer.readUInt32LE(offset + 20);
2029
- const dataSize = buffer.readUInt32LE(offset + 24);
2030
- const recordState = buffer.readUInt32LE(offset + 28);
2031
- const data = buffer.subarray(offset + 32, offset + 32 + dataSize);
2032
- const entry = {
2033
- regionID,
2034
- pushID,
2035
- uniqueID,
2036
- tableHash,
2037
- recordID,
2038
- dataSize,
2039
- recordState,
2040
- data
2041
- };
2042
- this.entries.push(entry);
2043
- if (!this.tableEntries.has(tableHash)) {
2044
- this.tableEntries.set(tableHash, []);
2045
- }
2046
- this.tableEntries.get(tableHash)?.push(entry);
2047
- pointer += 32 + dataSize;
2048
- }
2049
- }
2050
- }
2051
-
2052
2093
  export { ADBReader, CASCClient, DBDParser, WDCReader };
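
The long removal at the end is the old copy of ADBReader; since the export line above is unchanged, the class has been relocated within the module rather than dropped. Its constructor takes a raw Buffer of a version-9 ADB file (typically the client's hotfix cache, DBCache.bin) and groups entries by table hash, so usage along these lines should work (the file path is a placeholder):

import fs from "node:fs/promises";
import { ADBReader } from "@rhyster/wow-casc-dbc";

const buffer = await fs.readFile("./DBCache.bin");  // placeholder path to a version-9 cache
const adb = new ADBReader(buffer);

console.log(adb.build, adb.entries.length);
for (const [tableHash, entries] of adb.tableEntries) {
    console.log(tableHash.toString(16).padStart(8, "0"), entries.length);
}
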