@rhyster/wow-casc-dbc 2.5.1 → 2.6.0

This diff represents the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -4,20 +4,20 @@ const assert = require('node:assert');
4
4
  const crypto = require('node:crypto');
5
5
  const async = require('async');
6
6
  const cliProgress = require('cli-progress');
7
+ const zlib = require('node:zlib');
7
8
  const fs = require('node:fs/promises');
8
- const path = require('node:path');
9
9
  const http = require('node:http');
10
- const zlib = require('node:zlib');
10
+ const path = require('node:path');
11
11
 
12
12
  function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e.default : e; }
13
13
 
14
14
  const assert__default = /*#__PURE__*/_interopDefaultCompat(assert);
15
15
  const crypto__default = /*#__PURE__*/_interopDefaultCompat(crypto);
16
16
  const cliProgress__default = /*#__PURE__*/_interopDefaultCompat(cliProgress);
17
+ const zlib__default = /*#__PURE__*/_interopDefaultCompat(zlib);
17
18
  const fs__default = /*#__PURE__*/_interopDefaultCompat(fs);
18
- const path__default = /*#__PURE__*/_interopDefaultCompat(path);
19
19
  const http__default = /*#__PURE__*/_interopDefaultCompat(http);
20
- const zlib__default = /*#__PURE__*/_interopDefaultCompat(zlib);
20
+ const path__default = /*#__PURE__*/_interopDefaultCompat(path);
21
21
 
22
22
  var __defProp$6 = Object.defineProperty;
23
23
  var __defNormalProp$6 = (obj, key, value) => key in obj ? __defProp$6(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
@@ -25,547 +25,684 @@ var __publicField$6 = (obj, key, value) => {
25
25
  __defNormalProp$6(obj, typeof key !== "symbol" ? key + "" : key, value);
26
26
  return value;
27
27
  };
28
- class Store {
29
- constructor(dataFile) {
30
- __publicField$6(this, "data");
31
- __publicField$6(this, "dataFile");
32
- __publicField$6(this, "promise");
33
- this.dataFile = dataFile;
34
- this.data = {};
35
- this.promise = new Promise((resolve) => {
36
- fs__default.readFile(dataFile, "utf-8").then((file) => {
37
- this.data = JSON.parse(file);
38
- resolve();
39
- }).catch(() => {
40
- resolve();
41
- });
42
- });
43
- }
44
- async get(key) {
45
- await this.promise;
46
- return this.data[key];
47
- }
48
- async set(key, value) {
49
- await this.promise;
50
- this.data[key] = value;
51
- await fs__default.writeFile(this.dataFile, JSON.stringify(this.data), "utf-8");
28
+ const ADB_MAGIC = 1481004104;
29
+ class ADBReader {
30
+ constructor(buffer) {
31
+ __publicField$6(this, "build");
32
+ __publicField$6(this, "entries", []);
33
+ __publicField$6(this, "tableEntries", /* @__PURE__ */ new Map());
34
+ const magic = buffer.readUInt32BE(0);
35
+ assert__default(magic === ADB_MAGIC, `[ADB]: Invalid magic: ${magic.toString(16).padStart(8, "0")}`);
36
+ const version = buffer.readUInt32LE(4);
37
+ assert__default(version === 9, `[ADB]: Invalid version: ${version.toString()}`);
38
+ const build = buffer.readUInt32LE(8);
39
+ this.build = build;
40
+ let pointer = 44;
41
+ while (pointer < buffer.byteLength) {
42
+ const offset = pointer;
43
+ const entryMagic = buffer.readUInt32BE(offset);
44
+ assert__default(entryMagic === ADB_MAGIC, `[ADB]: Invalid entry magic: ${magic.toString(16).padStart(8, "0")}`);
45
+ const regionID = buffer.readInt32LE(offset + 4);
46
+ const pushID = buffer.readInt32LE(offset + 8);
47
+ const uniqueID = buffer.readUInt32LE(offset + 12);
48
+ const tableHash = buffer.readUInt32LE(offset + 16);
49
+ const recordID = buffer.readUInt32LE(offset + 20);
50
+ const dataSize = buffer.readUInt32LE(offset + 24);
51
+ const recordState = buffer.readUInt32LE(offset + 28);
52
+ const data = buffer.subarray(offset + 32, offset + 32 + dataSize);
53
+ const entry = {
54
+ regionID,
55
+ pushID,
56
+ uniqueID,
57
+ tableHash,
58
+ recordID,
59
+ dataSize,
60
+ recordState,
61
+ data
62
+ };
63
+ this.entries.push(entry);
64
+ if (!this.tableEntries.has(tableHash)) {
65
+ this.tableEntries.set(tableHash, []);
66
+ }
67
+ this.tableEntries.get(tableHash)?.push(entry);
68
+ pointer += 32 + dataSize;
69
+ }
52
70
  }
53
71
  }
54
72
 
55
- const USER_AGENT = "node-wow-casc-dbc";
56
- const CACHE_ROOT = path__default.resolve("cache");
57
- const CACHE_DIRS = {
58
- build: "builds",
59
- indexes: "indices",
60
- data: "data",
61
- dbd: "dbd"
73
+ var __defProp$5 = Object.defineProperty;
74
+ var __defNormalProp$5 = (obj, key, value) => key in obj ? __defProp$5(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
75
+ var __publicField$5 = (obj, key, value) => {
76
+ __defNormalProp$5(obj, typeof key !== "symbol" ? key + "" : key, value);
77
+ return value;
62
78
  };
63
- const CACHE_INTEGRITY_FILE = path__default.resolve(CACHE_ROOT, "integrity.json");
64
- const cacheIntegrity = new Store(CACHE_INTEGRITY_FILE);
65
- const formatCDNKey = (key) => `${key.substring(0, 2)}/${key.substring(2, 4)}/${key}`;
66
- const requestData = async (url, {
67
- partialOffset,
68
- partialLength,
69
- showProgress
70
- } = {}) => new Promise((resolve, reject) => {
71
- const options = {
72
- headers: {
73
- "User-Agent": USER_AGENT,
74
- Range: partialOffset && partialLength ? `bytes=${partialOffset.toString()}-${(partialOffset + partialLength - 1).toString()}` : "bytes=0-"
75
- }
76
- };
77
- http__default.get(url, options, (res) => {
78
- if (res.statusCode === 301 || res.statusCode === 302) {
79
- if (res.headers.location) {
80
- requestData(res.headers.location, { partialOffset, partialLength, showProgress }).then(resolve).catch((err) => {
81
- throw err;
82
- });
83
- } else {
84
- reject(new Error(`Failed to request ${url}, Status Code: ${res.statusCode.toString()}`));
79
+ class Salsa20 {
80
+ constructor(key, nonce) {
81
+ __publicField$5(this, "fixed");
82
+ __publicField$5(this, "key");
83
+ __publicField$5(this, "nonce");
84
+ __publicField$5(this, "counter", new Uint32Array([0, 0]));
85
+ __publicField$5(this, "state", new Uint32Array(16));
86
+ __publicField$5(this, "block", new Uint8Array(64));
87
+ __publicField$5(this, "position", 0);
88
+ assert__default(key.length === 32 || key.length === 16, "Salsa20 requires 128-bit or 256-bit key");
89
+ assert__default(nonce.length === 8, "Salsa20 requires 64-bit nonce");
90
+ this.key = new Uint32Array(8);
91
+ const keyView = new DataView(key.buffer);
92
+ if (key.length === 32) {
93
+ for (let i = 0; i < 8; i += 1) {
94
+ this.key[i] = keyView.getUint32(i * 4, true);
85
95
  }
86
- return;
96
+ this.fixed = new Uint32Array([
97
+ 1634760805,
98
+ 857760878,
99
+ 2036477234,
100
+ 1797285236
101
+ ]);
102
+ } else {
103
+ for (let i = 0; i < 4; i += 1) {
104
+ const word = keyView.getUint32(i * 4, true);
105
+ this.key[i] = word;
106
+ this.key[i + 4] = word;
107
+ }
108
+ this.fixed = new Uint32Array([
109
+ 1634760805,
110
+ 824206446,
111
+ 2036477238,
112
+ 1797285236
113
+ ]);
87
114
  }
88
- if (!res.statusCode || res.statusCode < 200 || res.statusCode > 302) {
89
- reject(new Error(`Failed to request ${url}, Status Code: ${res.statusCode?.toString() ?? "undefined"}`));
90
- return;
115
+ this.nonce = new Uint32Array(2);
116
+ const nonceView = new DataView(nonce.buffer);
117
+ for (let i = 0; i < 2; i += 1) {
118
+ this.nonce[i] = nonceView.getUint32(i * 4, true);
91
119
  }
92
- const lengthText = res.headers["content-length"];
93
- const length = lengthText ? parseInt(lengthText, 10) : 0;
94
- const bar = showProgress && !Number.isNaN(length) && length >= 10485760 ? new cliProgress__default.SingleBar({ etaBuffer: 10240 }, cliProgress__default.Presets.shades_classic) : void 0;
95
- bar?.start(length, 0);
96
- const chunks = [];
97
- res.on("data", (chunk) => {
98
- bar?.increment(chunk.length);
99
- chunks.push(chunk);
100
- });
101
- res.on("end", () => {
102
- bar?.stop();
103
- resolve(Buffer.concat(chunks));
104
- });
105
- res.on("error", (err) => {
106
- bar?.stop();
107
- reject(err);
108
- });
109
- }).on("error", reject).end();
110
- });
111
- const downloadFile = (prefixes, type, key, {
112
- partialOffset,
113
- partialLength,
114
- showProgress,
115
- showAttemptFail
116
- } = {}) => {
117
- const urls = prefixes.map((prefix) => `${prefix}/${type}/${formatCDNKey(key)}`);
118
- return urls.reduce(
119
- (prev, url, index) => prev.catch((err) => {
120
- if (showAttemptFail && index > 0 && err instanceof Error) {
121
- console.warn(`${( new Date()).toISOString()} [WARN]:`, err.message);
122
- }
123
- return requestData(url, { partialOffset, partialLength, showProgress });
124
- }),
125
- Promise.reject(new Error(""))
126
- );
127
- };
128
- const getFileCache = async (file) => {
129
- const integrity = await cacheIntegrity.get(file);
130
- if (integrity) {
131
- try {
132
- const buffer = await fs__default.readFile(path__default.resolve(CACHE_ROOT, file));
133
- const hash = crypto__default.createHash("sha256").update(buffer).digest("hex");
134
- if (hash === integrity) {
135
- return buffer;
120
+ this.generateBlock();
121
+ }
122
+ // eslint-disable-next-line @typescript-eslint/naming-convention
123
+ QR(a, b, c, d) {
124
+ let t;
125
+ t = this.state[a] + this.state[d] & 4294967295;
126
+ this.state[b] ^= t << 7 | t >>> 25;
127
+ t = this.state[b] + this.state[a] & 4294967295;
128
+ this.state[c] ^= t << 9 | t >>> 23;
129
+ t = this.state[c] + this.state[b] & 4294967295;
130
+ this.state[d] ^= t << 13 | t >>> 19;
131
+ t = this.state[d] + this.state[c] & 4294967295;
132
+ this.state[a] ^= t << 18 | t >>> 14;
133
+ }
134
+ generateBlock() {
135
+ const init = new Uint32Array([
136
+ this.fixed[0],
137
+ this.key[0],
138
+ this.key[1],
139
+ this.key[2],
140
+ this.key[3],
141
+ this.fixed[1],
142
+ this.nonce[0],
143
+ this.nonce[1],
144
+ this.counter[0],
145
+ this.counter[1],
146
+ this.fixed[2],
147
+ this.key[4],
148
+ this.key[5],
149
+ this.key[6],
150
+ this.key[7],
151
+ this.fixed[3]
152
+ ]);
153
+ this.state = new Uint32Array(init);
154
+ for (let i = 0; i < 20; i += 2) {
155
+ this.QR(0, 4, 8, 12);
156
+ this.QR(5, 9, 13, 1);
157
+ this.QR(10, 14, 2, 6);
158
+ this.QR(15, 3, 7, 11);
159
+ this.QR(0, 1, 2, 3);
160
+ this.QR(5, 6, 7, 4);
161
+ this.QR(10, 11, 8, 9);
162
+ this.QR(15, 12, 13, 14);
163
+ }
164
+ for (let i = 0; i < 16; i += 1) {
165
+ const word = this.state[i] + init[i] & 4294967295;
166
+ this.block[i * 4] = word & 255;
167
+ this.block[i * 4 + 1] = word >>> 8 & 255;
168
+ this.block[i * 4 + 2] = word >>> 16 & 255;
169
+ this.block[i * 4 + 3] = word >>> 24 & 255;
170
+ }
171
+ this.counter[0] = this.counter[0] + 1 & 4294967295;
172
+ if (this.counter[0] === 0) {
173
+ this.counter[1] = this.counter[1] + 1 & 4294967295;
174
+ }
175
+ }
176
+ process(input) {
177
+ const { length } = input;
178
+ const result = new Uint8Array(length);
179
+ for (let i = 0; i < length; i += 1) {
180
+ if (this.position === 64) {
181
+ this.generateBlock();
182
+ this.position = 0;
136
183
  }
137
- } catch {
184
+ result[i] = input[i] ^ this.block[this.position];
185
+ this.position += 1;
138
186
  }
187
+ return result;
139
188
  }
140
- return void 0;
189
+ }
190
+
191
+ var __defProp$4 = Object.defineProperty;
192
+ var __defNormalProp$4 = (obj, key, value) => key in obj ? __defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
193
+ var __publicField$4 = (obj, key, value) => {
194
+ __defNormalProp$4(obj, typeof key !== "symbol" ? key + "" : key, value);
195
+ return value;
141
196
  };
142
- const getDataFile = async (prefixes, key, type, buildCKey, {
143
- name,
144
- partialOffset,
145
- partialLength,
146
- showProgress,
147
- showAttemptFail
148
- } = {}) => {
149
- const dir = type === "build" ? path__default.join(CACHE_DIRS[type], buildCKey) : CACHE_DIRS[type];
150
- const file = name ? path__default.join(dir, name) : path__default.join(dir, key);
151
- const cacheBuffer = await getFileCache(file);
152
- if (cacheBuffer) {
153
- if (name === void 0 && partialOffset !== void 0 && partialLength !== void 0) {
154
- return cacheBuffer.subarray(partialOffset, partialOffset + partialLength);
155
- }
156
- return cacheBuffer;
157
- }
158
- const downloadBuffer = await downloadFile(prefixes, "data", key, {
159
- partialOffset,
160
- partialLength,
161
- showProgress,
162
- showAttemptFail
163
- });
164
- if (partialOffset === void 0 && partialLength === void 0 || name) {
165
- await fs__default.mkdir(path__default.resolve(CACHE_ROOT, dir), { recursive: true });
166
- await fs__default.writeFile(path__default.resolve(CACHE_ROOT, file), downloadBuffer);
167
- const hash = crypto__default.createHash("sha256").update(downloadBuffer).digest("hex");
168
- await cacheIntegrity.set(file, hash);
169
- }
170
- return downloadBuffer;
171
- };
172
- const getConfigFile = async (prefixes, key, {
173
- showProgress,
174
- showAttemptFail
175
- } = {}) => {
176
- const downloadBuffer = await downloadFile(prefixes, "config", key, { showProgress, showAttemptFail });
177
- return downloadBuffer.toString("utf-8");
178
- };
179
- const getProductVersions = async (region, product) => {
180
- const url = `http://${region}.patch.battle.net:1119/${product}/versions`;
181
- const headers = {
182
- "User-Agent": USER_AGENT
183
- };
184
- const res = await fetch(url, { headers });
185
- return res.text();
186
- };
187
- const getProductCDNs = async (region, product) => {
188
- const url = `http://${region}.patch.battle.net:1119/${product}/cdns`;
189
- const headers = {
190
- "User-Agent": USER_AGENT
191
- };
192
- const res = await fetch(url, { headers });
193
- return res.text();
194
- };
195
-
196
- const parseProductConfig = (text) => {
197
- const lines = text.split(/\r?\n/);
198
- const headers = lines[0].split("|").map((header) => header.split("!")[0].replace(" ", ""));
199
- const entries = lines.filter((line, index) => index > 0 && line.trim().length !== 0 && !line.startsWith("#")).map((line) => {
200
- const node = {};
201
- const entryFields = line.split("|");
202
- for (let i = 0, n = entryFields.length; i < n; i += 1) {
203
- node[headers[i]] = entryFields[i];
197
+ const BLTE_MAGIC = 1112298565;
198
+ const ENC_TYPE_SALSA20 = 83;
199
+ const EMPTY_HASH = "00000000000000000000000000000000";
200
+ class BLTEReader {
201
+ constructor(buffer, eKey, keys = /* @__PURE__ */ new Map()) {
202
+ __publicField$4(this, "buffer");
203
+ __publicField$4(this, "blte");
204
+ __publicField$4(this, "blocks", []);
205
+ __publicField$4(this, "keys");
206
+ __publicField$4(this, "processedBlock", 0);
207
+ __publicField$4(this, "processedOffset", 0);
208
+ this.blte = buffer;
209
+ this.buffer = Buffer.alloc(0);
210
+ this.keys = keys;
211
+ const size = buffer.byteLength;
212
+ assert__default(size >= 8, `[BLTE]: Invalid size: ${size.toString()} < 8`);
213
+ const magic = buffer.readUInt32BE(0);
214
+ assert__default(magic === BLTE_MAGIC, `[BLTE]: Invalid magic: ${magic.toString(16).padStart(8, "0")}`);
215
+ const headerSize = buffer.readUInt32BE(4);
216
+ if (headerSize === 0) {
217
+ const blteHash2 = crypto__default.createHash("md5").update(buffer).digest("hex");
218
+ assert__default(blteHash2 === eKey, `[BLTE]: Invalid hash: expected ${eKey}, got ${blteHash2}`);
219
+ this.blocks.push({
220
+ compressedSize: size - 8,
221
+ decompressedSize: size - 9,
222
+ hash: EMPTY_HASH
223
+ });
224
+ this.processedOffset = 8;
225
+ return;
204
226
  }
205
- return node;
206
- });
207
- return entries;
208
- };
209
- const parseProductVersions = (text) => parseProductConfig(text);
210
- const parseProductCDNs = (text) => parseProductConfig(text);
211
-
212
- const normalizeKey = (key) => key.split("-").map((part, index) => index === 0 ? part : `${part.charAt(0).toUpperCase()}${part.slice(1)}`).join("");
213
- const parseConfig = (text) => {
214
- const entries = {};
215
- text.split(/\r?\n/).filter((line) => line.trim().length !== 0 && !line.startsWith("#")).forEach((line) => {
216
- const match = line.match(/([^\s]+)\s?=\s?(.*)/);
217
- assert__default(match !== null, "Invalid token encountered parsing CDN config");
218
- const [key, value] = match.slice(1);
219
- entries[normalizeKey(key)] = value;
220
- });
221
- return entries;
222
- };
223
- const parseCDNConfig = (text) => parseConfig(text);
224
- const parseBuildConfig = (text) => parseConfig(text);
225
-
226
- const VERSION_SUB_OFFSET = -12;
227
- const CHECKSUM_SIZE_SUB_OFFSET = -5;
228
- const BLOCK_SIZE_OFFSET = 3;
229
- const OFFSET_BYTES_OFFSET = 4;
230
- const SIZE_BYTES_OFFSET = 5;
231
- const KEY_SIZE_OFFSET = 6;
232
- const NUM_ELEMENTS_OFFSET = 8;
233
- const CHECKSUM_OFFSET = 12;
234
- const CHECKSUM_TRIES = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0];
235
- const tryArchiveIndexChecksumSize = (buffer, cKey) => {
236
- const res = CHECKSUM_TRIES.filter(
237
- (index) => buffer.readUInt8(buffer.byteLength - index + CHECKSUM_SIZE_SUB_OFFSET) === index && buffer.readUInt8(buffer.byteLength - index + VERSION_SUB_OFFSET) === 1
238
- );
239
- if (res.length === 1) {
240
- return res[0];
241
- }
242
- throw new Error(`Invalid checksum size: ${res.join(", ")} in ${cKey}`);
243
- };
244
- const parseArchiveIndex = (buffer, cKey) => {
245
- const checksumSize = tryArchiveIndexChecksumSize(buffer, cKey);
246
- const versionOffset = buffer.byteLength - checksumSize + VERSION_SUB_OFFSET;
247
- const footerOffset = versionOffset - checksumSize;
248
- const tocChecksum = buffer.toString("hex", footerOffset, versionOffset);
249
- const version = buffer.readUInt8(versionOffset);
250
- const blockSizeKB = buffer.readUInt8(versionOffset + BLOCK_SIZE_OFFSET);
251
- const offsetBytes = buffer.readUInt8(versionOffset + OFFSET_BYTES_OFFSET);
252
- const sizeBytes = buffer.readUInt8(versionOffset + SIZE_BYTES_OFFSET);
253
- const keySize = buffer.readUInt8(versionOffset + KEY_SIZE_OFFSET);
254
- const numElements = buffer.readUInt32LE(versionOffset + NUM_ELEMENTS_OFFSET);
255
- const footerChecksum = buffer.toString("hex", versionOffset + CHECKSUM_OFFSET);
256
- assert__default(version === 1, `Invalid version: ${version.toString()} in ${cKey}`);
257
- const entrySize = keySize + offsetBytes + sizeBytes;
258
- const blockSize = blockSizeKB * 1024;
259
- const numBlocks = footerOffset / (blockSize + keySize + checksumSize);
260
- const tocSize = (keySize + checksumSize) * numBlocks;
261
- const toc = buffer.subarray(footerOffset - tocSize, footerOffset);
262
- const footer = buffer.subarray(footerOffset);
263
- const footerCheckBuffer = Buffer.concat([
264
- buffer.subarray(versionOffset, buffer.byteLength - checksumSize),
265
- Buffer.alloc(checksumSize)
266
- ]);
267
- const hash = crypto__default.createHash("md5").update(footer).digest("hex");
268
- assert__default(hash === cKey, `Invalid footer hash in ${cKey}: expected ${cKey}, got ${hash}`);
269
- const footerHash = crypto__default.createHash("md5").update(footerCheckBuffer).digest("hex").slice(0, checksumSize * 2);
270
- assert__default(footerHash === footerChecksum, `Invalid footer checksum in ${cKey}: expected ${footerChecksum}, got ${footerHash}`);
271
- const tocHash = crypto__default.createHash("md5").update(toc).digest("hex").slice(0, checksumSize * 2);
272
- assert__default(tocHash === tocChecksum, `Invalid toc checksum in ${cKey}: expected ${tocChecksum}, got ${tocHash}`);
273
- const result = /* @__PURE__ */ new Map();
274
- for (let i = 0; i < numBlocks; i += 1) {
275
- const lastEkey = toc.toString("hex", i * keySize, (i + 1) * keySize);
276
- const blockChecksum = toc.toString("hex", numBlocks * keySize + i * checksumSize, numBlocks * keySize + (i + 1) * checksumSize);
277
- const blockOffset = i * blockSize;
278
- const blockHash = crypto__default.createHash("md5").update(buffer.subarray(i * blockSize, (i + 1) * blockSize)).digest("hex").slice(0, checksumSize * 2);
279
- assert__default(blockChecksum === blockHash, `Invalid block hash in ${cKey} at ${i.toString()}: expected ${blockChecksum}, got ${blockHash}`);
280
- let length = 0;
281
- while (length < blockSize) {
282
- const entryOffset = blockOffset + length * entrySize;
283
- const eKey = buffer.toString("hex", entryOffset, entryOffset + keySize);
284
- const size = buffer.readUIntBE(entryOffset + keySize, sizeBytes);
285
- const offset = buffer.readUIntBE(entryOffset + keySize + sizeBytes, offsetBytes);
286
- result.set(eKey, { key: cKey, size, offset });
287
- length += 1;
288
- if (eKey === lastEkey) {
289
- break;
290
- }
227
+ const blteHash = crypto__default.createHash("md5").update(buffer.subarray(0, headerSize)).digest("hex");
228
+ assert__default(blteHash === eKey, `[BLTE]: Invalid hash: expected ${eKey}, got ${blteHash}`);
229
+ assert__default(size >= 12, `[BLTE]: Invalid size: ${size.toString()} < 12`);
230
+ const flag = buffer.readUInt8(8);
231
+ const numBlocks = buffer.readIntBE(9, 3);
232
+ assert__default(numBlocks > 0, `[BLTE]: Invalid number of blocks: ${numBlocks.toString()}`);
233
+ assert__default(flag === 15, `[BLTE]: Invalid flag: ${flag.toString(16).padStart(2, "0")}`);
234
+ const blockHeaderSize = numBlocks * 24;
235
+ assert__default(headerSize === blockHeaderSize + 12, `[BLTE]: Invalid header size: header size ${headerSize.toString()} != block header size ${blockHeaderSize.toString()} + 12`);
236
+ assert__default(size >= headerSize, `[BLTE]: Invalid size: ${size.toString()} < ${headerSize.toString()}`);
237
+ for (let i = 0; i < numBlocks; i += 1) {
238
+ const offset = 12 + i * 24;
239
+ const compressedSize = buffer.readUInt32BE(offset);
240
+ const decompressedSize = buffer.readUInt32BE(offset + 4);
241
+ const hash = buffer.toString("hex", offset + 8, offset + 24);
242
+ this.blocks.push({
243
+ compressedSize,
244
+ decompressedSize,
245
+ hash
246
+ });
291
247
  }
248
+ this.processedOffset = headerSize;
292
249
  }
293
- assert__default(result.size === numElements, `Invalid number of elements: ${result.size.toString()} != ${numElements.toString()} in ${cKey}`);
294
- return result;
295
- };
296
-
297
- var __defProp$5 = Object.defineProperty;
298
- var __defNormalProp$5 = (obj, key, value) => key in obj ? __defProp$5(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
299
- var __publicField$5 = (obj, key, value) => {
300
- __defNormalProp$5(obj, typeof key !== "symbol" ? key + "" : key, value);
301
- return value;
302
- };
303
- class Salsa20 {
304
- constructor(key, nonce) {
305
- __publicField$5(this, "fixed");
306
- __publicField$5(this, "key");
307
- __publicField$5(this, "nonce");
308
- __publicField$5(this, "counter", new Uint32Array([0, 0]));
309
- __publicField$5(this, "state", new Uint32Array(16));
310
- __publicField$5(this, "block", new Uint8Array(64));
311
- __publicField$5(this, "position", 0);
312
- assert__default(key.length === 32 || key.length === 16, "Salsa20 requires 128-bit or 256-bit key");
313
- assert__default(nonce.length === 8, "Salsa20 requires 64-bit nonce");
314
- this.key = new Uint32Array(8);
315
- const keyView = new DataView(key.buffer);
316
- if (key.length === 32) {
317
- for (let i = 0; i < 8; i += 1) {
318
- this.key[i] = keyView.getUint32(i * 4, true);
319
- }
320
- this.fixed = new Uint32Array([
321
- 1634760805,
322
- 857760878,
323
- 2036477234,
324
- 1797285236
325
- ]);
326
- } else {
327
- for (let i = 0; i < 4; i += 1) {
328
- const word = keyView.getUint32(i * 4, true);
329
- this.key[i] = word;
330
- this.key[i + 4] = word;
250
+ processBlock(buffer, index, allowMissingKey) {
251
+ const flag = buffer.readUInt8(0);
252
+ switch (flag) {
253
+ case 69: {
254
+ let offset = 1;
255
+ const keyNameLength = buffer.readUInt8(offset);
256
+ offset += 1;
257
+ const keyNameBE = buffer.toString("hex", offset, offset + keyNameLength);
258
+ offset += keyNameLength;
259
+ const ivLength = buffer.readUInt8(offset);
260
+ offset += 1;
261
+ const ivBuffer = buffer.subarray(offset, offset + ivLength);
262
+ offset += ivLength;
263
+ const encryptType = buffer.readUInt8(offset);
264
+ offset += 1;
265
+ assert__default(encryptType === ENC_TYPE_SALSA20, `[BLTE]: Invalid encrypt type: ${encryptType.toString(16).padStart(2, "0")} at block ${index.toString()}`);
266
+ const keyName = [...keyNameBE.matchAll(/.{2}/g)].map((v) => v[0]).reverse().join("").toLowerCase();
267
+ const key = this.keys.get(keyName);
268
+ if (!key) {
269
+ if (allowMissingKey) {
270
+ return keyName;
271
+ }
272
+ throw new Error(`[BLTE]: Missing key: ${keyName} at block ${index.toString()}`);
273
+ }
274
+ const iv = new Uint8Array(8);
275
+ for (let i = 0; i < 8; i += 1) {
276
+ if (i < ivLength) {
277
+ iv[i] = ivBuffer.readUInt8(i) ^ index >>> 8 * i & 255;
278
+ } else {
279
+ iv[i] = 0;
280
+ }
281
+ }
282
+ const handler = new Salsa20(key, iv);
283
+ const decrypted = handler.process(buffer.subarray(offset));
284
+ if (allowMissingKey) {
285
+ return this.processBlock(Buffer.from(decrypted.buffer), index, true);
286
+ }
287
+ return this.processBlock(Buffer.from(decrypted.buffer), index, false);
331
288
  }
332
- this.fixed = new Uint32Array([
333
- 1634760805,
334
- 824206446,
335
- 2036477238,
336
- 1797285236
337
- ]);
338
- }
339
- this.nonce = new Uint32Array(2);
340
- const nonceView = new DataView(nonce.buffer);
341
- for (let i = 0; i < 2; i += 1) {
342
- this.nonce[i] = nonceView.getUint32(i * 4, true);
343
- }
344
- this.generateBlock();
345
- }
346
- QR(a, b, c, d) {
347
- let t;
348
- t = this.state[a] + this.state[d] & 4294967295;
349
- this.state[b] ^= t << 7 | t >>> 25;
350
- t = this.state[b] + this.state[a] & 4294967295;
351
- this.state[c] ^= t << 9 | t >>> 23;
352
- t = this.state[c] + this.state[b] & 4294967295;
353
- this.state[d] ^= t << 13 | t >>> 19;
354
- t = this.state[d] + this.state[c] & 4294967295;
355
- this.state[a] ^= t << 18 | t >>> 14;
356
- }
357
- generateBlock() {
358
- const init = new Uint32Array([
359
- this.fixed[0],
360
- this.key[0],
361
- this.key[1],
362
- this.key[2],
363
- this.key[3],
364
- this.fixed[1],
365
- this.nonce[0],
366
- this.nonce[1],
367
- this.counter[0],
368
- this.counter[1],
369
- this.fixed[2],
370
- this.key[4],
371
- this.key[5],
372
- this.key[6],
373
- this.key[7],
374
- this.fixed[3]
375
- ]);
376
- this.state = new Uint32Array(init);
377
- for (let i = 0; i < 20; i += 2) {
378
- this.QR(0, 4, 8, 12);
379
- this.QR(5, 9, 13, 1);
380
- this.QR(10, 14, 2, 6);
381
- this.QR(15, 3, 7, 11);
382
- this.QR(0, 1, 2, 3);
383
- this.QR(5, 6, 7, 4);
384
- this.QR(10, 11, 8, 9);
385
- this.QR(15, 12, 13, 14);
386
- }
387
- for (let i = 0; i < 16; i += 1) {
388
- const word = this.state[i] + init[i] & 4294967295;
389
- this.block[i * 4] = word & 255;
390
- this.block[i * 4 + 1] = word >>> 8 & 255;
391
- this.block[i * 4 + 2] = word >>> 16 & 255;
392
- this.block[i * 4 + 3] = word >>> 24 & 255;
393
- }
394
- this.counter[0] = this.counter[0] + 1 & 4294967295;
395
- if (this.counter[0] === 0) {
396
- this.counter[1] = this.counter[1] + 1 & 4294967295;
289
+ case 70:
290
+ throw new Error(`[BLTE]: Frame (Recursive) block not supported at block ${index.toString()}`);
291
+ case 78:
292
+ return buffer.subarray(1);
293
+ case 90:
294
+ return zlib__default.inflateSync(buffer.subarray(1));
295
+ default:
296
+ throw new Error(`[BLTE]: Invalid block flag: ${flag.toString(16).padStart(2, "0")} at block ${index.toString()}`);
397
297
  }
398
298
  }
399
- process(input) {
400
- const { length } = input;
401
- const result = new Uint8Array(length);
402
- for (let i = 0; i < length; i += 1) {
403
- if (this.position === 64) {
404
- this.generateBlock();
405
- this.position = 0;
299
+ processBytes(allowMissingKey = false, size = Infinity) {
300
+ const missingKeyBlocks = [];
301
+ while (this.processedBlock < this.blocks.length && size > this.buffer.byteLength) {
302
+ const blockIndex = this.processedBlock;
303
+ const block = this.blocks[blockIndex];
304
+ const blockBuffer = this.blte.subarray(
305
+ this.processedOffset,
306
+ this.processedOffset + block.compressedSize
307
+ );
308
+ if (block.hash !== EMPTY_HASH) {
309
+ const blockHash = crypto__default.createHash("md5").update(blockBuffer).digest("hex");
310
+ assert__default(blockHash === block.hash, `[BLTE]: Invalid block hash: expected ${block.hash}, got ${blockHash}`);
406
311
  }
407
- result[i] = input[i] ^ this.block[this.position];
408
- this.position += 1;
312
+ if (allowMissingKey) {
313
+ const buffer = this.processBlock(blockBuffer, blockIndex, allowMissingKey);
314
+ if (buffer instanceof Buffer) {
315
+ assert__default(
316
+ buffer.byteLength === block.decompressedSize,
317
+ `[BLTE]: Invalid decompressed size: expected ${block.decompressedSize.toString()}, got ${buffer.byteLength.toString()}`
318
+ );
319
+ this.buffer = Buffer.concat([this.buffer, buffer]);
320
+ } else {
321
+ missingKeyBlocks.push({
322
+ offset: this.buffer.byteLength,
323
+ size: block.decompressedSize,
324
+ blockIndex,
325
+ keyName: buffer
326
+ });
327
+ this.buffer = Buffer.concat([
328
+ this.buffer,
329
+ Buffer.alloc(block.decompressedSize)
330
+ ]);
331
+ }
332
+ } else {
333
+ const buffer = this.processBlock(blockBuffer, blockIndex, allowMissingKey);
334
+ assert__default(
335
+ buffer.byteLength === block.decompressedSize,
336
+ `[BLTE]: Invalid decompressed size: expected ${block.decompressedSize.toString()}, got ${buffer.byteLength.toString()}`
337
+ );
338
+ this.buffer = Buffer.concat([this.buffer, buffer]);
339
+ }
340
+ this.processedBlock += 1;
341
+ this.processedOffset += block.compressedSize;
409
342
  }
410
- return result;
343
+ return allowMissingKey ? missingKeyBlocks : void 0;
411
344
  }
412
345
  }
413
346
 
414
- var __defProp$4 = Object.defineProperty;
415
- var __defNormalProp$4 = (obj, key, value) => key in obj ? __defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
416
- var __publicField$4 = (obj, key, value) => {
417
- __defNormalProp$4(obj, typeof key !== "symbol" ? key + "" : key, value);
347
+ var __defProp$3 = Object.defineProperty;
348
+ var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
349
+ var __publicField$3 = (obj, key, value) => {
350
+ __defNormalProp$3(obj, typeof key !== "symbol" ? key + "" : key, value);
418
351
  return value;
419
352
  };
420
- const BLTE_MAGIC = 1112298565;
421
- const ENC_TYPE_SALSA20 = 83;
422
- const EMPTY_HASH = "00000000000000000000000000000000";
423
- class BLTEReader {
424
- constructor(buffer, eKey, keys = /* @__PURE__ */ new Map()) {
425
- __publicField$4(this, "buffer");
426
- __publicField$4(this, "blte");
427
- __publicField$4(this, "blocks", []);
428
- __publicField$4(this, "keys");
429
- __publicField$4(this, "processedBlock", 0);
430
- __publicField$4(this, "processedOffset", 0);
431
- this.blte = buffer;
432
- this.buffer = Buffer.alloc(0);
433
- this.keys = keys;
434
- const size = buffer.byteLength;
435
- assert__default(size >= 8, `[BLTE]: Invalid size: ${size.toString()} < 8`);
436
- const magic = buffer.readUInt32BE(0);
437
- assert__default(magic === BLTE_MAGIC, `[BLTE]: Invalid magic: ${magic.toString(16).padStart(8, "0")}`);
438
- const headerSize = buffer.readUInt32BE(4);
439
- if (headerSize === 0) {
440
- const blteHash2 = crypto__default.createHash("md5").update(buffer).digest("hex");
441
- assert__default(blteHash2 === eKey, `[BLTE]: Invalid hash: expected ${eKey}, got ${blteHash2}`);
442
- this.blocks.push({
443
- compressedSize: size - 8,
444
- decompressedSize: size - 9,
445
- hash: EMPTY_HASH
353
+ class Store {
354
+ constructor(dataFile) {
355
+ __publicField$3(this, "data");
356
+ __publicField$3(this, "dataFile");
357
+ __publicField$3(this, "promise");
358
+ this.dataFile = dataFile;
359
+ this.data = {};
360
+ this.promise = new Promise((resolve) => {
361
+ fs__default.readFile(dataFile, "utf-8").then((file) => {
362
+ this.data = JSON.parse(file);
363
+ resolve();
364
+ }).catch(() => {
365
+ resolve();
446
366
  });
447
- this.processedOffset = 8;
367
+ });
368
+ }
369
+ async get(key) {
370
+ await this.promise;
371
+ return this.data[key];
372
+ }
373
+ async set(key, value) {
374
+ await this.promise;
375
+ this.data[key] = value;
376
+ await fs__default.writeFile(this.dataFile, JSON.stringify(this.data), "utf-8");
377
+ }
378
+ }
379
+
380
+ const USER_AGENT = "node-wow-casc-dbc";
381
+ const CACHE_ROOT = path__default.resolve("cache");
382
+ const CACHE_DIRS = {
383
+ build: "builds",
384
+ indexes: "indices",
385
+ data: "data",
386
+ dbd: "dbd"
387
+ };
388
+ const CACHE_INTEGRITY_FILE = path__default.resolve(CACHE_ROOT, "integrity.json");
389
+ const cacheIntegrity = new Store(CACHE_INTEGRITY_FILE);
390
+ const formatCDNKey = (key) => `${key.substring(0, 2)}/${key.substring(2, 4)}/${key}`;
391
+ const requestData = async (url, {
392
+ partialOffset,
393
+ partialLength,
394
+ showProgress
395
+ } = {}) => new Promise((resolve, reject) => {
396
+ const options = {
397
+ headers: {
398
+ // eslint-disable-next-line @typescript-eslint/naming-convention
399
+ "User-Agent": USER_AGENT,
400
+ // eslint-disable-next-line @typescript-eslint/naming-convention
401
+ Range: partialOffset !== void 0 && partialLength !== void 0 ? `bytes=${partialOffset.toString()}-${(partialOffset + partialLength - 1).toString()}` : "bytes=0-"
402
+ }
403
+ };
404
+ http__default.get(url, options, (res) => {
405
+ if (res.statusCode === 301 || res.statusCode === 302) {
406
+ if (res.headers.location !== void 0) {
407
+ requestData(res.headers.location, { partialOffset, partialLength, showProgress }).then(resolve).catch((err) => {
408
+ throw err;
409
+ });
410
+ } else {
411
+ reject(new Error(`Failed to request ${url}, Status Code: ${res.statusCode.toString()}`));
412
+ }
448
413
  return;
449
414
  }
450
- const blteHash = crypto__default.createHash("md5").update(buffer.subarray(0, headerSize)).digest("hex");
451
- assert__default(blteHash === eKey, `[BLTE]: Invalid hash: expected ${eKey}, got ${blteHash}`);
452
- assert__default(size >= 12, `[BLTE]: Invalid size: ${size.toString()} < 12`);
453
- const flag = buffer.readUInt8(8);
454
- const numBlocks = buffer.readIntBE(9, 3);
455
- assert__default(numBlocks > 0, `[BLTE]: Invalid number of blocks: ${numBlocks.toString()}`);
456
- assert__default(flag === 15, `[BLTE]: Invalid flag: ${flag.toString(16).padStart(2, "0")}`);
457
- const blockHeaderSize = numBlocks * 24;
458
- assert__default(headerSize === blockHeaderSize + 12, `[BLTE]: Invalid header size: header size ${headerSize.toString()} != block header size ${blockHeaderSize.toString()} + 12`);
459
- assert__default(size >= headerSize, `[BLTE]: Invalid size: ${size.toString()} < ${headerSize.toString()}`);
460
- for (let i = 0; i < numBlocks; i += 1) {
461
- const offset = 12 + i * 24;
462
- const compressedSize = buffer.readUInt32BE(offset);
463
- const decompressedSize = buffer.readUInt32BE(offset + 4);
464
- const hash = buffer.toString("hex", offset + 8, offset + 24);
465
- this.blocks.push({
466
- compressedSize,
467
- decompressedSize,
468
- hash
469
- });
415
+ if (res.statusCode === void 0 || res.statusCode < 200 || res.statusCode > 302) {
416
+ reject(new Error(`Failed to request ${url}, Status Code: ${res.statusCode?.toString() ?? "undefined"}`));
417
+ return;
470
418
  }
471
- this.processedOffset = headerSize;
419
+ const lengthText = res.headers["content-length"];
420
+ const length = lengthText !== void 0 ? parseInt(lengthText, 10) : 0;
421
+ const bar = showProgress === true && !Number.isNaN(length) && length >= 10485760 ? new cliProgress__default.SingleBar({ etaBuffer: 10240 }, cliProgress__default.Presets.shades_classic) : void 0;
422
+ bar?.start(length, 0);
423
+ const chunks = [];
424
+ res.on("data", (chunk) => {
425
+ bar?.increment(chunk.length);
426
+ chunks.push(chunk);
427
+ });
428
+ res.on("end", () => {
429
+ bar?.stop();
430
+ resolve(Buffer.concat(chunks));
431
+ });
432
+ res.on("error", (err) => {
433
+ bar?.stop();
434
+ reject(err);
435
+ });
436
+ }).on("error", reject).end();
437
+ });
438
+ const downloadFile = (prefixes, type, key, {
439
+ partialOffset,
440
+ partialLength,
441
+ showProgress,
442
+ showAttemptFail
443
+ } = {}) => {
444
+ const urls = prefixes.map((prefix) => `${prefix}/${type}/${formatCDNKey(key)}`);
445
+ return urls.reduce(
446
+ (prev, url, index) => prev.catch((err) => {
447
+ if (showAttemptFail === true && index > 0 && err instanceof Error) {
448
+ console.warn(`${( new Date()).toISOString()} [WARN]:`, err.message);
449
+ }
450
+ return requestData(url, { partialOffset, partialLength, showProgress });
451
+ }),
452
+ Promise.reject(new Error(""))
453
+ );
454
+ };
455
+ const getFileCache = async (file) => {
456
+ const integrity = await cacheIntegrity.get(file);
457
+ if (integrity !== void 0) {
458
+ try {
459
+ const buffer = await fs__default.readFile(path__default.resolve(CACHE_ROOT, file));
460
+ const hash = crypto__default.createHash("sha256").update(buffer).digest("hex");
461
+ if (hash === integrity) {
462
+ return buffer;
463
+ }
464
+ } catch {
465
+ }
466
+ }
467
+ return void 0;
468
+ };
469
+ const getDataFile = async (prefixes, key, type, buildCKey, {
470
+ name,
471
+ partialOffset,
472
+ partialLength,
473
+ showProgress,
474
+ showAttemptFail
475
+ } = {}) => {
476
+ const dir = type === "build" ? path__default.join(CACHE_DIRS[type], buildCKey) : CACHE_DIRS[type];
477
+ const file = name !== void 0 ? path__default.join(dir, name) : path__default.join(dir, key);
478
+ const cacheBuffer = await getFileCache(file);
479
+ if (cacheBuffer) {
480
+ if (name === void 0 && partialOffset !== void 0 && partialLength !== void 0) {
481
+ return cacheBuffer.subarray(partialOffset, partialOffset + partialLength);
482
+ }
483
+ return cacheBuffer;
484
+ }
485
+ const downloadBuffer = await downloadFile(prefixes, "data", key, {
486
+ partialOffset,
487
+ partialLength,
488
+ showProgress,
489
+ showAttemptFail
490
+ });
491
+ if (partialOffset === void 0 && partialLength === void 0 || name !== void 0) {
492
+ await fs__default.mkdir(path__default.resolve(CACHE_ROOT, dir), { recursive: true });
493
+ await fs__default.writeFile(path__default.resolve(CACHE_ROOT, file), downloadBuffer);
494
+ const hash = crypto__default.createHash("sha256").update(downloadBuffer).digest("hex");
495
+ await cacheIntegrity.set(file, hash);
496
+ }
497
+ return downloadBuffer;
498
+ };
499
+ const getConfigFile = async (prefixes, key, {
500
+ showProgress,
501
+ showAttemptFail
502
+ } = {}) => {
503
+ const downloadBuffer = await downloadFile(prefixes, "config", key, { showProgress, showAttemptFail });
504
+ return downloadBuffer.toString("utf-8");
505
+ };
506
+ const getProductVersions = async (region, product) => {
507
+ const url = `http://${region}.patch.battle.net:1119/${product}/versions`;
508
+ const headers = new Headers();
509
+ headers.set("User-Agent", USER_AGENT);
510
+ const res = await fetch(url, { headers });
511
+ return res.text();
512
+ };
513
+ const getProductCDNs = async (region, product) => {
514
+ const url = `http://${region}.patch.battle.net:1119/${product}/cdns`;
515
+ const headers = new Headers();
516
+ headers.set("User-Agent", USER_AGENT);
517
+ const res = await fetch(url, { headers });
518
+ return res.text();
519
+ };
520
+
521
+ const hashlittle2 = (key, pc = 0, pb = 0) => {
522
+ const { length } = key;
523
+ let offset = 0;
524
+ let a = 3735928559 + length + pc | 0;
525
+ let b = 3735928559 + length + pc | 0;
526
+ let c = 3735928559 + length + pc + pb | 0;
527
+ while (length - offset > 12) {
528
+ a += key.charCodeAt(offset + 0);
529
+ a += key.charCodeAt(offset + 1) << 8;
530
+ a += key.charCodeAt(offset + 2) << 16;
531
+ a += key.charCodeAt(offset + 3) << 24;
532
+ b += key.charCodeAt(offset + 4);
533
+ b += key.charCodeAt(offset + 5) << 8;
534
+ b += key.charCodeAt(offset + 6) << 16;
535
+ b += key.charCodeAt(offset + 7) << 24;
536
+ c += key.charCodeAt(offset + 8);
537
+ c += key.charCodeAt(offset + 9) << 8;
538
+ c += key.charCodeAt(offset + 10) << 16;
539
+ c += key.charCodeAt(offset + 11) << 24;
540
+ a -= c;
541
+ a ^= c << 4 | c >>> 28;
542
+ c = c + b | 0;
543
+ b -= a;
544
+ b ^= a << 6 | a >>> 26;
545
+ a = a + c | 0;
546
+ c -= b;
547
+ c ^= b << 8 | b >>> 24;
548
+ b = b + a | 0;
549
+ a -= c;
550
+ a ^= c << 16 | c >>> 16;
551
+ c = c + b | 0;
552
+ b -= a;
553
+ b ^= a << 19 | a >>> 13;
554
+ a = a + c | 0;
555
+ c -= b;
556
+ c ^= b << 4 | b >>> 28;
557
+ b = b + a | 0;
558
+ offset += 12;
472
559
  }
473
- processBlock(buffer, index, allowMissingKey) {
474
- const flag = buffer.readUInt8(0);
475
- switch (flag) {
476
- case 69: {
477
- let offset = 1;
478
- const keyNameLength = buffer.readUInt8(offset);
479
- offset += 1;
480
- const keyNameBE = buffer.toString("hex", offset, offset + keyNameLength);
481
- offset += keyNameLength;
482
- const ivLength = buffer.readUInt8(offset);
483
- offset += 1;
484
- const ivBuffer = buffer.subarray(offset, offset + ivLength);
485
- offset += ivLength;
486
- const encryptType = buffer.readUInt8(offset);
487
- offset += 1;
488
- assert__default(encryptType === ENC_TYPE_SALSA20, `[BLTE]: Invalid encrypt type: ${encryptType.toString(16).padStart(2, "0")} at block ${index.toString()}`);
489
- const keyName = [...keyNameBE.matchAll(/.{2}/g)].map((v) => v[0]).reverse().join("").toLowerCase();
490
- const key = this.keys.get(keyName);
491
- if (!key) {
492
- if (allowMissingKey) {
493
- return keyName;
494
- }
495
- throw new Error(`[BLTE]: Missing key: ${keyName} at block ${index.toString()}`);
496
- }
497
- const iv = new Uint8Array(8);
498
- for (let i = 0; i < 8; i += 1) {
499
- if (i < ivLength) {
500
- iv[i] = ivBuffer.readUInt8(i) ^ index >>> 8 * i & 255;
501
- } else {
502
- iv[i] = 0;
503
- }
504
- }
505
- const handler = new Salsa20(key, iv);
506
- const decrypted = handler.process(buffer.subarray(offset));
507
- if (allowMissingKey) {
508
- return this.processBlock(Buffer.from(decrypted.buffer), index, true);
509
- }
510
- return this.processBlock(Buffer.from(decrypted.buffer), index, false);
511
- }
512
- case 70:
513
- throw new Error(`[BLTE]: Frame (Recursive) block not supported at block ${index.toString()}`);
514
- case 78:
515
- return buffer.subarray(1);
516
- case 90:
517
- return zlib__default.inflateSync(buffer.subarray(1));
518
- default:
519
- throw new Error(`[BLTE]: Invalid block flag: ${flag.toString(16).padStart(2, "0")} at block ${index.toString()}`);
560
+ if (length - offset > 0) {
561
+ switch (length - offset) {
562
+ case 12:
563
+ c += key.charCodeAt(offset + 11) << 24;
564
+ case 11:
565
+ c += key.charCodeAt(offset + 10) << 16;
566
+ case 10:
567
+ c += key.charCodeAt(offset + 9) << 8;
568
+ case 9:
569
+ c += key.charCodeAt(offset + 8);
570
+ case 8:
571
+ b += key.charCodeAt(offset + 7) << 24;
572
+ case 7:
573
+ b += key.charCodeAt(offset + 6) << 16;
574
+ case 6:
575
+ b += key.charCodeAt(offset + 5) << 8;
576
+ case 5:
577
+ b += key.charCodeAt(offset + 4);
578
+ case 4:
579
+ a += key.charCodeAt(offset + 3) << 24;
580
+ case 3:
581
+ a += key.charCodeAt(offset + 2) << 16;
582
+ case 2:
583
+ a += key.charCodeAt(offset + 1) << 8;
584
+ case 1:
585
+ a += key.charCodeAt(offset + 0);
520
586
  }
587
+ c ^= b;
588
+ c -= b << 14 | b >>> 18;
589
+ a ^= c;
590
+ a -= c << 11 | c >>> 21;
591
+ b ^= a;
592
+ b -= a << 25 | a >>> 7;
593
+ c ^= b;
594
+ c -= b << 16 | b >>> 16;
595
+ a ^= c;
596
+ a -= c << 4 | c >>> 28;
597
+ b ^= a;
598
+ b -= a << 14 | a >>> 18;
599
+ c ^= b;
600
+ c -= b << 24 | b >>> 8;
521
601
  }
522
- processBytes(allowMissingKey = false, size = Infinity) {
523
- const missingKeyBlocks = [];
524
- while (this.processedBlock < this.blocks.length && size > this.buffer.byteLength) {
525
- const blockIndex = this.processedBlock;
526
- const block = this.blocks[blockIndex];
527
- const blockBuffer = this.blte.subarray(
528
- this.processedOffset,
529
- this.processedOffset + block.compressedSize
530
- );
531
- if (block.hash !== EMPTY_HASH) {
532
- const blockHash = crypto__default.createHash("md5").update(blockBuffer).digest("hex");
533
- assert__default(blockHash === block.hash, `[BLTE]: Invalid block hash: expected ${block.hash}, got ${blockHash}`);
534
- }
535
- if (allowMissingKey) {
536
- const buffer = this.processBlock(blockBuffer, blockIndex, allowMissingKey);
537
- if (buffer instanceof Buffer) {
538
- assert__default(
539
- buffer.byteLength === block.decompressedSize,
540
- `[BLTE]: Invalid decompressed size: expected ${block.decompressedSize.toString()}, got ${buffer.byteLength.toString()}`
541
- );
542
- this.buffer = Buffer.concat([this.buffer, buffer]);
543
- } else {
544
- missingKeyBlocks.push({
545
- offset: this.buffer.byteLength,
546
- size: block.decompressedSize,
547
- blockIndex,
548
- keyName: buffer
549
- });
550
- this.buffer = Buffer.concat([
551
- this.buffer,
552
- Buffer.alloc(block.decompressedSize)
553
- ]);
554
- }
555
- } else {
556
- const buffer = this.processBlock(blockBuffer, blockIndex, allowMissingKey);
557
- assert__default(
558
- buffer.byteLength === block.decompressedSize,
559
- `[BLTE]: Invalid decompressed size: expected ${block.decompressedSize.toString()}, got ${buffer.byteLength.toString()}`
560
- );
561
- this.buffer = Buffer.concat([this.buffer, buffer]);
602
+ return [c >>> 0, b >>> 0];
603
+ };
604
+ const getNameHash = (name) => {
605
+ const normalized = name.replace(/\//g, "\\").toUpperCase();
606
+ const [pc, pb] = hashlittle2(normalized);
607
+ return `${pc.toString(16).padStart(8, "0")}${pb.toString(16).padStart(8, "0")}`;
608
+ };
609
+
610
+ const VERSION_SUB_OFFSET = -12;
611
+ const CHECKSUM_SIZE_SUB_OFFSET = -5;
612
+ const BLOCK_SIZE_OFFSET = 3;
613
+ const OFFSET_BYTES_OFFSET = 4;
614
+ const SIZE_BYTES_OFFSET = 5;
615
+ const KEY_SIZE_OFFSET = 6;
616
+ const NUM_ELEMENTS_OFFSET = 8;
617
+ const CHECKSUM_OFFSET = 12;
618
+ const CHECKSUM_TRIES = [
619
+ 10,
620
+ 9,
621
+ 8,
622
+ 7,
623
+ 6,
624
+ 5,
625
+ 4,
626
+ 3,
627
+ 2,
628
+ 1,
629
+ 0
630
+ ];
631
+ const tryArchiveIndexChecksumSize = (buffer, cKey) => {
632
+ const res = CHECKSUM_TRIES.filter(
633
+ (index) => buffer.readUInt8(buffer.byteLength - index + CHECKSUM_SIZE_SUB_OFFSET) === index && buffer.readUInt8(buffer.byteLength - index + VERSION_SUB_OFFSET) === 1
634
+ );
635
+ if (res.length === 1) {
636
+ return res[0];
637
+ }
638
+ throw new Error(`Invalid checksum size: ${res.join(", ")} in ${cKey}`);
639
+ };
640
+ const parseArchiveIndex = (buffer, cKey) => {
641
+ const checksumSize = tryArchiveIndexChecksumSize(buffer, cKey);
642
+ const versionOffset = buffer.byteLength - checksumSize + VERSION_SUB_OFFSET;
643
+ const footerOffset = versionOffset - checksumSize;
644
+ const tocChecksum = buffer.toString("hex", footerOffset, versionOffset);
645
+ const version = buffer.readUInt8(versionOffset);
646
+ const blockSizeKB = buffer.readUInt8(versionOffset + BLOCK_SIZE_OFFSET);
647
+ const offsetBytes = buffer.readUInt8(versionOffset + OFFSET_BYTES_OFFSET);
648
+ const sizeBytes = buffer.readUInt8(versionOffset + SIZE_BYTES_OFFSET);
649
+ const keySize = buffer.readUInt8(versionOffset + KEY_SIZE_OFFSET);
650
+ const numElements = buffer.readUInt32LE(versionOffset + NUM_ELEMENTS_OFFSET);
651
+ const footerChecksum = buffer.toString("hex", versionOffset + CHECKSUM_OFFSET);
652
+ assert__default(version === 1, `Invalid version: ${version.toString()} in ${cKey}`);
653
+ const entrySize = keySize + offsetBytes + sizeBytes;
654
+ const blockSize = blockSizeKB * 1024;
655
+ const numBlocks = footerOffset / (blockSize + keySize + checksumSize);
656
+ const tocSize = (keySize + checksumSize) * numBlocks;
657
+ const toc = buffer.subarray(footerOffset - tocSize, footerOffset);
658
+ const footer = buffer.subarray(footerOffset);
659
+ const footerCheckBuffer = Buffer.concat([
660
+ buffer.subarray(versionOffset, buffer.byteLength - checksumSize),
661
+ Buffer.alloc(checksumSize)
662
+ ]);
663
+ const hash = crypto__default.createHash("md5").update(footer).digest("hex");
664
+ assert__default(hash === cKey, `Invalid footer hash in ${cKey}: expected ${cKey}, got ${hash}`);
665
+ const footerHash = crypto__default.createHash("md5").update(footerCheckBuffer).digest("hex").slice(0, checksumSize * 2);
666
+ assert__default(footerHash === footerChecksum, `Invalid footer checksum in ${cKey}: expected ${footerChecksum}, got ${footerHash}`);
667
+ const tocHash = crypto__default.createHash("md5").update(toc).digest("hex").slice(0, checksumSize * 2);
668
+ assert__default(tocHash === tocChecksum, `Invalid toc checksum in ${cKey}: expected ${tocChecksum}, got ${tocHash}`);
669
+ const result = /* @__PURE__ */ new Map();
670
+ for (let i = 0; i < numBlocks; i += 1) {
671
+ const lastEkey = toc.toString("hex", i * keySize, (i + 1) * keySize);
672
+ const blockChecksum = toc.toString("hex", numBlocks * keySize + i * checksumSize, numBlocks * keySize + (i + 1) * checksumSize);
673
+ const blockOffset = i * blockSize;
674
+ const blockHash = crypto__default.createHash("md5").update(buffer.subarray(i * blockSize, (i + 1) * blockSize)).digest("hex").slice(0, checksumSize * 2);
675
+ assert__default(blockChecksum === blockHash, `Invalid block hash in ${cKey} at ${i.toString()}: expected ${blockChecksum}, got ${blockHash}`);
676
+ let length = 0;
677
+ while (length < blockSize) {
678
+ const entryOffset = blockOffset + length * entrySize;
679
+ const eKey = buffer.toString("hex", entryOffset, entryOffset + keySize);
680
+ const size = buffer.readUIntBE(entryOffset + keySize, sizeBytes);
681
+ const offset = buffer.readUIntBE(entryOffset + keySize + sizeBytes, offsetBytes);
682
+ result.set(eKey, { key: cKey, size, offset });
683
+ length += 1;
684
+ if (eKey === lastEkey) {
685
+ break;
562
686
  }
563
- this.processedBlock += 1;
564
- this.processedOffset += block.compressedSize;
565
687
  }
566
- return allowMissingKey ? missingKeyBlocks : void 0;
567
688
  }
568
- }
689
+ assert__default(result.size === numElements, `Invalid number of elements: ${result.size.toString()} != ${numElements.toString()} in ${cKey}`);
690
+ return result;
691
+ };
692
+
693
+ const normalizeKey = (key) => key.split("-").map((part, index) => index === 0 ? part : `${part.charAt(0).toUpperCase()}${part.slice(1)}`).join("");
694
+ const parseConfig = (text) => {
695
+ const entries = {};
696
+ text.split(/\r?\n/).filter((line) => line.trim().length !== 0 && !line.startsWith("#")).forEach((line) => {
697
+ const match = /([^\s]+)\s?=\s?(.*)/.exec(line);
698
+ assert__default(match !== null, "Invalid token encountered parsing CDN config");
699
+ const [key, value] = match.slice(1);
700
+ entries[normalizeKey(key)] = value;
701
+ });
702
+ return entries;
703
+ };
704
+ const parseCDNConfig = (text) => parseConfig(text);
705
+ const parseBuildConfig = (text) => parseConfig(text);
569
706
 
570
707
  const ENC_MAGIC = 17742;
571
708
  const MAGIC_OFFSET = 0;
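
For orientation, here is a minimal sketch of how the two readers this hunk introduces or relocates are driven. Class names and signatures are taken from the bundled code above; whether they are re-exported from the package entry point is an assumption, and the buffer, key, and table-hash variables are placeholders.

    // Assumption: the bundle's internals are reachable from the package entry point.
    const { ADBReader, BLTEReader } = require('@rhyster/wow-casc-dbc');

    // Decode a BLTE-encoded blob. eKey is the encoding key the blob is stored under
    // (checked against an MD5 of the BLTE header); the keys map supplies Salsa20 keys
    // (16- or 32-byte buffers keyed by lowercase hex key name) for encrypted blocks.
    const blte = new BLTEReader(blteBuffer, eKey, new Map([[keyName, keyBytes]]));
    blte.processBytes();          // throws if a required encryption key is missing
    const payload = blte.buffer;  // decompressed / decrypted content

    // Parse a hotfix ADB (DBCache) blob and look up the entries of one table.
    const adb = new ADBReader(adbBuffer);
    console.log(adb.build);                          // build number from the ADB header
    const entries = adb.tableEntries.get(tableHash); // entries grouped by table hash
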
@@ -674,15 +811,31 @@ const parseEncodingFile = (inputBuffer, eKey, cKey) => {
674
811
  pagePointer += 5;
675
812
  eKey2FileSize.set(fileEKey, fileSize);
676
813
  }
677
- }
678
- return {
679
- eSpec,
680
- cKey2FileSize,
681
- cKey2EKey,
682
- eKey2ESpecIndex,
683
- eKey2FileSize
684
- };
814
+ }
815
+ return {
816
+ eSpec,
817
+ cKey2FileSize,
818
+ cKey2EKey,
819
+ eKey2ESpecIndex,
820
+ eKey2FileSize
821
+ };
822
+ };
823
+
824
+ const parseProductConfig = (text) => {
825
+ const lines = text.split(/\r?\n/);
826
+ const headers = lines[0].split("|").map((header) => header.split("!")[0].replace(" ", ""));
827
+ const entries = lines.filter((line, index) => index > 0 && line.trim().length !== 0 && !line.startsWith("#")).map((line) => {
828
+ const node = {};
829
+ const entryFields = line.split("|");
830
+ for (let i = 0, n = entryFields.length; i < n; i += 1) {
831
+ node[headers[i]] = entryFields[i];
832
+ }
833
+ return node;
834
+ });
835
+ return entries;
685
836
  };
837
+ const parseProductVersions = (text) => parseProductConfig(text);
838
+ const parseProductCDNs = (text) => parseProductConfig(text);
686
839
 
687
840
  const MFST_MAGIC = 1296454484;
688
841
  const ContentFlags = {
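
The product-config parser relocated here pairs with the getProductVersions / getProductCDNs helpers from the previous hunk. A rough sketch of the round trip, again assuming the helpers are reachable from the bundle; the region and product strings are only examples, and the field names come from the manifest's own header row.

    // Assumption: these helpers are exposed by the package entry point.
    const { getProductVersions, parseProductVersions } = require('@rhyster/wow-casc-dbc');

    (async () => {
      // GET http://us.patch.battle.net:1119/wow/versions (pipe-delimited manifest)
      const text = await getProductVersions('us', 'wow');
      // One object per data line, keyed by the header row (e.g. Region, BuildConfig).
      const rows = parseProductVersions(text);
      console.log(rows.find((row) => row.Region === 'us'));
    })();
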
@@ -809,99 +962,66 @@ const parseRootFile = (inputBuffer, eKey, cKey) => {
809
962
  return { fileDataID2CKey, nameHash2FileDataID };
810
963
  };
811
964
 
812
- const hashlittle2 = (key, pc = 0, pb = 0) => {
813
- const { length } = key;
814
- let offset = 0;
815
- let a = 3735928559 + length + pc | 0;
816
- let b = 3735928559 + length + pc | 0;
817
- let c = 3735928559 + length + pc + pb | 0;
818
- while (length - offset > 12) {
819
- a += key.charCodeAt(offset + 0);
820
- a += key.charCodeAt(offset + 1) << 8;
821
- a += key.charCodeAt(offset + 2) << 16;
822
- a += key.charCodeAt(offset + 3) << 24;
823
- b += key.charCodeAt(offset + 4);
824
- b += key.charCodeAt(offset + 5) << 8;
825
- b += key.charCodeAt(offset + 6) << 16;
826
- b += key.charCodeAt(offset + 7) << 24;
827
- c += key.charCodeAt(offset + 8);
828
- c += key.charCodeAt(offset + 9) << 8;
829
- c += key.charCodeAt(offset + 10) << 16;
830
- c += key.charCodeAt(offset + 11) << 24;
831
- a -= c;
832
- a ^= c << 4 | c >>> 28;
833
- c = c + b | 0;
834
- b -= a;
835
- b ^= a << 6 | a >>> 26;
836
- a = a + c | 0;
837
- c -= b;
838
- c ^= b << 8 | b >>> 24;
839
- b = b + a | 0;
840
- a -= c;
841
- a ^= c << 16 | c >>> 16;
842
- c = c + b | 0;
843
- b -= a;
844
- b ^= a << 19 | a >>> 13;
845
- a = a + c | 0;
846
- c -= b;
847
- c ^= b << 4 | b >>> 28;
848
- b = b + a | 0;
849
- offset += 12;
850
- }
851
- if (length - offset > 0) {
852
- switch (length - offset) {
853
- case 12:
854
- c += key.charCodeAt(offset + 11) << 24;
855
- case 11:
856
- c += key.charCodeAt(offset + 10) << 16;
857
- case 10:
858
- c += key.charCodeAt(offset + 9) << 8;
859
- case 9:
860
- c += key.charCodeAt(offset + 8);
861
- case 8:
862
- b += key.charCodeAt(offset + 7) << 24;
863
- case 7:
864
- b += key.charCodeAt(offset + 6) << 16;
865
- case 6:
866
- b += key.charCodeAt(offset + 5) << 8;
867
- case 5:
868
- b += key.charCodeAt(offset + 4);
869
- case 4:
870
- a += key.charCodeAt(offset + 3) << 24;
871
- case 3:
872
- a += key.charCodeAt(offset + 2) << 16;
873
- case 2:
874
- a += key.charCodeAt(offset + 1) << 8;
875
- case 1:
876
- a += key.charCodeAt(offset + 0);
965
+ const JEDEC = [
966
+ "B",
967
+ "KB",
968
+ "MB",
969
+ "GB",
970
+ "TB",
971
+ "PB",
972
+ "EB",
973
+ "ZB",
974
+ "YB"
975
+ ];
976
+ const formatFileSize = (input) => {
977
+ if (Number.isNaN(input))
978
+ return "";
979
+ let size = Number(input);
980
+ const isNegative = size < 0;
981
+ const result = [];
982
+ if (isNegative)
983
+ size = -size;
984
+ let exponent = Math.floor(Math.log(size) / Math.log(1024));
985
+ if (exponent < 0)
986
+ exponent = 0;
987
+ if (exponent > 8)
988
+ exponent = 8;
989
+ if (size === 0) {
990
+ result[0] = 0;
991
+ result[1] = JEDEC[exponent];
992
+ } else {
993
+ const val = size / 2 ** (exponent * 10);
994
+ result[0] = Number(val.toFixed(exponent > 0 ? 2 : 0));
995
+ if (result[0] === 1024 && exponent < 8) {
996
+ result[0] = 1;
997
+ exponent += 1;
877
998
  }
878
- c ^= b;
879
- c -= b << 14 | b >>> 18;
880
- a ^= c;
881
- a -= c << 11 | c >>> 21;
882
- b ^= a;
883
- b -= a << 25 | a >>> 7;
884
- c ^= b;
885
- c -= b << 16 | b >>> 16;
886
- a ^= c;
887
- a -= c << 4 | c >>> 28;
888
- b ^= a;
889
- b -= a << 14 | a >>> 18;
890
- c ^= b;
891
- c -= b << 24 | b >>> 8;
999
+ result[1] = JEDEC[exponent];
892
1000
  }
893
- return [c >>> 0, b >>> 0];
1001
+ if (isNegative)
1002
+ result[0] = -result[0];
1003
+ return result.join(" ");
894
1004
  };
895
- const getNameHash = (name) => {
896
- const normalized = name.replace(/\//g, "\\").toUpperCase();
897
- const [pc, pb] = hashlittle2(normalized);
898
- return `${pc.toString(16).padStart(8, "0")}${pb.toString(16).padStart(8, "0")}`;
1005
+ const resolveCDNHost = async (hosts, path) => {
1006
+ const latencies = await Promise.allSettled(
1007
+ hosts.map(async (host) => {
1008
+ const start = Date.now();
1009
+ await fetch(`http://${host}/`);
1010
+ const end = Date.now();
1011
+ return {
1012
+ host,
1013
+ latency: end - start
1014
+ };
1015
+ })
1016
+ );
1017
+ const resolved = latencies.filter((result) => result.status === "fulfilled").map((result) => result.value).sort((a, b) => a.latency - b.latency);
1018
+ return resolved.map((result) => `http://${result.host}/${path}`);
899
1019
  };
900
1020
 
901
- var __defProp$3 = Object.defineProperty;
902
- var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
903
- var __publicField$3 = (obj, key, value) => {
904
- __defNormalProp$3(obj, typeof key !== "symbol" ? key + "" : key, value);
1021
+ var __defProp$2 = Object.defineProperty;
1022
+ var __defNormalProp$2 = (obj, key, value) => key in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
1023
+ var __publicField$2 = (obj, key, value) => {
1024
+ __defNormalProp$2(obj, typeof key !== "symbol" ? key + "" : key, value);
905
1025
  return value;
906
1026
  };
907
1027
  const WDC5_MAGIC = 1464091445;
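
The formatFileSize helper added in this hunk picks a JEDEC unit by powers of 1024 and rolls a rounded 1024.00 up to the next unit. A minimal standalone sketch of the same rounding behaviour (illustrative only, not part of the package's exported API; negative and NaN handling omitted; assumes plain Node.js):

// Illustrative sketch mirroring the internal formatFileSize logic added above.
const UNITS = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"];
const humanSize = (size) => {
    if (size === 0) return "0 B";
    let exponent = Math.min(Math.max(Math.floor(Math.log(size) / Math.log(1024)), 0), 8);
    let value = Number((size / 2 ** (exponent * 10)).toFixed(exponent > 0 ? 2 : 0));
    if (value === 1024 && exponent < 8) {
        // e.g. 1048575 bytes rounds to 1024.00 KB, which rolls up to 1 MB
        value = 1;
        exponent += 1;
    }
    return `${value.toString()} ${UNITS[exponent]}`;
};
console.log(humanSize(1536));    // "1.5 KB"
console.log(humanSize(1048575)); // "1 MB" (rolled up from 1024.00 KB)
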
@@ -924,17 +1044,17 @@ const readBitpackedValue = (buffer, fieldOffsetBits, fieldSizeBits, signed = fal
924
1044
  };
925
1045
  class WDCReader {
926
1046
  constructor(buffer, blocks = [], adb) {
927
- __publicField$3(this, "tableHash");
928
- __publicField$3(this, "layoutHash");
929
- __publicField$3(this, "locale");
930
- __publicField$3(this, "isNormal");
931
- __publicField$3(this, "hasRelationshipData");
932
- __publicField$3(this, "fields");
933
- __publicField$3(this, "fieldsInfo");
934
- __publicField$3(this, "rows", /* @__PURE__ */ new Map());
935
- __publicField$3(this, "relationships", /* @__PURE__ */ new Map());
936
- __publicField$3(this, "copyTable", /* @__PURE__ */ new Map());
937
- __publicField$3(this, "hotfixes", /* @__PURE__ */ new Map());
1047
+ __publicField$2(this, "tableHash");
1048
+ __publicField$2(this, "layoutHash");
1049
+ __publicField$2(this, "locale");
1050
+ __publicField$2(this, "isNormal");
1051
+ __publicField$2(this, "hasRelationshipData");
1052
+ __publicField$2(this, "fields");
1053
+ __publicField$2(this, "fieldsInfo");
1054
+ __publicField$2(this, "rows", /* @__PURE__ */ new Map());
1055
+ __publicField$2(this, "relationships", /* @__PURE__ */ new Map());
1056
+ __publicField$2(this, "copyTable", /* @__PURE__ */ new Map());
1057
+ __publicField$2(this, "hotfixes", /* @__PURE__ */ new Map());
938
1058
  const magic = buffer.readUInt32BE(0);
939
1059
  const fieldCount = buffer.readUInt32LE(140);
940
1060
  const recordSize = buffer.readUInt32LE(144);
@@ -1260,7 +1380,7 @@ class WDCReader {
1260
1380
  data: value
1261
1381
  };
1262
1382
  }
1263
- if (!recordID && fieldIndex === idIndex) {
1383
+ if (recordID === void 0 && fieldIndex === idIndex) {
1264
1384
  recordID = value;
1265
1385
  }
1266
1386
  const fieldOffset = fieldInfo.fieldOffsetBits >>> 3;
@@ -1272,7 +1392,7 @@ class WDCReader {
1272
1392
  };
1273
1393
  }
1274
1394
  case "commonData": {
1275
- const value = (recordID ? commonData.get(fieldIndex)?.get(recordID) : void 0) ?? fieldInfo.defaultValue;
1395
+ const value = (recordID !== void 0 ? commonData.get(fieldIndex)?.get(recordID) : void 0) ?? fieldInfo.defaultValue;
1276
1396
  return {
1277
1397
  type: "commonData",
1278
1398
  data: value
@@ -1307,7 +1427,7 @@ class WDCReader {
1307
1427
  assert__default(fieldPalletData, `No pallet data for field ${fieldIndex.toString()}`);
1308
1428
  value = fieldPalletData[value];
1309
1429
  }
1310
- if (!recordID && fieldIndex === idIndex) {
1430
+ if (recordID === void 0 && fieldIndex === idIndex) {
1311
1431
  recordID = value;
1312
1432
  }
1313
1433
  return {
@@ -1322,7 +1442,7 @@ class WDCReader {
1322
1442
  assert__default(recordID !== void 0, "No record ID found");
1323
1443
  this.rows.set(recordID, recordData);
1324
1444
  const foreignID = relationshipMap.get(recordIndex);
1325
- if (foreignID) {
1445
+ if (foreignID !== void 0) {
1326
1446
  this.relationships.set(recordID, foreignID);
1327
1447
  }
1328
1448
  } else {
@@ -1333,7 +1453,7 @@ class WDCReader {
1333
1453
  assert__default(recordID !== void 0, "No record ID found");
1334
1454
  this.rows.set(recordID, recordData);
1335
1455
  const foreignID = relationshipMap.get(recordIndex);
1336
- if (foreignID) {
1456
+ if (foreignID !== void 0) {
1337
1457
  this.relationships.set(recordID, foreignID);
1338
1458
  }
1339
1459
  }
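
The recurring change in the hunks above and below replaces truthiness checks with explicit comparisons against void 0: a record ID or foreign key of 0 is falsy but still a valid value, so the old checks could silently drop it. A minimal illustration of the difference, with a hypothetical map standing in for relationshipMap:

// Hypothetical data: record index 3 maps to foreign ID 0.
const relationshipMap = new Map([[3, 0]]);
const foreignID = relationshipMap.get(3);
if (foreignID) {
    // never reached: 0 is falsy, so a truthiness check discards a real relationship
}
if (foreignID !== void 0) {
    // reached: the explicit check keeps foreign ID 0 and only skips missing entries
}
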
@@ -1377,70 +1497,24 @@ class WDCReader {
1377
1497
  }
1378
1498
  }
1379
1499
  const dst = this.copyTable.get(id);
1380
- if (dst) {
1500
+ if (dst !== void 0) {
1381
1501
  return this.rows.get(dst);
1382
1502
  }
1383
1503
  return this.rows.get(id);
1384
1504
  }
1385
1505
  getRowRelationship(id) {
1386
1506
  const dst = this.copyTable.get(id);
1387
- if (dst) {
1507
+ if (dst !== void 0) {
1388
1508
  return this.relationships.get(dst);
1389
1509
  }
1390
1510
  return this.relationships.get(id);
1391
1511
  }
1392
1512
  }
1393
1513
 
1394
- const resolveCDNHost = async (hosts, path) => {
1395
- const latencies = await Promise.allSettled(
1396
- hosts.map(async (host) => {
1397
- const start = Date.now();
1398
- await fetch(`http://${host}/`);
1399
- const end = Date.now();
1400
- return {
1401
- host,
1402
- latency: end - start
1403
- };
1404
- })
1405
- );
1406
- const resolved = latencies.filter((result) => result.status === "fulfilled").map((result) => result.value).sort((a, b) => a.latency - b.latency);
1407
- return resolved.map((result) => `http://${result.host}/${path}`);
1408
- };
1409
- const JEDEC = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"];
1410
- const formatFileSize = (input) => {
1411
- if (Number.isNaN(input))
1412
- return "";
1413
- let size = Number(input);
1414
- const isNegative = size < 0;
1415
- const result = [];
1416
- if (isNegative)
1417
- size = -size;
1418
- let exponent = Math.floor(Math.log(size) / Math.log(1024));
1419
- if (exponent < 0)
1420
- exponent = 0;
1421
- if (exponent > 8)
1422
- exponent = 8;
1423
- if (size === 0) {
1424
- result[0] = 0;
1425
- result[1] = JEDEC[exponent];
1426
- } else {
1427
- const val = size / 2 ** (exponent * 10);
1428
- result[0] = Number(val.toFixed(exponent > 0 ? 2 : 0));
1429
- if (result[0] === 1024 && exponent < 8) {
1430
- result[0] = 1;
1431
- exponent += 1;
1432
- }
1433
- result[1] = JEDEC[exponent];
1434
- }
1435
- if (isNegative)
1436
- result[0] = -result[0];
1437
- return result.join(" ");
1438
- };
1439
-
1440
- var __defProp$2 = Object.defineProperty;
1441
- var __defNormalProp$2 = (obj, key, value) => key in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
1442
- var __publicField$2 = (obj, key, value) => {
1443
- __defNormalProp$2(obj, typeof key !== "symbol" ? key + "" : key, value);
1514
+ var __defProp$1 = Object.defineProperty;
1515
+ var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
1516
+ var __publicField$1 = (obj, key, value) => {
1517
+ __defNormalProp$1(obj, typeof key !== "symbol" ? key + "" : key, value);
1444
1518
  return value;
1445
1519
  };
1446
1520
  var LogLevel = /* @__PURE__ */ ((LogLevel2) => {
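
resolveCDNHost itself is unchanged by this hunk; it only moves earlier in the bundle. For reference, the same latency-ranking idea as a standalone sketch (hypothetical host names and path; assumes Node.js 18+ where a global fetch is available):

// Probe each host once and rank reachable hosts by round-trip time,
// mirroring the resolveCDNHost helper shown earlier in this diff.
const probeHost = async (host) => {
    const start = Date.now();
    await fetch(`http://${host}/`); // any response counts; rejected probes are filtered out below
    return { host, latency: Date.now() - start };
};
const rankCDNHosts = async (hosts, path) => {
    const settled = await Promise.allSettled(hosts.map(probeHost));
    return settled
        .filter((result) => result.status === "fulfilled")
        .map((result) => result.value)
        .sort((a, b) => a.latency - b.latency)
        .map((result) => `http://${result.host}/${path}`);
};
// rankCDNHosts(["cdn.example.net", "cdn.example.org"], "some/path").then(console.log);
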
@@ -1450,16 +1524,21 @@ var LogLevel = /* @__PURE__ */ ((LogLevel2) => {
1450
1524
  LogLevel2[LogLevel2["debug"] = 3] = "debug";
1451
1525
  return LogLevel2;
1452
1526
  })(LogLevel || {});
1453
- const textLogLevel = ["ERROR", "WARN", "INFO", "DEBUG"];
1527
+ const textLogLevel = [
1528
+ "ERROR",
1529
+ "WARN",
1530
+ "INFO",
1531
+ "DEBUG"
1532
+ ];
1454
1533
  class CASCClient {
1455
1534
  constructor(region, product, version, logLevel = 2 /* info */) {
1456
- __publicField$2(this, "region");
1457
- __publicField$2(this, "product");
1458
- __publicField$2(this, "version");
1459
- __publicField$2(this, "name2FileDataID", /* @__PURE__ */ new Map());
1460
- __publicField$2(this, "keys", /* @__PURE__ */ new Map());
1461
- __publicField$2(this, "preload");
1462
- __publicField$2(this, "logLevel");
1535
+ __publicField$1(this, "region");
1536
+ __publicField$1(this, "product");
1537
+ __publicField$1(this, "version");
1538
+ __publicField$1(this, "name2FileDataID", /* @__PURE__ */ new Map());
1539
+ __publicField$1(this, "keys", /* @__PURE__ */ new Map());
1540
+ __publicField$1(this, "preload");
1541
+ __publicField$1(this, "logLevel");
1463
1542
  this.region = region;
1464
1543
  this.product = product;
1465
1544
  this.version = version;
@@ -1475,7 +1554,7 @@ class CASCClient {
1475
1554
  if (level <= 0 /* error */) {
1476
1555
  console.error(`${( new Date()).toISOString()} [${textLogLevel[level]}]:`, message);
1477
1556
  } else {
1478
- console.log(`${( new Date()).toISOString()} [${textLogLevel[level]}]:`, message);
1557
+ console.info(`${( new Date()).toISOString()} [${textLogLevel[level]}]:`, message);
1479
1558
  }
1480
1559
  }
1481
1560
  }
@@ -1554,7 +1633,7 @@ class CASCClient {
1554
1633
  this.log(2 /* info */, "Loading root table...");
1555
1634
  const rootCKey = buildConfig.root;
1556
1635
  const rootEKeys = encoding.cKey2EKey.get(rootCKey);
1557
- assert__default(rootEKeys, "Failing to find EKey for root table.");
1636
+ assert__default(rootEKeys !== void 0, "Failing to find EKey for root table.");
1558
1637
  const rootEKey = typeof rootEKeys === "string" ? rootEKeys : rootEKeys[0];
1559
1638
  const rootBuffer = await getDataFile(prefixes, rootEKey, "build", this.version.BuildConfig, {
1560
1639
  name: "root",
@@ -1608,8 +1687,8 @@ class CASCClient {
1608
1687
  const lookupRow = lookupReader.rows.get(keyID);
1609
1688
  const keyRow = keysReader.rows.get(keyID);
1610
1689
  if (keyRow) {
1611
- assert__default(Array.isArray(lookupRow) && lookupRow[0], `Invalid TACTKeyLookup table row at id ${keyID.toString()}`);
1612
- assert__default(Array.isArray(keyRow) && keyRow[0], `Invalid TACTKey table row at id ${keyID.toString()}`);
1690
+ assert__default(Array.isArray(lookupRow) && lookupRow.length > 0, `Invalid TACTKeyLookup table row at id ${keyID.toString()}`);
1691
+ assert__default(Array.isArray(keyRow) && keyRow.length > 0, `Invalid TACTKey table row at id ${keyID.toString()}`);
1613
1692
  const keyName = lookupRow[0].data.toString(16).padStart(16, "0");
1614
1693
  const keyHexLE = keyRow[0].data.toString(16).padStart(32, "0");
1615
1694
  assert__default(keyName.length === 16, `Invalid keyName length: ${keyName.length.toString()}`);
@@ -1661,7 +1740,7 @@ class CASCClient {
1661
1740
  assert__default(this.preload, "Client not initialized");
1662
1741
  const { prefixes, encoding, archives } = this.preload;
1663
1742
  const eKeys = encoding.cKey2EKey.get(cKey);
1664
- assert__default(eKeys, `Failing to find encoding key for ${cKey}`);
1743
+ assert__default(eKeys !== void 0, `Failing to find encoding key for ${cKey}`);
1665
1744
  const eKey = typeof eKeys === "string" ? eKeys : eKeys[0];
1666
1745
  const archive = archives.get(eKey);
1667
1746
  const blte = archive ? await getDataFile(prefixes, archive.key, "data", this.version.BuildConfig, {
@@ -1702,14 +1781,17 @@ class CASCClient {
1702
1781
  };
1703
1782
  }
1704
1783
  }
1705
- __publicField$2(CASCClient, "LocaleFlags", LocaleFlags);
1706
- __publicField$2(CASCClient, "ContentFlags", ContentFlags);
1707
- __publicField$2(CASCClient, "LogLevel", LogLevel);
1784
+ // eslint-disable-next-line @typescript-eslint/naming-convention
1785
+ __publicField$1(CASCClient, "LocaleFlags", LocaleFlags);
1786
+ // eslint-disable-next-line @typescript-eslint/naming-convention
1787
+ __publicField$1(CASCClient, "ContentFlags", ContentFlags);
1788
+ // eslint-disable-next-line @typescript-eslint/naming-convention
1789
+ __publicField$1(CASCClient, "LogLevel", LogLevel);
1708
1790
 
1709
- var __defProp$1 = Object.defineProperty;
1710
- var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
1711
- var __publicField$1 = (obj, key, value) => {
1712
- __defNormalProp$1(obj, typeof key !== "symbol" ? key + "" : key, value);
1791
+ var __defProp = Object.defineProperty;
1792
+ var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
1793
+ var __publicField = (obj, key, value) => {
1794
+ __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
1713
1795
  return value;
1714
1796
  };
1715
1797
  const PATTERN_COLUMN = /^(int|float|locstring|string)(<[^:]+::[^>]+>)?\s([^\s]+)/;
@@ -1754,10 +1836,10 @@ const getCastBuffer = (value, srcSize, dstSize) => {
1754
1836
  };
1755
1837
  class DBDParser {
1756
1838
  constructor(wdc) {
1757
- __publicField$1(this, "wdc");
1758
- __publicField$1(this, "definitions", /* @__PURE__ */ new Map());
1759
- __publicField$1(this, "columns", []);
1760
- __publicField$1(this, "cache", /* @__PURE__ */ new Map());
1839
+ __publicField(this, "wdc");
1840
+ __publicField(this, "definitions", /* @__PURE__ */ new Map());
1841
+ __publicField(this, "columns", []);
1842
+ __publicField(this, "cache", /* @__PURE__ */ new Map());
1761
1843
  this.wdc = wdc;
1762
1844
  }
1763
1845
  async init() {
@@ -1765,7 +1847,7 @@ class DBDParser {
1765
1847
  const manifests = await (await fetch(manifestsURL)).json();
1766
1848
  const tableHashHex = this.wdc.tableHash.toString(16).padStart(8, "0").toLowerCase();
1767
1849
  const manifest = manifests.find((v) => v.tableHash.toLowerCase() === tableHashHex);
1768
- assert__default(manifest?.tableName, `No manifest found for table hash ${tableHashHex}`);
1850
+ assert__default(manifest?.tableName !== void 0, `No manifest found for table hash ${tableHashHex}`);
1769
1851
  const url = `https://raw.githubusercontent.com/wowdev/WoWDBDefs/master/definitions/${manifest.tableName}.dbd`;
1770
1852
  const text = await (await fetch(url)).text();
1771
1853
  const lines = text.split("\n").map((v) => v.trim());
@@ -1781,7 +1863,7 @@ class DBDParser {
1781
1863
  assert__default(columnsChunk?.[0] === "COLUMNS", "No column definitions found");
1782
1864
  columnsChunk.shift();
1783
1865
  columnsChunk.forEach((line) => {
1784
- const match = line.match(PATTERN_COLUMN);
1866
+ const match = PATTERN_COLUMN.exec(line);
1785
1867
  if (match) {
1786
1868
  const [, type, , name] = match;
1787
1869
  this.definitions.set(name.replace("?", ""), type);
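
The .match() to .exec() swaps in this and the following hunks are behaviour-preserving: for a non-global, non-sticky pattern such as PATTERN_COLUMN, String.prototype.match and RegExp.prototype.exec return the same match array. A small check, using a simplified stand-in pattern for illustration:

// Simplified stand-in for PATTERN_COLUMN; both call styles yield the same groups.
const PATTERN = /^(int|float|locstring|string)\s([^\s]+)/;
const line = "int ID";
console.log(line.match(PATTERN)?.[2]); // "ID"
console.log(PATTERN.exec(line)?.[2]);  // "ID"
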
@@ -1789,7 +1871,7 @@ class DBDParser {
1789
1871
  });
1790
1872
  const layoutHashHex = this.wdc.layoutHash.toString(16).padStart(8, "0").toLowerCase();
1791
1873
  const versionChunk = chunks.find((chunk) => chunk.find((line) => {
1792
- const layoutsMatch = line.match(PATTERN_LAYOUT);
1874
+ const layoutsMatch = PATTERN_LAYOUT.exec(line);
1793
1875
  const layouts = layoutsMatch?.[1].split(",").map((v) => v.trim().toLowerCase());
1794
1876
  return layouts?.includes(layoutHashHex);
1795
1877
  }));
@@ -1798,17 +1880,27 @@ class DBDParser {
1798
1880
  if (line.startsWith("LAYOUT") || line.startsWith("BUILD") || line.startsWith("COMMENT")) {
1799
1881
  return;
1800
1882
  }
1801
- const match = line.match(PATTERN_FIELD);
1883
+ const match = PATTERN_FIELD.exec(line);
1802
1884
  if (match) {
1803
- const [, , annotationsText, name, , unsigned, sizeText, , arraySizeText] = match;
1885
+ const [
1886
+ ,
1887
+ ,
1888
+ annotationsText,
1889
+ name,
1890
+ ,
1891
+ unsigned,
1892
+ sizeText,
1893
+ ,
1894
+ arraySizeText
1895
+ ] = match;
1804
1896
  const type = this.definitions.get(name);
1805
- assert__default(type, `No type found for column ${name}`);
1806
- const annotations = annotationsText ? annotationsText.split(",").map((v) => v.trim()) : void 0;
1897
+ assert__default(type !== void 0, `No type found for column ${name}`);
1898
+ const annotations = annotationsText ? annotationsText.split(",").map((v) => v.trim()) : [];
1807
1899
  const size = sizeText ? parseInt(sizeText, 10) : void 0;
1808
1900
  const arraySize = arraySizeText ? parseInt(arraySizeText, 10) : void 0;
1809
- const isID = !!annotations?.includes("id");
1810
- const isInline = !annotations?.includes("noninline");
1811
- const isRelation = !!annotations?.includes("relation");
1901
+ const isID = !!annotations.includes("id");
1902
+ const isInline = !annotations.includes("noninline");
1903
+ const isRelation = !!annotations.includes("relation");
1812
1904
  const isSigned = !unsigned;
1813
1905
  this.columns.push({
1814
1906
  name,
@@ -1849,18 +1941,18 @@ class DBDParser {
1849
1941
  fieldIndex += 1;
1850
1942
  }
1851
1943
  } else if (column.isInline) {
1944
+ assert__default(row.length > fieldIndex, `No value found for column ${column.name}`);
1852
1945
  const cell = row[fieldIndex];
1853
- assert__default(cell, `No value found for column ${column.name}`);
1854
1946
  const fieldInfo = this.wdc.fieldsInfo[fieldIndex];
1855
1947
  const srcSigned = fieldInfo.storageType === "bitpackedSigned";
1856
1948
  const srcSize = fieldInfo.storageType === "none" || fieldInfo.storageType === "bitpacked" || fieldInfo.storageType === "bitpackedSigned" ? Math.ceil(fieldInfo.fieldSizeBits / 8) : 4;
1857
- const dstSize = column.size ? Math.ceil(column.size / 8) : void 0;
1949
+ const dstSize = column.size !== void 0 ? Math.ceil(column.size / 8) : void 0;
1858
1950
  if (cell.type === "bitpackedArray") {
1859
1951
  data[column.name] = cell.data.map((v) => {
1860
1952
  if (column.type === "float") {
1861
1953
  return castFloat(v, srcSize, srcSigned);
1862
1954
  }
1863
- if (dstSize) {
1955
+ if (dstSize !== void 0) {
1864
1956
  return castIntegerBySize(
1865
1957
  v,
1866
1958
  srcSize,
@@ -1878,7 +1970,7 @@ class DBDParser {
1878
1970
  data[column.name] = cell.string;
1879
1971
  }
1880
1972
  } else if (column.type === "float") {
1881
- if (column.arraySize) {
1973
+ if (column.arraySize !== void 0) {
1882
1974
  const castBuffer = getCastBuffer(
1883
1975
  typeof cell.data === "number" ? BigInt(cell.data) : cell.data,
1884
1976
  srcSize,
@@ -1895,8 +1987,8 @@ class DBDParser {
1895
1987
  data[column.name] = castFloat(cell.data, srcSize, srcSigned);
1896
1988
  }
1897
1989
  } else if (column.type === "int") {
1898
- if (column.arraySize) {
1899
- assert__default(dstSize, `Missing size for int array column ${column.name}`);
1990
+ if (column.arraySize !== void 0) {
1991
+ assert__default(dstSize !== void 0, `Missing size for int array column ${column.name}`);
1900
1992
  const castBuffer = getCastBuffer(
1901
1993
  typeof cell.data === "number" ? BigInt(cell.data) : cell.data,
1902
1994
  srcSize,
@@ -1924,7 +2016,7 @@ class DBDParser {
1924
2016
  column.isSigned
1925
2017
  );
1926
2018
  } else {
1927
- assert__default(!column.size || column.size === 64, `Unexpected size ${column.size?.toString() ?? ""} for column ${column.name}`);
2019
+ assert__default(column.size === void 0 || column.size === 64, `Unexpected size ${column.size?.toString() ?? ""} for column ${column.name}`);
1928
2020
  if (srcSigned !== column.isSigned) {
1929
2021
  data[column.name] = castBigInt64(
1930
2022
  cell.data,
@@ -1978,7 +2070,7 @@ class DBDParser {
1978
2070
  if (fieldIndex + 1 < this.wdc.fields.length) {
1979
2071
  count = Math.max((nextField.position - currField.position) / size, 1);
1980
2072
  } else {
1981
- count = column.arraySize ? (buffer.byteLength - offset) / size : 1;
2073
+ count = column.arraySize !== void 0 ? (buffer.byteLength - offset) / size : 1;
1982
2074
  }
1983
2075
  for (let i = 0; i < count; i += 1) {
1984
2076
  if (column.type === "float") {
@@ -2010,57 +2102,6 @@ class DBDParser {
2010
2102
  }
2011
2103
  }
2012
2104
 
2013
- var __defProp = Object.defineProperty;
2014
- var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
2015
- var __publicField = (obj, key, value) => {
2016
- __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
2017
- return value;
2018
- };
2019
- const ADB_MAGIC = 1481004104;
2020
- class ADBReader {
2021
- constructor(buffer) {
2022
- __publicField(this, "build");
2023
- __publicField(this, "entries", []);
2024
- __publicField(this, "tableEntries", /* @__PURE__ */ new Map());
2025
- const magic = buffer.readUInt32BE(0);
2026
- assert__default(magic === ADB_MAGIC, `[ADB]: Invalid magic: ${magic.toString(16).padStart(8, "0")}`);
2027
- const version = buffer.readUInt32LE(4);
2028
- assert__default(version === 9, `[ADB]: Invalid version: ${version.toString()}`);
2029
- const build = buffer.readUInt32LE(8);
2030
- this.build = build;
2031
- let pointer = 44;
2032
- while (pointer < buffer.byteLength) {
2033
- const offset = pointer;
2034
- const entryMagic = buffer.readUInt32BE(offset);
2035
- assert__default(entryMagic === ADB_MAGIC, `[ADB]: Invalid entry magic: ${magic.toString(16).padStart(8, "0")}`);
2036
- const regionID = buffer.readInt32LE(offset + 4);
2037
- const pushID = buffer.readInt32LE(offset + 8);
2038
- const uniqueID = buffer.readUInt32LE(offset + 12);
2039
- const tableHash = buffer.readUInt32LE(offset + 16);
2040
- const recordID = buffer.readUInt32LE(offset + 20);
2041
- const dataSize = buffer.readUInt32LE(offset + 24);
2042
- const recordState = buffer.readUInt32LE(offset + 28);
2043
- const data = buffer.subarray(offset + 32, offset + 32 + dataSize);
2044
- const entry = {
2045
- regionID,
2046
- pushID,
2047
- uniqueID,
2048
- tableHash,
2049
- recordID,
2050
- dataSize,
2051
- recordState,
2052
- data
2053
- };
2054
- this.entries.push(entry);
2055
- if (!this.tableEntries.has(tableHash)) {
2056
- this.tableEntries.set(tableHash, []);
2057
- }
2058
- this.tableEntries.get(tableHash)?.push(entry);
2059
- pointer += 32 + dataSize;
2060
- }
2061
- }
2062
- }
2063
-
2064
2105
  exports.ADBReader = ADBReader;
2065
2106
  exports.CASCClient = CASCClient;
2066
2107
  exports.DBDParser = DBDParser;