@rhyster/wow-casc-dbc 2.6.19 → 2.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/adb.d.ts +17 -0
- package/dist/adb.d.ts.map +1 -0
- package/dist/blte.d.ts +25 -0
- package/dist/blte.d.ts.map +1 -0
- package/dist/client.d.ts +84 -0
- package/dist/client.d.ts.map +1 -0
- package/dist/dbd.d.ts +26 -0
- package/dist/dbd.d.ts.map +1 -0
- package/dist/fetcher.d.ts +21 -0
- package/dist/fetcher.d.ts.map +1 -0
- package/dist/index.cjs +1 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.ts +9 -261
- package/dist/index.d.ts.map +1 -0
- package/dist/index.mjs +1 -0
- package/dist/index.mjs.map +1 -0
- package/dist/jenkins96.d.ts +3 -0
- package/dist/jenkins96.d.ts.map +1 -0
- package/dist/parsers/archiveIndex.d.ts +9 -0
- package/dist/parsers/archiveIndex.d.ts.map +1 -0
- package/dist/parsers/config.d.ts +40 -0
- package/dist/parsers/config.d.ts.map +1 -0
- package/dist/parsers/encodingFile.d.ts +11 -0
- package/dist/parsers/encodingFile.d.ts.map +1 -0
- package/dist/parsers/productConfig.d.ts +21 -0
- package/dist/parsers/productConfig.d.ts.map +1 -0
- package/dist/parsers/rootFile.d.ts +45 -0
- package/dist/parsers/rootFile.d.ts.map +1 -0
- package/dist/salsa20.d.ts +14 -0
- package/dist/salsa20.d.ts.map +1 -0
- package/dist/store.d.ts +9 -0
- package/dist/store.d.ts.map +1 -0
- package/dist/test/salsa20.test.d.ts +2 -0
- package/dist/test/salsa20.test.d.ts.map +1 -0
- package/dist/utils.d.ts +3 -0
- package/dist/utils.d.ts.map +1 -0
- package/dist/wdc.d.ts +104 -0
- package/dist/wdc.d.ts.map +1 -0
- package/package.json +4 -3
- package/src/adb.ts +70 -0
- package/src/blte.ts +220 -0
- package/src/client.ts +411 -0
- package/src/dbd.ts +427 -0
- package/src/fetcher.ts +223 -0
- package/src/index.ts +44 -0
- package/src/jenkins96.ts +75 -0
- package/src/parsers/archiveIndex.ts +119 -0
- package/src/parsers/config.ts +75 -0
- package/src/parsers/encodingFile.ts +159 -0
- package/src/parsers/productConfig.ts +57 -0
- package/src/parsers/rootFile.ts +172 -0
- package/src/salsa20.ts +143 -0
- package/src/store.ts +37 -0
- package/src/test/salsa20.test.ts +522 -0
- package/src/utils.ts +77 -0
- package/src/wdc.ts +788 -0
package/src/jenkins96.ts
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
/* eslint-disable no-bitwise */
|
|
2
|
+
|
|
3
|
+
const hashlittle2 = (key: string, pc = 0, pb = 0): [number, number] => {
|
|
4
|
+
const { length } = key;
|
|
5
|
+
let offset = 0;
|
|
6
|
+
|
|
7
|
+
let a = 0xdeadbeef + length + pc | 0;
|
|
8
|
+
let b = 0xdeadbeef + length + pc | 0;
|
|
9
|
+
let c = 0xdeadbeef + length + pc + pb | 0;
|
|
10
|
+
|
|
11
|
+
while (length - offset > 12) {
|
|
12
|
+
a += key.charCodeAt(offset + 0);
|
|
13
|
+
a += key.charCodeAt(offset + 1) << 8;
|
|
14
|
+
a += key.charCodeAt(offset + 2) << 16;
|
|
15
|
+
a += key.charCodeAt(offset + 3) << 24;
|
|
16
|
+
|
|
17
|
+
b += key.charCodeAt(offset + 4);
|
|
18
|
+
b += key.charCodeAt(offset + 5) << 8;
|
|
19
|
+
b += key.charCodeAt(offset + 6) << 16;
|
|
20
|
+
b += key.charCodeAt(offset + 7) << 24;
|
|
21
|
+
|
|
22
|
+
c += key.charCodeAt(offset + 8);
|
|
23
|
+
c += key.charCodeAt(offset + 9) << 8;
|
|
24
|
+
c += key.charCodeAt(offset + 10) << 16;
|
|
25
|
+
c += key.charCodeAt(offset + 11) << 24;
|
|
26
|
+
|
|
27
|
+
// mix(a, b, c);
|
|
28
|
+
a -= c; a ^= (c << 4) | (c >>> 28); c = c + b | 0;
|
|
29
|
+
b -= a; b ^= (a << 6) | (a >>> 26); a = a + c | 0;
|
|
30
|
+
c -= b; c ^= (b << 8) | (b >>> 24); b = b + a | 0;
|
|
31
|
+
a -= c; a ^= (c << 16) | (c >>> 16); c = c + b | 0;
|
|
32
|
+
b -= a; b ^= (a << 19) | (a >>> 13); a = a + c | 0;
|
|
33
|
+
c -= b; c ^= (b << 4) | (b >>> 28); b = b + a | 0;
|
|
34
|
+
|
|
35
|
+
offset += 12;
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
if (length - offset > 0) {
|
|
39
|
+
// zero length strings require no mixing
|
|
40
|
+
// eslint-disable-next-line default-case
|
|
41
|
+
switch (length - offset) {
|
|
42
|
+
case 12: c += key.charCodeAt(offset + 11) << 24; // falls through
|
|
43
|
+
case 11: c += key.charCodeAt(offset + 10) << 16; // falls through
|
|
44
|
+
case 10: c += key.charCodeAt(offset + 9) << 8; // falls through
|
|
45
|
+
case 9: c += key.charCodeAt(offset + 8); // falls through
|
|
46
|
+
case 8: b += key.charCodeAt(offset + 7) << 24; // falls through
|
|
47
|
+
case 7: b += key.charCodeAt(offset + 6) << 16; // falls through
|
|
48
|
+
case 6: b += key.charCodeAt(offset + 5) << 8; // falls through
|
|
49
|
+
case 5: b += key.charCodeAt(offset + 4); // falls through
|
|
50
|
+
case 4: a += key.charCodeAt(offset + 3) << 24; // falls through
|
|
51
|
+
case 3: a += key.charCodeAt(offset + 2) << 16; // falls through
|
|
52
|
+
case 2: a += key.charCodeAt(offset + 1) << 8; // falls through
|
|
53
|
+
case 1: a += key.charCodeAt(offset + 0);
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
// final(a, b, c);
|
|
57
|
+
c ^= b; c -= (b << 14) | (b >>> 18);
|
|
58
|
+
a ^= c; a -= (c << 11) | (c >>> 21);
|
|
59
|
+
b ^= a; b -= (a << 25) | (a >>> 7);
|
|
60
|
+
c ^= b; c -= (b << 16) | (b >>> 16);
|
|
61
|
+
a ^= c; a -= (c << 4) | (c >>> 28);
|
|
62
|
+
b ^= a; b -= (a << 14) | (a >>> 18);
|
|
63
|
+
c ^= b; c -= (b << 24) | (b >>> 8);
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
return [c >>> 0, b >>> 0];
|
|
67
|
+
};
|
|
68
|
+
|
|
69
|
+
const getNameHash = (name: string): string => {
|
|
70
|
+
const normalized = name.replace(/\//g, '\\').toUpperCase();
|
|
71
|
+
const [pc, pb] = hashlittle2(normalized);
|
|
72
|
+
return `${pc.toString(16).padStart(8, '0')}${pb.toString(16).padStart(8, '0')}`;
|
|
73
|
+
};
|
|
74
|
+
|
|
75
|
+
export default getNameHash;
|
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
import assert from 'node:assert';
|
|
2
|
+
import crypto from 'node:crypto';
|
|
3
|
+
|
|
4
|
+
const VERSION_SUB_OFFSET = -12;
|
|
5
|
+
const CHECKSUM_SIZE_SUB_OFFSET = -5;
|
|
6
|
+
|
|
7
|
+
const BLOCK_SIZE_OFFSET = 3;
|
|
8
|
+
const OFFSET_BYTES_OFFSET = 4;
|
|
9
|
+
const SIZE_BYTES_OFFSET = 5;
|
|
10
|
+
const KEY_SIZE_OFFSET = 6;
|
|
11
|
+
// const CHECKSUM_SIZE_OFFSET = 7;
|
|
12
|
+
const NUM_ELEMENTS_OFFSET = 8;
|
|
13
|
+
const CHECKSUM_OFFSET = 12;
|
|
14
|
+
|
|
15
|
+
const CHECKSUM_TRIES = [
|
|
16
|
+
10,
|
|
17
|
+
9,
|
|
18
|
+
8,
|
|
19
|
+
7,
|
|
20
|
+
6,
|
|
21
|
+
5,
|
|
22
|
+
4,
|
|
23
|
+
3,
|
|
24
|
+
2,
|
|
25
|
+
1,
|
|
26
|
+
0,
|
|
27
|
+
];
|
|
28
|
+
|
|
29
|
+
interface ArchiveIndex {
|
|
30
|
+
key: string,
|
|
31
|
+
size: number,
|
|
32
|
+
offset: number,
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
const tryArchiveIndexChecksumSize = (buffer: Buffer, cKey: string): number => {
|
|
36
|
+
const res = CHECKSUM_TRIES.filter(
|
|
37
|
+
(index) => (
|
|
38
|
+
buffer.readUInt8(buffer.byteLength - index + CHECKSUM_SIZE_SUB_OFFSET) === index
|
|
39
|
+
&& buffer.readUInt8(buffer.byteLength - index + VERSION_SUB_OFFSET) === 1
|
|
40
|
+
),
|
|
41
|
+
);
|
|
42
|
+
|
|
43
|
+
if (res.length === 1) {
|
|
44
|
+
return res[0];
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
throw new Error(`Invalid checksum size: ${res.join(', ')} in ${cKey}`);
|
|
48
|
+
};
|
|
49
|
+
|
|
50
|
+
/**
 * Parse a CASC archive index file into a map from encoding key (hex string)
 * to an ArchiveIndex record ({ key: archive cKey, size, offset }).
 *
 * Layout handled here: data blocks, then a table of contents (last eKey per
 * block + per-block checksum), then a footer (toc checksum, version byte,
 * geometry fields, element count, footer checksum). All checksums are
 * truncated MD5s of length `checksumSize`.
 *
 * @param buffer - full raw index file
 * @param cKey - content key of the index; validated against the footer MD5
 * @returns map of eKey (hex) -> ArchiveIndex
 * @throws AssertionError on any version/checksum/count mismatch
 */
const parseArchiveIndex = (buffer: Buffer, cKey: string): Map<string, ArchiveIndex> => {
  const checksumSize = tryArchiveIndexChecksumSize(buffer, cKey);

  // Version byte sits 12 bytes before (end - checksumSize); the footer
  // starts one checksum-length earlier, where the toc checksum is stored.
  const versionOffset = buffer.byteLength - checksumSize + VERSION_SUB_OFFSET;
  const footerOffset = versionOffset - checksumSize;

  const tocChecksum = buffer.toString('hex', footerOffset, versionOffset);
  const version = buffer.readUInt8(versionOffset);
  const blockSizeKB = buffer.readUInt8(versionOffset + BLOCK_SIZE_OFFSET);
  const offsetBytes = buffer.readUInt8(versionOffset + OFFSET_BYTES_OFFSET);
  const sizeBytes = buffer.readUInt8(versionOffset + SIZE_BYTES_OFFSET);
  const keySize = buffer.readUInt8(versionOffset + KEY_SIZE_OFFSET);
  const numElements = buffer.readUInt32LE(versionOffset + NUM_ELEMENTS_OFFSET);
  const footerChecksum = buffer.toString('hex', versionOffset + CHECKSUM_OFFSET);

  assert(version === 1, `Invalid version: ${version.toString()} in ${cKey}`);

  const entrySize = keySize + offsetBytes + sizeBytes;
  const blockSize = blockSizeKB * 1024;
  // NOTE(review): this division is not floored — it assumes the pre-footer
  // region is an exact multiple of (blockSize + keySize + checksumSize),
  // i.e. the last block is padded to full size. TODO confirm against spec.
  const numBlocks = footerOffset / (blockSize + keySize + checksumSize);
  const tocSize = (keySize + checksumSize) * numBlocks;
  const toc = buffer.subarray(footerOffset - tocSize, footerOffset);
  const footer = buffer.subarray(footerOffset);
  // Footer with its trailing checksum zeroed, used for the self-checksum.
  const footerCheckBuffer = Buffer.concat([
    buffer.subarray(versionOffset, buffer.byteLength - checksumSize),
    Buffer.alloc(checksumSize),
  ]);

  // The index file's cKey is the MD5 of the entire footer.
  const hash = crypto.createHash('md5').update(footer).digest('hex');
  assert(hash === cKey, `Invalid footer hash in ${cKey}: expected ${cKey}, got ${hash}`);

  // Footer self-checksum: truncated MD5 over the footer with its own
  // checksum field zeroed out.
  const footerHash = crypto.createHash('md5').update(footerCheckBuffer).digest('hex').slice(0, checksumSize * 2);
  assert(footerHash === footerChecksum, `Invalid footer checksum in ${cKey}: expected ${footerChecksum}, got ${footerHash}`);

  // Toc checksum: truncated MD5 over the whole table of contents.
  const tocHash = crypto.createHash('md5').update(toc).digest('hex').slice(0, checksumSize * 2);
  assert(tocHash === tocChecksum, `Invalid toc checksum in ${cKey}: expected ${tocChecksum}, got ${tocHash}`);

  const result = new Map<string, ArchiveIndex>();
  for (let i = 0; i < numBlocks; i += 1) {
    // Toc layout: numBlocks last-eKeys, then numBlocks block checksums.
    const lastEkey = toc.toString('hex', i * keySize, (i + 1) * keySize);
    const blockChecksum = toc.toString('hex', numBlocks * keySize + i * checksumSize, numBlocks * keySize + (i + 1) * checksumSize);
    const blockOffset = i * blockSize;

    const blockHash = crypto.createHash('md5').update(buffer.subarray(i * blockSize, (i + 1) * blockSize)).digest('hex').slice(0, checksumSize * 2);
    assert(blockChecksum === blockHash, `Invalid block hash in ${cKey} at ${i.toString()}: expected ${blockChecksum}, got ${blockHash}`);

    // NOTE(review): `length` counts entries, not bytes, so `< blockSize` is
    // only a loose upper bound; the loop is expected to exit via the
    // lastEkey break below. TODO confirm every block ends with its toc key.
    let length = 0;
    while (length < blockSize) {
      const entryOffset = blockOffset + length * entrySize;
      const eKey = buffer.toString('hex', entryOffset, entryOffset + keySize);
      const size = buffer.readUIntBE(entryOffset + keySize, sizeBytes);
      const offset = buffer.readUIntBE(entryOffset + keySize + sizeBytes, offsetBytes);

      result.set(eKey, { key: cKey, size, offset });
      length += 1;

      // The toc stores the last eKey of each block; stop once reached.
      if (eKey === lastEkey) {
        break;
      }
    }
  }

  assert(result.size === numElements, `Invalid number of elements: ${result.size.toString()} != ${numElements.toString()} in ${cKey}`);

  return result;
};

export default parseArchiveIndex;

export type { ArchiveIndex };
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
import assert from 'node:assert';
|
|
2
|
+
|
|
3
|
+
const normalizeKey = (key: string): string => key
|
|
4
|
+
.split('-')
|
|
5
|
+
.map((part, index) => (
|
|
6
|
+
index === 0
|
|
7
|
+
? part
|
|
8
|
+
: `${part.charAt(0).toUpperCase()}${part.slice(1)}`
|
|
9
|
+
))
|
|
10
|
+
.join('');
|
|
11
|
+
|
|
12
|
+
const parseConfig = (text: string): Record<string, string> => {
|
|
13
|
+
const entries: Record<string, string> = {};
|
|
14
|
+
|
|
15
|
+
text
|
|
16
|
+
.split(/\r?\n/)
|
|
17
|
+
.filter((line) => line.trim().length !== 0 && !line.startsWith('#'))
|
|
18
|
+
.forEach((line) => {
|
|
19
|
+
const match = /([^\s]+)\s?=\s?(.*)/.exec(line);
|
|
20
|
+
assert(match !== null, 'Invalid token encountered parsing CDN config');
|
|
21
|
+
|
|
22
|
+
const [key, value] = match.slice(1);
|
|
23
|
+
entries[normalizeKey(key)] = value;
|
|
24
|
+
});
|
|
25
|
+
|
|
26
|
+
return entries;
|
|
27
|
+
};
|
|
28
|
+
|
|
29
|
+
interface CDNConfig {
|
|
30
|
+
archives: string,
|
|
31
|
+
archivesIndexSize: string,
|
|
32
|
+
archiveGroup: string,
|
|
33
|
+
patchArchives: string,
|
|
34
|
+
patchArchivesIndexSize: string,
|
|
35
|
+
patchArchiveGroup: string,
|
|
36
|
+
fileIndex: string,
|
|
37
|
+
fileIndexSize: string,
|
|
38
|
+
patchFileIndex: string,
|
|
39
|
+
patchFileIndexSize: string,
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
const parseCDNConfig = (
|
|
43
|
+
text: string,
|
|
44
|
+
): CDNConfig => parseConfig(text) as unknown as CDNConfig;
|
|
45
|
+
|
|
46
|
+
interface BuildConfig {
|
|
47
|
+
root: string,
|
|
48
|
+
install: string,
|
|
49
|
+
installSize: string,
|
|
50
|
+
download: string,
|
|
51
|
+
downloadSize: string,
|
|
52
|
+
size: string,
|
|
53
|
+
sizeSize: string,
|
|
54
|
+
encoding: string,
|
|
55
|
+
encodingSize: string,
|
|
56
|
+
patchIndex: string,
|
|
57
|
+
patchIndexSize: string,
|
|
58
|
+
patch: string,
|
|
59
|
+
patchSize: string,
|
|
60
|
+
patchConfig: string,
|
|
61
|
+
buildName: string,
|
|
62
|
+
buildUid: string,
|
|
63
|
+
buildProduct: string,
|
|
64
|
+
buildPlaybuildInstaller: string,
|
|
65
|
+
buildPartialPriority: string,
|
|
66
|
+
vfsRoot: string,
|
|
67
|
+
vfsRootSize: string,
|
|
68
|
+
[key: `vfs${number}` | `vfs${number}Size`]: string,
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
const parseBuildConfig = (
|
|
72
|
+
text: string,
|
|
73
|
+
): BuildConfig => parseConfig(text) as unknown as BuildConfig;
|
|
74
|
+
|
|
75
|
+
export { parseCDNConfig, parseBuildConfig };
|
|
@@ -0,0 +1,159 @@
|
|
|
1
|
+
import assert from 'node:assert';
|
|
2
|
+
import crypto from 'node:crypto';
|
|
3
|
+
|
|
4
|
+
import BLTEReader from '../blte.ts';
|
|
5
|
+
|
|
6
|
+
// 'EN' magic, read as a big-endian u16 from the start of the encoding file.
const ENC_MAGIC = 0x454e;

// Byte offsets of the encoding file header fields.
const MAGIC_OFFSET = 0;
const VERSION_OFFSET = 2;
const HASH_SIZE_CKEY_OFFSET = 3;
const HASH_SIZE_EKEY_OFFSET = 4;
const CKEY_PAGE_SIZE_OFFSET = 5;
const EKEY_PAGE_SIZE_OFFSET = 7;
const CKEY_PAGE_COUNT_OFFSET = 9;
const EKEY_PAGE_COUNT_OFFSET = 13;
// const UNK11_OFFSET = 17;
const SPEC_BLOCK_SIZE_OFFSET = 18;
const SPEC_BLOCK_OFFSET = 22;

// Fully parsed encoding file.
interface EncodingData {
  // ESpec strings (null-terminated in the file), indexed by eKey2ESpecIndex.
  eSpec: string[],
  // cKey (hex) -> decoded file size.
  cKey2FileSize: Map<string, number>,
  // cKey (hex) -> one eKey, or several when the file has multiple encodings.
  cKey2EKey: Map<string, string | string[]>,
  // eKey (hex) -> index into eSpec.
  eKey2ESpecIndex: Map<string, number>,
  // eKey (hex) -> encoded file size.
  eKey2FileSize: Map<string, number>,
}
|
|
27
|
+
|
|
28
|
+
/**
 * Decode (BLTE) and parse a CASC encoding file.
 *
 * The file consists of a header, an ESpec string block, a cKey page index +
 * cKey pages (cKey -> file size + eKey(s)), and an eKey page index + eKey
 * pages (eKey -> ESpec index + encoded size). Every page is verified
 * against the MD5 stored in its index entry.
 *
 * @param inputBuffer - raw BLTE-encoded encoding file
 * @param eKey - encoding key used to decode the BLTE container
 * @param cKey - content key; validated against the MD5 of the decoded data
 * @throws AssertionError on any magic/version/checksum mismatch
 */
const parseEncodingFile = (inputBuffer: Buffer, eKey: string, cKey: string): EncodingData => {
  const reader = new BLTEReader(inputBuffer, eKey);
  reader.processBytes();

  const { buffer } = reader;

  // The decoded payload must hash to the declared content key.
  const encodingHash = crypto.createHash('md5').update(buffer).digest('hex');
  assert(encodingHash === cKey, `Invalid encoding hash: expected ${cKey}, got ${encodingHash}`);

  const magic = buffer.readUInt16BE(MAGIC_OFFSET);
  assert(magic === ENC_MAGIC, `Invalid encoding magic: ${magic.toString(16).padStart(4, '0')}`);

  const version = buffer.readUInt8(VERSION_OFFSET);
  const hashSizeCKey = buffer.readUInt8(HASH_SIZE_CKEY_OFFSET);
  const hashSizeEKey = buffer.readUInt8(HASH_SIZE_EKEY_OFFSET);
  const cKeyPageSizeKB = buffer.readUInt16BE(CKEY_PAGE_SIZE_OFFSET);
  const eKeyPageSizeKB = buffer.readUInt16BE(EKEY_PAGE_SIZE_OFFSET);
  const cKeyPageCount = buffer.readUInt32BE(CKEY_PAGE_COUNT_OFFSET);
  const eKeyPageCount = buffer.readUInt32BE(EKEY_PAGE_COUNT_OFFSET);
  const specBlockSize = buffer.readUInt32BE(SPEC_BLOCK_SIZE_OFFSET);

  assert(version === 1, `Invalid encoding version: ${version.toString()}`);

  // ESpec block: a run of null-terminated ASCII strings.
  const eSpec: string[] = [];
  let eSpecStringStart = SPEC_BLOCK_OFFSET;
  for (
    let i = SPEC_BLOCK_OFFSET;
    i < SPEC_BLOCK_OFFSET + specBlockSize;
    i += 1
  ) {
    if (buffer[i] === 0x00) {
      eSpec.push(buffer.toString('ascii', eSpecStringStart, i));
      eSpecStringStart = i + 1;
    }
  }

  // cKey pages: index entries are (first cKey, page MD5); page entries are
  // keyCount (1 byte), file size (5 bytes BE), cKey, then keyCount eKeys.
  const cKey2FileSize = new Map<string, number>();
  const cKey2EKey = new Map<string, string | string[]>();
  const cKeyPageIndexOffset = SPEC_BLOCK_OFFSET + specBlockSize;
  const cKeyPageIndexEntrySize = hashSizeCKey + 0x10;
  const cKeyPageOffset = cKeyPageIndexOffset + cKeyPageIndexEntrySize * cKeyPageCount;
  const cKeyPageSize = cKeyPageSizeKB * 1024;
  for (let i = 0; i < cKeyPageCount; i += 1) {
    const indexOffset = cKeyPageIndexOffset + i * cKeyPageIndexEntrySize;
    const pageOffset = cKeyPageOffset + i * cKeyPageSize;

    const firstCKey = buffer.toString('hex', indexOffset, indexOffset + hashSizeCKey);
    const pageChecksum = buffer.toString('hex', indexOffset + hashSizeCKey, indexOffset + hashSizeCKey + 0x10);

    const pageBuffer = buffer.subarray(pageOffset, pageOffset + cKeyPageSize);
    const pageHash = crypto.createHash('md5').update(pageBuffer).digest('hex');
    assert(pageHash === pageChecksum, `Invalid ckey page ${i.toString()} checksum: expected ${pageChecksum}, got ${pageHash}`);

    // First cKey of the page sits after the 6-byte (keyCount + size) prefix
    // of the first entry; it must match the index entry.
    const pageFirstCKey = pageBuffer.toString('hex', 6, 6 + hashSizeCKey);
    assert(pageFirstCKey === firstCKey, `Invalid ckey page ${i.toString()} first ckey: expected ${firstCKey}, got ${pageFirstCKey}`);

    let pagePointer = 0;
    while (pagePointer < cKeyPageSize) {
      const keyCount = pageBuffer.readUInt8(pagePointer);
      pagePointer += 1;
      // A zero keyCount marks the end of the page (zero padding).
      if (keyCount === 0x00) {
        break;
      }

      const fileSize = pageBuffer.readUIntBE(pagePointer, 5);
      pagePointer += 5;

      const fileCKey = pageBuffer.toString('hex', pagePointer, pagePointer + hashSizeCKey);
      pagePointer += hashSizeCKey;

      cKey2FileSize.set(fileCKey, fileSize);

      if (keyCount === 1) {
        // Common case: single encoding — store the eKey as a plain string.
        const fileEKey = pageBuffer.toString('hex', pagePointer, pagePointer + hashSizeEKey);
        cKey2EKey.set(fileCKey, fileEKey);
        pagePointer += hashSizeEKey;
      } else {
        // Multiple encodings — store all eKeys as an array.
        const fileEKeys: string[] = [];
        for (let j = 0; j < keyCount; j += 1) {
          const fileEKey = pageBuffer.toString('hex', pagePointer, pagePointer + hashSizeEKey);
          fileEKeys.push(fileEKey);
          pagePointer += hashSizeEKey;
        }
        cKey2EKey.set(fileCKey, fileEKeys);
      }
    }
  }

  // eKey pages: fixed-size entries of eKey + ESpec index (u32 BE) +
  // encoded size (5 bytes BE).
  const eKey2ESpecIndex = new Map<string, number>();
  const eKey2FileSize = new Map<string, number>();
  const eKeyPageIndexOffset = cKeyPageOffset + cKeyPageSize * cKeyPageCount;
  const eKeyPageIndexEntrySize = hashSizeEKey + 0x10;
  const eKeyPageOffset = eKeyPageIndexOffset + eKeyPageIndexEntrySize * eKeyPageCount;
  const eKeyPageSize = eKeyPageSizeKB * 1024;
  const eKeyPageEntrySize = hashSizeEKey + 0x04 + 0x05;
  for (let i = 0; i < eKeyPageCount; i += 1) {
    const indexOffset = eKeyPageIndexOffset + i * eKeyPageIndexEntrySize;
    const pageOffset = eKeyPageOffset + i * eKeyPageSize;

    const firstEKey = buffer.toString('hex', indexOffset, indexOffset + hashSizeEKey);
    const pageChecksum = buffer.toString('hex', indexOffset + hashSizeEKey, indexOffset + hashSizeEKey + 0x10);

    const pageBuffer = buffer.subarray(pageOffset, pageOffset + eKeyPageSize);
    const pageHash = crypto.createHash('md5').update(pageBuffer).digest('hex');
    assert(pageHash === pageChecksum, `Invalid ekey page ${i.toString()} checksum: expected ${pageChecksum}, got ${pageHash}`);

    const pageFirstEKey = pageBuffer.toString('hex', 0, hashSizeEKey);
    assert(pageFirstEKey === firstEKey, `Invalid ekey page ${i.toString()} first ekey: expected ${firstEKey}, got ${pageFirstEKey}`);

    let pagePointer = 0;
    while (pagePointer + eKeyPageEntrySize <= eKeyPageSize) {
      const fileEKey = pageBuffer.toString('hex', pagePointer, pagePointer + hashSizeEKey);
      pagePointer += hashSizeEKey;

      const eSpecIndex = pageBuffer.readUInt32BE(pagePointer);
      pagePointer += 4;
      eKey2ESpecIndex.set(fileEKey, eSpecIndex);

      const fileSize = pageBuffer.readUIntBE(pagePointer, 5);
      pagePointer += 5;
      eKey2FileSize.set(fileEKey, fileSize);
    }
  }

  return {
    eSpec, cKey2FileSize, cKey2EKey, eKey2ESpecIndex, eKey2FileSize,
  };
};

export default parseEncodingFile;

export type { EncodingData };
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
/* eslint-disable @typescript-eslint/naming-convention */
|
|
2
|
+
|
|
3
|
+
const parseProductConfig = (text: string): Record<string, string>[] => {
|
|
4
|
+
const lines = text.split(/\r?\n/);
|
|
5
|
+
|
|
6
|
+
// First line contains field definitions.
|
|
7
|
+
// Example: Name!STRING:0|Path!STRING:0|Hosts!STRING:0|Servers!STRING:0|ConfigPath!STRING:0
|
|
8
|
+
// Whitespace is replaced so that a field like 'Install Key' becomes 'InstallKey'.
|
|
9
|
+
// This just improves coding readability when accessing the fields later on.
|
|
10
|
+
const headers = lines[0]
|
|
11
|
+
.split('|')
|
|
12
|
+
.map((header) => header.split('!')[0].replace(' ', ''));
|
|
13
|
+
|
|
14
|
+
const entries = lines
|
|
15
|
+
.filter((line, index) => index > 0 && line.trim().length !== 0 && !line.startsWith('#'))
|
|
16
|
+
.map((line) => {
|
|
17
|
+
const node: Record<string, string> = {};
|
|
18
|
+
const entryFields = line.split('|');
|
|
19
|
+
for (let i = 0, n = entryFields.length; i < n; i += 1) {
|
|
20
|
+
node[headers[i]] = entryFields[i];
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
return node;
|
|
24
|
+
});
|
|
25
|
+
|
|
26
|
+
return entries;
|
|
27
|
+
};
|
|
28
|
+
|
|
29
|
+
interface Version {
|
|
30
|
+
Region: string,
|
|
31
|
+
BuildConfig: string,
|
|
32
|
+
CDNConfig: string,
|
|
33
|
+
KeyRing: string,
|
|
34
|
+
BuildId: string,
|
|
35
|
+
VersionsName: string,
|
|
36
|
+
ProductConfig: string,
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
const parseProductVersions = (
|
|
40
|
+
text: string,
|
|
41
|
+
): Version[] => parseProductConfig(text) as unknown as Version[];
|
|
42
|
+
|
|
43
|
+
interface CDN {
|
|
44
|
+
Name: string,
|
|
45
|
+
Path: string,
|
|
46
|
+
Hosts: string,
|
|
47
|
+
Servers: string,
|
|
48
|
+
ConfigPath: string,
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
const parseProductCDNs = (
|
|
52
|
+
text: string,
|
|
53
|
+
): CDN[] => parseProductConfig(text) as unknown as CDN[];
|
|
54
|
+
|
|
55
|
+
export { parseProductVersions, parseProductCDNs };
|
|
56
|
+
|
|
57
|
+
export type { Version };
|
|
@@ -0,0 +1,172 @@
|
|
|
1
|
+
/* eslint-disable @typescript-eslint/naming-convention */
|
|
2
|
+
|
|
3
|
+
import assert from 'node:assert';
|
|
4
|
+
import crypto from 'node:crypto';
|
|
5
|
+
|
|
6
|
+
import BLTEReader from '../blte.ts';
|
|
7
|
+
|
|
8
|
+
// ASCII 'MFST' as a u32; the first 4 bytes of the decoded root file are
// compared against this (read little-endian) to detect the post-8.2.0 format.
const MFST_MAGIC = 0x4d465354;

// Bit masks for the per-record-block content flags stored in the root file.
const ContentFlags = {
  Install: 0x4,
  LoadOnWindows: 0x8,
  LoadOnMacOS: 0x10,
  x86_32: 0x20,
  x86_64: 0x40,
  LowViolence: 0x80,
  DoNotLoad: 0x100,
  UpdatePlugin: 0x800,
  ARM64: 0x8000,
  Encrypted: 0x8000000,
  // Records in a NoNameHash block carry no name hash (see parseRootFile).
  NoNameHash: 0x10000000,
  UncommonResolution: 0x20000000,
  Bundle: 0x40000000,
  NoCompression: 0x80000000,
} as const;

// Bit masks for the per-record-block locale flags; a block may be flagged
// for several locales at once.
const LocaleFlags = {
  enUS: 0x2,
  koKR: 0x4,
  frFR: 0x10,
  deDE: 0x20,
  zhCN: 0x40,
  esES: 0x80,
  zhTW: 0x100,
  enGB: 0x200,
  // enCN: 0x400,
  // enTW: 0x800,
  esMX: 0x1000,
  ruRU: 0x2000,
  ptBR: 0x4000,
  itIT: 0x8000,
  ptPT: 0x10000,
} as const;

// One (cKey, flags) variant of a file; a single fileDataID can map to
// several variants differing in content/locale flags.
interface FileInfo {
  cKey: string,
  contentFlags: number,
  localeFlags: number,
}

// Parsed root file: fileDataID -> variants, and name hash (hex) -> fileDataID.
interface RootData {
  fileDataID2CKey: Map<number, FileInfo[]>,
  nameHash2FileDataID: Map<string, number>,
}
|
55
|
+
|
|
56
|
+
/**
 * Decode (BLTE) and parse a CASC root file into fileDataID -> file-variant
 * and name-hash -> fileDataID lookup maps.
 *
 * Three on-disk layouts are handled: pre-8.2.0 (no magic, interleaved
 * cKey + name hash per record), post-8.2.0 MFST, and the post-10.1.7 MFST
 * variant with an explicit header (detected by the first u32 after the
 * magic being a small header size rather than a large file count).
 *
 * @param inputBuffer - raw BLTE-encoded root file
 * @param eKey - encoding key used to decode the BLTE container
 * @param cKey - content key; validated against the MD5 of the decoded data
 * @throws AssertionError when the decoded data does not hash to cKey
 */
const parseRootFile = (inputBuffer: Buffer, eKey: string, cKey: string): RootData => {
  const reader = new BLTEReader(inputBuffer, eKey);
  reader.processBytes();

  const { buffer } = reader;

  const rootHash = crypto.createHash('md5').update(buffer).digest('hex');
  assert(rootHash === cKey, `Invalid root hash: expected ${cKey}, got ${rootHash}`);

  const fileDataID2CKey = new Map<number, FileInfo[]>();
  const nameHash2FileDataID = new Map<string, number>();

  const magic = buffer.readUInt32LE(0);
  if (magic === MFST_MAGIC) {
    // post 8.2.0
    // In the 10.1.7+ layout this u32 is the header size (a small value);
    // before that it is the total file count.
    const firstEntry = buffer.readUInt32LE(4);
    const newFormat = firstEntry < 100; // post 10.1.7

    const headerSize = newFormat ? firstEntry : 12;
    // const version = newFormat ? buffer.readUInt32LE(8) : 0;
    const totalFileCount = newFormat ? buffer.readUInt32LE(12) : firstEntry;
    const namedFileCount = newFormat ? buffer.readUInt32LE(16) : buffer.readUInt32LE(8);

    // When the counts differ, some blocks omit name hashes entirely
    // (flagged with ContentFlags.NoNameHash).
    const allowNonNamedFiles = totalFileCount !== namedFileCount;

    let pointer = headerSize;
    while (pointer < buffer.byteLength) {
      // Block header: record count + shared content/locale flags.
      const numRecords = buffer.readUInt32LE(pointer);
      const contentFlags = buffer.readUInt32LE(pointer + 4);
      const localeFlags = buffer.readUInt32LE(pointer + 8);
      pointer += 12;

      // fileDataIDs are delta-encoded: each u32 is the gap (minus one)
      // from the previous ID.
      const fileDataIDs = [];
      let currFileDataID = -1;
      for (let i = 0; i < numRecords; i += 1) {
        currFileDataID += buffer.readUInt32LE(pointer) + 1;
        fileDataIDs.push(currFileDataID);
        pointer += 4;
      }

      // Then one 16-byte cKey per record.
      for (let i = 0; i < numRecords; i += 1) {
        const fileDataID = fileDataIDs[i];
        const fileCKey = buffer.toString('hex', pointer, pointer + 16);
        pointer += 16;

        if (fileDataID2CKey.has(fileDataID)) {
          fileDataID2CKey.get(fileDataID)?.push({
            cKey: fileCKey,
            contentFlags,
            localeFlags,
          });
        } else {
          fileDataID2CKey.set(fileDataID, [
            { cKey: fileCKey, contentFlags, localeFlags },
          ]);
        }
      }

      // Then one 8-byte name hash per record, unless this block is allowed
      // to omit them and is flagged NoNameHash.
      // NOTE(review): here the hash is rendered as a hex NUMBER (u64 read
      // LE), whereas the pre-8.2.0 branch below renders the raw BYTES —
      // the two branches produce different strings for the same on-disk
      // bytes. Verify which representation getNameHash callers expect.
      // eslint-disable-next-line no-bitwise
      if (!(allowNonNamedFiles && (contentFlags & ContentFlags.NoNameHash))) {
        for (let i = 0; i < numRecords; i += 1) {
          const fileDataID = fileDataIDs[i];
          const nameHash = buffer.readBigUInt64LE(pointer).toString(16).padStart(16, '0');
          pointer += 8;

          nameHash2FileDataID.set(nameHash, fileDataID);
        }
      }
    }
  } else {
    // pre 8.2.0
    let pointer = 0;
    while (pointer < buffer.byteLength) {
      const numRecords = buffer.readUInt32LE(pointer);
      const contentFlags = buffer.readUInt32LE(pointer + 4);
      const localeFlags = buffer.readUInt32LE(pointer + 8);
      pointer += 12;

      // Same delta encoding of fileDataIDs as the MFST branch.
      const fileDataIDs = [];
      let currFileDataID = -1;
      for (let i = 0; i < numRecords; i += 1) {
        currFileDataID += buffer.readUInt32LE(pointer) + 1;
        fileDataIDs.push(currFileDataID);
        pointer += 4;
      }

      // Records are interleaved here: 16-byte cKey + 8-byte name hash each.
      for (let i = 0; i < numRecords; i += 1) {
        const fileDataID = fileDataIDs[i];
        const fileCKey = buffer.toString('hex', pointer, pointer + 16);
        const nameHash = buffer.toString('hex', pointer + 16, pointer + 24);
        pointer += 24;

        if (fileDataID2CKey.has(fileDataID)) {
          fileDataID2CKey.get(fileDataID)?.push({
            cKey: fileCKey,
            contentFlags,
            localeFlags,
          });
        } else {
          fileDataID2CKey.set(fileDataID, [
            { cKey: fileCKey, contentFlags, localeFlags },
          ]);
        }

        nameHash2FileDataID.set(nameHash, fileDataID);
      }
    }
  }

  return { fileDataID2CKey, nameHash2FileDataID };
};

export default parseRootFile;

export { ContentFlags, LocaleFlags };

export type { FileInfo, RootData };