@rhyster/wow-casc-dbc 2.6.19 → 2.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/adb.d.ts +17 -0
- package/dist/adb.d.ts.map +1 -0
- package/dist/blte.d.ts +25 -0
- package/dist/blte.d.ts.map +1 -0
- package/dist/client.d.ts +84 -0
- package/dist/client.d.ts.map +1 -0
- package/dist/dbd.d.ts +26 -0
- package/dist/dbd.d.ts.map +1 -0
- package/dist/fetcher.d.ts +21 -0
- package/dist/fetcher.d.ts.map +1 -0
- package/dist/index.cjs +1 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.ts +9 -261
- package/dist/index.d.ts.map +1 -0
- package/dist/index.mjs +1 -0
- package/dist/index.mjs.map +1 -0
- package/dist/jenkins96.d.ts +3 -0
- package/dist/jenkins96.d.ts.map +1 -0
- package/dist/parsers/archiveIndex.d.ts +9 -0
- package/dist/parsers/archiveIndex.d.ts.map +1 -0
- package/dist/parsers/config.d.ts +40 -0
- package/dist/parsers/config.d.ts.map +1 -0
- package/dist/parsers/encodingFile.d.ts +11 -0
- package/dist/parsers/encodingFile.d.ts.map +1 -0
- package/dist/parsers/productConfig.d.ts +21 -0
- package/dist/parsers/productConfig.d.ts.map +1 -0
- package/dist/parsers/rootFile.d.ts +45 -0
- package/dist/parsers/rootFile.d.ts.map +1 -0
- package/dist/salsa20.d.ts +14 -0
- package/dist/salsa20.d.ts.map +1 -0
- package/dist/store.d.ts +9 -0
- package/dist/store.d.ts.map +1 -0
- package/dist/test/salsa20.test.d.ts +2 -0
- package/dist/test/salsa20.test.d.ts.map +1 -0
- package/dist/utils.d.ts +3 -0
- package/dist/utils.d.ts.map +1 -0
- package/dist/wdc.d.ts +104 -0
- package/dist/wdc.d.ts.map +1 -0
- package/package.json +4 -3
- package/src/adb.ts +70 -0
- package/src/blte.ts +220 -0
- package/src/client.ts +411 -0
- package/src/dbd.ts +427 -0
- package/src/fetcher.ts +223 -0
- package/src/index.ts +44 -0
- package/src/jenkins96.ts +75 -0
- package/src/parsers/archiveIndex.ts +119 -0
- package/src/parsers/config.ts +75 -0
- package/src/parsers/encodingFile.ts +159 -0
- package/src/parsers/productConfig.ts +57 -0
- package/src/parsers/rootFile.ts +172 -0
- package/src/salsa20.ts +143 -0
- package/src/store.ts +37 -0
- package/src/test/salsa20.test.ts +522 -0
- package/src/utils.ts +77 -0
- package/src/wdc.ts +788 -0
package/src/client.ts
ADDED
|
@@ -0,0 +1,411 @@
|
|
|
1
|
+
import assert from 'node:assert';
|
|
2
|
+
import crypto from 'node:crypto';
|
|
3
|
+
|
|
4
|
+
import { mapLimit, retry } from 'async';
|
|
5
|
+
import cliProgress from 'cli-progress';
|
|
6
|
+
|
|
7
|
+
import BLTEReader from './blte.ts';
|
|
8
|
+
import {
|
|
9
|
+
getProductVersions,
|
|
10
|
+
getProductCDNs,
|
|
11
|
+
getConfigFile,
|
|
12
|
+
getDataFile,
|
|
13
|
+
} from './fetcher.ts';
|
|
14
|
+
import getNameHash from './jenkins96.ts';
|
|
15
|
+
import parseArchiveIndex from './parsers/archiveIndex.ts';
|
|
16
|
+
import { parseCDNConfig, parseBuildConfig } from './parsers/config.ts';
|
|
17
|
+
import parseEncodingFile from './parsers/encodingFile.ts';
|
|
18
|
+
import { parseProductVersions, parseProductCDNs } from './parsers/productConfig.ts';
|
|
19
|
+
import parseRootFile, { LocaleFlags, ContentFlags } from './parsers/rootFile.ts';
|
|
20
|
+
import { resolveCDNHost, formatFileSize } from './utils.ts';
|
|
21
|
+
import WDCReader from './wdc.ts';
|
|
22
|
+
|
|
23
|
+
import type ADBReader from './adb.ts';
|
|
24
|
+
import type { MissingKeyBlock } from './blte.ts';
|
|
25
|
+
import type { ArchiveIndex } from './parsers/archiveIndex.ts';
|
|
26
|
+
import type { EncodingData } from './parsers/encodingFile.ts';
|
|
27
|
+
import type { Version } from './parsers/productConfig.ts';
|
|
28
|
+
import type { FileInfo, RootData } from './parsers/rootFile.ts';
|
|
29
|
+
|
|
30
|
+
/** Data fetched and parsed once by init(); consumed by the later lookup methods. */
interface ClientPreloadData {
    prefixes: string[],
    archives: Map<string, ArchiveIndex>,
    encoding: EncodingData,
    rootFile: RootData,
}

/** A fully decoded file: every BLTE block was decrypted and MD5-verified. */
interface FileFetchResultFull {
    type: 'full',
    buffer: Buffer,
    blocks: undefined,
}

/** A partially decoded file: some blocks lacked TACT keys (listed in `blocks`). */
interface FileFetchResultPartial {
    type: 'partial',
    buffer: Buffer,
    blocks: MissingKeyBlock[],
}

type FileFetchResult = FileFetchResultFull | FileFetchResultPartial;

// Numeric values matter: log() compares `level <= this.logLevel`,
// so higher values mean more verbose output.
enum LogLevel {
    error = 0,
    warn = 1,
    info = 2,
    debug = 3,
}

// Indexed by the numeric LogLevel value to render the level tag in log lines.
const textLogLevel = [
    'ERROR',
    'WARN',
    'INFO',
    'DEBUG',
] as const;
|
|
64
|
+
|
|
65
|
+
export default class CASCClient {
    /** Region code the client queries (e.g. 'us', 'eu'); matched against Version.Region and CDN config Name. */
    public readonly region: string;

    /** Product identifier passed to the version/CDN endpoints. */
    public readonly product: string;

    /** The product version (build/CDN config hashes) this client is pinned to. */
    public readonly version: Version;

    /** Fallback lookup from listfile name to fileDataID, filled by loadRemoteListFile(). */
    public readonly name2FileDataID = new Map<string, number>();

    /** TACT decryption keys by lowercase key name, filled by the loadTACTKeys* methods. */
    public readonly keys = new Map<string, Uint8Array>();

    /** Populated by init(); undefined until then — most methods assert on it. */
    public preload?: ClientPreloadData;
|
|
77
|
+
|
|
78
|
+
static async getProductVersion(region: string, product: string): Promise<Version | undefined> {
|
|
79
|
+
const versionsText = await getProductVersions(region, product);
|
|
80
|
+
const versions = parseProductVersions(versionsText);
|
|
81
|
+
return versions.find((version) => version.Region === region);
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
    // Re-exported so consumers can reach the flag enums via the class itself.
    // eslint-disable-next-line @typescript-eslint/naming-convention
    public static LocaleFlags = LocaleFlags;

    // eslint-disable-next-line @typescript-eslint/naming-convention
    public static ContentFlags = ContentFlags;

    // eslint-disable-next-line @typescript-eslint/naming-convention
    public static LogLevel = LogLevel;

    /** Verbosity threshold for log(); messages above this level are dropped. */
    public logLevel: LogLevel;
|
|
94
|
+
|
|
95
|
+
    /**
     * Create a client pinned to one region/product/version triple.
     * No network traffic happens here; call init() to actually preload the build.
     *
     * @param region - region code, e.g. 'us'
     * @param product - TACT product name
     * @param version - version row previously obtained via getProductVersion()
     * @param logLevel - verbosity threshold (defaults to info)
     */
    constructor(region: string, product: string, version: Version, logLevel = LogLevel.info) {
        this.region = region;
        this.product = product;
        this.version = version;
        this.logLevel = logLevel;
    }
|
|
101
|
+
|
|
102
|
+
private log(level: LogLevel, message: unknown): void {
|
|
103
|
+
if (level <= this.logLevel) {
|
|
104
|
+
if (level <= LogLevel.error) {
|
|
105
|
+
console.error(`${new Date().toISOString()} [${textLogLevel[level]}]:`, message);
|
|
106
|
+
} else {
|
|
107
|
+
console.info(`${new Date().toISOString()} [${textLogLevel[level]}]:`, message);
|
|
108
|
+
}
|
|
109
|
+
}
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
    /**
     * Preload everything needed for file lookups: CDN endpoints, build/CDN
     * configs, all archive indexes, the encoding table and the root table.
     * Results are stashed in this.preload; every lookup method asserts on it.
     */
    async init(): Promise<void> {
        this.log(LogLevel.info, 'Preloading remote CASC build:');
        this.log(LogLevel.info, this.version);

        // CDN server list for this region.
        this.log(LogLevel.info, 'Fetching CDN configuration...');
        const serverConfigText = await getProductCDNs(this.region, this.product);
        const serverConfig = parseProductCDNs(serverConfigText).find(
            (config) => config.Name === this.region,
        );
        assert(serverConfig, 'No server config found');

        // Resolve host names into usable URL prefixes (ordering decided by resolveCDNHost).
        this.log(LogLevel.info, 'Locating fastest CDN server...');
        const prefixes = await resolveCDNHost(
            serverConfig.Hosts.split(' '),
            serverConfig.Path,
        );
        this.log(LogLevel.info, 'Resolved CDN servers:');
        prefixes.forEach((prefix) => {
            this.log(LogLevel.info, prefix);
        });

        // CDN config (archive list) and build config (encoding/root hashes).
        this.log(LogLevel.info, 'Fetching build configurations...');
        const cdnConfigText = await getConfigFile(prefixes, this.version.CDNConfig, {
            showAttemptFail: this.logLevel >= LogLevel.warn,
        });
        const cdnConfig = parseCDNConfig(cdnConfigText);
        const buildConfigText = await getConfigFile(prefixes, this.version.BuildConfig, {
            showAttemptFail: this.logLevel >= LogLevel.warn,
        });
        const buildConfig = parseBuildConfig(buildConfigText);

        // Download every archive .index (up to 50 in flight, 5 retries each)
        // and merge all entries into one eKey -> location map.
        this.log(LogLevel.info, 'Loading archives...');
        const archiveKeys = cdnConfig.archives.split(' ');
        const archiveCount = archiveKeys.length;
        const archiveTotalSize = cdnConfig.archivesIndexSize
            .split(' ')
            .reduce((a, b) => a + parseInt(b, 10), 0);
        // Progress bar only at info verbosity or above.
        const archiveBar = this.logLevel >= LogLevel.info
            ? new cliProgress.SingleBar({ etaBuffer: 100 }, cliProgress.Presets.shades_classic)
            : undefined;
        archiveBar?.start(archiveCount, 0);
        const archivesMapArray = await mapLimit(
            archiveKeys,
            50,
            async (key: string) => {
                const fileName = `${key}.index`;
                const buffer = await retry({
                    times: 5,
                    interval: 3000,
                }, async () => getDataFile(prefixes, fileName, 'indexes', this.version.BuildConfig, {
                    showProgress: this.logLevel >= LogLevel.info,
                    showAttemptFail: this.logLevel >= LogLevel.warn,
                }));
                const map = parseArchiveIndex(buffer, key);

                archiveBar?.increment();

                return map;
            },
        )
            .then((result) => {
                archiveBar?.stop();
                // Each parsed index is iterable; flatten to [eKey, entry] pairs.
                return result.flatMap((e) => [...e]);
            })
            .catch((error: unknown) => {
                // Stop the bar so the terminal is not left in progress mode.
                archiveBar?.stop();
                throw error;
            });
        const archives = new Map(archivesMapArray);
        this.log(
            LogLevel.info,
            `Loaded ${archiveCount.toString()} archives (${archives.size.toString()} entries, ${formatFileSize(archiveTotalSize)})`,
        );

        // Encoding table: maps content keys (CKeys) to encoding keys (EKeys).
        this.log(LogLevel.info, 'Loading encoding table...');
        const [encodingCKey, encodingEKey] = buildConfig.encoding.split(' ');
        const encodingBuffer = await getDataFile(prefixes, encodingEKey, 'build', this.version.BuildConfig, {
            name: 'encoding',
            showProgress: this.logLevel >= LogLevel.info,
            showAttemptFail: this.logLevel >= LogLevel.warn,
        });
        this.log(LogLevel.info, `Loaded encoding table (${formatFileSize(encodingBuffer.byteLength)})`);

        this.log(LogLevel.info, 'Parsing encoding table...');
        const encoding = parseEncodingFile(encodingBuffer, encodingEKey, encodingCKey);
        this.log(LogLevel.info, `Parsed encoding table (${encoding.cKey2EKey.size.toString()} entries)`);

        // Root table: maps fileDataIDs / name hashes to content keys.
        // Its own EKey has to be resolved through the encoding table first.
        this.log(LogLevel.info, 'Loading root table...');
        const rootCKey = buildConfig.root;
        const rootEKeys = encoding.cKey2EKey.get(rootCKey);
        assert(rootEKeys !== undefined, 'Failing to find EKey for root table.');
        const rootEKey = typeof rootEKeys === 'string' ? rootEKeys : rootEKeys[0];
        const rootBuffer = await getDataFile(prefixes, rootEKey, 'build', this.version.BuildConfig, {
            name: 'root',
            showProgress: this.logLevel >= LogLevel.info,
            showAttemptFail: this.logLevel >= LogLevel.warn,
        });
        this.log(LogLevel.info, `Loaded root table (${formatFileSize(rootBuffer.byteLength)})`);

        this.log(LogLevel.info, 'Parsing root file...');
        const rootFile = parseRootFile(rootBuffer, rootEKey, rootCKey);
        this.log(LogLevel.info, `Parsed root file (${rootFile.fileDataID2CKey.size.toString()} entries, ${rootFile.nameHash2FileDataID.size.toString()} hashes)`);

        this.preload = {
            prefixes,
            archives,
            encoding,
            rootFile,
        };
    }
|
|
222
|
+
|
|
223
|
+
async loadRemoteListFile(): Promise<void> {
|
|
224
|
+
const url = 'https://github.com/wowdev/wow-listfile/releases/latest/download/community-listfile.csv';
|
|
225
|
+
const text = await (await fetch(url)).text();
|
|
226
|
+
const lines = text.split('\n').map((line) => line.trim()).filter((line) => line.length > 0);
|
|
227
|
+
|
|
228
|
+
lines.forEach((line) => {
|
|
229
|
+
const [fileDataID, name] = line.split(';');
|
|
230
|
+
this.name2FileDataID.set(name.trim(), parseInt(fileDataID.trim(), 10));
|
|
231
|
+
});
|
|
232
|
+
}
|
|
233
|
+
|
|
234
|
+
async loadRemoteTACTKeys(): Promise<void> {
|
|
235
|
+
const url = 'https://raw.githubusercontent.com/wowdev/TACTKeys/master/WoW.txt';
|
|
236
|
+
const text = await (await fetch(url)).text();
|
|
237
|
+
const lines = text.split('\n').map((line) => line.trim()).filter((line) => line.length > 0);
|
|
238
|
+
|
|
239
|
+
lines.forEach((line) => {
|
|
240
|
+
const [keyName, keyHex] = line.split(' ');
|
|
241
|
+
|
|
242
|
+
assert(keyName.length === 16, `Invalid keyName length: ${keyName.length.toString()}`);
|
|
243
|
+
assert(keyHex.length === 32, `Invalid key length: ${keyHex.length.toString()}`);
|
|
244
|
+
|
|
245
|
+
const key = Uint8Array.from(Buffer.from(keyHex, 'hex'));
|
|
246
|
+
|
|
247
|
+
this.keys.set(keyName.toLowerCase(), key);
|
|
248
|
+
});
|
|
249
|
+
}
|
|
250
|
+
|
|
251
|
+
    /**
     * Load TACT keys shipped inside the client's own DB2 tables
     * (dbfilesclient/tactkey.db2 + tactkeylookup.db2, fileDataIDs 1302850/1302851)
     * and register them under their lowercase names.
     */
    async loadTACTKeys(): Promise<void> {
        const keysCKeys = this.getContentKeysByFileDataID(1302850);
        const lookupCKeys = this.getContentKeysByFileDataID(1302851);

        assert(keysCKeys?.[0], 'Failing to find dbfilesclient/tactkey.db2');
        assert(lookupCKeys?.[0], 'Failing to find dbfilesclient/tactkeylookup.db2');

        // Both tables are independent downloads; fetch them in parallel.
        const [keysResult, lookupResult] = await Promise.all([
            this.getFileByContentKey(keysCKeys[0].cKey),
            this.getFileByContentKey(lookupCKeys[0].cKey),
        ]);

        const keysReader = new WDCReader(keysResult.buffer);
        const lookupReader = new WDCReader(lookupResult.buffer);

        // Rows are joined by record id: lookup row carries the key name,
        // key row carries the key material.
        lookupReader.getAllIDs().forEach((keyID) => {
            const lookupRow = lookupReader.rows.get(keyID);
            const keyRow = keysReader.rows.get(keyID);

            if (keyRow) {
                assert(Array.isArray(lookupRow) && lookupRow.length > 0, `Invalid TACTKeyLookup table row at id ${keyID.toString()}`);
                assert(Array.isArray(keyRow) && keyRow.length > 0, `Invalid TACTKey table row at id ${keyID.toString()}`);

                const keyName = lookupRow[0].data.toString(16).padStart(16, '0');
                // Hex rendered from the stored integer, i.e. little-endian byte order.
                const keyHexLE = keyRow[0].data.toString(16).padStart(32, '0');

                assert(keyName.length === 16, `Invalid keyName length: ${keyName.length.toString()}`);
                assert(keyHexLE.length === 32, `Invalid key length: ${keyHexLE.length.toString()}`);

                // Reverse the byte pairs to turn the little-endian rendering
                // into the canonical byte order before decoding.
                const keyHex = [...keyHexLE.matchAll(/.{2}/g)].map((v) => v[0]).reverse().join('');
                const key = Uint8Array.from(Buffer.from(keyHex, 'hex'));

                this.keys.set(keyName.toLowerCase(), key);
            }
        });
    }
|
|
287
|
+
|
|
288
|
+
    /**
     * Scan hotfix (ADB) BroadcastText entries for piggybacked TactKey payloads
     * and register any keys not already known.
     *
     * The entry blob is walked manually: two NUL-terminated strings, a
     * fixed-size remainder, then an optional extra-table record.
     */
    loadBroadcastTACTKeys(adb: ADBReader): void {
        adb.tableEntries
            .get(0x021826BB) // BroadcastText
            ?.forEach(({ data }) => {
                if (data.byteLength > 0) {
                    let pointer = 0;

                    // Text_lang
                    while (data[pointer] !== 0) {
                        pointer += 1;
                    }
                    pointer += 1;

                    // Text1_lang
                    while (data[pointer] !== 0) {
                        pointer += 1;
                    }
                    // NOTE(review): skips the NUL terminator plus 43 bytes of
                    // fixed-width BroadcastText fields — confirm against the
                    // record layout for the targeted client build.
                    pointer += 1 + 43;

                    // Anything left is an extra-table record: u32 table hash,
                    // u64 key name, then the key bytes.
                    if (pointer < data.byteLength) {
                        const extraTableHash = data.readUInt32LE(pointer);
                        if (extraTableHash === 0xDF2F53CF) { // TactKey
                            const keyName = data.readBigUInt64LE(pointer + 4).toString(16).padStart(16, '0');
                            // Takes everything to the end of the entry;
                            // presumably exactly the 16-byte key — verify no
                            // trailing data can follow it.
                            const key = Uint8Array.from(data.subarray(pointer + 12));

                            if (!this.keys.has(keyName)) {
                                this.keys.set(keyName, key);
                            }
                        }
                    }
                }
            });
    }
|
|
321
|
+
|
|
322
|
+
getFileDataIDByName(name: string): number | undefined {
|
|
323
|
+
assert(this.preload, 'Client not initialized');
|
|
324
|
+
|
|
325
|
+
const { rootFile } = this.preload;
|
|
326
|
+
const { nameHash2FileDataID } = rootFile;
|
|
327
|
+
|
|
328
|
+
const nameHash = getNameHash(name);
|
|
329
|
+
return nameHash2FileDataID.get(nameHash) ?? this.name2FileDataID.get(name.toLowerCase());
|
|
330
|
+
}
|
|
331
|
+
|
|
332
|
+
getContentKeysByFileDataID(fileDataID: number): FileInfo[] | undefined {
|
|
333
|
+
assert(this.preload, 'Client not initialized');
|
|
334
|
+
|
|
335
|
+
const { rootFile } = this.preload;
|
|
336
|
+
|
|
337
|
+
return rootFile.fileDataID2CKey.get(fileDataID);
|
|
338
|
+
}
|
|
339
|
+
|
|
340
|
+
    /**
     * Fetch and decode a file by its content key.
     *
     * Resolves the CKey to an EKey via the encoding table, downloads either
     * the archive slice containing it or the loose data file, then BLTE-decodes.
     * With allowMissingKey=false (default) the result is always fully decoded
     * and MD5-verified against the CKey; with allowMissingKey=true blocks whose
     * TACT key is absent are reported instead of failing, and verification is
     * only possible when nothing was skipped.
     */
    async getFileByContentKey(cKey: string, allowMissingKey?: false): Promise<FileFetchResultFull>;
    async getFileByContentKey(cKey: string, allowMissingKey: true): Promise<FileFetchResult>;
    async getFileByContentKey(cKey: string, allowMissingKey = false): Promise<FileFetchResult> {
        assert(this.preload, 'Client not initialized');

        const { prefixes, encoding, archives } = this.preload;
        const eKeys = encoding.cKey2EKey.get(cKey);
        assert(eKeys !== undefined, `Failing to find encoding key for ${cKey}`);

        // Multiple EKeys may map to one CKey; any of them works, take the first.
        const eKey = typeof eKeys === 'string' ? eKeys : eKeys[0];

        // Archived files are fetched as a byte range of the archive; loose
        // files are fetched directly by EKey.
        const archive = archives.get(eKey);
        const blte = archive
            ? await getDataFile(prefixes, archive.key, 'data', this.version.BuildConfig, {
                name: eKey,
                partialOffset: archive.offset,
                partialLength: archive.size,
                showProgress: this.logLevel >= LogLevel.info,
                showAttemptFail: this.logLevel >= LogLevel.warn,
            })
            : await getDataFile(prefixes, eKey, 'data', this.version.BuildConfig, {
                showProgress: this.logLevel >= LogLevel.info,
                showAttemptFail: this.logLevel >= LogLevel.warn,
            });

        const reader = new BLTEReader(blte, eKey, this.keys);
        if (!allowMissingKey) {
            // Strict path: processBytes(false) is expected to fail on a
            // missing key rather than skip blocks.
            reader.processBytes(allowMissingKey);

            const hash = crypto.createHash('md5').update(reader.buffer).digest('hex');
            assert(hash === cKey, `Invalid hash: expected ${cKey}, got ${hash}`);

            return {
                type: 'full',
                buffer: reader.buffer,
                blocks: undefined,
            };
        }

        const blocks = reader.processBytes(allowMissingKey);

        // No blocks were skipped, so the output is complete and verifiable.
        if (blocks.length === 0) {
            const hash = crypto.createHash('md5').update(reader.buffer).digest('hex');
            assert(hash === cKey, `Invalid hash: expected ${cKey}, got ${hash}`);

            return {
                type: 'full',
                buffer: reader.buffer,
                blocks: undefined,
            };
        }

        // Some blocks could not be decrypted; hash check is impossible.
        return {
            type: 'partial',
            buffer: reader.buffer,
            blocks,
        };
    }
|
|
398
|
+
}
|
|
399
|
+
|
|
400
|
+
// Type-only re-exports for library consumers.
export type {
    Version,
    ClientPreloadData,
    ArchiveIndex,
    EncodingData,
    RootData,
    FileInfo,
    FileFetchResultFull,
    FileFetchResultPartial,
    FileFetchResult,
    MissingKeyBlock,
};
|