@rhyster/wow-casc-dbc 2.9.3 → 2.9.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
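Every change in this diff is the same mechanical substitution in the bundled dist/index.cjs: the compiler-emitted expression void 0 is replaced with the literal undefined. The two are interchangeable at runtime; a minimal standalone sketch (not part of the package) illustrates the equivalence:

    // void 0 always evaluates to undefined, so checks written either way behave identically.
    const location = undefined;
    console.log(void 0 === undefined);    // true
    console.log(location !== void 0);     // false
    console.log(location !== undefined);  // false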
package/dist/index.cjs CHANGED
@@ -322,7 +322,7 @@ class BLTEReader {
  this.processedBlock += 1;
  this.processedOffset += block.compressedSize;
  }
- return allowMissingKey ? missingKeyBlocks : void 0;
+ return allowMissingKey ? missingKeyBlocks : undefined;
  }
  }
 
@@ -374,12 +374,12 @@ const requestData = async (url, {
  // eslint-disable-next-line @typescript-eslint/naming-convention
  "User-Agent": USER_AGENT,
  // eslint-disable-next-line @typescript-eslint/naming-convention
- Range: partialOffset !== void 0 && partialLength !== void 0 ? `bytes=${partialOffset.toString()}-${(partialOffset + partialLength - 1).toString()}` : "bytes=0-"
+ Range: partialOffset !== undefined && partialLength !== undefined ? `bytes=${partialOffset.toString()}-${(partialOffset + partialLength - 1).toString()}` : "bytes=0-"
  }
  };
  http__default.get(url, options, (res) => {
  if (res.statusCode === 301 || res.statusCode === 302) {
- if (res.headers.location !== void 0) {
+ if (res.headers.location !== undefined) {
  requestData(res.headers.location, { partialOffset, partialLength, showProgress }).then(resolve).catch((err) => {
  throw err;
  });
@@ -388,13 +388,13 @@ const requestData = async (url, {
  }
  return;
  }
- if (res.statusCode === void 0 || res.statusCode < 200 || res.statusCode > 302) {
+ if (res.statusCode === undefined || res.statusCode < 200 || res.statusCode > 302) {
  reject(new Error(`Failed to request ${url}, Status Code: ${res.statusCode?.toString() ?? "undefined"}`));
  return;
  }
  const lengthText = res.headers["content-length"];
- const length = lengthText !== void 0 ? parseInt(lengthText, 10) : 0;
- const bar = showProgress === true && !Number.isNaN(length) && length >= 10485760 ? new cliProgress__default.SingleBar({ etaBuffer: 10240 }, cliProgress__default.Presets.shades_classic) : void 0;
+ const length = lengthText !== undefined ? parseInt(lengthText, 10) : 0;
+ const bar = showProgress === true && !Number.isNaN(length) && length >= 10485760 ? new cliProgress__default.SingleBar({ etaBuffer: 10240 }, cliProgress__default.Presets.shades_classic) : undefined;
  bar?.start(length, 0);
  const chunks = [];
  res.on("data", (chunk) => {
@@ -430,7 +430,7 @@ const downloadFile = (prefixes, type, key, {
  };
  const getFileCache = async (file) => {
  const integrity = await cacheIntegrity.get(file);
- if (integrity !== void 0) {
+ if (integrity !== undefined) {
  try {
  const buffer = await fs__default.readFile(path__default.resolve(CACHE_ROOT, file));
  const hash = crypto__default.createHash("sha256").update(buffer).digest("hex");
@@ -440,7 +440,7 @@ const getFileCache = async (file) => {
  } catch {
  }
  }
- return void 0;
+ return undefined;
  };
  const getDataFile = async (prefixes, key, type, buildCKey, {
  name,
@@ -450,10 +450,10 @@ const getDataFile = async (prefixes, key, type, buildCKey, {
  showAttemptFail
  } = {}) => {
  const dir = type === "build" ? path__default.join(CACHE_DIRS[type], buildCKey) : CACHE_DIRS[type];
- const file = name !== void 0 ? path__default.join(dir, name) : path__default.join(dir, key);
+ const file = name !== undefined ? path__default.join(dir, name) : path__default.join(dir, key);
  const cacheBuffer = await getFileCache(file);
  if (cacheBuffer) {
- if (name === void 0 && partialOffset !== void 0 && partialLength !== void 0) {
+ if (name === undefined && partialOffset !== undefined && partialLength !== undefined) {
  return cacheBuffer.subarray(partialOffset, partialOffset + partialLength);
  }
  return cacheBuffer;
@@ -464,7 +464,7 @@ const getDataFile = async (prefixes, key, type, buildCKey, {
  showProgress,
  showAttemptFail
  });
- if (partialOffset === void 0 && partialLength === void 0 || name !== void 0) {
+ if (partialOffset === undefined && partialLength === undefined || name !== undefined) {
  await fs__default.mkdir(path__default.resolve(CACHE_ROOT, dir), { recursive: true });
  await fs__default.writeFile(path__default.resolve(CACHE_ROOT, file), downloadBuffer);
  const hash = crypto__default.createHash("sha256").update(downloadBuffer).digest("hex");
@@ -1419,7 +1419,7 @@ class WDCReader {
  return;
  }
  for (let recordIndex = 0; recordIndex < header.recordCount; recordIndex += 1) {
- let recordID = idList.length > 0 ? idList[recordIndex] : void 0;
+ let recordID = idList.length > 0 ? idList[recordIndex] : undefined;
  const recordBuffer = isNormal ? records[recordIndex] : offsetMap[recordIndex].data;
  if (isNormal) {
  const recordData = fieldsInfo.map((fieldInfo, fieldIndex) => {
@@ -1436,7 +1436,7 @@ class WDCReader {
  data: value
  };
  }
- if (recordID === void 0 && fieldIndex === idIndex) {
+ if (recordID === undefined && fieldIndex === idIndex) {
  recordID = value;
  }
  const fieldOffset = fieldInfo.fieldOffsetBits >>> 3;
@@ -1448,7 +1448,7 @@ class WDCReader {
  };
  }
  case "commonData": {
- const value = (recordID !== void 0 ? commonData.get(fieldIndex)?.get(recordID) : void 0) ?? fieldInfo.defaultValue;
+ const value = (recordID !== undefined ? commonData.get(fieldIndex)?.get(recordID) : undefined) ?? fieldInfo.defaultValue;
  return {
  type: "commonData",
  data: value
@@ -1483,7 +1483,7 @@ class WDCReader {
  assert__default(fieldPalletData, `No pallet data for field ${fieldIndex.toString()}`);
  value = fieldPalletData[value];
  }
- if (recordID === void 0 && fieldIndex === idIndex) {
+ if (recordID === undefined && fieldIndex === idIndex) {
  recordID = value;
  }
  return {
@@ -1495,10 +1495,10 @@ class WDCReader {
  throw new Error("Unreachable");
  }
  });
- assert__default(recordID !== void 0, "No record ID found");
+ assert__default(recordID !== undefined, "No record ID found");
  this.rows.set(recordID, recordData);
  const foreignID = relationshipMap.get(recordIndex);
- if (foreignID !== void 0) {
+ if (foreignID !== undefined) {
  this.relationships.set(recordID, foreignID);
  }
  } else {
@@ -1506,10 +1506,10 @@ class WDCReader {
  type: "sparse",
  data: recordBuffer
  };
- assert__default(recordID !== void 0, "No record ID found");
+ assert__default(recordID !== undefined, "No record ID found");
  this.rows.set(recordID, recordData);
  const foreignID = relationshipMap.get(recordIndex);
- if (foreignID !== void 0) {
+ if (foreignID !== undefined) {
  this.relationships.set(recordID, foreignID);
  }
  }
@@ -1547,20 +1547,20 @@ class WDCReader {
  data: hotfix.data
  };
  case "delete":
- return void 0;
+ return undefined;
  default:
  throw new Error("Unreachable");
  }
  }
  const dst = this.copyTable.get(id);
- if (dst !== void 0) {
+ if (dst !== undefined) {
  return this.rows.get(dst);
  }
  return this.rows.get(id);
  }
  getRowRelationship(id) {
  const dst = this.copyTable.get(id);
- if (dst !== void 0) {
+ if (dst !== undefined) {
  return this.relationships.get(dst);
  }
  return this.relationships.get(id);
@@ -1645,7 +1645,7 @@ class CASCClient {
  const archiveKeys = cdnConfig.archives.split(" ");
  const archiveCount = archiveKeys.length;
  const archiveTotalSize = cdnConfig.archivesIndexSize.split(" ").reduce((a, b) => a + parseInt(b, 10), 0);
- const archiveBar = this.logLevel >= 2 /* info */ ? new cliProgress__default.SingleBar({ etaBuffer: 100 }, cliProgress__default.Presets.shades_classic) : void 0;
+ const archiveBar = this.logLevel >= 2 /* info */ ? new cliProgress__default.SingleBar({ etaBuffer: 100 }, cliProgress__default.Presets.shades_classic) : undefined;
  archiveBar?.start(archiveCount, 0);
  const archivesMapArray = await async.mapLimit(
  archiveKeys,
@@ -1693,7 +1693,7 @@ class CASCClient {
  }
  const cKey = configText;
  const eKeys = encoding.cKey2EKey.get(cKey);
- assert__default(eKeys !== void 0, `Failing to find encoding key for ${cKey}`);
+ assert__default(eKeys !== undefined, `Failing to find encoding key for ${cKey}`);
  const eKey = typeof eKeys === "string" ? eKeys : eKeys[0];
  return [cKey, eKey];
  };
@@ -1821,7 +1821,7 @@ class CASCClient {
  assert__default(this.preload, "Client not initialized");
  const { prefixes, encoding, archives } = this.preload;
  const eKeys = encoding.cKey2EKey.get(cKey);
- assert__default(eKeys !== void 0, `Failing to find encoding key for ${cKey}`);
+ assert__default(eKeys !== undefined, `Failing to find encoding key for ${cKey}`);
  const eKey = typeof eKeys === "string" ? eKeys : eKeys[0];
  const archive = archives.get(eKey);
  const blte = archive ? await getDataFile(prefixes, archive.key, "data", this.version.BuildConfig, {
@@ -1842,7 +1842,7 @@ class CASCClient {
  return {
  type: "full",
  buffer: reader.buffer,
- blocks: void 0
+ blocks: undefined
  };
  }
  const blocks = reader.processBytes(allowMissingKey);
@@ -1852,7 +1852,7 @@ class CASCClient {
  return {
  type: "full",
  buffer: reader.buffer,
- blocks: void 0
+ blocks: undefined
  };
  }
  return {
@@ -1916,7 +1916,7 @@ class DBDParser {
  const manifests = await (await fetch(manifestsURL)).json();
  const tableHashHex = this.wdc.tableHash.toString(16).padStart(8, "0").toLowerCase();
  const manifest = manifests.find((v) => v.tableHash.toLowerCase() === tableHashHex);
- assert__default(manifest?.tableName !== void 0, `No manifest found for table hash ${tableHashHex}`);
+ assert__default(manifest?.tableName !== undefined, `No manifest found for table hash ${tableHashHex}`);
  const url = `https://raw.githubusercontent.com/wowdev/WoWDBDefs/master/definitions/${manifest.tableName}.dbd`;
  const text = await (await fetch(url)).text();
  const lines = text.split("\n").map((v) => v.trim());
@@ -1943,7 +1943,7 @@ class DBDParser {
  const layoutsMatch = PATTERN_LAYOUT.exec(line);
  const layouts = layoutsMatch?.[1].split(",").map((v) => v.trim().toLowerCase());
  return layouts?.includes(layoutHashHex) === true;
- }) !== void 0);
+ }) !== undefined);
  assert__default(versionChunk, `No version definition found for layout hash ${layoutHashHex}`);
  versionChunk.forEach((line) => {
  if (line.startsWith("LAYOUT") || line.startsWith("BUILD") || line.startsWith("COMMENT")) {
@@ -1963,10 +1963,10 @@ class DBDParser {
  arraySizeText
  ] = match;
  const type = this.definitions.get(name);
- assert__default(type !== void 0, `No type found for column ${name}`);
+ assert__default(type !== undefined, `No type found for column ${name}`);
  const annotations = annotationsText ? annotationsText.split(",").map((v) => v.trim()) : [];
- const size = sizeText ? parseInt(sizeText, 10) : void 0;
- const arraySize = arraySizeText ? parseInt(arraySizeText, 10) : void 0;
+ const size = sizeText ? parseInt(sizeText, 10) : undefined;
+ const arraySize = arraySizeText ? parseInt(arraySizeText, 10) : undefined;
  const isID = !!annotations.includes("id");
  const isInline = !annotations.includes("noninline");
  const isRelation = !!annotations.includes("relation");
@@ -1998,7 +1998,7 @@ class DBDParser {
  }
  const row = this.wdc.getRowData(id);
  if (!row) {
- return void 0;
+ return undefined;
  }
  const data = {};
  if (Array.isArray(row)) {
@@ -2015,13 +2015,13 @@ class DBDParser {
  const fieldInfo = this.wdc.fieldsInfo[fieldIndex];
  const srcSigned = fieldInfo.storageType === "bitpackedSigned";
  const srcSize = fieldInfo.storageType === "none" || fieldInfo.storageType === "bitpacked" || fieldInfo.storageType === "bitpackedSigned" ? Math.ceil(fieldInfo.fieldSizeBits / 8) : 4;
- const dstSize = column.size !== void 0 ? Math.ceil(column.size / 8) : void 0;
+ const dstSize = column.size !== undefined ? Math.ceil(column.size / 8) : undefined;
  if (cell.type === "bitpackedArray") {
  data[column.name] = cell.data.map((v) => {
  if (column.type === "float") {
  return castFloat(v, srcSize, srcSigned);
  }
- if (dstSize !== void 0) {
+ if (dstSize !== undefined) {
  return castIntegerBySize(
  v,
  srcSize,
@@ -2039,7 +2039,7 @@ class DBDParser {
  data[column.name] = cell.string;
  }
  } else if (column.type === "float") {
- if (column.arraySize !== void 0) {
+ if (column.arraySize !== undefined) {
  const castBuffer = getCastBuffer(
  typeof cell.data === "number" ? BigInt(cell.data) : cell.data,
  srcSize,
@@ -2056,8 +2056,8 @@ class DBDParser {
  data[column.name] = castFloat(cell.data, srcSize, srcSigned);
  }
  } else if (column.type === "int") {
- if (column.arraySize !== void 0) {
- assert__default(dstSize !== void 0, `Missing size for int array column ${column.name}`);
+ if (column.arraySize !== undefined) {
+ assert__default(dstSize !== undefined, `Missing size for int array column ${column.name}`);
  const castBuffer = getCastBuffer(
  typeof cell.data === "number" ? BigInt(cell.data) : cell.data,
  srcSize,
@@ -2085,7 +2085,7 @@ class DBDParser {
  column.isSigned
  );
  } else {
- assert__default(column.size === void 0 || column.size === 64, `Unexpected size ${column.size?.toString() ?? ""} for column ${column.name}`);
+ assert__default(column.size === undefined || column.size === 64, `Unexpected size ${column.size?.toString() ?? ""} for column ${column.name}`);
  if (srcSigned !== column.isSigned) {
  data[column.name] = castBigInt64(
  cell.data,
@@ -2139,7 +2139,7 @@ class DBDParser {
  if (fieldIndex + 1 < this.wdc.fields.length) {
  count = Math.max((nextField.position - currField.position) / size, 1);
  } else {
- count = column.arraySize !== void 0 ? (buffer.byteLength - offset) / size : 1;
+ count = column.arraySize !== undefined ? (buffer.byteLength - offset) / size : 1;
  }
  for (let i = 0; i < count; i += 1) {
  if (column.type === "float") {