@loaders.gl/draco 4.3.4 → 4.4.0-alpha.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. package/dist/dist.dev.js +13059 -1021
  2. package/dist/dist.min.js +12 -3
  3. package/dist/draco-arrow-loader.d.ts +30 -0
  4. package/dist/draco-arrow-loader.d.ts.map +1 -0
  5. package/dist/draco-arrow-loader.js +20 -0
  6. package/dist/draco-arrow-loader.js.map +1 -0
  7. package/dist/draco-format.d.ts +13 -0
  8. package/dist/draco-format.d.ts.map +1 -0
  9. package/dist/draco-format.js +16 -0
  10. package/dist/draco-format.js.map +1 -0
  11. package/dist/draco-loader.d.ts +29 -2
  12. package/dist/draco-loader.d.ts.map +1 -1
  13. package/dist/draco-loader.js +21 -1
  14. package/dist/draco-loader.js.map +1 -0
  15. package/dist/draco-worker-node.js +429 -185
  16. package/dist/draco-worker-node.js.map +4 -4
  17. package/dist/draco-worker.js +188 -116
  18. package/dist/draco-worker.js.map +4 -4
  19. package/dist/draco-writer-worker-node.js +301 -76
  20. package/dist/draco-writer-worker-node.js.map +4 -4
  21. package/dist/draco-writer-worker.js +61 -7
  22. package/dist/draco-writer-worker.js.map +2 -2
  23. package/dist/draco-writer.d.ts +16 -0
  24. package/dist/draco-writer.d.ts.map +1 -1
  25. package/dist/draco-writer.js +21 -1
  26. package/dist/draco-writer.js.map +1 -0
  27. package/dist/draco3d/draco3d-types.js +1 -0
  28. package/dist/draco3d/draco3d-types.js.map +1 -0
  29. package/dist/index.cjs +865 -811
  30. package/dist/index.cjs.map +4 -4
  31. package/dist/index.d.ts +5 -48
  32. package/dist/index.d.ts.map +1 -1
  33. package/dist/index.js +7 -38
  34. package/dist/index.js.map +1 -0
  35. package/dist/lib/draco-builder.d.ts.map +1 -1
  36. package/dist/lib/draco-builder.js +1 -0
  37. package/dist/lib/draco-builder.js.map +1 -0
  38. package/dist/lib/draco-module-loader.d.ts +3 -2
  39. package/dist/lib/draco-module-loader.d.ts.map +1 -1
  40. package/dist/lib/draco-module-loader.js +53 -10
  41. package/dist/lib/draco-module-loader.js.map +1 -0
  42. package/dist/lib/draco-parser.d.ts.map +1 -1
  43. package/dist/lib/draco-parser.js +4 -1
  44. package/dist/lib/draco-parser.js.map +1 -0
  45. package/dist/lib/draco-types.js +1 -0
  46. package/dist/lib/draco-types.js.map +1 -0
  47. package/dist/lib/utils/get-draco-schema.d.ts.map +1 -1
  48. package/dist/lib/utils/get-draco-schema.js +2 -1
  49. package/dist/lib/utils/get-draco-schema.js.map +1 -0
  50. package/dist/lib/utils/version.js +2 -1
  51. package/dist/lib/utils/version.js.map +1 -0
  52. package/dist/libs/libs/draco_decoder.wasm +0 -0
  53. package/dist/libs/libs/draco_encoder.js +52 -0
  54. package/dist/libs/libs/draco_wasm_wrapper.js +117 -0
  55. package/dist/workers/draco-worker-node.js +1 -0
  56. package/dist/workers/draco-worker-node.js.map +1 -0
  57. package/dist/workers/draco-worker.js +1 -0
  58. package/dist/workers/draco-worker.js.map +1 -0
  59. package/dist/workers/draco-writer-worker-node.cjs +18 -0
  60. package/dist/workers/draco-writer-worker-node.cjs.map +1 -0
  61. package/dist/workers/draco-writer-worker-node.d.cts +2 -0
  62. package/dist/workers/draco-writer-worker-node.d.cts.map +1 -0
  63. package/dist/workers/draco-writer-worker-node.js +1 -0
  64. package/dist/workers/draco-writer-worker-node.js.map +1 -0
  65. package/dist/workers/draco-writer-worker.js +1 -0
  66. package/dist/workers/draco-writer-worker.js.map +1 -0
  67. package/package.json +19 -6
  68. package/src/draco-arrow-loader.ts +25 -0
  69. package/src/draco-format.ts +18 -0
  70. package/src/draco-loader.ts +26 -3
  71. package/src/draco-writer.ts +22 -1
  72. package/src/index.ts +5 -45
  73. package/src/lib/draco-module-loader.ts +87 -23
  74. package/src/lib/draco-parser.ts +3 -1
  75. package/src/lib/utils/get-draco-schema.ts +2 -1
  76. package/src/workers/draco-writer-worker-node.cjs +18 -0
@@ -156,9 +156,9 @@ var require_inherits = __commonJS({
  }
  });

- // ../../node_modules/string_decoder/index.js
+ // ../../node_modules/contentstream/node_modules/string_decoder/index.js
  var require_string_decoder = __commonJS({
- "../../node_modules/string_decoder/index.js"(exports) {
+ "../../node_modules/contentstream/node_modules/string_decoder/index.js"(exports) {
  var Buffer2 = require("buffer").Buffer;
  var isBufferEncoding = Buffer2.isEncoding || function(encoding) {
  switch (encoding && encoding.toLowerCase()) {
@@ -1685,6 +1685,144 @@ var require_stream_duplex2 = __commonJS({
  }
  });

+ // ../../node_modules/gif-encoder/node_modules/string_decoder/index.js
+ var require_string_decoder2 = __commonJS({
+ "../../node_modules/gif-encoder/node_modules/string_decoder/index.js"(exports) {
+ var Buffer2 = require("buffer").Buffer;
+ var isBufferEncoding = Buffer2.isEncoding || function(encoding) {
+ switch (encoding && encoding.toLowerCase()) {
+ case "hex":
+ case "utf8":
+ case "utf-8":
+ case "ascii":
+ case "binary":
+ case "base64":
+ case "ucs2":
+ case "ucs-2":
+ case "utf16le":
+ case "utf-16le":
+ case "raw":
+ return true;
+ default:
+ return false;
+ }
+ };
+ function assertEncoding(encoding) {
+ if (encoding && !isBufferEncoding(encoding)) {
+ throw new Error("Unknown encoding: " + encoding);
+ }
+ }
+ var StringDecoder = exports.StringDecoder = function(encoding) {
+ this.encoding = (encoding || "utf8").toLowerCase().replace(/[-_]/, "");
+ assertEncoding(encoding);
+ switch (this.encoding) {
+ case "utf8":
+ this.surrogateSize = 3;
+ break;
+ case "ucs2":
+ case "utf16le":
+ this.surrogateSize = 2;
+ this.detectIncompleteChar = utf16DetectIncompleteChar;
+ break;
+ case "base64":
+ this.surrogateSize = 3;
+ this.detectIncompleteChar = base64DetectIncompleteChar;
+ break;
+ default:
+ this.write = passThroughWrite;
+ return;
+ }
+ this.charBuffer = new Buffer2(6);
+ this.charReceived = 0;
+ this.charLength = 0;
+ };
+ StringDecoder.prototype.write = function(buffer) {
+ var charStr = "";
+ while (this.charLength) {
+ var available = buffer.length >= this.charLength - this.charReceived ? this.charLength - this.charReceived : buffer.length;
+ buffer.copy(this.charBuffer, this.charReceived, 0, available);
+ this.charReceived += available;
+ if (this.charReceived < this.charLength) {
+ return "";
+ }
+ buffer = buffer.slice(available, buffer.length);
+ charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding);
+ var charCode = charStr.charCodeAt(charStr.length - 1);
+ if (charCode >= 55296 && charCode <= 56319) {
+ this.charLength += this.surrogateSize;
+ charStr = "";
+ continue;
+ }
+ this.charReceived = this.charLength = 0;
+ if (buffer.length === 0) {
+ return charStr;
+ }
+ break;
+ }
+ this.detectIncompleteChar(buffer);
+ var end = buffer.length;
+ if (this.charLength) {
+ buffer.copy(this.charBuffer, 0, buffer.length - this.charReceived, end);
+ end -= this.charReceived;
+ }
+ charStr += buffer.toString(this.encoding, 0, end);
+ var end = charStr.length - 1;
+ var charCode = charStr.charCodeAt(end);
+ if (charCode >= 55296 && charCode <= 56319) {
+ var size = this.surrogateSize;
+ this.charLength += size;
+ this.charReceived += size;
+ this.charBuffer.copy(this.charBuffer, size, 0, size);
+ buffer.copy(this.charBuffer, 0, 0, size);
+ return charStr.substring(0, end);
+ }
+ return charStr;
+ };
+ StringDecoder.prototype.detectIncompleteChar = function(buffer) {
+ var i2 = buffer.length >= 3 ? 3 : buffer.length;
+ for (; i2 > 0; i2--) {
+ var c2 = buffer[buffer.length - i2];
+ if (i2 == 1 && c2 >> 5 == 6) {
+ this.charLength = 2;
+ break;
+ }
+ if (i2 <= 2 && c2 >> 4 == 14) {
+ this.charLength = 3;
+ break;
+ }
+ if (i2 <= 3 && c2 >> 3 == 30) {
+ this.charLength = 4;
+ break;
+ }
+ }
+ this.charReceived = i2;
+ };
+ StringDecoder.prototype.end = function(buffer) {
+ var res = "";
+ if (buffer && buffer.length)
+ res = this.write(buffer);
+ if (this.charReceived) {
+ var cr2 = this.charReceived;
+ var buf = this.charBuffer;
+ var enc = this.encoding;
+ res += buf.slice(0, cr2).toString(enc);
+ }
+ return res;
+ };
+ function passThroughWrite(buffer) {
+ return buffer.toString(this.encoding);
+ }
+ function utf16DetectIncompleteChar(buffer) {
+ this.charReceived = buffer.length % 2;
+ this.charLength = this.charReceived ? 2 : 0;
+ }
+ function base64DetectIncompleteChar(buffer) {
+ this.charReceived = buffer.length % 3;
+ this.charLength = this.charReceived ? 3 : 0;
+ }
+ }
+ });
+
  // ../../node_modules/gif-encoder/node_modules/readable-stream/lib/_stream_readable.js
  var require_stream_readable2 = __commonJS({
  "../../node_modules/gif-encoder/node_modules/readable-stream/lib/_stream_readable.js"(exports, module2) {
@@ -1739,7 +1877,7 @@ var require_stream_readable2 = __commonJS({
  this.encoding = null;
  if (options.encoding) {
  if (!StringDecoder)
- StringDecoder = require_string_decoder().StringDecoder;
+ StringDecoder = require_string_decoder2().StringDecoder;
  this.decoder = new StringDecoder(options.encoding);
  this.encoding = options.encoding;
  }
@@ -1811,7 +1949,7 @@ var require_stream_readable2 = __commonJS({
  }
  Readable5.prototype.setEncoding = function(enc) {
  if (!StringDecoder)
- StringDecoder = require_string_decoder().StringDecoder;
+ StringDecoder = require_string_decoder2().StringDecoder;
  this._readableState.decoder = new StringDecoder(enc);
  this._readableState.encoding = enc;
  return this;
@@ -7024,13 +7162,13 @@ var require_iota = __commonJS({
  var require_is_buffer = __commonJS({
  "../../node_modules/is-buffer/index.js"(exports, module2) {
  module2.exports = function(obj) {
- return obj != null && (isBuffer3(obj) || isSlowBuffer(obj) || !!obj._isBuffer);
+ return obj != null && (isBuffer2(obj) || isSlowBuffer(obj) || !!obj._isBuffer);
  };
- function isBuffer3(obj) {
+ function isBuffer2(obj) {
  return !!obj.constructor && typeof obj.constructor.isBuffer === "function" && obj.constructor.isBuffer(obj);
  }
  function isSlowBuffer(obj) {
- return typeof obj.readFloatLE === "function" && typeof obj.slice === "function" && isBuffer3(obj.slice(0, 0));
+ return typeof obj.readFloatLE === "function" && typeof obj.slice === "function" && isBuffer2(obj.slice(0, 0));
  }
  }
  });
@@ -7039,7 +7177,7 @@ var require_is_buffer = __commonJS({
  var require_ndarray = __commonJS({
  "../../node_modules/ndarray/ndarray.js"(exports, module2) {
  var iota = require_iota();
- var isBuffer3 = require_is_buffer();
+ var isBuffer2 = require_is_buffer();
  var hasTypedArrays = typeof Float64Array !== "undefined";
  function compare1st(a2, b2) {
  return a2[0] - b2[0];
@@ -7201,7 +7339,7 @@ var require_ndarray = __commonJS({
  return procedure(CACHED_CONSTRUCTORS[dtype], order);
  }
  function arrayDType(data) {
- if (isBuffer3(data)) {
+ if (isBuffer2(data)) {
  return "buffer";
  }
  if (hasTypedArrays) {
@@ -33594,7 +33732,7 @@ var require_openssh_cert = __commonJS({
  write,
  /* Internal private API */
  fromBuffer,
- toBuffer
+ toBuffer: toBuffer3
  };
  var assert3 = require_assert();
  var SSHBuffer = require_ssh_buffer();
@@ -33734,7 +33872,7 @@ var require_openssh_cert = __commonJS({
  if (cert.signatures.openssh === void 0)
  cert.signatures.openssh = {};
  try {
- var blob = toBuffer(cert, true);
+ var blob = toBuffer3(cert, true);
  } catch (e2) {
  delete cert.signatures.openssh;
  return false;
@@ -33752,7 +33890,7 @@ var require_openssh_cert = __commonJS({
  if (cert.signatures.openssh === void 0)
  cert.signatures.openssh = {};
  try {
- var blob = toBuffer(cert, true);
+ var blob = toBuffer3(cert, true);
  } catch (e2) {
  delete cert.signatures.openssh;
  done(e2);
@@ -33777,13 +33915,13 @@ var require_openssh_cert = __commonJS({
  function write(cert, options) {
  if (options === void 0)
  options = {};
- var blob = toBuffer(cert);
+ var blob = toBuffer3(cert);
  var out = getCertType(cert.subjectKey) + " " + blob.toString("base64");
  if (options.comment)
  out = out + " " + options.comment;
  return out;
  }
- function toBuffer(cert, noSig) {
+ function toBuffer3(cert, noSig) {
  assert3.object(cert.signatures.openssh, "signature for openssh format");
  var sig = cert.signatures.openssh;
  if (sig.nonce === void 0)
@@ -47316,7 +47454,7 @@ var require_utils3 = __commonJS({
  var isRegExp = function isRegExp2(obj) {
  return Object.prototype.toString.call(obj) === "[object RegExp]";
  };
- var isBuffer3 = function isBuffer4(obj) {
+ var isBuffer2 = function isBuffer3(obj) {
  if (obj === null || typeof obj === "undefined") {
  return false;
  }
@@ -47328,7 +47466,7 @@ var require_utils3 = __commonJS({
  compact,
  decode: decode2,
  encode: encode3,
- isBuffer: isBuffer3,
+ isBuffer: isBuffer2,
  isRegExp,
  merge
  };
@@ -57750,11 +57888,11 @@ var require_data_uri_to_buffer = __commonJS({
  // ../../node_modules/parse-data-uri/index.js
  var require_parse_data_uri = __commonJS({
  "../../node_modules/parse-data-uri/index.js"(exports, module2) {
- var toBuffer = require_data_uri_to_buffer();
+ var toBuffer3 = require_data_uri_to_buffer();
  function parseDataUri(dataUri) {
  return {
  mimeType: normalizeMimeType(parseMimeType(dataUri)),
- data: toBuffer(dataUri)
+ data: toBuffer3(dataUri)
  };
  }
  function parseMimeType(uri) {
@@ -59668,6 +59806,9 @@ function getPixelsAsync(buffer, mimeType) {
  );
  }

+ // ../loader-utils/src/lib/javascript-utils/is-type.ts
+ var isSharedArrayBuffer = (value) => typeof SharedArrayBuffer !== "undefined" && value instanceof SharedArrayBuffer;
+
  // ../worker-utils/src/lib/env-utils/version.ts
  function getVersion() {
  var _a;
@@ -59678,8 +59819,9 @@ function getVersion() {
  "loaders.gl: The __VERSION__ variable is not injected using babel plugin. Latest unstable workers would be fetched from the CDN."
  );
  globalThis._loadersgl_.version = NPM_TAG;
+ warningIssued = true;
  } else {
- globalThis._loadersgl_.version = "4.3.3";
+ globalThis._loadersgl_.version = "4.4.0-alpha.10";
  }
  }
  return globalThis._loadersgl_.version;
@@ -59850,6 +59992,9 @@ async function loadLibrary(libraryUrl, moduleName = null, options = {}, libraryN
  return await loadLibraryPromises[libraryUrl];
  }
  function getLibraryUrl(library, moduleName, options = {}, libraryName = null) {
+ if (options == null ? void 0 : options.core) {
+ throw new Error("loadLibrary: options.core must be pre-normalized");
+ }
  if (!options.useLocalLibraries && library.startsWith("http")) {
  return library;
  }
@@ -59875,10 +60020,20 @@ async function loadLibraryFromFile(libraryUrl) {
  return await loadAsArrayBuffer(libraryUrl);
  }
  if (!isBrowser2) {
+ const { requireFromFile: requireFromFile2 } = globalThis.loaders || {};
  try {
- const { requireFromFile: requireFromFile2 } = globalThis.loaders || {};
- return await (requireFromFile2 == null ? void 0 : requireFromFile2(libraryUrl));
+ const result = await (requireFromFile2 == null ? void 0 : requireFromFile2(libraryUrl));
+ if (result || !libraryUrl.includes("/dist/libs/")) {
+ return result;
+ }
+ return await (requireFromFile2 == null ? void 0 : requireFromFile2(libraryUrl.replace("/dist/libs/", "/src/libs/")));
  } catch (error) {
+ if (libraryUrl.includes("/dist/libs/")) {
+ try {
+ return await (requireFromFile2 == null ? void 0 : requireFromFile2(libraryUrl.replace("/dist/libs/", "/src/libs/")));
+ } catch {
+ }
+ }
  console.error(error);
  return null;
  }
@@ -59914,7 +60069,14 @@ async function loadAsArrayBuffer(url) {
  const response = await fetch(url);
  return await response.arrayBuffer();
  }
- return await readFileAsArrayBuffer2(url);
+ try {
+ return await readFileAsArrayBuffer2(url);
+ } catch {
+ if (url.includes("/dist/libs/")) {
+ return await readFileAsArrayBuffer2(url.replace("/dist/libs/", "/src/libs/"));
+ }
+ throw new Error(`Failed to load ArrayBuffer from ${url}`);
+ }
  }
  async function loadAsText(url) {
  const { readFileAsText: readFileAsText2 } = globalThis.loaders || {};
@@ -59922,7 +60084,14 @@ async function loadAsText(url) {
  const response = await fetch(url);
  return await response.text();
  }
- return await readFileAsText2(url);
+ try {
+ return await readFileAsText2(url);
+ } catch {
+ if (url.includes("/dist/libs/")) {
+ return await readFileAsText2(url.replace("/dist/libs/", "/src/libs/"));
+ }
+ throw new Error(`Failed to load text from ${url}`);
+ }
  }

  // ../loader-utils/src/lib/binary-utils/array-buffer-utils.ts
@@ -59947,10 +60116,26 @@ function concatenateArrayBuffersFromArray(sources) {
  async function concatenateArrayBuffersAsync(asyncIterator) {
  const arrayBuffers = [];
  for await (const chunk of asyncIterator) {
- arrayBuffers.push(chunk);
+ arrayBuffers.push(copyToArrayBuffer(chunk));
  }
  return concatenateArrayBuffers(...arrayBuffers);
  }
+ function copyToArrayBuffer(chunk) {
+ if (chunk instanceof ArrayBuffer) {
+ return chunk;
+ }
+ if (ArrayBuffer.isView(chunk)) {
+ const { buffer, byteOffset, byteLength } = chunk;
+ return copyFromBuffer(buffer, byteOffset, byteLength);
+ }
+ return copyFromBuffer(chunk);
+ }
+ function copyFromBuffer(buffer, byteOffset = 0, byteLength = buffer.byteLength - byteOffset) {
+ const view = new Uint8Array(buffer, byteOffset, byteLength);
+ const copy = new Uint8Array(view.length);
+ copy.set(view);
+ return copy.buffer;
+ }

  // ../loader-utils/src/lib/path-utils/file-aliases.ts
  var pathPrefix = "";
@@ -59968,6 +60153,66 @@ function resolvePath(filename) {
  return filename;
  }

+ // ../loader-utils/src/lib/node/buffer.ts
+ function toArrayBuffer(buffer) {
+ if (Buffer.isBuffer(buffer)) {
+ const typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);
+ return typedArray.slice().buffer;
+ }
+ return buffer;
+ }
+
+ // ../loader-utils/src/lib/binary-utils/memory-conversion-utils.ts
+ function isBuffer(value) {
+ return value && typeof value === "object" && value.isBuffer;
+ }
+ function toArrayBuffer2(data) {
+ if (isBuffer(data)) {
+ return toArrayBuffer(data);
+ }
+ if (data instanceof ArrayBuffer) {
+ return data;
+ }
+ if (isSharedArrayBuffer(data)) {
+ return copyToArrayBuffer2(data);
+ }
+ if (ArrayBuffer.isView(data)) {
+ const buffer = data.buffer;
+ if (data.byteOffset === 0 && data.byteLength === data.buffer.byteLength) {
+ return buffer;
+ }
+ return buffer.slice(data.byteOffset, data.byteOffset + data.byteLength);
+ }
+ if (typeof data === "string") {
+ const text = data;
+ const uint8Array = new TextEncoder().encode(text);
+ return uint8Array.buffer;
+ }
+ if (data && typeof data === "object" && data._toArrayBuffer) {
+ return data._toArrayBuffer();
+ }
+ throw new Error("toArrayBuffer");
+ }
+ function ensureArrayBuffer(bufferSource) {
+ if (bufferSource instanceof ArrayBuffer) {
+ return bufferSource;
+ }
+ if (isSharedArrayBuffer(bufferSource)) {
+ return copyToArrayBuffer2(bufferSource);
+ }
+ const { buffer, byteOffset, byteLength } = bufferSource;
+ if (buffer instanceof ArrayBuffer && byteOffset === 0 && byteLength === buffer.byteLength) {
+ return buffer;
+ }
+ return copyToArrayBuffer2(buffer, byteOffset, byteLength);
+ }
+ function copyToArrayBuffer2(buffer, byteOffset = 0, byteLength = buffer.byteLength - byteOffset) {
+ const view = new Uint8Array(buffer, byteOffset, byteLength);
+ const copy = new Uint8Array(view.length);
+ copy.set(view);
+ return copy.buffer;
+ }
+
  // ../polyfills/src/filesystems/node-file.ts
  var import_fs = __toESM(require("fs"), 1);
  var NodeFile = class {
@@ -60093,8 +60338,6 @@ var import_stream = require("stream");

  // ../polyfills/src/filesystems/stream-utils.node.ts
  var import_zlib = __toESM(require("zlib"), 1);
- var isArrayBuffer = (x2) => x2 && x2 instanceof ArrayBuffer;
- var isBuffer = (x2) => x2 && x2 instanceof Buffer;
  function decompressReadStream(readStream, headers) {
  switch (headers == null ? void 0 : headers.get("content-encoding")) {
  case "br":
@@ -60116,7 +60359,7 @@ async function concatenateReadStream(readStream) {
  if (typeof chunk === "string") {
  reject(new Error("Read stream not binary"));
  }
- arrayBufferChunks.push(toArrayBuffer(chunk));
+ arrayBufferChunks.push(toArrayBuffer2(chunk));
  });
  readStream.on("end", () => {
  const arrayBuffer = concatenateArrayBuffers2(arrayBufferChunks);
@@ -60137,27 +60380,6 @@ function concatenateArrayBuffers2(sources) {
  }
  return result.buffer;
  }
- function toArrayBuffer(data) {
- if (isArrayBuffer(data)) {
- return data;
- }
- if (isBuffer(data)) {
- const typedArray = new Uint8Array(data);
- return typedArray.buffer;
- }
- if (ArrayBuffer.isView(data)) {
- return data.buffer;
- }
- if (typeof data === "string") {
- const text = data;
- const uint8Array = new TextEncoder().encode(text);
- return uint8Array.buffer;
- }
- if (data && typeof data === "object" && data._toArrayBuffer) {
- return data._toArrayBuffer();
- }
- throw new Error(`toArrayBuffer(${JSON.stringify(data, null, 2).slice(10)})`);
- }

  // ../polyfills/src/filesystems/fetch-node.ts
  var isBoolean = (x2) => typeof x2 === "boolean";
@@ -60171,10 +60393,10 @@ async function fetchNode(url, options) {
  noqueryUrl = resolvePath(noqueryUrl);
  const responseHeaders = new Headers();
  if (url.endsWith(".gz")) {
- responseHeaders["content-encoding"] = "gzip";
+ responseHeaders.set("content-encoding", "gzip");
  }
  if (url.endsWith(".br")) {
- responseHeaders["content-encoding"] = "br";
+ responseHeaders.set("content-encoding", "br");
  }
  try {
  const body = await new Promise((resolve, reject) => {
@@ -62855,7 +63077,7 @@ async function readFileAsArrayBuffer(filename) {
  return await response.arrayBuffer();
  }
  const buffer = import_fs3.default.readFileSync(filename);
- return buffer.buffer;
+ return ensureArrayBuffer(buffer.buffer);
  }
  async function readFileAsText(filename) {
  if (filename.startsWith("http")) {
@@ -63069,8 +63291,6 @@ var import_http = __toESM(require("http"), 1);
  var import_https = __toESM(require("https"), 1);

  // ../polyfills/src/fetch/decode-data-uri.ts
- var isArrayBuffer2 = (x2) => x2 && x2 instanceof ArrayBuffer;
- var isBuffer2 = (x2) => x2 && x2 instanceof Buffer;
  function decodeDataUri(uri) {
  const dataIndex = uri.indexOf(",");
  let buffer;
@@ -63089,27 +63309,6 @@ function decodeDataUri(uri) {
  }
  return { arrayBuffer: toArrayBuffer2(buffer), mimeType };
  }
- function toArrayBuffer2(data) {
- if (isArrayBuffer2(data)) {
- return data;
- }
- if (isBuffer2(data)) {
- const typedArray = new Uint8Array(data);
- return typedArray.buffer;
- }
- if (ArrayBuffer.isView(data)) {
- return data.buffer;
- }
- if (typeof data === "string") {
- const text = data;
- const uint8Array = new TextEncoder().encode(text);
- return uint8Array.buffer;
- }
- if (data && typeof data === "object" && data._toArrayBuffer) {
- return data._toArrayBuffer();
- }
- throw new Error(`toArrayBuffer(${JSON.stringify(data, null, 2).slice(10)})`);
- }

  // ../polyfills/src/fetch/fetch-polyfill.ts
  var isDataURL = (url) => url.startsWith("data:");
@@ -63622,6 +63821,17 @@ async function loadDracoEncoderModule(options) {
  }
  return await loadEncoderPromise;
  }
+ function getLibraryExport(library, exportName) {
+ if (library && typeof library === "object") {
+ if (library.default) {
+ return library.default;
+ }
+ if (library[exportName]) {
+ return library[exportName];
+ }
+ }
+ return library;
+ }
  async function loadDracoEncoder(options) {
  let DracoEncoderModule = await loadLibrary(
  DRACO_EXTERNAL_LIBRARY_URLS[DRACO_EXTERNAL_LIBRARIES.ENCODER],
@@ -63629,7 +63839,21 @@ async function loadDracoEncoder(options) {
  options,
  DRACO_EXTERNAL_LIBRARIES.ENCODER
  );
+ DracoEncoderModule = getLibraryExport(DracoEncoderModule, "DracoEncoderModule");
  DracoEncoderModule = DracoEncoderModule || globalThis.DracoEncoderModule;
+ if (!DracoEncoderModule && !isBrowser2) {
+ DracoEncoderModule = await loadLibrary(
+ DRACO_EXTERNAL_LIBRARY_URLS[DRACO_EXTERNAL_LIBRARIES.ENCODER],
+ "draco",
+ { ...options, useLocalLibraries: true },
+ DRACO_EXTERNAL_LIBRARIES.ENCODER
+ );
+ DracoEncoderModule = getLibraryExport(DracoEncoderModule, "DracoEncoderModule");
+ DracoEncoderModule = DracoEncoderModule || globalThis.DracoEncoderModule;
+ }
+ if (typeof DracoEncoderModule !== "function") {
+ throw new Error("DracoEncoderModule could not be loaded");
+ }
  return new Promise((resolve) => {
  DracoEncoderModule({
  onModuleLoaded: (draco) => resolve({ draco })
@@ -63639,7 +63863,7 @@
  }

  // src/lib/utils/version.ts
- var VERSION2 = true ? "4.3.3" : "latest";
+ var VERSION2 = true ? "4.4.0-alpha.10" : "latest";

  // src/draco-writer.ts
  var DEFAULT_DRACO_WRITER_OPTIONS = {
@@ -63659,13 +63883,14 @@ var DracoWriter = {
  module: "draco",
  version: VERSION2,
  extensions: ["drc"],
+ mimeTypes: ["application/octet-stream"],
  options: {
  draco: DEFAULT_DRACO_WRITER_OPTIONS
  },
  encode: encode2
  };
  async function encode2(data, options = {}) {
- const { draco } = await loadDracoEncoderModule(options);
+ const { draco } = await loadDracoEncoderModule(options.core || {});
  const dracoBuilder = new DracoBuilder(draco);
  try {
  return dracoBuilder.encodeSync(data, options.draco);