@loaders.gl/polyfills 4.2.0-alpha.4 → 4.2.0-alpha.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81)
  1. package/dist/buffer/btoa.node.js +5 -3
  2. package/dist/buffer/to-array-buffer.node.js +6 -6
  3. package/dist/crypto/node-hash.js +41 -45
  4. package/dist/fetch/decode-data-uri.js +56 -41
  5. package/dist/fetch/fetch-polyfill.d.ts +1 -1
  6. package/dist/fetch/fetch-polyfill.d.ts.map +1 -1
  7. package/dist/fetch/fetch-polyfill.js +118 -103
  8. package/dist/fetch/headers-polyfill.js +90 -89
  9. package/dist/fetch/response-polyfill.d.ts +1 -1
  10. package/dist/fetch/response-polyfill.d.ts.map +1 -1
  11. package/dist/fetch/response-polyfill.js +65 -57
  12. package/dist/fetch/utils/decode-data-uri.node.js +56 -41
  13. package/dist/file/blob-stream-controller.js +54 -38
  14. package/dist/file/blob-stream.d.ts +1 -1
  15. package/dist/file/blob-stream.d.ts.map +1 -1
  16. package/dist/file/blob-stream.js +29 -15
  17. package/dist/file/blob.d.ts +1 -1
  18. package/dist/file/blob.d.ts.map +1 -1
  19. package/dist/file/blob.js +146 -109
  20. package/dist/file/file-reader.js +22 -50
  21. package/dist/file/file.js +30 -15
  22. package/dist/file/install-blob-polyfills.js +6 -5
  23. package/dist/file/install-file-polyfills.js +11 -8
  24. package/dist/file/readable-stream.js +5 -2
  25. package/dist/filesystems/fetch-node.js +78 -68
  26. package/dist/filesystems/node-file.js +119 -87
  27. package/dist/filesystems/node-filesystem.d.ts +1 -1
  28. package/dist/filesystems/node-filesystem.d.ts.map +1 -1
  29. package/dist/filesystems/node-filesystem.js +39 -35
  30. package/dist/filesystems/stream-utils.node.js +88 -55
  31. package/dist/images/encode-image-node.js +25 -12
  32. package/dist/images/encode-image.node.js +25 -12
  33. package/dist/images/parse-image-node.js +30 -23
  34. package/dist/images/parse-image.node.js +30 -23
  35. package/dist/index.browser.js +8 -3
  36. package/dist/index.cjs +13 -65091
  37. package/dist/index.cjs.map +7 -0
  38. package/dist/index.d.ts +4 -4
  39. package/dist/index.d.ts.map +1 -1
  40. package/dist/index.js +45 -15
  41. package/dist/libs/encoding-indexes-asian.js +6 -7
  42. package/dist/load-library/require-utils.node.js +70 -44
  43. package/dist/streams/make-node-stream.js +48 -46
  44. package/dist/text-encoder/encoding-indexes.js +31 -28
  45. package/dist/text-encoder/text-encoder.js +2604 -1033
  46. package/dist/utils/assert.js +3 -4
  47. package/dist/utils/is-browser.js +7 -2
  48. package/package.json +10 -8
  49. package/dist/buffer/btoa.node.js.map +0 -1
  50. package/dist/buffer/to-array-buffer.node.js.map +0 -1
  51. package/dist/crypto/node-hash.js.map +0 -1
  52. package/dist/dist.dev.js +0 -45
  53. package/dist/fetch/decode-data-uri.js.map +0 -1
  54. package/dist/fetch/fetch-polyfill.js.map +0 -1
  55. package/dist/fetch/headers-polyfill.js.map +0 -1
  56. package/dist/fetch/response-polyfill.js.map +0 -1
  57. package/dist/fetch/utils/decode-data-uri.node.js.map +0 -1
  58. package/dist/file/blob-stream-controller.js.map +0 -1
  59. package/dist/file/blob-stream.js.map +0 -1
  60. package/dist/file/blob.js.map +0 -1
  61. package/dist/file/file-reader.js.map +0 -1
  62. package/dist/file/file.js.map +0 -1
  63. package/dist/file/install-blob-polyfills.js.map +0 -1
  64. package/dist/file/install-file-polyfills.js.map +0 -1
  65. package/dist/file/readable-stream.js.map +0 -1
  66. package/dist/filesystems/fetch-node.js.map +0 -1
  67. package/dist/filesystems/node-file.js.map +0 -1
  68. package/dist/filesystems/node-filesystem.js.map +0 -1
  69. package/dist/filesystems/stream-utils.node.js.map +0 -1
  70. package/dist/images/encode-image-node.js.map +0 -1
  71. package/dist/images/encode-image.node.js.map +0 -1
  72. package/dist/images/parse-image-node.js.map +0 -1
  73. package/dist/images/parse-image.node.js.map +0 -1
  74. package/dist/index.browser.js.map +0 -1
  75. package/dist/index.js.map +0 -1
  76. package/dist/load-library/require-utils.node.js.map +0 -1
  77. package/dist/streams/make-node-stream.js.map +0 -1
  78. package/dist/text-encoder/encoding-indexes.js.map +0 -1
  79. package/dist/text-encoder/text-encoder.js.map +0 -1
  80. package/dist/utils/assert.js.map +0 -1
  81. package/dist/utils/is-browser.js.map +0 -1
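
For orientation, the package is normally consumed as a side-effect import that installs the Node.js polyfills (per the loaders.gl documentation; this is context, not something shown in this diff):

  // Installs fetch, Blob/File, TextEncoder/TextDecoder, image encode/decode, etc.
  // in Node.js environments where they are missing.
  import '@loaders.gl/polyfills';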

package/dist/buffer/btoa.node.js
@@ -1,7 +1,9 @@
+ // btoa, atob polyfills for Node.js
+ // Note: The atob and btoa functions (not just the polyfills!) are not unicode safe
+ // But still useful for unit testing
  export function atob(string) {
- return Buffer.from(string).toString('base64');
+ return Buffer.from(string).toString('base64');
  }
  export function btoa(base64) {
- return Buffer.from(base64, 'base64').toString('ascii');
+ return Buffer.from(base64, 'base64').toString('ascii');
  }
- //# sourceMappingURL=btoa.node.js.map
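
A minimal usage sketch of the polyfills above (the relative import path is hypothetical, for illustration only; note that, as written, atob() encodes to base64 and btoa() decodes it, and neither is unicode safe):

  import {atob, btoa} from './btoa.node.js'; // hypothetical import path

  const base64 = atob('hello');      // 'aGVsbG8='
  const text = btoa(base64);         // 'hello'
  console.log(text === 'hello');     // true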

package/dist/buffer/to-array-buffer.node.js
@@ -1,8 +1,8 @@
  export function bufferToArrayBuffer(buffer) {
- if (Buffer.isBuffer(buffer)) {
- const typedArray = new Uint8Array(buffer);
- return typedArray.buffer;
- }
- return buffer;
+ // TODO - per docs we should just be able to call buffer.buffer, but there are issues
+ if (Buffer.isBuffer(buffer)) {
+ const typedArray = new Uint8Array(buffer);
+ return typedArray.buffer;
+ }
+ return buffer;
  }
- //# sourceMappingURL=to-array-buffer.node.js.map
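
The TODO above refers to the fact that small Node.js Buffers are usually views into a shared allocation pool, so buffer.buffer can be much larger than the Buffer itself; copying through a Uint8Array yields an ArrayBuffer sized to the data. A quick illustration (the 8192-byte pool size is a Node.js default, not guaranteed):

  const buf = Buffer.from('abc');
  console.log(buf.length);                             // 3
  console.log(buf.buffer.byteLength);                  // typically 8192 (the shared pool)
  console.log(new Uint8Array(buf).buffer.byteLength);  // 3 (a copy sized to the data)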

package/dist/crypto/node-hash.js
@@ -1,56 +1,52 @@
+ // This dependency is too big, application must provide it
  import { Hash } from '@loaders.gl/crypto';
- import * as crypto from 'crypto';
+ import * as crypto from 'crypto'; // Node.js builtin
+ /**
+ * Calculates Cryptographic Hash using Node.js crypto library
+ * @deprecated Warning, experimental class
+ */
  export class NodeHash extends Hash {
- constructor(options) {
- var _this$options, _this$options$crypto;
- super();
- this.name = 'crypto-node';
- this.options = void 0;
- this._algorithm = void 0;
- this._hash = void 0;
- this.options = options;
- if (!((_this$options = this.options) !== null && _this$options !== void 0 && (_this$options$crypto = _this$options.crypto) !== null && _this$options$crypto !== void 0 && _this$options$crypto.algorithm)) {
- throw new Error(this.name);
+ constructor(options) {
+ super();
+ this.name = 'crypto-node';
+ this.options = options;
+ if (!this.options?.crypto?.algorithm) {
+ throw new Error(this.name);
+ }
  }
- }
- async hash(input, encoding) {
- var _this$options2, _this$options2$crypto, _this$options2$crypto2;
- const algorithm = (_this$options2 = this.options) === null || _this$options2 === void 0 ? void 0 : (_this$options2$crypto = _this$options2.crypto) === null || _this$options2$crypto === void 0 ? void 0 : (_this$options2$crypto2 = _this$options2$crypto.algorithm) === null || _this$options2$crypto2 === void 0 ? void 0 : _this$options2$crypto2.toLowerCase();
- try {
- var _crypto$createHash;
- if (!crypto.createHash) {
- throw new Error('crypto.createHash not available');
- }
- const hash = (_crypto$createHash = crypto.createHash) === null || _crypto$createHash === void 0 ? void 0 : _crypto$createHash.call(crypto, algorithm);
- const inputArray = new Uint8Array(input);
- return hash.update(inputArray).digest('base64');
- } catch (error) {
- throw Error(`${algorithm} hash not available. ${error}`);
+ /**
+ * Atomic hash calculation
+ * @returns base64 encoded hash
+ */
+ async hash(input, encoding) {
+ // await this.preload();
+ const algorithm = this.options?.crypto?.algorithm?.toLowerCase();
+ try {
+ if (!crypto.createHash) {
+ throw new Error('crypto.createHash not available');
+ }
+ const hash = crypto.createHash?.(algorithm);
+ const inputArray = new Uint8Array(input);
+ return hash.update(inputArray).digest('base64');
+ }
+ catch (error) {
+ throw Error(`${algorithm} hash not available. ${error}`);
+ }
  }
- }
- hashBatches(asyncIterator) {
- try {
- var _this = this;
- let encoding = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'base64';
- return async function* () {
- var _crypto$createHash2, _this$options3, _this$options3$crypto, _this$options3$crypto2, _this$options4, _this$options4$crypto, _this$options4$crypto2;
+ async *hashBatches(asyncIterator, encoding = 'base64') {
+ // await this.preload();
  if (!crypto.createHash) {
- throw new Error('crypto.createHash not available');
+ throw new Error('crypto.createHash not available');
  }
- const hash = (_crypto$createHash2 = crypto.createHash) === null || _crypto$createHash2 === void 0 ? void 0 : _crypto$createHash2.call(crypto, (_this$options3 = _this.options) === null || _this$options3 === void 0 ? void 0 : (_this$options3$crypto = _this$options3.crypto) === null || _this$options3$crypto === void 0 ? void 0 : (_this$options3$crypto2 = _this$options3$crypto.algorithm) === null || _this$options3$crypto2 === void 0 ? void 0 : _this$options3$crypto2.toLowerCase());
+ const hash = crypto.createHash?.(this.options?.crypto?.algorithm?.toLowerCase());
  for await (const chunk of asyncIterator) {
- const inputArray = new Uint8Array(chunk);
- hash.update(inputArray);
- yield chunk;
+ // https://stackoverflow.com/questions/25567468/how-to-decrypt-an-arraybuffer
+ const inputArray = new Uint8Array(chunk);
+ hash.update(inputArray);
+ yield chunk;
  }
+ // We can pass our encoding constant directly to Node.js digest as it already supports `hex` and `base64`
  const digest = hash.digest(encoding);
- (_this$options4 = _this.options) === null || _this$options4 === void 0 ? void 0 : (_this$options4$crypto = _this$options4.crypto) === null || _this$options4$crypto === void 0 ? void 0 : (_this$options4$crypto2 = _this$options4$crypto.onEnd) === null || _this$options4$crypto2 === void 0 ? void 0 : _this$options4$crypto2.call(_this$options4$crypto, {
- hash: digest
- });
- }();
- } catch (e) {
- return Promise.reject(e);
+ this.options?.crypto?.onEnd?.({ hash: digest });
  }
- }
  }
- //# sourceMappingURL=node-hash.js.map
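
A hedged usage sketch of the rewritten class (the options shape is inferred from the constructor check above; the export path and algorithm name are assumptions for illustration):

  import {NodeHash} from '@loaders.gl/polyfills'; // assumed export, for illustration only

  const hasher = new NodeHash({crypto: {algorithm: 'SHA256'}});
  const input = new TextEncoder().encode('hello').buffer;
  const digest = await hasher.hash(input, 'base64'); // base64-encoded SHA-256 digest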

package/dist/fetch/decode-data-uri.js
@@ -1,45 +1,60 @@
- const isArrayBuffer = x => x && x instanceof ArrayBuffer;
- const isBuffer = x => x && x instanceof Buffer;
+ // Based on binary-gltf-utils under MIT license: Copyright (c) 2016-17 Karl Cheng
+ const isArrayBuffer = (x) => x && x instanceof ArrayBuffer;
+ const isBuffer = (x) => x && x instanceof Buffer;
+ /**
+ * Parses a data URI into a buffer, as well as retrieving its declared MIME type.
+ *
+ * @param {string} uri - a data URI (assumed to be valid)
+ * @returns {Object} { buffer, mimeType }
+ */
  export function decodeDataUri(uri) {
- const dataIndex = uri.indexOf(',');
- let buffer;
- let mimeType;
- if (uri.slice(dataIndex - 7, dataIndex) === ';base64') {
- buffer = Buffer.from(uri.slice(dataIndex + 1), 'base64');
- mimeType = uri.slice(5, dataIndex - 7).trim();
- } else {
- buffer = Buffer.from(decodeURIComponent(uri.slice(dataIndex + 1)));
- mimeType = uri.slice(5, dataIndex).trim();
- }
- if (!mimeType) {
- mimeType = 'text/plain;charset=US-ASCII';
- } else if (mimeType.startsWith(';')) {
- mimeType = `text/plain${mimeType}`;
- }
- return {
- arrayBuffer: toArrayBuffer(buffer),
- mimeType
- };
+ const dataIndex = uri.indexOf(',');
+ let buffer;
+ let mimeType;
+ if (uri.slice(dataIndex - 7, dataIndex) === ';base64') {
+ buffer = Buffer.from(uri.slice(dataIndex + 1), 'base64');
+ mimeType = uri.slice(5, dataIndex - 7).trim();
+ }
+ else {
+ buffer = Buffer.from(decodeURIComponent(uri.slice(dataIndex + 1)));
+ mimeType = uri.slice(5, dataIndex).trim();
+ }
+ if (!mimeType) {
+ mimeType = 'text/plain;charset=US-ASCII';
+ }
+ else if (mimeType.startsWith(';')) {
+ mimeType = `text/plain${mimeType}`;
+ }
+ return { arrayBuffer: toArrayBuffer(buffer), mimeType };
  }
+ /**
+ * @param data
+ * @todo Duplicate of core
+ */
  export function toArrayBuffer(data) {
- if (isArrayBuffer(data)) {
- return data;
- }
- if (isBuffer(data)) {
- const typedArray = new Uint8Array(data);
- return typedArray.buffer;
- }
- if (ArrayBuffer.isView(data)) {
- return data.buffer;
- }
- if (typeof data === 'string') {
- const text = data;
- const uint8Array = new TextEncoder().encode(text);
- return uint8Array.buffer;
- }
- if (data && typeof data === 'object' && data._toArrayBuffer) {
- return data._toArrayBuffer();
- }
- throw new Error(`toArrayBuffer(${JSON.stringify(data, null, 2).slice(10)})`);
+ if (isArrayBuffer(data)) {
+ return data;
+ }
+ // TODO - per docs we should just be able to call buffer.buffer, but there are issues
+ if (isBuffer(data)) {
+ // @ts-expect-error
+ const typedArray = new Uint8Array(data);
+ return typedArray.buffer;
+ }
+ // Careful - Node Buffers will look like ArrayBuffers (keep after isBuffer)
+ if (ArrayBuffer.isView(data)) {
+ return data.buffer;
+ }
+ if (typeof data === 'string') {
+ const text = data;
+ const uint8Array = new TextEncoder().encode(text);
+ return uint8Array.buffer;
+ }
+ // HACK to support Blob polyfill
+ // @ts-expect-error
+ if (data && typeof data === 'object' && data._toArrayBuffer) {
+ // @ts-expect-error
+ return data._toArrayBuffer();
+ }
+ throw new Error(`toArrayBuffer(${JSON.stringify(data, null, 2).slice(10)})`);
  }
- //# sourceMappingURL=decode-data-uri.js.map
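
A small sketch of the decodeDataUri contract shown above (values worked out by hand from the code; import omitted):

  const {arrayBuffer, mimeType} = decodeDataUri('data:text/plain;base64,aGVsbG8=');
  console.log(mimeType);                              // 'text/plain'
  console.log(new TextDecoder().decode(arrayBuffer)); // 'hello'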

package/dist/fetch/fetch-polyfill.d.ts
@@ -1,6 +1,6 @@
  /// <reference types="node" />
  import http from 'http';
- import { Response } from './response-polyfill';
+ import { Response } from "./response-polyfill.js";
  /**
  * Emulation of Browser fetch for Node.js
  * @param url

package/dist/fetch/fetch-polyfill.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"fetch-polyfill.d.ts","sourceRoot":"","sources":["../../src/fetch/fetch-polyfill.ts"],"names":[],"mappings":";AAIA,OAAO,IAAI,MAAM,MAAM,CAAC;AAExB,OAAO,EAAC,QAAQ,EAAC,MAAM,qBAAqB,CAAC;AAO7C;;;;GAIG;AAEH,wBAAsB,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,QAAQ,CAAC,CAgDpF;AAED,2DAA2D;AAC3D,wBAAsB,2BAA2B,CAC/C,GAAG,EAAE,MAAM,EACX,OAAO,KAAA,GACN,OAAO,CAAC,IAAI,CAAC,eAAe,CAAC,CAW/B"}
+ {"version":3,"file":"fetch-polyfill.d.ts","sourceRoot":"","sources":["../../src/fetch/fetch-polyfill.ts"],"names":[],"mappings":";AAIA,OAAO,IAAI,MAAM,MAAM,CAAC;AAExB,OAAO,EAAC,QAAQ,EAAC,+BAA4B;AAO7C;;;;GAIG;AAEH,wBAAsB,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,QAAQ,CAAC,CAgDpF;AAED,2DAA2D;AAC3D,wBAAsB,2BAA2B,CAC/C,GAAG,EAAE,MAAM,EACX,OAAO,KAAA,GACN,OAAO,CAAC,IAAI,CAAC,eAAe,CAAC,CAW/B"}

package/dist/fetch/fetch-polyfill.js
@@ -1,125 +1,140 @@
+ // loaders.gl
+ // SPDX-License-Identifier: MIT
+ // Copyright (c) vis.gl contributors
  import http from 'http';
  import https from 'https';
  import { Response } from "./response-polyfill.js";
  import { Headers } from "./headers-polyfill.js";
  import { decodeDataUri } from "./decode-data-uri.js";
- const isDataURL = url => url.startsWith('data:');
- const isRequestURL = url => url.startsWith('http:') || url.startsWith('https:');
+ const isDataURL = (url) => url.startsWith('data:');
+ const isRequestURL = (url) => url.startsWith('http:') || url.startsWith('https:');
+ /**
+ * Emulation of Browser fetch for Node.js
+ * @param url
+ * @param options
+ */
+ // eslint-disable-next-line complexity
  export async function fetchNode(url, options) {
- try {
- if (globalThis.fetch !== fetchNode && (isRequestURL(url) || isDataURL(url))) {
- return await fetch(url, options);
+ try {
+ // Handle file streams in node
+ // @ts-expect-error
+ if (globalThis.fetch !== fetchNode && (isRequestURL(url) || isDataURL(url))) {
+ // @ts-expect-error
+ return await fetch(url, options);
+ }
+ // Handle data urls in node, to match `fetch``
+ // Note - this loses the MIME type, data URIs are handled directly in fetch
+ if (isDataURL(url)) {
+ const { arrayBuffer, mimeType } = decodeDataUri(url);
+ const response = new Response(arrayBuffer, {
+ headers: { 'content-type': mimeType },
+ url
+ });
+ return response;
+ }
+ // Automatically decompress gzipped files with .gz extension
+ const syntheticResponseHeaders = {};
+ const originalUrl = url;
+ if (url.endsWith('.gz')) {
+ url = url.slice(0, -3);
+ syntheticResponseHeaders['content-encoding'] = 'gzip';
+ }
+ // Need to create the stream in advance since Response constructor needs to be sync
+ const body = await createHTTPRequestReadStream(originalUrl, options);
+ const headers = getHeaders(url, body, syntheticResponseHeaders);
+ const { status, statusText } = getStatus(body);
+ const followRedirect =
+ // @ts-expect-error
+ !options || options.followRedirect || options.followRedirect === undefined;
+ if (status >= 300 && status < 400 && headers.has('location') && followRedirect) {
+ const redirectUrl = generateRedirectUrl(url, headers.get('location'));
+ // Redirect
+ return await fetchNode(redirectUrl, options);
+ }
+ return new Response(body, { headers, status, statusText, url });
  }
- if (isDataURL(url)) {
- const {
- arrayBuffer,
- mimeType
- } = decodeDataUri(url);
- const response = new Response(arrayBuffer, {
- headers: {
- 'content-type': mimeType
- },
- url
- });
- return response;
+ catch (error) {
+ // TODO - what error code to use here?
+ return new Response(null, { status: 400, statusText: String(error), url });
  }
- const syntheticResponseHeaders = {};
- const originalUrl = url;
- if (url.endsWith('.gz')) {
- url = url.slice(0, -3);
- syntheticResponseHeaders['content-encoding'] = 'gzip';
- }
- const body = await createHTTPRequestReadStream(originalUrl, options);
- const headers = getHeaders(url, body, syntheticResponseHeaders);
- const {
- status,
- statusText
- } = getStatus(body);
- const followRedirect = !options || options.followRedirect || options.followRedirect === undefined;
- if (status >= 300 && status < 400 && headers.has('location') && followRedirect) {
- const redirectUrl = generateRedirectUrl(url, headers.get('location'));
- return await fetchNode(redirectUrl, options);
- }
- return new Response(body, {
- headers,
- status,
- statusText,
- url
- });
- } catch (error) {
- return new Response(null, {
- status: 400,
- statusText: String(error),
- url
- });
- }
  }
+ /** Returns a promise that resolves to a readable stream */
  export async function createHTTPRequestReadStream(url, options) {
- return await new Promise((resolve, reject) => {
- const requestOptions = getRequestOptions(url, options);
- const req = url.startsWith('https:') ? https.request(requestOptions, res => resolve(res)) : http.request(requestOptions, res => resolve(res));
- req.on('error', error => reject(error));
- req.end();
- });
+ // HANDLE HTTP/HTTPS REQUESTS IN NODE
+ // TODO: THIS IS BAD SINCE WE RETURN A PROMISE INSTEAD OF A STREAM
+ return await new Promise((resolve, reject) => {
+ const requestOptions = getRequestOptions(url, options);
+ const req = url.startsWith('https:')
+ ? https.request(requestOptions, (res) => resolve(res))
+ : http.request(requestOptions, (res) => resolve(res));
+ req.on('error', (error) => reject(error));
+ req.end();
+ });
  }
+ /**
+ * Generate redirect url from location without origin and protocol.
+ * @param originalUrl
+ * @param redirectUrl
+ */
  function generateRedirectUrl(originalUrl, location) {
- if (location.startsWith('http')) {
- return location;
- }
- const url = new URL(originalUrl);
- url.pathname = location;
- return url.href;
+ if (location.startsWith('http')) {
+ return location;
+ }
+ // If url doesn't have origin and protocol just extend current url origin with location.
+ const url = new URL(originalUrl);
+ url.pathname = location;
+ return url.href;
  }
+ // HELPER FUNCTIONS
  function getRequestOptions(url, options) {
- const originalHeaders = (options === null || options === void 0 ? void 0 : options.headers) || {};
- const headers = {};
- for (const key of Object.keys(originalHeaders)) {
- headers[key.toLowerCase()] = originalHeaders[key];
- }
- headers['accept-encoding'] = headers['accept-encoding'] || 'gzip,br,deflate';
- const urlObject = new URL(url);
- return {
- hostname: urlObject.hostname,
- path: urlObject.pathname,
- method: 'GET',
- ...options,
- ...(options === null || options === void 0 ? void 0 : options.fetch),
- headers,
- port: urlObject.port
- };
- }
- function getStatus(httpResponse) {
- if (httpResponse.statusCode) {
+ // Ensure header keys are lower case so that we can merge without duplicates
+ const originalHeaders = options?.headers || {};
+ const headers = {};
+ for (const key of Object.keys(originalHeaders)) {
+ headers[key.toLowerCase()] = originalHeaders[key];
+ }
+ // Add default accept-encoding to headers
+ headers['accept-encoding'] = headers['accept-encoding'] || 'gzip,br,deflate';
+ const urlObject = new URL(url);
  return {
- status: httpResponse.statusCode,
- statusText: httpResponse.statusMessage || 'NA'
+ hostname: urlObject.hostname,
+ path: urlObject.pathname,
+ method: 'GET',
+ // Add options and user provided 'options.fetch' overrides if available
+ ...options,
+ ...options?.fetch,
+ // Override with updated headers with accepted encodings:
+ headers,
+ port: urlObject.port
  };
- }
- return {
- status: 200,
- statusText: 'OK'
- };
  }
- function getHeaders(url, httpResponse) {
- let additionalHeaders = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
- const headers = {};
- if (httpResponse && httpResponse.headers) {
- const httpHeaders = httpResponse.headers;
- for (const key in httpHeaders) {
- const header = httpHeaders[key];
- headers[key.toLowerCase()] = String(header);
+ function getStatus(httpResponse) {
+ if (httpResponse.statusCode) {
+ return { status: httpResponse.statusCode, statusText: httpResponse.statusMessage || 'NA' };
+ }
+ return { status: 200, statusText: 'OK' };
+ }
+ function getHeaders(url, httpResponse, additionalHeaders = {}) {
+ const headers = {};
+ if (httpResponse && httpResponse.headers) {
+ const httpHeaders = httpResponse.headers;
+ for (const key in httpHeaders) {
+ const header = httpHeaders[key];
+ headers[key.toLowerCase()] = String(header);
+ }
  }
- }
- if (!headers['content-length']) {
- const contentLength = getContentLength(url);
- if (Number.isFinite(contentLength)) {
- headers['content-length'] = contentLength;
+ // Fix up content length if we can for best progress experience
+ if (!headers['content-length']) {
+ const contentLength = getContentLength(url);
+ if (Number.isFinite(contentLength)) {
+ headers['content-length'] = contentLength;
+ }
  }
- }
- Object.assign(headers, additionalHeaders);
- return new Headers(headers);
+ Object.assign(headers, additionalHeaders);
+ return new Headers(headers);
  }
+ /** Needs to be read from actual headers */
  function getContentLength(url) {
- return isDataURL(url) ? url.length - 'data:'.length : null;
+ // TODO - remove media type etc
+ return isDataURL(url) ? url.length - 'data:'.length : null;
  }
- //# sourceMappingURL=fetch-polyfill.js.map
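
A hedged sketch of how fetchNode behaves once installed as the global fetch, so the Node http/https path above is taken (the URL is a placeholder; the Response methods are assumed to match the Response polyfill in response-polyfill.js):

  // A trailing '.gz' adds a synthetic 'content-encoding: gzip' response header,
  // signalling that the body stream is gzip-compressed.
  const response = await fetchNode('https://example.com/terrain.png.gz');
  const arrayBuffer = await response.arrayBuffer();

  // data: URLs are decoded via decodeDataUri and exposed with a content-type header.
  const dataResponse = await fetchNode('data:text/plain;base64,aGVsbG8=');
  console.log(await dataResponse.text()); // 'hello'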