@loaders.gl/polyfills 4.2.0-alpha.4 → 4.2.0-alpha.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93)
  1. package/dist/buffer/btoa.node.js +5 -3
  2. package/dist/buffer/to-array-buffer.node.js +6 -6
  3. package/dist/crypto/node-hash.js +46 -45
  4. package/dist/fetch/decode-data-uri.js +56 -41
  5. package/dist/fetch/fetch-polyfill.d.ts +1 -1
  6. package/dist/fetch/fetch-polyfill.d.ts.map +1 -1
  7. package/dist/fetch/fetch-polyfill.js +118 -103
  8. package/dist/fetch/headers-polyfill.js +91 -89
  9. package/dist/fetch/response-polyfill.d.ts +1 -1
  10. package/dist/fetch/response-polyfill.d.ts.map +1 -1
  11. package/dist/fetch/response-polyfill.js +73 -57
  12. package/dist/fetch/utils/decode-data-uri.node.js +56 -41
  13. package/dist/file/blob-stream-controller.js +55 -38
  14. package/dist/file/blob-stream.d.ts +1 -1
  15. package/dist/file/blob-stream.d.ts.map +1 -1
  16. package/dist/file/blob-stream.js +30 -15
  17. package/dist/file/blob.d.ts +1 -1
  18. package/dist/file/blob.d.ts.map +1 -1
  19. package/dist/file/blob.js +152 -109
  20. package/dist/file/file-reader.js +38 -50
  21. package/dist/file/file.js +35 -15
  22. package/dist/file/install-blob-polyfills.js +6 -5
  23. package/dist/file/install-file-polyfills.d.ts +3 -2
  24. package/dist/file/install-file-polyfills.d.ts.map +1 -1
  25. package/dist/file/install-file-polyfills.js +11 -8
  26. package/dist/file/readable-stream.js +5 -2
  27. package/dist/filesystems/fetch-node.d.ts.map +1 -1
  28. package/dist/filesystems/fetch-node.js +80 -68
  29. package/dist/filesystems/node-file.d.ts.map +1 -1
  30. package/dist/filesystems/node-file.js +129 -87
  31. package/dist/filesystems/node-filesystem.d.ts +1 -1
  32. package/dist/filesystems/node-filesystem.d.ts.map +1 -1
  33. package/dist/filesystems/node-filesystem.js +39 -35
  34. package/dist/filesystems/stream-utils.node.js +88 -55
  35. package/dist/images/encode-image-node.js +25 -12
  36. package/dist/images/encode-image.node.js +25 -12
  37. package/dist/images/parse-image-node.js +30 -23
  38. package/dist/images/parse-image.node.js +30 -23
  39. package/dist/index.browser.js +8 -3
  40. package/dist/index.cjs +13 -65091
  41. package/dist/index.cjs.map +7 -0
  42. package/dist/index.d.ts +4 -4
  43. package/dist/index.d.ts.map +1 -1
  44. package/dist/index.js +46 -15
  45. package/dist/libs/encoding-indexes-asian.js +6 -7
  46. package/dist/load-library/require-utils.node.js +70 -44
  47. package/dist/streams/make-node-stream.d.ts.map +1 -1
  48. package/dist/streams/make-node-stream.js +52 -46
  49. package/dist/text-encoder/encoding-indexes.js +31 -28
  50. package/dist/text-encoder/text-encoder.js +2604 -1033
  51. package/dist/utils/assert.js +3 -4
  52. package/dist/utils/is-browser.js +7 -2
  53. package/package.json +13 -11
  54. package/src/fetch/response-polyfill.ts +2 -0
  55. package/src/filesystems/fetch-node.ts +2 -0
  56. package/src/filesystems/node-file.ts +4 -0
  57. package/src/filesystems/node-filesystem.ts +1 -0
  58. package/src/index.ts +1 -0
  59. package/src/load-library/require-utils.node.ts +1 -1
  60. package/src/streams/make-node-stream.ts +3 -2
  61. package/dist/buffer/btoa.node.js.map +0 -1
  62. package/dist/buffer/to-array-buffer.node.js.map +0 -1
  63. package/dist/crypto/node-hash.js.map +0 -1
  64. package/dist/dist.dev.js +0 -45
  65. package/dist/fetch/decode-data-uri.js.map +0 -1
  66. package/dist/fetch/fetch-polyfill.js.map +0 -1
  67. package/dist/fetch/headers-polyfill.js.map +0 -1
  68. package/dist/fetch/response-polyfill.js.map +0 -1
  69. package/dist/fetch/utils/decode-data-uri.node.js.map +0 -1
  70. package/dist/file/blob-stream-controller.js.map +0 -1
  71. package/dist/file/blob-stream.js.map +0 -1
  72. package/dist/file/blob.js.map +0 -1
  73. package/dist/file/file-reader.js.map +0 -1
  74. package/dist/file/file.js.map +0 -1
  75. package/dist/file/install-blob-polyfills.js.map +0 -1
  76. package/dist/file/install-file-polyfills.js.map +0 -1
  77. package/dist/file/readable-stream.js.map +0 -1
  78. package/dist/filesystems/fetch-node.js.map +0 -1
  79. package/dist/filesystems/node-file.js.map +0 -1
  80. package/dist/filesystems/node-filesystem.js.map +0 -1
  81. package/dist/filesystems/stream-utils.node.js.map +0 -1
  82. package/dist/images/encode-image-node.js.map +0 -1
  83. package/dist/images/encode-image.node.js.map +0 -1
  84. package/dist/images/parse-image-node.js.map +0 -1
  85. package/dist/images/parse-image.node.js.map +0 -1
  86. package/dist/index.browser.js.map +0 -1
  87. package/dist/index.js.map +0 -1
  88. package/dist/load-library/require-utils.node.js.map +0 -1
  89. package/dist/streams/make-node-stream.js.map +0 -1
  90. package/dist/text-encoder/encoding-indexes.js.map +0 -1
  91. package/dist/text-encoder/text-encoder.js.map +0 -1
  92. package/dist/utils/assert.js.map +0 -1
  93. package/dist/utils/is-browser.js.map +0 -1
package/dist/file/blob.js CHANGED
@@ -1,119 +1,162 @@
- let _Symbol$toStringTag;
+ // Forked from @gozala's web-blob under MIT license https://github.com/Gozala/web-blob
  import { BlobStream } from "./blob-stream.js";
- _Symbol$toStringTag = Symbol.toStringTag;
+ /**
+ * Forked from @gozala's web-blob under MIT license
+ * @see https://github.com/Gozala/web-blob
+ */
  export class BlobPolyfill {
- constructor() {
- let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : [];
- let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
- this.type = void 0;
- this.size = void 0;
- this.parts = void 0;
- this.parts = [];
- this.size = 0;
- for (const part of init) {
- if (typeof part === 'string') {
- const bytes = new TextEncoder().encode(part);
- this.parts.push(bytes);
- this.size += bytes.byteLength;
- } else if (part instanceof BlobPolyfill) {
- this.size += part.size;
- this.parts.push(...part.parts);
- } else if (part instanceof ArrayBuffer) {
- this.parts.push(new Uint8Array(part));
- this.size += part.byteLength;
- } else if (part instanceof Uint8Array) {
- this.parts.push(part);
- this.size += part.byteLength;
- } else if (ArrayBuffer.isView(part)) {
- const {
- buffer,
- byteOffset,
- byteLength
- } = part;
- this.parts.push(new Uint8Array(buffer, byteOffset, byteLength));
- this.size += byteLength;
- } else {
- const bytes = new TextEncoder().encode(String(part));
- this.parts.push(bytes);
- this.size += bytes.byteLength;
- }
+ // implements Blob {
+ /** The MIME type of the data contained in the Blob. If type is unknown, string is empty. */
+ type;
+ /** The size, in bytes, of the data contained in the Blob object. */
+ size;
+ parts;
+ /**
+ * @param [init]
+ * @param [options]
+ */
+ constructor(init = [], options = {}) {
+ this.parts = [];
+ this.size = 0;
+ for (const part of init) {
+ if (typeof part === 'string') {
+ const bytes = new TextEncoder().encode(part);
+ this.parts.push(bytes);
+ this.size += bytes.byteLength;
+ }
+ else if (part instanceof BlobPolyfill) {
+ this.size += part.size;
+ // @ts-ignore - `parts` is marked private so TS will complain about
+ // accessing it.
+ this.parts.push(...part.parts);
+ }
+ else if (part instanceof ArrayBuffer) {
+ this.parts.push(new Uint8Array(part));
+ this.size += part.byteLength;
+ }
+ else if (part instanceof Uint8Array) {
+ this.parts.push(part);
+ this.size += part.byteLength;
+ }
+ else if (ArrayBuffer.isView(part)) {
+ const { buffer, byteOffset, byteLength } = part;
+ this.parts.push(new Uint8Array(buffer, byteOffset, byteLength));
+ this.size += byteLength;
+ }
+ else {
+ const bytes = new TextEncoder().encode(String(part));
+ this.parts.push(bytes);
+ this.size += bytes.byteLength;
+ }
+ }
+ /** @private */
+ this.type = readType(options.type);
+ }
+ /**
+ * Returns a new Blob object containing the data in the specified range of
+ * bytes of the blob on which it's called.
+ * @param start=0 - An index into the Blob indicating the first
+ * byte to include in the new Blob. If you specify a negative value, it's
+ * treated as an offset from the end of the Blob toward the beginning. For
+ * example, `-10` would be the 10th from last byte in the Blob. The default
+ * value is `0`. If you specify a value for start that is larger than the
+ * size of the source Blob, the returned Blob has size 0 and contains no
+ * data.
+ * @param end - An index into the `Blob` indicating the first byte
+ * that will *not* be included in the new `Blob` (i.e. the byte exactly at
+ * this index is not included). If you specify a negative value, it's treated
+ * as an offset from the end of the Blob toward the beginning. For example,
+ * `-10` would be the 10th from last byte in the `Blob`. The default value is
+ * size.
+ * @param type - The content type to assign to the new Blob;
+ * this will be the value of its type property. The default value is an empty
+ * string.
+ */
+ slice(start = 0, end = this.size, type = '') {
+ const { size, parts: parts } = this;
+ let offset = start < 0 ? Math.max(size + start, 0) : Math.min(start, size);
+ let limit = end < 0 ? Math.max(size + end, 0) : Math.min(end, size);
+ const span = Math.max(limit - offset, 0);
+ const blob = new BlobPolyfill([], { type });
+ if (span === 0) {
+ // @ts-ignore
+ return blob;
+ }
+ let blobSize = 0;
+ const blobParts = [];
+ for (const part of parts) {
+ const { byteLength } = part;
+ if (offset > 0 && byteLength <= offset) {
+ offset -= byteLength;
+ limit -= byteLength;
+ }
+ else {
+ const chunk = part.subarray(offset, Math.min(byteLength, limit));
+ blobParts.push(chunk);
+ blobSize += chunk.byteLength;
+ // no longer need to take that into account
+ offset = 0;
+ // don't add the overflow to new blobParts
+ if (blobSize >= span) {
+ break;
+ }
+ }
+ }
+ blob.parts = blobParts;
+ blob.size = blobSize;
+ // @ts-ignore
+ return blob;
  }
- this.type = readType(options.type);
- }
- slice() {
- let start = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0;
- let end = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : this.size;
- let type = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : '';
- const {
- size,
- parts: parts
- } = this;
- let offset = start < 0 ? Math.max(size + start, 0) : Math.min(start, size);
- let limit = end < 0 ? Math.max(size + end, 0) : Math.min(end, size);
- const span = Math.max(limit - offset, 0);
- const blob = new BlobPolyfill([], {
- type
- });
- if (span === 0) {
- return blob;
+ /**
+ * Returns a promise that resolves with an ArrayBuffer containing the entire
+ * contents of the Blob as binary data.
+ */
+ // eslint-disable-next-line require-await
+ async arrayBuffer() {
+ return this._toArrayBuffer();
  }
- let blobSize = 0;
- const blobParts = [];
- for (const part of parts) {
- const {
- byteLength
- } = part;
- if (offset > 0 && byteLength <= offset) {
- offset -= byteLength;
- limit -= byteLength;
- } else {
- const chunk = part.subarray(offset, Math.min(byteLength, limit));
- blobParts.push(chunk);
- blobSize += chunk.byteLength;
- offset = 0;
- if (blobSize >= span) {
- break;
+ /**
+ * Returns a promise that resolves with a USVString containing the entire
+ * contents of the Blob interpreted as UTF-8 text.
+ */
+ // eslint-disable-next-line require-await
+ async text() {
+ const decoder = new TextDecoder();
+ let text = '';
+ for (const part of this.parts) {
+ text += decoder.decode(part);
  }
- }
+ return text;
  }
- blob.parts = blobParts;
- blob.size = blobSize;
- return blob;
- }
- async arrayBuffer() {
- return this._toArrayBuffer();
- }
- async text() {
- const decoder = new TextDecoder();
- let text = '';
- for (const part of this.parts) {
- text += decoder.decode(part);
+ /**
+ */
+ // @ts-ignore
+ stream() {
+ return new BlobStream(this.parts);
  }
- return text;
- }
- stream() {
- return new BlobStream(this.parts);
- }
- toString() {
- return '[object Blob]';
- }
- get [_Symbol$toStringTag]() {
- return 'Blob';
- }
- _toArrayBuffer() {
- const buffer = new ArrayBuffer(this.size);
- const bytes = new Uint8Array(buffer);
- let offset = 0;
- for (const part of this.parts) {
- bytes.set(part, offset);
- offset += part.byteLength;
+ /**
+ * @returns {string}
+ */
+ toString() {
+ return '[object Blob]';
+ }
+ get [Symbol.toStringTag]() {
+ return 'Blob';
+ }
+ _toArrayBuffer() {
+ const buffer = new ArrayBuffer(this.size);
+ const bytes = new Uint8Array(buffer);
+ let offset = 0;
+ for (const part of this.parts) {
+ bytes.set(part, offset);
+ offset += part.byteLength;
+ }
+ return buffer;
  }
- return buffer;
- }
  }
- function readType() {
- let input = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : '';
- const type = String(input).toLowerCase();
- return /[^\u0020-\u007E]/.test(type) ? '' : type;
+ /**
+ */
+ function readType(input = '') {
+ const type = String(input).toLowerCase();
+ return /[^\u0020-\u007E]/.test(type) ? '' : type;
  }
- //# sourceMappingURL=blob.js.map
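Note on the rewritten blob.js: the behavior is unchanged (parts are stored as Uint8Array chunks and slice() builds a new polyfilled blob from the overlapping chunks); only the compiled output style differs. A minimal usage sketch, assuming BlobPolyfill is normally reached through the installed globalThis.Blob; the deep dist import path below is illustrative only, not a documented entry point:

    // Sketch only - hypothetical deep import, for illustration
    import {BlobPolyfill} from '@loaders.gl/polyfills/dist/file/blob.js';

    async function demo() {
      const blob = new BlobPolyfill(['hello ', 'world'], {type: 'text/plain'});
      console.log(blob.size, blob.type);       // 11 'text/plain'
      const tail = blob.slice(-5);             // negative start counts back from the end
      console.log(await tail.text());          // 'world'
      const buffer = await blob.arrayBuffer(); // all 11 bytes concatenated
      console.log(buffer.byteLength);          // 11
    }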
package/dist/file/file-reader.js CHANGED
@@ -1,59 +1,47 @@
  import { atob } from "../buffer/btoa.node.js";
  export class FileReaderPolyfill {
- constructor() {
- this.onload = void 0;
- this.onabort = void 0;
- this.onerror = void 0;
- this.error = void 0;
- this.onloadstart = void 0;
- this.onloadend = void 0;
- this.onprogress = void 0;
- this.readyState = void 0;
- this.result = void 0;
- this.DONE = void 0;
- this.EMPTY = void 0;
- this.LOADING = void 0;
- this.addEventListener = void 0;
- this.removeEventListener = void 0;
- this.dispatchEvent = void 0;
- this.onload = null;
- }
- abort() {
- return;
- }
- async readAsArrayBuffer(blob) {
- const arrayBuffer = await blob.arrayBuffer();
- if (this.onload) {
- this.onload({
- target: {
- result: arrayBuffer
+ // onload: ({result: any}) => void;
+ onload;
+ onabort;
+ onerror;
+ error;
+ onloadstart;
+ onloadend;
+ onprogress;
+ readyState;
+ result;
+ DONE;
+ EMPTY;
+ LOADING;
+ addEventListener;
+ removeEventListener;
+ dispatchEvent;
+ constructor() {
+ this.onload = null;
+ }
+ abort() {
+ return;
+ }
+ async readAsArrayBuffer(blob) {
+ const arrayBuffer = await blob.arrayBuffer();
+ if (this.onload) {
+ this.onload({ target: { result: arrayBuffer } });
  }
- });
  }
- }
- async readAsBinaryString(blob) {
- throw Error('Not implemented');
- }
- async readAsDataURL(blob) {
- const text = await blob.text();
- const dataUrl = `data://;base64,${atob(text)}`;
- if (this.onload) {
- this.onload({
- target: {
- result: dataUrl
+ async readAsBinaryString(blob) {
+ throw Error('Not implemented');
+ }
+ async readAsDataURL(blob) {
+ const text = await blob.text();
+ const dataUrl = `data://;base64,${atob(text)}`;
+ if (this.onload) {
+ this.onload({ target: { result: dataUrl } });
  }
- });
  }
- }
- async readAsText(blob) {
- const text = await blob.text();
- if (this.onload) {
- this.onload({
- target: {
- result: text
+ async readAsText(blob) {
+ const text = await blob.text();
+ if (this.onload) {
+ this.onload({ target: { result: text } });
  }
- });
  }
- }
  }
- //# sourceMappingURL=file-reader.js.map
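FileReaderPolyfill keeps the callback-style surface but only delivers {target: {result}} to onload, and its read methods are async (they resolve after onload has fired), unlike the browser API. A minimal sketch, assuming the polyfill has been installed as globalThis.FileReader and globalThis.Blob:

    // Sketch only - assumes installFilePolyfills() has installed FileReader/Blob
    const reader = new FileReader();
    reader.onload = (event) => {
      console.log(event.target.result.byteLength);   // 3 - the blob's byte length
    };
    // In this polyfill readAsArrayBuffer returns a promise, so it can be awaited
    await reader.readAsArrayBuffer(new Blob(['abc']));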
package/dist/file/file.js CHANGED
@@ -1,17 +1,37 @@
- let _Symbol$toStringTag;
- _Symbol$toStringTag = Symbol.toStringTag;
+ // Forked from @gozala's web-file under MIT license https://github.com/Gozala/web-file
+ /**
+ * Forked from @gozala's web-file under MIT license
+ * @see https://github.com/Gozala/web-file
+ */
+ // @ts-ignore
  export class FilePolyfill extends globalThis.Blob {
- constructor(init, name) {
- let options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
- super(init, options);
- this.name = '';
- this.webkitRelativePath = '';
- this.lastModified = void 0;
- this.name = String(name).replace(/\//g, ':');
- this.lastModified = (options === null || options === void 0 ? void 0 : options.lastModified) || Date.now();
- }
- get [_Symbol$toStringTag]() {
- return 'File';
- }
+ // implements File {
+ // public API
+ /** The name of the file referenced by the File object. */
+ name = '';
+ /** The path the URL of the File is relative to. */
+ webkitRelativePath = '';
+ /**
+ * Returns the last modified time of the file, in millisecond since the UNIX
+ * epoch (January 1st, 1970 at Midnight).
+ */
+ lastModified;
+ /**
+ * @param init
+ * @param name - A USVString representing the file name or the path
+ * to the file.
+ * @param [options]
+ */
+ constructor(init, name, options = {}) {
+ super(init, options);
+ // Per File API spec https://w3c.github.io/FileAPI/#file-constructor
+ // Every "/" character of file name must be replaced with a ":".
+ /** @private */
+ this.name = String(name).replace(/\//g, ':');
+ /** @private */
+ this.lastModified = options?.lastModified || Date.now();
+ }
+ get [Symbol.toStringTag]() {
+ return 'File';
+ }
  }
- //# sourceMappingURL=file.js.map
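FilePolyfill extends the installed Blob and normalizes the file name per the File API spec (every '/' becomes ':'); lastModified falls back to Date.now() when not supplied. A minimal sketch, assuming globalThis.File resolves to this polyfill:

    // Sketch only - assumes globalThis.File is FilePolyfill on this Node.js version
    const file = new File(['contents'], 'assets/data.bin', {lastModified: 1700000000000});
    console.log(file.name);                             // 'assets:data.bin' - '/' replaced with ':'
    console.log(file.lastModified);                     // 1700000000000; Date.now() if omitted
    console.log(Object.prototype.toString.call(file));  // '[object File]' via Symbol.toStringTag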
package/dist/file/install-blob-polyfills.js CHANGED
@@ -1,9 +1,10 @@
+ // import {ReadableStreamPolyfill} from './readable-stream';
  import { BlobPolyfill } from "./blob.js";
  export function instalBlobPolyfills() {
- if (typeof Blob === 'undefined' && !globalThis.Blob) {
- globalThis.Blob = BlobPolyfill;
- }
- return globalThis.Blob;
+ if (typeof Blob === 'undefined' && !globalThis.Blob) {
+ // @ts-ignore;
+ globalThis.Blob = BlobPolyfill;
+ }
+ return globalThis.Blob;
  }
  export const Blob_ = instalBlobPolyfills();
- //# sourceMappingURL=install-blob-polyfills.js.map
package/dist/file/install-file-polyfills.d.ts CHANGED
@@ -1,3 +1,4 @@
- export declare function installFilePolyfills(): typeof globalThis;
- export declare const File_: typeof globalThis;
+ /// <reference types="node" />
+ export declare function installFilePolyfills(): NodeJS.Global;
+ export declare const File_: NodeJS.Global;
  //# sourceMappingURL=install-file-polyfills.d.ts.map
package/dist/file/install-file-polyfills.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"install-file-polyfills.d.ts","sourceRoot":"","sources":["../../src/file/install-file-polyfills.ts"],"names":[],"mappings":"AAIA,wBAAgB,oBAAoB,sBAanC;AAED,eAAO,MAAM,KAAK,mBAAyB,CAAC"}
+ {"version":3,"file":"install-file-polyfills.d.ts","sourceRoot":"","sources":["../../src/file/install-file-polyfills.ts"],"names":[],"mappings":";AAIA,wBAAgB,oBAAoB,kBAanC;AAED,eAAO,MAAM,KAAK,eAAyB,CAAC"}
package/dist/file/install-file-polyfills.js CHANGED
@@ -1,13 +1,16 @@
+ // import {ReadableStreamPolyfill} from './readable-stream';
  import { FileReaderPolyfill } from "./file-reader.js";
  import { FilePolyfill } from "./file.js";
  export function installFilePolyfills() {
- if (typeof FileReader === 'undefined' && !globalThis.FileReader) {
- globalThis.FileReader = FileReaderPolyfill;
- }
- if (typeof File === 'undefined' && !globalThis.File) {
- globalThis.File = FilePolyfill;
- }
- return global;
+ if (typeof FileReader === 'undefined' && !globalThis.FileReader) {
+ // @ts-ignore;
+ globalThis.FileReader = FileReaderPolyfill;
+ }
+ // Install minimal Node.js File polyfill
+ if (typeof File === 'undefined' && !globalThis.File) {
+ // @ts-ignore;
+ globalThis.File = FilePolyfill;
+ }
+ return global;
  }
  export const File_ = installFilePolyfills();
- //# sourceMappingURL=install-file-polyfills.js.map
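installFilePolyfills() (like instalBlobPolyfills() above) only assigns a polyfill when the corresponding global is missing, so native implementations are never overwritten, and both installers run at module load via the exported File_/Blob_ constants. A minimal sketch of the intended usage, assuming a Node.js version without built-in File/FileReader globals; importing the package is the documented way to trigger installation:

    // Sketch only - importing the package installs the polyfills as a side effect
    import '@loaders.gl/polyfills';

    console.log(typeof globalThis.FileReader);  // 'function' (FileReaderPolyfill if none existed)
    console.log(typeof globalThis.File);        // 'function' (FilePolyfill if none existed)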
package/dist/file/readable-stream.js CHANGED
@@ -1,4 +1,7 @@
  import { ReadableStream as WSPReadableStream } from 'web-streams-polyfill';
+ // Want a polyfill, but please don't install it
+ // @ts-ignore
  delete globalThis.ReadableStream;
- export class ReadableStreamPolyfill extends WSPReadableStream {}
- //# sourceMappingURL=readable-stream.js.map
+ // @ts-ignore
+ export class ReadableStreamPolyfill extends WSPReadableStream {
+ }
package/dist/filesystems/fetch-node.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"fetch-node.d.ts","sourceRoot":"","sources":["../../src/filesystems/fetch-node.ts"],"names":[],"mappings":"AAeA;;;;;GAKG;AAEH,wBAAsB,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,WAAW,GAAG,OAAO,CAAC,QAAQ,CAAC,CAyDrF"}
+ {"version":3,"file":"fetch-node.d.ts","sourceRoot":"","sources":["../../src/filesystems/fetch-node.ts"],"names":[],"mappings":"AAeA;;;;;GAKG;AAEH,wBAAsB,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,WAAW,GAAG,OAAO,CAAC,QAAQ,CAAC,CA2DrF"}
package/dist/filesystems/fetch-node.js CHANGED
@@ -1,77 +1,89 @@
+ // loaders.gl
+ // SPDX-License-Identifier: MIT
+ // Copyright (c) vis.gl contributors
  import fs from 'fs';
  import { Readable } from 'stream';
  import { resolvePath } from '@loaders.gl/loader-utils';
  import { decompressReadStream } from "./stream-utils.node.js";
- const isBoolean = x => typeof x === 'boolean';
- const isFunction = x => typeof x === 'function';
- const isObject = x => x !== null && typeof x === 'object';
- const isReadableNodeStream = x => isObject(x) && isFunction(x.read) && isFunction(x.pipe) && isBoolean(x.readable);
+ const isBoolean = (x) => typeof x === 'boolean';
+ const isFunction = (x) => typeof x === 'function';
+ const isObject = (x) => x !== null && typeof x === 'object';
+ const isReadableNodeStream = (x) => isObject(x) && isFunction(x.read) && isFunction(x.pipe) && isBoolean(x.readable);
+ /**
+ * Enables
+ * @param url
+ * @param options
+ * @returns
+ */
+ // eslint-disable-next-line max-statements
  export async function fetchNode(url, options) {
- const FILE_PROTOCOL_REGEX = /^file:\/\//;
- url.replace(FILE_PROTOCOL_REGEX, '/');
- let noqueryUrl = url.split('?')[0];
- noqueryUrl = resolvePath(noqueryUrl);
- const responseHeaders = new Headers();
- if (url.endsWith('.gz')) {
- responseHeaders['content-encoding'] = 'gzip';
- }
- if (url.endsWith('.br')) {
- responseHeaders['content-encoding'] = 'br';
- }
- try {
- const body = await new Promise((resolve, reject) => {
- const stream = fs.createReadStream(noqueryUrl, {
- encoding: null
- });
- stream.once('readable', () => resolve(stream));
- stream.on('error', error => reject(error));
- });
- let bodyStream = body;
- if (isReadableNodeStream(body)) {
- bodyStream = decompressReadStream(body, responseHeaders);
- } else if (typeof body === 'string') {
- bodyStream = Readable.from([new TextEncoder().encode(body)]);
- } else {
- bodyStream = Readable.from([body || new ArrayBuffer(0)]);
+ // Support `file://` protocol
+ const FILE_PROTOCOL_REGEX = /^file:\/\//;
+ url.replace(FILE_PROTOCOL_REGEX, '/');
+ // Remove any query parameters, as they have no meaning
+ let noqueryUrl = url.split('?')[0];
+ noqueryUrl = resolvePath(noqueryUrl);
+ const responseHeaders = new Headers();
+ // Automatically decompress gzipped files with .gz extension
+ if (url.endsWith('.gz')) {
+ // url = url.slice(0, -3);
+ responseHeaders['content-encoding'] = 'gzip';
+ }
+ if (url.endsWith('.br')) {
+ // url = url.slice(0, -3);
+ responseHeaders['content-encoding'] = 'br';
+ }
+ try {
+ // Now open the stream
+ const body = await new Promise((resolve, reject) => {
+ // @ts-ignore
+ const stream = fs.createReadStream(noqueryUrl, { encoding: null });
+ stream.once('readable', () => resolve(stream));
+ stream.on('error', (error) => reject(error));
+ });
+ let bodyStream = body;
+ // Check for content-encoding and create a decompression stream
+ if (isReadableNodeStream(body)) {
+ bodyStream = decompressReadStream(body, responseHeaders);
+ }
+ else if (typeof body === 'string') {
+ // @ts-expect-error
+ bodyStream = Readable.from([new TextEncoder().encode(body)]);
+ }
+ else {
+ // @ts-expect-error
+ bodyStream = Readable.from([body || new ArrayBuffer(0)]);
+ }
+ const status = 200;
+ const statusText = 'OK';
+ const headers = getHeadersForFile(noqueryUrl);
+ // @ts-expect-error
+ const response = new Response(bodyStream, { headers, status, statusText });
+ Object.defineProperty(response, 'url', { value: url });
+ return response;
+ }
+ catch (error) {
+ // console.error(error);
+ const errorMessage = error.message;
+ const status = 400;
+ const statusText = errorMessage;
+ const headers = {};
+ const response = new Response(errorMessage, { headers, status, statusText });
+ Object.defineProperty(response, 'url', { value: url });
+ return response;
  }
- const status = 200;
- const statusText = 'OK';
- const headers = getHeadersForFile(noqueryUrl);
- const response = new Response(bodyStream, {
- headers,
- status,
- statusText
- });
- Object.defineProperty(response, 'url', {
- value: url
- });
- return response;
- } catch (error) {
- const errorMessage = error.message;
- const status = 400;
- const statusText = errorMessage;
- const headers = {};
- const response = new Response(errorMessage, {
- headers,
- status,
- statusText
- });
- Object.defineProperty(response, 'url', {
- value: url
- });
- return response;
- }
  }
  function getHeadersForFile(noqueryUrl) {
- const headers = {};
- if (!headers['content-length']) {
- const stats = fs.statSync(noqueryUrl);
- headers['content-length'] = stats.size;
- }
- if (noqueryUrl.endsWith('.gz')) {
- noqueryUrl = noqueryUrl.slice(0, -3);
- headers['content-encoding'] = 'gzip';
- }
- return new Headers(headers);
+ const headers = {};
+ // Fix up content length if we can for best progress experience
+ if (!headers['content-length']) {
+ const stats = fs.statSync(noqueryUrl);
+ headers['content-length'] = stats.size;
+ }
+ // Automatically decompress gzipped files with .gz extension
+ if (noqueryUrl.endsWith('.gz')) {
+ noqueryUrl = noqueryUrl.slice(0, -3);
+ headers['content-encoding'] = 'gzip';
+ }
+ return new Headers(headers);
  }
- //# sourceMappingURL=fetch-node.js.map
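fetchNode() wraps fs.createReadStream in a standard Response: query strings are stripped before resolving the path, content-length is filled in from fs.statSync, .gz/.br extensions mark the stream for decompression via decompressReadStream, and any file error comes back as a status-400 Response rather than a rejected promise. A minimal sketch, assuming a local file exists at the hypothetical path below; the deep dist import is illustrative only, not a documented entry point:

    // Sketch only - hypothetical path and deep import, for illustration
    import {fetchNode} from '@loaders.gl/polyfills/dist/filesystems/fetch-node.js';

    const response = await fetchNode('./data/tile.bin?version=1');  // '?version=1' is ignored
    console.log(response.status, response.ok);                      // 200 true when the file opens
    console.log(response.headers.get('content-length'));            // on-disk size from fs.statSync
    const arrayBuffer = await response.arrayBuffer();               // read the body like any fetch Response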