@loaders.gl/polyfills 4.2.0-alpha.4 → 4.2.0-alpha.5
This diff compares the published contents of the two package versions as they appear in a supported public registry. It is provided for informational purposes only.
- package/dist/buffer/btoa.node.js +5 -3
- package/dist/buffer/to-array-buffer.node.js +6 -6
- package/dist/crypto/node-hash.js +41 -45
- package/dist/fetch/decode-data-uri.js +56 -41
- package/dist/fetch/fetch-polyfill.d.ts +1 -1
- package/dist/fetch/fetch-polyfill.d.ts.map +1 -1
- package/dist/fetch/fetch-polyfill.js +118 -103
- package/dist/fetch/headers-polyfill.js +90 -89
- package/dist/fetch/response-polyfill.d.ts +1 -1
- package/dist/fetch/response-polyfill.d.ts.map +1 -1
- package/dist/fetch/response-polyfill.js +65 -57
- package/dist/fetch/utils/decode-data-uri.node.js +56 -41
- package/dist/file/blob-stream-controller.js +54 -38
- package/dist/file/blob-stream.d.ts +1 -1
- package/dist/file/blob-stream.d.ts.map +1 -1
- package/dist/file/blob-stream.js +29 -15
- package/dist/file/blob.d.ts +1 -1
- package/dist/file/blob.d.ts.map +1 -1
- package/dist/file/blob.js +146 -109
- package/dist/file/file-reader.js +22 -50
- package/dist/file/file.js +30 -15
- package/dist/file/install-blob-polyfills.js +6 -5
- package/dist/file/install-file-polyfills.js +11 -8
- package/dist/file/readable-stream.js +5 -2
- package/dist/filesystems/fetch-node.js +78 -68
- package/dist/filesystems/node-file.js +119 -87
- package/dist/filesystems/node-filesystem.d.ts +1 -1
- package/dist/filesystems/node-filesystem.d.ts.map +1 -1
- package/dist/filesystems/node-filesystem.js +39 -35
- package/dist/filesystems/stream-utils.node.js +88 -55
- package/dist/images/encode-image-node.js +25 -12
- package/dist/images/encode-image.node.js +25 -12
- package/dist/images/parse-image-node.js +30 -23
- package/dist/images/parse-image.node.js +30 -23
- package/dist/index.browser.js +8 -3
- package/dist/index.cjs +13 -65091
- package/dist/index.cjs.map +7 -0
- package/dist/index.d.ts +4 -4
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +45 -15
- package/dist/libs/encoding-indexes-asian.js +6 -7
- package/dist/load-library/require-utils.node.js +70 -44
- package/dist/streams/make-node-stream.js +48 -46
- package/dist/text-encoder/encoding-indexes.js +31 -28
- package/dist/text-encoder/text-encoder.js +2604 -1033
- package/dist/utils/assert.js +3 -4
- package/dist/utils/is-browser.js +7 -2
- package/package.json +10 -8
- package/dist/buffer/btoa.node.js.map +0 -1
- package/dist/buffer/to-array-buffer.node.js.map +0 -1
- package/dist/crypto/node-hash.js.map +0 -1
- package/dist/dist.dev.js +0 -45
- package/dist/fetch/decode-data-uri.js.map +0 -1
- package/dist/fetch/fetch-polyfill.js.map +0 -1
- package/dist/fetch/headers-polyfill.js.map +0 -1
- package/dist/fetch/response-polyfill.js.map +0 -1
- package/dist/fetch/utils/decode-data-uri.node.js.map +0 -1
- package/dist/file/blob-stream-controller.js.map +0 -1
- package/dist/file/blob-stream.js.map +0 -1
- package/dist/file/blob.js.map +0 -1
- package/dist/file/file-reader.js.map +0 -1
- package/dist/file/file.js.map +0 -1
- package/dist/file/install-blob-polyfills.js.map +0 -1
- package/dist/file/install-file-polyfills.js.map +0 -1
- package/dist/file/readable-stream.js.map +0 -1
- package/dist/filesystems/fetch-node.js.map +0 -1
- package/dist/filesystems/node-file.js.map +0 -1
- package/dist/filesystems/node-filesystem.js.map +0 -1
- package/dist/filesystems/stream-utils.node.js.map +0 -1
- package/dist/images/encode-image-node.js.map +0 -1
- package/dist/images/encode-image.node.js.map +0 -1
- package/dist/images/parse-image-node.js.map +0 -1
- package/dist/images/parse-image.node.js.map +0 -1
- package/dist/index.browser.js.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/load-library/require-utils.node.js.map +0 -1
- package/dist/streams/make-node-stream.js.map +0 -1
- package/dist/text-encoder/encoding-indexes.js.map +0 -1
- package/dist/text-encoder/text-encoder.js.map +0 -1
- package/dist/utils/assert.js.map +0 -1
- package/dist/utils/is-browser.js.map +0 -1
package/dist/file/blob.js
CHANGED
@@ -1,119 +1,156 @@
+// Forked from @gozala's web-blob under MIT license https://github.com/Gozala/web-blob
 import { BlobStream } from "./blob-stream.js";
+/**
+ * Forked from @gozala's web-blob under MIT license
+ * @see https://github.com/Gozala/web-blob
+ */
 export class BlobPolyfill {
+    /**
+     * @param [init]
+     * @param [options]
+     */
+    constructor(init = [], options = {}) {
+        this.parts = [];
+        this.size = 0;
+        for (const part of init) {
+            if (typeof part === 'string') {
+                const bytes = new TextEncoder().encode(part);
+                this.parts.push(bytes);
+                this.size += bytes.byteLength;
+            }
+            else if (part instanceof BlobPolyfill) {
+                this.size += part.size;
+                // @ts-ignore - `parts` is marked private so TS will complain about
+                // accessing it.
+                this.parts.push(...part.parts);
+            }
+            else if (part instanceof ArrayBuffer) {
+                this.parts.push(new Uint8Array(part));
+                this.size += part.byteLength;
+            }
+            else if (part instanceof Uint8Array) {
+                this.parts.push(part);
+                this.size += part.byteLength;
+            }
+            else if (ArrayBuffer.isView(part)) {
+                const { buffer, byteOffset, byteLength } = part;
+                this.parts.push(new Uint8Array(buffer, byteOffset, byteLength));
+                this.size += byteLength;
+            }
+            else {
+                const bytes = new TextEncoder().encode(String(part));
+                this.parts.push(bytes);
+                this.size += bytes.byteLength;
+            }
+        }
+        /** @private */
+        this.type = readType(options.type);
+    }
+    /**
+     * Returns a new Blob object containing the data in the specified range of
+     * bytes of the blob on which it's called.
+     * @param start=0 - An index into the Blob indicating the first
+     * byte to include in the new Blob. If you specify a negative value, it's
+     * treated as an offset from the end of the Blob toward the beginning. For
+     * example, `-10` would be the 10th from last byte in the Blob. The default
+     * value is `0`. If you specify a value for start that is larger than the
+     * size of the source Blob, the returned Blob has size 0 and contains no
+     * data.
+     * @param end - An index into the `Blob` indicating the first byte
+     * that will *not* be included in the new `Blob` (i.e. the byte exactly at
+     * this index is not included). If you specify a negative value, it's treated
+     * as an offset from the end of the Blob toward the beginning. For example,
+     * `-10` would be the 10th from last byte in the `Blob`. The default value is
+     * size.
+     * @param type - The content type to assign to the new Blob;
+     * this will be the value of its type property. The default value is an empty
+     * string.
+     */
+    slice(start = 0, end = this.size, type = '') {
+        const { size, parts: parts } = this;
+        let offset = start < 0 ? Math.max(size + start, 0) : Math.min(start, size);
+        let limit = end < 0 ? Math.max(size + end, 0) : Math.min(end, size);
+        const span = Math.max(limit - offset, 0);
+        const blob = new BlobPolyfill([], { type });
+        if (span === 0) {
+            // @ts-ignore
+            return blob;
+        }
+        let blobSize = 0;
+        const blobParts = [];
+        for (const part of parts) {
+            const { byteLength } = part;
+            if (offset > 0 && byteLength <= offset) {
+                offset -= byteLength;
+                limit -= byteLength;
+            }
+            else {
+                const chunk = part.subarray(offset, Math.min(byteLength, limit));
+                blobParts.push(chunk);
+                blobSize += chunk.byteLength;
+                // no longer need to take that into account
+                offset = 0;
+                // don't add the overflow to new blobParts
+                if (blobSize >= span) {
+                    break;
+                }
+            }
+        }
+        blob.parts = blobParts;
+        blob.size = blobSize;
+        // @ts-ignore
+        return blob;
     }
-      size,
-      parts: parts
-    } = this;
-    let offset = start < 0 ? Math.max(size + start, 0) : Math.min(start, size);
-    let limit = end < 0 ? Math.max(size + end, 0) : Math.min(end, size);
-    const span = Math.max(limit - offset, 0);
-    const blob = new BlobPolyfill([], {
-      type
-    });
-    if (span === 0) {
-      return blob;
+    /**
+     * Returns a promise that resolves with an ArrayBuffer containing the entire
+     * contents of the Blob as binary data.
+     */
+    // eslint-disable-next-line require-await
+    async arrayBuffer() {
+        return this._toArrayBuffer();
     }
-      const chunk = part.subarray(offset, Math.min(byteLength, limit));
-      blobParts.push(chunk);
-      blobSize += chunk.byteLength;
-      offset = 0;
-      if (blobSize >= span) {
-        break;
+    /**
+     * Returns a promise that resolves with a USVString containing the entire
+     * contents of the Blob interpreted as UTF-8 text.
+     */
+    // eslint-disable-next-line require-await
+    async text() {
+        const decoder = new TextDecoder();
+        let text = '';
+        for (const part of this.parts) {
+            text += decoder.decode(part);
         }
+        return text;
     }
-    return this._toArrayBuffer();
-  }
-  async text() {
-    const decoder = new TextDecoder();
-    let text = '';
-    for (const part of this.parts) {
-      text += decoder.decode(part);
+    /**
+     */
+    // @ts-ignore
+    stream() {
+        return new BlobStream(this.parts);
     }
+    /**
+     * @returns {string}
+     */
+    toString() {
+        return '[object Blob]';
+    }
+    get [Symbol.toStringTag]() {
+        return 'Blob';
+    }
+    _toArrayBuffer() {
+        const buffer = new ArrayBuffer(this.size);
+        const bytes = new Uint8Array(buffer);
+        let offset = 0;
+        for (const part of this.parts) {
+            bytes.set(part, offset);
+            offset += part.byteLength;
+        }
+        return buffer;
     }
-    return buffer;
-  }
 }
+/**
+ */
+function readType(input = '') {
+    const type = String(input).toLowerCase();
+    return /[^\u0020-\u007E]/.test(type) ? '' : type;
 }
-//# sourceMappingURL=blob.js.map
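For reference, the reworked BlobPolyfill keeps its data as an array of Uint8Array chunks and coerces every constructor input (strings, ArrayBuffers, typed-array views, nested blobs) into that form. A minimal usage sketch; the deep dist import path below is illustrative only, since in normal use the class is installed globally by install-blob-polyfills.js:

import { BlobPolyfill } from '@loaders.gl/polyfills/dist/file/blob.js'; // illustrative path

// String and binary parts are both normalized to Uint8Array internally
const blob = new BlobPolyfill(['hello ', new Uint8Array([119, 111, 114, 108, 100])], { type: 'text/plain' });
console.log(blob.size); // 11
console.log(await blob.text()); // 'hello world'
console.log(blob.slice(6).size); // 5 - slice() returns a new BlobPolyfill over the remaining bytes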
package/dist/file/file-reader.js
CHANGED
@@ -1,59 +1,31 @@
 import { atob } from "../buffer/btoa.node.js";
 export class FileReaderPolyfill {
-    this.DONE = void 0;
-    this.EMPTY = void 0;
-    this.LOADING = void 0;
-    this.addEventListener = void 0;
-    this.removeEventListener = void 0;
-    this.dispatchEvent = void 0;
-    this.onload = null;
-  }
-  abort() {
-    return;
-  }
-  async readAsArrayBuffer(blob) {
-    const arrayBuffer = await blob.arrayBuffer();
-    if (this.onload) {
-      this.onload({
-        target: {
-          result: arrayBuffer
+    constructor() {
+        this.onload = null;
+    }
+    abort() {
+        return;
+    }
+    async readAsArrayBuffer(blob) {
+        const arrayBuffer = await blob.arrayBuffer();
+        if (this.onload) {
+            this.onload({ target: { result: arrayBuffer } });
         }
-      });
     }
-    this.onload({
-      target: {
-        result: dataUrl
+    async readAsBinaryString(blob) {
+        throw Error('Not implemented');
+    }
+    async readAsDataURL(blob) {
+        const text = await blob.text();
+        const dataUrl = `data://;base64,${atob(text)}`;
+        if (this.onload) {
+            this.onload({ target: { result: dataUrl } });
         }
-      });
     }
-    this.onload({
-      target: {
-        result: text
+    async readAsText(blob) {
+        const text = await blob.text();
+        if (this.onload) {
+            this.onload({ target: { result: text } });
         }
-      });
     }
-  }
 }
-//# sourceMappingURL=file-reader.js.map
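The slimmed-down FileReaderPolyfill drops the event-listener surface and readyState constants and only supports the callback-style onload API (readAsBinaryString now throws). A short sketch of the supported pattern, assuming any Blob-like input with arrayBuffer()/text() methods:

const reader = new FileReaderPolyfill();
reader.onload = (event) => {
    // event.target.result is an ArrayBuffer when using readAsArrayBuffer()
    console.log(event.target.result.byteLength);
};
await reader.readAsArrayBuffer(new Blob(['binary payload'])); // logs 14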
package/dist/file/file.js
CHANGED
@@ -1,17 +1,32 @@
+// Forked from @gozala's web-file under MIT license https://github.com/Gozala/web-file
+/**
+ * Forked from @gozala's web-file under MIT license
+ * @see https://github.com/Gozala/web-file
+ */
+// @ts-ignore
 export class FilePolyfill extends globalThis.Blob {
+    /**
+     * @param init
+     * @param name - A USVString representing the file name or the path
+     * to the file.
+     * @param [options]
+     */
+    constructor(init, name, options = {}) {
+        super(init, options);
+        // implements File {
+        // public API
+        /** The name of the file referenced by the File object. */
+        this.name = '';
+        /** The path the URL of the File is relative to. */
+        this.webkitRelativePath = '';
+        // Per File API spec https://w3c.github.io/FileAPI/#file-constructor
+        // Every "/" character of file name must be replaced with a ":".
+        /** @private */
+        this.name = String(name).replace(/\//g, ':');
+        /** @private */
+        this.lastModified = options?.lastModified || Date.now();
+    }
+    get [Symbol.toStringTag]() {
+        return 'File';
+    }
 }
-//# sourceMappingURL=file.js.map
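FilePolyfill now extends the runtime's global Blob instead of reimplementing it, and normalizes the supplied name per the File API spec (every '/' becomes ':'). An illustrative sketch with hypothetical values:

const file = new FilePolyfill(['{"answer": 42}'], 'data/config.json', { lastModified: 1700000000000 });
console.log(file.name);                            // 'data:config.json'
console.log(file.lastModified);                    // 1700000000000
console.log(Object.prototype.toString.call(file)); // '[object File]'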
package/dist/file/install-blob-polyfills.js
CHANGED
@@ -1,9 +1,10 @@
+// import {ReadableStreamPolyfill} from './readable-stream';
 import { BlobPolyfill } from "./blob.js";
 export function instalBlobPolyfills() {
+    if (typeof Blob === 'undefined' && !globalThis.Blob) {
+        // @ts-ignore;
+        globalThis.Blob = BlobPolyfill;
+    }
+    return globalThis.Blob;
 }
 export const Blob_ = instalBlobPolyfills();
-//# sourceMappingURL=install-blob-polyfills.js.map
package/dist/file/install-file-polyfills.js
CHANGED
@@ -1,13 +1,16 @@
+// import {ReadableStreamPolyfill} from './readable-stream';
 import { FileReaderPolyfill } from "./file-reader.js";
 import { FilePolyfill } from "./file.js";
 export function installFilePolyfills() {
+    if (typeof FileReader === 'undefined' && !globalThis.FileReader) {
+        // @ts-ignore;
+        globalThis.FileReader = FileReaderPolyfill;
+    }
+    // Install minimal Node.js File polyfill
+    if (typeof File === 'undefined' && !globalThis.File) {
+        // @ts-ignore;
+        globalThis.File = FilePolyfill;
+    }
+    return global;
 }
 export const File_ = installFilePolyfills();
-//# sourceMappingURL=install-file-polyfills.js.map
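Both installers are now guarded so they never overwrite an existing global (relevant on recent Node.js versions that already provide Blob and File), and they run as a side effect of importing the module. A hedged sketch of the intended usage, assuming the package's top-level import wires these installers up as in earlier releases:

import '@loaders.gl/polyfills'; // installs Blob/File/FileReader globals only if missing

// Browser-style file code can now run under Node.js
const file = new File(['col1,col2\n1,2\n'], 'table.csv', { type: 'text/csv' });
console.log(file.name, file.size, await file.text());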
package/dist/file/readable-stream.js
CHANGED
@@ -1,4 +1,7 @@
 import { ReadableStream as WSPReadableStream } from 'web-streams-polyfill';
+// Want a polyfill, but please don't install it
+// @ts-ignore
 delete globalThis.ReadableStream;
+// @ts-ignore
+export class ReadableStreamPolyfill extends WSPReadableStream {
+}
package/dist/filesystems/fetch-node.js
CHANGED
@@ -1,77 +1,87 @@
+// loaders.gl
+// SPDX-License-Identifier: MIT
+// Copyright (c) vis.gl contributors
 import fs from 'fs';
 import { Readable } from 'stream';
 import { resolvePath } from '@loaders.gl/loader-utils';
 import { decompressReadStream } from "./stream-utils.node.js";
-const isBoolean = x => typeof x === 'boolean';
-const isFunction = x => typeof x === 'function';
-const isObject = x => x !== null && typeof x === 'object';
-const isReadableNodeStream = x => isObject(x) && isFunction(x.read) && isFunction(x.pipe) && isBoolean(x.readable);
+const isBoolean = (x) => typeof x === 'boolean';
+const isFunction = (x) => typeof x === 'function';
+const isObject = (x) => x !== null && typeof x === 'object';
+const isReadableNodeStream = (x) => isObject(x) && isFunction(x.read) && isFunction(x.pipe) && isBoolean(x.readable);
+/**
+ * Enables
+ * @param url
+ * @param options
+ * @returns
+ */
+// eslint-disable-next-line max-statements
 export async function fetchNode(url, options) {
-    responseHeaders
-    encoding
+    // Support `file://` protocol
+    const FILE_PROTOCOL_REGEX = /^file:\/\//;
+    url.replace(FILE_PROTOCOL_REGEX, '/');
+    // Remove any query parameters, as they have no meaning
+    let noqueryUrl = url.split('?')[0];
+    noqueryUrl = resolvePath(noqueryUrl);
+    const responseHeaders = new Headers();
+    // Automatically decompress gzipped files with .gz extension
+    if (url.endsWith('.gz')) {
+        // url = url.slice(0, -3);
+        responseHeaders['content-encoding'] = 'gzip';
+    }
+    if (url.endsWith('.br')) {
+        // url = url.slice(0, -3);
+        responseHeaders['content-encoding'] = 'br';
+    }
+    try {
+        // Now open the stream
+        const body = await new Promise((resolve, reject) => {
+            // @ts-ignore
+            const stream = fs.createReadStream(noqueryUrl, { encoding: null });
+            stream.once('readable', () => resolve(stream));
+            stream.on('error', (error) => reject(error));
+        });
+        let bodyStream = body;
+        // Check for content-encoding and create a decompression stream
+        if (isReadableNodeStream(body)) {
+            bodyStream = decompressReadStream(body, responseHeaders);
+        }
+        else if (typeof body === 'string') {
+            bodyStream = Readable.from([new TextEncoder().encode(body)]);
+        }
+        else {
+            bodyStream = Readable.from([body || new ArrayBuffer(0)]);
+        }
+        const status = 200;
+        const statusText = 'OK';
+        const headers = getHeadersForFile(noqueryUrl);
+        // @ts-expect-error
+        const response = new Response(bodyStream, { headers, status, statusText });
+        Object.defineProperty(response, 'url', { value: url });
+        return response;
+    }
+    catch (error) {
+        // console.error(error);
+        const errorMessage = error.message;
+        const status = 400;
+        const statusText = errorMessage;
+        const headers = {};
+        const response = new Response(errorMessage, { headers, status, statusText });
+        Object.defineProperty(response, 'url', { value: url });
+        return response;
     }
-  const status = 200;
-  const statusText = 'OK';
-  const headers = getHeadersForFile(noqueryUrl);
-  const response = new Response(bodyStream, {
-    headers,
-    status,
-    statusText
-  });
-  Object.defineProperty(response, 'url', {
-    value: url
-  });
-  return response;
-  } catch (error) {
-  const errorMessage = error.message;
-  const status = 400;
-  const statusText = errorMessage;
-  const headers = {};
-  const response = new Response(errorMessage, {
-    headers,
-    status,
-    statusText
-  });
-  Object.defineProperty(response, 'url', {
-    value: url
-  });
-  return response;
-  }
 }
 function getHeadersForFile(noqueryUrl) {
+    const headers = {};
+    // Fix up content length if we can for best progress experience
+    if (!headers['content-length']) {
+        const stats = fs.statSync(noqueryUrl);
+        headers['content-length'] = stats.size;
+    }
+    // Automatically decompress gzipped files with .gz extension
+    if (noqueryUrl.endsWith('.gz')) {
+        noqueryUrl = noqueryUrl.slice(0, -3);
+        headers['content-encoding'] = 'gzip';
+    }
+    return new Headers(headers);
 }
-//# sourceMappingURL=fetch-node.js.map
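fetchNode() resolves the path, strips query parameters, streams the file with fs.createReadStream, and wraps the stream in a standard Response (routing .gz/.br content through decompressReadStream); failures come back as a 400 Response rather than a thrown error. A usage sketch; the deep dist import is illustrative only, since the function is normally reached through the package's fetch polyfill:

import { fetchNode } from '@loaders.gl/polyfills/dist/filesystems/fetch-node.js'; // illustrative path

const response = await fetchNode('./data/points.csv', {});
if (response.ok) {
    console.log(response.headers.get('content-length'));
    console.log((await response.text()).slice(0, 80));
} else {
    console.error(response.status, response.statusText); // 400 plus the underlying error message
}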