@loaders.gl/polyfills 4.0.0-alpha.4 → 4.0.0-alpha.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bundle.d.ts +2 -0
- package/dist/bundle.d.ts.map +1 -0
- package/dist/bundle.js +2 -2
- package/dist/dist.min.js +4207 -0
- package/dist/es5/bundle.js +6 -0
- package/dist/es5/bundle.js.map +1 -0
- package/dist/es5/index.js +103 -0
- package/dist/es5/index.js.map +1 -0
- package/dist/es5/lib/encoding-indexes.js +37 -0
- package/dist/es5/lib/encoding-indexes.js.map +1 -0
- package/dist/es5/lib/encoding.js +1214 -0
- package/dist/es5/lib/encoding.js.map +1 -0
- package/{src/libs/encoding-indexes.js → dist/es5/libs/encoding-indexes-asian.js} +2 -40
- package/dist/es5/node/buffer/btoa.node.js +14 -0
- package/dist/es5/node/buffer/btoa.node.js.map +1 -0
- package/dist/es5/node/buffer/to-array-buffer.node.js +14 -0
- package/dist/es5/node/buffer/to-array-buffer.node.js.map +1 -0
- package/dist/es5/node/fetch/fetch-file.node.js +83 -0
- package/dist/es5/node/fetch/fetch-file.node.js.map +1 -0
- package/dist/es5/node/fetch/fetch.node.js +194 -0
- package/dist/es5/node/fetch/fetch.node.js.map +1 -0
- package/dist/es5/node/fetch/headers.node.js +151 -0
- package/dist/es5/node/fetch/headers.node.js.map +1 -0
- package/dist/es5/node/fetch/response.node.js +182 -0
- package/dist/es5/node/fetch/response.node.js.map +1 -0
- package/dist/es5/node/fetch/utils/decode-data-uri.node.js +58 -0
- package/dist/es5/node/fetch/utils/decode-data-uri.node.js.map +1 -0
- package/dist/es5/node/fetch/utils/stream-utils.node.js +92 -0
- package/dist/es5/node/fetch/utils/stream-utils.node.js.map +1 -0
- package/dist/es5/node/file/blob-stream-controller.js +90 -0
- package/dist/es5/node/file/blob-stream-controller.js.map +1 -0
- package/dist/es5/node/file/blob-stream.js +64 -0
- package/dist/es5/node/file/blob-stream.js.map +1 -0
- package/dist/es5/node/file/blob.js +212 -0
- package/dist/es5/node/file/blob.js.map +1 -0
- package/dist/es5/node/file/file-reader.js +153 -0
- package/dist/es5/node/file/file-reader.js.map +1 -0
- package/dist/es5/node/file/file.js +44 -0
- package/dist/es5/node/file/file.js.map +1 -0
- package/dist/es5/node/file/install-file-polyfills.js +25 -0
- package/dist/es5/node/file/install-file-polyfills.js.map +1 -0
- package/dist/es5/node/file/readable-stream.js +27 -0
- package/dist/es5/node/file/readable-stream.js.map +1 -0
- package/dist/es5/node/images/encode-image.node.js +30 -0
- package/dist/es5/node/images/encode-image.node.js.map +1 -0
- package/dist/es5/node/images/parse-image.node.js +64 -0
- package/dist/es5/node/images/parse-image.node.js.map +1 -0
- package/dist/es5/promise/all-settled.js +28 -0
- package/dist/es5/promise/all-settled.js.map +1 -0
- package/dist/es5/utils/assert.js +12 -0
- package/dist/es5/utils/assert.js.map +1 -0
- package/dist/es5/utils/globals.js +18 -0
- package/dist/es5/utils/globals.js.map +1 -0
- package/dist/esm/bundle.js +4 -0
- package/dist/esm/bundle.js.map +1 -0
- package/dist/esm/index.js +54 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/lib/encoding-indexes.js +30 -0
- package/dist/esm/lib/encoding-indexes.js.map +1 -0
- package/dist/esm/lib/encoding.js +1206 -0
- package/dist/esm/lib/encoding.js.map +1 -0
- package/dist/{libs/encoding-indexes.js → esm/libs/encoding-indexes-asian.js} +2 -40
- package/dist/esm/node/buffer/btoa.node.js +7 -0
- package/dist/esm/node/buffer/btoa.node.js.map +1 -0
- package/dist/esm/node/buffer/to-array-buffer.node.js +8 -0
- package/dist/esm/node/buffer/to-array-buffer.node.js.map +1 -0
- package/dist/esm/node/fetch/fetch-file.node.js +50 -0
- package/dist/esm/node/fetch/fetch-file.node.js.map +1 -0
- package/dist/esm/node/fetch/fetch.node.js +126 -0
- package/dist/esm/node/fetch/fetch.node.js.map +1 -0
- package/dist/esm/node/fetch/headers.node.js +102 -0
- package/dist/esm/node/fetch/headers.node.js.map +1 -0
- package/dist/esm/node/fetch/response.node.js +67 -0
- package/dist/esm/node/fetch/response.node.js.map +1 -0
- package/dist/esm/node/fetch/utils/decode-data-uri.node.js +45 -0
- package/dist/esm/node/fetch/utils/decode-data-uri.node.js.map +1 -0
- package/dist/esm/node/fetch/utils/stream-utils.node.js +43 -0
- package/dist/esm/node/fetch/utils/stream-utils.node.js.map +1 -0
- package/dist/esm/node/file/blob-stream-controller.js +44 -0
- package/dist/esm/node/file/blob-stream-controller.js.map +1 -0
- package/dist/esm/node/file/blob-stream.js +20 -0
- package/dist/esm/node/file/blob-stream.js.map +1 -0
- package/dist/esm/node/file/blob.js +120 -0
- package/dist/esm/node/file/blob.js.map +1 -0
- package/dist/esm/node/file/file-reader.js +60 -0
- package/dist/esm/node/file/file-reader.js.map +1 -0
- package/dist/esm/node/file/file.js +19 -0
- package/dist/esm/node/file/file.js.map +1 -0
- package/dist/esm/node/file/install-file-polyfills.js +19 -0
- package/dist/esm/node/file/install-file-polyfills.js.map +1 -0
- package/dist/esm/node/file/readable-stream.js +4 -0
- package/dist/esm/node/file/readable-stream.js.map +1 -0
- package/dist/esm/node/images/encode-image.node.js +20 -0
- package/dist/esm/node/images/encode-image.node.js.map +1 -0
- package/dist/esm/node/images/parse-image.node.js +29 -0
- package/dist/esm/node/images/parse-image.node.js.map +1 -0
- package/dist/esm/promise/all-settled.js +19 -0
- package/dist/esm/promise/all-settled.js.map +1 -0
- package/dist/esm/utils/assert.js +6 -0
- package/dist/esm/utils/assert.js.map +1 -0
- package/dist/esm/utils/globals.js +9 -0
- package/dist/esm/utils/globals.js.map +1 -0
- package/dist/index.d.ts +8 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +90 -49
- package/dist/lib/encoding-indexes.d.ts +31 -0
- package/dist/lib/encoding-indexes.d.ts.map +1 -0
- package/dist/lib/encoding-indexes.js +35 -0
- package/dist/lib/encoding.d.ts +15 -0
- package/dist/lib/encoding.d.ts.map +1 -0
- package/dist/lib/encoding.js +2779 -0
- package/dist/libs/encoding-indexes-asian.d.ts +10 -0
- package/dist/libs/encoding-indexes-asian.d.ts.map +1 -0
- package/dist/libs/encoding-indexes-asian.js +14 -0
- package/dist/node/buffer/btoa.node.d.ts +3 -0
- package/dist/node/buffer/btoa.node.d.ts.map +1 -0
- package/dist/node/buffer/btoa.node.js +12 -5
- package/dist/node/buffer/to-array-buffer.node.d.ts +2 -0
- package/dist/node/buffer/to-array-buffer.node.d.ts.map +1 -0
- package/dist/node/buffer/to-array-buffer.node.js +11 -8
- package/dist/node/fetch/fetch-file.node.d.ts +4 -0
- package/dist/node/fetch/fetch-file.node.d.ts.map +1 -0
- package/dist/node/fetch/fetch-file.node.js +51 -0
- package/dist/node/fetch/fetch.node.d.ts +12 -0
- package/dist/node/fetch/fetch.node.d.ts.map +1 -0
- package/dist/node/fetch/fetch.node.js +128 -111
- package/dist/node/fetch/headers.node.d.ts +34 -0
- package/dist/node/fetch/headers.node.d.ts.map +1 -0
- package/dist/node/fetch/headers.node.js +95 -114
- package/dist/node/fetch/response.node.d.ts +22 -0
- package/dist/node/fetch/response.node.d.ts.map +1 -0
- package/dist/node/fetch/response.node.js +72 -84
- package/dist/node/fetch/utils/decode-data-uri.node.d.ts +16 -0
- package/dist/node/fetch/utils/decode-data-uri.node.d.ts.map +1 -0
- package/dist/node/fetch/utils/decode-data-uri.node.js +63 -55
- package/dist/node/fetch/utils/stream-utils.node.d.ts +17 -0
- package/dist/node/fetch/utils/stream-utils.node.d.ts.map +1 -0
- package/dist/node/fetch/utils/stream-utils.node.js +69 -85
- package/dist/node/file/blob-stream-controller.d.ts +29 -0
- package/dist/node/file/blob-stream-controller.d.ts.map +1 -0
- package/dist/node/file/blob-stream-controller.js +59 -52
- package/dist/node/file/blob-stream.d.ts +25 -0
- package/dist/node/file/blob-stream.d.ts.map +1 -0
- package/dist/node/file/blob-stream.js +36 -25
- package/dist/node/file/blob.d.ts +58 -0
- package/dist/node/file/blob.d.ts.map +1 -0
- package/dist/node/file/blob.js +151 -131
- package/dist/node/file/file-reader.d.ts +24 -0
- package/dist/node/file/file-reader.d.ts.map +1 -0
- package/dist/node/file/file-reader.js +28 -77
- package/dist/node/file/file.d.ts +25 -0
- package/dist/node/file/file.d.ts.map +1 -0
- package/dist/node/file/file.js +36 -25
- package/dist/node/file/install-file-polyfills.d.ts +2 -0
- package/dist/node/file/install-file-polyfills.d.ts.map +1 -0
- package/dist/node/file/install-file-polyfills.js +26 -21
- package/dist/node/file/readable-stream.d.ts +4 -0
- package/dist/node/file/readable-stream.d.ts.map +1 -0
- package/dist/node/file/readable-stream.js +10 -3
- package/dist/node/images/encode-image.node.d.ts +20 -0
- package/dist/node/images/encode-image.node.d.ts.map +1 -0
- package/dist/node/images/encode-image.node.js +38 -17
- package/dist/node/images/parse-image.node.d.ts +13 -0
- package/dist/node/images/parse-image.node.d.ts.map +1 -0
- package/dist/node/images/parse-image.node.js +40 -19
- package/dist/promise/all-settled.d.ts +10 -0
- package/dist/promise/all-settled.d.ts.map +1 -0
- package/dist/promise/all-settled.js +22 -17
- package/dist/utils/assert.d.ts +2 -0
- package/dist/utils/assert.d.ts.map +1 -0
- package/dist/utils/assert.js +8 -5
- package/dist/utils/globals.d.ts +4 -0
- package/dist/utils/globals.d.ts.map +1 -0
- package/dist/utils/globals.js +34 -7
- package/package.json +9 -8
- package/src/index.ts +9 -6
- package/src/lib/encoding-indexes.ts +34 -0
- package/src/{libs/encoding.js → lib/encoding.ts} +78 -78
- package/src/libs/encoding-indexes-asian.js +13 -0
- package/src/node/fetch/fetch-file.node.ts +51 -0
- package/src/node/fetch/fetch.node.ts +64 -30
- package/src/node/fetch/headers.node.ts +1 -1
- package/src/node/fetch/response.node.ts +4 -2
- package/src/node/fetch/utils/decode-data-uri.node.ts +7 -6
- package/src/node/fetch/utils/stream-utils.node.ts +39 -64
- package/src/node/images/parse-image.node.ts +35 -20
- package/dist/bundle.js.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/libs/encoding.js +0 -3084
- package/dist/node/buffer/btoa.node.js.map +0 -1
- package/dist/node/buffer/to-array-buffer.node.js.map +0 -1
- package/dist/node/fetch/fetch.node.js.map +0 -1
- package/dist/node/fetch/headers.node.js.map +0 -1
- package/dist/node/fetch/response.node.js.map +0 -1
- package/dist/node/fetch/utils/decode-data-uri.node.js.map +0 -1
- package/dist/node/fetch/utils/stream-utils.node.js.map +0 -1
- package/dist/node/file/blob-stream-controller.js.map +0 -1
- package/dist/node/file/blob-stream.js.map +0 -1
- package/dist/node/file/blob.js.map +0 -1
- package/dist/node/file/file-reader.js.map +0 -1
- package/dist/node/file/file.js.map +0 -1
- package/dist/node/file/install-file-polyfills.js.map +0 -1
- package/dist/node/file/readable-stream.js.map +0 -1
- package/dist/node/images/encode-image.node.js.map +0 -1
- package/dist/node/images/parse-image.node.js.map +0 -1
- package/dist/promise/all-settled.js.map +0 -1
- package/dist/utils/assert.js.map +0 -1
- package/dist/utils/globals.js.map +0 -1
package/src/node/fetch/fetch-file.node.ts
@@ -0,0 +1,51 @@
+// loaders.gl, MIT license
+
+import fs from 'fs'; // `fs` will be empty object in browsers (see package.json "browser" field).
+import {Response} from './response.node';
+import {Headers} from './headers.node';
+
+export function isRequestURL(url: string): boolean {
+  return url.startsWith('http:') || url.startsWith('https:');
+}
+
+export async function fetchFileNode(url: string, options): Promise<Response> {
+  const noqueryUrl = url.split('?')[0];
+
+  try {
+    // Now open the stream
+    const body = await new Promise((resolve, reject) => {
+      // @ts-ignore
+      const stream = fs.createReadStream(noqueryUrl, {encoding: null});
+      stream.once('readable', () => resolve(stream));
+      stream.on('error', (error) => reject(error));
+    });
+
+    const status = 200;
+    const statusText = 'OK';
+    const headers = getHeadersForFile(noqueryUrl);
+    return new Response(body, {headers, status, statusText, url});
+  } catch (error) {
+    const status = 400;
+    const statusText = (error as Error).message;
+    const headers = {};
+    return new Response((error as Error).message, {headers, status, statusText, url});
+  }
+}
+
+function getHeadersForFile(noqueryUrl: string): Headers {
+  const headers = {};
+
+  // Fix up content length if we can for best progress experience
+  if (!headers['content-length']) {
+    const stats = fs.statSync(noqueryUrl);
+    headers['content-length'] = stats.size;
+  }
+
+  // Automatically decompress gzipped files with .gz extension
+  if (noqueryUrl.endsWith('.gz')) {
+    noqueryUrl = noqueryUrl.slice(0, -3);
+    headers['content-encoding'] = 'gzip';
+  }
+
+  return new Headers(headers);
+}
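For reference, a minimal usage sketch of the new local-file code path (illustrative only, not part of the package; it assumes a Node.js environment where this package actually installs its fetch polyfill onto the global object):

  // Importing the polyfills package installs fetchNode as the global fetch
  // (only when no global fetch already exists).
  import '@loaders.gl/polyfills';

  async function readLocalFile(path: string): Promise<ArrayBuffer> {
    // A URL that is neither http(s): nor data: falls through to fetchFileNode(),
    // which wraps fs.createReadStream() in a Response with synthesized status and headers.
    const response = await fetch(path);
    if (!response.ok) {
      throw new Error(response.statusText);
    }
    return await response.arrayBuffer();
  }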
package/src/node/fetch/fetch.node.ts
@@ -1,20 +1,29 @@
-
-import Response from './response.node';
-import Headers from './headers.node';
+// loaders.gl, MIT license
 
+import http from 'http';
+import https from 'https';
+import {Response} from './response.node';
+import {Headers} from './headers.node';
 import {decodeDataUri} from './utils/decode-data-uri.node';
-import {createReadStream} from './utils/stream-utils.node';
 
-
-
+import {fetchFileNode} from './fetch-file.node';
+
+const isDataURL = (url: string): boolean => url.startsWith('data:');
+const isRequestURL = (url: string): boolean => url.startsWith('http:') || url.startsWith('https:');
 
 /**
  * Emulation of Browser fetch for Node.js
  * @param url
  * @param options
  */
-export default async function fetchNode(url, options) {
+// eslint-disable-next-line complexity
+export async function fetchNode(url: string, options): Promise<Response> {
   try {
+    // Handle file streams in node
+    if (!isRequestURL(url) && !isDataURL(url)) {
+      return await fetchFileNode(url, options);
+    }
+
     // Handle data urls in node, to match `fetch``
     // Note - this loses the MIME type, data URIs are handled directly in fetch
     if (isDataURL(url)) {
@@ -35,7 +44,7 @@ export default async function fetchNode(url, options) {
     }
 
     // Need to create the stream in advance since Response constructor needs to be sync
-    const body = await createReadStream(originalUrl, options);
+    const body = await createHTTPRequestReadStream(originalUrl, options);
     const headers = getHeaders(url, body, syntheticResponseHeaders);
     const {status, statusText} = getStatus(body);
 
@@ -55,6 +64,23 @@ export default async function fetchNode(url, options) {
   }
 }
 
+/** Returns a promise that resolves to a readable stream */
+export async function createHTTPRequestReadStream(
+  url: string,
+  options
+): Promise<http.IncomingMessage> {
+  // HANDLE HTTP/HTTPS REQUESTS IN NODE
+  // TODO: THIS IS BAD SINCE WE RETURN A PROMISE INSTEAD OF A STREAM
+  return await new Promise((resolve, reject) => {
+    const requestOptions = getRequestOptions(url, options);
+    const req = url.startsWith('https:')
+      ? https.request(requestOptions, (res) => resolve(res))
+      : http.request(requestOptions, (res) => resolve(res));
+    req.on('error', (error) => reject(error));
+    req.end();
+  });
+}
+
 /**
  * Generate redirect url from location without origin and protocol.
  * @param originalUrl
@@ -72,9 +98,33 @@ function generateRedirectUrl(originalUrl: string, location: string): string {
 }
 
 // HELPER FUNCTIONS
-// PRIVATE
 
-function getStatus(httpResponse) {
+function getRequestOptions(url: string, options?: {fetch?: typeof fetch; headers?}) {
+  // Ensure header keys are lower case so that we can merge without duplicates
+  const originalHeaders = options?.headers || {};
+  const headers = {};
+  for (const key of Object.keys(originalHeaders)) {
+    headers[key.toLowerCase()] = originalHeaders[key];
+  }
+
+  // Add default accept-encoding to headers
+  headers['accept-encoding'] = headers['accept-encoding'] || 'gzip,br,deflate';
+
+  const urlObject = new URL(url);
+  return {
+    hostname: urlObject.hostname,
+    path: urlObject.pathname,
+    method: 'GET',
+    // Add options and user provided 'options.fetch' overrides if available
+    ...options,
+    ...options?.fetch,
+    // Override with updated headers with accepted encodings:
+    headers,
+    port: urlObject.port
+  };
+}
+
+function getStatus(httpResponse: http.IncomingMessage): {status: number; statusText: string} {
   if (httpResponse.statusCode) {
     return {status: httpResponse.statusCode, statusText: httpResponse.statusMessage || 'NA'};
   }
@@ -105,24 +155,8 @@ function getHeaders(url, httpResponse, additionalHeaders = {}) {
   return new Headers(headers);
 }
 
-
-
-
-
-  } else if (isDataURL(url)) {
-    // TODO - remove media type etc
-    return url.length - 'data:'.length;
-  }
-  // File URL
-  // TODO - how to handle non-existing file, this presumably just throws
-  try {
-    // strip query params from URL
-    const noqueryUrl = url.split('?')[0];
-    const stats = fs.statSync(noqueryUrl);
-    return stats.size;
-  } catch (error) {
-    // ignore for now
-  }
-
-  return null;
+/** Needs to be read from actual headers */
+function getContentLength(url: string): number | null {
+  // TODO - remove media type etc
+  return isDataURL(url) ? url.length - 'data:'.length : null;
 }
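As an illustration of the new createHTTPRequestReadStream() helper (a sketch, not package code; it assumes the export shown in the diff above can be imported):

  import http from 'http';
  import {createHTTPRequestReadStream} from './fetch.node';

  // Resolves once response headers arrive; the body stays available as a Node stream.
  async function getStatusCode(url: string): Promise<number | undefined> {
    const res: http.IncomingMessage = await createHTTPRequestReadStream(url, {});
    res.resume(); // drain the body, we only care about the status line here
    return res.statusCode;
  }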
package/src/node/fetch/response.node.ts
@@ -1,6 +1,8 @@
+// loaders.gl, MIT license
+
 import {assert} from '../../utils/assert';
 import {decompressReadStream, concatenateReadStream} from './utils/stream-utils.node';
-import Headers from './headers.node';
+import {Headers} from './headers.node';
 
 const isBoolean = (x) => typeof x === 'boolean';
 const isFunction = (x) => typeof x === 'function';
@@ -20,7 +22,7 @@ const isReadableNodeStream = (x) =>
  */
 import {Readable} from 'stream';
 
-export default class Response {
+export class Response {
   readonly ok: boolean;
   readonly status: number;
   readonly statusText: string;
package/src/node/fetch/utils/decode-data-uri.node.ts
@@ -1,7 +1,5 @@
 // Based on binary-gltf-utils under MIT license: Copyright (c) 2016-17 Karl Cheng
 
-import {assert} from '../../../utils/assert';
-
 const isArrayBuffer = (x) => x && x instanceof ArrayBuffer;
 const isBuffer = (x) => x && x instanceof Buffer;
 
@@ -11,7 +9,7 @@ const isBuffer = (x) => x && x instanceof Buffer;
  * @param {string} uri - a data URI (assumed to be valid)
  * @returns {Object} { buffer, mimeType }
  */
-export function decodeDataUri(uri) {
+export function decodeDataUri(uri: string): {arrayBuffer: ArrayBuffer; mimeType: string} {
   const dataIndex = uri.indexOf(',');
 
   let buffer;
@@ -37,13 +35,14 @@ export function decodeDataUri(uri) {
  * @param data
  * @todo Duplicate of core
  */
-export function toArrayBuffer(data) {
+export function toArrayBuffer(data: unknown): ArrayBuffer {
   if (isArrayBuffer(data)) {
-    return data;
+    return data as ArrayBuffer;
   }
 
   // TODO - per docs we should just be able to call buffer.buffer, but there are issues
   if (isBuffer(data)) {
+    // @ts-expect-error
     const typedArray = new Uint8Array(data);
     return typedArray.buffer;
   }
@@ -60,9 +59,11 @@ export function toArrayBuffer(data) {
   }
 
   // HACK to support Blob polyfill
+  // @ts-expect-error
   if (data && typeof data === 'object' && data._toArrayBuffer) {
+    // @ts-expect-error
     return data._toArrayBuffer();
   }
 
-
+  throw new Error(`toArrayBuffer(${JSON.stringify(data, null, 2).slice(10)})`);
 }
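A quick illustrative check of the toArrayBuffer() behavior shown above (sketch only; assumes the function is imported from this module):

  import {toArrayBuffer} from './decode-data-uri.node';

  const buf = Buffer.from('hello');
  const arrayBuffer = toArrayBuffer(buf);            // Buffer contents are copied into a fresh Uint8Array
  console.log(arrayBuffer.byteLength);               // 5
  console.log(Buffer.from(arrayBuffer).toString());  // 'hello'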
package/src/node/fetch/utils/stream-utils.node.ts
@@ -1,37 +1,12 @@
-
-
-import https from 'https';
+// loaders.gl, MIT license
+
 import zlib from 'zlib';
 
 import {toArrayBuffer} from './decode-data-uri.node';
 
-
-
-
-export async function createReadStream(url, options) {
-  // Handle file streams in node
-  if (!isRequestURL(url)) {
-    const noqueryUrl = url.split('?')[0];
-    // Now open the stream
-    return await new Promise((resolve, reject) => {
-      // @ts-ignore
-      const stream = fs.createReadStream(noqueryUrl, {encoding: null});
-      stream.once('readable', () => resolve(stream));
-      stream.on('error', (error) => reject(error));
-    });
-  }
-
-  // HANDLE HTTP/HTTPS REQUESTS IN NODE
-  // TODO: THIS IS BAD SINCE WE RETURN A PROMISE INSTEAD OF A STREAM
-  return await new Promise((resolve, reject) => {
-    const requestFunction = url.startsWith('https:') ? https.request : http.request;
-    const requestOptions = getRequestOptions(url, options);
-    const req = requestFunction(requestOptions, (res) => resolve(res));
-    req.on('error', (error) => reject(error));
-    req.end();
-  });
-}
-
+/**
+ *
+ */
 export function decompressReadStream(readStream, headers) {
   switch (headers.get('content-encoding')) {
     case 'br':
@@ -46,8 +21,13 @@ export function decompressReadStream(readStream, headers) {
   }
 }
 
-
-
+/**
+ *
+ * @param readStream
+ * @returns
+ */
+export async function concatenateReadStream(readStream): Promise<ArrayBuffer> {
+  const arrayBufferChunks: ArrayBuffer[] = [];
 
   return await new Promise((resolve, reject) => {
     readStream.on('error', (error) => reject(error));
@@ -60,45 +40,40 @@ export async function concatenateReadStream(readStream) {
       if (typeof chunk === 'string') {
         reject(new Error('Read stream not binary'));
       }
-
-      arrayBuffer = concatenateArrayBuffers(arrayBuffer, chunkAsArrayBuffer);
+      arrayBufferChunks.push(toArrayBuffer(chunk));
     });
 
-    readStream.on('end', () =>
+    readStream.on('end', () => {
+      const arrayBuffer = concatenateArrayBuffers(arrayBufferChunks);
+      resolve(arrayBuffer);
+    });
   });
 }
 
-
+/**
+ * Concatenate a sequence of ArrayBuffers
+ * @return A concatenated ArrayBuffer
+ * @note duplicates loader-utils since polyfills should be independent
+ */
+export function concatenateArrayBuffers(sources: (ArrayBuffer | Uint8Array)[]): ArrayBuffer {
+  // Make sure all inputs are wrapped in typed arrays
+  const sourceArrays = sources.map((source2) =>
+    source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2
+  );
 
-
-
-  const originalHeaders = options?.headers || {};
-  const headers = {};
-  for (const key of Object.keys(originalHeaders)) {
-    headers[key.toLowerCase()] = originalHeaders[key];
-  }
+  // Get length of all inputs
+  const byteLength = sourceArrays.reduce((length, typedArray) => length + typedArray.byteLength, 0);
 
-  //
-
+  // Allocate array with space for all inputs
+  const result = new Uint8Array(byteLength);
 
-
-
-
-
-
-
-    ...options,
-    ...options?.fetch,
-    // Override with updated headers with accepted encodings:
-    headers
-  };
-}
+  // Copy the subarrays
+  let offset = 0;
+  for (const sourceArray of sourceArrays) {
+    result.set(sourceArray, offset);
+    offset += sourceArray.byteLength;
+  }
 
-
-
-  const sourceArray2 = source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2;
-  const temp = new Uint8Array(sourceArray1.byteLength + sourceArray2.byteLength);
-  temp.set(sourceArray1, 0);
-  temp.set(sourceArray2, sourceArray1.byteLength);
-  return temp.buffer;
+  // We work with ArrayBuffers, discard the typed array wrapper
+  return result.buffer;
 }
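A small illustrative check of the concatenateArrayBuffers() semantics introduced above (sketch only; assumes the export shown in the diff is importable):

  import {concatenateArrayBuffers} from './stream-utils.node';

  const a = new Uint8Array([1, 2]).buffer;         // ArrayBuffer input
  const b = new Uint8Array([3, 4, 5]);             // Uint8Array input is also accepted
  const merged = concatenateArrayBuffers([a, b]);  // chunks are copied back-to-back
  console.log(new Uint8Array(merged));             // Uint8Array(5) [1, 2, 3, 4, 5]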
package/src/node/images/parse-image.node.ts
@@ -1,6 +1,9 @@
+// loaders.gl, MIT license
+
 import getPixels from 'get-pixels';
-
-
+
+/** Declares which image format mime types this loader polyfill supports */
+export const NODE_FORMAT_SUPPORT = ['image/png', 'image/jpeg', 'image/gif'];
 
 // Note: These types are also defined in @loaders.gl/images and need to be kept in sync
 type NDArray = {
@@ -13,26 +16,38 @@
 };
 
 export async function parseImageNode(arrayBuffer: ArrayBuffer, mimeType: string): Promise<NDArray> {
-
-
-
-  // if not, parseImage can be a sync function
-  const getPixelsAsync = util.promisify(getPixels);
+  if (!mimeType) {
+    throw new Error('MIMEType is required to parse image under Node.js');
+  }
 
   const buffer = arrayBuffer instanceof Buffer ? arrayBuffer : Buffer.from(arrayBuffer);
-
   const ndarray = await getPixelsAsync(buffer, mimeType);
+  return ndarray;
+}
+
+// TODO - check if getPixels callback is asynchronous if provided with buffer input
+// if not, parseImage can be a sync function
+function getPixelsAsync(buffer: Buffer, mimeType: string): Promise<NDArray> {
+  return new Promise<NDArray>((resolve) =>
+    getPixels(buffer, mimeType, (err, ndarray) => {
+      if (err) {
+        throw err;
+      }
+
+      const shape = [...ndarray.shape];
+      const layers = ndarray.shape.length === 4 ? ndarray.shape.shift() : 1;
+      const data = ndarray.data instanceof Buffer ? new Uint8Array(ndarray.data) : ndarray.data;
 
-
-
-
-
-
-
-
-
-
-
-
+      // extract width/height etc
+      resolve({
+        shape,
+        data,
+        width: ndarray.shape[0],
+        height: ndarray.shape[1],
+        components: ndarray.shape[2],
+        // TODO - error
+        layers: layers ? [layers] : []
+      });
+    })
+  );
 }
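An illustrative sketch of how the reworked parseImageNode() could be called directly (not package code; the file path and import specifier are placeholders):

  import {promises as fs} from 'fs';
  import {parseImageNode} from './parse-image.node';

  async function getPngSize(path: string) {
    const buffer = await fs.readFile(path);
    // Copy the file contents into a standalone ArrayBuffer
    const arrayBuffer = buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength);
    // mimeType is now mandatory; the new code throws if it is missing
    const ndarray = await parseImageNode(arrayBuffer, 'image/png');
    return {width: ndarray.width, height: ndarray.height};
  }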
package/dist/bundle.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["../src/bundle.ts"],"names":["moduleExports","require","globalThis","loaders","module","exports","Object","assign"],"mappings":"AACA,MAAMA,aAAa,GAAGC,OAAO,CAAC,SAAD,CAA7B;;AACAC,UAAU,CAACC,OAAX,GAAqBD,UAAU,CAACC,OAAX,IAAsB,EAA3C;AACAC,MAAM,CAACC,OAAP,GAAiBC,MAAM,CAACC,MAAP,CAAcL,UAAU,CAACC,OAAzB,EAAkCH,aAAlC,CAAjB","sourcesContent":["// @ts-nocheck\nconst moduleExports = require('./index');\nglobalThis.loaders = globalThis.loaders || {};\nmodule.exports = Object.assign(globalThis.loaders, moduleExports);\n"],"file":"bundle.js"}
package/dist/index.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["../src/index.ts"],"names":["DOMParser","isBrowser","global","TextDecoder","TextEncoder","allSettled","base64","HeadersNode","ResponseNode","fetchNode","encodeImageNode","parseImageNode","ReadableStreamPolyfill","BlobPolyfill","FileReaderPolyfill","FilePolyfill","installFilePolyfills","installTextEncoder","installTextDecoder","atob","btoa","Promise"],"mappings":"AACA,SAAQA,SAAR,QAAwB,QAAxB;AACA,SAAQC,SAAR,EAAmBC,MAAnB,QAAgC,iBAAhC;AAEA,SAAQC,WAAR,EAAqBC,WAArB,QAAuC,iBAAvC;AACA,SAAQC,UAAR,QAAyB,uBAAzB;AAGA,OAAO,KAAKC,MAAZ,MAAwB,yBAAxB;AAEA,OAAOC,WAAP,MAAwB,2BAAxB;AACA,OAAOC,YAAP,MAAyB,4BAAzB;AACA,OAAOC,SAAP,MAAsB,yBAAtB;AAEA,SAAQC,eAAR,QAA8B,iCAA9B;AACA,SAAQC,cAAR,QAA6B,gCAA7B;AAEA,SAAQC,sBAAR,QAAqC,6BAArC;AACA,SAAQC,YAAR,QAA2B,kBAA3B;AACA,SAAQC,kBAAR,QAAiC,yBAAjC;AACA,SAAQC,YAAR,QAA2B,kBAA3B;AACA,SAAQC,oBAAR,QAAmC,oCAAnC;AAMA,MAAMC,kBAAkB,GAAG,CAAChB,SAAD,IAAc,EAAE,iBAAiBC,MAAnB,CAAzC;;AACA,IAAIe,kBAAJ,EAAwB;AACtBf,EAAAA,MAAM,CAAC,aAAD,CAAN,GAAwBE,WAAxB;AACD;;AAED,MAAMc,kBAAkB,GAAG,CAACjB,SAAD,IAAc,EAAE,iBAAiBC,MAAnB,CAAzC;;AACA,IAAIgB,kBAAJ,EAAwB;AACtBhB,EAAAA,MAAM,CAAC,aAAD,CAAN,GAAwBC,WAAxB;AACD;;AAMD,IAAI,CAACF,SAAD,IAAc,EAAE,UAAUC,MAAZ,CAAd,IAAqCI,MAAM,CAACa,IAAhD,EAAsD;AACpDjB,EAAAA,MAAM,CAAC,MAAD,CAAN,GAAiBI,MAAM,CAACa,IAAxB;AACD;;AACD,IAAI,CAAClB,SAAD,IAAc,EAAE,UAAUC,MAAZ,CAAd,IAAqCI,MAAM,CAACc,IAAhD,EAAsD;AACpDlB,EAAAA,MAAM,CAAC,MAAD,CAAN,GAAiBI,MAAM,CAACc,IAAxB;AACD;;AAMD,IAAI,CAACnB,SAAD,IAAc,EAAE,aAAaC,MAAf,CAAd,IAAwCK,WAA5C,EAAyD;AACvDL,EAAAA,MAAM,CAAC,SAAD,CAAN,GAAoBK,WAApB;AACD;;AAED,IAAI,CAACN,SAAD,IAAc,EAAE,cAAcC,MAAhB,CAAd,IAAyCM,YAA7C,EAA2D;AACzDN,EAAAA,MAAM,CAAC,UAAD,CAAN,GAAqBM,YAArB;AACD;;AAED,IAAI,CAACP,SAAD,IAAc,EAAE,WAAWC,MAAb,CAAd,IAAsCO,SAA1C,EAAqD;AACnDP,EAAAA,MAAM,CAAC,OAAD,CAAN,GAAkBO,SAAlB;AACD;;AAMD,IAAI,CAACR,SAAD,IAAc,EAAE,eAAeC,MAAjB,CAAd,IAA0CF,SAA9C,EAAyD;AACvDE,EAAAA,MAAM,CAAC,WAAD,CAAN,GAAsBF,SAAtB;AACD;;AAMD,IAAI,CAACC,SAAD,IAAc,EAAE,sBAAsBC,MAAxB,CAAd,IAAiDQ,eAArD,EAAsE;AACpER,EAAAA,MAAM,CAAC,kBAAD,CAAN,GAA6BQ,eAA7B;AACD;;AAED,IAAI,CAACT,SAAD,IAAc,EAAE,qBAAqBC,MAAvB,CAAd,IAAgDS,cAApD,EAAoE;AAClET,EAAAA,MAAM,CAAC,iBAAD,CAAN,GAA4BS,cAA5B;AACD;;AAED,IAAI,EAAE,gBAAgBU,OAAlB,CAAJ,EAAgC;AAE9BA,EAAAA,OAAO,CAAChB,UAAR,GAAqBA,UAArB;AACD","sourcesContent":["/* eslint-disable dot-notation */\nimport {DOMParser} from 'xmldom';\nimport {isBrowser, global} from './utils/globals';\n\nimport {TextDecoder, TextEncoder} from './libs/encoding';\nimport {allSettled} from './promise/all-settled';\n\n// Node specific\nimport * as base64 from './node/buffer/btoa.node';\n\nimport HeadersNode from './node/fetch/headers.node';\nimport ResponseNode from './node/fetch/response.node';\nimport fetchNode from './node/fetch/fetch.node';\n\nimport {encodeImageNode} from './node/images/encode-image.node';\nimport {parseImageNode} from './node/images/parse-image.node';\n\nexport {ReadableStreamPolyfill} from './node/file/readable-stream';\nexport {BlobPolyfill} from './node/file/blob';\nexport {FileReaderPolyfill} from './node/file/file-reader';\nexport {FilePolyfill} from './node/file/file';\nexport {installFilePolyfills} from './node/file/install-file-polyfills';\n\n// POLYFILLS: TextEncoder, TextDecoder\n// - Recent Node versions have these classes but virtually no encodings unless special build.\n// - Browser: Edge, IE11 do not have these\n\nconst installTextEncoder = !isBrowser || !('TextEncoder' in global);\nif (installTextEncoder) {\n global['TextEncoder'] = TextEncoder;\n}\n\nconst installTextDecoder = !isBrowser || !('TextDecoder' in global);\nif 
(installTextDecoder) {\n global['TextDecoder'] = TextDecoder;\n}\n\n// POLYFILLS: btoa, atob\n// - Node: Yes\n// - Browser: No\n\nif (!isBrowser && !('atob' in global) && base64.atob) {\n global['atob'] = base64.atob;\n}\nif (!isBrowser && !('btoa' in global) && base64.btoa) {\n global['btoa'] = base64.btoa;\n}\n\n// POLYFILL: fetch\n// - Node: Yes\n// - Browser: No. For This polyfill is node only, IE11 etc, install external polyfill\n\nif (!isBrowser && !('Headers' in global) && HeadersNode) {\n global['Headers'] = HeadersNode;\n}\n\nif (!isBrowser && !('Response' in global) && ResponseNode) {\n global['Response'] = ResponseNode;\n}\n\nif (!isBrowser && !('fetch' in global) && fetchNode) {\n global['fetch'] = fetchNode;\n}\n\n// POLYFILL: DOMParser\n// - Node: Yes\n// - Browser: No\n\nif (!isBrowser && !('DOMParser' in global) && DOMParser) {\n global['DOMParser'] = DOMParser;\n}\n\n// NODE IMAGE FUNCTIONS:\n// These are not official polyfills but used by the @loaders.gl/images module if installed\n// TODO - is there an appropriate Image API we could polyfill using an adapter?\n\nif (!isBrowser && !('_encodeImageNode' in global) && encodeImageNode) {\n global['_encodeImageNode'] = encodeImageNode;\n}\n\nif (!isBrowser && !('_parseImageNode' in global) && parseImageNode) {\n global['_parseImageNode'] = parseImageNode;\n}\n\nif (!('allSettled' in Promise)) {\n // @ts-ignore\n Promise.allSettled = allSettled;\n}\n"],"file":"index.js"}