@loaders.gl/polyfills 3.3.0-alpha.9 → 3.3.1
- package/dist/dist.min.js +64 -15
- package/dist/es5/index.js +17 -11
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/node/fetch/fetch-file.node.js +88 -0
- package/dist/es5/node/fetch/fetch-file.node.js.map +1 -0
- package/dist/es5/node/fetch/fetch.node.js +90 -35
- package/dist/es5/node/fetch/fetch.node.js.map +1 -1
- package/dist/es5/node/fetch/headers.node.js +2 -2
- package/dist/es5/node/fetch/headers.node.js.map +1 -1
- package/dist/es5/node/fetch/response.node.js +5 -4
- package/dist/es5/node/fetch/response.node.js.map +1 -1
- package/dist/es5/node/fetch/utils/stream-utils.node.js +9 -93
- package/dist/es5/node/fetch/utils/stream-utils.node.js.map +1 -1
- package/dist/es5/node/images/parse-image.node.js +30 -18
- package/dist/es5/node/images/parse-image.node.js.map +1 -1
- package/dist/esm/index.js +5 -4
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/node/fetch/fetch-file.node.js +54 -0
- package/dist/esm/node/fetch/fetch-file.node.js.map +1 -0
- package/dist/esm/node/fetch/fetch.node.js +43 -18
- package/dist/esm/node/fetch/fetch.node.js.map +1 -1
- package/dist/esm/node/fetch/headers.node.js +1 -1
- package/dist/esm/node/fetch/headers.node.js.map +1 -1
- package/dist/esm/node/fetch/response.node.js +3 -2
- package/dist/esm/node/fetch/response.node.js.map +1 -1
- package/dist/esm/node/fetch/utils/stream-utils.node.js +3 -46
- package/dist/esm/node/fetch/utils/stream-utils.node.js.map +1 -1
- package/dist/esm/node/images/parse-image.node.js +23 -16
- package/dist/esm/node/images/parse-image.node.js.map +1 -1
- package/dist/index.d.ts +2 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +13 -14
- package/dist/lib/encoding.js +1 -1
- package/dist/node/fetch/fetch-file.node.d.ts +4 -0
- package/dist/node/fetch/fetch-file.node.d.ts.map +1 -0
- package/dist/node/fetch/fetch-file.node.js +51 -0
- package/dist/node/fetch/fetch.node.d.ts +6 -1
- package/dist/node/fetch/fetch.node.d.ts.map +1 -1
- package/dist/node/fetch/fetch.node.js +57 -31
- package/dist/node/fetch/headers.node.d.ts +1 -1
- package/dist/node/fetch/headers.node.d.ts.map +1 -1
- package/dist/node/fetch/headers.node.js +2 -1
- package/dist/node/fetch/response.node.d.ts +2 -2
- package/dist/node/fetch/response.node.d.ts.map +1 -1
- package/dist/node/fetch/response.node.js +5 -6
- package/dist/node/fetch/utils/stream-utils.node.d.ts +8 -1
- package/dist/node/fetch/utils/stream-utils.node.d.ts.map +1 -1
- package/dist/node/fetch/utils/stream-utils.node.js +10 -54
- package/dist/node/images/parse-image.node.d.ts.map +1 -1
- package/dist/node/images/parse-image.node.js +26 -18
- package/package.json +3 -2
- package/src/index.ts +5 -4
- package/src/node/fetch/fetch-file.node.ts +51 -0
- package/src/node/fetch/fetch.node.ts +64 -30
- package/src/node/fetch/headers.node.ts +1 -1
- package/src/node/fetch/response.node.ts +4 -2
- package/src/node/fetch/utils/stream-utils.node.ts +10 -59
- package/src/node/images/parse-image.node.ts +31 -20
package/dist/node/fetch/response.node.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"response.node.d.ts","sourceRoot":"","sources":["../../../src/node/fetch/response.node.ts"],"names":[],"mappings":"
+{"version":3,"file":"response.node.d.ts","sourceRoot":"","sources":["../../../src/node/fetch/response.node.ts"],"names":[],"mappings":"AAIA,OAAO,EAAC,OAAO,EAAC,MAAM,gBAAgB,CAAC;AAoBvC,qBAAa,QAAQ;IACnB,QAAQ,CAAC,EAAE,EAAE,OAAO,CAAC;IACrB,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC;IACxB,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;IAC5B,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC;IACrB,QAAQ,EAAE,OAAO,CAAS;IAC1B,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC;gBAIrB,IAAI,KAAA,EACJ,OAAO,EAAE;QACP,OAAO,CAAC,MAAC;QACT,MAAM,CAAC,EAAE,MAAM,CAAC;QAChB,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,GAAG,EAAE,MAAM,CAAC;KACb;IAuBH,IAAI,IAAI,QAKP;IAIK,WAAW;IAQX,IAAI;IAMJ,IAAI;IAKJ,IAAI;CAMX"}
package/dist/node/fetch/response.node.js
CHANGED
@@ -1,11 +1,10 @@
 "use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
+// loaders.gl, MIT license
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.Response = void 0;
 const assert_1 = require("../../utils/assert");
 const stream_utils_node_1 = require("./utils/stream-utils.node");
-const headers_node_1 = __importDefault(require("./headers.node"));
+const headers_node_1 = require("./headers.node");
 const isBoolean = (x) => typeof x === 'boolean';
 const isFunction = (x) => typeof x === 'function';
 const isObject = (x) => x !== null && typeof x === 'object';
@@ -30,7 +29,7 @@ class Response {
        this.ok = status === 200;
        this.status = status; // TODO - handle errors and set status
        this.statusText = statusText;
-        this.headers = new headers_node_1.default(options?.headers || {});
+        this.headers = new headers_node_1.Headers(options?.headers || {});
        // Check for content-encoding and create a decompression stream
        if (isReadableNodeStream(body)) {
            this._body = (0, stream_utils_node_1.decompressReadStream)(body, headers);
@@ -74,4 +73,4 @@ class Response {
        return new Blob([await this.arrayBuffer()]);
    }
 }
-exports.default = Response;
+exports.Response = Response;
package/dist/node/fetch/utils/stream-utils.node.d.ts
CHANGED
@@ -1,5 +1,12 @@
-export declare function createReadStream(url: any, options: any): Promise<any>;
+/**
+ *
+ */
 export declare function decompressReadStream(readStream: any, headers: any): any;
+/**
+ *
+ * @param readStream
+ * @returns
+ */
 export declare function concatenateReadStream(readStream: any): Promise<ArrayBuffer>;
 /**
  * Concatenate a sequence of ArrayBuffers
package/dist/node/fetch/utils/stream-utils.node.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"stream-utils.node.d.ts","sourceRoot":"","sources":["../../../../src/node/fetch/utils/stream-utils.node.ts"],"names":[],"mappings":"
+{"version":3,"file":"stream-utils.node.d.ts","sourceRoot":"","sources":["../../../../src/node/fetch/utils/stream-utils.node.ts"],"names":[],"mappings":"AAMA;;GAEG;AACH,wBAAgB,oBAAoB,CAAC,UAAU,KAAA,EAAE,OAAO,KAAA,OAYvD;AAED;;;;GAIG;AACH,wBAAsB,qBAAqB,CAAC,UAAU,KAAA,GAAG,OAAO,CAAC,WAAW,CAAC,CAsB5E;AAED;;;;GAIG;AACH,wBAAgB,uBAAuB,CAAC,OAAO,EAAE,CAAC,WAAW,GAAG,UAAU,CAAC,EAAE,GAAG,WAAW,CAqB1F"}
package/dist/node/fetch/utils/stream-utils.node.js
CHANGED
@@ -1,41 +1,15 @@
 "use strict";
+// loaders.gl, MIT license
 var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.concatenateArrayBuffers = exports.concatenateReadStream = exports.decompressReadStream = exports.createReadStream = void 0;
-const fs_1 = __importDefault(require("fs")); // `fs` will be empty object in browsers (see package.json "browser" field).
-const http_1 = __importDefault(require("http"));
-const https_1 = __importDefault(require("https"));
+exports.concatenateArrayBuffers = exports.concatenateReadStream = exports.decompressReadStream = void 0;
 const zlib_1 = __importDefault(require("zlib"));
 const decode_data_uri_node_1 = require("./decode-data-uri.node");
-
-
-
-// Returns a promise that resolves to a readable stream
-async function createReadStream(url, options) {
-    // Handle file streams in node
-    if (!isRequestURL(url)) {
-        const noqueryUrl = url.split('?')[0];
-        // Now open the stream
-        return await new Promise((resolve, reject) => {
-            // @ts-ignore
-            const stream = fs_1.default.createReadStream(noqueryUrl, { encoding: null });
-            stream.once('readable', () => resolve(stream));
-            stream.on('error', (error) => reject(error));
-        });
-    }
-    // HANDLE HTTP/HTTPS REQUESTS IN NODE
-    // TODO: THIS IS BAD SINCE WE RETURN A PROMISE INSTEAD OF A STREAM
-    return await new Promise((resolve, reject) => {
-        const requestFunction = url.startsWith('https:') ? https_1.default.request : http_1.default.request;
-        const requestOptions = getRequestOptions(url, options);
-        const req = requestFunction(requestOptions, (res) => resolve(res));
-        req.on('error', (error) => reject(error));
-        req.end();
-    });
-}
-exports.createReadStream = createReadStream;
+/**
+ *
+ */
 function decompressReadStream(readStream, headers) {
     switch (headers.get('content-encoding')) {
         case 'br':
@@ -50,6 +24,11 @@ function decompressReadStream(readStream, headers) {
     }
 }
 exports.decompressReadStream = decompressReadStream;
+/**
+ *
+ * @param readStream
+ * @returns
+ */
 async function concatenateReadStream(readStream) {
     const arrayBufferChunks = [];
     return await new Promise((resolve, reject) => {
@@ -70,29 +49,6 @@ async function concatenateReadStream(readStream) {
     });
 }
 exports.concatenateReadStream = concatenateReadStream;
-// HELPERS
-function getRequestOptions(url, options) {
-    // Ensure header keys are lower case so that we can merge without duplicates
-    const originalHeaders = options?.headers || {};
-    const headers = {};
-    for (const key of Object.keys(originalHeaders)) {
-        headers[key.toLowerCase()] = originalHeaders[key];
-    }
-    // Add default accept-encoding to headers
-    headers['accept-encoding'] = headers['accept-encoding'] || 'gzip,br,deflate';
-    const urlObject = new URL(url);
-    return {
-        hostname: urlObject.hostname,
-        path: urlObject.pathname,
-        method: 'GET',
-        // Add options and user provided 'options.fetch' overrides if available
-        ...options,
-        ...options?.fetch,
-        // Override with updated headers with accepted encodings:
-        headers,
-        port: urlObject.port
-    };
-}
 /**
  * Concatenate a sequence of ArrayBuffers
  * @return A concatenated ArrayBuffer
package/dist/node/images/parse-image.node.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"parse-image.node.d.ts","sourceRoot":"","sources":["../../../src/node/images/parse-image.node.ts"],"names":[],"mappings":"
+{"version":3,"file":"parse-image.node.d.ts","sourceRoot":"","sources":["../../../src/node/images/parse-image.node.ts"],"names":[],"mappings":"AAGA,KAAK,OAAO,GAAG;IACb,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,IAAI,EAAE,UAAU,CAAC;IACjB,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,EAAE,CAAC;CAClB,CAAC;AAEF,wBAAsB,cAAc,CAAC,WAAW,EAAE,WAAW,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAUjG"}
package/dist/node/images/parse-image.node.js
CHANGED
@@ -5,26 +5,34 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.parseImageNode = void 0;
 const get_pixels_1 = __importDefault(require("get-pixels"));
-const assert_1 = require("../../utils/assert");
-const util_1 = __importDefault(require("util"));
 async function parseImageNode(arrayBuffer, mimeType) {
-
-
-
-    const getPixelsAsync = util_1.default.promisify(get_pixels_1.default);
+    if (!mimeType) {
+        throw new Error('MIMEType is required to parse image under Node.js');
+    }
     const buffer = arrayBuffer instanceof Buffer ? arrayBuffer : Buffer.from(arrayBuffer);
     const ndarray = await getPixelsAsync(buffer, mimeType);
-
-    const layers = ndarray.shape.length === 4 ? ndarray.shape.shift() : 1;
-    const data = ndarray.data instanceof Buffer ? new Uint8Array(ndarray.data) : ndarray.data;
-    // extract width/height etc
-    return {
-        shape,
-        data,
-        width: ndarray.shape[0],
-        height: ndarray.shape[1],
-        components: ndarray.shape[2],
-        layers
-    };
+    return ndarray;
 }
 exports.parseImageNode = parseImageNode;
+// TODO - check if getPixels callback is asynchronous if provided with buffer input
+// if not, parseImage can be a sync function
+function getPixelsAsync(buffer, mimeType) {
+    return new Promise((resolve) => (0, get_pixels_1.default)(buffer, mimeType, (err, ndarray) => {
+        if (err) {
+            throw err;
+        }
+        const shape = [...ndarray.shape];
+        const layers = ndarray.shape.length === 4 ? ndarray.shape.shift() : 1;
+        const data = ndarray.data instanceof Buffer ? new Uint8Array(ndarray.data) : ndarray.data;
+        // extract width/height etc
+        resolve({
+            shape,
+            data,
+            width: ndarray.shape[0],
+            height: ndarray.shape[1],
+            components: ndarray.shape[2],
+            // TODO - error
+            layers: layers ? [layers] : []
+        });
+    }));
+}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@loaders.gl/polyfills",
-  "version": "3.3.0-alpha.9",
+  "version": "3.3.1",
   "description": "Polyfills for TextEncoder/TextDecoder",
   "license": "MIT",
   "publishConfig": {
@@ -98,6 +98,7 @@
   "dependencies": {
     "@babel/runtime": "^7.3.1",
     "@xmldom/xmldom": "^0.7.5",
+    "buffer": "^6.0.3",
     "get-pixels": "^3.3.2",
     "ndarray": "^1.0.18",
     "save-pixels": "^2.3.2",
@@ -105,5 +106,5 @@
     "through": "^2.3.8",
     "web-streams-polyfill": "^3.0.0"
   },
-  "gitHead": "
+  "gitHead": "51632b5948e496a4b75e970030ad7579650c129d"
 }
package/src/index.ts
CHANGED
@@ -8,9 +8,9 @@ import {allSettled} from './promise/all-settled';
 // Node specific
 import * as base64 from './node/buffer/btoa.node';
 
-import HeadersNode from './node/fetch/headers.node';
-import ResponseNode from './node/fetch/response.node';
-import fetchNode from './node/fetch/fetch.node';
+import {Headers as HeadersNode} from './node/fetch/headers.node';
+import {Response as ResponseNode} from './node/fetch/response.node';
+import {fetchNode as fetchNode} from './node/fetch/fetch.node';
 
 import {encodeImageNode} from './node/images/encode-image.node';
 import {parseImageNode} from './node/images/parse-image.node';
@@ -20,7 +20,8 @@ export {BlobPolyfill} from './node/file/blob';
 export {FileReaderPolyfill} from './node/file/file-reader';
 export {FilePolyfill} from './node/file/file';
 export {installFilePolyfills} from './node/file/install-file-polyfills';
-export {default as _fetchNode} from './node/fetch/fetch.node';
+export {fetchNode as _fetchNode} from './node/fetch/fetch.node';
+export {fetchFileNode as _fetchFileNode} from './node/fetch/fetch-file.node';
 
 // POLYFILLS: TextEncoder, TextDecoder
 // - Recent Node versions have these classes but virtually no encodings unless special build.
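The index diff above switches the Node fetch polyfills from default to named exports and re-exports them from the package root as `_fetchNode` and `_fetchFileNode`. A minimal consumer-side sketch, assuming only the re-exports shown above (the URLs and paths are hypothetical examples):

```ts
// Minimal sketch: consuming the renamed exports from the package root.
import {_fetchNode, _fetchFileNode} from '@loaders.gl/polyfills';

async function demo(): Promise<void> {
  // _fetchNode dispatches on the URL: http(s), data: URIs, or local paths
  const httpResponse = await _fetchNode('https://example.com/data.json', {});
  console.log(httpResponse.status, httpResponse.statusText);

  // _fetchFileNode goes straight to the filesystem-backed implementation
  const fileResponse = await _fetchFileNode('./data/local.json', {});
  console.log(fileResponse.ok);
}
```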
package/src/node/fetch/fetch-file.node.ts
ADDED
@@ -0,0 +1,51 @@
+// loaders.gl, MIT license
+
+import fs from 'fs'; // `fs` will be empty object in browsers (see package.json "browser" field).
+import {Response} from './response.node';
+import {Headers} from './headers.node';
+
+export function isRequestURL(url: string): boolean {
+  return url.startsWith('http:') || url.startsWith('https:');
+}
+
+export async function fetchFileNode(url: string, options): Promise<Response> {
+  const noqueryUrl = url.split('?')[0];
+
+  try {
+    // Now open the stream
+    const body = await new Promise((resolve, reject) => {
+      // @ts-ignore
+      const stream = fs.createReadStream(noqueryUrl, {encoding: null});
+      stream.once('readable', () => resolve(stream));
+      stream.on('error', (error) => reject(error));
+    });
+
+    const status = 200;
+    const statusText = 'OK';
+    const headers = getHeadersForFile(noqueryUrl);
+    return new Response(body, {headers, status, statusText, url});
+  } catch (error) {
+    const status = 400;
+    const statusText = (error as Error).message;
+    const headers = {};
+    return new Response((error as Error).message, {headers, status, statusText, url});
+  }
+}
+
+function getHeadersForFile(noqueryUrl: string): Headers {
+  const headers = {};
+
+  // Fix up content length if we can for best progress experience
+  if (!headers['content-length']) {
+    const stats = fs.statSync(noqueryUrl);
+    headers['content-length'] = stats.size;
+  }
+
+  // Automatically decompress gzipped files with .gz extension
+  if (noqueryUrl.endsWith('.gz')) {
+    noqueryUrl = noqueryUrl.slice(0, -3);
+    headers['content-encoding'] = 'gzip';
+  }
+
+  return new Headers(headers);
+}
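The new `fetch-file.node.ts` wraps an `fs.createReadStream` in the polyfilled `Response`, synthesizing `content-length` from `fs.statSync` and tagging `.gz` paths with `content-encoding: gzip` so the body is decompressed downstream. A usage sketch under those assumptions (the path is a hypothetical example):

```ts
// Sketch: reading a local, possibly gzipped file via fetchFileNode.
import {fetchFileNode} from './fetch-file.node';

async function readLocalFile(): Promise<ArrayBuffer> {
  const response = await fetchFileNode('./cache/tile.json.gz', {});
  if (!response.ok) {
    // Errors surface as a status 400 Response rather than a thrown exception
    throw new Error(response.statusText);
  }
  return await response.arrayBuffer();
}
```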
package/src/node/fetch/fetch.node.ts
CHANGED
@@ -1,20 +1,29 @@
-import fs from 'fs';
-import Response from './response.node';
-import Headers from './headers.node';
+// loaders.gl, MIT license
 
+import http from 'http';
+import https from 'https';
+import {Response} from './response.node';
+import {Headers} from './headers.node';
 import {decodeDataUri} from './utils/decode-data-uri.node';
-import {createReadStream} from './utils/stream-utils.node';
 
-
-
+import {fetchFileNode} from './fetch-file.node';
+
+const isDataURL = (url: string): boolean => url.startsWith('data:');
+const isRequestURL = (url: string): boolean => url.startsWith('http:') || url.startsWith('https:');
 
 /**
  * Emulation of Browser fetch for Node.js
  * @param url
  * @param options
 */
-export default async function fetchNode(url, options) {
+// eslint-disable-next-line complexity
+export async function fetchNode(url: string, options): Promise<Response> {
   try {
+    // Handle file streams in node
+    if (!isRequestURL(url) && !isDataURL(url)) {
+      return await fetchFileNode(url, options);
+    }
+
     // Handle data urls in node, to match `fetch``
     // Note - this loses the MIME type, data URIs are handled directly in fetch
     if (isDataURL(url)) {
@@ -35,7 +44,7 @@ export default async function fetchNode(url, options) {
     }
 
     // Need to create the stream in advance since Response constructor needs to be sync
-    const body = await createReadStream(originalUrl, options);
+    const body = await createHTTPRequestReadStream(originalUrl, options);
     const headers = getHeaders(url, body, syntheticResponseHeaders);
     const {status, statusText} = getStatus(body);
 
@@ -55,6 +64,23 @@ export default async function fetchNode(url, options) {
   }
 }
 
+/** Returns a promise that resolves to a readable stream */
+export async function createHTTPRequestReadStream(
+  url: string,
+  options
+): Promise<http.IncomingMessage> {
+  // HANDLE HTTP/HTTPS REQUESTS IN NODE
+  // TODO: THIS IS BAD SINCE WE RETURN A PROMISE INSTEAD OF A STREAM
+  return await new Promise((resolve, reject) => {
+    const requestOptions = getRequestOptions(url, options);
+    const req = url.startsWith('https:')
+      ? https.request(requestOptions, (res) => resolve(res))
+      : http.request(requestOptions, (res) => resolve(res));
+    req.on('error', (error) => reject(error));
+    req.end();
+  });
+}
+
 /**
  * Generate redirect url from location without origin and protocol.
  * @param originalUrl
@@ -72,9 +98,33 @@ function generateRedirectUrl(originalUrl: string, location: string): string {
 }
 
 // HELPER FUNCTIONS
-// PRIVATE
 
-function getStatus(httpResponse) {
+function getRequestOptions(url: string, options?: {fetch?: typeof fetch; headers?}) {
+  // Ensure header keys are lower case so that we can merge without duplicates
+  const originalHeaders = options?.headers || {};
+  const headers = {};
+  for (const key of Object.keys(originalHeaders)) {
+    headers[key.toLowerCase()] = originalHeaders[key];
+  }
+
+  // Add default accept-encoding to headers
+  headers['accept-encoding'] = headers['accept-encoding'] || 'gzip,br,deflate';
+
+  const urlObject = new URL(url);
+  return {
+    hostname: urlObject.hostname,
+    path: urlObject.pathname,
+    method: 'GET',
+    // Add options and user provided 'options.fetch' overrides if available
+    ...options,
+    ...options?.fetch,
+    // Override with updated headers with accepted encodings:
+    headers,
+    port: urlObject.port
+  };
+}
+
+function getStatus(httpResponse: http.IncomingMessage): {status: number; statusText: string} {
   if (httpResponse.statusCode) {
     return {status: httpResponse.statusCode, statusText: httpResponse.statusMessage || 'NA'};
   }
@@ -105,24 +155,8 @@ function getHeaders(url, httpResponse, additionalHeaders = {}) {
   return new Headers(headers);
 }
 
-
-
-
-
-  } else if (isDataURL(url)) {
-    // TODO - remove media type etc
-    return url.length - 'data:'.length;
-  }
-  // File URL
-  // TODO - how to handle non-existing file, this presumably just throws
-  try {
-    // strip query params from URL
-    const noqueryUrl = url.split('?')[0];
-    const stats = fs.statSync(noqueryUrl);
-    return stats.size;
-  } catch (error) {
-    // ignore for now
-  }
-
-  return null;
+/** Needs to be read from actual headers */
+function getContentLength(url: string): number | null {
+  // TODO - remove media type etc
+  return isDataURL(url) ? url.length - 'data:'.length : null;
 }
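The reworked `fetchNode` now dispatches on the URL shape before doing any I/O: plain paths go to `fetchFileNode`, `data:` URIs are decoded in place, and only http(s) URLs reach `createHTTPRequestReadStream`. A standalone sketch of that dispatch order (simplified; redirect and error handling omitted):

```ts
// Sketch of fetchNode's dispatch order (simplified).
const isDataURL = (url: string): boolean => url.startsWith('data:');
const isRequestURL = (url: string): boolean =>
  url.startsWith('http:') || url.startsWith('https:');

function classifyUrl(url: string): 'file' | 'data' | 'http' {
  if (!isRequestURL(url) && !isDataURL(url)) {
    return 'file'; // delegated to fetchFileNode
  }
  if (isDataURL(url)) {
    return 'data'; // decoded via decodeDataUri
  }
  return 'http'; // streamed via createHTTPRequestReadStream
}

console.log(classifyUrl('./local/file.bin')); // 'file'
console.log(classifyUrl('data:text/plain;base64,aGk=')); // 'data'
console.log(classifyUrl('https://example.com')); // 'http'
```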
package/src/node/fetch/response.node.ts
CHANGED
@@ -1,6 +1,8 @@
+// loaders.gl, MIT license
+
 import {assert} from '../../utils/assert';
 import {decompressReadStream, concatenateReadStream} from './utils/stream-utils.node';
-import Headers from './headers.node';
+import {Headers} from './headers.node';
 
 const isBoolean = (x) => typeof x === 'boolean';
 const isFunction = (x) => typeof x === 'function';
@@ -20,7 +22,7 @@ const isReadableNodeStream = (x) =>
  */
 import {Readable} from 'stream';
 
-export default class Response {
+export class Response {
   readonly ok: boolean;
   readonly status: number;
   readonly statusText: string;
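`Response` is now a named export with readonly `ok`, `status`, and `statusText` fields, where `ok` is derived from a 200 status (see the dist diff earlier). A construction sketch, mirroring what `fetchFileNode` does above (all values illustrative):

```ts
// Sketch: constructing the polyfilled Response directly.
import {Response} from './response.node';

const response = new Response('hello world', {
  headers: {'content-type': 'text/plain'},
  status: 200,
  statusText: 'OK',
  url: 'file:///tmp/hello.txt'
});
console.log(response.ok); // true, since ok is status === 200
```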
package/src/node/fetch/utils/stream-utils.node.ts
CHANGED
@@ -1,39 +1,12 @@
-import fs from 'fs'; // `fs` will be empty object in browsers (see package.json "browser" field).
-import http from 'http';
-import https from 'https';
+// loaders.gl, MIT license
+
 import zlib from 'zlib';
 
 import {toArrayBuffer} from './decode-data-uri.node';
 
-
-
-
-
-// Returns a promise that resolves to a readable stream
-export async function createReadStream(url, options): Promise<any> {
-  // Handle file streams in node
-  if (!isRequestURL(url)) {
-    const noqueryUrl = url.split('?')[0];
-    // Now open the stream
-    return await new Promise((resolve, reject) => {
-      // @ts-ignore
-      const stream = fs.createReadStream(noqueryUrl, {encoding: null});
-      stream.once('readable', () => resolve(stream));
-      stream.on('error', (error) => reject(error));
-    });
-  }
-
-  // HANDLE HTTP/HTTPS REQUESTS IN NODE
-  // TODO: THIS IS BAD SINCE WE RETURN A PROMISE INSTEAD OF A STREAM
-  return await new Promise((resolve, reject) => {
-    const requestFunction = url.startsWith('https:') ? https.request : http.request;
-    const requestOptions = getRequestOptions(url, options);
-    const req = requestFunction(requestOptions, (res) => resolve(res));
-    req.on('error', (error) => reject(error));
-    req.end();
-  });
-}
-
+/**
+ *
+ */
 export function decompressReadStream(readStream, headers) {
   switch (headers.get('content-encoding')) {
     case 'br':
@@ -48,6 +21,11 @@ export function decompressReadStream(readStream, headers) {
   }
 }
 
+/**
+ *
+ * @param readStream
+ * @returns
+ */
 export async function concatenateReadStream(readStream): Promise<ArrayBuffer> {
   const arrayBufferChunks: ArrayBuffer[] = [];
 
@@ -72,33 +50,6 @@ export async function concatenateReadStream(readStream): Promise<ArrayBuffer> {
   });
 }
 
-// HELPERS
-
-function getRequestOptions(url: string, options?: {fetch?: typeof fetch; headers?}) {
-  // Ensure header keys are lower case so that we can merge without duplicates
-  const originalHeaders = options?.headers || {};
-  const headers = {};
-  for (const key of Object.keys(originalHeaders)) {
-    headers[key.toLowerCase()] = originalHeaders[key];
-  }
-
-  // Add default accept-encoding to headers
-  headers['accept-encoding'] = headers['accept-encoding'] || 'gzip,br,deflate';
-
-  const urlObject = new URL(url);
-  return {
-    hostname: urlObject.hostname,
-    path: urlObject.pathname,
-    method: 'GET',
-    // Add options and user provided 'options.fetch' overrides if available
-    ...options,
-    ...options?.fetch,
-    // Override with updated headers with accepted encodings:
-    headers,
-    port: urlObject.port
-  };
-}
-
 /**
  * Concatenate a sequence of ArrayBuffers
  * @return A concatenated ArrayBuffer
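With `createReadStream` and `getRequestOptions` moved out, `stream-utils.node.ts` is reduced to the two stream helpers. A sketch of how they compose, assuming `decompressReadStream` returns the stream unchanged when no known `content-encoding` is present (import paths assume a file next to `fetch.node.ts`):

```ts
// Sketch: decoding and buffering a response body with the two helpers.
import {decompressReadStream, concatenateReadStream} from './utils/stream-utils.node';
import {Headers} from './headers.node';

async function readBody(body: NodeJS.ReadableStream, headers: Headers): Promise<ArrayBuffer> {
  // Picks a zlib transform when content-encoding is br/gzip/deflate
  const decoded = decompressReadStream(body, headers);
  return await concatenateReadStream(decoded);
}
```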
package/src/node/images/parse-image.node.ts
CHANGED
@@ -1,6 +1,4 @@
 import getPixels from 'get-pixels';
-import {assert} from '../../utils/assert';
-import util from 'util';
 
 // Note: These types are also defined in @loaders.gl/images and need to be kept in sync
 type NDArray = {
@@ -13,27 +11,40 @@
 };
 
 export async function parseImageNode(arrayBuffer: ArrayBuffer, mimeType: string): Promise<NDArray> {
-
-
-
-  // if not, parseImage can be a sync function
-  const getPixelsAsync = util.promisify(getPixels);
+  if (!mimeType) {
+    throw new Error('MIMEType is required to parse image under Node.js');
+  }
 
   const buffer = arrayBuffer instanceof Buffer ? arrayBuffer : Buffer.from(arrayBuffer);
 
   const ndarray = await getPixelsAsync(buffer, mimeType);
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+  return ndarray;
+}
+
+// TODO - check if getPixels callback is asynchronous if provided with buffer input
+// if not, parseImage can be a sync function
+function getPixelsAsync(buffer: Buffer, mimeType: string): Promise<NDArray> {
+  return new Promise<NDArray>((resolve) =>
+    getPixels(buffer, mimeType, (err, ndarray) => {
+      if (err) {
+        throw err;
+      }
+
+      const shape = [...ndarray.shape];
+      const layers = ndarray.shape.length === 4 ? ndarray.shape.shift() : 1;
+      const data = ndarray.data instanceof Buffer ? new Uint8Array(ndarray.data) : ndarray.data;
+
+      // extract width/height etc
+      resolve({
+        shape,
+        data,
+        width: ndarray.shape[0],
+        height: ndarray.shape[1],
+        components: ndarray.shape[2],
+        // TODO - error
+        layers: layers ? [layers] : []
+      });
+    })
+  );
 }