@loaders.gl/polyfills 3.1.0-alpha.4 → 3.1.0-beta.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bundle.d.ts +2 -0
- package/dist/bundle.d.ts.map +1 -0
- package/dist/bundle.js +3994 -0
- package/dist/es5/bundle.js +1 -1
- package/dist/es5/bundle.js.map +1 -1
- package/dist/es5/index.js +11 -13
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/lib/encoding-indexes.js +37 -0
- package/dist/es5/lib/encoding-indexes.js.map +1 -0
- package/dist/es5/lib/encoding.js +1459 -0
- package/dist/es5/lib/encoding.js.map +1 -0
- package/dist/{esm/libs/encoding-indexes.js → es5/libs/encoding-indexes-asian.js} +2 -40
- package/dist/es5/node/buffer/to-array-buffer.node.js +1 -1
- package/dist/es5/node/buffer/to-array-buffer.node.js.map +1 -1
- package/dist/es5/node/fetch/fetch.node.js +69 -100
- package/dist/es5/node/fetch/fetch.node.js.map +1 -1
- package/dist/es5/node/fetch/headers.node.js +73 -112
- package/dist/es5/node/fetch/headers.node.js.map +1 -1
- package/dist/es5/node/fetch/response.node.js +47 -182
- package/dist/es5/node/fetch/response.node.js.map +1 -1
- package/dist/es5/node/fetch/utils/decode-data-uri.node.js +10 -18
- package/dist/es5/node/fetch/utils/decode-data-uri.node.js.map +1 -1
- package/dist/es5/node/fetch/utils/stream-utils.node.js +46 -126
- package/dist/es5/node/fetch/utils/stream-utils.node.js.map +1 -1
- package/dist/es5/node/file/blob-stream-controller.js +37 -82
- package/dist/es5/node/file/blob-stream-controller.js.map +1 -1
- package/dist/es5/node/file/blob-stream.js +12 -67
- package/dist/es5/node/file/blob-stream.js.map +1 -1
- package/dist/es5/node/file/blob.js +100 -209
- package/dist/es5/node/file/blob.js.map +1 -1
- package/dist/es5/node/file/file-reader.js +40 -147
- package/dist/es5/node/file/file-reader.js.map +1 -1
- package/dist/es5/node/file/file.js +13 -41
- package/dist/es5/node/file/file.js.map +1 -1
- package/dist/es5/node/file/readable-stream.js +1 -26
- package/dist/es5/node/file/readable-stream.js.map +1 -1
- package/dist/es5/node/images/encode-image.node.js +8 -10
- package/dist/es5/node/images/encode-image.node.js.map +1 -1
- package/dist/es5/node/images/parse-image.node.js +17 -44
- package/dist/es5/node/images/parse-image.node.js.map +1 -1
- package/dist/es5/promise/all-settled.js +7 -7
- package/dist/es5/promise/all-settled.js.map +1 -1
- package/dist/es5/utils/globals.js +3 -8
- package/dist/es5/utils/globals.js.map +1 -1
- package/dist/esm/index.js +2 -2
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/encoding-indexes.js +30 -0
- package/dist/esm/lib/encoding-indexes.js.map +1 -0
- package/dist/esm/lib/encoding.js +1450 -0
- package/dist/esm/lib/encoding.js.map +1 -0
- package/dist/{es5/libs/encoding-indexes.js → esm/libs/encoding-indexes-asian.js} +2 -40
- package/dist/esm/node/fetch/fetch.node.js +12 -1
- package/dist/esm/node/fetch/fetch.node.js.map +1 -1
- package/dist/esm/node/fetch/headers.node.js +1 -1
- package/dist/esm/node/fetch/headers.node.js.map +1 -1
- package/dist/esm/node/fetch/utils/decode-data-uri.node.js +2 -2
- package/dist/esm/node/fetch/utils/decode-data-uri.node.js.map +1 -1
- package/dist/esm/node/file/file-reader.js +1 -1
- package/dist/esm/node/file/file-reader.js.map +1 -1
- package/dist/esm/utils/assert.js +1 -1
- package/dist/esm/utils/assert.js.map +1 -1
- package/dist/index.d.ts +6 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +98 -0
- package/dist/lib/encoding-indexes.d.ts +31 -0
- package/dist/lib/encoding-indexes.d.ts.map +1 -0
- package/dist/lib/encoding-indexes.js +35 -0
- package/dist/lib/encoding.d.ts +15 -0
- package/dist/lib/encoding.d.ts.map +1 -0
- package/dist/lib/encoding.js +2779 -0
- package/dist/libs/encoding-indexes-asian.d.ts +10 -0
- package/dist/libs/encoding-indexes-asian.d.ts.map +1 -0
- package/dist/libs/encoding-indexes-asian.js +14 -0
- package/dist/node/buffer/btoa.node.d.ts +3 -0
- package/dist/node/buffer/btoa.node.d.ts.map +1 -0
- package/dist/node/buffer/btoa.node.js +14 -0
- package/dist/node/buffer/to-array-buffer.node.d.ts +2 -0
- package/dist/node/buffer/to-array-buffer.node.d.ts.map +1 -0
- package/dist/node/buffer/to-array-buffer.node.js +12 -0
- package/dist/node/fetch/fetch.node.d.ts +7 -0
- package/dist/node/fetch/fetch.node.d.ts.map +1 -0
- package/dist/node/fetch/fetch.node.js +117 -0
- package/dist/node/fetch/headers.node.d.ts +34 -0
- package/dist/node/fetch/headers.node.d.ts.map +1 -0
- package/dist/node/fetch/headers.node.js +105 -0
- package/dist/node/fetch/response.node.d.ts +22 -0
- package/dist/node/fetch/response.node.d.ts.map +1 -0
- package/dist/node/fetch/response.node.js +77 -0
- package/dist/node/fetch/utils/decode-data-uri.node.d.ts +16 -0
- package/dist/node/fetch/utils/decode-data-uri.node.d.ts.map +1 -0
- package/dist/node/fetch/utils/decode-data-uri.node.js +63 -0
- package/dist/node/fetch/utils/stream-utils.node.d.ts +4 -0
- package/dist/node/fetch/utils/stream-utils.node.d.ts.map +1 -0
- package/dist/node/fetch/utils/stream-utils.node.js +98 -0
- package/dist/node/file/blob-stream-controller.d.ts +29 -0
- package/dist/node/file/blob-stream-controller.d.ts.map +1 -0
- package/dist/node/file/blob-stream-controller.js +63 -0
- package/dist/node/file/blob-stream.d.ts +25 -0
- package/dist/node/file/blob-stream.d.ts.map +1 -0
- package/dist/node/file/blob-stream.js +37 -0
- package/dist/node/file/blob.d.ts +58 -0
- package/dist/node/file/blob.d.ts.map +1 -0
- package/dist/node/file/blob.js +160 -0
- package/dist/node/file/file-reader.d.ts +24 -0
- package/dist/node/file/file-reader.d.ts.map +1 -0
- package/dist/node/file/file-reader.js +35 -0
- package/dist/node/file/file.d.ts +25 -0
- package/dist/node/file/file.d.ts.map +1 -0
- package/dist/node/file/file.js +37 -0
- package/dist/node/file/install-file-polyfills.d.ts +2 -0
- package/dist/node/file/install-file-polyfills.d.ts.map +1 -0
- package/dist/node/file/install-file-polyfills.js +27 -0
- package/dist/node/file/readable-stream.d.ts +4 -0
- package/dist/node/file/readable-stream.d.ts.map +1 -0
- package/dist/node/file/readable-stream.js +11 -0
- package/dist/node/images/encode-image.node.d.ts +20 -0
- package/dist/node/images/encode-image.node.d.ts.map +1 -0
- package/dist/node/images/encode-image.node.js +41 -0
- package/dist/node/images/parse-image.node.d.ts +11 -0
- package/dist/node/images/parse-image.node.d.ts.map +1 -0
- package/dist/node/images/parse-image.node.js +29 -0
- package/dist/promise/all-settled.d.ts +10 -0
- package/dist/promise/all-settled.d.ts.map +1 -0
- package/dist/promise/all-settled.js +24 -0
- package/dist/utils/assert.d.ts +2 -0
- package/dist/utils/assert.d.ts.map +1 -0
- package/dist/utils/assert.js +9 -0
- package/dist/utils/globals.d.ts +4 -0
- package/dist/utils/globals.d.ts.map +1 -0
- package/dist/utils/globals.js +36 -0
- package/package.json +6 -6
- package/src/index.ts +2 -2
- package/src/lib/encoding-indexes.ts +34 -0
- package/{dist/esm/libs/encoding.js → src/lib/encoding.ts} +78 -78
- package/src/libs/{encoding-indexes.js → encoding-indexes-asian.js} +2 -40
- package/src/node/fetch/fetch.node.ts +19 -2
- package/dist/dist.min.js +0 -2
- package/dist/dist.min.js.map +0 -1
- package/dist/es5/libs/encoding.js +0 -3084
- package/src/libs/encoding.js +0 -3084
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"btoa.node.d.ts","sourceRoot":"","sources":["../../../src/node/buffer/btoa.node.ts"],"names":[],"mappings":"AAIA,wBAAgB,IAAI,CAAC,MAAM,KAAA,UAE1B;AAED,wBAAgB,IAAI,CAAC,MAAM,KAAA,UAE1B"}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// btoa, atob polyfills for Node.js
|
|
3
|
+
// Note: The atob and btoa functions (not just the polyfills!) are not unicode safe
|
|
4
|
+
// But still useful for unit testing
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.btoa = exports.atob = void 0;
|
|
7
|
+
function atob(string) {
|
|
8
|
+
return Buffer.from(string).toString('base64');
|
|
9
|
+
}
|
|
10
|
+
exports.atob = atob;
|
|
11
|
+
function btoa(base64) {
|
|
12
|
+
return Buffer.from(base64, 'base64').toString('ascii');
|
|
13
|
+
}
|
|
14
|
+
exports.btoa = btoa;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"to-array-buffer.node.d.ts","sourceRoot":"","sources":["../../../src/node/buffer/to-array-buffer.node.ts"],"names":[],"mappings":"AAAA,wBAAgB,mBAAmB,CAAC,MAAM,KAAA,OAOzC"}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.bufferToArrayBuffer = void 0;
|
|
4
|
+
function bufferToArrayBuffer(buffer) {
|
|
5
|
+
// TODO - per docs we should just be able to call buffer.buffer, but there are issues
|
|
6
|
+
if (Buffer.isBuffer(buffer)) {
|
|
7
|
+
const typedArray = new Uint8Array(buffer);
|
|
8
|
+
return typedArray.buffer;
|
|
9
|
+
}
|
|
10
|
+
return buffer;
|
|
11
|
+
}
|
|
12
|
+
exports.bufferToArrayBuffer = bufferToArrayBuffer;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"fetch.node.d.ts","sourceRoot":"","sources":["../../../src/node/fetch/fetch.node.ts"],"names":[],"mappings":"AAUA;;;;GAIG;AACH,wBAA8B,SAAS,CAAC,GAAG,KAAA,EAAE,OAAO,KAAA,OAwCnD"}
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
const fs_1 = __importDefault(require("fs")); // `fs` will be empty object in browsers (see package.json "browser" field).
|
|
7
|
+
const response_node_1 = __importDefault(require("./response.node"));
|
|
8
|
+
const headers_node_1 = __importDefault(require("./headers.node"));
|
|
9
|
+
const decode_data_uri_node_1 = require("./utils/decode-data-uri.node");
|
|
10
|
+
const stream_utils_node_1 = require("./utils/stream-utils.node");
|
|
11
|
+
const isDataURL = (url) => url.startsWith('data:');
|
|
12
|
+
const isRequestURL = (url) => url.startsWith('http:') || url.startsWith('https:');
|
|
13
|
+
/**
 * Emulation of Browser fetch for Node.js.
 * Handles data URIs, local file paths and http/https URLs, and follows 3xx
 * redirects unless `options.followRedirect` is explicitly `false`.
 * @param url
 * @param options
 */
async function fetchNode(url, options) {
    try {
        // Handle data urls in node, to match `fetch``
        // Note - this loses the MIME type, data URIs are handled directly in fetch
        if (isDataURL(url)) {
            const { arrayBuffer, mimeType } = (0, decode_data_uri_node_1.decodeDataUri)(url);
            const response = new response_node_1.default(arrayBuffer, {
                headers: { 'content-type': mimeType },
                url
            });
            return response;
        }
        // Automatically decompress gzipped files with .gz extension
        // The stream is opened from the ORIGINAL url; the synthetic
        // content-encoding header makes the Response gunzip it.
        const syntheticResponseHeaders = {};
        const originalUrl = url;
        if (url.endsWith('.gz')) {
            url = url.slice(0, -3);
            syntheticResponseHeaders['content-encoding'] = 'gzip';
        }
        // Need to create the stream in advance since Response constructor needs to be sync
        const body = await (0, stream_utils_node_1.createReadStream)(originalUrl, options);
        const headers = getHeaders(url, body, syntheticResponseHeaders);
        const { status, statusText } = getStatus(body);
        // NOTE(review): redundant expression - the last term can only be reached
        // when the middle term is falsy-but-not-undefined, i.e. this evaluates
        // to "follow unless followRedirect is explicitly false".
        const followRedirect = !options || options.followRedirect || options.followRedirect === undefined;
        if (status >= 300 && status < 400 && headers.has('location') && followRedirect) {
            const redirectUrl = generateRedirectUrl(url, headers.get('location'));
            // Redirect
            return await fetchNode(redirectUrl, options);
        }
        return new response_node_1.default(body, { headers, status, statusText, url });
    }
    catch (error) {
        // TODO - what error code to use here?
        // Errors (e.g. missing file) surface as a synthetic 400 response rather
        // than a rejected promise, loosely mirroring how browser fetch only
        // rejects on network failure.
        return new response_node_1.default(null, { status: 400, statusText: String(error), url });
    }
}
exports.default = fetchNode;
|
|
55
|
+
/**
 * Resolves a redirect `location` header against the URL that was requested.
 * Absolute locations (starting with "http") are returned unchanged; otherwise
 * the location replaces the pathname of the original URL.
 * @param originalUrl
 * @param location
 */
function generateRedirectUrl(originalUrl, location) {
    const isAbsolute = location.startsWith('http');
    if (isAbsolute) {
        return location;
    }
    // If url doesn't have origin and protocol just extend current url origin with location.
    const resolved = new URL(originalUrl);
    resolved.pathname = location;
    return resolved.href;
}
|
|
69
|
+
// HELPER FUNCTIONS
// PRIVATE
/**
 * Derives `{status, statusText}` from a Node response-like object.
 * File and data responses carry no statusCode and report 200 OK.
 */
function getStatus(httpResponse) {
    const { statusCode, statusMessage } = httpResponse;
    if (!statusCode) {
        return { status: 200, statusText: 'OK' };
    }
    return { status: statusCode, statusText: statusMessage || 'NA' };
}
|
|
77
|
+
/**
 * Builds a Headers polyfill instance for a response.
 * Lower-cases all header names from the Node response, fills in
 * `content-length` when it can be determined locally, then applies
 * `additionalHeaders` (e.g. a synthetic `content-encoding`) on top.
 */
function getHeaders(url, httpResponse, additionalHeaders = {}) {
    const headers = {};
    if (httpResponse && httpResponse.headers) {
        const httpHeaders = httpResponse.headers;
        for (const key in httpHeaders) {
            const header = httpHeaders[key];
            headers[key.toLowerCase()] = String(header);
        }
    }
    // Fix up content length if we can for best progress experience
    if (!headers['content-length']) {
        const contentLength = getContentLength(url);
        if (Number.isFinite(contentLength)) {
            // NOTE(review): stored as a number while the values above are strings;
            // consumers appear to tolerate this, but String() would be consistent.
            headers['content-length'] = contentLength;
        }
    }
    Object.assign(headers, additionalHeaders);
    return new headers_node_1.default(headers);
}
|
|
96
|
+
/**
 * Best-effort content length for a URL.
 * Returns null for http/https (must come from real response headers),
 * an approximate payload size for data URIs, and the `fs.statSync` size
 * for local file paths (null if the stat fails).
 */
function getContentLength(url) {
    if (isRequestURL(url)) {
        // Needs to be read from actual headers
        return null;
    }
    else if (isDataURL(url)) {
        // TODO - remove media type etc
        // Approximation only: still counts the media-type prefix and any
        // base64 overhead as payload.
        return url.length - 'data:'.length;
    }
    // File URL
    // TODO - how to handle non-existing file, this presumably just throws
    try {
        // strip query params from URL
        const noqueryUrl = url.split('?')[0];
        const stats = fs_1.default.statSync(noqueryUrl);
        return stats.size;
    }
    catch (error) {
        // ignore for now
    }
    return null;
}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Polyfill for Browser Headers
|
|
3
|
+
* Based on https://github.com/github/fetch under MIT license
|
|
4
|
+
*/
|
|
5
|
+
export default class Headers {
|
|
6
|
+
map: {};
|
|
7
|
+
constructor(headers: any);
|
|
8
|
+
append(name: any, value: any): void;
|
|
9
|
+
delete(name: any): void;
|
|
10
|
+
get(name: any): any;
|
|
11
|
+
has(name: any): boolean;
|
|
12
|
+
set(name: any, value: any): void;
|
|
13
|
+
forEach(visitor: any, thisArg?: null): void;
|
|
14
|
+
keys(): {
|
|
15
|
+
next(): {
|
|
16
|
+
done: boolean;
|
|
17
|
+
value: any;
|
|
18
|
+
};
|
|
19
|
+
};
|
|
20
|
+
values(): {
|
|
21
|
+
next(): {
|
|
22
|
+
done: boolean;
|
|
23
|
+
value: any;
|
|
24
|
+
};
|
|
25
|
+
};
|
|
26
|
+
entries(): {
|
|
27
|
+
next(): {
|
|
28
|
+
done: boolean;
|
|
29
|
+
value: any;
|
|
30
|
+
};
|
|
31
|
+
};
|
|
32
|
+
[Symbol.iterator](): Generator<any, void, unknown>;
|
|
33
|
+
}
|
|
34
|
+
//# sourceMappingURL=headers.node.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"headers.node.d.ts","sourceRoot":"","sources":["../../../src/node/fetch/headers.node.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,MAAM,CAAC,OAAO,OAAO,OAAO;IAC1B,GAAG,EAAE,EAAE,CAAC;gBAEI,OAAO,KAAA;IAYnB,MAAM,CAAC,IAAI,KAAA,EAAE,KAAK,KAAA;IAOlB,MAAM,CAAC,IAAI,KAAA;IAIX,GAAG,CAAC,IAAI,KAAA;IAKR,GAAG,CAAC,IAAI,KAAA;IAIR,GAAG,CAAC,IAAI,KAAA,EAAE,KAAK,KAAA;IAIf,OAAO,CAAC,OAAO,KAAA,EAAE,OAAO,OAAO;IAY/B,IAAI;;;;;;IAQJ,MAAM;;;;;;IAQN,OAAO;;;;;;IAQN,CAAC,MAAM,CAAC,QAAQ,CAAC;CAInB"}
|
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
 * Polyfill for Browser Headers
 * Based on https://github.com/github/fetch under MIT license
 *
 * Header names are normalized to lower case; appending to an existing name
 * joins the values with ", ".
 */
class Headers {
    constructor(headers) {
        this.map = {};
        if (headers instanceof Headers) {
            headers.forEach((value, name) => this.append(name, value));
        }
        else if (Array.isArray(headers)) {
            for (const [name, value] of headers) {
                this.append(name, value);
            }
        }
        else if (headers) {
            for (const name of Object.getOwnPropertyNames(headers)) {
                this.append(name, headers[name]);
            }
        }
    }
    /** Adds a value, joining with any value already stored under `name`. */
    append(name, value) {
        const key = normalizeName(name);
        const normalized = normalizeValue(value);
        const previous = this.map[key];
        this.map[key] = previous ? `${previous}, ${normalized}` : normalized;
    }
    delete(name) {
        delete this.map[normalizeName(name)];
    }
    get(name) {
        const key = normalizeName(name);
        return this.has(key) ? this.map[key] : null;
    }
    has(name) {
        return this.map.hasOwnProperty(normalizeName(name));
    }
    set(name, value) {
        this.map[normalizeName(name)] = normalizeValue(value);
    }
    /** Invokes `visitor(value, name, headers)` for every stored header. */
    forEach(visitor, thisArg = null) {
        for (const name of Object.keys(this.map)) {
            const value = this.map[name];
            if (thisArg) {
                visitor.call(thisArg, value, name, this);
            }
            else {
                visitor(value, name, this);
            }
        }
    }
    keys() {
        const names = [];
        this.forEach((_value, name) => names.push(name));
        return iteratorFor(names);
    }
    values() {
        const collected = [];
        this.forEach((value) => collected.push(value));
        return iteratorFor(collected);
    }
    entries() {
        const pairs = [];
        this.forEach((value, name) => pairs.push([name, value]));
        return iteratorFor(pairs);
    }
    *[Symbol.iterator]() {
        // @ts-ignore must have a '[Symbol.iterator]()' method that returns an iterator.
        yield* this.entries();
    }
}
exports.default = Headers;
|
|
78
|
+
/** Validates and lower-cases a header field name; throws on illegal characters. */
function normalizeName(name) {
    const nameString = typeof name === 'string' ? name : String(name);
    const isInvalid = nameString === '' || /[^a-z0-9\-#$%&'*+.^_`|~]/i.test(nameString);
    if (isInvalid) {
        throw new TypeError('Invalid character in header field name');
    }
    return nameString.toLowerCase();
}
|
|
87
|
+
/** Coerces a header value to a string (non-strings via String()). */
function normalizeValue(value) {
    return typeof value === 'string' ? value : String(value);
}
|
|
93
|
+
// Build a destructive iterator for the value list
// (each next() shifts from `items`, so the array is consumed as it is read).
function iteratorFor(items) {
    const iterator = {
        next: () => {
            const value = items.shift();
            return { done: value === undefined, value };
        },
        [Symbol.iterator]() {
            return iterator;
        }
    };
    return iterator;
}
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import Headers from './headers.node';
|
|
2
|
+
export default class Response {
|
|
3
|
+
readonly ok: boolean;
|
|
4
|
+
readonly status: number;
|
|
5
|
+
readonly statusText: string;
|
|
6
|
+
readonly headers: Headers;
|
|
7
|
+
readonly url: string;
|
|
8
|
+
bodyUsed: boolean;
|
|
9
|
+
private readonly _body;
|
|
10
|
+
constructor(body: any, options: {
|
|
11
|
+
headers?: any;
|
|
12
|
+
status?: number;
|
|
13
|
+
statusText?: string;
|
|
14
|
+
url: string;
|
|
15
|
+
});
|
|
16
|
+
get body(): any;
|
|
17
|
+
arrayBuffer(): Promise<any>;
|
|
18
|
+
text(): Promise<string>;
|
|
19
|
+
json(): Promise<any>;
|
|
20
|
+
blob(): Promise<Blob>;
|
|
21
|
+
}
|
|
22
|
+
//# sourceMappingURL=response.node.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"response.node.d.ts","sourceRoot":"","sources":["../../../src/node/fetch/response.node.ts"],"names":[],"mappings":"AAEA,OAAO,OAAO,MAAM,gBAAgB,CAAC;AAoBrC,MAAM,CAAC,OAAO,OAAO,QAAQ;IAC3B,QAAQ,CAAC,EAAE,EAAE,OAAO,CAAC;IACrB,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC;IACxB,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;IAC5B,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC;IACrB,QAAQ,EAAE,OAAO,CAAS;IAC1B,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC;gBAIrB,IAAI,KAAA,EACJ,OAAO,EAAE;QACP,OAAO,CAAC,MAAC;QACT,MAAM,CAAC,EAAE,MAAM,CAAC;QAChB,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,GAAG,EAAE,MAAM,CAAC;KACb;IAuBH,IAAI,IAAI,QAKP;IAIK,WAAW;IAQX,IAAI;IAMJ,IAAI;IAKJ,IAAI;CAMX"}
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const assert_1 = require("../../utils/assert");
const stream_utils_node_1 = require("./utils/stream-utils.node");
const headers_node_1 = __importDefault(require("./headers.node"));
const isBoolean = (x) => typeof x === 'boolean';
const isFunction = (x) => typeof x === 'function';
const isObject = (x) => x !== null && typeof x === 'object';
const isReadableNodeStream = (x) => isObject(x) && isFunction(x.read) && isFunction(x.pipe) && isBoolean(x.readable);
/**
 * Polyfill for Browser Response
 *
 * Under Node.js we return a mock "fetch response object"
 * so that apps can use the same API as in the browser.
 *
 * Note: This is intended to be a "lightweight" implementation and will have limitations.
 *
 * See https://developer.mozilla.org/en-US/docs/Web/API/Response
 */
const stream_1 = require("stream");
class Response {
    // TODO - handle ArrayBuffer, ArrayBufferView, Buffer
    constructor(body, options) {
        this.bodyUsed = false;
        const { status = 200, statusText = 'OK', url } = options || {};
        this.url = url;
        // Per the fetch spec, `ok` is true for the whole 2xx range.
        // (Previously `status === 200`, which mislabeled e.g. 201/204/206.)
        this.ok = status >= 200 && status < 300;
        this.status = status; // TODO - handle errors and set status
        this.statusText = statusText;
        this.headers = new headers_node_1.default(options?.headers || {});
        // Check for content-encoding and create a decompression stream
        if (isReadableNodeStream(body)) {
            // Pass the normalized Headers instance (guaranteed to have `.get()`)
            // rather than raw `options.headers`, which may be a plain object.
            this._body = (0, stream_utils_node_1.decompressReadStream)(body, this.headers);
        }
        else if (typeof body === 'string') {
            this._body = stream_1.Readable.from([new TextEncoder().encode(body)]);
        }
        else {
            this._body = stream_1.Readable.from([body || new ArrayBuffer(0)]);
        }
    }
    // Subset of Properties
    // Returns a readable stream to the "body" of the response (or file).
    // May only be retrieved once; flips `bodyUsed`.
    get body() {
        (0, assert_1.assert)(!this.bodyUsed);
        (0, assert_1.assert)(isReadableNodeStream(this._body)); // Not implemented: conversion of ArrayBuffer etc to stream
        this.bodyUsed = true;
        return this._body;
    }
    // Subset of Methods
    /** Concatenates the body stream into a single ArrayBuffer. */
    async arrayBuffer() {
        if (!isReadableNodeStream(this._body)) {
            return this._body || new ArrayBuffer(0);
        }
        const data = await (0, stream_utils_node_1.concatenateReadStream)(this._body);
        return data;
    }
    /** Decodes the body as UTF-8 text. */
    async text() {
        const arrayBuffer = await this.arrayBuffer();
        const textDecoder = new TextDecoder();
        return textDecoder.decode(arrayBuffer);
    }
    /** Parses the body as JSON. */
    async json() {
        const text = await this.text();
        return JSON.parse(text);
    }
    /** Wraps the body in a Blob; requires the Blob polyfill to be installed. */
    async blob() {
        if (typeof Blob === 'undefined') {
            throw new Error('Blob polyfill not installed');
        }
        return new Blob([await this.arrayBuffer()]);
    }
}
exports.default = Response;
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Parses a data URI into a buffer, as well as retrieving its declared MIME type.
|
|
3
|
+
*
|
|
4
|
+
* @param {string} uri - a data URI (assumed to be valid)
|
|
5
|
+
* @returns {Object} { buffer, mimeType }
|
|
6
|
+
*/
|
|
7
|
+
export declare function decodeDataUri(uri: any): {
|
|
8
|
+
arrayBuffer: any;
|
|
9
|
+
mimeType: any;
|
|
10
|
+
};
|
|
11
|
+
/**
|
|
12
|
+
* @param data
|
|
13
|
+
* @todo Duplicate of core
|
|
14
|
+
*/
|
|
15
|
+
export declare function toArrayBuffer(data: any): any;
|
|
16
|
+
//# sourceMappingURL=decode-data-uri.node.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"decode-data-uri.node.d.ts","sourceRoot":"","sources":["../../../../src/node/fetch/utils/decode-data-uri.node.ts"],"names":[],"mappings":"AAOA;;;;;GAKG;AACH,wBAAgB,aAAa,CAAC,GAAG,KAAA;;;EAoBhC;AAED;;;GAGG;AACH,wBAAgB,aAAa,CAAC,IAAI,KAAA,OA4BjC"}
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// Based on binary-gltf-utils under MIT license: Copyright (c) 2016-17 Karl Cheng
|
|
3
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
4
|
+
exports.toArrayBuffer = exports.decodeDataUri = void 0;
|
|
5
|
+
const assert_1 = require("../../../utils/assert");
|
|
6
|
+
const isArrayBuffer = (x) => x && x instanceof ArrayBuffer;
const isBuffer = (x) => x && x instanceof Buffer;
/**
 * Parses a data URI into a buffer, as well as retrieving its declared MIME type.
 *
 * @param {string} uri - a data URI (assumed to be valid)
 * @returns {Object} { arrayBuffer, mimeType }
 */
function decodeDataUri(uri) {
    const commaIndex = uri.indexOf(',');
    const isBase64 = uri.slice(commaIndex - 7, commaIndex) === ';base64';
    let buffer;
    let mimeType;
    if (isBase64) {
        buffer = Buffer.from(uri.slice(commaIndex + 1), 'base64');
        mimeType = uri.slice(5, commaIndex - 7).trim();
    }
    else {
        buffer = Buffer.from(decodeURIComponent(uri.slice(commaIndex + 1)));
        mimeType = uri.slice(5, commaIndex).trim();
    }
    // Defaults: an empty media type means US-ASCII text; a bare
    // ";charset=..." suffix implies text/plain.
    if (!mimeType) {
        mimeType = 'text/plain;charset=US-ASCII';
    }
    else if (mimeType.startsWith(';')) {
        mimeType = `text/plain${mimeType}`;
    }
    return { arrayBuffer: toArrayBuffer(buffer), mimeType };
}
exports.decodeDataUri = decodeDataUri;
/**
 * Best-effort conversion of binary-ish values to an ArrayBuffer.
 * @param data
 * @todo Duplicate of core
 */
function toArrayBuffer(data) {
    if (isArrayBuffer(data)) {
        return data;
    }
    // TODO - per docs we should just be able to call buffer.buffer, but there are issues
    if (isBuffer(data)) {
        return new Uint8Array(data).buffer;
    }
    // Careful - Node Buffers will look like ArrayBuffers (keep after isBuffer)
    if (ArrayBuffer.isView(data)) {
        return data.buffer;
    }
    if (typeof data === 'string') {
        return new TextEncoder().encode(data).buffer;
    }
    // HACK to support Blob polyfill
    if (data && typeof data === 'object' && data._toArrayBuffer) {
        return data._toArrayBuffer();
    }
    return (0, assert_1.assert)(false, `toArrayBuffer(${JSON.stringify(data, null, 2).slice(10)})`);
}
exports.toArrayBuffer = toArrayBuffer;
|
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
export declare function createReadStream(url: any, options: any): Promise<unknown>;
|
|
2
|
+
export declare function decompressReadStream(readStream: any, headers: any): any;
|
|
3
|
+
export declare function concatenateReadStream(readStream: any): Promise<unknown>;
|
|
4
|
+
//# sourceMappingURL=stream-utils.node.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"stream-utils.node.d.ts","sourceRoot":"","sources":["../../../../src/node/fetch/utils/stream-utils.node.ts"],"names":[],"mappings":"AAUA,wBAAsB,gBAAgB,CAAC,GAAG,KAAA,EAAE,OAAO,KAAA,oBAsBlD;AAED,wBAAgB,oBAAoB,CAAC,UAAU,KAAA,EAAE,OAAO,KAAA,OAYvD;AAED,wBAAsB,qBAAqB,CAAC,UAAU,KAAA,oBAoBrD"}
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.concatenateReadStream = exports.decompressReadStream = exports.createReadStream = void 0;
|
|
7
|
+
const fs_1 = __importDefault(require("fs")); // `fs` will be empty object in browsers (see package.json "browser" field).
|
|
8
|
+
const http_1 = __importDefault(require("http"));
|
|
9
|
+
const https_1 = __importDefault(require("https"));
|
|
10
|
+
const zlib_1 = __importDefault(require("zlib"));
|
|
11
|
+
const decode_data_uri_node_1 = require("./decode-data-uri.node");
|
|
12
|
+
const isRequestURL = (url) => url.startsWith('http:') || url.startsWith('https:');
|
|
13
|
+
// Returns a promise that resolves to a readable stream
/**
 * Opens a readable stream for a local file path or an http/https URL.
 * For files, resolves once the stream emits 'readable'; for requests,
 * resolves with the response stream once response headers arrive.
 */
async function createReadStream(url, options) {
    // Handle file streams in node
    if (!isRequestURL(url)) {
        const noqueryUrl = url.split('?')[0];
        // Now open the stream
        return await new Promise((resolve, reject) => {
            // @ts-ignore
            const stream = fs_1.default.createReadStream(noqueryUrl, { encoding: null });
            stream.once('readable', () => resolve(stream));
            stream.on('error', (error) => reject(error));
        });
    }
    // HANDLE HTTP/HTTPS REQUESTS IN NODE
    // TODO: THIS IS BAD SINCE WE RETURN A PROMISE INSTEAD OF A STREAM
    return await new Promise((resolve, reject) => {
        const requestFunction = url.startsWith('https:') ? https_1.default.request : http_1.default.request;
        const requestOptions = getRequestOptions(url, options);
        const req = requestFunction(requestOptions, (res) => resolve(res));
        req.on('error', (error) => reject(error));
        req.end();
    });
}
exports.createReadStream = createReadStream;
|
|
37
|
+
/**
 * Wraps the raw response stream in the appropriate zlib decompressor,
 * selected by the response's `content-encoding` header.
 */
function decompressReadStream(readStream, headers) {
    switch (headers.get('content-encoding')) {
        case 'br':
            return readStream.pipe(zlib_1.default.createBrotliDecompress());
        case 'gzip':
            return readStream.pipe(zlib_1.default.createGunzip());
        case 'deflate':
            // Fix: createInflate DEcompresses; the previous createDeflate would
            // have re-COMpressed the stream instead of decoding it.
            return readStream.pipe(zlib_1.default.createInflate());
        default:
            // No compression or an unknown one, just return it as is
            return readStream;
    }
}
exports.decompressReadStream = decompressReadStream;
|
|
51
|
+
/**
 * Drains a readable stream into a single ArrayBuffer.
 * Rejects if the stream errors or emits string chunks (stream must be binary).
 */
async function concatenateReadStream(readStream) {
    let arrayBuffer = new ArrayBuffer(0);
    return await new Promise((resolve, reject) => {
        readStream.on('error', (error) => reject(error));
        // Once the readable callback has been added, stream switches to "flowing mode"
        // In Node 10 (but not 12 and 14) this causes `data` and `end` to never be called unless we read data here
        readStream.on('readable', () => readStream.read());
        readStream.on('data', (chunk) => {
            if (typeof chunk === 'string') {
                reject(new Error('Read stream not binary'));
            }
            const chunkAsArrayBuffer = (0, decode_data_uri_node_1.toArrayBuffer)(chunk);
            // O(n^2) in total bytes since each chunk reallocates the accumulator;
            // acceptable for the small payloads this polyfill targets.
            arrayBuffer = concatenateArrayBuffers(arrayBuffer, chunkAsArrayBuffer);
        });
        readStream.on('end', () => resolve(arrayBuffer));
    });
}
exports.concatenateReadStream = concatenateReadStream;
|
|
69
|
+
// HELPERS
/**
 * Builds Node http/https request options from a URL plus caller options.
 * Header keys are lower-cased so user headers merge without duplicates, and a
 * default `accept-encoding` is added so servers may compress responses.
 * NOTE(review): `urlObject.port` is not forwarded - requests to non-default
 * ports may not work; confirm against callers before adding it.
 */
function getRequestOptions(url, options) {
    // Ensure header keys are lower case so that we can merge without duplicates
    const originalHeaders = options?.headers || {};
    const headers = {};
    for (const key of Object.keys(originalHeaders)) {
        headers[key.toLowerCase()] = originalHeaders[key];
    }
    // Add default accept-encoding to headers
    headers['accept-encoding'] = headers['accept-encoding'] || 'gzip,br,deflate';
    const urlObject = new URL(url);
    return {
        hostname: urlObject.hostname,
        // Fix: include the query string - `pathname` alone silently dropped
        // any `?key=value` parameters from the outgoing request.
        path: urlObject.pathname + urlObject.search,
        method: 'GET',
        // Add options and user provided 'options.fetch' overrides if available
        ...options,
        ...options?.fetch,
        // Override with updated headers with accepted encodings:
        headers
    };
}
|
|
91
|
+
/** Concatenates two ArrayBuffers (or typed arrays) into a new ArrayBuffer. */
function concatenateArrayBuffers(source1, source2) {
    const left = source1 instanceof ArrayBuffer ? new Uint8Array(source1) : source1;
    const right = source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2;
    const combined = new Uint8Array(left.byteLength + right.byteLength);
    combined.set(left, 0);
    combined.set(right, left.byteLength);
    return combined.buffer;
}
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Forked from @gozala's web-blob under MIT license
|
|
3
|
+
* @see https://github.com/Gozala/web-blob
|
|
4
|
+
*/
|
|
5
|
+
export declare class BlobStreamController {
|
|
6
|
+
private chunks;
|
|
7
|
+
private isWorking;
|
|
8
|
+
private isCancelled;
|
|
9
|
+
/**
|
|
10
|
+
* @param chunks
|
|
11
|
+
*/
|
|
12
|
+
constructor(chunks: Iterator<Uint8Array>);
|
|
13
|
+
/**
|
|
14
|
+
* @param controller
|
|
15
|
+
*/
|
|
16
|
+
start(controller: ReadableStreamDefaultController): void;
|
|
17
|
+
/**
|
|
18
|
+
*
|
|
19
|
+
* @param controller
|
|
20
|
+
*/
|
|
21
|
+
work(controller: ReadableStreamDefaultController): Promise<void>;
|
|
22
|
+
/**
|
|
23
|
+
*
|
|
24
|
+
* @param {ReadableStreamDefaultController} controller
|
|
25
|
+
*/
|
|
26
|
+
pull(controller: any): void;
|
|
27
|
+
cancel(): void;
|
|
28
|
+
}
|
|
29
|
+
//# sourceMappingURL=blob-stream-controller.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"blob-stream-controller.d.ts","sourceRoot":"","sources":["../../../src/node/file/blob-stream-controller.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,qBAAa,oBAAoB;IAC/B,OAAO,CAAC,MAAM,CAAuB;IACrC,OAAO,CAAC,SAAS,CAAkB;IACnC,OAAO,CAAC,WAAW,CAAkB;IAErC;;OAEG;gBACS,MAAM,EAAE,QAAQ,CAAC,UAAU,CAAC;IAIxC;;OAEG;IACH,KAAK,CAAC,UAAU,EAAE,+BAA+B;IAIjD;;;OAGG;IACG,IAAI,CAAC,UAAU,EAAE,+BAA+B;IAyBtD;;;OAGG;IACH,IAAI,CAAC,UAAU,KAAA;IAKf,MAAM;CAGP"}
|