@loaders.gl/polyfills 3.1.3 → 4.0.0-alpha.5
This diff shows the changes between two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- package/dist/bundle.js +2 -2
- package/dist/bundle.js.map +1 -0
- package/dist/index.js +49 -84
- package/dist/index.js.map +1 -0
- package/dist/lib/encoding-indexes.js +29 -34
- package/dist/lib/encoding-indexes.js.map +1 -0
- package/dist/lib/encoding.js +1281 -2610
- package/dist/lib/encoding.js.map +1 -0
- package/dist/libs/encoding-indexes-asian.js +8 -9
- package/dist/node/buffer/btoa.node.js +5 -12
- package/dist/node/buffer/btoa.node.js.map +1 -0
- package/dist/node/buffer/to-array-buffer.node.js +8 -11
- package/dist/node/buffer/to-array-buffer.node.js.map +1 -0
- package/dist/node/fetch/fetch.node.js +112 -103
- package/dist/node/fetch/fetch.node.js.map +1 -0
- package/dist/node/fetch/headers.node.js +114 -94
- package/dist/node/fetch/headers.node.js.map +1 -0
- package/dist/node/fetch/response.node.js +84 -73
- package/dist/node/fetch/response.node.js.map +1 -0
- package/dist/node/fetch/utils/decode-data-uri.node.js +53 -63
- package/dist/node/fetch/utils/decode-data-uri.node.js.map +1 -0
- package/dist/node/fetch/utils/stream-utils.node.js +87 -105
- package/dist/node/fetch/utils/stream-utils.node.js.map +1 -0
- package/dist/node/file/blob-stream-controller.js +52 -59
- package/dist/node/file/blob-stream-controller.js.map +1 -0
- package/dist/node/file/blob-stream.js +25 -36
- package/dist/node/file/blob-stream.js.map +1 -0
- package/dist/node/file/blob.js +131 -151
- package/dist/node/file/blob.js.map +1 -0
- package/dist/node/file/file-reader.js +77 -28
- package/dist/node/file/file-reader.js.map +1 -0
- package/dist/node/file/file.js +25 -36
- package/dist/node/file/file.js.map +1 -0
- package/dist/node/file/install-file-polyfills.js +21 -26
- package/dist/node/file/install-file-polyfills.js.map +1 -0
- package/dist/node/file/readable-stream.js +3 -10
- package/dist/node/file/readable-stream.js.map +1 -0
- package/dist/node/images/encode-image.node.js +17 -38
- package/dist/node/images/encode-image.node.js.map +1 -0
- package/dist/node/images/parse-image.node.js +19 -28
- package/dist/node/images/parse-image.node.js.map +1 -0
- package/dist/promise/all-settled.js +17 -22
- package/dist/promise/all-settled.js.map +1 -0
- package/dist/utils/assert.js +5 -8
- package/dist/utils/assert.js.map +1 -0
- package/dist/utils/globals.js +7 -34
- package/dist/utils/globals.js.map +1 -0
- package/package.json +4 -4
- package/dist/es5/bundle.js +0 -7
- package/dist/es5/bundle.js.map +0 -1
- package/dist/es5/index.js +0 -122
- package/dist/es5/index.js.map +0 -1
- package/dist/es5/lib/encoding-indexes.js +0 -37
- package/dist/es5/lib/encoding-indexes.js.map +0 -1
- package/dist/es5/lib/encoding.js +0 -1465
- package/dist/es5/lib/encoding.js.map +0 -1
- package/dist/es5/libs/encoding-indexes-asian.js +0 -13
- package/dist/es5/node/buffer/btoa.node.js +0 -16
- package/dist/es5/node/buffer/btoa.node.js.map +0 -1
- package/dist/es5/node/buffer/to-array-buffer.node.js +0 -16
- package/dist/es5/node/buffer/to-array-buffer.node.js.map +0 -1
- package/dist/es5/node/fetch/fetch.node.js +0 -183
- package/dist/es5/node/fetch/fetch.node.js.map +0 -1
- package/dist/es5/node/fetch/headers.node.js +0 -175
- package/dist/es5/node/fetch/headers.node.js.map +0 -1
- package/dist/es5/node/fetch/response.node.js +0 -231
- package/dist/es5/node/fetch/response.node.js.map +0 -1
- package/dist/es5/node/fetch/utils/decode-data-uri.node.js +0 -72
- package/dist/es5/node/fetch/utils/decode-data-uri.node.js.map +0 -1
- package/dist/es5/node/fetch/utils/stream-utils.node.js +0 -213
- package/dist/es5/node/fetch/utils/stream-utils.node.js.map +0 -1
- package/dist/es5/node/file/blob-stream-controller.js +0 -110
- package/dist/es5/node/file/blob-stream-controller.js.map +0 -1
- package/dist/es5/node/file/blob-stream.js +0 -93
- package/dist/es5/node/file/blob-stream.js.map +0 -1
- package/dist/es5/node/file/blob.js +0 -259
- package/dist/es5/node/file/blob.js.map +0 -1
- package/dist/es5/node/file/file-reader.js +0 -189
- package/dist/es5/node/file/file-reader.js.map +0 -1
- package/dist/es5/node/file/file.js +0 -63
- package/dist/es5/node/file/file.js.map +0 -1
- package/dist/es5/node/file/install-file-polyfills.js +0 -33
- package/dist/es5/node/file/install-file-polyfills.js.map +0 -1
- package/dist/es5/node/file/readable-stream.js +0 -40
- package/dist/es5/node/file/readable-stream.js.map +0 -1
- package/dist/es5/node/images/encode-image.node.js +0 -36
- package/dist/es5/node/images/encode-image.node.js.map +0 -1
- package/dist/es5/node/images/parse-image.node.js +0 -61
- package/dist/es5/node/images/parse-image.node.js.map +0 -1
- package/dist/es5/promise/all-settled.js +0 -29
- package/dist/es5/promise/all-settled.js.map +0 -1
- package/dist/es5/utils/assert.js +0 -13
- package/dist/es5/utils/assert.js.map +0 -1
- package/dist/es5/utils/globals.js +0 -21
- package/dist/es5/utils/globals.js.map +0 -1
- package/dist/esm/bundle.js +0 -5
- package/dist/esm/bundle.js.map +0 -1
- package/dist/esm/index.js +0 -63
- package/dist/esm/index.js.map +0 -1
- package/dist/esm/lib/encoding-indexes.js +0 -30
- package/dist/esm/lib/encoding-indexes.js.map +0 -1
- package/dist/esm/lib/encoding.js +0 -1450
- package/dist/esm/lib/encoding.js.map +0 -1
- package/dist/esm/libs/encoding-indexes-asian.js +0 -13
- package/dist/esm/node/buffer/btoa.node.js +0 -7
- package/dist/esm/node/buffer/btoa.node.js.map +0 -1
- package/dist/esm/node/buffer/to-array-buffer.node.js +0 -9
- package/dist/esm/node/buffer/to-array-buffer.node.js.map +0 -1
- package/dist/esm/node/fetch/fetch.node.js +0 -126
- package/dist/esm/node/fetch/fetch.node.js.map +0 -1
- package/dist/esm/node/fetch/headers.node.js +0 -125
- package/dist/esm/node/fetch/headers.node.js.map +0 -1
- package/dist/esm/node/fetch/response.node.js +0 -88
- package/dist/esm/node/fetch/response.node.js.map +0 -1
- package/dist/esm/node/fetch/utils/decode-data-uri.node.js +0 -55
- package/dist/esm/node/fetch/utils/decode-data-uri.node.js.map +0 -1
- package/dist/esm/node/fetch/utils/stream-utils.node.js +0 -98
- package/dist/esm/node/fetch/utils/stream-utils.node.js.map +0 -1
- package/dist/esm/node/file/blob-stream-controller.js +0 -56
- package/dist/esm/node/file/blob-stream-controller.js.map +0 -1
- package/dist/esm/node/file/blob-stream.js +0 -26
- package/dist/esm/node/file/blob-stream.js.map +0 -1
- package/dist/esm/node/file/blob.js +0 -140
- package/dist/esm/node/file/blob.js.map +0 -1
- package/dist/esm/node/file/file-reader.js +0 -84
- package/dist/esm/node/file/file-reader.js.map +0 -1
- package/dist/esm/node/file/file.js +0 -26
- package/dist/esm/node/file/file.js.map +0 -1
- package/dist/esm/node/file/install-file-polyfills.js +0 -22
- package/dist/esm/node/file/install-file-polyfills.js.map +0 -1
- package/dist/esm/node/file/readable-stream.js +0 -4
- package/dist/esm/node/file/readable-stream.js.map +0 -1
- package/dist/esm/node/images/encode-image.node.js +0 -20
- package/dist/esm/node/images/encode-image.node.js.map +0 -1
- package/dist/esm/node/images/parse-image.node.js +0 -20
- package/dist/esm/node/images/parse-image.node.js.map +0 -1
- package/dist/esm/promise/all-settled.js +0 -19
- package/dist/esm/promise/all-settled.js.map +0 -1
- package/dist/esm/utils/assert.js +0 -6
- package/dist/esm/utils/assert.js.map +0 -1
- package/dist/esm/utils/globals.js +0 -9
- package/dist/esm/utils/globals.js.map +0 -1
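
The per-file hunks below all follow the same pattern, inferable from this listing: the separate dist/es5 and dist/esm trees are removed, the top-level dist/ modules switch from CommonJS to ES-module output, and each module gains a .js.map source map. As an orientation aid (paraphrasing the first hunk below, not additional diff content), the btoa polyfill changes shape roughly like this:

// In 3.1.3 the same module was CommonJS output:
//   Object.defineProperty(exports, "__esModule", { value: true });
//   exports.btoa = exports.atob = void 0;
//   function atob(string) { return Buffer.from(string).toString('base64'); }
// In 4.0.0-alpha.5 it is an ES module with a source map reference:
export function atob(string) {
  return Buffer.from(string).toString('base64');
}
//# sourceMappingURL=btoa.node.js.map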

package/dist/node/buffer/btoa.node.js
@@ -1,14 +1,7 @@
-
-
-// Note: The atob and btoa functions (not just the polyfills!) are not unicode safe
-// But still useful for unit testing
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.btoa = exports.atob = void 0;
-function atob(string) {
-return Buffer.from(string).toString('base64');
+export function atob(string) {
+  return Buffer.from(string).toString('base64');
 }
-
-
-return Buffer.from(base64, 'base64').toString('ascii');
+export function btoa(base64) {
+  return Buffer.from(base64, 'base64').toString('ascii');
 }
-
+//# sourceMappingURL=btoa.node.js.map

package/dist/node/buffer/btoa.node.js.map (new file)
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/node/buffer/btoa.node.ts"],"names":["atob","string","Buffer","from","toString","btoa","base64"],"mappings":"AAIA,OAAO,SAASA,IAAT,CAAcC,MAAd,EAAsB;AAC3B,SAAOC,MAAM,CAACC,IAAP,CAAYF,MAAZ,EAAoBG,QAApB,CAA6B,QAA7B,CAAP;AACD;AAED,OAAO,SAASC,IAAT,CAAcC,MAAd,EAAsB;AAC3B,SAAOJ,MAAM,CAACC,IAAP,CAAYG,MAAZ,EAAoB,QAApB,EAA8BF,QAA9B,CAAuC,OAAvC,CAAP;AACD","sourcesContent":["// btoa, atob polyfills for Node.js\n// Note: The atob and btoa functions (not just the polyfills!) are not unicode safe\n// But still useful for unit testing\n\nexport function atob(string) {\n return Buffer.from(string).toString('base64');\n}\n\nexport function btoa(base64) {\n return Buffer.from(base64, 'base64').toString('ascii');\n}\n"],"file":"btoa.node.js"}

package/dist/node/buffer/to-array-buffer.node.js
@@ -1,12 +1,9 @@
-
-
-
-
-
-
-
-return typedArray.buffer;
-}
-return buffer;
+export function bufferToArrayBuffer(buffer) {
+  if (Buffer.isBuffer(buffer)) {
+    const typedArray = new Uint8Array(buffer);
+    return typedArray.buffer;
+  }
+
+  return buffer;
 }
-
+//# sourceMappingURL=to-array-buffer.node.js.map

package/dist/node/buffer/to-array-buffer.node.js.map (new file)
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/node/buffer/to-array-buffer.node.ts"],"names":["bufferToArrayBuffer","buffer","Buffer","isBuffer","typedArray","Uint8Array"],"mappings":"AAAA,OAAO,SAASA,mBAAT,CAA6BC,MAA7B,EAAqC;AAE1C,MAAIC,MAAM,CAACC,QAAP,CAAgBF,MAAhB,CAAJ,EAA6B;AAC3B,UAAMG,UAAU,GAAG,IAAIC,UAAJ,CAAeJ,MAAf,CAAnB;AACA,WAAOG,UAAU,CAACH,MAAlB;AACD;;AACD,SAAOA,MAAP;AACD","sourcesContent":["export function bufferToArrayBuffer(buffer) {\n // TODO - per docs we should just be able to call buffer.buffer, but there are issues\n if (Buffer.isBuffer(buffer)) {\n const typedArray = new Uint8Array(buffer);\n return typedArray.buffer;\n }\n return buffer;\n}\n"],"file":"to-array-buffer.node.js"}

package/dist/node/fetch/fetch.node.js
@@ -1,117 +1,126 @@
-
-
-
-};
-
-
-const
-
-const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-headers: { 'content-type': mimeType },
-url
-});
-return response;
-}
-// Automatically decompress gzipped files with .gz extension
-const syntheticResponseHeaders = {};
-const originalUrl = url;
-if (url.endsWith('.gz')) {
-url = url.slice(0, -3);
-syntheticResponseHeaders['content-encoding'] = 'gzip';
-}
-// Need to create the stream in advance since Response constructor needs to be sync
-const body = await (0, stream_utils_node_1.createReadStream)(originalUrl, options);
-const headers = getHeaders(url, body, syntheticResponseHeaders);
-const { status, statusText } = getStatus(body);
-const followRedirect = !options || options.followRedirect || options.followRedirect === undefined;
-if (status >= 300 && status < 400 && headers.has('location') && followRedirect) {
-const redirectUrl = generateRedirectUrl(url, headers.get('location'));
-// Redirect
-return await fetchNode(redirectUrl, options);
-}
-return new response_node_1.default(body, { headers, status, statusText, url });
+import fs from 'fs';
+import Response from './response.node';
+import Headers from './headers.node';
+import { decodeDataUri } from './utils/decode-data-uri.node';
+import { createReadStream } from './utils/stream-utils.node';
+
+const isDataURL = url => url.startsWith('data:');
+
+const isRequestURL = url => url.startsWith('http:') || url.startsWith('https:');
+
+export default async function fetchNode(url, options) {
+  try {
+    if (isDataURL(url)) {
+      const {
+        arrayBuffer,
+        mimeType
+      } = decodeDataUri(url);
+      const response = new Response(arrayBuffer, {
+        headers: {
+          'content-type': mimeType
+        },
+        url
+      });
+      return response;
     }
-
-
-
+
+    const syntheticResponseHeaders = {};
+    const originalUrl = url;
+
+    if (url.endsWith('.gz')) {
+      url = url.slice(0, -3);
+      syntheticResponseHeaders['content-encoding'] = 'gzip';
     }
+
+    const body = await createReadStream(originalUrl, options);
+    const headers = getHeaders(url, body, syntheticResponseHeaders);
+    const {
+      status,
+      statusText
+    } = getStatus(body);
+    const followRedirect = !options || options.followRedirect || options.followRedirect === undefined;
+
+    if (status >= 300 && status < 400 && headers.has('location') && followRedirect) {
+      const redirectUrl = generateRedirectUrl(url, headers.get('location'));
+      return await fetchNode(redirectUrl, options);
+    }
+
+    return new Response(body, {
+      headers,
+      status,
+      statusText,
+      url
+    });
+  } catch (error) {
+    return new Response(null, {
+      status: 400,
+      statusText: String(error),
+      url
+    });
+  }
 }
-
-/**
- * Generate redirect url from location without origin and protocol.
- * @param originalUrl
- * @param redirectUrl
- */
+
 function generateRedirectUrl(originalUrl, location) {
-
-
-
-
-
-
-
+  if (location.startsWith('http')) {
+    return location;
+  }
+
+  const url = new URL(originalUrl);
+  url.pathname = location;
+  return url.href;
 }
-
-// PRIVATE
+
 function getStatus(httpResponse) {
-
-
-
-
+  if (httpResponse.statusCode) {
+    return {
+      status: httpResponse.statusCode,
+      statusText: httpResponse.statusMessage || 'NA'
+    };
+  }
+
+  return {
+    status: 200,
+    statusText: 'OK'
+  };
 }
+
 function getHeaders(url, httpResponse, additionalHeaders = {}) {
-
-
-
-
-
-
-
+  const headers = {};
+
+  if (httpResponse && httpResponse.headers) {
+    const httpHeaders = httpResponse.headers;
+
+    for (const key in httpHeaders) {
+      const header = httpHeaders[key];
+      headers[key.toLowerCase()] = String(header);
    }
-
-
-
-
-
-
+  }
+
+  if (!headers['content-length']) {
+    const contentLength = getContentLength(url);
+
+    if (Number.isFinite(contentLength)) {
+      headers['content-length'] = contentLength;
    }
-
-
+  }
+
+  Object.assign(headers, additionalHeaders);
+  return new Headers(headers);
 }
+
 function getContentLength(url) {
-
-// Needs to be read from actual headers
-return null;
-}
-else if (isDataURL(url)) {
-// TODO - remove media type etc
-return url.length - 'data:'.length;
-}
-// File URL
-// TODO - how to handle non-existing file, this presumably just throws
-try {
-// strip query params from URL
-const noqueryUrl = url.split('?')[0];
-const stats = fs_1.default.statSync(noqueryUrl);
-return stats.size;
-}
-catch (error) {
-// ignore for now
-}
+  if (isRequestURL(url)) {
     return null;
+  } else if (isDataURL(url)) {
+    return url.length - 'data:'.length;
+  }
+
+  try {
+    const noqueryUrl = url.split('?')[0];
+    const stats = fs.statSync(noqueryUrl);
+    return stats.size;
+  } catch (error) {}
+
+  return null;
 }
+//# sourceMappingURL=fetch.node.js.map

package/dist/node/fetch/fetch.node.js.map (new file)
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/node/fetch/fetch.node.ts"],"names":["fs","Response","Headers","decodeDataUri","createReadStream","isDataURL","url","startsWith","isRequestURL","fetchNode","options","arrayBuffer","mimeType","response","headers","syntheticResponseHeaders","originalUrl","endsWith","slice","body","getHeaders","status","statusText","getStatus","followRedirect","undefined","has","redirectUrl","generateRedirectUrl","get","error","String","location","URL","pathname","href","httpResponse","statusCode","statusMessage","additionalHeaders","httpHeaders","key","header","toLowerCase","contentLength","getContentLength","Number","isFinite","Object","assign","length","noqueryUrl","split","stats","statSync","size"],"mappings":"AAAA,OAAOA,EAAP,MAAe,IAAf;AACA,OAAOC,QAAP,MAAqB,iBAArB;AACA,OAAOC,OAAP,MAAoB,gBAApB;AAEA,SAAQC,aAAR,QAA4B,8BAA5B;AACA,SAAQC,gBAAR,QAA+B,2BAA/B;;AAEA,MAAMC,SAAS,GAAIC,GAAD,IAASA,GAAG,CAACC,UAAJ,CAAe,OAAf,CAA3B;;AACA,MAAMC,YAAY,GAAIF,GAAD,IAASA,GAAG,CAACC,UAAJ,CAAe,OAAf,KAA2BD,GAAG,CAACC,UAAJ,CAAe,QAAf,CAAzD;;AAOA,eAAe,eAAeE,SAAf,CAAyBH,GAAzB,EAA8BI,OAA9B,EAAuC;AACpD,MAAI;AAGF,QAAIL,SAAS,CAACC,GAAD,CAAb,EAAoB;AAClB,YAAM;AAACK,QAAAA,WAAD;AAAcC,QAAAA;AAAd,UAA0BT,aAAa,CAACG,GAAD,CAA7C;AACA,YAAMO,QAAQ,GAAG,IAAIZ,QAAJ,CAAaU,WAAb,EAA0B;AACzCG,QAAAA,OAAO,EAAE;AAAC,0BAAgBF;AAAjB,SADgC;AAEzCN,QAAAA;AAFyC,OAA1B,CAAjB;AAIA,aAAOO,QAAP;AACD;;AAGD,UAAME,wBAAwB,GAAG,EAAjC;AACA,UAAMC,WAAW,GAAGV,GAApB;;AACA,QAAIA,GAAG,CAACW,QAAJ,CAAa,KAAb,CAAJ,EAAyB;AACvBX,MAAAA,GAAG,GAAGA,GAAG,CAACY,KAAJ,CAAU,CAAV,EAAa,CAAC,CAAd,CAAN;AACAH,MAAAA,wBAAwB,CAAC,kBAAD,CAAxB,GAA+C,MAA/C;AACD;;AAGD,UAAMI,IAAI,GAAG,MAAMf,gBAAgB,CAACY,WAAD,EAAcN,OAAd,CAAnC;AACA,UAAMI,OAAO,GAAGM,UAAU,CAACd,GAAD,EAAMa,IAAN,EAAYJ,wBAAZ,CAA1B;AACA,UAAM;AAACM,MAAAA,MAAD;AAASC,MAAAA;AAAT,QAAuBC,SAAS,CAACJ,IAAD,CAAtC;AAEA,UAAMK,cAAc,GAClB,CAACd,OAAD,IAAYA,OAAO,CAACc,cAApB,IAAsCd,OAAO,CAACc,cAAR,KAA2BC,SADnE;;AAGA,QAAIJ,MAAM,IAAI,GAAV,IAAiBA,MAAM,GAAG,GAA1B,IAAiCP,OAAO,CAACY,GAAR,CAAY,UAAZ,CAAjC,IAA4DF,cAAhE,EAAgF;AAC9E,YAAMG,WAAW,GAAGC,mBAAmB,CAACtB,GAAD,EAAMQ,OAAO,CAACe,GAAR,CAAY,UAAZ,CAAN,CAAvC;AAGA,aAAO,MAAMpB,SAAS,CAACkB,WAAD,EAAcjB,OAAd,CAAtB;AACD;;AACD,WAAO,IAAIT,QAAJ,CAAakB,IAAb,EAAmB;AAACL,MAAAA,OAAD;AAAUO,MAAAA,MAAV;AAAkBC,MAAAA,UAAlB;AAA8BhB,MAAAA;AAA9B,KAAnB,CAAP;AACD,GAnCD,CAmCE,OAAOwB,KAAP,EAAc;AAEd,WAAO,IAAI7B,QAAJ,CAAa,IAAb,EAAmB;AAACoB,MAAAA,MAAM,EAAE,GAAT;AAAcC,MAAAA,UAAU,EAAES,MAAM,CAACD,KAAD,CAAhC;AAAyCxB,MAAAA;AAAzC,KAAnB,CAAP;AACD;AACF;;AAOD,SAASsB,mBAAT,CAA6BZ,WAA7B,EAAkDgB,QAAlD,EAA4E;AAC1E,MAAIA,QAAQ,CAACzB,UAAT,CAAoB,MAApB,CAAJ,EAAiC;AAC/B,WAAOyB,QAAP;AACD;;AAED,QAAM1B,GAAG,GAAG,IAAI2B,GAAJ,CAAQjB,WAAR,CAAZ;AACAV,EAAAA,GAAG,CAAC4B,QAAJ,GAAeF,QAAf;AAEA,SAAO1B,GAAG,CAAC6B,IAAX;AACD;;AAKD,SAASZ,SAAT,CAAmBa,YAAnB,EAAiC;AAC/B,MAAIA,YAAY,CAACC,UAAjB,EAA6B;AAC3B,WAAO;AAAChB,MAAAA,MAAM,EAAEe,YAAY,CAACC,UAAtB;AAAkCf,MAAAA,UAAU,EAAEc,YAAY,CAACE,aAAb,IAA8B;AAA5E,KAAP;AACD;;AACD,SAAO;AAACjB,IAAAA,MAAM,EAAE,GAAT;AAAcC,IAAAA,UAAU,EAAE;AAA1B,GAAP;AACD;;AAED,SAASF,UAAT,CAAoBd,GAApB,EAAyB8B,YAAzB,EAAuCG,iBAAiB,GAAG,EAA3D,EAA+D;AAC7D,QAAMzB,OAAO,GAAG,EAAhB;;AAEA,MAAIsB,YAAY,IAAIA,YAAY,CAACtB,OAAjC,EAA0C;AACxC,UAAM0B,WAAW,GAAGJ,YAAY,CAACtB,OAAjC;;AACA,SAAK,MAAM2B,GAAX,IAAkBD,WAAlB,EAA+B;AAC7B,YAAME,MAAM,GAAGF,WAAW,CAACC,GAAD,CAA1B;AACA3B,MAAAA,OAAO,CAAC2B,GAAG,CAACE,WAAJ,EAAD,CAAP,GAA6BZ,MAAM,CAACW,MAAD,CAAnC;AACD;AACF;;AAGD,MAAI,CAAC5B,OAAO,CAAC,gBAAD,CAAZ,EAAgC;AAC9B,UAAM8B,aAAa,GAAGC,gBAAgB,CAACvC,GAAD,CAAtC;;AACA,QAAIwC,MAAM,CAACC,QAAP,CAAgBH,aAAhB,CAAJ,EAAoC;AAClC9B,MAAAA,OAAO,CAAC,gBAAD,CAAP,GAA4B8B,aAA5B;AACD;AACF;;AA
EDI,EAAAA,MAAM,CAACC,MAAP,CAAcnC,OAAd,EAAuByB,iBAAvB;AAEA,SAAO,IAAIrC,OAAJ,CAAYY,OAAZ,CAAP;AACD;;AAED,SAAS+B,gBAAT,CAA0BvC,GAA1B,EAA+B;AAC7B,MAAIE,YAAY,CAACF,GAAD,CAAhB,EAAuB;AAErB,WAAO,IAAP;AACD,GAHD,MAGO,IAAID,SAAS,CAACC,GAAD,CAAb,EAAoB;AAEzB,WAAOA,GAAG,CAAC4C,MAAJ,GAAa,QAAQA,MAA5B;AACD;;AAGD,MAAI;AAEF,UAAMC,UAAU,GAAG7C,GAAG,CAAC8C,KAAJ,CAAU,GAAV,EAAe,CAAf,CAAnB;AACA,UAAMC,KAAK,GAAGrD,EAAE,CAACsD,QAAH,CAAYH,UAAZ,CAAd;AACA,WAAOE,KAAK,CAACE,IAAb;AACD,GALD,CAKE,OAAOzB,KAAP,EAAc,CAEf;;AAED,SAAO,IAAP;AACD","sourcesContent":["import fs from 'fs'; // `fs` will be empty object in browsers (see package.json \"browser\" field).\nimport Response from './response.node';\nimport Headers from './headers.node';\n\nimport {decodeDataUri} from './utils/decode-data-uri.node';\nimport {createReadStream} from './utils/stream-utils.node';\n\nconst isDataURL = (url) => url.startsWith('data:');\nconst isRequestURL = (url) => url.startsWith('http:') || url.startsWith('https:');\n\n/**\n * Emulation of Browser fetch for Node.js\n * @param url\n * @param options\n */\nexport default async function fetchNode(url, options) {\n try {\n // Handle data urls in node, to match `fetch``\n // Note - this loses the MIME type, data URIs are handled directly in fetch\n if (isDataURL(url)) {\n const {arrayBuffer, mimeType} = decodeDataUri(url);\n const response = new Response(arrayBuffer, {\n headers: {'content-type': mimeType},\n url\n });\n return response;\n }\n\n // Automatically decompress gzipped files with .gz extension\n const syntheticResponseHeaders = {};\n const originalUrl = url;\n if (url.endsWith('.gz')) {\n url = url.slice(0, -3);\n syntheticResponseHeaders['content-encoding'] = 'gzip';\n }\n\n // Need to create the stream in advance since Response constructor needs to be sync\n const body = await createReadStream(originalUrl, options);\n const headers = getHeaders(url, body, syntheticResponseHeaders);\n const {status, statusText} = getStatus(body);\n\n const followRedirect =\n !options || options.followRedirect || options.followRedirect === undefined;\n\n if (status >= 300 && status < 400 && headers.has('location') && followRedirect) {\n const redirectUrl = generateRedirectUrl(url, headers.get('location'));\n\n // Redirect\n return await fetchNode(redirectUrl, options);\n }\n return new Response(body, {headers, status, statusText, url});\n } catch (error) {\n // TODO - what error code to use here?\n return new Response(null, {status: 400, statusText: String(error), url});\n }\n}\n\n/**\n * Generate redirect url from location without origin and protocol.\n * @param originalUrl\n * @param redirectUrl\n */\nfunction generateRedirectUrl(originalUrl: string, location: string): string {\n if (location.startsWith('http')) {\n return location;\n }\n // If url doesn't have origin and protocol just extend current url origin with location.\n const url = new URL(originalUrl);\n url.pathname = location;\n\n return url.href;\n}\n\n// HELPER FUNCTIONS\n// PRIVATE\n\nfunction getStatus(httpResponse) {\n if (httpResponse.statusCode) {\n return {status: httpResponse.statusCode, statusText: httpResponse.statusMessage || 'NA'};\n }\n return {status: 200, statusText: 'OK'};\n}\n\nfunction getHeaders(url, httpResponse, additionalHeaders = {}) {\n const headers = {};\n\n if (httpResponse && httpResponse.headers) {\n const httpHeaders = httpResponse.headers;\n for (const key in httpHeaders) {\n const header = httpHeaders[key];\n headers[key.toLowerCase()] = String(header);\n }\n }\n\n // Fix up content length if we can for best 
progress experience\n if (!headers['content-length']) {\n const contentLength = getContentLength(url);\n if (Number.isFinite(contentLength)) {\n headers['content-length'] = contentLength;\n }\n }\n\n Object.assign(headers, additionalHeaders);\n\n return new Headers(headers);\n}\n\nfunction getContentLength(url) {\n if (isRequestURL(url)) {\n // Needs to be read from actual headers\n return null;\n } else if (isDataURL(url)) {\n // TODO - remove media type etc\n return url.length - 'data:'.length;\n }\n // File URL\n // TODO - how to handle non-existing file, this presumably just throws\n try {\n // strip query params from URL\n const noqueryUrl = url.split('?')[0];\n const stats = fs.statSync(noqueryUrl);\n return stats.size;\n } catch (error) {\n // ignore for now\n }\n\n return null;\n}\n"],"file":"fetch.node.js"}
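
A hedged usage sketch for the rewritten fetch polyfill above. The import path is illustrative, and the Response/Headers surfaces (.headers, .get()) are assumed to mirror the browser API via the sibling response.node and headers.node polyfills.

import fetchNode from './fetch.node.js'; // illustrative path

// data: URLs never touch the file system; decodeDataUri() supplies the bytes
// and mime type for a synthetic Response.
const fromDataUrl = await fetchNode('data:text/plain;base64,aGVsbG8=');

// A '.gz' path is streamed as-is while a synthetic 'content-encoding: gzip'
// header is attached; 3xx statuses with a 'location' header are re-fetched
// unless options.followRedirect is explicitly false. Errors are converted
// into a Response with status 400 rather than a rejected promise.
const fromFile = await fetchNode('/tmp/tile.json.gz', {followRedirect: false});
console.log(fromFile.headers.get('content-encoding')); // 'gzip' (synthetic header)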

package/dist/node/fetch/headers.node.js
@@ -1,105 +1,125 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-}
-}
-append(name, value) {
-name = normalizeName(name);
-value = normalizeValue(value);
-const oldValue = this.map[name];
-this.map[name] = oldValue ? `${oldValue}, ${value}` : value;
-}
-delete(name) {
-delete this.map[normalizeName(name)];
+import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
+
+let _Symbol$iterator;
+
+_Symbol$iterator = Symbol.iterator;
+export default class Headers {
+  constructor(headers) {
+    _defineProperty(this, "map", void 0);
+
+    this.map = {};
+
+    if (headers instanceof Headers) {
+      headers.forEach((value, name) => this.append(name, value));
+    } else if (Array.isArray(headers)) {
+      headers.forEach(header => this.append(header[0], header[1]));
+    } else if (headers) {
+      Object.getOwnPropertyNames(headers).forEach(name => this.append(name, headers[name]));
    }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  }
+
+  append(name, value) {
+    name = normalizeName(name);
+    value = normalizeValue(value);
+    const oldValue = this.map[name];
+    this.map[name] = oldValue ? "".concat(oldValue, ", ").concat(value) : value;
+  }
+
+  delete(name) {
+    delete this.map[normalizeName(name)];
+  }
+
+  get(name) {
+    name = normalizeName(name);
+    return this.has(name) ? this.map[name] : null;
+  }
+
+  has(name) {
+    return this.map.hasOwnProperty(normalizeName(name));
+  }
+
+  set(name, value) {
+    this.map[normalizeName(name)] = normalizeValue(value);
+  }
+
+  forEach(visitor, thisArg = null) {
+    for (const name in this.map) {
+      if (this.map.hasOwnProperty(name)) {
+        if (thisArg) {
+          visitor.call(thisArg, this.map[name], name, this);
+        } else {
+          visitor(this.map[name], name, this);
        }
+      }
    }
-
-
-
-
-
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  }
+
+  keys() {
+    const items = [];
+    this.forEach(function (value, name) {
+      items.push(name);
+    });
+    return iteratorFor(items);
+  }
+
+  values() {
+    const items = [];
+    this.forEach(function (value) {
+      items.push(value);
+    });
+    return iteratorFor(items);
+  }
+
+  entries() {
+    const items = [];
+    this.forEach(function (value, name) {
+      items.push([name, value]);
+    });
+    return iteratorFor(items);
+  }
+
+  *[_Symbol$iterator]() {
+    yield* this.entries();
+  }
+
 }
-
+
 function normalizeName(name) {
-
-
-
-
-
-
-
+  if (typeof name !== 'string') {
+    name = String(name);
+  }
+
+  if (/[^a-z0-9\-#$%&'*+.^_`|~]/i.test(name) || name === '') {
+    throw new TypeError('Invalid character in header field name');
+  }
+
+  return name.toLowerCase();
 }
+
 function normalizeValue(value) {
-
-
-
-
+  if (typeof value !== 'string') {
+    value = String(value);
+  }
+
+  return value;
 }
-
+
 function iteratorFor(items) {
-
-
-
-
-
-
-
-
-
+  const iterator = {
+    next() {
+      const value = items.shift();
+      return {
+        done: value === undefined,
+        value
+      };
+    }
+
+  };
+
+  iterator[Symbol.iterator] = function () {
    return iterator;
+  };
+
+  return iterator;
 }
+//# sourceMappingURL=headers.node.js.map

package/dist/node/fetch/headers.node.js.map (new file)
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/node/fetch/headers.node.ts"],"names":["Symbol","iterator","Headers","constructor","headers","map","forEach","value","name","append","Array","isArray","header","Object","getOwnPropertyNames","normalizeName","normalizeValue","oldValue","delete","get","has","hasOwnProperty","set","visitor","thisArg","call","keys","items","push","iteratorFor","values","entries","String","test","TypeError","toLowerCase","next","shift","done","undefined"],"mappings":";;;;mBA+EIA,MAAM,CAACC,Q;AA3EX,eAAe,MAAMC,OAAN,CAAc;AAG3BC,EAAAA,WAAW,CAACC,OAAD,EAAU;AAAA;;AACnB,SAAKC,GAAL,GAAW,EAAX;;AAEA,QAAID,OAAO,YAAYF,OAAvB,EAAgC;AAC9BE,MAAAA,OAAO,CAACE,OAAR,CAAgB,CAACC,KAAD,EAAQC,IAAR,KAAiB,KAAKC,MAAL,CAAYD,IAAZ,EAAkBD,KAAlB,CAAjC;AACD,KAFD,MAEO,IAAIG,KAAK,CAACC,OAAN,CAAcP,OAAd,CAAJ,EAA4B;AACjCA,MAAAA,OAAO,CAACE,OAAR,CAAiBM,MAAD,IAAY,KAAKH,MAAL,CAAYG,MAAM,CAAC,CAAD,CAAlB,EAAuBA,MAAM,CAAC,CAAD,CAA7B,CAA5B;AACD,KAFM,MAEA,IAAIR,OAAJ,EAAa;AAClBS,MAAAA,MAAM,CAACC,mBAAP,CAA2BV,OAA3B,EAAoCE,OAApC,CAA6CE,IAAD,IAAU,KAAKC,MAAL,CAAYD,IAAZ,EAAkBJ,OAAO,CAACI,IAAD,CAAzB,CAAtD;AACD;AACF;;AAEDC,EAAAA,MAAM,CAACD,IAAD,EAAOD,KAAP,EAAc;AAClBC,IAAAA,IAAI,GAAGO,aAAa,CAACP,IAAD,CAApB;AACAD,IAAAA,KAAK,GAAGS,cAAc,CAACT,KAAD,CAAtB;AACA,UAAMU,QAAQ,GAAG,KAAKZ,GAAL,CAASG,IAAT,CAAjB;AACA,SAAKH,GAAL,CAASG,IAAT,IAAiBS,QAAQ,aAAMA,QAAN,eAAmBV,KAAnB,IAA6BA,KAAtD;AACD;;AAEDW,EAAAA,MAAM,CAACV,IAAD,EAAO;AACX,WAAO,KAAKH,GAAL,CAASU,aAAa,CAACP,IAAD,CAAtB,CAAP;AACD;;AAEDW,EAAAA,GAAG,CAACX,IAAD,EAAO;AACRA,IAAAA,IAAI,GAAGO,aAAa,CAACP,IAAD,CAApB;AACA,WAAO,KAAKY,GAAL,CAASZ,IAAT,IAAiB,KAAKH,GAAL,CAASG,IAAT,CAAjB,GAAkC,IAAzC;AACD;;AAEDY,EAAAA,GAAG,CAACZ,IAAD,EAAO;AACR,WAAO,KAAKH,GAAL,CAASgB,cAAT,CAAwBN,aAAa,CAACP,IAAD,CAArC,CAAP;AACD;;AAEDc,EAAAA,GAAG,CAACd,IAAD,EAAOD,KAAP,EAAc;AACf,SAAKF,GAAL,CAASU,aAAa,CAACP,IAAD,CAAtB,IAAgCQ,cAAc,CAACT,KAAD,CAA9C;AACD;;AAEDD,EAAAA,OAAO,CAACiB,OAAD,EAAUC,OAAO,GAAG,IAApB,EAA0B;AAC/B,SAAK,MAAMhB,IAAX,IAAmB,KAAKH,GAAxB,EAA6B;AAC3B,UAAI,KAAKA,GAAL,CAASgB,cAAT,CAAwBb,IAAxB,CAAJ,EAAmC;AACjC,YAAIgB,OAAJ,EAAa;AACXD,UAAAA,OAAO,CAACE,IAAR,CAAaD,OAAb,EAAsB,KAAKnB,GAAL,CAASG,IAAT,CAAtB,EAAsCA,IAAtC,EAA4C,IAA5C;AACD,SAFD,MAEO;AACLe,UAAAA,OAAO,CAAC,KAAKlB,GAAL,CAASG,IAAT,CAAD,EAAiBA,IAAjB,EAAuB,IAAvB,CAAP;AACD;AACF;AACF;AACF;;AAEDkB,EAAAA,IAAI,GAAG;AACL,UAAMC,KAAY,GAAG,EAArB;AACA,SAAKrB,OAAL,CAAa,UAAUC,KAAV,EAAiBC,IAAjB,EAAuB;AAClCmB,MAAAA,KAAK,CAACC,IAAN,CAAWpB,IAAX;AACD,KAFD;AAGA,WAAOqB,WAAW,CAACF,KAAD,CAAlB;AACD;;AAEDG,EAAAA,MAAM,GAAG;AACP,UAAMH,KAAY,GAAG,EAArB;AACA,SAAKrB,OAAL,CAAa,UAAUC,KAAV,EAAiB;AAC5BoB,MAAAA,KAAK,CAACC,IAAN,CAAWrB,KAAX;AACD,KAFD;AAGA,WAAOsB,WAAW,CAACF,KAAD,CAAlB;AACD;;AAEDI,EAAAA,OAAO,GAAG;AACR,UAAMJ,KAAY,GAAG,EAArB;AACA,SAAKrB,OAAL,CAAa,UAAUC,KAAV,EAAiBC,IAAjB,EAAuB;AAClCmB,MAAAA,KAAK,CAACC,IAAN,CAAW,CAACpB,IAAD,EAAOD,KAAP,CAAX;AACD,KAFD;AAGA,WAAOsB,WAAW,CAACF,KAAD,CAAlB;AACD;;AAED,wBAAqB;AAEnB,WAAO,KAAKI,OAAL,EAAP;AACD;;AA9E0B;;AAiF7B,SAAShB,aAAT,CAAuBP,IAAvB,EAA6B;AAC3B,MAAI,OAAOA,IAAP,KAAgB,QAApB,EAA8B;AAC5BA,IAAAA,IAAI,GAAGwB,MAAM,CAACxB,IAAD,CAAb;AACD;;AACD,MAAI,4BAA4ByB,IAA5B,CAAiCzB,IAAjC,KAA0CA,IAAI,KAAK,EAAvD,EAA2D;AACzD,UAAM,IAAI0B,SAAJ,CAAc,wCAAd,CAAN;AACD;;AACD,SAAO1B,IAAI,CAAC2B,WAAL,EAAP;AACD;;AAED,SAASnB,cAAT,CAAwBT,KAAxB,EAA+B;AAC7B,MAAI,OAAOA,KAAP,KAAiB,QAArB,EAA+B;AAC7BA,IAAAA,KAAK,GAAGyB,MAAM,CAACzB,KAAD,CAAd;AACD;;AACD,SAAOA,KAAP;AACD;;AAGD,SAASsB,WAAT,CAAqBF,KAArB,EAA4B;AAC1B,QAAM1B,QAAQ,GAAG;AACfmC,IAAAA,IAAI,GAAG;AACL,YAAM7B,KAAK,GAAGoB,KAAK,CAACU,KAAN,EAAd;AACA,aAAO;AAACC,QAAAA,IAAI,EAAE/B,KAAK,KAAKgC,SAAjB;AAA4BhC,QAAAA;AAA5B,OAAP;AACD;;AAJc,GAAjB;;AAOAN,EAAAA,Q
AAQ,CAACD,MAAM,CAACC,QAAR,CAAR,GAA4B,YAAY;AACtC,WAAOA,QAAP;AACD,GAFD;;AAIA,SAAOA,QAAP;AACD","sourcesContent":["/**\n * Polyfill for Browser Headers\n * Based on https://github.com/github/fetch under MIT license\n */\nexport default class Headers {\n map: {};\n\n constructor(headers) {\n this.map = {};\n\n if (headers instanceof Headers) {\n headers.forEach((value, name) => this.append(name, value));\n } else if (Array.isArray(headers)) {\n headers.forEach((header) => this.append(header[0], header[1]));\n } else if (headers) {\n Object.getOwnPropertyNames(headers).forEach((name) => this.append(name, headers[name]));\n }\n }\n\n append(name, value) {\n name = normalizeName(name);\n value = normalizeValue(value);\n const oldValue = this.map[name];\n this.map[name] = oldValue ? `${oldValue}, ${value}` : value;\n }\n\n delete(name) {\n delete this.map[normalizeName(name)];\n }\n\n get(name) {\n name = normalizeName(name);\n return this.has(name) ? this.map[name] : null;\n }\n\n has(name) {\n return this.map.hasOwnProperty(normalizeName(name));\n }\n\n set(name, value) {\n this.map[normalizeName(name)] = normalizeValue(value);\n }\n\n forEach(visitor, thisArg = null) {\n for (const name in this.map) {\n if (this.map.hasOwnProperty(name)) {\n if (thisArg) {\n visitor.call(thisArg, this.map[name], name, this);\n } else {\n visitor(this.map[name], name, this);\n }\n }\n }\n }\n\n keys() {\n const items: any[] = [];\n this.forEach(function (value, name) {\n items.push(name);\n });\n return iteratorFor(items);\n }\n\n values() {\n const items: any[] = [];\n this.forEach(function (value) {\n items.push(value);\n });\n return iteratorFor(items);\n }\n\n entries() {\n const items: any[] = [];\n this.forEach(function (value, name) {\n items.push([name, value]);\n });\n return iteratorFor(items);\n }\n\n *[Symbol.iterator]() {\n // @ts-ignore must have a '[Symbol.iterator]()' method that returns an iterator.\n yield* this.entries();\n }\n}\n\nfunction normalizeName(name) {\n if (typeof name !== 'string') {\n name = String(name);\n }\n if (/[^a-z0-9\\-#$%&'*+.^_`|~]/i.test(name) || name === '') {\n throw new TypeError('Invalid character in header field name');\n }\n return name.toLowerCase();\n}\n\nfunction normalizeValue(value) {\n if (typeof value !== 'string') {\n value = String(value);\n }\n return value;\n}\n\n// Build a destructive iterator for the value list\nfunction iteratorFor(items) {\n const iterator = {\n next() {\n const value = items.shift();\n return {done: value === undefined, value};\n }\n };\n\n iterator[Symbol.iterator] = function () {\n return iterator;\n };\n\n return iterator;\n}\n"],"file":"headers.node.js"}
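
A usage sketch for the Headers polyfill above (illustrative import path). Construction accepts another Headers instance, an array of [name, value] pairs, or a plain object; names are lower-cased by normalizeName(), append() joins repeated values with ', ', and the generator [Symbol.iterator]() makes instances work with for...of.

import Headers from './headers.node.js'; // illustrative path

const headers = new Headers({'Content-Type': 'application/json'});
headers.append('Accept', 'text/html');
headers.append('accept', 'application/xhtml+xml');

console.log(headers.get('content-type')); // 'application/json'
console.log(headers.get('ACCEPT'));       // 'text/html, application/xhtml+xml'

for (const [name, value] of headers) {
  console.log(name, value);
}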