@loaders.gl/polyfills 3.3.0-alpha.13 → 3.3.0-alpha.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/dist/dist.min.js +64 -15
  2. package/dist/es5/index.js +17 -11
  3. package/dist/es5/index.js.map +1 -1
  4. package/dist/es5/node/fetch/fetch-file.node.js +88 -0
  5. package/dist/es5/node/fetch/fetch-file.node.js.map +1 -0
  6. package/dist/es5/node/fetch/fetch.node.js +90 -35
  7. package/dist/es5/node/fetch/fetch.node.js.map +1 -1
  8. package/dist/es5/node/fetch/headers.node.js +2 -2
  9. package/dist/es5/node/fetch/headers.node.js.map +1 -1
  10. package/dist/es5/node/fetch/response.node.js +5 -4
  11. package/dist/es5/node/fetch/response.node.js.map +1 -1
  12. package/dist/es5/node/fetch/utils/stream-utils.node.js +9 -93
  13. package/dist/es5/node/fetch/utils/stream-utils.node.js.map +1 -1
  14. package/dist/esm/index.js +5 -4
  15. package/dist/esm/index.js.map +1 -1
  16. package/dist/esm/node/fetch/fetch-file.node.js +54 -0
  17. package/dist/esm/node/fetch/fetch-file.node.js.map +1 -0
  18. package/dist/esm/node/fetch/fetch.node.js +43 -18
  19. package/dist/esm/node/fetch/fetch.node.js.map +1 -1
  20. package/dist/esm/node/fetch/headers.node.js +1 -1
  21. package/dist/esm/node/fetch/headers.node.js.map +1 -1
  22. package/dist/esm/node/fetch/response.node.js +3 -2
  23. package/dist/esm/node/fetch/response.node.js.map +1 -1
  24. package/dist/esm/node/fetch/utils/stream-utils.node.js +3 -46
  25. package/dist/esm/node/fetch/utils/stream-utils.node.js.map +1 -1
  26. package/dist/index.d.ts +2 -1
  27. package/dist/index.d.ts.map +1 -1
  28. package/dist/index.js +13 -14
  29. package/dist/node/fetch/fetch-file.node.d.ts +4 -0
  30. package/dist/node/fetch/fetch-file.node.d.ts.map +1 -0
  31. package/dist/node/fetch/fetch-file.node.js +51 -0
  32. package/dist/node/fetch/fetch.node.d.ts +6 -1
  33. package/dist/node/fetch/fetch.node.d.ts.map +1 -1
  34. package/dist/node/fetch/fetch.node.js +57 -31
  35. package/dist/node/fetch/headers.node.d.ts +1 -1
  36. package/dist/node/fetch/headers.node.d.ts.map +1 -1
  37. package/dist/node/fetch/headers.node.js +2 -1
  38. package/dist/node/fetch/response.node.d.ts +2 -2
  39. package/dist/node/fetch/response.node.d.ts.map +1 -1
  40. package/dist/node/fetch/response.node.js +5 -6
  41. package/dist/node/fetch/utils/stream-utils.node.d.ts +8 -1
  42. package/dist/node/fetch/utils/stream-utils.node.d.ts.map +1 -1
  43. package/dist/node/fetch/utils/stream-utils.node.js +10 -54
  44. package/package.json +2 -2
  45. package/src/index.ts +5 -4
  46. package/src/node/fetch/fetch-file.node.ts +51 -0
  47. package/src/node/fetch/fetch.node.ts +64 -30
  48. package/src/node/fetch/headers.node.ts +1 -1
  49. package/src/node/fetch/response.node.ts +4 -2
  50. package/src/node/fetch/utils/stream-utils.node.ts +10 -59
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@loaders.gl/polyfills",
3
- "version": "3.3.0-alpha.13",
3
+ "version": "3.3.0-alpha.14",
4
4
  "description": "Polyfills for TextEncoder/TextDecoder",
5
5
  "license": "MIT",
6
6
  "publishConfig": {
@@ -106,5 +106,5 @@
106
106
  "through": "^2.3.8",
107
107
  "web-streams-polyfill": "^3.0.0"
108
108
  },
109
- "gitHead": "429a5667c9903cc2de4d1f3e55e1c2f2e58a3ff6"
109
+ "gitHead": "cc91201ca3c0581a5c9edf7a8bc0fc230212bf3d"
110
110
  }
package/src/index.ts CHANGED
@@ -8,9 +8,9 @@ import {allSettled} from './promise/all-settled';
8
8
  // Node specific
9
9
  import * as base64 from './node/buffer/btoa.node';
10
10
 
11
- import HeadersNode from './node/fetch/headers.node';
12
- import ResponseNode from './node/fetch/response.node';
13
- import fetchNode from './node/fetch/fetch.node';
11
+ import {Headers as HeadersNode} from './node/fetch/headers.node';
12
+ import {Response as ResponseNode} from './node/fetch/response.node';
13
+ import {fetchNode as fetchNode} from './node/fetch/fetch.node';
14
14
 
15
15
  import {encodeImageNode} from './node/images/encode-image.node';
16
16
  import {parseImageNode} from './node/images/parse-image.node';
@@ -20,7 +20,8 @@ export {BlobPolyfill} from './node/file/blob';
20
20
  export {FileReaderPolyfill} from './node/file/file-reader';
21
21
  export {FilePolyfill} from './node/file/file';
22
22
  export {installFilePolyfills} from './node/file/install-file-polyfills';
23
- export {default as _fetchNode} from './node/fetch/fetch.node';
23
+ export {fetchNode as _fetchNode} from './node/fetch/fetch.node';
24
+ export {fetchFileNode as _fetchFileNode} from './node/fetch/fetch-file.node';
24
25
 
25
26
  // POLYFILLS: TextEncoder, TextDecoder
26
27
  // - Recent Node versions have these classes but virtually no encodings unless special build.
@@ -0,0 +1,51 @@
1
+ // loaders.gl, MIT license
2
+
3
+ import fs from 'fs'; // `fs` will be empty object in browsers (see package.json "browser" field).
4
+ import {Response} from './response.node';
5
+ import {Headers} from './headers.node';
6
+
7
+ export function isRequestURL(url: string): boolean {
8
+ return url.startsWith('http:') || url.startsWith('https:');
9
+ }
10
+
11
+ export async function fetchFileNode(url: string, options): Promise<Response> {
12
+ const noqueryUrl = url.split('?')[0];
13
+
14
+ try {
15
+ // Now open the stream
16
+ const body = await new Promise((resolve, reject) => {
17
+ // @ts-ignore
18
+ const stream = fs.createReadStream(noqueryUrl, {encoding: null});
19
+ stream.once('readable', () => resolve(stream));
20
+ stream.on('error', (error) => reject(error));
21
+ });
22
+
23
+ const status = 200;
24
+ const statusText = 'OK';
25
+ const headers = getHeadersForFile(noqueryUrl);
26
+ return new Response(body, {headers, status, statusText, url});
27
+ } catch (error) {
28
+ const status = 400;
29
+ const statusText = (error as Error).message;
30
+ const headers = {};
31
+ return new Response((error as Error).message, {headers, status, statusText, url});
32
+ }
33
+ }
34
+
35
+ function getHeadersForFile(noqueryUrl: string): Headers {
36
+ const headers = {};
37
+
38
+ // Fix up content length if we can for best progress experience
39
+ if (!headers['content-length']) {
40
+ const stats = fs.statSync(noqueryUrl);
41
+ headers['content-length'] = stats.size;
42
+ }
43
+
44
+ // Automatically decompress gzipped files with .gz extension
45
+ if (noqueryUrl.endsWith('.gz')) {
46
+ noqueryUrl = noqueryUrl.slice(0, -3);
47
+ headers['content-encoding'] = 'gzip';
48
+ }
49
+
50
+ return new Headers(headers);
51
+ }
@@ -1,20 +1,29 @@
1
- import fs from 'fs'; // `fs` will be empty object in browsers (see package.json "browser" field).
2
- import Response from './response.node';
3
- import Headers from './headers.node';
1
+ // loaders.gl, MIT license
4
2
 
3
+ import http from 'http';
4
+ import https from 'https';
5
+ import {Response} from './response.node';
6
+ import {Headers} from './headers.node';
5
7
  import {decodeDataUri} from './utils/decode-data-uri.node';
6
- import {createReadStream} from './utils/stream-utils.node';
7
8
 
8
- const isDataURL = (url) => url.startsWith('data:');
9
- const isRequestURL = (url) => url.startsWith('http:') || url.startsWith('https:');
9
+ import {fetchFileNode} from './fetch-file.node';
10
+
11
+ const isDataURL = (url: string): boolean => url.startsWith('data:');
12
+ const isRequestURL = (url: string): boolean => url.startsWith('http:') || url.startsWith('https:');
10
13
 
11
14
  /**
12
15
  * Emulation of Browser fetch for Node.js
13
16
  * @param url
14
17
  * @param options
15
18
  */
16
- export default async function fetchNode(url, options) {
19
+ // eslint-disable-next-line complexity
20
+ export async function fetchNode(url: string, options): Promise<Response> {
17
21
  try {
22
+ // Handle file streams in node
23
+ if (!isRequestURL(url) && !isDataURL(url)) {
24
+ return await fetchFileNode(url, options);
25
+ }
26
+
18
27
  // Handle data urls in node, to match `fetch``
19
28
  // Note - this loses the MIME type, data URIs are handled directly in fetch
20
29
  if (isDataURL(url)) {
@@ -35,7 +44,7 @@ export default async function fetchNode(url, options) {
35
44
  }
36
45
 
37
46
  // Need to create the stream in advance since Response constructor needs to be sync
38
- const body = await createReadStream(originalUrl, options);
47
+ const body = await createHTTPRequestReadStream(originalUrl, options);
39
48
  const headers = getHeaders(url, body, syntheticResponseHeaders);
40
49
  const {status, statusText} = getStatus(body);
41
50
 
@@ -55,6 +64,23 @@ export default async function fetchNode(url, options) {
55
64
  }
56
65
  }
57
66
 
67
+ /** Returns a promise that resolves to a readable stream */
68
+ export async function createHTTPRequestReadStream(
69
+ url: string,
70
+ options
71
+ ): Promise<http.IncomingMessage> {
72
+ // HANDLE HTTP/HTTPS REQUESTS IN NODE
73
+ // TODO: THIS IS BAD SINCE WE RETURN A PROMISE INSTEAD OF A STREAM
74
+ return await new Promise((resolve, reject) => {
75
+ const requestOptions = getRequestOptions(url, options);
76
+ const req = url.startsWith('https:')
77
+ ? https.request(requestOptions, (res) => resolve(res))
78
+ : http.request(requestOptions, (res) => resolve(res));
79
+ req.on('error', (error) => reject(error));
80
+ req.end();
81
+ });
82
+ }
83
+
58
84
  /**
59
85
  * Generate redirect url from location without origin and protocol.
60
86
  * @param originalUrl
@@ -72,9 +98,33 @@ function generateRedirectUrl(originalUrl: string, location: string): string {
72
98
  }
73
99
 
74
100
  // HELPER FUNCTIONS
75
- // PRIVATE
76
101
 
77
- function getStatus(httpResponse) {
102
+ function getRequestOptions(url: string, options?: {fetch?: typeof fetch; headers?}) {
103
+ // Ensure header keys are lower case so that we can merge without duplicates
104
+ const originalHeaders = options?.headers || {};
105
+ const headers = {};
106
+ for (const key of Object.keys(originalHeaders)) {
107
+ headers[key.toLowerCase()] = originalHeaders[key];
108
+ }
109
+
110
+ // Add default accept-encoding to headers
111
+ headers['accept-encoding'] = headers['accept-encoding'] || 'gzip,br,deflate';
112
+
113
+ const urlObject = new URL(url);
114
+ return {
115
+ hostname: urlObject.hostname,
116
+ path: urlObject.pathname,
117
+ method: 'GET',
118
+ // Add options and user provided 'options.fetch' overrides if available
119
+ ...options,
120
+ ...options?.fetch,
121
+ // Override with updated headers with accepted encodings:
122
+ headers,
123
+ port: urlObject.port
124
+ };
125
+ }
126
+
127
+ function getStatus(httpResponse: http.IncomingMessage): {status: number; statusText: string} {
78
128
  if (httpResponse.statusCode) {
79
129
  return {status: httpResponse.statusCode, statusText: httpResponse.statusMessage || 'NA'};
80
130
  }
@@ -105,24 +155,8 @@ function getHeaders(url, httpResponse, additionalHeaders = {}) {
105
155
  return new Headers(headers);
106
156
  }
107
157
 
108
- function getContentLength(url) {
109
- if (isRequestURL(url)) {
110
- // Needs to be read from actual headers
111
- return null;
112
- } else if (isDataURL(url)) {
113
- // TODO - remove media type etc
114
- return url.length - 'data:'.length;
115
- }
116
- // File URL
117
- // TODO - how to handle non-existing file, this presumably just throws
118
- try {
119
- // strip query params from URL
120
- const noqueryUrl = url.split('?')[0];
121
- const stats = fs.statSync(noqueryUrl);
122
- return stats.size;
123
- } catch (error) {
124
- // ignore for now
125
- }
126
-
127
- return null;
158
+ /** Needs to be read from actual headers */
159
+ function getContentLength(url: string): number | null {
160
+ // TODO - remove media type etc
161
+ return isDataURL(url) ? url.length - 'data:'.length : null;
128
162
  }
@@ -2,7 +2,7 @@
2
2
  * Polyfill for Browser Headers
3
3
  * Based on https://github.com/github/fetch under MIT license
4
4
  */
5
- export default class Headers {
5
+ export class Headers {
6
6
  map: {};
7
7
 
8
8
  constructor(headers) {
@@ -1,6 +1,8 @@
1
+ // loaders.gl, MIT license
2
+
1
3
  import {assert} from '../../utils/assert';
2
4
  import {decompressReadStream, concatenateReadStream} from './utils/stream-utils.node';
3
- import Headers from './headers.node';
5
+ import {Headers} from './headers.node';
4
6
 
5
7
  const isBoolean = (x) => typeof x === 'boolean';
6
8
  const isFunction = (x) => typeof x === 'function';
@@ -20,7 +22,7 @@ const isReadableNodeStream = (x) =>
20
22
  */
21
23
  import {Readable} from 'stream';
22
24
 
23
- export default class Response {
25
+ export class Response {
24
26
  readonly ok: boolean;
25
27
  readonly status: number;
26
28
  readonly statusText: string;
@@ -1,39 +1,12 @@
1
- import fs from 'fs'; // `fs` will be empty object in browsers (see package.json "browser" field).
2
- import http from 'http';
3
- import https from 'https';
1
+ // loaders.gl, MIT license
2
+
4
3
  import zlib from 'zlib';
5
4
 
6
5
  import {toArrayBuffer} from './decode-data-uri.node';
7
6
 
8
- function isRequestURL(url: string): boolean {
9
- return url.startsWith('http:') || url.startsWith('https:');
10
- }
11
-
12
- // Returns a promise that resolves to a readable stream
13
- export async function createReadStream(url, options): Promise<any> {
14
- // Handle file streams in node
15
- if (!isRequestURL(url)) {
16
- const noqueryUrl = url.split('?')[0];
17
- // Now open the stream
18
- return await new Promise((resolve, reject) => {
19
- // @ts-ignore
20
- const stream = fs.createReadStream(noqueryUrl, {encoding: null});
21
- stream.once('readable', () => resolve(stream));
22
- stream.on('error', (error) => reject(error));
23
- });
24
- }
25
-
26
- // HANDLE HTTP/HTTPS REQUESTS IN NODE
27
- // TODO: THIS IS BAD SINCE WE RETURN A PROMISE INSTEAD OF A STREAM
28
- return await new Promise((resolve, reject) => {
29
- const requestFunction = url.startsWith('https:') ? https.request : http.request;
30
- const requestOptions = getRequestOptions(url, options);
31
- const req = requestFunction(requestOptions, (res) => resolve(res));
32
- req.on('error', (error) => reject(error));
33
- req.end();
34
- });
35
- }
36
-
7
+ /**
8
+ *
9
+ */
37
10
  export function decompressReadStream(readStream, headers) {
38
11
  switch (headers.get('content-encoding')) {
39
12
  case 'br':
@@ -48,6 +21,11 @@ export function decompressReadStream(readStream, headers) {
48
21
  }
49
22
  }
50
23
 
24
+ /**
25
+ *
26
+ * @param readStream
27
+ * @returns
28
+ */
51
29
  export async function concatenateReadStream(readStream): Promise<ArrayBuffer> {
52
30
  const arrayBufferChunks: ArrayBuffer[] = [];
53
31
 
@@ -72,33 +50,6 @@ export async function concatenateReadStream(readStream): Promise<ArrayBuffer> {
72
50
  });
73
51
  }
74
52
 
75
- // HELPERS
76
-
77
- function getRequestOptions(url: string, options?: {fetch?: typeof fetch; headers?}) {
78
- // Ensure header keys are lower case so that we can merge without duplicates
79
- const originalHeaders = options?.headers || {};
80
- const headers = {};
81
- for (const key of Object.keys(originalHeaders)) {
82
- headers[key.toLowerCase()] = originalHeaders[key];
83
- }
84
-
85
- // Add default accept-encoding to headers
86
- headers['accept-encoding'] = headers['accept-encoding'] || 'gzip,br,deflate';
87
-
88
- const urlObject = new URL(url);
89
- return {
90
- hostname: urlObject.hostname,
91
- path: urlObject.pathname,
92
- method: 'GET',
93
- // Add options and user provided 'options.fetch' overrides if available
94
- ...options,
95
- ...options?.fetch,
96
- // Override with updated headers with accepted encodings:
97
- headers,
98
- port: urlObject.port
99
- };
100
- }
101
-
102
53
  /**
103
54
  * Concatenate a sequence of ArrayBuffers
104
55
  * @return A concatenated ArrayBuffer