@loaders.gl/polyfills 3.1.0-beta.6 → 3.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. package/dist/bundle.js +5 -3994
  2. package/dist/dist.min.js +3994 -0
  3. package/dist/es5/bundle.js +1 -1
  4. package/dist/es5/bundle.js.map +1 -1
  5. package/dist/es5/index.js +11 -9
  6. package/dist/es5/index.js.map +1 -1
  7. package/dist/es5/lib/encoding.js +20 -14
  8. package/dist/es5/lib/encoding.js.map +1 -1
  9. package/dist/es5/node/buffer/to-array-buffer.node.js +1 -1
  10. package/dist/es5/node/buffer/to-array-buffer.node.js.map +1 -1
  11. package/dist/es5/node/fetch/fetch.node.js +103 -61
  12. package/dist/es5/node/fetch/fetch.node.js.map +1 -1
  13. package/dist/es5/node/fetch/headers.node.js +112 -73
  14. package/dist/es5/node/fetch/headers.node.js.map +1 -1
  15. package/dist/es5/node/fetch/response.node.js +182 -47
  16. package/dist/es5/node/fetch/response.node.js.map +1 -1
  17. package/dist/es5/node/fetch/utils/decode-data-uri.node.js +18 -12
  18. package/dist/es5/node/fetch/utils/decode-data-uri.node.js.map +1 -1
  19. package/dist/es5/node/fetch/utils/stream-utils.node.js +156 -50
  20. package/dist/es5/node/fetch/utils/stream-utils.node.js.map +1 -1
  21. package/dist/es5/node/file/blob-stream-controller.js +82 -37
  22. package/dist/es5/node/file/blob-stream-controller.js.map +1 -1
  23. package/dist/es5/node/file/blob-stream.js +67 -12
  24. package/dist/es5/node/file/blob-stream.js.map +1 -1
  25. package/dist/es5/node/file/blob.js +209 -100
  26. package/dist/es5/node/file/blob.js.map +1 -1
  27. package/dist/es5/node/file/file-reader.js +147 -40
  28. package/dist/es5/node/file/file-reader.js.map +1 -1
  29. package/dist/es5/node/file/file.js +41 -13
  30. package/dist/es5/node/file/file.js.map +1 -1
  31. package/dist/es5/node/file/readable-stream.js +26 -1
  32. package/dist/es5/node/file/readable-stream.js.map +1 -1
  33. package/dist/es5/node/images/encode-image.node.js +10 -8
  34. package/dist/es5/node/images/encode-image.node.js.map +1 -1
  35. package/dist/es5/node/images/parse-image.node.js +44 -17
  36. package/dist/es5/node/images/parse-image.node.js.map +1 -1
  37. package/dist/es5/promise/all-settled.js +7 -7
  38. package/dist/es5/promise/all-settled.js.map +1 -1
  39. package/dist/es5/utils/globals.js +8 -3
  40. package/dist/es5/utils/globals.js.map +1 -1
  41. package/dist/esm/node/fetch/utils/decode-data-uri.node.js +1 -3
  42. package/dist/esm/node/fetch/utils/decode-data-uri.node.js.map +1 -1
  43. package/dist/esm/node/fetch/utils/stream-utils.node.js +21 -12
  44. package/dist/esm/node/fetch/utils/stream-utils.node.js.map +1 -1
  45. package/dist/node/fetch/utils/decode-data-uri.node.d.ts +4 -4
  46. package/dist/node/fetch/utils/decode-data-uri.node.d.ts.map +1 -1
  47. package/dist/node/fetch/utils/decode-data-uri.node.js +4 -2
  48. package/dist/node/fetch/utils/stream-utils.node.d.ts +8 -2
  49. package/dist/node/fetch/utils/stream-utils.node.d.ts.map +1 -1
  50. package/dist/node/fetch/utils/stream-utils.node.js +31 -13
  51. package/package.json +3 -3
  52. package/src/node/fetch/utils/decode-data-uri.node.ts +7 -6
  53. package/src/node/fetch/utils/stream-utils.node.ts +38 -15
@@ -3,13 +3,15 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
3
3
  return (mod && mod.__esModule) ? mod : { "default": mod };
4
4
  };
5
5
  Object.defineProperty(exports, "__esModule", { value: true });
6
- exports.concatenateReadStream = exports.decompressReadStream = exports.createReadStream = void 0;
6
+ exports.concatenateArrayBuffers = exports.concatenateReadStream = exports.decompressReadStream = exports.createReadStream = void 0;
7
7
  const fs_1 = __importDefault(require("fs")); // `fs` will be empty object in browsers (see package.json "browser" field).
8
8
  const http_1 = __importDefault(require("http"));
9
9
  const https_1 = __importDefault(require("https"));
10
10
  const zlib_1 = __importDefault(require("zlib"));
11
11
  const decode_data_uri_node_1 = require("./decode-data-uri.node");
12
- const isRequestURL = (url) => url.startsWith('http:') || url.startsWith('https:');
12
+ function isRequestURL(url) {
13
+ return url.startsWith('http:') || url.startsWith('https:');
14
+ }
13
15
  // Returns a promise that resolves to a readable stream
14
16
  async function createReadStream(url, options) {
15
17
  // Handle file streams in node
@@ -49,7 +51,7 @@ function decompressReadStream(readStream, headers) {
49
51
  }
50
52
  exports.decompressReadStream = decompressReadStream;
51
53
  async function concatenateReadStream(readStream) {
52
- let arrayBuffer = new ArrayBuffer(0);
54
+ const arrayBufferChunks = [];
53
55
  return await new Promise((resolve, reject) => {
54
56
  readStream.on('error', (error) => reject(error));
55
57
  // Once the readable callback has been added, stream switches to "flowing mode"
@@ -59,10 +61,12 @@ async function concatenateReadStream(readStream) {
59
61
  if (typeof chunk === 'string') {
60
62
  reject(new Error('Read stream not binary'));
61
63
  }
62
- const chunkAsArrayBuffer = (0, decode_data_uri_node_1.toArrayBuffer)(chunk);
63
- arrayBuffer = concatenateArrayBuffers(arrayBuffer, chunkAsArrayBuffer);
64
+ arrayBufferChunks.push((0, decode_data_uri_node_1.toArrayBuffer)(chunk));
65
+ });
66
+ readStream.on('end', () => {
67
+ const arrayBuffer = concatenateArrayBuffers(arrayBufferChunks);
68
+ resolve(arrayBuffer);
64
69
  });
65
- readStream.on('end', () => resolve(arrayBuffer));
66
70
  });
67
71
  }
68
72
  exports.concatenateReadStream = concatenateReadStream;
@@ -88,11 +92,25 @@ function getRequestOptions(url, options) {
88
92
  headers
89
93
  };
90
94
  }
91
- function concatenateArrayBuffers(source1, source2) {
92
- const sourceArray1 = source1 instanceof ArrayBuffer ? new Uint8Array(source1) : source1;
93
- const sourceArray2 = source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2;
94
- const temp = new Uint8Array(sourceArray1.byteLength + sourceArray2.byteLength);
95
- temp.set(sourceArray1, 0);
96
- temp.set(sourceArray2, sourceArray1.byteLength);
97
- return temp.buffer;
95
+ /**
96
+ * Concatenate a sequence of ArrayBuffers
97
+ * @return A concatenated ArrayBuffer
98
+ * @note duplicates loader-utils since polyfills should be independent
99
+ */
100
+ function concatenateArrayBuffers(sources) {
101
+ // Make sure all inputs are wrapped in typed arrays
102
+ const sourceArrays = sources.map((source2) => source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2);
103
+ // Get length of all inputs
104
+ const byteLength = sourceArrays.reduce((length, typedArray) => length + typedArray.byteLength, 0);
105
+ // Allocate array with space for all inputs
106
+ const result = new Uint8Array(byteLength);
107
+ // Copy the subarrays
108
+ let offset = 0;
109
+ for (const sourceArray of sourceArrays) {
110
+ result.set(sourceArray, offset);
111
+ offset += sourceArray.byteLength;
112
+ }
113
+ // We work with ArrayBuffers, discard the typed array wrapper
114
+ return result.buffer;
98
115
  }
116
+ exports.concatenateArrayBuffers = concatenateArrayBuffers;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@loaders.gl/polyfills",
3
- "version": "3.1.0-beta.6",
3
+ "version": "3.1.2",
4
4
  "description": "Polyfills for TextEncoder/TextDecoder",
5
5
  "license": "MIT",
6
6
  "publishConfig": {
@@ -93,7 +93,7 @@
93
93
  },
94
94
  "scripts": {
95
95
  "pre-build": "npm run build-bundle",
96
- "build-bundle": "esbuild src/bundle.ts --bundle --outfile=dist/bundle.js"
96
+ "build-bundle": "esbuild src/bundle.ts --bundle --outfile=dist/dist.min.js"
97
97
  },
98
98
  "dependencies": {
99
99
  "@babel/runtime": "^7.3.1",
@@ -105,5 +105,5 @@
105
105
  "through": "^2.3.8",
106
106
  "web-streams-polyfill": "^3.0.0"
107
107
  },
108
- "gitHead": "e6fce1907a689f9199922e84bd493adacfefbbf6"
108
+ "gitHead": "5c25bb71a2ac8558ecedf2256cc925427b2a0506"
109
109
  }
@@ -1,7 +1,5 @@
1
1
  // Based on binary-gltf-utils under MIT license: Copyright (c) 2016-17 Karl Cheng
2
2
 
3
- import {assert} from '../../../utils/assert';
4
-
5
3
  const isArrayBuffer = (x) => x && x instanceof ArrayBuffer;
6
4
  const isBuffer = (x) => x && x instanceof Buffer;
7
5
 
@@ -11,7 +9,7 @@ const isBuffer = (x) => x && x instanceof Buffer;
11
9
  * @param {string} uri - a data URI (assumed to be valid)
12
10
  * @returns {Object} { buffer, mimeType }
13
11
  */
14
- export function decodeDataUri(uri) {
12
+ export function decodeDataUri(uri: string): {arrayBuffer: ArrayBuffer; mimeType: string} {
15
13
  const dataIndex = uri.indexOf(',');
16
14
 
17
15
  let buffer;
@@ -37,13 +35,14 @@ export function decodeDataUri(uri) {
37
35
  * @param data
38
36
  * @todo Duplicate of core
39
37
  */
40
- export function toArrayBuffer(data) {
38
+ export function toArrayBuffer(data: unknown): ArrayBuffer {
41
39
  if (isArrayBuffer(data)) {
42
- return data;
40
+ return data as ArrayBuffer;
43
41
  }
44
42
 
45
43
  // TODO - per docs we should just be able to call buffer.buffer, but there are issues
46
44
  if (isBuffer(data)) {
45
+ // @ts-expect-error
47
46
  const typedArray = new Uint8Array(data);
48
47
  return typedArray.buffer;
49
48
  }
@@ -60,9 +59,11 @@ export function toArrayBuffer(data) {
60
59
  }
61
60
 
62
61
  // HACK to support Blob polyfill
62
+ // @ts-expect-error
63
63
  if (data && typeof data === 'object' && data._toArrayBuffer) {
64
+ // @ts-expect-error
64
65
  return data._toArrayBuffer();
65
66
  }
66
67
 
67
- return assert(false, `toArrayBuffer(${JSON.stringify(data, null, 2).slice(10)})`);
68
+ throw new Error(`toArrayBuffer(${JSON.stringify(data, null, 2).slice(10)})`);
68
69
  }
@@ -5,10 +5,12 @@ import zlib from 'zlib';
5
5
 
6
6
  import {toArrayBuffer} from './decode-data-uri.node';
7
7
 
8
- const isRequestURL = (url) => url.startsWith('http:') || url.startsWith('https:');
8
+ function isRequestURL(url: string): boolean {
9
+ return url.startsWith('http:') || url.startsWith('https:');
10
+ }
9
11
 
10
12
  // Returns a promise that resolves to a readable stream
11
- export async function createReadStream(url, options) {
13
+ export async function createReadStream(url, options): Promise<any> {
12
14
  // Handle file streams in node
13
15
  if (!isRequestURL(url)) {
14
16
  const noqueryUrl = url.split('?')[0];
@@ -46,8 +48,8 @@ export function decompressReadStream(readStream, headers) {
46
48
  }
47
49
  }
48
50
 
49
- export async function concatenateReadStream(readStream) {
50
- let arrayBuffer = new ArrayBuffer(0);
51
+ export async function concatenateReadStream(readStream): Promise<ArrayBuffer> {
52
+ const arrayBufferChunks: ArrayBuffer[] = [];
51
53
 
52
54
  return await new Promise((resolve, reject) => {
53
55
  readStream.on('error', (error) => reject(error));
@@ -60,17 +62,19 @@ export async function concatenateReadStream(readStream) {
60
62
  if (typeof chunk === 'string') {
61
63
  reject(new Error('Read stream not binary'));
62
64
  }
63
- const chunkAsArrayBuffer = toArrayBuffer(chunk);
64
- arrayBuffer = concatenateArrayBuffers(arrayBuffer, chunkAsArrayBuffer);
65
+ arrayBufferChunks.push(toArrayBuffer(chunk));
65
66
  });
66
67
 
67
- readStream.on('end', () => resolve(arrayBuffer));
68
+ readStream.on('end', () => {
69
+ const arrayBuffer = concatenateArrayBuffers(arrayBufferChunks);
70
+ resolve(arrayBuffer);
71
+ });
68
72
  });
69
73
  }
70
74
 
71
75
  // HELPERS
72
76
 
73
- function getRequestOptions(url, options?: {fetch?: typeof fetch; headers?}) {
77
+ function getRequestOptions(url: string, options?: {fetch?: typeof fetch; headers?}) {
74
78
  // Ensure header keys are lower case so that we can merge without duplicates
75
79
  const originalHeaders = options?.headers || {};
76
80
  const headers = {};
@@ -94,11 +98,30 @@ function getRequestOptions(url, options?: {fetch?: typeof fetch; headers?}) {
94
98
  };
95
99
  }
96
100
 
97
- function concatenateArrayBuffers(source1, source2) {
98
- const sourceArray1 = source1 instanceof ArrayBuffer ? new Uint8Array(source1) : source1;
99
- const sourceArray2 = source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2;
100
- const temp = new Uint8Array(sourceArray1.byteLength + sourceArray2.byteLength);
101
- temp.set(sourceArray1, 0);
102
- temp.set(sourceArray2, sourceArray1.byteLength);
103
- return temp.buffer;
101
+ /**
102
+ * Concatenate a sequence of ArrayBuffers
103
+ * @return A concatenated ArrayBuffer
104
+ * @note duplicates loader-utils since polyfills should be independent
105
+ */
106
+ export function concatenateArrayBuffers(sources: (ArrayBuffer | Uint8Array)[]): ArrayBuffer {
107
+ // Make sure all inputs are wrapped in typed arrays
108
+ const sourceArrays = sources.map((source2) =>
109
+ source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2
110
+ );
111
+
112
+ // Get length of all inputs
113
+ const byteLength = sourceArrays.reduce((length, typedArray) => length + typedArray.byteLength, 0);
114
+
115
+ // Allocate array with space for all inputs
116
+ const result = new Uint8Array(byteLength);
117
+
118
+ // Copy the subarrays
119
+ let offset = 0;
120
+ for (const sourceArray of sourceArrays) {
121
+ result.set(sourceArray, offset);
122
+ offset += sourceArray.byteLength;
123
+ }
124
+
125
+ // We work with ArrayBuffers, discard the typed array wrapper
126
+ return result.buffer;
104
127
  }