@loaders.gl/polyfills 4.0.0-alpha.4 → 4.0.0-alpha.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bundle.d.ts +2 -0
- package/dist/bundle.d.ts.map +1 -0
- package/dist/dist.min.js +3994 -0
- package/dist/index.d.ts +6 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +2 -2
- package/dist/index.js.map +1 -1
- package/dist/lib/encoding-indexes.d.ts +31 -0
- package/dist/lib/encoding-indexes.d.ts.map +1 -0
- package/dist/lib/encoding-indexes.js +30 -0
- package/dist/lib/encoding-indexes.js.map +1 -0
- package/dist/lib/encoding.d.ts +15 -0
- package/dist/lib/encoding.d.ts.map +1 -0
- package/dist/lib/encoding.js +1450 -0
- package/dist/lib/encoding.js.map +1 -0
- package/dist/libs/encoding-indexes-asian.d.ts +10 -0
- package/dist/libs/encoding-indexes-asian.d.ts.map +1 -0
- package/dist/libs/{encoding-indexes.js → encoding-indexes-asian.js} +2 -40
- package/dist/node/buffer/btoa.node.d.ts +3 -0
- package/dist/node/buffer/btoa.node.d.ts.map +1 -0
- package/dist/node/buffer/to-array-buffer.node.d.ts +2 -0
- package/dist/node/buffer/to-array-buffer.node.d.ts.map +1 -0
- package/dist/node/fetch/fetch.node.d.ts +7 -0
- package/dist/node/fetch/fetch.node.d.ts.map +1 -0
- package/dist/node/fetch/headers.node.d.ts +34 -0
- package/dist/node/fetch/headers.node.d.ts.map +1 -0
- package/dist/node/fetch/response.node.d.ts +22 -0
- package/dist/node/fetch/response.node.d.ts.map +1 -0
- package/dist/node/fetch/utils/decode-data-uri.node.d.ts +16 -0
- package/dist/node/fetch/utils/decode-data-uri.node.d.ts.map +1 -0
- package/dist/node/fetch/utils/decode-data-uri.node.js +1 -3
- package/dist/node/fetch/utils/decode-data-uri.node.js.map +1 -1
- package/dist/node/fetch/utils/stream-utils.node.d.ts +10 -0
- package/dist/node/fetch/utils/stream-utils.node.d.ts.map +1 -0
- package/dist/node/fetch/utils/stream-utils.node.js +21 -12
- package/dist/node/fetch/utils/stream-utils.node.js.map +1 -1
- package/dist/node/file/blob-stream-controller.d.ts +29 -0
- package/dist/node/file/blob-stream-controller.d.ts.map +1 -0
- package/dist/node/file/blob-stream.d.ts +25 -0
- package/dist/node/file/blob-stream.d.ts.map +1 -0
- package/dist/node/file/blob.d.ts +58 -0
- package/dist/node/file/blob.d.ts.map +1 -0
- package/dist/node/file/file-reader.d.ts +24 -0
- package/dist/node/file/file-reader.d.ts.map +1 -0
- package/dist/node/file/file.d.ts +25 -0
- package/dist/node/file/file.d.ts.map +1 -0
- package/dist/node/file/install-file-polyfills.d.ts +2 -0
- package/dist/node/file/install-file-polyfills.d.ts.map +1 -0
- package/dist/node/file/readable-stream.d.ts +4 -0
- package/dist/node/file/readable-stream.d.ts.map +1 -0
- package/dist/node/images/encode-image.node.d.ts +20 -0
- package/dist/node/images/encode-image.node.d.ts.map +1 -0
- package/dist/node/images/parse-image.node.d.ts +11 -0
- package/dist/node/images/parse-image.node.d.ts.map +1 -0
- package/dist/promise/all-settled.d.ts +10 -0
- package/dist/promise/all-settled.d.ts.map +1 -0
- package/dist/utils/assert.d.ts +2 -0
- package/dist/utils/assert.d.ts.map +1 -0
- package/dist/utils/globals.d.ts +4 -0
- package/dist/utils/globals.d.ts.map +1 -0
- package/package.json +6 -6
- package/src/index.ts +2 -2
- package/src/lib/encoding-indexes.ts +34 -0
- package/{dist/libs/encoding.js → src/lib/encoding.ts} +78 -78
- package/src/libs/{encoding-indexes.js → encoding-indexes-asian.js} +2 -40
- package/src/node/fetch/utils/decode-data-uri.node.ts +7 -6
- package/src/node/fetch/utils/stream-utils.node.ts +38 -15
- package/src/libs/encoding.js +0 -3084
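The headline changes in alpha.5 are the TypeScript declaration (`.d.ts`) files now shipped under `dist/`, the split of the text-encoding tables into a slim `lib/encoding-indexes` module plus a separate `libs/encoding-indexes-asian` bundle, and type annotations in the Node fetch utilities (diffed below). For orientation, a minimal consumer sketch, assuming the package's usual side-effect import, which installs browser-style globals such as `fetch` in Node; the URL is purely illustrative:

```ts
// Importing the package for its side effects installs Node.js polyfills
// (fetch, TextEncoder/TextDecoder, Blob/File, ...) before any loaders run.
import '@loaders.gl/polyfills';

async function main(): Promise<void> {
  // With the polyfills installed, a browser-style fetch is available in Node.
  const response = await fetch('https://example.com/data.bin'); // hypothetical URL
  const arrayBuffer = await response.arrayBuffer();
  console.log(arrayBuffer.byteLength);
}

main();
```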
package/src/node/fetch/utils/decode-data-uri.node.ts

@@ -1,7 +1,5 @@
 // Based on binary-gltf-utils under MIT license: Copyright (c) 2016-17 Karl Cheng

-import {assert} from '../../../utils/assert';
-
 const isArrayBuffer = (x) => x && x instanceof ArrayBuffer;
 const isBuffer = (x) => x && x instanceof Buffer;

@@ -11,7 +9,7 @@ const isBuffer = (x) => x && x instanceof Buffer;
  * @param {string} uri - a data URI (assumed to be valid)
  * @returns {Object} { buffer, mimeType }
  */
-export function decodeDataUri(uri) {
+export function decodeDataUri(uri: string): {arrayBuffer: ArrayBuffer; mimeType: string} {
   const dataIndex = uri.indexOf(',');

   let buffer;
@@ -37,13 +35,14 @@ export function decodeDataUri(uri) {
  * @param data
  * @todo Duplicate of core
  */
-export function toArrayBuffer(data) {
+export function toArrayBuffer(data: unknown): ArrayBuffer {
   if (isArrayBuffer(data)) {
-    return data;
+    return data as ArrayBuffer;
   }

   // TODO - per docs we should just be able to call buffer.buffer, but there are issues
   if (isBuffer(data)) {
+    // @ts-expect-error
     const typedArray = new Uint8Array(data);
     return typedArray.buffer;
   }
@@ -60,9 +59,11 @@ export function toArrayBuffer(data) {
   }

   // HACK to support Blob polyfill
+  // @ts-expect-error
   if (data && typeof data === 'object' && data._toArrayBuffer) {
+    // @ts-expect-error
     return data._toArrayBuffer();
   }

-
+  throw new Error(`toArrayBuffer(${JSON.stringify(data, null, 2).slice(10)})`);
 }
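The hunks above are from package/src/node/fetch/utils/decode-data-uri.node.ts: `decodeDataUri` and `toArrayBuffer` gain explicit TypeScript signatures, the unused `assert` import is dropped, and `toArrayBuffer` now throws on unsupported input instead of falling through and returning `undefined`. A usage sketch based only on the signatures shown; the sample data URI, its expected output, and the relative import path are illustrative (the module is internal to the package):

```ts
import {decodeDataUri, toArrayBuffer} from './decode-data-uri.node';

// Decode an inline base64 data URI into raw bytes plus its MIME type.
const {arrayBuffer, mimeType} = decodeDataUri('data:text/plain;base64,aGVsbG8=');
console.log(mimeType, arrayBuffer.byteLength); // assumed: 'text/plain', 5

// Copy a Node Buffer's bytes into a standalone ArrayBuffer.
const copied: ArrayBuffer = toArrayBuffer(Buffer.from([1, 2, 3]));
console.log(new Uint8Array(copied)); // Uint8Array(3) [1, 2, 3]

// Inputs that are neither ArrayBuffer, Buffer, nor a Blob-polyfill object now throw.
try {
  toArrayBuffer(42);
} catch (error) {
  console.error((error as Error).message);
}
```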
package/src/node/fetch/utils/stream-utils.node.ts

@@ -5,10 +5,12 @@ import zlib from 'zlib';

 import {toArrayBuffer} from './decode-data-uri.node';

-
+function isRequestURL(url: string): boolean {
+  return url.startsWith('http:') || url.startsWith('https:');
+}

 // Returns a promise that resolves to a readable stream
-export async function createReadStream(url, options) {
+export async function createReadStream(url, options): Promise<any> {
   // Handle file streams in node
   if (!isRequestURL(url)) {
     const noqueryUrl = url.split('?')[0];
@@ -46,8 +48,8 @@ export function decompressReadStream(readStream, headers) {
   }
 }

-export async function concatenateReadStream(readStream) {
-
+export async function concatenateReadStream(readStream): Promise<ArrayBuffer> {
+  const arrayBufferChunks: ArrayBuffer[] = [];

   return await new Promise((resolve, reject) => {
     readStream.on('error', (error) => reject(error));
@@ -60,17 +62,19 @@ export async function concatenateReadStream(readStream) {
       if (typeof chunk === 'string') {
         reject(new Error('Read stream not binary'));
       }
-
-      arrayBuffer = concatenateArrayBuffers(arrayBuffer, chunkAsArrayBuffer);
+      arrayBufferChunks.push(toArrayBuffer(chunk));
     });

-    readStream.on('end', () =>
+    readStream.on('end', () => {
+      const arrayBuffer = concatenateArrayBuffers(arrayBufferChunks);
+      resolve(arrayBuffer);
+    });
   });
 }

 // HELPERS

-function getRequestOptions(url, options?: {fetch?: typeof fetch; headers?}) {
+function getRequestOptions(url: string, options?: {fetch?: typeof fetch; headers?}) {
   // Ensure header keys are lower case so that we can merge without duplicates
   const originalHeaders = options?.headers || {};
   const headers = {};
@@ -94,11 +98,30 @@ function getRequestOptions(url, options?: {fetch?: typeof fetch; headers?}) {
   };
 }

-
-
-
-
-
-
-
+/**
+ * Concatenate a sequence of ArrayBuffers
+ * @return A concatenated ArrayBuffer
+ * @note duplicates loader-utils since polyfills should be independent
+ */
+export function concatenateArrayBuffers(sources: (ArrayBuffer | Uint8Array)[]): ArrayBuffer {
+  // Make sure all inputs are wrapped in typed arrays
+  const sourceArrays = sources.map((source2) =>
+    source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2
+  );
+
+  // Get length of all inputs
+  const byteLength = sourceArrays.reduce((length, typedArray) => length + typedArray.byteLength, 0);
+
+  // Allocate array with space for all inputs
+  const result = new Uint8Array(byteLength);
+
+  // Copy the subarrays
+  let offset = 0;
+  for (const sourceArray of sourceArrays) {
+    result.set(sourceArray, offset);
+    offset += sourceArray.byteLength;
+  }
+
+  // We work with ArrayBuffers, discard the typed array wrapper
+  return result.buffer;
 }
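In package/src/node/fetch/utils/stream-utils.node.ts, `createReadStream` gains a local `isRequestURL` helper and an explicit `Promise` return type, and `concatenateReadStream` now collects each chunk as an `ArrayBuffer` and merges them once on the `'end'` event via the new exported `concatenateArrayBuffers` helper, instead of re-concatenating a growing buffer on every `'data'` chunk. Since `concatenateArrayBuffers` appears in full above, its behavior can be sketched directly; the relative import path and the file read at the end are illustrative:

```ts
import {createReadStream as createFileStream} from 'fs';
import {concatenateArrayBuffers, concatenateReadStream} from './stream-utils.node';

// Merge a mix of ArrayBuffers and Uint8Arrays into one contiguous ArrayBuffer.
const merged = concatenateArrayBuffers([
  new Uint8Array([1, 2]).buffer,
  new Uint8Array([3, 4, 5])
]);
console.log(new Uint8Array(merged)); // Uint8Array(5) [1, 2, 3, 4, 5]

// Drain a Node readable stream into a single ArrayBuffer (hypothetical file path).
async function readFileAsArrayBuffer(path: string): Promise<ArrayBuffer> {
  return await concatenateReadStream(createFileStream(path));
}

readFileAsArrayBuffer('./tile.bin').then((buffer) => console.log(buffer.byteLength));
```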