@loaders.gl/polyfills 4.2.0-alpha.3 → 4.2.0-alpha.5
This diff compares the contents of two publicly released versions of this package, as published to a supported public registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
- package/dist/buffer/btoa.node.js +5 -3
- package/dist/buffer/to-array-buffer.node.js +6 -6
- package/dist/crypto/node-hash.js +41 -45
- package/dist/fetch/decode-data-uri.js +56 -41
- package/dist/fetch/fetch-polyfill.d.ts +1 -1
- package/dist/fetch/fetch-polyfill.d.ts.map +1 -1
- package/dist/fetch/fetch-polyfill.js +118 -103
- package/dist/fetch/headers-polyfill.js +90 -89
- package/dist/fetch/response-polyfill.d.ts +1 -1
- package/dist/fetch/response-polyfill.d.ts.map +1 -1
- package/dist/fetch/response-polyfill.js +65 -57
- package/dist/fetch/utils/decode-data-uri.node.js +56 -41
- package/dist/file/blob-stream-controller.js +54 -38
- package/dist/file/blob-stream.d.ts +1 -1
- package/dist/file/blob-stream.d.ts.map +1 -1
- package/dist/file/blob-stream.js +29 -15
- package/dist/file/blob.d.ts +1 -1
- package/dist/file/blob.d.ts.map +1 -1
- package/dist/file/blob.js +146 -109
- package/dist/file/file-reader.js +22 -50
- package/dist/file/file.js +30 -15
- package/dist/file/install-blob-polyfills.js +6 -5
- package/dist/file/install-file-polyfills.js +11 -8
- package/dist/file/readable-stream.js +5 -2
- package/dist/filesystems/fetch-node.js +78 -68
- package/dist/filesystems/node-file.js +119 -87
- package/dist/filesystems/node-filesystem.d.ts +1 -1
- package/dist/filesystems/node-filesystem.d.ts.map +1 -1
- package/dist/filesystems/node-filesystem.js +39 -35
- package/dist/filesystems/stream-utils.node.js +88 -55
- package/dist/images/encode-image-node.js +25 -12
- package/dist/images/encode-image.node.js +25 -12
- package/dist/images/parse-image-node.js +30 -23
- package/dist/images/parse-image.node.js +30 -23
- package/dist/index.browser.js +8 -3
- package/dist/index.cjs +13 -65091
- package/dist/index.cjs.map +7 -0
- package/dist/index.d.ts +4 -4
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +45 -15
- package/dist/libs/encoding-indexes-asian.js +6 -7
- package/dist/load-library/require-utils.node.js +70 -44
- package/dist/streams/make-node-stream.js +48 -46
- package/dist/text-encoder/encoding-indexes.js +31 -28
- package/dist/text-encoder/text-encoder.js +2604 -1033
- package/dist/utils/assert.js +3 -4
- package/dist/utils/is-browser.js +7 -2
- package/package.json +10 -8
- package/dist/buffer/btoa.node.js.map +0 -1
- package/dist/buffer/to-array-buffer.node.js.map +0 -1
- package/dist/crypto/node-hash.js.map +0 -1
- package/dist/dist.dev.js +0 -45
- package/dist/fetch/decode-data-uri.js.map +0 -1
- package/dist/fetch/fetch-polyfill.js.map +0 -1
- package/dist/fetch/headers-polyfill.js.map +0 -1
- package/dist/fetch/response-polyfill.js.map +0 -1
- package/dist/fetch/utils/decode-data-uri.node.js.map +0 -1
- package/dist/file/blob-stream-controller.js.map +0 -1
- package/dist/file/blob-stream.js.map +0 -1
- package/dist/file/blob.js.map +0 -1
- package/dist/file/file-reader.js.map +0 -1
- package/dist/file/file.js.map +0 -1
- package/dist/file/install-blob-polyfills.js.map +0 -1
- package/dist/file/install-file-polyfills.js.map +0 -1
- package/dist/file/readable-stream.js.map +0 -1
- package/dist/filesystems/fetch-node.js.map +0 -1
- package/dist/filesystems/node-file.js.map +0 -1
- package/dist/filesystems/node-filesystem.js.map +0 -1
- package/dist/filesystems/stream-utils.node.js.map +0 -1
- package/dist/images/encode-image-node.js.map +0 -1
- package/dist/images/encode-image.node.js.map +0 -1
- package/dist/images/parse-image-node.js.map +0 -1
- package/dist/images/parse-image.node.js.map +0 -1
- package/dist/index.browser.js.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/load-library/require-utils.node.js.map +0 -1
- package/dist/streams/make-node-stream.js.map +0 -1
- package/dist/text-encoder/encoding-indexes.js.map +0 -1
- package/dist/text-encoder/text-encoder.js.map +0 -1
- package/dist/utils/assert.js.map +0 -1
- package/dist/utils/is-browser.js.map +0 -1

package/dist/filesystems/node-file.js
@@ -1,98 +1,130 @@
 import { resolvePath } from '@loaders.gl/loader-utils';
 import fs from 'fs';
 export class NodeFile {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    constructor(path, flags, mode) {
+        path = resolvePath(path);
+        this.handle = fs.openSync(path, flags, mode);
+        const stats = fs.fstatSync(this.handle, { bigint: true });
+        this.size = Number(stats.size);
+        this.bigsize = stats.size;
+        this.url = path;
+    }
+    async close() {
+        return new Promise((resolve, reject) => {
+            fs.close(this.handle, (err) => (err ? reject(err) : resolve()));
+        });
+    }
+    async truncate(length) {
+        return new Promise((resolve, reject) => {
+            fs.ftruncate(this.handle, length, (err) => {
+                if (err) {
+                    reject(err);
+                }
+                else {
+                    this.bigsize = BigInt(length);
+                    this.size = Number(this.bigsize);
+                    resolve();
+                }
+            });
+        });
+    }
+    async append(data) {
+        return new Promise((resolve, reject) => {
+            fs.appendFile(this.handle, data, (err) => {
+                if (err) {
+                    reject(err);
+                }
+                else {
+                    this.bigsize = this.bigsize + BigInt(data.length);
+                    this.size = Number(this.bigsize);
+                    resolve();
+                }
+            });
+        });
+    }
+    async stat() {
+        return await new Promise((resolve, reject) => fs.fstat(this.handle, { bigint: true }, (err, info) => {
+            const stats = {
+                size: Number(info.size),
+                bigsize: info.size,
+                isDirectory: info.isDirectory()
+            };
+            if (err) {
+                reject(err);
+            }
+            else {
+                resolve(stats);
+            }
+        }));
+    }
+    async read(offset, length) {
+        const arrayBuffer = new ArrayBuffer(length);
+        let bigOffset = BigInt(offset);
+        let totalBytesRead = 0;
+        const uint8Array = new Uint8Array(arrayBuffer);
+        let position;
+        // Read in loop until we get required number of bytes
+        while (length > 0) {
+            const bytesRead = await readBytes(this.handle, uint8Array, 0, length, bigOffset);
+            // Check if end of file reached
+            if (bytesRead === 0) {
+                break;
+            }
+            totalBytesRead += bytesRead;
+            bigOffset += BigInt(bytesRead);
+            length -= bytesRead;
+            // Advance position unless we are using built-in position advancement
+            if (position !== undefined) {
+                position += bytesRead;
+            }
         }
-
-  }
+        return totalBytesRead < length ? arrayBuffer.slice(0, totalBytesRead) : arrayBuffer;
+    }
+    async write(arrayBuffer, offset = 0, length = arrayBuffer.byteLength) {
+        return new Promise((resolve, reject) => {
+            // TODO - Node.js doesn't offer write with bigint offsets???
+            const nOffset = Number(offset);
+            const uint8Array = new Uint8Array(arrayBuffer, Number(offset), length);
+            fs.write(this.handle, uint8Array, 0, length, nOffset, (err, bytesWritten) => err ? reject(err) : resolve(bytesWritten));
+        });
+    }
+}
+async function readBytes(fd, uint8Array, offset, length, position) {
+    return await new Promise((resolve, reject) => fs.read(fd, uint8Array, offset, length, position, (err, bytesRead) => err ? reject(err) : resolve(bytesRead)));
+}
+// TODO - implement streaming write
+/*
+export interface WriteStreamOptions {
+  flags?: string;
+  encoding?: 'utf8';
+  fd?: number;
+  mode?: number;
+  autoClose?: boolean;
+  start?: number;
+}
+
+export class NodeStreamWritableFile implements WritableFile {
+  outputStream: fs.WriteStream | Writable;
+
+  constructor(pathOrStream: string | Writable, options?: WriteStreamOptions) {
+    this.outputStream =
+      typeof pathOrStream === 'string' ? fs.createWriteStream(pathOrStream, options) : pathOrStream;
   }
-
+
+  async write(buffer: ArrayBuffer): Promise<void> {
     return new Promise((resolve, reject) => {
-
-
-      reject(err);
-    } else {
-      this.bigsize = this.bigsize + BigInt(data.length);
-      this.size = Number(this.bigsize);
-      resolve();
-    }
-    });
+      const uint8Array = new Uint8Array(buffer);
+      this.outputStream.write(uint8Array, (err) => (err ? reject(err) : resolve()));
     });
   }
-
-
-
-
-
-
-
-      isDirectory: info.isDirectory()
-    };
-    if (err) {
-      reject(err);
-    } else {
-      resolve(stats);
-    }
-    }));
-  }
-  async read(offset, length) {
-    const arrayBuffer = new ArrayBuffer(length);
-    let bigOffset = BigInt(offset);
-    let totalBytesRead = 0;
-    const uint8Array = new Uint8Array(arrayBuffer);
-    let position;
-    while (length > 0) {
-      const bytesRead = await readBytes(this.handle, uint8Array, 0, length, bigOffset);
-      if (bytesRead === 0) {
-        break;
-      }
-      totalBytesRead += bytesRead;
-      bigOffset += BigInt(bytesRead);
-      length -= bytesRead;
-      if (position !== undefined) {
-        position += bytesRead;
-      }
+
+  async close(): Promise<void> {
+    if (this.outputStream instanceof fs.WriteStream) {
+      return new Promise((resolve, reject) => {
+        const stream = this.outputStream as fs.WriteStream;
+        stream.close((err) => (err ? reject(err) : resolve()));
+      });
     }
-    return totalBytesRead < length ? arrayBuffer.slice(0, totalBytesRead) : arrayBuffer;
   }
-  async write(arrayBuffer) {
-    let offset = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
-    let length = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : arrayBuffer.byteLength;
-    return new Promise((resolve, reject) => {
-      const nOffset = Number(offset);
-      const uint8Array = new Uint8Array(arrayBuffer, Number(offset), length);
-      fs.write(this.handle, uint8Array, 0, length, nOffset, (err, bytesWritten) => err ? reject(err) : resolve(bytesWritten));
-    });
-  }
-}
-async function readBytes(fd, uint8Array, offset, length, position) {
-  return await new Promise((resolve, reject) => fs.read(fd, uint8Array, offset, length, position, (err, bytesRead) => err ? reject(err) : resolve(bytesRead)));
 }
-
+*/
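
Usage note: a minimal sketch of driving the new NodeFile class under Node.js ESM. The deep dist import specifier and the file path are assumptions for illustration, not taken from the diff.

// Sketch only — assumes an ESM context; the import specifier and
// '/tmp/example.bin' are hypothetical.
import { NodeFile } from '@loaders.gl/polyfills/dist/filesystems/node-file.js';

const file = new NodeFile('/tmp/example.bin', 'r'); // flags/mode follow fs.openSync
console.log(file.size, file.bigsize); // size as a number, bigsize as a bigint

const arrayBuffer = await file.read(0, 16); // resolves to an ArrayBuffer
console.log(new Uint8Array(arrayBuffer));

await file.close();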

package/dist/filesystems/node-filesystem.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"node-filesystem.d.ts","sourceRoot":"","sources":["../../src/filesystems/node-filesystem.ts"],"names":[],"mappings":"AAIA,OAAO,EAAC,IAAI,EAAE,sBAAsB,EAAC,MAAM,0BAA0B,CAAC;AAEtE,OAAO,EAAC,QAAQ,EAAC,
+{"version":3,"file":"node-filesystem.d.ts","sourceRoot":"","sources":["../../src/filesystems/node-filesystem.ts"],"names":[],"mappings":"AAIA,OAAO,EAAC,IAAI,EAAE,sBAAsB,EAAC,MAAM,0BAA0B,CAAC;AAEtE,OAAO,EAAC,QAAQ,EAAC,uBAAoB;AAMrC;;;;GAIG;AACH,qBAAa,cAAe,YAAW,sBAAsB;IAC3D,QAAQ,EAAE,OAAO,CAAQ;IACzB,QAAQ,EAAE,OAAO,CAAQ;;IAKnB,OAAO,CAAC,OAAO,SAAM,EAAE,OAAO,CAAC,EAAE,EAAE,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC;IAIpD,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IASjC,MAAM,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAInC,KAAK,CAAC,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,QAAQ,CAAC;IAK5D,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,GAAE,GAAS,GAAG,OAAO,CAAC,QAAQ,CAAC;IAInE,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,GAAE,GAAG,GAAG,IAAU,EAAE,IAAI,CAAC,EAAE,GAAG,GAAG,OAAO,CAAC,QAAQ,CAAC;CAG7F"}

package/dist/filesystems/node-filesystem.js
@@ -1,40 +1,44 @@
+// loaders.gl
+// SPDX-License-Identifier: MIT
+// Copyright (c) vis.gl contributors
 import fsPromise from 'fs/promises';
 import { NodeFile } from "./node-file.js";
 import { fetchNode } from "./fetch-node.js";
+// import {fetchFile} from "../fetch/fetch-file"
+// import {selectLoader} from "../api/select-loader";
+/**
+ * FileSystem pass-through for Node.js
+ * Compatible with BrowserFileSystem.
+ * @param options
+ */
 export class NodeFileSystem {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-  async openWritableFile(path) {
-    let flags = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'w';
-    let mode = arguments.length > 2 ? arguments[2] : undefined;
-    return new NodeFile(path, flags, mode);
-  }
+    // implements FileSystem
+    constructor() {
+        this.readable = true;
+        this.writable = true;
+    }
+    async readdir(dirname = '.', options) {
+        return await fsPromise.readdir(dirname, options);
+    }
+    async stat(path) {
+        const info = await fsPromise.stat(path, { bigint: true });
+        return {
+            size: Number(info.size),
+            bigsize: info.size,
+            isDirectory: info.isDirectory()
+        };
+    }
+    async unlink(path) {
+        return await fsPromise.unlink(path);
+    }
+    async fetch(path, options) {
+        return await fetchNode(path, options);
+    }
+    // implements IRandomAccessFileSystem
+    async openReadableFile(path, flags = 'r') {
+        return new NodeFile(path, flags);
+    }
+    async openWritableFile(path, flags = 'w', mode) {
+        return new NodeFile(path, flags, mode);
+    }
 }
-//# sourceMappingURL=node-filesystem.js.map
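
Usage note: a sketch of the NodeFileSystem surface compiled above. Importing from the package root is an assumption; the diff itself only shows the dist module.

// Sketch only — the import location is an assumption.
import { NodeFileSystem } from '@loaders.gl/polyfills';

const nodeFS = new NodeFileSystem();
console.log(nodeFS.readable, nodeFS.writable); // both true after construction

const entries = await nodeFS.readdir('.'); // delegates to fsPromise.readdir
const stats = await nodeFS.stat('./package.json'); // {size, bigsize, isDirectory}

const file = await nodeFS.openReadableFile('./package.json'); // NodeFile opened with 'r'
await file.close();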

package/dist/filesystems/stream-utils.node.js
@@ -1,65 +1,98 @@
+// loaders.gl
+// SPDX-License-Identifier: MIT
+// Copyright (c) vis.gl contributors
 import zlib from 'zlib';
-const isArrayBuffer = x => x && x instanceof ArrayBuffer;
-const isBuffer = x => x && x instanceof Buffer;
+const isArrayBuffer = (x) => x && x instanceof ArrayBuffer;
+const isBuffer = (x) => x && x instanceof Buffer;
+/**
+ *
+ */
 export function decompressReadStream(readStream, headers) {
-
-
-
-
-
-
-
-
-
-
+    switch (headers?.get('content-encoding')) {
+        case 'br':
+            return readStream.pipe(zlib.createBrotliDecompress());
+        case 'gzip':
+            return readStream.pipe(zlib.createGunzip());
+        case 'deflate':
+            return readStream.pipe(zlib.createDeflate());
+        default:
+            // No compression or an unknown one, just return it as is
+            return readStream;
+    }
 }
+/**
+ *
+ * @param readStream
+ * @returns
+ */
 export async function concatenateReadStream(readStream) {
-
-
-
-
-
-
-
-
-
+    const arrayBufferChunks = [];
+    return await new Promise((resolve, reject) => {
+        readStream.on('error', (error) => reject(error));
+        // Once the readable callback has been added, stream switches to "flowing mode"
+        // In Node 10 (but not 12 and 14) this causes `data` and `end` to never be called unless we read data here
+        readStream.on('readable', () => readStream.read());
+        readStream.on('data', (chunk) => {
+            if (typeof chunk === 'string') {
+                reject(new Error('Read stream not binary'));
+            }
+            arrayBufferChunks.push(toArrayBuffer(chunk));
+        });
+        readStream.on('end', () => {
+            const arrayBuffer = concatenateArrayBuffers(arrayBufferChunks);
+            resolve(arrayBuffer);
+        });
     });
-    readStream.on('end', () => {
-      const arrayBuffer = concatenateArrayBuffers(arrayBufferChunks);
-      resolve(arrayBuffer);
-    });
-  });
 }
+/**
+ * Concatenate a sequence of ArrayBuffers
+ * @return A concatenated ArrayBuffer
+ * @note duplicates loader-utils since polyfills should be independent
+ */
 export function concatenateArrayBuffers(sources) {
-
-
-
-
-
-  result
-
-
-
+    // Make sure all inputs are wrapped in typed arrays
+    const sourceArrays = sources.map((source2) => source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2);
+    // Get length of all inputs
+    const byteLength = sourceArrays.reduce((length, typedArray) => length + typedArray.byteLength, 0);
+    // Allocate array with space for all inputs
+    const result = new Uint8Array(byteLength);
+    // Copy the subarrays
+    let offset = 0;
+    for (const sourceArray of sourceArrays) {
+        result.set(sourceArray, offset);
+        offset += sourceArray.byteLength;
+    }
+    // We work with ArrayBuffers, discard the typed array wrapper
+    return result.buffer;
 }
+/**
+ * @param data
+ * @todo Duplicate of core
+ */
 export function toArrayBuffer(data) {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    if (isArrayBuffer(data)) {
+        return data;
+    }
+    // TODO - per docs we should just be able to call buffer.buffer, but there are issues
+    if (isBuffer(data)) {
+        // @ts-expect-error
+        const typedArray = new Uint8Array(data);
+        return typedArray.buffer;
+    }
+    // Careful - Node Buffers will look like ArrayBuffers (keep after isBuffer)
+    if (ArrayBuffer.isView(data)) {
+        return data.buffer;
+    }
+    if (typeof data === 'string') {
+        const text = data;
+        const uint8Array = new TextEncoder().encode(text);
+        return uint8Array.buffer;
+    }
+    // HACK to support Blob polyfill
+    // @ts-expect-error
+    if (data && typeof data === 'object' && data._toArrayBuffer) {
+        // @ts-expect-error
+        return data._toArrayBuffer();
+    }
+    throw new Error(`toArrayBuffer(${JSON.stringify(data, null, 2).slice(10)})`);
 }
-//# sourceMappingURL=stream-utils.node.js.map
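
Usage note: the two exported buffer helpers are plain functions and can be exercised directly; a sketch assuming the deep dist import specifier.

// Sketch only — the import specifier is an assumption.
import { concatenateArrayBuffers, toArrayBuffer } from '@loaders.gl/polyfills/dist/filesystems/stream-utils.node.js';

const abc = toArrayBuffer('abc'); // strings go through TextEncoder
console.log(new Uint8Array(abc)); // Uint8Array(3) [97, 98, 99]

// Mixed ArrayBuffer / typed-array chunks are concatenated byte-wise
const joined = concatenateArrayBuffers([abc, new Uint8Array([33])]);
console.log(new Uint8Array(joined)); // Uint8Array(4) [97, 98, 99, 33]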

package/dist/images/encode-image-node.js
@@ -1,20 +1,33 @@
+// Use stackgl modules for DOM-less reading and writing of images
 import savePixels from 'save-pixels';
 import ndarray from 'ndarray';
 import { bufferToArrayBuffer } from "../buffer/to-array-buffer.node.js";
+/**
+ * Returns data bytes representing a compressed image in PNG or JPG format,
+ * This data can be saved using file system (f) methods or
+ * used in a request.
+ * @param image to save
+ * @param options
+ * @param options.type='png' - png, jpg or image/png, image/jpg are valid
+ * @param options.dataURI - Whether to include a data URI header
+ * @return {*} bytes
+ */
 export function encodeImageToStreamNode(image, options) {
-
-
-
+    // Support MIME type strings
+    const type = options.type ? options.type.replace('image/', '') : 'jpeg';
+    const pixels = ndarray(image.data, [image.width, image.height, 4], [4, image.width * 4, 1], 0);
+    // Note: savePixels returns a stream
+    return savePixels(pixels, type, options);
 }
 export function encodeImageNode(image, options) {
-
-
-
-
-
-
-
+    const imageStream = encodeImageToStreamNode(image, options);
+    return new Promise((resolve) => {
+        const buffers = [];
+        imageStream.on('data', (buffer) => buffers.push(buffer));
+        // TODO - convert to arraybuffer?
+        imageStream.on('end', () => {
+            const buffer = Buffer.concat(buffers);
+            resolve(bufferToArrayBuffer(buffer));
+        });
     });
-  });
 }
-//# sourceMappingURL=encode-image-node.js.map

package/dist/images/encode-image.node.js
@@ -1,20 +1,33 @@
+// Use stackgl modules for DOM-less reading and writing of images
 import savePixels from 'save-pixels';
 import ndarray from 'ndarray';
 import { bufferToArrayBuffer } from "../buffer/to-array-buffer.node.js";
+/**
+ * Returns data bytes representing a compressed image in PNG or JPG format,
+ * This data can be saved using file system (f) methods or
+ * used in a request.
+ * @param image to save
+ * @param options
+ * @param options.type='png' - png, jpg or image/png, image/jpg are valid
+ * @param options.dataURI - Whether to include a data URI header
+ * @return {*} bytes
+ */
 export function encodeImageToStreamNode(image, options) {
-
-
-
+    // Support MIME type strings
+    const type = options.type ? options.type.replace('image/', '') : 'jpeg';
+    const pixels = ndarray(image.data, [image.width, image.height, 4], [4, image.width * 4, 1], 0);
+    // Note: savePixels returns a stream
+    return savePixels(pixels, type, options);
 }
 export function encodeImageNode(image, options) {
-
-
-
-
-
-
-
+    const imageStream = encodeImageToStreamNode(image, options);
+    return new Promise((resolve) => {
+        const buffers = [];
+        imageStream.on('data', (buffer) => buffers.push(buffer));
+        // TODO - convert to arraybuffer?
+        imageStream.on('end', () => {
+            const buffer = Buffer.concat(buffers);
+            resolve(bufferToArrayBuffer(buffer));
+        });
     });
-  });
 }
-//# sourceMappingURL=encode-image.node.js.map
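
Usage note: both copies of the encoder (encode-image-node.js and encode-image.node.js) expose the same API; a sketch with a synthetic 2×2 RGBA image. The import specifier is an assumption.

// Sketch only — the import specifier and the image are hypothetical.
import { encodeImageNode } from '@loaders.gl/polyfills/dist/images/encode-image.node.js';

// 2x2 opaque white image in the {data, width, height} RGBA layout the encoder expects
const image = { data: new Uint8Array(2 * 2 * 4).fill(255), width: 2, height: 2 };

// 'image/png' is stripped to 'png' before being handed to save-pixels
const arrayBuffer = await encodeImageNode(image, { type: 'image/png' });
console.log(arrayBuffer.byteLength); // length of the PNG-encoded bytes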

package/dist/images/parse-image-node.js
@@ -1,29 +1,36 @@
+// loaders.gl
+// SPDX-License-Identifier: MIT
+// Copyright (c) vis.gl contributors
 import getPixels from 'get-pixels';
+/** Declares which image format mime types this loader polyfill supports */
 export const NODE_FORMAT_SUPPORT = ['image/png', 'image/jpeg', 'image/gif'];
 export async function parseImageNode(arrayBuffer, mimeType) {
-
-
-
-
-
-
+    if (!mimeType) {
+        throw new Error('MIMEType is required to parse image under Node.js');
+    }
+    const buffer = arrayBuffer instanceof Buffer ? arrayBuffer : Buffer.from(arrayBuffer);
+    const ndarray = await getPixelsAsync(buffer, mimeType);
+    return ndarray;
 }
+// TODO - check if getPixels callback is asynchronous if provided with buffer input
+// if not, parseImage can be a sync function
 function getPixelsAsync(buffer, mimeType) {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    return new Promise((resolve) => getPixels(buffer, mimeType, (err, ndarray) => {
+        if (err) {
+            throw err;
+        }
+        const shape = [...ndarray.shape];
+        const layers = ndarray.shape.length === 4 ? ndarray.shape.shift() : 1;
+        const data = ndarray.data instanceof Buffer ? new Uint8Array(ndarray.data) : ndarray.data;
+        // extract width/height etc
+        resolve({
+            shape,
+            data,
+            width: ndarray.shape[0],
+            height: ndarray.shape[1],
+            components: ndarray.shape[2],
+            // TODO - error
+            layers: layers ? [layers] : []
+        });
+    }));
 }
-//# sourceMappingURL=parse-image-node.js.map
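
Usage note: a matching decode sketch for parseImageNode. The file name and import specifier are hypothetical.

// Sketch only — 'test.png' and the import specifier are hypothetical.
import { readFile } from 'fs/promises';
import { parseImageNode } from '@loaders.gl/polyfills/dist/images/parse-image-node.js';

const buffer = await readFile('test.png'); // a Node Buffer is accepted directly
const image = await parseImageNode(buffer, 'image/png'); // mimeType is required
console.log(image.width, image.height, image.components); // e.g. 256 256 4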