@loaders.gl/loader-utils 4.0.0-alpha.23 → 4.0.0-alpha.25
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/es5/json-loader.js +1 -1
- package/dist/es5/lib/file-provider/file-handle.js +14 -20
- package/dist/es5/lib/file-provider/file-handle.js.map +1 -1
- package/dist/es5/lib/filesystems/filesystem.js.map +1 -1
- package/dist/es5/lib/filesystems/node-filesystem.browser.js +172 -0
- package/dist/es5/lib/filesystems/node-filesystem.browser.js.map +1 -0
- package/dist/es5/lib/filesystems/node-filesystem.js.map +1 -1
- package/dist/es5/lib/node/fs.browser.js +9 -0
- package/dist/es5/lib/node/fs.browser.js.map +1 -0
- package/dist/es5/lib/node/fs.js +25 -37
- package/dist/es5/lib/node/fs.js.map +1 -1
- package/dist/es5/lib/node/stream.browser.js +9 -0
- package/dist/es5/lib/node/stream.browser.js.map +1 -0
- package/dist/es5/lib/node/stream.js +6 -7
- package/dist/es5/lib/node/stream.js.map +1 -1
- package/dist/es5/lib/option-utils/merge-loader-options.js +1 -1
- package/dist/es5/lib/option-utils/merge-loader-options.js.map +1 -1
- package/dist/es5/loader-types.js.map +1 -1
- package/dist/es5/writer-types.js.map +1 -1
- package/dist/esm/json-loader.js +1 -1
- package/dist/esm/lib/file-provider/file-handle.js +6 -10
- package/dist/esm/lib/file-provider/file-handle.js.map +1 -1
- package/dist/esm/lib/filesystems/filesystem.js.map +1 -1
- package/dist/esm/lib/filesystems/node-filesystem.browser.js +42 -0
- package/dist/esm/lib/filesystems/node-filesystem.browser.js.map +1 -0
- package/dist/esm/lib/filesystems/node-filesystem.js.map +1 -1
- package/dist/esm/lib/node/fs.browser.js +2 -0
- package/dist/esm/lib/node/fs.browser.js.map +1 -0
- package/dist/esm/lib/node/fs.js +16 -31
- package/dist/esm/lib/node/fs.js.map +1 -1
- package/dist/esm/lib/node/stream.browser.js +2 -0
- package/dist/esm/lib/node/stream.browser.js.map +1 -0
- package/dist/esm/lib/node/stream.js +2 -5
- package/dist/esm/lib/node/stream.js.map +1 -1
- package/dist/esm/lib/option-utils/merge-loader-options.js +1 -1
- package/dist/esm/lib/option-utils/merge-loader-options.js.map +1 -1
- package/dist/esm/loader-types.js.map +1 -1
- package/dist/esm/writer-types.js.map +1 -1
- package/dist/lib/file-provider/file-handle.d.ts +2 -2
- package/dist/lib/file-provider/file-handle.d.ts.map +1 -1
- package/dist/lib/filesystems/filesystem.d.ts +1 -2
- package/dist/lib/filesystems/filesystem.d.ts.map +1 -1
- package/dist/lib/filesystems/node-filesystem.browser.d.ts +39 -0
- package/dist/lib/filesystems/node-filesystem.browser.d.ts.map +1 -0
- package/dist/lib/filesystems/node-filesystem.d.ts +1 -1
- package/dist/lib/filesystems/node-filesystem.d.ts.map +1 -1
- package/dist/lib/node/fs.browser.d.ts +2 -0
- package/dist/lib/node/fs.browser.d.ts.map +1 -0
- package/dist/lib/node/fs.d.ts +19 -16
- package/dist/lib/node/fs.d.ts.map +1 -1
- package/dist/lib/node/stream.browser.d.ts +2 -0
- package/dist/lib/node/stream.browser.d.ts.map +1 -0
- package/dist/lib/node/stream.d.ts +4 -1
- package/dist/lib/node/stream.d.ts.map +1 -1
- package/dist/loader-types.d.ts +2 -2
- package/dist/loader-types.d.ts.map +1 -1
- package/dist/writer-types.d.ts +4 -6
- package/dist/writer-types.d.ts.map +1 -1
- package/package.json +9 -5
- package/src/lib/file-provider/file-handle.ts +17 -12
- package/src/lib/filesystems/filesystem.ts +1 -1
- package/src/lib/filesystems/node-filesystem.browser.ts +61 -0
- package/src/lib/filesystems/node-filesystem.ts +1 -1
- package/src/lib/node/fs.browser.ts +1 -0
- package/src/lib/node/fs.ts +20 -54
- package/src/lib/node/stream.browser.ts +1 -0
- package/src/lib/node/stream.ts +3 -11
- package/src/lib/option-utils/merge-loader-options.ts +1 -1
- package/src/loader-types.ts +2 -2
- package/src/writer-types.ts +11 -16
- package/dist/index.js +0 -135
- package/dist/json-loader.js +0 -27
- package/dist/lib/binary-utils/array-buffer-utils.js +0 -84
- package/dist/lib/binary-utils/dataview-copy-utils.js +0 -97
- package/dist/lib/binary-utils/get-first-characters.js +0 -45
- package/dist/lib/binary-utils/memory-conversion-utils.js +0 -73
- package/dist/lib/binary-utils/memory-copy-utils.js +0 -61
- package/dist/lib/env-utils/assert.js +0 -13
- package/dist/lib/env-utils/globals.js +0 -30
- package/dist/lib/file-provider/data-view-file.js +0 -63
- package/dist/lib/file-provider/file-handle-file.js +0 -92
- package/dist/lib/file-provider/file-handle.js +0 -55
- package/dist/lib/file-provider/file-provider.js +0 -13
- package/dist/lib/filesystems/filesystem.js +0 -37
- package/dist/lib/filesystems/node-filesystem.js +0 -74
- package/dist/lib/filesystems/readable-file.js +0 -25
- package/dist/lib/filesystems/writable-file.js +0 -48
- package/dist/lib/iterators/async-iteration.js +0 -53
- package/dist/lib/iterators/text-iterators.js +0 -61
- package/dist/lib/node/buffer.browser.js +0 -22
- package/dist/lib/node/buffer.js +0 -36
- package/dist/lib/node/fs.js +0 -51
- package/dist/lib/node/promisify.js +0 -22
- package/dist/lib/node/stream.js +0 -17
- package/dist/lib/option-utils/merge-loader-options.js +0 -27
- package/dist/lib/parser-utils/parse-json.js +0 -16
- package/dist/lib/path-utils/file-aliases.js +0 -47
- package/dist/lib/path-utils/get-cwd.js +0 -12
- package/dist/lib/path-utils/path.js +0 -178
- package/dist/lib/request-utils/request-scheduler.js +0 -142
- package/dist/lib/worker-loader-utils/create-loader-worker.js +0 -99
- package/dist/lib/worker-loader-utils/encode-with-worker.js +0 -21
- package/dist/lib/worker-loader-utils/parse-with-worker.js +0 -81
- package/dist/loader-types.js +0 -36
- package/dist/types.js +0 -3
- package/dist/workers/json-worker.js +0 -5
- package/dist/writer-types.js +0 -3
package/dist/lib/file-provider/file-handle-file.js DELETED
@@ -1,92 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.FileHandleFile = void 0;
-const file_handle_1 = require("./file-handle");
-const file_aliases_1 = require("../path-utils/file-aliases");
-/**
- * Provides file data using node fs library
- */
-class FileHandleFile {
-    /**
-     * Returns a new copy of FileHandleFile
-     * @param path The path to the file in file system
-     */
-    static async from(path) {
-        path = (0, file_aliases_1.resolvePath)(path);
-        const fileDescriptor = await file_handle_1.FileHandle.open(path);
-        return new FileHandleFile(fileDescriptor, fileDescriptor.stat.size);
-    }
-    constructor(fileDescriptor, size) {
-        this.fileDescriptor = fileDescriptor;
-        this.size = size;
-    }
-    /** Close file */
-    async destroy() {
-        await this.fileDescriptor.close();
-    }
-    /**
-     * Gets an unsigned 8-bit integer at the specified byte offset from the start of the file.
-     * @param offset The offset, in bytes, from the start of the file where to read the data.
-     */
-    async getUint8(offset) {
-        const val = new Uint8Array((await this.fileDescriptor.read(Buffer.alloc(1), 0, 1, offset)).buffer.buffer).at(0);
-        if (val === undefined) {
-            throw new Error('something went wrong');
-        }
-        return val;
-    }
-    /**
-     * Gets an unsigned 16-bit integer at the specified byte offset from the start of the file.
-     * @param offset The offset, in bytes, from the start of the file where to read the data.
-     */
-    async getUint16(offset) {
-        const val = new Uint16Array((await this.fileDescriptor.read(Buffer.alloc(2), 0, 2, offset)).buffer.buffer).at(0);
-        if (val === undefined) {
-            throw new Error('something went wrong');
-        }
-        return val;
-    }
-    /**
-     * Gets an unsigned 32-bit integer at the specified byte offset from the start of the file.
-     * @param offset The offset, in bytes, from the start of the file where to read the data.
-     */
-    async getUint32(offset) {
-        const val = new Uint32Array((await this.fileDescriptor.read(Buffer.alloc(4), 0, 4, offset)).buffer.buffer).at(0);
-        if (val === undefined) {
-            throw new Error('something went wrong');
-        }
-        return val;
-    }
-    /**
-     * Gets an unsigned 32-bit integer at the specified byte offset from the start of the file.
-     * @param offset The offset, in bytes, from the start of the file where to read the data.
-     */
-    async getBigUint64(offset) {
-        const val = new BigInt64Array((await this.fileDescriptor.read(Buffer.alloc(8), 0, 8, offset)).buffer.buffer).at(0);
-        if (val === undefined) {
-            throw new Error('something went wrong');
-        }
-        return val;
-    }
-    /**
-     * returns an ArrayBuffer whose contents are a copy of this file bytes from startOffset, inclusive, up to endOffset, exclusive.
-     * @param startOffsset The offset, in byte, from the start of the file where to start reading the data.
-     * @param endOffset The offset, in bytes, from the start of the file where to end reading the data.
-     */
-    async slice(startOffsset, endOffset) {
-        const bigLength = endOffset - startOffsset;
-        if (bigLength > Number.MAX_SAFE_INTEGER) {
-            throw new Error('too big slice');
-        }
-        const length = Number(bigLength);
-        return (await this.fileDescriptor.read(Buffer.alloc(length), 0, length, startOffsset)).buffer
-            .buffer;
-    }
-    /**
-     * the length (in bytes) of the data.
-     */
-    get length() {
-        return this.size;
-    }
-}
-exports.FileHandleFile = FileHandleFile;

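Note: the following is a minimal usage sketch (not part of the published diff) of the removed FileHandleFile helper, based only on the API visible in the hunk above; the file path is a placeholder and the import path is an assumption, not taken from this diff.

    import {FileHandleFile} from '@loaders.gl/loader-utils'; // import path assumed

    async function readHeader() {
      const file = await FileHandleFile.from('./tile.bin'); // resolves path aliases, then opens an fs file handle
      const magic = await file.getUint32(0);                // first 4 bytes, platform byte order
      const header = await file.slice(0, 256);              // ArrayBuffer copy of bytes [0, 256)
      console.log(magic, header.byteLength, file.length);   // file.length reports the stat'ed size
      await file.destroy();                                 // closes the underlying file descriptor
    }
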
package/dist/lib/file-provider/file-handle.js DELETED
@@ -1,55 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.FileHandle = void 0;
-const fs_1 = require("fs");
-/** Object handling file info */
-class FileHandle {
-    constructor(fileDescriptor, stats) {
-        /**
-         * Reads data from the file and stores that in the given buffer.
-         *
-         * If the file is not modified concurrently, the end-of-file is reached when the
-         * number of bytes read is zero.
-         * @param buffer A buffer that will be filled with the file data read.
-         * @param offset The location in the buffer at which to start filling.
-         * @param length The number of bytes to read.
-         * @param position The location where to begin reading data from the file. If `null`, data will be read from the current file position, and the position will be updated. If `position` is an
-         * integer, the current file position will remain unchanged.
-         * @return Fulfills upon success with a FileReadResult object
-         */
-        this.read = (buffer, offset, length, position) => {
-            return new Promise((s) => {
-                (0, fs_1.read)(this.fileDescriptor, buffer, offset, length, position, (_err, bytesRead, buffer) => s({ bytesRead, buffer }));
-            });
-        };
-        this.fileDescriptor = fileDescriptor;
-        this.stats = stats;
-    }
-    /**
-     * Opens a `FileHandle`.
-     *
-     * @param path path to the file
-     * @return Fulfills with a {FileHandle} object.
-     */
-    static async open(path) {
-        const [fd, stats] = await Promise.all([
-            new Promise((resolve, reject) => {
-                (0, fs_1.open)(path, undefined, undefined, (_err, fd) => (_err ? reject(_err) : resolve(fd)));
-            }),
-            new Promise((resolve, reject) => {
-                (0, fs_1.stat)(path, { bigint: true }, (_err, stats) => (_err ? reject(_err) : resolve(stats)));
-            })
-        ]);
-        return new FileHandle(fd, stats);
-    }
-    /** Close file */
-    async close() {
-        return new Promise((resolve) => {
-            (0, fs_1.close)(this.fileDescriptor, (_err) => resolve());
-        });
-    }
-    get stat() {
-        return this.stats;
-    }
-}
-exports.FileHandle = FileHandle;

package/dist/lib/file-provider/file-provider.js DELETED
@@ -1,13 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.isFileProvider = void 0;
-/**
- * Check is the object has FileProvider members
- * @param fileProvider - tested object
- */
-const isFileProvider = (fileProvider) => {
-    return (fileProvider?.getUint8 &&
-        fileProvider?.slice &&
-        fileProvider?.length);
-};
-exports.isFileProvider = isFileProvider;

package/dist/lib/filesystems/filesystem.js DELETED
@@ -1,37 +0,0 @@
-"use strict";
-// loaders.gl, MIT license
-Object.defineProperty(exports, "__esModule", { value: true });
-/**
- * A FileSystem interface can encapsulate a FileList, a ZipFile, a GoogleDrive etc.
- *
-export interface IFileSystem {
-  /**
-   * Return a list of file names
-   * @param dirname directory name. file system root directory if omitted
-   *
-  readdir(dirname?: string, options?: {recursive?: boolean}): Promise<string[]>;
-
-  /**
-   * Gets information from a local file from the filesystem
-   * @param filename file name to stat
-   * @param options currently unused
-   * @throws if filename is not in local filesystem
-   *
-  stat(filename: string, options?: object): Promise<{size: number}>;
-
-  /**
-   * Fetches a local file from the filesystem (or a URL)
-   * @param filename
-   * @param options
-   *
-  fetch(filename: string, options?: object): Promise<Response>;
-}
-
-type ReadOptions = {buffer?: ArrayBuffer; offset?: number; length?: number; position?: number};
-export interface IRandomAccessReadFileSystem extends IFileSystem {
-  open(path: string, flags: string | number, mode?: any): Promise<any>;
-  close(fd: any): Promise<void>;
-  fstat(fd: any): Promise<object>;
-  read(fd: any, options?: ReadOptions): Promise<{bytesRead: number; buffer: Buffer}>;
-}
-*/

package/dist/lib/filesystems/node-filesystem.js DELETED
@@ -1,74 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.NodeFileSystem = void 0;
-const fs = __importStar(require("../node/fs"));
-/**
- * FileSystem pass-through for Node.js
- * Compatible with BrowserFileSystem.
- * @param options
- */
-class NodeFileSystem {
-    // implements FileSystem
-    constructor(options) {
-        this.fetch = options._fetch;
-    }
-    async readdir(dirname = '.', options) {
-        return await fs.readdir(dirname, options);
-    }
-    async stat(path, options) {
-        const info = await fs.stat(path, options);
-        return { size: Number(info.size), isDirectory: () => false, info };
-    }
-    async fetch(path, options) {
-        // Falls back to handle https:/http:/data: etc fetches
-        // eslint-disable-next-line
-        const fallbackFetch = options.fetch || this.fetch;
-        return fallbackFetch(path, options);
-    }
-    // implements IRandomAccessFileSystem
-    async open(path, flags, mode) {
-        return await fs.open(path, flags);
-    }
-    async close(fd) {
-        return await fs.close(fd);
-    }
-    async fstat(fd) {
-        const info = await fs.fstat(fd);
-        return info;
-    }
-    async read(fd,
-    // @ts-ignore Possibly null
-    { buffer = null, offset = 0, length = buffer.byteLength, position = null }) {
-        let totalBytesRead = 0;
-        // Read in loop until we get required number of bytes
-        while (totalBytesRead < length) {
-            const { bytesRead } = await fs.read(fd, buffer, offset + totalBytesRead, length - totalBytesRead, position + totalBytesRead);
-            totalBytesRead += bytesRead;
-        }
-        return { bytesRead: totalBytesRead, buffer };
-    }
-}
-exports.NodeFileSystem = NodeFileSystem;

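Note: a hypothetical usage sketch (not part of the published diff, assumes an async context) of the removed NodeFileSystem wrapper; the constructor reads options._fetch as shown in the hunk above, and the file names are placeholders.

    const fileSystem = new NodeFileSystem({_fetch: fetch});
    const names = await fileSystem.readdir('.');            // array of file names in the directory
    const fd = await fileSystem.open('./data.bin', 'r');
    const {bytesRead, buffer} = await fileSystem.read(fd, {
      buffer: Buffer.alloc(64), // destination buffer
      offset: 0,                // write position inside the destination buffer
      length: 64,               // the read loop above retries until this many bytes have been read
      position: 0               // byte offset in the file
    });
    await fileSystem.close(fd);
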
package/dist/lib/filesystems/readable-file.js DELETED
@@ -1,25 +0,0 @@
-"use strict";
-// loaders.gl, MIT license
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.makeReadableFile = void 0;
-/** Helper function to create an envelope reader for a binary memory input */
-function makeReadableFile(data) {
-    if (data instanceof ArrayBuffer) {
-        const arrayBuffer = data;
-        return {
-            read: async (start, length) => Buffer.from(data, start, length),
-            close: async () => { },
-            size: arrayBuffer.byteLength
-        };
-    }
-    const blob = data;
-    return {
-        read: async (start, length) => {
-            const arrayBuffer = await blob.slice(start, start + length).arrayBuffer();
-            return Buffer.from(arrayBuffer);
-        },
-        close: async () => { },
-        size: blob.size
-    };
-}
-exports.makeReadableFile = makeReadableFile;

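Note: an illustrative sketch (assumes an async context) of the uniform read/close/size shape that the removed makeReadableFile returned for both ArrayBuffer and Blob inputs; the data is made up.

    const file = makeReadableFile(new Uint8Array([1, 2, 3, 4]).buffer);
    const firstTwo = await file.read(0, 2); // Node.js Buffer over bytes [0, 2)
    console.log(file.size);                 // 4
    await file.close();                     // no-op for in-memory data
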
package/dist/lib/filesystems/writable-file.js DELETED
@@ -1,48 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.makeWritableFile = void 0;
-// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
-const globals_1 = require("../env-utils/globals");
-const fs = __importStar(require("../node/fs"));
-/** Helper function to create an envelope reader for a binary memory input */
-function makeWritableFile(pathOrStream, options) {
-    if (globals_1.isBrowser) {
-        return {
-            write: async () => { },
-            close: async () => { }
-        };
-    }
-    const outputStream = typeof pathOrStream === 'string' ? fs.createWriteStream(pathOrStream, options) : pathOrStream;
-    return {
-        write: async (buffer) => new Promise((resolve, reject) => {
-            outputStream.write(buffer, (err) => (err ? reject(err) : resolve()));
-        }),
-        close: () => new Promise((resolve, reject) => {
-            outputStream.close((err) => (err ? reject(err) : resolve()));
-        })
-    };
-}
-exports.makeWritableFile = makeWritableFile;

package/dist/lib/iterators/async-iteration.js DELETED
@@ -1,53 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.concatenateStringsAsync = exports.concatenateArrayBuffersAsync = exports.forEach = void 0;
-const array_buffer_utils_1 = require("../binary-utils/array-buffer-utils");
-// GENERAL UTILITIES
-/**
- * Iterate over async iterator, without resetting iterator if end is not reached
- * - forEach intentionally does not reset iterator if exiting loop prematurely
- * so that iteration can continue in a second loop
- * - It is recommended to use a standard for-await as last loop to ensure
- * iterator gets properly reset
- *
- * TODO - optimize using sync iteration if argument is an Iterable?
- *
- * @param iterator
- * @param visitor
- */
-async function forEach(iterator, visitor) {
-    // eslint-disable-next-line
-    while (true) {
-        const { done, value } = await iterator.next();
-        if (done) {
-            iterator.return();
-            return;
-        }
-        const cancel = visitor(value);
-        if (cancel) {
-            return;
-        }
-    }
-}
-exports.forEach = forEach;
-// Breaking big data into iterable chunks, concatenating iterable chunks into big data objects
-/**
- * Concatenates all data chunks yielded by an (async) iterator
- * This function can e.g. be used to enable atomic parsers to work on (async) iterator inputs
- */
-async function concatenateArrayBuffersAsync(asyncIterator) {
-    const arrayBuffers = [];
-    for await (const chunk of asyncIterator) {
-        arrayBuffers.push(chunk);
-    }
-    return (0, array_buffer_utils_1.concatenateArrayBuffers)(...arrayBuffers);
-}
-exports.concatenateArrayBuffersAsync = concatenateArrayBuffersAsync;
-async function concatenateStringsAsync(asyncIterator) {
-    const strings = [];
-    for await (const chunk of asyncIterator) {
-        strings.push(chunk);
-    }
-    return strings.join('');
-}
-exports.concatenateStringsAsync = concatenateStringsAsync;

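Note: an illustrative sketch (assumes an async context) of the removed concatenateArrayBuffersAsync helper; the chunk values are made up.

    async function* chunks() {
      yield new Uint8Array([1, 2]).buffer;
      yield new Uint8Array([3, 4, 5]).buffer;
    }
    const combined = await concatenateArrayBuffersAsync(chunks()); // single 5-byte ArrayBuffer
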
package/dist/lib/iterators/text-iterators.js DELETED
@@ -1,61 +0,0 @@
-"use strict";
-// TextDecoder iterators
-// TextDecoder will keep any partial undecoded bytes between calls to `decode`
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.makeNumberedLineIterator = exports.makeLineIterator = exports.makeTextEncoderIterator = exports.makeTextDecoderIterator = void 0;
-async function* makeTextDecoderIterator(arrayBufferIterator, options = {}) {
-    const textDecoder = new TextDecoder(undefined, options);
-    for await (const arrayBuffer of arrayBufferIterator) {
-        yield typeof arrayBuffer === 'string'
-            ? arrayBuffer
-            : textDecoder.decode(arrayBuffer, { stream: true });
-    }
-}
-exports.makeTextDecoderIterator = makeTextDecoderIterator;
-// TextEncoder iterator
-// TODO - this is not useful unless min chunk size is given
-// TextEncoder will keep any partial undecoded bytes between calls to `encode`
-// If iterator does not yield strings, assume arrayBuffer and return unencoded
-async function* makeTextEncoderIterator(textIterator) {
-    const textEncoder = new TextEncoder();
-    for await (const text of textIterator) {
-        yield typeof text === 'string' ? textEncoder.encode(text) : text;
-    }
-}
-exports.makeTextEncoderIterator = makeTextEncoderIterator;
-/**
- * @param textIterator async iterable yielding strings
- * @returns an async iterable over lines
- * See http://2ality.com/2018/04/async-iter-nodejs.html
- */
-async function* makeLineIterator(textIterator) {
-    let previous = '';
-    for await (const textChunk of textIterator) {
-        previous += textChunk;
-        let eolIndex;
-        while ((eolIndex = previous.indexOf('\n')) >= 0) {
-            // line includes the EOL
-            const line = previous.slice(0, eolIndex + 1);
-            previous = previous.slice(eolIndex + 1);
-            yield line;
-        }
-    }
-    if (previous.length > 0) {
-        yield previous;
-    }
-}
-exports.makeLineIterator = makeLineIterator;
-/**
- * @param lineIterator async iterable yielding lines
- * @returns async iterable yielding numbered lines
- *
- * See http://2ality.com/2018/04/async-iter-nodejs.html
- */
-async function* makeNumberedLineIterator(lineIterator) {
-    let counter = 1;
-    for await (const line of lineIterator) {
-        yield { counter, line };
-        counter++;
-    }
-}
-exports.makeNumberedLineIterator = makeNumberedLineIterator;

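Note: an illustrative sketch (assumes an async context) chaining the removed line iterators; as the code above shows, each emitted line keeps its trailing newline and a final partial line is still yielded. The input chunks are made up.

    async function* textChunks() {
      yield 'first\nsec'; // a line boundary may fall anywhere inside a chunk
      yield 'ond\nthird';
    }
    for await (const {counter, line} of makeNumberedLineIterator(makeLineIterator(textChunks()))) {
      console.log(counter, JSON.stringify(line)); // 1 "first\n", 2 "second\n", 3 "third"
    }
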
package/dist/lib/node/buffer.browser.js DELETED
@@ -1,22 +0,0 @@
-"use strict";
-// loaders.gl, MIT license
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.toBuffer = exports.toArrayBuffer = void 0;
-// Isolates Buffer references to ensure they are only bundled under Node.js (avoids big webpack polyfill)
-// this file is selected by the package.json "browser" field).
-/**
- * Convert Buffer to ArrayBuffer
- * Converts Node.js `Buffer` to `ArrayBuffer` (without triggering bundler to include Buffer polyfill on browser)
- * @todo better data type
- */
-function toArrayBuffer(buffer) {
-    return buffer;
-}
-exports.toArrayBuffer = toArrayBuffer;
-/**
- * Convert (copy) ArrayBuffer to Buffer
- */
-function toBuffer(binaryData) {
-    throw new Error('Buffer not supported in browser');
-}
-exports.toBuffer = toBuffer;

package/dist/lib/node/buffer.js DELETED
@@ -1,36 +0,0 @@
-"use strict";
-// loaders.gl, MIT license
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.toBuffer = exports.toArrayBuffer = void 0;
-// Isolates Buffer references to ensure they are only bundled under Node.js (avoids big webpack polyfill)
-// this file is selected by the package.json "browser" field).
-/**
- * Convert Buffer to ArrayBuffer
- * Converts Node.js `Buffer` to `ArrayBuffer` (without triggering bundler to include Buffer polyfill on browser)
- * @todo better data type
- */
-function toArrayBuffer(buffer) {
-    // TODO - per docs we should just be able to call buffer.buffer, but there are issues
-    if (Buffer.isBuffer(buffer)) {
-        const typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);
-        return typedArray.slice().buffer;
-    }
-    return buffer;
-}
-exports.toArrayBuffer = toArrayBuffer;
-/**
- * Convert (copy) ArrayBuffer to Buffer
- */
-function toBuffer(binaryData) {
-    if (Buffer.isBuffer(binaryData)) {
-        return binaryData;
-    }
-    if (ArrayBuffer.isView(binaryData)) {
-        binaryData = binaryData.buffer;
-    }
-    if (typeof Buffer !== 'undefined' && binaryData instanceof ArrayBuffer) {
-        return Buffer.from(binaryData);
-    }
-    throw new Error('toBuffer');
-}
-exports.toBuffer = toBuffer;

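Note: an illustrative sketch of the removed Node.js buffer conversions; the values are made up.

    const arrayBuffer = toArrayBuffer(Buffer.from([1, 2, 3])); // copies the bytes into a standalone ArrayBuffer
    const buffer = toBuffer(new Uint8Array([1, 2, 3]));        // typed-array views are unwrapped, then wrapped with Buffer.from
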
package/dist/lib/node/fs.js DELETED
@@ -1,51 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports._readToArrayBuffer = exports.isSupported = exports.createWriteStream = exports.createReadStream = exports.fstat = exports.read = exports.close = exports.open = exports.writeFileSync = exports.writeFile = exports.readFileSync = exports.readFile = exports.statSync = exports.stat = exports.readdir = void 0;
-// fs wrapper (promisified fs + avoids bundling fs in browsers)
-const fs_1 = __importDefault(require("fs"));
-const buffer_1 = require("./buffer");
-const promisify_1 = require("./promisify");
-exports.isSupported = Boolean(fs_1.default);
-// paths
-try {
-    /** Wrapper for Node.js fs method */
-    exports.readdir = (0, promisify_1.promisify2)(fs_1.default.readdir);
-    /** Wrapper for Node.js fs method */
-    exports.stat = (0, promisify_1.promisify2)(fs_1.default.stat);
-    exports.statSync = fs_1.default.statSync;
-    /** Wrapper for Node.js fs method */
-    exports.readFile = fs_1.default.readFile;
-    /** Wrapper for Node.js fs method */
-    exports.readFileSync = fs_1.default.readFileSync;
-    /** Wrapper for Node.js fs method */
-    exports.writeFile = (0, promisify_1.promisify3)(fs_1.default.writeFile);
-    /** Wrapper for Node.js fs method */
-    exports.writeFileSync = fs_1.default.writeFileSync;
-    // file descriptors
-    /** Wrapper for Node.js fs method */
-    exports.open = fs_1.default.open;
-    /** Wrapper for Node.js fs method */
-    exports.close = (fd) => new Promise((resolve, reject) => fs_1.default.close(fd, (err) => (err ? reject(err) : resolve())));
-    /** Wrapper for Node.js fs method */
-    exports.read = fs_1.default.read;
-    /** Wrapper for Node.js fs method */
-    exports.fstat = fs_1.default.fstat;
-    exports.createReadStream = fs_1.default.createReadStream;
-    exports.createWriteStream = fs_1.default.createWriteStream;
-    exports.isSupported = Boolean(fs_1.default);
-}
-catch {
-    // ignore
-}
-async function _readToArrayBuffer(fd, start, length) {
-    const buffer = Buffer.alloc(length);
-    const { bytesRead } = await (0, exports.read)(fd, buffer, 0, length, start);
-    if (bytesRead !== length) {
-        throw new Error('fs.read failed');
-    }
-    return (0, buffer_1.toArrayBuffer)(buffer);
-}
-exports._readToArrayBuffer = _readToArrayBuffer;

package/dist/lib/node/promisify.js DELETED
@@ -1,22 +0,0 @@
-"use strict";
-// @loaders.gl, MIT license
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.promisify3 = exports.promisify2 = exports.promisify1 = void 0;
-/**
- * Typesafe promisify implementation
- * @link https://dev.to/_gdelgado/implement-a-type-safe-version-of-node-s-promisify-in-7-lines-of-code-in-typescript-2j34
- * @param fn
- * @returns
- */
-function promisify1(fn) {
-    return (args) => new Promise((resolve, reject) => fn(args, (error, callbackArgs) => (error ? reject(error) : resolve(callbackArgs))));
-}
-exports.promisify1 = promisify1;
-function promisify2(fn) {
-    return (arg1, arg2) => new Promise((resolve, reject) => fn(arg1, arg2, (error, callbackArgs) => (error ? reject(error) : resolve(callbackArgs))));
-}
-exports.promisify2 = promisify2;
-function promisify3(fn) {
-    return (arg1, arg2, arg3) => new Promise((resolve, reject) => fn(arg1, arg2, arg3, (error, callbackArgs) => (error ? reject(error) : resolve(callbackArgs))));
-}
-exports.promisify3 = promisify3;

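Note: an illustrative sketch (assumes an async context) of the removed two-argument promisify wrapper applied to a Node.js callback API.

    import fs from 'fs';

    const readdirAsync = promisify2(fs.readdir); // (dirname, options) => Promise of the callback result
    const names = await readdirAsync('.', {});   // resolves with the directory listing, rejects on error
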
package/dist/lib/node/stream.js DELETED
@@ -1,17 +0,0 @@
-"use strict";
-// loaders.gl, MIT license
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.isSupported = exports.Transform = void 0;
-const stream_1 = __importDefault(require("stream"));
-exports.isSupported = Boolean(stream_1.default);
-// paths
-try {
-    /** Wrapper for Node.js fs method */
-    exports.Transform = stream_1.default.Transform;
-}
-catch {
-    // ignore
-}

package/dist/lib/option-utils/merge-loader-options.js DELETED
@@ -1,27 +0,0 @@
-"use strict";
-// loaders.gl, MIT license
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.mergeLoaderOptions = void 0;
-/**
- *
- * @param baseOptions Can be undefined, in which case a fresh options object will be minted
- * @param newOptions
- * @returns
- */
-function mergeLoaderOptions(baseOptions, newOptions) {
-    return mergeOptionsRecursively(baseOptions || {}, newOptions);
-}
-exports.mergeLoaderOptions = mergeLoaderOptions;
-function mergeOptionsRecursively(baseOptions, newOptions) {
-    const options = { ...baseOptions };
-    for (const [key, newValue] of Object.entries(newOptions)) {
-        if (newValue && typeof newValue === 'object') {
-            options[key] = mergeOptionsRecursively(options[key] || {}, newOptions[key]);
-            // Object.assign(options[key] as object, newOptions[key]);
-        }
-        else {
-            options[key] = newOptions[key];
-        }
-    }
-    return options;
-}

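Note: an illustrative sketch of the removed mergeLoaderOptions deep merge; the option names are placeholders, not real loader options.

    const merged = mergeLoaderOptions(
      {csv: {delimiter: ',', header: true}}, // base options (may also be undefined)
      {csv: {delimiter: ';'}}                // new options
    );
    // merged => {csv: {delimiter: ';', header: true}}
    // nested option objects are merged key by key rather than replaced wholesale
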
package/dist/lib/parser-utils/parse-json.js DELETED
@@ -1,16 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.parseJSON = void 0;
-const get_first_characters_1 = require("../binary-utils/get-first-characters");
-/**
- * Minimal JSON parser that throws more meaningful error messages
- */
-function parseJSON(string) {
-    try {
-        return JSON.parse(string);
-    }
-    catch (_) {
-        throw new Error(`Failed to parse JSON from data starting with "${(0, get_first_characters_1.getFirstCharacters)(string)}"`);
-    }
-}
-exports.parseJSON = parseJSON;

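Note: an illustrative sketch of the removed parseJSON wrapper; the inputs are made up.

    parseJSON('{"type": "point"}'); // => {type: 'point'}
    parseJSON('<!doctype html>');   // throws, quoting the start of the offending input in the error message
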