@loaders.gl/loader-utils 3.1.0-alpha.1 → 3.1.0-alpha.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/es5/index.js +323 -0
- package/dist/es5/index.js.map +1 -0
- package/dist/es5/json-loader.js +29 -0
- package/dist/es5/json-loader.js.map +1 -0
- package/dist/es5/lib/binary-utils/array-buffer-utils.js +102 -0
- package/dist/es5/lib/binary-utils/array-buffer-utils.js.map +1 -0
- package/dist/es5/lib/binary-utils/binary-copy-utils.js +35 -0
- package/dist/es5/lib/binary-utils/binary-copy-utils.js.map +1 -0
- package/dist/es5/lib/binary-utils/buffer-utils.js +32 -0
- package/dist/es5/lib/binary-utils/buffer-utils.js.map +1 -0
- package/dist/es5/lib/binary-utils/encode-utils.js +42 -0
- package/dist/es5/lib/binary-utils/encode-utils.js.map +1 -0
- package/dist/es5/lib/binary-utils/get-first-characters.js +36 -0
- package/dist/es5/lib/binary-utils/get-first-characters.js.map +1 -0
- package/dist/es5/lib/binary-utils/memory-copy-utils.js +39 -0
- package/dist/es5/lib/binary-utils/memory-copy-utils.js.map +1 -0
- package/dist/es5/lib/env-utils/assert.js +13 -0
- package/dist/es5/lib/env-utils/assert.js.map +1 -0
- package/dist/es5/lib/env-utils/globals.js +28 -0
- package/dist/es5/lib/env-utils/globals.js.map +1 -0
- package/dist/es5/lib/filesystems/node-filesystem.js +74 -0
- package/dist/es5/lib/filesystems/node-filesystem.js.map +1 -0
- package/dist/es5/lib/iterators/async-iteration.js +51 -0
- package/dist/es5/lib/iterators/async-iteration.js.map +1 -0
- package/dist/es5/lib/iterators/text-iterators.js +59 -0
- package/dist/es5/lib/iterators/text-iterators.js.map +1 -0
- package/dist/es5/lib/node/buffer.js +38 -0
- package/dist/es5/lib/node/buffer.js.map +1 -0
- package/dist/es5/lib/node/fs.js +52 -0
- package/dist/es5/lib/node/fs.js.map +1 -0
- package/dist/es5/lib/node/util.js +16 -0
- package/dist/es5/lib/node/util.js.map +1 -0
- package/dist/es5/lib/parser-utils/parse-json.js +17 -0
- package/dist/es5/lib/parser-utils/parse-json.js.map +1 -0
- package/dist/es5/lib/path-utils/file-aliases.js +39 -0
- package/dist/es5/lib/path-utils/file-aliases.js.map +1 -0
- package/dist/es5/lib/path-utils/path.js +35 -0
- package/dist/es5/lib/path-utils/path.js.map +1 -0
- package/dist/es5/lib/request-utils/request-scheduler.js +154 -0
- package/dist/es5/lib/request-utils/request-scheduler.js.map +1 -0
- package/dist/es5/lib/worker-loader-utils/create-loader-worker.js +117 -0
- package/dist/es5/lib/worker-loader-utils/create-loader-worker.js.map +1 -0
- package/dist/es5/lib/worker-loader-utils/parse-with-worker.js +76 -0
- package/dist/es5/lib/worker-loader-utils/parse-with-worker.js.map +1 -0
- package/dist/es5/types.js +2 -0
- package/dist/{types.js.map → es5/types.js.map} +0 -0
- package/dist/es5/workers/json-worker.js +8 -0
- package/dist/es5/workers/json-worker.js.map +1 -0
- package/dist/{index.js → esm/index.js} +8 -4
- package/dist/esm/index.js.map +1 -0
- package/dist/{json-loader.js → esm/json-loader.js} +1 -1
- package/dist/esm/json-loader.js.map +1 -0
- package/dist/{lib → esm/lib}/binary-utils/array-buffer-utils.js +0 -0
- package/dist/esm/lib/binary-utils/array-buffer-utils.js.map +1 -0
- package/dist/{lib → esm/lib}/binary-utils/binary-copy-utils.js +0 -0
- package/dist/esm/lib/binary-utils/binary-copy-utils.js.map +1 -0
- package/dist/{lib → esm/lib}/binary-utils/buffer-utils.js +1 -1
- package/dist/esm/lib/binary-utils/buffer-utils.js.map +1 -0
- package/dist/{lib → esm/lib}/binary-utils/encode-utils.js +0 -0
- package/dist/esm/lib/binary-utils/encode-utils.js.map +1 -0
- package/dist/{lib → esm/lib}/binary-utils/get-first-characters.js +0 -0
- package/dist/esm/lib/binary-utils/get-first-characters.js.map +1 -0
- package/dist/{lib → esm/lib}/binary-utils/memory-copy-utils.js +0 -0
- package/dist/esm/lib/binary-utils/memory-copy-utils.js.map +1 -0
- package/dist/{lib → esm/lib}/env-utils/assert.js +0 -0
- package/dist/esm/lib/env-utils/assert.js.map +1 -0
- package/dist/{lib → esm/lib}/env-utils/globals.js +1 -1
- package/dist/esm/lib/env-utils/globals.js.map +1 -0
- package/dist/esm/lib/filesystems/node-filesystem.js +60 -0
- package/dist/esm/lib/filesystems/node-filesystem.js.map +1 -0
- package/dist/{lib → esm/lib}/iterators/async-iteration.js +0 -0
- package/dist/esm/lib/iterators/async-iteration.js.map +1 -0
- package/dist/{lib → esm/lib}/iterators/text-iterators.js +0 -0
- package/dist/esm/lib/iterators/text-iterators.js.map +1 -0
- package/dist/{lib/node/buffer-utils.node.js → esm/lib/node/buffer.js} +1 -1
- package/dist/esm/lib/node/buffer.js.map +1 -0
- package/dist/esm/lib/node/fs.js +27 -0
- package/dist/esm/lib/node/fs.js.map +1 -0
- package/dist/esm/lib/node/util.js +3 -0
- package/dist/esm/lib/node/util.js.map +1 -0
- package/dist/{lib → esm/lib}/parser-utils/parse-json.js +1 -1
- package/dist/esm/lib/parser-utils/parse-json.js.map +1 -0
- package/dist/{lib → esm/lib}/path-utils/file-aliases.js +1 -1
- package/dist/esm/lib/path-utils/file-aliases.js.map +1 -0
- package/dist/{lib → esm/lib}/path-utils/path.js +6 -2
- package/dist/esm/lib/path-utils/path.js.map +1 -0
- package/dist/{lib → esm/lib}/request-utils/request-scheduler.js +0 -0
- package/dist/esm/lib/request-utils/request-scheduler.js.map +1 -0
- package/dist/{lib → esm/lib}/worker-loader-utils/create-loader-worker.js +1 -1
- package/dist/esm/lib/worker-loader-utils/create-loader-worker.js.map +1 -0
- package/dist/{lib → esm/lib}/worker-loader-utils/parse-with-worker.js +1 -1
- package/dist/esm/lib/worker-loader-utils/parse-with-worker.js.map +1 -0
- package/dist/{types.js → esm/types.js} +0 -0
- package/dist/esm/types.js.map +1 -0
- package/dist/{workers → esm/workers}/json-worker.js +0 -0
- package/dist/esm/workers/json-worker.js.map +1 -0
- package/dist/index.d.ts +26 -0
- package/dist/json-loader.d.ts +21 -0
- package/dist/lib/binary-utils/array-buffer-utils.d.ts +30 -0
- package/dist/lib/binary-utils/binary-copy-utils.d.ts +23 -0
- package/dist/lib/binary-utils/buffer-utils.d.ts +15 -0
- package/dist/lib/binary-utils/encode-utils.d.ts +3 -0
- package/dist/lib/binary-utils/get-first-characters.d.ts +2 -0
- package/dist/lib/binary-utils/memory-copy-utils.d.ts +24 -0
- package/dist/lib/env-utils/assert.d.ts +5 -0
- package/dist/lib/env-utils/globals.d.ts +14 -0
- package/dist/lib/filesystems/node-filesystem.d.ts +37 -0
- package/dist/lib/iterators/async-iteration.d.ts +19 -0
- package/dist/lib/iterators/text-iterators.d.ts +18 -0
- package/dist/lib/node/buffer.d.ts +9 -0
- package/dist/lib/node/fs.d.ts +25 -0
- package/dist/lib/node/util.d.ts +4 -0
- package/dist/lib/parser-utils/parse-json.d.ts +4 -0
- package/dist/lib/path-utils/file-aliases.d.ts +16 -0
- package/dist/lib/path-utils/path.d.ts +15 -0
- package/dist/lib/request-utils/request-scheduler.d.ts +61 -0
- package/dist/lib/worker-loader-utils/create-loader-worker.d.ts +6 -0
- package/dist/lib/worker-loader-utils/parse-with-worker.d.ts +14 -0
- package/dist/types.d.ts +206 -0
- package/dist/workers/json-worker.d.ts +1 -0
- package/package.json +15 -10
- package/src/index.ts +26 -8
- package/src/lib/binary-utils/buffer-utils.ts +1 -1
- package/src/lib/env-utils/globals.ts +1 -1
- package/src/lib/filesystems/node-filesystem.ts +79 -0
- package/src/lib/node/{buffer-utils.node.ts → buffer.ts} +0 -0
- package/src/lib/node/fs.ts +29 -13
- package/src/lib/node/util.ts +4 -0
- package/src/lib/path-utils/path.ts +9 -0
- package/dist/index.js.map +0 -1
- package/dist/json-loader.js.map +0 -1
- package/dist/lib/binary-utils/array-buffer-utils.js.map +0 -1
- package/dist/lib/binary-utils/binary-copy-utils.js.map +0 -1
- package/dist/lib/binary-utils/buffer-utils.js.map +0 -1
- package/dist/lib/binary-utils/encode-utils.js.map +0 -1
- package/dist/lib/binary-utils/get-first-characters.js.map +0 -1
- package/dist/lib/binary-utils/memory-copy-utils.js.map +0 -1
- package/dist/lib/env-utils/assert.js.map +0 -1
- package/dist/lib/env-utils/globals.js.map +0 -1
- package/dist/lib/iterators/async-iteration.js.map +0 -1
- package/dist/lib/iterators/text-iterators.js.map +0 -1
- package/dist/lib/node/buffer-utils.node.js.map +0 -1
- package/dist/lib/node/fs.js +0 -29
- package/dist/lib/node/fs.js.map +0 -1
- package/dist/lib/parser-utils/parse-json.js.map +0 -1
- package/dist/lib/path-utils/file-aliases.js.map +0 -1
- package/dist/lib/path-utils/path.js.map +0 -1
- package/dist/lib/request-utils/request-scheduler.js.map +0 -1
- package/dist/lib/worker-loader-utils/create-loader-worker.js.map +0 -1
- package/dist/lib/worker-loader-utils/parse-with-worker.js.map +0 -1
- package/dist/workers/json-worker.js.map +0 -1
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
import * as fs from '../node/fs';

/**
 * FileSystem pass-through for Node.js.
 * Compatible with BrowserFileSystem: exposes fetch/readdir/stat plus
 * random-access open/close/fstat/read operations on file descriptors.
 */
export default class NodeFileSystem {
  /**
   * @param options - expects `options._fetch`, a fetch implementation used
   *   as the fallback for `fetch()` calls.
   */
  constructor(options) {
    // NOTE(review): this instance property shadows the prototype `fetch`
    // method below — after construction `this.fetch` resolves to
    // `options._fetch`, not the method. This matches the original source;
    // confirm it is intentional before relying on the method via `this.fetch`.
    this.fetch = options._fetch;
  }

  /** Wrapper for fs.readdir. */
  async readdir(dirname = '.', options) {
    return await fs.readdir(dirname, options);
  }

  /**
   * Wrapper for fs.stat. Normalizes `size` to a Number.
   * NOTE(review): `isDirectory` is hardcoded to return false regardless of
   * the actual stat result — confirm this is intended.
   */
  async stat(path, options) {
    const info = await fs.stat(path, options);
    return {
      size: Number(info.size),
      isDirectory: () => false,
      info
    };
  }

  /** Falls back to `options.fetch`, then the fetch supplied at construction. */
  async fetch(path, options) {
    // eslint-disable-next-line
    const fallbackFetch = options.fetch || this.fetch;
    return fallbackFetch(path, options);
  }

  /** Wrapper for fs.open. `mode` is accepted for API symmetry but not forwarded. */
  async open(path, flags, mode) {
    return await fs.open(path, flags);
  }

  /** Wrapper for fs.close. */
  async close(fd) {
    return await fs.close(fd);
  }

  /** Wrapper for fs.fstat. */
  async fstat(fd) {
    const info = await fs.fstat(fd);
    return info;
  }

  /**
   * Reads `length` bytes at `position` into `buffer`, looping until the
   * requested number of bytes has been read.
   * @throws {Error} if no target buffer is provided.
   */
  async read(fd, {
    buffer = null,
    offset = 0,
    length = buffer && buffer.byteLength,
    position = null
  }) {
    // Fix: the original evaluated `buffer.byteLength` as a destructuring
    // default, throwing an unhelpful TypeError when `buffer` was omitted.
    if (!buffer) {
      throw new Error('NodeFileSystem.read: no buffer provided');
    }
    let totalBytesRead = 0;

    // Read in a loop until we get the required number of bytes.
    // NOTE(review): `position + totalBytesRead` coerces a null position to 0
    // (reads from file start, not the current file offset), and a persistent
    // 0-byte read at EOF would loop forever — both preserved from the
    // original; confirm callers always pass a numeric position and a
    // length within the file.
    while (totalBytesRead < length) {
      const {
        bytesRead
      } = await fs.read(fd, buffer, offset + totalBytesRead, length - totalBytesRead, position + totalBytesRead);
      totalBytesRead += bytesRead;
    }

    return {
      bytesRead: totalBytesRead,
      buffer
    };
  }

}
//# sourceMappingURL=node-filesystem.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../../src/lib/filesystems/node-filesystem.ts"],"names":["fs","NodeFileSystem","constructor","options","fetch","_fetch","readdir","dirname","stat","path","info","size","Number","isDirectory","fallbackFetch","open","flags","mode","close","fd","fstat","read","buffer","offset","length","byteLength","position","totalBytesRead","bytesRead"],"mappings":"AAAA,OAAO,KAAKA,EAAZ,MAAoB,YAApB;AAuBA,eAAe,MAAMC,cAAN,CAAyE;AAEtFC,EAAAA,WAAW,CAACC,OAAD,EAAgC;AACzC,SAAKC,KAAL,GAAaD,OAAO,CAACE,MAArB;AACD;;AAEY,QAAPC,OAAO,CAACC,OAAO,GAAG,GAAX,EAAgBJ,OAAhB,EAA8C;AACzD,WAAO,MAAMH,EAAE,CAACM,OAAH,CAAWC,OAAX,EAAoBJ,OAApB,CAAb;AACD;;AAES,QAAJK,IAAI,CAACC,IAAD,EAAeN,OAAf,EAA4C;AACpD,UAAMO,IAAI,GAAG,MAAMV,EAAE,CAACQ,IAAH,CAAQC,IAAR,EAAcN,OAAd,CAAnB;AACA,WAAO;AAACQ,MAAAA,IAAI,EAAEC,MAAM,CAACF,IAAI,CAACC,IAAN,CAAb;AAA0BE,MAAAA,WAAW,EAAE,MAAM,KAA7C;AAAoDH,MAAAA;AAApD,KAAP;AACD;;AAEU,QAALN,KAAK,CAACK,IAAD,EAAeN,OAAf,EAA8C;AAGvD,UAAMW,aAAa,GAAGX,OAAO,CAACC,KAAR,IAAiB,KAAKA,KAA5C;AACA,WAAOU,aAAa,CAACL,IAAD,EAAON,OAAP,CAApB;AACD;;AAGS,QAAJY,IAAI,CAACN,IAAD,EAAeO,KAAf,EAAuCC,IAAvC,EAAoE;AAC5E,WAAO,MAAMjB,EAAE,CAACe,IAAH,CAAQN,IAAR,EAAcO,KAAd,CAAb;AACD;;AAEU,QAALE,KAAK,CAACC,EAAD,EAA4B;AACrC,WAAO,MAAMnB,EAAE,CAACkB,KAAH,CAASC,EAAT,CAAb;AACD;;AAEU,QAALC,KAAK,CAACD,EAAD,EAA4B;AACrC,UAAMT,IAAI,GAAG,MAAMV,EAAE,CAACoB,KAAH,CAASD,EAAT,CAAnB;AACA,WAAOT,IAAP;AACD;;AAES,QAAJW,IAAI,CACRF,EADQ,EAGR;AAACG,IAAAA,MAAM,GAAG,IAAV;AAAgBC,IAAAA,MAAM,GAAG,CAAzB;AAA4BC,IAAAA,MAAM,GAAGF,MAAM,CAACG,UAA5C;AAAwDC,IAAAA,QAAQ,GAAG;AAAnE,GAHQ,EAIsC;AAC9C,QAAIC,cAAc,GAAG,CAArB;;AAEA,WAAOA,cAAc,GAAGH,MAAxB,EAAgC;AAC9B,YAAM;AAACI,QAAAA;AAAD,UAAc,MAAM5B,EAAE,CAACqB,IAAH,CACxBF,EADwB,EAExBG,MAFwB,EAGxBC,MAAM,GAAGI,cAHe,EAIxBH,MAAM,GAAGG,cAJe,EAKxBD,QAAQ,GAAGC,cALa,CAA1B;AAOAA,MAAAA,cAAc,IAAIC,SAAlB;AACD;;AACD,WAAO;AAACA,MAAAA,SAAS,EAAED,cAAZ;AAA4BL,MAAAA;AAA5B,KAAP;AACD;;AAtDqF","sourcesContent":["import * as fs from '../node/fs';\nimport {IFileSystem, IRandomAccessReadFileSystem} from 
'../../types';\n// import {fetchFile} from \"../fetch/fetch-file\"\n// import {selectLoader} from \"../api/select-loader\";\n\ntype Stat = {\n size: number;\n isDirectory: () => boolean;\n info?: fs.Stats;\n};\n\ntype ReadOptions = {\n buffer?: Buffer;\n offset?: number;\n length?: number;\n position?: number;\n};\n\n/**\n * FileSystem pass-through for Node.js\n * Compatible with BrowserFileSystem.\n * @param options\n */\nexport default class NodeFileSystem implements IFileSystem, IRandomAccessReadFileSystem {\n // implements IFileSystem\n constructor(options: {[key: string]: any}) {\n this.fetch = options._fetch;\n }\n\n async readdir(dirname = '.', options?: {}): Promise<any[]> {\n return await fs.readdir(dirname, options);\n }\n\n async stat(path: string, options?: {}): Promise<Stat> {\n const info = await fs.stat(path, options);\n return {size: Number(info.size), isDirectory: () => false, info};\n }\n\n async fetch(path: string, options: {[key: string]: any}) {\n // Falls back to handle https:/http:/data: etc fetches\n // eslint-disable-next-line\n const fallbackFetch = options.fetch || this.fetch;\n return fallbackFetch(path, options);\n }\n\n // implements IRandomAccessFileSystem\n async open(path: string, flags: string | number, mode?: any): Promise<number> {\n return await fs.open(path, flags);\n }\n\n async close(fd: number): Promise<void> {\n return await fs.close(fd);\n }\n\n async fstat(fd: number): Promise<Stat> {\n const info = await fs.fstat(fd);\n return info;\n }\n\n async read(\n fd: number,\n // @ts-ignore Possibly null\n {buffer = null, offset = 0, length = buffer.byteLength, position = null}: ReadOptions\n ): Promise<{bytesRead: number; buffer: Buffer}> {\n let totalBytesRead = 0;\n // Read in loop until we get required number of bytes\n while (totalBytesRead < length) {\n const {bytesRead} = await fs.read(\n fd,\n buffer,\n offset + totalBytesRead,\n length - totalBytesRead,\n position + totalBytesRead\n );\n totalBytesRead += bytesRead;\n 
}\n return {bytesRead: totalBytesRead, buffer};\n }\n}\n"],"file":"node-filesystem.js"}
|
|
File without changes
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../../src/lib/iterators/async-iteration.ts"],"names":["concatenateArrayBuffers","forEach","iterator","visitor","done","value","next","return","cancel","concatenateArrayBuffersAsync","asyncIterator","arrayBuffers","chunk","push","concatenateStringsAsync","strings","join"],"mappings":"AAAA,SAAQA,uBAAR,QAAsC,oCAAtC;AAgBA,OAAO,eAAeC,OAAf,CAAuBC,QAAvB,EAAiCC,OAAjC,EAA0C;AAE/C,SAAO,IAAP,EAAa;AACX,UAAM;AAACC,MAAAA,IAAD;AAAOC,MAAAA;AAAP,QAAgB,MAAMH,QAAQ,CAACI,IAAT,EAA5B;;AACA,QAAIF,IAAJ,EAAU;AACRF,MAAAA,QAAQ,CAACK,MAAT;AACA;AACD;;AACD,UAAMC,MAAM,GAAGL,OAAO,CAACE,KAAD,CAAtB;;AACA,QAAIG,MAAJ,EAAY;AACV;AACD;AACF;AACF;AASD,OAAO,eAAeC,4BAAf,CACLC,aADK,EAEiB;AACtB,QAAMC,YAA2B,GAAG,EAApC;;AACA,aAAW,MAAMC,KAAjB,IAA0BF,aAA1B,EAAyC;AACvCC,IAAAA,YAAY,CAACE,IAAb,CAAkBD,KAAlB;AACD;;AACD,SAAOZ,uBAAuB,CAAC,GAAGW,YAAJ,CAA9B;AACD;AAED,OAAO,eAAeG,uBAAf,CACLJ,aADK,EAEY;AACjB,QAAMK,OAAiB,GAAG,EAA1B;;AACA,aAAW,MAAMH,KAAjB,IAA0BF,aAA1B,EAAyC;AACvCK,IAAAA,OAAO,CAACF,IAAR,CAAaD,KAAb;AACD;;AACD,SAAOG,OAAO,CAACC,IAAR,CAAa,EAAb,CAAP;AACD","sourcesContent":["import {concatenateArrayBuffers} from '../binary-utils/array-buffer-utils';\n\n// GENERAL UTILITIES\n\n/**\n * Iterate over async iterator, without resetting iterator if end is not reached\n * - forEach intentionally does not reset iterator if exiting loop prematurely\n * so that iteration can continue in a second loop\n * - It is recommended to use a standard for-await as last loop to ensure\n * iterator gets properly reset\n *\n * TODO - optimize using sync iteration if argument is an Iterable?\n *\n * @param iterator\n * @param visitor\n */\nexport async function forEach(iterator, visitor) {\n // eslint-disable-next-line\n while (true) {\n const {done, value} = await iterator.next();\n if (done) {\n iterator.return();\n return;\n }\n const cancel = visitor(value);\n if (cancel) {\n return;\n }\n }\n}\n\n// Breaking big data into iterable chunks, concatenating iterable chunks into big data objects\n\n/**\n * 
Concatenates all data chunks yielded by an (async) iterator\n * This function can e.g. be used to enable atomic parsers to work on (async) iterator inputs\n */\n\nexport async function concatenateArrayBuffersAsync(\n asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>\n): Promise<ArrayBuffer> {\n const arrayBuffers: ArrayBuffer[] = [];\n for await (const chunk of asyncIterator) {\n arrayBuffers.push(chunk);\n }\n return concatenateArrayBuffers(...arrayBuffers);\n}\n\nexport async function concatenateStringsAsync(\n asyncIterator: AsyncIterable<string> | Iterable<string>\n): Promise<string> {\n const strings: string[] = [];\n for await (const chunk of asyncIterator) {\n strings.push(chunk);\n }\n return strings.join('');\n}\n"],"file":"async-iteration.js"}
|
|
File without changes
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../../src/lib/iterators/text-iterators.ts"],"names":["makeTextDecoderIterator","arrayBufferIterator","options","textDecoder","TextDecoder","undefined","arrayBuffer","decode","stream","makeTextEncoderIterator","textIterator","textEncoder","TextEncoder","text","encode","makeLineIterator","previous","textChunk","eolIndex","indexOf","line","slice","length","makeNumberedLineIterator","lineIterator","counter"],"mappings":"AAGA,OAAO,gBAAgBA,uBAAhB,CACLC,mBADK,EAELC,OAA2B,GAAG,EAFzB,EAGkB;AACvB,QAAMC,WAAW,GAAG,IAAIC,WAAJ,CAAgBC,SAAhB,EAA2BH,OAA3B,CAApB;;AACA,aAAW,MAAMI,WAAjB,IAAgCL,mBAAhC,EAAqD;AACnD,UAAM,OAAOK,WAAP,KAAuB,QAAvB,GACFA,WADE,GAEFH,WAAW,CAACI,MAAZ,CAAmBD,WAAnB,EAAgC;AAACE,MAAAA,MAAM,EAAE;AAAT,KAAhC,CAFJ;AAGD;AACF;AAOD,OAAO,gBAAgBC,uBAAhB,CACLC,YADK,EAEuB;AAC5B,QAAMC,WAAW,GAAG,IAAIC,WAAJ,EAApB;;AACA,aAAW,MAAMC,IAAjB,IAAyBH,YAAzB,EAAuC;AACrC,UAAM,OAAOG,IAAP,KAAgB,QAAhB,GAA2BF,WAAW,CAACG,MAAZ,CAAmBD,IAAnB,CAA3B,GAAsDA,IAA5D;AACD;AACF;AAQD,OAAO,gBAAgBE,gBAAhB,CACLL,YADK,EAEkB;AACvB,MAAIM,QAAQ,GAAG,EAAf;;AACA,aAAW,MAAMC,SAAjB,IAA8BP,YAA9B,EAA4C;AAC1CM,IAAAA,QAAQ,IAAIC,SAAZ;AACA,QAAIC,QAAJ;;AACA,WAAO,CAACA,QAAQ,GAAGF,QAAQ,CAACG,OAAT,CAAiB,IAAjB,CAAZ,KAAuC,CAA9C,EAAiD;AAE/C,YAAMC,IAAI,GAAGJ,QAAQ,CAACK,KAAT,CAAe,CAAf,EAAkBH,QAAQ,GAAG,CAA7B,CAAb;AACAF,MAAAA,QAAQ,GAAGA,QAAQ,CAACK,KAAT,CAAeH,QAAQ,GAAG,CAA1B,CAAX;AACA,YAAME,IAAN;AACD;AACF;;AAED,MAAIJ,QAAQ,CAACM,MAAT,GAAkB,CAAtB,EAAyB;AACvB,UAAMN,QAAN;AACD;AACF;AAQD,OAAO,gBAAgBO,wBAAhB,CACLC,YADK,EAE2C;AAChD,MAAIC,OAAO,GAAG,CAAd;;AACA,aAAW,MAAML,IAAjB,IAAyBI,YAAzB,EAAuC;AACrC,UAAM;AAACC,MAAAA,OAAD;AAAUL,MAAAA;AAAV,KAAN;AACAK,IAAAA,OAAO;AACR;AACF","sourcesContent":["// TextDecoder iterators\n// TextDecoder will keep any partial undecoded bytes between calls to `decode`\n\nexport async function* makeTextDecoderIterator(\n arrayBufferIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,\n options: TextDecoderOptions = {}\n): AsyncIterable<string> {\n const textDecoder = new 
TextDecoder(undefined, options);\n for await (const arrayBuffer of arrayBufferIterator) {\n yield typeof arrayBuffer === 'string'\n ? arrayBuffer\n : textDecoder.decode(arrayBuffer, {stream: true});\n }\n}\n\n// TextEncoder iterator\n// TODO - this is not useful unless min chunk size is given\n// TextEncoder will keep any partial undecoded bytes between calls to `encode`\n// If iterator does not yield strings, assume arrayBuffer and return unencoded\n\nexport async function* makeTextEncoderIterator(\n textIterator: AsyncIterable<string> | Iterable<ArrayBuffer>\n): AsyncIterable<ArrayBuffer> {\n const textEncoder = new TextEncoder();\n for await (const text of textIterator) {\n yield typeof text === 'string' ? textEncoder.encode(text) : text;\n }\n}\n\n/**\n * @param textIterator async iterable yielding strings\n * @returns an async iterable over lines\n * See http://2ality.com/2018/04/async-iter-nodejs.html\n */\n\nexport async function* makeLineIterator(\n textIterator: AsyncIterable<string>\n): AsyncIterable<string> {\n let previous = '';\n for await (const textChunk of textIterator) {\n previous += textChunk;\n let eolIndex;\n while ((eolIndex = previous.indexOf('\\n')) >= 0) {\n // line includes the EOL\n const line = previous.slice(0, eolIndex + 1);\n previous = previous.slice(eolIndex + 1);\n yield line;\n }\n }\n\n if (previous.length > 0) {\n yield previous;\n }\n}\n\n/**\n * @param lineIterator async iterable yielding lines\n * @returns async iterable yielding numbered lines\n *\n * See http://2ality.com/2018/04/async-iter-nodejs.html\n */\nexport async function* makeNumberedLineIterator(\n lineIterator: AsyncIterable<string>\n): AsyncIterable<{counter: number; line: string}> {\n let counter = 1;\n for await (const line of lineIterator) {\n yield {counter, line};\n counter++;\n }\n}\n"],"file":"text-iterators.js"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../../src/lib/node/buffer.ts"],"names":["assert","toArrayBuffer","buffer","Buffer","isBuffer","typedArray","Uint8Array","byteOffset","length","slice","toBuffer","binaryData","ArrayBuffer","isView","byteLength","view","i"],"mappings":"AAGA,SAAQA,MAAR,QAAqB,qBAArB;AAKA,OAAO,SAASC,aAAT,CAAuBC,MAAvB,EAA+B;AAEpC,MAAIC,MAAM,CAACC,QAAP,CAAgBF,MAAhB,CAAJ,EAA6B;AAC3B,UAAMG,UAAU,GAAG,IAAIC,UAAJ,CAAeJ,MAAM,CAACA,MAAtB,EAA8BA,MAAM,CAACK,UAArC,EAAiDL,MAAM,CAACM,MAAxD,CAAnB;AACA,WAAOH,UAAU,CAACI,KAAX,GAAmBP,MAA1B;AACD;;AACD,SAAOA,MAAP;AACD;AAKD,OAAO,SAASQ,QAAT,CAAkBC,UAAlB,EAA8B;AACnC,MAAIC,WAAW,CAACC,MAAZ,CAAmBF,UAAnB,CAAJ,EAAoC;AAClCA,IAAAA,UAAU,GAAGA,UAAU,CAACT,MAAxB;AACD;;AAED,MAAI,OAAOC,MAAP,KAAkB,WAAlB,IAAiCQ,UAAU,YAAYC,WAA3D,EAAwE;AACtE,UAAMV,MAAM,GAAG,IAAIC,MAAJ,CAAWQ,UAAU,CAACG,UAAtB,CAAf;AACA,UAAMC,IAAI,GAAG,IAAIT,UAAJ,CAAeK,UAAf,CAAb;;AACA,SAAK,IAAIK,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGd,MAAM,CAACM,MAA3B,EAAmC,EAAEQ,CAArC,EAAwC;AACtCd,MAAAA,MAAM,CAACc,CAAD,CAAN,GAAYD,IAAI,CAACC,CAAD,CAAhB;AACD;;AACD,WAAOd,MAAP;AACD;;AAED,SAAOF,MAAM,CAAC,KAAD,CAAb;AACD","sourcesContent":["// Isolates Buffer references to ensure they are only bundled under Node.js (avoids big webpack polyfill)\n// this file is not visible to webpack (it is excluded in the package.json \"browser\" field).\n\nimport {assert} from '../env-utils/assert';\n\n/**\n * Convert Buffer to ArrayBuffer\n */\nexport function toArrayBuffer(buffer) {\n // TODO - per docs we should just be able to call buffer.buffer, but there are issues\n if (Buffer.isBuffer(buffer)) {\n const typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);\n return typedArray.slice().buffer;\n }\n return buffer;\n}\n\n/**\n * Convert (copy) ArrayBuffer to Buffer\n */\nexport function toBuffer(binaryData) {\n if (ArrayBuffer.isView(binaryData)) {\n binaryData = binaryData.buffer;\n }\n\n if (typeof Buffer !== 'undefined' && binaryData instanceof ArrayBuffer) {\n const buffer = new 
Buffer(binaryData.byteLength);\n const view = new Uint8Array(binaryData);\n for (let i = 0; i < buffer.length; ++i) {\n buffer[i] = view[i];\n }\n return buffer;\n }\n\n return assert(false);\n}\n"],"file":"buffer.js"}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
// Promisified fs wrappers (isolates `fs` so browser bundles can exclude it).
import fs from 'fs';
import { toArrayBuffer } from './buffer';
import { promisify } from './util';

// Path-based helpers
export const readdir = promisify(fs.readdir);
export const stat = promisify(fs.stat);
export const readFile = promisify(fs.readFile);
export const readFileSync = fs.readFileSync;
export const writeFile = promisify(fs.writeFile);
export const writeFileSync = fs.writeFileSync;

// File-descriptor helpers
export const open = promisify(fs.open);
export const close = promisify(fs.close);
export const read = promisify(fs.read);
export const fstat = promisify(fs.fstat);

// True when a usable `fs` module was resolved
export const isSupported = Boolean(fs);

/**
 * Reads `length` bytes starting at `start` from an open file descriptor
 * and returns them as an ArrayBuffer.
 * @throws {Error} 'fs.read failed' on a short read.
 */
export async function _readToArrayBuffer(fd, start, length) {
  const target = Buffer.alloc(length);
  const {
    bytesRead
  } = await read(fd, target, 0, length, start);

  if (bytesRead !== length) {
    throw new Error('fs.read failed');
  }

  return toArrayBuffer(target);
}
//# sourceMappingURL=fs.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../../src/lib/node/fs.ts"],"names":["fs","toArrayBuffer","promisify","readdir","stat","readFile","readFileSync","writeFile","writeFileSync","open","close","read","fstat","isSupported","Boolean","_readToArrayBuffer","fd","start","length","buffer","Buffer","alloc","bytesRead","Error"],"mappings":"AACA,OAAOA,EAAP,MAAe,IAAf;AACA,SAAQC,aAAR,QAA4B,UAA5B;AACA,SAAQC,SAAR,QAAwB,QAAxB;AAOA,OAAO,MAAMC,OAAO,GAAGD,SAAS,CAACF,EAAE,CAACG,OAAJ,CAAzB;AAEP,OAAO,MAAMC,IAAI,GAAGF,SAAS,CAACF,EAAE,CAACI,IAAJ,CAAtB;AAGP,OAAO,MAAMC,QAAQ,GAAGH,SAAS,CAACF,EAAE,CAACK,QAAJ,CAA1B;AAEP,OAAO,MAAMC,YAAY,GAAGN,EAAE,CAACM,YAAxB;AAEP,OAAO,MAAMC,SAAS,GAAGL,SAAS,CAACF,EAAE,CAACO,SAAJ,CAA3B;AAEP,OAAO,MAAMC,aAAa,GAAGR,EAAE,CAACQ,aAAzB;AAKP,OAAO,MAAMC,IAAI,GAAGP,SAAS,CAACF,EAAE,CAACS,IAAJ,CAAtB;AAEP,OAAO,MAAMC,KAAK,GAAGR,SAAS,CAACF,EAAE,CAACU,KAAJ,CAAvB;AAEP,OAAO,MAAMC,IAAI,GAAGT,SAAS,CAACF,EAAE,CAACW,IAAJ,CAAtB;AAEP,OAAO,MAAMC,KAAK,GAAGV,SAAS,CAACF,EAAE,CAACY,KAAJ,CAAvB;AAEP,OAAO,MAAMC,WAAW,GAAGC,OAAO,CAACd,EAAD,CAA3B;AAEP,OAAO,eAAee,kBAAf,CAAkCC,EAAlC,EAA8CC,KAA9C,EAA6DC,MAA7D,EAA6E;AAClF,QAAMC,MAAM,GAAGC,MAAM,CAACC,KAAP,CAAaH,MAAb,CAAf;AACA,QAAM;AAACI,IAAAA;AAAD,MAAc,MAAMX,IAAI,CAACK,EAAD,EAAKG,MAAL,EAAa,CAAb,EAAgBD,MAAhB,EAAwBD,KAAxB,CAA9B;;AACA,MAAIK,SAAS,KAAKJ,MAAlB,EAA0B;AACxB,UAAM,IAAIK,KAAJ,CAAU,gBAAV,CAAN;AACD;;AACD,SAAOtB,aAAa,CAACkB,MAAD,CAApB;AACD","sourcesContent":["// fs wrapper (promisified fs + avoids bundling fs in browsers)\nimport fs from 'fs';\nimport {toArrayBuffer} from './buffer';\nimport {promisify} from './util';\n\nexport type {Stats} from 'fs';\n\n// paths\n\n/** Wrapper for Node.js fs method */\nexport const readdir = promisify(fs.readdir);\n/** Wrapper for Node.js fs method */\nexport const stat = promisify(fs.stat);\n\n/** Wrapper for Node.js fs method */\nexport const readFile = promisify(fs.readFile);\n/** Wrapper for Node.js fs method */\nexport const readFileSync = fs.readFileSync;\n/** Wrapper for Node.js fs method */\nexport const 
writeFile = promisify(fs.writeFile);\n/** Wrapper for Node.js fs method */\nexport const writeFileSync = fs.writeFileSync;\n\n// file descriptors\n\n/** Wrapper for Node.js fs method */\nexport const open = promisify(fs.open);\n/** Wrapper for Node.js fs method */\nexport const close = promisify(fs.close);\n/** Wrapper for Node.js fs method */\nexport const read = promisify(fs.read);\n/** Wrapper for Node.js fs method */\nexport const fstat = promisify(fs.fstat);\n\nexport const isSupported = Boolean(fs);\n\nexport async function _readToArrayBuffer(fd: number, start: number, length: number) {\n const buffer = Buffer.alloc(length);\n const {bytesRead} = await read(fd, buffer, 0, length, start);\n if (bytesRead !== length) {\n throw new Error('fs.read failed');\n }\n return toArrayBuffer(buffer);\n}\n"],"file":"fs.js"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../../src/lib/node/util.ts"],"names":["util","promisify"],"mappings":"AAAA,OAAO,KAAKA,IAAZ,MAAsB,MAAtB;AAGA,OAAO,MAAMC,SAAS,GAAGD,IAAI,CAACC,SAAvB","sourcesContent":["import * as util from 'util';\n\n/** Wrapper for Node.js promisify */\nexport const promisify = util.promisify;\n"],"file":"util.js"}
|
|
@@ -3,7 +3,7 @@ export function parseJSON(string) {
|
|
|
3
3
|
try {
|
|
4
4
|
return JSON.parse(string);
|
|
5
5
|
} catch (_) {
|
|
6
|
-
throw new Error(
|
|
6
|
+
throw new Error("Failed to parse JSON from data starting with \"".concat(getFirstCharacters(string), "\""));
|
|
7
7
|
}
|
|
8
8
|
}
|
|
9
9
|
//# sourceMappingURL=parse-json.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../../src/lib/parser-utils/parse-json.ts"],"names":["getFirstCharacters","parseJSON","string","JSON","parse","_","Error"],"mappings":"AAAA,SAAQA,kBAAR,QAAiC,sCAAjC;AAKA,OAAO,SAASC,SAAT,CAAmBC,MAAnB,EAAwC;AAC7C,MAAI;AACF,WAAOC,IAAI,CAACC,KAAL,CAAWF,MAAX,CAAP;AACD,GAFD,CAEE,OAAOG,CAAP,EAAU;AACV,UAAM,IAAIC,KAAJ,0DAA2DN,kBAAkB,CAACE,MAAD,CAA7E,QAAN;AACD;AACF","sourcesContent":["import {getFirstCharacters} from '../binary-utils/get-first-characters';\n\n/**\n * Minimal JSON parser that throws more meaningful error messages\n */\nexport function parseJSON(string: string): any {\n try {\n return JSON.parse(string);\n } catch (_) {\n throw new Error(`Failed to parse JSON from data starting with \"${getFirstCharacters(string)}\"`);\n }\n}\n"],"file":"parse-json.js"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../../src/lib/path-utils/file-aliases.ts"],"names":["pathPrefix","fileAliases","setPathPrefix","prefix","getPathPrefix","addAliases","aliases","Object","assign","resolvePath","filename","alias","startsWith","replacement","replace"],"mappings":"AAEA,IAAIA,UAAU,GAAG,EAAjB;AACA,MAAMC,WAA0C,GAAG,EAAnD;AAKA,OAAO,SAASC,aAAT,CAAuBC,MAAvB,EAA6C;AAClDH,EAAAA,UAAU,GAAGG,MAAb;AACD;AAKD,OAAO,SAASC,aAAT,GAAiC;AACtC,SAAOJ,UAAP;AACD;AASD,OAAO,SAASK,UAAT,CAAoBC,OAApB,EAAkE;AACvEC,EAAAA,MAAM,CAACC,MAAP,CAAcP,WAAd,EAA2BK,OAA3B;AACD;AAKD,OAAO,SAASG,WAAT,CAAqBC,QAArB,EAA+C;AACpD,OAAK,MAAMC,KAAX,IAAoBV,WAApB,EAAiC;AAC/B,QAAIS,QAAQ,CAACE,UAAT,CAAoBD,KAApB,CAAJ,EAAgC;AAC9B,YAAME,WAAW,GAAGZ,WAAW,CAACU,KAAD,CAA/B;AACAD,MAAAA,QAAQ,GAAGA,QAAQ,CAACI,OAAT,CAAiBH,KAAjB,EAAwBE,WAAxB,CAAX;AACD;AACF;;AACD,MAAI,CAACH,QAAQ,CAACE,UAAT,CAAoB,SAApB,CAAD,IAAmC,CAACF,QAAQ,CAACE,UAAT,CAAoB,UAApB,CAAxC,EAAyE;AACvEF,IAAAA,QAAQ,aAAMV,UAAN,SAAmBU,QAAnB,CAAR;AACD;;AACD,SAAOA,QAAP;AACD","sourcesContent":["// Simple file alias mechanisms for tests.\n\nlet pathPrefix = '';\nconst fileAliases: {[aliasPath: string]: string} = {};\n\n/*\n * Set a relative path prefix\n */\nexport function setPathPrefix(prefix: string): void {\n pathPrefix = prefix;\n}\n\n/*\n * Get the relative path prefix\n */\nexport function getPathPrefix(): string {\n return pathPrefix;\n}\n\n/**\n *\n * @param aliases\n *\n * Note: addAliases are an experimental export, they are only for testing of loaders.gl loaders\n * not intended as a generic aliasing mechanism\n */\nexport function addAliases(aliases: {[aliasPath: string]: string}): void {\n Object.assign(fileAliases, aliases);\n}\n\n/**\n * Resolves aliases and adds path-prefix to paths\n */\nexport function resolvePath(filename: string): string {\n for (const alias in fileAliases) {\n if (filename.startsWith(alias)) {\n const replacement = fileAliases[alias];\n filename = filename.replace(alias, replacement);\n }\n }\n if (!filename.startsWith('http://') && 
!filename.startsWith('https://')) {\n filename = `${pathPrefix}${filename}`;\n return filename;\n}\n"],"file":"file-aliases.js"}
|
|
@@ -1,3 +1,7 @@
|
|
|
1
|
+
/**
 * Replacement for Node.js path.filename: returns the segment after the
 * last '/' in `url`, or '' when `url` contains no '/' (or is empty).
 */
export function filename(url) {
  const lastSlash = url && url.lastIndexOf('/');
  if (lastSlash >= 0) {
    return url.substr(lastSlash + 1);
  }
  return '';
}
|
|
1
5
|
export function dirname(url) {
|
|
2
6
|
const slashIndex = url && url.lastIndexOf('/');
|
|
3
7
|
return slashIndex >= 0 ? url.substr(0, slashIndex) : '';
|
|
@@ -6,11 +10,11 @@ export function join(...parts) {
|
|
|
6
10
|
const separator = '/';
|
|
7
11
|
parts = parts.map((part, index) => {
|
|
8
12
|
if (index) {
|
|
9
|
-
part = part.replace(new RegExp(
|
|
13
|
+
part = part.replace(new RegExp("^".concat(separator)), '');
|
|
10
14
|
}
|
|
11
15
|
|
|
12
16
|
if (index !== parts.length - 1) {
|
|
13
|
-
part = part.replace(new RegExp(
|
|
17
|
+
part = part.replace(new RegExp("".concat(separator, "$")), '');
|
|
14
18
|
}
|
|
15
19
|
|
|
16
20
|
return part;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../../src/lib/path-utils/path.ts"],"names":["filename","url","slashIndex","lastIndexOf","substr","dirname","join","parts","separator","map","part","index","replace","RegExp","length"],"mappings":"AAMA,OAAO,SAASA,QAAT,CAAkBC,GAAlB,EAAuC;AAC5C,QAAMC,UAAU,GAAGD,GAAG,IAAIA,GAAG,CAACE,WAAJ,CAAgB,GAAhB,CAA1B;AACA,SAAOD,UAAU,IAAI,CAAd,GAAkBD,GAAG,CAACG,MAAJ,CAAYF,UAAD,GAAyB,CAApC,CAAlB,GAA2D,EAAlE;AACD;AAMD,OAAO,SAASG,OAAT,CAAiBJ,GAAjB,EAAsC;AAC3C,QAAMC,UAAU,GAAGD,GAAG,IAAIA,GAAG,CAACE,WAAJ,CAAgB,GAAhB,CAA1B;AACA,SAAOD,UAAU,IAAI,CAAd,GAAkBD,GAAG,CAACG,MAAJ,CAAW,CAAX,EAAcF,UAAd,CAAlB,GAAwD,EAA/D;AACD;AAMD,OAAO,SAASI,IAAT,CAAc,GAAGC,KAAjB,EAA0C;AAC/C,QAAMC,SAAS,GAAG,GAAlB;AACAD,EAAAA,KAAK,GAAGA,KAAK,CAACE,GAAN,CAAU,CAACC,IAAD,EAAOC,KAAP,KAAiB;AACjC,QAAIA,KAAJ,EAAW;AACTD,MAAAA,IAAI,GAAGA,IAAI,CAACE,OAAL,CAAa,IAAIC,MAAJ,YAAeL,SAAf,EAAb,EAA0C,EAA1C,CAAP;AACD;;AACD,QAAIG,KAAK,KAAKJ,KAAK,CAACO,MAAN,GAAe,CAA7B,EAAgC;AAC9BJ,MAAAA,IAAI,GAAGA,IAAI,CAACE,OAAL,CAAa,IAAIC,MAAJ,WAAcL,SAAd,OAAb,EAA0C,EAA1C,CAAP;AACD;;AACD,WAAOE,IAAP;AACD,GARO,CAAR;AASA,SAAOH,KAAK,CAACD,IAAN,CAAWE,SAAX,CAAP;AACD","sourcesContent":["// Beginning of a minimal implementation of the Node.js path API, that doesn't pull in big polyfills.\n\n/**\n * Replacement for Node.js path.filename\n * @param url\n */\nexport function filename(url: string): string {\n const slashIndex = url && url.lastIndexOf('/');\n return slashIndex >= 0 ? url.substr((slashIndex as number) + 1) : '';\n}\n\n/**\n * Replacement for Node.js path.dirname\n * @param url\n */\nexport function dirname(url: string): string {\n const slashIndex = url && url.lastIndexOf('/');\n return slashIndex >= 0 ? 
url.substr(0, slashIndex as number) : '';\n}\n\n/**\n * Replacement for Node.js path.join\n * @param parts\n */\nexport function join(...parts: string[]): string {\n const separator = '/';\n parts = parts.map((part, index) => {\n if (index) {\n part = part.replace(new RegExp(`^${separator}`), '');\n }\n if (index !== parts.length - 1) {\n part = part.replace(new RegExp(`${separator}$`), '');\n }\n return part;\n });\n return parts.join(separator);\n}\n"],"file":"path.js"}
|
|
File without changes
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../../src/lib/request-utils/request-scheduler.ts"],"names":["Stats","STAT_QUEUED_REQUESTS","STAT_ACTIVE_REQUESTS","STAT_CANCELLED_REQUESTS","STAT_QUEUED_REQUESTS_EVER","STAT_ACTIVE_REQUESTS_EVER","DEFAULT_PROPS","id","throttleRequests","maxRequests","RequestScheduler","constructor","props","Map","stats","get","scheduleRequest","handle","getPriority","Promise","resolve","done","requestMap","has","request","priority","promise","requestQueue","push","set","_issueNewRequests","_issueRequest","isDone","delete","activeRequestCount","deferredUpdate","setTimeout","_issueNewRequestsAsync","freeSlots","Math","max","_updateAllRequests","i","shift","length","_updateRequest","splice","sort","a","b"],"mappings":";AAAA,SAAQA,KAAR,QAAoB,iBAApB;AAgBA,MAAMC,oBAAoB,GAAG,iBAA7B;AACA,MAAMC,oBAAoB,GAAG,iBAA7B;AACA,MAAMC,uBAAuB,GAAG,oBAAhC;AACA,MAAMC,yBAAyB,GAAG,sBAAlC;AACA,MAAMC,yBAAyB,GAAG,sBAAlC;AAEA,MAAMC,aAA8C,GAAG;AACrDC,EAAAA,EAAE,EAAE,mBADiD;AAGrDC,EAAAA,gBAAgB,EAAE,IAHmC;AAKrDC,EAAAA,WAAW,EAAE;AALwC,CAAvD;AAoBA,eAAe,MAAMC,gBAAN,CAAuB;AAUpCC,EAAAA,WAAW,CAACC,KAA4B,GAAG,EAAhC,EAAoC;AAAA;;AAAA;;AAAA,gDAPlB,CAOkB;;AAAA,0CAJb,EAIa;;AAAA,wCAHW,IAAIC,GAAJ,EAGX;;AAAA,4CAFjB,IAEiB;;AAC7C,SAAKD,KAAL,GAAa,EAAC,GAAGN,aAAJ;AAAmB,SAAGM;AAAtB,KAAb;AAGA,SAAKE,KAAL,GAAa,IAAId,KAAJ,CAAU;AAACO,MAAAA,EAAE,EAAE,KAAKK,KAAL,CAAWL;AAAhB,KAAV,CAAb;AACA,SAAKO,KAAL,CAAWC,GAAX,CAAed,oBAAf;AACA,SAAKa,KAAL,CAAWC,GAAX,CAAeb,oBAAf;AACA,SAAKY,KAAL,CAAWC,GAAX,CAAeZ,uBAAf;AACA,SAAKW,KAAL,CAAWC,GAAX,CAAeX,yBAAf;AACA,SAAKU,KAAL,CAAWC,GAAX,CAAeV,yBAAf;AACD;;AAkBDW,EAAAA,eAAe,CACbC,MADa,EAEbC,WAAgC,GAAG,MAAM,CAF5B,EAGW;AAExB,QAAI,CAAC,KAAKN,KAAL,CAAWJ,gBAAhB,EAAkC;AAChC,aAAOW,OAAO,CAACC,OAAR,CAAgB;AAACC,QAAAA,IAAI,EAAE,MAAM,CAAE;AAAf,OAAhB,CAAP;AACD;;AAGD,QAAI,KAAKC,UAAL,CAAgBC,GAAhB,CAAoBN,MAApB,CAAJ,EAAiC;AAC/B,aAAO,KAAKK,UAAL,CAAgBP,GAAhB,CAAoBE,MAApB,CAAP;AACD;;AAED,UAAMO,OAAgB,GAAG;AAACP,MAAAA,MAAD;AAASQ,MAAAA,QAAQ,EAAE,CAAnB;AAAsBP,MAAAA;AAAtB,KAAzB;AACA,UAAMQ,OAAO,GAAG,IAAIP,OAAJ,
CAA4BC,OAAD,IAAa;AAEtDI,MAAAA,OAAO,CAACJ,OAAR,GAAkBA,OAAlB;AACA,aAAOI,OAAP;AACD,KAJe,CAAhB;AAMA,SAAKG,YAAL,CAAkBC,IAAlB,CAAuBJ,OAAvB;AACA,SAAKF,UAAL,CAAgBO,GAAhB,CAAoBZ,MAApB,EAA4BS,OAA5B;;AACA,SAAKI,iBAAL;;AACA,WAAOJ,OAAP;AACD;;AAIDK,EAAAA,aAAa,CAACP,OAAD,EAAiC;AAC5C,UAAM;AAACP,MAAAA,MAAD;AAASG,MAAAA;AAAT,QAAoBI,OAA1B;AACA,QAAIQ,MAAM,GAAG,KAAb;;AAEA,UAAMX,IAAI,GAAG,MAAM;AAEjB,UAAI,CAACW,MAAL,EAAa;AACXA,QAAAA,MAAM,GAAG,IAAT;AAGA,aAAKV,UAAL,CAAgBW,MAAhB,CAAuBhB,MAAvB;AACA,aAAKiB,kBAAL;;AAEA,aAAKJ,iBAAL;AACD;AACF,KAXD;;AAcA,SAAKI,kBAAL;AAEA,WAAOd,OAAO,GAAGA,OAAO,CAAC;AAACC,MAAAA;AAAD,KAAD,CAAV,GAAqBF,OAAO,CAACC,OAAR,CAAgB;AAACC,MAAAA;AAAD,KAAhB,CAAnC;AACD;;AAGDS,EAAAA,iBAAiB,GAAS;AACxB,QAAI,CAAC,KAAKK,cAAV,EAA0B;AACxB,WAAKA,cAAL,GAAsBC,UAAU,CAAC,MAAM,KAAKC,sBAAL,EAAP,EAAsC,CAAtC,CAAhC;AACD;AACF;;AAGDA,EAAAA,sBAAsB,GAAG;AAEvB,SAAKF,cAAL,GAAsB,IAAtB;AAEA,UAAMG,SAAS,GAAGC,IAAI,CAACC,GAAL,CAAS,KAAK5B,KAAL,CAAWH,WAAX,GAAyB,KAAKyB,kBAAvC,EAA2D,CAA3D,CAAlB;;AAEA,QAAII,SAAS,KAAK,CAAlB,EAAqB;AACnB;AACD;;AAED,SAAKG,kBAAL;;AAGA,SAAK,IAAIC,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGJ,SAApB,EAA+B,EAAEI,CAAjC,EAAoC;AAClC,YAAMlB,OAAO,GAAG,KAAKG,YAAL,CAAkBgB,KAAlB,EAAhB;;AACA,UAAInB,OAAJ,EAAa;AACX,aAAKO,aAAL,CAAmBP,OAAnB;AACD;AACF;AAIF;;AAGDiB,EAAAA,kBAAkB,GAAG;AACnB,UAAMd,YAAY,GAAG,KAAKA,YAA1B;;AACA,SAAK,IAAIe,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGf,YAAY,CAACiB,MAAjC,EAAyC,EAAEF,CAA3C,EAA8C;AAC5C,YAAMlB,OAAO,GAAGG,YAAY,CAACe,CAAD,CAA5B;;AACA,UAAI,CAAC,KAAKG,cAAL,CAAoBrB,OAApB,CAAL,EAAmC;AAEjCG,QAAAA,YAAY,CAACmB,MAAb,CAAoBJ,CAApB,EAAuB,CAAvB;AACA,aAAKpB,UAAL,CAAgBW,MAAhB,CAAuBT,OAAO,CAACP,MAA/B;AACAyB,QAAAA,CAAC;AACF;AACF;;AAGDf,IAAAA,YAAY,CAACoB,IAAb,CAAkB,CAACC,CAAD,EAAIC,CAAJ,KAAUD,CAAC,CAACvB,QAAF,GAAawB,CAAC,CAACxB,QAA3C;AACD;;AAGDoB,EAAAA,cAAc,CAACrB,OAAD,EAAU;AACtBA,IAAAA,OAAO,CAACC,QAAR,GAAmBD,OAAO,CAACN,WAAR,CAAoBM,OAAO,CAACP,MAA5B,CAAnB;;AAGA,QAAIO,OAAO,CAACC,QAAR,GAAmB,CAAvB,EAA0B;AACxBD,MAAAA,OAAO,CAACJ,OAAR,CAAgB,IAAhB;AACA,aAAO,KAAP;AACD;;AACD,WAAO,IAAP;AACD;;AArJmC","sourcesContent":["im
port {Stats} from '@probe.gl/stats';\n\ntype Handle = any;\ntype DoneFunction = () => any;\ntype GetPriorityFunction = () => number;\ntype RequestResult = {\n done: DoneFunction;\n} | null;\n\n/** RequestScheduler Options */\nexport type RequestSchedulerProps = {\n id?: string;\n throttleRequests?: boolean;\n maxRequests?: number;\n};\n\nconst STAT_QUEUED_REQUESTS = 'Queued Requests';\nconst STAT_ACTIVE_REQUESTS = 'Active Requests';\nconst STAT_CANCELLED_REQUESTS = 'Cancelled Requests';\nconst STAT_QUEUED_REQUESTS_EVER = 'Queued Requests Ever';\nconst STAT_ACTIVE_REQUESTS_EVER = 'Active Requests Ever';\n\nconst DEFAULT_PROPS: Required<RequestSchedulerProps> = {\n id: 'request-scheduler',\n // Specifies if the request scheduler should throttle incoming requests, mainly for comparative testing\n throttleRequests: true,\n // The maximum number of simultaneous active requests. Un-throttled requests do not observe this limit.\n maxRequests: 6\n};\n\n/** Tracks one request */\ntype Request = {\n handle: Handle;\n priority: number;\n getPriority: GetPriorityFunction;\n resolve?: (value: any) => any;\n};\n\n/**\n * Used to issue a request, without having them \"deeply queued\" by the browser.\n * @todo - Track requests globally, across multiple servers\n */\nexport default class RequestScheduler {\n readonly props: Required<RequestSchedulerProps>;\n readonly stats: Stats;\n activeRequestCount: number = 0;\n\n /** Tracks the number of active requests and prioritizes/cancels queued requests. 
*/\n private requestQueue: Request[] = [];\n private requestMap: Map<Handle, Promise<RequestResult>> = new Map();\n private deferredUpdate: any = null;\n\n constructor(props: RequestSchedulerProps = {}) {\n this.props = {...DEFAULT_PROPS, ...props};\n\n // Returns the statistics used by the request scheduler.\n this.stats = new Stats({id: this.props.id});\n this.stats.get(STAT_QUEUED_REQUESTS);\n this.stats.get(STAT_ACTIVE_REQUESTS);\n this.stats.get(STAT_CANCELLED_REQUESTS);\n this.stats.get(STAT_QUEUED_REQUESTS_EVER);\n this.stats.get(STAT_ACTIVE_REQUESTS_EVER);\n }\n\n /**\n * Called by an application that wants to issue a request, without having it deeply queued by the browser\n *\n * When the returned promise resolved, it is OK for the application to issue a request.\n * The promise resolves to an object that contains a `done` method.\n * When the application's request has completed (or failed), the application must call the `done` function\n *\n * @param handle\n * @param getPriority will be called when request \"slots\" open up,\n * allowing the caller to update priority or cancel the request\n * Highest priority executes first, priority < 0 cancels the request\n * @returns a promise\n * - resolves to a object (with a `done` field) when the request can be issued without queueing,\n * - resolves to `null` if the request has been cancelled (by the callback return < 0).\n * In this case the application should not issue the request\n */\n scheduleRequest(\n handle: Handle,\n getPriority: GetPriorityFunction = () => 0\n ): Promise<RequestResult> {\n // Allows throttling to be disabled\n if (!this.props.throttleRequests) {\n return Promise.resolve({done: () => {}});\n }\n\n // dedupe\n if (this.requestMap.has(handle)) {\n return this.requestMap.get(handle) as Promise<any>;\n }\n\n const request: Request = {handle, priority: 0, getPriority};\n const promise = new Promise<RequestResult>((resolve) => {\n // @ts-ignore\n request.resolve = resolve;\n return request;\n 
});\n\n this.requestQueue.push(request);\n this.requestMap.set(handle, promise);\n this._issueNewRequests();\n return promise;\n }\n\n // PRIVATE\n\n _issueRequest(request: Request): Promise<any> {\n const {handle, resolve} = request;\n let isDone = false;\n\n const done = () => {\n // can only be called once\n if (!isDone) {\n isDone = true;\n\n // Stop tracking a request - it has completed, failed, cancelled etc\n this.requestMap.delete(handle);\n this.activeRequestCount--;\n // A slot just freed up, see if any queued requests are waiting\n this._issueNewRequests();\n }\n };\n\n // Track this request\n this.activeRequestCount++;\n\n return resolve ? resolve({done}) : Promise.resolve({done});\n }\n\n /** We check requests asynchronously, to prevent multiple updates */\n _issueNewRequests(): void {\n if (!this.deferredUpdate) {\n this.deferredUpdate = setTimeout(() => this._issueNewRequestsAsync(), 0);\n }\n }\n\n /** Refresh all requests */\n _issueNewRequestsAsync() {\n // TODO - shouldn't we clear the timeout?\n this.deferredUpdate = null;\n\n const freeSlots = Math.max(this.props.maxRequests - this.activeRequestCount, 0);\n\n if (freeSlots === 0) {\n return;\n }\n\n this._updateAllRequests();\n\n // Resolve pending promises for the top-priority requests\n for (let i = 0; i < freeSlots; ++i) {\n const request = this.requestQueue.shift();\n if (request) {\n this._issueRequest(request); // eslint-disable-line @typescript-eslint/no-floating-promises\n }\n }\n\n // Uncomment to debug\n // console.log(`${freeSlots} free slots, ${this.requestQueue.length} queued requests`);\n }\n\n /** Ensure all requests have updated priorities, and that no longer valid requests are cancelled */\n _updateAllRequests() {\n const requestQueue = this.requestQueue;\n for (let i = 0; i < requestQueue.length; ++i) {\n const request = requestQueue[i];\n if (!this._updateRequest(request)) {\n // Remove the element and make sure to adjust the counter to account for shortened array\n 
requestQueue.splice(i, 1);\n this.requestMap.delete(request.handle);\n i--;\n }\n }\n\n // Sort the remaining requests based on priority\n requestQueue.sort((a, b) => a.priority - b.priority);\n }\n\n /** Update a single request by calling the callback */\n _updateRequest(request) {\n request.priority = request.getPriority(request.handle); // eslint-disable-line callback-return\n\n // by returning a negative priority, the callback cancels the request\n if (request.priority < 0) {\n request.resolve(null);\n return false;\n }\n return true;\n }\n}\n"],"file":"request-scheduler.js"}
|
|
@@ -89,7 +89,7 @@ async function parseData({
|
|
|
89
89
|
data = textDecoder.decode(arrayBuffer);
|
|
90
90
|
parser = loader.parseTextSync;
|
|
91
91
|
} else {
|
|
92
|
-
throw new Error(
|
|
92
|
+
throw new Error("Could not load data with ".concat(loader.name, " loader"));
|
|
93
93
|
}
|
|
94
94
|
|
|
95
95
|
options = { ...options,
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../../src/lib/worker-loader-utils/create-loader-worker.ts"],"names":["WorkerBody","requestId","createLoaderWorker","loader","self","onmessage","type","payload","input","options","result","parseData","arrayBuffer","context","parse","parseOnMainThread","postMessage","error","message","Error","Promise","resolve","reject","id","onMessage","removeEventListener","addEventListener","data","parser","parseSync","parseTextSync","textDecoder","TextDecoder","decode","name","modules","worker"],"mappings":"AAEA,SAAQA,UAAR,QAAyB,0BAAzB;AAGA,IAAIC,SAAS,GAAG,CAAhB;AAMA,OAAO,SAASC,kBAAT,CAA4BC,MAA5B,EAAsD;AAE3D,MAAI,OAAOC,IAAP,KAAgB,WAApB,EAAiC;AAC/B;AACD;;AAEDJ,EAAAA,UAAU,CAACK,SAAX,GAAuB,OAAOC,IAAP,EAAaC,OAAb,KAAyB;AAC9C,YAAQD,IAAR;AACE,WAAK,SAAL;AACE,YAAI;AAGF,gBAAM;AAACE,YAAAA,KAAD;AAAQC,YAAAA,OAAO,GAAG;AAAlB,cAAwBF,OAA9B;AAEA,gBAAMG,MAAM,GAAG,MAAMC,SAAS,CAAC;AAC7BR,YAAAA,MAD6B;AAE7BS,YAAAA,WAAW,EAAEJ,KAFgB;AAG7BC,YAAAA,OAH6B;AAI7BI,YAAAA,OAAO,EAAE;AACPC,cAAAA,KAAK,EAAEC;AADA;AAJoB,WAAD,CAA9B;AAQAf,UAAAA,UAAU,CAACgB,WAAX,CAAuB,MAAvB,EAA+B;AAACN,YAAAA;AAAD,WAA/B;AACD,SAdD,CAcE,OAAOO,KAAP,EAAc;AACd,gBAAMC,OAAO,GAAGD,KAAK,YAAYE,KAAjB,GAAyBF,KAAK,CAACC,OAA/B,GAAyC,EAAzD;AACAlB,UAAAA,UAAU,CAACgB,WAAX,CAAuB,OAAvB,EAAgC;AAACC,YAAAA,KAAK,EAAEC;AAAR,WAAhC;AACD;;AACD;;AACF;AArBF;AAuBD,GAxBD;AAyBD;;AAED,SAASH,iBAAT,CAA2BH,WAA3B,EAAqDH,OAArD,EAAmG;AACjG,SAAO,IAAIW,OAAJ,CAAY,CAACC,OAAD,EAAUC,MAAV,KAAqB;AACtC,UAAMC,EAAE,GAAGtB,SAAS,EAApB;;AAIA,UAAMuB,SAAS,GAAG,CAAClB,IAAD,EAAOC,OAAP,KAAmB;AACnC,UAAIA,OAAO,CAACgB,EAAR,KAAeA,EAAnB,EAAuB;AAErB;AACD;;AAED,cAAQjB,IAAR;AACE,aAAK,MAAL;AACEN,UAAAA,UAAU,CAACyB,mBAAX,CAA+BD,SAA/B;AACAH,UAAAA,OAAO,CAACd,OAAO,CAACG,MAAT,CAAP;AACA;;AAEF,aAAK,OAAL;AACEV,UAAAA,UAAU,CAACyB,mBAAX,CAA+BD,SAA/B;AACAF,UAAAA,MAAM,CAACf,OAAO,CAACU,KAAT,CAAN;AACA;;AAEF;AAXF;AAcD,KApBD;;AAsBAjB,IAAAA,UAAU,CAAC0B,gBAAX,CAA4BF,SAA5B;AAGA,UAAMjB,OAAO,GAAG;AAACgB,MAAAA,EAAD;AAAKf,MAAAA,KAAK,EAAEI,WAAZ;AAAyBH,MAAAA;AAAzB,KAAhB;AACAT,IAAAA,UAAU,CAACgB,WAAX,CA
AuB,SAAvB,EAAkCT,OAAlC;AACD,GAhCM,CAAP;AAiCD;;AAMD,eAAeI,SAAf,CAAyB;AAACR,EAAAA,MAAD;AAASS,EAAAA,WAAT;AAAsBH,EAAAA,OAAtB;AAA+BI,EAAAA;AAA/B,CAAzB,EAAkE;AAChE,MAAIc,IAAJ;AACA,MAAIC,MAAJ;;AACA,MAAIzB,MAAM,CAAC0B,SAAP,IAAoB1B,MAAM,CAACW,KAA/B,EAAsC;AACpCa,IAAAA,IAAI,GAAGf,WAAP;AACAgB,IAAAA,MAAM,GAAGzB,MAAM,CAAC0B,SAAP,IAAoB1B,MAAM,CAACW,KAApC;AACD,GAHD,MAGO,IAAIX,MAAM,CAAC2B,aAAX,EAA0B;AAC/B,UAAMC,WAAW,GAAG,IAAIC,WAAJ,EAApB;AACAL,IAAAA,IAAI,GAAGI,WAAW,CAACE,MAAZ,CAAmBrB,WAAnB,CAAP;AACAgB,IAAAA,MAAM,GAAGzB,MAAM,CAAC2B,aAAhB;AACD,GAJM,MAIA;AACL,UAAM,IAAIX,KAAJ,oCAAsChB,MAAM,CAAC+B,IAA7C,aAAN;AACD;;AAGDzB,EAAAA,OAAO,GAAG,EACR,GAAGA,OADK;AAER0B,IAAAA,OAAO,EAAGhC,MAAM,IAAIA,MAAM,CAACM,OAAjB,IAA4BN,MAAM,CAACM,OAAP,CAAe0B,OAA5C,IAAwD,EAFzD;AAGRC,IAAAA,MAAM,EAAE;AAHA,GAAV;AAMA,SAAO,MAAMR,MAAM,CAACD,IAAD,EAAO,EAAC,GAAGlB;AAAJ,GAAP,EAAqBI,OAArB,EAA8BV,MAA9B,CAAnB;AACD","sourcesContent":["/* eslint-disable no-restricted-globals */\nimport type {LoaderWithParser} from '../../types';\nimport {WorkerBody} from '@loaders.gl/worker-utils';\n// import {validateLoaderVersion} from './validate-loader-version';\n\nlet requestId = 0;\n\n/**\n * Set up a WebWorkerGlobalScope to talk with the main thread\n * @param loader\n */\nexport function createLoaderWorker(loader: LoaderWithParser) {\n // Check that we are actually in a worker thread\n if (typeof self === 'undefined') {\n return;\n }\n\n WorkerBody.onmessage = async (type, payload) => {\n switch (type) {\n case 'process':\n try {\n // validateLoaderVersion(loader, data.source.split('@')[1]);\n\n const {input, options = {}} = payload;\n\n const result = await parseData({\n loader,\n arrayBuffer: input,\n options,\n context: {\n parse: parseOnMainThread\n }\n });\n WorkerBody.postMessage('done', {result});\n } catch (error) {\n const message = error instanceof Error ? 
error.message : '';\n WorkerBody.postMessage('error', {error: message});\n }\n break;\n default:\n }\n };\n}\n\nfunction parseOnMainThread(arrayBuffer: ArrayBuffer, options: {[key: string]: any}): Promise<void> {\n return new Promise((resolve, reject) => {\n const id = requestId++;\n\n /**\n */\n const onMessage = (type, payload) => {\n if (payload.id !== id) {\n // not ours\n return;\n }\n\n switch (type) {\n case 'done':\n WorkerBody.removeEventListener(onMessage);\n resolve(payload.result);\n break;\n\n case 'error':\n WorkerBody.removeEventListener(onMessage);\n reject(payload.error);\n break;\n\n default:\n // ignore\n }\n };\n\n WorkerBody.addEventListener(onMessage);\n\n // Ask the main thread to decode data\n const payload = {id, input: arrayBuffer, options};\n WorkerBody.postMessage('process', payload);\n });\n}\n\n// TODO - Support byteOffset and byteLength (enabling parsing of embedded binaries without copies)\n// TODO - Why not support async loader.parse* funcs here?\n// TODO - Why not reuse a common function instead of reimplementing loader.parse* selection logic? Keeping loader small?\n// TODO - Lack of appropriate parser functions can be detected when we create worker, no need to wait until parse\nasync function parseData({loader, arrayBuffer, options, context}) {\n let data;\n let parser;\n if (loader.parseSync || loader.parse) {\n data = arrayBuffer;\n parser = loader.parseSync || loader.parse;\n } else if (loader.parseTextSync) {\n const textDecoder = new TextDecoder();\n data = textDecoder.decode(arrayBuffer);\n parser = loader.parseTextSync;\n } else {\n throw new Error(`Could not load data with ${loader.name} loader`);\n }\n\n // TODO - proper merge in of loader options...\n options = {\n ...options,\n modules: (loader && loader.options && loader.options.modules) || {},\n worker: false\n };\n\n return await parser(data, {...options}, context, loader);\n}\n"],"file":"create-loader-worker.js"}
|
|
@@ -58,7 +58,7 @@ async function onMessage(parseOnMainThread, job, type, payload) {
|
|
|
58
58
|
break;
|
|
59
59
|
|
|
60
60
|
default:
|
|
61
|
-
console.warn(
|
|
61
|
+
console.warn("parse-with-worker unknown message ".concat(type));
|
|
62
62
|
}
|
|
63
63
|
}
|
|
64
64
|
//# sourceMappingURL=parse-with-worker.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../../src/lib/worker-loader-utils/parse-with-worker.ts"],"names":["WorkerFarm","getWorkerURL","canParseWithWorker","loader","options","isSupported","worker","parseWithWorker","data","context","parseOnMainThread","name","id","url","workerFarm","getWorkerFarm","workerPool","getWorkerPool","JSON","parse","stringify","job","startJob","onMessage","bind","postMessage","input","result","type","payload","done","error","message","Error","console","warn"],"mappings":"AAEA,SAAQA,UAAR,EAAoBC,YAApB,QAAuC,0BAAvC;AAOA,OAAO,SAASC,kBAAT,CAA4BC,MAA5B,EAA4CC,OAA5C,EAAqE;AAC1E,MAAI,CAACJ,UAAU,CAACK,WAAX,EAAL,EAA+B;AAC7B,WAAO,KAAP;AACD;;AAED,SAAOF,MAAM,CAACG,MAAP,KAAiBF,OAAjB,aAAiBA,OAAjB,uBAAiBA,OAAO,CAAEE,MAA1B,CAAP;AACD;AAMD,OAAO,eAAeC,eAAf,CACLJ,MADK,EAELK,IAFK,EAGLJ,OAHK,EAILK,OAJK,EAKLC,iBALK,EAML;AACA,QAAMC,IAAI,GAAGR,MAAM,CAACS,EAApB;AACA,QAAMC,GAAG,GAAGZ,YAAY,CAACE,MAAD,EAASC,OAAT,CAAxB;AAEA,QAAMU,UAAU,GAAGd,UAAU,CAACe,aAAX,CAAyBX,OAAzB,CAAnB;AACA,QAAMY,UAAU,GAAGF,UAAU,CAACG,aAAX,CAAyB;AAACN,IAAAA,IAAD;AAAOE,IAAAA;AAAP,GAAzB,CAAnB;AAIAT,EAAAA,OAAO,GAAGc,IAAI,CAACC,KAAL,CAAWD,IAAI,CAACE,SAAL,CAAehB,OAAf,CAAX,CAAV;AAEA,QAAMiB,GAAG,GAAG,MAAML,UAAU,CAACM,QAAX,CAChB,mBADgB,EAGhBC,SAAS,CAACC,IAAV,CAAe,IAAf,EAAqBd,iBAArB,CAHgB,CAAlB;AAMAW,EAAAA,GAAG,CAACI,WAAJ,CAAgB,SAAhB,EAA2B;AAEzBC,IAAAA,KAAK,EAAElB,IAFkB;AAGzBJ,IAAAA;AAHyB,GAA3B;AAMA,QAAMuB,MAAM,GAAG,MAAMN,GAAG,CAACM,MAAzB;AACA,SAAO,MAAMA,MAAM,CAACA,MAApB;AACD;;AAQD,eAAeJ,SAAf,CACEb,iBADF,EAEEW,GAFF,EAGEO,IAHF,EAIEC,OAJF,EAKE;AACA,UAAQD,IAAR;AACE,SAAK,MAAL;AACEP,MAAAA,GAAG,CAACS,IAAJ,CAASD,OAAT;AACA;;AAEF,SAAK,OAAL;AACER,MAAAA,GAAG,CAACU,KAAJ,CAAUF,OAAO,CAACE,KAAlB;AACA;;AAEF,SAAK,SAAL;AAEE,YAAM;AAACnB,QAAAA,EAAD;AAAKc,QAAAA,KAAL;AAAYtB,QAAAA;AAAZ,UAAuByB,OAA7B;;AACA,UAAI;AACF,cAAMF,MAAM,GAAG,MAAMjB,iBAAiB,CAACgB,KAAD,EAAQtB,OAAR,CAAtC;AACAiB,QAAAA,GAAG,CAACI,WAAJ,CAAgB,MAAhB,EAAwB;AAACb,UAAAA,EAAD;AAAKe,UAAAA;AAAL,SAAxB;AACD,OAHD,CAGE,OAAOI,KAAP,EAAc;AACd,cAAMC,OAAO,GAAGD,KAAK,YAAYE,KAAjB,GAAyBF,KAA
K,CAACC,OAA/B,GAAyC,eAAzD;AACAX,QAAAA,GAAG,CAACI,WAAJ,CAAgB,OAAhB,EAAyB;AAACb,UAAAA,EAAD;AAAKmB,UAAAA,KAAK,EAAEC;AAAZ,SAAzB;AACD;;AACD;;AAEF;AAEEE,MAAAA,OAAO,CAACC,IAAR,6CAAkDP,IAAlD;AAvBJ;AAyBD","sourcesContent":["import type {WorkerJob, WorkerMessageType, WorkerMessagePayload} from '@loaders.gl/worker-utils';\nimport type {Loader, LoaderOptions, LoaderContext} from '../../types';\nimport {WorkerFarm, getWorkerURL} from '@loaders.gl/worker-utils';\n\n/**\n * Determines if a loader can parse with worker\n * @param loader\n * @param options\n */\nexport function canParseWithWorker(loader: Loader, options?: LoaderOptions) {\n if (!WorkerFarm.isSupported()) {\n return false;\n }\n\n return loader.worker && options?.worker;\n}\n\n/**\n * this function expects that the worker function sends certain messages,\n * this can be automated if the worker is wrapper by a call to createLoaderWorker in @loaders.gl/loader-utils.\n */\nexport async function parseWithWorker(\n loader: Loader,\n data,\n options?: LoaderOptions,\n context?: LoaderContext,\n parseOnMainThread?: (arrayBuffer: ArrayBuffer, options: {[key: string]: any}) => Promise<void>\n) {\n const name = loader.id; // TODO\n const url = getWorkerURL(loader, options);\n\n const workerFarm = WorkerFarm.getWorkerFarm(options);\n const workerPool = workerFarm.getWorkerPool({name, url});\n\n // options.log object contains functions which cannot be transferred\n // TODO - decide how to handle logging on workers\n options = JSON.parse(JSON.stringify(options));\n\n const job = await workerPool.startJob(\n 'process-on-worker',\n // eslint-disable-next-line\n onMessage.bind(null, parseOnMainThread)\n );\n\n job.postMessage('process', {\n // @ts-ignore\n input: data,\n options\n });\n\n const result = await job.result;\n return await result.result;\n}\n\n/**\n * Handle worker's responses to the main thread\n * @param job\n * @param type\n * @param payload\n */\nasync function onMessage(\n parseOnMainThread,\n job: WorkerJob,\n 
type: WorkerMessageType,\n payload: WorkerMessagePayload\n) {\n switch (type) {\n case 'done':\n job.done(payload);\n break;\n\n case 'error':\n job.error(payload.error);\n break;\n\n case 'process':\n // Worker is asking for main thread to parseO\n const {id, input, options} = payload;\n try {\n const result = await parseOnMainThread(input, options);\n job.postMessage('done', {id, result});\n } catch (error) {\n const message = error instanceof Error ? error.message : 'unknown error';\n job.postMessage('error', {id, error: message});\n }\n break;\n\n default:\n // eslint-disable-next-line\n console.warn(`parse-with-worker unknown message ${type}`);\n }\n}\n"],"file":"parse-with-worker.js"}
|
|
File without changes
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":[],"names":[],"mappings":"","sourcesContent":[],"file":"types.js"}
|
|
File without changes
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../src/workers/json-worker.ts"],"names":["createLoaderWorker","JSONLoader"],"mappings":"AAAA,SAAQA,kBAAR,QAAiC,iDAAjC;AACA,SAAQC,UAAR,QAAyB,gBAAzB;AAEAD,kBAAkB,CAACC,UAAD,CAAlB","sourcesContent":["import {createLoaderWorker} from '../lib/worker-loader-utils/create-loader-worker';\nimport {JSONLoader} from '../json-loader';\n\ncreateLoaderWorker(JSONLoader);\n"],"file":"json-worker.js"}
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
export type { Loader, LoaderWithParser, LoaderContext, LoaderOptions, Writer, WriterOptions, DataType, SyncDataType, BatchableDataType, IFileSystem, IRandomAccessReadFileSystem } from './types';
|
|
2
|
+
export { assert } from './lib/env-utils/assert';
|
|
3
|
+
export { isBrowser, isWorker, nodeVersion, self, window, global, document } from './lib/env-utils/globals';
|
|
4
|
+
export { createLoaderWorker } from './lib/worker-loader-utils/create-loader-worker';
|
|
5
|
+
export { parseWithWorker, canParseWithWorker } from './lib/worker-loader-utils/parse-with-worker';
|
|
6
|
+
export { parseJSON } from './lib/parser-utils/parse-json';
|
|
7
|
+
export { toArrayBuffer, sliceArrayBuffer, concatenateArrayBuffers, concatenateTypedArrays, compareArrayBuffers } from './lib/binary-utils/array-buffer-utils';
|
|
8
|
+
export { padToNBytes, copyToArray, copyArrayBuffer } from './lib/binary-utils/memory-copy-utils';
|
|
9
|
+
export { copyPaddedArrayBufferToDataView, copyPaddedStringToDataView } from './lib/binary-utils/binary-copy-utils';
|
|
10
|
+
export { padStringToByteAlignment, copyStringToDataView, copyBinaryToDataView } from './lib/binary-utils/encode-utils';
|
|
11
|
+
export { getFirstCharacters, getMagicString } from './lib/binary-utils/get-first-characters';
|
|
12
|
+
export { makeTextEncoderIterator, makeTextDecoderIterator, makeLineIterator, makeNumberedLineIterator } from './lib/iterators/text-iterators';
|
|
13
|
+
export { forEach, concatenateArrayBuffersAsync } from './lib/iterators/async-iteration';
|
|
14
|
+
export { default as RequestScheduler } from './lib/request-utils/request-scheduler';
|
|
15
|
+
export { setPathPrefix, getPathPrefix, resolvePath } from './lib/path-utils/file-aliases';
|
|
16
|
+
export { addAliases as _addAliases } from './lib/path-utils/file-aliases';
|
|
17
|
+
export { JSONLoader } from './json-loader';
|
|
18
|
+
import * as path from './lib/path-utils/path';
|
|
19
|
+
export { path };
|
|
20
|
+
export { isBuffer, toBuffer, bufferToArrayBuffer } from './lib/binary-utils/buffer-utils';
|
|
21
|
+
import * as util from './lib/node/util';
|
|
22
|
+
export { util };
|
|
23
|
+
export { promisify } from './lib/node/util';
|
|
24
|
+
import * as fs from './lib/node/fs';
|
|
25
|
+
export { fs };
|
|
26
|
+
export { default as _NodeFileSystem } from './lib/filesystems/node-filesystem';
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import type { LoaderWithParser } from './types';
|
|
2
|
+
/**
|
|
3
|
+
* A JSON Micro loader (minimal bundle size)
|
|
4
|
+
* Alternative to `@loaders.gl/json`
|
|
5
|
+
*/
|
|
6
|
+
export declare const JSONLoader: {
|
|
7
|
+
name: string;
|
|
8
|
+
id: string;
|
|
9
|
+
module: string;
|
|
10
|
+
version: any;
|
|
11
|
+
extensions: string[];
|
|
12
|
+
mimeTypes: string[];
|
|
13
|
+
category: string;
|
|
14
|
+
text: boolean;
|
|
15
|
+
parseTextSync: typeof parseTextSync;
|
|
16
|
+
parse: (arrayBuffer: any) => Promise<any>;
|
|
17
|
+
options: {};
|
|
18
|
+
};
|
|
19
|
+
declare function parseTextSync(text: any): any;
|
|
20
|
+
export declare const _typecheckJSONLoader: LoaderWithParser;
|
|
21
|
+
export {};
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Convert an object to an array buffer
|
|
3
|
+
*/
|
|
4
|
+
export declare function toArrayBuffer(data: any): ArrayBuffer;
|
|
5
|
+
/**
|
|
6
|
+
* compare two binary arrays for equality
|
|
7
|
+
* @param {ArrayBuffer} a
|
|
8
|
+
* @param {ArrayBuffer} b
|
|
9
|
+
* @param {number} byteLength
|
|
10
|
+
*/
|
|
11
|
+
export declare function compareArrayBuffers(arrayBuffer1: ArrayBuffer, arrayBuffer2: ArrayBuffer, byteLength?: number): boolean;
|
|
12
|
+
/**
|
|
13
|
+
* Concatenate a sequence of ArrayBuffers
|
|
14
|
+
* @return A concatenated ArrayBuffer
|
|
15
|
+
*/
|
|
16
|
+
export declare function concatenateArrayBuffers(...sources: (ArrayBuffer | Uint8Array)[]): ArrayBuffer;
|
|
17
|
+
/**
|
|
18
|
+
* Concatenate arbitrary count of typed arrays
|
|
19
|
+
* https://developer.mozilla.org/en-US/docs/Web/JavaScript/Typed_arrays
|
|
20
|
+
* @param {...*} arrays - list of arrays. All arrays should be the same type
|
|
21
|
+
* @return A concatenated TypedArray
|
|
22
|
+
*/
|
|
23
|
+
export declare function concatenateTypedArrays<T>(...typedArrays: T[]): T;
|
|
24
|
+
/**
|
|
25
|
+
* Copy a view of an ArrayBuffer into new ArrayBuffer with byteOffset = 0
|
|
26
|
+
* @param arrayBuffer
|
|
27
|
+
* @param byteOffset
|
|
28
|
+
* @param byteLength
|
|
29
|
+
*/
|
|
30
|
+
export declare function sliceArrayBuffer(arrayBuffer: ArrayBuffer, byteOffset: number, byteLength?: number): ArrayBuffer;
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import { TypedArray } from '../../types';
|
|
2
|
+
/**
|
|
3
|
+
* Copy sourceBuffer to dataView with some padding
|
|
4
|
+
*
|
|
5
|
+
* @param {DataView | null} dataView - destination data container. If null - only new offset is calculated
|
|
6
|
+
* @param {number} byteOffset - destination byte offset to copy to
|
|
7
|
+
* @param {Array | TypedArray} sourceBuffer - source data buffer
|
|
8
|
+
* @param {number} padding - pad the resulting array to multiple of "padding" bytes. Additional bytes are filled with 0x20 (ASCII space)
|
|
9
|
+
*
|
|
10
|
+
* @return new byteOffset of resulting dataView
|
|
11
|
+
*/
|
|
12
|
+
export declare function copyPaddedArrayBufferToDataView(dataView: DataView | null, byteOffset: number, sourceBuffer: TypedArray, padding: number): number;
|
|
13
|
+
/**
|
|
14
|
+
* Copy string to dataView with some padding
|
|
15
|
+
*
|
|
16
|
+
* @param {DataView | null} dataView - destination data container. If null - only new offset is calculated
|
|
17
|
+
* @param {number} byteOffset - destination byte offset to copy to
|
|
18
|
+
* @param {string} string - source string
|
|
19
|
+
* @param {number} padding - pad the resulting array to multiple of "padding" bytes. Additional bytes are filled with 0x20 (ASCII space)
|
|
20
|
+
*
|
|
21
|
+
* @return new byteOffset of resulting dataView
|
|
22
|
+
*/
|
|
23
|
+
export declare function copyPaddedStringToDataView(dataView: DataView | null, byteOffset: number, string: string, padding: number): number;
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
/// <reference types="node" />
|
|
2
|
+
/**
|
|
3
|
+
* Check for Node.js `Buffer` (without triggering bundler to include Buffer polyfill on browser)
|
|
4
|
+
*/
|
|
5
|
+
export declare function isBuffer(value: any): boolean;
|
|
6
|
+
/**
|
|
7
|
+
* Converts to Node.js `Buffer` (without triggering bundler to include Buffer polyfill on browser)
|
|
8
|
+
* @todo better data type
|
|
9
|
+
*/
|
|
10
|
+
export declare function toBuffer(data: any): Buffer;
|
|
11
|
+
/**
|
|
12
|
+
* Converts Node.js `Buffer` to `ArrayBuffer` (without triggering bundler to include Buffer polyfill on browser)
|
|
13
|
+
* @todo better data type
|
|
14
|
+
*/
|
|
15
|
+
export declare function bufferToArrayBuffer(buffer: any): ArrayBuffer;
|
|
@@ -0,0 +1,3 @@
|
|
|
1
|
+
export declare function padStringToByteAlignment(string: any, byteAlignment: any): string;
|
|
2
|
+
export declare function copyStringToDataView(dataView: any, byteOffset: any, string: any, byteLength: any): any;
|
|
3
|
+
export declare function copyBinaryToDataView(dataView: any, byteOffset: any, binary: any, byteLength: any): any;
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Calculate new size of an arrayBuffer to be aligned to an n-byte boundary
|
|
3
|
+
* This function increases `byteLength` by the minimum delta,
|
|
4
|
+
* so that the total length is evenly divisible by `padding`
|
|
5
|
+
* @param byteLength
|
|
6
|
+
* @param padding
|
|
7
|
+
*/
|
|
8
|
+
export declare function padToNBytes(byteLength: number, padding: number): number;
|
|
9
|
+
/**
|
|
10
|
+
* Creates a new Uint8Array based on two different ArrayBuffers
|
|
11
|
+
* @param targetBuffer - the destination buffer to copy into
|
|
12
|
+
* @param sourceBuffer - the source buffer to copy from
|
|
13
|
+
* @returns the new ArrayBuffer created out of the two
|
|
14
|
+
*/
|
|
15
|
+
export declare function copyArrayBuffer(targetBuffer: ArrayBuffer, sourceBuffer: ArrayBuffer, byteOffset: number, byteLength?: number): ArrayBuffer;
|
|
16
|
+
/**
|
|
17
|
+
* Copy from source to target at the targetOffset
|
|
18
|
+
*
|
|
19
|
+
* @param source - The data to copy
|
|
20
|
+
* @param target - The destination to copy data into
|
|
21
|
+
* @param targetOffset - The start offset into target to place the copied data
|
|
22
|
+
* @returns the new offset taking into account proper padding
|
|
23
|
+
*/
|
|
24
|
+
export declare function copyToArray(source: ArrayBuffer | any, target: any, targetOffset: number): number;
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
declare type obj = {
|
|
2
|
+
[key: string]: any;
|
|
3
|
+
};
|
|
4
|
+
declare const self_: obj;
|
|
5
|
+
declare const window_: obj;
|
|
6
|
+
declare const global_: obj;
|
|
7
|
+
declare const document_: obj;
|
|
8
|
+
export { self_ as self, window_ as window, global_ as global, document_ as document };
|
|
9
|
+
/** true if running in a browser */
|
|
10
|
+
export declare const isBrowser: boolean;
|
|
11
|
+
/** true if running in a worker thread */
|
|
12
|
+
export declare const isWorker: boolean;
|
|
13
|
+
/** Major Node version (as a number) */
|
|
14
|
+
export declare const nodeVersion: number;
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
/// <reference types="node" />
|
|
2
|
+
import * as fs from '../node/fs';
|
|
3
|
+
import { IFileSystem, IRandomAccessReadFileSystem } from '../../types';
|
|
4
|
+
declare type Stat = {
|
|
5
|
+
size: number;
|
|
6
|
+
isDirectory: () => boolean;
|
|
7
|
+
info?: fs.Stats;
|
|
8
|
+
};
|
|
9
|
+
declare type ReadOptions = {
|
|
10
|
+
buffer?: Buffer;
|
|
11
|
+
offset?: number;
|
|
12
|
+
length?: number;
|
|
13
|
+
position?: number;
|
|
14
|
+
};
|
|
15
|
+
/**
|
|
16
|
+
* FileSystem pass-through for Node.js
|
|
17
|
+
* Compatible with BrowserFileSystem.
|
|
18
|
+
* @param options
|
|
19
|
+
*/
|
|
20
|
+
export default class NodeFileSystem implements IFileSystem, IRandomAccessReadFileSystem {
|
|
21
|
+
constructor(options: {
|
|
22
|
+
[key: string]: any;
|
|
23
|
+
});
|
|
24
|
+
readdir(dirname?: string, options?: {}): Promise<any[]>;
|
|
25
|
+
stat(path: string, options?: {}): Promise<Stat>;
|
|
26
|
+
fetch(path: string, options: {
|
|
27
|
+
[key: string]: any;
|
|
28
|
+
}): Promise<any>;
|
|
29
|
+
open(path: string, flags: string | number, mode?: any): Promise<number>;
|
|
30
|
+
close(fd: number): Promise<void>;
|
|
31
|
+
fstat(fd: number): Promise<Stat>;
|
|
32
|
+
read(fd: number, { buffer, offset, length, position }: ReadOptions): Promise<{
|
|
33
|
+
bytesRead: number;
|
|
34
|
+
buffer: Buffer;
|
|
35
|
+
}>;
|
|
36
|
+
}
|
|
37
|
+
export {};
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Iterate over async iterator, without resetting iterator if end is not reached
|
|
3
|
+
* - forEach intentionally does not reset iterator if exiting loop prematurely
|
|
4
|
+
* so that iteration can continue in a second loop
|
|
5
|
+
* - It is recommended to use a standard for-await as last loop to ensure
|
|
6
|
+
* iterator gets properly reset
|
|
7
|
+
*
|
|
8
|
+
* TODO - optimize using sync iteration if argument is an Iterable?
|
|
9
|
+
*
|
|
10
|
+
* @param iterator
|
|
11
|
+
* @param visitor
|
|
12
|
+
*/
|
|
13
|
+
export declare function forEach(iterator: any, visitor: any): Promise<void>;
|
|
14
|
+
/**
|
|
15
|
+
* Concatenates all data chunks yielded by an (async) iterator
|
|
16
|
+
* This function can e.g. be used to enable atomic parsers to work on (async) iterator inputs
|
|
17
|
+
*/
|
|
18
|
+
export declare function concatenateArrayBuffersAsync(asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>): Promise<ArrayBuffer>;
|
|
19
|
+
export declare function concatenateStringsAsync(asyncIterator: AsyncIterable<string> | Iterable<string>): Promise<string>;
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
export declare function makeTextDecoderIterator(arrayBufferIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>, options?: TextDecoderOptions): AsyncIterable<string>;
|
|
2
|
+
export declare function makeTextEncoderIterator(textIterator: AsyncIterable<string> | Iterable<ArrayBuffer>): AsyncIterable<ArrayBuffer>;
|
|
3
|
+
/**
|
|
4
|
+
* @param textIterator async iterable yielding strings
|
|
5
|
+
* @returns an async iterable over lines
|
|
6
|
+
* See http://2ality.com/2018/04/async-iter-nodejs.html
|
|
7
|
+
*/
|
|
8
|
+
export declare function makeLineIterator(textIterator: AsyncIterable<string>): AsyncIterable<string>;
|
|
9
|
+
/**
|
|
10
|
+
* @param lineIterator async iterable yielding lines
|
|
11
|
+
* @returns async iterable yielding numbered lines
|
|
12
|
+
*
|
|
13
|
+
* See http://2ality.com/2018/04/async-iter-nodejs.html
|
|
14
|
+
*/
|
|
15
|
+
export declare function makeNumberedLineIterator(lineIterator: AsyncIterable<string>): AsyncIterable<{
|
|
16
|
+
counter: number;
|
|
17
|
+
line: string;
|
|
18
|
+
}>;
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/// <reference types="node" />
|
|
2
|
+
import fs from 'fs';
|
|
3
|
+
export type { Stats } from 'fs';
|
|
4
|
+
/** Wrapper for Node.js fs method */
|
|
5
|
+
export declare const readdir: typeof fs.readdir.__promisify__;
|
|
6
|
+
/** Wrapper for Node.js fs method */
|
|
7
|
+
export declare const stat: typeof fs.stat.__promisify__;
|
|
8
|
+
/** Wrapper for Node.js fs method */
|
|
9
|
+
export declare const readFile: typeof fs.readFile.__promisify__;
|
|
10
|
+
/** Wrapper for Node.js fs method */
|
|
11
|
+
export declare const readFileSync: typeof fs.readFileSync;
|
|
12
|
+
/** Wrapper for Node.js fs method */
|
|
13
|
+
export declare const writeFile: typeof fs.writeFile.__promisify__;
|
|
14
|
+
/** Wrapper for Node.js fs method */
|
|
15
|
+
export declare const writeFileSync: typeof fs.writeFileSync;
|
|
16
|
+
/** Wrapper for Node.js fs method */
|
|
17
|
+
export declare const open: typeof fs.open.__promisify__;
|
|
18
|
+
/** Wrapper for Node.js fs method */
|
|
19
|
+
export declare const close: typeof fs.close.__promisify__;
|
|
20
|
+
/** Wrapper for Node.js fs method */
|
|
21
|
+
export declare const read: typeof fs.read.__promisify__;
|
|
22
|
+
/** Wrapper for Node.js fs method */
|
|
23
|
+
export declare const fstat: typeof fs.fstat.__promisify__;
|
|
24
|
+
export declare const isSupported: boolean;
|
|
25
|
+
export declare function _readToArrayBuffer(fd: number, start: number, length: number): Promise<any>;
|