@loaders.gl/polyfills 3.1.0-alpha.4 → 3.1.0-beta.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bundle.d.ts +2 -0
- package/dist/bundle.d.ts.map +1 -0
- package/dist/bundle.js +3994 -0
- package/dist/es5/bundle.js +1 -1
- package/dist/es5/bundle.js.map +1 -1
- package/dist/es5/index.js +11 -13
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/lib/encoding-indexes.js +37 -0
- package/dist/es5/lib/encoding-indexes.js.map +1 -0
- package/dist/es5/lib/encoding.js +1459 -0
- package/dist/es5/lib/encoding.js.map +1 -0
- package/dist/{esm/libs/encoding-indexes.js → es5/libs/encoding-indexes-asian.js} +2 -40
- package/dist/es5/node/buffer/to-array-buffer.node.js +1 -1
- package/dist/es5/node/buffer/to-array-buffer.node.js.map +1 -1
- package/dist/es5/node/fetch/fetch.node.js +69 -100
- package/dist/es5/node/fetch/fetch.node.js.map +1 -1
- package/dist/es5/node/fetch/headers.node.js +73 -112
- package/dist/es5/node/fetch/headers.node.js.map +1 -1
- package/dist/es5/node/fetch/response.node.js +47 -182
- package/dist/es5/node/fetch/response.node.js.map +1 -1
- package/dist/es5/node/fetch/utils/decode-data-uri.node.js +10 -18
- package/dist/es5/node/fetch/utils/decode-data-uri.node.js.map +1 -1
- package/dist/es5/node/fetch/utils/stream-utils.node.js +46 -126
- package/dist/es5/node/fetch/utils/stream-utils.node.js.map +1 -1
- package/dist/es5/node/file/blob-stream-controller.js +37 -82
- package/dist/es5/node/file/blob-stream-controller.js.map +1 -1
- package/dist/es5/node/file/blob-stream.js +12 -67
- package/dist/es5/node/file/blob-stream.js.map +1 -1
- package/dist/es5/node/file/blob.js +100 -209
- package/dist/es5/node/file/blob.js.map +1 -1
- package/dist/es5/node/file/file-reader.js +40 -147
- package/dist/es5/node/file/file-reader.js.map +1 -1
- package/dist/es5/node/file/file.js +13 -41
- package/dist/es5/node/file/file.js.map +1 -1
- package/dist/es5/node/file/readable-stream.js +1 -26
- package/dist/es5/node/file/readable-stream.js.map +1 -1
- package/dist/es5/node/images/encode-image.node.js +8 -10
- package/dist/es5/node/images/encode-image.node.js.map +1 -1
- package/dist/es5/node/images/parse-image.node.js +17 -44
- package/dist/es5/node/images/parse-image.node.js.map +1 -1
- package/dist/es5/promise/all-settled.js +7 -7
- package/dist/es5/promise/all-settled.js.map +1 -1
- package/dist/es5/utils/globals.js +3 -8
- package/dist/es5/utils/globals.js.map +1 -1
- package/dist/esm/index.js +2 -2
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/encoding-indexes.js +30 -0
- package/dist/esm/lib/encoding-indexes.js.map +1 -0
- package/dist/esm/lib/encoding.js +1450 -0
- package/dist/esm/lib/encoding.js.map +1 -0
- package/dist/{es5/libs/encoding-indexes.js → esm/libs/encoding-indexes-asian.js} +2 -40
- package/dist/esm/node/fetch/fetch.node.js +12 -1
- package/dist/esm/node/fetch/fetch.node.js.map +1 -1
- package/dist/esm/node/fetch/headers.node.js +1 -1
- package/dist/esm/node/fetch/headers.node.js.map +1 -1
- package/dist/esm/node/fetch/utils/decode-data-uri.node.js +2 -2
- package/dist/esm/node/fetch/utils/decode-data-uri.node.js.map +1 -1
- package/dist/esm/node/file/file-reader.js +1 -1
- package/dist/esm/node/file/file-reader.js.map +1 -1
- package/dist/esm/utils/assert.js +1 -1
- package/dist/esm/utils/assert.js.map +1 -1
- package/dist/index.d.ts +6 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +98 -0
- package/dist/lib/encoding-indexes.d.ts +31 -0
- package/dist/lib/encoding-indexes.d.ts.map +1 -0
- package/dist/lib/encoding-indexes.js +35 -0
- package/dist/lib/encoding.d.ts +15 -0
- package/dist/lib/encoding.d.ts.map +1 -0
- package/dist/lib/encoding.js +2779 -0
- package/dist/libs/encoding-indexes-asian.d.ts +10 -0
- package/dist/libs/encoding-indexes-asian.d.ts.map +1 -0
- package/dist/libs/encoding-indexes-asian.js +14 -0
- package/dist/node/buffer/btoa.node.d.ts +3 -0
- package/dist/node/buffer/btoa.node.d.ts.map +1 -0
- package/dist/node/buffer/btoa.node.js +14 -0
- package/dist/node/buffer/to-array-buffer.node.d.ts +2 -0
- package/dist/node/buffer/to-array-buffer.node.d.ts.map +1 -0
- package/dist/node/buffer/to-array-buffer.node.js +12 -0
- package/dist/node/fetch/fetch.node.d.ts +7 -0
- package/dist/node/fetch/fetch.node.d.ts.map +1 -0
- package/dist/node/fetch/fetch.node.js +117 -0
- package/dist/node/fetch/headers.node.d.ts +34 -0
- package/dist/node/fetch/headers.node.d.ts.map +1 -0
- package/dist/node/fetch/headers.node.js +105 -0
- package/dist/node/fetch/response.node.d.ts +22 -0
- package/dist/node/fetch/response.node.d.ts.map +1 -0
- package/dist/node/fetch/response.node.js +77 -0
- package/dist/node/fetch/utils/decode-data-uri.node.d.ts +16 -0
- package/dist/node/fetch/utils/decode-data-uri.node.d.ts.map +1 -0
- package/dist/node/fetch/utils/decode-data-uri.node.js +63 -0
- package/dist/node/fetch/utils/stream-utils.node.d.ts +4 -0
- package/dist/node/fetch/utils/stream-utils.node.d.ts.map +1 -0
- package/dist/node/fetch/utils/stream-utils.node.js +98 -0
- package/dist/node/file/blob-stream-controller.d.ts +29 -0
- package/dist/node/file/blob-stream-controller.d.ts.map +1 -0
- package/dist/node/file/blob-stream-controller.js +63 -0
- package/dist/node/file/blob-stream.d.ts +25 -0
- package/dist/node/file/blob-stream.d.ts.map +1 -0
- package/dist/node/file/blob-stream.js +37 -0
- package/dist/node/file/blob.d.ts +58 -0
- package/dist/node/file/blob.d.ts.map +1 -0
- package/dist/node/file/blob.js +160 -0
- package/dist/node/file/file-reader.d.ts +24 -0
- package/dist/node/file/file-reader.d.ts.map +1 -0
- package/dist/node/file/file-reader.js +35 -0
- package/dist/node/file/file.d.ts +25 -0
- package/dist/node/file/file.d.ts.map +1 -0
- package/dist/node/file/file.js +37 -0
- package/dist/node/file/install-file-polyfills.d.ts +2 -0
- package/dist/node/file/install-file-polyfills.d.ts.map +1 -0
- package/dist/node/file/install-file-polyfills.js +27 -0
- package/dist/node/file/readable-stream.d.ts +4 -0
- package/dist/node/file/readable-stream.d.ts.map +1 -0
- package/dist/node/file/readable-stream.js +11 -0
- package/dist/node/images/encode-image.node.d.ts +20 -0
- package/dist/node/images/encode-image.node.d.ts.map +1 -0
- package/dist/node/images/encode-image.node.js +41 -0
- package/dist/node/images/parse-image.node.d.ts +11 -0
- package/dist/node/images/parse-image.node.d.ts.map +1 -0
- package/dist/node/images/parse-image.node.js +29 -0
- package/dist/promise/all-settled.d.ts +10 -0
- package/dist/promise/all-settled.d.ts.map +1 -0
- package/dist/promise/all-settled.js +24 -0
- package/dist/utils/assert.d.ts +2 -0
- package/dist/utils/assert.d.ts.map +1 -0
- package/dist/utils/assert.js +9 -0
- package/dist/utils/globals.d.ts +4 -0
- package/dist/utils/globals.d.ts.map +1 -0
- package/dist/utils/globals.js +36 -0
- package/package.json +6 -6
- package/src/index.ts +2 -2
- package/src/lib/encoding-indexes.ts +34 -0
- package/{dist/esm/libs/encoding.js → src/lib/encoding.ts} +78 -78
- package/src/libs/{encoding-indexes.js → encoding-indexes-asian.js} +2 -40
- package/src/node/fetch/fetch.node.ts +19 -2
- package/dist/dist.min.js +0 -2
- package/dist/dist.min.js.map +0 -1
- package/dist/es5/libs/encoding.js +0 -3084
- package/src/libs/encoding.js +0 -3084

package/dist/node/file/blob-stream-controller.js
@@ -0,0 +1,63 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BlobStreamController = void 0;
/**
 * Forked from @gozala's web-blob under MIT license
 * @see https://github.com/Gozala/web-blob
 */
class BlobStreamController {
    /**
     * @param chunks
     */
    constructor(chunks) {
        this.isWorking = false;
        this.isCancelled = false;
        this.chunks = chunks;
    }
    /**
     * @param controller
     */
    start(controller) {
        this.work(controller); // eslint-disable-line @typescript-eslint/no-floating-promises
    }
    /**
     *
     * @param controller
     */
    async work(controller) {
        const { chunks } = this;
        this.isWorking = true;
        while (!this.isCancelled && (controller.desiredSize || 0) > 0) {
            let next;
            try {
                next = chunks.next();
            }
            catch (error) {
                controller.error(error);
                break;
            }
            if (next) {
                if (!next.done && !this.isCancelled) {
                    controller.enqueue(next.value);
                }
                else {
                    controller.close();
                }
            }
        }
        this.isWorking = false;
    }
    /**
     *
     * @param {ReadableStreamDefaultController} controller
     */
    pull(controller) {
        if (!this.isWorking) {
            this.work(controller); // eslint-disable-line @typescript-eslint/no-floating-promises
        }
    }
    cancel() {
        this.isCancelled = true;
    }
}
exports.BlobStreamController = BlobStreamController;
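
The controller above follows the underlying-source contract (start/pull/cancel) that a standard ReadableStream constructor accepts, pulling chunks from an iterator. A minimal usage sketch, not part of the diff, assuming the compiled module is required directly and a ReadableStream implementation is available; chunk values are illustrative:

const {BlobStreamController} = require('./blob-stream-controller');

// Feed two Uint8Array chunks through a ReadableStream via the controller
const chunks = [new Uint8Array([1, 2]), new Uint8Array([3, 4])];
const stream = new ReadableStream(new BlobStreamController(chunks.values()));
const reader = stream.getReader();
reader.read().then(({value}) => console.log(value)); // first chunk: Uint8Array [1, 2]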

package/dist/node/file/blob-stream.d.ts
@@ -0,0 +1,25 @@
/**
 * Forked from @gozala's web-blob under MIT license
 * @see https://github.com/Gozala/web-blob
 */
import { ReadableStreamPolyfill } from './readable-stream';
/**
 * Blob stream is a `ReadableStream` extension optimized to have minimal
 * overhead when consumed as `AsyncIterable<Uint8Array>`.
 * extends {ReadableStream<Uint8Array>}
 * implements {AsyncIterable<Uint8Array>}
 */
export declare class BlobStream<T> extends ReadableStreamPolyfill<T> {
    private readonly _chunks;
    /**
     * @param chunks
     */
    constructor(chunks: any);
    /**
     * @property [_options.preventCancel]
     */
    [Symbol.asyncIterator](_options?: {
        preventCancel?: boolean;
    }): AsyncIterable<Uint8Array>;
}
//# sourceMappingURL=blob-stream.d.ts.map

package/dist/node/file/blob-stream.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"blob-stream.d.ts","sourceRoot":"","sources":["../../../src/node/file/blob-stream.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,OAAO,EAAC,sBAAsB,EAAC,MAAM,mBAAmB,CAAC;AAGzD;;;;;GAKG;AAEH,qBAAa,UAAU,CAAC,CAAC,CAAE,SAAQ,sBAAsB,CAAC,CAAC,CAAC;IAC1D,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAe;IACvC;;OAEG;gBACS,MAAM,KAAA;IAOlB;;OAEG;IAEI,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,QAAQ,CAAC,EAAE;QAAC,aAAa,CAAC,EAAE,OAAO,CAAA;KAAC,GAAG,aAAa,CAAC,UAAU,CAAC;CAK/F"}

package/dist/node/file/blob-stream.js
@@ -0,0 +1,37 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BlobStream = void 0;
/**
 * Forked from @gozala's web-blob under MIT license
 * @see https://github.com/Gozala/web-blob
 */
const readable_stream_1 = require("./readable-stream");
const blob_stream_controller_1 = require("./blob-stream-controller");
/**
 * Blob stream is a `ReadableStream` extension optimized to have minimal
 * overhead when consumed as `AsyncIterable<Uint8Array>`.
 * extends {ReadableStream<Uint8Array>}
 * implements {AsyncIterable<Uint8Array>}
 */
// @ts-ignore
class BlobStream extends readable_stream_1.ReadableStreamPolyfill {
    /**
     * @param chunks
     */
    constructor(chunks) {
        // @ts-ignore
        super(new blob_stream_controller_1.BlobStreamController(chunks.values()), { type: 'bytes' });
        /** @private */
        this._chunks = chunks;
    }
    /**
     * @property [_options.preventCancel]
     */
    // @ts-ignore
    async *[Symbol.asyncIterator](_options) {
        const reader = this.getReader();
        yield* this._chunks;
        reader.releaseLock();
    }
}
exports.BlobStream = BlobStream;
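
BlobStream is meant to be consumed as an async iterable of Uint8Array chunks. A minimal usage sketch, not part of the diff, assuming the compiled module is required directly; the `parts` array is illustrative:

const {BlobStream} = require('./blob-stream');

// Collect the chunks of a BlobStream with for-await iteration
async function collectChunks(parts) {
  const stream = new BlobStream(parts); // parts: Array<Uint8Array>
  const received = [];
  for (await (const chunk of stream)) {} // (see note below)
  for await (const chunk of stream) {
    received.push(chunk); // yields the original Uint8Array chunks
  }
  return received;
}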

package/dist/node/file/blob.d.ts
@@ -0,0 +1,58 @@
import { BlobStream } from './blob-stream';
/**
 * Forked from @gozala's web-blob under MIT license
 * @see https://github.com/Gozala/web-blob
 */
export declare class BlobPolyfill {
    /** The MIME type of the data contained in the Blob. If type is unknown, string is empty. */
    readonly type: string;
    /** The size, in bytes, of the data contained in the Blob object. */
    size: number;
    private parts;
    /**
     * @param [init]
     * @param [options]
     */
    constructor(init?: BlobPart[], options?: BlobPropertyBag);
    /**
     * Returns a new Blob object containing the data in the specified range of
     * bytes of the blob on which it's called.
     * @param start=0 - An index into the Blob indicating the first
     * byte to include in the new Blob. If you specify a negative value, it's
     * treated as an offset from the end of the Blob toward the beginning. For
     * example, `-10` would be the 10th from last byte in the Blob. The default
     * value is `0`. If you specify a value for start that is larger than the
     * size of the source Blob, the returned Blob has size 0 and contains no
     * data.
     * @param end - An index into the `Blob` indicating the first byte
     * that will *not* be included in the new `Blob` (i.e. the byte exactly at
     * this index is not included). If you specify a negative value, it's treated
     * as an offset from the end of the Blob toward the beginning. For example,
     * `-10` would be the 10th from last byte in the `Blob`. The default value is
     * size.
     * @param type - The content type to assign to the new Blob;
     * this will be the value of its type property. The default value is an empty
     * string.
     */
    slice(start?: number, end?: number, type?: string): Blob;
    /**
     * Returns a promise that resolves with an ArrayBuffer containing the entire
     * contents of the Blob as binary data.
     */
    arrayBuffer(): Promise<ArrayBuffer>;
    /**
     * Returns a promise that resolves with a USVString containing the entire
     * contents of the Blob interpreted as UTF-8 text.
     */
    text(): Promise<string>;
    /**
     */
    stream(): BlobStream<any>;
    /**
     * @returns {string}
     */
    toString(): string;
    get [Symbol.toStringTag](): string;
    _toArrayBuffer(): ArrayBuffer;
}
//# sourceMappingURL=blob.d.ts.map

package/dist/node/file/blob.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"blob.d.ts","sourceRoot":"","sources":["../../../src/node/file/blob.ts"],"names":[],"mappings":"AACA,OAAO,EAAC,UAAU,EAAC,MAAM,eAAe,CAAC;AAEzC;;;GAGG;AACH,qBAAa,YAAY;IAEvB,4FAA4F;IAC5F,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;IACtB,oEAAoE;IACpE,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,CAAC,KAAK,CAAe;IAC5B;;;OAGG;gBACS,IAAI,GAAE,QAAQ,EAAO,EAAE,OAAO,GAAE,eAAoB;IAmChE;;;;;;;;;;;;;;;;;;;OAmBG;IACH,KAAK,CAAC,KAAK,GAAE,MAAU,EAAE,GAAG,GAAE,MAAkB,EAAE,IAAI,GAAE,MAAW,GAAG,IAAI;IAyC1E;;;OAGG;IAEG,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC;IAIzC;;;OAGG;IAEG,IAAI,IAAI,OAAO,CAAC,MAAM,CAAC;IAS7B;OACG;IAEH,MAAM,IAAI,UAAU,CAAC,GAAG,CAAC;IAIzB;;OAEG;IACH,QAAQ;IAIR,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,WAEvB;IAED,cAAc,IAAI,WAAW;CAU9B"}

package/dist/node/file/blob.js
@@ -0,0 +1,160 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BlobPolyfill = void 0;
// Forked from @gozala's web-blob under MIT license https://github.com/Gozala/web-blob
const blob_stream_1 = require("./blob-stream");
/**
 * Forked from @gozala's web-blob under MIT license
 * @see https://github.com/Gozala/web-blob
 */
class BlobPolyfill {
    /**
     * @param [init]
     * @param [options]
     */
    constructor(init = [], options = {}) {
        this.parts = [];
        this.size = 0;
        for (const part of init) {
            if (typeof part === 'string') {
                const bytes = new TextEncoder().encode(part);
                this.parts.push(bytes);
                this.size += bytes.byteLength;
            }
            else if (part instanceof BlobPolyfill) {
                this.size += part.size;
                // @ts-ignore - `parts` is marked private so TS will complain about
                // accessing it.
                this.parts.push(...part.parts);
            }
            else if (part instanceof ArrayBuffer) {
                this.parts.push(new Uint8Array(part));
                this.size += part.byteLength;
            }
            else if (part instanceof Uint8Array) {
                this.parts.push(part);
                this.size += part.byteLength;
            }
            else if (ArrayBuffer.isView(part)) {
                const { buffer, byteOffset, byteLength } = part;
                this.parts.push(new Uint8Array(buffer, byteOffset, byteLength));
                this.size += byteLength;
            }
            else {
                const bytes = new TextEncoder().encode(String(part));
                this.parts.push(bytes);
                this.size += bytes.byteLength;
            }
        }
        /** @private */
        this.type = readType(options.type);
    }
    /**
     * Returns a new Blob object containing the data in the specified range of
     * bytes of the blob on which it's called.
     * @param start=0 - An index into the Blob indicating the first
     * byte to include in the new Blob. If you specify a negative value, it's
     * treated as an offset from the end of the Blob toward the beginning. For
     * example, `-10` would be the 10th from last byte in the Blob. The default
     * value is `0`. If you specify a value for start that is larger than the
     * size of the source Blob, the returned Blob has size 0 and contains no
     * data.
     * @param end - An index into the `Blob` indicating the first byte
     * that will *not* be included in the new `Blob` (i.e. the byte exactly at
     * this index is not included). If you specify a negative value, it's treated
     * as an offset from the end of the Blob toward the beginning. For example,
     * `-10` would be the 10th from last byte in the `Blob`. The default value is
     * size.
     * @param type - The content type to assign to the new Blob;
     * this will be the value of its type property. The default value is an empty
     * string.
     */
    slice(start = 0, end = this.size, type = '') {
        const { size, parts: parts } = this;
        let offset = start < 0 ? Math.max(size + start, 0) : Math.min(start, size);
        let limit = end < 0 ? Math.max(size + end, 0) : Math.min(end, size);
        const span = Math.max(limit - offset, 0);
        const blob = new BlobPolyfill([], { type });
        if (span === 0) {
            // @ts-ignore
            return blob;
        }
        let blobSize = 0;
        const blobParts = [];
        for (const part of parts) {
            const { byteLength } = part;
            if (offset > 0 && byteLength <= offset) {
                offset -= byteLength;
                limit -= byteLength;
            }
            else {
                const chunk = part.subarray(offset, Math.min(byteLength, limit));
                blobParts.push(chunk);
                blobSize += chunk.byteLength;
                // no longer need to take that into account
                offset = 0;
                // don't add the overflow to new blobParts
                if (blobSize >= span) {
                    break;
                }
            }
        }
        blob.parts = blobParts;
        blob.size = blobSize;
        // @ts-ignore
        return blob;
    }
    /**
     * Returns a promise that resolves with an ArrayBuffer containing the entire
     * contents of the Blob as binary data.
     */
    // eslint-disable-next-line require-await
    async arrayBuffer() {
        return this._toArrayBuffer();
    }
    /**
     * Returns a promise that resolves with a USVString containing the entire
     * contents of the Blob interpreted as UTF-8 text.
     */
    // eslint-disable-next-line require-await
    async text() {
        const decoder = new TextDecoder();
        let text = '';
        for (const part of this.parts) {
            text += decoder.decode(part);
        }
        return text;
    }
    /**
     */
    // @ts-ignore
    stream() {
        return new blob_stream_1.BlobStream(this.parts);
    }
    /**
     * @returns {string}
     */
    toString() {
        return '[object Blob]';
    }
    get [Symbol.toStringTag]() {
        return 'Blob';
    }
    _toArrayBuffer() {
        const buffer = new ArrayBuffer(this.size);
        const bytes = new Uint8Array(buffer);
        let offset = 0;
        for (const part of this.parts) {
            bytes.set(part, offset);
            offset += part.byteLength;
        }
        return buffer;
    }
}
exports.BlobPolyfill = BlobPolyfill;
/**
 */
function readType(input = '') {
    const type = String(input).toLowerCase();
    return /[^\u0020-\u007E]/.test(type) ? '' : type;
}
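
A minimal usage sketch of the Blob polyfill above, not part of the diff, assuming the compiled module is required directly; the parts and byte counts are illustrative:

const {BlobPolyfill} = require('./blob');

// Build a blob from mixed string parts, then read it back
async function demo() {
  const blob = new BlobPolyfill(['loaders', '.', 'gl'], {type: 'text/plain'});
  console.log(blob.size, blob.type);   // 10 'text/plain'
  console.log(await blob.text());      // 'loaders.gl'
  const slice = blob.slice(0, 7);      // new blob covering the first 7 bytes
  console.log((await slice.arrayBuffer()).byteLength); // 7
}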

package/dist/node/file/file-reader.d.ts
@@ -0,0 +1,24 @@
export declare class FileReaderPolyfill implements FileReader {
    onload: any;
    onabort: any;
    onerror: any;
    error: any;
    onloadstart: any;
    onloadend: any;
    onprogress: any;
    readyState: any;
    result: any;
    DONE: any;
    EMPTY: any;
    LOADING: any;
    addEventListener: any;
    removeEventListener: any;
    dispatchEvent: any;
    constructor();
    abort(): void;
    readAsArrayBuffer(blob: Blob): Promise<void>;
    readAsBinaryString(blob: any): Promise<void>;
    readAsDataURL(blob: any): Promise<void>;
    readAsText(blob: any): Promise<void>;
}
//# sourceMappingURL=file-reader.d.ts.map

package/dist/node/file/file-reader.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"file-reader.d.ts","sourceRoot":"","sources":["../../../src/node/file/file-reader.ts"],"names":[],"mappings":"AAEA,qBAAa,kBAAmB,YAAW,UAAU;IAEnD,MAAM,MAAC;IACP,OAAO,MAAC;IACR,OAAO,MAAC;IACR,KAAK,MAAC;IACN,WAAW,MAAC;IACZ,SAAS,MAAC;IACV,UAAU,MAAC;IAEX,UAAU,MAAC;IACX,MAAM,MAAC;IACP,IAAI,MAAC;IACL,KAAK,MAAC;IACN,OAAO,MAAC;IACR,gBAAgB,MAAC;IACjB,mBAAmB,MAAC;IACpB,aAAa,MAAC;;IAMd,KAAK,IAAI,IAAI;IAIP,iBAAiB,CAAC,IAAI,EAAE,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;IAO5C,kBAAkB,CAAC,IAAI,KAAA;IAIvB,aAAa,CAAC,IAAI,KAAA;IAQlB,UAAU,CAAC,IAAI,KAAA;CAMtB"}

package/dist/node/file/file-reader.js
@@ -0,0 +1,35 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.FileReaderPolyfill = void 0;
const btoa_node_1 = require("../buffer/btoa.node");
class FileReaderPolyfill {
    constructor() {
        this.onload = null;
    }
    abort() {
        return;
    }
    async readAsArrayBuffer(blob) {
        const arrayBuffer = await blob.arrayBuffer();
        if (this.onload) {
            this.onload({ target: { result: arrayBuffer } });
        }
    }
    async readAsBinaryString(blob) {
        throw Error('Not implemented');
    }
    async readAsDataURL(blob) {
        const text = await blob.text();
        const dataUrl = `data://;base64,${(0, btoa_node_1.atob)(text)}`;
        if (this.onload) {
            this.onload({ target: { result: dataUrl } });
        }
    }
    async readAsText(blob) {
        const text = await blob.text();
        if (this.onload) {
            this.onload({ target: { result: text } });
        }
    }
    async readAsText(blob) {
    }
}
exports.FileReaderPolyfill = FileReaderPolyfill;
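
The reader polyfill only supports the callback-style onload contract shown above. A minimal usage sketch, not part of the diff, assuming the compiled modules are required directly:

const {FileReaderPolyfill} = require('./file-reader');
const {BlobPolyfill} = require('./blob');

// readAsArrayBuffer resolves the blob and invokes onload with {target: {result}}
const reader = new FileReaderPolyfill();
reader.onload = (event) => console.log(event.target.result.byteLength); // 5
reader.readAsArrayBuffer(new BlobPolyfill(['hello']));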

package/dist/node/file/file.d.ts
@@ -0,0 +1,25 @@
import { BlobPolyfill } from './blob';
/**
 * Forked from @gozala's web-file under MIT license
 * @see https://github.com/Gozala/web-file
 */
export declare class FilePolyfill extends BlobPolyfill {
    /** The name of the file referenced by the File object. */
    name: string;
    /** The path the URL of the File is relative to. */
    webkitRelativePath: string;
    /**
     * Returns the last modified time of the file, in millisecond since the UNIX
     * epoch (January 1st, 1970 at Midnight).
     */
    lastModified: number;
    /**
     * @param init
     * @param name - A USVString representing the file name or the path
     * to the file.
     * @param [options]
     */
    constructor(init: BlobPart[], name: string, options?: FilePropertyBag);
    get [Symbol.toStringTag](): string;
}
//# sourceMappingURL=file.d.ts.map

package/dist/node/file/file.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"file.d.ts","sourceRoot":"","sources":["../../../src/node/file/file.ts"],"names":[],"mappings":"AACA,OAAO,EAAC,YAAY,EAAC,MAAM,QAAQ,CAAC;AAEpC;;;GAGG;AAEH,qBAAa,YAAa,SAAQ,YAAY;IAG5C,0DAA0D;IAC1D,IAAI,EAAE,MAAM,CAAM;IAClB,mDAAmD;IACnD,kBAAkB,EAAE,MAAM,CAAM;IAEhC;;;OAGG;IACH,YAAY,EAAE,MAAM,CAAC;IAErB;;;;;OAKG;gBACS,IAAI,EAAE,QAAQ,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,GAAE,eAAoB;IAUzE,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,WAEvB;CACF"}

package/dist/node/file/file.js
@@ -0,0 +1,37 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.FilePolyfill = void 0;
// Forked from @gozala's web-file under MIT license https://github.com/Gozala/web-file
const blob_1 = require("./blob");
/**
 * Forked from @gozala's web-file under MIT license
 * @see https://github.com/Gozala/web-file
 */
// @ts-ignore
class FilePolyfill extends blob_1.BlobPolyfill {
    /**
     * @param init
     * @param name - A USVString representing the file name or the path
     * to the file.
     * @param [options]
     */
    constructor(init, name, options = {}) {
        super(init, options);
        // implements File {
        // public API
        /** The name of the file referenced by the File object. */
        this.name = '';
        /** The path the URL of the File is relative to. */
        this.webkitRelativePath = '';
        // Per File API spec https://w3c.github.io/FileAPI/#file-constructor
        // Every "/" character of file name must be replaced with a ":".
        /** @private */
        this.name = String(name).replace(/\//g, ':');
        /** @private */
        this.lastModified = options?.lastModified || Date.now();
    }
    get [Symbol.toStringTag]() {
        return 'File';
    }
}
exports.FilePolyfill = FilePolyfill;
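
A minimal usage sketch of the File polyfill above, not part of the diff, assuming the compiled module is required directly; the file name and bytes are illustrative:

const {FilePolyfill} = require('./file');

// Per the constructor above, "/" in the name is replaced with ":"
const file = new FilePolyfill([new Uint8Array([1, 2, 3])], 'textures/noise.png');
console.log(file.name);                 // 'textures:noise.png'
console.log(file.size);                 // 3
console.log(typeof file.lastModified);  // 'number'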

package/dist/node/file/install-file-polyfills.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"install-file-polyfills.d.ts","sourceRoot":"","sources":["../../../src/node/file/install-file-polyfills.ts"],"names":[],"mappings":"AAKA,wBAAgB,oBAAoB,SAqBnC"}

package/dist/node/file/install-file-polyfills.js
@@ -0,0 +1,27 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.installFilePolyfills = void 0;
const readable_stream_1 = require("./readable-stream");
const blob_1 = require("./blob");
const file_reader_1 = require("./file-reader");
const file_1 = require("./file");
function installFilePolyfills() {
    if (typeof ReadableStream === 'undefined' && global) {
        // @ts-ignore;
        global.ReadableStream = readable_stream_1.ReadableStreamPolyfill;
    }
    if (typeof Blob === 'undefined' && global) {
        // @ts-ignore;
        global.Blob = blob_1.BlobPolyfill;
    }
    if (typeof FileReader === 'undefined' && global) {
        // @ts-ignore;
        global.FileReader = file_reader_1.FileReaderPolyfill;
    }
    // Install minimal Node.js File polyfill
    if (typeof File === 'undefined' && global) {
        // @ts-ignore;
        global.File = file_1.FilePolyfill;
    }
    // @ts-ignore;
}
exports.installFilePolyfills = installFilePolyfills;
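
installFilePolyfills only assigns each global when it is not already defined. A minimal usage sketch, not part of the diff, assuming a Node.js runtime where ReadableStream, Blob, FileReader and File are missing and the compiled module is required directly:

const {installFilePolyfills} = require('./install-file-polyfills');

// After installation the classes are available as globals, browser-style
installFilePolyfills();
const blob = new Blob(['node now has Blob'], {type: 'text/plain'});
blob.text().then((text) => console.log(text)); // 'node now has Blob'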

package/dist/node/file/readable-stream.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"readable-stream.d.ts","sourceRoot":"","sources":["../../../src/node/file/readable-stream.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,cAAc,IAAI,iBAAiB,EAAC,MAAM,sBAAsB,CAAC;AAOzE,qBAAa,sBAAsB,CAAC,CAAC,CAAE,SAAQ,iBAAiB,CAAC,CAAC,CAAE,YAAW,cAAc;CAAG"}

package/dist/node/file/readable-stream.js
@@ -0,0 +1,11 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ReadableStreamPolyfill = void 0;
const web_streams_polyfill_1 = require("web-streams-polyfill");
// Want a polyfill, but please don't install it
// @ts-ignore
delete global.ReadableStream;
// @ts-ignore
class ReadableStreamPolyfill extends web_streams_polyfill_1.ReadableStream {
}
exports.ReadableStreamPolyfill = ReadableStreamPolyfill;

package/dist/node/images/encode-image.node.d.ts
@@ -0,0 +1,20 @@
/**
 * Returns data bytes representing a compressed image in PNG or JPG format,
 * This data can be saved using file system (f) methods or
 * used in a request.
 * @param image to save
 * @param options
 * @param options.type='png' - png, jpg or image/png, image/jpg are valid
 * @param options.dataURI - Whether to include a data URI header
 * @return {*} bytes
 */
export declare function encodeImageToStreamNode(image: {
    data: any;
    width: number;
    height: number;
}, options: {
    type?: string;
    dataURI?: string;
}): any;
export declare function encodeImageNode(image: any, options: any): Promise<unknown>;
//# sourceMappingURL=encode-image.node.d.ts.map

package/dist/node/images/encode-image.node.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"encode-image.node.d.ts","sourceRoot":"","sources":["../../../src/node/images/encode-image.node.ts"],"names":[],"mappings":"AAMA;;;;;;;;;GASG;AACH,wBAAgB,uBAAuB,CACrC,KAAK,EAAE;IAAC,IAAI,EAAE,GAAG,CAAC;IAAC,KAAK,EAAE,MAAM,CAAC;IAAC,MAAM,EAAE,MAAM,CAAA;CAAC,EACjD,OAAO,EAAE;IAAC,IAAI,CAAC,EAAE,MAAM,CAAC;IAAC,OAAO,CAAC,EAAE,MAAM,CAAA;CAAC,OAQ3C;AAED,wBAAgB,eAAe,CAAC,KAAK,KAAA,EAAE,OAAO,KAAA,oBAY7C"}

package/dist/node/images/encode-image.node.js
@@ -0,0 +1,41 @@
"use strict";
// Use stackgl modules for DOM-less reading and writing of images
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.encodeImageNode = exports.encodeImageToStreamNode = void 0;
const save_pixels_1 = __importDefault(require("save-pixels"));
const ndarray_1 = __importDefault(require("ndarray"));
const to_array_buffer_node_1 = require("../buffer/to-array-buffer.node");
/**
 * Returns data bytes representing a compressed image in PNG or JPG format,
 * This data can be saved using file system (f) methods or
 * used in a request.
 * @param image to save
 * @param options
 * @param options.type='png' - png, jpg or image/png, image/jpg are valid
 * @param options.dataURI - Whether to include a data URI header
 * @return {*} bytes
 */
function encodeImageToStreamNode(image, options) {
    // Support MIME type strings
    const type = options.type ? options.type.replace('image/', '') : 'jpeg';
    const pixels = (0, ndarray_1.default)(image.data, [image.width, image.height, 4], [4, image.width * 4, 1], 0);
    // Note: savePixels returns a stream
    return (0, save_pixels_1.default)(pixels, type, options);
}
exports.encodeImageToStreamNode = encodeImageToStreamNode;
function encodeImageNode(image, options) {
    const imageStream = encodeImageToStreamNode(image, options);
    return new Promise((resolve) => {
        const buffers = [];
        imageStream.on('data', (buffer) => buffers.push(buffer));
        // TODO - convert to arraybuffer?
        imageStream.on('end', () => {
            const buffer = Buffer.concat(buffers);
            resolve((0, to_array_buffer_node_1.bufferToArrayBuffer)(buffer));
        });
    });
}
exports.encodeImageNode = encodeImageNode;
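
A minimal usage sketch of encodeImageNode above, not part of the diff, assuming the compiled module is required directly; the 2x2 RGBA pixel data is illustrative:

const {encodeImageNode} = require('./encode-image.node');

// Encode a 2x2 RGBA image to PNG bytes; the promise resolves with an ArrayBuffer
const image = {
  width: 2,
  height: 2,
  data: new Uint8Array([
    255, 0, 0, 255,   0, 255, 0, 255,
    0, 0, 255, 255,   255, 255, 255, 255
  ])
};
encodeImageNode(image, {type: 'image/png'}).then((arrayBuffer) => {
  console.log(arrayBuffer.byteLength); // size of the encoded PNG
});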

package/dist/node/images/parse-image.node.d.ts
@@ -0,0 +1,11 @@
declare type NDArray = {
    shape: number[];
    data: Uint8Array;
    width: number;
    height: number;
    components: number;
    layers: number[];
};
export declare function parseImageNode(arrayBuffer: ArrayBuffer, mimeType: string): Promise<NDArray>;
export {};
//# sourceMappingURL=parse-image.node.d.ts.map

package/dist/node/images/parse-image.node.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"parse-image.node.d.ts","sourceRoot":"","sources":["../../../src/node/images/parse-image.node.ts"],"names":[],"mappings":"AAKA,aAAK,OAAO,GAAG;IACb,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,IAAI,EAAE,UAAU,CAAC;IACjB,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,EAAE,CAAC;CAClB,CAAC;AAEF,wBAAsB,cAAc,CAAC,WAAW,EAAE,WAAW,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAuBjG"}

package/dist/node/images/parse-image.node.js
@@ -0,0 +1,29 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.parseImageNode = void 0;
const get_pixels_1 = __importDefault(require("get-pixels"));
const assert_1 = require("../../utils/assert");
const util_1 = __importDefault(require("util"));
async function parseImageNode(arrayBuffer, mimeType) {
    (0, assert_1.assert)(mimeType, 'MIMEType is required to parse image under Node.js');
    // TODO - check if getPixels callback is asynchronous if provided with buffer input
    // if not, parseImage can be a sync function
    const getPixelsAsync = util_1.default.promisify(get_pixels_1.default);
    const buffer = arrayBuffer instanceof Buffer ? arrayBuffer : Buffer.from(arrayBuffer);
    const ndarray = await getPixelsAsync(buffer, mimeType);
    const shape = [...ndarray.shape];
    const layers = ndarray.shape.length === 4 ? ndarray.shape.shift() : 1;
    // extract width/height etc
    return {
        shape,
        data: ndarray.data,
        width: ndarray.shape[0],
        height: ndarray.shape[1],
        components: ndarray.shape[2],
        layers
    };
}
exports.parseImageNode = parseImageNode;
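
A minimal usage sketch of parseImageNode above, not part of the diff, assuming the compiled module is required directly; note that the function also accepts a Node.js Buffer (it checks `arrayBuffer instanceof Buffer` before wrapping), and the file path is illustrative:

const fs = require('fs');
const {parseImageNode} = require('./parse-image.node');

// Decode PNG bytes read from disk into raw pixel data
async function readPixels(path) {
  const image = await parseImageNode(fs.readFileSync(path), 'image/png');
  console.log(image.width, image.height, image.components); // e.g. 256 256 4
  return image.data; // Uint8Array of RGBA values
}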