@loaders.gl/polyfills 4.0.0-beta.3 → 4.0.0-beta.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/fetch/utils/decode-data-uri.node.d.ts +16 -0
- package/dist/fetch/utils/decode-data-uri.node.d.ts.map +1 -0
- package/dist/fetch/utils/decode-data-uri.node.js +45 -0
- package/dist/fetch/utils/decode-data-uri.node.js.map +1 -0
- package/dist/images/encode-image-node.d.ts +20 -0
- package/dist/images/encode-image-node.d.ts.map +1 -0
- package/dist/images/encode-image-node.js +20 -0
- package/dist/images/encode-image-node.js.map +1 -0
- package/dist/images/parse-image-node.d.ts +13 -0
- package/dist/images/parse-image-node.d.ts.map +1 -0
- package/dist/images/parse-image-node.js +29 -0
- package/dist/images/parse-image-node.js.map +1 -0
- package/dist/index.js +12 -9
- package/dist/index.js.map +1 -1
- package/dist/load-library/require-utils.node.d.ts +18 -0
- package/dist/load-library/require-utils.node.d.ts.map +1 -0
- package/dist/load-library/require-utils.node.js +55 -0
- package/dist/load-library/require-utils.node.js.map +1 -0
- package/package.json +4 -4
- package/src/fetch/utils/decode-data-uri.node.ts +69 -0
- package/src/images/encode-image-node.ts +41 -0
- package/src/images/parse-image-node.ts +53 -0
- package/src/index.ts +22 -9
- package/src/load-library/require-utils.node.ts +101 -0
package/dist/fetch/utils/decode-data-uri.node.d.ts
ADDED
@@ -0,0 +1,16 @@
+/**
+ * Parses a data URI into a buffer, as well as retrieving its declared MIME type.
+ *
+ * @param {string} uri - a data URI (assumed to be valid)
+ * @returns {Object} { buffer, mimeType }
+ */
+export declare function decodeDataUri(uri: string): {
+    arrayBuffer: ArrayBuffer;
+    mimeType: string;
+};
+/**
+ * @param data
+ * @todo Duplicate of core
+ */
+export declare function toArrayBuffer(data: unknown): ArrayBuffer;
+//# sourceMappingURL=decode-data-uri.node.d.ts.map
package/dist/fetch/utils/decode-data-uri.node.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"decode-data-uri.node.d.ts","sourceRoot":"","sources":["../../../src/fetch/utils/decode-data-uri.node.ts"],"names":[],"mappings":"AAKA;;;;;GAKG;AACH,wBAAgB,aAAa,CAAC,GAAG,EAAE,MAAM,GAAG;IAAC,WAAW,EAAE,WAAW,CAAC;IAAC,QAAQ,EAAE,MAAM,CAAA;CAAC,CAoBvF;AAED;;;GAGG;AACH,wBAAgB,aAAa,CAAC,IAAI,EAAE,OAAO,GAAG,WAAW,CA+BxD"}
package/dist/fetch/utils/decode-data-uri.node.js
ADDED
@@ -0,0 +1,45 @@
+const isArrayBuffer = x => x && x instanceof ArrayBuffer;
+const isBuffer = x => x && x instanceof Buffer;
+export function decodeDataUri(uri) {
+  const dataIndex = uri.indexOf(',');
+  let buffer;
+  let mimeType;
+  if (uri.slice(dataIndex - 7, dataIndex) === ';base64') {
+    buffer = Buffer.from(uri.slice(dataIndex + 1), 'base64');
+    mimeType = uri.slice(5, dataIndex - 7).trim();
+  } else {
+    buffer = Buffer.from(decodeURIComponent(uri.slice(dataIndex + 1)));
+    mimeType = uri.slice(5, dataIndex).trim();
+  }
+  if (!mimeType) {
+    mimeType = 'text/plain;charset=US-ASCII';
+  } else if (mimeType.startsWith(';')) {
+    mimeType = `text/plain${mimeType}`;
+  }
+  return {
+    arrayBuffer: toArrayBuffer(buffer),
+    mimeType
+  };
+}
+export function toArrayBuffer(data) {
+  if (isArrayBuffer(data)) {
+    return data;
+  }
+  if (isBuffer(data)) {
+    const typedArray = new Uint8Array(data);
+    return typedArray.buffer;
+  }
+  if (ArrayBuffer.isView(data)) {
+    return data.buffer;
+  }
+  if (typeof data === 'string') {
+    const text = data;
+    const uint8Array = new TextEncoder().encode(text);
+    return uint8Array.buffer;
+  }
+  if (data && typeof data === 'object' && data._toArrayBuffer) {
+    return data._toArrayBuffer();
+  }
+  throw new Error(`toArrayBuffer(${JSON.stringify(data, null, 2).slice(10)})`);
+}
+//# sourceMappingURL=decode-data-uri.node.js.map
package/dist/fetch/utils/decode-data-uri.node.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"decode-data-uri.node.js","names":["isArrayBuffer","x","ArrayBuffer","isBuffer","Buffer","decodeDataUri","uri","dataIndex","indexOf","buffer","mimeType","slice","from","trim","decodeURIComponent","startsWith","arrayBuffer","toArrayBuffer","data","typedArray","Uint8Array","isView","text","uint8Array","TextEncoder","encode","_toArrayBuffer","Error","JSON","stringify"],"sources":["../../../src/fetch/utils/decode-data-uri.node.ts"],"sourcesContent":["// Based on binary-gltf-utils under MIT license: Copyright (c) 2016-17 Karl Cheng\n\nconst isArrayBuffer = (x) => x && x instanceof ArrayBuffer;\nconst isBuffer = (x) => x && x instanceof Buffer;\n\n/**\n * Parses a data URI into a buffer, as well as retrieving its declared MIME type.\n *\n * @param {string} uri - a data URI (assumed to be valid)\n * @returns {Object} { buffer, mimeType }\n */\nexport function decodeDataUri(uri: string): {arrayBuffer: ArrayBuffer; mimeType: string} {\n const dataIndex = uri.indexOf(',');\n\n let buffer;\n let mimeType;\n if (uri.slice(dataIndex - 7, dataIndex) === ';base64') {\n buffer = Buffer.from(uri.slice(dataIndex + 1), 'base64');\n mimeType = uri.slice(5, dataIndex - 7).trim();\n } else {\n buffer = Buffer.from(decodeURIComponent(uri.slice(dataIndex + 1)));\n mimeType = uri.slice(5, dataIndex).trim();\n }\n\n if (!mimeType) {\n mimeType = 'text/plain;charset=US-ASCII';\n } else if (mimeType.startsWith(';')) {\n mimeType = `text/plain${mimeType}`;\n }\n\n return {arrayBuffer: toArrayBuffer(buffer), mimeType};\n}\n\n/**\n * @param data\n * @todo Duplicate of core\n */\nexport function toArrayBuffer(data: unknown): ArrayBuffer {\n if (isArrayBuffer(data)) {\n return data as ArrayBuffer;\n }\n\n // TODO - per docs we should just be able to call buffer.buffer, but there are issues\n if (isBuffer(data)) {\n // @ts-expect-error\n const typedArray = new Uint8Array(data);\n return typedArray.buffer;\n }\n\n // Careful - Node Buffers will look like ArrayBuffers (keep after isBuffer)\n if (ArrayBuffer.isView(data)) {\n return data.buffer;\n }\n\n if (typeof data === 'string') {\n const text = data;\n const uint8Array = new TextEncoder().encode(text);\n return uint8Array.buffer;\n }\n\n // HACK to support Blob polyfill\n // @ts-expect-error\n if (data && typeof data === 'object' && data._toArrayBuffer) {\n // @ts-expect-error\n return data._toArrayBuffer();\n }\n\n throw new Error(`toArrayBuffer(${JSON.stringify(data, null, 
2).slice(10)})`);\n}\n"],"mappings":"AAEA,MAAMA,aAAa,GAAIC,CAAC,IAAKA,CAAC,IAAIA,CAAC,YAAYC,WAAW;AAC1D,MAAMC,QAAQ,GAAIF,CAAC,IAAKA,CAAC,IAAIA,CAAC,YAAYG,MAAM;AAQhD,OAAO,SAASC,aAAaA,CAACC,GAAW,EAAgD;EACvF,MAAMC,SAAS,GAAGD,GAAG,CAACE,OAAO,CAAC,GAAG,CAAC;EAElC,IAAIC,MAAM;EACV,IAAIC,QAAQ;EACZ,IAAIJ,GAAG,CAACK,KAAK,CAACJ,SAAS,GAAG,CAAC,EAAEA,SAAS,CAAC,KAAK,SAAS,EAAE;IACrDE,MAAM,GAAGL,MAAM,CAACQ,IAAI,CAACN,GAAG,CAACK,KAAK,CAACJ,SAAS,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC;IACxDG,QAAQ,GAAGJ,GAAG,CAACK,KAAK,CAAC,CAAC,EAAEJ,SAAS,GAAG,CAAC,CAAC,CAACM,IAAI,CAAC,CAAC;EAC/C,CAAC,MAAM;IACLJ,MAAM,GAAGL,MAAM,CAACQ,IAAI,CAACE,kBAAkB,CAACR,GAAG,CAACK,KAAK,CAACJ,SAAS,GAAG,CAAC,CAAC,CAAC,CAAC;IAClEG,QAAQ,GAAGJ,GAAG,CAACK,KAAK,CAAC,CAAC,EAAEJ,SAAS,CAAC,CAACM,IAAI,CAAC,CAAC;EAC3C;EAEA,IAAI,CAACH,QAAQ,EAAE;IACbA,QAAQ,GAAG,6BAA6B;EAC1C,CAAC,MAAM,IAAIA,QAAQ,CAACK,UAAU,CAAC,GAAG,CAAC,EAAE;IACnCL,QAAQ,GAAI,aAAYA,QAAS,EAAC;EACpC;EAEA,OAAO;IAACM,WAAW,EAAEC,aAAa,CAACR,MAAM,CAAC;IAAEC;EAAQ,CAAC;AACvD;AAMA,OAAO,SAASO,aAAaA,CAACC,IAAa,EAAe;EACxD,IAAIlB,aAAa,CAACkB,IAAI,CAAC,EAAE;IACvB,OAAOA,IAAI;EACb;EAGA,IAAIf,QAAQ,CAACe,IAAI,CAAC,EAAE;IAElB,MAAMC,UAAU,GAAG,IAAIC,UAAU,CAACF,IAAI,CAAC;IACvC,OAAOC,UAAU,CAACV,MAAM;EAC1B;EAGA,IAAIP,WAAW,CAACmB,MAAM,CAACH,IAAI,CAAC,EAAE;IAC5B,OAAOA,IAAI,CAACT,MAAM;EACpB;EAEA,IAAI,OAAOS,IAAI,KAAK,QAAQ,EAAE;IAC5B,MAAMI,IAAI,GAAGJ,IAAI;IACjB,MAAMK,UAAU,GAAG,IAAIC,WAAW,CAAC,CAAC,CAACC,MAAM,CAACH,IAAI,CAAC;IACjD,OAAOC,UAAU,CAACd,MAAM;EAC1B;EAIA,IAAIS,IAAI,IAAI,OAAOA,IAAI,KAAK,QAAQ,IAAIA,IAAI,CAACQ,cAAc,EAAE;IAE3D,OAAOR,IAAI,CAACQ,cAAc,CAAC,CAAC;EAC9B;EAEA,MAAM,IAAIC,KAAK,CAAE,iBAAgBC,IAAI,CAACC,SAAS,CAACX,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,CAACP,KAAK,CAAC,EAAE,CAAE,GAAE,CAAC;AAC9E"}
package/dist/images/encode-image-node.d.ts
ADDED
@@ -0,0 +1,20 @@
+/**
+ * Returns data bytes representing a compressed image in PNG or JPG format,
+ * This data can be saved using file system (f) methods or
+ * used in a request.
+ * @param image to save
+ * @param options
+ * @param options.type='png' - png, jpg or image/png, image/jpg are valid
+ * @param options.dataURI - Whether to include a data URI header
+ * @return {*} bytes
+ */
+export declare function encodeImageToStreamNode(image: {
+    data: any;
+    width: number;
+    height: number;
+}, options: {
+    type?: string;
+    dataURI?: string;
+}): any;
+export declare function encodeImageNode(image: any, options: any): Promise<unknown>;
+//# sourceMappingURL=encode-image-node.d.ts.map
package/dist/images/encode-image-node.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"encode-image-node.d.ts","sourceRoot":"","sources":["../../src/images/encode-image-node.ts"],"names":[],"mappings":"AAMA;;;;;;;;;GASG;AACH,wBAAgB,uBAAuB,CACrC,KAAK,EAAE;IAAC,IAAI,EAAE,GAAG,CAAC;IAAC,KAAK,EAAE,MAAM,CAAC;IAAC,MAAM,EAAE,MAAM,CAAA;CAAC,EACjD,OAAO,EAAE;IAAC,IAAI,CAAC,EAAE,MAAM,CAAC;IAAC,OAAO,CAAC,EAAE,MAAM,CAAA;CAAC,OAQ3C;AAED,wBAAgB,eAAe,CAAC,KAAK,KAAA,EAAE,OAAO,KAAA,oBAY7C"}
package/dist/images/encode-image-node.js
ADDED
@@ -0,0 +1,20 @@
+import savePixels from 'save-pixels';
+import ndarray from 'ndarray';
+import { bufferToArrayBuffer } from "../buffer/to-array-buffer.node.js";
+export function encodeImageToStreamNode(image, options) {
+  const type = options.type ? options.type.replace('image/', '') : 'jpeg';
+  const pixels = ndarray(image.data, [image.width, image.height, 4], [4, image.width * 4, 1], 0);
+  return savePixels(pixels, type, options);
+}
+export function encodeImageNode(image, options) {
+  const imageStream = encodeImageToStreamNode(image, options);
+  return new Promise(resolve => {
+    const buffers = [];
+    imageStream.on('data', buffer => buffers.push(buffer));
+    imageStream.on('end', () => {
+      const buffer = Buffer.concat(buffers);
+      resolve(bufferToArrayBuffer(buffer));
+    });
+  });
+}
+//# sourceMappingURL=encode-image-node.js.map
package/dist/images/encode-image-node.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"encode-image-node.js","names":["savePixels","ndarray","bufferToArrayBuffer","encodeImageToStreamNode","image","options","type","replace","pixels","data","width","height","encodeImageNode","imageStream","Promise","resolve","buffers","on","buffer","push","Buffer","concat"],"sources":["../../src/images/encode-image-node.ts"],"sourcesContent":["// Use stackgl modules for DOM-less reading and writing of images\n\nimport savePixels from 'save-pixels';\nimport ndarray from 'ndarray';\nimport {bufferToArrayBuffer} from '../buffer/to-array-buffer.node';\n\n/**\n * Returns data bytes representing a compressed image in PNG or JPG format,\n * This data can be saved using file system (f) methods or\n * used in a request.\n * @param image to save\n * @param options\n * @param options.type='png' - png, jpg or image/png, image/jpg are valid\n * @param options.dataURI - Whether to include a data URI header\n * @return {*} bytes\n */\nexport function encodeImageToStreamNode(\n image: {data: any; width: number; height: number},\n options: {type?: string; dataURI?: string}\n) {\n // Support MIME type strings\n const type = options.type ? options.type.replace('image/', '') : 'jpeg';\n const pixels = ndarray(image.data, [image.width, image.height, 4], [4, image.width * 4, 1], 0);\n\n // Note: savePixels returns a stream\n return savePixels(pixels, type, options);\n}\n\nexport function encodeImageNode(image, options) {\n const imageStream = encodeImageToStreamNode(image, options);\n\n return new Promise((resolve) => {\n const buffers: any[] = [];\n imageStream.on('data', (buffer) => buffers.push(buffer));\n // TODO - convert to arraybuffer?\n imageStream.on('end', () => {\n const buffer = Buffer.concat(buffers);\n resolve(bufferToArrayBuffer(buffer));\n });\n });\n}\n"],"mappings":"AAEA,OAAOA,UAAU,MAAM,aAAa;AACpC,OAAOC,OAAO,MAAM,SAAS;AAAC,SACtBC,mBAAmB;AAY3B,OAAO,SAASC,uBAAuBA,CACrCC,KAAiD,EACjDC,OAA0C,EAC1C;EAEA,MAAMC,IAAI,GAAGD,OAAO,CAACC,IAAI,GAAGD,OAAO,CAACC,IAAI,CAACC,OAAO,CAAC,QAAQ,EAAE,EAAE,CAAC,GAAG,MAAM;EACvE,MAAMC,MAAM,GAAGP,OAAO,CAACG,KAAK,CAACK,IAAI,EAAE,CAACL,KAAK,CAACM,KAAK,EAAEN,KAAK,CAACO,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAEP,KAAK,CAACM,KAAK,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC;EAG9F,OAAOV,UAAU,CAACQ,MAAM,EAAEF,IAAI,EAAED,OAAO,CAAC;AAC1C;AAEA,OAAO,SAASO,eAAeA,CAACR,KAAK,EAAEC,OAAO,EAAE;EAC9C,MAAMQ,WAAW,GAAGV,uBAAuB,CAACC,KAAK,EAAEC,OAAO,CAAC;EAE3D,OAAO,IAAIS,OAAO,CAAEC,OAAO,IAAK;IAC9B,MAAMC,OAAc,GAAG,EAAE;IACzBH,WAAW,CAACI,EAAE,CAAC,MAAM,EAAGC,MAAM,IAAKF,OAAO,CAACG,IAAI,CAACD,MAAM,CAAC,CAAC;IAExDL,WAAW,CAACI,EAAE,CAAC,KAAK,EAAE,MAAM;MAC1B,MAAMC,MAAM,GAAGE,MAAM,CAACC,MAAM,CAACL,OAAO,CAAC;MACrCD,OAAO,CAACb,mBAAmB,CAACgB,MAAM,CAAC,CAAC;IACtC,CAAC,CAAC;EACJ,CAAC,CAAC;AACJ"}
package/dist/images/parse-image-node.d.ts
ADDED
@@ -0,0 +1,13 @@
+/** Declares which image format mime types this loader polyfill supports */
+export declare const NODE_FORMAT_SUPPORT: string[];
+type NDArray = {
+    shape: number[];
+    data: Uint8Array;
+    width: number;
+    height: number;
+    components: number;
+    layers: number[];
+};
+export declare function parseImageNode(arrayBuffer: ArrayBuffer, mimeType: string): Promise<NDArray>;
+export {};
+//# sourceMappingURL=parse-image-node.d.ts.map
package/dist/images/parse-image-node.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"parse-image-node.d.ts","sourceRoot":"","sources":["../../src/images/parse-image-node.ts"],"names":[],"mappings":"AAIA,2EAA2E;AAC3E,eAAO,MAAM,mBAAmB,UAA2C,CAAC;AAG5E,KAAK,OAAO,GAAG;IACb,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,IAAI,EAAE,UAAU,CAAC;IACjB,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,EAAE,CAAC;CAClB,CAAC;AAEF,wBAAsB,cAAc,CAAC,WAAW,EAAE,WAAW,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAQjG"}
package/dist/images/parse-image-node.js
ADDED
@@ -0,0 +1,29 @@
+import getPixels from 'get-pixels';
+export const NODE_FORMAT_SUPPORT = ['image/png', 'image/jpeg', 'image/gif'];
+export async function parseImageNode(arrayBuffer, mimeType) {
+  if (!mimeType) {
+    throw new Error('MIMEType is required to parse image under Node.js');
+  }
+  const buffer = arrayBuffer instanceof Buffer ? arrayBuffer : Buffer.from(arrayBuffer);
+  const ndarray = await getPixelsAsync(buffer, mimeType);
+  return ndarray;
+}
+function getPixelsAsync(buffer, mimeType) {
+  return new Promise(resolve => getPixels(buffer, mimeType, (err, ndarray) => {
+    if (err) {
+      throw err;
+    }
+    const shape = [...ndarray.shape];
+    const layers = ndarray.shape.length === 4 ? ndarray.shape.shift() : 1;
+    const data = ndarray.data instanceof Buffer ? new Uint8Array(ndarray.data) : ndarray.data;
+    resolve({
+      shape,
+      data,
+      width: ndarray.shape[0],
+      height: ndarray.shape[1],
+      components: ndarray.shape[2],
+      layers: layers ? [layers] : []
+    });
+  }));
+}
+//# sourceMappingURL=parse-image-node.js.map
package/dist/images/parse-image-node.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"parse-image-node.js","names":["getPixels","NODE_FORMAT_SUPPORT","parseImageNode","arrayBuffer","mimeType","Error","buffer","Buffer","from","ndarray","getPixelsAsync","Promise","resolve","err","shape","layers","length","shift","data","Uint8Array","width","height","components"],"sources":["../../src/images/parse-image-node.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport getPixels from 'get-pixels';\n\n/** Declares which image format mime types this loader polyfill supports */\nexport const NODE_FORMAT_SUPPORT = ['image/png', 'image/jpeg', 'image/gif'];\n\n// Note: These types are also defined in @loaders.gl/images and need to be kept in sync\ntype NDArray = {\n shape: number[];\n data: Uint8Array;\n width: number;\n height: number;\n components: number;\n layers: number[];\n};\n\nexport async function parseImageNode(arrayBuffer: ArrayBuffer, mimeType: string): Promise<NDArray> {\n if (!mimeType) {\n throw new Error('MIMEType is required to parse image under Node.js');\n }\n\n const buffer = arrayBuffer instanceof Buffer ? arrayBuffer : Buffer.from(arrayBuffer);\n const ndarray = await getPixelsAsync(buffer, mimeType);\n return ndarray;\n}\n\n// TODO - check if getPixels callback is asynchronous if provided with buffer input\n// if not, parseImage can be a sync function\nfunction getPixelsAsync(buffer: Buffer, mimeType: string): Promise<NDArray> {\n return new Promise<NDArray>((resolve) =>\n getPixels(buffer, mimeType, (err, ndarray) => {\n if (err) {\n throw err;\n }\n\n const shape = [...ndarray.shape];\n const layers = ndarray.shape.length === 4 ? ndarray.shape.shift() : 1;\n const data = ndarray.data instanceof Buffer ? new Uint8Array(ndarray.data) : ndarray.data;\n\n // extract width/height etc\n resolve({\n shape,\n data,\n width: ndarray.shape[0],\n height: ndarray.shape[1],\n components: ndarray.shape[2],\n // TODO - error\n layers: layers ? [layers] : []\n });\n })\n );\n}\n"],"mappings":"AAEA,OAAOA,SAAS,MAAM,YAAY;AAGlC,OAAO,MAAMC,mBAAmB,GAAG,CAAC,WAAW,EAAE,YAAY,EAAE,WAAW,CAAC;AAY3E,OAAO,eAAeC,cAAcA,CAACC,WAAwB,EAAEC,QAAgB,EAAoB;EACjG,IAAI,CAACA,QAAQ,EAAE;IACb,MAAM,IAAIC,KAAK,CAAC,mDAAmD,CAAC;EACtE;EAEA,MAAMC,MAAM,GAAGH,WAAW,YAAYI,MAAM,GAAGJ,WAAW,GAAGI,MAAM,CAACC,IAAI,CAACL,WAAW,CAAC;EACrF,MAAMM,OAAO,GAAG,MAAMC,cAAc,CAACJ,MAAM,EAAEF,QAAQ,CAAC;EACtD,OAAOK,OAAO;AAChB;AAIA,SAASC,cAAcA,CAACJ,MAAc,EAAEF,QAAgB,EAAoB;EAC1E,OAAO,IAAIO,OAAO,CAAWC,OAAO,IAClCZ,SAAS,CAACM,MAAM,EAAEF,QAAQ,EAAE,CAACS,GAAG,EAAEJ,OAAO,KAAK;IAC5C,IAAII,GAAG,EAAE;MACP,MAAMA,GAAG;IACX;IAEA,MAAMC,KAAK,GAAG,CAAC,GAAGL,OAAO,CAACK,KAAK,CAAC;IAChC,MAAMC,MAAM,GAAGN,OAAO,CAACK,KAAK,CAACE,MAAM,KAAK,CAAC,GAAGP,OAAO,CAACK,KAAK,CAACG,KAAK,CAAC,CAAC,GAAG,CAAC;IACrE,MAAMC,IAAI,GAAGT,OAAO,CAACS,IAAI,YAAYX,MAAM,GAAG,IAAIY,UAAU,CAACV,OAAO,CAACS,IAAI,CAAC,GAAGT,OAAO,CAACS,IAAI;IAGzFN,OAAO,CAAC;MACNE,KAAK;MACLI,IAAI;MACJE,KAAK,EAAEX,OAAO,CAACK,KAAK,CAAC,CAAC,CAAC;MACvBO,MAAM,EAAEZ,OAAO,CAACK,KAAK,CAAC,CAAC,CAAC;MACxBQ,UAAU,EAAEb,OAAO,CAACK,KAAK,CAAC,CAAC,CAAC;MAE5BC,MAAM,EAAEA,MAAM,GAAG,CAACA,MAAM,CAAC,GAAG;IAC9B,CAAC,CAAC;EACJ,CAAC,CACH,CAAC;AACH"}
package/dist/index.js
CHANGED
@@ -1,8 +1,8 @@
 import { isBrowser } from "./utils/is-browser.js";
 import { TextDecoder, TextEncoder } from "./text-encoder/text-encoder.js";
 import { atob, btoa } from "./buffer/btoa.node.js";
-import { encodeImageNode } from "./images/encode-image
-import { parseImageNode, NODE_FORMAT_SUPPORT } from "./images/parse-image
+import { encodeImageNode } from "./images/encode-image-node.js";
+import { parseImageNode, NODE_FORMAT_SUPPORT } from "./images/parse-image-node.js";
 import { NodeFile } from "./filesystems/node-file.js";
 import { NodeFileSystem } from "./filesystems/node-filesystem.js";
 import { fetchNode } from "./filesystems/fetch-node.js";
@@ -32,13 +32,16 @@ if (!('atob' in globalThis) && atob) {
 if (!('btoa' in globalThis) && btoa) {
   globalThis['btoa'] = btoa;
 }
-
-
-
-
-
-
-
+globalThis.loaders.encodeImageNode = encodeImageNode;
+globalThis.loaders.parseImageNode = parseImageNode;
+globalThis.loaders.imageFormatsNode = NODE_FORMAT_SUPPORT;
+globalThis._parseImageNode = parseImageNode;
+globalThis._imageFormatsNode = NODE_FORMAT_SUPPORT;
+import { readFileAsArrayBuffer, readFileAsText, requireFromFile, requireFromString } from "./load-library/require-utils.node.js";
+globalThis.loaders.readFileAsArrayBuffer = readFileAsArrayBuffer;
+globalThis.loaders.readFileAsText = readFileAsText;
+globalThis.loaders.requireFromFile = requireFromFile;
+globalThis.loaders.requireFromString = requireFromString;
 import { Headers as HeadersNode } from "./fetch/headers-polyfill.js";
 import { Response as ResponseNode } from "./fetch/response-polyfill.js";
 import { fetchNode as fetchNodePolyfill } from "./fetch/fetch-polyfill.js";
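Not part of the diff: a sketch of how a consumer could pick these registrations up once the polyfills entry point has been imported for its side effects. The property names follow the assignments in the hunk above; the PNG check and the commented call are illustrative only.

// Sketch, assuming importing '@loaders.gl/polyfills' populates globalThis.loaders as shown above.
import '@loaders.gl/polyfills';

const {parseImageNode, imageFormatsNode} = (globalThis as any).loaders;
if (parseImageNode && imageFormatsNode.includes('image/png')) {
  // pngArrayBuffer is a hypothetical ArrayBuffer holding the bytes of a PNG file.
  // const image = await parseImageNode(pngArrayBuffer, 'image/png');
}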
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.js","names":["isBrowser","TextDecoder","TextEncoder","atob","btoa","encodeImageNode","parseImageNode","NODE_FORMAT_SUPPORT","NodeFile","NodeFileSystem","fetchNode","versions","nodeVersion","parseInt","node","split","makeNodeStream","Blob_","Blob","File_","File","console","error","globalThis","loaders","Headers","HeadersNode","Response","ResponseNode","fetchNodePolyfill","fetch"],"sources":["../src/index.ts"],"sourcesContent":["/* eslint-disable dot-notation */\nimport {isBrowser} from './utils/is-browser';\n\nimport {TextDecoder, TextEncoder} from './text-encoder/text-encoder';\n\n// Node specific\nimport {atob, btoa} from './buffer/btoa.node';\n\nimport {encodeImageNode} from './images/encode-image
+
{"version":3,"file":"index.js","names":["isBrowser","TextDecoder","TextEncoder","atob","btoa","encodeImageNode","parseImageNode","NODE_FORMAT_SUPPORT","NodeFile","NodeFileSystem","fetchNode","versions","nodeVersion","parseInt","node","split","makeNodeStream","Blob_","Blob","File_","File","console","error","globalThis","loaders","imageFormatsNode","_parseImageNode","_imageFormatsNode","readFileAsArrayBuffer","readFileAsText","requireFromFile","requireFromString","Headers","HeadersNode","Response","ResponseNode","fetchNodePolyfill","fetch"],"sources":["../src/index.ts"],"sourcesContent":["/* eslint-disable dot-notation */\nimport {isBrowser} from './utils/is-browser';\n\nimport {TextDecoder, TextEncoder} from './text-encoder/text-encoder';\n\n// Node specific\nimport {atob, btoa} from './buffer/btoa.node';\n\nimport {encodeImageNode} from './images/encode-image-node';\nimport {parseImageNode, NODE_FORMAT_SUPPORT} from './images/parse-image-node';\n\n// FILESYSTEM POLYFILLS\nimport {NodeFile} from './filesystems/node-file';\nimport {NodeFileSystem} from './filesystems/node-filesystem';\nimport {fetchNode} from './filesystems/fetch-node';\n\n// NODE VERSION\nimport {versions} from 'node:process';\nexport const nodeVersion = parseInt(versions.node.split('.')[0]);\n\n// STREAM POLYFILLS\nimport {makeNodeStream} from './streams/make-node-stream';\n\n// BLOB AND FILE POLYFILLS\nexport {Blob_ as Blob} from './file/install-blob-polyfills';\nexport {File_ as File} from './file/install-file-polyfills';\n\nif (isBrowser) {\n // eslint-disable-next-line no-console\n console.error(\n 'loaders.gl: The @loaders.gl/polyfills should only be used in Node.js environments'\n );\n}\n\nglobalThis.loaders = globalThis.loaders || {};\n\n// STREAM POLYFILLS\nexport {makeNodeStream} from './streams/make-node-stream';\nglobalThis.loaders.makeNodeStream = makeNodeStream;\n\n// FILESYSTEM POLYFILLS\nglobalThis.loaders.NodeFile = NodeFile;\nglobalThis.loaders.NodeFileSystem = NodeFileSystem;\nglobalThis.loaders.fetchNode = fetchNode;\n\n// POLYFILLS: TextEncoder, TextDecoder\n// - Recent Node versions have these classes but virtually no encodings unless special build.\n// - Browser: Edge, IE11 do not have these\n\nif (!globalThis.TextEncoder) {\n // @ts-expect-error\n globalThis.TextEncoder = TextEncoder;\n}\n\nif (!globalThis.TextDecoder) {\n // @ts-expect-error\n globalThis.TextDecoder = TextDecoder;\n}\n\n// POLYFILLS: btoa, atob\n// - Node: Yes\n// - Browser: No\n\nif (!('atob' in globalThis) && atob) {\n globalThis['atob'] = atob;\n}\nif (!('btoa' in globalThis) && btoa) {\n globalThis['btoa'] = btoa;\n}\n\n// NODE IMAGE FUNCTIONS:\n// These are not official polyfills but used by the @loaders.gl/images module if installed\n// TODO - is there an appropriate Image API we could polyfill using an adapter?\n\nglobalThis.loaders.encodeImageNode = encodeImageNode;\nglobalThis.loaders.parseImageNode = parseImageNode;\nglobalThis.loaders.imageFormatsNode = NODE_FORMAT_SUPPORT;\n\n// Deprecated, remove after republish\nglobalThis._parseImageNode = parseImageNode;\nglobalThis._imageFormatsNode = NODE_FORMAT_SUPPORT;\n\n// LOAD LIBRARY\n\nimport {\n readFileAsArrayBuffer,\n readFileAsText,\n requireFromFile,\n requireFromString\n} from './load-library/require-utils.node';\n\nglobalThis.loaders.readFileAsArrayBuffer = readFileAsArrayBuffer;\nglobalThis.loaders.readFileAsText = readFileAsText;\nglobalThis.loaders.requireFromFile = requireFromFile;\nglobalThis.loaders.requireFromString = requireFromString;\n\n// DEPRECATED 
POLYFILL:\n// - Node v18+: No, not needed\n// - Node v16 and lower: Yes\n// - Browsers (evergreen): Not needed.\n// - IE11: No. This polyfill is node only, install external polyfill\nimport {Headers as HeadersNode} from './fetch/headers-polyfill';\nimport {Response as ResponseNode} from './fetch/response-polyfill';\nimport {fetchNode as fetchNodePolyfill} from './fetch/fetch-polyfill';\n\nif (nodeVersion < 18) {\n if (!('Headers' in globalThis) && HeadersNode) {\n // @ts-ignore\n globalThis.Headers = HeadersNode;\n }\n\n if (!('Response' in globalThis) && ResponseNode) {\n // @ts-ignore\n globalThis.Response = ResponseNode;\n }\n\n if (!('fetch' in globalThis) && fetchNodePolyfill) {\n // @ts-ignore\n globalThis.fetch = fetchNodePolyfill;\n }\n}\n"],"mappings":"SACQA,SAAS;AAAA,SAETC,WAAW,EAAEC,WAAW;AAAA,SAGxBC,IAAI,EAAEC,IAAI;AAAA,SAEVC,eAAe;AAAA,SACfC,cAAc,EAAEC,mBAAmB;AAAA,SAGnCC,QAAQ;AAAA,SACRC,cAAc;AAAA,SACdC,SAAS;AAGjB,SAAQC,QAAQ,QAAO,cAAc;AACrC,OAAO,MAAMC,WAAW,GAAGC,QAAQ,CAACF,QAAQ,CAACG,IAAI,CAACC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAAC,SAGzDC,cAAc;AAAA,SAGdC,KAAK,IAAIC,IAAI;AAAA,SACbC,KAAK,IAAIC,IAAI;AAErB,IAAIpB,SAAS,EAAE;EAEbqB,OAAO,CAACC,KAAK,CACX,mFACF,CAAC;AACH;AAEAC,UAAU,CAACC,OAAO,GAAGD,UAAU,CAACC,OAAO,IAAI,CAAC,CAAC;AAAC,SAGtCR,cAAc;AACtBO,UAAU,CAACC,OAAO,CAACR,cAAc,GAAGA,cAAc;AAGlDO,UAAU,CAACC,OAAO,CAAChB,QAAQ,GAAGA,QAAQ;AACtCe,UAAU,CAACC,OAAO,CAACf,cAAc,GAAGA,cAAc;AAClDc,UAAU,CAACC,OAAO,CAACd,SAAS,GAAGA,SAAS;AAMxC,IAAI,CAACa,UAAU,CAACrB,WAAW,EAAE;EAE3BqB,UAAU,CAACrB,WAAW,GAAGA,WAAW;AACtC;AAEA,IAAI,CAACqB,UAAU,CAACtB,WAAW,EAAE;EAE3BsB,UAAU,CAACtB,WAAW,GAAGA,WAAW;AACtC;AAMA,IAAI,EAAE,MAAM,IAAIsB,UAAU,CAAC,IAAIpB,IAAI,EAAE;EACnCoB,UAAU,CAAC,MAAM,CAAC,GAAGpB,IAAI;AAC3B;AACA,IAAI,EAAE,MAAM,IAAIoB,UAAU,CAAC,IAAInB,IAAI,EAAE;EACnCmB,UAAU,CAAC,MAAM,CAAC,GAAGnB,IAAI;AAC3B;AAMAmB,UAAU,CAACC,OAAO,CAACnB,eAAe,GAAGA,eAAe;AACpDkB,UAAU,CAACC,OAAO,CAAClB,cAAc,GAAGA,cAAc;AAClDiB,UAAU,CAACC,OAAO,CAACC,gBAAgB,GAAGlB,mBAAmB;AAGzDgB,UAAU,CAACG,eAAe,GAAGpB,cAAc;AAC3CiB,UAAU,CAACI,iBAAiB,GAAGpB,mBAAmB;AAAC,SAKjDqB,qBAAqB,EACrBC,cAAc,EACdC,eAAe,EACfC,iBAAiB;AAGnBR,UAAU,CAACC,OAAO,CAACI,qBAAqB,GAAGA,qBAAqB;AAChEL,UAAU,CAACC,OAAO,CAACK,cAAc,GAAGA,cAAc;AAClDN,UAAU,CAACC,OAAO,CAACM,eAAe,GAAGA,eAAe;AACpDP,UAAU,CAACC,OAAO,CAACO,iBAAiB,GAAGA,iBAAiB;AAAC,SAOjDC,OAAO,IAAIC,WAAW;AAAA,SACtBC,QAAQ,IAAIC,YAAY;AAAA,SACxBzB,SAAS,IAAI0B,iBAAiB;AAEtC,IAAIxB,WAAW,GAAG,EAAE,EAAE;EACpB,IAAI,EAAE,SAAS,IAAIW,UAAU,CAAC,IAAIU,WAAW,EAAE;IAE7CV,UAAU,CAACS,OAAO,GAAGC,WAAW;EAClC;EAEA,IAAI,EAAE,UAAU,IAAIV,UAAU,CAAC,IAAIY,YAAY,EAAE;IAE/CZ,UAAU,CAACW,QAAQ,GAAGC,YAAY;EACpC;EAEA,IAAI,EAAE,OAAO,IAAIZ,UAAU,CAAC,IAAIa,iBAAiB,EAAE;IAEjDb,UAAU,CAACc,KAAK,GAAGD,iBAAiB;EACtC;AACF"}
package/dist/load-library/require-utils.node.d.ts
ADDED
@@ -0,0 +1,18 @@
+/**
+ * Load a file from local file system
+ * @param filename
+ * @returns
+ */
+export declare function readFileAsArrayBuffer(filename: string): Promise<ArrayBuffer>;
+/**
+ * Load a file from local file system
+ * @param filename
+ * @returns
+ */
+export declare function readFileAsText(filename: string): Promise<string>;
+export declare function requireFromFile(filename: string): Promise<any>;
+export declare function requireFromString(code: string, filename?: string, options?: {
+    prependPaths?: string[];
+    appendPaths?: string[];
+}): any;
+//# sourceMappingURL=require-utils.node.d.ts.map
package/dist/load-library/require-utils.node.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"require-utils.node.d.ts","sourceRoot":"","sources":["../../src/load-library/require-utils.node.ts"],"names":[],"mappings":"AAUA;;;;GAIG;AACH,wBAAsB,qBAAqB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,WAAW,CAAC,CAOlF;AAED;;;;GAIG;AACH,wBAAsB,cAAc,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAOtE;AAMD,wBAAsB,eAAe,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,CAYpE;AAQD,wBAAgB,iBAAiB,CAC/B,IAAI,EAAE,MAAM,EACZ,QAAQ,SAAK,EACb,OAAO,CAAC,EAAE;IACR,YAAY,CAAC,EAAE,MAAM,EAAE,CAAC;IACxB,WAAW,CAAC,EAAE,MAAM,EAAE,CAAC;CACxB,GACA,GAAG,CA+BL"}
package/dist/load-library/require-utils.node.js
ADDED
@@ -0,0 +1,55 @@
+import Module from 'module';
+import path from 'path';
+import fs from 'fs';
+export async function readFileAsArrayBuffer(filename) {
+  if (filename.startsWith('http')) {
+    const response = await fetch(filename);
+    return await response.arrayBuffer();
+  }
+  const buffer = fs.readFileSync(filename);
+  return buffer.buffer;
+}
+export async function readFileAsText(filename) {
+  if (filename.startsWith('http')) {
+    const response = await fetch(filename);
+    return await response.text();
+  }
+  const text = fs.readFileSync(filename, 'utf8');
+  return text;
+}
+export async function requireFromFile(filename) {
+  if (filename.startsWith('http')) {
+    const response = await fetch(filename);
+    const code = await response.text();
+    return requireFromString(code);
+  }
+  if (!filename.startsWith('/')) {
+    filename = `${process.cwd()}/${filename}`;
+  }
+  const code = await fs.readFileSync(filename, 'utf8');
+  return requireFromString(code);
+}
+export function requireFromString(code) {
+  var _module, _options, _options2;
+  let filename = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : '';
+  let options = arguments.length > 2 ? arguments[2] : undefined;
+  if (typeof filename === 'object') {
+    options = filename;
+    filename = '';
+  }
+  filename = filename.replace('file://', '');
+  if (typeof code !== 'string') {
+    throw new Error(`code must be a string, not ${typeof code}`);
+  }
+  const paths = Module._nodeModulePaths(path.dirname(filename));
+  const parent = typeof module !== 'undefined' && ((_module = module) === null || _module === void 0 ? void 0 : _module.parent);
+  const newModule = new Module(filename, parent);
+  newModule.filename = filename;
+  newModule.paths = [].concat(((_options = options) === null || _options === void 0 ? void 0 : _options.prependPaths) || []).concat(paths).concat(((_options2 = options) === null || _options2 === void 0 ? void 0 : _options2.appendPaths) || []);
+  newModule._compile(code, filename);
+  if (parent && parent.children) {
+    parent.children.splice(parent.children.indexOf(newModule), 1);
+  }
+  return newModule.exports;
+}
+//# sourceMappingURL=require-utils.node.js.map
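Not part of the diff: a small usage sketch for requireFromFile as compiled above. The plugin path is hypothetical; access goes through globalThis.loaders to mirror the registration added to dist/index.js, since these helpers are not re-exported from the package entry point in this diff.

// Sketch: './my-plugin.cjs' is an assumed local CommonJS file; relative paths are prefixed with process.cwd().
import '@loaders.gl/polyfills';

const {requireFromFile} = (globalThis as any).loaders;
const pluginExports = await requireFromFile('./my-plugin.cjs');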
package/dist/load-library/require-utils.node.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"require-utils.node.js","names":["Module","path","fs","readFileAsArrayBuffer","filename","startsWith","response","fetch","arrayBuffer","buffer","readFileSync","readFileAsText","text","requireFromFile","code","requireFromString","process","cwd","_module","_options","_options2","arguments","length","undefined","options","replace","Error","paths","_nodeModulePaths","dirname","parent","module","newModule","concat","prependPaths","appendPaths","_compile","children","splice","indexOf","exports"],"sources":["../../src/load-library/require-utils.node.ts"],"sourcesContent":["// Fork of https://github.com/floatdrop/require-from-string/blob/master/index.js\n// Copyright (c) Vsevolod Strukchinsky <floatdrop@gmail.com> (github.com/floatdrop)\n// MIT license\n\n// this file is not visible to webpack (it is excluded in the package.json \"browser\" field).\n\nimport Module from 'module';\nimport path from 'path';\nimport fs from 'fs';\n\n/**\n * Load a file from local file system\n * @param filename\n * @returns\n */\nexport async function readFileAsArrayBuffer(filename: string): Promise<ArrayBuffer> {\n if (filename.startsWith('http')) {\n const response = await fetch(filename);\n return await response.arrayBuffer();\n }\n const buffer = fs.readFileSync(filename);\n return buffer.buffer;\n}\n\n/**\n * Load a file from local file system\n * @param filename\n * @returns\n */\nexport async function readFileAsText(filename: string): Promise<string> {\n if (filename.startsWith('http')) {\n const response = await fetch(filename);\n return await response.text();\n }\n const text = fs.readFileSync(filename, 'utf8');\n return text;\n}\n\n// Node.js Dynamically require from file\n// Relative names are resolved relative to cwd\n// This indirect function is provided because webpack will try to bundle `module.require`.\n// this file is not visible to webpack (it is excluded in the package.json \"browser\" field).\nexport async function requireFromFile(filename: string): Promise<any> {\n if (filename.startsWith('http')) {\n const response = await fetch(filename);\n const code = await response.text();\n return requireFromString(code);\n }\n\n if (!filename.startsWith('/')) {\n filename = `${process.cwd()}/${filename}`;\n }\n const code = await fs.readFileSync(filename, 'utf8');\n return requireFromString(code);\n}\n\n// Dynamically require from string\n// - `code` - Required - Type: string - Module code.\n// - `filename` - Type: string - Default: '' - Optional filename.\n// - `options.appendPaths` Type: Array List of paths, that will be appended to module paths.\n// Useful, when you want to be able require modules from these paths.\n// - `options.prependPaths` Type: Array Same as appendPaths, but paths will be prepended.\nexport function requireFromString(\n code: string,\n filename = '',\n options?: {\n prependPaths?: string[];\n appendPaths?: string[];\n }\n): any {\n if (typeof filename === 'object') {\n options = filename;\n filename = '';\n }\n filename = filename.replace('file://', '');\n\n if (typeof code !== 'string') {\n throw new Error(`code must be a string, not ${typeof code}`);\n }\n\n // @ts-ignore\n const paths = Module._nodeModulePaths(path.dirname(filename));\n\n const parent = typeof module !== 'undefined' && module?.parent;\n\n // @ts-ignore\n const newModule = new Module(filename, parent);\n newModule.filename = filename;\n newModule.paths = ([] as string[])\n .concat(options?.prependPaths || [])\n .concat(paths)\n .concat(options?.appendPaths || []);\n // @ts-ignore\n 
newModule._compile(code, filename);\n\n if (parent && parent.children) {\n parent.children.splice(parent.children.indexOf(newModule), 1);\n }\n\n return newModule.exports;\n}\n"],"mappings":"AAMA,OAAOA,MAAM,MAAM,QAAQ;AAC3B,OAAOC,IAAI,MAAM,MAAM;AACvB,OAAOC,EAAE,MAAM,IAAI;AAOnB,OAAO,eAAeC,qBAAqBA,CAACC,QAAgB,EAAwB;EAClF,IAAIA,QAAQ,CAACC,UAAU,CAAC,MAAM,CAAC,EAAE;IAC/B,MAAMC,QAAQ,GAAG,MAAMC,KAAK,CAACH,QAAQ,CAAC;IACtC,OAAO,MAAME,QAAQ,CAACE,WAAW,CAAC,CAAC;EACrC;EACA,MAAMC,MAAM,GAAGP,EAAE,CAACQ,YAAY,CAACN,QAAQ,CAAC;EACxC,OAAOK,MAAM,CAACA,MAAM;AACtB;AAOA,OAAO,eAAeE,cAAcA,CAACP,QAAgB,EAAmB;EACtE,IAAIA,QAAQ,CAACC,UAAU,CAAC,MAAM,CAAC,EAAE;IAC/B,MAAMC,QAAQ,GAAG,MAAMC,KAAK,CAACH,QAAQ,CAAC;IACtC,OAAO,MAAME,QAAQ,CAACM,IAAI,CAAC,CAAC;EAC9B;EACA,MAAMA,IAAI,GAAGV,EAAE,CAACQ,YAAY,CAACN,QAAQ,EAAE,MAAM,CAAC;EAC9C,OAAOQ,IAAI;AACb;AAMA,OAAO,eAAeC,eAAeA,CAACT,QAAgB,EAAgB;EACpE,IAAIA,QAAQ,CAACC,UAAU,CAAC,MAAM,CAAC,EAAE;IAC/B,MAAMC,QAAQ,GAAG,MAAMC,KAAK,CAACH,QAAQ,CAAC;IACtC,MAAMU,IAAI,GAAG,MAAMR,QAAQ,CAACM,IAAI,CAAC,CAAC;IAClC,OAAOG,iBAAiB,CAACD,IAAI,CAAC;EAChC;EAEA,IAAI,CAACV,QAAQ,CAACC,UAAU,CAAC,GAAG,CAAC,EAAE;IAC7BD,QAAQ,GAAI,GAAEY,OAAO,CAACC,GAAG,CAAC,CAAE,IAAGb,QAAS,EAAC;EAC3C;EACA,MAAMU,IAAI,GAAG,MAAMZ,EAAE,CAACQ,YAAY,CAACN,QAAQ,EAAE,MAAM,CAAC;EACpD,OAAOW,iBAAiB,CAACD,IAAI,CAAC;AAChC;AAQA,OAAO,SAASC,iBAAiBA,CAC/BD,IAAY,EAMP;EAAA,IAAAI,OAAA,EAAAC,QAAA,EAAAC,SAAA;EAAA,IALLhB,QAAQ,GAAAiB,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,EAAE;EAAA,IACbG,OAGC,GAAAH,SAAA,CAAAC,MAAA,OAAAD,SAAA,MAAAE,SAAA;EAED,IAAI,OAAOnB,QAAQ,KAAK,QAAQ,EAAE;IAChCoB,OAAO,GAAGpB,QAAQ;IAClBA,QAAQ,GAAG,EAAE;EACf;EACAA,QAAQ,GAAGA,QAAQ,CAACqB,OAAO,CAAC,SAAS,EAAE,EAAE,CAAC;EAE1C,IAAI,OAAOX,IAAI,KAAK,QAAQ,EAAE;IAC5B,MAAM,IAAIY,KAAK,CAAE,8BAA6B,OAAOZ,IAAK,EAAC,CAAC;EAC9D;EAGA,MAAMa,KAAK,GAAG3B,MAAM,CAAC4B,gBAAgB,CAAC3B,IAAI,CAAC4B,OAAO,CAACzB,QAAQ,CAAC,CAAC;EAE7D,MAAM0B,MAAM,GAAG,OAAOC,MAAM,KAAK,WAAW,MAAAb,OAAA,GAAIa,MAAM,cAAAb,OAAA,uBAANA,OAAA,CAAQY,MAAM;EAG9D,MAAME,SAAS,GAAG,IAAIhC,MAAM,CAACI,QAAQ,EAAE0B,MAAM,CAAC;EAC9CE,SAAS,CAAC5B,QAAQ,GAAGA,QAAQ;EAC7B4B,SAAS,CAACL,KAAK,GAAI,EAAE,CAClBM,MAAM,CAAC,EAAAd,QAAA,GAAAK,OAAO,cAAAL,QAAA,uBAAPA,QAAA,CAASe,YAAY,KAAI,EAAE,CAAC,CACnCD,MAAM,CAACN,KAAK,CAAC,CACbM,MAAM,CAAC,EAAAb,SAAA,GAAAI,OAAO,cAAAJ,SAAA,uBAAPA,SAAA,CAASe,WAAW,KAAI,EAAE,CAAC;EAErCH,SAAS,CAACI,QAAQ,CAACtB,IAAI,EAAEV,QAAQ,CAAC;EAElC,IAAI0B,MAAM,IAAIA,MAAM,CAACO,QAAQ,EAAE;IAC7BP,MAAM,CAACO,QAAQ,CAACC,MAAM,CAACR,MAAM,CAACO,QAAQ,CAACE,OAAO,CAACP,SAAS,CAAC,EAAE,CAAC,CAAC;EAC/D;EAEA,OAAOA,SAAS,CAACQ,OAAO;AAC1B"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@loaders.gl/polyfills",
-  "version": "4.0.0-beta.
+  "version": "4.0.0-beta.4",
   "description": "Polyfills for TextEncoder/TextDecoder",
   "license": "MIT",
   "type": "module",
@@ -45,8 +45,8 @@
   },
   "dependencies": {
     "@babel/runtime": "^7.3.1",
-    "@loaders.gl/crypto": "4.0.0-beta.
-    "@loaders.gl/loader-utils": "4.0.0-beta.
+    "@loaders.gl/crypto": "4.0.0-beta.4",
+    "@loaders.gl/loader-utils": "4.0.0-beta.4",
     "buffer": "^6.0.3",
     "get-pixels": "^3.3.2",
     "ndarray": "^1.0.18",
@@ -55,5 +55,5 @@
     "through": "^2.3.8",
     "web-streams-polyfill": "^3.0.0"
   },
-  "gitHead": "
+  "gitHead": "848c20b474532d301f2c3f8d4e1fb9bf262b86d4"
 }
package/src/fetch/utils/decode-data-uri.node.ts
ADDED
@@ -0,0 +1,69 @@
+// Based on binary-gltf-utils under MIT license: Copyright (c) 2016-17 Karl Cheng
+
+const isArrayBuffer = (x) => x && x instanceof ArrayBuffer;
+const isBuffer = (x) => x && x instanceof Buffer;
+
+/**
+ * Parses a data URI into a buffer, as well as retrieving its declared MIME type.
+ *
+ * @param {string} uri - a data URI (assumed to be valid)
+ * @returns {Object} { buffer, mimeType }
+ */
+export function decodeDataUri(uri: string): {arrayBuffer: ArrayBuffer; mimeType: string} {
+  const dataIndex = uri.indexOf(',');
+
+  let buffer;
+  let mimeType;
+  if (uri.slice(dataIndex - 7, dataIndex) === ';base64') {
+    buffer = Buffer.from(uri.slice(dataIndex + 1), 'base64');
+    mimeType = uri.slice(5, dataIndex - 7).trim();
+  } else {
+    buffer = Buffer.from(decodeURIComponent(uri.slice(dataIndex + 1)));
+    mimeType = uri.slice(5, dataIndex).trim();
+  }
+
+  if (!mimeType) {
+    mimeType = 'text/plain;charset=US-ASCII';
+  } else if (mimeType.startsWith(';')) {
+    mimeType = `text/plain${mimeType}`;
+  }
+
+  return {arrayBuffer: toArrayBuffer(buffer), mimeType};
+}
+
+/**
+ * @param data
+ * @todo Duplicate of core
+ */
+export function toArrayBuffer(data: unknown): ArrayBuffer {
+  if (isArrayBuffer(data)) {
+    return data as ArrayBuffer;
+  }
+
+  // TODO - per docs we should just be able to call buffer.buffer, but there are issues
+  if (isBuffer(data)) {
+    // @ts-expect-error
+    const typedArray = new Uint8Array(data);
+    return typedArray.buffer;
+  }
+
+  // Careful - Node Buffers will look like ArrayBuffers (keep after isBuffer)
+  if (ArrayBuffer.isView(data)) {
+    return data.buffer;
+  }
+
+  if (typeof data === 'string') {
+    const text = data;
+    const uint8Array = new TextEncoder().encode(text);
+    return uint8Array.buffer;
+  }
+
+  // HACK to support Blob polyfill
+  // @ts-expect-error
+  if (data && typeof data === 'object' && data._toArrayBuffer) {
+    // @ts-expect-error
+    return data._toArrayBuffer();
+  }
+
+  throw new Error(`toArrayBuffer(${JSON.stringify(data, null, 2).slice(10)})`);
+}
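Not part of the diff: a minimal sketch of the two branches in decodeDataUri above (base64 payload vs. URL-encoded payload). It assumes decodeDataUri is in scope via an import of this module; the sample URIs are made up.

// Base64 branch: ';base64' precedes the comma, so the payload is base64-decoded.
const decoded = decodeDataUri('data:text/plain;base64,SGVsbG8=');
console.log(decoded.mimeType); // 'text/plain'
console.log(new TextDecoder().decode(decoded.arrayBuffer)); // 'Hello'

// Plain branch: the payload is URL-decoded and a missing type falls back to the default.
const plain = decodeDataUri('data:,Hello%20world');
console.log(plain.mimeType); // 'text/plain;charset=US-ASCII'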
package/src/images/encode-image-node.ts
ADDED
@@ -0,0 +1,41 @@
+// Use stackgl modules for DOM-less reading and writing of images
+
+import savePixels from 'save-pixels';
+import ndarray from 'ndarray';
+import {bufferToArrayBuffer} from '../buffer/to-array-buffer.node';
+
+/**
+ * Returns data bytes representing a compressed image in PNG or JPG format,
+ * This data can be saved using file system (f) methods or
+ * used in a request.
+ * @param image to save
+ * @param options
+ * @param options.type='png' - png, jpg or image/png, image/jpg are valid
+ * @param options.dataURI - Whether to include a data URI header
+ * @return {*} bytes
+ */
+export function encodeImageToStreamNode(
+  image: {data: any; width: number; height: number},
+  options: {type?: string; dataURI?: string}
+) {
+  // Support MIME type strings
+  const type = options.type ? options.type.replace('image/', '') : 'jpeg';
+  const pixels = ndarray(image.data, [image.width, image.height, 4], [4, image.width * 4, 1], 0);
+
+  // Note: savePixels returns a stream
+  return savePixels(pixels, type, options);
+}
+
+export function encodeImageNode(image, options) {
+  const imageStream = encodeImageToStreamNode(image, options);
+
+  return new Promise((resolve) => {
+    const buffers: any[] = [];
+    imageStream.on('data', (buffer) => buffers.push(buffer));
+    // TODO - convert to arraybuffer?
+    imageStream.on('end', () => {
+      const buffer = Buffer.concat(buffers);
+      resolve(bufferToArrayBuffer(buffer));
+    });
+  });
+}
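Not part of the diff: a sketch of driving encodeImageNode with a tiny in-memory RGBA image. The pixel layout follows the ndarray stride [4, width * 4, 1] used above; the function is assumed to be in scope (it is also registered on globalThis.loaders by index.ts).

// A 2x2 opaque red RGBA image (4 bytes per pixel, row-major).
const image = {
  width: 2,
  height: 2,
  data: new Uint8Array([
    255, 0, 0, 255,  255, 0, 0, 255,
    255, 0, 0, 255,  255, 0, 0, 255
  ])
};

// 'image/png' is reduced to 'png' before being handed to save-pixels.
const arrayBuffer = await encodeImageNode(image, {type: 'image/png'});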
package/src/images/parse-image-node.ts
ADDED
@@ -0,0 +1,53 @@
+// loaders.gl, MIT license
+
+import getPixels from 'get-pixels';
+
+/** Declares which image format mime types this loader polyfill supports */
+export const NODE_FORMAT_SUPPORT = ['image/png', 'image/jpeg', 'image/gif'];
+
+// Note: These types are also defined in @loaders.gl/images and need to be kept in sync
+type NDArray = {
+  shape: number[];
+  data: Uint8Array;
+  width: number;
+  height: number;
+  components: number;
+  layers: number[];
+};
+
+export async function parseImageNode(arrayBuffer: ArrayBuffer, mimeType: string): Promise<NDArray> {
+  if (!mimeType) {
+    throw new Error('MIMEType is required to parse image under Node.js');
+  }
+
+  const buffer = arrayBuffer instanceof Buffer ? arrayBuffer : Buffer.from(arrayBuffer);
+  const ndarray = await getPixelsAsync(buffer, mimeType);
+  return ndarray;
+}
+
+// TODO - check if getPixels callback is asynchronous if provided with buffer input
+// if not, parseImage can be a sync function
+function getPixelsAsync(buffer: Buffer, mimeType: string): Promise<NDArray> {
+  return new Promise<NDArray>((resolve) =>
+    getPixels(buffer, mimeType, (err, ndarray) => {
+      if (err) {
+        throw err;
+      }
+
+      const shape = [...ndarray.shape];
+      const layers = ndarray.shape.length === 4 ? ndarray.shape.shift() : 1;
+      const data = ndarray.data instanceof Buffer ? new Uint8Array(ndarray.data) : ndarray.data;
+
+      // extract width/height etc
+      resolve({
+        shape,
+        data,
+        width: ndarray.shape[0],
+        height: ndarray.shape[1],
+        components: ndarray.shape[2],
+        // TODO - error
+        layers: layers ? [layers] : []
+      });
+    })
+  );
+}
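Not part of the diff: a sketch of calling parseImageNode with bytes read from disk. './texture.png' is a hypothetical file; passing a Node Buffer works at runtime because of the instanceof Buffer check above, hence the cast.

import {readFileSync} from 'fs';

// parseImageNode is assumed to be in scope (it is also exposed as globalThis.loaders.parseImageNode).
const fileBuffer = readFileSync('./texture.png');
const pixels = await parseImageNode(fileBuffer as unknown as ArrayBuffer, 'image/png');
console.log(pixels.width, pixels.height, pixels.components); // e.g. 256 256 4 for an RGBA PNG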
package/src/index.ts
CHANGED
@@ -6,8 +6,8 @@ import {TextDecoder, TextEncoder} from './text-encoder/text-encoder';
 // Node specific
 import {atob, btoa} from './buffer/btoa.node';
 
-import {encodeImageNode} from './images/encode-image
-import {parseImageNode, NODE_FORMAT_SUPPORT} from './images/parse-image
+import {encodeImageNode} from './images/encode-image-node';
+import {parseImageNode, NODE_FORMAT_SUPPORT} from './images/parse-image-node';
 
 // FILESYSTEM POLYFILLS
 import {NodeFile} from './filesystems/node-file';
@@ -72,14 +72,27 @@ if (!('btoa' in globalThis) && btoa) {
 // These are not official polyfills but used by the @loaders.gl/images module if installed
 // TODO - is there an appropriate Image API we could polyfill using an adapter?
 
-
-
-
+globalThis.loaders.encodeImageNode = encodeImageNode;
+globalThis.loaders.parseImageNode = parseImageNode;
+globalThis.loaders.imageFormatsNode = NODE_FORMAT_SUPPORT;
 
-
-
-
-
+// Deprecated, remove after republish
+globalThis._parseImageNode = parseImageNode;
+globalThis._imageFormatsNode = NODE_FORMAT_SUPPORT;
+
+// LOAD LIBRARY
+
+import {
+  readFileAsArrayBuffer,
+  readFileAsText,
+  requireFromFile,
+  requireFromString
+} from './load-library/require-utils.node';
+
+globalThis.loaders.readFileAsArrayBuffer = readFileAsArrayBuffer;
+globalThis.loaders.readFileAsText = readFileAsText;
+globalThis.loaders.requireFromFile = requireFromFile;
+globalThis.loaders.requireFromString = requireFromString;
 
 // DEPRECATED POLYFILL:
 // - Node v18+: No, not needed
package/src/load-library/require-utils.node.ts
ADDED
@@ -0,0 +1,101 @@
+// Fork of https://github.com/floatdrop/require-from-string/blob/master/index.js
+// Copyright (c) Vsevolod Strukchinsky <floatdrop@gmail.com> (github.com/floatdrop)
+// MIT license
+
+// this file is not visible to webpack (it is excluded in the package.json "browser" field).
+
+import Module from 'module';
+import path from 'path';
+import fs from 'fs';
+
+/**
+ * Load a file from local file system
+ * @param filename
+ * @returns
+ */
+export async function readFileAsArrayBuffer(filename: string): Promise<ArrayBuffer> {
+  if (filename.startsWith('http')) {
+    const response = await fetch(filename);
+    return await response.arrayBuffer();
+  }
+  const buffer = fs.readFileSync(filename);
+  return buffer.buffer;
+}
+
+/**
+ * Load a file from local file system
+ * @param filename
+ * @returns
+ */
+export async function readFileAsText(filename: string): Promise<string> {
+  if (filename.startsWith('http')) {
+    const response = await fetch(filename);
+    return await response.text();
+  }
+  const text = fs.readFileSync(filename, 'utf8');
+  return text;
+}
+
+// Node.js Dynamically require from file
+// Relative names are resolved relative to cwd
+// This indirect function is provided because webpack will try to bundle `module.require`.
+// this file is not visible to webpack (it is excluded in the package.json "browser" field).
+export async function requireFromFile(filename: string): Promise<any> {
+  if (filename.startsWith('http')) {
+    const response = await fetch(filename);
+    const code = await response.text();
+    return requireFromString(code);
+  }
+
+  if (!filename.startsWith('/')) {
+    filename = `${process.cwd()}/${filename}`;
+  }
+  const code = await fs.readFileSync(filename, 'utf8');
+  return requireFromString(code);
+}
+
+// Dynamically require from string
+// - `code` - Required - Type: string - Module code.
+// - `filename` - Type: string - Default: '' - Optional filename.
+// - `options.appendPaths` Type: Array List of paths, that will be appended to module paths.
+//   Useful, when you want to be able require modules from these paths.
+// - `options.prependPaths` Type: Array Same as appendPaths, but paths will be prepended.
+export function requireFromString(
+  code: string,
+  filename = '',
+  options?: {
+    prependPaths?: string[];
+    appendPaths?: string[];
+  }
+): any {
+  if (typeof filename === 'object') {
+    options = filename;
+    filename = '';
+  }
+  filename = filename.replace('file://', '');
+
+  if (typeof code !== 'string') {
+    throw new Error(`code must be a string, not ${typeof code}`);
+  }
+
+  // @ts-ignore
+  const paths = Module._nodeModulePaths(path.dirname(filename));
+
+  const parent = typeof module !== 'undefined' && module?.parent;
+
+  // @ts-ignore
+  const newModule = new Module(filename, parent);
+  newModule.filename = filename;
+  newModule.paths = ([] as string[])
+    .concat(options?.prependPaths || [])
+    .concat(paths)
+    .concat(options?.appendPaths || []);
+  // @ts-ignore
+  newModule._compile(code, filename);
+
+  if (parent && parent.children) {
+    parent.children.splice(parent.children.indexOf(newModule), 1);
+  }
+
+  return newModule.exports;
+}
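Not part of the diff: a minimal sketch of requireFromString compiling a CommonJS snippet in memory. The module source strings and the extra resolution path are made up for illustration, and the function is assumed to be in scope.

// Compile a throwaway CommonJS module from a string and read its exports.
const mod = requireFromString('module.exports = {answer: 42};', 'virtual.js');
console.log(mod.answer); // 42

// prependPaths/appendPaths extend resolution for require() calls made inside the compiled code.
requireFromString('module.exports = __filename;', 'plugin.js', {
  appendPaths: ['/tmp/extra_node_modules'] // hypothetical extra directory for module resolution
});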