@loaders.gl/polyfills 4.0.0-beta.7 → 4.0.0

This diff shows the published contents of these package versions as released to one of the supported registries, and is provided for informational purposes only.
package/LICENSE CHANGED
@@ -1,9 +1,6 @@
- Copyright (c) 2015 Uber Technologies, Inc.
-
- This software includes parts of PhiloGL (https://github.com/philogb/philogl)
- under MIT license. PhiloGL parts Copyright © 2013 Sencha Labs.
+ loaders.gl is licensed under the MIT license
 
- This software includes adaptations of postprocessing code from THREE.js (https://github.com/mrdoob/three.js/) under MIT license. Additional attribution given in specific source files. THREE.js parts Copyright © 2010-2018 three.js authors.
+ Copyright (c) vis.gl contributors
 
  Permission is hereby granted, free of charge, to any person obtaining a copy
  of this software and associated documentation files (the "Software"), to deal
@@ -23,8 +20,12 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
  THE SOFTWARE.
 
+ ---
+
+ Copyright (c) 2015 Uber Technologies, Inc.
 
- loaders.gl includes certain files from Cesium (https://github.com/AnalyticalGraphicsInc/cesium) under the Apache 2 License:
+ loaders.gl includes certain files from Cesium (https://github.com/AnalyticalGraphicsInc/cesium)
+ under the Apache 2 License (found in the submodule: modules/3d-tiles):)
 
  Copyright 2011-2018 CesiumJS Contributors
 
@@ -38,4 +39,3 @@ distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and limitations under the License.
 
- Cesium-derived code can be found in the submodule: modules/3d-tiles
@@ -1 +1 @@
- {"version":3,"file":"fetch-polyfill.d.ts","sourceRoot":"","sources":["../../src/fetch/fetch-polyfill.ts"],"names":[],"mappings":";AAEA,OAAO,IAAI,MAAM,MAAM,CAAC;AAExB,OAAO,EAAC,QAAQ,EAAC,MAAM,qBAAqB,CAAC;AAO7C;;;;GAIG;AAEH,wBAAsB,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,QAAQ,CAAC,CAgDpF;AAED,2DAA2D;AAC3D,wBAAsB,2BAA2B,CAC/C,GAAG,EAAE,MAAM,EACX,OAAO,KAAA,GACN,OAAO,CAAC,IAAI,CAAC,eAAe,CAAC,CAW/B"}
+ {"version":3,"file":"fetch-polyfill.d.ts","sourceRoot":"","sources":["../../src/fetch/fetch-polyfill.ts"],"names":[],"mappings":";AAGA,OAAO,IAAI,MAAM,MAAM,CAAC;AAExB,OAAO,EAAC,QAAQ,EAAC,MAAM,qBAAqB,CAAC;AAO7C;;;;GAIG;AAEH,wBAAsB,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,QAAQ,CAAC,CAgDpF;AAED,2DAA2D;AAC3D,wBAAsB,2BAA2B,CAC/C,GAAG,EAAE,MAAM,EACX,OAAO,KAAA,GACN,OAAO,CAAC,IAAI,CAAC,eAAe,CAAC,CAW/B"}
@@ -1 +1 @@
- {"version":3,"file":"fetch-polyfill.js","names":["http","https","Response","Headers","decodeDataUri","isDataURL","url","startsWith","isRequestURL","fetchNode","options","globalThis","fetch","arrayBuffer","mimeType","response","headers","syntheticResponseHeaders","originalUrl","endsWith","slice","body","createHTTPRequestReadStream","getHeaders","status","statusText","getStatus","followRedirect","undefined","has","redirectUrl","generateRedirectUrl","get","error","String","Promise","resolve","reject","requestOptions","getRequestOptions","req","request","res","on","end","location","URL","pathname","href","originalHeaders","key","Object","keys","toLowerCase","urlObject","hostname","path","method","port","httpResponse","statusCode","statusMessage","additionalHeaders","arguments","length","httpHeaders","header","contentLength","getContentLength","Number","isFinite","assign"],"sources":["../../src/fetch/fetch-polyfill.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport http from 'http';\nimport https from 'https';\nimport {Response} from './response-polyfill';\nimport {Headers} from './headers-polyfill';\nimport {decodeDataUri} from './decode-data-uri';\n\nconst isDataURL = (url: string): boolean => url.startsWith('data:');\nconst isRequestURL = (url: string): boolean => url.startsWith('http:') || url.startsWith('https:');\n\n/**\n * Emulation of Browser fetch for Node.js\n * @param url\n * @param options\n */\n// eslint-disable-next-line complexity\nexport async function fetchNode(url: string, options: RequestInit): Promise<Response> {\n try {\n // Handle file streams in node\n // @ts-expect-error\n if (globalThis.fetch !== fetchNode && (isRequestURL(url) || isDataURL(url))) {\n // @ts-expect-error\n return await fetch(url, options);\n }\n\n // Handle data urls in node, to match `fetch``\n // Note - this loses the MIME type, data URIs are handled directly in fetch\n if (isDataURL(url)) {\n const {arrayBuffer, mimeType} = decodeDataUri(url);\n const response = new Response(arrayBuffer, {\n headers: {'content-type': mimeType},\n url\n });\n return response;\n }\n\n // Automatically decompress gzipped files with .gz extension\n const syntheticResponseHeaders = {};\n const originalUrl = url;\n if (url.endsWith('.gz')) {\n url = url.slice(0, -3);\n syntheticResponseHeaders['content-encoding'] = 'gzip';\n }\n\n // Need to create the stream in advance since Response constructor needs to be sync\n const body = await createHTTPRequestReadStream(originalUrl, options);\n const headers = getHeaders(url, body, syntheticResponseHeaders);\n const {status, statusText} = getStatus(body);\n\n const followRedirect =\n // @ts-expect-error\n !options || options.followRedirect || options.followRedirect === undefined;\n\n if (status >= 300 && status < 400 && headers.has('location') && followRedirect) {\n const redirectUrl = generateRedirectUrl(url, headers.get('location'));\n\n // Redirect\n return await fetchNode(redirectUrl, options);\n }\n return new Response(body, {headers, status, statusText, url});\n } catch (error) {\n // TODO - what error code to use here?\n return new Response(null, {status: 400, statusText: String(error), url});\n }\n}\n\n/** Returns a promise that resolves to a readable stream */\nexport async function createHTTPRequestReadStream(\n url: string,\n options\n): Promise<http.IncomingMessage> {\n // HANDLE HTTP/HTTPS REQUESTS IN NODE\n // TODO: THIS IS BAD SINCE WE RETURN A PROMISE INSTEAD OF A STREAM\n return await new Promise((resolve, reject) => {\n const requestOptions = 
getRequestOptions(url, options);\n const req = url.startsWith('https:')\n ? https.request(requestOptions, (res) => resolve(res))\n : http.request(requestOptions, (res) => resolve(res));\n req.on('error', (error) => reject(error));\n req.end();\n });\n}\n\n/**\n * Generate redirect url from location without origin and protocol.\n * @param originalUrl\n * @param redirectUrl\n */\nfunction generateRedirectUrl(originalUrl: string, location: string): string {\n if (location.startsWith('http')) {\n return location;\n }\n // If url doesn't have origin and protocol just extend current url origin with location.\n const url = new URL(originalUrl);\n url.pathname = location;\n\n return url.href;\n}\n\n// HELPER FUNCTIONS\n\nfunction getRequestOptions(url: string, options?: {fetch?: typeof fetch; headers?}) {\n // Ensure header keys are lower case so that we can merge without duplicates\n const originalHeaders = options?.headers || {};\n const headers = {};\n for (const key of Object.keys(originalHeaders)) {\n headers[key.toLowerCase()] = originalHeaders[key];\n }\n\n // Add default accept-encoding to headers\n headers['accept-encoding'] = headers['accept-encoding'] || 'gzip,br,deflate';\n\n const urlObject = new URL(url);\n return {\n hostname: urlObject.hostname,\n path: urlObject.pathname,\n method: 'GET',\n // Add options and user provided 'options.fetch' overrides if available\n ...options,\n ...options?.fetch,\n // Override with updated headers with accepted encodings:\n headers,\n port: urlObject.port\n };\n}\n\nfunction getStatus(httpResponse: http.IncomingMessage): {status: number; statusText: string} {\n if (httpResponse.statusCode) {\n return {status: httpResponse.statusCode, statusText: httpResponse.statusMessage || 'NA'};\n }\n return {status: 200, statusText: 'OK'};\n}\n\nfunction getHeaders(url, httpResponse, additionalHeaders = {}) {\n const headers = {};\n\n if (httpResponse && httpResponse.headers) {\n const httpHeaders = httpResponse.headers;\n for (const key in httpHeaders) {\n const header = httpHeaders[key];\n headers[key.toLowerCase()] = String(header);\n }\n }\n\n // Fix up content length if we can for best progress experience\n if (!headers['content-length']) {\n const contentLength = getContentLength(url);\n if (Number.isFinite(contentLength)) {\n headers['content-length'] = contentLength;\n }\n }\n\n Object.assign(headers, additionalHeaders);\n\n return new Headers(headers);\n}\n\n/** Needs to be read from actual headers */\nfunction getContentLength(url: string): number | null {\n // TODO - remove media type etc\n return isDataURL(url) ? 
url.length - 'data:'.length : null;\n}\n"],"mappings":"AAEA,OAAOA,IAAI,MAAM,MAAM;AACvB,OAAOC,KAAK,MAAM,OAAO;AAAC,SAClBC,QAAQ;AAAA,SACRC,OAAO;AAAA,SACPC,aAAa;AAErB,MAAMC,SAAS,GAAIC,GAAW,IAAcA,GAAG,CAACC,UAAU,CAAC,OAAO,CAAC;AACnE,MAAMC,YAAY,GAAIF,GAAW,IAAcA,GAAG,CAACC,UAAU,CAAC,OAAO,CAAC,IAAID,GAAG,CAACC,UAAU,CAAC,QAAQ,CAAC;AAQlG,OAAO,eAAeE,SAASA,CAACH,GAAW,EAAEI,OAAoB,EAAqB;EACpF,IAAI;IAGF,IAAIC,UAAU,CAACC,KAAK,KAAKH,SAAS,KAAKD,YAAY,CAACF,GAAG,CAAC,IAAID,SAAS,CAACC,GAAG,CAAC,CAAC,EAAE;MAE3E,OAAO,MAAMM,KAAK,CAACN,GAAG,EAAEI,OAAO,CAAC;IAClC;IAIA,IAAIL,SAAS,CAACC,GAAG,CAAC,EAAE;MAClB,MAAM;QAACO,WAAW;QAAEC;MAAQ,CAAC,GAAGV,aAAa,CAACE,GAAG,CAAC;MAClD,MAAMS,QAAQ,GAAG,IAAIb,QAAQ,CAACW,WAAW,EAAE;QACzCG,OAAO,EAAE;UAAC,cAAc,EAAEF;QAAQ,CAAC;QACnCR;MACF,CAAC,CAAC;MACF,OAAOS,QAAQ;IACjB;IAGA,MAAME,wBAAwB,GAAG,CAAC,CAAC;IACnC,MAAMC,WAAW,GAAGZ,GAAG;IACvB,IAAIA,GAAG,CAACa,QAAQ,CAAC,KAAK,CAAC,EAAE;MACvBb,GAAG,GAAGA,GAAG,CAACc,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;MACtBH,wBAAwB,CAAC,kBAAkB,CAAC,GAAG,MAAM;IACvD;IAGA,MAAMI,IAAI,GAAG,MAAMC,2BAA2B,CAACJ,WAAW,EAAER,OAAO,CAAC;IACpE,MAAMM,OAAO,GAAGO,UAAU,CAACjB,GAAG,EAAEe,IAAI,EAAEJ,wBAAwB,CAAC;IAC/D,MAAM;MAACO,MAAM;MAAEC;IAAU,CAAC,GAAGC,SAAS,CAACL,IAAI,CAAC;IAE5C,MAAMM,cAAc,GAElB,CAACjB,OAAO,IAAIA,OAAO,CAACiB,cAAc,IAAIjB,OAAO,CAACiB,cAAc,KAAKC,SAAS;IAE5E,IAAIJ,MAAM,IAAI,GAAG,IAAIA,MAAM,GAAG,GAAG,IAAIR,OAAO,CAACa,GAAG,CAAC,UAAU,CAAC,IAAIF,cAAc,EAAE;MAC9E,MAAMG,WAAW,GAAGC,mBAAmB,CAACzB,GAAG,EAAEU,OAAO,CAACgB,GAAG,CAAC,UAAU,CAAC,CAAC;MAGrE,OAAO,MAAMvB,SAAS,CAACqB,WAAW,EAAEpB,OAAO,CAAC;IAC9C;IACA,OAAO,IAAIR,QAAQ,CAACmB,IAAI,EAAE;MAACL,OAAO;MAAEQ,MAAM;MAAEC,UAAU;MAAEnB;IAAG,CAAC,CAAC;EAC/D,CAAC,CAAC,OAAO2B,KAAK,EAAE;IAEd,OAAO,IAAI/B,QAAQ,CAAC,IAAI,EAAE;MAACsB,MAAM,EAAE,GAAG;MAAEC,UAAU,EAAES,MAAM,CAACD,KAAK,CAAC;MAAE3B;IAAG,CAAC,CAAC;EAC1E;AACF;AAGA,OAAO,eAAegB,2BAA2BA,CAC/ChB,GAAW,EACXI,OAAO,EACwB;EAG/B,OAAO,MAAM,IAAIyB,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IAC5C,MAAMC,cAAc,GAAGC,iBAAiB,CAACjC,GAAG,EAAEI,OAAO,CAAC;IACtD,MAAM8B,GAAG,GAAGlC,GAAG,CAACC,UAAU,CAAC,QAAQ,CAAC,GAChCN,KAAK,CAACwC,OAAO,CAACH,cAAc,EAAGI,GAAG,IAAKN,OAAO,CAACM,GAAG,CAAC,CAAC,GACpD1C,IAAI,CAACyC,OAAO,CAACH,cAAc,EAAGI,GAAG,IAAKN,OAAO,CAACM,GAAG,CAAC,CAAC;IACvDF,GAAG,CAACG,EAAE,CAAC,OAAO,EAAGV,KAAK,IAAKI,MAAM,CAACJ,KAAK,CAAC,CAAC;IACzCO,GAAG,CAACI,GAAG,CAAC,CAAC;EACX,CAAC,CAAC;AACJ;AAOA,SAASb,mBAAmBA,CAACb,WAAmB,EAAE2B,QAAgB,EAAU;EAC1E,IAAIA,QAAQ,CAACtC,UAAU,CAAC,MAAM,CAAC,EAAE;IAC/B,OAAOsC,QAAQ;EACjB;EAEA,MAAMvC,GAAG,GAAG,IAAIwC,GAAG,CAAC5B,WAAW,CAAC;EAChCZ,GAAG,CAACyC,QAAQ,GAAGF,QAAQ;EAEvB,OAAOvC,GAAG,CAAC0C,IAAI;AACjB;AAIA,SAAST,iBAAiBA,CAACjC,GAAW,EAAEI,OAA0C,EAAE;EAElF,MAAMuC,eAAe,GAAG,CAAAvC,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEM,OAAO,KAAI,CAAC,CAAC;EAC9C,MAAMA,OAAO,GAAG,CAAC,CAAC;EAClB,KAAK,MAAMkC,GAAG,IAAIC,MAAM,CAACC,IAAI,CAACH,eAAe,CAAC,EAAE;IAC9CjC,OAAO,CAACkC,GAAG,CAACG,WAAW,CAAC,CAAC,CAAC,GAAGJ,eAAe,CAACC,GAAG,CAAC;EACnD;EAGAlC,OAAO,CAAC,iBAAiB,CAAC,GAAGA,OAAO,CAAC,iBAAiB,CAAC,IAAI,iBAAiB;EAE5E,MAAMsC,SAAS,GAAG,IAAIR,GAAG,CAACxC,GAAG,CAAC;EAC9B,OAAO;IACLiD,QAAQ,EAAED,SAAS,CAACC,QAAQ;IAC5BC,IAAI,EAAEF,SAAS,CAACP,QAAQ;IACxBU,MAAM,EAAE,KAAK;IAEb,GAAG/C,OAAO;IACV,IAAGA,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEE,KAAK;IAEjBI,OAAO;IACP0C,IAAI,EAAEJ,SAAS,CAACI;EAClB,CAAC;AACH;AAEA,SAAShC,SAASA,CAACiC,YAAkC,EAAwC;EAC3F,IAAIA,YAAY,CAACC,UAAU,EAAE;IAC3B,OAAO;MAACpC,MAAM,EAAEmC,YAAY,CAACC,UAAU;MAAEnC,UAAU,EAAEkC,YAAY,CAACE,aAAa,IAAI;IAAI,CAAC;EAC1F;EACA,OAAO;IAACrC,MAAM,EAAE,GAAG;IAAEC,UAAU,EAAE;EAAI,CAAC;AACxC;AAEA,SAASF,UAAUA,CAACjB,GAAG,EAAEqD,YAAY,EAA0B;EAAA,IAAxBG,iBAAiB,GAAAC,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAnC,SAAA,
GAAAmC,SAAA,MAAG,CAAC,CAAC;EAC3D,MAAM/C,OAAO,GAAG,CAAC,CAAC;EAElB,IAAI2C,YAAY,IAAIA,YAAY,CAAC3C,OAAO,EAAE;IACxC,MAAMiD,WAAW,GAAGN,YAAY,CAAC3C,OAAO;IACxC,KAAK,MAAMkC,GAAG,IAAIe,WAAW,EAAE;MAC7B,MAAMC,MAAM,GAAGD,WAAW,CAACf,GAAG,CAAC;MAC/BlC,OAAO,CAACkC,GAAG,CAACG,WAAW,CAAC,CAAC,CAAC,GAAGnB,MAAM,CAACgC,MAAM,CAAC;IAC7C;EACF;EAGA,IAAI,CAAClD,OAAO,CAAC,gBAAgB,CAAC,EAAE;IAC9B,MAAMmD,aAAa,GAAGC,gBAAgB,CAAC9D,GAAG,CAAC;IAC3C,IAAI+D,MAAM,CAACC,QAAQ,CAACH,aAAa,CAAC,EAAE;MAClCnD,OAAO,CAAC,gBAAgB,CAAC,GAAGmD,aAAa;IAC3C;EACF;EAEAhB,MAAM,CAACoB,MAAM,CAACvD,OAAO,EAAE8C,iBAAiB,CAAC;EAEzC,OAAO,IAAI3D,OAAO,CAACa,OAAO,CAAC;AAC7B;AAGA,SAASoD,gBAAgBA,CAAC9D,GAAW,EAAiB;EAEpD,OAAOD,SAAS,CAACC,GAAG,CAAC,GAAGA,GAAG,CAAC0D,MAAM,GAAG,OAAO,CAACA,MAAM,GAAG,IAAI;AAC5D"}
+ {"version":3,"file":"fetch-polyfill.js","names":["http","https","Response","Headers","decodeDataUri","isDataURL","url","startsWith","isRequestURL","fetchNode","options","globalThis","fetch","arrayBuffer","mimeType","response","headers","syntheticResponseHeaders","originalUrl","endsWith","slice","body","createHTTPRequestReadStream","getHeaders","status","statusText","getStatus","followRedirect","undefined","has","redirectUrl","generateRedirectUrl","get","error","String","Promise","resolve","reject","requestOptions","getRequestOptions","req","request","res","on","end","location","URL","pathname","href","originalHeaders","key","Object","keys","toLowerCase","urlObject","hostname","path","method","port","httpResponse","statusCode","statusMessage","additionalHeaders","arguments","length","httpHeaders","header","contentLength","getContentLength","Number","isFinite","assign"],"sources":["../../src/fetch/fetch-polyfill.ts"],"sourcesContent":["// loaders.gl, MIT license\n// Copyright (c) vis.gl contributors\n\nimport http from 'http';\nimport https from 'https';\nimport {Response} from './response-polyfill';\nimport {Headers} from './headers-polyfill';\nimport {decodeDataUri} from './decode-data-uri';\n\nconst isDataURL = (url: string): boolean => url.startsWith('data:');\nconst isRequestURL = (url: string): boolean => url.startsWith('http:') || url.startsWith('https:');\n\n/**\n * Emulation of Browser fetch for Node.js\n * @param url\n * @param options\n */\n// eslint-disable-next-line complexity\nexport async function fetchNode(url: string, options: RequestInit): Promise<Response> {\n try {\n // Handle file streams in node\n // @ts-expect-error\n if (globalThis.fetch !== fetchNode && (isRequestURL(url) || isDataURL(url))) {\n // @ts-expect-error\n return await fetch(url, options);\n }\n\n // Handle data urls in node, to match `fetch``\n // Note - this loses the MIME type, data URIs are handled directly in fetch\n if (isDataURL(url)) {\n const {arrayBuffer, mimeType} = decodeDataUri(url);\n const response = new Response(arrayBuffer, {\n headers: {'content-type': mimeType},\n url\n });\n return response;\n }\n\n // Automatically decompress gzipped files with .gz extension\n const syntheticResponseHeaders = {};\n const originalUrl = url;\n if (url.endsWith('.gz')) {\n url = url.slice(0, -3);\n syntheticResponseHeaders['content-encoding'] = 'gzip';\n }\n\n // Need to create the stream in advance since Response constructor needs to be sync\n const body = await createHTTPRequestReadStream(originalUrl, options);\n const headers = getHeaders(url, body, syntheticResponseHeaders);\n const {status, statusText} = getStatus(body);\n\n const followRedirect =\n // @ts-expect-error\n !options || options.followRedirect || options.followRedirect === undefined;\n\n if (status >= 300 && status < 400 && headers.has('location') && followRedirect) {\n const redirectUrl = generateRedirectUrl(url, headers.get('location'));\n\n // Redirect\n return await fetchNode(redirectUrl, options);\n }\n return new Response(body, {headers, status, statusText, url});\n } catch (error) {\n // TODO - what error code to use here?\n return new Response(null, {status: 400, statusText: String(error), url});\n }\n}\n\n/** Returns a promise that resolves to a readable stream */\nexport async function createHTTPRequestReadStream(\n url: string,\n options\n): Promise<http.IncomingMessage> {\n // HANDLE HTTP/HTTPS REQUESTS IN NODE\n // TODO: THIS IS BAD SINCE WE RETURN A PROMISE INSTEAD OF A STREAM\n return await new Promise((resolve, reject) 
=> {\n const requestOptions = getRequestOptions(url, options);\n const req = url.startsWith('https:')\n ? https.request(requestOptions, (res) => resolve(res))\n : http.request(requestOptions, (res) => resolve(res));\n req.on('error', (error) => reject(error));\n req.end();\n });\n}\n\n/**\n * Generate redirect url from location without origin and protocol.\n * @param originalUrl\n * @param redirectUrl\n */\nfunction generateRedirectUrl(originalUrl: string, location: string): string {\n if (location.startsWith('http')) {\n return location;\n }\n // If url doesn't have origin and protocol just extend current url origin with location.\n const url = new URL(originalUrl);\n url.pathname = location;\n\n return url.href;\n}\n\n// HELPER FUNCTIONS\n\nfunction getRequestOptions(url: string, options?: {fetch?: typeof fetch; headers?}) {\n // Ensure header keys are lower case so that we can merge without duplicates\n const originalHeaders = options?.headers || {};\n const headers = {};\n for (const key of Object.keys(originalHeaders)) {\n headers[key.toLowerCase()] = originalHeaders[key];\n }\n\n // Add default accept-encoding to headers\n headers['accept-encoding'] = headers['accept-encoding'] || 'gzip,br,deflate';\n\n const urlObject = new URL(url);\n return {\n hostname: urlObject.hostname,\n path: urlObject.pathname,\n method: 'GET',\n // Add options and user provided 'options.fetch' overrides if available\n ...options,\n ...options?.fetch,\n // Override with updated headers with accepted encodings:\n headers,\n port: urlObject.port\n };\n}\n\nfunction getStatus(httpResponse: http.IncomingMessage): {status: number; statusText: string} {\n if (httpResponse.statusCode) {\n return {status: httpResponse.statusCode, statusText: httpResponse.statusMessage || 'NA'};\n }\n return {status: 200, statusText: 'OK'};\n}\n\nfunction getHeaders(url, httpResponse, additionalHeaders = {}) {\n const headers = {};\n\n if (httpResponse && httpResponse.headers) {\n const httpHeaders = httpResponse.headers;\n for (const key in httpHeaders) {\n const header = httpHeaders[key];\n headers[key.toLowerCase()] = String(header);\n }\n }\n\n // Fix up content length if we can for best progress experience\n if (!headers['content-length']) {\n const contentLength = getContentLength(url);\n if (Number.isFinite(contentLength)) {\n headers['content-length'] = contentLength;\n }\n }\n\n Object.assign(headers, additionalHeaders);\n\n return new Headers(headers);\n}\n\n/** Needs to be read from actual headers */\nfunction getContentLength(url: string): number | null {\n // TODO - remove media type etc\n return isDataURL(url) ? 
url.length - 'data:'.length : null;\n}\n"],"mappings":"AAGA,OAAOA,IAAI,MAAM,MAAM;AACvB,OAAOC,KAAK,MAAM,OAAO;AAAC,SAClBC,QAAQ;AAAA,SACRC,OAAO;AAAA,SACPC,aAAa;AAErB,MAAMC,SAAS,GAAIC,GAAW,IAAcA,GAAG,CAACC,UAAU,CAAC,OAAO,CAAC;AACnE,MAAMC,YAAY,GAAIF,GAAW,IAAcA,GAAG,CAACC,UAAU,CAAC,OAAO,CAAC,IAAID,GAAG,CAACC,UAAU,CAAC,QAAQ,CAAC;AAQlG,OAAO,eAAeE,SAASA,CAACH,GAAW,EAAEI,OAAoB,EAAqB;EACpF,IAAI;IAGF,IAAIC,UAAU,CAACC,KAAK,KAAKH,SAAS,KAAKD,YAAY,CAACF,GAAG,CAAC,IAAID,SAAS,CAACC,GAAG,CAAC,CAAC,EAAE;MAE3E,OAAO,MAAMM,KAAK,CAACN,GAAG,EAAEI,OAAO,CAAC;IAClC;IAIA,IAAIL,SAAS,CAACC,GAAG,CAAC,EAAE;MAClB,MAAM;QAACO,WAAW;QAAEC;MAAQ,CAAC,GAAGV,aAAa,CAACE,GAAG,CAAC;MAClD,MAAMS,QAAQ,GAAG,IAAIb,QAAQ,CAACW,WAAW,EAAE;QACzCG,OAAO,EAAE;UAAC,cAAc,EAAEF;QAAQ,CAAC;QACnCR;MACF,CAAC,CAAC;MACF,OAAOS,QAAQ;IACjB;IAGA,MAAME,wBAAwB,GAAG,CAAC,CAAC;IACnC,MAAMC,WAAW,GAAGZ,GAAG;IACvB,IAAIA,GAAG,CAACa,QAAQ,CAAC,KAAK,CAAC,EAAE;MACvBb,GAAG,GAAGA,GAAG,CAACc,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;MACtBH,wBAAwB,CAAC,kBAAkB,CAAC,GAAG,MAAM;IACvD;IAGA,MAAMI,IAAI,GAAG,MAAMC,2BAA2B,CAACJ,WAAW,EAAER,OAAO,CAAC;IACpE,MAAMM,OAAO,GAAGO,UAAU,CAACjB,GAAG,EAAEe,IAAI,EAAEJ,wBAAwB,CAAC;IAC/D,MAAM;MAACO,MAAM;MAAEC;IAAU,CAAC,GAAGC,SAAS,CAACL,IAAI,CAAC;IAE5C,MAAMM,cAAc,GAElB,CAACjB,OAAO,IAAIA,OAAO,CAACiB,cAAc,IAAIjB,OAAO,CAACiB,cAAc,KAAKC,SAAS;IAE5E,IAAIJ,MAAM,IAAI,GAAG,IAAIA,MAAM,GAAG,GAAG,IAAIR,OAAO,CAACa,GAAG,CAAC,UAAU,CAAC,IAAIF,cAAc,EAAE;MAC9E,MAAMG,WAAW,GAAGC,mBAAmB,CAACzB,GAAG,EAAEU,OAAO,CAACgB,GAAG,CAAC,UAAU,CAAC,CAAC;MAGrE,OAAO,MAAMvB,SAAS,CAACqB,WAAW,EAAEpB,OAAO,CAAC;IAC9C;IACA,OAAO,IAAIR,QAAQ,CAACmB,IAAI,EAAE;MAACL,OAAO;MAAEQ,MAAM;MAAEC,UAAU;MAAEnB;IAAG,CAAC,CAAC;EAC/D,CAAC,CAAC,OAAO2B,KAAK,EAAE;IAEd,OAAO,IAAI/B,QAAQ,CAAC,IAAI,EAAE;MAACsB,MAAM,EAAE,GAAG;MAAEC,UAAU,EAAES,MAAM,CAACD,KAAK,CAAC;MAAE3B;IAAG,CAAC,CAAC;EAC1E;AACF;AAGA,OAAO,eAAegB,2BAA2BA,CAC/ChB,GAAW,EACXI,OAAO,EACwB;EAG/B,OAAO,MAAM,IAAIyB,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IAC5C,MAAMC,cAAc,GAAGC,iBAAiB,CAACjC,GAAG,EAAEI,OAAO,CAAC;IACtD,MAAM8B,GAAG,GAAGlC,GAAG,CAACC,UAAU,CAAC,QAAQ,CAAC,GAChCN,KAAK,CAACwC,OAAO,CAACH,cAAc,EAAGI,GAAG,IAAKN,OAAO,CAACM,GAAG,CAAC,CAAC,GACpD1C,IAAI,CAACyC,OAAO,CAACH,cAAc,EAAGI,GAAG,IAAKN,OAAO,CAACM,GAAG,CAAC,CAAC;IACvDF,GAAG,CAACG,EAAE,CAAC,OAAO,EAAGV,KAAK,IAAKI,MAAM,CAACJ,KAAK,CAAC,CAAC;IACzCO,GAAG,CAACI,GAAG,CAAC,CAAC;EACX,CAAC,CAAC;AACJ;AAOA,SAASb,mBAAmBA,CAACb,WAAmB,EAAE2B,QAAgB,EAAU;EAC1E,IAAIA,QAAQ,CAACtC,UAAU,CAAC,MAAM,CAAC,EAAE;IAC/B,OAAOsC,QAAQ;EACjB;EAEA,MAAMvC,GAAG,GAAG,IAAIwC,GAAG,CAAC5B,WAAW,CAAC;EAChCZ,GAAG,CAACyC,QAAQ,GAAGF,QAAQ;EAEvB,OAAOvC,GAAG,CAAC0C,IAAI;AACjB;AAIA,SAAST,iBAAiBA,CAACjC,GAAW,EAAEI,OAA0C,EAAE;EAElF,MAAMuC,eAAe,GAAG,CAAAvC,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEM,OAAO,KAAI,CAAC,CAAC;EAC9C,MAAMA,OAAO,GAAG,CAAC,CAAC;EAClB,KAAK,MAAMkC,GAAG,IAAIC,MAAM,CAACC,IAAI,CAACH,eAAe,CAAC,EAAE;IAC9CjC,OAAO,CAACkC,GAAG,CAACG,WAAW,CAAC,CAAC,CAAC,GAAGJ,eAAe,CAACC,GAAG,CAAC;EACnD;EAGAlC,OAAO,CAAC,iBAAiB,CAAC,GAAGA,OAAO,CAAC,iBAAiB,CAAC,IAAI,iBAAiB;EAE5E,MAAMsC,SAAS,GAAG,IAAIR,GAAG,CAACxC,GAAG,CAAC;EAC9B,OAAO;IACLiD,QAAQ,EAAED,SAAS,CAACC,QAAQ;IAC5BC,IAAI,EAAEF,SAAS,CAACP,QAAQ;IACxBU,MAAM,EAAE,KAAK;IAEb,GAAG/C,OAAO;IACV,IAAGA,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEE,KAAK;IAEjBI,OAAO;IACP0C,IAAI,EAAEJ,SAAS,CAACI;EAClB,CAAC;AACH;AAEA,SAAShC,SAASA,CAACiC,YAAkC,EAAwC;EAC3F,IAAIA,YAAY,CAACC,UAAU,EAAE;IAC3B,OAAO;MAACpC,MAAM,EAAEmC,YAAY,CAACC,UAAU;MAAEnC,UAAU,EAAEkC,YAAY,CAACE,aAAa,IAAI;IAAI,CAAC;EAC1F;EACA,OAAO;IAACrC,MAAM,EAAE,GAAG;IAAEC,UAAU,EAAE;EAAI,CAAC;AACxC;AAEA,SAASF,UAAUA,CAACjB,GAAG,EAAEqD,YAAY,EAA0B;EAAA,IAAxBG,iBAAiB,GAAAC,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAnC,SAAA,
GAAAmC,SAAA,MAAG,CAAC,CAAC;EAC3D,MAAM/C,OAAO,GAAG,CAAC,CAAC;EAElB,IAAI2C,YAAY,IAAIA,YAAY,CAAC3C,OAAO,EAAE;IACxC,MAAMiD,WAAW,GAAGN,YAAY,CAAC3C,OAAO;IACxC,KAAK,MAAMkC,GAAG,IAAIe,WAAW,EAAE;MAC7B,MAAMC,MAAM,GAAGD,WAAW,CAACf,GAAG,CAAC;MAC/BlC,OAAO,CAACkC,GAAG,CAACG,WAAW,CAAC,CAAC,CAAC,GAAGnB,MAAM,CAACgC,MAAM,CAAC;IAC7C;EACF;EAGA,IAAI,CAAClD,OAAO,CAAC,gBAAgB,CAAC,EAAE;IAC9B,MAAMmD,aAAa,GAAGC,gBAAgB,CAAC9D,GAAG,CAAC;IAC3C,IAAI+D,MAAM,CAACC,QAAQ,CAACH,aAAa,CAAC,EAAE;MAClCnD,OAAO,CAAC,gBAAgB,CAAC,GAAGmD,aAAa;IAC3C;EACF;EAEAhB,MAAM,CAACoB,MAAM,CAACvD,OAAO,EAAE8C,iBAAiB,CAAC;EAEzC,OAAO,IAAI3D,OAAO,CAACa,OAAO,CAAC;AAC7B;AAGA,SAASoD,gBAAgBA,CAAC9D,GAAW,EAAiB;EAEpD,OAAOD,SAAS,CAACC,GAAG,CAAC,GAAGA,GAAG,CAAC0D,MAAM,GAAG,OAAO,CAACA,MAAM,GAAG,IAAI;AAC5D"}
@@ -1 +1 @@
- {"version":3,"file":"response-polyfill.d.ts","sourceRoot":"","sources":["../../src/fetch/response-polyfill.ts"],"names":[],"mappings":"AAIA,OAAO,EAAC,OAAO,EAAC,MAAM,oBAAoB,CAAC;AAoB3C,qBAAa,QAAQ;IACnB,QAAQ,CAAC,EAAE,EAAE,OAAO,CAAC;IACrB,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC;IACxB,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;IAC5B,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC;IACrB,QAAQ,EAAE,OAAO,CAAS;IAC1B,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC;gBAIrB,IAAI,KAAA,EACJ,OAAO,EAAE;QACP,OAAO,CAAC,MAAC;QACT,MAAM,CAAC,EAAE,MAAM,CAAC;QAChB,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,GAAG,EAAE,MAAM,CAAC;KACb;IAuBH,IAAI,IAAI,QAKP;IAIK,WAAW;IAQX,IAAI;IAMJ,IAAI;IAKJ,IAAI;CAMX"}
+ {"version":3,"file":"response-polyfill.d.ts","sourceRoot":"","sources":["../../src/fetch/response-polyfill.ts"],"names":[],"mappings":"AAKA,OAAO,EAAC,OAAO,EAAC,MAAM,oBAAoB,CAAC;AAoB3C,qBAAa,QAAQ;IACnB,QAAQ,CAAC,EAAE,EAAE,OAAO,CAAC;IACrB,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC;IACxB,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;IAC5B,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC;IACrB,QAAQ,EAAE,OAAO,CAAS;IAC1B,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC;gBAIrB,IAAI,KAAA,EACJ,OAAO,EAAE;QACP,OAAO,CAAC,MAAC;QACT,MAAM,CAAC,EAAE,MAAM,CAAC;QAChB,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,GAAG,EAAE,MAAM,CAAC;KACb;IAuBH,IAAI,IAAI,QAKP;IAIK,WAAW;IAQX,IAAI;IAMJ,IAAI;IAKJ,IAAI;CAMX"}
@@ -1 +1 @@
- {"version":3,"file":"response-polyfill.js","names":["assert","decompressReadStream","concatenateReadStream","Headers","isBoolean","x","isFunction","isObject","isReadableNodeStream","read","pipe","readable","stream","Response","constructor","body","options","ok","status","statusText","headers","url","bodyUsed","_body","Readable","from","TextEncoder","encode","ArrayBuffer","arrayBuffer","data","text","textDecoder","TextDecoder","decode","json","JSON","parse","blob","Blob","Error"],"sources":["../../src/fetch/response-polyfill.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport {assert} from '../utils/assert';\nimport {decompressReadStream, concatenateReadStream} from '../filesystems/stream-utils.node';\nimport {Headers} from './headers-polyfill';\n\nconst isBoolean = (x) => typeof x === 'boolean';\nconst isFunction = (x) => typeof x === 'function';\nconst isObject = (x) => x !== null && typeof x === 'object';\nconst isReadableNodeStream = (x) =>\n isObject(x) && isFunction(x.read) && isFunction(x.pipe) && isBoolean(x.readable);\n\n/**\n * Polyfill for Browser Response\n *\n * Under Node.js we return a mock \"fetch response object\"\n * so that apps can use the same API as in the browser.\n *\n * Note: This is intended to be a \"lightweight\" implementation and will have limitations.\n *\n * See https://developer.mozilla.org/en-US/docs/Web/API/Response\n */\nimport * as stream from 'stream';\n\nexport class Response {\n readonly ok: boolean;\n readonly status: number;\n readonly statusText: string;\n readonly headers: Headers;\n readonly url: string;\n bodyUsed: boolean = false;\n private readonly _body;\n\n // TODO - handle ArrayBuffer, ArrayBufferView, Buffer\n constructor(\n body,\n options: {\n headers?;\n status?: number;\n statusText?: string;\n url: string;\n }\n ) {\n const {headers, status = 200, statusText = 'OK', url} = options || {};\n\n this.url = url;\n this.ok = status === 200;\n this.status = status; // TODO - handle errors and set status\n this.statusText = statusText;\n this.headers = new Headers(options?.headers || {});\n\n // Check for content-encoding and create a decompression stream\n if (isReadableNodeStream(body)) {\n this._body = decompressReadStream(body, headers);\n } else if (typeof body === 'string') {\n this._body = stream.Readable.from([new TextEncoder().encode(body)]);\n } else {\n this._body = stream.Readable.from([body || new ArrayBuffer(0)]);\n }\n }\n\n // Subset of Properties\n\n // Returns a readable stream to the \"body\" of the response (or file)\n get body() {\n assert(!this.bodyUsed);\n assert(isReadableNodeStream(this._body)); // Not implemented: conversion of ArrayBuffer etc to stream\n this.bodyUsed = true;\n return this._body;\n }\n\n // Subset of Methods\n\n async arrayBuffer() {\n if (!isReadableNodeStream(this._body)) {\n return this._body || new ArrayBuffer(0);\n }\n const data = await concatenateReadStream(this._body);\n return data;\n }\n\n async text() {\n const arrayBuffer = await this.arrayBuffer();\n const textDecoder = new TextDecoder();\n return textDecoder.decode(arrayBuffer);\n }\n\n async json() {\n const text = await this.text();\n return JSON.parse(text);\n }\n\n async blob() {\n if (typeof Blob === 'undefined') {\n throw new Error('Blob polyfill not installed');\n }\n return new Blob([await this.arrayBuffer()]);\n 
}\n}\n"],"mappings":"SAEQA,MAAM;AAAA,SACNC,oBAAoB,EAAEC,qBAAqB;AAAA,SAC3CC,OAAO;AAEf,MAAMC,SAAS,GAAIC,CAAC,IAAK,OAAOA,CAAC,KAAK,SAAS;AAC/C,MAAMC,UAAU,GAAID,CAAC,IAAK,OAAOA,CAAC,KAAK,UAAU;AACjD,MAAME,QAAQ,GAAIF,CAAC,IAAKA,CAAC,KAAK,IAAI,IAAI,OAAOA,CAAC,KAAK,QAAQ;AAC3D,MAAMG,oBAAoB,GAAIH,CAAC,IAC7BE,QAAQ,CAACF,CAAC,CAAC,IAAIC,UAAU,CAACD,CAAC,CAACI,IAAI,CAAC,IAAIH,UAAU,CAACD,CAAC,CAACK,IAAI,CAAC,IAAIN,SAAS,CAACC,CAAC,CAACM,QAAQ,CAAC;AAYlF,OAAO,KAAKC,MAAM,MAAM,QAAQ;AAEhC,OAAO,MAAMC,QAAQ,CAAC;EAUpBC,WAAWA,CACTC,IAAI,EACJC,OAKC,EACD;IAAA,KAjBOC,EAAE;IAAA,KACFC,MAAM;IAAA,KACNC,UAAU;IAAA,KACVC,OAAO;IAAA,KACPC,GAAG;IAAA,KACZC,QAAQ,GAAY,KAAK;IAAA,KACRC,KAAK;IAYpB,MAAM;MAACH,OAAO;MAAEF,MAAM,GAAG,GAAG;MAAEC,UAAU,GAAG,IAAI;MAAEE;IAAG,CAAC,GAAGL,OAAO,IAAI,CAAC,CAAC;IAErE,IAAI,CAACK,GAAG,GAAGA,GAAG;IACd,IAAI,CAACJ,EAAE,GAAGC,MAAM,KAAK,GAAG;IACxB,IAAI,CAACA,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACC,UAAU,GAAGA,UAAU;IAC5B,IAAI,CAACC,OAAO,GAAG,IAAIjB,OAAO,CAAC,CAAAa,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEI,OAAO,KAAI,CAAC,CAAC,CAAC;IAGlD,IAAIZ,oBAAoB,CAACO,IAAI,CAAC,EAAE;MAC9B,IAAI,CAACQ,KAAK,GAAGtB,oBAAoB,CAACc,IAAI,EAAEK,OAAO,CAAC;IAClD,CAAC,MAAM,IAAI,OAAOL,IAAI,KAAK,QAAQ,EAAE;MACnC,IAAI,CAACQ,KAAK,GAAGX,MAAM,CAACY,QAAQ,CAACC,IAAI,CAAC,CAAC,IAAIC,WAAW,CAAC,CAAC,CAACC,MAAM,CAACZ,IAAI,CAAC,CAAC,CAAC;IACrE,CAAC,MAAM;MACL,IAAI,CAACQ,KAAK,GAAGX,MAAM,CAACY,QAAQ,CAACC,IAAI,CAAC,CAACV,IAAI,IAAI,IAAIa,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;IACjE;EACF;EAKA,IAAIb,IAAIA,CAAA,EAAG;IACTf,MAAM,CAAC,CAAC,IAAI,CAACsB,QAAQ,CAAC;IACtBtB,MAAM,CAACQ,oBAAoB,CAAC,IAAI,CAACe,KAAK,CAAC,CAAC;IACxC,IAAI,CAACD,QAAQ,GAAG,IAAI;IACpB,OAAO,IAAI,CAACC,KAAK;EACnB;EAIA,MAAMM,WAAWA,CAAA,EAAG;IAClB,IAAI,CAACrB,oBAAoB,CAAC,IAAI,CAACe,KAAK,CAAC,EAAE;MACrC,OAAO,IAAI,CAACA,KAAK,IAAI,IAAIK,WAAW,CAAC,CAAC,CAAC;IACzC;IACA,MAAME,IAAI,GAAG,MAAM5B,qBAAqB,CAAC,IAAI,CAACqB,KAAK,CAAC;IACpD,OAAOO,IAAI;EACb;EAEA,MAAMC,IAAIA,CAAA,EAAG;IACX,MAAMF,WAAW,GAAG,MAAM,IAAI,CAACA,WAAW,CAAC,CAAC;IAC5C,MAAMG,WAAW,GAAG,IAAIC,WAAW,CAAC,CAAC;IACrC,OAAOD,WAAW,CAACE,MAAM,CAACL,WAAW,CAAC;EACxC;EAEA,MAAMM,IAAIA,CAAA,EAAG;IACX,MAAMJ,IAAI,GAAG,MAAM,IAAI,CAACA,IAAI,CAAC,CAAC;IAC9B,OAAOK,IAAI,CAACC,KAAK,CAACN,IAAI,CAAC;EACzB;EAEA,MAAMO,IAAIA,CAAA,EAAG;IACX,IAAI,OAAOC,IAAI,KAAK,WAAW,EAAE;MAC/B,MAAM,IAAIC,KAAK,CAAC,6BAA6B,CAAC;IAChD;IACA,OAAO,IAAID,IAAI,CAAC,CAAC,MAAM,IAAI,CAACV,WAAW,CAAC,CAAC,CAAC,CAAC;EAC7C;AACF"}
+ {"version":3,"file":"response-polyfill.js","names":["assert","decompressReadStream","concatenateReadStream","Headers","isBoolean","x","isFunction","isObject","isReadableNodeStream","read","pipe","readable","stream","Response","constructor","body","options","ok","status","statusText","headers","url","bodyUsed","_body","Readable","from","TextEncoder","encode","ArrayBuffer","arrayBuffer","data","text","textDecoder","TextDecoder","decode","json","JSON","parse","blob","Blob","Error"],"sources":["../../src/fetch/response-polyfill.ts"],"sourcesContent":["// loaders.gl, MIT license\n// Copyright (c) vis.gl contributors\n\nimport {assert} from '../utils/assert';\nimport {decompressReadStream, concatenateReadStream} from '../filesystems/stream-utils.node';\nimport {Headers} from './headers-polyfill';\n\nconst isBoolean = (x) => typeof x === 'boolean';\nconst isFunction = (x) => typeof x === 'function';\nconst isObject = (x) => x !== null && typeof x === 'object';\nconst isReadableNodeStream = (x) =>\n isObject(x) && isFunction(x.read) && isFunction(x.pipe) && isBoolean(x.readable);\n\n/**\n * Polyfill for Browser Response\n *\n * Under Node.js we return a mock \"fetch response object\"\n * so that apps can use the same API as in the browser.\n *\n * Note: This is intended to be a \"lightweight\" implementation and will have limitations.\n *\n * See https://developer.mozilla.org/en-US/docs/Web/API/Response\n */\nimport * as stream from 'stream';\n\nexport class Response {\n readonly ok: boolean;\n readonly status: number;\n readonly statusText: string;\n readonly headers: Headers;\n readonly url: string;\n bodyUsed: boolean = false;\n private readonly _body;\n\n // TODO - handle ArrayBuffer, ArrayBufferView, Buffer\n constructor(\n body,\n options: {\n headers?;\n status?: number;\n statusText?: string;\n url: string;\n }\n ) {\n const {headers, status = 200, statusText = 'OK', url} = options || {};\n\n this.url = url;\n this.ok = status === 200;\n this.status = status; // TODO - handle errors and set status\n this.statusText = statusText;\n this.headers = new Headers(options?.headers || {});\n\n // Check for content-encoding and create a decompression stream\n if (isReadableNodeStream(body)) {\n this._body = decompressReadStream(body, headers);\n } else if (typeof body === 'string') {\n this._body = stream.Readable.from([new TextEncoder().encode(body)]);\n } else {\n this._body = stream.Readable.from([body || new ArrayBuffer(0)]);\n }\n }\n\n // Subset of Properties\n\n // Returns a readable stream to the \"body\" of the response (or file)\n get body() {\n assert(!this.bodyUsed);\n assert(isReadableNodeStream(this._body)); // Not implemented: conversion of ArrayBuffer etc to stream\n this.bodyUsed = true;\n return this._body;\n }\n\n // Subset of Methods\n\n async arrayBuffer() {\n if (!isReadableNodeStream(this._body)) {\n return this._body || new ArrayBuffer(0);\n }\n const data = await concatenateReadStream(this._body);\n return data;\n }\n\n async text() {\n const arrayBuffer = await this.arrayBuffer();\n const textDecoder = new TextDecoder();\n return textDecoder.decode(arrayBuffer);\n }\n\n async json() {\n const text = await this.text();\n return JSON.parse(text);\n }\n\n async blob() {\n if (typeof Blob === 'undefined') {\n throw new Error('Blob polyfill not installed');\n }\n return new Blob([await this.arrayBuffer()]);\n 
}\n}\n"],"mappings":"SAGQA,MAAM;AAAA,SACNC,oBAAoB,EAAEC,qBAAqB;AAAA,SAC3CC,OAAO;AAEf,MAAMC,SAAS,GAAIC,CAAC,IAAK,OAAOA,CAAC,KAAK,SAAS;AAC/C,MAAMC,UAAU,GAAID,CAAC,IAAK,OAAOA,CAAC,KAAK,UAAU;AACjD,MAAME,QAAQ,GAAIF,CAAC,IAAKA,CAAC,KAAK,IAAI,IAAI,OAAOA,CAAC,KAAK,QAAQ;AAC3D,MAAMG,oBAAoB,GAAIH,CAAC,IAC7BE,QAAQ,CAACF,CAAC,CAAC,IAAIC,UAAU,CAACD,CAAC,CAACI,IAAI,CAAC,IAAIH,UAAU,CAACD,CAAC,CAACK,IAAI,CAAC,IAAIN,SAAS,CAACC,CAAC,CAACM,QAAQ,CAAC;AAYlF,OAAO,KAAKC,MAAM,MAAM,QAAQ;AAEhC,OAAO,MAAMC,QAAQ,CAAC;EAUpBC,WAAWA,CACTC,IAAI,EACJC,OAKC,EACD;IAAA,KAjBOC,EAAE;IAAA,KACFC,MAAM;IAAA,KACNC,UAAU;IAAA,KACVC,OAAO;IAAA,KACPC,GAAG;IAAA,KACZC,QAAQ,GAAY,KAAK;IAAA,KACRC,KAAK;IAYpB,MAAM;MAACH,OAAO;MAAEF,MAAM,GAAG,GAAG;MAAEC,UAAU,GAAG,IAAI;MAAEE;IAAG,CAAC,GAAGL,OAAO,IAAI,CAAC,CAAC;IAErE,IAAI,CAACK,GAAG,GAAGA,GAAG;IACd,IAAI,CAACJ,EAAE,GAAGC,MAAM,KAAK,GAAG;IACxB,IAAI,CAACA,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACC,UAAU,GAAGA,UAAU;IAC5B,IAAI,CAACC,OAAO,GAAG,IAAIjB,OAAO,CAAC,CAAAa,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEI,OAAO,KAAI,CAAC,CAAC,CAAC;IAGlD,IAAIZ,oBAAoB,CAACO,IAAI,CAAC,EAAE;MAC9B,IAAI,CAACQ,KAAK,GAAGtB,oBAAoB,CAACc,IAAI,EAAEK,OAAO,CAAC;IAClD,CAAC,MAAM,IAAI,OAAOL,IAAI,KAAK,QAAQ,EAAE;MACnC,IAAI,CAACQ,KAAK,GAAGX,MAAM,CAACY,QAAQ,CAACC,IAAI,CAAC,CAAC,IAAIC,WAAW,CAAC,CAAC,CAACC,MAAM,CAACZ,IAAI,CAAC,CAAC,CAAC;IACrE,CAAC,MAAM;MACL,IAAI,CAACQ,KAAK,GAAGX,MAAM,CAACY,QAAQ,CAACC,IAAI,CAAC,CAACV,IAAI,IAAI,IAAIa,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;IACjE;EACF;EAKA,IAAIb,IAAIA,CAAA,EAAG;IACTf,MAAM,CAAC,CAAC,IAAI,CAACsB,QAAQ,CAAC;IACtBtB,MAAM,CAACQ,oBAAoB,CAAC,IAAI,CAACe,KAAK,CAAC,CAAC;IACxC,IAAI,CAACD,QAAQ,GAAG,IAAI;IACpB,OAAO,IAAI,CAACC,KAAK;EACnB;EAIA,MAAMM,WAAWA,CAAA,EAAG;IAClB,IAAI,CAACrB,oBAAoB,CAAC,IAAI,CAACe,KAAK,CAAC,EAAE;MACrC,OAAO,IAAI,CAACA,KAAK,IAAI,IAAIK,WAAW,CAAC,CAAC,CAAC;IACzC;IACA,MAAME,IAAI,GAAG,MAAM5B,qBAAqB,CAAC,IAAI,CAACqB,KAAK,CAAC;IACpD,OAAOO,IAAI;EACb;EAEA,MAAMC,IAAIA,CAAA,EAAG;IACX,MAAMF,WAAW,GAAG,MAAM,IAAI,CAACA,WAAW,CAAC,CAAC;IAC5C,MAAMG,WAAW,GAAG,IAAIC,WAAW,CAAC,CAAC;IACrC,OAAOD,WAAW,CAACE,MAAM,CAACL,WAAW,CAAC;EACxC;EAEA,MAAMM,IAAIA,CAAA,EAAG;IACX,MAAMJ,IAAI,GAAG,MAAM,IAAI,CAACA,IAAI,CAAC,CAAC;IAC9B,OAAOK,IAAI,CAACC,KAAK,CAACN,IAAI,CAAC;EACzB;EAEA,MAAMO,IAAIA,CAAA,EAAG;IACX,IAAI,OAAOC,IAAI,KAAK,WAAW,EAAE;MAC/B,MAAM,IAAIC,KAAK,CAAC,6BAA6B,CAAC;IAChD;IACA,OAAO,IAAID,IAAI,CAAC,CAAC,MAAM,IAAI,CAACV,WAAW,CAAC,CAAC,CAAC,CAAC;EAC7C;AACF"}
@@ -1 +1 @@
- {"version":3,"file":"fetch-node.d.ts","sourceRoot":"","sources":["../../src/filesystems/fetch-node.ts"],"names":[],"mappings":"AAaA;;;;;GAKG;AAEH,wBAAsB,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,WAAW,GAAG,OAAO,CAAC,QAAQ,CAAC,CAyDrF"}
+ {"version":3,"file":"fetch-node.d.ts","sourceRoot":"","sources":["../../src/filesystems/fetch-node.ts"],"names":[],"mappings":"AAcA;;;;;GAKG;AAEH,wBAAsB,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,WAAW,GAAG,OAAO,CAAC,QAAQ,CAAC,CAyDrF"}
@@ -1 +1 @@
- {"version":3,"file":"fetch-node.js","names":["fs","Readable","resolvePath","decompressReadStream","isBoolean","x","isFunction","isObject","isReadableNodeStream","read","pipe","readable","fetchNode","url","options","FILE_PROTOCOL_REGEX","replace","noqueryUrl","split","responseHeaders","Headers","endsWith","body","Promise","resolve","reject","stream","createReadStream","encoding","once","on","error","bodyStream","from","TextEncoder","encode","ArrayBuffer","status","statusText","headers","getHeadersForFile","response","Response","Object","defineProperty","value","errorMessage","message","stats","statSync","size","slice"],"sources":["../../src/filesystems/fetch-node.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport fs from 'fs';\nimport {Readable} from 'stream';\nimport {resolvePath} from '@loaders.gl/loader-utils';\nimport {decompressReadStream} from './stream-utils.node';\n\nconst isBoolean = (x) => typeof x === 'boolean';\nconst isFunction = (x) => typeof x === 'function';\nconst isObject = (x) => x !== null && typeof x === 'object';\nconst isReadableNodeStream = (x) =>\n isObject(x) && isFunction(x.read) && isFunction(x.pipe) && isBoolean(x.readable);\n\n/**\n * Enables\n * @param url\n * @param options\n * @returns\n */\n// eslint-disable-next-line max-statements\nexport async function fetchNode(url: string, options?: RequestInit): Promise<Response> {\n // Support `file://` protocol\n const FILE_PROTOCOL_REGEX = /^file:\\/\\//;\n url.replace(FILE_PROTOCOL_REGEX, '/');\n\n // Remove any query parameters, as they have no meaning\n let noqueryUrl = url.split('?')[0];\n noqueryUrl = resolvePath(noqueryUrl);\n\n const responseHeaders = new Headers();\n // Automatically decompress gzipped files with .gz extension\n if (url.endsWith('.gz')) {\n // url = url.slice(0, -3);\n responseHeaders['content-encoding'] = 'gzip';\n }\n if (url.endsWith('.br')) {\n // url = url.slice(0, -3);\n responseHeaders['content-encoding'] = 'br';\n }\n\n try {\n // Now open the stream\n const body = await new Promise<fs.ReadStream>((resolve, reject) => {\n // @ts-ignore\n const stream = fs.createReadStream(noqueryUrl, {encoding: null});\n stream.once('readable', () => resolve(stream));\n stream.on('error', (error) => reject(error));\n });\n\n let bodyStream: Readable = body;\n\n // Check for content-encoding and create a decompression stream\n if (isReadableNodeStream(body)) {\n bodyStream = decompressReadStream(body, responseHeaders);\n } else if (typeof body === 'string') {\n bodyStream = Readable.from([new TextEncoder().encode(body)]);\n } else {\n bodyStream = Readable.from([body || new ArrayBuffer(0)]);\n }\n\n const status = 200;\n const statusText = 'OK';\n const headers = getHeadersForFile(noqueryUrl);\n // @ts-expect-error\n const response = new Response(bodyStream, {headers, status, statusText});\n Object.defineProperty(response, 'url', {value: url});\n return response;\n } catch (error) {\n // console.error(error);\n const errorMessage = (error as Error).message;\n const status = 400;\n const statusText = errorMessage;\n const headers = {};\n const response = new Response(errorMessage, {headers, status, statusText});\n Object.defineProperty(response, 'url', {value: url});\n return response;\n }\n}\n\nfunction getHeadersForFile(noqueryUrl: string): Headers {\n const headers = {};\n\n // Fix up content length if we can for best progress experience\n if (!headers['content-length']) {\n const stats = fs.statSync(noqueryUrl);\n headers['content-length'] = stats.size;\n }\n\n // Automatically 
decompress gzipped files with .gz extension\n if (noqueryUrl.endsWith('.gz')) {\n noqueryUrl = noqueryUrl.slice(0, -3);\n headers['content-encoding'] = 'gzip';\n }\n\n return new Headers(headers);\n}\n"],"mappings":"AAEA,OAAOA,EAAE,MAAM,IAAI;AACnB,SAAQC,QAAQ,QAAO,QAAQ;AAC/B,SAAQC,WAAW,QAAO,0BAA0B;AAAC,SAC7CC,oBAAoB;AAE5B,MAAMC,SAAS,GAAIC,CAAC,IAAK,OAAOA,CAAC,KAAK,SAAS;AAC/C,MAAMC,UAAU,GAAID,CAAC,IAAK,OAAOA,CAAC,KAAK,UAAU;AACjD,MAAME,QAAQ,GAAIF,CAAC,IAAKA,CAAC,KAAK,IAAI,IAAI,OAAOA,CAAC,KAAK,QAAQ;AAC3D,MAAMG,oBAAoB,GAAIH,CAAC,IAC7BE,QAAQ,CAACF,CAAC,CAAC,IAAIC,UAAU,CAACD,CAAC,CAACI,IAAI,CAAC,IAAIH,UAAU,CAACD,CAAC,CAACK,IAAI,CAAC,IAAIN,SAAS,CAACC,CAAC,CAACM,QAAQ,CAAC;AASlF,OAAO,eAAeC,SAASA,CAACC,GAAW,EAAEC,OAAqB,EAAqB;EAErF,MAAMC,mBAAmB,GAAG,YAAY;EACxCF,GAAG,CAACG,OAAO,CAACD,mBAAmB,EAAE,GAAG,CAAC;EAGrC,IAAIE,UAAU,GAAGJ,GAAG,CAACK,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;EAClCD,UAAU,GAAGf,WAAW,CAACe,UAAU,CAAC;EAEpC,MAAME,eAAe,GAAG,IAAIC,OAAO,CAAC,CAAC;EAErC,IAAIP,GAAG,CAACQ,QAAQ,CAAC,KAAK,CAAC,EAAE;IAEvBF,eAAe,CAAC,kBAAkB,CAAC,GAAG,MAAM;EAC9C;EACA,IAAIN,GAAG,CAACQ,QAAQ,CAAC,KAAK,CAAC,EAAE;IAEvBF,eAAe,CAAC,kBAAkB,CAAC,GAAG,IAAI;EAC5C;EAEA,IAAI;IAEF,MAAMG,IAAI,GAAG,MAAM,IAAIC,OAAO,CAAgB,CAACC,OAAO,EAAEC,MAAM,KAAK;MAEjE,MAAMC,MAAM,GAAG1B,EAAE,CAAC2B,gBAAgB,CAACV,UAAU,EAAE;QAACW,QAAQ,EAAE;MAAI,CAAC,CAAC;MAChEF,MAAM,CAACG,IAAI,CAAC,UAAU,EAAE,MAAML,OAAO,CAACE,MAAM,CAAC,CAAC;MAC9CA,MAAM,CAACI,EAAE,CAAC,OAAO,EAAGC,KAAK,IAAKN,MAAM,CAACM,KAAK,CAAC,CAAC;IAC9C,CAAC,CAAC;IAEF,IAAIC,UAAoB,GAAGV,IAAI;IAG/B,IAAId,oBAAoB,CAACc,IAAI,CAAC,EAAE;MAC9BU,UAAU,GAAG7B,oBAAoB,CAACmB,IAAI,EAAEH,eAAe,CAAC;IAC1D,CAAC,MAAM,IAAI,OAAOG,IAAI,KAAK,QAAQ,EAAE;MACnCU,UAAU,GAAG/B,QAAQ,CAACgC,IAAI,CAAC,CAAC,IAAIC,WAAW,CAAC,CAAC,CAACC,MAAM,CAACb,IAAI,CAAC,CAAC,CAAC;IAC9D,CAAC,MAAM;MACLU,UAAU,GAAG/B,QAAQ,CAACgC,IAAI,CAAC,CAACX,IAAI,IAAI,IAAIc,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;IAC1D;IAEA,MAAMC,MAAM,GAAG,GAAG;IAClB,MAAMC,UAAU,GAAG,IAAI;IACvB,MAAMC,OAAO,GAAGC,iBAAiB,CAACvB,UAAU,CAAC;IAE7C,MAAMwB,QAAQ,GAAG,IAAIC,QAAQ,CAACV,UAAU,EAAE;MAACO,OAAO;MAAEF,MAAM;MAAEC;IAAU,CAAC,CAAC;IACxEK,MAAM,CAACC,cAAc,CAACH,QAAQ,EAAE,KAAK,EAAE;MAACI,KAAK,EAAEhC;IAAG,CAAC,CAAC;IACpD,OAAO4B,QAAQ;EACjB,CAAC,CAAC,OAAOV,KAAK,EAAE;IAEd,MAAMe,YAAY,GAAIf,KAAK,CAAWgB,OAAO;IAC7C,MAAMV,MAAM,GAAG,GAAG;IAClB,MAAMC,UAAU,GAAGQ,YAAY;IAC/B,MAAMP,OAAO,GAAG,CAAC,CAAC;IAClB,MAAME,QAAQ,GAAG,IAAIC,QAAQ,CAACI,YAAY,EAAE;MAACP,OAAO;MAAEF,MAAM;MAAEC;IAAU,CAAC,CAAC;IAC1EK,MAAM,CAACC,cAAc,CAACH,QAAQ,EAAE,KAAK,EAAE;MAACI,KAAK,EAAEhC;IAAG,CAAC,CAAC;IACpD,OAAO4B,QAAQ;EACjB;AACF;AAEA,SAASD,iBAAiBA,CAACvB,UAAkB,EAAW;EACtD,MAAMsB,OAAO,GAAG,CAAC,CAAC;EAGlB,IAAI,CAACA,OAAO,CAAC,gBAAgB,CAAC,EAAE;IAC9B,MAAMS,KAAK,GAAGhD,EAAE,CAACiD,QAAQ,CAAChC,UAAU,CAAC;IACrCsB,OAAO,CAAC,gBAAgB,CAAC,GAAGS,KAAK,CAACE,IAAI;EACxC;EAGA,IAAIjC,UAAU,CAACI,QAAQ,CAAC,KAAK,CAAC,EAAE;IAC9BJ,UAAU,GAAGA,UAAU,CAACkC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IACpCZ,OAAO,CAAC,kBAAkB,CAAC,GAAG,MAAM;EACtC;EAEA,OAAO,IAAInB,OAAO,CAACmB,OAAO,CAAC;AAC7B"}
+ {"version":3,"file":"fetch-node.js","names":["fs","Readable","resolvePath","decompressReadStream","isBoolean","x","isFunction","isObject","isReadableNodeStream","read","pipe","readable","fetchNode","url","options","FILE_PROTOCOL_REGEX","replace","noqueryUrl","split","responseHeaders","Headers","endsWith","body","Promise","resolve","reject","stream","createReadStream","encoding","once","on","error","bodyStream","from","TextEncoder","encode","ArrayBuffer","status","statusText","headers","getHeadersForFile","response","Response","Object","defineProperty","value","errorMessage","message","stats","statSync","size","slice"],"sources":["../../src/filesystems/fetch-node.ts"],"sourcesContent":["// loaders.gl, MIT license\n// Copyright (c) vis.gl contributors\n\nimport fs from 'fs';\nimport {Readable} from 'stream';\nimport {resolvePath} from '@loaders.gl/loader-utils';\nimport {decompressReadStream} from './stream-utils.node';\n\nconst isBoolean = (x) => typeof x === 'boolean';\nconst isFunction = (x) => typeof x === 'function';\nconst isObject = (x) => x !== null && typeof x === 'object';\nconst isReadableNodeStream = (x) =>\n isObject(x) && isFunction(x.read) && isFunction(x.pipe) && isBoolean(x.readable);\n\n/**\n * Enables\n * @param url\n * @param options\n * @returns\n */\n// eslint-disable-next-line max-statements\nexport async function fetchNode(url: string, options?: RequestInit): Promise<Response> {\n // Support `file://` protocol\n const FILE_PROTOCOL_REGEX = /^file:\\/\\//;\n url.replace(FILE_PROTOCOL_REGEX, '/');\n\n // Remove any query parameters, as they have no meaning\n let noqueryUrl = url.split('?')[0];\n noqueryUrl = resolvePath(noqueryUrl);\n\n const responseHeaders = new Headers();\n // Automatically decompress gzipped files with .gz extension\n if (url.endsWith('.gz')) {\n // url = url.slice(0, -3);\n responseHeaders['content-encoding'] = 'gzip';\n }\n if (url.endsWith('.br')) {\n // url = url.slice(0, -3);\n responseHeaders['content-encoding'] = 'br';\n }\n\n try {\n // Now open the stream\n const body = await new Promise<fs.ReadStream>((resolve, reject) => {\n // @ts-ignore\n const stream = fs.createReadStream(noqueryUrl, {encoding: null});\n stream.once('readable', () => resolve(stream));\n stream.on('error', (error) => reject(error));\n });\n\n let bodyStream: Readable = body;\n\n // Check for content-encoding and create a decompression stream\n if (isReadableNodeStream(body)) {\n bodyStream = decompressReadStream(body, responseHeaders);\n } else if (typeof body === 'string') {\n bodyStream = Readable.from([new TextEncoder().encode(body)]);\n } else {\n bodyStream = Readable.from([body || new ArrayBuffer(0)]);\n }\n\n const status = 200;\n const statusText = 'OK';\n const headers = getHeadersForFile(noqueryUrl);\n // @ts-expect-error\n const response = new Response(bodyStream, {headers, status, statusText});\n Object.defineProperty(response, 'url', {value: url});\n return response;\n } catch (error) {\n // console.error(error);\n const errorMessage = (error as Error).message;\n const status = 400;\n const statusText = errorMessage;\n const headers = {};\n const response = new Response(errorMessage, {headers, status, statusText});\n Object.defineProperty(response, 'url', {value: url});\n return response;\n }\n}\n\nfunction getHeadersForFile(noqueryUrl: string): Headers {\n const headers = {};\n\n // Fix up content length if we can for best progress experience\n if (!headers['content-length']) {\n const stats = fs.statSync(noqueryUrl);\n headers['content-length'] = 
stats.size;\n }\n\n // Automatically decompress gzipped files with .gz extension\n if (noqueryUrl.endsWith('.gz')) {\n noqueryUrl = noqueryUrl.slice(0, -3);\n headers['content-encoding'] = 'gzip';\n }\n\n return new Headers(headers);\n}\n"],"mappings":"AAGA,OAAOA,EAAE,MAAM,IAAI;AACnB,SAAQC,QAAQ,QAAO,QAAQ;AAC/B,SAAQC,WAAW,QAAO,0BAA0B;AAAC,SAC7CC,oBAAoB;AAE5B,MAAMC,SAAS,GAAIC,CAAC,IAAK,OAAOA,CAAC,KAAK,SAAS;AAC/C,MAAMC,UAAU,GAAID,CAAC,IAAK,OAAOA,CAAC,KAAK,UAAU;AACjD,MAAME,QAAQ,GAAIF,CAAC,IAAKA,CAAC,KAAK,IAAI,IAAI,OAAOA,CAAC,KAAK,QAAQ;AAC3D,MAAMG,oBAAoB,GAAIH,CAAC,IAC7BE,QAAQ,CAACF,CAAC,CAAC,IAAIC,UAAU,CAACD,CAAC,CAACI,IAAI,CAAC,IAAIH,UAAU,CAACD,CAAC,CAACK,IAAI,CAAC,IAAIN,SAAS,CAACC,CAAC,CAACM,QAAQ,CAAC;AASlF,OAAO,eAAeC,SAASA,CAACC,GAAW,EAAEC,OAAqB,EAAqB;EAErF,MAAMC,mBAAmB,GAAG,YAAY;EACxCF,GAAG,CAACG,OAAO,CAACD,mBAAmB,EAAE,GAAG,CAAC;EAGrC,IAAIE,UAAU,GAAGJ,GAAG,CAACK,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;EAClCD,UAAU,GAAGf,WAAW,CAACe,UAAU,CAAC;EAEpC,MAAME,eAAe,GAAG,IAAIC,OAAO,CAAC,CAAC;EAErC,IAAIP,GAAG,CAACQ,QAAQ,CAAC,KAAK,CAAC,EAAE;IAEvBF,eAAe,CAAC,kBAAkB,CAAC,GAAG,MAAM;EAC9C;EACA,IAAIN,GAAG,CAACQ,QAAQ,CAAC,KAAK,CAAC,EAAE;IAEvBF,eAAe,CAAC,kBAAkB,CAAC,GAAG,IAAI;EAC5C;EAEA,IAAI;IAEF,MAAMG,IAAI,GAAG,MAAM,IAAIC,OAAO,CAAgB,CAACC,OAAO,EAAEC,MAAM,KAAK;MAEjE,MAAMC,MAAM,GAAG1B,EAAE,CAAC2B,gBAAgB,CAACV,UAAU,EAAE;QAACW,QAAQ,EAAE;MAAI,CAAC,CAAC;MAChEF,MAAM,CAACG,IAAI,CAAC,UAAU,EAAE,MAAML,OAAO,CAACE,MAAM,CAAC,CAAC;MAC9CA,MAAM,CAACI,EAAE,CAAC,OAAO,EAAGC,KAAK,IAAKN,MAAM,CAACM,KAAK,CAAC,CAAC;IAC9C,CAAC,CAAC;IAEF,IAAIC,UAAoB,GAAGV,IAAI;IAG/B,IAAId,oBAAoB,CAACc,IAAI,CAAC,EAAE;MAC9BU,UAAU,GAAG7B,oBAAoB,CAACmB,IAAI,EAAEH,eAAe,CAAC;IAC1D,CAAC,MAAM,IAAI,OAAOG,IAAI,KAAK,QAAQ,EAAE;MACnCU,UAAU,GAAG/B,QAAQ,CAACgC,IAAI,CAAC,CAAC,IAAIC,WAAW,CAAC,CAAC,CAACC,MAAM,CAACb,IAAI,CAAC,CAAC,CAAC;IAC9D,CAAC,MAAM;MACLU,UAAU,GAAG/B,QAAQ,CAACgC,IAAI,CAAC,CAACX,IAAI,IAAI,IAAIc,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;IAC1D;IAEA,MAAMC,MAAM,GAAG,GAAG;IAClB,MAAMC,UAAU,GAAG,IAAI;IACvB,MAAMC,OAAO,GAAGC,iBAAiB,CAACvB,UAAU,CAAC;IAE7C,MAAMwB,QAAQ,GAAG,IAAIC,QAAQ,CAACV,UAAU,EAAE;MAACO,OAAO;MAAEF,MAAM;MAAEC;IAAU,CAAC,CAAC;IACxEK,MAAM,CAACC,cAAc,CAACH,QAAQ,EAAE,KAAK,EAAE;MAACI,KAAK,EAAEhC;IAAG,CAAC,CAAC;IACpD,OAAO4B,QAAQ;EACjB,CAAC,CAAC,OAAOV,KAAK,EAAE;IAEd,MAAMe,YAAY,GAAIf,KAAK,CAAWgB,OAAO;IAC7C,MAAMV,MAAM,GAAG,GAAG;IAClB,MAAMC,UAAU,GAAGQ,YAAY;IAC/B,MAAMP,OAAO,GAAG,CAAC,CAAC;IAClB,MAAME,QAAQ,GAAG,IAAIC,QAAQ,CAACI,YAAY,EAAE;MAACP,OAAO;MAAEF,MAAM;MAAEC;IAAU,CAAC,CAAC;IAC1EK,MAAM,CAACC,cAAc,CAACH,QAAQ,EAAE,KAAK,EAAE;MAACI,KAAK,EAAEhC;IAAG,CAAC,CAAC;IACpD,OAAO4B,QAAQ;EACjB;AACF;AAEA,SAASD,iBAAiBA,CAACvB,UAAkB,EAAW;EACtD,MAAMsB,OAAO,GAAG,CAAC,CAAC;EAGlB,IAAI,CAACA,OAAO,CAAC,gBAAgB,CAAC,EAAE;IAC9B,MAAMS,KAAK,GAAGhD,EAAE,CAACiD,QAAQ,CAAChC,UAAU,CAAC;IACrCsB,OAAO,CAAC,gBAAgB,CAAC,GAAGS,KAAK,CAACE,IAAI;EACxC;EAGA,IAAIjC,UAAU,CAACI,QAAQ,CAAC,KAAK,CAAC,EAAE;IAC9BJ,UAAU,GAAGA,UAAU,CAACkC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IACpCZ,OAAO,CAAC,kBAAkB,CAAC,GAAG,MAAM;EACtC;EAEA,OAAO,IAAInB,OAAO,CAACmB,OAAO,CAAC;AAC7B"}
@@ -1 +1 @@
- {"version":3,"file":"node-filesystem.d.ts","sourceRoot":"","sources":["../../src/filesystems/node-filesystem.ts"],"names":[],"mappings":"AAEA,OAAO,EAAC,IAAI,EAAE,sBAAsB,EAAC,MAAM,0BAA0B,CAAC;AAEtE,OAAO,EAAC,QAAQ,EAAC,MAAM,aAAa,CAAC;AAMrC;;;;GAIG;AACH,qBAAa,cAAe,YAAW,sBAAsB;IAC3D,QAAQ,EAAE,OAAO,CAAQ;IACzB,QAAQ,EAAE,OAAO,CAAQ;;IAKnB,OAAO,CAAC,OAAO,SAAM,EAAE,OAAO,CAAC,EAAE,EAAE,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC;IAIpD,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IASjC,MAAM,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAInC,KAAK,CAAC,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,QAAQ,CAAC;IAK5D,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,GAAE,GAAS,GAAG,OAAO,CAAC,QAAQ,CAAC;IAInE,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,GAAE,GAAG,GAAG,IAAU,EAAE,IAAI,CAAC,EAAE,GAAG,GAAG,OAAO,CAAC,QAAQ,CAAC;CAG7F"}
+ {"version":3,"file":"node-filesystem.d.ts","sourceRoot":"","sources":["../../src/filesystems/node-filesystem.ts"],"names":[],"mappings":"AAGA,OAAO,EAAC,IAAI,EAAE,sBAAsB,EAAC,MAAM,0BAA0B,CAAC;AAEtE,OAAO,EAAC,QAAQ,EAAC,MAAM,aAAa,CAAC;AAMrC;;;;GAIG;AACH,qBAAa,cAAe,YAAW,sBAAsB;IAC3D,QAAQ,EAAE,OAAO,CAAQ;IACzB,QAAQ,EAAE,OAAO,CAAQ;;IAKnB,OAAO,CAAC,OAAO,SAAM,EAAE,OAAO,CAAC,EAAE,EAAE,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC;IAIpD,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IASjC,MAAM,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAInC,KAAK,CAAC,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,QAAQ,CAAC;IAK5D,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,GAAE,GAAS,GAAG,OAAO,CAAC,QAAQ,CAAC;IAInE,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,GAAE,GAAG,GAAG,IAAU,EAAE,IAAI,CAAC,EAAE,GAAG,GAAG,OAAO,CAAC,QAAQ,CAAC;CAG7F"}
@@ -1 +1 @@
- {"version":3,"file":"node-filesystem.js","names":["fsPromise","NodeFile","fetchNode","NodeFileSystem","constructor","readable","writable","readdir","dirname","arguments","length","undefined","options","stat","path","info","bigint","size","Number","bigsize","isDirectory","unlink","fetch","openReadableFile","flags","openWritableFile","mode"],"sources":["../../src/filesystems/node-filesystem.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport {Stat, RandomAccessFileSystem} from '@loaders.gl/loader-utils';\nimport fsPromise from 'fs/promises';\nimport {NodeFile} from './node-file';\nimport {fetchNode} from './fetch-node';\n\n// import {fetchFile} from \"../fetch/fetch-file\"\n// import {selectLoader} from \"../api/select-loader\";\n\n/**\n * FileSystem pass-through for Node.js\n * Compatible with BrowserFileSystem.\n * @param options\n */\nexport class NodeFileSystem implements RandomAccessFileSystem {\n readable: boolean = true;\n writable: boolean = true;\n\n // implements FileSystem\n constructor() {}\n\n async readdir(dirname = '.', options?: {}): Promise<any[]> {\n return await fsPromise.readdir(dirname, options);\n }\n\n async stat(path: string): Promise<Stat> {\n const info = await fsPromise.stat(path, {bigint: true});\n return {\n size: Number(info.size),\n bigsize: info.size,\n isDirectory: info.isDirectory()\n };\n }\n\n async unlink(path: string): Promise<void> {\n return await fsPromise.unlink(path);\n }\n\n async fetch(path: string, options: RequestInit): Promise<Response> {\n return await fetchNode(path, options);\n }\n\n // implements IRandomAccessFileSystem\n async openReadableFile(path: string, flags: 'r' = 'r'): Promise<NodeFile> {\n return new NodeFile(path, flags);\n }\n\n async openWritableFile(path: string, flags: 'w' | 'wx' = 'w', mode?: any): Promise<NodeFile> {\n return new NodeFile(path, flags, mode);\n }\n}\n"],"mappings":"AAGA,OAAOA,SAAS,MAAM,aAAa;AAAC,SAC5BC,QAAQ;AAAA,SACRC,SAAS;AAUjB,OAAO,MAAMC,cAAc,CAAmC;EAK5DC,WAAWA,CAAA,EAAG;IAAA,KAJdC,QAAQ,GAAY,IAAI;IAAA,KACxBC,QAAQ,GAAY,IAAI;EAGT;EAEf,MAAMC,OAAOA,CAAA,EAA8C;IAAA,IAA7CC,OAAO,GAAAC,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,GAAG;IAAA,IAAEG,OAAY,GAAAH,SAAA,CAAAC,MAAA,OAAAD,SAAA,MAAAE,SAAA;IACvC,OAAO,MAAMX,SAAS,CAACO,OAAO,CAACC,OAAO,EAAEI,OAAO,CAAC;EAClD;EAEA,MAAMC,IAAIA,CAACC,IAAY,EAAiB;IACtC,MAAMC,IAAI,GAAG,MAAMf,SAAS,CAACa,IAAI,CAACC,IAAI,EAAE;MAACE,MAAM,EAAE;IAAI,CAAC,CAAC;IACvD,OAAO;MACLC,IAAI,EAAEC,MAAM,CAACH,IAAI,CAACE,IAAI,CAAC;MACvBE,OAAO,EAAEJ,IAAI,CAACE,IAAI;MAClBG,WAAW,EAAEL,IAAI,CAACK,WAAW,CAAC;IAChC,CAAC;EACH;EAEA,MAAMC,MAAMA,CAACP,IAAY,EAAiB;IACxC,OAAO,MAAMd,SAAS,CAACqB,MAAM,CAACP,IAAI,CAAC;EACrC;EAEA,MAAMQ,KAAKA,CAACR,IAAY,EAAEF,OAAoB,EAAqB;IACjE,OAAO,MAAMV,SAAS,CAACY,IAAI,EAAEF,OAAO,CAAC;EACvC;EAGA,MAAMW,gBAAgBA,CAACT,IAAY,EAAuC;IAAA,IAArCU,KAAU,GAAAf,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,GAAG;IACnD,OAAO,IAAIR,QAAQ,CAACa,IAAI,EAAEU,KAAK,CAAC;EAClC;EAEA,MAAMC,gBAAgBA,CAACX,IAAY,EAA0D;IAAA,IAAxDU,KAAiB,GAAAf,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,GAAG;IAAA,IAAEiB,IAAU,GAAAjB,SAAA,CAAAC,MAAA,OAAAD,SAAA,MAAAE,SAAA;IACtE,OAAO,IAAIV,QAAQ,CAACa,IAAI,EAAEU,KAAK,EAAEE,IAAI,CAAC;EACxC;AACF"}
+ {"version":3,"file":"node-filesystem.js","names":["fsPromise","NodeFile","fetchNode","NodeFileSystem","constructor","readable","writable","readdir","dirname","arguments","length","undefined","options","stat","path","info","bigint","size","Number","bigsize","isDirectory","unlink","fetch","openReadableFile","flags","openWritableFile","mode"],"sources":["../../src/filesystems/node-filesystem.ts"],"sourcesContent":["// loaders.gl, MIT license\n// Copyright (c) vis.gl contributors\n\nimport {Stat, RandomAccessFileSystem} from '@loaders.gl/loader-utils';\nimport fsPromise from 'fs/promises';\nimport {NodeFile} from './node-file';\nimport {fetchNode} from './fetch-node';\n\n// import {fetchFile} from \"../fetch/fetch-file\"\n// import {selectLoader} from \"../api/select-loader\";\n\n/**\n * FileSystem pass-through for Node.js\n * Compatible with BrowserFileSystem.\n * @param options\n */\nexport class NodeFileSystem implements RandomAccessFileSystem {\n readable: boolean = true;\n writable: boolean = true;\n\n // implements FileSystem\n constructor() {}\n\n async readdir(dirname = '.', options?: {}): Promise<any[]> {\n return await fsPromise.readdir(dirname, options);\n }\n\n async stat(path: string): Promise<Stat> {\n const info = await fsPromise.stat(path, {bigint: true});\n return {\n size: Number(info.size),\n bigsize: info.size,\n isDirectory: info.isDirectory()\n };\n }\n\n async unlink(path: string): Promise<void> {\n return await fsPromise.unlink(path);\n }\n\n async fetch(path: string, options: RequestInit): Promise<Response> {\n return await fetchNode(path, options);\n }\n\n // implements IRandomAccessFileSystem\n async openReadableFile(path: string, flags: 'r' = 'r'): Promise<NodeFile> {\n return new NodeFile(path, flags);\n }\n\n async openWritableFile(path: string, flags: 'w' | 'wx' = 'w', mode?: any): Promise<NodeFile> {\n return new NodeFile(path, flags, mode);\n }\n}\n"],"mappings":"AAIA,OAAOA,SAAS,MAAM,aAAa;AAAC,SAC5BC,QAAQ;AAAA,SACRC,SAAS;AAUjB,OAAO,MAAMC,cAAc,CAAmC;EAK5DC,WAAWA,CAAA,EAAG;IAAA,KAJdC,QAAQ,GAAY,IAAI;IAAA,KACxBC,QAAQ,GAAY,IAAI;EAGT;EAEf,MAAMC,OAAOA,CAAA,EAA8C;IAAA,IAA7CC,OAAO,GAAAC,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,GAAG;IAAA,IAAEG,OAAY,GAAAH,SAAA,CAAAC,MAAA,OAAAD,SAAA,MAAAE,SAAA;IACvC,OAAO,MAAMX,SAAS,CAACO,OAAO,CAACC,OAAO,EAAEI,OAAO,CAAC;EAClD;EAEA,MAAMC,IAAIA,CAACC,IAAY,EAAiB;IACtC,MAAMC,IAAI,GAAG,MAAMf,SAAS,CAACa,IAAI,CAACC,IAAI,EAAE;MAACE,MAAM,EAAE;IAAI,CAAC,CAAC;IACvD,OAAO;MACLC,IAAI,EAAEC,MAAM,CAACH,IAAI,CAACE,IAAI,CAAC;MACvBE,OAAO,EAAEJ,IAAI,CAACE,IAAI;MAClBG,WAAW,EAAEL,IAAI,CAACK,WAAW,CAAC;IAChC,CAAC;EACH;EAEA,MAAMC,MAAMA,CAACP,IAAY,EAAiB;IACxC,OAAO,MAAMd,SAAS,CAACqB,MAAM,CAACP,IAAI,CAAC;EACrC;EAEA,MAAMQ,KAAKA,CAACR,IAAY,EAAEF,OAAoB,EAAqB;IACjE,OAAO,MAAMV,SAAS,CAACY,IAAI,EAAEF,OAAO,CAAC;EACvC;EAGA,MAAMW,gBAAgBA,CAACT,IAAY,EAAuC;IAAA,IAArCU,KAAU,GAAAf,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,GAAG;IACnD,OAAO,IAAIR,QAAQ,CAACa,IAAI,EAAEU,KAAK,CAAC;EAClC;EAEA,MAAMC,gBAAgBA,CAACX,IAAY,EAA0D;IAAA,IAAxDU,KAAiB,GAAAf,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,GAAG;IAAA,IAAEiB,IAAU,GAAAjB,SAAA,CAAAC,MAAA,OAAAD,SAAA,MAAAE,SAAA;IACtE,OAAO,IAAIV,QAAQ,CAACa,IAAI,EAAEU,KAAK,EAAEE,IAAI,CAAC;EACxC;AACF"}
@@ -1 +1 @@
1
- {"version":3,"file":"stream-utils.node.d.ts","sourceRoot":"","sources":["../../src/filesystems/stream-utils.node.ts"],"names":[],"mappings":";AAGA,OAAO,EAAC,QAAQ,EAAC,MAAM,QAAQ,CAAC;AAKhC;;GAEG;AACH,wBAAgB,oBAAoB,CAAC,UAAU,EAAE,QAAQ,EAAE,OAAO,CAAC,EAAE,OAAO,YAY3E;AAED;;;;GAIG;AACH,wBAAsB,qBAAqB,CAAC,UAAU,KAAA,GAAG,OAAO,CAAC,WAAW,CAAC,CAsB5E;AAED;;;;GAIG;AACH,wBAAgB,uBAAuB,CAAC,OAAO,EAAE,CAAC,WAAW,GAAG,UAAU,CAAC,EAAE,GAAG,WAAW,CAqB1F;AAED;;;GAGG;AACH,wBAAgB,aAAa,CAAC,IAAI,EAAE,OAAO,GAAG,WAAW,CA+BxD"}
1
+ {"version":3,"file":"stream-utils.node.d.ts","sourceRoot":"","sources":["../../src/filesystems/stream-utils.node.ts"],"names":[],"mappings":";AAIA,OAAO,EAAC,QAAQ,EAAC,MAAM,QAAQ,CAAC;AAKhC;;GAEG;AACH,wBAAgB,oBAAoB,CAAC,UAAU,EAAE,QAAQ,EAAE,OAAO,CAAC,EAAE,OAAO,YAY3E;AAED;;;;GAIG;AACH,wBAAsB,qBAAqB,CAAC,UAAU,KAAA,GAAG,OAAO,CAAC,WAAW,CAAC,CAsB5E;AAED;;;;GAIG;AACH,wBAAgB,uBAAuB,CAAC,OAAO,EAAE,CAAC,WAAW,GAAG,UAAU,CAAC,EAAE,GAAG,WAAW,CAqB1F;AAED;;;GAGG;AACH,wBAAgB,aAAa,CAAC,IAAI,EAAE,OAAO,GAAG,WAAW,CA+BxD"}
@@ -1 +1 @@
1
- {"version":3,"file":"stream-utils.node.js","names":["zlib","isArrayBuffer","x","ArrayBuffer","isBuffer","Buffer","decompressReadStream","readStream","headers","get","pipe","createBrotliDecompress","createGunzip","createDeflate","concatenateReadStream","arrayBufferChunks","Promise","resolve","reject","on","error","read","chunk","Error","push","toArrayBuffer","arrayBuffer","concatenateArrayBuffers","sources","sourceArrays","map","source2","Uint8Array","byteLength","reduce","length","typedArray","result","offset","sourceArray","set","buffer","data","isView","text","uint8Array","TextEncoder","encode","_toArrayBuffer","JSON","stringify","slice"],"sources":["../../src/filesystems/stream-utils.node.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport zlib from 'zlib';\nimport {Readable} from 'stream';\n\nconst isArrayBuffer = (x) => x && x instanceof ArrayBuffer;\nconst isBuffer = (x) => x && x instanceof Buffer;\n\n/**\n *\n */\nexport function decompressReadStream(readStream: Readable, headers?: Headers) {\n switch (headers?.get('content-encoding')) {\n case 'br':\n return readStream.pipe(zlib.createBrotliDecompress());\n case 'gzip':\n return readStream.pipe(zlib.createGunzip());\n case 'deflate':\n return readStream.pipe(zlib.createDeflate());\n default:\n // No compression or an unknown one, just return it as is\n return readStream;\n }\n}\n\n/**\n *\n * @param readStream\n * @returns\n */\nexport async function concatenateReadStream(readStream): Promise<ArrayBuffer> {\n const arrayBufferChunks: ArrayBuffer[] = [];\n\n return await new Promise((resolve, reject) => {\n readStream.on('error', (error) => reject(error));\n\n // Once the readable callback has been added, stream switches to \"flowing mode\"\n // In Node 10 (but not 12 and 14) this causes `data` and `end` to never be called unless we read data here\n readStream.on('readable', () => readStream.read());\n\n readStream.on('data', (chunk) => {\n if (typeof chunk === 'string') {\n reject(new Error('Read stream not binary'));\n }\n arrayBufferChunks.push(toArrayBuffer(chunk));\n });\n\n readStream.on('end', () => {\n const arrayBuffer = concatenateArrayBuffers(arrayBufferChunks);\n resolve(arrayBuffer);\n });\n });\n}\n\n/**\n * Concatenate a sequence of ArrayBuffers\n * @return A concatenated ArrayBuffer\n * @note duplicates loader-utils since polyfills should be independent\n */\nexport function concatenateArrayBuffers(sources: (ArrayBuffer | Uint8Array)[]): ArrayBuffer {\n // Make sure all inputs are wrapped in typed arrays\n const sourceArrays = sources.map((source2) =>\n source2 instanceof ArrayBuffer ? 
new Uint8Array(source2) : source2\n );\n\n // Get length of all inputs\n const byteLength = sourceArrays.reduce((length, typedArray) => length + typedArray.byteLength, 0);\n\n // Allocate array with space for all inputs\n const result = new Uint8Array(byteLength);\n\n // Copy the subarrays\n let offset = 0;\n for (const sourceArray of sourceArrays) {\n result.set(sourceArray, offset);\n offset += sourceArray.byteLength;\n }\n\n // We work with ArrayBuffers, discard the typed array wrapper\n return result.buffer;\n}\n\n/**\n * @param data\n * @todo Duplicate of core\n */\nexport function toArrayBuffer(data: unknown): ArrayBuffer {\n if (isArrayBuffer(data)) {\n return data as ArrayBuffer;\n }\n\n // TODO - per docs we should just be able to call buffer.buffer, but there are issues\n if (isBuffer(data)) {\n // @ts-expect-error\n const typedArray = new Uint8Array(data);\n return typedArray.buffer;\n }\n\n // Careful - Node Buffers will look like ArrayBuffers (keep after isBuffer)\n if (ArrayBuffer.isView(data)) {\n return data.buffer;\n }\n\n if (typeof data === 'string') {\n const text = data;\n const uint8Array = new TextEncoder().encode(text);\n return uint8Array.buffer;\n }\n\n // HACK to support Blob polyfill\n // @ts-expect-error\n if (data && typeof data === 'object' && data._toArrayBuffer) {\n // @ts-expect-error\n return data._toArrayBuffer();\n }\n\n throw new Error(`toArrayBuffer(${JSON.stringify(data, null, 2).slice(10)})`);\n}\n"],"mappings":"AAEA,OAAOA,IAAI,MAAM,MAAM;AAGvB,MAAMC,aAAa,GAAIC,CAAC,IAAKA,CAAC,IAAIA,CAAC,YAAYC,WAAW;AAC1D,MAAMC,QAAQ,GAAIF,CAAC,IAAKA,CAAC,IAAIA,CAAC,YAAYG,MAAM;AAKhD,OAAO,SAASC,oBAAoBA,CAACC,UAAoB,EAAEC,OAAiB,EAAE;EAC5E,QAAQA,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEC,GAAG,CAAC,kBAAkB,CAAC;IACtC,KAAK,IAAI;MACP,OAAOF,UAAU,CAACG,IAAI,CAACV,IAAI,CAACW,sBAAsB,CAAC,CAAC,CAAC;IACvD,KAAK,MAAM;MACT,OAAOJ,UAAU,CAACG,IAAI,CAACV,IAAI,CAACY,YAAY,CAAC,CAAC,CAAC;IAC7C,KAAK,SAAS;MACZ,OAAOL,UAAU,CAACG,IAAI,CAACV,IAAI,CAACa,aAAa,CAAC,CAAC,CAAC;IAC9C;MAEE,OAAON,UAAU;EACrB;AACF;AAOA,OAAO,eAAeO,qBAAqBA,CAACP,UAAU,EAAwB;EAC5E,MAAMQ,iBAAgC,GAAG,EAAE;EAE3C,OAAO,MAAM,IAAIC,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IAC5CX,UAAU,CAACY,EAAE,CAAC,OAAO,EAAGC,KAAK,IAAKF,MAAM,CAACE,KAAK,CAAC,CAAC;IAIhDb,UAAU,CAACY,EAAE,CAAC,UAAU,EAAE,MAAMZ,UAAU,CAACc,IAAI,CAAC,CAAC,CAAC;IAElDd,UAAU,CAACY,EAAE,CAAC,MAAM,EAAGG,KAAK,IAAK;MAC/B,IAAI,OAAOA,KAAK,KAAK,QAAQ,EAAE;QAC7BJ,MAAM,CAAC,IAAIK,KAAK,CAAC,wBAAwB,CAAC,CAAC;MAC7C;MACAR,iBAAiB,CAACS,IAAI,CAACC,aAAa,CAACH,KAAK,CAAC,CAAC;IAC9C,CAAC,CAAC;IAEFf,UAAU,CAACY,EAAE,CAAC,KAAK,EAAE,MAAM;MACzB,MAAMO,WAAW,GAAGC,uBAAuB,CAACZ,iBAAiB,CAAC;MAC9DE,OAAO,CAACS,WAAW,CAAC;IACtB,CAAC,CAAC;EACJ,CAAC,CAAC;AACJ;AAOA,OAAO,SAASC,uBAAuBA,CAACC,OAAqC,EAAe;EAE1F,MAAMC,YAAY,GAAGD,OAAO,CAACE,GAAG,CAAEC,OAAO,IACvCA,OAAO,YAAY5B,WAAW,GAAG,IAAI6B,UAAU,CAACD,OAAO,CAAC,GAAGA,OAC7D,CAAC;EAGD,MAAME,UAAU,GAAGJ,YAAY,CAACK,MAAM,CAAC,CAACC,MAAM,EAAEC,UAAU,KAAKD,MAAM,GAAGC,UAAU,CAACH,UAAU,EAAE,CAAC,CAAC;EAGjG,MAAMI,MAAM,GAAG,IAAIL,UAAU,CAACC,UAAU,CAAC;EAGzC,IAAIK,MAAM,GAAG,CAAC;EACd,KAAK,MAAMC,WAAW,IAAIV,YAAY,EAAE;IACtCQ,MAAM,CAACG,GAAG,CAACD,WAAW,EAAED,MAAM,CAAC;IAC/BA,MAAM,IAAIC,WAAW,CAACN,UAAU;EAClC;EAGA,OAAOI,MAAM,CAACI,MAAM;AACtB;AAMA,OAAO,SAAShB,aAAaA,CAACiB,IAAa,EAAe;EACxD,IAAIzC,aAAa,CAACyC,IAAI,CAAC,EAAE;IACvB,OAAOA,IAAI;EACb;EAGA,IAAItC,QAAQ,CAACsC,IAAI,CAAC,EAAE;IAElB,MAAMN,UAAU,GAAG,IAAIJ,UAAU,CAACU,IAAI,CAAC;IACvC,OAAON,UAAU,CAACK,MAAM;EAC1B;EAGA,IAAItC,WAAW,CAACwC,MAAM,CAACD,IAAI,CAAC,EAAE;IAC5B,OAAOA,IAAI,CAACD,MAAM;EACpB;EAEA,IAAI,OAAOC,IAAI,KAAK,QAAQ,EAAE;IAC5B,MAAME,IAAI,GAAGF,IAAI
;IACjB,MAAMG,UAAU,GAAG,IAAIC,WAAW,CAAC,CAAC,CAACC,MAAM,CAACH,IAAI,CAAC;IACjD,OAAOC,UAAU,CAACJ,MAAM;EAC1B;EAIA,IAAIC,IAAI,IAAI,OAAOA,IAAI,KAAK,QAAQ,IAAIA,IAAI,CAACM,cAAc,EAAE;IAE3D,OAAON,IAAI,CAACM,cAAc,CAAC,CAAC;EAC9B;EAEA,MAAM,IAAIzB,KAAK,CAAE,iBAAgB0B,IAAI,CAACC,SAAS,CAACR,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,CAACS,KAAK,CAAC,EAAE,CAAE,GAAE,CAAC;AAC9E"}
1
+ {"version":3,"file":"stream-utils.node.js","names":["zlib","isArrayBuffer","x","ArrayBuffer","isBuffer","Buffer","decompressReadStream","readStream","headers","get","pipe","createBrotliDecompress","createGunzip","createDeflate","concatenateReadStream","arrayBufferChunks","Promise","resolve","reject","on","error","read","chunk","Error","push","toArrayBuffer","arrayBuffer","concatenateArrayBuffers","sources","sourceArrays","map","source2","Uint8Array","byteLength","reduce","length","typedArray","result","offset","sourceArray","set","buffer","data","isView","text","uint8Array","TextEncoder","encode","_toArrayBuffer","JSON","stringify","slice"],"sources":["../../src/filesystems/stream-utils.node.ts"],"sourcesContent":["// loaders.gl, MIT license\n// Copyright (c) vis.gl contributors\n\nimport zlib from 'zlib';\nimport {Readable} from 'stream';\n\nconst isArrayBuffer = (x) => x && x instanceof ArrayBuffer;\nconst isBuffer = (x) => x && x instanceof Buffer;\n\n/**\n *\n */\nexport function decompressReadStream(readStream: Readable, headers?: Headers) {\n switch (headers?.get('content-encoding')) {\n case 'br':\n return readStream.pipe(zlib.createBrotliDecompress());\n case 'gzip':\n return readStream.pipe(zlib.createGunzip());\n case 'deflate':\n return readStream.pipe(zlib.createDeflate());\n default:\n // No compression or an unknown one, just return it as is\n return readStream;\n }\n}\n\n/**\n *\n * @param readStream\n * @returns\n */\nexport async function concatenateReadStream(readStream): Promise<ArrayBuffer> {\n const arrayBufferChunks: ArrayBuffer[] = [];\n\n return await new Promise((resolve, reject) => {\n readStream.on('error', (error) => reject(error));\n\n // Once the readable callback has been added, stream switches to \"flowing mode\"\n // In Node 10 (but not 12 and 14) this causes `data` and `end` to never be called unless we read data here\n readStream.on('readable', () => readStream.read());\n\n readStream.on('data', (chunk) => {\n if (typeof chunk === 'string') {\n reject(new Error('Read stream not binary'));\n }\n arrayBufferChunks.push(toArrayBuffer(chunk));\n });\n\n readStream.on('end', () => {\n const arrayBuffer = concatenateArrayBuffers(arrayBufferChunks);\n resolve(arrayBuffer);\n });\n });\n}\n\n/**\n * Concatenate a sequence of ArrayBuffers\n * @return A concatenated ArrayBuffer\n * @note duplicates loader-utils since polyfills should be independent\n */\nexport function concatenateArrayBuffers(sources: (ArrayBuffer | Uint8Array)[]): ArrayBuffer {\n // Make sure all inputs are wrapped in typed arrays\n const sourceArrays = sources.map((source2) =>\n source2 instanceof ArrayBuffer ? 
new Uint8Array(source2) : source2\n );\n\n // Get length of all inputs\n const byteLength = sourceArrays.reduce((length, typedArray) => length + typedArray.byteLength, 0);\n\n // Allocate array with space for all inputs\n const result = new Uint8Array(byteLength);\n\n // Copy the subarrays\n let offset = 0;\n for (const sourceArray of sourceArrays) {\n result.set(sourceArray, offset);\n offset += sourceArray.byteLength;\n }\n\n // We work with ArrayBuffers, discard the typed array wrapper\n return result.buffer;\n}\n\n/**\n * @param data\n * @todo Duplicate of core\n */\nexport function toArrayBuffer(data: unknown): ArrayBuffer {\n if (isArrayBuffer(data)) {\n return data as ArrayBuffer;\n }\n\n // TODO - per docs we should just be able to call buffer.buffer, but there are issues\n if (isBuffer(data)) {\n // @ts-expect-error\n const typedArray = new Uint8Array(data);\n return typedArray.buffer;\n }\n\n // Careful - Node Buffers will look like ArrayBuffers (keep after isBuffer)\n if (ArrayBuffer.isView(data)) {\n return data.buffer;\n }\n\n if (typeof data === 'string') {\n const text = data;\n const uint8Array = new TextEncoder().encode(text);\n return uint8Array.buffer;\n }\n\n // HACK to support Blob polyfill\n // @ts-expect-error\n if (data && typeof data === 'object' && data._toArrayBuffer) {\n // @ts-expect-error\n return data._toArrayBuffer();\n }\n\n throw new Error(`toArrayBuffer(${JSON.stringify(data, null, 2).slice(10)})`);\n}\n"],"mappings":"AAGA,OAAOA,IAAI,MAAM,MAAM;AAGvB,MAAMC,aAAa,GAAIC,CAAC,IAAKA,CAAC,IAAIA,CAAC,YAAYC,WAAW;AAC1D,MAAMC,QAAQ,GAAIF,CAAC,IAAKA,CAAC,IAAIA,CAAC,YAAYG,MAAM;AAKhD,OAAO,SAASC,oBAAoBA,CAACC,UAAoB,EAAEC,OAAiB,EAAE;EAC5E,QAAQA,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEC,GAAG,CAAC,kBAAkB,CAAC;IACtC,KAAK,IAAI;MACP,OAAOF,UAAU,CAACG,IAAI,CAACV,IAAI,CAACW,sBAAsB,CAAC,CAAC,CAAC;IACvD,KAAK,MAAM;MACT,OAAOJ,UAAU,CAACG,IAAI,CAACV,IAAI,CAACY,YAAY,CAAC,CAAC,CAAC;IAC7C,KAAK,SAAS;MACZ,OAAOL,UAAU,CAACG,IAAI,CAACV,IAAI,CAACa,aAAa,CAAC,CAAC,CAAC;IAC9C;MAEE,OAAON,UAAU;EACrB;AACF;AAOA,OAAO,eAAeO,qBAAqBA,CAACP,UAAU,EAAwB;EAC5E,MAAMQ,iBAAgC,GAAG,EAAE;EAE3C,OAAO,MAAM,IAAIC,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IAC5CX,UAAU,CAACY,EAAE,CAAC,OAAO,EAAGC,KAAK,IAAKF,MAAM,CAACE,KAAK,CAAC,CAAC;IAIhDb,UAAU,CAACY,EAAE,CAAC,UAAU,EAAE,MAAMZ,UAAU,CAACc,IAAI,CAAC,CAAC,CAAC;IAElDd,UAAU,CAACY,EAAE,CAAC,MAAM,EAAGG,KAAK,IAAK;MAC/B,IAAI,OAAOA,KAAK,KAAK,QAAQ,EAAE;QAC7BJ,MAAM,CAAC,IAAIK,KAAK,CAAC,wBAAwB,CAAC,CAAC;MAC7C;MACAR,iBAAiB,CAACS,IAAI,CAACC,aAAa,CAACH,KAAK,CAAC,CAAC;IAC9C,CAAC,CAAC;IAEFf,UAAU,CAACY,EAAE,CAAC,KAAK,EAAE,MAAM;MACzB,MAAMO,WAAW,GAAGC,uBAAuB,CAACZ,iBAAiB,CAAC;MAC9DE,OAAO,CAACS,WAAW,CAAC;IACtB,CAAC,CAAC;EACJ,CAAC,CAAC;AACJ;AAOA,OAAO,SAASC,uBAAuBA,CAACC,OAAqC,EAAe;EAE1F,MAAMC,YAAY,GAAGD,OAAO,CAACE,GAAG,CAAEC,OAAO,IACvCA,OAAO,YAAY5B,WAAW,GAAG,IAAI6B,UAAU,CAACD,OAAO,CAAC,GAAGA,OAC7D,CAAC;EAGD,MAAME,UAAU,GAAGJ,YAAY,CAACK,MAAM,CAAC,CAACC,MAAM,EAAEC,UAAU,KAAKD,MAAM,GAAGC,UAAU,CAACH,UAAU,EAAE,CAAC,CAAC;EAGjG,MAAMI,MAAM,GAAG,IAAIL,UAAU,CAACC,UAAU,CAAC;EAGzC,IAAIK,MAAM,GAAG,CAAC;EACd,KAAK,MAAMC,WAAW,IAAIV,YAAY,EAAE;IACtCQ,MAAM,CAACG,GAAG,CAACD,WAAW,EAAED,MAAM,CAAC;IAC/BA,MAAM,IAAIC,WAAW,CAACN,UAAU;EAClC;EAGA,OAAOI,MAAM,CAACI,MAAM;AACtB;AAMA,OAAO,SAAShB,aAAaA,CAACiB,IAAa,EAAe;EACxD,IAAIzC,aAAa,CAACyC,IAAI,CAAC,EAAE;IACvB,OAAOA,IAAI;EACb;EAGA,IAAItC,QAAQ,CAACsC,IAAI,CAAC,EAAE;IAElB,MAAMN,UAAU,GAAG,IAAIJ,UAAU,CAACU,IAAI,CAAC;IACvC,OAAON,UAAU,CAACK,MAAM;EAC1B;EAGA,IAAItC,WAAW,CAACwC,MAAM,CAACD,IAAI,CAAC,EAAE;IAC5B,OAAOA,IAAI,CAACD,MAAM;EACpB;EAEA,IAAI,OAAOC,IAAI,KAAK,QAAQ,EAAE;IAC5B,MAAME,IAAI,GAAGF,IAAI
;IACjB,MAAMG,UAAU,GAAG,IAAIC,WAAW,CAAC,CAAC,CAACC,MAAM,CAACH,IAAI,CAAC;IACjD,OAAOC,UAAU,CAACJ,MAAM;EAC1B;EAIA,IAAIC,IAAI,IAAI,OAAOA,IAAI,KAAK,QAAQ,IAAIA,IAAI,CAACM,cAAc,EAAE;IAE3D,OAAON,IAAI,CAACM,cAAc,CAAC,CAAC;EAC9B;EAEA,MAAM,IAAIzB,KAAK,CAAE,iBAAgB0B,IAAI,CAACC,SAAS,CAACR,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,CAACS,KAAK,CAAC,EAAE,CAAE,GAAE,CAAC;AAC9E"}
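The updated stream-utils.node.ts source embedded in the map above exposes decompressReadStream and concatenateReadStream. Below is a minimal TypeScript sketch of how the two helpers compose, relying only on the signatures shown in the embedded source; the file path, the deep import specifier and the synthetic Headers value are illustrative assumptions, not confirmed package exports.

// Sketch only: reads a possibly gzip-encoded file into an ArrayBuffer using the
// helpers whose sources are embedded in the map above. The file path, the deep
// import specifier and the synthetic Headers value are hypothetical.
import {createReadStream} from 'fs';
import {
  decompressReadStream,
  concatenateReadStream
} from '@loaders.gl/polyfills/dist/filesystems/stream-utils.node'; // assumed path

async function readGzippedFile(path: string): Promise<ArrayBuffer> {
  const rawStream = createReadStream(path);
  // decompressReadStream switches on the 'content-encoding' header and pipes
  // the stream through the matching zlib transform (br, gzip or deflate).
  const headers = new Headers({'content-encoding': 'gzip'});
  const decompressed = decompressReadStream(rawStream, headers);
  // concatenateReadStream drains the stream and resolves to a single ArrayBuffer.
  return await concatenateReadStream(decompressed);
}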
@@ -1 +1 @@
1
- {"version":3,"file":"parse-image-node.d.ts","sourceRoot":"","sources":["../../src/images/parse-image-node.ts"],"names":[],"mappings":"AAIA,2EAA2E;AAC3E,eAAO,MAAM,mBAAmB,UAA2C,CAAC;AAG5E,KAAK,OAAO,GAAG;IACb,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,IAAI,EAAE,UAAU,CAAC;IACjB,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,EAAE,CAAC;CAClB,CAAC;AAEF,wBAAsB,cAAc,CAAC,WAAW,EAAE,WAAW,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAQjG"}
1
+ {"version":3,"file":"parse-image-node.d.ts","sourceRoot":"","sources":["../../src/images/parse-image-node.ts"],"names":[],"mappings":"AAKA,2EAA2E;AAC3E,eAAO,MAAM,mBAAmB,UAA2C,CAAC;AAG5E,KAAK,OAAO,GAAG;IACb,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,IAAI,EAAE,UAAU,CAAC;IACjB,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,EAAE,CAAC;CAClB,CAAC;AAEF,wBAAsB,cAAc,CAAC,WAAW,EAAE,WAAW,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAQjG"}
@@ -1 +1 @@
1
- {"version":3,"file":"parse-image-node.js","names":["getPixels","NODE_FORMAT_SUPPORT","parseImageNode","arrayBuffer","mimeType","Error","buffer","Buffer","from","ndarray","getPixelsAsync","Promise","resolve","err","shape","layers","length","shift","data","Uint8Array","width","height","components"],"sources":["../../src/images/parse-image-node.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport getPixels from 'get-pixels';\n\n/** Declares which image format mime types this loader polyfill supports */\nexport const NODE_FORMAT_SUPPORT = ['image/png', 'image/jpeg', 'image/gif'];\n\n// Note: These types are also defined in @loaders.gl/images and need to be kept in sync\ntype NDArray = {\n shape: number[];\n data: Uint8Array;\n width: number;\n height: number;\n components: number;\n layers: number[];\n};\n\nexport async function parseImageNode(arrayBuffer: ArrayBuffer, mimeType: string): Promise<NDArray> {\n if (!mimeType) {\n throw new Error('MIMEType is required to parse image under Node.js');\n }\n\n const buffer = arrayBuffer instanceof Buffer ? arrayBuffer : Buffer.from(arrayBuffer);\n const ndarray = await getPixelsAsync(buffer, mimeType);\n return ndarray;\n}\n\n// TODO - check if getPixels callback is asynchronous if provided with buffer input\n// if not, parseImage can be a sync function\nfunction getPixelsAsync(buffer: Buffer, mimeType: string): Promise<NDArray> {\n return new Promise<NDArray>((resolve) =>\n getPixels(buffer, mimeType, (err, ndarray) => {\n if (err) {\n throw err;\n }\n\n const shape = [...ndarray.shape];\n const layers = ndarray.shape.length === 4 ? ndarray.shape.shift() : 1;\n const data = ndarray.data instanceof Buffer ? new Uint8Array(ndarray.data) : ndarray.data;\n\n // extract width/height etc\n resolve({\n shape,\n data,\n width: ndarray.shape[0],\n height: ndarray.shape[1],\n components: ndarray.shape[2],\n // TODO - error\n layers: layers ? [layers] : []\n });\n })\n );\n}\n"],"mappings":"AAEA,OAAOA,SAAS,MAAM,YAAY;AAGlC,OAAO,MAAMC,mBAAmB,GAAG,CAAC,WAAW,EAAE,YAAY,EAAE,WAAW,CAAC;AAY3E,OAAO,eAAeC,cAAcA,CAACC,WAAwB,EAAEC,QAAgB,EAAoB;EACjG,IAAI,CAACA,QAAQ,EAAE;IACb,MAAM,IAAIC,KAAK,CAAC,mDAAmD,CAAC;EACtE;EAEA,MAAMC,MAAM,GAAGH,WAAW,YAAYI,MAAM,GAAGJ,WAAW,GAAGI,MAAM,CAACC,IAAI,CAACL,WAAW,CAAC;EACrF,MAAMM,OAAO,GAAG,MAAMC,cAAc,CAACJ,MAAM,EAAEF,QAAQ,CAAC;EACtD,OAAOK,OAAO;AAChB;AAIA,SAASC,cAAcA,CAACJ,MAAc,EAAEF,QAAgB,EAAoB;EAC1E,OAAO,IAAIO,OAAO,CAAWC,OAAO,IAClCZ,SAAS,CAACM,MAAM,EAAEF,QAAQ,EAAE,CAACS,GAAG,EAAEJ,OAAO,KAAK;IAC5C,IAAII,GAAG,EAAE;MACP,MAAMA,GAAG;IACX;IAEA,MAAMC,KAAK,GAAG,CAAC,GAAGL,OAAO,CAACK,KAAK,CAAC;IAChC,MAAMC,MAAM,GAAGN,OAAO,CAACK,KAAK,CAACE,MAAM,KAAK,CAAC,GAAGP,OAAO,CAACK,KAAK,CAACG,KAAK,CAAC,CAAC,GAAG,CAAC;IACrE,MAAMC,IAAI,GAAGT,OAAO,CAACS,IAAI,YAAYX,MAAM,GAAG,IAAIY,UAAU,CAACV,OAAO,CAACS,IAAI,CAAC,GAAGT,OAAO,CAACS,IAAI;IAGzFN,OAAO,CAAC;MACNE,KAAK;MACLI,IAAI;MACJE,KAAK,EAAEX,OAAO,CAACK,KAAK,CAAC,CAAC,CAAC;MACvBO,MAAM,EAAEZ,OAAO,CAACK,KAAK,CAAC,CAAC,CAAC;MACxBQ,UAAU,EAAEb,OAAO,CAACK,KAAK,CAAC,CAAC,CAAC;MAE5BC,MAAM,EAAEA,MAAM,GAAG,CAACA,MAAM,CAAC,GAAG;IAC9B,CAAC,CAAC;EACJ,CAAC,CACH,CAAC;AACH"}
1
+ {"version":3,"file":"parse-image-node.js","names":["getPixels","NODE_FORMAT_SUPPORT","parseImageNode","arrayBuffer","mimeType","Error","buffer","Buffer","from","ndarray","getPixelsAsync","Promise","resolve","err","shape","layers","length","shift","data","Uint8Array","width","height","components"],"sources":["../../src/images/parse-image-node.ts"],"sourcesContent":["// loaders.gl, MIT license\n// Copyright (c) vis.gl contributors\n\nimport getPixels from 'get-pixels';\n\n/** Declares which image format mime types this loader polyfill supports */\nexport const NODE_FORMAT_SUPPORT = ['image/png', 'image/jpeg', 'image/gif'];\n\n// Note: These types are also defined in @loaders.gl/images and need to be kept in sync\ntype NDArray = {\n shape: number[];\n data: Uint8Array;\n width: number;\n height: number;\n components: number;\n layers: number[];\n};\n\nexport async function parseImageNode(arrayBuffer: ArrayBuffer, mimeType: string): Promise<NDArray> {\n if (!mimeType) {\n throw new Error('MIMEType is required to parse image under Node.js');\n }\n\n const buffer = arrayBuffer instanceof Buffer ? arrayBuffer : Buffer.from(arrayBuffer);\n const ndarray = await getPixelsAsync(buffer, mimeType);\n return ndarray;\n}\n\n// TODO - check if getPixels callback is asynchronous if provided with buffer input\n// if not, parseImage can be a sync function\nfunction getPixelsAsync(buffer: Buffer, mimeType: string): Promise<NDArray> {\n return new Promise<NDArray>((resolve) =>\n getPixels(buffer, mimeType, (err, ndarray) => {\n if (err) {\n throw err;\n }\n\n const shape = [...ndarray.shape];\n const layers = ndarray.shape.length === 4 ? ndarray.shape.shift() : 1;\n const data = ndarray.data instanceof Buffer ? new Uint8Array(ndarray.data) : ndarray.data;\n\n // extract width/height etc\n resolve({\n shape,\n data,\n width: ndarray.shape[0],\n height: ndarray.shape[1],\n components: ndarray.shape[2],\n // TODO - error\n layers: layers ? [layers] : []\n });\n })\n );\n}\n"],"mappings":"AAGA,OAAOA,SAAS,MAAM,YAAY;AAGlC,OAAO,MAAMC,mBAAmB,GAAG,CAAC,WAAW,EAAE,YAAY,EAAE,WAAW,CAAC;AAY3E,OAAO,eAAeC,cAAcA,CAACC,WAAwB,EAAEC,QAAgB,EAAoB;EACjG,IAAI,CAACA,QAAQ,EAAE;IACb,MAAM,IAAIC,KAAK,CAAC,mDAAmD,CAAC;EACtE;EAEA,MAAMC,MAAM,GAAGH,WAAW,YAAYI,MAAM,GAAGJ,WAAW,GAAGI,MAAM,CAACC,IAAI,CAACL,WAAW,CAAC;EACrF,MAAMM,OAAO,GAAG,MAAMC,cAAc,CAACJ,MAAM,EAAEF,QAAQ,CAAC;EACtD,OAAOK,OAAO;AAChB;AAIA,SAASC,cAAcA,CAACJ,MAAc,EAAEF,QAAgB,EAAoB;EAC1E,OAAO,IAAIO,OAAO,CAAWC,OAAO,IAClCZ,SAAS,CAACM,MAAM,EAAEF,QAAQ,EAAE,CAACS,GAAG,EAAEJ,OAAO,KAAK;IAC5C,IAAII,GAAG,EAAE;MACP,MAAMA,GAAG;IACX;IAEA,MAAMC,KAAK,GAAG,CAAC,GAAGL,OAAO,CAACK,KAAK,CAAC;IAChC,MAAMC,MAAM,GAAGN,OAAO,CAACK,KAAK,CAACE,MAAM,KAAK,CAAC,GAAGP,OAAO,CAACK,KAAK,CAACG,KAAK,CAAC,CAAC,GAAG,CAAC;IACrE,MAAMC,IAAI,GAAGT,OAAO,CAACS,IAAI,YAAYX,MAAM,GAAG,IAAIY,UAAU,CAACV,OAAO,CAACS,IAAI,CAAC,GAAGT,OAAO,CAACS,IAAI;IAGzFN,OAAO,CAAC;MACNE,KAAK;MACLI,IAAI;MACJE,KAAK,EAAEX,OAAO,CAACK,KAAK,CAAC,CAAC,CAAC;MACvBO,MAAM,EAAEZ,OAAO,CAACK,KAAK,CAAC,CAAC,CAAC;MACxBQ,UAAU,EAAEb,OAAO,CAACK,KAAK,CAAC,CAAC,CAAC;MAE5BC,MAAM,EAAEA,MAAM,GAAG,CAACA,MAAM,CAAC,GAAG;IAC9B,CAAC,CAAC;EACJ,CAAC,CACH,CAAC;AACH"}
@@ -1 +1 @@
1
- {"version":3,"file":"parse-image.node.d.ts","sourceRoot":"","sources":["../../src/images/parse-image.node.ts"],"names":[],"mappings":"AAIA,2EAA2E;AAC3E,eAAO,MAAM,mBAAmB,UAA2C,CAAC;AAG5E,KAAK,OAAO,GAAG;IACb,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,IAAI,EAAE,UAAU,CAAC;IACjB,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,EAAE,CAAC;CAClB,CAAC;AAEF,wBAAsB,cAAc,CAAC,WAAW,EAAE,WAAW,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAQjG"}
1
+ {"version":3,"file":"parse-image.node.d.ts","sourceRoot":"","sources":["../../src/images/parse-image.node.ts"],"names":[],"mappings":"AAKA,2EAA2E;AAC3E,eAAO,MAAM,mBAAmB,UAA2C,CAAC;AAG5E,KAAK,OAAO,GAAG;IACb,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,IAAI,EAAE,UAAU,CAAC;IACjB,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,EAAE,CAAC;CAClB,CAAC;AAEF,wBAAsB,cAAc,CAAC,WAAW,EAAE,WAAW,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAQjG"}
@@ -1 +1 @@
1
- {"version":3,"file":"parse-image.node.js","names":["getPixels","NODE_FORMAT_SUPPORT","parseImageNode","arrayBuffer","mimeType","Error","buffer","Buffer","from","ndarray","getPixelsAsync","Promise","resolve","err","shape","layers","length","shift","data","Uint8Array","width","height","components"],"sources":["../../src/images/parse-image.node.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport getPixels from 'get-pixels';\n\n/** Declares which image format mime types this loader polyfill supports */\nexport const NODE_FORMAT_SUPPORT = ['image/png', 'image/jpeg', 'image/gif'];\n\n// Note: These types are also defined in @loaders.gl/images and need to be kept in sync\ntype NDArray = {\n shape: number[];\n data: Uint8Array;\n width: number;\n height: number;\n components: number;\n layers: number[];\n};\n\nexport async function parseImageNode(arrayBuffer: ArrayBuffer, mimeType: string): Promise<NDArray> {\n if (!mimeType) {\n throw new Error('MIMEType is required to parse image under Node.js');\n }\n\n const buffer = arrayBuffer instanceof Buffer ? arrayBuffer : Buffer.from(arrayBuffer);\n const ndarray = await getPixelsAsync(buffer, mimeType);\n return ndarray;\n}\n\n// TODO - check if getPixels callback is asynchronous if provided with buffer input\n// if not, parseImage can be a sync function\nfunction getPixelsAsync(buffer: Buffer, mimeType: string): Promise<NDArray> {\n return new Promise<NDArray>((resolve) =>\n getPixels(buffer, mimeType, (err, ndarray) => {\n if (err) {\n throw err;\n }\n\n const shape = [...ndarray.shape];\n const layers = ndarray.shape.length === 4 ? ndarray.shape.shift() : 1;\n const data = ndarray.data instanceof Buffer ? new Uint8Array(ndarray.data) : ndarray.data;\n\n // extract width/height etc\n resolve({\n shape,\n data,\n width: ndarray.shape[0],\n height: ndarray.shape[1],\n components: ndarray.shape[2],\n // TODO - error\n layers: layers ? [layers] : []\n });\n })\n );\n}\n"],"mappings":"AAEA,OAAOA,SAAS,MAAM,YAAY;AAGlC,OAAO,MAAMC,mBAAmB,GAAG,CAAC,WAAW,EAAE,YAAY,EAAE,WAAW,CAAC;AAY3E,OAAO,eAAeC,cAAcA,CAACC,WAAwB,EAAEC,QAAgB,EAAoB;EACjG,IAAI,CAACA,QAAQ,EAAE;IACb,MAAM,IAAIC,KAAK,CAAC,mDAAmD,CAAC;EACtE;EAEA,MAAMC,MAAM,GAAGH,WAAW,YAAYI,MAAM,GAAGJ,WAAW,GAAGI,MAAM,CAACC,IAAI,CAACL,WAAW,CAAC;EACrF,MAAMM,OAAO,GAAG,MAAMC,cAAc,CAACJ,MAAM,EAAEF,QAAQ,CAAC;EACtD,OAAOK,OAAO;AAChB;AAIA,SAASC,cAAcA,CAACJ,MAAc,EAAEF,QAAgB,EAAoB;EAC1E,OAAO,IAAIO,OAAO,CAAWC,OAAO,IAClCZ,SAAS,CAACM,MAAM,EAAEF,QAAQ,EAAE,CAACS,GAAG,EAAEJ,OAAO,KAAK;IAC5C,IAAII,GAAG,EAAE;MACP,MAAMA,GAAG;IACX;IAEA,MAAMC,KAAK,GAAG,CAAC,GAAGL,OAAO,CAACK,KAAK,CAAC;IAChC,MAAMC,MAAM,GAAGN,OAAO,CAACK,KAAK,CAACE,MAAM,KAAK,CAAC,GAAGP,OAAO,CAACK,KAAK,CAACG,KAAK,CAAC,CAAC,GAAG,CAAC;IACrE,MAAMC,IAAI,GAAGT,OAAO,CAACS,IAAI,YAAYX,MAAM,GAAG,IAAIY,UAAU,CAACV,OAAO,CAACS,IAAI,CAAC,GAAGT,OAAO,CAACS,IAAI;IAGzFN,OAAO,CAAC;MACNE,KAAK;MACLI,IAAI;MACJE,KAAK,EAAEX,OAAO,CAACK,KAAK,CAAC,CAAC,CAAC;MACvBO,MAAM,EAAEZ,OAAO,CAACK,KAAK,CAAC,CAAC,CAAC;MACxBQ,UAAU,EAAEb,OAAO,CAACK,KAAK,CAAC,CAAC,CAAC;MAE5BC,MAAM,EAAEA,MAAM,GAAG,CAACA,MAAM,CAAC,GAAG;IAC9B,CAAC,CAAC;EACJ,CAAC,CACH,CAAC;AACH"}
1
+ {"version":3,"file":"parse-image.node.js","names":["getPixels","NODE_FORMAT_SUPPORT","parseImageNode","arrayBuffer","mimeType","Error","buffer","Buffer","from","ndarray","getPixelsAsync","Promise","resolve","err","shape","layers","length","shift","data","Uint8Array","width","height","components"],"sources":["../../src/images/parse-image.node.ts"],"sourcesContent":["// loaders.gl, MIT license\n// Copyright (c) vis.gl contributors\n\nimport getPixels from 'get-pixels';\n\n/** Declares which image format mime types this loader polyfill supports */\nexport const NODE_FORMAT_SUPPORT = ['image/png', 'image/jpeg', 'image/gif'];\n\n// Note: These types are also defined in @loaders.gl/images and need to be kept in sync\ntype NDArray = {\n shape: number[];\n data: Uint8Array;\n width: number;\n height: number;\n components: number;\n layers: number[];\n};\n\nexport async function parseImageNode(arrayBuffer: ArrayBuffer, mimeType: string): Promise<NDArray> {\n if (!mimeType) {\n throw new Error('MIMEType is required to parse image under Node.js');\n }\n\n const buffer = arrayBuffer instanceof Buffer ? arrayBuffer : Buffer.from(arrayBuffer);\n const ndarray = await getPixelsAsync(buffer, mimeType);\n return ndarray;\n}\n\n// TODO - check if getPixels callback is asynchronous if provided with buffer input\n// if not, parseImage can be a sync function\nfunction getPixelsAsync(buffer: Buffer, mimeType: string): Promise<NDArray> {\n return new Promise<NDArray>((resolve) =>\n getPixels(buffer, mimeType, (err, ndarray) => {\n if (err) {\n throw err;\n }\n\n const shape = [...ndarray.shape];\n const layers = ndarray.shape.length === 4 ? ndarray.shape.shift() : 1;\n const data = ndarray.data instanceof Buffer ? new Uint8Array(ndarray.data) : ndarray.data;\n\n // extract width/height etc\n resolve({\n shape,\n data,\n width: ndarray.shape[0],\n height: ndarray.shape[1],\n components: ndarray.shape[2],\n // TODO - error\n layers: layers ? [layers] : []\n });\n })\n );\n}\n"],"mappings":"AAGA,OAAOA,SAAS,MAAM,YAAY;AAGlC,OAAO,MAAMC,mBAAmB,GAAG,CAAC,WAAW,EAAE,YAAY,EAAE,WAAW,CAAC;AAY3E,OAAO,eAAeC,cAAcA,CAACC,WAAwB,EAAEC,QAAgB,EAAoB;EACjG,IAAI,CAACA,QAAQ,EAAE;IACb,MAAM,IAAIC,KAAK,CAAC,mDAAmD,CAAC;EACtE;EAEA,MAAMC,MAAM,GAAGH,WAAW,YAAYI,MAAM,GAAGJ,WAAW,GAAGI,MAAM,CAACC,IAAI,CAACL,WAAW,CAAC;EACrF,MAAMM,OAAO,GAAG,MAAMC,cAAc,CAACJ,MAAM,EAAEF,QAAQ,CAAC;EACtD,OAAOK,OAAO;AAChB;AAIA,SAASC,cAAcA,CAACJ,MAAc,EAAEF,QAAgB,EAAoB;EAC1E,OAAO,IAAIO,OAAO,CAAWC,OAAO,IAClCZ,SAAS,CAACM,MAAM,EAAEF,QAAQ,EAAE,CAACS,GAAG,EAAEJ,OAAO,KAAK;IAC5C,IAAII,GAAG,EAAE;MACP,MAAMA,GAAG;IACX;IAEA,MAAMC,KAAK,GAAG,CAAC,GAAGL,OAAO,CAACK,KAAK,CAAC;IAChC,MAAMC,MAAM,GAAGN,OAAO,CAACK,KAAK,CAACE,MAAM,KAAK,CAAC,GAAGP,OAAO,CAACK,KAAK,CAACG,KAAK,CAAC,CAAC,GAAG,CAAC;IACrE,MAAMC,IAAI,GAAGT,OAAO,CAACS,IAAI,YAAYX,MAAM,GAAG,IAAIY,UAAU,CAACV,OAAO,CAACS,IAAI,CAAC,GAAGT,OAAO,CAACS,IAAI;IAGzFN,OAAO,CAAC;MACNE,KAAK;MACLI,IAAI;MACJE,KAAK,EAAEX,OAAO,CAACK,KAAK,CAAC,CAAC,CAAC;MACvBO,MAAM,EAAEZ,OAAO,CAACK,KAAK,CAAC,CAAC,CAAC;MACxBQ,UAAU,EAAEb,OAAO,CAACK,KAAK,CAAC,CAAC,CAAC;MAE5BC,MAAM,EAAEA,MAAM,GAAG,CAACA,MAAM,CAAC,GAAG;IAC9B,CAAC,CAAC;EACJ,CAAC,CACH,CAAC;AACH"}
package/dist/index.cjs CHANGED
@@ -656,7 +656,7 @@ var require_stream_readable = __commonJS({
656
656
  if (state.pipesCount === 1)
657
657
  write(state.pipes, 0, null);
658
658
  else
659
- forEach(state.pipes, write);
659
+ forEach2(state.pipes, write);
660
660
  src.emit("data", chunk);
661
661
  if (state.awaitDrain > 0)
662
662
  return;
@@ -813,7 +813,7 @@ var require_stream_readable = __commonJS({
813
813
  }
814
814
  }
815
815
  var events = ["error", "close", "destroy", "pause", "resume"];
816
- forEach(events, function(ev) {
816
+ forEach2(events, function(ev) {
817
817
  stream2.on(ev, self2.emit.bind(self2, ev));
818
818
  });
819
819
  self2._read = function(n) {
@@ -888,7 +888,7 @@ var require_stream_readable = __commonJS({
888
888
  });
889
889
  }
890
890
  }
891
- function forEach(xs, f) {
891
+ function forEach2(xs, f) {
892
892
  for (var i = 0, l = xs.length; i < l; i++) {
893
893
  f(xs[i], i);
894
894
  }
@@ -918,7 +918,7 @@ var require_stream_duplex = __commonJS({
918
918
  var Readable5 = require_stream_readable();
919
919
  var Writable = require_stream_writable();
920
920
  util.inherits(Duplex, Readable5);
921
- forEach(objectKeys(Writable.prototype), function(method) {
921
+ forEach2(objectKeys(Writable.prototype), function(method) {
922
922
  if (!Duplex.prototype[method])
923
923
  Duplex.prototype[method] = Writable.prototype[method];
924
924
  });
@@ -941,7 +941,7 @@ var require_stream_duplex = __commonJS({
941
941
  return;
942
942
  process.nextTick(this.end.bind(this));
943
943
  }
944
- function forEach(xs, f) {
944
+ function forEach2(xs, f) {
945
945
  for (var i = 0, l = xs.length; i < l; i++) {
946
946
  f(xs[i], i);
947
947
  }
@@ -1654,7 +1654,7 @@ var require_stream_duplex2 = __commonJS({
1654
1654
  var Readable5 = require_stream_readable2();
1655
1655
  var Writable = require_stream_writable2();
1656
1656
  util.inherits(Duplex, Readable5);
1657
- forEach(objectKeys(Writable.prototype), function(method) {
1657
+ forEach2(objectKeys(Writable.prototype), function(method) {
1658
1658
  if (!Duplex.prototype[method])
1659
1659
  Duplex.prototype[method] = Writable.prototype[method];
1660
1660
  });
@@ -1677,7 +1677,7 @@ var require_stream_duplex2 = __commonJS({
1677
1677
  return;
1678
1678
  process.nextTick(this.end.bind(this));
1679
1679
  }
1680
- function forEach(xs, f) {
1680
+ function forEach2(xs, f) {
1681
1681
  for (var i = 0, l = xs.length; i < l; i++) {
1682
1682
  f(xs[i], i);
1683
1683
  }
@@ -2221,7 +2221,7 @@ var require_stream_readable2 = __commonJS({
2221
2221
  }
2222
2222
  }
2223
2223
  var events = ["error", "close", "destroy", "pause", "resume"];
2224
- forEach(events, function(ev) {
2224
+ forEach2(events, function(ev) {
2225
2225
  stream2.on(ev, self2.emit.bind(self2, ev));
2226
2226
  });
2227
2227
  self2._read = function(n) {
@@ -2297,7 +2297,7 @@ var require_stream_readable2 = __commonJS({
2297
2297
  });
2298
2298
  }
2299
2299
  }
2300
- function forEach(xs, f) {
2300
+ function forEach2(xs, f) {
2301
2301
  for (var i = 0, l = xs.length; i < l; i++) {
2302
2302
  f(xs[i], i);
2303
2303
  }
@@ -22771,7 +22771,7 @@ var require_helpers = __commonJS({
22771
22771
  "../../node_modules/request/lib/helpers.js"(exports) {
22772
22772
  "use strict";
22773
22773
  var jsonSafeStringify = require_stringify();
22774
- var crypto = require("crypto");
22774
+ var crypto2 = require("crypto");
22775
22775
  var Buffer2 = require_safe_buffer().Buffer;
22776
22776
  var defer = typeof setImmediate === "undefined" ? process.nextTick : setImmediate;
22777
22777
  function paramsHaveRequestBody(params) {
@@ -22787,7 +22787,7 @@ var require_helpers = __commonJS({
22787
22787
  return ret;
22788
22788
  }
22789
22789
  function md5(str) {
22790
- return crypto.createHash("md5").update(str).digest("hex");
22790
+ return crypto2.createHash("md5").update(str).digest("hex");
22791
22791
  }
22792
22792
  function isReadStream(rs) {
22793
22793
  return rs.readable && rs.path && rs.mode;
@@ -22824,7 +22824,7 @@ var require_helpers = __commonJS({
22824
22824
  // ../../node_modules/aws-sign2/index.js
22825
22825
  var require_aws_sign2 = __commonJS({
22826
22826
  "../../node_modules/aws-sign2/index.js"(exports, module2) {
22827
- var crypto = require("crypto");
22827
+ var crypto2 = require("crypto");
22828
22828
  var parse = require("url").parse;
22829
22829
  var keys = [
22830
22830
  "acl",
@@ -22848,7 +22848,7 @@ var require_aws_sign2 = __commonJS({
22848
22848
  module2.exports = authorization;
22849
22849
  module2.exports.authorization = authorization;
22850
22850
  function hmacSha1(options) {
22851
- return crypto.createHmac("sha1", options.secret).update(options.message).digest("base64");
22851
+ return crypto2.createHmac("sha1", options.secret).update(options.message).digest("base64");
22852
22852
  }
22853
22853
  module2.exports.hmacSha1 = hmacSha1;
22854
22854
  function sign(options) {
@@ -23002,14 +23002,14 @@ var require_aws4 = __commonJS({
23002
23002
  var aws4 = exports;
23003
23003
  var url = require("url");
23004
23004
  var querystring = require("querystring");
23005
- var crypto = require("crypto");
23005
+ var crypto2 = require("crypto");
23006
23006
  var lru = require_lru();
23007
23007
  var credentialsCache = lru(1e3);
23008
23008
  function hmac(key, string, encoding) {
23009
- return crypto.createHmac("sha256", key).update(string, "utf8").digest(encoding);
23009
+ return crypto2.createHmac("sha256", key).update(string, "utf8").digest(encoding);
23010
23010
  }
23011
23011
  function hash(string, encoding) {
23012
- return crypto.createHash("sha256").update(string, "utf8").digest(encoding);
23012
+ return crypto2.createHash("sha256").update(string, "utf8").digest(encoding);
23013
23013
  }
23014
23014
  function encodeRfc3986(urlEncodedString) {
23015
23015
  return urlEncodedString.replace(/[!'()*]/g, function(c) {
@@ -28320,23 +28320,23 @@ var require_nacl_fast = __commonJS({
28320
28320
  randombytes = fn;
28321
28321
  };
28322
28322
  (function() {
28323
- var crypto = typeof self !== "undefined" ? self.crypto || self.msCrypto : null;
28324
- if (crypto && crypto.getRandomValues) {
28323
+ var crypto2 = typeof self !== "undefined" ? self.crypto || self.msCrypto : null;
28324
+ if (crypto2 && crypto2.getRandomValues) {
28325
28325
  var QUOTA = 65536;
28326
28326
  nacl.setPRNG(function(x, n) {
28327
28327
  var i, v = new Uint8Array(n);
28328
28328
  for (i = 0; i < n; i += QUOTA) {
28329
- crypto.getRandomValues(v.subarray(i, i + Math.min(n - i, QUOTA)));
28329
+ crypto2.getRandomValues(v.subarray(i, i + Math.min(n - i, QUOTA)));
28330
28330
  }
28331
28331
  for (i = 0; i < n; i++)
28332
28332
  x[i] = v[i];
28333
28333
  cleanup(v);
28334
28334
  });
28335
28335
  } else if (typeof require !== "undefined") {
28336
- crypto = require("crypto");
28337
- if (crypto && crypto.randomBytes) {
28336
+ crypto2 = require("crypto");
28337
+ if (crypto2 && crypto2.randomBytes) {
28338
28338
  nacl.setPRNG(function(x, n) {
28339
- var i, v = crypto.randomBytes(n);
28339
+ var i, v = crypto2.randomBytes(n);
28340
28340
  for (i = 0; i < n; i++)
28341
28341
  x[i] = v[i];
28342
28342
  cleanup(v);
@@ -28375,7 +28375,7 @@ var require_utils = __commonJS({
28375
28375
  var Buffer2 = require_safer().Buffer;
28376
28376
  var PrivateKey = require_private_key();
28377
28377
  var Key = require_key();
28378
- var crypto = require("crypto");
28378
+ var crypto2 = require("crypto");
28379
28379
  var algs = require_algs();
28380
28380
  var asn1 = require_lib();
28381
28381
  var ec = require_ec();
@@ -28459,7 +28459,7 @@ var require_utils = __commonJS({
28459
28459
  bufs.push(salt);
28460
28460
  D = Buffer2.concat(bufs);
28461
28461
  for (var j = 0; j < count; ++j)
28462
- D = crypto.createHash("md5").update(D).digest();
28462
+ D = crypto2.createHash("md5").update(D).digest();
28463
28463
  material = Buffer2.concat([material, D]);
28464
28464
  D_prev = D;
28465
28465
  }
@@ -28481,13 +28481,13 @@ var require_utils = __commonJS({
28481
28481
  return Buffer2.concat(ts).slice(0, size);
28482
28482
  function T(I) {
28483
28483
  hkey.writeUInt32BE(I, hkey.length - 4);
28484
- var hmac = crypto.createHmac(hashAlg, passphrase);
28484
+ var hmac = crypto2.createHmac(hashAlg, passphrase);
28485
28485
  hmac.update(hkey);
28486
28486
  var Ti = hmac.digest();
28487
28487
  var Uc = Ti;
28488
28488
  var c = 1;
28489
28489
  while (c++ < iterations) {
28490
- hmac = crypto.createHmac(hashAlg, passphrase);
28490
+ hmac = crypto2.createHmac(hashAlg, passphrase);
28491
28491
  hmac.update(Uc);
28492
28492
  Uc = hmac.digest();
28493
28493
  for (var x = 0; x < Ti.length; ++x)
@@ -28856,7 +28856,7 @@ var require_signature = __commonJS({
28856
28856
  var assert2 = require_assert();
28857
28857
  var Buffer2 = require_safer().Buffer;
28858
28858
  var algs = require_algs();
28859
- var crypto = require("crypto");
28859
+ var crypto2 = require("crypto");
28860
28860
  var errs = require_errors();
28861
28861
  var utils = require_utils();
28862
28862
  var asn1 = require_lib();
@@ -29234,7 +29234,7 @@ var require_sec = __commonJS({
29234
29234
  // ../../node_modules/ecc-jsbn/index.js
29235
29235
  var require_ecc_jsbn = __commonJS({
29236
29236
  "../../node_modules/ecc-jsbn/index.js"(exports) {
29237
- var crypto = require("crypto");
29237
+ var crypto2 = require("crypto");
29238
29238
  var BigInteger = require_jsbn().BigInteger;
29239
29239
  var ECPointFp = require_ec().ECPointFp;
29240
29240
  var Buffer2 = require_safer().Buffer;
@@ -29258,7 +29258,7 @@ var require_ecc_jsbn = __commonJS({
29258
29258
  }
29259
29259
  } else {
29260
29260
  var n1 = n.subtract(BigInteger.ONE);
29261
- var r = new BigInteger(crypto.randomBytes(n.bitLength()));
29261
+ var r = new BigInteger(crypto2.randomBytes(n.bitLength()));
29262
29262
  priv = r.mod(n1).add(BigInteger.ONE);
29263
29263
  this.P = c.getG().multiply(priv);
29264
29264
  }
@@ -29287,14 +29287,14 @@ var require_dhe = __commonJS({
29287
29287
  generateED25519
29288
29288
  };
29289
29289
  var assert2 = require_assert();
29290
- var crypto = require("crypto");
29290
+ var crypto2 = require("crypto");
29291
29291
  var Buffer2 = require_safer().Buffer;
29292
29292
  var algs = require_algs();
29293
29293
  var utils = require_utils();
29294
29294
  var nacl = require_nacl_fast();
29295
29295
  var Key = require_key();
29296
29296
  var PrivateKey = require_private_key();
29297
- var CRYPTO_HAVE_ECDH = crypto.createECDH !== void 0;
29297
+ var CRYPTO_HAVE_ECDH = crypto2.createECDH !== void 0;
29298
29298
  var ecdh = require_ecc_jsbn();
29299
29299
  var ec = require_ec();
29300
29300
  var jsbn = require_jsbn().BigInteger;
@@ -29308,7 +29308,7 @@ var require_dhe = __commonJS({
29308
29308
  if (!CRYPTO_HAVE_ECDH) {
29309
29309
  throw new Error("Due to bugs in the node 0.10 crypto API, node 0.12.x or later is required to use DH");
29310
29310
  }
29311
- this._dh = crypto.createDiffieHellman(
29311
+ this._dh = crypto2.createDiffieHellman(
29312
29312
  key.part.p.data,
29313
29313
  void 0,
29314
29314
  key.part.g.data,
@@ -29335,7 +29335,7 @@ var require_dhe = __commonJS({
29335
29335
  "nistp384": "secp384r1",
29336
29336
  "nistp521": "secp521r1"
29337
29337
  }[key.curve];
29338
- this._dh = crypto.createECDH(curve);
29338
+ this._dh = crypto2.createECDH(curve);
29339
29339
  if (typeof this._dh !== "object" || typeof this._dh.setPrivateKey !== "function") {
29340
29340
  CRYPTO_HAVE_ECDH = false;
29341
29341
  DiffieHellman.call(this, key);
@@ -29487,7 +29487,7 @@ var require_dhe = __commonJS({
29487
29487
  return this._key;
29488
29488
  } else {
29489
29489
  var n = this._ecParams.getN();
29490
- var r = new jsbn(crypto.randomBytes(n.bitLength()));
29490
+ var r = new jsbn(crypto2.randomBytes(n.bitLength()));
29491
29491
  var n1 = n.subtract(jsbn.ONE);
29492
29492
  priv = r.mod(n1).add(jsbn.ONE);
29493
29493
  pub = this._ecParams.getG().multiply(priv);
@@ -29593,7 +29593,7 @@ var require_dhe = __commonJS({
29593
29593
  "nistp384": "secp384r1",
29594
29594
  "nistp521": "secp521r1"
29595
29595
  }[curve];
29596
- var dh = crypto.createECDH(osCurve);
29596
+ var dh = crypto2.createECDH(osCurve);
29597
29597
  dh.generateKeys();
29598
29598
  parts.push({
29599
29599
  name: "curve",
@@ -29611,7 +29611,7 @@ var require_dhe = __commonJS({
29611
29611
  var ecParams = new X9ECParameters(curve);
29612
29612
  var n = ecParams.getN();
29613
29613
  var cByteLen = Math.ceil((n.bitLength() + 64) / 8);
29614
- var c = new jsbn(crypto.randomBytes(cByteLen));
29614
+ var c = new jsbn(crypto2.randomBytes(cByteLen));
29615
29615
  var n1 = n.subtract(jsbn.ONE);
29616
29616
  var priv = c.mod(n1).add(jsbn.ONE);
29617
29617
  var pub = ecParams.getG().multiply(priv);
@@ -31943,7 +31943,7 @@ var require_ssh_private = __commonJS({
31943
31943
  var Buffer2 = require_safer().Buffer;
31944
31944
  var algs = require_algs();
31945
31945
  var utils = require_utils();
31946
- var crypto = require("crypto");
31946
+ var crypto2 = require("crypto");
31947
31947
  var Key = require_key();
31948
31948
  var PrivateKey = require_private_key();
31949
31949
  var pem = require_pem();
@@ -32017,7 +32017,7 @@ var require_ssh_private = __commonJS({
32017
32017
  out = Buffer2.from(out);
32018
32018
  var ckey = out.slice(0, cinf.keySize);
32019
32019
  var iv = out.slice(cinf.keySize, cinf.keySize + cinf.blockSize);
32020
- var cipherStream = crypto.createDecipheriv(
32020
+ var cipherStream = crypto2.createDecipheriv(
32021
32021
  cinf.opensslName,
32022
32022
  ckey,
32023
32023
  iv
@@ -32082,7 +32082,7 @@ var require_ssh_private = __commonJS({
32082
32082
  var privBuf;
32083
32083
  if (PrivateKey.isPrivateKey(key)) {
32084
32084
  privBuf = new SSHBuffer({});
32085
- var checkInt = crypto.randomBytes(4).readUInt32BE(0);
32085
+ var checkInt = crypto2.randomBytes(4).readUInt32BE(0);
32086
32086
  privBuf.writeInt(checkInt);
32087
32087
  privBuf.writeInt(checkInt);
32088
32088
  privBuf.write(key.toBuffer("rfc4253"));
@@ -32096,7 +32096,7 @@ var require_ssh_private = __commonJS({
32096
32096
  case "none":
32097
32097
  break;
32098
32098
  case "bcrypt":
32099
- var salt = crypto.randomBytes(16);
32099
+ var salt = crypto2.randomBytes(16);
32100
32100
  var rounds = 16;
32101
32101
  var kdfssh = new SSHBuffer({});
32102
32102
  kdfssh.writeBuffer(salt);
@@ -32123,7 +32123,7 @@ var require_ssh_private = __commonJS({
32123
32123
  out = Buffer2.from(out);
32124
32124
  var ckey = out.slice(0, cinf.keySize);
32125
32125
  var iv = out.slice(cinf.keySize, cinf.keySize + cinf.blockSize);
32126
- var cipherStream = crypto.createCipheriv(
32126
+ var cipherStream = crypto2.createCipheriv(
32127
32127
  cinf.opensslName,
32128
32128
  ckey,
32129
32129
  iv
@@ -32185,7 +32185,7 @@ var require_pem = __commonJS({
32185
32185
  };
32186
32186
  var assert2 = require_assert();
32187
32187
  var asn1 = require_lib();
32188
- var crypto = require("crypto");
32188
+ var crypto2 = require("crypto");
32189
32189
  var Buffer2 = require_safer().Buffer;
32190
32190
  var algs = require_algs();
32191
32191
  var utils = require_utils();
@@ -32353,7 +32353,7 @@ var require_pem = __commonJS({
32353
32353
  alg = void 0;
32354
32354
  }
32355
32355
  if (cipher && key && iv) {
32356
- var cipherStream = crypto.createDecipheriv(cipher, key, iv);
32356
+ var cipherStream = crypto2.createDecipheriv(cipher, key, iv);
32357
32357
  var chunk, chunks = [];
32358
32358
  cipherStream.once("error", function(e) {
32359
32359
  if (e.toString().indexOf("bad decrypt") !== -1) {
@@ -32778,7 +32778,7 @@ var require_putty = __commonJS({
32778
32778
  var rfc4253 = require_rfc4253();
32779
32779
  var Key = require_key();
32780
32780
  var SSHBuffer = require_ssh_buffer();
32781
- var crypto = require("crypto");
32781
+ var crypto2 = require("crypto");
32782
32782
  var PrivateKey = require_private_key();
32783
32783
  var errors = require_errors();
32784
32784
  function read(buf, options) {
@@ -32848,7 +32848,7 @@ var require_putty = __commonJS({
32848
32848
  );
32849
32849
  }
32850
32850
  var iv = Buffer2.alloc(16, 0);
32851
- var decipher = crypto.createDecipheriv(
32851
+ var decipher = crypto2.createDecipheriv(
32852
32852
  "aes-256-cbc",
32853
32853
  derivePPK2EncryptionKey(options.passphrase),
32854
32854
  iv
@@ -32899,11 +32899,11 @@ var require_putty = __commonJS({
32899
32899
  return key;
32900
32900
  }
32901
32901
  function derivePPK2EncryptionKey(passphrase) {
32902
- var hash1 = crypto.createHash("sha1").update(Buffer2.concat([
32902
+ var hash1 = crypto2.createHash("sha1").update(Buffer2.concat([
32903
32903
  Buffer2.from([0, 0, 0, 0]),
32904
32904
  Buffer2.from(passphrase)
32905
32905
  ])).digest();
32906
- var hash2 = crypto.createHash("sha1").update(Buffer2.concat([
32906
+ var hash2 = crypto2.createHash("sha1").update(Buffer2.concat([
32907
32907
  Buffer2.from([0, 0, 0, 1]),
32908
32908
  Buffer2.from(passphrase)
32909
32909
  ])).digest();
@@ -33057,7 +33057,7 @@ var require_private_key = __commonJS({
33057
33057
  var assert2 = require_assert();
33058
33058
  var Buffer2 = require_safer().Buffer;
33059
33059
  var algs = require_algs();
33060
- var crypto = require("crypto");
33060
+ var crypto2 = require("crypto");
33061
33061
  var Fingerprint = require_fingerprint();
33062
33062
  var Signature = require_signature();
33063
33063
  var errs = require_errors();
@@ -33168,14 +33168,14 @@ var require_private_key = __commonJS({
33168
33168
  var v, nm, err;
33169
33169
  try {
33170
33170
  nm = hashAlgo.toUpperCase();
33171
- v = crypto.createSign(nm);
33171
+ v = crypto2.createSign(nm);
33172
33172
  } catch (e) {
33173
33173
  err = e;
33174
33174
  }
33175
33175
  if (v === void 0 || err instanceof Error && err.message.match(/Unknown message digest/)) {
33176
33176
  nm = "RSA-";
33177
33177
  nm += hashAlgo.toUpperCase();
33178
- v = crypto.createSign(nm);
33178
+ v = crypto2.createSign(nm);
33179
33179
  }
33180
33180
  assert2.ok(v, "failed to create verifier");
33181
33181
  var oldSign = v.sign.bind(v);
@@ -33260,7 +33260,7 @@ var require_identity = __commonJS({
33260
33260
  module2.exports = Identity;
33261
33261
  var assert2 = require_assert();
33262
33262
  var algs = require_algs();
33263
- var crypto = require("crypto");
33263
+ var crypto2 = require("crypto");
33264
33264
  var Fingerprint = require_fingerprint();
33265
33265
  var Signature = require_signature();
33266
33266
  var errs = require_errors();
@@ -33577,7 +33577,7 @@ var require_openssh_cert = __commonJS({
33577
33577
  };
33578
33578
  var assert2 = require_assert();
33579
33579
  var SSHBuffer = require_ssh_buffer();
33580
- var crypto = require("crypto");
33580
+ var crypto2 = require("crypto");
33581
33581
  var Buffer2 = require_safer().Buffer;
33582
33582
  var algs = require_algs();
33583
33583
  var Key = require_key();
@@ -33766,7 +33766,7 @@ var require_openssh_cert = __commonJS({
33766
33766
  assert2.object(cert.signatures.openssh, "signature for openssh format");
33767
33767
  var sig = cert.signatures.openssh;
33768
33768
  if (sig.nonce === void 0)
33769
- sig.nonce = crypto.randomBytes(16);
33769
+ sig.nonce = crypto2.randomBytes(16);
33770
33770
  var buf = new SSHBuffer({});
33771
33771
  buf.writeString(getCertType(cert.subjectKey));
33772
33772
  buf.writeBuffer(sig.nonce);
@@ -34569,7 +34569,7 @@ var require_certificate = __commonJS({
34569
34569
  var assert2 = require_assert();
34570
34570
  var Buffer2 = require_safer().Buffer;
34571
34571
  var algs = require_algs();
34572
- var crypto = require("crypto");
34572
+ var crypto2 = require("crypto");
34573
34573
  var Fingerprint = require_fingerprint();
34574
34574
  var Signature = require_signature();
34575
34575
  var errs = require_errors();
@@ -34656,7 +34656,7 @@ var require_certificate = __commonJS({
34656
34656
  throw new InvalidAlgorithmError(algo);
34657
34657
  if (this._hashCache[algo])
34658
34658
  return this._hashCache[algo];
34659
- var hash = crypto.createHash(algo).update(this.toBuffer("x509")).digest();
34659
+ var hash = crypto2.createHash(algo).update(this.toBuffer("x509")).digest();
34660
34660
  this._hashCache[algo] = hash;
34661
34661
  return hash;
34662
34662
  };
@@ -34926,7 +34926,7 @@ var require_fingerprint = __commonJS({
34926
34926
  var assert2 = require_assert();
34927
34927
  var Buffer2 = require_safer().Buffer;
34928
34928
  var algs = require_algs();
34929
- var crypto = require("crypto");
34929
+ var crypto2 = require("crypto");
34930
34930
  var errs = require_errors();
34931
34931
  var Key = require_key();
34932
34932
  var PrivateKey = require_private_key();
@@ -34993,9 +34993,9 @@ var require_fingerprint = __commonJS({
34993
34993
  );
34994
34994
  }
34995
34995
  var theirHash = other.hash(this.algorithm, this.hashType);
34996
- var theirHash2 = crypto.createHash(this.algorithm).update(theirHash).digest("base64");
34996
+ var theirHash2 = crypto2.createHash(this.algorithm).update(theirHash).digest("base64");
34997
34997
  if (this.hash2 === void 0)
34998
- this.hash2 = crypto.createHash(this.algorithm).update(this.hash).digest("base64");
34998
+ this.hash2 = crypto2.createHash(this.algorithm).update(this.hash).digest("base64");
34999
34999
  return this.hash2 === theirHash2;
35000
35000
  };
35001
35001
  var base64RE = /^[A-Za-z0-9+\/=]+$/;
@@ -35120,7 +35120,7 @@ var require_key = __commonJS({
35120
35120
  module2.exports = Key;
35121
35121
  var assert2 = require_assert();
35122
35122
  var algs = require_algs();
35123
- var crypto = require("crypto");
35123
+ var crypto2 = require("crypto");
35124
35124
  var Fingerprint = require_fingerprint();
35125
35125
  var Signature = require_signature();
35126
35126
  var DiffieHellman = require_dhe().DiffieHellman;
@@ -35218,7 +35218,7 @@ var require_key = __commonJS({
35218
35218
  } else {
35219
35219
  throw new Error("Hash type " + type + " not supported");
35220
35220
  }
35221
- var hash = crypto.createHash(algo).update(buf).digest();
35221
+ var hash = crypto2.createHash(algo).update(buf).digest();
35222
35222
  this._hashCache[cacheKey] = hash;
35223
35223
  return hash;
35224
35224
  };
@@ -35266,14 +35266,14 @@ var require_key = __commonJS({
35266
35266
  var v, nm, err;
35267
35267
  try {
35268
35268
  nm = hashAlgo.toUpperCase();
35269
- v = crypto.createVerify(nm);
35269
+ v = crypto2.createVerify(nm);
35270
35270
  } catch (e) {
35271
35271
  err = e;
35272
35272
  }
35273
35273
  if (v === void 0 || err instanceof Error && err.message.match(/Unknown message digest/)) {
35274
35274
  nm = "RSA-";
35275
35275
  nm += hashAlgo.toUpperCase();
35276
- v = crypto.createVerify(nm);
35276
+ v = crypto2.createVerify(nm);
35277
35277
  }
35278
35278
  assert2.ok(v, "failed to create verifier");
35279
35279
  var oldVerify = v.verify.bind(v);
@@ -37068,7 +37068,7 @@ var require_jsprim = __commonJS({
37068
37068
  var require_signer = __commonJS({
37069
37069
  "../../node_modules/http-signature/lib/signer.js"(exports, module2) {
37070
37070
  var assert2 = require_assert();
37071
- var crypto = require("crypto");
37071
+ var crypto2 = require("crypto");
37072
37072
  var http2 = require("http");
37073
37073
  var util = require("util");
37074
37074
  var sshpk = require_lib2();
@@ -37105,7 +37105,7 @@ var require_signer = __commonJS({
37105
37105
  this.rs_keyId = options.keyId;
37106
37106
  if (typeof options.key !== "string" && !Buffer.isBuffer(options.key))
37107
37107
  throw new TypeError("options.key for HMAC must be a string or Buffer");
37108
- this.rs_signer = crypto.createHmac(alg[1].toUpperCase(), options.key);
37108
+ this.rs_signer = crypto2.createHmac(alg[1].toUpperCase(), options.key);
37109
37109
  this.rs_signer.sign = function() {
37110
37110
  var digest = this.digest("base64");
37111
37111
  return {
@@ -37323,7 +37323,7 @@ var require_signer = __commonJS({
37323
37323
  if (alg[0] === "hmac") {
37324
37324
  if (typeof options.key !== "string" && !Buffer.isBuffer(options.key))
37325
37325
  throw new TypeError("options.key must be a string or Buffer");
37326
- var hmac = crypto.createHmac(alg[1].toUpperCase(), options.key);
37326
+ var hmac = crypto2.createHmac(alg[1].toUpperCase(), options.key);
37327
37327
  hmac.update(stringToSign);
37328
37328
  signature = hmac.digest("base64");
37329
37329
  } else {
@@ -37368,7 +37368,7 @@ var require_signer = __commonJS({
37368
37368
  var require_verify = __commonJS({
37369
37369
  "../../node_modules/http-signature/lib/verify.js"(exports, module2) {
37370
37370
  var assert2 = require_assert();
37371
- var crypto = require("crypto");
37371
+ var crypto2 = require("crypto");
37372
37372
  var sshpk = require_lib2();
37373
37373
  var utils = require_utils2();
37374
37374
  var HASH_ALGOS = utils.HASH_ALGOS;
@@ -37416,12 +37416,12 @@ var require_verify = __commonJS({
37416
37416
  if (alg[0] !== "hmac")
37417
37417
  return false;
37418
37418
  var hashAlg = alg[1].toUpperCase();
37419
- var hmac = crypto.createHmac(hashAlg, secret);
37419
+ var hmac = crypto2.createHmac(hashAlg, secret);
37420
37420
  hmac.update(parsedSignature.signingString);
37421
- var h1 = crypto.createHmac(hashAlg, secret);
37421
+ var h1 = crypto2.createHmac(hashAlg, secret);
37422
37422
  h1.update(hmac.digest());
37423
37423
  h1 = h1.digest();
37424
- var h2 = crypto.createHmac(hashAlg, secret);
37424
+ var h2 = crypto2.createHmac(hashAlg, secret);
37425
37425
  h2.update(new Buffer(parsedSignature.params.signature, "base64"));
37426
37426
  h2 = h2.digest();
37427
37427
  if (typeof h1 === "string")
@@ -55216,9 +55216,9 @@ var require_har2 = __commonJS({
55216
55216
  // ../../node_modules/uuid/lib/rng.js
55217
55217
  var require_rng = __commonJS({
55218
55218
  "../../node_modules/uuid/lib/rng.js"(exports, module2) {
55219
- var crypto = require("crypto");
55219
+ var crypto2 = require("crypto");
55220
55220
  module2.exports = function nodeRNG() {
55221
- return crypto.randomBytes(16);
55221
+ return crypto2.randomBytes(16);
55222
55222
  };
55223
55223
  }
55224
55224
  });
@@ -55424,12 +55424,12 @@ var require_auth = __commonJS({
55424
55424
  // ../../node_modules/oauth-sign/index.js
55425
55425
  var require_oauth_sign = __commonJS({
55426
55426
  "../../node_modules/oauth-sign/index.js"(exports) {
55427
- var crypto = require("crypto");
55427
+ var crypto2 = require("crypto");
55428
55428
  function sha(key, body, algorithm) {
55429
- return crypto.createHmac(algorithm, key).update(body).digest("base64");
55429
+ return crypto2.createHmac(algorithm, key).update(body).digest("base64");
55430
55430
  }
55431
55431
  function rsa(key, body) {
55432
- return crypto.createSign("RSA-SHA1").update(body).sign(key, "base64");
55432
+ return crypto2.createSign("RSA-SHA1").update(body).sign(key, "base64");
55433
55433
  }
55434
55434
  function rfc3986(str) {
55435
55435
  return encodeURIComponent(str).replace(/!/g, "%21").replace(/\*/g, "%2A").replace(/\(/g, "%28").replace(/\)/g, "%29").replace(/'/g, "%27");
@@ -55536,7 +55536,7 @@ var require_oauth = __commonJS({
55536
55536
  var caseless = require_caseless();
55537
55537
  var uuid = require_v4();
55538
55538
  var oauth = require_oauth_sign();
55539
- var crypto = require("crypto");
55539
+ var crypto2 = require("crypto");
55540
55540
  var Buffer2 = require_safe_buffer().Buffer;
55541
55541
  function OAuth(request) {
55542
55542
  this.request = request;
@@ -55588,7 +55588,7 @@ var require_oauth = __commonJS({
55588
55588
  if (["HMAC-SHA1", "RSA-SHA1"].indexOf(_oauth.signature_method || "HMAC-SHA1") < 0) {
55589
55589
  this.request.emit("error", new Error("oauth: " + _oauth.signature_method + " signature_method not supported with body_hash signing."));
55590
55590
  }
55591
- var shasum = crypto.createHash("sha1");
55591
+ var shasum = crypto2.createHash("sha1");
55592
55592
  shasum.update(body || "");
55593
55593
  var sha1 = shasum.digest("hex");
55594
55594
  return Buffer2.from(sha1, "hex").toString("base64");
@@ -55657,15 +55657,15 @@ var require_oauth = __commonJS({
55657
55657
  var require_hawk = __commonJS({
55658
55658
  "../../node_modules/request/lib/hawk.js"(exports) {
55659
55659
  "use strict";
55660
- var crypto = require("crypto");
55660
+ var crypto2 = require("crypto");
55661
55661
  function randomString(size) {
55662
55662
  var bits = (size + 1) * 6;
55663
- var buffer = crypto.randomBytes(Math.ceil(bits / 8));
55663
+ var buffer = crypto2.randomBytes(Math.ceil(bits / 8));
55664
55664
  var string = buffer.toString("base64").replace(/\+/g, "-").replace(/\//g, "_").replace(/=/g, "");
55665
55665
  return string.slice(0, size);
55666
55666
  }
55667
55667
  function calculatePayloadHash(payload, algorithm, contentType) {
55668
- var hash = crypto.createHash(algorithm);
55668
+ var hash = crypto2.createHash(algorithm);
55669
55669
  hash.update("hawk.1.payload\n");
55670
55670
  hash.update((contentType ? contentType.split(";")[0].trim().toLowerCase() : "") + "\n");
55671
55671
  hash.update(payload || "");
@@ -55681,7 +55681,7 @@ var require_hawk = __commonJS({
55681
55681
  if (opts.app) {
55682
55682
  normalized = normalized + opts.app + "\n" + (opts.dlg || "") + "\n";
55683
55683
  }
55684
- var hmac = crypto.createHmac(credentials.algorithm, credentials.key).update(normalized);
55684
+ var hmac = crypto2.createHmac(credentials.algorithm, credentials.key).update(normalized);
55685
55685
  var digest = hmac.digest("base64");
55686
55686
  return digest;
55687
55687
  };
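
Note: the `crypto` → `crypto2` renames in the hunks above look like a mechanical consequence of bundling rather than a functional change. A new top-level `crypto` binding is introduced later in this bundle for the NodeHash polyfill (see the `src/crypto/node-hash.ts` hunk below), so the bundler appears to suffix the vendored modules' local `crypto` bindings to avoid a name collision in the flattened scope. A minimal sketch of the collision being avoided (illustrative only, mirroring the bundle's CommonJS style; not the package's code):

    // Two modules that each declared `var crypto = require('crypto')` cannot share one
    // flattened bundle scope, so the second binding is emitted with a numeric suffix.
    const crypto = require('crypto');   // binding kept by the module hoisted first
    const crypto2 = require('crypto');  // vendored module's binding, renamed from `crypto`

    console.log(crypto.createHash('sha256').update('abc').digest('hex'));
    console.log(crypto2.randomBytes(16).byteLength); // 16
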
@@ -63711,6 +63711,30 @@ function getPixelsAsync(buffer, mimeType) {
63711
63711
  );
63712
63712
  }
63713
63713
 
63714
+ // ../loader-utils/src/lib/binary-utils/array-buffer-utils.ts
63715
+ function concatenateArrayBuffers(...sources) {
63716
+ const sourceArrays = sources.map(
63717
+ (source2) => source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2
63718
+ );
63719
+ const byteLength = sourceArrays.reduce((length, typedArray) => length + typedArray.byteLength, 0);
63720
+ const result = new Uint8Array(byteLength);
63721
+ let offset = 0;
63722
+ for (const sourceArray of sourceArrays) {
63723
+ result.set(sourceArray, offset);
63724
+ offset += sourceArray.byteLength;
63725
+ }
63726
+ return result.buffer;
63727
+ }
63728
+
63729
+ // ../loader-utils/src/lib/iterators/async-iteration.ts
63730
+ async function concatenateArrayBuffersAsync(asyncIterator) {
63731
+ const arrayBuffers = [];
63732
+ for await (const chunk of asyncIterator) {
63733
+ arrayBuffers.push(chunk);
63734
+ }
63735
+ return concatenateArrayBuffers(...arrayBuffers);
63736
+ }
63737
+
63714
63738
  // ../loader-utils/src/lib/path-utils/file-aliases.ts
63715
63739
  var pathPrefix = "";
63716
63740
  var fileAliases = {};
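
The hunk above inlines two helpers from `@loaders.gl/loader-utils` into the bundle: a variadic `concatenateArrayBuffers(...sources)` that joins ArrayBuffers or typed arrays into one ArrayBuffer, and `concatenateArrayBuffersAsync(asyncIterator)` that drains an async iterator of chunks and concatenates them. A short usage sketch, assuming the helper is exported from `@loaders.gl/loader-utils` as the inlined source path suggests:

    import {concatenateArrayBuffers} from '@loaders.gl/loader-utils';

    const part1 = new Uint8Array([1, 2]).buffer;
    const part2 = new Uint8Array([3, 4, 5]);
    // Variadic: accepts ArrayBuffer or typed-array arguments, returns a single ArrayBuffer
    const joined = concatenateArrayBuffers(part1, part2);
    console.log(new Uint8Array(joined)); // Uint8Array(5) [1, 2, 3, 4, 5]
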
@@ -63842,12 +63866,12 @@ async function concatenateReadStream(readStream) {
63842
63866
  arrayBufferChunks.push(toArrayBuffer(chunk));
63843
63867
  });
63844
63868
  readStream.on("end", () => {
63845
- const arrayBuffer = concatenateArrayBuffers(arrayBufferChunks);
63869
+ const arrayBuffer = concatenateArrayBuffers2(arrayBufferChunks);
63846
63870
  resolve(arrayBuffer);
63847
63871
  });
63848
63872
  });
63849
63873
  }
63850
- function concatenateArrayBuffers(sources) {
63874
+ function concatenateArrayBuffers2(sources) {
63851
63875
  const sourceArrays = sources.map(
63852
63876
  (source2) => source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2
63853
63877
  );
@@ -63975,6 +63999,77 @@ var NodeFileSystem = class {
63975
63999
  }
63976
64000
  };
63977
64001
 
64002
+ // ../crypto/src/lib/hash.ts
64003
+ var Hash = class {
64004
+ constructor(options = {}) {
64005
+ this.hashBatches = this.hashBatches.bind(this);
64006
+ }
64007
+ async preload() {
64008
+ return;
64009
+ }
64010
+ async *hashBatches(asyncIterator, encoding = "base64") {
64011
+ var _a, _b;
64012
+ const arrayBuffers = [];
64013
+ for await (const arrayBuffer of asyncIterator) {
64014
+ arrayBuffers.push(arrayBuffer);
64015
+ yield arrayBuffer;
64016
+ }
64017
+ const output = await this.concatenate(arrayBuffers);
64018
+ const hash = await this.hash(output, encoding);
64019
+ (_b = (_a = this.options.crypto) == null ? void 0 : _a.onEnd) == null ? void 0 : _b.call(_a, { hash });
64020
+ }
64021
+ // HELPERS
64022
+ async concatenate(asyncIterator) {
64023
+ return await concatenateArrayBuffersAsync(asyncIterator);
64024
+ }
64025
+ };
64026
+
64027
+ // src/crypto/node-hash.ts
64028
+ var crypto = __toESM(require("crypto"), 1);
64029
+ var NodeHash = class extends Hash {
64030
+ constructor(options) {
64031
+ var _a, _b;
64032
+ super();
64033
+ this.name = "crypto-node";
64034
+ this.options = options;
64035
+ if (!((_b = (_a = this.options) == null ? void 0 : _a.crypto) == null ? void 0 : _b.algorithm)) {
64036
+ throw new Error(this.name);
64037
+ }
64038
+ }
64039
+ /**
64040
+ * Atomic hash calculation
64041
+ * @returns base64 encoded hash
64042
+ */
64043
+ async hash(input, encoding) {
64044
+ var _a, _b, _c, _d;
64045
+ const algorithm = (_c = (_b = (_a = this.options) == null ? void 0 : _a.crypto) == null ? void 0 : _b.algorithm) == null ? void 0 : _c.toLowerCase();
64046
+ try {
64047
+ if (!crypto.createHash) {
64048
+ throw new Error("crypto.createHash not available");
64049
+ }
64050
+ const hash = (_d = crypto.createHash) == null ? void 0 : _d(algorithm);
64051
+ const inputArray = new Uint8Array(input);
64052
+ return hash.update(inputArray).digest("base64");
64053
+ } catch (error) {
64054
+ throw Error(`${algorithm} hash not available. ${error}`);
64055
+ }
64056
+ }
64057
+ async *hashBatches(asyncIterator, encoding = "base64") {
64058
+ var _a, _b, _c, _d, _e, _f, _g;
64059
+ if (!crypto.createHash) {
64060
+ throw new Error("crypto.createHash not available");
64061
+ }
64062
+ const hash = (_d = crypto.createHash) == null ? void 0 : _d((_c = (_b = (_a = this.options) == null ? void 0 : _a.crypto) == null ? void 0 : _b.algorithm) == null ? void 0 : _c.toLowerCase());
64063
+ for await (const chunk of asyncIterator) {
64064
+ const inputArray = new Uint8Array(chunk);
64065
+ hash.update(inputArray);
64066
+ yield chunk;
64067
+ }
64068
+ const digest = hash.digest(encoding);
64069
+ (_g = (_f = (_e = this.options) == null ? void 0 : _e.crypto) == null ? void 0 : _f.onEnd) == null ? void 0 : _g.call(_f, { hash: digest });
64070
+ }
64071
+ };
64072
+
63978
64073
  // src/index.ts
63979
64074
  var import_node_process = require("node:process");
63980
64075
 
@@ -64706,6 +64801,7 @@ globalThis.loaders.makeNodeStream = makeNodeStream;
64706
64801
  globalThis.loaders.NodeFile = NodeFile;
64707
64802
  globalThis.loaders.NodeFileSystem = NodeFileSystem;
64708
64803
  globalThis.loaders.fetchNode = fetchNode;
64804
+ globalThis.loaders.NodeHash = NodeHash;
64709
64805
  if (!globalThis.TextEncoder) {
64710
64806
  globalThis.TextEncoder = TextEncoder2;
64711
64807
  }
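
The two hunks above add a `Hash` base class and a Node-specific `NodeHash` subclass (backed by `crypto.createHash`) to the bundle, and register the class on `globalThis.loaders.NodeHash` alongside the existing filesystem polyfills. A hedged usage sketch based on the constructor and `hash()` method shown in the bundled code; the `'SHA256'` algorithm value and the cast through `any` are assumptions for illustration:

    import '@loaders.gl/polyfills';

    // The polyfill entry point registers the class on globalThis.loaders (see the hunk above)
    const NodeHash = (globalThis as any).loaders.NodeHash;
    const hasher = new NodeHash({crypto: {algorithm: 'SHA256'}});

    const input = new TextEncoder().encode('hello world').buffer;
    const digest: string = await hasher.hash(input, 'base64');
    console.log(digest); // base64-encoded SHA-256 of the input
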
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAkBA,eAAO,MAAM,WAAW,QAAwC,CAAC;AAMjE,OAAO,EAAC,KAAK,IAAI,IAAI,EAAC,MAAM,+BAA+B,CAAC;AAC5D,OAAO,EAAC,KAAK,IAAI,IAAI,EAAC,MAAM,+BAA+B,CAAC;AAY5D,OAAO,EAAC,cAAc,EAAC,MAAM,4BAA4B,CAAC;AA2D1D,OAAO,EAAC,oBAAoB,EAAC,MAAM,+BAA+B,CAAC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAoBA,eAAO,MAAM,WAAW,QAAwC,CAAC;AAMjE,OAAO,EAAC,KAAK,IAAI,IAAI,EAAC,MAAM,+BAA+B,CAAC;AAC5D,OAAO,EAAC,KAAK,IAAI,IAAI,EAAC,MAAM,+BAA+B,CAAC;AAY5D,OAAO,EAAC,cAAc,EAAC,MAAM,4BAA4B,CAAC;AA8D1D,OAAO,EAAC,oBAAoB,EAAC,MAAM,+BAA+B,CAAC"}
package/dist/index.js CHANGED
@@ -6,6 +6,7 @@ import { parseImageNode, NODE_FORMAT_SUPPORT } from "./images/parse-image-node.j
6
6
  import { NodeFile } from "./filesystems/node-file.js";
7
7
  import { NodeFileSystem } from "./filesystems/node-filesystem.js";
8
8
  import { fetchNode } from "./filesystems/fetch-node.js";
9
+ import { NodeHash } from "./crypto/node-hash.js";
9
10
  import { versions } from 'node:process';
10
11
  export const nodeVersion = parseInt(versions.node.split('.')[0]);
11
12
  import { makeNodeStream } from "./streams/make-node-stream.js";
@@ -20,6 +21,7 @@ globalThis.loaders.makeNodeStream = makeNodeStream;
20
21
  globalThis.loaders.NodeFile = NodeFile;
21
22
  globalThis.loaders.NodeFileSystem = NodeFileSystem;
22
23
  globalThis.loaders.fetchNode = fetchNode;
24
+ globalThis.loaders.NodeHash = NodeHash;
23
25
  if (!globalThis.TextEncoder) {
24
26
  globalThis.TextEncoder = TextEncoder;
25
27
  }
package/dist/index.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"index.js","names":["isBrowser","TextDecoder","TextEncoder","atob","btoa","encodeImageNode","parseImageNode","NODE_FORMAT_SUPPORT","NodeFile","NodeFileSystem","fetchNode","versions","nodeVersion","parseInt","node","split","makeNodeStream","Blob_","Blob","File_","File","console","error","globalThis","loaders","imageFormatsNode","_parseImageNode","_imageFormatsNode","readFileAsArrayBuffer","readFileAsText","requireFromFile","requireFromString","installFilePolyfills","Headers","HeadersNode","Response","ResponseNode","fetchNodePolyfill","fetch"],"sources":["../src/index.ts"],"sourcesContent":["/* eslint-disable dot-notation */\nimport {isBrowser} from './utils/is-browser';\n\nimport {TextDecoder, TextEncoder} from './text-encoder/text-encoder';\n\n// Node specific\nimport {atob, btoa} from './buffer/btoa.node';\n\nimport {encodeImageNode} from './images/encode-image-node';\nimport {parseImageNode, NODE_FORMAT_SUPPORT} from './images/parse-image-node';\n\n// FILESYSTEM POLYFILLS\nimport {NodeFile} from './filesystems/node-file';\nimport {NodeFileSystem} from './filesystems/node-filesystem';\nimport {fetchNode} from './filesystems/fetch-node';\n\n// NODE VERSION\nimport {versions} from 'node:process';\nexport const nodeVersion = parseInt(versions.node.split('.')[0]);\n\n// STREAM POLYFILLS\nimport {makeNodeStream} from './streams/make-node-stream';\n\n// BLOB AND FILE POLYFILLS\nexport {Blob_ as Blob} from './file/install-blob-polyfills';\nexport {File_ as File} from './file/install-file-polyfills';\n\nif (isBrowser) {\n // eslint-disable-next-line no-console\n console.error(\n 'loaders.gl: The @loaders.gl/polyfills should only be used in Node.js environments'\n );\n}\n\nglobalThis.loaders = globalThis.loaders || {};\n\n// STREAM POLYFILLS\nexport {makeNodeStream} from './streams/make-node-stream';\nglobalThis.loaders.makeNodeStream = makeNodeStream;\n\n// FILESYSTEM POLYFILLS\nglobalThis.loaders.NodeFile = NodeFile;\nglobalThis.loaders.NodeFileSystem = NodeFileSystem;\nglobalThis.loaders.fetchNode = fetchNode;\n\n// POLYFILLS: TextEncoder, TextDecoder\n// - Recent Node versions have these classes but virtually no encodings unless special build.\n// - Browser: Edge, IE11 do not have these\n\nif (!globalThis.TextEncoder) {\n // @ts-expect-error\n globalThis.TextEncoder = TextEncoder;\n}\n\nif (!globalThis.TextDecoder) {\n // @ts-expect-error\n globalThis.TextDecoder = TextDecoder;\n}\n\n// POLYFILLS: btoa, atob\n// - Node: Yes\n// - Browser: No\n\nif (!('atob' in globalThis) && atob) {\n globalThis['atob'] = atob;\n}\nif (!('btoa' in globalThis) && btoa) {\n globalThis['btoa'] = btoa;\n}\n\n// NODE IMAGE FUNCTIONS:\n// These are not official polyfills but used by the @loaders.gl/images module if installed\n// TODO - is there an appropriate Image API we could polyfill using an adapter?\n\nglobalThis.loaders.encodeImageNode = encodeImageNode;\nglobalThis.loaders.parseImageNode = parseImageNode;\nglobalThis.loaders.imageFormatsNode = NODE_FORMAT_SUPPORT;\n\n// Deprecated, remove after republish\nglobalThis._parseImageNode = parseImageNode;\nglobalThis._imageFormatsNode = NODE_FORMAT_SUPPORT;\n\n// LOAD LIBRARY\n\nimport {\n readFileAsArrayBuffer,\n readFileAsText,\n requireFromFile,\n requireFromString\n} from './load-library/require-utils.node';\n\nglobalThis.loaders.readFileAsArrayBuffer = readFileAsArrayBuffer;\nglobalThis.loaders.readFileAsText = readFileAsText;\nglobalThis.loaders.requireFromFile = requireFromFile;\nglobalThis.loaders.requireFromString = 
requireFromString;\n\nexport {installFilePolyfills} from './file/install-file-polyfills';\n\n// DEPRECATED POLYFILL:\n// - Node v18+: No, not needed\n// - Node v16 and lower: Yes\n// - Browsers (evergreen): Not needed.\n// - IE11: No. This polyfill is node only, install external polyfill\nimport {Headers as HeadersNode} from './fetch/headers-polyfill';\nimport {Response as ResponseNode} from './fetch/response-polyfill';\nimport {fetchNode as fetchNodePolyfill} from './fetch/fetch-polyfill';\n\nif (nodeVersion < 18) {\n if (!('Headers' in globalThis) && HeadersNode) {\n // @ts-ignore\n globalThis.Headers = HeadersNode;\n }\n\n if (!('Response' in globalThis) && ResponseNode) {\n // @ts-ignore\n globalThis.Response = ResponseNode;\n }\n\n if (!('fetch' in globalThis) && fetchNodePolyfill) {\n // @ts-ignore\n globalThis.fetch = fetchNodePolyfill;\n }\n}\n"],"mappings":"SACQA,SAAS;AAAA,SAETC,WAAW,EAAEC,WAAW;AAAA,SAGxBC,IAAI,EAAEC,IAAI;AAAA,SAEVC,eAAe;AAAA,SACfC,cAAc,EAAEC,mBAAmB;AAAA,SAGnCC,QAAQ;AAAA,SACRC,cAAc;AAAA,SACdC,SAAS;AAGjB,SAAQC,QAAQ,QAAO,cAAc;AACrC,OAAO,MAAMC,WAAW,GAAGC,QAAQ,CAACF,QAAQ,CAACG,IAAI,CAACC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAAC,SAGzDC,cAAc;AAAA,SAGdC,KAAK,IAAIC,IAAI;AAAA,SACbC,KAAK,IAAIC,IAAI;AAErB,IAAIpB,SAAS,EAAE;EAEbqB,OAAO,CAACC,KAAK,CACX,mFACF,CAAC;AACH;AAEAC,UAAU,CAACC,OAAO,GAAGD,UAAU,CAACC,OAAO,IAAI,CAAC,CAAC;AAAC,SAGtCR,cAAc;AACtBO,UAAU,CAACC,OAAO,CAACR,cAAc,GAAGA,cAAc;AAGlDO,UAAU,CAACC,OAAO,CAAChB,QAAQ,GAAGA,QAAQ;AACtCe,UAAU,CAACC,OAAO,CAACf,cAAc,GAAGA,cAAc;AAClDc,UAAU,CAACC,OAAO,CAACd,SAAS,GAAGA,SAAS;AAMxC,IAAI,CAACa,UAAU,CAACrB,WAAW,EAAE;EAE3BqB,UAAU,CAACrB,WAAW,GAAGA,WAAW;AACtC;AAEA,IAAI,CAACqB,UAAU,CAACtB,WAAW,EAAE;EAE3BsB,UAAU,CAACtB,WAAW,GAAGA,WAAW;AACtC;AAMA,IAAI,EAAE,MAAM,IAAIsB,UAAU,CAAC,IAAIpB,IAAI,EAAE;EACnCoB,UAAU,CAAC,MAAM,CAAC,GAAGpB,IAAI;AAC3B;AACA,IAAI,EAAE,MAAM,IAAIoB,UAAU,CAAC,IAAInB,IAAI,EAAE;EACnCmB,UAAU,CAAC,MAAM,CAAC,GAAGnB,IAAI;AAC3B;AAMAmB,UAAU,CAACC,OAAO,CAACnB,eAAe,GAAGA,eAAe;AACpDkB,UAAU,CAACC,OAAO,CAAClB,cAAc,GAAGA,cAAc;AAClDiB,UAAU,CAACC,OAAO,CAACC,gBAAgB,GAAGlB,mBAAmB;AAGzDgB,UAAU,CAACG,eAAe,GAAGpB,cAAc;AAC3CiB,UAAU,CAACI,iBAAiB,GAAGpB,mBAAmB;AAAC,SAKjDqB,qBAAqB,EACrBC,cAAc,EACdC,eAAe,EACfC,iBAAiB;AAGnBR,UAAU,CAACC,OAAO,CAACI,qBAAqB,GAAGA,qBAAqB;AAChEL,UAAU,CAACC,OAAO,CAACK,cAAc,GAAGA,cAAc;AAClDN,UAAU,CAACC,OAAO,CAACM,eAAe,GAAGA,eAAe;AACpDP,UAAU,CAACC,OAAO,CAACO,iBAAiB,GAAGA,iBAAiB;AAAC,SAEjDC,oBAAoB;AAAA,SAOpBC,OAAO,IAAIC,WAAW;AAAA,SACtBC,QAAQ,IAAIC,YAAY;AAAA,SACxB1B,SAAS,IAAI2B,iBAAiB;AAEtC,IAAIzB,WAAW,GAAG,EAAE,EAAE;EACpB,IAAI,EAAE,SAAS,IAAIW,UAAU,CAAC,IAAIW,WAAW,EAAE;IAE7CX,UAAU,CAACU,OAAO,GAAGC,WAAW;EAClC;EAEA,IAAI,EAAE,UAAU,IAAIX,UAAU,CAAC,IAAIa,YAAY,EAAE;IAE/Cb,UAAU,CAACY,QAAQ,GAAGC,YAAY;EACpC;EAEA,IAAI,EAAE,OAAO,IAAIb,UAAU,CAAC,IAAIc,iBAAiB,EAAE;IAEjDd,UAAU,CAACe,KAAK,GAAGD,iBAAiB;EACtC;AACF"}
1
+ {"version":3,"file":"index.js","names":["isBrowser","TextDecoder","TextEncoder","atob","btoa","encodeImageNode","parseImageNode","NODE_FORMAT_SUPPORT","NodeFile","NodeFileSystem","fetchNode","NodeHash","versions","nodeVersion","parseInt","node","split","makeNodeStream","Blob_","Blob","File_","File","console","error","globalThis","loaders","imageFormatsNode","_parseImageNode","_imageFormatsNode","readFileAsArrayBuffer","readFileAsText","requireFromFile","requireFromString","installFilePolyfills","Headers","HeadersNode","Response","ResponseNode","fetchNodePolyfill","fetch"],"sources":["../src/index.ts"],"sourcesContent":["/* eslint-disable dot-notation */\nimport {isBrowser} from './utils/is-browser';\n\nimport {TextDecoder, TextEncoder} from './text-encoder/text-encoder';\n\n// Node specific\nimport {atob, btoa} from './buffer/btoa.node';\n\nimport {encodeImageNode} from './images/encode-image-node';\nimport {parseImageNode, NODE_FORMAT_SUPPORT} from './images/parse-image-node';\n\n// FILESYSTEM POLYFILLS\nimport {NodeFile} from './filesystems/node-file';\nimport {NodeFileSystem} from './filesystems/node-filesystem';\nimport {fetchNode} from './filesystems/fetch-node';\n\nimport {NodeHash} from './crypto/node-hash';\n\n// NODE VERSION\nimport {versions} from 'node:process';\nexport const nodeVersion = parseInt(versions.node.split('.')[0]);\n\n// STREAM POLYFILLS\nimport {makeNodeStream} from './streams/make-node-stream';\n\n// BLOB AND FILE POLYFILLS\nexport {Blob_ as Blob} from './file/install-blob-polyfills';\nexport {File_ as File} from './file/install-file-polyfills';\n\nif (isBrowser) {\n // eslint-disable-next-line no-console\n console.error(\n 'loaders.gl: The @loaders.gl/polyfills should only be used in Node.js environments'\n );\n}\n\nglobalThis.loaders = globalThis.loaders || {};\n\n// STREAM POLYFILLS\nexport {makeNodeStream} from './streams/make-node-stream';\nglobalThis.loaders.makeNodeStream = makeNodeStream;\n\n// FILESYSTEM POLYFILLS\nglobalThis.loaders.NodeFile = NodeFile;\nglobalThis.loaders.NodeFileSystem = NodeFileSystem;\nglobalThis.loaders.fetchNode = fetchNode;\n\n// CRYPTO POLYFILLS\nglobalThis.loaders.NodeHash = NodeHash;\n\n// POLYFILLS: TextEncoder, TextDecoder\n// - Recent Node versions have these classes but virtually no encodings unless special build.\n// - Browser: Edge, IE11 do not have these\n\nif (!globalThis.TextEncoder) {\n // @ts-expect-error\n globalThis.TextEncoder = TextEncoder;\n}\n\nif (!globalThis.TextDecoder) {\n // @ts-expect-error\n globalThis.TextDecoder = TextDecoder;\n}\n\n// POLYFILLS: btoa, atob\n// - Node: Yes\n// - Browser: No\n\nif (!('atob' in globalThis) && atob) {\n globalThis['atob'] = atob;\n}\nif (!('btoa' in globalThis) && btoa) {\n globalThis['btoa'] = btoa;\n}\n\n// NODE IMAGE FUNCTIONS:\n// These are not official polyfills but used by the @loaders.gl/images module if installed\n// TODO - is there an appropriate Image API we could polyfill using an adapter?\n\nglobalThis.loaders.encodeImageNode = encodeImageNode;\nglobalThis.loaders.parseImageNode = parseImageNode;\nglobalThis.loaders.imageFormatsNode = NODE_FORMAT_SUPPORT;\n\n// Deprecated, remove after republish\nglobalThis._parseImageNode = parseImageNode;\nglobalThis._imageFormatsNode = NODE_FORMAT_SUPPORT;\n\n// LOAD LIBRARY\n\nimport {\n readFileAsArrayBuffer,\n readFileAsText,\n requireFromFile,\n requireFromString\n} from './load-library/require-utils.node';\n\nglobalThis.loaders.readFileAsArrayBuffer = readFileAsArrayBuffer;\nglobalThis.loaders.readFileAsText = 
readFileAsText;\nglobalThis.loaders.requireFromFile = requireFromFile;\nglobalThis.loaders.requireFromString = requireFromString;\n\nexport {installFilePolyfills} from './file/install-file-polyfills';\n\n// DEPRECATED POLYFILL:\n// - Node v18+: No, not needed\n// - Node v16 and lower: Yes\n// - Browsers (evergreen): Not needed.\n// - IE11: No. This polyfill is node only, install external polyfill\nimport {Headers as HeadersNode} from './fetch/headers-polyfill';\nimport {Response as ResponseNode} from './fetch/response-polyfill';\nimport {fetchNode as fetchNodePolyfill} from './fetch/fetch-polyfill';\n\nif (nodeVersion < 18) {\n if (!('Headers' in globalThis) && HeadersNode) {\n // @ts-ignore\n globalThis.Headers = HeadersNode;\n }\n\n if (!('Response' in globalThis) && ResponseNode) {\n // @ts-ignore\n globalThis.Response = ResponseNode;\n }\n\n if (!('fetch' in globalThis) && fetchNodePolyfill) {\n // @ts-ignore\n globalThis.fetch = fetchNodePolyfill;\n }\n}\n"],"mappings":"SACQA,SAAS;AAAA,SAETC,WAAW,EAAEC,WAAW;AAAA,SAGxBC,IAAI,EAAEC,IAAI;AAAA,SAEVC,eAAe;AAAA,SACfC,cAAc,EAAEC,mBAAmB;AAAA,SAGnCC,QAAQ;AAAA,SACRC,cAAc;AAAA,SACdC,SAAS;AAAA,SAETC,QAAQ;AAGhB,SAAQC,QAAQ,QAAO,cAAc;AACrC,OAAO,MAAMC,WAAW,GAAGC,QAAQ,CAACF,QAAQ,CAACG,IAAI,CAACC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAAC,SAGzDC,cAAc;AAAA,SAGdC,KAAK,IAAIC,IAAI;AAAA,SACbC,KAAK,IAAIC,IAAI;AAErB,IAAIrB,SAAS,EAAE;EAEbsB,OAAO,CAACC,KAAK,CACX,mFACF,CAAC;AACH;AAEAC,UAAU,CAACC,OAAO,GAAGD,UAAU,CAACC,OAAO,IAAI,CAAC,CAAC;AAAC,SAGtCR,cAAc;AACtBO,UAAU,CAACC,OAAO,CAACR,cAAc,GAAGA,cAAc;AAGlDO,UAAU,CAACC,OAAO,CAACjB,QAAQ,GAAGA,QAAQ;AACtCgB,UAAU,CAACC,OAAO,CAAChB,cAAc,GAAGA,cAAc;AAClDe,UAAU,CAACC,OAAO,CAACf,SAAS,GAAGA,SAAS;AAGxCc,UAAU,CAACC,OAAO,CAACd,QAAQ,GAAGA,QAAQ;AAMtC,IAAI,CAACa,UAAU,CAACtB,WAAW,EAAE;EAE3BsB,UAAU,CAACtB,WAAW,GAAGA,WAAW;AACtC;AAEA,IAAI,CAACsB,UAAU,CAACvB,WAAW,EAAE;EAE3BuB,UAAU,CAACvB,WAAW,GAAGA,WAAW;AACtC;AAMA,IAAI,EAAE,MAAM,IAAIuB,UAAU,CAAC,IAAIrB,IAAI,EAAE;EACnCqB,UAAU,CAAC,MAAM,CAAC,GAAGrB,IAAI;AAC3B;AACA,IAAI,EAAE,MAAM,IAAIqB,UAAU,CAAC,IAAIpB,IAAI,EAAE;EACnCoB,UAAU,CAAC,MAAM,CAAC,GAAGpB,IAAI;AAC3B;AAMAoB,UAAU,CAACC,OAAO,CAACpB,eAAe,GAAGA,eAAe;AACpDmB,UAAU,CAACC,OAAO,CAACnB,cAAc,GAAGA,cAAc;AAClDkB,UAAU,CAACC,OAAO,CAACC,gBAAgB,GAAGnB,mBAAmB;AAGzDiB,UAAU,CAACG,eAAe,GAAGrB,cAAc;AAC3CkB,UAAU,CAACI,iBAAiB,GAAGrB,mBAAmB;AAAC,SAKjDsB,qBAAqB,EACrBC,cAAc,EACdC,eAAe,EACfC,iBAAiB;AAGnBR,UAAU,CAACC,OAAO,CAACI,qBAAqB,GAAGA,qBAAqB;AAChEL,UAAU,CAACC,OAAO,CAACK,cAAc,GAAGA,cAAc;AAClDN,UAAU,CAACC,OAAO,CAACM,eAAe,GAAGA,eAAe;AACpDP,UAAU,CAACC,OAAO,CAACO,iBAAiB,GAAGA,iBAAiB;AAAC,SAEjDC,oBAAoB;AAAA,SAOpBC,OAAO,IAAIC,WAAW;AAAA,SACtBC,QAAQ,IAAIC,YAAY;AAAA,SACxB3B,SAAS,IAAI4B,iBAAiB;AAEtC,IAAIzB,WAAW,GAAG,EAAE,EAAE;EACpB,IAAI,EAAE,SAAS,IAAIW,UAAU,CAAC,IAAIW,WAAW,EAAE;IAE7CX,UAAU,CAACU,OAAO,GAAGC,WAAW;EAClC;EAEA,IAAI,EAAE,UAAU,IAAIX,UAAU,CAAC,IAAIa,YAAY,EAAE;IAE/Cb,UAAU,CAACY,QAAQ,GAAGC,YAAY;EACpC;EAEA,IAAI,EAAE,OAAO,IAAIb,UAAU,CAAC,IAAIc,iBAAiB,EAAE;IAEjDd,UAAU,CAACe,KAAK,GAAGD,iBAAiB;EACtC;AACF"}
@@ -1 +1 @@
1
- {"version":3,"file":"is-browser.d.ts","sourceRoot":"","sources":["../../src/utils/is-browser.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,SAAS,EAAE,OAEkE,CAAC"}
1
+ {"version":3,"file":"is-browser.d.ts","sourceRoot":"","sources":["../../src/utils/is-browser.ts"],"names":[],"mappings":"AAIA,eAAO,MAAM,SAAS,EAAE,OAEkE,CAAC"}
@@ -1 +1 @@
1
- {"version":3,"file":"is-browser.js","names":["isBrowser","process","String","browser"],"sources":["../../src/utils/is-browser.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n/* eslint-disable no-restricted-globals */\nexport const isBrowser: boolean =\n // @ts-ignore process.browser\n typeof process !== 'object' || String(process) !== '[object process]' || process.browser;\n"],"mappings":"AAGA,OAAO,MAAMA,SAAkB,GAE7B,OAAOC,OAAO,KAAK,QAAQ,IAAIC,MAAM,CAACD,OAAO,CAAC,KAAK,kBAAkB,IAAIA,OAAO,CAACE,OAAO"}
1
+ {"version":3,"file":"is-browser.js","names":["isBrowser","process","String","browser"],"sources":["../../src/utils/is-browser.ts"],"sourcesContent":["// loaders.gl, MIT license\n// Copyright (c) vis.gl contributors\n\n/* eslint-disable no-restricted-globals */\nexport const isBrowser: boolean =\n // @ts-ignore process.browser\n typeof process !== 'object' || String(process) !== '[object process]' || process.browser;\n"],"mappings":"AAIA,OAAO,MAAMA,SAAkB,GAE7B,OAAOC,OAAO,KAAK,QAAQ,IAAIC,MAAM,CAACD,OAAO,CAAC,KAAK,kBAAkB,IAAIA,OAAO,CAACE,OAAO"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@loaders.gl/polyfills",
3
- "version": "4.0.0-beta.7",
3
+ "version": "4.0.0",
4
4
  "description": "Polyfills for TextEncoder/TextDecoder",
5
5
  "license": "MIT",
6
6
  "type": "module",
@@ -46,8 +46,8 @@
46
46
  },
47
47
  "dependencies": {
48
48
  "@babel/runtime": "^7.3.1",
49
- "@loaders.gl/crypto": "4.0.0-beta.7",
50
- "@loaders.gl/loader-utils": "4.0.0-beta.7",
49
+ "@loaders.gl/crypto": "4.0.0",
50
+ "@loaders.gl/loader-utils": "4.0.0",
51
51
  "buffer": "^6.0.3",
52
52
  "get-pixels": "^3.3.3",
53
53
  "ndarray": "^1.0.19",
@@ -56,5 +56,5 @@
56
56
  "through": "^2.3.8",
57
57
  "web-streams-polyfill": "^3.2.1"
58
58
  },
59
- "gitHead": "5b6cab0ab5d73212cfa37fa5da6e25ad7ef83fe5"
59
+ "gitHead": "9b4211dc0ecd4134a1638ac0a29c5ea9008fd971"
60
60
  }
@@ -1,4 +1,5 @@
1
1
  // loaders.gl, MIT license
2
+ // Copyright (c) vis.gl contributors
2
3
 
3
4
  import http from 'http';
4
5
  import https from 'https';
@@ -1,4 +1,5 @@
1
1
  // loaders.gl, MIT license
2
+ // Copyright (c) vis.gl contributors
2
3
 
3
4
  import {assert} from '../utils/assert';
4
5
  import {decompressReadStream, concatenateReadStream} from '../filesystems/stream-utils.node';
@@ -1,4 +1,5 @@
1
1
  // loaders.gl, MIT license
2
+ // Copyright (c) vis.gl contributors
2
3
 
3
4
  import fs from 'fs';
4
5
  import {Readable} from 'stream';
@@ -1,4 +1,5 @@
1
1
  // loaders.gl, MIT license
2
+ // Copyright (c) vis.gl contributors
2
3
 
3
4
  import {Stat, RandomAccessFileSystem} from '@loaders.gl/loader-utils';
4
5
  import fsPromise from 'fs/promises';
@@ -1,4 +1,5 @@
1
1
  // loaders.gl, MIT license
2
+ // Copyright (c) vis.gl contributors
2
3
 
3
4
  import zlib from 'zlib';
4
5
  import {Readable} from 'stream';
@@ -1,4 +1,5 @@
1
1
  // loaders.gl, MIT license
2
+ // Copyright (c) vis.gl contributors
2
3
 
3
4
  import getPixels from 'get-pixels';
4
5
 
@@ -1,4 +1,5 @@
1
1
  // loaders.gl, MIT license
2
+ // Copyright (c) vis.gl contributors
2
3
 
3
4
  import getPixels from 'get-pixels';
4
5
 
package/src/index.ts CHANGED
@@ -14,6 +14,8 @@ import {NodeFile} from './filesystems/node-file';
14
14
  import {NodeFileSystem} from './filesystems/node-filesystem';
15
15
  import {fetchNode} from './filesystems/fetch-node';
16
16
 
17
+ import {NodeHash} from './crypto/node-hash';
18
+
17
19
  // NODE VERSION
18
20
  import {versions} from 'node:process';
19
21
  export const nodeVersion = parseInt(versions.node.split('.')[0]);
@@ -43,6 +45,9 @@ globalThis.loaders.NodeFile = NodeFile;
43
45
  globalThis.loaders.NodeFileSystem = NodeFileSystem;
44
46
  globalThis.loaders.fetchNode = fetchNode;
45
47
 
48
+ // CRYPTO POLYFILLS
49
+ globalThis.loaders.NodeHash = NodeHash;
50
+
46
51
  // POLYFILLS: TextEncoder, TextDecoder
47
52
  // - Recent Node versions have these classes but virtually no encodings unless special build.
48
53
  // - Browser: Edge, IE11 do not have these
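
This source-level change mirrors the bundle diff above: `NodeHash` is imported from `./crypto/node-hash` and exposed as `globalThis.loaders.NodeHash`. The class also offers a batch-oriented `hashBatches(asyncIterator, encoding)` generator (shown in the earlier bundle hunk) that forwards chunks unchanged while accumulating a digest and reporting it through an `onEnd` callback. A hedged sketch of that streaming flow; the chunk values and callback body are illustrative:

    import '@loaders.gl/polyfills';

    // Illustrative chunk source; any async iterable of ArrayBuffers works
    async function* chunks() {
      yield new Uint8Array([1, 2, 3]).buffer;
      yield new Uint8Array([4, 5]).buffer;
    }

    const NodeHash = (globalThis as any).loaders.NodeHash;
    const hasher = new NodeHash({
      crypto: {
        algorithm: 'SHA256',
        // Called once all chunks have been consumed, with the accumulated digest
        onEnd: ({hash}: {hash: string}) => console.log('digest:', hash)
      }
    });

    // Chunks pass through untouched while the digest is updated incrementally
    for await (const chunk of hasher.hashBatches(chunks(), 'base64')) {
      void chunk; // consume or re-stream the passthrough chunk here
    }
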
@@ -1,4 +1,5 @@
1
1
  // loaders.gl, MIT license
2
+ // Copyright (c) vis.gl contributors
2
3
 
3
4
  /* eslint-disable no-restricted-globals */
4
5
  export const isBrowser: boolean =