@loaders.gl/zip 4.3.3 → 4.3.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dist.min.js +1 -1
- package/dist/index.cjs +2 -2
- package/dist/index.cjs.map +1 -1
- package/dist/zip-loader.js +1 -1
- package/dist/zip-writer.js +1 -1
- package/package.json +5 -5
package/dist/index.cjs
CHANGED
@@ -55,7 +55,7 @@ module.exports = __toCommonJS(dist_exports);
 
 // dist/zip-loader.js
 var import_jszip = __toESM(require("jszip"), 1);
-var VERSION = true ? "4.3.2" : "latest";
+var VERSION = true ? "4.3.3" : "latest";
 var ZipLoader = {
   dataType: null,
   batchType: null,
@@ -102,7 +102,7 @@ async function loadZipEntry(jsZip, subFilename, options = {}) {
 
 // dist/zip-writer.js
 var import_jszip2 = __toESM(require("jszip"), 1);
-var VERSION2 = true ? "4.3.2" : "latest";
+var VERSION2 = true ? "4.3.3" : "latest";
 var ZipWriter = {
   name: "Zip Archive",
   id: "zip",
package/dist/index.cjs.map
CHANGED
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["index.js", "zip-loader.js", "zip-writer.js", "lib/tar/utils.js", "lib/tar/header.js", "lib/tar/tar.js", "tar-builder.js", "parse-zip/cd-file-header.js", "parse-zip/end-of-central-directory.js", "parse-zip/search-from-the-end.js", "parse-zip/zip64-info-generation.js", "parse-zip/local-file-header.js", "parse-zip/zip-composition.js", "filesystems/zip-filesystem.js", "filesystems/IndexedArchive.js", "hash-file-utility.js"],
-
"sourcesContent": ["// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nexport { ZipLoader } from \"./zip-loader.js\";\nexport { ZipWriter } from \"./zip-writer.js\";\nexport { TarBuilder } from \"./tar-builder.js\";\nexport { parseZipCDFileHeader, makeZipCDHeaderIterator, signature as CD_HEADER_SIGNATURE, generateCDHeader } from \"./parse-zip/cd-file-header.js\";\nexport { parseZipLocalFileHeader, signature as localHeaderSignature, generateLocalHeader } from \"./parse-zip/local-file-header.js\";\nexport { parseEoCDRecord } from \"./parse-zip/end-of-central-directory.js\";\nexport { searchFromTheEnd } from \"./parse-zip/search-from-the-end.js\";\nexport { addOneFile, createZip } from \"./parse-zip/zip-composition.js\";\n// export type {HashElement} from './hash-file-utility';\nexport { IndexedArchive } from \"./filesystems/IndexedArchive.js\";\nexport { parseHashTable, makeHashTableFromZipHeaders, composeHashFile } from \"./hash-file-utility.js\";\nexport { ZipFileSystem, ZIP_COMPRESSION_HANDLERS } from \"./filesystems/zip-filesystem.js\";\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport JSZip from 'jszip';\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof \"4.3.2\" !== 'undefined' ? \"4.3.2\" : 'latest';\nexport const ZipLoader = {\n dataType: null,\n batchType: null,\n id: 'zip',\n module: 'zip',\n name: 'Zip Archive',\n version: VERSION,\n extensions: ['zip'],\n mimeTypes: ['application/zip'],\n category: 'archive',\n tests: ['PK'],\n options: {},\n parse: parseZipAsync\n};\n// TODO - Could return a map of promises, perhaps as an option...\nasync function parseZipAsync(data, options = {}) {\n const promises = [];\n const fileMap = {};\n try {\n const jsZip = new JSZip();\n const zip = await jsZip.loadAsync(data, options);\n // start to load each file in this zip\n zip.forEach((relativePath, zipEntry) => {\n const subFilename = zipEntry.name;\n const promise = loadZipEntry(jsZip, subFilename, options).then((arrayBufferOrError) => {\n fileMap[relativePath] = arrayBufferOrError;\n });\n // Ensure Promise.all doesn't ignore rejected promises.\n promises.push(promise);\n });\n await Promise.all(promises);\n return fileMap;\n }\n catch (error) {\n // @ts-ignore\n options.log.error(`Unable to read zip archive: ${error}`);\n throw error;\n }\n}\nasync function loadZipEntry(jsZip, subFilename, options = {}) {\n // jszip supports both arraybuffer and text, the main loaders.gl types\n // https://stuk.github.io/jszip/documentation/api_zipobject/async.html\n try {\n const arrayBuffer = await jsZip.file(subFilename).async(options.dataType || 'arraybuffer');\n return arrayBuffer;\n }\n catch (error) {\n options.log.error(`Unable to read ${subFilename} from zip archive: ${error}`);\n // Store error in place of data in map\n return error;\n }\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport JSZip from 'jszip';\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof \"4.3.2\" !== 'undefined' ? 
\"4.3.2\" : 'latest';\n/**\n * Zip exporter\n */\nexport const ZipWriter = {\n name: 'Zip Archive',\n id: 'zip',\n module: 'zip',\n version: VERSION,\n extensions: ['zip'],\n category: 'archive',\n mimeTypes: ['application/zip'],\n options: {\n zip: {\n onUpdate: () => { }\n },\n jszip: {}\n },\n encode: encodeZipAsync\n};\nasync function encodeZipAsync(fileMap, options = {}) {\n const jsZip = new JSZip();\n // add files to the zip\n for (const subFileName in fileMap) {\n const subFileData = fileMap[subFileName];\n // jszip supports both arraybuffer and string data (the main loaders.gl types)\n // https://stuk.github.io/jszip/documentation/api_zipobject/async.html\n jsZip.file(subFileName, subFileData, options?.jszip || {});\n }\n const zipOptions = { ...ZipWriter.options.zip, ...options?.zip };\n const jszipOptions = { ...ZipWriter.options?.jszip, ...options.jszip };\n try {\n return await jsZip.generateAsync({ ...jszipOptions, type: 'arraybuffer' }, // generate an arraybuffer\n zipOptions.onUpdate);\n }\n catch (error) {\n options.log.error(`Unable to encode zip archive: ${error}`);\n throw error;\n }\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\n// This file is derived from the tar-js code base under MIT license\n// See https://github.com/beatgammit/tar-js/blob/master/LICENSE\n/*\n * tar-js\n * MIT (c) 2011 T. Jameson Little\n */\n/**\n * Returns the memory area specified by length\n * @param length\n * @returns {Uint8Array}\n */\nexport function clean(length) {\n let i;\n const buffer = new Uint8Array(length);\n for (i = 0; i < length; i += 1) {\n buffer[i] = 0;\n }\n return buffer;\n}\n/**\n * Converting data to a string\n * @param num\n * @param bytes\n * @param base\n * @returns string\n */\nexport function pad(num, bytes, base) {\n const numStr = num.toString(base || 8);\n return '000000000000'.substr(numStr.length + 12 - bytes) + numStr;\n}\n/**\n * Converting input to binary data\n * @param input\n * @param out\n * @param offset\n * @returns {Uint8Array}\n */\nexport function stringToUint8(input, out, offset) {\n let i;\n let length;\n out = out || clean(input.length);\n offset = offset || 0;\n for (i = 0, length = input.length; i < length; i += 1) {\n out[offset] = input.charCodeAt(i);\n offset += 1;\n }\n return out;\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\n// This file is derived from the tar-js code base under MIT license\n// See https://github.com/beatgammit/tar-js/blob/master/LICENSE\n/*\n * tar-js\n * MIT (c) 2011 T. 
Jameson Little\n */\n/* eslint-disable */\nimport * as utils from \"./utils.js\";\n/*\nstruct posix_header { // byte offset\n char name[100]; // 0\n char mode[8]; // 100\n char uid[8]; // 108\n char gid[8]; // 116\n char size[12]; // 124\n char mtime[12]; // 136\n char chksum[8]; // 148\n char typeflag; // 156\n char linkname[100]; // 157\n char magic[6]; // 257\n char version[2]; // 263\n char uname[32]; // 265\n char gname[32]; // 297\n char devmajor[8]; // 329\n char devminor[8]; // 337\n char prefix[155]; // 345\n // 500\n};\n*/\nconst structure = {\n fileName: 100,\n fileMode: 8,\n uid: 8,\n gid: 8,\n fileSize: 12,\n mtime: 12,\n checksum: 8,\n type: 1,\n linkName: 100,\n ustar: 8,\n owner: 32,\n group: 32,\n majorNumber: 8,\n minorNumber: 8,\n filenamePrefix: 155,\n padding: 12\n};\n/**\n * Getting the header\n * @param data\n * @param [cb]\n * @returns {Uint8Array} | Array\n */\nexport function format(data, cb) {\n const buffer = utils.clean(512);\n let offset = 0;\n Object.entries(structure).forEach(([field, length]) => {\n const str = data[field] || '';\n let i;\n let fieldLength;\n for (i = 0, fieldLength = str.length; i < fieldLength; i += 1) {\n buffer[offset] = str.charCodeAt(i);\n offset += 1;\n }\n // space it out with nulls\n offset += length - i;\n });\n if (typeof cb === 'function') {\n return cb(buffer, offset);\n }\n return buffer;\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\n// This file is derived from the tar-js code base under MIT license\n// See https://github.com/beatgammit/tar-js/blob/master/LICENSE\n/*\n * tar-js\n * MIT (c) 2011 T. Jameson Little\n */\nimport { clean, pad, stringToUint8 } from \"./utils.js\";\nimport { format } from \"./header.js\";\nlet blockSize;\nlet headerLength;\nlet inputLength;\nconst recordSize = 512;\nclass Tar {\n written;\n out;\n blocks = [];\n length;\n /**\n * @param [recordsPerBlock]\n */\n constructor(recordsPerBlock) {\n this.written = 0;\n blockSize = (recordsPerBlock || 20) * recordSize;\n this.out = clean(blockSize);\n this.blocks = [];\n this.length = 0;\n this.save = this.save.bind(this);\n this.clear = this.clear.bind(this);\n this.append = this.append.bind(this);\n }\n /**\n * Append a file to the tar archive\n * @param filepath\n * @param input\n * @param [opts]\n */\n // eslint-disable-next-line complexity\n append(filepath, input, opts) {\n let checksum;\n if (typeof input === 'string') {\n input = stringToUint8(input);\n }\n else if (input.constructor && input.constructor !== Uint8Array.prototype.constructor) {\n // @ts-ignore\n const errorInputMatch = /function\\s*([$A-Za-z_][0-9A-Za-z_]*)\\s*\\(/.exec(input.constructor.toString());\n const errorInput = errorInputMatch && errorInputMatch[1];\n const errorMessage = `Invalid input type. 
You gave me: ${errorInput}`;\n throw errorMessage;\n }\n opts = opts || {};\n const mode = opts.mode || parseInt('777', 8) & 0xfff;\n const mtime = opts.mtime || Math.floor(Number(new Date()) / 1000);\n const uid = opts.uid || 0;\n const gid = opts.gid || 0;\n const data = {\n fileName: filepath,\n fileMode: pad(mode, 7),\n uid: pad(uid, 7),\n gid: pad(gid, 7),\n fileSize: pad(input.length, 11),\n mtime: pad(mtime, 11),\n checksum: ' ',\n // 0 = just a file\n type: '0',\n ustar: 'ustar ',\n owner: opts.owner || '',\n group: opts.group || ''\n };\n // calculate the checksum\n checksum = 0;\n Object.keys(data).forEach((key) => {\n let i;\n const value = data[key];\n let length;\n for (i = 0, length = value.length; i < length; i += 1) {\n checksum += value.charCodeAt(i);\n }\n });\n data.checksum = `${pad(checksum, 6)}\\u0000 `;\n const headerArr = format(data);\n headerLength = Math.ceil(headerArr.length / recordSize) * recordSize;\n inputLength = Math.ceil(input.length / recordSize) * recordSize;\n this.blocks.push({\n header: headerArr,\n input,\n headerLength,\n inputLength\n });\n }\n /**\n * Compiling data to a Blob object\n * @returns {Blob}\n */\n save() {\n const buffers = [];\n const chunks = new Array();\n let length = 0;\n const max = Math.pow(2, 20);\n let chunk = new Array();\n this.blocks.forEach((b = []) => {\n if (length + b.headerLength + b.inputLength > max) {\n chunks.push({ blocks: chunk, length });\n chunk = [];\n length = 0;\n }\n chunk.push(b);\n length += b.headerLength + b.inputLength;\n });\n chunks.push({ blocks: chunk, length });\n chunks.forEach((c = []) => {\n const buffer = new Uint8Array(c.length);\n let written = 0;\n c.blocks.forEach((b = []) => {\n buffer.set(b.header, written);\n written += b.headerLength;\n buffer.set(b.input, written);\n written += b.inputLength;\n });\n buffers.push(buffer);\n });\n buffers.push(new Uint8Array(2 * recordSize));\n return new Blob(buffers, { type: 'octet/stream' });\n }\n /**\n * Clear the data by its blocksize\n */\n clear() {\n this.written = 0;\n this.out = clean(blockSize);\n }\n}\nexport default Tar;\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport Tar from \"./lib/tar/tar.js\";\nconst TAR_BUILDER_OPTIONS = {\n recordsPerBlock: 20\n};\n/**\n * Build a tar file by adding files\n */\nexport class TarBuilder {\n static get properties() {\n return {\n id: 'tar',\n name: 'TAR',\n extensions: ['tar'],\n mimeTypes: ['application/x-tar'],\n builder: TarBuilder,\n options: TAR_BUILDER_OPTIONS\n };\n }\n options;\n tape;\n count = 0;\n constructor(options) {\n this.options = { ...TAR_BUILDER_OPTIONS, ...options };\n this.tape = new Tar(this.options.recordsPerBlock);\n }\n /** Adds a file to the archive. 
*/\n addFile(filename, buffer) {\n this.tape.append(filename, new Uint8Array(buffer));\n this.count++;\n }\n async build() {\n return new Response(this.tape.save()).arrayBuffer();\n }\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { DataViewFile, compareArrayBuffers, concatenateArrayBuffers } from '@loaders.gl/loader-utils';\nimport { parseEoCDRecord } from \"./end-of-central-directory.js\";\nimport { createZip64Info, setFieldToNumber } from \"./zip64-info-generation.js\";\n// offsets accroding to https://en.wikipedia.org/wiki/ZIP_(file_format)\nconst CD_COMPRESSED_SIZE_OFFSET = 20;\nconst CD_UNCOMPRESSED_SIZE_OFFSET = 24;\nconst CD_FILE_NAME_LENGTH_OFFSET = 28;\nconst CD_EXTRA_FIELD_LENGTH_OFFSET = 30;\nconst CD_START_DISK_OFFSET = 32;\nconst CD_LOCAL_HEADER_OFFSET_OFFSET = 42;\nconst CD_FILE_NAME_OFFSET = 46n;\nexport const signature = new Uint8Array([0x50, 0x4b, 0x01, 0x02]);\n/**\n * Parses central directory file header of zip file\n * @param headerOffset - offset in the archive where header starts\n * @param buffer - buffer containing whole array\n * @returns Info from the header\n */\nexport const parseZipCDFileHeader = async (headerOffset, file) => {\n if (headerOffset >= file.length) {\n return null;\n }\n const mainHeader = new DataView(await file.slice(headerOffset, headerOffset + CD_FILE_NAME_OFFSET));\n const magicBytes = mainHeader.buffer.slice(0, 4);\n if (!compareArrayBuffers(magicBytes, signature.buffer)) {\n return null;\n }\n const compressedSize = BigInt(mainHeader.getUint32(CD_COMPRESSED_SIZE_OFFSET, true));\n const uncompressedSize = BigInt(mainHeader.getUint32(CD_UNCOMPRESSED_SIZE_OFFSET, true));\n const extraFieldLength = mainHeader.getUint16(CD_EXTRA_FIELD_LENGTH_OFFSET, true);\n const startDisk = BigInt(mainHeader.getUint16(CD_START_DISK_OFFSET, true));\n const fileNameLength = mainHeader.getUint16(CD_FILE_NAME_LENGTH_OFFSET, true);\n const additionalHeader = await file.slice(headerOffset + CD_FILE_NAME_OFFSET, headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength));\n const filenameBytes = additionalHeader.slice(0, fileNameLength);\n const fileName = new TextDecoder().decode(filenameBytes);\n const extraOffset = headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength);\n const oldFormatOffset = mainHeader.getUint32(CD_LOCAL_HEADER_OFFSET_OFFSET, true);\n const localHeaderOffset = BigInt(oldFormatOffset);\n const extraField = new DataView(additionalHeader.slice(fileNameLength, additionalHeader.byteLength));\n // looking for info that might be also be in zip64 extra field\n const zip64data = {\n uncompressedSize,\n compressedSize,\n localHeaderOffset,\n startDisk\n };\n const res = findZip64DataInExtra(zip64data, extraField);\n return {\n ...zip64data,\n ...res,\n extraFieldLength,\n fileNameLength,\n fileName,\n extraOffset\n };\n};\n/**\n * Create iterator over files of zip archive\n * @param fileProvider - file provider that provider random access to the file\n */\nexport async function* makeZipCDHeaderIterator(fileProvider) {\n const { cdStartOffset, cdByteSize } = await parseEoCDRecord(fileProvider);\n const centralDirectory = new DataViewFile(new DataView(await fileProvider.slice(cdStartOffset, cdStartOffset + cdByteSize)));\n let cdHeader = await parseZipCDFileHeader(0n, centralDirectory);\n while (cdHeader) {\n yield cdHeader;\n cdHeader = await parseZipCDFileHeader(cdHeader.extraOffset + BigInt(cdHeader.extraFieldLength), centralDirectory);\n }\n}\n/**\n * returns the number 
written in the provided bytes\n * @param bytes two bytes containing the number\n * @returns the number written in the provided bytes\n */\nconst getUint16 = (...bytes) => {\n return bytes[0] + bytes[1] * 16;\n};\n/**\n * reads all nesessary data from zip64 record in the extra data\n * @param zip64data values that might be in zip64 record\n * @param extraField full extra data\n * @returns data read from zip64\n */\nconst findZip64DataInExtra = (zip64data, extraField) => {\n const zip64dataList = findExpectedData(zip64data);\n const zip64DataRes = {};\n if (zip64dataList.length > 0) {\n // total length of data in zip64 notation in bytes\n const zip64chunkSize = zip64dataList.reduce((sum, curr) => sum + curr.length, 0);\n // we're looking for the zip64 nontation header (0x0001)\n // and a size field with a correct value next to it\n const offsetInExtraData = new Uint8Array(extraField.buffer).findIndex((_val, i, arr) => getUint16(arr[i], arr[i + 1]) === 0x0001 &&\n getUint16(arr[i + 2], arr[i + 3]) === zip64chunkSize);\n // then we read all the nesessary fields from the zip64 data\n let bytesRead = 0;\n for (const note of zip64dataList) {\n const offset = bytesRead;\n zip64DataRes[note.name] = extraField.getBigUint64(offsetInExtraData + 4 + offset, true);\n bytesRead = offset + note.length;\n }\n }\n return zip64DataRes;\n};\n/**\n * frind data that's expected to be in zip64\n * @param zip64data values that might be in zip64 record\n * @returns zip64 data description\n */\nconst findExpectedData = (zip64data) => {\n // We define fields that should be in zip64 data\n const zip64dataList = [];\n if (zip64data.uncompressedSize === BigInt(0xffffffff)) {\n zip64dataList.push({ name: 'uncompressedSize', length: 8 });\n }\n if (zip64data.compressedSize === BigInt(0xffffffff)) {\n zip64dataList.push({ name: 'compressedSize', length: 8 });\n }\n if (zip64data.localHeaderOffset === BigInt(0xffffffff)) {\n zip64dataList.push({ name: 'localHeaderOffset', length: 8 });\n }\n if (zip64data.startDisk === BigInt(0xffffffff)) {\n zip64dataList.push({ name: 'startDisk', length: 4 });\n }\n return zip64dataList;\n};\n/**\n * generates cd header for the file\n * @param options info that can be placed into cd header\n * @returns buffer with header\n */\nexport function generateCDHeader(options) {\n const optionsToUse = {\n ...options,\n fnlength: options.fileName.length,\n extraLength: 0\n };\n let zip64header = new ArrayBuffer(0);\n const optionsToZip64 = {};\n if (optionsToUse.offset >= 0xffffffff) {\n optionsToZip64.offset = optionsToUse.offset;\n optionsToUse.offset = BigInt(0xffffffff);\n }\n if (optionsToUse.length >= 0xffffffff) {\n optionsToZip64.size = optionsToUse.length;\n optionsToUse.length = 0xffffffff;\n }\n if (Object.keys(optionsToZip64).length) {\n zip64header = createZip64Info(optionsToZip64);\n optionsToUse.extraLength = zip64header.byteLength;\n }\n const header = new DataView(new ArrayBuffer(Number(CD_FILE_NAME_OFFSET)));\n for (const field of ZIP_HEADER_FIELDS) {\n setFieldToNumber(header, field.size, field.offset, optionsToUse[field.name ?? ''] ?? field.default ?? 
0);\n }\n const encodedName = new TextEncoder().encode(optionsToUse.fileName);\n const resHeader = concatenateArrayBuffers(header.buffer, encodedName, zip64header);\n return resHeader;\n}\n/** Fields map */\nconst ZIP_HEADER_FIELDS = [\n // Central directory file header signature = 0x02014b50\n {\n offset: 0,\n size: 4,\n default: new DataView(signature.buffer).getUint32(0, true)\n },\n // Version made by\n {\n offset: 4,\n size: 2,\n default: 45\n },\n // Version needed to extract (minimum)\n {\n offset: 6,\n size: 2,\n default: 45\n },\n // General purpose bit flag\n {\n offset: 8,\n size: 2,\n default: 0\n },\n // Compression method\n {\n offset: 10,\n size: 2,\n default: 0\n },\n // File last modification time\n {\n offset: 12,\n size: 2,\n default: 0\n },\n // File last modification date\n {\n offset: 14,\n size: 2,\n default: 0\n },\n // CRC-32 of uncompressed data\n {\n offset: 16,\n size: 4,\n name: 'crc32'\n },\n // Compressed size (or 0xffffffff for ZIP64)\n {\n offset: 20,\n size: 4,\n name: 'length'\n },\n // Uncompressed size (or 0xffffffff for ZIP64)\n {\n offset: 24,\n size: 4,\n name: 'length'\n },\n // File name length (n)\n {\n offset: 28,\n size: 2,\n name: 'fnlength'\n },\n // Extra field length (m)\n {\n offset: 30,\n size: 2,\n default: 0,\n name: 'extraLength'\n },\n // File comment length (k)\n {\n offset: 32,\n size: 2,\n default: 0\n },\n // Disk number where file starts (or 0xffff for ZIP64)\n {\n offset: 34,\n size: 2,\n default: 0\n },\n // Internal file attributes\n {\n offset: 36,\n size: 2,\n default: 0\n },\n // External file attributes\n {\n offset: 38,\n size: 4,\n default: 0\n },\n // Relative offset of local file header\n {\n offset: 42,\n size: 4,\n name: 'offset'\n }\n];\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { compareArrayBuffers, concatenateArrayBuffers } from '@loaders.gl/loader-utils';\nimport { searchFromTheEnd } from \"./search-from-the-end.js\";\nimport { setFieldToNumber } from \"./zip64-info-generation.js\";\nconst eoCDSignature = new Uint8Array([0x50, 0x4b, 0x05, 0x06]);\nconst zip64EoCDLocatorSignature = new Uint8Array([0x50, 0x4b, 0x06, 0x07]);\nconst zip64EoCDSignature = new Uint8Array([0x50, 0x4b, 0x06, 0x06]);\n// offsets accroding to https://en.wikipedia.org/wiki/ZIP_(file_format)\nconst CD_RECORDS_NUMBER_OFFSET = 8n;\nconst CD_RECORDS_NUMBER_ON_DISC_OFFSET = 10n;\nconst CD_CD_BYTE_SIZE_OFFSET = 12n;\nconst CD_START_OFFSET_OFFSET = 16n;\nconst CD_COMMENT_OFFSET = 22n;\nconst ZIP64_EOCD_START_OFFSET_OFFSET = 8n;\nconst ZIP64_CD_RECORDS_NUMBER_OFFSET = 24n;\nconst ZIP64_CD_RECORDS_NUMBER_ON_DISC_OFFSET = 32n;\nconst ZIP64_CD_CD_BYTE_SIZE_OFFSET = 40n;\nconst ZIP64_CD_START_OFFSET_OFFSET = 48n;\nconst ZIP64_COMMENT_OFFSET = 56n;\n/**\n * Parses end of central directory record of zip file\n * @param file - FileProvider instance\n * @returns Info from the header\n */\nexport const parseEoCDRecord = async (file) => {\n const zipEoCDOffset = await searchFromTheEnd(file, eoCDSignature);\n let cdRecordsNumber = BigInt(await file.getUint16(zipEoCDOffset + CD_RECORDS_NUMBER_OFFSET));\n let cdByteSize = BigInt(await file.getUint32(zipEoCDOffset + CD_CD_BYTE_SIZE_OFFSET));\n let cdStartOffset = BigInt(await file.getUint32(zipEoCDOffset + CD_START_OFFSET_OFFSET));\n let zip64EoCDLocatorOffset = zipEoCDOffset - 20n;\n let zip64EoCDOffset = 0n;\n const magicBytes = await file.slice(zip64EoCDLocatorOffset, zip64EoCDLocatorOffset + 4n);\n if (compareArrayBuffers(magicBytes, 
zip64EoCDLocatorSignature)) {\n zip64EoCDOffset = await file.getBigUint64(zip64EoCDLocatorOffset + ZIP64_EOCD_START_OFFSET_OFFSET);\n const endOfCDMagicBytes = await file.slice(zip64EoCDOffset, zip64EoCDOffset + 4n);\n if (!compareArrayBuffers(endOfCDMagicBytes, zip64EoCDSignature.buffer)) {\n throw new Error('zip64 EoCD not found');\n }\n cdRecordsNumber = await file.getBigUint64(zip64EoCDOffset + ZIP64_CD_RECORDS_NUMBER_OFFSET);\n cdByteSize = await file.getBigUint64(zip64EoCDOffset + ZIP64_CD_CD_BYTE_SIZE_OFFSET);\n cdStartOffset = await file.getBigUint64(zip64EoCDOffset + ZIP64_CD_START_OFFSET_OFFSET);\n }\n else {\n zip64EoCDLocatorOffset = 0n;\n }\n return {\n cdRecordsNumber,\n cdStartOffset,\n cdByteSize,\n offsets: {\n zip64EoCDOffset,\n zip64EoCDLocatorOffset,\n zipEoCDOffset\n }\n };\n};\n/**\n * updates EoCD record to add more files to the archieve\n * @param eocdBody buffer containing header\n * @param oldEoCDOffsets info read from EoCD record befor updating\n * @param newCDStartOffset CD start offset to be updated\n * @param eocdStartOffset EoCD start offset to be updated\n * @returns new EoCD header\n */\nexport function updateEoCD(eocdBody, oldEoCDOffsets, newCDStartOffset, eocdStartOffset, newCDRecordsNumber) {\n const eocd = new DataView(eocdBody);\n const classicEoCDOffset = oldEoCDOffsets.zip64EoCDOffset\n ? oldEoCDOffsets.zipEoCDOffset - oldEoCDOffsets.zip64EoCDOffset\n : 0n;\n // updating classic EoCD record with new CD records number in general and on disc\n if (Number(newCDRecordsNumber) <= 0xffff) {\n setFieldToNumber(eocd, 2, classicEoCDOffset + CD_RECORDS_NUMBER_OFFSET, newCDRecordsNumber);\n setFieldToNumber(eocd, 2, classicEoCDOffset + CD_RECORDS_NUMBER_ON_DISC_OFFSET, newCDRecordsNumber);\n }\n // updating zip64 EoCD record with new size of CD\n if (eocdStartOffset - newCDStartOffset <= 0xffffffff) {\n setFieldToNumber(eocd, 4, classicEoCDOffset + CD_CD_BYTE_SIZE_OFFSET, eocdStartOffset - newCDStartOffset);\n }\n // updating classic EoCD record with new CD start offset\n if (newCDStartOffset < 0xffffffff) {\n setFieldToNumber(eocd, 4, classicEoCDOffset + CD_START_OFFSET_OFFSET, newCDStartOffset);\n }\n // updating zip64 EoCD locator and record with new EoCD record start offset and cd records number\n if (oldEoCDOffsets.zip64EoCDLocatorOffset && oldEoCDOffsets.zip64EoCDOffset) {\n // updating zip64 EoCD locator with new EoCD record start offset\n const locatorOffset = oldEoCDOffsets.zip64EoCDLocatorOffset - oldEoCDOffsets.zip64EoCDOffset;\n setFieldToNumber(eocd, 8, locatorOffset + ZIP64_EOCD_START_OFFSET_OFFSET, eocdStartOffset);\n // updating zip64 EoCD record with new cd start offset\n setFieldToNumber(eocd, 8, ZIP64_CD_START_OFFSET_OFFSET, newCDStartOffset);\n // updating zip64 EoCD record with new cd records number\n setFieldToNumber(eocd, 8, ZIP64_CD_RECORDS_NUMBER_OFFSET, newCDRecordsNumber);\n setFieldToNumber(eocd, 8, ZIP64_CD_RECORDS_NUMBER_ON_DISC_OFFSET, newCDRecordsNumber);\n // updating zip64 EoCD record with new size of CD\n setFieldToNumber(eocd, 8, ZIP64_CD_CD_BYTE_SIZE_OFFSET, eocdStartOffset - newCDStartOffset);\n }\n return new Uint8Array(eocd.buffer);\n}\n/**\n * generates EoCD record\n * @param options data to generate EoCD record\n * @returns ArrayBuffer with EoCD record\n */\nexport function generateEoCD(options) {\n const header = new DataView(new ArrayBuffer(Number(CD_COMMENT_OFFSET)));\n for (const field of EOCD_FIELDS) {\n setFieldToNumber(header, field.size, field.offset, options[field.name ?? ''] ?? field.default ?? 
0);\n }\n const locator = generateZip64InfoLocator(options);\n const zip64Record = generateZip64Info(options);\n return concatenateArrayBuffers(zip64Record, locator, header.buffer);\n}\n/** standart EoCD fields */\nconst EOCD_FIELDS = [\n // End of central directory signature = 0x06054b50\n {\n offset: 0,\n size: 4,\n default: new DataView(eoCDSignature.buffer).getUint32(0, true)\n },\n // Number of this disk (or 0xffff for ZIP64)\n {\n offset: 4,\n size: 2,\n default: 0\n },\n // Disk where central directory starts (or 0xffff for ZIP64)\n {\n offset: 6,\n size: 2,\n default: 0\n },\n // Number of central directory records on this disk (or 0xffff for ZIP64)\n {\n offset: 8,\n size: 2,\n name: 'recordsNumber'\n },\n // Total number of central directory records (or 0xffff for ZIP64)\n {\n offset: 10,\n size: 2,\n name: 'recordsNumber'\n },\n // Size of central directory (bytes) (or 0xffffffff for ZIP64)\n {\n offset: 12,\n size: 4,\n name: 'cdSize'\n },\n // Offset of start of central directory, relative to start of archive (or 0xffffffff for ZIP64)\n {\n offset: 16,\n size: 4,\n name: 'cdOffset'\n },\n // Comment length (n)\n {\n offset: 20,\n size: 2,\n default: 0\n }\n];\n/**\n * generates eocd zip64 record\n * @param options data to generate eocd zip64 record\n * @returns buffer with eocd zip64 record\n */\nfunction generateZip64Info(options) {\n const record = new DataView(new ArrayBuffer(Number(ZIP64_COMMENT_OFFSET)));\n for (const field of ZIP64_EOCD_FIELDS) {\n setFieldToNumber(record, field.size, field.offset, options[field.name ?? ''] ?? field.default ?? 0);\n }\n return record.buffer;\n}\n/**\n * generates eocd zip64 record locator\n * @param options data to generate eocd zip64 record\n * @returns buffer with eocd zip64 record\n */\nfunction generateZip64InfoLocator(options) {\n const locator = new DataView(new ArrayBuffer(Number(20)));\n for (const field of ZIP64_EOCD_LOCATOR_FIELDS) {\n setFieldToNumber(locator, field.size, field.offset, options[field.name ?? ''] ?? field.default ?? 
0);\n }\n return locator.buffer;\n}\n/** zip64 EoCD record locater fields */\nconst ZIP64_EOCD_LOCATOR_FIELDS = [\n // zip64 end of central dir locator signature\n {\n offset: 0,\n size: 4,\n default: new DataView(zip64EoCDLocatorSignature.buffer).getUint32(0, true)\n },\n // number of the disk with the start of the zip64 end of\n {\n offset: 4,\n size: 4,\n default: 0\n },\n // start of the zip64 end of central directory\n {\n offset: 8,\n size: 8,\n name: 'eoCDStart'\n },\n // total number of disks\n {\n offset: 16,\n size: 4,\n default: 1\n }\n];\n/** zip64 EoCD recodrd fields */\nconst ZIP64_EOCD_FIELDS = [\n // End of central directory signature = 0x06064b50\n {\n offset: 0,\n size: 4,\n default: new DataView(zip64EoCDSignature.buffer).getUint32(0, true)\n },\n // Size of the EOCD64 minus 12\n {\n offset: 4,\n size: 8,\n default: 44\n },\n // Version made by\n {\n offset: 12,\n size: 2,\n default: 45\n },\n // Version needed to extract (minimum)\n {\n offset: 14,\n size: 2,\n default: 45\n },\n // Number of this disk\n {\n offset: 16,\n size: 4,\n default: 0\n },\n // Disk where central directory starts\n {\n offset: 20,\n size: 4,\n default: 0\n },\n // Number of central directory records on this disk\n {\n offset: 24,\n size: 8,\n name: 'recordsNumber'\n },\n // Total number of central directory records\n {\n offset: 32,\n size: 8,\n name: 'recordsNumber'\n },\n // Size of central directory (bytes)\n {\n offset: 40,\n size: 8,\n name: 'cdSize'\n },\n // Offset of start of central directory, relative to start of archive\n {\n offset: 48,\n size: 8,\n name: 'cdOffset'\n }\n];\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nconst buffLength = 1024;\n/**\n * looking for the last occurrence of the provided\n * @param file\n * @param target\n * @returns\n */\nexport const searchFromTheEnd = async (file, target) => {\n const searchWindow = [\n await file.getUint8(file.length - 1n),\n await file.getUint8(file.length - 2n),\n await file.getUint8(file.length - 3n),\n undefined\n ];\n let targetOffset = -1;\n // looking for the last record in the central directory\n let point = file.length - 4n;\n do {\n const prevPoint = point;\n point -= BigInt(buffLength);\n point = point >= 0n ? point : 0n;\n const buff = new Uint8Array(await file.slice(point, prevPoint));\n for (let i = buff.length - 1; i > -1; i--) {\n searchWindow[3] = searchWindow[2];\n searchWindow[2] = searchWindow[1];\n searchWindow[1] = searchWindow[0];\n searchWindow[0] = buff[i];\n if (searchWindow.every((val, index) => val === target[index])) {\n targetOffset = i;\n break;\n }\n }\n } while (targetOffset === -1 && point > 0n);\n return point + BigInt(targetOffset);\n};\n", "import { concatenateArrayBuffers } from '@loaders.gl/loader-utils';\nexport const signature = new Uint8Array([0x01, 0x00]);\n/**\n * creates zip64 extra field\n * @param options info that can be placed into zip64 field\n * @returns buffer with field\n */\nexport function createZip64Info(options) {\n const optionsToUse = {\n ...options,\n zip64Length: (options.offset ? 1 : 0) * 8 + (options.size ? 1 : 0) * 16\n };\n const arraysToConcat = [];\n for (const field of ZIP64_FIELDS) {\n if (!optionsToUse[field.name ?? ''] && !field.default) {\n continue; // eslint-disable-line no-continue\n }\n const newValue = new DataView(new ArrayBuffer(field.size));\n NUMBER_SETTERS[field.size](newValue, 0, optionsToUse[field.name ?? ''] ?? 
field.default);\n arraysToConcat.push(newValue.buffer);\n }\n return concatenateArrayBuffers(...arraysToConcat);\n}\n/**\n * Writes values into buffer according to the bytes amount\n * @param header header where to write the data\n * @param fieldSize size of the field in bytes\n * @param fieldOffset offset of the field\n * @param value value to be written\n */\nexport function setFieldToNumber(header, fieldSize, fieldOffset, value) {\n NUMBER_SETTERS[fieldSize](header, Number(fieldOffset), value);\n}\n/** functions to write values into buffer according to the bytes amount */\nconst NUMBER_SETTERS = {\n 2: (header, offset, value) => {\n header.setUint16(offset, Number(value > 0xffff ? 0xffff : value), true);\n },\n 4: (header, offset, value) => {\n header.setUint32(offset, Number(value > 0xffffffff ? 0xffffffff : value), true);\n },\n 8: (header, offset, value) => {\n header.setBigUint64(offset, BigInt(value), true);\n }\n};\n/** zip64 info fields description, we need it as a pattern to build a zip64 info */\nconst ZIP64_FIELDS = [\n // Header ID 0x0001\n {\n size: 2,\n default: new DataView(signature.buffer).getUint16(0, true)\n },\n // Size of the extra field chunk (8, 16, 24 or 28)\n {\n size: 2,\n name: 'zip64Length'\n },\n // Original uncompressed file size\n {\n size: 8,\n name: 'size'\n },\n // Size of compressed data\n {\n size: 8,\n name: 'size'\n },\n // Offset of local header record\n {\n size: 8,\n name: 'offset'\n }\n];\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { compareArrayBuffers, concatenateArrayBuffers } from '@loaders.gl/loader-utils';\nimport { createZip64Info, setFieldToNumber } from \"./zip64-info-generation.js\";\n// offsets accroding to https://en.wikipedia.org/wiki/ZIP_(file_format)\nconst COMPRESSION_METHOD_OFFSET = 8;\nconst COMPRESSED_SIZE_OFFSET = 18;\nconst UNCOMPRESSED_SIZE_OFFSET = 22;\nconst FILE_NAME_LENGTH_OFFSET = 26;\nconst EXTRA_FIELD_LENGTH_OFFSET = 28;\nconst FILE_NAME_OFFSET = 30n;\nexport const signature = new Uint8Array([0x50, 0x4b, 0x03, 0x04]);\n/**\n * Parses local file header of zip file\n * @param headerOffset - offset in the archive where header starts\n * @param buffer - buffer containing whole array\n * @returns Info from the header\n */\nexport const parseZipLocalFileHeader = async (headerOffset, file) => {\n const mainHeader = new DataView(await file.slice(headerOffset, headerOffset + FILE_NAME_OFFSET));\n const magicBytes = mainHeader.buffer.slice(0, 4);\n if (!compareArrayBuffers(magicBytes, signature)) {\n return null;\n }\n const fileNameLength = mainHeader.getUint16(FILE_NAME_LENGTH_OFFSET, true);\n const extraFieldLength = mainHeader.getUint16(EXTRA_FIELD_LENGTH_OFFSET, true);\n const additionalHeader = await file.slice(headerOffset + FILE_NAME_OFFSET, headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength));\n const fileNameBuffer = additionalHeader.slice(0, fileNameLength);\n const extraDataBuffer = new DataView(additionalHeader.slice(fileNameLength, additionalHeader.byteLength));\n const fileName = new TextDecoder().decode(fileNameBuffer).split('\\\\').join('/');\n let fileDataOffset = headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength);\n const compressionMethod = mainHeader.getUint16(COMPRESSION_METHOD_OFFSET, true);\n let compressedSize = BigInt(mainHeader.getUint32(COMPRESSED_SIZE_OFFSET, true)); // add zip 64 logic\n let uncompressedSize = BigInt(mainHeader.getUint32(UNCOMPRESSED_SIZE_OFFSET, true)); // add zip 64 logic\n let 
offsetInZip64Data = 4;\n // looking for info that might be also be in zip64 extra field\n if (uncompressedSize === BigInt(0xffffffff)) {\n uncompressedSize = extraDataBuffer.getBigUint64(offsetInZip64Data, true);\n offsetInZip64Data += 8;\n }\n if (compressedSize === BigInt(0xffffffff)) {\n compressedSize = extraDataBuffer.getBigUint64(offsetInZip64Data, true);\n offsetInZip64Data += 8;\n }\n if (fileDataOffset === BigInt(0xffffffff)) {\n fileDataOffset = extraDataBuffer.getBigUint64(offsetInZip64Data, true); // setting it to the one from zip64\n }\n return {\n fileNameLength,\n fileName,\n extraFieldLength,\n fileDataOffset,\n compressedSize,\n compressionMethod\n };\n};\n/**\n * generates local header for the file\n * @param options info that can be placed into local header\n * @returns buffer with header\n */\nexport function generateLocalHeader(options) {\n const optionsToUse = {\n ...options,\n extraLength: 0,\n fnlength: options.fileName.length\n };\n let zip64header = new ArrayBuffer(0);\n const optionsToZip64 = {};\n if (optionsToUse.length >= 0xffffffff) {\n optionsToZip64.size = optionsToUse.length;\n optionsToUse.length = 0xffffffff;\n }\n if (Object.keys(optionsToZip64).length) {\n zip64header = createZip64Info(optionsToZip64);\n optionsToUse.extraLength = zip64header.byteLength;\n }\n // base length without file name and extra info is static\n const header = new DataView(new ArrayBuffer(Number(FILE_NAME_OFFSET)));\n for (const field of ZIP_HEADER_FIELDS) {\n setFieldToNumber(header, field.size, field.offset, optionsToUse[field.name ?? ''] ?? field.default ?? 0);\n }\n const encodedName = new TextEncoder().encode(optionsToUse.fileName);\n const resHeader = concatenateArrayBuffers(header.buffer, encodedName, zip64header);\n return resHeader;\n}\nconst ZIP_HEADER_FIELDS = [\n // Local file header signature = 0x04034b50\n {\n offset: 0,\n size: 4,\n default: new DataView(signature.buffer).getUint32(0, true)\n },\n // Version needed to extract (minimum)\n {\n offset: 4,\n size: 2,\n default: 45\n },\n // General purpose bit flag\n {\n offset: 6,\n size: 2,\n default: 0\n },\n // Compression method\n {\n offset: 8,\n size: 2,\n default: 0\n },\n // File last modification time\n {\n offset: 10,\n size: 2,\n default: 0\n },\n // File last modification date\n {\n offset: 12,\n size: 2,\n default: 0\n },\n // CRC-32 of uncompressed data\n {\n offset: 14,\n size: 4,\n name: 'crc32'\n },\n // Compressed size (or 0xffffffff for ZIP64)\n {\n offset: 18,\n size: 4,\n name: 'length'\n },\n // Uncompressed size (or 0xffffffff for ZIP64)\n {\n offset: 22,\n size: 4,\n name: 'length'\n },\n // File name length (n)\n {\n offset: 26,\n size: 2,\n name: 'fnlength'\n },\n // Extra field length (m)\n {\n offset: 28,\n size: 2,\n default: 0,\n name: 'extraLength'\n }\n];\n", "import { FileHandleFile, concatenateArrayBuffers, path, NodeFilesystem, NodeFile } from '@loaders.gl/loader-utils';\nimport { generateEoCD, parseEoCDRecord, updateEoCD } from \"./end-of-central-directory.js\";\nimport { CRC32Hash } from '@loaders.gl/crypto';\nimport { generateLocalHeader } from \"./local-file-header.js\";\nimport { generateCDHeader } from \"./cd-file-header.js\";\nimport { fetchFile } from '@loaders.gl/core';\n/**\n * cut off CD and EoCD records from zip file\n * @param provider zip file\n * @returns tuple with three values: CD, EoCD record, EoCD information\n */\nasync function cutTheTailOff(provider) {\n // define where the body ends\n const oldEoCDinfo = await parseEoCDRecord(provider);\n const 
oldCDStartOffset = oldEoCDinfo.cdStartOffset;\n // define cd length\n const oldCDLength = Number(oldEoCDinfo.offsets.zip64EoCDOffset\n ? oldEoCDinfo.offsets.zip64EoCDOffset - oldCDStartOffset\n : oldEoCDinfo.offsets.zipEoCDOffset - oldCDStartOffset);\n // cut off everything except of archieve body\n const zipEnding = await provider.slice(oldCDStartOffset, provider.length);\n await provider.truncate(Number(oldCDStartOffset));\n // divide cd body and eocd record\n const oldCDBody = zipEnding.slice(0, oldCDLength);\n const eocdBody = zipEnding.slice(oldCDLength, zipEnding.byteLength);\n return [oldCDBody, eocdBody, oldEoCDinfo];\n}\n/**\n * generates CD and local headers for the file\n * @param fileName name of the file\n * @param fileToAdd buffer with the file\n * @param localFileHeaderOffset offset of the file local header\n * @returns tuple with two values: local header and file body, cd header\n */\nasync function generateFileHeaders(fileName, fileToAdd, localFileHeaderOffset) {\n // generating CRC32 of the content\n const newFileCRC322 = parseInt(await new CRC32Hash().hash(fileToAdd, 'hex'), 16);\n // generate local header for the file\n const newFileLocalHeader = generateLocalHeader({\n crc32: newFileCRC322,\n fileName,\n length: fileToAdd.byteLength\n });\n // generate hash file cd header\n const newFileCDHeader = generateCDHeader({\n crc32: newFileCRC322,\n fileName,\n offset: localFileHeaderOffset,\n length: fileToAdd.byteLength\n });\n return [\n new Uint8Array(concatenateArrayBuffers(newFileLocalHeader, fileToAdd)),\n new Uint8Array(newFileCDHeader)\n ];\n}\n/**\n * adds one file in the end of the archieve\n * @param zipUrl path to the file\n * @param fileToAdd new file body\n * @param fileName new file name\n */\nexport async function addOneFile(zipUrl, fileToAdd, fileName) {\n // init file handler\n const provider = new FileHandleFile(zipUrl, true);\n const [oldCDBody, eocdBody, oldEoCDinfo] = await cutTheTailOff(provider);\n // remember the new file local header start offset\n const newFileOffset = provider.length;\n const [localPart, cdHeaderPart] = await generateFileHeaders(fileName, fileToAdd, newFileOffset);\n // write down the file local header\n await provider.append(localPart);\n // add the file CD header to the CD\n const newCDBody = concatenateArrayBuffers(oldCDBody, cdHeaderPart);\n // remember the CD start offset\n const newCDStartOffset = provider.length;\n // write down new CD\n await provider.append(new Uint8Array(newCDBody));\n // remember where eocd starts\n const eocdOffset = provider.length;\n await provider.append(updateEoCD(eocdBody, oldEoCDinfo.offsets, newCDStartOffset, eocdOffset, oldEoCDinfo.cdRecordsNumber + 1n));\n}\n/**\n * creates zip archive with no compression\n * @note This is a node specific function that works on files\n * @param inputPath path where files for the achive are stored\n * @param outputPath path where zip archive will be placed\n */\nexport async function createZip(inputPath, outputPath, createAdditionalData) {\n const fileIterator = getFileIterator(inputPath);\n const resFile = new NodeFile(outputPath, 'w');\n const fileList = [];\n const cdArray = [];\n for await (const file of fileIterator) {\n await addFile(file, resFile, cdArray, fileList);\n }\n if (createAdditionalData) {\n const additionaldata = await createAdditionalData(fileList);\n await addFile(additionaldata, resFile, cdArray);\n }\n const cdOffset = (await resFile.stat()).bigsize;\n const cd = concatenateArrayBuffers(...cdArray);\n await resFile.append(new 
Uint8Array(cd));\n const eoCDStart = (await resFile.stat()).bigsize;\n await resFile.append(new Uint8Array(generateEoCD({ recordsNumber: cdArray.length, cdSize: cd.byteLength, cdOffset, eoCDStart })));\n}\n/**\n * Adds file to zip parts\n * @param file file to add\n * @param resFile zip file body\n * @param cdArray zip file central directory\n * @param fileList list of file offsets\n */\nasync function addFile(file, resFile, cdArray, fileList) {\n const size = (await resFile.stat()).bigsize;\n fileList?.push({ fileName: file.path, localHeaderOffset: size });\n const [localPart, cdHeaderPart] = await generateFileHeaders(file.path, file.file, size);\n await resFile.append(localPart);\n cdArray.push(cdHeaderPart);\n}\n/**\n * creates iterator providing buffer with file content and path to every file in the input folder\n * @param inputPath path to the input folder\n * @returns iterator\n */\nexport function getFileIterator(inputPath) {\n async function* iterable() {\n const fileList = await getAllFiles(inputPath);\n for (const filePath of fileList) {\n const file = await (await fetchFile(path.join(inputPath, filePath))).arrayBuffer();\n yield { path: filePath, file };\n }\n }\n return iterable();\n}\n/**\n * creates a list of relative paths to all files in the provided folder\n * @param basePath path of the root folder\n * @param subfolder relative path from the root folder.\n * @returns list of paths\n */\nexport async function getAllFiles(basePath, subfolder = '', fsPassed) {\n const fs = fsPassed ? fsPassed : new NodeFilesystem({});\n const files = await fs.readdir(pathJoin(basePath, subfolder));\n const arrayOfFiles = [];\n for (const file of files) {\n const fullPath = pathJoin(basePath, subfolder, file);\n if ((await fs.stat(fullPath)).isDirectory) {\n const files = await getAllFiles(basePath, pathJoin(subfolder, file));\n arrayOfFiles.push(...files);\n }\n else {\n arrayOfFiles.push(pathJoin(subfolder, file));\n }\n }\n return arrayOfFiles;\n}\n/**\n * removes empty parts from path array and joins it\n * @param paths paths to join\n * @returns joined path\n */\nfunction pathJoin(...paths) {\n const resPaths = paths.filter((val) => val.length);\n return path.join(...resPaths);\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { isBrowser, isFileProvider, FileHandleFile } from '@loaders.gl/loader-utils';\nimport { makeZipCDHeaderIterator } from \"../parse-zip/cd-file-header.js\";\nimport { parseZipLocalFileHeader } from \"../parse-zip/local-file-header.js\";\nimport { DeflateCompression } from '@loaders.gl/compression';\nimport { IndexedArchive } from \"./IndexedArchive.js\";\n/** Handling different compression types in zip */\nexport const ZIP_COMPRESSION_HANDLERS = {\n /** No compression */\n 0: async (compressedFile) => compressedFile,\n /** Deflation */\n 8: async (compressedFile) => {\n const compression = new DeflateCompression({ raw: true });\n const decompressedData = await compression.decompress(compressedFile);\n return decompressedData;\n }\n};\n/**\n * FileSystem adapter for a ZIP file\n * Holds FileProvider object that provides random access to archived files\n */\nexport class ZipFileSystem {\n /** FileProvider instance promise */\n fileProvider = null;\n fileName;\n archive = null;\n /**\n * Constructor\n * @param file - instance of FileProvider or file path string\n */\n constructor(file) {\n // Try to open file in NodeJS\n if (typeof file === 'string') {\n this.fileName = file;\n if (!isBrowser) {\n this.fileProvider = 
new FileHandleFile(file);\n }\n else {\n throw new Error('Cannot open file for random access in a WEB browser');\n }\n }\n else if (file instanceof IndexedArchive) {\n this.fileProvider = file.fileProvider;\n this.archive = file;\n this.fileName = file.fileName;\n }\n else if (isFileProvider(file)) {\n this.fileProvider = file;\n }\n }\n /** Clean up resources */\n async destroy() {\n if (this.fileProvider) {\n await this.fileProvider.destroy();\n }\n }\n /**\n * Get file names list from zip archive\n * @returns array of file names\n */\n async readdir() {\n if (!this.fileProvider) {\n throw new Error('No data detected in the zip archive');\n }\n const fileNames = [];\n const zipCDIterator = makeZipCDHeaderIterator(this.fileProvider);\n for await (const cdHeader of zipCDIterator) {\n fileNames.push(cdHeader.fileName);\n }\n return fileNames;\n }\n /**\n * Get file metadata\n * @param filename - name of a file\n * @returns central directory data\n */\n async stat(filename) {\n const cdFileHeader = await this.getCDFileHeader(filename);\n return { ...cdFileHeader, size: Number(cdFileHeader.uncompressedSize) };\n }\n /**\n * Implementation of fetch against this file system\n * @param filename - name of a file\n * @returns - Response with file data\n */\n async fetch(filename) {\n if (this.fileName && filename.indexOf(this.fileName) === 0) {\n filename = filename.substring(this.fileName.length + 1);\n }\n let uncompressedFile;\n if (this.archive) {\n uncompressedFile = await this.archive.getFile(filename, 'http');\n }\n else {\n if (!this.fileProvider) {\n throw new Error('No data detected in the zip archive');\n }\n const cdFileHeader = await this.getCDFileHeader(filename);\n const localFileHeader = await parseZipLocalFileHeader(cdFileHeader.localHeaderOffset, this.fileProvider);\n if (!localFileHeader) {\n throw new Error('Local file header has not been found in the zip archive`');\n }\n const compressionHandler = ZIP_COMPRESSION_HANDLERS[localFileHeader.compressionMethod.toString()];\n if (!compressionHandler) {\n throw Error('Only Deflation compression is supported');\n }\n const compressedFile = await this.fileProvider.slice(localFileHeader.fileDataOffset, localFileHeader.fileDataOffset + localFileHeader.compressedSize);\n uncompressedFile = await compressionHandler(compressedFile);\n }\n const response = new Response(uncompressedFile);\n Object.defineProperty(response, 'url', {\n value: filename ? `${this.fileName || ''}/${filename}` : this.fileName || ''\n });\n return response;\n }\n /**\n * Get central directory file header\n * @param filename - name of a file\n * @returns central directory file header\n */\n async getCDFileHeader(filename) {\n if (!this.fileProvider) {\n throw new Error('No data detected in the zip archive');\n }\n const zipCDIterator = makeZipCDHeaderIterator(this.fileProvider);\n let result = null;\n for await (const cdHeader of zipCDIterator) {\n if (cdHeader.fileName === filename) {\n result = cdHeader;\n break;\n }\n }\n if (!result) {\n throw new Error('File has not been found in the zip archive');\n }\n return result;\n }\n}\n", "import { ZipFileSystem } from \"./zip-filesystem.js\";\n/**\n * Abstract class for fetching indexed archive formats (SLPK, 3TZ). Those types of zip archive has\n * a hash file inside that allows to increase reading speed\n */\nexport class IndexedArchive {\n fileProvider;\n fileName;\n /**\n * Constructor\n * @param fileProvider - instance of a binary data reader\n * @param hashTable - pre-loaded hashTable. 
If presented, getFile will skip reading the hash file\n * @param fileName - name of the archive. It is used to add to an URL of a loader context\n */\n constructor(fileProvider, hashTable, fileName) {\n this.fileProvider = fileProvider;\n this.fileName = fileName;\n }\n /**\n * Get file as from order ZIP arhive without using the hash file\n * @param filename - path to the internal file\n * @returns\n */\n async getFileWithoutHash(filename) {\n const zipFS = new ZipFileSystem(this.fileProvider);\n const response = await zipFS.fetch(filename);\n return await response.arrayBuffer();\n }\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { MD5Hash } from '@loaders.gl/crypto';\nimport { concatenateArrayBuffers, concatenateArrayBuffersFromArray } from '@loaders.gl/loader-utils';\nimport { makeZipCDHeaderIterator } from \"./parse-zip/cd-file-header.js\";\n/**\n * Reads hash file from buffer and returns it in ready-to-use form\n * @param arrayBuffer - buffer containing hash file\n * @returns Map containing hash and offset\n */\nexport function parseHashTable(arrayBuffer) {\n const dataView = new DataView(arrayBuffer);\n const hashMap = {};\n for (let i = 0; i < arrayBuffer.byteLength; i = i + 24) {\n const offset = dataView.getBigUint64(i + 16, true);\n const hash = bufferToHex(arrayBuffer, i, 16);\n hashMap[hash] = offset;\n }\n return hashMap;\n}\nfunction bufferToHex(buffer, start, length) {\n // buffer is an ArrayBuffer\n return [...new Uint8Array(buffer, start, length)]\n .map((x) => x.toString(16).padStart(2, '0'))\n .join('');\n}\n/**\n * generates hash info from zip files \"central directory\"\n * @param fileProvider - provider of the archive\n * @returns ready to use hash info\n */\nexport async function makeHashTableFromZipHeaders(fileProvider) {\n const zipCDIterator = makeZipCDHeaderIterator(fileProvider);\n return getHashTable(zipCDIterator);\n}\n/**\n * creates hash table from file offset iterator\n * @param zipCDIterator iterator to use\n * @returns hash table\n */\nexport async function getHashTable(zipCDIterator) {\n const md5Hash = new MD5Hash();\n const textEncoder = new TextEncoder();\n const hashTable = {};\n for await (const cdHeader of zipCDIterator) {\n const filename = cdHeader.fileName.split('\\\\').join('/').toLocaleLowerCase();\n const arrayBuffer = textEncoder.encode(filename).buffer;\n const md5 = await md5Hash.hash(arrayBuffer, 'hex');\n hashTable[md5] = cdHeader.localHeaderOffset;\n }\n return hashTable;\n}\n/**\n * creates hash file that later can be added to the SLPK archive\n * @param zipCDIterator iterator to use\n * @returns ArrayBuffer containing hash file\n */\nexport async function composeHashFile(zipCDIterator) {\n const md5Hash = new MD5Hash();\n const textEncoder = new TextEncoder();\n const hashArray = [];\n for await (const cdHeader of zipCDIterator) {\n let filename = cdHeader.fileName.split('\\\\').join('/');\n // I3S edge case. All files should be lower case by spec. 
However, ArcGIS\n // and official i3s_converter https://github.com/Esri/i3s-spec/blob/master/i3s_converter/i3s_converter_ReadMe.md\n // expect `3dSceneLayer.json.gz` in camel case\n if (filename !== '3dSceneLayer.json.gz') {\n filename = filename.toLocaleLowerCase();\n }\n const arrayBuffer = textEncoder.encode(filename).buffer;\n const md5 = await md5Hash.hash(arrayBuffer, 'hex');\n hashArray.push(concatenateArrayBuffers(hexStringToBuffer(md5), bigintToBuffer(cdHeader.localHeaderOffset)));\n }\n const bufferArray = hashArray.sort(compareHashes);\n return concatenateArrayBuffersFromArray(bufferArray);\n}\n/**\n * Function to compare md5 hashes according to https://github.com/Esri/i3s-spec/blob/master/docs/2.0/slpk_hashtable.pcsl.md\n * @param arrA first hash to compare\n * @param arrB second hash to compare\n * @returns 0 if equal, negative number if a<b, pozitive if a>b\n */\nfunction compareHashes(arrA, arrB) {\n const a = new BigUint64Array(arrA);\n const b = new BigUint64Array(arrB);\n return Number(a[0] === b[0] ? a[1] - b[1] : a[0] - b[0]);\n}\n/**\n * converts hex string to buffer\n * @param str hex string to convert\n * @returns conversion result\n */\nfunction hexStringToBuffer(str) {\n const byteArray = str.match(/../g)?.map((h) => parseInt(h, 16));\n return new Uint8Array(byteArray ?? new Array(16)).buffer;\n}\n/**\n * converts bigint to buffer\n * @param n bigint to convert\n * @returns convertion result\n */\nfunction bigintToBuffer(n) {\n return new BigUint64Array([n]).buffer;\n}\n"],
+
"sourcesContent": ["// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nexport { ZipLoader } from \"./zip-loader.js\";\nexport { ZipWriter } from \"./zip-writer.js\";\nexport { TarBuilder } from \"./tar-builder.js\";\nexport { parseZipCDFileHeader, makeZipCDHeaderIterator, signature as CD_HEADER_SIGNATURE, generateCDHeader } from \"./parse-zip/cd-file-header.js\";\nexport { parseZipLocalFileHeader, signature as localHeaderSignature, generateLocalHeader } from \"./parse-zip/local-file-header.js\";\nexport { parseEoCDRecord } from \"./parse-zip/end-of-central-directory.js\";\nexport { searchFromTheEnd } from \"./parse-zip/search-from-the-end.js\";\nexport { addOneFile, createZip } from \"./parse-zip/zip-composition.js\";\n// export type {HashElement} from './hash-file-utility';\nexport { IndexedArchive } from \"./filesystems/IndexedArchive.js\";\nexport { parseHashTable, makeHashTableFromZipHeaders, composeHashFile } from \"./hash-file-utility.js\";\nexport { ZipFileSystem, ZIP_COMPRESSION_HANDLERS } from \"./filesystems/zip-filesystem.js\";\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport JSZip from 'jszip';\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof \"4.3.3\" !== 'undefined' ? \"4.3.3\" : 'latest';\nexport const ZipLoader = {\n dataType: null,\n batchType: null,\n id: 'zip',\n module: 'zip',\n name: 'Zip Archive',\n version: VERSION,\n extensions: ['zip'],\n mimeTypes: ['application/zip'],\n category: 'archive',\n tests: ['PK'],\n options: {},\n parse: parseZipAsync\n};\n// TODO - Could return a map of promises, perhaps as an option...\nasync function parseZipAsync(data, options = {}) {\n const promises = [];\n const fileMap = {};\n try {\n const jsZip = new JSZip();\n const zip = await jsZip.loadAsync(data, options);\n // start to load each file in this zip\n zip.forEach((relativePath, zipEntry) => {\n const subFilename = zipEntry.name;\n const promise = loadZipEntry(jsZip, subFilename, options).then((arrayBufferOrError) => {\n fileMap[relativePath] = arrayBufferOrError;\n });\n // Ensure Promise.all doesn't ignore rejected promises.\n promises.push(promise);\n });\n await Promise.all(promises);\n return fileMap;\n }\n catch (error) {\n // @ts-ignore\n options.log.error(`Unable to read zip archive: ${error}`);\n throw error;\n }\n}\nasync function loadZipEntry(jsZip, subFilename, options = {}) {\n // jszip supports both arraybuffer and text, the main loaders.gl types\n // https://stuk.github.io/jszip/documentation/api_zipobject/async.html\n try {\n const arrayBuffer = await jsZip.file(subFilename).async(options.dataType || 'arraybuffer');\n return arrayBuffer;\n }\n catch (error) {\n options.log.error(`Unable to read ${subFilename} from zip archive: ${error}`);\n // Store error in place of data in map\n return error;\n }\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport JSZip from 'jszip';\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof \"4.3.3\" !== 'undefined' ? 
\"4.3.3\" : 'latest';\n/**\n * Zip exporter\n */\nexport const ZipWriter = {\n name: 'Zip Archive',\n id: 'zip',\n module: 'zip',\n version: VERSION,\n extensions: ['zip'],\n category: 'archive',\n mimeTypes: ['application/zip'],\n options: {\n zip: {\n onUpdate: () => { }\n },\n jszip: {}\n },\n encode: encodeZipAsync\n};\nasync function encodeZipAsync(fileMap, options = {}) {\n const jsZip = new JSZip();\n // add files to the zip\n for (const subFileName in fileMap) {\n const subFileData = fileMap[subFileName];\n // jszip supports both arraybuffer and string data (the main loaders.gl types)\n // https://stuk.github.io/jszip/documentation/api_zipobject/async.html\n jsZip.file(subFileName, subFileData, options?.jszip || {});\n }\n const zipOptions = { ...ZipWriter.options.zip, ...options?.zip };\n const jszipOptions = { ...ZipWriter.options?.jszip, ...options.jszip };\n try {\n return await jsZip.generateAsync({ ...jszipOptions, type: 'arraybuffer' }, // generate an arraybuffer\n zipOptions.onUpdate);\n }\n catch (error) {\n options.log.error(`Unable to encode zip archive: ${error}`);\n throw error;\n }\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\n// This file is derived from the tar-js code base under MIT license\n// See https://github.com/beatgammit/tar-js/blob/master/LICENSE\n/*\n * tar-js\n * MIT (c) 2011 T. Jameson Little\n */\n/**\n * Returns the memory area specified by length\n * @param length\n * @returns {Uint8Array}\n */\nexport function clean(length) {\n let i;\n const buffer = new Uint8Array(length);\n for (i = 0; i < length; i += 1) {\n buffer[i] = 0;\n }\n return buffer;\n}\n/**\n * Converting data to a string\n * @param num\n * @param bytes\n * @param base\n * @returns string\n */\nexport function pad(num, bytes, base) {\n const numStr = num.toString(base || 8);\n return '000000000000'.substr(numStr.length + 12 - bytes) + numStr;\n}\n/**\n * Converting input to binary data\n * @param input\n * @param out\n * @param offset\n * @returns {Uint8Array}\n */\nexport function stringToUint8(input, out, offset) {\n let i;\n let length;\n out = out || clean(input.length);\n offset = offset || 0;\n for (i = 0, length = input.length; i < length; i += 1) {\n out[offset] = input.charCodeAt(i);\n offset += 1;\n }\n return out;\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\n// This file is derived from the tar-js code base under MIT license\n// See https://github.com/beatgammit/tar-js/blob/master/LICENSE\n/*\n * tar-js\n * MIT (c) 2011 T. 
Jameson Little\n */\n/* eslint-disable */\nimport * as utils from \"./utils.js\";\n/*\nstruct posix_header { // byte offset\n char name[100]; // 0\n char mode[8]; // 100\n char uid[8]; // 108\n char gid[8]; // 116\n char size[12]; // 124\n char mtime[12]; // 136\n char chksum[8]; // 148\n char typeflag; // 156\n char linkname[100]; // 157\n char magic[6]; // 257\n char version[2]; // 263\n char uname[32]; // 265\n char gname[32]; // 297\n char devmajor[8]; // 329\n char devminor[8]; // 337\n char prefix[155]; // 345\n // 500\n};\n*/\nconst structure = {\n fileName: 100,\n fileMode: 8,\n uid: 8,\n gid: 8,\n fileSize: 12,\n mtime: 12,\n checksum: 8,\n type: 1,\n linkName: 100,\n ustar: 8,\n owner: 32,\n group: 32,\n majorNumber: 8,\n minorNumber: 8,\n filenamePrefix: 155,\n padding: 12\n};\n/**\n * Getting the header\n * @param data\n * @param [cb]\n * @returns {Uint8Array} | Array\n */\nexport function format(data, cb) {\n const buffer = utils.clean(512);\n let offset = 0;\n Object.entries(structure).forEach(([field, length]) => {\n const str = data[field] || '';\n let i;\n let fieldLength;\n for (i = 0, fieldLength = str.length; i < fieldLength; i += 1) {\n buffer[offset] = str.charCodeAt(i);\n offset += 1;\n }\n // space it out with nulls\n offset += length - i;\n });\n if (typeof cb === 'function') {\n return cb(buffer, offset);\n }\n return buffer;\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\n// This file is derived from the tar-js code base under MIT license\n// See https://github.com/beatgammit/tar-js/blob/master/LICENSE\n/*\n * tar-js\n * MIT (c) 2011 T. Jameson Little\n */\nimport { clean, pad, stringToUint8 } from \"./utils.js\";\nimport { format } from \"./header.js\";\nlet blockSize;\nlet headerLength;\nlet inputLength;\nconst recordSize = 512;\nclass Tar {\n written;\n out;\n blocks = [];\n length;\n /**\n * @param [recordsPerBlock]\n */\n constructor(recordsPerBlock) {\n this.written = 0;\n blockSize = (recordsPerBlock || 20) * recordSize;\n this.out = clean(blockSize);\n this.blocks = [];\n this.length = 0;\n this.save = this.save.bind(this);\n this.clear = this.clear.bind(this);\n this.append = this.append.bind(this);\n }\n /**\n * Append a file to the tar archive\n * @param filepath\n * @param input\n * @param [opts]\n */\n // eslint-disable-next-line complexity\n append(filepath, input, opts) {\n let checksum;\n if (typeof input === 'string') {\n input = stringToUint8(input);\n }\n else if (input.constructor && input.constructor !== Uint8Array.prototype.constructor) {\n // @ts-ignore\n const errorInputMatch = /function\\s*([$A-Za-z_][0-9A-Za-z_]*)\\s*\\(/.exec(input.constructor.toString());\n const errorInput = errorInputMatch && errorInputMatch[1];\n const errorMessage = `Invalid input type. 
You gave me: ${errorInput}`;\n throw errorMessage;\n }\n opts = opts || {};\n const mode = opts.mode || parseInt('777', 8) & 0xfff;\n const mtime = opts.mtime || Math.floor(Number(new Date()) / 1000);\n const uid = opts.uid || 0;\n const gid = opts.gid || 0;\n const data = {\n fileName: filepath,\n fileMode: pad(mode, 7),\n uid: pad(uid, 7),\n gid: pad(gid, 7),\n fileSize: pad(input.length, 11),\n mtime: pad(mtime, 11),\n checksum: ' ',\n // 0 = just a file\n type: '0',\n ustar: 'ustar ',\n owner: opts.owner || '',\n group: opts.group || ''\n };\n // calculate the checksum\n checksum = 0;\n Object.keys(data).forEach((key) => {\n let i;\n const value = data[key];\n let length;\n for (i = 0, length = value.length; i < length; i += 1) {\n checksum += value.charCodeAt(i);\n }\n });\n data.checksum = `${pad(checksum, 6)}\\u0000 `;\n const headerArr = format(data);\n headerLength = Math.ceil(headerArr.length / recordSize) * recordSize;\n inputLength = Math.ceil(input.length / recordSize) * recordSize;\n this.blocks.push({\n header: headerArr,\n input,\n headerLength,\n inputLength\n });\n }\n /**\n * Compiling data to a Blob object\n * @returns {Blob}\n */\n save() {\n const buffers = [];\n const chunks = new Array();\n let length = 0;\n const max = Math.pow(2, 20);\n let chunk = new Array();\n this.blocks.forEach((b = []) => {\n if (length + b.headerLength + b.inputLength > max) {\n chunks.push({ blocks: chunk, length });\n chunk = [];\n length = 0;\n }\n chunk.push(b);\n length += b.headerLength + b.inputLength;\n });\n chunks.push({ blocks: chunk, length });\n chunks.forEach((c = []) => {\n const buffer = new Uint8Array(c.length);\n let written = 0;\n c.blocks.forEach((b = []) => {\n buffer.set(b.header, written);\n written += b.headerLength;\n buffer.set(b.input, written);\n written += b.inputLength;\n });\n buffers.push(buffer);\n });\n buffers.push(new Uint8Array(2 * recordSize));\n return new Blob(buffers, { type: 'octet/stream' });\n }\n /**\n * Clear the data by its blocksize\n */\n clear() {\n this.written = 0;\n this.out = clean(blockSize);\n }\n}\nexport default Tar;\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport Tar from \"./lib/tar/tar.js\";\nconst TAR_BUILDER_OPTIONS = {\n recordsPerBlock: 20\n};\n/**\n * Build a tar file by adding files\n */\nexport class TarBuilder {\n static get properties() {\n return {\n id: 'tar',\n name: 'TAR',\n extensions: ['tar'],\n mimeTypes: ['application/x-tar'],\n builder: TarBuilder,\n options: TAR_BUILDER_OPTIONS\n };\n }\n options;\n tape;\n count = 0;\n constructor(options) {\n this.options = { ...TAR_BUILDER_OPTIONS, ...options };\n this.tape = new Tar(this.options.recordsPerBlock);\n }\n /** Adds a file to the archive. 
*/\n addFile(filename, buffer) {\n this.tape.append(filename, new Uint8Array(buffer));\n this.count++;\n }\n async build() {\n return new Response(this.tape.save()).arrayBuffer();\n }\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { DataViewFile, compareArrayBuffers, concatenateArrayBuffers } from '@loaders.gl/loader-utils';\nimport { parseEoCDRecord } from \"./end-of-central-directory.js\";\nimport { createZip64Info, setFieldToNumber } from \"./zip64-info-generation.js\";\n// offsets accroding to https://en.wikipedia.org/wiki/ZIP_(file_format)\nconst CD_COMPRESSED_SIZE_OFFSET = 20;\nconst CD_UNCOMPRESSED_SIZE_OFFSET = 24;\nconst CD_FILE_NAME_LENGTH_OFFSET = 28;\nconst CD_EXTRA_FIELD_LENGTH_OFFSET = 30;\nconst CD_START_DISK_OFFSET = 32;\nconst CD_LOCAL_HEADER_OFFSET_OFFSET = 42;\nconst CD_FILE_NAME_OFFSET = 46n;\nexport const signature = new Uint8Array([0x50, 0x4b, 0x01, 0x02]);\n/**\n * Parses central directory file header of zip file\n * @param headerOffset - offset in the archive where header starts\n * @param buffer - buffer containing whole array\n * @returns Info from the header\n */\nexport const parseZipCDFileHeader = async (headerOffset, file) => {\n if (headerOffset >= file.length) {\n return null;\n }\n const mainHeader = new DataView(await file.slice(headerOffset, headerOffset + CD_FILE_NAME_OFFSET));\n const magicBytes = mainHeader.buffer.slice(0, 4);\n if (!compareArrayBuffers(magicBytes, signature.buffer)) {\n return null;\n }\n const compressedSize = BigInt(mainHeader.getUint32(CD_COMPRESSED_SIZE_OFFSET, true));\n const uncompressedSize = BigInt(mainHeader.getUint32(CD_UNCOMPRESSED_SIZE_OFFSET, true));\n const extraFieldLength = mainHeader.getUint16(CD_EXTRA_FIELD_LENGTH_OFFSET, true);\n const startDisk = BigInt(mainHeader.getUint16(CD_START_DISK_OFFSET, true));\n const fileNameLength = mainHeader.getUint16(CD_FILE_NAME_LENGTH_OFFSET, true);\n const additionalHeader = await file.slice(headerOffset + CD_FILE_NAME_OFFSET, headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength));\n const filenameBytes = additionalHeader.slice(0, fileNameLength);\n const fileName = new TextDecoder().decode(filenameBytes);\n const extraOffset = headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength);\n const oldFormatOffset = mainHeader.getUint32(CD_LOCAL_HEADER_OFFSET_OFFSET, true);\n const localHeaderOffset = BigInt(oldFormatOffset);\n const extraField = new DataView(additionalHeader.slice(fileNameLength, additionalHeader.byteLength));\n // looking for info that might be also be in zip64 extra field\n const zip64data = {\n uncompressedSize,\n compressedSize,\n localHeaderOffset,\n startDisk\n };\n const res = findZip64DataInExtra(zip64data, extraField);\n return {\n ...zip64data,\n ...res,\n extraFieldLength,\n fileNameLength,\n fileName,\n extraOffset\n };\n};\n/**\n * Create iterator over files of zip archive\n * @param fileProvider - file provider that provider random access to the file\n */\nexport async function* makeZipCDHeaderIterator(fileProvider) {\n const { cdStartOffset, cdByteSize } = await parseEoCDRecord(fileProvider);\n const centralDirectory = new DataViewFile(new DataView(await fileProvider.slice(cdStartOffset, cdStartOffset + cdByteSize)));\n let cdHeader = await parseZipCDFileHeader(0n, centralDirectory);\n while (cdHeader) {\n yield cdHeader;\n cdHeader = await parseZipCDFileHeader(cdHeader.extraOffset + BigInt(cdHeader.extraFieldLength), centralDirectory);\n }\n}\n/**\n * returns the number 
written in the provided bytes\n * @param bytes two bytes containing the number\n * @returns the number written in the provided bytes\n */\nconst getUint16 = (...bytes) => {\n return bytes[0] + bytes[1] * 16;\n};\n/**\n * reads all nesessary data from zip64 record in the extra data\n * @param zip64data values that might be in zip64 record\n * @param extraField full extra data\n * @returns data read from zip64\n */\nconst findZip64DataInExtra = (zip64data, extraField) => {\n const zip64dataList = findExpectedData(zip64data);\n const zip64DataRes = {};\n if (zip64dataList.length > 0) {\n // total length of data in zip64 notation in bytes\n const zip64chunkSize = zip64dataList.reduce((sum, curr) => sum + curr.length, 0);\n // we're looking for the zip64 nontation header (0x0001)\n // and a size field with a correct value next to it\n const offsetInExtraData = new Uint8Array(extraField.buffer).findIndex((_val, i, arr) => getUint16(arr[i], arr[i + 1]) === 0x0001 &&\n getUint16(arr[i + 2], arr[i + 3]) === zip64chunkSize);\n // then we read all the nesessary fields from the zip64 data\n let bytesRead = 0;\n for (const note of zip64dataList) {\n const offset = bytesRead;\n zip64DataRes[note.name] = extraField.getBigUint64(offsetInExtraData + 4 + offset, true);\n bytesRead = offset + note.length;\n }\n }\n return zip64DataRes;\n};\n/**\n * frind data that's expected to be in zip64\n * @param zip64data values that might be in zip64 record\n * @returns zip64 data description\n */\nconst findExpectedData = (zip64data) => {\n // We define fields that should be in zip64 data\n const zip64dataList = [];\n if (zip64data.uncompressedSize === BigInt(0xffffffff)) {\n zip64dataList.push({ name: 'uncompressedSize', length: 8 });\n }\n if (zip64data.compressedSize === BigInt(0xffffffff)) {\n zip64dataList.push({ name: 'compressedSize', length: 8 });\n }\n if (zip64data.localHeaderOffset === BigInt(0xffffffff)) {\n zip64dataList.push({ name: 'localHeaderOffset', length: 8 });\n }\n if (zip64data.startDisk === BigInt(0xffffffff)) {\n zip64dataList.push({ name: 'startDisk', length: 4 });\n }\n return zip64dataList;\n};\n/**\n * generates cd header for the file\n * @param options info that can be placed into cd header\n * @returns buffer with header\n */\nexport function generateCDHeader(options) {\n const optionsToUse = {\n ...options,\n fnlength: options.fileName.length,\n extraLength: 0\n };\n let zip64header = new ArrayBuffer(0);\n const optionsToZip64 = {};\n if (optionsToUse.offset >= 0xffffffff) {\n optionsToZip64.offset = optionsToUse.offset;\n optionsToUse.offset = BigInt(0xffffffff);\n }\n if (optionsToUse.length >= 0xffffffff) {\n optionsToZip64.size = optionsToUse.length;\n optionsToUse.length = 0xffffffff;\n }\n if (Object.keys(optionsToZip64).length) {\n zip64header = createZip64Info(optionsToZip64);\n optionsToUse.extraLength = zip64header.byteLength;\n }\n const header = new DataView(new ArrayBuffer(Number(CD_FILE_NAME_OFFSET)));\n for (const field of ZIP_HEADER_FIELDS) {\n setFieldToNumber(header, field.size, field.offset, optionsToUse[field.name ?? ''] ?? field.default ?? 
0);\n }\n const encodedName = new TextEncoder().encode(optionsToUse.fileName);\n const resHeader = concatenateArrayBuffers(header.buffer, encodedName, zip64header);\n return resHeader;\n}\n/** Fields map */\nconst ZIP_HEADER_FIELDS = [\n // Central directory file header signature = 0x02014b50\n {\n offset: 0,\n size: 4,\n default: new DataView(signature.buffer).getUint32(0, true)\n },\n // Version made by\n {\n offset: 4,\n size: 2,\n default: 45\n },\n // Version needed to extract (minimum)\n {\n offset: 6,\n size: 2,\n default: 45\n },\n // General purpose bit flag\n {\n offset: 8,\n size: 2,\n default: 0\n },\n // Compression method\n {\n offset: 10,\n size: 2,\n default: 0\n },\n // File last modification time\n {\n offset: 12,\n size: 2,\n default: 0\n },\n // File last modification date\n {\n offset: 14,\n size: 2,\n default: 0\n },\n // CRC-32 of uncompressed data\n {\n offset: 16,\n size: 4,\n name: 'crc32'\n },\n // Compressed size (or 0xffffffff for ZIP64)\n {\n offset: 20,\n size: 4,\n name: 'length'\n },\n // Uncompressed size (or 0xffffffff for ZIP64)\n {\n offset: 24,\n size: 4,\n name: 'length'\n },\n // File name length (n)\n {\n offset: 28,\n size: 2,\n name: 'fnlength'\n },\n // Extra field length (m)\n {\n offset: 30,\n size: 2,\n default: 0,\n name: 'extraLength'\n },\n // File comment length (k)\n {\n offset: 32,\n size: 2,\n default: 0\n },\n // Disk number where file starts (or 0xffff for ZIP64)\n {\n offset: 34,\n size: 2,\n default: 0\n },\n // Internal file attributes\n {\n offset: 36,\n size: 2,\n default: 0\n },\n // External file attributes\n {\n offset: 38,\n size: 4,\n default: 0\n },\n // Relative offset of local file header\n {\n offset: 42,\n size: 4,\n name: 'offset'\n }\n];\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { compareArrayBuffers, concatenateArrayBuffers } from '@loaders.gl/loader-utils';\nimport { searchFromTheEnd } from \"./search-from-the-end.js\";\nimport { setFieldToNumber } from \"./zip64-info-generation.js\";\nconst eoCDSignature = new Uint8Array([0x50, 0x4b, 0x05, 0x06]);\nconst zip64EoCDLocatorSignature = new Uint8Array([0x50, 0x4b, 0x06, 0x07]);\nconst zip64EoCDSignature = new Uint8Array([0x50, 0x4b, 0x06, 0x06]);\n// offsets accroding to https://en.wikipedia.org/wiki/ZIP_(file_format)\nconst CD_RECORDS_NUMBER_OFFSET = 8n;\nconst CD_RECORDS_NUMBER_ON_DISC_OFFSET = 10n;\nconst CD_CD_BYTE_SIZE_OFFSET = 12n;\nconst CD_START_OFFSET_OFFSET = 16n;\nconst CD_COMMENT_OFFSET = 22n;\nconst ZIP64_EOCD_START_OFFSET_OFFSET = 8n;\nconst ZIP64_CD_RECORDS_NUMBER_OFFSET = 24n;\nconst ZIP64_CD_RECORDS_NUMBER_ON_DISC_OFFSET = 32n;\nconst ZIP64_CD_CD_BYTE_SIZE_OFFSET = 40n;\nconst ZIP64_CD_START_OFFSET_OFFSET = 48n;\nconst ZIP64_COMMENT_OFFSET = 56n;\n/**\n * Parses end of central directory record of zip file\n * @param file - FileProvider instance\n * @returns Info from the header\n */\nexport const parseEoCDRecord = async (file) => {\n const zipEoCDOffset = await searchFromTheEnd(file, eoCDSignature);\n let cdRecordsNumber = BigInt(await file.getUint16(zipEoCDOffset + CD_RECORDS_NUMBER_OFFSET));\n let cdByteSize = BigInt(await file.getUint32(zipEoCDOffset + CD_CD_BYTE_SIZE_OFFSET));\n let cdStartOffset = BigInt(await file.getUint32(zipEoCDOffset + CD_START_OFFSET_OFFSET));\n let zip64EoCDLocatorOffset = zipEoCDOffset - 20n;\n let zip64EoCDOffset = 0n;\n const magicBytes = await file.slice(zip64EoCDLocatorOffset, zip64EoCDLocatorOffset + 4n);\n if (compareArrayBuffers(magicBytes, 
zip64EoCDLocatorSignature)) {\n zip64EoCDOffset = await file.getBigUint64(zip64EoCDLocatorOffset + ZIP64_EOCD_START_OFFSET_OFFSET);\n const endOfCDMagicBytes = await file.slice(zip64EoCDOffset, zip64EoCDOffset + 4n);\n if (!compareArrayBuffers(endOfCDMagicBytes, zip64EoCDSignature.buffer)) {\n throw new Error('zip64 EoCD not found');\n }\n cdRecordsNumber = await file.getBigUint64(zip64EoCDOffset + ZIP64_CD_RECORDS_NUMBER_OFFSET);\n cdByteSize = await file.getBigUint64(zip64EoCDOffset + ZIP64_CD_CD_BYTE_SIZE_OFFSET);\n cdStartOffset = await file.getBigUint64(zip64EoCDOffset + ZIP64_CD_START_OFFSET_OFFSET);\n }\n else {\n zip64EoCDLocatorOffset = 0n;\n }\n return {\n cdRecordsNumber,\n cdStartOffset,\n cdByteSize,\n offsets: {\n zip64EoCDOffset,\n zip64EoCDLocatorOffset,\n zipEoCDOffset\n }\n };\n};\n/**\n * updates EoCD record to add more files to the archieve\n * @param eocdBody buffer containing header\n * @param oldEoCDOffsets info read from EoCD record befor updating\n * @param newCDStartOffset CD start offset to be updated\n * @param eocdStartOffset EoCD start offset to be updated\n * @returns new EoCD header\n */\nexport function updateEoCD(eocdBody, oldEoCDOffsets, newCDStartOffset, eocdStartOffset, newCDRecordsNumber) {\n const eocd = new DataView(eocdBody);\n const classicEoCDOffset = oldEoCDOffsets.zip64EoCDOffset\n ? oldEoCDOffsets.zipEoCDOffset - oldEoCDOffsets.zip64EoCDOffset\n : 0n;\n // updating classic EoCD record with new CD records number in general and on disc\n if (Number(newCDRecordsNumber) <= 0xffff) {\n setFieldToNumber(eocd, 2, classicEoCDOffset + CD_RECORDS_NUMBER_OFFSET, newCDRecordsNumber);\n setFieldToNumber(eocd, 2, classicEoCDOffset + CD_RECORDS_NUMBER_ON_DISC_OFFSET, newCDRecordsNumber);\n }\n // updating zip64 EoCD record with new size of CD\n if (eocdStartOffset - newCDStartOffset <= 0xffffffff) {\n setFieldToNumber(eocd, 4, classicEoCDOffset + CD_CD_BYTE_SIZE_OFFSET, eocdStartOffset - newCDStartOffset);\n }\n // updating classic EoCD record with new CD start offset\n if (newCDStartOffset < 0xffffffff) {\n setFieldToNumber(eocd, 4, classicEoCDOffset + CD_START_OFFSET_OFFSET, newCDStartOffset);\n }\n // updating zip64 EoCD locator and record with new EoCD record start offset and cd records number\n if (oldEoCDOffsets.zip64EoCDLocatorOffset && oldEoCDOffsets.zip64EoCDOffset) {\n // updating zip64 EoCD locator with new EoCD record start offset\n const locatorOffset = oldEoCDOffsets.zip64EoCDLocatorOffset - oldEoCDOffsets.zip64EoCDOffset;\n setFieldToNumber(eocd, 8, locatorOffset + ZIP64_EOCD_START_OFFSET_OFFSET, eocdStartOffset);\n // updating zip64 EoCD record with new cd start offset\n setFieldToNumber(eocd, 8, ZIP64_CD_START_OFFSET_OFFSET, newCDStartOffset);\n // updating zip64 EoCD record with new cd records number\n setFieldToNumber(eocd, 8, ZIP64_CD_RECORDS_NUMBER_OFFSET, newCDRecordsNumber);\n setFieldToNumber(eocd, 8, ZIP64_CD_RECORDS_NUMBER_ON_DISC_OFFSET, newCDRecordsNumber);\n // updating zip64 EoCD record with new size of CD\n setFieldToNumber(eocd, 8, ZIP64_CD_CD_BYTE_SIZE_OFFSET, eocdStartOffset - newCDStartOffset);\n }\n return new Uint8Array(eocd.buffer);\n}\n/**\n * generates EoCD record\n * @param options data to generate EoCD record\n * @returns ArrayBuffer with EoCD record\n */\nexport function generateEoCD(options) {\n const header = new DataView(new ArrayBuffer(Number(CD_COMMENT_OFFSET)));\n for (const field of EOCD_FIELDS) {\n setFieldToNumber(header, field.size, field.offset, options[field.name ?? ''] ?? field.default ?? 
0);\n }\n const locator = generateZip64InfoLocator(options);\n const zip64Record = generateZip64Info(options);\n return concatenateArrayBuffers(zip64Record, locator, header.buffer);\n}\n/** standart EoCD fields */\nconst EOCD_FIELDS = [\n // End of central directory signature = 0x06054b50\n {\n offset: 0,\n size: 4,\n default: new DataView(eoCDSignature.buffer).getUint32(0, true)\n },\n // Number of this disk (or 0xffff for ZIP64)\n {\n offset: 4,\n size: 2,\n default: 0\n },\n // Disk where central directory starts (or 0xffff for ZIP64)\n {\n offset: 6,\n size: 2,\n default: 0\n },\n // Number of central directory records on this disk (or 0xffff for ZIP64)\n {\n offset: 8,\n size: 2,\n name: 'recordsNumber'\n },\n // Total number of central directory records (or 0xffff for ZIP64)\n {\n offset: 10,\n size: 2,\n name: 'recordsNumber'\n },\n // Size of central directory (bytes) (or 0xffffffff for ZIP64)\n {\n offset: 12,\n size: 4,\n name: 'cdSize'\n },\n // Offset of start of central directory, relative to start of archive (or 0xffffffff for ZIP64)\n {\n offset: 16,\n size: 4,\n name: 'cdOffset'\n },\n // Comment length (n)\n {\n offset: 20,\n size: 2,\n default: 0\n }\n];\n/**\n * generates eocd zip64 record\n * @param options data to generate eocd zip64 record\n * @returns buffer with eocd zip64 record\n */\nfunction generateZip64Info(options) {\n const record = new DataView(new ArrayBuffer(Number(ZIP64_COMMENT_OFFSET)));\n for (const field of ZIP64_EOCD_FIELDS) {\n setFieldToNumber(record, field.size, field.offset, options[field.name ?? ''] ?? field.default ?? 0);\n }\n return record.buffer;\n}\n/**\n * generates eocd zip64 record locator\n * @param options data to generate eocd zip64 record\n * @returns buffer with eocd zip64 record\n */\nfunction generateZip64InfoLocator(options) {\n const locator = new DataView(new ArrayBuffer(Number(20)));\n for (const field of ZIP64_EOCD_LOCATOR_FIELDS) {\n setFieldToNumber(locator, field.size, field.offset, options[field.name ?? ''] ?? field.default ?? 
0);\n }\n return locator.buffer;\n}\n/** zip64 EoCD record locater fields */\nconst ZIP64_EOCD_LOCATOR_FIELDS = [\n // zip64 end of central dir locator signature\n {\n offset: 0,\n size: 4,\n default: new DataView(zip64EoCDLocatorSignature.buffer).getUint32(0, true)\n },\n // number of the disk with the start of the zip64 end of\n {\n offset: 4,\n size: 4,\n default: 0\n },\n // start of the zip64 end of central directory\n {\n offset: 8,\n size: 8,\n name: 'eoCDStart'\n },\n // total number of disks\n {\n offset: 16,\n size: 4,\n default: 1\n }\n];\n/** zip64 EoCD recodrd fields */\nconst ZIP64_EOCD_FIELDS = [\n // End of central directory signature = 0x06064b50\n {\n offset: 0,\n size: 4,\n default: new DataView(zip64EoCDSignature.buffer).getUint32(0, true)\n },\n // Size of the EOCD64 minus 12\n {\n offset: 4,\n size: 8,\n default: 44\n },\n // Version made by\n {\n offset: 12,\n size: 2,\n default: 45\n },\n // Version needed to extract (minimum)\n {\n offset: 14,\n size: 2,\n default: 45\n },\n // Number of this disk\n {\n offset: 16,\n size: 4,\n default: 0\n },\n // Disk where central directory starts\n {\n offset: 20,\n size: 4,\n default: 0\n },\n // Number of central directory records on this disk\n {\n offset: 24,\n size: 8,\n name: 'recordsNumber'\n },\n // Total number of central directory records\n {\n offset: 32,\n size: 8,\n name: 'recordsNumber'\n },\n // Size of central directory (bytes)\n {\n offset: 40,\n size: 8,\n name: 'cdSize'\n },\n // Offset of start of central directory, relative to start of archive\n {\n offset: 48,\n size: 8,\n name: 'cdOffset'\n }\n];\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nconst buffLength = 1024;\n/**\n * looking for the last occurrence of the provided\n * @param file\n * @param target\n * @returns\n */\nexport const searchFromTheEnd = async (file, target) => {\n const searchWindow = [\n await file.getUint8(file.length - 1n),\n await file.getUint8(file.length - 2n),\n await file.getUint8(file.length - 3n),\n undefined\n ];\n let targetOffset = -1;\n // looking for the last record in the central directory\n let point = file.length - 4n;\n do {\n const prevPoint = point;\n point -= BigInt(buffLength);\n point = point >= 0n ? point : 0n;\n const buff = new Uint8Array(await file.slice(point, prevPoint));\n for (let i = buff.length - 1; i > -1; i--) {\n searchWindow[3] = searchWindow[2];\n searchWindow[2] = searchWindow[1];\n searchWindow[1] = searchWindow[0];\n searchWindow[0] = buff[i];\n if (searchWindow.every((val, index) => val === target[index])) {\n targetOffset = i;\n break;\n }\n }\n } while (targetOffset === -1 && point > 0n);\n return point + BigInt(targetOffset);\n};\n", "import { concatenateArrayBuffers } from '@loaders.gl/loader-utils';\nexport const signature = new Uint8Array([0x01, 0x00]);\n/**\n * creates zip64 extra field\n * @param options info that can be placed into zip64 field\n * @returns buffer with field\n */\nexport function createZip64Info(options) {\n const optionsToUse = {\n ...options,\n zip64Length: (options.offset ? 1 : 0) * 8 + (options.size ? 1 : 0) * 16\n };\n const arraysToConcat = [];\n for (const field of ZIP64_FIELDS) {\n if (!optionsToUse[field.name ?? ''] && !field.default) {\n continue; // eslint-disable-line no-continue\n }\n const newValue = new DataView(new ArrayBuffer(field.size));\n NUMBER_SETTERS[field.size](newValue, 0, optionsToUse[field.name ?? ''] ?? 
field.default);\n arraysToConcat.push(newValue.buffer);\n }\n return concatenateArrayBuffers(...arraysToConcat);\n}\n/**\n * Writes values into buffer according to the bytes amount\n * @param header header where to write the data\n * @param fieldSize size of the field in bytes\n * @param fieldOffset offset of the field\n * @param value value to be written\n */\nexport function setFieldToNumber(header, fieldSize, fieldOffset, value) {\n NUMBER_SETTERS[fieldSize](header, Number(fieldOffset), value);\n}\n/** functions to write values into buffer according to the bytes amount */\nconst NUMBER_SETTERS = {\n 2: (header, offset, value) => {\n header.setUint16(offset, Number(value > 0xffff ? 0xffff : value), true);\n },\n 4: (header, offset, value) => {\n header.setUint32(offset, Number(value > 0xffffffff ? 0xffffffff : value), true);\n },\n 8: (header, offset, value) => {\n header.setBigUint64(offset, BigInt(value), true);\n }\n};\n/** zip64 info fields description, we need it as a pattern to build a zip64 info */\nconst ZIP64_FIELDS = [\n // Header ID 0x0001\n {\n size: 2,\n default: new DataView(signature.buffer).getUint16(0, true)\n },\n // Size of the extra field chunk (8, 16, 24 or 28)\n {\n size: 2,\n name: 'zip64Length'\n },\n // Original uncompressed file size\n {\n size: 8,\n name: 'size'\n },\n // Size of compressed data\n {\n size: 8,\n name: 'size'\n },\n // Offset of local header record\n {\n size: 8,\n name: 'offset'\n }\n];\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { compareArrayBuffers, concatenateArrayBuffers } from '@loaders.gl/loader-utils';\nimport { createZip64Info, setFieldToNumber } from \"./zip64-info-generation.js\";\n// offsets accroding to https://en.wikipedia.org/wiki/ZIP_(file_format)\nconst COMPRESSION_METHOD_OFFSET = 8;\nconst COMPRESSED_SIZE_OFFSET = 18;\nconst UNCOMPRESSED_SIZE_OFFSET = 22;\nconst FILE_NAME_LENGTH_OFFSET = 26;\nconst EXTRA_FIELD_LENGTH_OFFSET = 28;\nconst FILE_NAME_OFFSET = 30n;\nexport const signature = new Uint8Array([0x50, 0x4b, 0x03, 0x04]);\n/**\n * Parses local file header of zip file\n * @param headerOffset - offset in the archive where header starts\n * @param buffer - buffer containing whole array\n * @returns Info from the header\n */\nexport const parseZipLocalFileHeader = async (headerOffset, file) => {\n const mainHeader = new DataView(await file.slice(headerOffset, headerOffset + FILE_NAME_OFFSET));\n const magicBytes = mainHeader.buffer.slice(0, 4);\n if (!compareArrayBuffers(magicBytes, signature)) {\n return null;\n }\n const fileNameLength = mainHeader.getUint16(FILE_NAME_LENGTH_OFFSET, true);\n const extraFieldLength = mainHeader.getUint16(EXTRA_FIELD_LENGTH_OFFSET, true);\n const additionalHeader = await file.slice(headerOffset + FILE_NAME_OFFSET, headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength));\n const fileNameBuffer = additionalHeader.slice(0, fileNameLength);\n const extraDataBuffer = new DataView(additionalHeader.slice(fileNameLength, additionalHeader.byteLength));\n const fileName = new TextDecoder().decode(fileNameBuffer).split('\\\\').join('/');\n let fileDataOffset = headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength);\n const compressionMethod = mainHeader.getUint16(COMPRESSION_METHOD_OFFSET, true);\n let compressedSize = BigInt(mainHeader.getUint32(COMPRESSED_SIZE_OFFSET, true)); // add zip 64 logic\n let uncompressedSize = BigInt(mainHeader.getUint32(UNCOMPRESSED_SIZE_OFFSET, true)); // add zip 64 logic\n let 
offsetInZip64Data = 4;\n // looking for info that might be also be in zip64 extra field\n if (uncompressedSize === BigInt(0xffffffff)) {\n uncompressedSize = extraDataBuffer.getBigUint64(offsetInZip64Data, true);\n offsetInZip64Data += 8;\n }\n if (compressedSize === BigInt(0xffffffff)) {\n compressedSize = extraDataBuffer.getBigUint64(offsetInZip64Data, true);\n offsetInZip64Data += 8;\n }\n if (fileDataOffset === BigInt(0xffffffff)) {\n fileDataOffset = extraDataBuffer.getBigUint64(offsetInZip64Data, true); // setting it to the one from zip64\n }\n return {\n fileNameLength,\n fileName,\n extraFieldLength,\n fileDataOffset,\n compressedSize,\n compressionMethod\n };\n};\n/**\n * generates local header for the file\n * @param options info that can be placed into local header\n * @returns buffer with header\n */\nexport function generateLocalHeader(options) {\n const optionsToUse = {\n ...options,\n extraLength: 0,\n fnlength: options.fileName.length\n };\n let zip64header = new ArrayBuffer(0);\n const optionsToZip64 = {};\n if (optionsToUse.length >= 0xffffffff) {\n optionsToZip64.size = optionsToUse.length;\n optionsToUse.length = 0xffffffff;\n }\n if (Object.keys(optionsToZip64).length) {\n zip64header = createZip64Info(optionsToZip64);\n optionsToUse.extraLength = zip64header.byteLength;\n }\n // base length without file name and extra info is static\n const header = new DataView(new ArrayBuffer(Number(FILE_NAME_OFFSET)));\n for (const field of ZIP_HEADER_FIELDS) {\n setFieldToNumber(header, field.size, field.offset, optionsToUse[field.name ?? ''] ?? field.default ?? 0);\n }\n const encodedName = new TextEncoder().encode(optionsToUse.fileName);\n const resHeader = concatenateArrayBuffers(header.buffer, encodedName, zip64header);\n return resHeader;\n}\nconst ZIP_HEADER_FIELDS = [\n // Local file header signature = 0x04034b50\n {\n offset: 0,\n size: 4,\n default: new DataView(signature.buffer).getUint32(0, true)\n },\n // Version needed to extract (minimum)\n {\n offset: 4,\n size: 2,\n default: 45\n },\n // General purpose bit flag\n {\n offset: 6,\n size: 2,\n default: 0\n },\n // Compression method\n {\n offset: 8,\n size: 2,\n default: 0\n },\n // File last modification time\n {\n offset: 10,\n size: 2,\n default: 0\n },\n // File last modification date\n {\n offset: 12,\n size: 2,\n default: 0\n },\n // CRC-32 of uncompressed data\n {\n offset: 14,\n size: 4,\n name: 'crc32'\n },\n // Compressed size (or 0xffffffff for ZIP64)\n {\n offset: 18,\n size: 4,\n name: 'length'\n },\n // Uncompressed size (or 0xffffffff for ZIP64)\n {\n offset: 22,\n size: 4,\n name: 'length'\n },\n // File name length (n)\n {\n offset: 26,\n size: 2,\n name: 'fnlength'\n },\n // Extra field length (m)\n {\n offset: 28,\n size: 2,\n default: 0,\n name: 'extraLength'\n }\n];\n", "import { FileHandleFile, concatenateArrayBuffers, path, NodeFilesystem, NodeFile } from '@loaders.gl/loader-utils';\nimport { generateEoCD, parseEoCDRecord, updateEoCD } from \"./end-of-central-directory.js\";\nimport { CRC32Hash } from '@loaders.gl/crypto';\nimport { generateLocalHeader } from \"./local-file-header.js\";\nimport { generateCDHeader } from \"./cd-file-header.js\";\nimport { fetchFile } from '@loaders.gl/core';\n/**\n * cut off CD and EoCD records from zip file\n * @param provider zip file\n * @returns tuple with three values: CD, EoCD record, EoCD information\n */\nasync function cutTheTailOff(provider) {\n // define where the body ends\n const oldEoCDinfo = await parseEoCDRecord(provider);\n const 
oldCDStartOffset = oldEoCDinfo.cdStartOffset;\n // define cd length\n const oldCDLength = Number(oldEoCDinfo.offsets.zip64EoCDOffset\n ? oldEoCDinfo.offsets.zip64EoCDOffset - oldCDStartOffset\n : oldEoCDinfo.offsets.zipEoCDOffset - oldCDStartOffset);\n // cut off everything except of archieve body\n const zipEnding = await provider.slice(oldCDStartOffset, provider.length);\n await provider.truncate(Number(oldCDStartOffset));\n // divide cd body and eocd record\n const oldCDBody = zipEnding.slice(0, oldCDLength);\n const eocdBody = zipEnding.slice(oldCDLength, zipEnding.byteLength);\n return [oldCDBody, eocdBody, oldEoCDinfo];\n}\n/**\n * generates CD and local headers for the file\n * @param fileName name of the file\n * @param fileToAdd buffer with the file\n * @param localFileHeaderOffset offset of the file local header\n * @returns tuple with two values: local header and file body, cd header\n */\nasync function generateFileHeaders(fileName, fileToAdd, localFileHeaderOffset) {\n // generating CRC32 of the content\n const newFileCRC322 = parseInt(await new CRC32Hash().hash(fileToAdd, 'hex'), 16);\n // generate local header for the file\n const newFileLocalHeader = generateLocalHeader({\n crc32: newFileCRC322,\n fileName,\n length: fileToAdd.byteLength\n });\n // generate hash file cd header\n const newFileCDHeader = generateCDHeader({\n crc32: newFileCRC322,\n fileName,\n offset: localFileHeaderOffset,\n length: fileToAdd.byteLength\n });\n return [\n new Uint8Array(concatenateArrayBuffers(newFileLocalHeader, fileToAdd)),\n new Uint8Array(newFileCDHeader)\n ];\n}\n/**\n * adds one file in the end of the archieve\n * @param zipUrl path to the file\n * @param fileToAdd new file body\n * @param fileName new file name\n */\nexport async function addOneFile(zipUrl, fileToAdd, fileName) {\n // init file handler\n const provider = new FileHandleFile(zipUrl, true);\n const [oldCDBody, eocdBody, oldEoCDinfo] = await cutTheTailOff(provider);\n // remember the new file local header start offset\n const newFileOffset = provider.length;\n const [localPart, cdHeaderPart] = await generateFileHeaders(fileName, fileToAdd, newFileOffset);\n // write down the file local header\n await provider.append(localPart);\n // add the file CD header to the CD\n const newCDBody = concatenateArrayBuffers(oldCDBody, cdHeaderPart);\n // remember the CD start offset\n const newCDStartOffset = provider.length;\n // write down new CD\n await provider.append(new Uint8Array(newCDBody));\n // remember where eocd starts\n const eocdOffset = provider.length;\n await provider.append(updateEoCD(eocdBody, oldEoCDinfo.offsets, newCDStartOffset, eocdOffset, oldEoCDinfo.cdRecordsNumber + 1n));\n}\n/**\n * creates zip archive with no compression\n * @note This is a node specific function that works on files\n * @param inputPath path where files for the achive are stored\n * @param outputPath path where zip archive will be placed\n */\nexport async function createZip(inputPath, outputPath, createAdditionalData) {\n const fileIterator = getFileIterator(inputPath);\n const resFile = new NodeFile(outputPath, 'w');\n const fileList = [];\n const cdArray = [];\n for await (const file of fileIterator) {\n await addFile(file, resFile, cdArray, fileList);\n }\n if (createAdditionalData) {\n const additionaldata = await createAdditionalData(fileList);\n await addFile(additionaldata, resFile, cdArray);\n }\n const cdOffset = (await resFile.stat()).bigsize;\n const cd = concatenateArrayBuffers(...cdArray);\n await resFile.append(new 
Uint8Array(cd));\n const eoCDStart = (await resFile.stat()).bigsize;\n await resFile.append(new Uint8Array(generateEoCD({ recordsNumber: cdArray.length, cdSize: cd.byteLength, cdOffset, eoCDStart })));\n}\n/**\n * Adds file to zip parts\n * @param file file to add\n * @param resFile zip file body\n * @param cdArray zip file central directory\n * @param fileList list of file offsets\n */\nasync function addFile(file, resFile, cdArray, fileList) {\n const size = (await resFile.stat()).bigsize;\n fileList?.push({ fileName: file.path, localHeaderOffset: size });\n const [localPart, cdHeaderPart] = await generateFileHeaders(file.path, file.file, size);\n await resFile.append(localPart);\n cdArray.push(cdHeaderPart);\n}\n/**\n * creates iterator providing buffer with file content and path to every file in the input folder\n * @param inputPath path to the input folder\n * @returns iterator\n */\nexport function getFileIterator(inputPath) {\n async function* iterable() {\n const fileList = await getAllFiles(inputPath);\n for (const filePath of fileList) {\n const file = await (await fetchFile(path.join(inputPath, filePath))).arrayBuffer();\n yield { path: filePath, file };\n }\n }\n return iterable();\n}\n/**\n * creates a list of relative paths to all files in the provided folder\n * @param basePath path of the root folder\n * @param subfolder relative path from the root folder.\n * @returns list of paths\n */\nexport async function getAllFiles(basePath, subfolder = '', fsPassed) {\n const fs = fsPassed ? fsPassed : new NodeFilesystem({});\n const files = await fs.readdir(pathJoin(basePath, subfolder));\n const arrayOfFiles = [];\n for (const file of files) {\n const fullPath = pathJoin(basePath, subfolder, file);\n if ((await fs.stat(fullPath)).isDirectory) {\n const files = await getAllFiles(basePath, pathJoin(subfolder, file));\n arrayOfFiles.push(...files);\n }\n else {\n arrayOfFiles.push(pathJoin(subfolder, file));\n }\n }\n return arrayOfFiles;\n}\n/**\n * removes empty parts from path array and joins it\n * @param paths paths to join\n * @returns joined path\n */\nfunction pathJoin(...paths) {\n const resPaths = paths.filter((val) => val.length);\n return path.join(...resPaths);\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { isBrowser, isFileProvider, FileHandleFile } from '@loaders.gl/loader-utils';\nimport { makeZipCDHeaderIterator } from \"../parse-zip/cd-file-header.js\";\nimport { parseZipLocalFileHeader } from \"../parse-zip/local-file-header.js\";\nimport { DeflateCompression } from '@loaders.gl/compression';\nimport { IndexedArchive } from \"./IndexedArchive.js\";\n/** Handling different compression types in zip */\nexport const ZIP_COMPRESSION_HANDLERS = {\n /** No compression */\n 0: async (compressedFile) => compressedFile,\n /** Deflation */\n 8: async (compressedFile) => {\n const compression = new DeflateCompression({ raw: true });\n const decompressedData = await compression.decompress(compressedFile);\n return decompressedData;\n }\n};\n/**\n * FileSystem adapter for a ZIP file\n * Holds FileProvider object that provides random access to archived files\n */\nexport class ZipFileSystem {\n /** FileProvider instance promise */\n fileProvider = null;\n fileName;\n archive = null;\n /**\n * Constructor\n * @param file - instance of FileProvider or file path string\n */\n constructor(file) {\n // Try to open file in NodeJS\n if (typeof file === 'string') {\n this.fileName = file;\n if (!isBrowser) {\n this.fileProvider = 
new FileHandleFile(file);\n }\n else {\n throw new Error('Cannot open file for random access in a WEB browser');\n }\n }\n else if (file instanceof IndexedArchive) {\n this.fileProvider = file.fileProvider;\n this.archive = file;\n this.fileName = file.fileName;\n }\n else if (isFileProvider(file)) {\n this.fileProvider = file;\n }\n }\n /** Clean up resources */\n async destroy() {\n if (this.fileProvider) {\n await this.fileProvider.destroy();\n }\n }\n /**\n * Get file names list from zip archive\n * @returns array of file names\n */\n async readdir() {\n if (!this.fileProvider) {\n throw new Error('No data detected in the zip archive');\n }\n const fileNames = [];\n const zipCDIterator = makeZipCDHeaderIterator(this.fileProvider);\n for await (const cdHeader of zipCDIterator) {\n fileNames.push(cdHeader.fileName);\n }\n return fileNames;\n }\n /**\n * Get file metadata\n * @param filename - name of a file\n * @returns central directory data\n */\n async stat(filename) {\n const cdFileHeader = await this.getCDFileHeader(filename);\n return { ...cdFileHeader, size: Number(cdFileHeader.uncompressedSize) };\n }\n /**\n * Implementation of fetch against this file system\n * @param filename - name of a file\n * @returns - Response with file data\n */\n async fetch(filename) {\n if (this.fileName && filename.indexOf(this.fileName) === 0) {\n filename = filename.substring(this.fileName.length + 1);\n }\n let uncompressedFile;\n if (this.archive) {\n uncompressedFile = await this.archive.getFile(filename, 'http');\n }\n else {\n if (!this.fileProvider) {\n throw new Error('No data detected in the zip archive');\n }\n const cdFileHeader = await this.getCDFileHeader(filename);\n const localFileHeader = await parseZipLocalFileHeader(cdFileHeader.localHeaderOffset, this.fileProvider);\n if (!localFileHeader) {\n throw new Error('Local file header has not been found in the zip archive`');\n }\n const compressionHandler = ZIP_COMPRESSION_HANDLERS[localFileHeader.compressionMethod.toString()];\n if (!compressionHandler) {\n throw Error('Only Deflation compression is supported');\n }\n const compressedFile = await this.fileProvider.slice(localFileHeader.fileDataOffset, localFileHeader.fileDataOffset + localFileHeader.compressedSize);\n uncompressedFile = await compressionHandler(compressedFile);\n }\n const response = new Response(uncompressedFile);\n Object.defineProperty(response, 'url', {\n value: filename ? `${this.fileName || ''}/${filename}` : this.fileName || ''\n });\n return response;\n }\n /**\n * Get central directory file header\n * @param filename - name of a file\n * @returns central directory file header\n */\n async getCDFileHeader(filename) {\n if (!this.fileProvider) {\n throw new Error('No data detected in the zip archive');\n }\n const zipCDIterator = makeZipCDHeaderIterator(this.fileProvider);\n let result = null;\n for await (const cdHeader of zipCDIterator) {\n if (cdHeader.fileName === filename) {\n result = cdHeader;\n break;\n }\n }\n if (!result) {\n throw new Error('File has not been found in the zip archive');\n }\n return result;\n }\n}\n", "import { ZipFileSystem } from \"./zip-filesystem.js\";\n/**\n * Abstract class for fetching indexed archive formats (SLPK, 3TZ). Those types of zip archive has\n * a hash file inside that allows to increase reading speed\n */\nexport class IndexedArchive {\n fileProvider;\n fileName;\n /**\n * Constructor\n * @param fileProvider - instance of a binary data reader\n * @param hashTable - pre-loaded hashTable. 
If presented, getFile will skip reading the hash file\n * @param fileName - name of the archive. It is used to add to an URL of a loader context\n */\n constructor(fileProvider, hashTable, fileName) {\n this.fileProvider = fileProvider;\n this.fileName = fileName;\n }\n /**\n * Get file as from order ZIP arhive without using the hash file\n * @param filename - path to the internal file\n * @returns\n */\n async getFileWithoutHash(filename) {\n const zipFS = new ZipFileSystem(this.fileProvider);\n const response = await zipFS.fetch(filename);\n return await response.arrayBuffer();\n }\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { MD5Hash } from '@loaders.gl/crypto';\nimport { concatenateArrayBuffers, concatenateArrayBuffersFromArray } from '@loaders.gl/loader-utils';\nimport { makeZipCDHeaderIterator } from \"./parse-zip/cd-file-header.js\";\n/**\n * Reads hash file from buffer and returns it in ready-to-use form\n * @param arrayBuffer - buffer containing hash file\n * @returns Map containing hash and offset\n */\nexport function parseHashTable(arrayBuffer) {\n const dataView = new DataView(arrayBuffer);\n const hashMap = {};\n for (let i = 0; i < arrayBuffer.byteLength; i = i + 24) {\n const offset = dataView.getBigUint64(i + 16, true);\n const hash = bufferToHex(arrayBuffer, i, 16);\n hashMap[hash] = offset;\n }\n return hashMap;\n}\nfunction bufferToHex(buffer, start, length) {\n // buffer is an ArrayBuffer\n return [...new Uint8Array(buffer, start, length)]\n .map((x) => x.toString(16).padStart(2, '0'))\n .join('');\n}\n/**\n * generates hash info from zip files \"central directory\"\n * @param fileProvider - provider of the archive\n * @returns ready to use hash info\n */\nexport async function makeHashTableFromZipHeaders(fileProvider) {\n const zipCDIterator = makeZipCDHeaderIterator(fileProvider);\n return getHashTable(zipCDIterator);\n}\n/**\n * creates hash table from file offset iterator\n * @param zipCDIterator iterator to use\n * @returns hash table\n */\nexport async function getHashTable(zipCDIterator) {\n const md5Hash = new MD5Hash();\n const textEncoder = new TextEncoder();\n const hashTable = {};\n for await (const cdHeader of zipCDIterator) {\n const filename = cdHeader.fileName.split('\\\\').join('/').toLocaleLowerCase();\n const arrayBuffer = textEncoder.encode(filename).buffer;\n const md5 = await md5Hash.hash(arrayBuffer, 'hex');\n hashTable[md5] = cdHeader.localHeaderOffset;\n }\n return hashTable;\n}\n/**\n * creates hash file that later can be added to the SLPK archive\n * @param zipCDIterator iterator to use\n * @returns ArrayBuffer containing hash file\n */\nexport async function composeHashFile(zipCDIterator) {\n const md5Hash = new MD5Hash();\n const textEncoder = new TextEncoder();\n const hashArray = [];\n for await (const cdHeader of zipCDIterator) {\n let filename = cdHeader.fileName.split('\\\\').join('/');\n // I3S edge case. All files should be lower case by spec. 
However, ArcGIS\n // and official i3s_converter https://github.com/Esri/i3s-spec/blob/master/i3s_converter/i3s_converter_ReadMe.md\n // expect `3dSceneLayer.json.gz` in camel case\n if (filename !== '3dSceneLayer.json.gz') {\n filename = filename.toLocaleLowerCase();\n }\n const arrayBuffer = textEncoder.encode(filename).buffer;\n const md5 = await md5Hash.hash(arrayBuffer, 'hex');\n hashArray.push(concatenateArrayBuffers(hexStringToBuffer(md5), bigintToBuffer(cdHeader.localHeaderOffset)));\n }\n const bufferArray = hashArray.sort(compareHashes);\n return concatenateArrayBuffersFromArray(bufferArray);\n}\n/**\n * Function to compare md5 hashes according to https://github.com/Esri/i3s-spec/blob/master/docs/2.0/slpk_hashtable.pcsl.md\n * @param arrA first hash to compare\n * @param arrB second hash to compare\n * @returns 0 if equal, negative number if a<b, pozitive if a>b\n */\nfunction compareHashes(arrA, arrB) {\n const a = new BigUint64Array(arrA);\n const b = new BigUint64Array(arrB);\n return Number(a[0] === b[0] ? a[1] - b[1] : a[0] - b[0]);\n}\n/**\n * converts hex string to buffer\n * @param str hex string to convert\n * @returns conversion result\n */\nfunction hexStringToBuffer(str) {\n const byteArray = str.match(/../g)?.map((h) => parseInt(h, 16));\n return new Uint8Array(byteArray ?? new Array(16)).buffer;\n}\n/**\n * converts bigint to buffer\n * @param n bigint to convert\n * @returns convertion result\n */\nfunction bigintToBuffer(n) {\n return new BigUint64Array([n]).buffer;\n}\n"],
|
|
5
5
|
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA,6BAAAA;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,8BAAAA;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACGA,mBAAkB;AAGlB,IAAM,UAAU,OAAiC,UAAU;AACpD,IAAM,YAAY;AAAA,EACrB,UAAU;AAAA,EACV,WAAW;AAAA,EACX,IAAI;AAAA,EACJ,QAAQ;AAAA,EACR,MAAM;AAAA,EACN,SAAS;AAAA,EACT,YAAY,CAAC,KAAK;AAAA,EAClB,WAAW,CAAC,iBAAiB;AAAA,EAC7B,UAAU;AAAA,EACV,OAAO,CAAC,IAAI;AAAA,EACZ,SAAS,CAAC;AAAA,EACV,OAAO;AACX;AAEA,eAAe,cAAc,MAAM,UAAU,CAAC,GAAG;AAC7C,QAAM,WAAW,CAAC;AAClB,QAAM,UAAU,CAAC;AACjB,MAAI;AACA,UAAM,QAAQ,IAAI,aAAAC,QAAM;AACxB,UAAM,MAAM,MAAM,MAAM,UAAU,MAAM,OAAO;AAE/C,QAAI,QAAQ,CAAC,cAAc,aAAa;AACpC,YAAM,cAAc,SAAS;AAC7B,YAAM,UAAU,aAAa,OAAO,aAAa,OAAO,EAAE,KAAK,CAAC,uBAAuB;AACnF,gBAAQ,YAAY,IAAI;AAAA,MAC5B,CAAC;AAED,eAAS,KAAK,OAAO;AAAA,IACzB,CAAC;AACD,UAAM,QAAQ,IAAI,QAAQ;AAC1B,WAAO;AAAA,EACX,SACO,OAAP;AAEI,YAAQ,IAAI,MAAM,+BAA+B,OAAO;AACxD,UAAM;AAAA,EACV;AACJ;AACA,eAAe,aAAa,OAAO,aAAa,UAAU,CAAC,GAAG;AAG1D,MAAI;AACA,UAAM,cAAc,MAAM,MAAM,KAAK,WAAW,EAAE,MAAM,QAAQ,YAAY,aAAa;AACzF,WAAO;AAAA,EACX,SACO,OAAP;AACI,YAAQ,IAAI,MAAM,kBAAkB,iCAAiC,OAAO;AAE5E,WAAO;AAAA,EACX;AACJ;;;ACvDA,IAAAC,gBAAkB;AAElB,IAAMC,WAAU,OAAiC,UAAU;AAIpD,IAAM,YAAY;AAAA,EACrB,MAAM;AAAA,EACN,IAAI;AAAA,EACJ,QAAQ;AAAA,EACR,SAASA;AAAA,EACT,YAAY,CAAC,KAAK;AAAA,EAClB,UAAU;AAAA,EACV,WAAW,CAAC,iBAAiB;AAAA,EAC7B,SAAS;AAAA,IACL,KAAK;AAAA,MACD,UAAU,MAAM;AAAA,MAAE;AAAA,IACtB;AAAA,IACA,OAAO,CAAC;AAAA,EACZ;AAAA,EACA,QAAQ;AACZ;AACA,eAAe,eAAe,SAAS,UAAU,CAAC,GAAG;AAzBrD;AA0BI,QAAM,QAAQ,IAAI,cAAAC,QAAM;AAExB,aAAW,eAAe,SAAS;AAC/B,UAAM,cAAc,QAAQ,WAAW;AAGvC,UAAM,KAAK,aAAa,cAAa,mCAAS,UAAS,CAAC,CAAC;AAAA,EAC7D;AACA,QAAM,aAAa,EAAE,GAAG,UAAU,QAAQ,KAAK,GAAG,mCAAS,IAAI;AAC/D,QAAM,eAAe,EAAE,IAAG,eAAU,YAAV,mBAAmB,OAAO,GAAG,QAAQ,MAAM;AACrE,MAAI;AACA,WAAO,MAAM,MAAM;AAAA,MAAc,EAAE,GAAG,cAAc,MAAM,cAAc;AAAA;AAAA,MACxE,WAAW;AAAA,IAAQ;AAAA,EACvB,SACO,OAAP;AACI,YAAQ,IAAI,MAAM,iCAAiC,OAAO;AAC1D,UAAM;AAAA,EACV;AACJ;;;AC9BO,SAAS,MAAM,QAAQ;AAC1B,MAAI;AACJ,QAAM,SAAS,IAAI,WAAW,MAAM;AACpC,OAAK,IAAI,GAAG,IAAI,QAAQ,KAAK,GAAG;AAC5B,WAAO,CAAC,IAAI;AAAA,EAChB;AACA,SAAO;AACX;AAQO,SAAS,IAAI,KAAK,OAAO,MAAM;AAClC,QAAM,SAAS,IAAI,SAAS,QAAQ,CAAC;AACrC,SAAO,eAAe,OAAO,OAAO,SAAS,KAAK,KAAK,IAAI;AAC/D;AAQO,SAAS,cAAc,OAAO,KAAK,QAAQ;AAC9C,MAAI;AACJ,MAAI;AACJ,QAAM,OAAO,MAAM,MAAM,MAAM;AAC/B,WAAS,UAAU;AACnB,OAAK,IAAI,GAAG,SAAS,MAAM,QAAQ,IAAI,QAAQ,KAAK,GAAG;AACnD,QAAI,MAAM,IAAI,MAAM,WAAW,CAAC;AAChC,cAAU;AAAA,EACd;AACA,SAAO;AACX;;;AClBA,IAAM,YAAY;AAAA,EACd,UAAU;AAAA,EACV,UAAU;AAAA,EACV,KAAK;AAAA,EACL,KAAK;AAAA,EACL,UAAU;AAAA,EACV,OAAO;AAAA,EACP,UAAU;AAAA,EACV,MAAM;AAAA,EACN,UAAU;AAAA,EACV,OAAO;AAAA,EACP,OAAO;AAAA,EACP,OAAO;AAAA,EACP,aAAa;AAAA,EACb,aAAa;AAAA,EACb,gBAAgB;AAAA,EAChB,SAAS;AACb;AAOO,SAAS,OAAO,MAAM,IAAI;AAC7B,QAAM,SAAe,MAAM,GAAG;AAC9B,MAAI,SAAS;AACb,SAAO,QAAQ,SAAS,EAAE,QAAQ,CAAC,CAAC,OAAO,MAAM,MAAM;AACnD,UAAM,MAAM,KAAK,KAAK,KAAK;AAC3B,QAAI;AACJ,QAAI;AACJ,SAAK,IAAI,GAAG,cAAc,IAAI,QAAQ,IAAI,aAAa,KAAK,GAAG;AAC3D,aAAO,MAAM,IAAI,IAAI,WAAW,CAAC;AACjC,gBAAU;AAAA,IACd;AAEA,cAAU,SAAS;AAAA,EACvB,CAAC;AACD,MAAI,OAAO,OAAO,YAAY;AAC1B,WAAO,GAAG,QAAQ,MAAM;AAAA,EAC5B;AACA,SAAO;AACX;;;AC/DA,IAAI;AACJ,IAAI;AACJ,IAAI;AACJ,IAAM,aAAa;AACnB,IAAM,MAAN,MAAU;AAAA,EACN;AAAA,EACA;AAAA,EACA,SAAS,CAAC;AAAA,EACV;AAAA;AAAA;AAAA;AAAA,EAIA,YAAY,iBAAiB;AACzB,SAAK,UAAU;AACf,iBAAa,mBAAmB,MAAM;AACtC,SAAK,MAAM,MAAM,SAAS;AAC1B,SAAK,SAAS,CAAC;AACf,SAAK,SAAS;AACd,SAAK,OAAO,KAAK,KAAK,KAAK,IAAI;AAC/B,SAAK,QAAQ,KAAK,MAAM,KAAK,IAAI;AACjC,SAAK,SAAS,KAAK,OAAO,KAAK,IAAI;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,OAAO,UAAU,OAAO,MAAM;AAC1B,QAAI;AACJ,QAAI,OAAO,UAAU,UAA
U;AAC3B,cAAQ,cAAc,KAAK;AAAA,IAC/B,WACS,MAAM,eAAe,MAAM,gBAAgB,WAAW,UAAU,aAAa;AAElF,YAAM,kBAAkB,4CAA4C,KAAK,MAAM,YAAY,SAAS,CAAC;AACrG,YAAM,aAAa,mBAAmB,gBAAgB,CAAC;AACvD,YAAM,eAAe,oCAAoC;AACzD,YAAM;AAAA,IACV;AACA,WAAO,QAAQ,CAAC;AAChB,UAAM,OAAO,KAAK,QAAQ,SAAS,OAAO,CAAC,IAAI;AAC/C,UAAM,QAAQ,KAAK,SAAS,KAAK,MAAM,OAAO,IAAI,KAAK,CAAC,IAAI,GAAI;AAChE,UAAM,MAAM,KAAK,OAAO;AACxB,UAAM,MAAM,KAAK,OAAO;AACxB,UAAM,OAAO;AAAA,MACT,UAAU;AAAA,MACV,UAAU,IAAI,MAAM,CAAC;AAAA,MACrB,KAAK,IAAI,KAAK,CAAC;AAAA,MACf,KAAK,IAAI,KAAK,CAAC;AAAA,MACf,UAAU,IAAI,MAAM,QAAQ,EAAE;AAAA,MAC9B,OAAO,IAAI,OAAO,EAAE;AAAA,MACpB,UAAU;AAAA;AAAA,MAEV,MAAM;AAAA,MACN,OAAO;AAAA,MACP,OAAO,KAAK,SAAS;AAAA,MACrB,OAAO,KAAK,SAAS;AAAA,IACzB;AAEA,eAAW;AACX,WAAO,KAAK,IAAI,EAAE,QAAQ,CAAC,QAAQ;AAC/B,UAAI;AACJ,YAAM,QAAQ,KAAK,GAAG;AACtB,UAAI;AACJ,WAAK,IAAI,GAAG,SAAS,MAAM,QAAQ,IAAI,QAAQ,KAAK,GAAG;AACnD,oBAAY,MAAM,WAAW,CAAC;AAAA,MAClC;AAAA,IACJ,CAAC;AACD,SAAK,WAAW,GAAG,IAAI,UAAU,CAAC;AAClC,UAAM,YAAY,OAAO,IAAI;AAC7B,mBAAe,KAAK,KAAK,UAAU,SAAS,UAAU,IAAI;AAC1D,kBAAc,KAAK,KAAK,MAAM,SAAS,UAAU,IAAI;AACrD,SAAK,OAAO,KAAK;AAAA,MACb,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,IACJ,CAAC;AAAA,EACL;AAAA;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO;AACH,UAAM,UAAU,CAAC;AACjB,UAAM,SAAS,IAAI,MAAM;AACzB,QAAI,SAAS;AACb,UAAM,MAAM,KAAK,IAAI,GAAG,EAAE;AAC1B,QAAI,QAAQ,IAAI,MAAM;AACtB,SAAK,OAAO,QAAQ,CAAC,IAAI,CAAC,MAAM;AAC5B,UAAI,SAAS,EAAE,eAAe,EAAE,cAAc,KAAK;AAC/C,eAAO,KAAK,EAAE,QAAQ,OAAO,OAAO,CAAC;AACrC,gBAAQ,CAAC;AACT,iBAAS;AAAA,MACb;AACA,YAAM,KAAK,CAAC;AACZ,gBAAU,EAAE,eAAe,EAAE;AAAA,IACjC,CAAC;AACD,WAAO,KAAK,EAAE,QAAQ,OAAO,OAAO,CAAC;AACrC,WAAO,QAAQ,CAAC,IAAI,CAAC,MAAM;AACvB,YAAM,SAAS,IAAI,WAAW,EAAE,MAAM;AACtC,UAAI,UAAU;AACd,QAAE,OAAO,QAAQ,CAAC,IAAI,CAAC,MAAM;AACzB,eAAO,IAAI,EAAE,QAAQ,OAAO;AAC5B,mBAAW,EAAE;AACb,eAAO,IAAI,EAAE,OAAO,OAAO;AAC3B,mBAAW,EAAE;AAAA,MACjB,CAAC;AACD,cAAQ,KAAK,MAAM;AAAA,IACvB,CAAC;AACD,YAAQ,KAAK,IAAI,WAAW,IAAI,UAAU,CAAC;AAC3C,WAAO,IAAI,KAAK,SAAS,EAAE,MAAM,eAAe,CAAC;AAAA,EACrD;AAAA;AAAA;AAAA;AAAA,EAIA,QAAQ;AACJ,SAAK,UAAU;AACf,SAAK,MAAM,MAAM,SAAS;AAAA,EAC9B;AACJ;AACA,IAAO,cAAQ;;;AClIf,IAAM,sBAAsB;AAAA,EACxB,iBAAiB;AACrB;AAIO,IAAM,aAAN,MAAiB;AAAA,EACpB,WAAW,aAAa;AACpB,WAAO;AAAA,MACH,IAAI;AAAA,MACJ,MAAM;AAAA,MACN,YAAY,CAAC,KAAK;AAAA,MAClB,WAAW,CAAC,mBAAmB;AAAA,MAC/B,SAAS;AAAA,MACT,SAAS;AAAA,IACb;AAAA,EACJ;AAAA,EACA;AAAA,EACA;AAAA,EACA,QAAQ;AAAA,EACR,YAAY,SAAS;AACjB,SAAK,UAAU,EAAE,GAAG,qBAAqB,GAAG,QAAQ;AACpD,SAAK,OAAO,IAAI,YAAI,KAAK,QAAQ,eAAe;AAAA,EACpD;AAAA;AAAA,EAEA,QAAQ,UAAU,QAAQ;AACtB,SAAK,KAAK,OAAO,UAAU,IAAI,WAAW,MAAM,CAAC;AACjD,SAAK;AAAA,EACT;AAAA,EACA,MAAM,QAAQ;AACV,WAAO,IAAI,SAAS,KAAK,KAAK,KAAK,CAAC,EAAE,YAAY;AAAA,EACtD;AACJ;;;ACjCA,IAAAC,uBAA2E;;;ACA3E,IAAAC,uBAA6D;;;ACA7D,IAAM,aAAa;AAOZ,IAAM,mBAAmB,OAAO,MAAM,WAAW;AACpD,QAAM,eAAe;AAAA,IACjB,MAAM,KAAK,SAAS,KAAK,SAAS,EAAE;AAAA,IACpC,MAAM,KAAK,SAAS,KAAK,SAAS,EAAE;AAAA,IACpC,MAAM,KAAK,SAAS,KAAK,SAAS,EAAE;AAAA,IACpC;AAAA,EACJ;AACA,MAAI,eAAe;AAEnB,MAAI,QAAQ,KAAK,SAAS;AAC1B,KAAG;AACC,UAAM,YAAY;AAClB,aAAS,OAAO,UAAU;AAC1B,YAAQ,SAAS,KAAK,QAAQ;AAC9B,UAAM,OAAO,IAAI,WAAW,MAAM,KAAK,MAAM,OAAO,SAAS,CAAC;AAC9D,aAAS,IAAI,KAAK,SAAS,GAAG,IAAI,IAAI,KAAK;AACvC,mBAAa,CAAC,IAAI,aAAa,CAAC;AAChC,mBAAa,CAAC,IAAI,aAAa,CAAC;AAChC,mBAAa,CAAC,IAAI,aAAa,CAAC;AAChC,mBAAa,CAAC,IAAI,KAAK,CAAC;AACxB,UAAI,aAAa,MAAM,CAAC,KAAK,UAAU,QAAQ,OAAO,KAAK,CAAC,GAAG;AAC3D,uBAAe;AACf;AAAA,MACJ;AAAA,IACJ;AAAA,EACJ,SAAS,iBAAiB,MAAM,QAAQ;AACxC,SAAO,QAAQ,OAAO,YAAY;AACtC;;;ACrCA,0BAAwC;AACjC,IAAM,YAAY,IAAI,WAAW,CAAC,GAAM,CAAI,CAAC;AAM7C,SAAS,gBAAgB,SAAS;AACrC,QAAM,eAAe;AAAA,IACjB,GAAG;AAAA,IACH,cAAc,QAAQ,SAAS,IAAI,KAAK,KAAK,QAAQ,OAAO,IAAI,KAAK;AAAA,EACzE;AACA,QAAM,iBAAiB,CAAC
;AACxB,aAAW,SAAS,cAAc;AAC9B,QAAI,CAAC,aAAa,MAAM,QAAQ,EAAE,KAAK,CAAC,MAAM,SAAS;AACnD;AAAA,IACJ;AACA,UAAM,WAAW,IAAI,SAAS,IAAI,YAAY,MAAM,IAAI,CAAC;AACzD,mBAAe,MAAM,IAAI,EAAE,UAAU,GAAG,aAAa,MAAM,QAAQ,EAAE,KAAK,MAAM,OAAO;AACvF,mBAAe,KAAK,SAAS,MAAM;AAAA,EACvC;AACA,aAAO,6CAAwB,GAAG,cAAc;AACpD;AAQO,SAAS,iBAAiB,QAAQ,WAAW,aAAa,OAAO;AACpE,iBAAe,SAAS,EAAE,QAAQ,OAAO,WAAW,GAAG,KAAK;AAChE;AAEA,IAAM,iBAAiB;AAAA,EACnB,GAAG,CAAC,QAAQ,QAAQ,UAAU;AAC1B,WAAO,UAAU,QAAQ,OAAO,QAAQ,QAAS,QAAS,KAAK,GAAG,IAAI;AAAA,EAC1E;AAAA,EACA,GAAG,CAAC,QAAQ,QAAQ,UAAU;AAC1B,WAAO,UAAU,QAAQ,OAAO,QAAQ,aAAa,aAAa,KAAK,GAAG,IAAI;AAAA,EAClF;AAAA,EACA,GAAG,CAAC,QAAQ,QAAQ,UAAU;AAC1B,WAAO,aAAa,QAAQ,OAAO,KAAK,GAAG,IAAI;AAAA,EACnD;AACJ;AAEA,IAAM,eAAe;AAAA;AAAA,EAEjB;AAAA,IACI,MAAM;AAAA,IACN,SAAS,IAAI,SAAS,UAAU,MAAM,EAAE,UAAU,GAAG,IAAI;AAAA,EAC7D;AAAA;AAAA,EAEA;AAAA,IACI,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AAAA;AAAA,EAEA;AAAA,IACI,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AAAA;AAAA,EAEA;AAAA,IACI,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AAAA;AAAA,EAEA;AAAA,IACI,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AACJ;;;AFlEA,IAAM,gBAAgB,IAAI,WAAW,CAAC,IAAM,IAAM,GAAM,CAAI,CAAC;AAC7D,IAAM,4BAA4B,IAAI,WAAW,CAAC,IAAM,IAAM,GAAM,CAAI,CAAC;AACzE,IAAM,qBAAqB,IAAI,WAAW,CAAC,IAAM,IAAM,GAAM,CAAI,CAAC;AAElE,IAAM,2BAA2B;AACjC,IAAM,mCAAmC;AACzC,IAAM,yBAAyB;AAC/B,IAAM,yBAAyB;AAC/B,IAAM,oBAAoB;AAC1B,IAAM,iCAAiC;AACvC,IAAM,iCAAiC;AACvC,IAAM,yCAAyC;AAC/C,IAAM,+BAA+B;AACrC,IAAM,+BAA+B;AACrC,IAAM,uBAAuB;AAMtB,IAAM,kBAAkB,OAAO,SAAS;AAC3C,QAAM,gBAAgB,MAAM,iBAAiB,MAAM,aAAa;AAChE,MAAI,kBAAkB,OAAO,MAAM,KAAK,UAAU,gBAAgB,wBAAwB,CAAC;AAC3F,MAAI,aAAa,OAAO,MAAM,KAAK,UAAU,gBAAgB,sBAAsB,CAAC;AACpF,MAAI,gBAAgB,OAAO,MAAM,KAAK,UAAU,gBAAgB,sBAAsB,CAAC;AACvF,MAAI,yBAAyB,gBAAgB;AAC7C,MAAI,kBAAkB;AACtB,QAAM,aAAa,MAAM,KAAK,MAAM,wBAAwB,yBAAyB,EAAE;AACvF,UAAI,0CAAoB,YAAY,yBAAyB,GAAG;AAC5D,sBAAkB,MAAM,KAAK,aAAa,yBAAyB,8BAA8B;AACjG,UAAM,oBAAoB,MAAM,KAAK,MAAM,iBAAiB,kBAAkB,EAAE;AAChF,QAAI,KAAC,0CAAoB,mBAAmB,mBAAmB,MAAM,GAAG;AACpE,YAAM,IAAI,MAAM,sBAAsB;AAAA,IAC1C;AACA,sBAAkB,MAAM,KAAK,aAAa,kBAAkB,8BAA8B;AAC1F,iBAAa,MAAM,KAAK,aAAa,kBAAkB,4BAA4B;AACnF,oBAAgB,MAAM,KAAK,aAAa,kBAAkB,4BAA4B;AAAA,EAC1F,OACK;AACD,6BAAyB;AAAA,EAC7B;AACA,SAAO;AAAA,IACH;AAAA,IACA;AAAA,IACA;AAAA,IACA,SAAS;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IACJ;AAAA,EACJ;AACJ;AASO,SAAS,WAAW,UAAU,gBAAgB,kBAAkB,iBAAiB,oBAAoB;AACxG,QAAM,OAAO,IAAI,SAAS,QAAQ;AAClC,QAAM,oBAAoB,eAAe,kBACnC,eAAe,gBAAgB,eAAe,kBAC9C;AAEN,MAAI,OAAO,kBAAkB,KAAK,OAAQ;AACtC,qBAAiB,MAAM,GAAG,oBAAoB,0BAA0B,kBAAkB;AAC1F,qBAAiB,MAAM,GAAG,oBAAoB,kCAAkC,kBAAkB;AAAA,EACtG;AAEA,MAAI,kBAAkB,oBAAoB,YAAY;AAClD,qBAAiB,MAAM,GAAG,oBAAoB,wBAAwB,kBAAkB,gBAAgB;AAAA,EAC5G;AAEA,MAAI,mBAAmB,YAAY;AAC/B,qBAAiB,MAAM,GAAG,oBAAoB,wBAAwB,gBAAgB;AAAA,EAC1F;AAEA,MAAI,eAAe,0BAA0B,eAAe,iBAAiB;AAEzE,UAAM,gBAAgB,eAAe,yBAAyB,eAAe;AAC7E,qBAAiB,MAAM,GAAG,gBAAgB,gCAAgC,eAAe;AAEzF,qBAAiB,MAAM,GAAG,8BAA8B,gBAAgB;AAExE,qBAAiB,MAAM,GAAG,gCAAgC,kBAAkB;AAC5E,qBAAiB,MAAM,GAAG,wCAAwC,kBAAkB;AAEpF,qBAAiB,MAAM,GAAG,8BAA8B,kBAAkB,gBAAgB;AAAA,EAC9F;AACA,SAAO,IAAI,WAAW,KAAK,MAAM;AACrC;AAMO,SAAS,aAAa,SAAS;AAClC,QAAM,SAAS,IAAI,SAAS,IAAI,YAAY,OAAO,iBAAiB,CAAC,CAAC;AACtE,aAAW,SAAS,aAAa;AAC7B,qBAAiB,QAAQ,MAAM,MAAM,MAAM,QAAQ,QAAQ,MAAM,QAAQ,EAAE,KAAK,MAAM,WAAW,CAAC;AAAA,EACtG;AACA,QAAM,UAAU,yBAAyB,OAAO;AAChD,QAAM,cAAc,kBAAkB,OAAO;AAC7C,aAAO,8CAAwB,aAAa,SAAS,OAAO,MAAM;AACtE;AAEA,IAAM,cAAc;AAAA;AAAA,EAEhB;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS,IAAI,SAAS,cAAc,MAAM,EAAE,UAAU,GAAG,IAAI;AAAA,EACjE;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,I
ACR,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AACJ;AAMA,SAAS,kBAAkB,SAAS;AAChC,QAAM,SAAS,IAAI,SAAS,IAAI,YAAY,OAAO,oBAAoB,CAAC,CAAC;AACzE,aAAW,SAAS,mBAAmB;AACnC,qBAAiB,QAAQ,MAAM,MAAM,MAAM,QAAQ,QAAQ,MAAM,QAAQ,EAAE,KAAK,MAAM,WAAW,CAAC;AAAA,EACtG;AACA,SAAO,OAAO;AAClB;AAMA,SAAS,yBAAyB,SAAS;AACvC,QAAM,UAAU,IAAI,SAAS,IAAI,YAAY,OAAO,EAAE,CAAC,CAAC;AACxD,aAAW,SAAS,2BAA2B;AAC3C,qBAAiB,SAAS,MAAM,MAAM,MAAM,QAAQ,QAAQ,MAAM,QAAQ,EAAE,KAAK,MAAM,WAAW,CAAC;AAAA,EACvG;AACA,SAAO,QAAQ;AACnB;AAEA,IAAM,4BAA4B;AAAA;AAAA,EAE9B;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS,IAAI,SAAS,0BAA0B,MAAM,EAAE,UAAU,GAAG,IAAI;AAAA,EAC7E;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AACJ;AAEA,IAAM,oBAAoB;AAAA;AAAA,EAEtB;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS,IAAI,SAAS,mBAAmB,MAAM,EAAE,UAAU,GAAG,IAAI;AAAA,EACtE;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AACJ;;;AD9QA,IAAM,4BAA4B;AAClC,IAAM,8BAA8B;AACpC,IAAM,6BAA6B;AACnC,IAAM,+BAA+B;AACrC,IAAM,uBAAuB;AAC7B,IAAM,gCAAgC;AACtC,IAAM,sBAAsB;AACrB,IAAMC,aAAY,IAAI,WAAW,CAAC,IAAM,IAAM,GAAM,CAAI,CAAC;AAOzD,IAAM,uBAAuB,OAAO,cAAc,SAAS;AAC9D,MAAI,gBAAgB,KAAK,QAAQ;AAC7B,WAAO;AAAA,EACX;AACA,QAAM,aAAa,IAAI,SAAS,MAAM,KAAK,MAAM,cAAc,eAAe,mBAAmB,CAAC;AAClG,QAAM,aAAa,WAAW,OAAO,MAAM,GAAG,CAAC;AAC/C,MAAI,KAAC,0CAAoB,YAAYA,WAAU,MAAM,GAAG;AACpD,WAAO;AAAA,EACX;AACA,QAAM,iBAAiB,OAAO,WAAW,UAAU,2BAA2B,IAAI,CAAC;AACnF,QAAM,mBAAmB,OAAO,WAAW,UAAU,6BAA6B,IAAI,CAAC;AACvF,QAAM,mBAAmB,WAAW,UAAU,8BAA8B,IAAI;AAChF,QAAM,YAAY,OAAO,WAAW,UAAU,sBAAsB,IAAI,CAAC;AACzE,QAAM,iBAAiB,WAAW,UAAU,4BAA4B,IAAI;AAC5E,QAAM,mBAAmB,MAAM,KAAK,MAAM,eAAe,qBAAqB,eAAe,sBAAsB,OAAO,iBAAiB,gBAAgB,CAAC;AAC5J,QAAM,gBAAgB,iBAAiB,MAAM,GAAG,cAAc;AAC9D,QAAM,WAAW,IAAI,YAAY,EAAE,OAAO,aAAa;AACvD,QAAM,cAAc,eAAe,sBAAsB,OAAO,cAAc;AAC9E,QAAM,kBAAkB,WAAW,UAAU,+BAA+B,IAAI;AAChF,QAAM,oBAAoB,OAAO,eAAe;AAChD,QAAM,aAAa,IAAI,SAAS,iBAAiB,MAAM,gBAAgB,iBAAiB,UAAU,CAAC;AAEnG,QAAM,YAAY;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACJ;AACA,QAAM,MAAM,qBAAqB,WAAW,UAAU;AACtD,SAAO;AAAA,IACH,GAAG;AAAA,IACH,GAAG;AAAA,IACH;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACJ;AACJ;AAKA,gBAAuB,wBAAwB,cAAc;AACzD,QAAM,EAAE,eAAe,WAAW,IAAI,MAAM,gBAAgB,YAAY;AACxE,QAAM,mBAAmB,IAAI,kCAAa,IAAI,SAAS,MAAM,aAAa,MAAM,eAAe,gBAAgB,UAAU,CAAC,CAAC;AAC3H,MAAI,WAAW,MAAM,qBAAqB,IAAI,gBAAgB;AAC9D,SAAO,UAAU;AACb,UAAM;AACN,eAAW,MAAM,qBAAqB,SAAS,cAAc,OAAO,SAAS,gBAAgB,GAAG,gBAAgB;AAAA,EACpH;AACJ;AAMA,IAAM,YAAY,IAAI,UAAU;AAC5B,SAAO,MAAM,CAAC,IAAI,MAAM,CAAC,IAAI;AACjC;AAOA,IAAM,uBAAuB,CAAC,WAAW,eAAe;AACpD,QAAM,gBAAgB,iBAAiB,SAAS;AAChD,QAAM,eAAe,CAAC;AACtB,MAAI,cAAc,SAAS,GAAG;AAE1B,UAAM,iBAAiB,cAAc,OAAO,CAAC,KAAK
,SAAS,MAAM,KAAK,QAAQ,CAAC;AAG/E,UAAM,oBAAoB,IAAI,WAAW,WAAW,MAAM,EAAE,UAAU,CAAC,MAAM,GAAG,QAAQ,UAAU,IAAI,CAAC,GAAG,IAAI,IAAI,CAAC,CAAC,MAAM,KACtH,UAAU,IAAI,IAAI,CAAC,GAAG,IAAI,IAAI,CAAC,CAAC,MAAM,cAAc;AAExD,QAAI,YAAY;AAChB,eAAW,QAAQ,eAAe;AAC9B,YAAM,SAAS;AACf,mBAAa,KAAK,IAAI,IAAI,WAAW,aAAa,oBAAoB,IAAI,QAAQ,IAAI;AACtF,kBAAY,SAAS,KAAK;AAAA,IAC9B;AAAA,EACJ;AACA,SAAO;AACX;AAMA,IAAM,mBAAmB,CAAC,cAAc;AAEpC,QAAM,gBAAgB,CAAC;AACvB,MAAI,UAAU,qBAAqB,OAAO,UAAU,GAAG;AACnD,kBAAc,KAAK,EAAE,MAAM,oBAAoB,QAAQ,EAAE,CAAC;AAAA,EAC9D;AACA,MAAI,UAAU,mBAAmB,OAAO,UAAU,GAAG;AACjD,kBAAc,KAAK,EAAE,MAAM,kBAAkB,QAAQ,EAAE,CAAC;AAAA,EAC5D;AACA,MAAI,UAAU,sBAAsB,OAAO,UAAU,GAAG;AACpD,kBAAc,KAAK,EAAE,MAAM,qBAAqB,QAAQ,EAAE,CAAC;AAAA,EAC/D;AACA,MAAI,UAAU,cAAc,OAAO,UAAU,GAAG;AAC5C,kBAAc,KAAK,EAAE,MAAM,aAAa,QAAQ,EAAE,CAAC;AAAA,EACvD;AACA,SAAO;AACX;AAMO,SAAS,iBAAiB,SAAS;AACtC,QAAM,eAAe;AAAA,IACjB,GAAG;AAAA,IACH,UAAU,QAAQ,SAAS;AAAA,IAC3B,aAAa;AAAA,EACjB;AACA,MAAI,cAAc,IAAI,YAAY,CAAC;AACnC,QAAM,iBAAiB,CAAC;AACxB,MAAI,aAAa,UAAU,YAAY;AACnC,mBAAe,SAAS,aAAa;AACrC,iBAAa,SAAS,OAAO,UAAU;AAAA,EAC3C;AACA,MAAI,aAAa,UAAU,YAAY;AACnC,mBAAe,OAAO,aAAa;AACnC,iBAAa,SAAS;AAAA,EAC1B;AACA,MAAI,OAAO,KAAK,cAAc,EAAE,QAAQ;AACpC,kBAAc,gBAAgB,cAAc;AAC5C,iBAAa,cAAc,YAAY;AAAA,EAC3C;AACA,QAAM,SAAS,IAAI,SAAS,IAAI,YAAY,OAAO,mBAAmB,CAAC,CAAC;AACxE,aAAW,SAAS,mBAAmB;AACnC,qBAAiB,QAAQ,MAAM,MAAM,MAAM,QAAQ,aAAa,MAAM,QAAQ,EAAE,KAAK,MAAM,WAAW,CAAC;AAAA,EAC3G;AACA,QAAM,cAAc,IAAI,YAAY,EAAE,OAAO,aAAa,QAAQ;AAClE,QAAM,gBAAY,8CAAwB,OAAO,QAAQ,aAAa,WAAW;AACjF,SAAO;AACX;AAEA,IAAM,oBAAoB;AAAA;AAAA,EAEtB;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS,IAAI,SAASA,WAAU,MAAM,EAAE,UAAU,GAAG,IAAI;AAAA,EAC7D;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,IACT,MAAM;AAAA,EACV;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AACJ;;;AIvQA,IAAAC,uBAA6D;AAG7D,IAAM,4BAA4B;AAClC,IAAM,yBAAyB;AAC/B,IAAM,2BAA2B;AACjC,IAAM,0BAA0B;AAChC,IAAM,4BAA4B;AAClC,IAAM,mBAAmB;AAClB,IAAMC,aAAY,IAAI,WAAW,CAAC,IAAM,IAAM,GAAM,CAAI,CAAC;AAOzD,IAAM,0BAA0B,OAAO,cAAc,SAAS;AACjE,QAAM,aAAa,IAAI,SAAS,MAAM,KAAK,MAAM,cAAc,eAAe,gBAAgB,CAAC;AAC/F,QAAM,aAAa,WAAW,OAAO,MAAM,GAAG,CAAC;AAC/C,MAAI,KAAC,0CAAoB,YAAYA,UAAS,GAAG;AAC7C,WAAO;AAAA,EACX;AACA,QAAM,iBAAiB,WAAW,UAAU,yBAAyB,IAAI;AACzE,QAAM,mBAAmB,WAAW,UAAU,2BAA2B,IAAI;AAC7E,QAAM,mBAAmB,MAAM,KAAK,MAAM,eAAe,kBAAkB,eAAe,mBAAmB,OAAO,iBAAiB,gBAAgB,CAAC;AACtJ,QAAM,iBAAiB,iBAAiB,MAAM,GAAG,cAAc;AAC/D,QAAM,kBAAkB,IAAI,SAAS,iBAAiB,MAAM,gBAAgB,iBAAiB,UAAU,CAAC;AACxG,QAAM,WAAW,IAAI,YAAY,EAAE,OAAO,cAAc,EAAE,MAAM,IAAI,EAAE,KAAK,GAAG;AAC9E,MAAI,iBAAiB,eAAe,mBAAmB,OAAO,iBAAiB,gBAAgB;AAC/F,QAAM,oB
AAoB,WAAW,UAAU,2BAA2B,IAAI;AAC9E,MAAI,iBAAiB,OAAO,WAAW,UAAU,wBAAwB,IAAI,CAAC;AAC9E,MAAI,mBAAmB,OAAO,WAAW,UAAU,0BAA0B,IAAI,CAAC;AAClF,MAAI,oBAAoB;AAExB,MAAI,qBAAqB,OAAO,UAAU,GAAG;AACzC,uBAAmB,gBAAgB,aAAa,mBAAmB,IAAI;AACvE,yBAAqB;AAAA,EACzB;AACA,MAAI,mBAAmB,OAAO,UAAU,GAAG;AACvC,qBAAiB,gBAAgB,aAAa,mBAAmB,IAAI;AACrE,yBAAqB;AAAA,EACzB;AACA,MAAI,mBAAmB,OAAO,UAAU,GAAG;AACvC,qBAAiB,gBAAgB,aAAa,mBAAmB,IAAI;AAAA,EACzE;AACA,SAAO;AAAA,IACH;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACJ;AACJ;AAMO,SAAS,oBAAoB,SAAS;AACzC,QAAM,eAAe;AAAA,IACjB,GAAG;AAAA,IACH,aAAa;AAAA,IACb,UAAU,QAAQ,SAAS;AAAA,EAC/B;AACA,MAAI,cAAc,IAAI,YAAY,CAAC;AACnC,QAAM,iBAAiB,CAAC;AACxB,MAAI,aAAa,UAAU,YAAY;AACnC,mBAAe,OAAO,aAAa;AACnC,iBAAa,SAAS;AAAA,EAC1B;AACA,MAAI,OAAO,KAAK,cAAc,EAAE,QAAQ;AACpC,kBAAc,gBAAgB,cAAc;AAC5C,iBAAa,cAAc,YAAY;AAAA,EAC3C;AAEA,QAAM,SAAS,IAAI,SAAS,IAAI,YAAY,OAAO,gBAAgB,CAAC,CAAC;AACrE,aAAW,SAASC,oBAAmB;AACnC,qBAAiB,QAAQ,MAAM,MAAM,MAAM,QAAQ,aAAa,MAAM,QAAQ,EAAE,KAAK,MAAM,WAAW,CAAC;AAAA,EAC3G;AACA,QAAM,cAAc,IAAI,YAAY,EAAE,OAAO,aAAa,QAAQ;AAClE,QAAM,gBAAY,8CAAwB,OAAO,QAAQ,aAAa,WAAW;AACjF,SAAO;AACX;AACA,IAAMA,qBAAoB;AAAA;AAAA,EAEtB;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS,IAAI,SAASD,WAAU,MAAM,EAAE,UAAU,GAAG,IAAI;AAAA,EAC7D;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,EACb;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,EACV;AAAA;AAAA,EAEA;AAAA,IACI,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,SAAS;AAAA,IACT,MAAM;AAAA,EACV;AACJ;;;AC3JA,IAAAE,uBAAwF;AAExF,oBAA0B;AAG1B,kBAA0B;AAM1B,eAAe,cAAc,UAAU;AAEnC,QAAM,cAAc,MAAM,gBAAgB,QAAQ;AAClD,QAAM,mBAAmB,YAAY;AAErC,QAAM,cAAc,OAAO,YAAY,QAAQ,kBACzC,YAAY,QAAQ,kBAAkB,mBACtC,YAAY,QAAQ,gBAAgB,gBAAgB;AAE1D,QAAM,YAAY,MAAM,SAAS,MAAM,kBAAkB,SAAS,MAAM;AACxE,QAAM,SAAS,SAAS,OAAO,gBAAgB,CAAC;AAEhD,QAAM,YAAY,UAAU,MAAM,GAAG,WAAW;AAChD,QAAM,WAAW,UAAU,MAAM,aAAa,UAAU,UAAU;AAClE,SAAO,CAAC,WAAW,UAAU,WAAW;AAC5C;AAQA,eAAe,oBAAoB,UAAU,WAAW,uBAAuB;AAE3E,QAAM,gBAAgB,SAAS,MAAM,IAAI,wBAAU,EAAE,KAAK,WAAW,KAAK,GAAG,EAAE;AAE/E,QAAM,qBAAqB,oBAAoB;AAAA,IAC3C,OAAO;AAAA,IACP;AAAA,IACA,QAAQ,UAAU;AAAA,EACtB,CAAC;AAED,QAAM,kBAAkB,iBAAiB;AAAA,IACrC,OAAO;AAAA,IACP;AAAA,IACA,QAAQ;AAAA,IACR,QAAQ,UAAU;AAAA,EACtB,CAAC;AACD,SAAO;AAAA,IACH,IAAI,eAAW,8CAAwB,oBAAoB,SAAS,CAAC;AAAA,IACrE,IAAI,WAAW,eAAe;AAAA,EAClC;AACJ;AAOA,eAAsB,WAAW,QAAQ,WAAW,UAAU;AAE1D,QAAM,WAAW,IAAI,oCAAe,QAAQ,IAAI;AAChD,QAAM,CAAC,WAAW,UAAU,WAAW,IAAI,MAAM,cAAc,QAAQ;AAEvE,QAAM,gBAAgB,SAAS;AAC/B,QAAM,CAAC,WAAW,YAAY,IAAI,MAAM,oBAAoB,UAAU,WAAW,aAAa;AAE9F,QAAM,SAAS,OAAO,SAAS;AAE/B,QAAM,gBAAY,8CAAwB,WAAW,YAAY;AAEjE,QAAM,mBAAmB,SAAS;AAElC,QAAM,SAAS,OAAO,IAAI,WAAW,SAAS,CAAC;AAE/C,QAAM,aAAa,SAAS;AAC5B,QAAM,SAAS,OAAO,WAAW,UAAU,YAAY,SAAS,kBAAkB,YAAY,YAAY,kBAAkB,EAAE,CAAC;AACnI;AAOA,eAAsB,UAAU,WAAW,YAAY,sBAAsB;AACzE,QAAM,eAAe,gBAAgB,SAAS;AAC9C,QAAM,UAAU,IAAI,8BAAS,YAAY,GAAG;AAC5C,QAAM,WAAW,CAAC;AAClB,QAAM,UAAU,CAAC;AACjB,mBAAiB,QAAQ,cAAc;AACnC,UAAM,QAAQ,MAAM,SAAS,SAAS,QAAQ;AAAA,EAClD;AACA,MAAI,sBAAsB;AACtB,UAAM,iBAAiB,MAAM,qBAAqB,QAAQ;AAC1D,UAAM,QAAQ,gBAAgB,SAAS,OAAO;AAAA,EAClD;AACA,QAAM,YAAY,MAAM,QAAQ,KAAK,GAAG;AACxC,QAAM,SAAK,8CAAwB,GAAG,OAAO;AAC7C,QAA
M,QAAQ,OAAO,IAAI,WAAW,EAAE,CAAC;AACvC,QAAM,aAAa,MAAM,QAAQ,KAAK,GAAG;AACzC,QAAM,QAAQ,OAAO,IAAI,WAAW,aAAa,EAAE,eAAe,QAAQ,QAAQ,QAAQ,GAAG,YAAY,UAAU,UAAU,CAAC,CAAC,CAAC;AACpI;AAQA,eAAe,QAAQ,MAAM,SAAS,SAAS,UAAU;AACrD,QAAM,QAAQ,MAAM,QAAQ,KAAK,GAAG;AACpC,uCAAU,KAAK,EAAE,UAAU,KAAK,MAAM,mBAAmB,KAAK;AAC9D,QAAM,CAAC,WAAW,YAAY,IAAI,MAAM,oBAAoB,KAAK,MAAM,KAAK,MAAM,IAAI;AACtF,QAAM,QAAQ,OAAO,SAAS;AAC9B,UAAQ,KAAK,YAAY;AAC7B;AAMO,SAAS,gBAAgB,WAAW;AACvC,kBAAgB,WAAW;AACvB,UAAM,WAAW,MAAM,YAAY,SAAS;AAC5C,eAAW,YAAY,UAAU;AAC7B,YAAM,OAAO,OAAO,UAAM,uBAAU,0BAAK,KAAK,WAAW,QAAQ,CAAC,GAAG,YAAY;AACjF,YAAM,EAAE,MAAM,UAAU,KAAK;AAAA,IACjC;AAAA,EACJ;AACA,SAAO,SAAS;AACpB;AAOA,eAAsB,YAAY,UAAU,YAAY,IAAI,UAAU;AAClE,QAAM,KAAK,WAAW,WAAW,IAAI,oCAAe,CAAC,CAAC;AACtD,QAAM,QAAQ,MAAM,GAAG,QAAQ,SAAS,UAAU,SAAS,CAAC;AAC5D,QAAM,eAAe,CAAC;AACtB,aAAW,QAAQ,OAAO;AACtB,UAAM,WAAW,SAAS,UAAU,WAAW,IAAI;AACnD,SAAK,MAAM,GAAG,KAAK,QAAQ,GAAG,aAAa;AACvC,YAAMC,SAAQ,MAAM,YAAY,UAAU,SAAS,WAAW,IAAI,CAAC;AACnE,mBAAa,KAAK,GAAGA,MAAK;AAAA,IAC9B,OACK;AACD,mBAAa,KAAK,SAAS,WAAW,IAAI,CAAC;AAAA,IAC/C;AAAA,EACJ;AACA,SAAO;AACX;AAMA,SAAS,YAAY,OAAO;AACxB,QAAM,WAAW,MAAM,OAAO,CAAC,QAAQ,IAAI,MAAM;AACjD,SAAO,0BAAK,KAAK,GAAG,QAAQ;AAChC;;;AChKA,IAAAC,uBAA0D;AAG1D,yBAAmC;AAG5B,IAAM,2BAA2B;AAAA;AAAA,EAEpC,GAAG,OAAO,mBAAmB;AAAA;AAAA,EAE7B,GAAG,OAAO,mBAAmB;AACzB,UAAM,cAAc,IAAI,sCAAmB,EAAE,KAAK,KAAK,CAAC;AACxD,UAAM,mBAAmB,MAAM,YAAY,WAAW,cAAc;AACpE,WAAO;AAAA,EACX;AACJ;AAKO,IAAM,gBAAN,MAAoB;AAAA;AAAA,EAEvB,eAAe;AAAA,EACf;AAAA,EACA,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA,EAKV,YAAY,MAAM;AAEd,QAAI,OAAO,SAAS,UAAU;AAC1B,WAAK,WAAW;AAChB,UAAI,CAAC,gCAAW;AACZ,aAAK,eAAe,IAAI,oCAAe,IAAI;AAAA,MAC/C,OACK;AACD,cAAM,IAAI,MAAM,qDAAqD;AAAA,MACzE;AAAA,IACJ,WACS,gBAAgB,gBAAgB;AACrC,WAAK,eAAe,KAAK;AACzB,WAAK,UAAU;AACf,WAAK,WAAW,KAAK;AAAA,IACzB,eACS,qCAAe,IAAI,GAAG;AAC3B,WAAK,eAAe;AAAA,IACxB;AAAA,EACJ;AAAA;AAAA,EAEA,MAAM,UAAU;AACZ,QAAI,KAAK,cAAc;AACnB,YAAM,KAAK,aAAa,QAAQ;AAAA,IACpC;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAU;AACZ,QAAI,CAAC,KAAK,cAAc;AACpB,YAAM,IAAI,MAAM,qCAAqC;AAAA,IACzD;AACA,UAAM,YAAY,CAAC;AACnB,UAAM,gBAAgB,wBAAwB,KAAK,YAAY;AAC/D,qBAAiB,YAAY,eAAe;AACxC,gBAAU,KAAK,SAAS,QAAQ;AAAA,IACpC;AACA,WAAO;AAAA,EACX;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,KAAK,UAAU;AACjB,UAAM,eAAe,MAAM,KAAK,gBAAgB,QAAQ;AACxD,WAAO,EAAE,GAAG,cAAc,MAAM,OAAO,aAAa,gBAAgB,EAAE;AAAA,EAC1E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,MAAM,UAAU;AAClB,QAAI,KAAK,YAAY,SAAS,QAAQ,KAAK,QAAQ,MAAM,GAAG;AACxD,iBAAW,SAAS,UAAU,KAAK,SAAS,SAAS,CAAC;AAAA,IAC1D;AACA,QAAI;AACJ,QAAI,KAAK,SAAS;AACd,yBAAmB,MAAM,KAAK,QAAQ,QAAQ,UAAU,MAAM;AAAA,IAClE,OACK;AACD,UAAI,CAAC,KAAK,cAAc;AACpB,cAAM,IAAI,MAAM,qCAAqC;AAAA,MACzD;AACA,YAAM,eAAe,MAAM,KAAK,gBAAgB,QAAQ;AACxD,YAAM,kBAAkB,MAAM,wBAAwB,aAAa,mBAAmB,KAAK,YAAY;AACvG,UAAI,CAAC,iBAAiB;AAClB,cAAM,IAAI,MAAM,0DAA0D;AAAA,MAC9E;AACA,YAAM,qBAAqB,yBAAyB,gBAAgB,kBAAkB,SAAS,CAAC;AAChG,UAAI,CAAC,oBAAoB;AACrB,cAAM,MAAM,yCAAyC;AAAA,MACzD;AACA,YAAM,iBAAiB,MAAM,KAAK,aAAa,MAAM,gBAAgB,gBAAgB,gBAAgB,iBAAiB,gBAAgB,cAAc;AACpJ,yBAAmB,MAAM,mBAAmB,cAAc;AAAA,IAC9D;AACA,UAAM,WAAW,IAAI,SAAS,gBAAgB;AAC9C,WAAO,eAAe,UAAU,OAAO;AAAA,MACnC,OAAO,WAAW,GAAG,KAAK,YAAY,MAAM,aAAa,KAAK,YAAY;AAAA,IAC9E,CAAC;AACD,WAAO;AAAA,EACX;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,gBAAgB,UAAU;AAC5B,QAAI,CAAC,KAAK,cAAc;AACpB,YAAM,IAAI,MAAM,qCAAqC;AAAA,IACzD;AACA,UAAM,gBAAgB,wBAAwB,KAAK,YAAY;AAC/D,QAAI,SAAS;AACb,qBAAiB,YAAY,eAAe;AACxC,UAAI,SAAS,aAAa,UAAU;AAChC,iBAAS;AACT;AAAA,MACJ;AAAA,IACJ;AACA,QAAI,CAAC,QAAQ;AACT,YAAM,IAAI,MAAM,4CAA4C;AAAA,IAChE;AACA,WAAO;AAAA,EACX;AACJ;;;ACtIO,IAAM,iBAAN,MAAqB;AAAA,EACxB;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,YAAY,cAAc,
WAAW,UAAU;AAC3C,SAAK,eAAe;AACpB,SAAK,WAAW;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,mBAAmB,UAAU;AAC/B,UAAM,QAAQ,IAAI,cAAc,KAAK,YAAY;AACjD,UAAM,WAAW,MAAM,MAAM,MAAM,QAAQ;AAC3C,WAAO,MAAM,SAAS,YAAY;AAAA,EACtC;AACJ;;;ACzBA,IAAAC,iBAAwB;AACxB,IAAAC,uBAA0E;AAOnE,SAAS,eAAe,aAAa;AACxC,QAAM,WAAW,IAAI,SAAS,WAAW;AACzC,QAAM,UAAU,CAAC;AACjB,WAAS,IAAI,GAAG,IAAI,YAAY,YAAY,IAAI,IAAI,IAAI;AACpD,UAAM,SAAS,SAAS,aAAa,IAAI,IAAI,IAAI;AACjD,UAAM,OAAO,YAAY,aAAa,GAAG,EAAE;AAC3C,YAAQ,IAAI,IAAI;AAAA,EACpB;AACA,SAAO;AACX;AACA,SAAS,YAAY,QAAQ,OAAO,QAAQ;AAExC,SAAO,CAAC,GAAG,IAAI,WAAW,QAAQ,OAAO,MAAM,CAAC,EAC3C,IAAI,CAAC,MAAM,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG,CAAC,EAC1C,KAAK,EAAE;AAChB;AAMA,eAAsB,4BAA4B,cAAc;AAC5D,QAAM,gBAAgB,wBAAwB,YAAY;AAC1D,SAAO,aAAa,aAAa;AACrC;AAMA,eAAsB,aAAa,eAAe;AAC9C,QAAM,UAAU,IAAI,uBAAQ;AAC5B,QAAM,cAAc,IAAI,YAAY;AACpC,QAAM,YAAY,CAAC;AACnB,mBAAiB,YAAY,eAAe;AACxC,UAAM,WAAW,SAAS,SAAS,MAAM,IAAI,EAAE,KAAK,GAAG,EAAE,kBAAkB;AAC3E,UAAM,cAAc,YAAY,OAAO,QAAQ,EAAE;AACjD,UAAM,MAAM,MAAM,QAAQ,KAAK,aAAa,KAAK;AACjD,cAAU,GAAG,IAAI,SAAS;AAAA,EAC9B;AACA,SAAO;AACX;AAMA,eAAsB,gBAAgB,eAAe;AACjD,QAAM,UAAU,IAAI,uBAAQ;AAC5B,QAAM,cAAc,IAAI,YAAY;AACpC,QAAM,YAAY,CAAC;AACnB,mBAAiB,YAAY,eAAe;AACxC,QAAI,WAAW,SAAS,SAAS,MAAM,IAAI,EAAE,KAAK,GAAG;AAIrD,QAAI,aAAa,wBAAwB;AACrC,iBAAW,SAAS,kBAAkB;AAAA,IAC1C;AACA,UAAM,cAAc,YAAY,OAAO,QAAQ,EAAE;AACjD,UAAM,MAAM,MAAM,QAAQ,KAAK,aAAa,KAAK;AACjD,cAAU,SAAK,8CAAwB,kBAAkB,GAAG,GAAG,eAAe,SAAS,iBAAiB,CAAC,CAAC;AAAA,EAC9G;AACA,QAAM,cAAc,UAAU,KAAK,aAAa;AAChD,aAAO,uDAAiC,WAAW;AACvD;AAOA,SAAS,cAAc,MAAM,MAAM;AAC/B,QAAM,IAAI,IAAI,eAAe,IAAI;AACjC,QAAM,IAAI,IAAI,eAAe,IAAI;AACjC,SAAO,OAAO,EAAE,CAAC,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,IAAI,EAAE,CAAC,IAAI,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC;AAC3D;AAMA,SAAS,kBAAkB,KAAK;AA7FhC;AA8FI,QAAM,aAAY,SAAI,MAAM,KAAK,MAAf,mBAAkB,IAAI,CAAC,MAAM,SAAS,GAAG,EAAE;AAC7D,SAAO,IAAI,WAAW,aAAa,IAAI,MAAM,EAAE,CAAC,EAAE;AACtD;AAMA,SAAS,eAAe,GAAG;AACvB,SAAO,IAAI,eAAe,CAAC,CAAC,CAAC,EAAE;AACnC;",
  6   6 |   "names": ["signature", "JSZip", "import_jszip", "VERSION", "JSZip", "import_loader_utils", "import_loader_utils", "signature", "import_loader_utils", "signature", "ZIP_HEADER_FIELDS", "import_loader_utils", "files", "import_loader_utils", "import_crypto", "import_loader_utils"]
  7   7 | }

package/dist/zip-loader.js
CHANGED
@@ -4,7 +4,7 @@
  4   4 |   import JSZip from 'jszip';
  5   5 |   // __VERSION__ is injected by babel-plugin-version-inline
  6   6 |   // @ts-ignore TS2304: Cannot find name '__VERSION__'.
  7     | - const VERSION = typeof "4.3.
      7 | + const VERSION = typeof "4.3.3" !== 'undefined' ? "4.3.3" : 'latest';
  8   8 |   export const ZipLoader = {
  9   9 |   dataType: null,
 10  10 |   batchType: null,
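
The only change in zip-loader.js is the stamped version literal. The context comment "// __VERSION__ is injected by babel-plugin-version-inline" indicates the string is substituted at build time, and the typeof guard keeps the code working when no substitution happens (note the stamped literal reads "4.3.3" while package.json below now reads "4.3.4"). A minimal TypeScript sketch of that pre-build pattern, assuming a global __VERSION__ placeholder supplied by the build; the names are illustrative, not the actual loaders.gl source:

// Sketch only: assumes a build step textually replaces __VERSION__ with a
// version string literal, as the comment in the hunk above suggests.
declare const __VERSION__: string | undefined;

// Falls back to 'latest' when no placeholder was injected; once a literal is
// substituted, the whole ternary reduces to the constant seen in the dist file.
const VERSION: string = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';

export const MODULE_VERSION = VERSION;
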
package/dist/zip-writer.js
CHANGED
@@ -3,7 +3,7 @@
  3   3 |   // Copyright (c) vis.gl contributors
  4   4 |   import JSZip from 'jszip';
  5   5 |   // @ts-ignore TS2304: Cannot find name '__VERSION__'.
  6     | - const VERSION = typeof "4.3.
      6 | + const VERSION = typeof "4.3.3" !== 'undefined' ? "4.3.3" : 'latest';
  7   7 |   /**
  8   8 |   * Zip exporter
  9   9 |   */
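
zip-writer.js picks up the same version-literal stamp; the ZipLoader and ZipWriter objects themselves are otherwise untouched by this release. For orientation, a usage sketch of how these two exports are typically driven through @loaders.gl/core; the round trip below is illustrative, assumes core's parse() and encode() helpers, and the type casts are added for the example rather than taken from this package:

// Illustrative round trip through the exports touched by this release.
import {parse, encode} from '@loaders.gl/core';
import {ZipLoader, ZipWriter} from '@loaders.gl/zip';

async function roundTripZip(zipBuffer: ArrayBuffer): Promise<ArrayBuffer> {
  // ZipLoader resolves to a map of entry path -> ArrayBuffer.
  const fileMap = (await parse(zipBuffer, ZipLoader)) as Record<string, ArrayBuffer>;

  // ZipWriter packs such a map back into a zip archive.
  return (await encode(fileMap, ZipWriter)) as ArrayBuffer;
}
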
package/package.json
CHANGED
@@ -1,6 +1,6 @@
  1   1 |   {
  2   2 |   "name": "@loaders.gl/zip",
  3     | - "version": "4.3.
      3 | + "version": "4.3.4",
  4   4 |   "description": "Zip Archive Loader",
  5   5 |   "license": "MIT",
  6   6 |   "type": "module",

@@ -39,14 +39,14 @@
 39  39 |   "build-bundle-dev": "ocular-bundle ./bundle.ts --env=dev --output=dist/dist.dev.js"
 40  40 |   },
 41  41 |   "dependencies": {
 42     | - "@loaders.gl/compression": "4.3.
 43     | - "@loaders.gl/crypto": "4.3.
 44     | - "@loaders.gl/loader-utils": "4.3.
     42 | + "@loaders.gl/compression": "4.3.4",
     43 | + "@loaders.gl/crypto": "4.3.4",
     44 | + "@loaders.gl/loader-utils": "4.3.4",
 45  45 |   "jszip": "^3.1.5",
 46  46 |   "md5": "^2.3.0"
 47  47 |   },
 48  48 |   "peerDependencies": {
 49  49 |   "@loaders.gl/core": "^4.3.0"
 50  50 |   },
 51     | - "gitHead": "
     51 | + "gitHead": "d18246f4ef6382f787a6ae2e9e21d8a7f40e5917"
 52  52 |   }
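
The dependency hunk moves the sibling packages (@loaders.gl/compression, @loaders.gl/crypto, @loaders.gl/loader-utils) to 4.3.4 in lockstep with the package version, while the third-party jszip and md5 ranges and the @loaders.gl/core peer range are unchanged. A hypothetical consumer-side check of that lockstep convention, purely as a sketch and not part of the package:

// Hypothetical helper: flags a project that mixes @loaders.gl/* versions,
// mirroring the lockstep bump shown in the dependency hunk above.
import {readFileSync} from 'node:fs';

const pkg = JSON.parse(readFileSync('package.json', 'utf8'));
const deps: Record<string, string> = {...pkg.dependencies, ...pkg.devDependencies};

const loadersGlVersions = new Set(
  Object.entries(deps)
    .filter(([name]) => name.startsWith('@loaders.gl/'))
    .map(([, range]) => range)
);

if (loadersGlVersions.size > 1) {
  throw new Error(`Mixed @loaders.gl versions: ${[...loadersGlVersions].join(', ')}`);
}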