@loaders.gl/zip 4.1.0-alpha.3 → 4.1.0-alpha.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dist.dev.js +198 -38
- package/dist/hash-file-utility.d.ts +6 -0
- package/dist/hash-file-utility.d.ts.map +1 -1
- package/dist/hash-file-utility.js +22 -0
- package/dist/hash-file-utility.js.map +1 -1
- package/dist/index.cjs +165 -48
- package/dist/index.d.ts +2 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +2 -1
- package/dist/index.js.map +1 -1
- package/dist/parse-zip/cd-file-header.d.ts +1 -1
- package/dist/parse-zip/cd-file-header.js +2 -2
- package/dist/parse-zip/cd-file-header.js.map +1 -1
- package/dist/parse-zip/end-of-central-directory.d.ts +19 -0
- package/dist/parse-zip/end-of-central-directory.d.ts.map +1 -1
- package/dist/parse-zip/end-of-central-directory.js +41 -8
- package/dist/parse-zip/end-of-central-directory.js.map +1 -1
- package/dist/parse-zip/zip-compozition.d.ts +8 -0
- package/dist/parse-zip/zip-compozition.d.ts.map +1 -0
- package/dist/parse-zip/zip-compozition.js +43 -0
- package/dist/parse-zip/zip-compozition.js.map +1 -0
- package/dist/parse-zip/zip64-info-generation.d.ts +1 -1
- package/dist/parse-zip/zip64-info-generation.d.ts.map +1 -1
- package/dist/parse-zip/zip64-info-generation.js +1 -1
- package/dist/parse-zip/zip64-info-generation.js.map +1 -1
- package/dist/zip-loader.js +1 -1
- package/dist/zip-writer.js +1 -1
- package/package.json +5 -5
- package/src/hash-file-utility.ts +46 -1
- package/src/index.ts +2 -1
- package/src/parse-zip/cd-file-header.ts +3 -3
- package/src/parse-zip/end-of-central-directory.ts +97 -8
- package/src/parse-zip/zip-compozition.ts +113 -0
- package/src/parse-zip/zip64-info-generation.ts +2 -2

package/dist/parse-zip/zip-compozition.js
ADDED

@@ -0,0 +1,43 @@
+import { FileHandleFile, concatenateArrayBuffers } from '@loaders.gl/loader-utils';
+import { parseEoCDRecord, updateEoCD } from "./end-of-central-directory.js";
+import { CRC32Hash } from '@loaders.gl/crypto';
+import { generateLocalHeader } from "./local-file-header.js";
+import { generateCDHeader } from "./cd-file-header.js";
+async function cutTheTailOff(provider) {
+  const oldEoCDinfo = await parseEoCDRecord(provider);
+  const oldCDStartOffset = oldEoCDinfo.cdStartOffset;
+  const oldCDLength = Number(oldEoCDinfo.offsets.zip64EoCDOffset ? oldEoCDinfo.offsets.zip64EoCDOffset - oldCDStartOffset : oldEoCDinfo.offsets.zipEoCDOffset - oldCDStartOffset);
+  const zipEnding = await provider.slice(oldCDStartOffset, provider.length);
+  await provider.truncate(Number(oldCDStartOffset));
+  const oldCDBody = zipEnding.slice(0, oldCDLength);
+  const eocdBody = zipEnding.slice(oldCDLength, zipEnding.byteLength);
+  return [oldCDBody, eocdBody, oldEoCDinfo];
+}
+async function generateFileHeaders(fileName, fileToAdd, localFileHeaderOffset) {
+  const newFileCRC322 = parseInt(await new CRC32Hash().hash(fileToAdd, 'hex'), 16);
+  const newFileLocalHeader = generateLocalHeader({
+    crc32: newFileCRC322,
+    fileName,
+    length: fileToAdd.byteLength
+  });
+  const newFileCDHeader = generateCDHeader({
+    crc32: newFileCRC322,
+    fileName,
+    offset: localFileHeaderOffset,
+    length: fileToAdd.byteLength
+  });
+  return [new Uint8Array(concatenateArrayBuffers(newFileLocalHeader, fileToAdd)), new Uint8Array(newFileCDHeader)];
+}
+export async function addOneFile(zipUrl, fileToAdd, fileName) {
+  const provider = new FileHandleFile(zipUrl, true);
+  const [oldCDBody, eocdBody, oldEoCDinfo] = await cutTheTailOff(provider);
+  const newFileOffset = provider.length;
+  const [localPart, cdHeaderPart] = await generateFileHeaders(fileName, fileToAdd, newFileOffset);
+  await provider.append(localPart);
+  const newCDBody = concatenateArrayBuffers(oldCDBody, cdHeaderPart);
+  const newCDStartOffset = provider.length;
+  await provider.append(new Uint8Array(newCDBody));
+  const eocdOffset = provider.length;
+  await provider.append(await updateEoCD(eocdBody, oldEoCDinfo.offsets, newCDStartOffset, eocdOffset, oldEoCDinfo.cdRecordsNumber + 1n));
+}
+//# sourceMappingURL=zip-compozition.js.map

package/dist/parse-zip/zip-compozition.js.map
ADDED

@@ -0,0 +1 @@
+
{"version":3,"file":"zip-compozition.js","names":["FileHandleFile","concatenateArrayBuffers","parseEoCDRecord","updateEoCD","CRC32Hash","generateLocalHeader","generateCDHeader","cutTheTailOff","provider","oldEoCDinfo","oldCDStartOffset","cdStartOffset","oldCDLength","Number","offsets","zip64EoCDOffset","zipEoCDOffset","zipEnding","slice","length","truncate","oldCDBody","eocdBody","byteLength","generateFileHeaders","fileName","fileToAdd","localFileHeaderOffset","newFileCRC322","parseInt","hash","newFileLocalHeader","crc32","newFileCDHeader","offset","Uint8Array","addOneFile","zipUrl","newFileOffset","localPart","cdHeaderPart","append","newCDBody","newCDStartOffset","eocdOffset","cdRecordsNumber"],"sources":["../../src/parse-zip/zip-compozition.ts"],"sourcesContent":["import {FileHandleFile, concatenateArrayBuffers} from '@loaders.gl/loader-utils';\nimport {ZipEoCDRecord, parseEoCDRecord, updateEoCD} from './end-of-central-directory';\nimport {CRC32Hash} from '@loaders.gl/crypto';\nimport {generateLocalHeader} from './local-file-header';\nimport {generateCDHeader} from './cd-file-header';\n\n/**\n * cut off CD and EoCD records from zip file\n * @param provider zip file\n * @returns tuple with three values: CD, EoCD record, EoCD information\n */\nasync function cutTheTailOff(\n provider: FileHandleFile\n): Promise<[ArrayBuffer, ArrayBuffer, ZipEoCDRecord]> {\n // define where the body ends\n const oldEoCDinfo = await parseEoCDRecord(provider);\n const oldCDStartOffset = oldEoCDinfo.cdStartOffset;\n\n // define cd length\n const oldCDLength = Number(\n oldEoCDinfo.offsets.zip64EoCDOffset\n ? oldEoCDinfo.offsets.zip64EoCDOffset - oldCDStartOffset\n : oldEoCDinfo.offsets.zipEoCDOffset - oldCDStartOffset\n );\n\n // cut off everything except of archieve body\n const zipEnding = await provider.slice(oldCDStartOffset, provider.length);\n await provider.truncate(Number(oldCDStartOffset));\n\n // divide cd body and eocd record\n const oldCDBody = zipEnding.slice(0, oldCDLength);\n const eocdBody = zipEnding.slice(oldCDLength, zipEnding.byteLength);\n\n return [oldCDBody, eocdBody, oldEoCDinfo];\n}\n\n/**\n * generates CD and local headers for the file\n * @param fileName name of the file\n * @param fileToAdd buffer with the file\n * @param localFileHeaderOffset offset of the file local header\n * @returns tuple with two values: local header and file body, cd header\n */\nasync function generateFileHeaders(\n fileName: string,\n fileToAdd: ArrayBuffer,\n localFileHeaderOffset: bigint\n): Promise<[Uint8Array, Uint8Array]> {\n // generating CRC32 of the content\n const newFileCRC322 = parseInt(await new CRC32Hash().hash(fileToAdd, 'hex'), 16);\n\n // generate local header for the file\n const newFileLocalHeader = generateLocalHeader({\n crc32: newFileCRC322,\n fileName,\n length: fileToAdd.byteLength\n });\n\n // generate hash file cd header\n const newFileCDHeader = generateCDHeader({\n crc32: newFileCRC322,\n fileName,\n offset: localFileHeaderOffset,\n length: fileToAdd.byteLength\n });\n return [\n new Uint8Array(concatenateArrayBuffers(newFileLocalHeader, fileToAdd)),\n new Uint8Array(newFileCDHeader)\n ];\n}\n\n/**\n * adds one file in the end of the archieve\n * @param zipUrl path to the file\n * @param fileToAdd new file body\n * @param fileName new file name\n */\nexport async function addOneFile(zipUrl: string, fileToAdd: ArrayBuffer, fileName: string) {\n // init file handler\n const provider = new FileHandleFile(zipUrl, true);\n\n const [oldCDBody, eocdBody, oldEoCDinfo] = await 
cutTheTailOff(provider);\n\n // remember the new file local header start offset\n const newFileOffset = provider.length;\n\n const [localPart, cdHeaderPart] = await generateFileHeaders(fileName, fileToAdd, newFileOffset);\n\n // write down the file local header\n await provider.append(localPart);\n\n // add the file CD header to the CD\n const newCDBody = concatenateArrayBuffers(oldCDBody, cdHeaderPart);\n\n // remember the CD start offset\n const newCDStartOffset = provider.length;\n\n // write down new CD\n await provider.append(new Uint8Array(newCDBody));\n\n // remember where eocd starts\n const eocdOffset = provider.length;\n\n await provider.append(\n await updateEoCD(\n eocdBody,\n oldEoCDinfo.offsets,\n newCDStartOffset,\n eocdOffset,\n oldEoCDinfo.cdRecordsNumber + 1n\n )\n );\n}\n"],"mappings":"AAAA,SAAQA,cAAc,EAAEC,uBAAuB,QAAO,0BAA0B;AAAC,SAC1DC,eAAe,EAAEC,UAAU;AAClD,SAAQC,SAAS,QAAO,oBAAoB;AAAC,SACrCC,mBAAmB;AAAA,SACnBC,gBAAgB;AAOxB,eAAeC,aAAaA,CAC1BC,QAAwB,EAC4B;EAEpD,MAAMC,WAAW,GAAG,MAAMP,eAAe,CAACM,QAAQ,CAAC;EACnD,MAAME,gBAAgB,GAAGD,WAAW,CAACE,aAAa;EAGlD,MAAMC,WAAW,GAAGC,MAAM,CACxBJ,WAAW,CAACK,OAAO,CAACC,eAAe,GAC/BN,WAAW,CAACK,OAAO,CAACC,eAAe,GAAGL,gBAAgB,GACtDD,WAAW,CAACK,OAAO,CAACE,aAAa,GAAGN,gBAC1C,CAAC;EAGD,MAAMO,SAAS,GAAG,MAAMT,QAAQ,CAACU,KAAK,CAACR,gBAAgB,EAAEF,QAAQ,CAACW,MAAM,CAAC;EACzE,MAAMX,QAAQ,CAACY,QAAQ,CAACP,MAAM,CAACH,gBAAgB,CAAC,CAAC;EAGjD,MAAMW,SAAS,GAAGJ,SAAS,CAACC,KAAK,CAAC,CAAC,EAAEN,WAAW,CAAC;EACjD,MAAMU,QAAQ,GAAGL,SAAS,CAACC,KAAK,CAACN,WAAW,EAAEK,SAAS,CAACM,UAAU,CAAC;EAEnE,OAAO,CAACF,SAAS,EAAEC,QAAQ,EAAEb,WAAW,CAAC;AAC3C;AASA,eAAee,mBAAmBA,CAChCC,QAAgB,EAChBC,SAAsB,EACtBC,qBAA6B,EACM;EAEnC,MAAMC,aAAa,GAAGC,QAAQ,CAAC,MAAM,IAAIzB,SAAS,CAAC,CAAC,CAAC0B,IAAI,CAACJ,SAAS,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC;EAGhF,MAAMK,kBAAkB,GAAG1B,mBAAmB,CAAC;IAC7C2B,KAAK,EAAEJ,aAAa;IACpBH,QAAQ;IACRN,MAAM,EAAEO,SAAS,CAACH;EACpB,CAAC,CAAC;EAGF,MAAMU,eAAe,GAAG3B,gBAAgB,CAAC;IACvC0B,KAAK,EAAEJ,aAAa;IACpBH,QAAQ;IACRS,MAAM,EAAEP,qBAAqB;IAC7BR,MAAM,EAAEO,SAAS,CAACH;EACpB,CAAC,CAAC;EACF,OAAO,CACL,IAAIY,UAAU,CAAClC,uBAAuB,CAAC8B,kBAAkB,EAAEL,SAAS,CAAC,CAAC,EACtE,IAAIS,UAAU,CAACF,eAAe,CAAC,CAChC;AACH;AAQA,OAAO,eAAeG,UAAUA,CAACC,MAAc,EAAEX,SAAsB,EAAED,QAAgB,EAAE;EAEzF,MAAMjB,QAAQ,GAAG,IAAIR,cAAc,CAACqC,MAAM,EAAE,IAAI,CAAC;EAEjD,MAAM,CAAChB,SAAS,EAAEC,QAAQ,EAAEb,WAAW,CAAC,GAAG,MAAMF,aAAa,CAACC,QAAQ,CAAC;EAGxE,MAAM8B,aAAa,GAAG9B,QAAQ,CAACW,MAAM;EAErC,MAAM,CAACoB,SAAS,EAAEC,YAAY,CAAC,GAAG,MAAMhB,mBAAmB,CAACC,QAAQ,EAAEC,SAAS,EAAEY,aAAa,CAAC;EAG/F,MAAM9B,QAAQ,CAACiC,MAAM,CAACF,SAAS,CAAC;EAGhC,MAAMG,SAAS,GAAGzC,uBAAuB,CAACoB,SAAS,EAAEmB,YAAY,CAAC;EAGlE,MAAMG,gBAAgB,GAAGnC,QAAQ,CAACW,MAAM;EAGxC,MAAMX,QAAQ,CAACiC,MAAM,CAAC,IAAIN,UAAU,CAACO,SAAS,CAAC,CAAC;EAGhD,MAAME,UAAU,GAAGpC,QAAQ,CAACW,MAAM;EAElC,MAAMX,QAAQ,CAACiC,MAAM,CACnB,MAAMtC,UAAU,CACdmB,QAAQ,EACRb,WAAW,CAACK,OAAO,EACnB6B,gBAAgB,EAChBC,UAAU,EACVnC,WAAW,CAACoC,eAAe,GAAG,EAChC,CACF,CAAC;AACH"}

package/dist/parse-zip/zip64-info-generation.d.ts
CHANGED

@@ -19,6 +19,6 @@ export declare function createZip64Info(options: Zip64Options): ArrayBuffer;
  * @param fieldOffset offset of the field
  * @param value value to be written
  */
-export declare function setFieldToNumber(header: DataView, fieldSize: number, fieldOffset: number, value: number | bigint): void;
+export declare function setFieldToNumber(header: DataView, fieldSize: number, fieldOffset: number | bigint, value: number | bigint): void;
 export {};
 //# sourceMappingURL=zip64-info-generation.d.ts.map

package/dist/parse-zip/zip64-info-generation.d.ts.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"zip64-info-generation.d.ts","sourceRoot":"","sources":["../../src/parse-zip/zip64-info-generation.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,SAAS,YAA+B,CAAC;AAEtD,2GAA2G;AAC3G,KAAK,YAAY,GAAG;IAClB,kEAAkE;IAClE,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,oCAAoC;IACpC,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB,CAAC;AAEF;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,OAAO,EAAE,YAAY,GAAG,WAAW,CAkBlE;AAUD;;;;;;GAMG;AACH,wBAAgB,gBAAgB,CAC9B,MAAM,EAAE,QAAQ,EAChB,SAAS,EAAE,MAAM,EACjB,WAAW,EAAE,MAAM,
+
{"version":3,"file":"zip64-info-generation.d.ts","sourceRoot":"","sources":["../../src/parse-zip/zip64-info-generation.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,SAAS,YAA+B,CAAC;AAEtD,2GAA2G;AAC3G,KAAK,YAAY,GAAG;IAClB,kEAAkE;IAClE,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,oCAAoC;IACpC,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB,CAAC;AAEF;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,OAAO,EAAE,YAAY,GAAG,WAAW,CAkBlE;AAUD;;;;;;GAMG;AACH,wBAAgB,gBAAgB,CAC9B,MAAM,EAAE,QAAQ,EAChB,SAAS,EAAE,MAAM,EACjB,WAAW,EAAE,MAAM,GAAG,MAAM,EAC5B,KAAK,EAAE,MAAM,GAAG,MAAM,GACrB,IAAI,CAEN"}

package/dist/parse-zip/zip64-info-generation.js
CHANGED

@@ -18,7 +18,7 @@ export function createZip64Info(options) {
   return concatenateArrayBuffers(...arraysToConcat);
 }
 export function setFieldToNumber(header, fieldSize, fieldOffset, value) {
-  NUMBER_SETTERS[fieldSize](header, fieldOffset, value);
+  NUMBER_SETTERS[fieldSize](header, Number(fieldOffset), value);
 }
 const NUMBER_SETTERS = {
   2: (header, offset, value) => {

package/dist/parse-zip/zip64-info-generation.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"zip64-info-generation.js","names":["concatenateArrayBuffers","signature","Uint8Array","createZip64Info","options","optionsToUse","zip64Length","offset","size","arraysToConcat","field","ZIP64_FIELDS","_field$name","_optionsToUse","_field$name2","name","default","newValue","DataView","ArrayBuffer","NUMBER_SETTERS","push","buffer","setFieldToNumber","header","fieldSize","fieldOffset","value","
+
{"version":3,"file":"zip64-info-generation.js","names":["concatenateArrayBuffers","signature","Uint8Array","createZip64Info","options","optionsToUse","zip64Length","offset","size","arraysToConcat","field","ZIP64_FIELDS","_field$name","_optionsToUse","_field$name2","name","default","newValue","DataView","ArrayBuffer","NUMBER_SETTERS","push","buffer","setFieldToNumber","header","fieldSize","fieldOffset","value","Number","setUint16","setUint32","setBigUint64","BigInt","getUint16"],"sources":["../../src/parse-zip/zip64-info-generation.ts"],"sourcesContent":["import {concatenateArrayBuffers} from '@loaders.gl/loader-utils';\n\nexport const signature = new Uint8Array([0x01, 0x00]);\n\n/** info that can be placed into zip64 field, doc: https://en.wikipedia.org/wiki/ZIP_(file_format)#ZIP64 */\ntype Zip64Options = {\n /** Original uncompressed file size and Size of compressed data */\n size?: number;\n /** Offset of local header record */\n offset?: number;\n};\n\n/**\n * creates zip64 extra field\n * @param options info that can be placed into zip64 field\n * @returns buffer with field\n */\nexport function createZip64Info(options: Zip64Options): ArrayBuffer {\n const optionsToUse = {\n ...options,\n zip64Length: (options.offset ? 1 : 0) * 8 + (options.size ? 1 : 0) * 16\n };\n\n const arraysToConcat: ArrayBuffer[] = [];\n\n for (const field of ZIP64_FIELDS) {\n if (!optionsToUse[field.name ?? ''] && !field.default) {\n continue;\n }\n const newValue = new DataView(new ArrayBuffer(field.size));\n NUMBER_SETTERS[field.size](newValue, 0, optionsToUse[field.name ?? ''] ?? field.default);\n arraysToConcat.push(newValue.buffer);\n }\n\n return concatenateArrayBuffers(...arraysToConcat);\n}\n\n/**\n * Function to write values into buffer\n * @param header buffer where to write a value\n * @param offset offset of the writing start\n * @param value value to be written\n */\ntype NumberSetter = (header: DataView, offset: number, value: number | bigint) => void;\n\n/**\n * Writes values into buffer according to the bytes amount\n * @param header header where to write the data\n * @param fieldSize size of the field in bytes\n * @param fieldOffset offset of the field\n * @param value value to be written\n */\nexport function setFieldToNumber(\n header: DataView,\n fieldSize: number,\n fieldOffset: number | bigint,\n value: number | bigint\n): void {\n NUMBER_SETTERS[fieldSize](header, Number(fieldOffset), value);\n}\n\n/** functions to write values into buffer according to the bytes amount */\nconst NUMBER_SETTERS: {[key: number]: NumberSetter} = {\n 2: (header, offset, value) => {\n header.setUint16(offset, Number(value), true);\n },\n 4: (header, offset, value) => {\n header.setUint32(offset, Number(value), true);\n },\n 8: (header, offset, value) => {\n header.setBigUint64(offset, BigInt(value), true);\n }\n};\n\n/** zip64 info fields description, we need it as a pattern to build a zip64 info */\nconst ZIP64_FIELDS = [\n // Header ID 0x0001\n {\n size: 2,\n default: new DataView(signature.buffer).getUint16(0, true)\n },\n\n // Size of the extra field chunk (8, 16, 24 or 28)\n {\n size: 2,\n name: 'zip64Length'\n },\n\n // Original uncompressed file size\n {\n size: 8,\n name: 'size'\n },\n\n // Size of compressed data\n {\n size: 8,\n name: 'size'\n },\n\n // Offset of local header record\n {\n size: 8,\n name: 'offset'\n 
}\n];\n"],"mappings":"AAAA,SAAQA,uBAAuB,QAAO,0BAA0B;AAEhE,OAAO,MAAMC,SAAS,GAAG,IAAIC,UAAU,CAAC,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;AAerD,OAAO,SAASC,eAAeA,CAACC,OAAqB,EAAe;EAClE,MAAMC,YAAY,GAAG;IACnB,GAAGD,OAAO;IACVE,WAAW,EAAE,CAACF,OAAO,CAACG,MAAM,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAACH,OAAO,CAACI,IAAI,GAAG,CAAC,GAAG,CAAC,IAAI;EACvE,CAAC;EAED,MAAMC,cAA6B,GAAG,EAAE;EAExC,KAAK,MAAMC,KAAK,IAAIC,YAAY,EAAE;IAAA,IAAAC,WAAA,EAAAC,aAAA,EAAAC,YAAA;IAChC,IAAI,CAACT,YAAY,EAAAO,WAAA,GAACF,KAAK,CAACK,IAAI,cAAAH,WAAA,cAAAA,WAAA,GAAI,EAAE,CAAC,IAAI,CAACF,KAAK,CAACM,OAAO,EAAE;MACrD;IACF;IACA,MAAMC,QAAQ,GAAG,IAAIC,QAAQ,CAAC,IAAIC,WAAW,CAACT,KAAK,CAACF,IAAI,CAAC,CAAC;IAC1DY,cAAc,CAACV,KAAK,CAACF,IAAI,CAAC,CAACS,QAAQ,EAAE,CAAC,GAAAJ,aAAA,GAAER,YAAY,EAAAS,YAAA,GAACJ,KAAK,CAACK,IAAI,cAAAD,YAAA,cAAAA,YAAA,GAAI,EAAE,CAAC,cAAAD,aAAA,cAAAA,aAAA,GAAIH,KAAK,CAACM,OAAO,CAAC;IACxFP,cAAc,CAACY,IAAI,CAACJ,QAAQ,CAACK,MAAM,CAAC;EACtC;EAEA,OAAOtB,uBAAuB,CAAC,GAAGS,cAAc,CAAC;AACnD;AAiBA,OAAO,SAASc,gBAAgBA,CAC9BC,MAAgB,EAChBC,SAAiB,EACjBC,WAA4B,EAC5BC,KAAsB,EAChB;EACNP,cAAc,CAACK,SAAS,CAAC,CAACD,MAAM,EAAEI,MAAM,CAACF,WAAW,CAAC,EAAEC,KAAK,CAAC;AAC/D;AAGA,MAAMP,cAA6C,GAAG;EACpD,CAAC,EAAE,CAACI,MAAM,EAAEjB,MAAM,EAAEoB,KAAK,KAAK;IAC5BH,MAAM,CAACK,SAAS,CAACtB,MAAM,EAAEqB,MAAM,CAACD,KAAK,CAAC,EAAE,IAAI,CAAC;EAC/C,CAAC;EACD,CAAC,EAAE,CAACH,MAAM,EAAEjB,MAAM,EAAEoB,KAAK,KAAK;IAC5BH,MAAM,CAACM,SAAS,CAACvB,MAAM,EAAEqB,MAAM,CAACD,KAAK,CAAC,EAAE,IAAI,CAAC;EAC/C,CAAC;EACD,CAAC,EAAE,CAACH,MAAM,EAAEjB,MAAM,EAAEoB,KAAK,KAAK;IAC5BH,MAAM,CAACO,YAAY,CAACxB,MAAM,EAAEyB,MAAM,CAACL,KAAK,CAAC,EAAE,IAAI,CAAC;EAClD;AACF,CAAC;AAGD,MAAMhB,YAAY,GAAG,CAEnB;EACEH,IAAI,EAAE,CAAC;EACPQ,OAAO,EAAE,IAAIE,QAAQ,CAACjB,SAAS,CAACqB,MAAM,CAAC,CAACW,SAAS,CAAC,CAAC,EAAE,IAAI;AAC3D,CAAC,EAGD;EACEzB,IAAI,EAAE,CAAC;EACPO,IAAI,EAAE;AACR,CAAC,EAGD;EACEP,IAAI,EAAE,CAAC;EACPO,IAAI,EAAE;AACR,CAAC,EAGD;EACEP,IAAI,EAAE,CAAC;EACPO,IAAI,EAAE;AACR,CAAC,EAGD;EACEP,IAAI,EAAE,CAAC;EACPO,IAAI,EAAE;AACR,CAAC,CACF"}
package/dist/zip-loader.js
CHANGED
package/dist/zip-writer.js
CHANGED
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@loaders.gl/zip",
-  "version": "4.1.0-alpha.3",
+  "version": "4.1.0-alpha.5",
   "description": "Zip Archive Loader",
   "license": "MIT",
   "type": "module",
@@ -38,11 +38,11 @@
     "build-bundle": "ocular-bundle ./src/index.ts"
   },
   "dependencies": {
-    "@loaders.gl/compression": "4.1.0-alpha.
-    "@loaders.gl/crypto": "4.1.0-alpha.
-    "@loaders.gl/loader-utils": "4.1.0-alpha.
+    "@loaders.gl/compression": "4.1.0-alpha.5",
+    "@loaders.gl/crypto": "4.1.0-alpha.5",
+    "@loaders.gl/loader-utils": "4.1.0-alpha.5",
     "jszip": "^3.1.5",
     "md5": "^2.3.0"
   },
-  "gitHead": "
+  "gitHead": "3250842d8cc0fc8b76a575168c1fd57e5a66b6ba"
 }
package/src/hash-file-utility.ts
CHANGED

@@ -3,7 +3,7 @@
 // Copyright (c) vis.gl contributors

 import {MD5Hash} from '@loaders.gl/crypto';
-import {FileProvider} from '@loaders.gl/loader-utils';
+import {FileProvider, concatenateArrayBuffers} from '@loaders.gl/loader-utils';
 import {makeZipCDHeaderIterator} from './parse-zip/cd-file-header';

 /**
@@ -55,3 +55,48 @@ export async function makeHashTableFromZipHeaders(

   return hashTable;
 }
+
+/**
+ * creates hash file that later can be added to the SLPK archive
+ * @param fileProvider SLPK archive where we need to add hash file
+ * @returns ArrayBuffer containing hash file
+ */
+export async function composeHashFile(fileProvider: FileProvider): Promise<ArrayBuffer> {
+  const hashArray = await makeHashTableFromZipHeaders(fileProvider);
+  const bufferArray = Object.entries(hashArray)
+    .map(([key, value]) => concatenateArrayBuffers(hexStringToBuffer(key), bigintToBuffer(value)))
+    .sort(compareHashes);
+  return concatenateArrayBuffers(...bufferArray);
+}
+
+/**
+ * Function to compare md5 hashes according to https://github.com/Esri/i3s-spec/blob/master/docs/2.0/slpk_hashtable.pcsl.md
+ * @param arrA first hash to compare
+ * @param arrB second hash to compare
+ * @returns 0 if equal, negative number if a<b, pozitive if a>b
+ */
+function compareHashes(arrA: ArrayBuffer, arrB: ArrayBuffer): number {
+  const a = new BigUint64Array(arrA);
+  const b = new BigUint64Array(arrB);
+
+  return Number(a[0] === b[0] ? a[1] - b[1] : a[0] - b[0]);
+}
+
+/**
+ * converts hex string to buffer
+ * @param str hex string to convert
+ * @returns conversion result
+ */
+function hexStringToBuffer(str: string): ArrayBuffer {
+  const byteArray = str.match(/../g)?.map((h) => parseInt(h, 16));
+  return new Uint8Array(byteArray ?? new Array(16)).buffer;
+}
+
+/**
+ * converts bigint to buffer
+ * @param n bigint to convert
+ * @returns convertion result
+ */
+function bigintToBuffer(n: bigint): ArrayBuffer {
+  return new BigUint64Array([n]).buffer;
+}
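The new composeHashFile export builds the hash file from the archive's central directory: 16-byte MD5 keys concatenated with 8-byte offsets, sorted with compareHashes. A minimal usage sketch, assuming FileHandleFile (used elsewhere in this release) can be constructed from a bare path and satisfies the FileProvider interface composeHashFile expects; the helper name and archive path are illustrative:

```ts
import {FileHandleFile} from '@loaders.gl/loader-utils';
import {composeHashFile} from '@loaders.gl/zip';

// Hypothetical helper: compose the hash table for an existing archive on disk
async function buildHashFile(archivePath: string): Promise<ArrayBuffer> {
  const provider = new FileHandleFile(archivePath); // assumed read-only construction
  return composeHashFile(provider);
}
```

The resulting buffer can then be written back into the archive, for example with the addOneFile function added in this release.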
package/src/index.ts
CHANGED

@@ -19,8 +19,9 @@ export {
 } from './parse-zip/local-file-header';
 export {parseEoCDRecord} from './parse-zip/end-of-central-directory';
 export {searchFromTheEnd} from './parse-zip/search-from-the-end';
+export {addOneFile} from './parse-zip/zip-compozition';

 // export type {HashElement} from './hash-file-utility';
-export {parseHashTable, makeHashTableFromZipHeaders} from './hash-file-utility';
+export {parseHashTable, makeHashTableFromZipHeaders, composeHashFile} from './hash-file-utility';

 export {ZipFileSystem} from './filesystems/zip-filesystem';

package/src/parse-zip/cd-file-header.ts
CHANGED

@@ -200,7 +200,7 @@ type GenerateCDOptions = {
   /** File size */
   length: number;
   /** Relative offset of local file header */
-  offset: number;
+  offset: bigint;
 };

 /**
@@ -220,7 +220,7 @@ export function generateCDHeader(options: GenerateCDOptions): ArrayBuffer {
   const optionsToZip64: any = {};
   if (optionsToUse.offset >= 0xffffffff) {
     optionsToZip64.offset = optionsToUse.offset;
-    optionsToUse.offset = 0xffffffff;
+    optionsToUse.offset = BigInt(0xffffffff);
   }
   if (optionsToUse.length >= 0xffffffff) {
     optionsToZip64.size = optionsToUse.length;
@@ -231,7 +231,7 @@ export function generateCDHeader(options: GenerateCDOptions): ArrayBuffer {
     zip64header = createZip64Info(optionsToZip64);
     optionsToUse.extraLength = zip64header.byteLength;
   }
-  const header = new DataView(new ArrayBuffer(CD_FILE_NAME_OFFSET));
+  const header = new DataView(new ArrayBuffer(Number(CD_FILE_NAME_OFFSET)));

   for (const field of ZIP_HEADER_FIELDS) {
     setFieldToNumber(
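With GenerateCDOptions.offset now typed as bigint, generateCDHeader clamps the 32-bit offset field to 0xffffffff and moves the real value into a zip64 extra field when the local header sits past the 4 GB boundary. An illustrative sketch of that path; the relative import mirrors how zip-compozition.ts calls it internally, and the literal values are made up:

```ts
import {generateCDHeader} from './cd-file-header'; // assumed co-located with the parse-zip sources

// Offset past 4 GB: the CD header gets 0xffffffff in its 32-bit offset field
// and the true offset is emitted via createZip64Info into the zip64 extra field
const cdHeader: ArrayBuffer = generateCDHeader({
  crc32: 0x12345678, // example CRC32 of the file body
  fileName: 'content.bin', // example file name
  offset: 0x1_0000_0000n, // bigint local-file-header offset
  length: 1024
});
```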

package/src/parse-zip/end-of-central-directory.ts
CHANGED

@@ -4,6 +4,7 @@

 import {FileProvider, compareArrayBuffers} from '@loaders.gl/loader-utils';
 import {ZipSignature, searchFromTheEnd} from './search-from-the-end';
+import {setFieldToNumber} from './zip64-info-generation';

 /**
  * End of central directory info
@@ -14,6 +15,18 @@ export type ZipEoCDRecord = {
   cdStartOffset: bigint;
   /** Relative offset of local file header */
   cdRecordsNumber: bigint;
+  offsets: ZipEoCDRecordOffsets;
+};
+
+/**
+ * End of central directory offsets
+ * according to https://en.wikipedia.org/wiki/ZIP_(file_format)
+ */
+export type ZipEoCDRecordOffsets = {
+  zipEoCDOffset: bigint;
+
+  zip64EoCDOffset?: bigint;
+  zip64EoCDLocatorOffset?: bigint;
 };

 const eoCDSignature: ZipSignature = new Uint8Array([0x50, 0x4b, 0x05, 0x06]);
@@ -22,9 +35,13 @@ const zip64EoCDSignature = new Uint8Array([0x50, 0x4b, 0x06, 0x06]);

 // offsets accroding to https://en.wikipedia.org/wiki/ZIP_(file_format)
 const CD_RECORDS_NUMBER_OFFSET = 8n;
+const CD_RECORDS_NUMBER_ON_DISC_OFFSET = 10n;
+const CD_CD_BYTE_SIZE_OFFSET = 12n;
 const CD_START_OFFSET_OFFSET = 16n;
 const ZIP64_EOCD_START_OFFSET_OFFSET = 8n;
 const ZIP64_CD_RECORDS_NUMBER_OFFSET = 24n;
+const ZIP64_CD_RECORDS_NUMBER_ON_DISC_OFFSET = 32n;
+const ZIP64_CD_CD_BYTE_SIZE_OFFSET = 40n;
 const ZIP64_CD_START_OFFSET_OFFSET = 48n;

 /**
@@ -38,14 +55,12 @@ export const parseEoCDRecord = async (file: FileProvider): Promise<ZipEoCDRecord
   let cdRecordsNumber = BigInt(await file.getUint16(zipEoCDOffset + CD_RECORDS_NUMBER_OFFSET));
   let cdStartOffset = BigInt(await file.getUint32(zipEoCDOffset + CD_START_OFFSET_OFFSET));

-
-
+  let zip64EoCDLocatorOffset = zipEoCDOffset - 20n;
+  let zip64EoCDOffset = 0n;

-
-
-
-  }
-  const zip64EoCDOffset = await file.getBigUint64(
+  const magicBytes = await file.slice(zip64EoCDLocatorOffset, zip64EoCDLocatorOffset + 4n);
+  if (compareArrayBuffers(magicBytes, zip64EoCDLocatorSignature)) {
+    zip64EoCDOffset = await file.getBigUint64(
     zip64EoCDLocatorOffset + ZIP64_EOCD_START_OFFSET_OFFSET
   );

@@ -56,10 +71,84 @@ export const parseEoCDRecord = async (file: FileProvider): Promise<ZipEoCDRecord

     cdRecordsNumber = await file.getBigUint64(zip64EoCDOffset + ZIP64_CD_RECORDS_NUMBER_OFFSET);
     cdStartOffset = await file.getBigUint64(zip64EoCDOffset + ZIP64_CD_START_OFFSET_OFFSET);
+  } else {
+    zip64EoCDLocatorOffset = 0n;
   }

   return {
     cdRecordsNumber,
-    cdStartOffset
+    cdStartOffset,
+    offsets: {
+      zip64EoCDOffset,
+      zip64EoCDLocatorOffset,
+      zipEoCDOffset
+    }
   };
 };
+
+/**
+ * updates EoCD record to add more files to the archieve
+ * @param eocdBody buffer containing header
+ * @param oldEoCDOffsets info read from EoCD record befor updating
+ * @param newCDStartOffset CD start offset to be updated
+ * @param eocdStartOffset EoCD start offset to be updated
+ * @returns new EoCD header
+ */
+export async function updateEoCD(
+  eocdBody: ArrayBuffer,
+  oldEoCDOffsets: ZipEoCDRecordOffsets,
+  newCDStartOffset: bigint,
+  eocdStartOffset: bigint,
+  newCDRecordsNumber: bigint
+): Promise<Uint8Array> {
+  const eocd = new DataView(eocdBody);
+
+  const classicEoCDOffset = oldEoCDOffsets.zip64EoCDOffset
+    ? oldEoCDOffsets.zipEoCDOffset - oldEoCDOffsets.zip64EoCDOffset
+    : 0n;
+
+  // updating classic EoCD record with new CD records number in general and on disc
+  if (Number(newCDRecordsNumber) <= 0xffff) {
+    setFieldToNumber(eocd, 2, classicEoCDOffset + CD_RECORDS_NUMBER_OFFSET, newCDRecordsNumber);
+    setFieldToNumber(
+      eocd,
+      2,
+      classicEoCDOffset + CD_RECORDS_NUMBER_ON_DISC_OFFSET,
+      newCDRecordsNumber
+    );
+  }
+
+  // updating zip64 EoCD record with new size of CD
+  if (eocdStartOffset - newCDStartOffset <= 0xffffffff) {
+    setFieldToNumber(
+      eocd,
+      4,
+      classicEoCDOffset + CD_CD_BYTE_SIZE_OFFSET,
+      eocdStartOffset - newCDStartOffset
+    );
+  }
+
+  // updating classic EoCD record with new CD start offset
+  if (newCDStartOffset < 0xffffffff) {
+    setFieldToNumber(eocd, 4, classicEoCDOffset + CD_START_OFFSET_OFFSET, newCDStartOffset);
+  }
+
+  // updating zip64 EoCD locator and record with new EoCD record start offset and cd records number
+  if (oldEoCDOffsets.zip64EoCDLocatorOffset && oldEoCDOffsets.zip64EoCDOffset) {
+    // updating zip64 EoCD locator with new EoCD record start offset
+    const locatorOffset = oldEoCDOffsets.zip64EoCDLocatorOffset - oldEoCDOffsets.zip64EoCDOffset;
+    setFieldToNumber(eocd, 8, locatorOffset + ZIP64_EOCD_START_OFFSET_OFFSET, eocdStartOffset);
+
+    // updating zip64 EoCD record with new cd start offset
+    setFieldToNumber(eocd, 8, ZIP64_CD_START_OFFSET_OFFSET, newCDStartOffset);
+
+    // updating zip64 EoCD record with new cd records number
+    setFieldToNumber(eocd, 8, ZIP64_CD_RECORDS_NUMBER_OFFSET, newCDRecordsNumber);
+    setFieldToNumber(eocd, 8, ZIP64_CD_RECORDS_NUMBER_ON_DISC_OFFSET, newCDRecordsNumber);
+
+    // updating zip64 EoCD record with new size of CD
+    setFieldToNumber(eocd, 8, ZIP64_CD_CD_BYTE_SIZE_OFFSET, eocdStartOffset - newCDStartOffset);
+  }
+
+  return new Uint8Array(eocd.buffer);
+}
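parseEoCDRecord now also reports where the zip and zip64 end-of-central-directory records live, which updateEoCD later uses to patch them in place. A read-only sketch of the extended return shape; the archive path and single-argument FileHandleFile construction are assumptions:

```ts
import {FileHandleFile} from '@loaders.gl/loader-utils';
import {parseEoCDRecord} from '@loaders.gl/zip';

async function logEoCDInfo(archivePath: string): Promise<void> {
  const provider = new FileHandleFile(archivePath); // assumed read-only construction
  const {cdRecordsNumber, cdStartOffset, offsets} = await parseEoCDRecord(provider);

  // All fields are bigint; the zip64 offsets stay 0n when no zip64 EoCD locator is found
  console.log(cdRecordsNumber, cdStartOffset);
  console.log(offsets.zipEoCDOffset, offsets.zip64EoCDOffset, offsets.zip64EoCDLocatorOffset);
}
```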

package/src/parse-zip/zip-compozition.ts
ADDED

@@ -0,0 +1,113 @@
+import {FileHandleFile, concatenateArrayBuffers} from '@loaders.gl/loader-utils';
+import {ZipEoCDRecord, parseEoCDRecord, updateEoCD} from './end-of-central-directory';
+import {CRC32Hash} from '@loaders.gl/crypto';
+import {generateLocalHeader} from './local-file-header';
+import {generateCDHeader} from './cd-file-header';
+
+/**
+ * cut off CD and EoCD records from zip file
+ * @param provider zip file
+ * @returns tuple with three values: CD, EoCD record, EoCD information
+ */
+async function cutTheTailOff(
+  provider: FileHandleFile
+): Promise<[ArrayBuffer, ArrayBuffer, ZipEoCDRecord]> {
+  // define where the body ends
+  const oldEoCDinfo = await parseEoCDRecord(provider);
+  const oldCDStartOffset = oldEoCDinfo.cdStartOffset;
+
+  // define cd length
+  const oldCDLength = Number(
+    oldEoCDinfo.offsets.zip64EoCDOffset
+      ? oldEoCDinfo.offsets.zip64EoCDOffset - oldCDStartOffset
+      : oldEoCDinfo.offsets.zipEoCDOffset - oldCDStartOffset
+  );
+
+  // cut off everything except of archieve body
+  const zipEnding = await provider.slice(oldCDStartOffset, provider.length);
+  await provider.truncate(Number(oldCDStartOffset));
+
+  // divide cd body and eocd record
+  const oldCDBody = zipEnding.slice(0, oldCDLength);
+  const eocdBody = zipEnding.slice(oldCDLength, zipEnding.byteLength);
+
+  return [oldCDBody, eocdBody, oldEoCDinfo];
+}
+
+/**
+ * generates CD and local headers for the file
+ * @param fileName name of the file
+ * @param fileToAdd buffer with the file
+ * @param localFileHeaderOffset offset of the file local header
+ * @returns tuple with two values: local header and file body, cd header
+ */
+async function generateFileHeaders(
+  fileName: string,
+  fileToAdd: ArrayBuffer,
+  localFileHeaderOffset: bigint
+): Promise<[Uint8Array, Uint8Array]> {
+  // generating CRC32 of the content
+  const newFileCRC322 = parseInt(await new CRC32Hash().hash(fileToAdd, 'hex'), 16);
+
+  // generate local header for the file
+  const newFileLocalHeader = generateLocalHeader({
+    crc32: newFileCRC322,
+    fileName,
+    length: fileToAdd.byteLength
+  });
+
+  // generate hash file cd header
+  const newFileCDHeader = generateCDHeader({
+    crc32: newFileCRC322,
+    fileName,
+    offset: localFileHeaderOffset,
+    length: fileToAdd.byteLength
+  });
+  return [
+    new Uint8Array(concatenateArrayBuffers(newFileLocalHeader, fileToAdd)),
+    new Uint8Array(newFileCDHeader)
+  ];
+}
+
+/**
+ * adds one file in the end of the archieve
+ * @param zipUrl path to the file
+ * @param fileToAdd new file body
+ * @param fileName new file name
+ */
+export async function addOneFile(zipUrl: string, fileToAdd: ArrayBuffer, fileName: string) {
+  // init file handler
+  const provider = new FileHandleFile(zipUrl, true);
+
+  const [oldCDBody, eocdBody, oldEoCDinfo] = await cutTheTailOff(provider);
+
+  // remember the new file local header start offset
+  const newFileOffset = provider.length;
+
+  const [localPart, cdHeaderPart] = await generateFileHeaders(fileName, fileToAdd, newFileOffset);
+
+  // write down the file local header
+  await provider.append(localPart);
+
+  // add the file CD header to the CD
+  const newCDBody = concatenateArrayBuffers(oldCDBody, cdHeaderPart);
+
+  // remember the CD start offset
+  const newCDStartOffset = provider.length;
+
+  // write down new CD
+  await provider.append(new Uint8Array(newCDBody));
+
+  // remember where eocd starts
+  const eocdOffset = provider.length;
+
+  await provider.append(
+    await updateEoCD(
+      eocdBody,
+      oldEoCDinfo.offsets,
+      newCDStartOffset,
+      eocdOffset,
+      oldEoCDinfo.cdRecordsNumber + 1n
+    )
+  );
+}
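A minimal usage sketch for the new addOneFile export, which appends one file to an existing archive on disk and rewrites the central directory and EoCD records behind it. A Node.js environment is assumed (FileHandleFile operates on a file handle), and the file name and content are placeholders:

```ts
import {addOneFile} from '@loaders.gl/zip';

async function appendTextFile(archivePath: string): Promise<void> {
  // Example content; the cast narrows ArrayBufferLike to the ArrayBuffer addOneFile expects
  const body = new TextEncoder().encode('hello zip').buffer as ArrayBuffer;

  // Appends the local header and body, then writes an updated CD and EoCD
  // with the CD records number incremented by one
  await addOneFile(archivePath, body, 'notes/hello.txt');
}
```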

package/src/parse-zip/zip64-info-generation.ts
CHANGED

@@ -53,10 +53,10 @@ type NumberSetter = (header: DataView, offset: number, value: number | bigint) =
 export function setFieldToNumber(
   header: DataView,
   fieldSize: number,
-  fieldOffset: number,
+  fieldOffset: number | bigint,
   value: number | bigint
 ): void {
-  NUMBER_SETTERS[fieldSize](header, fieldOffset, value);
+  NUMBER_SETTERS[fieldSize](header, Number(fieldOffset), value);
 }

 /** functions to write values into buffer according to the bytes amount */
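setFieldToNumber now accepts the field offset as number or bigint and normalizes it with Number() before dispatching to the size-specific DataView setter, so the bigint offset constants used by updateEoCD can be passed through unchanged. A small sketch; the relative import assumes code sitting next to the parse-zip sources, and the constant mirrors end-of-central-directory.ts:

```ts
import {setFieldToNumber} from './zip64-info-generation'; // assumed relative path

const view = new DataView(new ArrayBuffer(32));
const CD_START_OFFSET_OFFSET = 16n; // bigint offset constant, as in end-of-central-directory.ts

// 4-byte field, bigint offset and bigint value: both are accepted by the widened signature
setFieldToNumber(view, 4, CD_START_OFFSET_OFFSET, 0xffffn);
```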