@loaders.gl/zip 4.0.0-alpha.19 → 4.0.0-alpha.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dist.min.js +638 -1
- package/dist/es5/file-provider/data-view-file.js +146 -0
- package/dist/es5/file-provider/data-view-file.js.map +1 -0
- package/dist/es5/file-provider/file-handle-file.js +234 -0
- package/dist/es5/file-provider/file-handle-file.js.map +1 -0
- package/dist/es5/file-provider/file-handle.js +101 -0
- package/dist/es5/file-provider/file-handle.js.map +1 -0
- package/dist/es5/file-provider/file-provider.js +11 -0
- package/dist/es5/file-provider/file-provider.js.map +1 -0
- package/dist/es5/filesystems/zip-filesystem.js +340 -0
- package/dist/es5/filesystems/zip-filesystem.js.map +1 -0
- package/dist/es5/hash-file-utility.js +130 -0
- package/dist/es5/hash-file-utility.js.map +1 -0
- package/dist/es5/index.js +85 -0
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/parse-zip/cd-file-header.js +163 -0
- package/dist/es5/parse-zip/cd-file-header.js.map +1 -0
- package/dist/es5/parse-zip/end-of-central-directory.js +98 -0
- package/dist/es5/parse-zip/end-of-central-directory.js.map +1 -0
- package/dist/es5/parse-zip/local-file-header.js +117 -0
- package/dist/es5/parse-zip/local-file-header.js.map +1 -0
- package/dist/es5/parse-zip/search-from-the-end.js +69 -0
- package/dist/es5/parse-zip/search-from-the-end.js.map +1 -0
- package/dist/es5/zip-loader.js +1 -1
- package/dist/esm/file-provider/data-view-file.js +33 -0
- package/dist/esm/file-provider/data-view-file.js.map +1 -0
- package/dist/esm/file-provider/file-handle-file.js +57 -0
- package/dist/esm/file-provider/file-handle-file.js.map +1 -0
- package/dist/esm/file-provider/file-handle.js +37 -0
- package/dist/esm/file-provider/file-handle.js.map +1 -0
- package/dist/esm/file-provider/file-provider.js +4 -0
- package/dist/esm/file-provider/file-provider.js.map +1 -0
- package/dist/esm/filesystems/zip-filesystem.js +86 -0
- package/dist/esm/filesystems/zip-filesystem.js.map +1 -0
- package/dist/esm/hash-file-utility.js +55 -0
- package/dist/esm/hash-file-utility.js.map +1 -0
- package/dist/esm/index.js +7 -0
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/parse-zip/cd-file-header.js +54 -0
- package/dist/esm/parse-zip/cd-file-header.js.map +1 -0
- package/dist/esm/parse-zip/end-of-central-directory.js +31 -0
- package/dist/esm/parse-zip/end-of-central-directory.js.map +1 -0
- package/dist/esm/parse-zip/local-file-header.js +41 -0
- package/dist/esm/parse-zip/local-file-header.js.map +1 -0
- package/dist/esm/parse-zip/search-from-the-end.js +16 -0
- package/dist/esm/parse-zip/search-from-the-end.js.map +1 -0
- package/dist/esm/zip-loader.js +1 -1
- package/dist/file-provider/data-view-file.d.ts +37 -0
- package/dist/file-provider/data-view-file.d.ts.map +1 -0
- package/dist/file-provider/data-view-file.js +63 -0
- package/dist/file-provider/file-handle-file.d.ts +53 -0
- package/dist/file-provider/file-handle-file.d.ts.map +1 -0
- package/dist/file-provider/file-handle-file.js +90 -0
- package/dist/file-provider/file-handle.d.ts +40 -0
- package/dist/file-provider/file-handle.d.ts.map +1 -0
- package/dist/file-provider/file-handle.js +57 -0
- package/dist/file-provider/file-provider.d.ts +45 -0
- package/dist/file-provider/file-provider.d.ts.map +1 -0
- package/dist/file-provider/file-provider.js +13 -0
- package/dist/filesystems/zip-filesystem.d.ts +44 -0
- package/dist/filesystems/zip-filesystem.d.ts.map +1 -0
- package/dist/filesystems/zip-filesystem.js +119 -0
- package/dist/hash-file-utility.d.ts +35 -0
- package/dist/hash-file-utility.d.ts.map +1 -0
- package/dist/hash-file-utility.js +88 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +21 -1
- package/dist/parse-zip/cd-file-header.d.ts +36 -0
- package/dist/parse-zip/cd-file-header.d.ts.map +1 -0
- package/dist/parse-zip/cd-file-header.js +68 -0
- package/dist/parse-zip/end-of-central-directory.d.ts +18 -0
- package/dist/parse-zip/end-of-central-directory.d.ts.map +1 -0
- package/dist/parse-zip/end-of-central-directory.js +40 -0
- package/dist/parse-zip/local-file-header.d.ts +29 -0
- package/dist/parse-zip/local-file-header.d.ts.map +1 -0
- package/dist/parse-zip/local-file-header.js +55 -0
- package/dist/parse-zip/search-from-the-end.d.ts +11 -0
- package/dist/parse-zip/search-from-the-end.d.ts.map +1 -0
- package/dist/parse-zip/search-from-the-end.js +31 -0
- package/package.json +9 -2
- package/src/file-provider/data-view-file.ts +72 -0
- package/src/file-provider/file-handle-file.ts +114 -0
- package/src/file-provider/file-handle.ts +73 -0
- package/src/file-provider/file-provider.ts +56 -0
- package/src/filesystems/zip-filesystem.ts +132 -0
- package/src/hash-file-utility.ts +101 -0
- package/src/index.ts +19 -0
- package/src/parse-zip/cd-file-header.ts +114 -0
- package/src/parse-zip/end-of-central-directory.ts +71 -0
- package/src/parse-zip/local-file-header.ts +91 -0
- package/src/parse-zip/search-from-the-end.ts +38 -0
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
|
|
2
|
+
import { read, open, close, stat } from 'fs';
|
|
3
|
+
export class FileHandle {
|
|
4
|
+
constructor(fileDescriptor, stats) {
|
|
5
|
+
_defineProperty(this, "fileDescriptor", void 0);
|
|
6
|
+
_defineProperty(this, "stats", void 0);
|
|
7
|
+
_defineProperty(this, "read", (buffer, offset, length, position) => {
|
|
8
|
+
return new Promise(s => {
|
|
9
|
+
read(this.fileDescriptor, buffer, offset, length, position, (_err, bytesRead, buffer) => s({
|
|
10
|
+
bytesRead,
|
|
11
|
+
buffer
|
|
12
|
+
}));
|
|
13
|
+
});
|
|
14
|
+
});
|
|
15
|
+
this.fileDescriptor = fileDescriptor;
|
|
16
|
+
this.stats = stats;
|
|
17
|
+
}
|
|
18
|
+
async close() {
|
|
19
|
+
return new Promise(resolve => {
|
|
20
|
+
close(this.fileDescriptor, _err => resolve());
|
|
21
|
+
});
|
|
22
|
+
}
|
|
23
|
+
get stat() {
|
|
24
|
+
return this.stats;
|
|
25
|
+
}
|
|
26
|
+
}
|
|
27
|
+
_defineProperty(FileHandle, "open", async path => {
|
|
28
|
+
const [fd, stats] = await Promise.all([new Promise(s => {
|
|
29
|
+
open(path, undefined, undefined, (_err, fd) => s(fd));
|
|
30
|
+
}), new Promise(s => {
|
|
31
|
+
stat(path, {
|
|
32
|
+
bigint: true
|
|
33
|
+
}, (_err, stats) => s(stats));
|
|
34
|
+
})]);
|
|
35
|
+
return new FileHandle(fd, stats);
|
|
36
|
+
});
|
|
37
|
+
//# sourceMappingURL=file-handle.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"file-handle.js","names":["read","open","close","stat","FileHandle","constructor","fileDescriptor","stats","_defineProperty","buffer","offset","length","position","Promise","s","_err","bytesRead","resolve","path","fd","all","undefined","bigint"],"sources":["../../../src/file-provider/file-handle.ts"],"sourcesContent":["import {read, open, close, stat, BigIntStats} from 'fs';\n\n/** file reading result */\nexport type FileReadResult = {\n /** amount of the bytes read */\n bytesRead: number;\n /** the buffer filled with data from file*/\n buffer: Buffer;\n};\n\n/** Object handling file info */\nexport class FileHandle {\n private fileDescriptor: number;\n private stats: BigIntStats;\n private constructor(fileDescriptor: number, stats: BigIntStats) {\n this.fileDescriptor = fileDescriptor;\n this.stats = stats;\n }\n /**\n * Opens a `FileHandle`.\n *\n * @param path path to the file\n * @return Fulfills with a {FileHandle} object.\n */\n\n static open = async (path: string): Promise<FileHandle> => {\n const [fd, stats] = await Promise.all([\n new Promise<number>((s) => {\n open(path, undefined, undefined, (_err, fd) => s(fd));\n }),\n new Promise<BigIntStats>((s) => {\n stat(path, {bigint: true}, (_err, stats) => s(stats));\n })\n ]);\n return new FileHandle(fd, stats);\n };\n\n /** Close file */\n async close(): Promise<void> {\n return new Promise<void>((resolve) => {\n close(this.fileDescriptor, (_err) => resolve());\n });\n }\n\n /**\n * Reads data from the file and stores that in the given buffer.\n *\n * If the file is not modified concurrently, the end-of-file is reached when the\n * number of bytes read is zero.\n * @param buffer A buffer that will be filled with the file data read.\n * @param offset The location in the buffer at which to start filling.\n * @param length The number of bytes to read.\n * @param position The location where to begin reading data from the file. 
If `null`, data will be read from the current file position, and the position will be updated. If `position` is an\n * integer, the current file position will remain unchanged.\n * @return Fulfills upon success with a FileReadResult object\n */\n read = (\n buffer: Buffer,\n offset: number,\n length: number,\n position: number | bigint\n ): Promise<FileReadResult> => {\n return new Promise((s) => {\n read(this.fileDescriptor, buffer, offset, length, position, (_err, bytesRead, buffer) =>\n s({bytesRead, buffer})\n );\n });\n };\n\n get stat(): BigIntStats {\n return this.stats;\n }\n}\n"],"mappings":";AAAA,SAAQA,IAAI,EAAEC,IAAI,EAAEC,KAAK,EAAEC,IAAI,QAAoB,IAAI;AAWvD,OAAO,MAAMC,UAAU,CAAC;EAGdC,WAAWA,CAACC,cAAsB,EAAEC,KAAkB,EAAE;IAAAC,eAAA;IAAAA,eAAA;IAAAA,eAAA,eA0CzD,CACLC,MAAc,EACdC,MAAc,EACdC,MAAc,EACdC,QAAyB,KACG;MAC5B,OAAO,IAAIC,OAAO,CAAEC,CAAC,IAAK;QACxBd,IAAI,CAAC,IAAI,CAACM,cAAc,EAAEG,MAAM,EAAEC,MAAM,EAAEC,MAAM,EAAEC,QAAQ,EAAE,CAACG,IAAI,EAAEC,SAAS,EAAEP,MAAM,KAClFK,CAAC,CAAC;UAACE,SAAS;UAAEP;QAAM,CAAC,CACvB,CAAC;MACH,CAAC,CAAC;IACJ,CAAC;IApDC,IAAI,CAACH,cAAc,GAAGA,cAAc;IACpC,IAAI,CAACC,KAAK,GAAGA,KAAK;EACpB;EAqBA,MAAML,KAAKA,CAAA,EAAkB;IAC3B,OAAO,IAAIW,OAAO,CAAQI,OAAO,IAAK;MACpCf,KAAK,CAAC,IAAI,CAACI,cAAc,EAAGS,IAAI,IAAKE,OAAO,CAAC,CAAC,CAAC;IACjD,CAAC,CAAC;EACJ;EA2BA,IAAId,IAAIA,CAAA,EAAgB;IACtB,OAAO,IAAI,CAACI,KAAK;EACnB;AACF;AAACC,eAAA,CA7DYJ,UAAU,UAcP,MAAOc,IAAY,IAA0B;EACzD,MAAM,CAACC,EAAE,EAAEZ,KAAK,CAAC,GAAG,MAAMM,OAAO,CAACO,GAAG,CAAC,CACpC,IAAIP,OAAO,CAAUC,CAAC,IAAK;IACzBb,IAAI,CAACiB,IAAI,EAAEG,SAAS,EAAEA,SAAS,EAAE,CAACN,IAAI,EAAEI,EAAE,KAAKL,CAAC,CAACK,EAAE,CAAC,CAAC;EACvD,CAAC,CAAC,EACF,IAAIN,OAAO,CAAeC,CAAC,IAAK;IAC9BX,IAAI,CAACe,IAAI,EAAE;MAACI,MAAM,EAAE;IAAI,CAAC,EAAE,CAACP,IAAI,EAAER,KAAK,KAAKO,CAAC,CAACP,KAAK,CAAC,CAAC;EACvD,CAAC,CAAC,CACH,CAAC;EACF,OAAO,IAAIH,UAAU,CAACe,EAAE,EAAEZ,KAAK,CAAC;AAClC,CAAC"}
|
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
export const isFileProvider = fileProvider => {
|
|
2
|
+
return (fileProvider === null || fileProvider === void 0 ? void 0 : fileProvider.getUint8) && (fileProvider === null || fileProvider === void 0 ? void 0 : fileProvider.slice) && (fileProvider === null || fileProvider === void 0 ? void 0 : fileProvider.length);
|
|
3
|
+
};
|
|
4
|
+
//# sourceMappingURL=file-provider.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"file-provider.js","names":["isFileProvider","fileProvider","getUint8","slice","length"],"sources":["../../../src/file-provider/file-provider.ts"],"sourcesContent":["/**\n * Interface for providing file data\n */\nexport interface FileProvider {\n /**\n * Cleanup class data\n */\n destroy(): Promise<void>;\n /**\n * Gets an unsigned 8-bit integer at the specified byte offset from the start of the file.\n * @param offset The offset, in bytes, from the start of the file where to read the data.\n */\n getUint8(offset: bigint): Promise<number>;\n\n /**\n * Gets an unsigned 16-bit integer at the specified byte offset from the start of the file.\n * @param offset The offset, in bytes, from the start of the file where to read the data.\n */\n getUint16(offset: bigint): Promise<number>;\n\n /**\n * Gets an unsigned 32-bit integer at the specified byte offset from the start of the file.\n * @param offset The offset, in bytes, from the file of the view where to read the data.\n */\n getUint32(offset: bigint): Promise<number>;\n\n /**\n * Gets an unsigned 32-bit integer at the specified byte offset from the start of the file.\n * @param offset The offset, in byte, from the file of the view where to read the data.\n */\n getBigUint64(offset: bigint): Promise<bigint>;\n\n /**\n * returns an ArrayBuffer whose contents are a copy of this file bytes from startOffset, inclusive, up to endOffset, exclusive.\n * @param startOffset The offset, in bytes, from the start of the file where to start reading the data.\n * @param endOffset The offset, in bytes, from the start of the file where to end reading the data.\n */\n slice(startOffset: bigint, endOffset: bigint): Promise<ArrayBuffer>;\n\n /**\n * the length (in bytes) of the data.\n */\n length: bigint;\n}\n\n/**\n * Check is the object has FileProvider members\n * @param fileProvider - tested object\n */\nexport const isFileProvider = (fileProvider: unknown) => {\n return (\n (fileProvider as 
FileProvider)?.getUint8 &&\n (fileProvider as FileProvider)?.slice &&\n (fileProvider as FileProvider)?.length\n );\n};\n"],"mappings":"AAiDA,OAAO,MAAMA,cAAc,GAAIC,YAAqB,IAAK;EACvD,OACE,CAACA,YAAY,aAAZA,YAAY,uBAAZA,YAAY,CAAmBC,QAAQ,MACvCD,YAAY,aAAZA,YAAY,uBAAZA,YAAY,CAAmBE,KAAK,MACpCF,YAAY,aAAZA,YAAY,uBAAZA,YAAY,CAAmBG,MAAM;AAE1C,CAAC"}
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
|
|
2
|
+
import { isBrowser } from '@loaders.gl/core';
|
|
3
|
+
import { isFileProvider } from '../file-provider/file-provider';
|
|
4
|
+
import { FileHandleFile } from '../file-provider/file-handle-file';
|
|
5
|
+
import { zipCDFileHeaderGenerator } from '../parse-zip/cd-file-header';
|
|
6
|
+
import { parseZipLocalFileHeader } from '../parse-zip/local-file-header';
|
|
7
|
+
const COMPRESSION_METHODS = {
|
|
8
|
+
0: async compressedFile => compressedFile
|
|
9
|
+
};
|
|
10
|
+
export class ZipFileSystem {
|
|
11
|
+
constructor(file) {
|
|
12
|
+
_defineProperty(this, "fileProvider", Promise.resolve(null));
|
|
13
|
+
if (typeof file === 'string') {
|
|
14
|
+
if (!isBrowser) {
|
|
15
|
+
this.fileProvider = FileHandleFile.from(file);
|
|
16
|
+
} else {
|
|
17
|
+
throw new Error('Cannot open file for random access in a WEB browser');
|
|
18
|
+
}
|
|
19
|
+
} else if (isFileProvider(file)) {
|
|
20
|
+
this.fileProvider = Promise.resolve(file);
|
|
21
|
+
}
|
|
22
|
+
}
|
|
23
|
+
async destroy() {
|
|
24
|
+
const fileProvider = await this.fileProvider;
|
|
25
|
+
if (fileProvider) {
|
|
26
|
+
fileProvider.destroy();
|
|
27
|
+
}
|
|
28
|
+
}
|
|
29
|
+
async readdir() {
|
|
30
|
+
const fileProvider = await this.fileProvider;
|
|
31
|
+
if (!fileProvider) {
|
|
32
|
+
throw new Error('No data detected in the zip archive');
|
|
33
|
+
}
|
|
34
|
+
const fileNames = [];
|
|
35
|
+
const zipCDIterator = zipCDFileHeaderGenerator(fileProvider);
|
|
36
|
+
for await (const cdHeader of zipCDIterator) {
|
|
37
|
+
fileNames.push(cdHeader.fileName);
|
|
38
|
+
}
|
|
39
|
+
return fileNames;
|
|
40
|
+
}
|
|
41
|
+
async stat(filename) {
|
|
42
|
+
const cdFileHeader = await this.getCDFileHeader(filename);
|
|
43
|
+
return {
|
|
44
|
+
...cdFileHeader,
|
|
45
|
+
size: Number(cdFileHeader.uncompressedSize)
|
|
46
|
+
};
|
|
47
|
+
}
|
|
48
|
+
async fetch(filename) {
|
|
49
|
+
const fileProvider = await this.fileProvider;
|
|
50
|
+
if (!fileProvider) {
|
|
51
|
+
throw new Error('No data detected in the zip archive');
|
|
52
|
+
}
|
|
53
|
+
const cdFileHeader = await this.getCDFileHeader(filename);
|
|
54
|
+
const localFileHeader = await parseZipLocalFileHeader(cdFileHeader.localHeaderOffset, fileProvider);
|
|
55
|
+
if (!localFileHeader) {
|
|
56
|
+
throw new Error('Local file header has not been found in the zip archive`');
|
|
57
|
+
}
|
|
58
|
+
const compressionHandler = COMPRESSION_METHODS[localFileHeader.compressionMethod.toString()];
|
|
59
|
+
if (!compressionHandler) {
|
|
60
|
+
throw Error('Only Deflation compression is supported');
|
|
61
|
+
}
|
|
62
|
+
const compressedFile = await fileProvider.slice(localFileHeader.fileDataOffset, localFileHeader.fileDataOffset + localFileHeader.compressedSize);
|
|
63
|
+
const uncompressedFile = await compressionHandler(compressedFile);
|
|
64
|
+
const response = new Response(uncompressedFile);
|
|
65
|
+
return response;
|
|
66
|
+
}
|
|
67
|
+
async getCDFileHeader(filename) {
|
|
68
|
+
const fileProvider = await this.fileProvider;
|
|
69
|
+
if (!fileProvider) {
|
|
70
|
+
throw new Error('No data detected in the zip archive');
|
|
71
|
+
}
|
|
72
|
+
const zipCDIterator = zipCDFileHeaderGenerator(fileProvider);
|
|
73
|
+
let result = null;
|
|
74
|
+
for await (const cdHeader of zipCDIterator) {
|
|
75
|
+
if (cdHeader.fileName === filename) {
|
|
76
|
+
result = cdHeader;
|
|
77
|
+
break;
|
|
78
|
+
}
|
|
79
|
+
}
|
|
80
|
+
if (!result) {
|
|
81
|
+
throw new Error('File has not been found in the zip archive');
|
|
82
|
+
}
|
|
83
|
+
return result;
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
//# sourceMappingURL=zip-filesystem.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"zip-filesystem.js","names":["isBrowser","isFileProvider","FileHandleFile","zipCDFileHeaderGenerator","parseZipLocalFileHeader","COMPRESSION_METHODS","compressedFile","ZipFileSystem","constructor","file","_defineProperty","Promise","resolve","fileProvider","from","Error","destroy","readdir","fileNames","zipCDIterator","cdHeader","push","fileName","stat","filename","cdFileHeader","getCDFileHeader","size","Number","uncompressedSize","fetch","localFileHeader","localHeaderOffset","compressionHandler","compressionMethod","toString","slice","fileDataOffset","compressedSize","uncompressedFile","response","Response","result"],"sources":["../../../src/filesystems/zip-filesystem.ts"],"sourcesContent":["import {FileSystem, isBrowser} from '@loaders.gl/core';\nimport {FileProvider, isFileProvider} from '../file-provider/file-provider';\nimport {FileHandleFile} from '../file-provider/file-handle-file';\nimport {ZipCDFileHeader, zipCDFileHeaderGenerator} from '../parse-zip/cd-file-header';\nimport {parseZipLocalFileHeader} from '../parse-zip/local-file-header';\n\ntype CompressionHandler = (compressedFile: ArrayBuffer) => Promise<ArrayBuffer>;\n/** Handling different compression types in zip */\nconst COMPRESSION_METHODS: {[key: number]: CompressionHandler} = {\n /** No compression */\n 0: async (compressedFile) => compressedFile\n};\n\n/**\n * FileSystem adapter for a ZIP file\n * Holds FileProvider object that provides random access to archived files\n */\nexport class ZipFileSystem implements FileSystem {\n /** FileProvider instance promise */\n private fileProvider: Promise<FileProvider | null> = Promise.resolve(null);\n\n /**\n * Constructor\n * @param file - instance of FileProvider or file path string\n */\n constructor(file: FileProvider | string) {\n // Try to open file in NodeJS\n if (typeof file === 'string') {\n if (!isBrowser) {\n this.fileProvider = FileHandleFile.from(file);\n } else {\n throw new Error('Cannot open file for random access in a 
WEB browser');\n }\n } else if (isFileProvider(file)) {\n this.fileProvider = Promise.resolve(file);\n }\n }\n\n /** Clean up resources */\n async destroy() {\n const fileProvider = await this.fileProvider;\n if (fileProvider) {\n fileProvider.destroy();\n }\n }\n\n /**\n * Get file names list from zip archive\n * @returns array of file names\n */\n async readdir(): Promise<string[]> {\n const fileProvider = await this.fileProvider;\n if (!fileProvider) {\n throw new Error('No data detected in the zip archive');\n }\n const fileNames: string[] = [];\n const zipCDIterator = zipCDFileHeaderGenerator(fileProvider);\n for await (const cdHeader of zipCDIterator) {\n fileNames.push(cdHeader.fileName);\n }\n return fileNames;\n }\n\n /**\n * Get file metadata\n * @param filename - name of a file\n * @returns central directory data\n */\n async stat(filename: string): Promise<ZipCDFileHeader & {size: number}> {\n const cdFileHeader = await this.getCDFileHeader(filename);\n return {...cdFileHeader, size: Number(cdFileHeader.uncompressedSize)};\n }\n\n /**\n * Implementation of fetch against this file system\n * @param filename - name of a file\n * @returns - Response with file data\n */\n async fetch(filename: string): Promise<Response> {\n const fileProvider = await this.fileProvider;\n if (!fileProvider) {\n throw new Error('No data detected in the zip archive');\n }\n const cdFileHeader = await this.getCDFileHeader(filename);\n const localFileHeader = await parseZipLocalFileHeader(\n cdFileHeader.localHeaderOffset,\n fileProvider\n );\n if (!localFileHeader) {\n throw new Error('Local file header has not been found in the zip archive`');\n }\n\n const compressionHandler = COMPRESSION_METHODS[localFileHeader.compressionMethod.toString()];\n if (!compressionHandler) {\n throw Error('Only Deflation compression is supported');\n }\n\n const compressedFile = await fileProvider.slice(\n localFileHeader.fileDataOffset,\n localFileHeader.fileDataOffset + 
localFileHeader.compressedSize\n );\n\n const uncompressedFile = await compressionHandler(compressedFile);\n\n const response = new Response(uncompressedFile);\n return response;\n }\n\n /**\n * Get central directory file header\n * @param filename - name of a file\n * @returns central directory file header\n */\n private async getCDFileHeader(filename: string): Promise<ZipCDFileHeader> {\n const fileProvider = await this.fileProvider;\n if (!fileProvider) {\n throw new Error('No data detected in the zip archive');\n }\n const zipCDIterator = zipCDFileHeaderGenerator(fileProvider);\n let result: ZipCDFileHeader | null = null;\n for await (const cdHeader of zipCDIterator) {\n if (cdHeader.fileName === filename) {\n result = cdHeader;\n break;\n }\n }\n if (!result) {\n throw new Error('File has not been found in the zip archive');\n }\n return result;\n }\n}\n"],"mappings":";AAAA,SAAoBA,SAAS,QAAO,kBAAkB;AACtD,SAAsBC,cAAc,QAAO,gCAAgC;AAC3E,SAAQC,cAAc,QAAO,mCAAmC;AAChE,SAAyBC,wBAAwB,QAAO,6BAA6B;AACrF,SAAQC,uBAAuB,QAAO,gCAAgC;AAItE,MAAMC,mBAAwD,GAAG;EAE/D,CAAC,EAAE,MAAOC,cAAc,IAAKA;AAC/B,CAAC;AAMD,OAAO,MAAMC,aAAa,CAAuB;EAQ/CC,WAAWA,CAACC,IAA2B,EAAE;IAAAC,eAAA,uBANYC,OAAO,CAACC,OAAO,CAAC,IAAI,CAAC;IAQxE,IAAI,OAAOH,IAAI,KAAK,QAAQ,EAAE;MAC5B,IAAI,CAACT,SAAS,EAAE;QACd,IAAI,CAACa,YAAY,GAAGX,cAAc,CAACY,IAAI,CAACL,IAAI,CAAC;MAC/C,CAAC,MAAM;QACL,MAAM,IAAIM,KAAK,CAAC,qDAAqD,CAAC;MACxE;IACF,CAAC,MAAM,IAAId,cAAc,CAACQ,IAAI,CAAC,EAAE;MAC/B,IAAI,CAACI,YAAY,GAAGF,OAAO,CAACC,OAAO,CAACH,IAAI,CAAC;IAC3C;EACF;EAGA,MAAMO,OAAOA,CAAA,EAAG;IACd,MAAMH,YAAY,GAAG,MAAM,IAAI,CAACA,YAAY;IAC5C,IAAIA,YAAY,EAAE;MAChBA,YAAY,CAACG,OAAO,CAAC,CAAC;IACxB;EACF;EAMA,MAAMC,OAAOA,CAAA,EAAsB;IACjC,MAAMJ,YAAY,GAAG,MAAM,IAAI,CAACA,YAAY;IAC5C,IAAI,CAACA,YAAY,EAAE;MACjB,MAAM,IAAIE,KAAK,CAAC,qCAAqC,CAAC;IACxD;IACA,MAAMG,SAAmB,GAAG,EAAE;IAC9B,MAAMC,aAAa,GAAGhB,wBAAwB,CAACU,YAAY,CAAC;IAC5D,WAAW,MAAMO,QAAQ,IAAID,aAAa,EAAE;MAC1CD,SAAS,CAACG,IAAI,CAACD,QAAQ,CAACE,QAAQ,CAAC;IACnC;IACA,OAAOJ,SAAS;EAClB;EAOA,MAAMK,IAAIA,CA
ACC,QAAgB,EAA6C;IACtE,MAAMC,YAAY,GAAG,MAAM,IAAI,CAACC,eAAe,CAACF,QAAQ,CAAC;IACzD,OAAO;MAAC,GAAGC,YAAY;MAAEE,IAAI,EAAEC,MAAM,CAACH,YAAY,CAACI,gBAAgB;IAAC,CAAC;EACvE;EAOA,MAAMC,KAAKA,CAACN,QAAgB,EAAqB;IAC/C,MAAMX,YAAY,GAAG,MAAM,IAAI,CAACA,YAAY;IAC5C,IAAI,CAACA,YAAY,EAAE;MACjB,MAAM,IAAIE,KAAK,CAAC,qCAAqC,CAAC;IACxD;IACA,MAAMU,YAAY,GAAG,MAAM,IAAI,CAACC,eAAe,CAACF,QAAQ,CAAC;IACzD,MAAMO,eAAe,GAAG,MAAM3B,uBAAuB,CACnDqB,YAAY,CAACO,iBAAiB,EAC9BnB,YACF,CAAC;IACD,IAAI,CAACkB,eAAe,EAAE;MACpB,MAAM,IAAIhB,KAAK,CAAC,0DAA0D,CAAC;IAC7E;IAEA,MAAMkB,kBAAkB,GAAG5B,mBAAmB,CAAC0B,eAAe,CAACG,iBAAiB,CAACC,QAAQ,CAAC,CAAC,CAAC;IAC5F,IAAI,CAACF,kBAAkB,EAAE;MACvB,MAAMlB,KAAK,CAAC,yCAAyC,CAAC;IACxD;IAEA,MAAMT,cAAc,GAAG,MAAMO,YAAY,CAACuB,KAAK,CAC7CL,eAAe,CAACM,cAAc,EAC9BN,eAAe,CAACM,cAAc,GAAGN,eAAe,CAACO,cACnD,CAAC;IAED,MAAMC,gBAAgB,GAAG,MAAMN,kBAAkB,CAAC3B,cAAc,CAAC;IAEjE,MAAMkC,QAAQ,GAAG,IAAIC,QAAQ,CAACF,gBAAgB,CAAC;IAC/C,OAAOC,QAAQ;EACjB;EAOA,MAAcd,eAAeA,CAACF,QAAgB,EAA4B;IACxE,MAAMX,YAAY,GAAG,MAAM,IAAI,CAACA,YAAY;IAC5C,IAAI,CAACA,YAAY,EAAE;MACjB,MAAM,IAAIE,KAAK,CAAC,qCAAqC,CAAC;IACxD;IACA,MAAMI,aAAa,GAAGhB,wBAAwB,CAACU,YAAY,CAAC;IAC5D,IAAI6B,MAA8B,GAAG,IAAI;IACzC,WAAW,MAAMtB,QAAQ,IAAID,aAAa,EAAE;MAC1C,IAAIC,QAAQ,CAACE,QAAQ,KAAKE,QAAQ,EAAE;QAClCkB,MAAM,GAAGtB,QAAQ;QACjB;MACF;IACF;IACA,IAAI,CAACsB,MAAM,EAAE;MACX,MAAM,IAAI3B,KAAK,CAAC,4CAA4C,CAAC;IAC/D;IACA,OAAO2B,MAAM;EACf;AACF"}
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
import md5 from 'md5';
|
|
2
|
+
import { zipCDFileHeaderGenerator } from './parse-zip/cd-file-header';
|
|
3
|
+
export const compareHashes = (hash1, hash2) => {
|
|
4
|
+
const h1 = new BigUint64Array(hash1.buffer, hash1.byteOffset, 2);
|
|
5
|
+
const h2 = new BigUint64Array(hash2.buffer, hash2.byteOffset, 2);
|
|
6
|
+
const diff = h1[0] === h2[0] ? h1[1] - h2[1] : h1[0] - h2[0];
|
|
7
|
+
if (diff < 0n) {
|
|
8
|
+
return -1;
|
|
9
|
+
} else if (diff === 0n) {
|
|
10
|
+
return 0;
|
|
11
|
+
}
|
|
12
|
+
return 1;
|
|
13
|
+
};
|
|
14
|
+
export const parseHashFile = hashFile => {
|
|
15
|
+
const hashFileBuffer = Buffer.from(hashFile);
|
|
16
|
+
const hashArray = [];
|
|
17
|
+
for (let i = 0; i < hashFileBuffer.buffer.byteLength; i = i + 24) {
|
|
18
|
+
const offsetBuffer = new DataView(hashFileBuffer.buffer.slice(hashFileBuffer.byteOffset + i + 16, hashFileBuffer.byteOffset + i + 24));
|
|
19
|
+
const offset = offsetBuffer.getBigUint64(offsetBuffer.byteOffset, true);
|
|
20
|
+
hashArray.push({
|
|
21
|
+
hash: Buffer.from(hashFileBuffer.subarray(hashFileBuffer.byteOffset + i, hashFileBuffer.byteOffset + i + 16)),
|
|
22
|
+
offset
|
|
23
|
+
});
|
|
24
|
+
}
|
|
25
|
+
return hashArray;
|
|
26
|
+
};
|
|
27
|
+
export const findBin = (hashToSearch, hashArray) => {
|
|
28
|
+
let lowerBorder = 0;
|
|
29
|
+
let upperBorder = hashArray.length;
|
|
30
|
+
while (upperBorder - lowerBorder > 1) {
|
|
31
|
+
const middle = lowerBorder + Math.floor((upperBorder - lowerBorder) / 2);
|
|
32
|
+
const value = compareHashes(hashArray[middle].hash, hashToSearch);
|
|
33
|
+
if (value === 0) {
|
|
34
|
+
return hashArray[middle];
|
|
35
|
+
} else if (value < 0) {
|
|
36
|
+
lowerBorder = middle;
|
|
37
|
+
} else {
|
|
38
|
+
upperBorder = middle;
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
return undefined;
|
|
42
|
+
};
|
|
43
|
+
export const generateHashInfo = async fileProvider => {
|
|
44
|
+
const zipCDIterator = zipCDFileHeaderGenerator(fileProvider);
|
|
45
|
+
const hashInfo = [];
|
|
46
|
+
for await (const cdHeader of zipCDIterator) {
|
|
47
|
+
hashInfo.push({
|
|
48
|
+
hash: Buffer.from(md5(cdHeader.fileName.split('\\').join('/').toLocaleLowerCase()), 'hex'),
|
|
49
|
+
offset: cdHeader.localHeaderOffset
|
|
50
|
+
});
|
|
51
|
+
}
|
|
52
|
+
hashInfo.sort((a, b) => compareHashes(a.hash, b.hash));
|
|
53
|
+
return hashInfo;
|
|
54
|
+
};
|
|
55
|
+
//# sourceMappingURL=hash-file-utility.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"hash-file-utility.js","names":["md5","zipCDFileHeaderGenerator","compareHashes","hash1","hash2","h1","BigUint64Array","buffer","byteOffset","h2","diff","parseHashFile","hashFile","hashFileBuffer","Buffer","from","hashArray","i","byteLength","offsetBuffer","DataView","slice","offset","getBigUint64","push","hash","subarray","findBin","hashToSearch","lowerBorder","upperBorder","length","middle","Math","floor","value","undefined","generateHashInfo","fileProvider","zipCDIterator","hashInfo","cdHeader","fileName","split","join","toLocaleLowerCase","localHeaderOffset","sort","a","b"],"sources":["../../src/hash-file-utility.ts"],"sourcesContent":["import md5 from 'md5';\nimport {zipCDFileHeaderGenerator} from './parse-zip/cd-file-header';\nimport {FileProvider} from './file-provider/file-provider';\n\n/** Element of hash array */\nexport type HashElement = {\n /** File name hash */\n hash: Buffer;\n /** File offset in the archive */\n offset: bigint;\n};\n\n/**\n * Comparing md5 hashes according to https://github.com/Esri/i3s-spec/blob/master/docs/2.0/slpk_hashtable.pcsl.md step 5\n * @param hash1 hash to compare\n * @param hash2 hash to compare\n * @returns -1 if hash1 < hash2, 0 of hash1 === hash2, 1 if hash1 > hash2\n */\nexport const compareHashes = (hash1: Buffer, hash2: Buffer): number => {\n const h1 = new BigUint64Array(hash1.buffer, hash1.byteOffset, 2);\n const h2 = new BigUint64Array(hash2.buffer, hash2.byteOffset, 2);\n\n const diff = h1[0] === h2[0] ? 
h1[1] - h2[1] : h1[0] - h2[0];\n\n if (diff < 0n) {\n return -1;\n } else if (diff === 0n) {\n return 0;\n }\n return 1;\n};\n\n/**\n * Reads hash file from buffer and returns it in ready-to-use form\n * @param hashFile - bufer containing hash file\n * @returns Array containing file info\n */\nexport const parseHashFile = (hashFile: ArrayBuffer): HashElement[] => {\n const hashFileBuffer = Buffer.from(hashFile);\n const hashArray: HashElement[] = [];\n for (let i = 0; i < hashFileBuffer.buffer.byteLength; i = i + 24) {\n const offsetBuffer = new DataView(\n hashFileBuffer.buffer.slice(\n hashFileBuffer.byteOffset + i + 16,\n hashFileBuffer.byteOffset + i + 24\n )\n );\n const offset = offsetBuffer.getBigUint64(offsetBuffer.byteOffset, true);\n hashArray.push({\n hash: Buffer.from(\n hashFileBuffer.subarray(hashFileBuffer.byteOffset + i, hashFileBuffer.byteOffset + i + 16)\n ),\n offset\n });\n }\n return hashArray;\n};\n\n/**\n * Binary search in the hash info\n * @param hashToSearch hash that we need to find\n * @returns required hash element or undefined if not found\n */\nexport const findBin = (\n hashToSearch: Buffer,\n hashArray: HashElement[]\n): HashElement | undefined => {\n let lowerBorder = 0;\n let upperBorder = hashArray.length;\n\n while (upperBorder - lowerBorder > 1) {\n const middle = lowerBorder + Math.floor((upperBorder - lowerBorder) / 2);\n const value = compareHashes(hashArray[middle].hash, hashToSearch);\n if (value === 0) {\n return hashArray[middle];\n } else if (value < 0) {\n lowerBorder = middle;\n } else {\n upperBorder = middle;\n }\n }\n return undefined;\n};\n\n/**\n * generates hash info from central directory\n * @param fileProvider - provider of the archive\n * @returns ready to use hash info\n */\nexport const generateHashInfo = async (fileProvider: FileProvider): Promise<HashElement[]> => {\n const zipCDIterator = zipCDFileHeaderGenerator(fileProvider);\n const hashInfo: HashElement[] = [];\n for await (const cdHeader of 
zipCDIterator) {\n hashInfo.push({\n hash: Buffer.from(md5(cdHeader.fileName.split('\\\\').join('/').toLocaleLowerCase()), 'hex'),\n offset: cdHeader.localHeaderOffset\n });\n }\n hashInfo.sort((a, b) => compareHashes(a.hash, b.hash));\n return hashInfo;\n};\n"],"mappings":"AAAA,OAAOA,GAAG,MAAM,KAAK;AACrB,SAAQC,wBAAwB,QAAO,4BAA4B;AAiBnE,OAAO,MAAMC,aAAa,GAAGA,CAACC,KAAa,EAAEC,KAAa,KAAa;EACrE,MAAMC,EAAE,GAAG,IAAIC,cAAc,CAACH,KAAK,CAACI,MAAM,EAAEJ,KAAK,CAACK,UAAU,EAAE,CAAC,CAAC;EAChE,MAAMC,EAAE,GAAG,IAAIH,cAAc,CAACF,KAAK,CAACG,MAAM,EAAEH,KAAK,CAACI,UAAU,EAAE,CAAC,CAAC;EAEhE,MAAME,IAAI,GAAGL,EAAE,CAAC,CAAC,CAAC,KAAKI,EAAE,CAAC,CAAC,CAAC,GAAGJ,EAAE,CAAC,CAAC,CAAC,GAAGI,EAAE,CAAC,CAAC,CAAC,GAAGJ,EAAE,CAAC,CAAC,CAAC,GAAGI,EAAE,CAAC,CAAC,CAAC;EAE5D,IAAIC,IAAI,GAAG,EAAE,EAAE;IACb,OAAO,CAAC,CAAC;EACX,CAAC,MAAM,IAAIA,IAAI,KAAK,EAAE,EAAE;IACtB,OAAO,CAAC;EACV;EACA,OAAO,CAAC;AACV,CAAC;AAOD,OAAO,MAAMC,aAAa,GAAIC,QAAqB,IAAoB;EACrE,MAAMC,cAAc,GAAGC,MAAM,CAACC,IAAI,CAACH,QAAQ,CAAC;EAC5C,MAAMI,SAAwB,GAAG,EAAE;EACnC,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGJ,cAAc,CAACN,MAAM,CAACW,UAAU,EAAED,CAAC,GAAGA,CAAC,GAAG,EAAE,EAAE;IAChE,MAAME,YAAY,GAAG,IAAIC,QAAQ,CAC/BP,cAAc,CAACN,MAAM,CAACc,KAAK,CACzBR,cAAc,CAACL,UAAU,GAAGS,CAAC,GAAG,EAAE,EAClCJ,cAAc,CAACL,UAAU,GAAGS,CAAC,GAAG,EAClC,CACF,CAAC;IACD,MAAMK,MAAM,GAAGH,YAAY,CAACI,YAAY,CAACJ,YAAY,CAACX,UAAU,EAAE,IAAI,CAAC;IACvEQ,SAAS,CAACQ,IAAI,CAAC;MACbC,IAAI,EAAEX,MAAM,CAACC,IAAI,CACfF,cAAc,CAACa,QAAQ,CAACb,cAAc,CAACL,UAAU,GAAGS,CAAC,EAAEJ,cAAc,CAACL,UAAU,GAAGS,CAAC,GAAG,EAAE,CAC3F,CAAC;MACDK;IACF,CAAC,CAAC;EACJ;EACA,OAAON,SAAS;AAClB,CAAC;AAOD,OAAO,MAAMW,OAAO,GAAGA,CACrBC,YAAoB,EACpBZ,SAAwB,KACI;EAC5B,IAAIa,WAAW,GAAG,CAAC;EACnB,IAAIC,WAAW,GAAGd,SAAS,CAACe,MAAM;EAElC,OAAOD,WAAW,GAAGD,WAAW,GAAG,CAAC,EAAE;IACpC,MAAMG,MAAM,GAAGH,WAAW,GAAGI,IAAI,CAACC,KAAK,CAAC,CAACJ,WAAW,GAAGD,WAAW,IAAI,CAAC,CAAC;IACxE,MAAMM,KAAK,GAAGjC,aAAa,CAACc,SAAS,CAACgB,MAAM,CAAC,CAACP,IAAI,EAAEG,YAAY,CAAC;IACjE,IAAIO,KAAK,KAAK,CAAC,EAAE;MACf,OAAOnB,SAAS,CAACgB,MAAM,CAAC;IAC1B,CA
AC,MAAM,IAAIG,KAAK,GAAG,CAAC,EAAE;MACpBN,WAAW,GAAGG,MAAM;IACtB,CAAC,MAAM;MACLF,WAAW,GAAGE,MAAM;IACtB;EACF;EACA,OAAOI,SAAS;AAClB,CAAC;AAOD,OAAO,MAAMC,gBAAgB,GAAG,MAAOC,YAA0B,IAA6B;EAC5F,MAAMC,aAAa,GAAGtC,wBAAwB,CAACqC,YAAY,CAAC;EAC5D,MAAME,QAAuB,GAAG,EAAE;EAClC,WAAW,MAAMC,QAAQ,IAAIF,aAAa,EAAE;IAC1CC,QAAQ,CAAChB,IAAI,CAAC;MACZC,IAAI,EAAEX,MAAM,CAACC,IAAI,CAACf,GAAG,CAACyC,QAAQ,CAACC,QAAQ,CAACC,KAAK,CAAC,IAAI,CAAC,CAACC,IAAI,CAAC,GAAG,CAAC,CAACC,iBAAiB,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC;MAC1FvB,MAAM,EAAEmB,QAAQ,CAACK;IACnB,CAAC,CAAC;EACJ;EACAN,QAAQ,CAACO,IAAI,CAAC,CAACC,CAAC,EAAEC,CAAC,KAAK/C,aAAa,CAAC8C,CAAC,CAACvB,IAAI,EAAEwB,CAAC,CAACxB,IAAI,CAAC,CAAC;EACtD,OAAOe,QAAQ;AACjB,CAAC"}
|
package/dist/esm/index.js
CHANGED
|
@@ -1,4 +1,11 @@
|
|
|
1
1
|
export { ZipLoader } from './zip-loader';
|
|
2
2
|
export { ZipWriter } from './zip-writer';
|
|
3
3
|
export { default as TarBuilder } from './tar-builder';
|
|
4
|
+
export { FileHandleFile } from './file-provider/file-handle-file';
|
|
5
|
+
export { DataViewFile } from './file-provider/data-view-file';
|
|
6
|
+
export { parseZipCDFileHeader, zipCDFileHeaderGenerator, signature as cdSignature } from './parse-zip/cd-file-header';
|
|
7
|
+
export { parseZipLocalFileHeader, signature as localHeaderSignature } from './parse-zip/local-file-header';
|
|
8
|
+
export { parseEoCDRecord } from './parse-zip/end-of-central-directory';
|
|
9
|
+
export { searchFromTheEnd } from './parse-zip/search-from-the-end';
|
|
10
|
+
export { compareHashes, parseHashFile, findBin, generateHashInfo } from './hash-file-utility';
|
|
4
11
|
//# sourceMappingURL=index.js.map
|
package/dist/esm/index.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.js","names":["ZipLoader","ZipWriter","default","TarBuilder"],"sources":["../../src/index.ts"],"sourcesContent":["export {ZipLoader} from './zip-loader';\nexport {ZipWriter} from './zip-writer';\nexport {default as TarBuilder} from './tar-builder';\n"],"mappings":"AAAA,SAAQA,SAAS,QAAO,cAAc;AACtC,SAAQC,SAAS,QAAO,cAAc;AACtC,SAAQC,OAAO,IAAIC,UAAU,QAAO,eAAe"}
|
|
1
|
+
{"version":3,"file":"index.js","names":["ZipLoader","ZipWriter","default","TarBuilder","FileHandleFile","DataViewFile","parseZipCDFileHeader","zipCDFileHeaderGenerator","signature","cdSignature","parseZipLocalFileHeader","localHeaderSignature","parseEoCDRecord","searchFromTheEnd","compareHashes","parseHashFile","findBin","generateHashInfo"],"sources":["../../src/index.ts"],"sourcesContent":["export {ZipLoader} from './zip-loader';\nexport {ZipWriter} from './zip-writer';\nexport {default as TarBuilder} from './tar-builder';\n\nexport type {FileProvider} from './file-provider/file-provider';\nexport {FileHandleFile} from './file-provider/file-handle-file';\nexport {DataViewFile} from './file-provider/data-view-file';\n\nexport {\n parseZipCDFileHeader,\n zipCDFileHeaderGenerator,\n signature as cdSignature\n} from './parse-zip/cd-file-header';\nexport {\n parseZipLocalFileHeader,\n signature as localHeaderSignature\n} from './parse-zip/local-file-header';\nexport {parseEoCDRecord} from './parse-zip/end-of-central-directory';\nexport {searchFromTheEnd} from './parse-zip/search-from-the-end';\n\nexport type {HashElement} from './hash-file-utility';\nexport {compareHashes, parseHashFile, findBin, generateHashInfo} from './hash-file-utility';\n"],"mappings":"AAAA,SAAQA,SAAS,QAAO,cAAc;AACtC,SAAQC,SAAS,QAAO,cAAc;AACtC,SAAQC,OAAO,IAAIC,UAAU,QAAO,eAAe;AAGnD,SAAQC,cAAc,QAAO,kCAAkC;AAC/D,SAAQC,YAAY,QAAO,gCAAgC;AAE3D,SACEC,oBAAoB,EACpBC,wBAAwB,EACxBC,SAAS,IAAIC,WAAW,QACnB,4BAA4B;AACnC,SACEC,uBAAuB,EACvBF,SAAS,IAAIG,oBAAoB,QAC5B,+BAA+B;AACtC,SAAQC,eAAe,QAAO,sCAAsC;AACpE,SAAQC,gBAAgB,QAAO,iCAAiC;AAGhE,SAAQC,aAAa,EAAEC,aAAa,EAAEC,OAAO,EAAEC,gBAAgB,QAAO,qBAAqB"}
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import { parseEoCDRecord } from './end-of-central-directory';
|
|
2
|
+
const CD_COMPRESSED_SIZE_OFFSET = 20n;
|
|
3
|
+
const CD_UNCOMPRESSED_SIZE_OFFSET = 24n;
|
|
4
|
+
const CD_FILE_NAME_LENGTH_OFFSET = 28n;
|
|
5
|
+
const CD_EXTRA_FIELD_LENGTH_OFFSET = 30n;
|
|
6
|
+
const CD_LOCAL_HEADER_OFFSET_OFFSET = 42n;
|
|
7
|
+
const CD_FILE_NAME_OFFSET = 46n;
|
|
8
|
+
export const signature = [0x50, 0x4b, 0x01, 0x02];
|
|
9
|
+
export const parseZipCDFileHeader = async (headerOffset, buffer) => {
|
|
10
|
+
if (Buffer.from(await buffer.slice(headerOffset, headerOffset + 4n)).compare(Buffer.from(signature)) !== 0) {
|
|
11
|
+
return null;
|
|
12
|
+
}
|
|
13
|
+
let compressedSize = BigInt(await buffer.getUint32(headerOffset + CD_COMPRESSED_SIZE_OFFSET));
|
|
14
|
+
let uncompressedSize = BigInt(await buffer.getUint32(headerOffset + CD_UNCOMPRESSED_SIZE_OFFSET));
|
|
15
|
+
const extraFieldLength = await buffer.getUint16(headerOffset + CD_EXTRA_FIELD_LENGTH_OFFSET);
|
|
16
|
+
const fileNameLength = await buffer.getUint16(headerOffset + CD_FILE_NAME_LENGTH_OFFSET);
|
|
17
|
+
const fileName = new TextDecoder().decode(await buffer.slice(headerOffset + CD_FILE_NAME_OFFSET, headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength)));
|
|
18
|
+
const extraOffset = headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength);
|
|
19
|
+
const oldFormatOffset = await buffer.getUint32(headerOffset + CD_LOCAL_HEADER_OFFSET_OFFSET);
|
|
20
|
+
let fileDataOffset = BigInt(oldFormatOffset);
|
|
21
|
+
let offsetInZip64Data = 4n;
|
|
22
|
+
if (uncompressedSize === BigInt(0xffffffff)) {
|
|
23
|
+
uncompressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);
|
|
24
|
+
offsetInZip64Data += 8n;
|
|
25
|
+
}
|
|
26
|
+
if (compressedSize === BigInt(0xffffffff)) {
|
|
27
|
+
compressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);
|
|
28
|
+
offsetInZip64Data += 8n;
|
|
29
|
+
}
|
|
30
|
+
if (fileDataOffset === BigInt(0xffffffff)) {
|
|
31
|
+
fileDataOffset = await buffer.getBigUint64(extraOffset + offsetInZip64Data);
|
|
32
|
+
}
|
|
33
|
+
const localHeaderOffset = fileDataOffset;
|
|
34
|
+
return {
|
|
35
|
+
compressedSize,
|
|
36
|
+
uncompressedSize,
|
|
37
|
+
extraFieldLength,
|
|
38
|
+
fileNameLength,
|
|
39
|
+
fileName,
|
|
40
|
+
extraOffset,
|
|
41
|
+
localHeaderOffset
|
|
42
|
+
};
|
|
43
|
+
};
|
|
44
|
+
export async function* zipCDFileHeaderGenerator(fileProvider) {
|
|
45
|
+
const {
|
|
46
|
+
cdStartOffset
|
|
47
|
+
} = await parseEoCDRecord(fileProvider);
|
|
48
|
+
let cdHeader = await parseZipCDFileHeader(cdStartOffset, fileProvider);
|
|
49
|
+
while (cdHeader) {
|
|
50
|
+
yield cdHeader;
|
|
51
|
+
cdHeader = await parseZipCDFileHeader(cdHeader.extraOffset + BigInt(cdHeader.extraFieldLength), fileProvider);
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
//# sourceMappingURL=cd-file-header.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"cd-file-header.js","names":["parseEoCDRecord","CD_COMPRESSED_SIZE_OFFSET","CD_UNCOMPRESSED_SIZE_OFFSET","CD_FILE_NAME_LENGTH_OFFSET","CD_EXTRA_FIELD_LENGTH_OFFSET","CD_LOCAL_HEADER_OFFSET_OFFSET","CD_FILE_NAME_OFFSET","signature","parseZipCDFileHeader","headerOffset","buffer","Buffer","from","slice","compare","compressedSize","BigInt","getUint32","uncompressedSize","extraFieldLength","getUint16","fileNameLength","fileName","TextDecoder","decode","extraOffset","oldFormatOffset","fileDataOffset","offsetInZip64Data","getBigUint64","localHeaderOffset","zipCDFileHeaderGenerator","fileProvider","cdStartOffset","cdHeader"],"sources":["../../../src/parse-zip/cd-file-header.ts"],"sourcesContent":["import {FileProvider} from '../file-provider/file-provider';\nimport {parseEoCDRecord} from './end-of-central-directory';\nimport {ZipSignature} from './search-from-the-end';\n\n/**\n * zip central directory file header info\n * according to https://en.wikipedia.org/wiki/ZIP_(file_format)\n */\nexport type ZipCDFileHeader = {\n /** Compressed size */\n compressedSize: bigint;\n /** Uncompressed size */\n uncompressedSize: bigint;\n /** Extra field size */\n extraFieldLength: number;\n /** File name length */\n fileNameLength: number;\n /** File name */\n fileName: string;\n /** Extra field offset */\n extraOffset: bigint;\n /** Relative offset of local file header */\n localHeaderOffset: bigint;\n};\n\n// offsets accroding to https://en.wikipedia.org/wiki/ZIP_(file_format)\nconst CD_COMPRESSED_SIZE_OFFSET = 20n;\nconst CD_UNCOMPRESSED_SIZE_OFFSET = 24n;\nconst CD_FILE_NAME_LENGTH_OFFSET = 28n;\nconst CD_EXTRA_FIELD_LENGTH_OFFSET = 30n;\nconst CD_LOCAL_HEADER_OFFSET_OFFSET = 42n;\nconst CD_FILE_NAME_OFFSET = 46n;\n\nexport const signature: ZipSignature = [0x50, 0x4b, 0x01, 0x02];\n\n/**\n * Parses central directory file header of zip file\n * @param headerOffset - offset in the archive where header starts\n * @param buffer - buffer containing whole array\n * 
@returns Info from the header\n */\nexport const parseZipCDFileHeader = async (\n headerOffset: bigint,\n buffer: FileProvider\n): Promise<ZipCDFileHeader | null> => {\n if (\n Buffer.from(await buffer.slice(headerOffset, headerOffset + 4n)).compare(\n Buffer.from(signature)\n ) !== 0\n ) {\n return null;\n }\n\n let compressedSize = BigInt(await buffer.getUint32(headerOffset + CD_COMPRESSED_SIZE_OFFSET));\n\n let uncompressedSize = BigInt(await buffer.getUint32(headerOffset + CD_UNCOMPRESSED_SIZE_OFFSET));\n\n const extraFieldLength = await buffer.getUint16(headerOffset + CD_EXTRA_FIELD_LENGTH_OFFSET);\n\n const fileNameLength = await buffer.getUint16(headerOffset + CD_FILE_NAME_LENGTH_OFFSET);\n\n const fileName = new TextDecoder().decode(\n await buffer.slice(\n headerOffset + CD_FILE_NAME_OFFSET,\n headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength)\n )\n );\n\n const extraOffset = headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength);\n\n const oldFormatOffset = await buffer.getUint32(headerOffset + CD_LOCAL_HEADER_OFFSET_OFFSET);\n\n let fileDataOffset = BigInt(oldFormatOffset);\n let offsetInZip64Data = 4n;\n // looking for info that might be also be in zip64 extra field\n if (uncompressedSize === BigInt(0xffffffff)) {\n uncompressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);\n offsetInZip64Data += 8n;\n }\n if (compressedSize === BigInt(0xffffffff)) {\n compressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);\n offsetInZip64Data += 8n;\n }\n if (fileDataOffset === BigInt(0xffffffff)) {\n fileDataOffset = await buffer.getBigUint64(extraOffset + offsetInZip64Data); // setting it to the one from zip64\n }\n const localHeaderOffset = fileDataOffset;\n\n return {\n compressedSize,\n uncompressedSize,\n extraFieldLength,\n fileNameLength,\n fileName,\n extraOffset,\n localHeaderOffset\n };\n};\n\n/**\n * Create iterator over files of zip archive\n * @param fileProvider - file provider that provider random 
access to the file\n */\nexport async function* zipCDFileHeaderGenerator(fileProvider: FileProvider) {\n const {cdStartOffset} = await parseEoCDRecord(fileProvider);\n let cdHeader = await parseZipCDFileHeader(cdStartOffset, fileProvider);\n while (cdHeader) {\n yield cdHeader;\n cdHeader = await parseZipCDFileHeader(\n cdHeader.extraOffset + BigInt(cdHeader.extraFieldLength),\n fileProvider\n );\n }\n}\n"],"mappings":"AACA,SAAQA,eAAe,QAAO,4BAA4B;AAyB1D,MAAMC,yBAAyB,GAAG,GAAG;AACrC,MAAMC,2BAA2B,GAAG,GAAG;AACvC,MAAMC,0BAA0B,GAAG,GAAG;AACtC,MAAMC,4BAA4B,GAAG,GAAG;AACxC,MAAMC,6BAA6B,GAAG,GAAG;AACzC,MAAMC,mBAAmB,GAAG,GAAG;AAE/B,OAAO,MAAMC,SAAuB,GAAG,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC;AAQ/D,OAAO,MAAMC,oBAAoB,GAAG,MAAAA,CAClCC,YAAoB,EACpBC,MAAoB,KACgB;EACpC,IACEC,MAAM,CAACC,IAAI,CAAC,MAAMF,MAAM,CAACG,KAAK,CAACJ,YAAY,EAAEA,YAAY,GAAG,EAAE,CAAC,CAAC,CAACK,OAAO,CACtEH,MAAM,CAACC,IAAI,CAACL,SAAS,CACvB,CAAC,KAAK,CAAC,EACP;IACA,OAAO,IAAI;EACb;EAEA,IAAIQ,cAAc,GAAGC,MAAM,CAAC,MAAMN,MAAM,CAACO,SAAS,CAACR,YAAY,GAAGR,yBAAyB,CAAC,CAAC;EAE7F,IAAIiB,gBAAgB,GAAGF,MAAM,CAAC,MAAMN,MAAM,CAACO,SAAS,CAACR,YAAY,GAAGP,2BAA2B,CAAC,CAAC;EAEjG,MAAMiB,gBAAgB,GAAG,MAAMT,MAAM,CAACU,SAAS,CAACX,YAAY,GAAGL,4BAA4B,CAAC;EAE5F,MAAMiB,cAAc,GAAG,MAAMX,MAAM,CAACU,SAAS,CAACX,YAAY,GAAGN,0BAA0B,CAAC;EAExF,MAAMmB,QAAQ,GAAG,IAAIC,WAAW,CAAC,CAAC,CAACC,MAAM,CACvC,MAAMd,MAAM,CAACG,KAAK,CAChBJ,YAAY,GAAGH,mBAAmB,EAClCG,YAAY,GAAGH,mBAAmB,GAAGU,MAAM,CAACK,cAAc,CAC5D,CACF,CAAC;EAED,MAAMI,WAAW,GAAGhB,YAAY,GAAGH,mBAAmB,GAAGU,MAAM,CAACK,cAAc,CAAC;EAE/E,MAAMK,eAAe,GAAG,MAAMhB,MAAM,CAACO,SAAS,CAACR,YAAY,GAAGJ,6BAA6B,CAAC;EAE5F,IAAIsB,cAAc,GAAGX,MAAM,CAACU,eAAe,CAAC;EAC5C,IAAIE,iBAAiB,GAAG,EAAE;EAE1B,IAAIV,gBAAgB,KAAKF,MAAM,CAAC,UAAU,CAAC,EAAE;IAC3CE,gBAAgB,GAAG,MAAMR,MAAM,CAACmB,YAAY,CAACJ,WAAW,GAAGG,iBAAiB,CAAC;IAC7EA,iBAAiB,IAAI,EAAE;EACzB;EACA,IAAIb,cAAc,KAAKC,MAAM,CAAC,UAAU,CAAC,EAAE;IACzCD,cAAc,GAAG,MAAML,MAAM,CAACmB,YAAY,CAACJ,WAAW,GAAGG,iBAAiB,CAAC;IAC3EA,iBAAiB,IAAI,EAAE;EACzB;EACA,IAAID,cAAc,KAAKX,MAAM,CAAC,
UAAU,CAAC,EAAE;IACzCW,cAAc,GAAG,MAAMjB,MAAM,CAACmB,YAAY,CAACJ,WAAW,GAAGG,iBAAiB,CAAC;EAC7E;EACA,MAAME,iBAAiB,GAAGH,cAAc;EAExC,OAAO;IACLZ,cAAc;IACdG,gBAAgB;IAChBC,gBAAgB;IAChBE,cAAc;IACdC,QAAQ;IACRG,WAAW;IACXK;EACF,CAAC;AACH,CAAC;AAMD,OAAO,gBAAgBC,wBAAwBA,CAACC,YAA0B,EAAE;EAC1E,MAAM;IAACC;EAAa,CAAC,GAAG,MAAMjC,eAAe,CAACgC,YAAY,CAAC;EAC3D,IAAIE,QAAQ,GAAG,MAAM1B,oBAAoB,CAACyB,aAAa,EAAED,YAAY,CAAC;EACtE,OAAOE,QAAQ,EAAE;IACf,MAAMA,QAAQ;IACdA,QAAQ,GAAG,MAAM1B,oBAAoB,CACnC0B,QAAQ,CAACT,WAAW,GAAGT,MAAM,CAACkB,QAAQ,CAACf,gBAAgB,CAAC,EACxDa,YACF,CAAC;EACH;AACF"}
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import { searchFromTheEnd } from './search-from-the-end';
|
|
2
|
+
const eoCDSignature = [0x50, 0x4b, 0x05, 0x06];
|
|
3
|
+
const zip64EoCDLocatorSignature = Buffer.from([0x50, 0x4b, 0x06, 0x07]);
|
|
4
|
+
const zip64EoCDSignature = Buffer.from([0x50, 0x4b, 0x06, 0x06]);
|
|
5
|
+
const CD_RECORDS_NUMBER_OFFSET = 8n;
|
|
6
|
+
const CD_START_OFFSET_OFFSET = 16n;
|
|
7
|
+
const ZIP64_EOCD_START_OFFSET_OFFSET = 8n;
|
|
8
|
+
const ZIP64_CD_RECORDS_NUMBER_OFFSET = 24n;
|
|
9
|
+
const ZIP64_CD_START_OFFSET_OFFSET = 48n;
|
|
10
|
+
export const parseEoCDRecord = async fileProvider => {
|
|
11
|
+
const zipEoCDOffset = await searchFromTheEnd(fileProvider, eoCDSignature);
|
|
12
|
+
let cdRecordsNumber = BigInt(await fileProvider.getUint16(zipEoCDOffset + CD_RECORDS_NUMBER_OFFSET));
|
|
13
|
+
let cdStartOffset = BigInt(await fileProvider.getUint32(zipEoCDOffset + CD_START_OFFSET_OFFSET));
|
|
14
|
+
if (cdStartOffset === BigInt(0xffffffff) || cdRecordsNumber === BigInt(0xffffffff)) {
|
|
15
|
+
const zip64EoCDLocatorOffset = zipEoCDOffset - 20n;
|
|
16
|
+
if (Buffer.from(await fileProvider.slice(zip64EoCDLocatorOffset, zip64EoCDLocatorOffset + 4n)).compare(zip64EoCDLocatorSignature) !== 0) {
|
|
17
|
+
throw new Error('zip64 EoCD locator not found');
|
|
18
|
+
}
|
|
19
|
+
const zip64EoCDOffset = await fileProvider.getBigUint64(zip64EoCDLocatorOffset + ZIP64_EOCD_START_OFFSET_OFFSET);
|
|
20
|
+
if (Buffer.from(await fileProvider.slice(zip64EoCDOffset, zip64EoCDOffset + 4n)).compare(zip64EoCDSignature) !== 0) {
|
|
21
|
+
throw new Error('zip64 EoCD not found');
|
|
22
|
+
}
|
|
23
|
+
cdRecordsNumber = await fileProvider.getBigUint64(zip64EoCDOffset + ZIP64_CD_RECORDS_NUMBER_OFFSET);
|
|
24
|
+
cdStartOffset = await fileProvider.getBigUint64(zip64EoCDOffset + ZIP64_CD_START_OFFSET_OFFSET);
|
|
25
|
+
}
|
|
26
|
+
return {
|
|
27
|
+
cdRecordsNumber,
|
|
28
|
+
cdStartOffset
|
|
29
|
+
};
|
|
30
|
+
};
|
|
31
|
+
//# sourceMappingURL=end-of-central-directory.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"end-of-central-directory.js","names":["searchFromTheEnd","eoCDSignature","zip64EoCDLocatorSignature","Buffer","from","zip64EoCDSignature","CD_RECORDS_NUMBER_OFFSET","CD_START_OFFSET_OFFSET","ZIP64_EOCD_START_OFFSET_OFFSET","ZIP64_CD_RECORDS_NUMBER_OFFSET","ZIP64_CD_START_OFFSET_OFFSET","parseEoCDRecord","fileProvider","zipEoCDOffset","cdRecordsNumber","BigInt","getUint16","cdStartOffset","getUint32","zip64EoCDLocatorOffset","slice","compare","Error","zip64EoCDOffset","getBigUint64"],"sources":["../../../src/parse-zip/end-of-central-directory.ts"],"sourcesContent":["import {FileProvider} from '../file-provider/file-provider';\nimport {ZipSignature, searchFromTheEnd} from './search-from-the-end';\n\n/**\n * End of central directory info\n * according to https://en.wikipedia.org/wiki/ZIP_(file_format)\n */\nexport type ZipEoCDRecord = {\n /** Relative offset of local file header */\n cdStartOffset: bigint;\n /** Relative offset of local file header */\n cdRecordsNumber: bigint;\n};\n\nconst eoCDSignature: ZipSignature = [0x50, 0x4b, 0x05, 0x06];\nconst zip64EoCDLocatorSignature = Buffer.from([0x50, 0x4b, 0x06, 0x07]);\nconst zip64EoCDSignature = Buffer.from([0x50, 0x4b, 0x06, 0x06]);\n\n// offsets accroding to https://en.wikipedia.org/wiki/ZIP_(file_format)\nconst CD_RECORDS_NUMBER_OFFSET = 8n;\nconst CD_START_OFFSET_OFFSET = 16n;\nconst ZIP64_EOCD_START_OFFSET_OFFSET = 8n;\nconst ZIP64_CD_RECORDS_NUMBER_OFFSET = 24n;\nconst ZIP64_CD_START_OFFSET_OFFSET = 48n;\n\n/**\n * Parses end of central directory record of zip file\n * @param fileProvider - FileProvider instance\n * @returns Info from the header\n */\nexport const parseEoCDRecord = async (fileProvider: FileProvider): Promise<ZipEoCDRecord> => {\n const zipEoCDOffset = await searchFromTheEnd(fileProvider, eoCDSignature);\n\n let cdRecordsNumber = BigInt(\n await fileProvider.getUint16(zipEoCDOffset + CD_RECORDS_NUMBER_OFFSET)\n );\n let cdStartOffset = BigInt(await 
fileProvider.getUint32(zipEoCDOffset + CD_START_OFFSET_OFFSET));\n\n if (cdStartOffset === BigInt(0xffffffff) || cdRecordsNumber === BigInt(0xffffffff)) {\n const zip64EoCDLocatorOffset = zipEoCDOffset - 20n;\n\n if (\n Buffer.from(\n await fileProvider.slice(zip64EoCDLocatorOffset, zip64EoCDLocatorOffset + 4n)\n ).compare(zip64EoCDLocatorSignature) !== 0\n ) {\n throw new Error('zip64 EoCD locator not found');\n }\n const zip64EoCDOffset = await fileProvider.getBigUint64(\n zip64EoCDLocatorOffset + ZIP64_EOCD_START_OFFSET_OFFSET\n );\n\n if (\n Buffer.from(await fileProvider.slice(zip64EoCDOffset, zip64EoCDOffset + 4n)).compare(\n zip64EoCDSignature\n ) !== 0\n ) {\n throw new Error('zip64 EoCD not found');\n }\n\n cdRecordsNumber = await fileProvider.getBigUint64(\n zip64EoCDOffset + ZIP64_CD_RECORDS_NUMBER_OFFSET\n );\n cdStartOffset = await fileProvider.getBigUint64(zip64EoCDOffset + ZIP64_CD_START_OFFSET_OFFSET);\n }\n\n return {\n cdRecordsNumber,\n cdStartOffset\n };\n};\n"],"mappings":"AACA,SAAsBA,gBAAgB,QAAO,uBAAuB;AAapE,MAAMC,aAA2B,GAAG,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC;AAC5D,MAAMC,yBAAyB,GAAGC,MAAM,CAACC,IAAI,CAAC,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;AACvE,MAAMC,kBAAkB,GAAGF,MAAM,CAACC,IAAI,CAAC,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;AAGhE,MAAME,wBAAwB,GAAG,EAAE;AACnC,MAAMC,sBAAsB,GAAG,GAAG;AAClC,MAAMC,8BAA8B,GAAG,EAAE;AACzC,MAAMC,8BAA8B,GAAG,GAAG;AAC1C,MAAMC,4BAA4B,GAAG,GAAG;AAOxC,OAAO,MAAMC,eAAe,GAAG,MAAOC,YAA0B,IAA6B;EAC3F,MAAMC,aAAa,GAAG,MAAMb,gBAAgB,CAACY,YAAY,EAAEX,aAAa,CAAC;EAEzE,IAAIa,eAAe,GAAGC,MAAM,CAC1B,MAAMH,YAAY,CAACI,SAAS,CAACH,aAAa,GAAGP,wBAAwB,CACvE,CAAC;EACD,IAAIW,aAAa,GAAGF,MAAM,CAAC,MAAMH,YAAY,CAACM,SAAS,CAACL,aAAa,GAAGN,sBAAsB,CAAC,CAAC;EAEhG,IAAIU,aAAa,KAAKF,MAAM,CAAC,UAAU,CAAC,IAAID,eAAe,KAAKC,MAAM,CAAC,UAAU,CAAC,EAAE;IAClF,MAAMI,sBAAsB,GAAGN,aAAa,GAAG,GAAG;IAElD,IACEV,MAAM,CAACC,IAAI,CACT,MAAMQ,YAAY,CAACQ,KAAK,CAACD,sBAAsB,EAAEA,sBAAsB,GAAG,EAAE,CAC9E,CAAC,CAACE,OAAO,CAACnB,yBAAyB,CAAC,KAAK,CAAC,EA
C1C;MACA,MAAM,IAAIoB,KAAK,CAAC,8BAA8B,CAAC;IACjD;IACA,MAAMC,eAAe,GAAG,MAAMX,YAAY,CAACY,YAAY,CACrDL,sBAAsB,GAAGX,8BAC3B,CAAC;IAED,IACEL,MAAM,CAACC,IAAI,CAAC,MAAMQ,YAAY,CAACQ,KAAK,CAACG,eAAe,EAAEA,eAAe,GAAG,EAAE,CAAC,CAAC,CAACF,OAAO,CAClFhB,kBACF,CAAC,KAAK,CAAC,EACP;MACA,MAAM,IAAIiB,KAAK,CAAC,sBAAsB,CAAC;IACzC;IAEAR,eAAe,GAAG,MAAMF,YAAY,CAACY,YAAY,CAC/CD,eAAe,GAAGd,8BACpB,CAAC;IACDQ,aAAa,GAAG,MAAML,YAAY,CAACY,YAAY,CAACD,eAAe,GAAGb,4BAA4B,CAAC;EACjG;EAEA,OAAO;IACLI,eAAe;IACfG;EACF,CAAC;AACH,CAAC"}
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
const COMPRESSION_METHOD_OFFSET = 8n;
|
|
2
|
+
const COMPRESSED_SIZE_OFFSET = 18n;
|
|
3
|
+
const UNCOMPRESSED_SIZE_OFFSET = 22n;
|
|
4
|
+
const FILE_NAME_LENGTH_OFFSET = 26n;
|
|
5
|
+
const EXTRA_FIELD_LENGTH_OFFSET = 28n;
|
|
6
|
+
const FILE_NAME_OFFSET = 30n;
|
|
7
|
+
export const signature = Buffer.from([0x50, 0x4b, 0x03, 0x04]);
|
|
8
|
+
export const parseZipLocalFileHeader = async (headerOffset, buffer) => {
|
|
9
|
+
if (Buffer.from(await buffer.slice(headerOffset, headerOffset + 4n)).compare(signature) !== 0) {
|
|
10
|
+
return null;
|
|
11
|
+
}
|
|
12
|
+
const fileNameLength = await buffer.getUint16(headerOffset + FILE_NAME_LENGTH_OFFSET);
|
|
13
|
+
const fileName = new TextDecoder().decode(await buffer.slice(headerOffset + FILE_NAME_OFFSET, headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength))).split('\\').join('/');
|
|
14
|
+
const extraFieldLength = await buffer.getUint16(headerOffset + EXTRA_FIELD_LENGTH_OFFSET);
|
|
15
|
+
let fileDataOffset = headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength);
|
|
16
|
+
const compressionMethod = await buffer.getUint16(headerOffset + COMPRESSION_METHOD_OFFSET);
|
|
17
|
+
let compressedSize = BigInt(await buffer.getUint32(headerOffset + COMPRESSED_SIZE_OFFSET));
|
|
18
|
+
let uncompressedSize = BigInt(await buffer.getUint32(headerOffset + UNCOMPRESSED_SIZE_OFFSET));
|
|
19
|
+
const extraOffset = headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength);
|
|
20
|
+
let offsetInZip64Data = 4n;
|
|
21
|
+
if (uncompressedSize === BigInt(0xffffffff)) {
|
|
22
|
+
uncompressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);
|
|
23
|
+
offsetInZip64Data += 8n;
|
|
24
|
+
}
|
|
25
|
+
if (compressedSize === BigInt(0xffffffff)) {
|
|
26
|
+
compressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);
|
|
27
|
+
offsetInZip64Data += 8n;
|
|
28
|
+
}
|
|
29
|
+
if (fileDataOffset === BigInt(0xffffffff)) {
|
|
30
|
+
fileDataOffset = await buffer.getBigUint64(extraOffset + offsetInZip64Data);
|
|
31
|
+
}
|
|
32
|
+
return {
|
|
33
|
+
fileNameLength,
|
|
34
|
+
fileName,
|
|
35
|
+
extraFieldLength,
|
|
36
|
+
fileDataOffset,
|
|
37
|
+
compressedSize,
|
|
38
|
+
compressionMethod
|
|
39
|
+
};
|
|
40
|
+
};
|
|
41
|
+
//# sourceMappingURL=local-file-header.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"local-file-header.js","names":["COMPRESSION_METHOD_OFFSET","COMPRESSED_SIZE_OFFSET","UNCOMPRESSED_SIZE_OFFSET","FILE_NAME_LENGTH_OFFSET","EXTRA_FIELD_LENGTH_OFFSET","FILE_NAME_OFFSET","signature","Buffer","from","parseZipLocalFileHeader","headerOffset","buffer","slice","compare","fileNameLength","getUint16","fileName","TextDecoder","decode","BigInt","split","join","extraFieldLength","fileDataOffset","compressionMethod","compressedSize","getUint32","uncompressedSize","extraOffset","offsetInZip64Data","getBigUint64"],"sources":["../../../src/parse-zip/local-file-header.ts"],"sourcesContent":["import {FileProvider} from '../file-provider/file-provider';\n\n/**\n * zip local file header info\n * according to https://en.wikipedia.org/wiki/ZIP_(file_format)\n */\nexport type ZipLocalFileHeader = {\n /** File name length */\n fileNameLength: number;\n /** File name */\n fileName: string;\n /** Extra field length */\n extraFieldLength: number;\n /** Offset of the file data */\n fileDataOffset: bigint;\n /** Compressed size */\n compressedSize: bigint;\n /** Compression method */\n compressionMethod: number;\n};\n\n// offsets accroding to https://en.wikipedia.org/wiki/ZIP_(file_format)\nconst COMPRESSION_METHOD_OFFSET = 8n;\nconst COMPRESSED_SIZE_OFFSET = 18n;\nconst UNCOMPRESSED_SIZE_OFFSET = 22n;\nconst FILE_NAME_LENGTH_OFFSET = 26n;\nconst EXTRA_FIELD_LENGTH_OFFSET = 28n;\nconst FILE_NAME_OFFSET = 30n;\n\nexport const signature = Buffer.from([0x50, 0x4b, 0x03, 0x04]);\n\n/**\n * Parses local file header of zip file\n * @param headerOffset - offset in the archive where header starts\n * @param buffer - buffer containing whole array\n * @returns Info from the header\n */\nexport const parseZipLocalFileHeader = async (\n headerOffset: bigint,\n buffer: FileProvider\n): Promise<ZipLocalFileHeader | null> => {\n if (Buffer.from(await buffer.slice(headerOffset, headerOffset + 4n)).compare(signature) !== 0) {\n return null;\n }\n\n const fileNameLength = 
await buffer.getUint16(headerOffset + FILE_NAME_LENGTH_OFFSET);\n\n const fileName = new TextDecoder()\n .decode(\n await buffer.slice(\n headerOffset + FILE_NAME_OFFSET,\n headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength)\n )\n )\n .split('\\\\')\n .join('/');\n const extraFieldLength = await buffer.getUint16(headerOffset + EXTRA_FIELD_LENGTH_OFFSET);\n\n let fileDataOffset = headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength);\n\n const compressionMethod = await buffer.getUint16(headerOffset + COMPRESSION_METHOD_OFFSET);\n\n let compressedSize = BigInt(await buffer.getUint32(headerOffset + COMPRESSED_SIZE_OFFSET)); // add zip 64 logic\n\n let uncompressedSize = BigInt(await buffer.getUint32(headerOffset + UNCOMPRESSED_SIZE_OFFSET)); // add zip 64 logic\n\n const extraOffset = headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength);\n\n let offsetInZip64Data = 4n;\n // looking for info that might be also be in zip64 extra field\n if (uncompressedSize === BigInt(0xffffffff)) {\n uncompressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);\n offsetInZip64Data += 8n;\n }\n if (compressedSize === BigInt(0xffffffff)) {\n compressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);\n offsetInZip64Data += 8n;\n }\n if (fileDataOffset === BigInt(0xffffffff)) {\n fileDataOffset = await buffer.getBigUint64(extraOffset + offsetInZip64Data); // setting it to the one from zip64\n }\n\n return {\n fileNameLength,\n fileName,\n extraFieldLength,\n fileDataOffset,\n compressedSize,\n compressionMethod\n 
};\n};\n"],"mappings":"AAsBA,MAAMA,yBAAyB,GAAG,EAAE;AACpC,MAAMC,sBAAsB,GAAG,GAAG;AAClC,MAAMC,wBAAwB,GAAG,GAAG;AACpC,MAAMC,uBAAuB,GAAG,GAAG;AACnC,MAAMC,yBAAyB,GAAG,GAAG;AACrC,MAAMC,gBAAgB,GAAG,GAAG;AAE5B,OAAO,MAAMC,SAAS,GAAGC,MAAM,CAACC,IAAI,CAAC,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;AAQ9D,OAAO,MAAMC,uBAAuB,GAAG,MAAAA,CACrCC,YAAoB,EACpBC,MAAoB,KACmB;EACvC,IAAIJ,MAAM,CAACC,IAAI,CAAC,MAAMG,MAAM,CAACC,KAAK,CAACF,YAAY,EAAEA,YAAY,GAAG,EAAE,CAAC,CAAC,CAACG,OAAO,CAACP,SAAS,CAAC,KAAK,CAAC,EAAE;IAC7F,OAAO,IAAI;EACb;EAEA,MAAMQ,cAAc,GAAG,MAAMH,MAAM,CAACI,SAAS,CAACL,YAAY,GAAGP,uBAAuB,CAAC;EAErF,MAAMa,QAAQ,GAAG,IAAIC,WAAW,CAAC,CAAC,CAC/BC,MAAM,CACL,MAAMP,MAAM,CAACC,KAAK,CAChBF,YAAY,GAAGL,gBAAgB,EAC/BK,YAAY,GAAGL,gBAAgB,GAAGc,MAAM,CAACL,cAAc,CACzD,CACF,CAAC,CACAM,KAAK,CAAC,IAAI,CAAC,CACXC,IAAI,CAAC,GAAG,CAAC;EACZ,MAAMC,gBAAgB,GAAG,MAAMX,MAAM,CAACI,SAAS,CAACL,YAAY,GAAGN,yBAAyB,CAAC;EAEzF,IAAImB,cAAc,GAAGb,YAAY,GAAGL,gBAAgB,GAAGc,MAAM,CAACL,cAAc,GAAGQ,gBAAgB,CAAC;EAEhG,MAAME,iBAAiB,GAAG,MAAMb,MAAM,CAACI,SAAS,CAACL,YAAY,GAAGV,yBAAyB,CAAC;EAE1F,IAAIyB,cAAc,GAAGN,MAAM,CAAC,MAAMR,MAAM,CAACe,SAAS,CAAChB,YAAY,GAAGT,sBAAsB,CAAC,CAAC;EAE1F,IAAI0B,gBAAgB,GAAGR,MAAM,CAAC,MAAMR,MAAM,CAACe,SAAS,CAAChB,YAAY,GAAGR,wBAAwB,CAAC,CAAC;EAE9F,MAAM0B,WAAW,GAAGlB,YAAY,GAAGL,gBAAgB,GAAGc,MAAM,CAACL,cAAc,CAAC;EAE5E,IAAIe,iBAAiB,GAAG,EAAE;EAE1B,IAAIF,gBAAgB,KAAKR,MAAM,CAAC,UAAU,CAAC,EAAE;IAC3CQ,gBAAgB,GAAG,MAAMhB,MAAM,CAACmB,YAAY,CAACF,WAAW,GAAGC,iBAAiB,CAAC;IAC7EA,iBAAiB,IAAI,EAAE;EACzB;EACA,IAAIJ,cAAc,KAAKN,MAAM,CAAC,UAAU,CAAC,EAAE;IACzCM,cAAc,GAAG,MAAMd,MAAM,CAACmB,YAAY,CAACF,WAAW,GAAGC,iBAAiB,CAAC;IAC3EA,iBAAiB,IAAI,EAAE;EACzB;EACA,IAAIN,cAAc,KAAKJ,MAAM,CAAC,UAAU,CAAC,EAAE;IACzCI,cAAc,GAAG,MAAMZ,MAAM,CAACmB,YAAY,CAACF,WAAW,GAAGC,iBAAiB,CAAC;EAC7E;EAEA,OAAO;IACLf,cAAc;IACdE,QAAQ;IACRM,gBAAgB;IAChBC,cAAc;IACdE,cAAc;IACdD;EACF,CAAC;AACH,CAAC"}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
export const searchFromTheEnd = async (file, target) => {
|
|
2
|
+
const searchWindow = [await file.getUint8(file.length - 1n), await file.getUint8(file.length - 2n), await file.getUint8(file.length - 3n), undefined];
|
|
3
|
+
let targetOffset = 0n;
|
|
4
|
+
for (let i = file.length - 4n; i > -1; i--) {
|
|
5
|
+
searchWindow[3] = searchWindow[2];
|
|
6
|
+
searchWindow[2] = searchWindow[1];
|
|
7
|
+
searchWindow[1] = searchWindow[0];
|
|
8
|
+
searchWindow[0] = await file.getUint8(i);
|
|
9
|
+
if (searchWindow.every((val, index) => val === target[index])) {
|
|
10
|
+
targetOffset = i;
|
|
11
|
+
break;
|
|
12
|
+
}
|
|
13
|
+
}
|
|
14
|
+
return targetOffset;
|
|
15
|
+
};
|
|
16
|
+
//# sourceMappingURL=search-from-the-end.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"search-from-the-end.js","names":["searchFromTheEnd","file","target","searchWindow","getUint8","length","undefined","targetOffset","i","every","val","index"],"sources":["../../../src/parse-zip/search-from-the-end.ts"],"sourcesContent":["import {FileProvider} from '../file-provider/file-provider';\n\n/** Description of zip signature type */\nexport type ZipSignature = [number, number, number, number];\n\n/**\n * looking for the last occurrence of the provided\n * @param file\n * @param target\n * @returns\n */\nexport const searchFromTheEnd = async (\n file: FileProvider,\n target: ZipSignature\n): Promise<bigint> => {\n const searchWindow = [\n await file.getUint8(file.length - 1n),\n await file.getUint8(file.length - 2n),\n await file.getUint8(file.length - 3n),\n undefined\n ];\n\n let targetOffset = 0n;\n\n // looking for the last record in the central directory\n for (let i = file.length - 4n; i > -1; i--) {\n searchWindow[3] = searchWindow[2];\n searchWindow[2] = searchWindow[1];\n searchWindow[1] = searchWindow[0];\n searchWindow[0] = await file.getUint8(i);\n if (searchWindow.every((val, index) => val === target[index])) {\n targetOffset = i;\n break;\n }\n }\n\n return 
targetOffset;\n};\n"],"mappings":"AAWA,OAAO,MAAMA,gBAAgB,GAAG,MAAAA,CAC9BC,IAAkB,EAClBC,MAAoB,KACA;EACpB,MAAMC,YAAY,GAAG,CACnB,MAAMF,IAAI,CAACG,QAAQ,CAACH,IAAI,CAACI,MAAM,GAAG,EAAE,CAAC,EACrC,MAAMJ,IAAI,CAACG,QAAQ,CAACH,IAAI,CAACI,MAAM,GAAG,EAAE,CAAC,EACrC,MAAMJ,IAAI,CAACG,QAAQ,CAACH,IAAI,CAACI,MAAM,GAAG,EAAE,CAAC,EACrCC,SAAS,CACV;EAED,IAAIC,YAAY,GAAG,EAAE;EAGrB,KAAK,IAAIC,CAAC,GAAGP,IAAI,CAACI,MAAM,GAAG,EAAE,EAAEG,CAAC,GAAG,CAAC,CAAC,EAAEA,CAAC,EAAE,EAAE;IAC1CL,YAAY,CAAC,CAAC,CAAC,GAAGA,YAAY,CAAC,CAAC,CAAC;IACjCA,YAAY,CAAC,CAAC,CAAC,GAAGA,YAAY,CAAC,CAAC,CAAC;IACjCA,YAAY,CAAC,CAAC,CAAC,GAAGA,YAAY,CAAC,CAAC,CAAC;IACjCA,YAAY,CAAC,CAAC,CAAC,GAAG,MAAMF,IAAI,CAACG,QAAQ,CAACI,CAAC,CAAC;IACxC,IAAIL,YAAY,CAACM,KAAK,CAAC,CAACC,GAAG,EAAEC,KAAK,KAAKD,GAAG,KAAKR,MAAM,CAACS,KAAK,CAAC,CAAC,EAAE;MAC7DJ,YAAY,GAAGC,CAAC;MAChB;IACF;EACF;EAEA,OAAOD,YAAY;AACrB,CAAC"}
|
package/dist/esm/zip-loader.js
CHANGED
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
import { FileProvider } from './file-provider';
|
|
2
|
+
/** Provides file data using DataView */
|
|
3
|
+
export declare class DataViewFile implements FileProvider {
|
|
4
|
+
/** The DataView from which data is provided */
|
|
5
|
+
private file;
|
|
6
|
+
constructor(file: DataView);
|
|
7
|
+
destroy(): Promise<void>;
|
|
8
|
+
/**
|
|
9
|
+
* Gets an unsigned 8-bit integer at the specified byte offset from the start of the file.
|
|
10
|
+
* @param offset The offset, in bytes, from the start of the file where to read the data.
|
|
11
|
+
*/
|
|
12
|
+
getUint8(offset: bigint): Promise<number>;
|
|
13
|
+
/**
|
|
14
|
+
* Gets an unsigned 16-bit intege at the specified byte offset from the start of the file.
|
|
15
|
+
* @param offset The offset, in bytes, from the start of the file where to read the data.
|
|
16
|
+
*/
|
|
17
|
+
getUint16(offset: bigint): Promise<number>;
|
|
18
|
+
/**
|
|
19
|
+
* Gets an unsigned 32-bit integer at the specified byte offset from the start of the file.
|
|
20
|
+
* @param offset The offset, in bytes, from the start of the file where to read the data.
|
|
21
|
+
*/
|
|
22
|
+
getUint32(offset: bigint): Promise<number>;
|
|
23
|
+
/**
|
|
24
|
+
* Gets an unsigned 64-bit integer at the specified byte offset from the start of the file.
|
|
25
|
+
* @param offset The offset, in bytes, from the start of the file where to read the data.
|
|
26
|
+
*/
|
|
27
|
+
getBigUint64(offset: bigint): Promise<bigint>;
|
|
28
|
+
/**
|
|
29
|
+
* returns an ArrayBuffer whose contents are a copy of this file bytes from startOffset, inclusive, up to endOffset, exclusive.
|
|
30
|
+
* @param startOffset The offset, in bytes, from the start of the file where to start reading the data.
|
|
31
|
+
* @param endOffset The offset, in bytes, from the start of the file where to end reading the data.
|
|
32
|
+
*/
|
|
33
|
+
slice(startOffset: bigint, endOffset: bigint): Promise<ArrayBuffer>;
|
|
34
|
+
/** the length (in bytes) of the data. */
|
|
35
|
+
get length(): bigint;
|
|
36
|
+
}
|
|
37
|
+
//# sourceMappingURL=data-view-file.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"data-view-file.d.ts","sourceRoot":"","sources":["../../src/file-provider/data-view-file.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,YAAY,EAAC,MAAM,iBAAiB,CAAC;AAc7C,wCAAwC;AACxC,qBAAa,YAAa,YAAW,YAAY;IAC/C,+CAA+C;IAC/C,OAAO,CAAC,IAAI,CAAW;gBAEX,IAAI,EAAE,QAAQ;IAKpB,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IAE9B;;;OAGG;IACG,QAAQ,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;IAI/C;;;OAGG;IACG,SAAS,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;IAIhD;;;OAGG;IACG,SAAS,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;IAIhD;;;OAGG;IACG,YAAY,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;IAInD;;;;OAIG;IACG,KAAK,CAAC,WAAW,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,WAAW,CAAC;IAIzE,yCAAyC;IACzC,IAAI,MAAM,WAET;CACF"}
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.DataViewFile = void 0;
|
|
4
|
+
/**
 * Checks that a bigint offset can be represented as a safe JS number and converts it.
 * Rejects values above Number.MAX_SAFE_INTEGER (precision would be lost) and
 * negative values (an offset from the start of a file can never be negative).
 * @param bigint bigint to be converted
 * @returns the equivalent number
 * @throws {Error} when the value is outside the valid offset range
 */
const toNumber = (bigint) => {
    // Mixed bigint/number relational comparison is well-defined in JS.
    if (bigint < 0 || bigint > Number.MAX_SAFE_INTEGER) {
        throw new Error('Offset is out of bounds');
    }
    return Number(bigint);
};
|
|
15
|
+
/** FileProvider implementation backed by an in-memory DataView. */
class DataViewFile {
    /** @param file DataView wrapping the file's bytes */
    constructor(file) {
        this.file = file;
    }
    /** Nothing to release for an in-memory view. */
    // eslint-disable-next-line @typescript-eslint/no-empty-function
    async destroy() { }
    /**
     * Reads an unsigned 8-bit integer at the given byte offset from the start of the file.
     * @param offset Byte offset from the start of the file.
     */
    async getUint8(offset) {
        const pos = toNumber(offset);
        return this.file.getUint8(pos);
    }
    /**
     * Reads an unsigned 16-bit integer (little-endian) at the given byte offset.
     * @param offset Byte offset from the start of the file.
     */
    async getUint16(offset) {
        const pos = toNumber(offset);
        return this.file.getUint16(pos, true);
    }
    /**
     * Reads an unsigned 32-bit integer (little-endian) at the given byte offset.
     * @param offset Byte offset from the start of the file.
     */
    async getUint32(offset) {
        const pos = toNumber(offset);
        return this.file.getUint32(pos, true);
    }
    /**
     * Reads an unsigned 64-bit integer (little-endian) at the given byte offset.
     * @param offset Byte offset from the start of the file.
     */
    async getBigUint64(offset) {
        const pos = toNumber(offset);
        return this.file.getBigUint64(pos, true);
    }
    /**
     * Copies the bytes in [startOffset, endOffset) into a new ArrayBuffer.
     * NOTE(review): offsets are applied to the underlying buffer directly,
     * so this assumes the DataView starts at byteOffset 0 — confirm with callers.
     * @param startOffset Inclusive start byte offset.
     * @param endOffset Exclusive end byte offset.
     */
    async slice(startOffset, endOffset) {
        const begin = toNumber(startOffset);
        const end = toNumber(endOffset);
        return this.file.buffer.slice(begin, end);
    }
    /** The length (in bytes) of the data. */
    get length() {
        return BigInt(this.file.byteLength);
    }
}
exports.DataViewFile = DataViewFile;
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
import { FileProvider } from '@loaders.gl/zip';
|
|
2
|
+
/**
|
|
3
|
+
* Provides file data using node fs library
|
|
4
|
+
*/
|
|
5
|
+
export declare class FileHandleFile implements FileProvider {
|
|
6
|
+
/**
|
|
7
|
+
* Returns a new copy of FileHandleFile
|
|
8
|
+
* @param path The path to the file in file system
|
|
9
|
+
*/
|
|
10
|
+
static from(path: string): Promise<FileHandleFile>;
|
|
11
|
+
/**
|
|
12
|
+
* The FileHandle from which data is provided
|
|
13
|
+
*/
|
|
14
|
+
private fileDescriptor;
|
|
15
|
+
/**
|
|
16
|
+
* The file length in bytes
|
|
17
|
+
*/
|
|
18
|
+
private size;
|
|
19
|
+
private constructor();
|
|
20
|
+
/** Close file */
|
|
21
|
+
destroy(): Promise<void>;
|
|
22
|
+
/**
|
|
23
|
+
* Gets an unsigned 8-bit integer at the specified byte offset from the start of the file.
|
|
24
|
+
* @param offset The offset, in bytes, from the start of the file where to read the data.
|
|
25
|
+
*/
|
|
26
|
+
getUint8(offset: bigint): Promise<number>;
|
|
27
|
+
/**
|
|
28
|
+
* Gets an unsigned 16-bit integer at the specified byte offset from the start of the file.
|
|
29
|
+
* @param offset The offset, in bytes, from the start of the file where to read the data.
|
|
30
|
+
*/
|
|
31
|
+
getUint16(offset: bigint): Promise<number>;
|
|
32
|
+
/**
|
|
33
|
+
* Gets an unsigned 32-bit integer at the specified byte offset from the start of the file.
|
|
34
|
+
* @param offset The offset, in bytes, from the start of the file where to read the data.
|
|
35
|
+
*/
|
|
36
|
+
getUint32(offset: bigint): Promise<number>;
|
|
37
|
+
/**
|
|
38
|
+
* Gets an unsigned 32-bit integer at the specified byte offset from the start of the file.
|
|
39
|
+
* @param offset The offset, in bytes, from the start of the file where to read the data.
|
|
40
|
+
*/
|
|
41
|
+
getBigUint64(offset: bigint): Promise<bigint>;
|
|
42
|
+
/**
|
|
43
|
+
* returns an ArrayBuffer whose contents are a copy of this file bytes from startOffset, inclusive, up to endOffset, exclusive.
|
|
44
|
+
* @param startOffsset The offset, in byte, from the start of the file where to start reading the data.
|
|
45
|
+
* @param endOffset The offset, in bytes, from the start of the file where to end reading the data.
|
|
46
|
+
*/
|
|
47
|
+
slice(startOffsset: bigint, endOffset: bigint): Promise<ArrayBuffer>;
|
|
48
|
+
/**
|
|
49
|
+
* the length (in bytes) of the data.
|
|
50
|
+
*/
|
|
51
|
+
get length(): bigint;
|
|
52
|
+
}
|
|
53
|
+
//# sourceMappingURL=file-handle-file.d.ts.map
|