@js-ak/excel-toolbox 1.2.5 → 1.2.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/build/cjs/lib/merge-sheets-to-base-file-process.js +5 -5
- package/build/cjs/lib/utils/remove-sheet-by-name.js +9 -7
- package/build/cjs/lib/xml/build-merged-sheet.js +2 -2
- package/build/cjs/lib/xml/extract-rows-from-sheet.js +1 -0
- package/build/cjs/lib/zip/create-sync.js +32 -32
- package/build/cjs/lib/zip/create.js +32 -32
- package/build/cjs/lib/zip/read-sync.js +72 -24
- package/build/cjs/lib/zip/read.js +68 -23
- package/build/cjs/lib/zip/utils/crc-32.js +76 -0
- package/build/cjs/lib/zip/utils/dos-time.js +50 -0
- package/build/cjs/lib/zip/utils/find-data-descriptor.js +29 -0
- package/build/cjs/lib/zip/utils/index.js +20 -0
- package/build/cjs/lib/zip/utils/to-bytes.js +37 -0
- package/build/esm/lib/merge-sheets-to-base-file-process.js +5 -5
- package/build/esm/lib/utils/remove-sheet-by-name.js +9 -7
- package/build/esm/lib/xml/build-merged-sheet.js +2 -2
- package/build/esm/lib/xml/extract-rows-from-sheet.js +1 -0
- package/build/esm/lib/zip/create-sync.js +1 -1
- package/build/esm/lib/zip/create.js +1 -1
- package/build/esm/lib/zip/read-sync.js +36 -24
- package/build/esm/lib/zip/read.js +35 -23
- package/build/esm/lib/zip/utils/crc-32.js +73 -0
- package/build/esm/lib/zip/utils/dos-time.js +47 -0
- package/build/esm/lib/zip/utils/find-data-descriptor.js +26 -0
- package/build/esm/lib/zip/utils/index.js +4 -0
- package/build/esm/lib/zip/utils/to-bytes.js +34 -0
- package/build/types/lib/merge-sheets-to-base-file-process.d.ts +2 -2
- package/build/types/lib/utils/remove-sheet-by-name.d.ts +1 -1
- package/build/types/lib/xml/build-merged-sheet.d.ts +2 -2
- package/build/types/lib/xml/extract-rows-from-sheet.d.ts +1 -0
- package/build/types/lib/zip/read-sync.d.ts +2 -2
- package/build/types/lib/zip/read.d.ts +2 -2
- package/build/types/lib/zip/utils/crc-32.d.ts +15 -0
- package/build/types/lib/zip/utils/dos-time.d.ts +25 -0
- package/build/types/lib/zip/utils/find-data-descriptor.d.ts +15 -0
- package/build/types/lib/zip/utils/index.d.ts +4 -0
- package/build/types/lib/zip/utils/to-bytes.d.ts +20 -0
- package/package.json +1 -1
- package/build/cjs/lib/zip/utils.js +0 -157
- package/build/esm/lib/zip/utils.js +0 -152
- package/build/types/lib/zip/utils.d.ts +0 -58
package/README.md
CHANGED
@@ -39,11 +39,11 @@ fs.writeFileSync("output.xlsx", resultBuffer);
 - 🧩 **Merge sheets** from multiple Excel files
 - 🧼 **Clean sheet removal** – by name or index
 - **Keeps styles and merged cells**
-
+- **Lightweight ZIP and XML handling**
 
 ## API
 
-### `mergeSheetsToBaseFileSync(options)`
+### `mergeSheetsToBaseFileSync(options): Buffer`
 
 #### Parameters
 
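Since the README now pins down the return type, a minimal usage sketch follows; the option names in it are illustrative placeholders, not taken from the package documentation:

```js
const fs = require("node:fs");
const { mergeSheetsToBaseFileSync } = require("@js-ak/excel-toolbox");

// NOTE: the options shape here is assumed for illustration; see the
// package README for the real parameters.
const resultBuffer = mergeSheetsToBaseFileSync({
    baseFile: fs.readFileSync("base.xlsx"),
    sheets: [fs.readFileSync("extra.xlsx")],
});

fs.writeFileSync("output.xlsx", resultBuffer); // Buffer, per the new signature
```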
package/build/cjs/lib/merge-sheets-to-base-file-process.js
CHANGED
@@ -58,7 +58,7 @@ function mergeSheetsToBaseFileProcess(data) {
     if (!baseFiles[basePath]) {
         throw new Error(`Base file does not contain ${basePath}`);
     }
-    const { lastRowNumber, mergeCells: baseMergeCells, rows: baseRows, } = Xml.extractRowsFromSheet(baseFiles[basePath]);
+    const { lastRowNumber, mergeCells: baseMergeCells, rows: baseRows, xml, } = Xml.extractRowsFromSheet(baseFiles[basePath]);
     const allRows = [...baseRows];
     const allMergeCells = [...baseMergeCells];
     let currentRowOffset = lastRowNumber + gap;
@@ -84,19 +84,19 @@ function mergeSheetsToBaseFileProcess(data) {
             currentRowOffset += Utils.getMaxRowNumber(rows) + gap;
         }
     }
-    const mergedXml = Xml.buildMergedSheet(
+    const mergedXml = Xml.buildMergedSheet(xml, allRows, allMergeCells);
     baseFiles[basePath] = mergedXml;
     for (const sheetIndex of sheetsToRemove) {
         const sheetPath = `xl/worksheets/sheet${sheetIndex}.xml`;
         delete baseFiles[sheetPath];
         if (baseFiles["xl/workbook.xml"]) {
-            baseFiles["xl/workbook.xml"] = Utils.removeSheetFromWorkbook(baseFiles["xl/workbook.xml"], sheetIndex);
+            baseFiles["xl/workbook.xml"] = Buffer.from(Utils.removeSheetFromWorkbook(baseFiles["xl/workbook.xml"].toString(), sheetIndex));
         }
         if (baseFiles["xl/_rels/workbook.xml.rels"]) {
-            baseFiles["xl/_rels/workbook.xml.rels"] = Utils.removeSheetFromRels(baseFiles["xl/_rels/workbook.xml.rels"], sheetIndex);
+            baseFiles["xl/_rels/workbook.xml.rels"] = Buffer.from(Utils.removeSheetFromRels(baseFiles["xl/_rels/workbook.xml.rels"].toString(), sheetIndex));
         }
         if (baseFiles["[Content_Types].xml"]) {
-            baseFiles["[Content_Types].xml"] = Utils.removeSheetFromContentTypes(baseFiles["[Content_Types].xml"], sheetIndex);
+            baseFiles["[Content_Types].xml"] = Buffer.from(Utils.removeSheetFromContentTypes(baseFiles["[Content_Types].xml"].toString(), sheetIndex));
         }
     }
     for (const sheetName of sheetNamesToRemove) {
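The common thread in these hunks: the in-memory file map now carries Buffers end-to-end, so every string-based XML helper is bracketed by `.toString()` on the way in and `Buffer.from(...)` on the way out. A minimal sketch of that round-trip (the helper name and regex are illustrative, not from the package):

```js
// `files` maps ZIP entry paths to Buffers.
function patchXmlEntry(files, path, transform) {
    const xml = files[path]?.toString(); // Buffer -> UTF-8 string
    if (xml === undefined) return;
    files[path] = Buffer.from(transform(xml)); // string -> Buffer again
}

// e.g. strip a worksheet reference out of the workbook part:
// patchXmlEntry(files, "xl/workbook.xml", (xml) => xml.replace(/<sheet [^>]*\/>/, ""));
```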
package/build/cjs/lib/utils/remove-sheet-by-name.js
CHANGED
@@ -8,8 +8,8 @@ exports.removeSheetByName = removeSheetByName;
  * @returns {void}
  */
 function removeSheetByName(files, sheetName) {
-    const workbookXml = files["xl/workbook.xml"];
-    const relsXml = files["xl/_rels/workbook.xml.rels"];
+    const workbookXml = files["xl/workbook.xml"]?.toString();
+    const relsXml = files["xl/_rels/workbook.xml.rels"]?.toString();
     if (!workbookXml || !relsXml) {
         return;
     }
@@ -34,11 +34,13 @@ function removeSheetByName(files, sheetName) {
         return;
     }
     const targetPath = `xl/${targetMatch[1]}`.replace(/\\/g, "/");
-
-
-
-
+    if (targetPath) {
+        delete files[targetPath];
+    }
+    files["xl/workbook.xml"] = Buffer.from(workbookXml.replace(sheetTag, ""));
+    files["xl/_rels/workbook.xml.rels"] = Buffer.from(relsXml.replace(relTag, ""));
+    const contentTypes = files["[Content_Types].xml"]?.toString();
     if (contentTypes) {
-        files["[Content_Types].xml"] = contentTypes.replace(new RegExp(`<Override[^>]+PartName=["']/${targetPath}["'][^>]*/>`, "g"), "");
+        files["[Content_Types].xml"] = Buffer.from(contentTypes.replace(new RegExp(`<Override[^>]+PartName=["']/${targetPath}["'][^>]*/>`, "g"), ""));
    }
 }
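For reference, the `[Content_Types].xml` cleanup above drops the sheet's `<Override>` entry with a path-derived regex. A self-contained illustration with a hard-coded path:

```js
const targetPath = "xl/worksheets/sheet2.xml"; // hard-coded for the demo
const contentTypes =
    '<Types><Override PartName="/xl/worksheets/sheet2.xml" ContentType="application/xml"/></Types>';

const cleaned = contentTypes.replace(
    new RegExp(`<Override[^>]+PartName=["']/${targetPath}["'][^>]*/>`, "g"),
    "",
);
console.log(cleaned); // <Types></Types>
```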
package/build/cjs/lib/xml/build-merged-sheet.js
CHANGED
@@ -12,7 +12,7 @@ exports.buildMergedSheet = buildMergedSheet;
  * @param {string[]} mergedRows - Array of XML strings representing each row in the merged sheet.
  * @param {Object[]} [mergeCells] - Optional array of merge cell definitions.
  * Each object should have a 'ref' property specifying the merge range (e.g., "A1:B2").
- * @returns {
+ * @returns {Buffer} - The reconstructed XML string with merged content.
  */
 function buildMergedSheet(originalXml, mergedRows, mergeCells = []) {
     // Remove any existing <mergeCells> section from the XML
@@ -27,5 +27,5 @@ function buildMergedSheet(originalXml, mergedRows, mergeCells = []) {
         // Insert <mergeCells> after </sheetData> and before the next XML tag
         xmlData = xmlData.replace(/(<\/sheetData>)(\s*<)/, `$1\n${mergeCellsXml}\n$2`);
     }
-    return xmlData;
+    return Buffer.from(xmlData);
 }
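The splice that `buildMergedSheet` performs is unchanged; only the return type is new. A small demonstration of the same replace:

```js
let xmlData = "<worksheet><sheetData>...</sheetData><pageMargins/></worksheet>";
const mergeCellsXml = '<mergeCells count="1"><mergeCell ref="A1:B2"/></mergeCells>';

// Insert <mergeCells> right after </sheetData>, before the next tag.
xmlData = xmlData.replace(/(<\/sheetData>)(\s*<)/, `$1\n${mergeCellsXml}\n$2`);

const result = Buffer.from(xmlData); // 1.2.7 returns a Buffer, not the string
```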
package/build/cjs/lib/zip/create-sync.js
CHANGED
@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.createSync = createSync;
 const node_buffer_1 = require("node:buffer");
 const node_zlib_1 = require("node:zlib");
-const 
+const index_js_1 = require("./utils/index.js");
 const constants_js_1 = require("./constants.js");
 /**
  * Creates a ZIP archive from a collection of files.
@@ -21,23 +21,23 @@ function createSync(files) {
         }
         const content = node_buffer_1.Buffer.isBuffer(rawContent) ? rawContent : node_buffer_1.Buffer.from(rawContent);
         const fileNameBuf = node_buffer_1.Buffer.from(filename, "utf8");
-        const modTime = (0,
-        const crc = (0,
+        const modTime = (0, index_js_1.dosTime)(new Date());
+        const crc = (0, index_js_1.crc32)(content);
         const compressed = (0, node_zlib_1.deflateRawSync)(content);
         const compSize = compressed.length;
         const uncompSize = content.length;
         // Local file header
         const localHeader = node_buffer_1.Buffer.concat([
             constants_js_1.LOCAL_FILE_HEADER_SIG,
-            (0,
-            (0,
-            (0,
+            (0, index_js_1.toBytes)(20, 2),
+            (0, index_js_1.toBytes)(0, 2),
+            (0, index_js_1.toBytes)(8, 2),
             modTime,
-            (0,
-            (0,
-            (0,
-            (0,
-            (0,
+            (0, index_js_1.toBytes)(crc, 4),
+            (0, index_js_1.toBytes)(compSize, 4),
+            (0, index_js_1.toBytes)(uncompSize, 4),
+            (0, index_js_1.toBytes)(fileNameBuf.length, 2),
+            (0, index_js_1.toBytes)(0, 2),
         ]);
         const localEntry = node_buffer_1.Buffer.concat([
             localHeader,
@@ -47,21 +47,21 @@ function createSync(files) {
         fileEntries.push(localEntry);
         const centralEntry = node_buffer_1.Buffer.concat([
             node_buffer_1.Buffer.from(constants_js_1.CENTRAL_DIR_HEADER_SIG),
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(20, 2)), // Version made by
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(20, 2)), // Version needed
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(0, 2)), // Flags
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(8, 2)), // Compression
             node_buffer_1.Buffer.from(modTime),
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(crc, 4)),
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(compSize, 4)),
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(uncompSize, 4)),
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(fileNameBuf.length, 2)),
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(0, 2)), // Extra field length
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(0, 2)), // Comment length
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(0, 2)), // Disk start
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(0, 2)), // Internal attrs
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(0, 4)), // External attrs
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(offset, 4)),
             fileNameBuf,
         ]);
         centralDirectory.push(centralEntry);
@@ -71,13 +71,13 @@ function createSync(files) {
     const centralDirOffset = offset;
     const endRecord = node_buffer_1.Buffer.concat([
         node_buffer_1.Buffer.from(constants_js_1.END_OF_CENTRAL_DIR_SIG),
-        node_buffer_1.Buffer.from((0,
-        node_buffer_1.Buffer.from((0,
-        node_buffer_1.Buffer.from((0,
-        node_buffer_1.Buffer.from((0,
-        node_buffer_1.Buffer.from((0,
-        node_buffer_1.Buffer.from((0,
-        node_buffer_1.Buffer.from((0,
+        node_buffer_1.Buffer.from((0, index_js_1.toBytes)(0, 2)), // Disk #
+        node_buffer_1.Buffer.from((0, index_js_1.toBytes)(0, 2)), // Start disk #
+        node_buffer_1.Buffer.from((0, index_js_1.toBytes)(centralDirectory.length, 2)),
+        node_buffer_1.Buffer.from((0, index_js_1.toBytes)(centralDirectory.length, 2)),
+        node_buffer_1.Buffer.from((0, index_js_1.toBytes)(centralDirSize, 4)),
+        node_buffer_1.Buffer.from((0, index_js_1.toBytes)(centralDirOffset, 4)),
+        node_buffer_1.Buffer.from((0, index_js_1.toBytes)(0, 2)), // Comment length
     ]);
     return node_buffer_1.Buffer.concat(fileEntries.concat(centralDirectory).concat([endRecord]));
 }
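The header layout above leans on two small helpers whose sources are not shown in this diff (`utils/to-bytes.js` and `utils/dos-time.js`). Plausible shapes, offered as assumptions rather than the package's actual code:

```js
// ASSUMED: little-endian fixed-width integer encoding, as the ZIP format requires.
function toBytes(value, byteLength) {
    const buf = Buffer.alloc(byteLength);
    buf.writeUIntLE(value >>> 0, 0, byteLength);
    return buf;
}

// ASSUMED: MS-DOS date/time packing -- 2 bytes of time (2-second resolution)
// followed by 2 bytes of date (years counted from 1980).
function dosTime(date) {
    const time =
        (date.getHours() << 11) | (date.getMinutes() << 5) | (date.getSeconds() >> 1);
    const day =
        ((date.getFullYear() - 1980) << 9) | ((date.getMonth() + 1) << 5) | date.getDate();
    return Buffer.concat([toBytes(time, 2), toBytes(day, 2)]);
}
```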
package/build/cjs/lib/zip/create.js
CHANGED
@@ -8,7 +8,7 @@ const node_buffer_1 = require("node:buffer");
 const node_util_1 = __importDefault(require("node:util"));
 const node_zlib_1 = __importDefault(require("node:zlib"));
 const deflateRaw = node_util_1.default.promisify(node_zlib_1.default.deflateRaw);
-const 
+const index_js_1 = require("./utils/index.js");
 const constants_js_1 = require("./constants.js");
 /**
  * Creates a ZIP archive from a collection of files.
@@ -26,23 +26,23 @@ async function create(files) {
         }
         const content = node_buffer_1.Buffer.isBuffer(rawContent) ? rawContent : node_buffer_1.Buffer.from(rawContent);
         const fileNameBuf = node_buffer_1.Buffer.from(filename, "utf8");
-        const modTime = (0,
-        const crc = (0,
+        const modTime = (0, index_js_1.dosTime)(new Date());
+        const crc = (0, index_js_1.crc32)(content);
         const compressed = await deflateRaw(content);
         const compSize = compressed.length;
         const uncompSize = content.length;
         // Local file header
         const localHeader = node_buffer_1.Buffer.concat([
             constants_js_1.LOCAL_FILE_HEADER_SIG,
-            (0,
-            (0,
-            (0,
+            (0, index_js_1.toBytes)(20, 2),
+            (0, index_js_1.toBytes)(0, 2),
+            (0, index_js_1.toBytes)(8, 2),
             modTime,
-            (0,
-            (0,
-            (0,
-            (0,
-            (0,
+            (0, index_js_1.toBytes)(crc, 4),
+            (0, index_js_1.toBytes)(compSize, 4),
+            (0, index_js_1.toBytes)(uncompSize, 4),
+            (0, index_js_1.toBytes)(fileNameBuf.length, 2),
+            (0, index_js_1.toBytes)(0, 2),
         ]);
         const localEntry = node_buffer_1.Buffer.concat([
             localHeader,
@@ -52,21 +52,21 @@ async function create(files) {
         fileEntries.push(localEntry);
         const centralEntry = node_buffer_1.Buffer.concat([
             node_buffer_1.Buffer.from(constants_js_1.CENTRAL_DIR_HEADER_SIG),
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(20, 2)), // Version made by
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(20, 2)), // Version needed
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(0, 2)), // Flags
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(8, 2)), // Compression
             node_buffer_1.Buffer.from(modTime),
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
-            node_buffer_1.Buffer.from((0,
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(crc, 4)),
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(compSize, 4)),
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(uncompSize, 4)),
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(fileNameBuf.length, 2)),
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(0, 2)), // Extra field length
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(0, 2)), // Comment length
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(0, 2)), // Disk start
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(0, 2)), // Internal attrs
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(0, 4)), // External attrs
+            node_buffer_1.Buffer.from((0, index_js_1.toBytes)(offset, 4)),
             fileNameBuf,
         ]);
         centralDirectory.push(centralEntry);
@@ -76,13 +76,13 @@ async function create(files) {
     const centralDirOffset = offset;
     const endRecord = node_buffer_1.Buffer.concat([
         node_buffer_1.Buffer.from(constants_js_1.END_OF_CENTRAL_DIR_SIG),
-        node_buffer_1.Buffer.from((0,
-        node_buffer_1.Buffer.from((0,
-        node_buffer_1.Buffer.from((0,
-        node_buffer_1.Buffer.from((0,
-        node_buffer_1.Buffer.from((0,
-        node_buffer_1.Buffer.from((0,
-        node_buffer_1.Buffer.from((0,
+        node_buffer_1.Buffer.from((0, index_js_1.toBytes)(0, 2)), // Disk #
+        node_buffer_1.Buffer.from((0, index_js_1.toBytes)(0, 2)), // Start disk #
+        node_buffer_1.Buffer.from((0, index_js_1.toBytes)(centralDirectory.length, 2)),
+        node_buffer_1.Buffer.from((0, index_js_1.toBytes)(centralDirectory.length, 2)),
+        node_buffer_1.Buffer.from((0, index_js_1.toBytes)(centralDirSize, 4)),
+        node_buffer_1.Buffer.from((0, index_js_1.toBytes)(centralDirOffset, 4)),
+        node_buffer_1.Buffer.from((0, index_js_1.toBytes)(0, 2)), // Comment length
     ]);
     return node_buffer_1.Buffer.concat(fileEntries.concat(centralDirectory).concat([endRecord]));
 }
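The async variant builds byte-for-byte the same archive; it only awaits the promisified `deflateRaw` instead of calling `deflateRawSync`. A usage sketch (the deep require paths mirror the build layout shown in this diff and may not be exposed by the package's export map):

```js
const { createSync } = require("@js-ak/excel-toolbox/build/cjs/lib/zip/create-sync.js");
const { create } = require("@js-ak/excel-toolbox/build/cjs/lib/zip/create.js");

const files = { "hello.txt": "hi", "data.bin": Buffer.from([1, 2, 3]) };

const zipA = createSync(files); // blocks the event loop while deflating

async function main() {
    const zipB = await create(files); // deflation runs on the zlib threadpool
    console.log(zipA.length === zipB.length); // same layout, same size
}
main();
```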
package/build/cjs/lib/zip/read-sync.js
CHANGED
@@ -1,48 +1,97 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+        desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.readSync = readSync;
-const node_zlib_1 = require("node:zlib");
+const node_zlib_1 = __importDefault(require("node:zlib"));
+const Utils = __importStar(require("./utils/index.js"));
 /**
  * Parses a ZIP archive from a buffer and extracts the files within.
  *
  * @param {Buffer} buffer - The buffer containing the ZIP archive data.
- * @returns {Object.<string,
+ * @returns {Object.<string, Buffer>} - An object where keys are file names and values are file contents as Buffers.
  * @throws {Error} - Throws an error if an unsupported compression method is encountered or if decompression fails.
  */
 function readSync(buffer) {
     const files = {};
     let offset = 0;
-    while (offset +
+    while (offset + 30 <= buffer.length) {
         const signature = buffer.readUInt32LE(offset);
         if (signature !== 0x04034b50)
-            break;
+            break; // not a local file header
+        const generalPurposeBitFlag = buffer.readUInt16LE(offset + 6);
         const compressionMethod = buffer.readUInt16LE(offset + 8);
         const fileNameLength = buffer.readUInt16LE(offset + 26);
-        const 
+        const extraFieldLength = buffer.readUInt16LE(offset + 28);
         const fileNameStart = offset + 30;
         const fileNameEnd = fileNameStart + fileNameLength;
         const fileName = buffer.subarray(fileNameStart, fileNameEnd).toString();
-        const dataStart = fileNameEnd +
-
-
-
-            break;
-            nextOffset++;
-        }
-        if (nextOffset + 4 > buffer.length) {
-            nextOffset = buffer.length;
-        }
-        const compressedData = buffer.subarray(dataStart, nextOffset);
-        let content = "";
+        const dataStart = fileNameEnd + extraFieldLength;
+        const useDataDescriptor = (generalPurposeBitFlag & 0x08) !== 0;
+        let compressedData;
+        let content;
         try {
-            if (
-
-
-
-
+            if (useDataDescriptor) {
+                const { compressedSize, offset: ddOffset } = Utils.findDataDescriptor(buffer, dataStart);
+                compressedData = buffer.subarray(dataStart, dataStart + compressedSize);
+                if (compressionMethod === 0) {
+                    content = compressedData;
+                }
+                else if (compressionMethod === 8) {
+                    content = node_zlib_1.default.inflateRawSync(compressedData);
+                }
+                else {
+                    throw new Error(`Unsupported compression method ${compressionMethod}`);
+                }
+                offset = ddOffset + 16; // Skip over data descriptor
             }
             else {
-
+                const compressedSize = buffer.readUInt32LE(offset + 18);
+                compressedData = buffer.subarray(dataStart, dataStart + compressedSize);
+                if (compressionMethod === 0) {
+                    content = compressedData;
+                }
+                else if (compressionMethod === 8) {
+                    content = node_zlib_1.default.inflateRawSync(compressedData);
+                }
+                else {
+                    throw new Error(`Unsupported compression method ${compressionMethod}`);
+                }
+                offset = dataStart + compressedSize;
+            }
+        }
         catch (error) {
@@ -50,7 +99,6 @@ function readSync(buffer) {
             throw new Error(`Error unpacking file ${fileName}: ${message}`);
         }
         files[fileName] = content;
-        offset = nextOffset;
     }
     return files;
 }
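The streamed-entry branch depends on `Utils.findDataDescriptor`, whose source (`utils/find-data-descriptor.js`) is not part of this excerpt. A hypothetical sketch of what it presumably does, given how `readSync` consumes it (`offset = ddOffset + 16` implies a 16-byte descriptor including the signature):

```js
// ASSUMED layout: signature (4) | crc-32 (4) | compressed size (4) | uncompressed size (4).
function findDataDescriptor(buffer, start) {
    for (let i = start; i + 16 <= buffer.length; i++) {
        if (buffer.readUInt32LE(i) === 0x08074b50) { // PK\x07\x08
            return {
                offset: i, // where the descriptor's signature begins
                compressedSize: buffer.readUInt32LE(i + 8),
            };
        }
    }
    throw new Error("Data descriptor not found");
}
```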
package/build/cjs/lib/zip/read.js
CHANGED
@@ -1,4 +1,37 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+        desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
 var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
@@ -6,48 +39,61 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.read = read;
 const node_util_1 = __importDefault(require("node:util"));
 const node_zlib_1 = __importDefault(require("node:zlib"));
+const Utils = __importStar(require("./utils/index.js"));
 const inflateRaw = node_util_1.default.promisify(node_zlib_1.default.inflateRaw);
 /**
  * Parses a ZIP archive from a buffer and extracts the files within.
  *
  * @param {Buffer} buffer - The buffer containing the ZIP archive data.
- * @returns {Object.<string,
+ * @returns {Object.<string, Buffer>} - An object where keys are file names and values are file contents as Buffers.
  * @throws {Error} - Throws an error if an unsupported compression method is encountered or if decompression fails.
 */
 async function read(buffer) {
     const files = {};
     let offset = 0;
-    while (offset +
+    while (offset + 30 <= buffer.length) {
         const signature = buffer.readUInt32LE(offset);
         if (signature !== 0x04034b50)
-            break;
+            break; // not a local file header
+        const generalPurposeBitFlag = buffer.readUInt16LE(offset + 6);
         const compressionMethod = buffer.readUInt16LE(offset + 8);
         const fileNameLength = buffer.readUInt16LE(offset + 26);
-        const 
+        const extraFieldLength = buffer.readUInt16LE(offset + 28);
         const fileNameStart = offset + 30;
         const fileNameEnd = fileNameStart + fileNameLength;
         const fileName = buffer.subarray(fileNameStart, fileNameEnd).toString();
-        const dataStart = fileNameEnd +
-
-
-
-            break;
-            nextOffset++;
-        }
-        if (nextOffset + 4 > buffer.length) {
-            nextOffset = buffer.length;
-        }
-        const compressedData = buffer.subarray(dataStart, nextOffset);
-        let content = "";
+        const dataStart = fileNameEnd + extraFieldLength;
+        const useDataDescriptor = (generalPurposeBitFlag & 0x08) !== 0;
+        let compressedData;
+        let content;
         try {
-            if (
-
-
-
-
+            if (useDataDescriptor) {
+                const { compressedSize, offset: ddOffset } = Utils.findDataDescriptor(buffer, dataStart);
+                compressedData = buffer.subarray(dataStart, dataStart + compressedSize);
+                if (compressionMethod === 0) {
+                    content = compressedData;
+                }
+                else if (compressionMethod === 8) {
+                    content = await inflateRaw(compressedData);
+                }
+                else {
+                    throw new Error(`Unsupported compression method ${compressionMethod}`);
+                }
+                offset = ddOffset + 16; // Skip over data descriptor
             }
             else {
-
+                const compressedSize = buffer.readUInt32LE(offset + 18);
+                compressedData = buffer.subarray(dataStart, dataStart + compressedSize);
+                if (compressionMethod === 0) {
+                    content = compressedData;
+                }
+                else if (compressionMethod === 8) {
+                    content = await inflateRaw(compressedData);
+                }
+                else {
+                    throw new Error(`Unsupported compression method ${compressionMethod}`);
+                }
+                offset = dataStart + compressedSize;
+            }
+        }
         catch (error) {
@@ -55,7 +101,6 @@ async function read(buffer) {
             throw new Error(`Error unpacking file ${fileName}: ${message}`);
         }
         files[fileName] = content;
-        offset = nextOffset;
     }
     return files;
 }
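A quick round-trip of the async pair from this release (same caveat as above about requiring build internals directly):

```js
async function roundTrip() {
    const { create } = require("@js-ak/excel-toolbox/build/cjs/lib/zip/create.js");
    const { read } = require("@js-ak/excel-toolbox/build/cjs/lib/zip/read.js");

    const zipBuf = await create({ "a.txt": "alpha" });
    const files = await read(zipBuf);
    console.log(files["a.txt"].toString()); // "alpha" -- entries are Buffers now
}
roundTrip();
```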
package/build/cjs/lib/zip/utils/crc-32.js
ADDED
@@ -0,0 +1,76 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.crc32 = crc32;
+/**
+ * Precomputed CRC-32 lookup table for optimized checksum calculation.
+ * The table is generated using the standard IEEE 802.3 (Ethernet) polynomial:
+ * 0xEDB88320 (reversed representation of 0x04C11DB7).
+ *
+ * The table is immediately invoked and cached as a constant for performance,
+ * following the common implementation pattern for CRC algorithms.
+ */
+const crcTable = (() => {
+    // Create a typed array for better performance with 256 32-bit unsigned integers
+    const table = new Uint32Array(256);
+    // Generate table entries for all possible byte values (0-255)
+    for (let i = 0; i < 256; i++) {
+        let crc = i; // Initialize with current byte value
+        // Process each bit (8 times)
+        for (let j = 0; j < 8; j++) {
+            /*
+             * CRC division algorithm:
+             * 1. If LSB is set (crc & 1), XOR with polynomial
+             * 2. Right-shift by 1 (unsigned)
+             *
+             * The polynomial 0xEDB88320 is:
+             * - Bit-reversed version of 0x04C11DB7
+             * - Uses reflected input/output algorithm
+             */
+            crc = crc & 1
+                ? 0xedb88320 ^ (crc >>> 1) // XOR with polynomial if LSB is set
+                : crc >>> 1; // Just shift right if LSB is not set
+        }
+        // Store final 32-bit value (>>> 0 ensures unsigned 32-bit representation)
+        table[i] = crc >>> 0;
+    }
+    return table;
+})();
+/**
+ * Computes a CRC-32 checksum for the given Buffer using the standard IEEE 802.3 polynomial.
+ * This implementation uses a precomputed lookup table for optimal performance.
+ *
+ * The algorithm follows these characteristics:
+ * - Polynomial: 0xEDB88320 (reversed representation of 0x04C11DB7)
+ * - Initial value: 0xFFFFFFFF (inverted by ~0)
+ * - Final XOR value: 0xFFFFFFFF (achieved by inverting the result)
+ * - Input and output reflection: Yes
+ *
+ * @param {Buffer} buf - The input buffer to calculate checksum for
+ * @returns {number} - The 32-bit unsigned CRC-32 checksum (0x00000000 to 0xFFFFFFFF)
+ */
+function crc32(buf) {
+    // Initialize CRC with all 1's (0xFFFFFFFF) using bitwise NOT
+    let crc = ~0;
+    // Process each byte in the buffer
+    for (let i = 0; i < buf.length; i++) {
+        /*
+         * CRC update algorithm steps:
+         * 1. XOR current CRC with next byte (lowest 8 bits)
+         * 2. Use result as index in precomputed table (0-255)
+         * 3. XOR the table value with right-shifted CRC (8 bits)
+         *
+         * The operation breakdown:
+         * - (crc ^ buf[i]) - XOR with next byte
+         * - & 0xff - Isolate lowest 8 bits
+         * - crc >>> 8 - Shift CRC right by 8 bits (unsigned)
+         * - ^ crcTable[...] - XOR with precomputed table value
+         */
+        crc = (crc >>> 8) ^ crcTable[(crc ^ buf[i]) & 0xff];
+    }
+    /*
+     * Final processing:
+     * 1. Invert all bits (~crc) to match standard CRC-32 output
+     * 2. Convert to unsigned 32-bit integer (>>> 0)
+     */
+    return ~crc >>> 0;
+}
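This is the standard reflected CRC-32 (the same parameterization ZIP uses), so the canonical check value applies: the CRC-32 of the ASCII string "123456789" is 0xCBF43926. A one-line sanity test against the function above:

```js
const assert = require("node:assert");
// crc32 as exported by utils/crc-32.js above.
assert.strictEqual(crc32(Buffer.from("123456789")), 0xcbf43926);
```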