@cj-tech-master/excelts 1.4.2 → 1.4.4
This diff shows the contents of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- package/README.md +3 -3
- package/README_zh.md +3 -3
- package/dist/browser/excelts.iife.js +8135 -2722
- package/dist/browser/excelts.iife.js.map +1 -1
- package/dist/browser/excelts.iife.min.js +86 -23
- package/dist/cjs/stream/xlsx/workbook-writer.js +3 -2
- package/dist/cjs/utils/cell-format.js +13 -9
- package/dist/cjs/utils/sheet-utils.js +125 -15
- package/dist/cjs/utils/unzip/extract.js +166 -0
- package/dist/cjs/utils/unzip/index.js +7 -1
- package/dist/cjs/utils/xml-stream.js +25 -3
- package/dist/cjs/utils/zip/compress.js +261 -0
- package/dist/cjs/utils/zip/crc32.js +154 -0
- package/dist/cjs/utils/zip/index.js +70 -0
- package/dist/cjs/utils/zip/zip-builder.js +378 -0
- package/dist/cjs/utils/zip-stream.js +30 -34
- package/dist/cjs/xlsx/xform/book/defined-name-xform.js +36 -2
- package/dist/cjs/xlsx/xform/list-xform.js +6 -0
- package/dist/cjs/xlsx/xform/sheet/cell-xform.js +6 -1
- package/dist/cjs/xlsx/xform/sheet/row-xform.js +24 -2
- package/dist/cjs/xlsx/xform/table/filter-column-xform.js +4 -0
- package/dist/esm/stream/xlsx/workbook-writer.js +3 -2
- package/dist/esm/utils/cell-format.js +13 -9
- package/dist/esm/utils/sheet-utils.js +125 -15
- package/dist/esm/utils/unzip/extract.js +160 -0
- package/dist/esm/utils/unzip/index.js +2 -0
- package/dist/esm/utils/xml-stream.js +25 -3
- package/dist/esm/utils/zip/compress.js +220 -0
- package/dist/esm/utils/zip/crc32.js +116 -0
- package/dist/esm/utils/zip/index.js +55 -0
- package/dist/esm/utils/zip/zip-builder.js +372 -0
- package/dist/esm/utils/zip-stream.js +30 -34
- package/dist/esm/xlsx/xform/book/defined-name-xform.js +36 -2
- package/dist/esm/xlsx/xform/list-xform.js +6 -0
- package/dist/esm/xlsx/xform/sheet/cell-xform.js +6 -1
- package/dist/esm/xlsx/xform/sheet/row-xform.js +24 -2
- package/dist/esm/xlsx/xform/table/filter-column-xform.js +4 -0
- package/dist/types/utils/sheet-utils.d.ts +8 -2
- package/dist/types/utils/unzip/extract.d.ts +92 -0
- package/dist/types/utils/unzip/index.d.ts +1 -0
- package/dist/types/utils/xml-stream.d.ts +2 -0
- package/dist/types/utils/zip/compress.d.ts +83 -0
- package/dist/types/utils/zip/crc32.d.ts +55 -0
- package/dist/types/utils/zip/index.d.ts +52 -0
- package/dist/types/utils/zip/zip-builder.d.ts +110 -0
- package/dist/types/utils/zip-stream.d.ts +6 -12
- package/dist/types/xlsx/xform/list-xform.d.ts +1 -0
- package/dist/types/xlsx/xform/sheet/row-xform.d.ts +2 -0
- package/package.json +1 -1
package/dist/esm/utils/zip/crc32.js (new file, +116):

```diff
@@ -0,0 +1,116 @@
+/**
+ * CRC32 calculation utility for ZIP files
+ *
+ * - Node.js: Uses native zlib.crc32 (C++ implementation, ~100x faster)
+ * - Browser: Uses lookup table optimization
+ *
+ * The polynomial used is the standard CRC-32 IEEE 802.3:
+ * x^32 + x^26 + x^23 + x^22 + x^16 + x^12 + x^11 + x^10 + x^8 + x^7 + x^5 + x^4 + x^2 + x + 1
+ * Represented as 0xEDB88320 in reversed (LSB-first) form
+ */
+// Detect Node.js environment
+const isNode = typeof process !== "undefined" && process.versions?.node;
+// Lazy-loaded zlib module for Node.js
+let _zlib = null;
+let _zlibLoading = null;
+// Auto-initialize zlib in Node.js environment
+if (isNode) {
+    _zlibLoading = import("zlib")
+        .then(module => {
+        _zlib = module.default ?? module;
+        return _zlib;
+    })
+        .catch(() => {
+        _zlib = null;
+        return null;
+    });
+}
+/**
+ * Pre-computed CRC32 lookup table (256 entries)
+ * Generated using the standard polynomial 0xEDB88320
+ * Used as fallback when native zlib is not available
+ */
+const CRC32_TABLE = /* @__PURE__ */ (() => {
+    const table = new Uint32Array(256);
+    for (let i = 0; i < 256; i++) {
+        let crc = i;
+        for (let j = 0; j < 8; j++) {
+            crc = crc & 1 ? 0xedb88320 ^ (crc >>> 1) : crc >>> 1;
+        }
+        table[i] = crc;
+    }
+    return table;
+})();
+/**
+ * JavaScript fallback CRC32 implementation using lookup table
+ */
+function crc32JS(data) {
+    let crc = 0xffffffff;
+    for (let i = 0; i < data.length; i++) {
+        crc = CRC32_TABLE[(crc ^ data[i]) & 0xff] ^ (crc >>> 8);
+    }
+    return (crc ^ 0xffffffff) >>> 0;
+}
+/**
+ * Calculate CRC32 checksum for the given data
+ * Uses native zlib.crc32 in Node.js for ~100x better performance
+ *
+ * @param data - Input data as Uint8Array or Buffer
+ * @returns CRC32 checksum as unsigned 32-bit integer
+ *
+ * @example
+ * ```ts
+ * const data = new TextEncoder().encode("Hello, World!");
+ * const checksum = crc32(data);
+ * console.log(checksum.toString(16)); // "ec4ac3d0"
+ * ```
+ */
+export function crc32(data) {
+    // Use native zlib.crc32 if available (Node.js)
+    if (_zlib && typeof _zlib.crc32 === "function") {
+        return _zlib.crc32(data) >>> 0;
+    }
+    // Fallback to JS implementation
+    return crc32JS(data);
+}
+/**
+ * Ensure zlib is loaded (for use before calling crc32)
+ */
+export async function ensureCrc32() {
+    if (_zlibLoading) {
+        await _zlibLoading;
+    }
+}
+/**
+ * Calculate CRC32 incrementally (useful for streaming)
+ * Call with initial crc of 0xffffffff, then finalize with crc32Finalize
+ * Note: This always uses JS implementation for consistency in streaming
+ *
+ * @param crc - Current CRC value (start with 0xffffffff)
+ * @param data - Input data chunk
+ * @returns Updated CRC value (not finalized)
+ *
+ * @example
+ * ```ts
+ * let crc = 0xffffffff;
+ * crc = crc32Update(crc, chunk1);
+ * crc = crc32Update(crc, chunk2);
+ * const checksum = crc32Finalize(crc);
+ * ```
+ */
+export function crc32Update(crc, data) {
+    for (let i = 0; i < data.length; i++) {
+        crc = CRC32_TABLE[(crc ^ data[i]) & 0xff] ^ (crc >>> 8);
+    }
+    return crc;
+}
+/**
+ * Finalize CRC32 calculation
+ * XOR with 0xffffffff and convert to unsigned 32-bit
+ *
+ * @param crc - CRC value from crc32Update
+ * @returns Final CRC32 checksum
+ */
+export function crc32Finalize(crc) {
+    return (crc ^ 0xffffffff) >>> 0;
+}
```
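For orientation, a minimal sketch of how the one-shot and incremental APIs exported above should agree. The deep import path is an assumption about the package's internal dist layout, not a documented entry point; the expected checksum value is taken from the JSDoc example in the file itself.

```ts
// Hypothetical import path; crc32/crc32Update/crc32Finalize are the exports shown above.
import { crc32, crc32Update, crc32Finalize } from "@cj-tech-master/excelts/dist/esm/utils/zip/crc32.js";

const bytes = new TextEncoder().encode("Hello, World!");

// One-shot checksum (uses native zlib.crc32 in Node.js when available)
const direct = crc32(bytes);

// Incremental checksum: seed with 0xffffffff, feed chunks, then finalize
let running = 0xffffffff;
running = crc32Update(running, bytes.subarray(0, 5)); // "Hello"
running = crc32Update(running, bytes.subarray(5));    // ", World!"
const chunked = crc32Finalize(running);

console.log(direct === chunked);   // true
console.log(direct.toString(16));  // "ec4ac3d0", matching the JSDoc example above
```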
package/dist/esm/utils/zip/index.js (new file, +55):

```diff
@@ -0,0 +1,55 @@
+/**
+ * Native ZIP utilities - Pure native implementation without third-party dependencies
+ *
+ * This module provides ZIP file creation using only native platform APIs:
+ * - Node.js: Uses native zlib module (C++ implementation, fastest)
+ * - Browser: Uses CompressionStream API (Chrome 80+, Firefox 113+, Safari 16.4+)
+ *
+ * Features:
+ * - Full ZIP format support (Local File Headers, Central Directory, EOCD)
+ * - DEFLATE compression (level 0-9 on Node.js, fixed level on browser)
+ * - STORE mode (no compression)
+ * - UTF-8 filename support
+ * - File comments and ZIP comments
+ * - Streaming API for large files
+ * - Both sync (Node.js) and async APIs
+ *
+ * @example Basic usage
+ * ```ts
+ * import { createZip } from "./utils/zip/index.js";
+ *
+ * const zipData = await createZip([
+ *   { name: "hello.txt", data: new TextEncoder().encode("Hello!") },
+ *   { name: "folder/nested.txt", data: new TextEncoder().encode("Nested file") }
+ * ], { level: 6 });
+ *
+ * // Write to file (Node.js)
+ * fs.writeFileSync("output.zip", zipData);
+ * ```
+ *
+ * @example Streaming usage
+ * ```ts
+ * import { ZipBuilder } from "./utils/zip/index.js";
+ *
+ * const builder = new ZipBuilder({ level: 1 });
+ *
+ * // Add files one by one
+ * const [header1, data1] = await builder.addFile({
+ *   name: "file1.txt",
+ *   data: new TextEncoder().encode("File 1 content")
+ * });
+ * stream.write(header1);
+ * stream.write(data1);
+ *
+ * // Finalize and write central directory
+ * for (const chunk of builder.finalize()) {
+ *   stream.write(chunk);
+ * }
+ * ```
+ */
+// CRC32 utilities
+export { crc32, crc32Update, crc32Finalize } from "./crc32.js";
+// Compression utilities
+export { compress, compressSync, decompress, decompressSync, hasNativeZlib, hasCompressionStream } from "./compress.js";
+// ZIP builder
+export { createZip, createZipSync, ZipBuilder } from "./zip-builder.js";
```
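A hedged Node.js sketch of the createZip entry point re-exported by this barrel, using only the entry shape ({ name, data }, optional comment/modTime) and the level option documented in the JSDoc above; the deep import path is again an assumption about the dist layout.

```ts
// Hypothetical deep import; createZip is the async API shown in the barrel above.
import { writeFile } from "node:fs/promises";
import { createZip } from "@cj-tech-master/excelts/dist/esm/utils/zip/index.js";

const enc = new TextEncoder();

const zipData = await createZip(
  [
    { name: "hello.txt", data: enc.encode("Hello!") },
    { name: "folder/nested.txt", data: enc.encode("Nested file") },
  ],
  { level: 6 } // 0 = STORE, 1-9 = DEFLATE level (browser builds use a fixed level)
);

// The result is a complete archive: local headers + data, central directory, EOCD.
await writeFile("output.zip", zipData);
```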
package/dist/esm/utils/zip/zip-builder.js (new file, +372):

```diff
@@ -0,0 +1,372 @@
+/**
+ * ZIP file format builder
+ *
+ * Implements ZIP file structure according to PKWARE's APPNOTE.TXT specification
+ * https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
+ *
+ * ZIP file structure:
+ * ┌──────────────────────────┐
+ * │ Local File Header 1      │
+ * │ File Data 1              │
+ * ├──────────────────────────┤
+ * │ Local File Header 2      │
+ * │ File Data 2              │
+ * ├──────────────────────────┤
+ * │ ...                      │
+ * ├──────────────────────────┤
+ * │ Central Directory 1      │
+ * │ Central Directory 2      │
+ * │ ...                      │
+ * ├──────────────────────────┤
+ * │ End of Central Directory │
+ * └──────────────────────────┘
+ */
+import { crc32 } from "./crc32.js";
+import { compress, compressSync } from "./compress.js";
+// ZIP signature constants
+const LOCAL_FILE_HEADER_SIG = 0x04034b50;
+const CENTRAL_DIR_HEADER_SIG = 0x02014b50;
+const END_OF_CENTRAL_DIR_SIG = 0x06054b50;
+// ZIP version constants
+const VERSION_NEEDED = 20; // 2.0 - supports DEFLATE
+const VERSION_MADE_BY = 20; // 2.0
+// Compression methods
+const COMPRESSION_STORE = 0;
+const COMPRESSION_DEFLATE = 8;
+/**
+ * Convert Date to DOS time format
+ * @param date - Date to convert
+ * @returns [dosTime, dosDate]
+ */
+function dateToDos(date) {
+    const dosTime = ((date.getHours() & 0x1f) << 11) |
+        ((date.getMinutes() & 0x3f) << 5) |
+        ((date.getSeconds() >> 1) & 0x1f);
+    const dosDate = (((date.getFullYear() - 1980) & 0x7f) << 9) |
+        (((date.getMonth() + 1) & 0x0f) << 5) |
+        (date.getDate() & 0x1f);
+    return [dosTime, dosDate];
+}
+/**
+ * Encode string to UTF-8 bytes
+ */
+const encoder = new TextEncoder();
+function encodeString(str) {
+    return encoder.encode(str);
+}
+/**
+ * Build Local File Header (30 bytes + filename + extra)
+ */
+function buildLocalFileHeader(entry) {
+    const header = new Uint8Array(30 + entry.name.length);
+    const view = new DataView(header.buffer);
+    view.setUint32(0, LOCAL_FILE_HEADER_SIG, true); // Signature
+    view.setUint16(4, VERSION_NEEDED, true); // Version needed to extract
+    view.setUint16(6, 0x0800, true); // General purpose bit flag (UTF-8 names)
+    view.setUint16(8, entry.compressionMethod, true); // Compression method
+    view.setUint16(10, entry.modTime, true); // Last mod time
+    view.setUint16(12, entry.modDate, true); // Last mod date
+    view.setUint32(14, entry.crc, true); // CRC-32
+    view.setUint32(18, entry.compressedData.length, true); // Compressed size
+    view.setUint32(22, entry.data.length, true); // Uncompressed size
+    view.setUint16(26, entry.name.length, true); // Filename length
+    view.setUint16(28, 0, true); // Extra field length
+    header.set(entry.name, 30);
+    return header;
+}
+/**
+ * Build Central Directory Header (46 bytes + filename + extra + comment)
+ */
+function buildCentralDirHeader(entry) {
+    const header = new Uint8Array(46 + entry.name.length + entry.comment.length);
+    const view = new DataView(header.buffer);
+    view.setUint32(0, CENTRAL_DIR_HEADER_SIG, true); // Signature
+    view.setUint16(4, VERSION_MADE_BY, true); // Version made by
+    view.setUint16(6, VERSION_NEEDED, true); // Version needed to extract
+    view.setUint16(8, 0x0800, true); // General purpose bit flag (UTF-8 names)
+    view.setUint16(10, entry.compressionMethod, true); // Compression method
+    view.setUint16(12, entry.modTime, true); // Last mod time
+    view.setUint16(14, entry.modDate, true); // Last mod date
+    view.setUint32(16, entry.crc, true); // CRC-32
+    view.setUint32(20, entry.compressedData.length, true); // Compressed size
+    view.setUint32(24, entry.data.length, true); // Uncompressed size
+    view.setUint16(28, entry.name.length, true); // Filename length
+    view.setUint16(30, 0, true); // Extra field length
+    view.setUint16(32, entry.comment.length, true); // Comment length
+    view.setUint16(34, 0, true); // Disk number start
+    view.setUint16(36, 0, true); // Internal file attributes
+    view.setUint32(38, 0, true); // External file attributes
+    view.setUint32(42, entry.offset, true); // Relative offset of local header
+    header.set(entry.name, 46);
+    if (entry.comment.length > 0) {
+        header.set(entry.comment, 46 + entry.name.length);
+    }
+    return header;
+}
+/**
+ * Build End of Central Directory Record (22 bytes + comment)
+ */
+function buildEndOfCentralDir(entryCount, centralDirSize, centralDirOffset, comment) {
+    const record = new Uint8Array(22 + comment.length);
+    const view = new DataView(record.buffer);
+    view.setUint32(0, END_OF_CENTRAL_DIR_SIG, true); // Signature
+    view.setUint16(4, 0, true); // Number of this disk
+    view.setUint16(6, 0, true); // Disk where central dir starts
+    view.setUint16(8, entryCount, true); // Number of entries on this disk
+    view.setUint16(10, entryCount, true); // Total number of entries
+    view.setUint32(12, centralDirSize, true); // Size of central directory
+    view.setUint32(16, centralDirOffset, true); // Offset of central directory
+    view.setUint16(20, comment.length, true); // Comment length
+    if (comment.length > 0) {
+        record.set(comment, 22);
+    }
+    return record;
+}
+/**
+ * Create a ZIP file from entries (async)
+ *
+ * @param entries - Files to include in ZIP
+ * @param options - ZIP options
+ * @returns ZIP file as Uint8Array
+ *
+ * @example
+ * ```ts
+ * const zip = await createZip([
+ *   { name: "hello.txt", data: new TextEncoder().encode("Hello!") },
+ *   { name: "folder/file.txt", data: new TextEncoder().encode("Nested!") }
+ * ], { level: 6 });
+ * ```
+ */
+export async function createZip(entries, options = {}) {
+    const level = options.level ?? 6;
+    const zipComment = encodeString(options.comment ?? "");
+    const now = new Date();
+    // Process entries
+    const processedEntries = [];
+    let currentOffset = 0;
+    for (const entry of entries) {
+        const nameBytes = encodeString(entry.name);
+        const commentBytes = encodeString(entry.comment ?? "");
+        const modDate = entry.modTime ?? now;
+        const [dosTime, dosDate] = dateToDos(modDate);
+        // Compress data
+        const isCompressed = level > 0 && entry.data.length > 0;
+        const compressedData = isCompressed ? await compress(entry.data, { level }) : entry.data;
+        const processedEntry = {
+            name: nameBytes,
+            data: entry.data,
+            compressedData,
+            crc: crc32(entry.data),
+            compressionMethod: isCompressed ? COMPRESSION_DEFLATE : COMPRESSION_STORE,
+            modTime: dosTime,
+            modDate: dosDate,
+            comment: commentBytes,
+            offset: currentOffset
+        };
+        // Calculate offset for next entry
+        currentOffset += 30 + nameBytes.length + compressedData.length;
+        processedEntries.push(processedEntry);
+    }
+    // Build ZIP structure
+    const chunks = [];
+    // Local file headers and data
+    for (const entry of processedEntries) {
+        chunks.push(buildLocalFileHeader(entry));
+        chunks.push(entry.compressedData);
+    }
+    const centralDirOffset = currentOffset;
+    // Central directory
+    const centralDirChunks = [];
+    for (const entry of processedEntries) {
+        centralDirChunks.push(buildCentralDirHeader(entry));
+    }
+    chunks.push(...centralDirChunks);
+    const centralDirSize = centralDirChunks.reduce((sum, c) => sum + c.length, 0);
+    // End of central directory
+    chunks.push(buildEndOfCentralDir(processedEntries.length, centralDirSize, centralDirOffset, zipComment));
+    // Combine all chunks
+    const totalSize = chunks.reduce((sum, c) => sum + c.length, 0);
+    const result = new Uint8Array(totalSize);
+    let offset = 0;
+    for (const chunk of chunks) {
+        result.set(chunk, offset);
+        offset += chunk.length;
+    }
+    return result;
+}
+/**
+ * Create a ZIP file from entries (sync, Node.js only)
+ *
+ * @param entries - Files to include in ZIP
+ * @param options - ZIP options
+ * @returns ZIP file as Uint8Array
+ * @throws Error if not in Node.js environment
+ */
+export function createZipSync(entries, options = {}) {
+    const level = options.level ?? 6;
+    const zipComment = encodeString(options.comment ?? "");
+    const now = new Date();
+    // Process entries
+    const processedEntries = [];
+    let currentOffset = 0;
+    for (const entry of entries) {
+        const nameBytes = encodeString(entry.name);
+        const commentBytes = encodeString(entry.comment ?? "");
+        const modDate = entry.modTime ?? now;
+        const [dosTime, dosDate] = dateToDos(modDate);
+        // Compress data
+        const isCompressed = level > 0 && entry.data.length > 0;
+        const compressedData = isCompressed ? compressSync(entry.data, { level }) : entry.data;
+        const processedEntry = {
+            name: nameBytes,
+            data: entry.data,
+            compressedData,
+            crc: crc32(entry.data),
+            compressionMethod: isCompressed ? COMPRESSION_DEFLATE : COMPRESSION_STORE,
+            modTime: dosTime,
+            modDate: dosDate,
+            comment: commentBytes,
+            offset: currentOffset
+        };
+        currentOffset += 30 + nameBytes.length + compressedData.length;
+        processedEntries.push(processedEntry);
+    }
+    // Build ZIP structure
+    const chunks = [];
+    // Local file headers and data
+    for (const entry of processedEntries) {
+        chunks.push(buildLocalFileHeader(entry));
+        chunks.push(entry.compressedData);
+    }
+    const centralDirOffset = currentOffset;
+    // Central directory
+    const centralDirChunks = [];
+    for (const entry of processedEntries) {
+        centralDirChunks.push(buildCentralDirHeader(entry));
+    }
+    chunks.push(...centralDirChunks);
+    const centralDirSize = centralDirChunks.reduce((sum, c) => sum + c.length, 0);
+    // End of central directory
+    chunks.push(buildEndOfCentralDir(processedEntries.length, centralDirSize, centralDirOffset, zipComment));
+    // Combine all chunks
+    const totalSize = chunks.reduce((sum, c) => sum + c.length, 0);
+    const result = new Uint8Array(totalSize);
+    let offset = 0;
+    for (const chunk of chunks) {
+        result.set(chunk, offset);
+        offset += chunk.length;
+    }
+    return result;
+}
+/**
+ * Streaming ZIP builder for large files
+ * Writes chunks to a callback as they are generated
+ */
+export class ZipBuilder {
+    /**
+     * Create a new ZIP builder
+     * @param options - ZIP options
+     */
+    constructor(options = {}) {
+        this.entries = [];
+        this.currentOffset = 0;
+        this.finalized = false;
+        this.level = options.level ?? 6;
+        this.zipComment = encodeString(options.comment ?? "");
+    }
+    /**
+     * Add a file to the ZIP (async)
+     * @param entry - File entry
+     * @returns Local file header and compressed data chunks
+     */
+    async addFile(entry) {
+        if (this.finalized) {
+            throw new Error("Cannot add files after finalizing");
+        }
+        const nameBytes = encodeString(entry.name);
+        const commentBytes = encodeString(entry.comment ?? "");
+        const [dosTime, dosDate] = dateToDos(entry.modTime ?? new Date());
+        // Compress data
+        const isCompressed = this.level > 0 && entry.data.length > 0;
+        const compressedData = isCompressed
+            ? await compress(entry.data, { level: this.level })
+            : entry.data;
+        const processedEntry = {
+            name: nameBytes,
+            data: entry.data,
+            compressedData,
+            crc: crc32(entry.data),
+            compressionMethod: isCompressed ? COMPRESSION_DEFLATE : COMPRESSION_STORE,
+            modTime: dosTime,
+            modDate: dosDate,
+            comment: commentBytes,
+            offset: this.currentOffset
+        };
+        this.entries.push(processedEntry);
+        this.currentOffset += 30 + nameBytes.length + compressedData.length;
+        return [buildLocalFileHeader(processedEntry), compressedData];
+    }
+    /**
+     * Add a file to the ZIP (sync, Node.js only)
+     * @param entry - File entry
+     * @returns Local file header and compressed data chunks
+     */
+    addFileSync(entry) {
+        if (this.finalized) {
+            throw new Error("Cannot add files after finalizing");
+        }
+        const nameBytes = encodeString(entry.name);
+        const commentBytes = encodeString(entry.comment ?? "");
+        const [dosTime, dosDate] = dateToDos(entry.modTime ?? new Date());
+        // Compress data
+        const isCompressed = this.level > 0 && entry.data.length > 0;
+        const compressedData = isCompressed
+            ? compressSync(entry.data, { level: this.level })
+            : entry.data;
+        const processedEntry = {
+            name: nameBytes,
+            data: entry.data,
+            compressedData,
+            crc: crc32(entry.data),
+            compressionMethod: isCompressed ? COMPRESSION_DEFLATE : COMPRESSION_STORE,
+            modTime: dosTime,
+            modDate: dosDate,
+            comment: commentBytes,
+            offset: this.currentOffset
+        };
+        this.entries.push(processedEntry);
+        this.currentOffset += 30 + nameBytes.length + compressedData.length;
+        return [buildLocalFileHeader(processedEntry), compressedData];
+    }
+    /**
+     * Finalize the ZIP and return central directory + end record
+     * @returns Central directory and end of central directory chunks
+     */
+    finalize() {
+        if (this.finalized) {
+            throw new Error("ZIP already finalized");
+        }
+        this.finalized = true;
+        const chunks = [];
+        // Central directory
+        for (const entry of this.entries) {
+            chunks.push(buildCentralDirHeader(entry));
+        }
+        const centralDirSize = chunks.reduce((sum, c) => sum + c.length, 0);
+        // End of central directory
+        chunks.push(buildEndOfCentralDir(this.entries.length, centralDirSize, this.currentOffset, this.zipComment));
+        return chunks;
+    }
+    /**
+     * Get current number of entries
+     */
+    get entryCount() {
+        return this.entries.length;
+    }
+    /**
+     * Get current ZIP data size (without central directory)
+     */
+    get dataSize() {
+        return this.currentOffset;
+    }
+}
```
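The builder splits the archive into the pieces shown in the layout comment: addFile returns [localFileHeader, compressedData] for immediate writing, and finalize() yields the central directory plus the end-of-central-directory record. A minimal streaming sketch for Node.js; the write-stream plumbing and the deep import path are illustrative assumptions, not documented usage.

```ts
// Sketch: stream a ZIP to disk without buffering the whole archive.
import { createWriteStream } from "node:fs";
import { ZipBuilder } from "@cj-tech-master/excelts/dist/esm/utils/zip/zip-builder.js";

const out = createWriteStream("streamed.zip");
const builder = new ZipBuilder({ level: 1 }); // fast DEFLATE, as in the streaming writer

const enc = new TextEncoder();
const files: Array<[string, string]> = [
  ["a.txt", "first file"],
  ["b.txt", "second file"],
];

for (const [name, text] of files) {
  // addFile compresses, records the central-directory entry, and returns
  // the local file header plus the compressed payload for this entry.
  const [header, data] = await builder.addFile({ name, data: enc.encode(text) });
  out.write(header);
  out.write(data);
}

// Central directory entries followed by the EOCD record.
for (const chunk of builder.finalize()) {
  out.write(chunk);
}
out.end();
```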
package/dist/esm/utils/zip-stream.js (+30 -34; the content of some removed lines is truncated in the source diff):

```diff
@@ -1,35 +1,23 @@
 import events from "events";
-import {
+import { ZipBuilder } from "./zip/index.js";
 import { StreamBuf } from "./stream-buf.js";
 // =============================================================================
 // The ZipWriter class
 // Packs streamed data into an output zip stream
+// Uses native zlib (Node.js) or CompressionStream (browser) for best performance
 class ZipWriter extends events.EventEmitter {
     constructor(options) {
         super();
-        this.options = Object.assign({
-            type: "nodebuffer",
-            compression: "DEFLATE"
-        }, options);
-        // Default compression level is 6 (good balance of speed and size)
-        // 0 = no compression, 9 = best compression
-        const level = this.options.compressionOptions?.level ?? 6;
-        this.compressionLevel = Math.max(0, Math.min(9, level));
-        this.files = {};
-        this.stream = new StreamBuf();
         this.finalized = false;
-
-
-
-
-
-
-
-
-
-            }
-        }
-        });
+        this.pendingWrites = [];
+        // Determine compression level:
+        // - STORE mode = 0 (no compression)
+        // - DEFLATE mode = user level or default 1 (fast compression)
+        const level = options?.compression === "STORE"
+            ? 0
+            : Math.max(0, Math.min(9, options?.compressionOptions?.level ?? 1));
+        this.stream = new StreamBuf();
+        this.zipBuilder = new ZipBuilder({ level });
     }
     append(data, options) {
         let buffer;
@@ -43,7 +31,7 @@ class ZipWriter extends events.EventEmitter {
             buffer = Buffer.from(data, "utf8");
         }
         else if (Buffer.isBuffer(data)) {
-            // Buffer extends Uint8Array,
+            // Buffer extends Uint8Array, can use it directly - no copy needed
             buffer = data;
         }
         else if (ArrayBuffer.isView(data)) {
@@ -58,14 +46,16 @@ class ZipWriter extends events.EventEmitter {
             // Assume it's already a Uint8Array or compatible type
             buffer = data;
         }
-        // Add file to zip using
-        //
-        const
-
-
-
-
-
+        // Add file to zip using native compression
+        // addFile returns chunks that we write to stream immediately
+        const writePromise = this.zipBuilder
+            .addFile({ name: options.name, data: buffer })
+            .then(chunks => {
+            for (const chunk of chunks) {
+                this.stream.write(Buffer.from(chunk));
+            }
+        });
+        this.pendingWrites.push(writePromise);
     }
     push(chunk) {
         return this.stream.push(chunk);
@@ -75,8 +65,14 @@ class ZipWriter extends events.EventEmitter {
             return;
         }
         this.finalized = true;
-        //
-        this.
+        // Wait for all pending writes to complete
+        await Promise.all(this.pendingWrites);
+        // Finalize the zip and write central directory
+        const finalChunks = this.zipBuilder.finalize();
+        for (const chunk of finalChunks) {
+            this.stream.write(Buffer.from(chunk));
+        }
+        this.stream.end();
         this.emit("finish");
     }
     // ==========================================================================
```
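The ordering contract the rewritten ZipWriter relies on is simple: every append() kicks off an async addFile and stores the resulting promise in pendingWrites, and finalize() awaits them all before writing the central directory and ending the stream. A stripped-down sketch of that pattern with hypothetical names (not the library class itself):

```ts
// Illustrative pattern only: queue async entry writes, flush trailing records last.
class PendingWriteQueue {
  private pending: Promise<void>[] = [];

  // Mirrors ZipWriter.append(): start the async work now, remember its completion.
  enqueue(work: () => Promise<void>): void {
    this.pending.push(work());
  }

  // Mirrors ZipWriter.finalize(): only after every entry has been written
  // may the central directory / EOCD be appended and the stream ended.
  async finalize(writeTrailer: () => void): Promise<void> {
    await Promise.all(this.pending);
    writeTrailer();
  }
}

// Usage shape: entries first (in whatever order their compression finishes),
// trailer strictly last.
const queue = new PendingWriteQueue();
queue.enqueue(async () => { /* compress entry 1, write local header + data */ });
queue.enqueue(async () => { /* compress entry 2, write local header + data */ });
await queue.finalize(() => { /* write central directory + EOCD, end stream */ });
```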