@cj-tech-master/excelts 1.4.3 → 1.4.4
This diff shows the changes between publicly released versions of the package as they appear in its public registry, and is provided for informational purposes only.
- package/README.md +3 -3
- package/README_zh.md +3 -3
- package/dist/browser/excelts.iife.js +8037 -2686
- package/dist/browser/excelts.iife.js.map +1 -1
- package/dist/browser/excelts.iife.min.js +86 -23
- package/dist/cjs/stream/xlsx/workbook-writer.js +3 -2
- package/dist/cjs/utils/unzip/extract.js +166 -0
- package/dist/cjs/utils/unzip/index.js +7 -1
- package/dist/cjs/utils/xml-stream.js +25 -3
- package/dist/cjs/utils/zip/compress.js +261 -0
- package/dist/cjs/utils/zip/crc32.js +154 -0
- package/dist/cjs/utils/zip/index.js +70 -0
- package/dist/cjs/utils/zip/zip-builder.js +378 -0
- package/dist/cjs/utils/zip-stream.js +30 -34
- package/dist/cjs/xlsx/xform/book/defined-name-xform.js +36 -2
- package/dist/cjs/xlsx/xform/list-xform.js +6 -0
- package/dist/cjs/xlsx/xform/sheet/cell-xform.js +6 -1
- package/dist/cjs/xlsx/xform/sheet/row-xform.js +24 -2
- package/dist/cjs/xlsx/xform/table/filter-column-xform.js +4 -0
- package/dist/esm/stream/xlsx/workbook-writer.js +3 -2
- package/dist/esm/utils/unzip/extract.js +160 -0
- package/dist/esm/utils/unzip/index.js +2 -0
- package/dist/esm/utils/xml-stream.js +25 -3
- package/dist/esm/utils/zip/compress.js +220 -0
- package/dist/esm/utils/zip/crc32.js +116 -0
- package/dist/esm/utils/zip/index.js +55 -0
- package/dist/esm/utils/zip/zip-builder.js +372 -0
- package/dist/esm/utils/zip-stream.js +30 -34
- package/dist/esm/xlsx/xform/book/defined-name-xform.js +36 -2
- package/dist/esm/xlsx/xform/list-xform.js +6 -0
- package/dist/esm/xlsx/xform/sheet/cell-xform.js +6 -1
- package/dist/esm/xlsx/xform/sheet/row-xform.js +24 -2
- package/dist/esm/xlsx/xform/table/filter-column-xform.js +4 -0
- package/dist/types/utils/unzip/extract.d.ts +92 -0
- package/dist/types/utils/unzip/index.d.ts +1 -0
- package/dist/types/utils/xml-stream.d.ts +2 -0
- package/dist/types/utils/zip/compress.d.ts +83 -0
- package/dist/types/utils/zip/crc32.d.ts +55 -0
- package/dist/types/utils/zip/index.d.ts +52 -0
- package/dist/types/utils/zip/zip-builder.d.ts +110 -0
- package/dist/types/utils/zip-stream.d.ts +6 -12
- package/dist/types/xlsx/xform/list-xform.d.ts +1 -0
- package/dist/types/xlsx/xform/sheet/row-xform.d.ts +2 -0
- package/package.json +1 -1

@@ -4,6 +4,7 @@ exports.RowXform = void 0;
 const base_xform_js_1 = require("../base-xform");
 const cell_xform_js_1 = require("./cell-xform");
 const utils_js_1 = require("../../../utils/utils");
+const col_cache_js_1 = require("../../../utils/col-cache");
 class RowXform extends base_xform_js_1.BaseXform {
     constructor(options) {
         super();
@@ -15,6 +16,11 @@ class RowXform extends base_xform_js_1.BaseXform {
     get tag() {
         return "row";
     }
+    reset() {
+        super.reset();
+        this.numRowsSeen = 0;
+        this.lastCellCol = 0;
+    }
     prepare(model, options) {
         const styleId = options.styles.addStyleModel(model.style);
         if (styleId) {
@@ -65,11 +71,15 @@
         }
         if (node.name === "row") {
             this.numRowsSeen += 1;
+            // Reset lastCellCol for each new row
+            this.lastCellCol = 0;
             const spans = node.attributes.spans
                 ? node.attributes.spans.split(":").map((span) => parseInt(span, 10))
                 : [undefined, undefined];
+            // If r attribute is missing, use numRowsSeen as the row number
+            const rowNumber = node.attributes.r ? parseInt(node.attributes.r, 10) : this.numRowsSeen;
             const model = (this.model = {
-                number:
+                number: rowNumber,
                 min: spans[0],
                 max: spans[1],
                 cells: []
@@ -109,7 +119,19 @@
     parseClose(name) {
         if (this.parser) {
             if (!this.parser.parseClose(name)) {
-                this.
+                const cellModel = this.parser.model;
+                // If cell has address, extract column number from it
+                // Otherwise, calculate address based on position
+                if (cellModel.address) {
+                    const decoded = col_cache_js_1.colCache.decodeAddress(cellModel.address);
+                    this.lastCellCol = decoded.col;
+                }
+                else {
+                    // No r attribute, calculate address from position
+                    this.lastCellCol += 1;
+                    cellModel.address = col_cache_js_1.colCache.encodeAddress(this.model.number, this.lastCellCol);
+                }
+                this.model.cells.push(cellModel);
                 if (this.maxItems && this.model.cells.length > this.maxItems) {
                     throw new Error(`Max column count (${this.maxItems}) exceeded`);
                 }

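The hunks above add positional fallbacks for worksheets whose rows or cells omit the optional `r` attribute. A minimal standalone sketch of that fallback, using stand-in helpers rather than the package's `colCache` (the `colLetter`/`colNumber` names below are illustrative only):

```js
// Stand-ins for colCache.encodeAddress / decodeAddress, for illustration only.
function colLetter(col) {
  let s = "";
  for (; col > 0; col = Math.floor((col - 1) / 26)) {
    s = String.fromCharCode(65 + ((col - 1) % 26)) + s;
  }
  return s;
}

function colNumber(address) {
  const letters = address.match(/^[A-Z]+/)[0];
  return [...letters].reduce((n, ch) => n * 26 + (ch.charCodeAt(0) - 64), 0);
}

// Mirrors the parseClose logic: a cell with an explicit address pins the
// column cursor; a cell without one lands one column to the right of it.
function assignAddresses(rowNumber, cells) {
  let lastCellCol = 0; // reset at the start of every <row>
  for (const cell of cells) {
    if (cell.address) {
      lastCellCol = colNumber(cell.address);
    } else {
      lastCellCol += 1;
      cell.address = `${colLetter(lastCellCol)}${rowNumber}`;
    }
  }
  return cells;
}

// <row r="2"><c r="B2"/><c/><c/></row>  ->  ["B2", "C2", "D2"]
console.log(assignAddresses(2, [{ address: "B2" }, {}, {}]).map(c => c.address));
```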
@@ -57,6 +57,10 @@ class FilterColumnXform extends base_xform_js_1.BaseXform {
                     filterButton: attributes.hiddenButton === "0"
                 };
                 return true;
+            case "dynamicFilter":
+                // Ignore dynamicFilter nodes - we don't need to preserve them for reading
+                // See: https://github.com/exceljs/exceljs/issues/2972
+                return true;
             default:
                 this.parser = this.map[node.name];
                 if (this.parser) {

@@ -31,8 +31,9 @@ class WorkbookWriter {
         this.views = [];
         this.zipOptions = options.zip;
         // Extract compression level from zip options (supports both zlib.level and compressionOptions.level)
-        // Default compression level is
-
+        // Default compression level is 1 (fast compression with good ratio)
+        // Level 1 is ~2x faster than level 6 with only ~7% larger files
+        const level = options.zip?.zlib?.level ?? options.zip?.compressionOptions?.level ?? 1;
         this.compressionLevel = Math.max(0, Math.min(9, level));
         this.media = [];
         this.commentRefs = [];

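For the option shapes the constructor above now reads, a hedged usage sketch; the `stream.xlsx.WorkbookWriter` entry point and worksheet calls are assumed to follow the exceljs-style API this package mirrors, so treat them as illustrative:

```js
// Assumed exceljs-style entry point; the zip option shapes below are the ones
// the constructor reads (zip.zlib.level, zip.compressionOptions.level, default 1).
const ExcelTS = require("@cj-tech-master/excelts");

async function writeReport() {
  const workbook = new ExcelTS.stream.xlsx.WorkbookWriter({
    filename: "./report.xlsx",
    zip: { zlib: { level: 6 } },
    // zip: { compressionOptions: { level: 6 } }  // jszip-style spelling, also honored
    // omit zip entirely to get the new default of level 1
  });

  const sheet = workbook.addWorksheet("data");
  sheet.addRow(["a", "b", "c"]).commit();
  await workbook.commit();
}

writeReport().catch(console.error);
```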
@@ -0,0 +1,160 @@
+/**
+ * Simple ZIP extraction utilities
+ * Provides easy-to-use Promise-based API for extracting ZIP files
+ */
+import { Readable } from "stream";
+import { createParse } from "./parse.js";
+/**
+ * Extract all files from a ZIP buffer
+ *
+ * @param zipData - ZIP file data as Buffer or Uint8Array
+ * @returns Map of file paths to their content
+ *
+ * @example
+ * ```ts
+ * import { extractAll } from "./utils/unzip/extract.js";
+ *
+ * const zipData = fs.readFileSync("archive.zip");
+ * const files = await extractAll(zipData);
+ *
+ * for (const [path, file] of files) {
+ *   console.log(`${path}: ${file.data.length} bytes`);
+ * }
+ * ```
+ */
+export async function extractAll(zipData) {
+    const files = new Map();
+    const buffer = Buffer.isBuffer(zipData) ? zipData : Buffer.from(zipData);
+    const parse = createParse({ forceStream: true });
+    const stream = Readable.from([buffer]);
+    stream.pipe(parse);
+    for await (const entry of parse) {
+        const zipEntry = entry;
+        const isDirectory = zipEntry.type === "Directory";
+        if (isDirectory) {
+            files.set(zipEntry.path, {
+                path: zipEntry.path,
+                data: Buffer.alloc(0),
+                isDirectory: true,
+                size: 0
+            });
+            zipEntry.autodrain();
+        }
+        else {
+            const data = await zipEntry.buffer();
+            files.set(zipEntry.path, {
+                path: zipEntry.path,
+                data,
+                isDirectory: false,
+                size: data.length
+            });
+        }
+    }
+    return files;
+}
+/**
+ * Extract a single file from a ZIP buffer
+ *
+ * @param zipData - ZIP file data as Buffer or Uint8Array
+ * @param filePath - Path of the file to extract
+ * @returns File content as Buffer, or null if not found
+ *
+ * @example
+ * ```ts
+ * import { extractFile } from "./utils/unzip/extract.js";
+ *
+ * const zipData = fs.readFileSync("archive.zip");
+ * const content = await extractFile(zipData, "readme.txt");
+ * if (content) {
+ *   console.log(content.toString("utf-8"));
+ * }
+ * ```
+ */
+export async function extractFile(zipData, filePath) {
+    const buffer = Buffer.isBuffer(zipData) ? zipData : Buffer.from(zipData);
+    const parse = createParse({ forceStream: true });
+    const stream = Readable.from([buffer]);
+    stream.pipe(parse);
+    for await (const entry of parse) {
+        const zipEntry = entry;
+        if (zipEntry.path === filePath) {
+            if (zipEntry.type === "Directory") {
+                return Buffer.alloc(0);
+            }
+            return zipEntry.buffer();
+        }
+        zipEntry.autodrain();
+    }
+    return null;
+}
+/**
+ * List all file paths in a ZIP buffer (without extracting content)
+ *
+ * @param zipData - ZIP file data as Buffer or Uint8Array
+ * @returns Array of file paths
+ *
+ * @example
+ * ```ts
+ * import { listFiles } from "./utils/unzip/extract.js";
+ *
+ * const zipData = fs.readFileSync("archive.zip");
+ * const paths = await listFiles(zipData);
+ * console.log(paths); // ["file1.txt", "folder/file2.txt", ...]
+ * ```
+ */
+export async function listFiles(zipData) {
+    const paths = [];
+    const buffer = Buffer.isBuffer(zipData) ? zipData : Buffer.from(zipData);
+    const parse = createParse({ forceStream: true });
+    const stream = Readable.from([buffer]);
+    stream.pipe(parse);
+    for await (const entry of parse) {
+        const zipEntry = entry;
+        paths.push(zipEntry.path);
+        zipEntry.autodrain();
+    }
+    return paths;
+}
+/**
+ * Iterate over ZIP entries with a callback (memory efficient for large ZIPs)
+ *
+ * @param zipData - ZIP file data as Buffer or Uint8Array
+ * @param callback - Async callback for each entry, return false to stop iteration
+ *
+ * @example
+ * ```ts
+ * import { forEachEntry } from "./utils/unzip/extract.js";
+ *
+ * await forEachEntry(zipData, async (path, getData) => {
+ *   if (path.endsWith(".xml")) {
+ *     const content = await getData();
+ *     console.log(content.toString("utf-8"));
+ *   }
+ *   return true; // continue iteration
+ * });
+ * ```
+ */
+export async function forEachEntry(zipData, callback) {
+    const buffer = Buffer.isBuffer(zipData) ? zipData : Buffer.from(zipData);
+    const parse = createParse({ forceStream: true });
+    const stream = Readable.from([buffer]);
+    stream.pipe(parse);
+    for await (const entry of parse) {
+        const zipEntry = entry;
+        let dataPromise = null;
+        const getData = () => {
+            if (!dataPromise) {
+                dataPromise = zipEntry.buffer();
+            }
+            return dataPromise;
+        };
+        const shouldContinue = await callback(zipEntry.path, getData, zipEntry);
+        // If callback didn't read data, drain it
+        if (!dataPromise) {
+            zipEntry.autodrain();
+        }
+        if (shouldContinue === false) {
+            break;
+        }
+    }
+}

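Because an .xlsx workbook is itself a ZIP archive, the new helpers can be pointed at one directly. A small sketch (the relative import path is illustrative; the helpers ship under dist/esm/utils/unzip/):

```js
// Illustrative relative import; listFiles/extractFile are the helpers defined above.
import { readFileSync } from "fs";
import { listFiles, extractFile } from "./utils/unzip/extract.js";

const zipData = readFileSync("workbook.xlsx");

// Every OOXML part is a plain ZIP entry
console.log(await listFiles(zipData)); // e.g. ["[Content_Types].xml", "xl/workbook.xml", ...]

// Pull one worksheet part without unpacking the whole archive
const sheetXml = await extractFile(zipData, "xl/worksheets/sheet1.xml");
if (sheetXml) {
  console.log(sheetXml.toString("utf-8").slice(0, 200));
}
```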
@@ -10,3 +10,5 @@ export { bufferStream } from "./buffer-stream.js";
 export { parse as parseBuffer } from "./parse-buffer.js";
 export { parseDateTime } from "./parse-datetime.js";
 export { parseExtraField } from "./parse-extra-field.js";
+// Simple extraction API
+export { extractAll, extractFile, listFiles, forEachEntry } from "./extract.js";

@@ -4,6 +4,8 @@ const OPEN_ANGLE = "<";
 const CLOSE_ANGLE = ">";
 const OPEN_ANGLE_SLASH = "</";
 const CLOSE_SLASH_ANGLE = "/>";
+// Chunk size for periodic consolidation (reduces final join overhead)
+const CHUNK_SIZE = 10000;
 function pushAttribute(xml, name, value) {
     xml.push(` ${name}="${xmlEncode(value.toString())}"`);
 }
@@ -21,15 +23,23 @@ function pushAttributes(xml, attributes) {
 class XmlStream {
     constructor() {
         this._xml = [];
+        this._chunks = [];
         this._stack = [];
         this._rollbacks = [];
     }
+    _consolidate() {
+        // Periodically join small strings into larger chunks to reduce final join overhead
+        if (this._xml.length >= CHUNK_SIZE) {
+            this._chunks.push(this._xml.join(""));
+            this._xml = [];
+        }
+    }
     get tos() {
         return this._stack.length ? this._stack[this._stack.length - 1] : undefined;
     }
     get cursor() {
         // handy way to track whether anything has been added
-        return this._xml.length;
+        return this._chunks.length * CHUNK_SIZE + this._xml.length;
     }
     openXml(docAttributes) {
         const xml = this._xml;
@@ -96,6 +106,7 @@
         }
         this.open = false;
         this.leaf = false;
+        this._consolidate();
     }
     leafNode(name, attributes, text) {
         this.openNode(name, attributes);
@@ -115,7 +126,8 @@
             xml: this._xml.length,
             stack: this._stack.length,
             leaf: this.leaf,
-            open: this.open
+            open: this.open,
+            chunksLength: this._chunks.length
         });
         return this.cursor;
     }
@@ -130,12 +142,22 @@
         if (this._stack.length > r.stack) {
             this._stack.splice(r.stack, this._stack.length - r.stack);
         }
+        if (this._chunks.length > r.chunksLength) {
+            this._chunks.splice(r.chunksLength, this._chunks.length - r.chunksLength);
+        }
         this.leaf = r.leaf;
         this.open = r.open;
     }
     get xml() {
         this.closeAll();
-
+        // Join chunks first, then remaining xml array
+        if (this._chunks.length === 0) {
+            return this._xml.join("");
+        }
+        if (this._xml.length > 0) {
+            this._chunks.push(this._xml.join(""));
+        }
+        return this._chunks.join("");
     }
 }
 XmlStream.StdDocAttributes = {

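The `_chunks` change above replaces one very large `Array.prototype.join` over millions of short strings with periodic mid-sized joins, which keeps the working array small and the final join cheap. A self-contained sketch of the same accumulator pattern, with illustrative names:

```js
// Minimal chunked string accumulator illustrating the consolidation pattern.
class ChunkedBuilder {
  constructor(chunkSize = 10000) {
    this.chunkSize = chunkSize;
    this.parts = [];  // small, recently pushed strings (like _xml)
    this.chunks = []; // consolidated larger strings (like _chunks)
  }

  push(str) {
    this.parts.push(str);
    if (this.parts.length >= this.chunkSize) {
      // Periodic consolidation: one mid-sized join now avoids a huge join later
      this.chunks.push(this.parts.join(""));
      this.parts = [];
    }
  }

  toString() {
    if (this.parts.length > 0) {
      this.chunks.push(this.parts.join(""));
      this.parts = [];
    }
    return this.chunks.join("");
  }
}

const b = new ChunkedBuilder(4);
["<a>", "1", "</a>", "<b>", "2", "</b>"].forEach(s => b.push(s));
console.log(b.toString()); // "<a>1</a><b>2</b>"
```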
@@ -0,0 +1,220 @@
+/**
+ * Native compression utilities using platform APIs
+ *
+ * - Node.js: Uses native zlib module (C++ implementation, fastest)
+ * - Browser: Uses CompressionStream API (Chrome 80+, Firefox 113+, Safari 16.4+)
+ *
+ * Both use "deflate-raw" format which is required for ZIP files
+ * (raw DEFLATE without zlib header/trailer)
+ */
+// Detect environment
+const isNode = typeof process !== "undefined" && process.versions?.node;
+// Lazy-loaded zlib module for Node.js
+let _zlib = null;
+let _zlibLoading = null;
+// Auto-initialize zlib in Node.js environment
+if (isNode) {
+    _zlibLoading = import("zlib")
+        .then(module => {
+        _zlib = module.default ?? module;
+        return _zlib;
+    })
+        .catch(() => {
+        _zlib = null;
+        return null;
+    });
+}
+/**
+ * Get zlib module (Node.js only)
+ * Returns null if not yet loaded or not in Node.js
+ */
+function getZlib() {
+    return _zlib;
+}
+/**
+ * Ensure zlib is loaded (Node.js only)
+ * Call this before using sync methods if you need to guarantee availability
+ */
+export async function ensureZlib() {
+    if (_zlibLoading) {
+        return _zlibLoading;
+    }
+    return _zlib;
+}
+/**
+ * Check if native zlib is available (Node.js)
+ */
+export function hasNativeZlib() {
+    const zlib = getZlib();
+    return zlib !== null && typeof zlib.deflateRawSync === "function";
+}
+/**
+ * Check if CompressionStream is available (Browser/Node.js 17+)
+ */
+export function hasCompressionStream() {
+    return typeof CompressionStream !== "undefined";
+}
+/**
+ * Compress data using the best available native method
+ *
+ * Priority:
+ * 1. Node.js zlib (if available) - fastest, supports compression levels
+ * 2. CompressionStream (browser/Node.js 17+) - no level support
+ * 3. Return uncompressed data (fallback)
+ *
+ * @param data - Data to compress
+ * @param options - Compression options
+ * @returns Compressed data
+ *
+ * @example
+ * ```ts
+ * const data = new TextEncoder().encode("Hello, World!");
+ * const compressed = await compress(data, { level: 6 });
+ * ```
+ */
+export async function compress(data, options = {}) {
+    const level = options.level ?? 6;
+    // Level 0 means no compression
+    if (level === 0) {
+        return data;
+    }
+    // Ensure zlib is loaded first
+    const zlib = await ensureZlib();
+    // Try Node.js zlib first (fastest, supports levels)
+    if (zlib && typeof zlib.deflateRawSync === "function") {
+        const result = zlib.deflateRawSync(Buffer.from(data), { level });
+        return new Uint8Array(result.buffer, result.byteOffset, result.byteLength);
+    }
+    // Fall back to CompressionStream (browser/Node.js 17+)
+    if (typeof CompressionStream !== "undefined") {
+        return compressWithCompressionStream(data);
+    }
+    // No compression available - return original data
+    console.warn("No native compression available, returning uncompressed data");
+    return data;
+}
+/**
+ * Compress data synchronously using Node.js zlib
+ * Only available in Node.js environment
+ *
+ * @param data - Data to compress
+ * @param options - Compression options
+ * @returns Compressed data
+ * @throws Error if not in Node.js environment
+ */
+export function compressSync(data, options = {}) {
+    const level = options.level ?? 6;
+    if (level === 0) {
+        return data;
+    }
+    const zlib = getZlib();
+    if (!zlib || typeof zlib.deflateRawSync !== "function") {
+        throw new Error("Synchronous compression is only available in Node.js environment");
+    }
+    const result = zlib.deflateRawSync(Buffer.from(data), { level });
+    return new Uint8Array(result.buffer, result.byteOffset, result.byteLength);
+}
+/**
+ * Compress using browser's native CompressionStream
+ * Uses "deflate-raw" format (required for ZIP files)
+ *
+ * Note: CompressionStream does not support compression level configuration
+ *
+ * @param data - Data to compress
+ * @returns Compressed data
+ */
+async function compressWithCompressionStream(data) {
+    const cs = new CompressionStream("deflate-raw");
+    const writer = cs.writable.getWriter();
+    const reader = cs.readable.getReader();
+    // Write data and close
+    writer.write(new Uint8Array(data.buffer, data.byteOffset, data.byteLength));
+    writer.close();
+    // Read all compressed chunks
+    const chunks = [];
+    let totalLength = 0;
+    while (true) {
+        const { done, value } = await reader.read();
+        if (done) {
+            break;
+        }
+        chunks.push(value);
+        totalLength += value.length;
+    }
+    // Combine chunks into single array
+    const result = new Uint8Array(totalLength);
+    let offset = 0;
+    for (const chunk of chunks) {
+        result.set(chunk, offset);
+        offset += chunk.length;
+    }
+    return result;
+}
+/**
+ * Decompress data using the best available native method
+ *
+ * @param data - Compressed data (deflate-raw format)
+ * @returns Decompressed data
+ */
+export async function decompress(data) {
+    // Ensure zlib is loaded first
+    const zlib = await ensureZlib();
+    // Try Node.js zlib first
+    if (zlib && typeof zlib.inflateRawSync === "function") {
+        const result = zlib.inflateRawSync(Buffer.from(data));
+        return new Uint8Array(result.buffer, result.byteOffset, result.byteLength);
+    }
+    // Fall back to DecompressionStream
+    if (typeof DecompressionStream !== "undefined") {
+        return decompressWithDecompressionStream(data);
+    }
+    throw new Error("No native decompression available");
+}
+/**
+ * Decompress data synchronously using Node.js zlib
+ *
+ * @param data - Compressed data (deflate-raw format)
+ * @returns Decompressed data
+ * @throws Error if not in Node.js environment
+ */
+export function decompressSync(data) {
+    const zlib = getZlib();
+    if (!zlib || typeof zlib.inflateRawSync !== "function") {
+        throw new Error("Synchronous decompression is only available in Node.js environment");
+    }
+    const result = zlib.inflateRawSync(Buffer.from(data));
+    return new Uint8Array(result.buffer, result.byteOffset, result.byteLength);
+}
+/**
+ * Decompress using browser's native DecompressionStream
+ *
+ * @param data - Compressed data (deflate-raw format)
+ * @returns Decompressed data
+ */
+async function decompressWithDecompressionStream(data) {
+    const ds = new DecompressionStream("deflate-raw");
+    const writer = ds.writable.getWriter();
+    const reader = ds.readable.getReader();
+    // Write data and close
+    writer.write(new Uint8Array(data.buffer, data.byteOffset, data.byteLength));
+    writer.close();
+    // Read all decompressed chunks
+    const chunks = [];
+    let totalLength = 0;
+    while (true) {
+        const { done, value } = await reader.read();
+        if (done) {
+            break;
+        }
+        chunks.push(value);
+        totalLength += value.length;
+    }
+    // Combine chunks into single array
+    const result = new Uint8Array(totalLength);
+    let offset = 0;
+    for (const chunk of chunks) {
+        result.set(chunk, offset);
+        offset += chunk.length;
+    }
+    return result;
+}

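A round-trip sketch against the compress/decompress API above (the relative import path is illustrative); in Node.js the zlib branch is taken, in browsers the CompressionStream branch:

```js
// Illustrative relative import; compress/decompress/hasNativeZlib/ensureZlib are defined above.
import { compress, decompress, hasNativeZlib, ensureZlib } from "./utils/zip/compress.js";

const text = "Hello, World!".repeat(1000);
const input = new TextEncoder().encode(text);

await ensureZlib(); // make sure the Node.js zlib path is ready
console.log("native zlib:", hasNativeZlib());

const deflated = await compress(input, { level: 6 }); // raw DEFLATE bytes
const inflated = await decompress(deflated);

console.log(input.length, "->", deflated.length, "bytes");
console.log(new TextDecoder().decode(inflated) === text); // true
```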
@@ -0,0 +1,116 @@
+/**
+ * CRC32 calculation utility for ZIP files
+ *
+ * - Node.js: Uses native zlib.crc32 (C++ implementation, ~100x faster)
+ * - Browser: Uses lookup table optimization
+ *
+ * The polynomial used is the standard CRC-32 IEEE 802.3:
+ * x^32 + x^26 + x^23 + x^22 + x^16 + x^12 + x^11 + x^10 + x^8 + x^7 + x^5 + x^4 + x^2 + x + 1
+ * Represented as 0xEDB88320 in reversed (LSB-first) form
+ */
+// Detect Node.js environment
+const isNode = typeof process !== "undefined" && process.versions?.node;
+// Lazy-loaded zlib module for Node.js
+let _zlib = null;
+let _zlibLoading = null;
+// Auto-initialize zlib in Node.js environment
+if (isNode) {
+    _zlibLoading = import("zlib")
+        .then(module => {
+        _zlib = module.default ?? module;
+        return _zlib;
+    })
+        .catch(() => {
+        _zlib = null;
+        return null;
+    });
+}
+/**
+ * Pre-computed CRC32 lookup table (256 entries)
+ * Generated using the standard polynomial 0xEDB88320
+ * Used as fallback when native zlib is not available
+ */
+const CRC32_TABLE = /* @__PURE__ */ (() => {
+    const table = new Uint32Array(256);
+    for (let i = 0; i < 256; i++) {
+        let crc = i;
+        for (let j = 0; j < 8; j++) {
+            crc = crc & 1 ? 0xedb88320 ^ (crc >>> 1) : crc >>> 1;
+        }
+        table[i] = crc;
+    }
+    return table;
+})();
+/**
+ * JavaScript fallback CRC32 implementation using lookup table
+ */
+function crc32JS(data) {
+    let crc = 0xffffffff;
+    for (let i = 0; i < data.length; i++) {
+        crc = CRC32_TABLE[(crc ^ data[i]) & 0xff] ^ (crc >>> 8);
+    }
+    return (crc ^ 0xffffffff) >>> 0;
+}
+/**
+ * Calculate CRC32 checksum for the given data
+ * Uses native zlib.crc32 in Node.js for ~100x better performance
+ *
+ * @param data - Input data as Uint8Array or Buffer
+ * @returns CRC32 checksum as unsigned 32-bit integer
+ *
+ * @example
+ * ```ts
+ * const data = new TextEncoder().encode("Hello, World!");
+ * const checksum = crc32(data);
+ * console.log(checksum.toString(16)); // "ec4ac3d0"
+ * ```
+ */
+export function crc32(data) {
+    // Use native zlib.crc32 if available (Node.js)
+    if (_zlib && typeof _zlib.crc32 === "function") {
+        return _zlib.crc32(data) >>> 0;
+    }
+    // Fallback to JS implementation
+    return crc32JS(data);
+}
+/**
+ * Ensure zlib is loaded (for use before calling crc32)
+ */
+export async function ensureCrc32() {
+    if (_zlibLoading) {
+        await _zlibLoading;
+    }
+}
+/**
+ * Calculate CRC32 incrementally (useful for streaming)
+ * Call with initial crc of 0xffffffff, then finalize with crc32Finalize
+ * Note: This always uses JS implementation for consistency in streaming
+ *
+ * @param crc - Current CRC value (start with 0xffffffff)
+ * @param data - Input data chunk
+ * @returns Updated CRC value (not finalized)
+ *
+ * @example
+ * ```ts
+ * let crc = 0xffffffff;
+ * crc = crc32Update(crc, chunk1);
+ * crc = crc32Update(crc, chunk2);
+ * const checksum = crc32Finalize(crc);
+ * ```
+ */
+export function crc32Update(crc, data) {
+    for (let i = 0; i < data.length; i++) {
+        crc = CRC32_TABLE[(crc ^ data[i]) & 0xff] ^ (crc >>> 8);
+    }
+    return crc;
+}
+/**
+ * Finalize CRC32 calculation
+ * XOR with 0xffffffff and convert to unsigned 32-bit
+ *
+ * @param crc - CRC value from crc32Update
+ * @returns Final CRC32 checksum
+ */
+export function crc32Finalize(crc) {
+    return (crc ^ 0xffffffff) >>> 0;
+}
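A quick consistency sketch for the one-shot and streaming forms above (import path illustrative): whether the native `zlib.crc32` fast path or the table fallback is used, the chunked update/finalize pair should produce the same checksum the file's own JSDoc quotes for "Hello, World!":

```js
// Illustrative relative import; crc32, crc32Update, crc32Finalize, ensureCrc32 are defined above.
import { crc32, crc32Update, crc32Finalize, ensureCrc32 } from "./utils/zip/crc32.js";

await ensureCrc32(); // opt in to the native zlib.crc32 fast path when available

const chunk1 = new TextEncoder().encode("Hello, ");
const chunk2 = new TextEncoder().encode("World!");
const whole = new TextEncoder().encode("Hello, World!");

// Streaming: start at 0xffffffff, feed chunks, then finalize
let crc = 0xffffffff;
crc = crc32Update(crc, chunk1);
crc = crc32Update(crc, chunk2);

console.log(crc32Finalize(crc).toString(16)); // "ec4ac3d0"
console.log(crc32(whole).toString(16));       // same value, one-shot
```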