@cj-tech-master/excelts 1.4.3 → 1.4.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -3
- package/README_zh.md +3 -3
- package/dist/browser/excelts.iife.js +8037 -2686
- package/dist/browser/excelts.iife.js.map +1 -1
- package/dist/browser/excelts.iife.min.js +86 -23
- package/dist/cjs/stream/xlsx/workbook-writer.js +3 -2
- package/dist/cjs/utils/unzip/extract.js +166 -0
- package/dist/cjs/utils/unzip/index.js +7 -1
- package/dist/cjs/utils/xml-stream.js +25 -3
- package/dist/cjs/utils/zip/compress.js +261 -0
- package/dist/cjs/utils/zip/crc32.js +154 -0
- package/dist/cjs/utils/zip/index.js +70 -0
- package/dist/cjs/utils/zip/zip-builder.js +378 -0
- package/dist/cjs/utils/zip-stream.js +30 -34
- package/dist/cjs/xlsx/xform/book/defined-name-xform.js +36 -2
- package/dist/cjs/xlsx/xform/list-xform.js +6 -0
- package/dist/cjs/xlsx/xform/sheet/cell-xform.js +6 -1
- package/dist/cjs/xlsx/xform/sheet/row-xform.js +24 -2
- package/dist/cjs/xlsx/xform/table/filter-column-xform.js +4 -0
- package/dist/esm/stream/xlsx/workbook-writer.js +3 -2
- package/dist/esm/utils/unzip/extract.js +160 -0
- package/dist/esm/utils/unzip/index.js +2 -0
- package/dist/esm/utils/xml-stream.js +25 -3
- package/dist/esm/utils/zip/compress.js +220 -0
- package/dist/esm/utils/zip/crc32.js +116 -0
- package/dist/esm/utils/zip/index.js +55 -0
- package/dist/esm/utils/zip/zip-builder.js +372 -0
- package/dist/esm/utils/zip-stream.js +30 -34
- package/dist/esm/xlsx/xform/book/defined-name-xform.js +36 -2
- package/dist/esm/xlsx/xform/list-xform.js +6 -0
- package/dist/esm/xlsx/xform/sheet/cell-xform.js +6 -1
- package/dist/esm/xlsx/xform/sheet/row-xform.js +24 -2
- package/dist/esm/xlsx/xform/table/filter-column-xform.js +4 -0
- package/dist/types/utils/unzip/extract.d.ts +92 -0
- package/dist/types/utils/unzip/index.d.ts +1 -0
- package/dist/types/utils/xml-stream.d.ts +2 -0
- package/dist/types/utils/zip/compress.d.ts +83 -0
- package/dist/types/utils/zip/crc32.d.ts +55 -0
- package/dist/types/utils/zip/index.d.ts +52 -0
- package/dist/types/utils/zip/zip-builder.d.ts +110 -0
- package/dist/types/utils/zip-stream.d.ts +6 -12
- package/dist/types/xlsx/xform/list-xform.d.ts +1 -0
- package/dist/types/xlsx/xform/sheet/row-xform.d.ts +2 -0
- package/package.json +1 -1

package/dist/cjs/stream/xlsx/workbook-writer.js

@@ -37,8 +37,9 @@ class WorkbookWriter {
         this.views = [];
         this.zipOptions = options.zip;
         // Extract compression level from zip options (supports both zlib.level and compressionOptions.level)
-        // Default compression level is
-
+        // Default compression level is 1 (fast compression with good ratio)
+        // Level 1 is ~2x faster than level 6 with only ~7% larger files
+        const level = options.zip?.zlib?.level ?? options.zip?.compressionOptions?.level ?? 1;
         this.compressionLevel = Math.max(0, Math.min(9, level));
         this.media = [];
         this.commentRefs = [];
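
The new default level can still be overridden through either option shape the constructor reads (`zip.zlib.level` or `zip.compressionOptions.level`). A minimal usage sketch; only the zip option shapes are taken from the diff above, while the import path and constructor form are assumptions based on exceljs-style streaming writers:

```ts
// Hypothetical usage sketch: raising the compression level back to 6.
import { stream } from "@cj-tech-master/excelts";

const workbook = new stream.xlsx.WorkbookWriter({
  filename: "out.xlsx",
  // Either shape works; values outside 0-9 are clamped, and the default is now 1.
  zip: { zlib: { level: 6 } },
  // zip: { compressionOptions: { level: 6 } },
});
```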

package/dist/cjs/utils/unzip/extract.js (new file)

@@ -0,0 +1,166 @@
+"use strict";
+/**
+ * Simple ZIP extraction utilities
+ * Provides easy-to-use Promise-based API for extracting ZIP files
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.extractAll = extractAll;
+exports.extractFile = extractFile;
+exports.listFiles = listFiles;
+exports.forEachEntry = forEachEntry;
+const stream_1 = require("stream");
+const parse_js_1 = require("./parse");
+/**
+ * Extract all files from a ZIP buffer
+ *
+ * @param zipData - ZIP file data as Buffer or Uint8Array
+ * @returns Map of file paths to their content
+ *
+ * @example
+ * ```ts
+ * import { extractAll } from "./utils/unzip/extract.js";
+ *
+ * const zipData = fs.readFileSync("archive.zip");
+ * const files = await extractAll(zipData);
+ *
+ * for (const [path, file] of files) {
+ *   console.log(`${path}: ${file.data.length} bytes`);
+ * }
+ * ```
+ */
+async function extractAll(zipData) {
+    const files = new Map();
+    const buffer = Buffer.isBuffer(zipData) ? zipData : Buffer.from(zipData);
+    const parse = (0, parse_js_1.createParse)({ forceStream: true });
+    const stream = stream_1.Readable.from([buffer]);
+    stream.pipe(parse);
+    for await (const entry of parse) {
+        const zipEntry = entry;
+        const isDirectory = zipEntry.type === "Directory";
+        if (isDirectory) {
+            files.set(zipEntry.path, {
+                path: zipEntry.path,
+                data: Buffer.alloc(0),
+                isDirectory: true,
+                size: 0
+            });
+            zipEntry.autodrain();
+        }
+        else {
+            const data = await zipEntry.buffer();
+            files.set(zipEntry.path, {
+                path: zipEntry.path,
+                data,
+                isDirectory: false,
+                size: data.length
+            });
+        }
+    }
+    return files;
+}
+/**
+ * Extract a single file from a ZIP buffer
+ *
+ * @param zipData - ZIP file data as Buffer or Uint8Array
+ * @param filePath - Path of the file to extract
+ * @returns File content as Buffer, or null if not found
+ *
+ * @example
+ * ```ts
+ * import { extractFile } from "./utils/unzip/extract.js";
+ *
+ * const zipData = fs.readFileSync("archive.zip");
+ * const content = await extractFile(zipData, "readme.txt");
+ * if (content) {
+ *   console.log(content.toString("utf-8"));
+ * }
+ * ```
+ */
+async function extractFile(zipData, filePath) {
+    const buffer = Buffer.isBuffer(zipData) ? zipData : Buffer.from(zipData);
+    const parse = (0, parse_js_1.createParse)({ forceStream: true });
+    const stream = stream_1.Readable.from([buffer]);
+    stream.pipe(parse);
+    for await (const entry of parse) {
+        const zipEntry = entry;
+        if (zipEntry.path === filePath) {
+            if (zipEntry.type === "Directory") {
+                return Buffer.alloc(0);
+            }
+            return zipEntry.buffer();
+        }
+        zipEntry.autodrain();
+    }
+    return null;
+}
+/**
+ * List all file paths in a ZIP buffer (without extracting content)
+ *
+ * @param zipData - ZIP file data as Buffer or Uint8Array
+ * @returns Array of file paths
+ *
+ * @example
+ * ```ts
+ * import { listFiles } from "./utils/unzip/extract.js";
+ *
+ * const zipData = fs.readFileSync("archive.zip");
+ * const paths = await listFiles(zipData);
+ * console.log(paths); // ["file1.txt", "folder/file2.txt", ...]
+ * ```
+ */
+async function listFiles(zipData) {
+    const paths = [];
+    const buffer = Buffer.isBuffer(zipData) ? zipData : Buffer.from(zipData);
+    const parse = (0, parse_js_1.createParse)({ forceStream: true });
+    const stream = stream_1.Readable.from([buffer]);
+    stream.pipe(parse);
+    for await (const entry of parse) {
+        const zipEntry = entry;
+        paths.push(zipEntry.path);
+        zipEntry.autodrain();
+    }
+    return paths;
+}
+/**
+ * Iterate over ZIP entries with a callback (memory efficient for large ZIPs)
+ *
+ * @param zipData - ZIP file data as Buffer or Uint8Array
+ * @param callback - Async callback for each entry, return false to stop iteration
+ *
+ * @example
+ * ```ts
+ * import { forEachEntry } from "./utils/unzip/extract.js";
+ *
+ * await forEachEntry(zipData, async (path, getData) => {
+ *   if (path.endsWith(".xml")) {
+ *     const content = await getData();
+ *     console.log(content.toString("utf-8"));
+ *   }
+ *   return true; // continue iteration
+ * });
+ * ```
+ */
+async function forEachEntry(zipData, callback) {
+    const buffer = Buffer.isBuffer(zipData) ? zipData : Buffer.from(zipData);
+    const parse = (0, parse_js_1.createParse)({ forceStream: true });
+    const stream = stream_1.Readable.from([buffer]);
+    stream.pipe(parse);
+    for await (const entry of parse) {
+        const zipEntry = entry;
+        let dataPromise = null;
+        const getData = () => {
+            if (!dataPromise) {
+                dataPromise = zipEntry.buffer();
+            }
+            return dataPromise;
+        };
+        const shouldContinue = await callback(zipEntry.path, getData, zipEntry);
+        // If callback didn't read data, drain it
+        if (!dataPromise) {
+            zipEntry.autodrain();
+        }
+        if (shouldContinue === false) {
+            break;
+        }
+    }
+}
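
The `forEachEntry` helper above only buffers an entry when the callback calls `getData()`, and it stops iterating when the callback returns `false`. A short early-termination sketch, following the relative-import style used in the file's own JSDoc examples (the target entry path is just an illustration):

```ts
import * as fs from "fs";
import { forEachEntry } from "./utils/unzip/extract.js";

const zipData = fs.readFileSync("archive.zip");
let sheetXml: Buffer | null = null;

await forEachEntry(zipData, async (path, getData) => {
  if (path === "xl/worksheets/sheet1.xml") {
    sheetXml = await getData(); // entry is only buffered when getData() is called
    return false;               // stop iterating once the wanted entry is read
  }
  return true; // unread entries are autodrained by forEachEntry
});
```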

package/dist/cjs/utils/unzip/index.js

@@ -5,7 +5,7 @@
  * License: MIT
  */
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.parseExtraField = exports.parseDateTime = exports.parseBuffer = exports.bufferStream = exports.NoopStream = exports.PullStream = exports.createParse = exports.Parse = void 0;
+exports.forEachEntry = exports.listFiles = exports.extractFile = exports.extractAll = exports.parseExtraField = exports.parseDateTime = exports.parseBuffer = exports.bufferStream = exports.NoopStream = exports.PullStream = exports.createParse = exports.Parse = void 0;
 var parse_js_1 = require("./parse");
 Object.defineProperty(exports, "Parse", { enumerable: true, get: function () { return parse_js_1.Parse; } });
 Object.defineProperty(exports, "createParse", { enumerable: true, get: function () { return parse_js_1.createParse; } });
@@ -21,3 +21,9 @@ var parse_datetime_js_1 = require("./parse-datetime");
 Object.defineProperty(exports, "parseDateTime", { enumerable: true, get: function () { return parse_datetime_js_1.parseDateTime; } });
 var parse_extra_field_js_1 = require("./parse-extra-field");
 Object.defineProperty(exports, "parseExtraField", { enumerable: true, get: function () { return parse_extra_field_js_1.parseExtraField; } });
+// Simple extraction API
+var extract_js_1 = require("./extract");
+Object.defineProperty(exports, "extractAll", { enumerable: true, get: function () { return extract_js_1.extractAll; } });
+Object.defineProperty(exports, "extractFile", { enumerable: true, get: function () { return extract_js_1.extractFile; } });
+Object.defineProperty(exports, "listFiles", { enumerable: true, get: function () { return extract_js_1.listFiles; } });
+Object.defineProperty(exports, "forEachEntry", { enumerable: true, get: function () { return extract_js_1.forEachEntry; } });
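
With these re-exports, code that already imports from the unzip index can reach the new helpers without knowing about the extract module. A sketch, assuming the same relative-path style as above:

```ts
import * as fs from "fs";
import { extractFile, listFiles } from "./utils/unzip/index.js";

const zipData = fs.readFileSync("archive.zip");
console.log(await listFiles(zipData));                             // all entry paths
const workbookXml = await extractFile(zipData, "xl/workbook.xml"); // Buffer, or null if absent
```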

package/dist/cjs/utils/xml-stream.js

@@ -7,6 +7,8 @@ const OPEN_ANGLE = "<";
 const CLOSE_ANGLE = ">";
 const OPEN_ANGLE_SLASH = "</";
 const CLOSE_SLASH_ANGLE = "/>";
+// Chunk size for periodic consolidation (reduces final join overhead)
+const CHUNK_SIZE = 10000;
 function pushAttribute(xml, name, value) {
     xml.push(` ${name}="${(0, utils_js_1.xmlEncode)(value.toString())}"`);
 }
@@ -24,15 +26,23 @@ function pushAttributes(xml, attributes) {
 class XmlStream {
     constructor() {
         this._xml = [];
+        this._chunks = [];
         this._stack = [];
         this._rollbacks = [];
     }
+    _consolidate() {
+        // Periodically join small strings into larger chunks to reduce final join overhead
+        if (this._xml.length >= CHUNK_SIZE) {
+            this._chunks.push(this._xml.join(""));
+            this._xml = [];
+        }
+    }
     get tos() {
         return this._stack.length ? this._stack[this._stack.length - 1] : undefined;
     }
     get cursor() {
         // handy way to track whether anything has been added
-        return this._xml.length;
+        return this._chunks.length * CHUNK_SIZE + this._xml.length;
     }
     openXml(docAttributes) {
         const xml = this._xml;
@@ -99,6 +109,7 @@ class XmlStream {
         }
         this.open = false;
         this.leaf = false;
+        this._consolidate();
     }
     leafNode(name, attributes, text) {
         this.openNode(name, attributes);
@@ -118,7 +129,8 @@ class XmlStream {
             xml: this._xml.length,
             stack: this._stack.length,
             leaf: this.leaf,
-            open: this.open
+            open: this.open,
+            chunksLength: this._chunks.length
         });
         return this.cursor;
     }
@@ -133,12 +145,22 @@ class XmlStream {
         if (this._stack.length > r.stack) {
             this._stack.splice(r.stack, this._stack.length - r.stack);
         }
+        if (this._chunks.length > r.chunksLength) {
+            this._chunks.splice(r.chunksLength, this._chunks.length - r.chunksLength);
+        }
         this.leaf = r.leaf;
         this.open = r.open;
     }
     get xml() {
         this.closeAll();
-
+        // Join chunks first, then remaining xml array
+        if (this._chunks.length === 0) {
+            return this._xml.join("");
+        }
+        if (this._xml.length > 0) {
+            this._chunks.push(this._xml.join(""));
+        }
+        return this._chunks.join("");
     }
 }
 exports.XmlStream = XmlStream;
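
The change above keeps `_xml` short by folding it into `_chunks` every `CHUNK_SIZE` pushes, so the final `xml` getter joins a handful of large strings instead of one huge array, and rollback bookkeeping now also records `chunksLength`. A standalone sketch of the same consolidation pattern (not the package class, just the idea, assuming the fixed `CHUNK_SIZE` from the diff):

```ts
const CHUNK_SIZE = 10000;
const chunks: string[] = [];
let parts: string[] = [];

function push(piece: string): void {
  parts.push(piece);
  if (parts.length >= CHUNK_SIZE) {
    chunks.push(parts.join("")); // fold many small strings into one larger chunk
    parts = [];
  }
}

function build(): string {
  if (parts.length > 0) {
    chunks.push(parts.join(""));
    parts = [];
  }
  return chunks.join(""); // join a few big chunks instead of millions of fragments
}
```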

package/dist/cjs/utils/zip/compress.js (new file)

@@ -0,0 +1,261 @@
+"use strict";
+/**
+ * Native compression utilities using platform APIs
+ *
+ * - Node.js: Uses native zlib module (C++ implementation, fastest)
+ * - Browser: Uses CompressionStream API (Chrome 80+, Firefox 113+, Safari 16.4+)
+ *
+ * Both use "deflate-raw" format which is required for ZIP files
+ * (raw DEFLATE without zlib header/trailer)
+ */
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ensureZlib = ensureZlib;
+exports.hasNativeZlib = hasNativeZlib;
+exports.hasCompressionStream = hasCompressionStream;
+exports.compress = compress;
+exports.compressSync = compressSync;
+exports.decompress = decompress;
+exports.decompressSync = decompressSync;
+// Detect environment
+const isNode = typeof process !== "undefined" && process.versions?.node;
+// Lazy-loaded zlib module for Node.js
+let _zlib = null;
+let _zlibLoading = null;
+// Auto-initialize zlib in Node.js environment
+if (isNode) {
+    _zlibLoading = Promise.resolve().then(() => __importStar(require("zlib"))).then(module => {
+        _zlib = module.default ?? module;
+        return _zlib;
+    })
+        .catch(() => {
+        _zlib = null;
+        return null;
+    });
+}
+/**
+ * Get zlib module (Node.js only)
+ * Returns null if not yet loaded or not in Node.js
+ */
+function getZlib() {
+    return _zlib;
+}
+/**
+ * Ensure zlib is loaded (Node.js only)
+ * Call this before using sync methods if you need to guarantee availability
+ */
+async function ensureZlib() {
+    if (_zlibLoading) {
+        return _zlibLoading;
+    }
+    return _zlib;
+}
+/**
+ * Check if native zlib is available (Node.js)
+ */
+function hasNativeZlib() {
+    const zlib = getZlib();
+    return zlib !== null && typeof zlib.deflateRawSync === "function";
+}
+/**
+ * Check if CompressionStream is available (Browser/Node.js 17+)
+ */
+function hasCompressionStream() {
+    return typeof CompressionStream !== "undefined";
+}
+/**
+ * Compress data using the best available native method
+ *
+ * Priority:
+ * 1. Node.js zlib (if available) - fastest, supports compression levels
+ * 2. CompressionStream (browser/Node.js 17+) - no level support
+ * 3. Return uncompressed data (fallback)
+ *
+ * @param data - Data to compress
+ * @param options - Compression options
+ * @returns Compressed data
+ *
+ * @example
+ * ```ts
+ * const data = new TextEncoder().encode("Hello, World!");
+ * const compressed = await compress(data, { level: 6 });
+ * ```
+ */
+async function compress(data, options = {}) {
+    const level = options.level ?? 6;
+    // Level 0 means no compression
+    if (level === 0) {
+        return data;
+    }
+    // Ensure zlib is loaded first
+    const zlib = await ensureZlib();
+    // Try Node.js zlib first (fastest, supports levels)
+    if (zlib && typeof zlib.deflateRawSync === "function") {
+        const result = zlib.deflateRawSync(Buffer.from(data), { level });
+        return new Uint8Array(result.buffer, result.byteOffset, result.byteLength);
+    }
+    // Fall back to CompressionStream (browser/Node.js 17+)
+    if (typeof CompressionStream !== "undefined") {
+        return compressWithCompressionStream(data);
+    }
+    // No compression available - return original data
+    console.warn("No native compression available, returning uncompressed data");
+    return data;
+}
+/**
+ * Compress data synchronously using Node.js zlib
+ * Only available in Node.js environment
+ *
+ * @param data - Data to compress
+ * @param options - Compression options
+ * @returns Compressed data
+ * @throws Error if not in Node.js environment
+ */
+function compressSync(data, options = {}) {
+    const level = options.level ?? 6;
+    if (level === 0) {
+        return data;
+    }
+    const zlib = getZlib();
+    if (!zlib || typeof zlib.deflateRawSync !== "function") {
+        throw new Error("Synchronous compression is only available in Node.js environment");
+    }
+    const result = zlib.deflateRawSync(Buffer.from(data), { level });
+    return new Uint8Array(result.buffer, result.byteOffset, result.byteLength);
+}
+/**
+ * Compress using browser's native CompressionStream
+ * Uses "deflate-raw" format (required for ZIP files)
+ *
+ * Note: CompressionStream does not support compression level configuration
+ *
+ * @param data - Data to compress
+ * @returns Compressed data
+ */
+async function compressWithCompressionStream(data) {
+    const cs = new CompressionStream("deflate-raw");
+    const writer = cs.writable.getWriter();
+    const reader = cs.readable.getReader();
+    // Write data and close
+    writer.write(new Uint8Array(data.buffer, data.byteOffset, data.byteLength));
+    writer.close();
+    // Read all compressed chunks
+    const chunks = [];
+    let totalLength = 0;
+    while (true) {
+        const { done, value } = await reader.read();
+        if (done) {
+            break;
+        }
+        chunks.push(value);
+        totalLength += value.length;
+    }
+    // Combine chunks into single array
+    const result = new Uint8Array(totalLength);
+    let offset = 0;
+    for (const chunk of chunks) {
+        result.set(chunk, offset);
+        offset += chunk.length;
+    }
+    return result;
+}
+/**
+ * Decompress data using the best available native method
+ *
+ * @param data - Compressed data (deflate-raw format)
+ * @returns Decompressed data
+ */
+async function decompress(data) {
+    // Ensure zlib is loaded first
+    const zlib = await ensureZlib();
+    // Try Node.js zlib first
+    if (zlib && typeof zlib.inflateRawSync === "function") {
+        const result = zlib.inflateRawSync(Buffer.from(data));
+        return new Uint8Array(result.buffer, result.byteOffset, result.byteLength);
+    }
+    // Fall back to DecompressionStream
+    if (typeof DecompressionStream !== "undefined") {
+        return decompressWithDecompressionStream(data);
+    }
+    throw new Error("No native decompression available");
+}
+/**
+ * Decompress data synchronously using Node.js zlib
+ *
+ * @param data - Compressed data (deflate-raw format)
+ * @returns Decompressed data
+ * @throws Error if not in Node.js environment
+ */
+function decompressSync(data) {
+    const zlib = getZlib();
+    if (!zlib || typeof zlib.inflateRawSync !== "function") {
+        throw new Error("Synchronous decompression is only available in Node.js environment");
+    }
+    const result = zlib.inflateRawSync(Buffer.from(data));
+    return new Uint8Array(result.buffer, result.byteOffset, result.byteLength);
+}
+/**
+ * Decompress using browser's native DecompressionStream
+ *
+ * @param data - Compressed data (deflate-raw format)
+ * @returns Decompressed data
+ */
+async function decompressWithDecompressionStream(data) {
+    const ds = new DecompressionStream("deflate-raw");
+    const writer = ds.writable.getWriter();
+    const reader = ds.readable.getReader();
+    // Write data and close
+    writer.write(new Uint8Array(data.buffer, data.byteOffset, data.byteLength));
+    writer.close();
+    // Read all decompressed chunks
+    const chunks = [];
+    let totalLength = 0;
+    while (true) {
+        const { done, value } = await reader.read();
+        if (done) {
+            break;
+        }
+        chunks.push(value);
+        totalLength += value.length;
+    }
+    // Combine chunks into single array
+    const result = new Uint8Array(totalLength);
+    let offset = 0;
+    for (const chunk of chunks) {
+        result.set(chunk, offset);
+        offset += chunk.length;
+    }
+    return result;
+}
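
A round-trip sketch of the API above: `compress` honors `level` only when Node's zlib is available (the `CompressionStream` path ignores it), and both paths emit raw DEFLATE that `decompress` accepts. Import path follows the relative style used elsewhere in this diff:

```ts
import { compress, decompress } from "./utils/zip/compress.js";

const input = new TextEncoder().encode("Hello, World!");
const deflated = await compress(input, { level: 6 }); // deflate-raw bytes
const restored = await decompress(deflated);          // back to the original bytes
console.log(new TextDecoder().decode(restored));      // "Hello, World!"
```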

package/dist/cjs/utils/zip/crc32.js (new file)

@@ -0,0 +1,154 @@
+"use strict";
+/**
+ * CRC32 calculation utility for ZIP files
+ *
+ * - Node.js: Uses native zlib.crc32 (C++ implementation, ~100x faster)
+ * - Browser: Uses lookup table optimization
+ *
+ * The polynomial used is the standard CRC-32 IEEE 802.3:
+ * x^32 + x^26 + x^23 + x^22 + x^16 + x^12 + x^11 + x^10 + x^8 + x^7 + x^5 + x^4 + x^2 + x + 1
+ * Represented as 0xEDB88320 in reversed (LSB-first) form
+ */
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.crc32 = crc32;
+exports.ensureCrc32 = ensureCrc32;
+exports.crc32Update = crc32Update;
+exports.crc32Finalize = crc32Finalize;
+// Detect Node.js environment
+const isNode = typeof process !== "undefined" && process.versions?.node;
+// Lazy-loaded zlib module for Node.js
+let _zlib = null;
+let _zlibLoading = null;
+// Auto-initialize zlib in Node.js environment
+if (isNode) {
+    _zlibLoading = Promise.resolve().then(() => __importStar(require("zlib"))).then(module => {
+        _zlib = module.default ?? module;
+        return _zlib;
+    })
+        .catch(() => {
+        _zlib = null;
+        return null;
+    });
+}
+/**
+ * Pre-computed CRC32 lookup table (256 entries)
+ * Generated using the standard polynomial 0xEDB88320
+ * Used as fallback when native zlib is not available
+ */
+const CRC32_TABLE = /* @__PURE__ */ (() => {
+    const table = new Uint32Array(256);
+    for (let i = 0; i < 256; i++) {
+        let crc = i;
+        for (let j = 0; j < 8; j++) {
+            crc = crc & 1 ? 0xedb88320 ^ (crc >>> 1) : crc >>> 1;
+        }
+        table[i] = crc;
+    }
+    return table;
+})();
+/**
+ * JavaScript fallback CRC32 implementation using lookup table
+ */
+function crc32JS(data) {
+    let crc = 0xffffffff;
+    for (let i = 0; i < data.length; i++) {
+        crc = CRC32_TABLE[(crc ^ data[i]) & 0xff] ^ (crc >>> 8);
+    }
+    return (crc ^ 0xffffffff) >>> 0;
+}
+/**
+ * Calculate CRC32 checksum for the given data
+ * Uses native zlib.crc32 in Node.js for ~100x better performance
+ *
+ * @param data - Input data as Uint8Array or Buffer
+ * @returns CRC32 checksum as unsigned 32-bit integer
+ *
+ * @example
+ * ```ts
+ * const data = new TextEncoder().encode("Hello, World!");
+ * const checksum = crc32(data);
+ * console.log(checksum.toString(16)); // "ec4ac3d0"
+ * ```
+ */
+function crc32(data) {
+    // Use native zlib.crc32 if available (Node.js)
+    if (_zlib && typeof _zlib.crc32 === "function") {
+        return _zlib.crc32(data) >>> 0;
+    }
+    // Fallback to JS implementation
+    return crc32JS(data);
+}
+/**
+ * Ensure zlib is loaded (for use before calling crc32)
+ */
+async function ensureCrc32() {
+    if (_zlibLoading) {
+        await _zlibLoading;
+    }
+}
+/**
+ * Calculate CRC32 incrementally (useful for streaming)
+ * Call with initial crc of 0xffffffff, then finalize with crc32Finalize
+ * Note: This always uses JS implementation for consistency in streaming
+ *
+ * @param crc - Current CRC value (start with 0xffffffff)
+ * @param data - Input data chunk
+ * @returns Updated CRC value (not finalized)
+ *
+ * @example
+ * ```ts
+ * let crc = 0xffffffff;
+ * crc = crc32Update(crc, chunk1);
+ * crc = crc32Update(crc, chunk2);
+ * const checksum = crc32Finalize(crc);
+ * ```
+ */
+function crc32Update(crc, data) {
+    for (let i = 0; i < data.length; i++) {
+        crc = CRC32_TABLE[(crc ^ data[i]) & 0xff] ^ (crc >>> 8);
+    }
+    return crc;
+}
+/**
+ * Finalize CRC32 calculation
+ * XOR with 0xffffffff and convert to unsigned 32-bit
+ *
+ * @param crc - CRC value from crc32Update
+ * @returns Final CRC32 checksum
+ */
+function crc32Finalize(crc) {
+    return (crc ^ 0xffffffff) >>> 0;
+}
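
Since `crc32Update`/`crc32Finalize` use the same IEEE 802.3 polynomial as the one-shot `crc32`, feeding the data in chunks should yield an identical checksum. A small sketch, again using the relative import style from this diff:

```ts
import { crc32, crc32Update, crc32Finalize, ensureCrc32 } from "./utils/zip/crc32.js";

await ensureCrc32(); // lets crc32() pick up Node's native zlib.crc32 when available

const data = new TextEncoder().encode("Hello, World!");
let crc = 0xffffffff;
crc = crc32Update(crc, data.subarray(0, 5));
crc = crc32Update(crc, data.subarray(5));

console.log(crc32Finalize(crc) === crc32(data)); // true
console.log(crc32(data).toString(16));           // "ec4ac3d0" (matches the JSDoc example above)
```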