@cj-tech-master/excelts 1.4.5 → 1.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/browser/excelts.iife.js +454 -159
- package/dist/browser/excelts.iife.js.map +1 -1
- package/dist/browser/excelts.iife.min.js +28 -28
- package/dist/cjs/doc/anchor.js +25 -11
- package/dist/cjs/doc/cell.js +75 -43
- package/dist/cjs/doc/column.js +74 -22
- package/dist/cjs/doc/defined-names.js +53 -7
- package/dist/cjs/doc/image.js +11 -8
- package/dist/cjs/doc/range.js +64 -28
- package/dist/cjs/doc/row.js +72 -31
- package/dist/cjs/doc/table.js +3 -5
- package/dist/cjs/doc/workbook.js +30 -6
- package/dist/cjs/doc/worksheet.js +165 -41
- package/dist/cjs/utils/sheet-utils.js +3 -1
- package/dist/cjs/utils/unzip/extract.js +30 -82
- package/dist/cjs/utils/unzip/index.js +18 -2
- package/dist/cjs/utils/unzip/zip-parser.js +458 -0
- package/dist/esm/doc/anchor.js +25 -11
- package/dist/esm/doc/cell.js +75 -43
- package/dist/esm/doc/column.js +74 -22
- package/dist/esm/doc/defined-names.js +53 -7
- package/dist/esm/doc/image.js +11 -8
- package/dist/esm/doc/range.js +64 -28
- package/dist/esm/doc/row.js +72 -31
- package/dist/esm/doc/table.js +3 -5
- package/dist/esm/doc/workbook.js +30 -6
- package/dist/esm/doc/worksheet.js +165 -41
- package/dist/esm/utils/sheet-utils.js +3 -1
- package/dist/esm/utils/unzip/extract.js +28 -82
- package/dist/esm/utils/unzip/index.js +17 -2
- package/dist/esm/utils/unzip/zip-parser.js +451 -0
- package/dist/types/doc/anchor.d.ts +14 -7
- package/dist/types/doc/cell.d.ts +78 -37
- package/dist/types/doc/column.d.ts +72 -36
- package/dist/types/doc/defined-names.d.ts +11 -8
- package/dist/types/doc/image.d.ts +29 -12
- package/dist/types/doc/pivot-table.d.ts +1 -1
- package/dist/types/doc/range.d.ts +15 -4
- package/dist/types/doc/row.d.ts +78 -40
- package/dist/types/doc/table.d.ts +21 -36
- package/dist/types/doc/workbook.d.ts +54 -34
- package/dist/types/doc/worksheet.d.ts +255 -83
- package/dist/types/stream/xlsx/worksheet-reader.d.ts +3 -5
- package/dist/types/types.d.ts +86 -26
- package/dist/types/utils/col-cache.d.ts +11 -8
- package/dist/types/utils/unzip/extract.d.ts +16 -14
- package/dist/types/utils/unzip/index.d.ts +15 -1
- package/dist/types/utils/unzip/zip-parser.d.ts +92 -0
- package/package.json +1 -1
|
@@ -1,11 +1,26 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
/**
|
|
3
3
|
* Unzip utilities for parsing ZIP archives
|
|
4
|
+
*
|
|
5
|
+
* Two APIs are provided:
|
|
6
|
+
*
|
|
7
|
+
* 1. **Stream-based API** (Node.js only):
|
|
8
|
+
* - `Parse`, `createParse` - Parse ZIP files as a stream
|
|
9
|
+
* - Best for large files where you don't want to load entire file into memory
|
|
10
|
+
* - Requires Node.js `stream` module
|
|
11
|
+
*
|
|
12
|
+
* 2. **Buffer-based API** (Browser + Node.js):
|
|
13
|
+
* - `extractAll`, `extractFile`, `listFiles`, `forEachEntry`, `ZipParser`
|
|
14
|
+
* - Works in both Node.js and browser environments
|
|
15
|
+
* - Uses native `DecompressionStream` in browser, `zlib` in Node.js
|
|
16
|
+
* - Best for files already loaded into memory (ArrayBuffer, Uint8Array)
|
|
17
|
+
*
|
|
4
18
|
* Original source: https://github.com/ZJONSSON/node-unzipper
|
|
5
19
|
* License: MIT
|
|
6
20
|
*/
|
|
7
21
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
8
|
-
exports.forEachEntry = exports.listFiles = exports.extractFile = exports.extractAll = exports.parseExtraField = exports.parseDateTime = exports.parseBuffer = exports.bufferStream = exports.NoopStream = exports.PullStream = exports.createParse = exports.Parse = void 0;
|
|
22
|
+
exports.ZipParser = exports.forEachEntry = exports.listFiles = exports.extractFile = exports.extractAll = exports.parseExtraField = exports.parseDateTime = exports.parseBuffer = exports.bufferStream = exports.NoopStream = exports.PullStream = exports.createParse = exports.Parse = void 0;
|
|
23
|
+
// Stream-based API (Node.js only - requires stream module)
|
|
9
24
|
var parse_js_1 = require("./parse");
|
|
10
25
|
Object.defineProperty(exports, "Parse", { enumerable: true, get: function () { return parse_js_1.Parse; } });
|
|
11
26
|
Object.defineProperty(exports, "createParse", { enumerable: true, get: function () { return parse_js_1.createParse; } });
|
|
@@ -21,9 +36,10 @@ var parse_datetime_js_1 = require("./parse-datetime");
|
|
|
21
36
|
Object.defineProperty(exports, "parseDateTime", { enumerable: true, get: function () { return parse_datetime_js_1.parseDateTime; } });
|
|
22
37
|
var parse_extra_field_js_1 = require("./parse-extra-field");
|
|
23
38
|
Object.defineProperty(exports, "parseExtraField", { enumerable: true, get: function () { return parse_extra_field_js_1.parseExtraField; } });
|
|
24
|
-
//
|
|
39
|
+
// Buffer-based API (Browser + Node.js - cross-platform)
|
|
25
40
|
var extract_js_1 = require("./extract");
|
|
26
41
|
Object.defineProperty(exports, "extractAll", { enumerable: true, get: function () { return extract_js_1.extractAll; } });
|
|
27
42
|
Object.defineProperty(exports, "extractFile", { enumerable: true, get: function () { return extract_js_1.extractFile; } });
|
|
28
43
|
Object.defineProperty(exports, "listFiles", { enumerable: true, get: function () { return extract_js_1.listFiles; } });
|
|
29
44
|
Object.defineProperty(exports, "forEachEntry", { enumerable: true, get: function () { return extract_js_1.forEachEntry; } });
|
|
45
|
+
Object.defineProperty(exports, "ZipParser", { enumerable: true, get: function () { return extract_js_1.ZipParser; } });
|
|
@@ -0,0 +1,458 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
 * Pure Uint8Array-based ZIP parser
 * Works in both Node.js and browser environments
 * No dependency on Node.js stream module
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.ZipParser = void 0;
exports.parseZipEntries = parseZipEntries;
exports.extractEntryData = extractEntryData;
exports.extractEntryDataSync = extractEntryDataSync;
// Inflate helpers shared with the zip writer.
// NOTE(review): presumed to use DecompressionStream in browsers and zlib in
// Node per the package docs — confirm against ../zip/compress.
const compress_js_1 = require("../zip/compress");
// ZIP file signatures — the little-endian "PK.." record magics defined by
// the PKWARE APPNOTE specification.
const LOCAL_FILE_HEADER_SIG = 0x04034b50;
const CENTRAL_DIR_HEADER_SIG = 0x02014b50;
const END_OF_CENTRAL_DIR_SIG = 0x06054b50;
const ZIP64_END_OF_CENTRAL_DIR_SIG = 0x06064b50;
const ZIP64_END_OF_CENTRAL_DIR_LOCATOR_SIG = 0x07064b50;
// Compression methods — the only two this parser handles; extraction throws
// for anything else.
const COMPRESSION_STORED = 0;
const COMPRESSION_DEFLATE = 8;
|
|
22
|
+
/**
 * Convert an MS-DOS date/time pair (as stored in ZIP entries) to a Date.
 * Spec: https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-dosdatetimetofiletime
 *
 * @param {number} date - 16-bit DOS date (bits: 9-15 year-1980, 5-8 month, 0-4 day).
 * @param {number} time - 16-bit DOS time (bits: 11-15 hours, 5-10 minutes, 0-4 seconds/2).
 * @returns {Date} The timestamp, interpreted as UTC.
 */
function parseDateTime(date, time) {
    // A falsy time field (0 or absent) yields midnight.
    const hasTime = Boolean(time);
    const utcMillis = Date.UTC(
        ((date >> 9) & 0x7f) + 1980,     // year
        ((date >> 5) & 0x0f) - 1,        // month (Date.UTC is 0-indexed)
        date & 0x1f,                     // day of month
        hasTime ? time >> 11 : 0,        // hours
        hasTime ? (time >> 5) & 0x3f : 0, // minutes
        hasTime ? (time & 0x1f) * 2 : 0   // seconds (stored with 2s resolution)
    );
    return new Date(utcMillis);
}
|
|
36
|
+
/**
 * Parse the ZIP64 extended-information record (header ID 0x0001) out of a
 * central-directory extra field.
 *
 * Per the ZIP spec, a 64-bit value is only present in the record when its
 * 32-bit counterpart in the central directory is saturated (0xffffffff), in
 * the fixed order: uncompressed size, compressed size, local header offset.
 *
 * @param {Uint8Array} extraField - Raw extra-field bytes.
 * @param {number} compressedSize - 32-bit compressed size (may be 0xffffffff).
 * @param {number} uncompressedSize - 32-bit uncompressed size (may be 0xffffffff).
 * @param {number} localHeaderOffset - 32-bit local header offset (may be 0xffffffff).
 * @returns {{compressedSize: number, uncompressedSize: number, localHeaderOffset: number}}
 *   The resolved 64-bit-capable values (unchanged when no ZIP64 record applies).
 */
function parseZip64ExtraField(extraField, compressedSize, uncompressedSize, localHeaderOffset) {
    const view = new DataView(extraField.buffer, extraField.byteOffset, extraField.byteLength);
    let offset = 0;
    while (offset + 4 <= extraField.length) {
        const signature = view.getUint16(offset, true);
        const partSize = view.getUint16(offset + 2, true);
        if (signature === 0x0001) {
            // ZIP64 extended information.
            // Clamp the field end to the actual buffer so a corrupt partSize
            // in a truncated extra field cannot make getBigUint64 throw an
            // uncaught RangeError.
            const fieldEnd = Math.min(offset + 4 + partSize, extraField.length);
            let fieldOffset = offset + 4;
            if (uncompressedSize === 0xffffffff && fieldOffset + 8 <= fieldEnd) {
                uncompressedSize = Number(view.getBigUint64(fieldOffset, true));
                fieldOffset += 8;
            }
            if (compressedSize === 0xffffffff && fieldOffset + 8 <= fieldEnd) {
                compressedSize = Number(view.getBigUint64(fieldOffset, true));
                fieldOffset += 8;
            }
            if (localHeaderOffset === 0xffffffff && fieldOffset + 8 <= fieldEnd) {
                localHeaderOffset = Number(view.getBigUint64(fieldOffset, true));
            }
            break;
        }
        offset += 4 + partSize;
    }
    return { compressedSize, uncompressedSize, localHeaderOffset };
}
|
|
65
|
+
/**
 * Little-endian binary cursor over a Uint8Array.
 *
 * Wraps a DataView and tracks a read offset; every readXxx method advances
 * the cursor by the number of bytes consumed. readBytes/slice return
 * zero-copy subarray views into the backing buffer.
 */
class BinaryReader {
    constructor(data, offset = 0) {
        this.data = data;
        this.view = new DataView(data.buffer, data.byteOffset, data.byteLength);
        this.offset = offset;
    }
    /** Current cursor position (bytes from the start of `data`). */
    get position() {
        return this.offset;
    }
    set position(value) {
        this.offset = value;
    }
    /** Bytes left between the cursor and the end of `data`. */
    get remaining() {
        return this.data.length - this.offset;
    }
    readUint8() {
        const result = this.view.getUint8(this.offset);
        this.offset += 1;
        return result;
    }
    readUint16() {
        const result = this.view.getUint16(this.offset, true);
        this.offset += 2;
        return result;
    }
    readUint32() {
        const result = this.view.getUint32(this.offset, true);
        this.offset += 4;
        return result;
    }
    readBigUint64() {
        const result = this.view.getBigUint64(this.offset, true);
        this.offset += 8;
        return result;
    }
    /** Read `length` bytes as a subarray view (no copy). */
    readBytes(length) {
        const start = this.offset;
        this.offset = start + length;
        return this.data.subarray(start, this.offset);
    }
    /** Read `length` bytes and decode as UTF-8 (default) or Latin-1. */
    readString(length, utf8 = true) {
        const raw = this.readBytes(length);
        return utf8
            ? new TextDecoder("utf-8").decode(raw)
            : String.fromCharCode(...raw);
    }
    /** Advance the cursor without reading. */
    skip(length) {
        this.offset += length;
    }
    /** Subarray view of `data` independent of the cursor. */
    slice(start, end) {
        return this.data.subarray(start, end);
    }
    /** Read a uint32 at an absolute offset without moving the cursor. */
    peekUint32(offset) {
        return this.view.getUint32(offset, true);
    }
}
|
|
126
|
+
/**
 * Find the End of Central Directory record
 * Searches backwards from the end of the file
 *
 * @param {Uint8Array} data - The whole ZIP archive.
 * @returns {number} Byte offset of the EOCD signature, or -1 if not found.
 */
function findEndOfCentralDir(data) {
    const view = new DataView(data.buffer, data.byteOffset, data.byteLength);
    // The EOCD record is at least 22 bytes; a trailing archive comment may
    // push it back by up to 65535 more, so scan at most the last 65557 bytes.
    const lowestCandidate = Math.max(0, data.length - 65557);
    let candidate = data.length - 22;
    while (candidate >= lowestCandidate) {
        if (view.getUint32(candidate, true) === END_OF_CENTRAL_DIR_SIG) {
            return candidate;
        }
        candidate -= 1;
    }
    return -1;
}
|
|
142
|
+
/**
 * Find ZIP64 End of Central Directory Locator
 *
 * @param {Uint8Array} data - The whole ZIP archive.
 * @param {number} eocdOffset - Offset of the (standard) EOCD record.
 * @returns {number} Offset of the locator, or -1 when absent.
 */
function findZip64EOCDLocator(data, eocdOffset) {
    // When present, the fixed 20-byte locator immediately precedes the EOCD.
    const locatorOffset = eocdOffset - 20;
    if (locatorOffset >= 0) {
        const view = new DataView(data.buffer, data.byteOffset, data.byteLength);
        if (view.getUint32(locatorOffset, true) === ZIP64_END_OF_CENTRAL_DIR_LOCATOR_SIG) {
            return locatorOffset;
        }
    }
    return -1;
}
|
|
157
|
+
/**
 * Parse ZIP file entries from Central Directory
 *
 * Locates the End of Central Directory (EOCD), follows the ZIP64 EOCD
 * locator when present, then walks every central-directory record.
 *
 * @param {Uint8Array} data - The whole ZIP archive in memory.
 * @param {object} [options]
 * @param {boolean} [options.decodeStrings=true] - When true, decode names and
 *   comments as UTF-8 if the entry's UTF-8 flag (general-purpose bit 11) is
 *   set; otherwise they fall back to Latin-1 via String.fromCharCode.
 * @returns {Array<object>} One descriptor per entry (path, sizes, crc32,
 *   lastModified, localHeaderOffset, flags-derived booleans, ...).
 * @throws {Error} If no EOCD record is found or a central-directory record
 *   has a bad signature.
 */
function parseZipEntries(data, options = {}) {
    const { decodeStrings = true } = options;
    const entries = [];
    // Find End of Central Directory
    const eocdOffset = findEndOfCentralDir(data);
    if (eocdOffset === -1) {
        throw new Error("Invalid ZIP file: End of Central Directory not found");
    }
    const reader = new BinaryReader(data, eocdOffset);
    // Read EOCD
    // Offset  Size  Description
    // 0       4     EOCD signature (0x06054b50)
    // 4       2     Number of this disk
    // 6       2     Disk where central directory starts
    // 8       2     Number of central directory records on this disk
    // 10      2     Total number of central directory records
    // 12      4     Size of central directory (bytes)
    // 16      4     Offset of start of central directory
    // 20      2     Comment length
    reader.skip(4); // signature
    reader.skip(2); // disk number
    reader.skip(2); // disk where central dir starts
    reader.skip(2); // entries on this disk
    let totalEntries = reader.readUint16(); // total entries
    let centralDirSize = reader.readUint32(); // read to keep the cursor aligned; not otherwise used
    let centralDirOffset = reader.readUint32();
    // Check for ZIP64
    const zip64LocatorOffset = findZip64EOCDLocator(data, eocdOffset);
    if (zip64LocatorOffset !== -1) {
        const locatorReader = new BinaryReader(data, zip64LocatorOffset);
        locatorReader.skip(4); // signature
        locatorReader.skip(4); // disk number with ZIP64 EOCD
        const zip64EOCDOffset = Number(locatorReader.readBigUint64());
        // Read ZIP64 EOCD
        const zip64Reader = new BinaryReader(data, zip64EOCDOffset);
        const zip64Sig = zip64Reader.readUint32();
        if (zip64Sig === ZIP64_END_OF_CENTRAL_DIR_SIG) {
            zip64Reader.skip(8); // size of ZIP64 EOCD
            zip64Reader.skip(2); // version made by
            zip64Reader.skip(2); // version needed
            zip64Reader.skip(4); // disk number
            zip64Reader.skip(4); // disk with central dir
            const zip64TotalEntries = Number(zip64Reader.readBigUint64());
            const zip64CentralDirSize = Number(zip64Reader.readBigUint64());
            const zip64CentralDirOffset = Number(zip64Reader.readBigUint64());
            // Use ZIP64 values if standard values are maxed out
            if (totalEntries === 0xffff) {
                totalEntries = zip64TotalEntries;
            }
            if (centralDirSize === 0xffffffff) {
                centralDirSize = zip64CentralDirSize;
            }
            if (centralDirOffset === 0xffffffff) {
                centralDirOffset = zip64CentralDirOffset;
            }
        }
    }
    // Read Central Directory entries
    const centralReader = new BinaryReader(data, centralDirOffset);
    for (let i = 0; i < totalEntries; i++) {
        const sig = centralReader.readUint32();
        if (sig !== CENTRAL_DIR_HEADER_SIG) {
            throw new Error(`Invalid Central Directory header signature at entry ${i}`);
        }
        // Central Directory File Header format:
        // Offset  Size  Description
        // 0       4     Central directory file header signature (0x02014b50)
        // 4       2     Version made by
        // 6       2     Version needed to extract
        // 8       2     General purpose bit flag
        // 10      2     Compression method
        // 12      2     File last modification time
        // 14      2     File last modification date
        // 16      4     CRC-32
        // 20      4     Compressed size
        // 24      4     Uncompressed size
        // 28      2     File name length
        // 30      2     Extra field length
        // 32      2     File comment length
        // 34      2     Disk number where file starts
        // 36      2     Internal file attributes
        // 38      4     External file attributes
        // 42      4     Relative offset of local file header
        // 46      n     File name
        // 46+n    m     Extra field
        // 46+n+m  k     File comment
        centralReader.skip(2); // version made by
        centralReader.skip(2); // version needed
        const flags = centralReader.readUint16();
        const compressionMethod = centralReader.readUint16();
        const lastModTime = centralReader.readUint16();
        const lastModDate = centralReader.readUint16();
        const crc32 = centralReader.readUint32();
        let compressedSize = centralReader.readUint32();
        let uncompressedSize = centralReader.readUint32();
        const fileNameLength = centralReader.readUint16();
        const extraFieldLength = centralReader.readUint16();
        const commentLength = centralReader.readUint16();
        centralReader.skip(2); // disk number start
        centralReader.skip(2); // internal attributes
        const externalAttributes = centralReader.readUint32();
        let localHeaderOffset = centralReader.readUint32();
        // Check for UTF-8 flag (bit 11)
        const isUtf8 = (flags & 0x800) !== 0;
        const useUtf8 = decodeStrings && isUtf8;
        const fileName = centralReader.readString(fileNameLength, useUtf8);
        const extraField = centralReader.readBytes(extraFieldLength);
        const comment = centralReader.readString(commentLength, useUtf8);
        // Parse extra field for ZIP64 values (replaces any 0xffffffff sentinels)
        if (extraFieldLength > 0) {
            const parsed = parseZip64ExtraField(extraField, compressedSize, uncompressedSize, localHeaderOffset);
            compressedSize = parsed.compressedSize;
            uncompressedSize = parsed.uncompressedSize;
            localHeaderOffset = parsed.localHeaderOffset;
        }
        // Directory if the name ends with "/" or the DOS directory attribute bit is set
        const isDirectory = fileName.endsWith("/") || (externalAttributes & 0x10) !== 0;
        // General-purpose bit 0: traditional PKWARE encryption
        const isEncrypted = (flags & 0x01) !== 0;
        entries.push({
            path: fileName,
            isDirectory,
            compressedSize,
            uncompressedSize,
            compressionMethod,
            crc32,
            lastModified: parseDateTime(lastModDate, lastModTime),
            localHeaderOffset,
            comment,
            externalAttributes,
            isEncrypted
        });
    }
    return entries;
}
|
|
293
|
+
/**
 * Locate and slice the raw (still compressed) payload for an entry by walking
 * its local file header. Sizes come from the central-directory entry, not the
 * local header, whose size fields may be zero when a data descriptor is used.
 *
 * Shared by extractEntryData and extractEntryDataSync, which previously
 * duplicated this header walk verbatim.
 *
 * @param {Uint8Array} data - The whole ZIP archive.
 * @param {object} entry - Entry descriptor from parseZipEntries.
 * @returns {Uint8Array} Subarray view of the compressed payload bytes.
 * @throws {Error} If the local file header signature is wrong.
 */
function readCompressedPayload(data, entry) {
    const reader = new BinaryReader(data, entry.localHeaderOffset);
    const sig = reader.readUint32();
    if (sig !== LOCAL_FILE_HEADER_SIG) {
        throw new Error(`Invalid local file header signature for "${entry.path}"`);
    }
    // Skip the fixed local-header fields we do not need:
    // version(2) + flags(2) + method(2) + mod time(2) + mod date(2)
    // + crc32(4) + compressed size(4) + uncompressed size(4) = 22 bytes.
    reader.skip(22);
    const fileNameLength = reader.readUint16();
    const extraFieldLength = reader.readUint16();
    reader.skip(fileNameLength + extraFieldLength);
    return reader.readBytes(entry.compressedSize);
}
/**
 * Extract file data for a specific entry
 *
 * @param {Uint8Array} data - The whole ZIP archive.
 * @param {object} entry - Entry descriptor from parseZipEntries.
 * @returns {Promise<Uint8Array>} Decompressed file contents (empty for directories).
 * @throws {Error} If the entry is encrypted, the local header is invalid, or
 *   the compression method is neither STORED nor DEFLATE.
 */
async function extractEntryData(data, entry) {
    if (entry.isDirectory) {
        return new Uint8Array(0);
    }
    if (entry.isEncrypted) {
        throw new Error(`File "${entry.path}" is encrypted and cannot be extracted`);
    }
    const compressedData = readCompressedPayload(data, entry);
    if (entry.compressionMethod === COMPRESSION_STORED) {
        return compressedData;
    }
    if (entry.compressionMethod === COMPRESSION_DEFLATE) {
        return (0, compress_js_1.decompress)(compressedData);
    }
    throw new Error(`Unsupported compression method: ${entry.compressionMethod}`);
}
/**
 * Extract file data synchronously (Node.js only)
 *
 * Same contract as extractEntryData but uses the synchronous inflate path.
 *
 * @param {Uint8Array} data - The whole ZIP archive.
 * @param {object} entry - Entry descriptor from parseZipEntries.
 * @returns {Uint8Array} Decompressed file contents (empty for directories).
 * @throws {Error} If the entry is encrypted, the local header is invalid, or
 *   the compression method is neither STORED nor DEFLATE.
 */
function extractEntryDataSync(data, entry) {
    if (entry.isDirectory) {
        return new Uint8Array(0);
    }
    if (entry.isEncrypted) {
        throw new Error(`File "${entry.path}" is encrypted and cannot be extracted`);
    }
    const compressedData = readCompressedPayload(data, entry);
    if (entry.compressionMethod === COMPRESSION_STORED) {
        return compressedData;
    }
    if (entry.compressionMethod === COMPRESSION_DEFLATE) {
        return (0, compress_js_1.decompressSync)(compressedData);
    }
    throw new Error(`Unsupported compression method: ${entry.compressionMethod}`);
}
|
|
375
|
+
/**
 * High-level ZIP parser class
 *
 * Wraps an in-memory archive (Uint8Array or ArrayBuffer) and layers lookup,
 * listing and extraction helpers over parseZipEntries / extractEntryData /
 * extractEntryDataSync.
 */
class ZipParser {
    constructor(data, options = {}) {
        // Normalize ArrayBuffer input to a Uint8Array view.
        this.data = data instanceof ArrayBuffer ? new Uint8Array(data) : data;
        this.entries = parseZipEntries(this.data, options);
        // Index entries by path for O(1) lookups (later duplicates win,
        // matching Map-construction semantics).
        this.entryMap = new Map();
        for (const entry of this.entries) {
            this.entryMap.set(entry.path, entry);
        }
    }
    /**
     * Get all entries in the ZIP file
     */
    getEntries() {
        return this.entries;
    }
    /**
     * Get entry by path
     */
    getEntry(path) {
        return this.entryMap.get(path);
    }
    /**
     * Check if entry exists
     */
    hasEntry(path) {
        return this.entryMap.has(path);
    }
    /**
     * List all file paths
     */
    listFiles() {
        return Array.from(this.entries, (entry) => entry.path);
    }
    /**
     * Extract a single file (async); resolves to null when the path is absent
     */
    async extract(path) {
        const entry = this.entryMap.get(path);
        return entry ? extractEntryData(this.data, entry) : null;
    }
    /**
     * Extract a single file (sync, Node.js only); null when the path is absent
     */
    extractSync(path) {
        const entry = this.entryMap.get(path);
        return entry ? extractEntryDataSync(this.data, entry) : null;
    }
    /**
     * Extract all files (async) into a Map keyed by entry path
     */
    async extractAll() {
        const contents = new Map();
        for (const entry of this.entries) {
            contents.set(entry.path, await extractEntryData(this.data, entry));
        }
        return contents;
    }
    /**
     * Iterate over entries with async callback.
     * The callback receives each entry plus a lazy getData() accessor that
     * extracts (and memoizes) the entry's contents only when invoked.
     * Returning exactly false from the callback stops the iteration.
     */
    async forEach(callback) {
        for (const entry of this.entries) {
            let pending = null;
            const getData = () => {
                if (pending === null) {
                    pending = extractEntryData(this.data, entry);
                }
                return pending;
            };
            const verdict = await callback(entry, getData);
            if (verdict === false) {
                break;
            }
        }
    }
}
exports.ZipParser = ZipParser;
|
package/dist/esm/doc/anchor.js
CHANGED
|
@@ -1,4 +1,14 @@
|
|
|
1
1
|
import { colCache } from "../utils/col-cache.js";
|
|
2
|
+
/**
 * Type guard: does `value` look like a native anchor model
 * (nativeCol/nativeRow/nativeColOff/nativeRowOff)?
 *
 * @param {unknown} value - Candidate address argument.
 * @returns {boolean} True only for non-null objects carrying all four keys.
 */
function isAnchorModel(value) {
    // `typeof null === "object"` and the `in` operator throws a TypeError on
    // null, so reject null explicitly before probing properties.
    return (typeof value === "object" &&
        value !== null &&
        "nativeCol" in value &&
        "nativeRow" in value &&
        "nativeColOff" in value &&
        "nativeRowOff" in value);
}
|
|
9
|
+
/**
 * Type guard: does `value` look like a simple {col, row} address?
 *
 * @param {unknown} value - Candidate address argument.
 * @returns {boolean} True only for non-null objects carrying both keys.
 */
function isSimpleAddress(value) {
    // Guard against null: `typeof null === "object"` and `"col" in null`
    // throws a TypeError.
    return typeof value === "object" && value !== null && "col" in value && "row" in value;
}
|
|
2
12
|
class Anchor {
|
|
3
13
|
constructor(worksheet, address, offset = 0) {
|
|
4
14
|
this.worksheet = worksheet;
|
|
@@ -15,17 +25,15 @@ class Anchor {
|
|
|
15
25
|
this.nativeRow = decoded.row + offset;
|
|
16
26
|
this.nativeRowOff = 0;
|
|
17
27
|
}
|
|
18
|
-
else if (address
|
|
19
|
-
|
|
20
|
-
this.
|
|
21
|
-
this.
|
|
22
|
-
this.
|
|
23
|
-
this.nativeRowOff = anchor.nativeRowOff || 0;
|
|
28
|
+
else if (isAnchorModel(address)) {
|
|
29
|
+
this.nativeCol = address.nativeCol || 0;
|
|
30
|
+
this.nativeColOff = address.nativeColOff || 0;
|
|
31
|
+
this.nativeRow = address.nativeRow || 0;
|
|
32
|
+
this.nativeRowOff = address.nativeRowOff || 0;
|
|
24
33
|
}
|
|
25
|
-
else if (address
|
|
26
|
-
|
|
27
|
-
this.
|
|
28
|
-
this.row = simple.row + offset;
|
|
34
|
+
else if (isSimpleAddress(address)) {
|
|
35
|
+
this.col = address.col + offset;
|
|
36
|
+
this.row = address.row + offset;
|
|
29
37
|
}
|
|
30
38
|
else {
|
|
31
39
|
this.nativeCol = 0;
|
|
@@ -35,7 +43,13 @@ class Anchor {
|
|
|
35
43
|
}
|
|
36
44
|
}
|
|
37
45
|
static asInstance(model) {
|
|
38
|
-
|
|
46
|
+
if (model == null) {
|
|
47
|
+
return null;
|
|
48
|
+
}
|
|
49
|
+
if (model instanceof Anchor) {
|
|
50
|
+
return model;
|
|
51
|
+
}
|
|
52
|
+
return new Anchor(undefined, model);
|
|
39
53
|
}
|
|
40
54
|
get col() {
|
|
41
55
|
return this.nativeCol + Math.min(this.colWidth - 1, this.nativeColOff) / this.colWidth;
|