@neoware_inc/neozipkit 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +134 -0
- package/dist/browser/ZipkitBrowser.d.ts +27 -0
- package/dist/browser/ZipkitBrowser.d.ts.map +1 -0
- package/dist/browser/ZipkitBrowser.js +303 -0
- package/dist/browser/ZipkitBrowser.js.map +1 -0
- package/dist/browser/index.d.ts +9 -0
- package/dist/browser/index.d.ts.map +1 -0
- package/dist/browser/index.esm.d.ts +12 -0
- package/dist/browser/index.esm.d.ts.map +1 -0
- package/dist/browser/index.esm.js +46 -0
- package/dist/browser/index.esm.js.map +1 -0
- package/dist/browser/index.js +38 -0
- package/dist/browser/index.js.map +1 -0
- package/dist/browser-esm/index.d.ts +9 -0
- package/dist/browser-esm/index.js +50211 -0
- package/dist/browser-esm/index.js.map +7 -0
- package/dist/browser-umd/index.d.ts +9 -0
- package/dist/browser-umd/index.js +50221 -0
- package/dist/browser-umd/index.js.map +7 -0
- package/dist/browser-umd/index.min.js +39 -0
- package/dist/browser.d.ts +9 -0
- package/dist/browser.js +38 -0
- package/dist/core/ZipCompress.d.ts +99 -0
- package/dist/core/ZipCompress.d.ts.map +1 -0
- package/dist/core/ZipCompress.js +287 -0
- package/dist/core/ZipCompress.js.map +1 -0
- package/dist/core/ZipCopy.d.ts +175 -0
- package/dist/core/ZipCopy.d.ts.map +1 -0
- package/dist/core/ZipCopy.js +310 -0
- package/dist/core/ZipCopy.js.map +1 -0
- package/dist/core/ZipDecompress.d.ts +57 -0
- package/dist/core/ZipDecompress.d.ts.map +1 -0
- package/dist/core/ZipDecompress.js +155 -0
- package/dist/core/ZipDecompress.js.map +1 -0
- package/dist/core/ZipEntry.d.ts +138 -0
- package/dist/core/ZipEntry.d.ts.map +1 -0
- package/dist/core/ZipEntry.js +829 -0
- package/dist/core/ZipEntry.js.map +1 -0
- package/dist/core/Zipkit.d.ts +315 -0
- package/dist/core/Zipkit.d.ts.map +1 -0
- package/dist/core/Zipkit.js +647 -0
- package/dist/core/Zipkit.js.map +1 -0
- package/dist/core/ZstdManager.d.ts +56 -0
- package/dist/core/ZstdManager.d.ts.map +1 -0
- package/dist/core/ZstdManager.js +144 -0
- package/dist/core/ZstdManager.js.map +1 -0
- package/dist/core/components/HashCalculator.d.ts +138 -0
- package/dist/core/components/HashCalculator.d.ts.map +1 -0
- package/dist/core/components/HashCalculator.js +360 -0
- package/dist/core/components/HashCalculator.js.map +1 -0
- package/dist/core/components/Logger.d.ts +73 -0
- package/dist/core/components/Logger.d.ts.map +1 -0
- package/dist/core/components/Logger.js +156 -0
- package/dist/core/components/Logger.js.map +1 -0
- package/dist/core/components/ProgressTracker.d.ts +43 -0
- package/dist/core/components/ProgressTracker.d.ts.map +1 -0
- package/dist/core/components/ProgressTracker.js +112 -0
- package/dist/core/components/ProgressTracker.js.map +1 -0
- package/dist/core/components/Support.d.ts +64 -0
- package/dist/core/components/Support.d.ts.map +1 -0
- package/dist/core/components/Support.js +71 -0
- package/dist/core/components/Support.js.map +1 -0
- package/dist/core/components/Util.d.ts +26 -0
- package/dist/core/components/Util.d.ts.map +1 -0
- package/dist/core/components/Util.js +95 -0
- package/dist/core/components/Util.js.map +1 -0
- package/dist/core/constants/Errors.d.ts +52 -0
- package/dist/core/constants/Errors.d.ts.map +1 -0
- package/dist/core/constants/Errors.js +67 -0
- package/dist/core/constants/Errors.js.map +1 -0
- package/dist/core/constants/Headers.d.ts +170 -0
- package/dist/core/constants/Headers.d.ts.map +1 -0
- package/dist/core/constants/Headers.js +194 -0
- package/dist/core/constants/Headers.js.map +1 -0
- package/dist/core/encryption/Manager.d.ts +58 -0
- package/dist/core/encryption/Manager.d.ts.map +1 -0
- package/dist/core/encryption/Manager.js +121 -0
- package/dist/core/encryption/Manager.js.map +1 -0
- package/dist/core/encryption/ZipCrypto.d.ts +172 -0
- package/dist/core/encryption/ZipCrypto.d.ts.map +1 -0
- package/dist/core/encryption/ZipCrypto.js +554 -0
- package/dist/core/encryption/ZipCrypto.js.map +1 -0
- package/dist/core/encryption/index.d.ts +9 -0
- package/dist/core/encryption/index.d.ts.map +1 -0
- package/dist/core/encryption/index.js +17 -0
- package/dist/core/encryption/index.js.map +1 -0
- package/dist/core/encryption/types.d.ts +29 -0
- package/dist/core/encryption/types.d.ts.map +1 -0
- package/dist/core/encryption/types.js +12 -0
- package/dist/core/encryption/types.js.map +1 -0
- package/dist/core/index.d.ts +27 -0
- package/dist/core/index.d.ts.map +1 -0
- package/dist/core/index.js +59 -0
- package/dist/core/index.js.map +1 -0
- package/dist/core/version.d.ts +5 -0
- package/dist/core/version.d.ts.map +1 -0
- package/dist/core/version.js +31 -0
- package/dist/core/version.js.map +1 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +38 -0
- package/dist/index.js.map +1 -0
- package/dist/node/ZipCompressNode.d.ts +123 -0
- package/dist/node/ZipCompressNode.d.ts.map +1 -0
- package/dist/node/ZipCompressNode.js +565 -0
- package/dist/node/ZipCompressNode.js.map +1 -0
- package/dist/node/ZipCopyNode.d.ts +165 -0
- package/dist/node/ZipCopyNode.d.ts.map +1 -0
- package/dist/node/ZipCopyNode.js +347 -0
- package/dist/node/ZipCopyNode.js.map +1 -0
- package/dist/node/ZipDecompressNode.d.ts +197 -0
- package/dist/node/ZipDecompressNode.d.ts.map +1 -0
- package/dist/node/ZipDecompressNode.js +678 -0
- package/dist/node/ZipDecompressNode.js.map +1 -0
- package/dist/node/ZipkitNode.d.ts +466 -0
- package/dist/node/ZipkitNode.d.ts.map +1 -0
- package/dist/node/ZipkitNode.js +1426 -0
- package/dist/node/ZipkitNode.js.map +1 -0
- package/dist/node/index.d.ts +25 -0
- package/dist/node/index.d.ts.map +1 -0
- package/dist/node/index.js +54 -0
- package/dist/node/index.js.map +1 -0
- package/dist/types/index.d.ts +45 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/index.js +11 -0
- package/dist/types/index.js.map +1 -0
- package/examples/README.md +261 -0
- package/examples/append-data.json +44 -0
- package/examples/copy-zip-append.ts +139 -0
- package/examples/copy-zip.ts +152 -0
- package/examples/create-zip.ts +172 -0
- package/examples/extract-zip.ts +118 -0
- package/examples/list-zip.ts +161 -0
- package/examples/test-files/data.json +116 -0
- package/examples/test-files/document.md +80 -0
- package/examples/test-files/document.txt +6 -0
- package/examples/test-files/file1.txt +48 -0
- package/examples/test-files/file2.txt +80 -0
- package/examples/tsconfig.json +44 -0
- package/package.json +167 -0
- package/src/browser/ZipkitBrowser.ts +305 -0
- package/src/browser/index.esm.ts +32 -0
- package/src/browser/index.ts +19 -0
- package/src/core/ZipCompress.ts +370 -0
- package/src/core/ZipCopy.ts +434 -0
- package/src/core/ZipDecompress.ts +191 -0
- package/src/core/ZipEntry.ts +917 -0
- package/src/core/Zipkit.ts +794 -0
- package/src/core/ZstdManager.ts +165 -0
- package/src/core/components/HashCalculator.ts +384 -0
- package/src/core/components/Logger.ts +180 -0
- package/src/core/components/ProgressTracker.ts +134 -0
- package/src/core/components/Support.ts +77 -0
- package/src/core/components/Util.ts +91 -0
- package/src/core/constants/Errors.ts +78 -0
- package/src/core/constants/Headers.ts +205 -0
- package/src/core/encryption/Manager.ts +137 -0
- package/src/core/encryption/ZipCrypto.ts +650 -0
- package/src/core/encryption/index.ts +15 -0
- package/src/core/encryption/types.ts +33 -0
- package/src/core/index.ts +42 -0
- package/src/core/version.ts +33 -0
- package/src/index.ts +19 -0
- package/src/node/ZipCompressNode.ts +618 -0
- package/src/node/ZipCopyNode.ts +437 -0
- package/src/node/ZipDecompressNode.ts +793 -0
- package/src/node/ZipkitNode.ts +1706 -0
- package/src/node/index.ts +40 -0
- package/src/types/index.ts +68 -0
- package/src/types/modules.d.ts +22 -0
- package/src/types/opentimestamps.d.ts +1 -0
|
@@ -0,0 +1,1706 @@
|
|
|
1
|
+
// ======================================
|
|
2
|
+
// ZipkitNode.ts - Node.js File-Based ZIP Operations
|
|
3
|
+
// Copyright (c) 2025 NeoWare, Inc. All rights reserved.
|
|
4
|
+
// ======================================
|
|
5
|
+
|
|
6
|
+
import Zipkit, { CompressOptions, StreamingFileHandle } from '../core';
|
|
7
|
+
import ZipEntry from '../core/ZipEntry';
|
|
8
|
+
import Errors from '../core/constants/Errors';
|
|
9
|
+
import { ZipCompressNode } from './ZipCompressNode';
|
|
10
|
+
import { ZipDecompressNode } from './ZipDecompressNode';
|
|
11
|
+
import {
|
|
12
|
+
CENTRAL_END,
|
|
13
|
+
CENTRAL_DIR,
|
|
14
|
+
ZIP64_CENTRAL_END,
|
|
15
|
+
ZIP64_CENTRAL_DIR,
|
|
16
|
+
LOCAL_HDR,
|
|
17
|
+
GP_FLAG,
|
|
18
|
+
ENCRYPT_HDR_SIZE
|
|
19
|
+
} from '../core/constants/Headers';
|
|
20
|
+
import * as fs from 'fs';
|
|
21
|
+
import * as path from 'path';
|
|
22
|
+
import { minimatch } from 'minimatch';
|
|
23
|
+
|
|
24
|
+
// Re-export everything from core Zipkit
|
|
25
|
+
export * from '../core';
|
|
26
|
+
export { ZipEntry, Errors };
|
|
27
|
+
|
|
28
|
+
// ============================================================================
|
|
29
|
+
// ZIP File Writer Interface
|
|
30
|
+
// ============================================================================
|
|
31
|
+
|
|
32
|
+
/**
 * Interface for ZIP file writing operations
 * Tracks file descriptor, stream, current position, and entry positions
 */
export interface ZipFileWriter {
  // Raw file descriptor opened with 'w+' — presumably used for positioned
  // writes such as patching local headers after compression (see
  // initializeZipFile) — TODO confirm against the close/update logic.
  outputFd: number;
  // Sequential write stream over the same output path; note this is a
  // separate descriptor from outputFd.
  outputStream: fs.WriteStream;
  // Byte offset where the next sequential write will land.
  currentPosition: number;
  entryPositions: Map<string, number>; // filename -> position of that entry's local header
}
|
|
42
|
+
|
|
43
|
+
// ======================================
|
|
44
|
+
// ZipkitNode
|
|
45
|
+
// ======================================
|
|
46
|
+
|
|
47
|
+
/**
|
|
48
|
+
* ZipkitNode - Node.js file-based ZIP operations
|
|
49
|
+
*
|
|
50
|
+
* Extends Zipkit to provide file I/O operations for Node.js environments.
|
|
51
|
+
* Similar to ZipkitBrowser which provides Blob operations for browser environments.
|
|
52
|
+
*
|
|
53
|
+
* @example
|
|
54
|
+
* ```typescript
|
|
55
|
+
* const zip = new ZipkitNode();
|
|
56
|
+
* await zip.loadZipFile('archive.zip');
|
|
57
|
+
* await zip.extractToFile(entry, './output/file.txt');
|
|
58
|
+
* ```
|
|
59
|
+
*/
|
|
60
|
+
export default class ZipkitNode extends Zipkit {
|
|
61
|
+
// Override _zipkitCmp to use ZipCompressNode instead of ZipCompress (lazy-loaded)
// Constructed on first use by getZipCompressNode(); null until then.
private _zipkitCmpNode: ZipCompressNode | null = null;
// Override _zipkitDeCmp to use ZipDecompressNode instead of ZipDecompress (lazy-loaded)
// Constructed on first use by getZipDecompressNode(); null until then.
private _zipkitDeCmpNode: ZipDecompressNode | null = null;

// File-based ZIP loading properties (merged from ZipLoadEntriesServer)
// Open handle for the loaded archive; null until loadZipFile() succeeds.
private fileHandle: StreamingFileHandle | null = null;
// Path of the currently loaded archive; also serves as the "is loaded" flag.
private filePath: string | null = null;
// Archive size in bytes, populated from fileHandle.stat() in loadZipFile().
private fileSize: number = 0;
// Note: centralDirSize and centralDirOffset are inherited from Zipkit base class

/**
 * @param config - Optional settings forwarded to the Zipkit base class:
 *   - bufferSize: presumably the chunk size used for streamed I/O
 *     (consumed via getBufferSize()) — confirm against base class
 *   - debug: debug flag handled by the base class
 */
constructor(config?: { bufferSize?: number; debug?: boolean }) {
  super(config);

  // Note: ZipCompressNode and ZipDecompressNode are lazy-loaded when first accessed
  // They will override the base class _zipkitCmp and _zipkitDeCmp on first access
}
|
|
78
|
+
|
|
79
|
+
/**
|
|
80
|
+
* Lazy-load ZipCompressNode instance and override base class _zipkitCmp
|
|
81
|
+
* @returns ZipCompressNode instance (created on first access)
|
|
82
|
+
*/
|
|
83
|
+
private getZipCompressNode(): ZipCompressNode {
|
|
84
|
+
if (!this._zipkitCmpNode) {
|
|
85
|
+
this._zipkitCmpNode = new ZipCompressNode(this);
|
|
86
|
+
// Override the base class _zipkitCmp with ZipCompressNode
|
|
87
|
+
const zipkit = this as any;
|
|
88
|
+
zipkit._zipkitCmp = this._zipkitCmpNode;
|
|
89
|
+
}
|
|
90
|
+
return this._zipkitCmpNode;
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
/**
|
|
94
|
+
* Lazy-load ZipDecompressNode instance and override base class _zipkitDeCmp
|
|
95
|
+
* @returns ZipDecompressNode instance (created on first access)
|
|
96
|
+
*/
|
|
97
|
+
private getZipDecompressNode(): ZipDecompressNode {
|
|
98
|
+
if (!this._zipkitDeCmpNode) {
|
|
99
|
+
this._zipkitDeCmpNode = new ZipDecompressNode(this);
|
|
100
|
+
// Override the base class _zipkitDeCmp with ZipDecompressNode
|
|
101
|
+
const zipkit = this as any;
|
|
102
|
+
zipkit._zipkitDeCmp = this._zipkitDeCmpNode;
|
|
103
|
+
}
|
|
104
|
+
return this._zipkitDeCmpNode;
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
// ============================================================================
|
|
108
|
+
// File Loading Methods
|
|
109
|
+
// ============================================================================
|
|
110
|
+
|
|
111
|
+
/**
|
|
112
|
+
* Load ZIP file from file path (streaming mode)
|
|
113
|
+
*
|
|
114
|
+
* **Required**: You must call this method before calling `getDirectory()` or any other ZIP operations.
|
|
115
|
+
* This method:
|
|
116
|
+
* 1. Resets all ZIP data
|
|
117
|
+
* 2. Opens the file handle
|
|
118
|
+
* 3. Loads EOCD and parses central directory
|
|
119
|
+
* 4. Populates this.zipEntries[] array
|
|
120
|
+
*
|
|
121
|
+
* @param filePath - Path to the ZIP file to load
|
|
122
|
+
* @returns Promise<ZipEntry[]> Array of all entries in the ZIP file
|
|
123
|
+
* @throws Error if Node.js environment not available
|
|
124
|
+
*/
|
|
125
|
+
async loadZipFile(filePath: string): Promise<ZipEntry[]> {
|
|
126
|
+
// Access private members via type assertion (ZipkitServer extends Zipkit)
|
|
127
|
+
const zipkit = this as any;
|
|
128
|
+
zipkit.resetZipData();
|
|
129
|
+
|
|
130
|
+
// Reset file-based data
|
|
131
|
+
this.resetFileData();
|
|
132
|
+
this.filePath = filePath;
|
|
133
|
+
|
|
134
|
+
// Open file handle
|
|
135
|
+
this.fileHandle = await this.openFileHandle(filePath);
|
|
136
|
+
const stats = await this.fileHandle.stat();
|
|
137
|
+
this.fileSize = stats.size;
|
|
138
|
+
|
|
139
|
+
// Load EOCD to get central directory info (sets zipComment internally)
|
|
140
|
+
await this.loadEOCD();
|
|
141
|
+
|
|
142
|
+
// Load central directory in chunks
|
|
143
|
+
const entries: ZipEntry[] = [];
|
|
144
|
+
let offset = zipkit.centralDirOffset;
|
|
145
|
+
let remaining = zipkit.centralDirSize;
|
|
146
|
+
const bufferSize = this.getBufferSize();
|
|
147
|
+
|
|
148
|
+
while (remaining > 0) {
|
|
149
|
+
const currentBufferSize = Math.min(bufferSize, remaining);
|
|
150
|
+
const chunk = Buffer.alloc(currentBufferSize);
|
|
151
|
+
await this.fileHandle.read(chunk, 0, currentBufferSize, offset);
|
|
152
|
+
|
|
153
|
+
// Parse entries from chunk
|
|
154
|
+
let chunkOffset = 0;
|
|
155
|
+
while (chunkOffset < chunk.length) {
|
|
156
|
+
if (chunk.readUInt32LE(chunkOffset) !== CENTRAL_DIR.SIGNATURE) {
|
|
157
|
+
break; // End of central directory
|
|
158
|
+
}
|
|
159
|
+
|
|
160
|
+
// Parse central directory entry
|
|
161
|
+
const entry = new ZipEntry(null, null, false);
|
|
162
|
+
const entryData = chunk.subarray(chunkOffset);
|
|
163
|
+
const remainingData = entry.readZipEntry(entryData);
|
|
164
|
+
|
|
165
|
+
entries.push(entry);
|
|
166
|
+
|
|
167
|
+
// Move to next entry
|
|
168
|
+
chunkOffset += (entryData.length - remainingData.length);
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
offset += currentBufferSize;
|
|
172
|
+
remaining -= currentBufferSize;
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
// Store entries in zipEntries[] array (single source of truth)
|
|
176
|
+
this.zipEntries = entries;
|
|
177
|
+
return entries;
|
|
178
|
+
}
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
/**
|
|
182
|
+
* Alias for loadZipFile() for consistency
|
|
183
|
+
* @param filePath - Path to the ZIP file to load
|
|
184
|
+
* @returns Promise<ZipEntry[]> Array of all entries in the ZIP file
|
|
185
|
+
*/
|
|
186
|
+
async loadZipFromFile(filePath: string): Promise<ZipEntry[]> {
|
|
187
|
+
return this.loadZipFile(filePath);
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
// ============================================================================
|
|
191
|
+
// File Extraction Methods
|
|
192
|
+
// ============================================================================
|
|
193
|
+
|
|
194
|
+
/**
|
|
195
|
+
* Extract file directly to disk with true streaming (no memory buffering)
|
|
196
|
+
* Wrapper for ZipDecompress.extractToFile()
|
|
197
|
+
*
|
|
198
|
+
* Note: ZSTD codec is lazily initialized on first use (module-level singleton).
|
|
199
|
+
* Initialization happens automatically when needed.
|
|
200
|
+
*
|
|
201
|
+
* @param entry - ZIP entry to extract
|
|
202
|
+
* @param outputPath - Path where the file should be written
|
|
203
|
+
* @param options - Optional extraction options:
|
|
204
|
+
* - skipHashCheck: Skip hash verification (default: false)
|
|
205
|
+
* - onProgress: Callback function receiving bytes extracted as parameter
|
|
206
|
+
* @returns Promise that resolves when extraction is complete
|
|
207
|
+
* @throws Error if not a File-based ZIP
|
|
208
|
+
*/
|
|
209
|
+
async extractToFile(
|
|
210
|
+
entry: ZipEntry,
|
|
211
|
+
outputPath: string,
|
|
212
|
+
options?: {
|
|
213
|
+
skipHashCheck?: boolean;
|
|
214
|
+
onProgress?: (bytes: number) => void;
|
|
215
|
+
}
|
|
216
|
+
): Promise<void> {
|
|
217
|
+
return this.getZipDecompressNode().extractToFile(entry, outputPath, options);
|
|
218
|
+
}
|
|
219
|
+
|
|
220
|
+
/**
|
|
221
|
+
* Alias for extractToFile() for consistency
|
|
222
|
+
* @param entry - ZIP entry to extract
|
|
223
|
+
* @param outputPath - Path where the file should be written
|
|
224
|
+
* @param options - Optional extraction options
|
|
225
|
+
* @returns Promise that resolves when extraction is complete
|
|
226
|
+
*/
|
|
227
|
+
async extractEntryToFile(
|
|
228
|
+
entry: ZipEntry,
|
|
229
|
+
outputPath: string,
|
|
230
|
+
options?: {
|
|
231
|
+
skipHashCheck?: boolean;
|
|
232
|
+
onProgress?: (bytes: number) => void;
|
|
233
|
+
}
|
|
234
|
+
): Promise<void> {
|
|
235
|
+
return this.extractToFile(entry, outputPath, options);
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
/**
|
|
239
|
+
* Extract file to Buffer (in-memory) for file-based ZIP
|
|
240
|
+
*
|
|
241
|
+
* This method extracts a ZIP entry directly to a Buffer without writing to disk.
|
|
242
|
+
* This is ideal for reading metadata files (like NZIP.TOKEN) that don't need
|
|
243
|
+
* to be written to temporary files.
|
|
244
|
+
*
|
|
245
|
+
* @param entry - ZIP entry to extract
|
|
246
|
+
* @param options - Optional extraction options:
|
|
247
|
+
* - skipHashCheck: Skip hash verification (default: false)
|
|
248
|
+
* - onProgress: Callback function receiving bytes extracted as parameter
|
|
249
|
+
* @returns Promise that resolves to Buffer containing the extracted file data
|
|
250
|
+
* @throws Error if not a File-based ZIP or if extraction fails
|
|
251
|
+
*/
|
|
252
|
+
async extractToBuffer(
|
|
253
|
+
entry: ZipEntry,
|
|
254
|
+
options?: {
|
|
255
|
+
skipHashCheck?: boolean;
|
|
256
|
+
onProgress?: (bytes: number) => void;
|
|
257
|
+
}
|
|
258
|
+
): Promise<Buffer> {
|
|
259
|
+
return this.getZipDecompressNode().extractToBuffer(entry, options);
|
|
260
|
+
}
|
|
261
|
+
|
|
262
|
+
/**
|
|
263
|
+
* Get comprehensive archive statistics
|
|
264
|
+
*
|
|
265
|
+
* Calculates statistics about the loaded ZIP archive including file counts,
|
|
266
|
+
* sizes, compression ratios, and file system metadata.
|
|
267
|
+
*
|
|
268
|
+
* @param archivePath - Optional path to archive file (if not already loaded)
|
|
269
|
+
* @returns Promise that resolves to ArchiveStatistics object
|
|
270
|
+
* @throws Error if archive is not loaded and archivePath is not provided
|
|
271
|
+
*
|
|
272
|
+
* @example
|
|
273
|
+
* ```typescript
|
|
274
|
+
* const zipkit = new ZipkitNode();
|
|
275
|
+
* await zipkit.loadZipFile('archive.zip');
|
|
276
|
+
* const stats = await zipkit.getArchiveStatistics();
|
|
277
|
+
* console.log(`Total files: ${stats.totalFiles}`);
|
|
278
|
+
* console.log(`Compression ratio: ${stats.compressionRatio.toFixed(2)}%`);
|
|
279
|
+
* ```
|
|
280
|
+
*/
|
|
281
|
+
async getArchiveStatistics(archivePath?: string): Promise<import('../types').ArchiveStatistics> {
|
|
282
|
+
// Load archive if path provided and not already loaded
|
|
283
|
+
if (archivePath && !this.filePath) {
|
|
284
|
+
await this.loadZipFile(archivePath);
|
|
285
|
+
}
|
|
286
|
+
|
|
287
|
+
if (!this.filePath) {
|
|
288
|
+
throw new Error('Archive not loaded. Call loadZipFile() first or provide archivePath parameter.');
|
|
289
|
+
}
|
|
290
|
+
|
|
291
|
+
// Get file system stats
|
|
292
|
+
const stats = await fs.promises.stat(this.filePath);
|
|
293
|
+
|
|
294
|
+
// Get entries
|
|
295
|
+
const entries = this.getDirectory();
|
|
296
|
+
|
|
297
|
+
// Calculate statistics
|
|
298
|
+
const totalFiles = entries.filter((e) => !e.isDirectory).length;
|
|
299
|
+
const totalFolders = entries.filter((e) => e.isDirectory).length;
|
|
300
|
+
const uncompressedSize = entries.reduce((sum, e) => sum + e.uncompressedSize, 0);
|
|
301
|
+
const compressedSize = entries.reduce((sum, e) => sum + e.compressedSize, 0);
|
|
302
|
+
|
|
303
|
+
// Calculate compression ratios
|
|
304
|
+
const compressionRatio = uncompressedSize > 0
|
|
305
|
+
? ((1 - compressedSize / uncompressedSize) * 100)
|
|
306
|
+
: 0;
|
|
307
|
+
|
|
308
|
+
// Calculate average compression ratio per file
|
|
309
|
+
const averageCompressionRatio = totalFiles > 0
|
|
310
|
+
? entries
|
|
311
|
+
.filter((e) => !e.isDirectory && e.uncompressedSize > 0)
|
|
312
|
+
.reduce((sum, e) => {
|
|
313
|
+
const fileRatio = (1 - e.compressedSize / e.uncompressedSize) * 100;
|
|
314
|
+
return sum + fileRatio;
|
|
315
|
+
}, 0) / totalFiles
|
|
316
|
+
: 0;
|
|
317
|
+
|
|
318
|
+
return {
|
|
319
|
+
fileSize: stats.size,
|
|
320
|
+
created: stats.birthtime,
|
|
321
|
+
modified: stats.mtime,
|
|
322
|
+
totalFiles,
|
|
323
|
+
totalFolders,
|
|
324
|
+
uncompressedSize,
|
|
325
|
+
compressedSize,
|
|
326
|
+
compressionRatio,
|
|
327
|
+
averageCompressionRatio
|
|
328
|
+
};
|
|
329
|
+
}
|
|
330
|
+
|
|
331
|
+
/**
|
|
332
|
+
* Test entry integrity without extracting to disk
|
|
333
|
+
* Validates CRC-32 or SHA-256 hash without writing decompressed data
|
|
334
|
+
*
|
|
335
|
+
* This method processes chunks as they are decompressed and validates them,
|
|
336
|
+
* but discards the decompressed data instead of writing to disk. This is useful
|
|
337
|
+
* for verifying ZIP file integrity without extracting files.
|
|
338
|
+
*
|
|
339
|
+
* @param entry - ZIP entry to test
|
|
340
|
+
* @param options - Optional test options:
|
|
341
|
+
* - skipHashCheck: Skip hash verification (default: false)
|
|
342
|
+
* - onProgress: Callback function receiving bytes processed as parameter
|
|
343
|
+
* @returns Promise that resolves to an object containing the verified hash (if SHA-256) or undefined
|
|
344
|
+
* @throws Error if validation fails (INVALID_CRC or INVALID_SHA256) or if not a File-based ZIP
|
|
345
|
+
*/
|
|
346
|
+
async testEntry(
|
|
347
|
+
entry: ZipEntry,
|
|
348
|
+
options?: {
|
|
349
|
+
skipHashCheck?: boolean;
|
|
350
|
+
onProgress?: (bytes: number) => void;
|
|
351
|
+
}
|
|
352
|
+
): Promise<{ verifiedHash?: string }> {
|
|
353
|
+
return this.getZipDecompressNode().testEntry(entry, options);
|
|
354
|
+
}
|
|
355
|
+
|
|
356
|
+
// ============================================================================
|
|
357
|
+
// File-Based Compression Methods (ZipCompressNode wrappers)
|
|
358
|
+
// ============================================================================
|
|
359
|
+
|
|
360
|
+
/**
|
|
361
|
+
* Compress data for a ZIP entry (Buffer-based)
|
|
362
|
+
* Override to use ZipCompressNode instead of ZipCompress
|
|
363
|
+
*
|
|
364
|
+
* @param entry - ZIP entry to compress
|
|
365
|
+
* @param data - Buffer containing data to compress
|
|
366
|
+
* @param options - Compression options
|
|
367
|
+
* @param onOutputBuffer - Optional callback for streaming output
|
|
368
|
+
* @returns Promise resolving to Buffer containing compressed data
|
|
369
|
+
*/
|
|
370
|
+
async compressData(
|
|
371
|
+
entry: ZipEntry,
|
|
372
|
+
data: Buffer,
|
|
373
|
+
options?: CompressOptions,
|
|
374
|
+
onOutputBuffer?: (data: Buffer) => Promise<void>
|
|
375
|
+
): Promise<Buffer> {
|
|
376
|
+
return this.getZipCompressNode().compressData(entry, data, options, onOutputBuffer);
|
|
377
|
+
}
|
|
378
|
+
|
|
379
|
+
/**
|
|
380
|
+
* Compress a file from disk
|
|
381
|
+
* Wrapper for ZipCompressNode.compressFile()
|
|
382
|
+
*
|
|
383
|
+
* @param filePath - Path to the file to compress
|
|
384
|
+
* @param entry - ZIP entry to compress (filename should already be set)
|
|
385
|
+
* @param options - Optional compression options
|
|
386
|
+
* @returns Promise resolving to Buffer containing compressed data
|
|
387
|
+
*/
|
|
388
|
+
async compressFile(
|
|
389
|
+
filePath: string,
|
|
390
|
+
entry: ZipEntry,
|
|
391
|
+
options?: CompressOptions
|
|
392
|
+
): Promise<Buffer> {
|
|
393
|
+
return this.getZipCompressNode().compressFile(filePath, entry, options);
|
|
394
|
+
}
|
|
395
|
+
|
|
396
|
+
/**
|
|
397
|
+
* Compress a file from disk using streaming for large files
|
|
398
|
+
* Wrapper for ZipCompressNode.compressFileStream()
|
|
399
|
+
*
|
|
400
|
+
* @param filePath - Path to the file to compress
|
|
401
|
+
* @param entry - ZIP entry to compress (filename should already be set)
|
|
402
|
+
* @param options - Optional compression options
|
|
403
|
+
* @param onOutputBuffer - Optional callback for streaming output
|
|
404
|
+
* @returns Promise resolving to Buffer containing compressed data
|
|
405
|
+
*/
|
|
406
|
+
async compressFileStream(
|
|
407
|
+
filePath: string,
|
|
408
|
+
entry: ZipEntry,
|
|
409
|
+
options?: CompressOptions,
|
|
410
|
+
onOutputBuffer?: (data: Buffer) => Promise<void>
|
|
411
|
+
): Promise<Buffer> {
|
|
412
|
+
return this.getZipCompressNode().compressFileStream(filePath, entry, options, onOutputBuffer);
|
|
413
|
+
}
|
|
414
|
+
|
|
415
|
+
/**
|
|
416
|
+
* Extract all entries from ZIP to a directory
|
|
417
|
+
*
|
|
418
|
+
* @param outputDir - Directory where files should be extracted
|
|
419
|
+
* @param options - Optional extraction options:
|
|
420
|
+
* - skipHashCheck: Skip hash verification (default: false)
|
|
421
|
+
* - onProgress: Callback function receiving (entry, bytes) as parameters
|
|
422
|
+
* - preservePaths: Preserve directory structure (default: true)
|
|
423
|
+
* @returns Promise that resolves when all extractions are complete
|
|
424
|
+
* @throws Error if not a File-based ZIP
|
|
425
|
+
*/
|
|
426
|
+
async extractAll(
|
|
427
|
+
outputDir: string,
|
|
428
|
+
options?: {
|
|
429
|
+
skipHashCheck?: boolean;
|
|
430
|
+
onProgress?: (entry: ZipEntry, bytes: number) => void;
|
|
431
|
+
preservePaths?: boolean;
|
|
432
|
+
}
|
|
433
|
+
): Promise<void> {
|
|
434
|
+
const entries = this.zipEntries;
|
|
435
|
+
const preservePaths = options?.preservePaths !== false;
|
|
436
|
+
|
|
437
|
+
// Ensure output directory exists
|
|
438
|
+
if (!fs.existsSync(outputDir)) {
|
|
439
|
+
fs.mkdirSync(outputDir, { recursive: true });
|
|
440
|
+
}
|
|
441
|
+
|
|
442
|
+
for (const entry of entries) {
|
|
443
|
+
if (!entry.filename) continue;
|
|
444
|
+
|
|
445
|
+
// Determine output path
|
|
446
|
+
let outputPath: string;
|
|
447
|
+
if (preservePaths) {
|
|
448
|
+
// Preserve directory structure
|
|
449
|
+
outputPath = path.join(outputDir, entry.filename);
|
|
450
|
+
// Create parent directories if needed
|
|
451
|
+
const parentDir = path.dirname(outputPath);
|
|
452
|
+
if (!fs.existsSync(parentDir)) {
|
|
453
|
+
fs.mkdirSync(parentDir, { recursive: true });
|
|
454
|
+
}
|
|
455
|
+
} else {
|
|
456
|
+
// Extract to flat structure (filename only)
|
|
457
|
+
const filename = path.basename(entry.filename);
|
|
458
|
+
outputPath = path.join(outputDir, filename);
|
|
459
|
+
}
|
|
460
|
+
|
|
461
|
+
// Extract entry
|
|
462
|
+
await this.extractToFile(entry, outputPath, {
|
|
463
|
+
skipHashCheck: options?.skipHashCheck,
|
|
464
|
+
onProgress: options?.onProgress ? (bytes: number) => options.onProgress!(entry, bytes) : undefined
|
|
465
|
+
});
|
|
466
|
+
}
|
|
467
|
+
}
|
|
468
|
+
|
|
469
|
+
// ============================================================================
|
|
470
|
+
// ZIP File Creation Subfunctions
|
|
471
|
+
// ============================================================================
|
|
472
|
+
|
|
473
|
+
/**
|
|
474
|
+
* Initialize ZIP file for writing
|
|
475
|
+
* Creates output file with seek capability and returns writer object
|
|
476
|
+
*
|
|
477
|
+
* @param outputPath - Path where the ZIP file should be created
|
|
478
|
+
* @returns Promise resolving to ZipFileWriter object
|
|
479
|
+
*/
|
|
480
|
+
async initializeZipFile(outputPath: string): Promise<ZipFileWriter> {
|
|
481
|
+
// Ensure parent directory exists
|
|
482
|
+
const parentDir = path.dirname(outputPath);
|
|
483
|
+
if (parentDir && parentDir !== '.' && !fs.existsSync(parentDir)) {
|
|
484
|
+
fs.mkdirSync(parentDir, { recursive: true });
|
|
485
|
+
}
|
|
486
|
+
|
|
487
|
+
// Open file for writing with seek capability
|
|
488
|
+
const outputFd = fs.openSync(outputPath, 'w+');
|
|
489
|
+
const outputStream = fs.createWriteStream(outputPath);
|
|
490
|
+
|
|
491
|
+
return {
|
|
492
|
+
outputFd,
|
|
493
|
+
outputStream,
|
|
494
|
+
currentPosition: 0,
|
|
495
|
+
entryPositions: new Map<string, number>()
|
|
496
|
+
};
|
|
497
|
+
}
|
|
498
|
+
|
|
499
|
+
/**
|
|
500
|
+
* Prepare ZipEntry from file path
|
|
501
|
+
* Validates file exists and creates entry with metadata from file stats
|
|
502
|
+
*
|
|
503
|
+
* @param filePath - Path to the file
|
|
504
|
+
* @param entryName - Optional entry name (defaults to basename)
|
|
505
|
+
* @returns Promise resolving to ZipEntry ready for compression
|
|
506
|
+
*/
|
|
507
|
+
async prepareEntryFromFile(filePath: string, entryName?: string): Promise<ZipEntry> {
|
|
508
|
+
if (!fs.existsSync(filePath)) {
|
|
509
|
+
throw new Error(`File not found: ${filePath}`);
|
|
510
|
+
}
|
|
511
|
+
|
|
512
|
+
const stats = fs.statSync(filePath);
|
|
513
|
+
if (!stats.isFile()) {
|
|
514
|
+
throw new Error(`Path is not a file: ${filePath}`);
|
|
515
|
+
}
|
|
516
|
+
|
|
517
|
+
// Use provided entry name or default to basename
|
|
518
|
+
const name = entryName || path.basename(filePath);
|
|
519
|
+
const entry = this.createZipEntry(name);
|
|
520
|
+
|
|
521
|
+
// Set entry metadata from file stats
|
|
522
|
+
entry.uncompressedSize = stats.size;
|
|
523
|
+
entry.timeDateDOS = entry.setDateTime(stats.mtime);
|
|
524
|
+
entry.lastModTimeDate = entry.timeDateDOS;
|
|
525
|
+
|
|
526
|
+
return entry;
|
|
527
|
+
}
|
|
528
|
+
|
|
529
|
+
  /**
   * Write a ZIP entry to the file
   * Handles sequential write: header (placeholder) → compress → data → update header
   *
   * The local header is written first with compressedSize = 0 because the real
   * size is only known after compression; afterwards the compressed-size, CRC
   * and (when encrypted) bit-flag fields are patched in place with fs.writeSync.
   * NOTE(review): the patch-writes go through writer.outputFd while the data
   * goes through writer.outputStream — this assumes both refer to the same
   * underlying file and that the stream has flushed far enough; confirm in
   * the writer's construction code.
   *
   * @param writer - ZipFileWriter object
   * @param entry - ZipEntry to write
   * @param filePath - Path to source file
   * @param options - Optional compression options
   * @param callbacks - Optional callbacks for progress and hash calculation
   * @returns Promise that resolves when entry is written
   */
  async writeZipEntry(
    writer: ZipFileWriter,
    entry: ZipEntry,
    filePath: string,
    options?: CompressOptions,
    callbacks?: {
      onProgress?: (entry: ZipEntry, bytes: number) => void;
      onHashCalculated?: (entry: ZipEntry, hash: Buffer) => void;
    }
  ): Promise<void> {
    // Set compression method based on options.
    // level 0 forces STORED; otherwise ZSTD (93) is the default unless the
    // caller opts out with useZstd: false, which selects DEFLATE (8).
    const level = options?.level ?? 6;
    if (level === 0) {
      entry.cmpMethod = 0; // STORED
    } else if (options?.useZstd !== false) {
      entry.cmpMethod = 93; // ZSTD
    } else {
      entry.cmpMethod = 8; // DEFLATED
    }

    // Step 1: Create local header with placeholder compressed size (0)
    entry.compressedSize = 0; // Placeholder - will be updated after compression
    entry.localHdrOffset = writer.currentPosition;
    const localHeader = entry.createLocalHdr();

    // Step 2: Write local header to file
    await new Promise<void>((resolve, reject) => {
      writer.outputStream.write(localHeader, (error) => {
        if (error) {
          reject(error);
        } else {
          writer.currentPosition += localHeader.length;
          // Remember where this entry's local header landed so the central
          // directory can reference it later (see writeCentralDirectory).
          writer.entryPositions.set(entry.filename || '', entry.localHdrOffset);
          resolve();
        }
      });
    });

    // Step 3: Compress file and write data
    const bufferSize = options?.bufferSize || this.getBufferSize();
    const useZstd = options?.useZstd !== false;
    // Never use the chunked/streaming path when encrypting: the streaming path writes
    // compressed data to the writer via onOutputBuffer BEFORE encryption can be applied.
    // Encryption requires the full compressed buffer to create the 12-byte header and
    // encrypt all data in one pass, so we must use the buffer path (compressFile).
    const shouldUseChunked = !useZstd && !options?.password && entry.uncompressedSize && entry.uncompressedSize > bufferSize;

    if (shouldUseChunked) {
      // Use streaming compression for large files
      // Data is written directly via onOutputBuffer callback
      const onOutputBuffer = async (data: Buffer) => {
        await new Promise<void>((resolve, reject) => {
          writer.outputStream.write(data, (error) => {
            if (error) {
              reject(error);
            } else {
              writer.currentPosition += data.length;
              if (callbacks?.onProgress) {
                callbacks.onProgress(entry, data.length);
              }
              resolve();
            }
          });
        });
      };

      // compressFileStream will set entry.compressedSize and entry.crc
      await this.compressFileStream(filePath, entry, options, onOutputBuffer);
    } else {
      // Use regular buffer compression for small files
      // compressFile will set entry.compressedSize and entry.crc
      const compressedData = await this.compressFile(filePath, entry, options);

      // Write compressed data to file
      await new Promise<void>((resolve, reject) => {
        writer.outputStream.write(compressedData, (error) => {
          if (error) {
            reject(error);
          } else {
            writer.currentPosition += compressedData.length;
            if (callbacks?.onProgress) {
              callbacks.onProgress(entry, compressedData.length);
            }
            resolve();
          }
        });
      });
    }

    // Step 4: Update compressed size and CRC in local header
    // entry.compressedSize and entry.crc are set by compression methods
    if (entry.compressedSize === undefined) {
      throw new Error(`Compressed size not set for entry: ${entry.filename}`);
    }

    // Offset 18 is the compressed-size field of the ZIP local file header.
    const compressedSizeOffset = entry.localHdrOffset + 18;
    const sizeBuffer = Buffer.alloc(4);
    sizeBuffer.writeUInt32LE(entry.compressedSize, 0);
    fs.writeSync(writer.outputFd, sizeBuffer, 0, 4, compressedSizeOffset);

    // Offset 14 is the CRC-32 field of the ZIP local file header.
    if (entry.crc !== undefined) {
      const crcOffset = entry.localHdrOffset + 14;
      const crcBuffer = Buffer.alloc(4);
      crcBuffer.writeUInt32LE(entry.crc, 0);
      fs.writeSync(writer.outputFd, crcBuffer, 0, 4, crcOffset);
    }

    // Update bitFlags in local header if encryption was applied
    // This is necessary because the local header is written before compression/encryption,
    // but encryption flags are set during compression. We need to update the header afterward.
    if (entry.isEncrypted || (entry.bitFlags & GP_FLAG.ENCRYPTED)) {
      const bitFlagsOffset = entry.localHdrOffset + LOCAL_HDR.FLAGS;
      const bitFlagsBuffer = Buffer.alloc(2);
      bitFlagsBuffer.writeUInt16LE(entry.bitFlags >>> 0, 0);
      fs.writeSync(writer.outputFd, bitFlagsBuffer, 0, 2, bitFlagsOffset);
    }

    // Call hash callback if provided
    if (callbacks?.onHashCalculated && entry.sha256) {
      const hashBuffer = Buffer.from(entry.sha256, 'hex');
      callbacks.onHashCalculated(entry, hashBuffer);
    }
  }
|
|
663
|
+
|
|
664
|
+
/**
|
|
665
|
+
* Write central directory entries to ZIP file
|
|
666
|
+
*
|
|
667
|
+
* @param writer - ZipFileWriter object
|
|
668
|
+
* @param entries - Array of ZipEntry objects
|
|
669
|
+
* @param options - Optional options for archive comment and progress
|
|
670
|
+
* @returns Promise resolving to central directory size in bytes
|
|
671
|
+
*/
|
|
672
|
+
async writeCentralDirectory(
|
|
673
|
+
writer: ZipFileWriter,
|
|
674
|
+
entries: ZipEntry[],
|
|
675
|
+
options?: {
|
|
676
|
+
archiveComment?: string;
|
|
677
|
+
onProgress?: (entry: ZipEntry) => void;
|
|
678
|
+
}
|
|
679
|
+
): Promise<number> {
|
|
680
|
+
const centralDirStart = writer.currentPosition;
|
|
681
|
+
|
|
682
|
+
// Update entry local header offsets from tracked positions
|
|
683
|
+
for (const entry of entries) {
|
|
684
|
+
const actualPosition = writer.entryPositions.get(entry.filename || '');
|
|
685
|
+
if (actualPosition !== undefined) {
|
|
686
|
+
entry.localHdrOffset = actualPosition;
|
|
687
|
+
}
|
|
688
|
+
}
|
|
689
|
+
|
|
690
|
+
// Write central directory entries
|
|
691
|
+
for (const entry of entries) {
|
|
692
|
+
const centralDirEntry = entry.centralDirEntry();
|
|
693
|
+
|
|
694
|
+
await new Promise<void>((resolve, reject) => {
|
|
695
|
+
writer.outputStream.write(centralDirEntry, (error) => {
|
|
696
|
+
if (error) {
|
|
697
|
+
reject(error);
|
|
698
|
+
} else {
|
|
699
|
+
writer.currentPosition += centralDirEntry.length;
|
|
700
|
+
if (options?.onProgress) {
|
|
701
|
+
options.onProgress(entry);
|
|
702
|
+
}
|
|
703
|
+
resolve();
|
|
704
|
+
}
|
|
705
|
+
});
|
|
706
|
+
});
|
|
707
|
+
}
|
|
708
|
+
|
|
709
|
+
return writer.currentPosition - centralDirStart;
|
|
710
|
+
}
|
|
711
|
+
|
|
712
|
+
/**
|
|
713
|
+
* Write End of Central Directory record
|
|
714
|
+
*
|
|
715
|
+
* @param writer - ZipFileWriter object
|
|
716
|
+
* @param totalEntries - Total number of entries in ZIP
|
|
717
|
+
* @param centralDirSize - Size of central directory in bytes
|
|
718
|
+
* @param centralDirOffset - Offset to start of central directory
|
|
719
|
+
* @param archiveComment - Optional archive comment (max 65535 bytes)
|
|
720
|
+
* @returns Promise that resolves when EOCD is written
|
|
721
|
+
*/
|
|
722
|
+
async writeEndOfCentralDirectory(
|
|
723
|
+
writer: ZipFileWriter,
|
|
724
|
+
totalEntries: number,
|
|
725
|
+
centralDirSize: number,
|
|
726
|
+
centralDirOffset: number,
|
|
727
|
+
archiveComment?: string
|
|
728
|
+
): Promise<void> {
|
|
729
|
+
const comment = archiveComment || '';
|
|
730
|
+
const commentBytes = Buffer.from(comment, 'utf8');
|
|
731
|
+
const commentLength = Math.min(commentBytes.length, 0xFFFF); // Max 65535 bytes
|
|
732
|
+
|
|
733
|
+
const buffer = Buffer.alloc(22 + commentLength);
|
|
734
|
+
let offset = 0;
|
|
735
|
+
|
|
736
|
+
// End of central directory signature (4 bytes)
|
|
737
|
+
buffer.writeUInt32LE(0x06054b50, offset);
|
|
738
|
+
offset += 4;
|
|
739
|
+
|
|
740
|
+
// Number of this disk (2 bytes)
|
|
741
|
+
buffer.writeUInt16LE(0, offset);
|
|
742
|
+
offset += 2;
|
|
743
|
+
|
|
744
|
+
// Number of the disk with the start of the central directory (2 bytes)
|
|
745
|
+
buffer.writeUInt16LE(0, offset);
|
|
746
|
+
offset += 2;
|
|
747
|
+
|
|
748
|
+
// Total number of entries in the central directory on this disk (2 bytes)
|
|
749
|
+
buffer.writeUInt16LE(totalEntries, offset);
|
|
750
|
+
offset += 2;
|
|
751
|
+
|
|
752
|
+
// Total number of entries in the central directory (2 bytes)
|
|
753
|
+
buffer.writeUInt16LE(totalEntries, offset);
|
|
754
|
+
offset += 2;
|
|
755
|
+
|
|
756
|
+
// Size of the central directory (4 bytes)
|
|
757
|
+
buffer.writeUInt32LE(centralDirSize, offset);
|
|
758
|
+
offset += 4;
|
|
759
|
+
|
|
760
|
+
// Offset of start of central directory with respect to the starting disk number (4 bytes)
|
|
761
|
+
buffer.writeUInt32LE(centralDirOffset, offset);
|
|
762
|
+
offset += 4;
|
|
763
|
+
|
|
764
|
+
// ZIP file comment length (2 bytes)
|
|
765
|
+
buffer.writeUInt16LE(commentLength, offset);
|
|
766
|
+
offset += 2;
|
|
767
|
+
|
|
768
|
+
// ZIP file comment (variable length)
|
|
769
|
+
if (commentLength > 0) {
|
|
770
|
+
commentBytes.copy(buffer, offset, 0, commentLength);
|
|
771
|
+
}
|
|
772
|
+
|
|
773
|
+
// Write EOCD to file
|
|
774
|
+
await new Promise<void>((resolve, reject) => {
|
|
775
|
+
writer.outputStream.write(buffer, (error) => {
|
|
776
|
+
if (error) {
|
|
777
|
+
reject(error);
|
|
778
|
+
} else {
|
|
779
|
+
writer.currentPosition += buffer.length;
|
|
780
|
+
resolve();
|
|
781
|
+
}
|
|
782
|
+
});
|
|
783
|
+
});
|
|
784
|
+
}
|
|
785
|
+
|
|
786
|
+
/**
|
|
787
|
+
* Finalize ZIP file by closing handles
|
|
788
|
+
*
|
|
789
|
+
* @param writer - ZipFileWriter object
|
|
790
|
+
* @returns Promise that resolves when file is closed
|
|
791
|
+
*/
|
|
792
|
+
async finalizeZipFile(writer: ZipFileWriter): Promise<void> {
|
|
793
|
+
// Close file descriptor
|
|
794
|
+
fs.closeSync(writer.outputFd);
|
|
795
|
+
|
|
796
|
+
// Close write stream
|
|
797
|
+
return new Promise<void>((resolve, reject) => {
|
|
798
|
+
writer.outputStream.end((error: Error | null) => {
|
|
799
|
+
if (error) {
|
|
800
|
+
reject(error);
|
|
801
|
+
} else {
|
|
802
|
+
resolve();
|
|
803
|
+
}
|
|
804
|
+
});
|
|
805
|
+
});
|
|
806
|
+
}
|
|
807
|
+
|
|
808
|
+
// ============================================================================
|
|
809
|
+
// File Creation Methods
|
|
810
|
+
// ============================================================================
|
|
811
|
+
|
|
812
|
+
/**
|
|
813
|
+
* Create a ZIP file from multiple file paths
|
|
814
|
+
* Simple API that uses the modular subfunctions
|
|
815
|
+
*
|
|
816
|
+
* @param filePaths - Array of file paths to add to ZIP
|
|
817
|
+
* @param outputPath - Path where the ZIP file should be created
|
|
818
|
+
* @param options - Optional compression options
|
|
819
|
+
* @returns Promise that resolves when ZIP creation is complete
|
|
820
|
+
*/
|
|
821
|
+
async createZipFromFiles(
|
|
822
|
+
filePaths: string[],
|
|
823
|
+
outputPath: string,
|
|
824
|
+
options?: CompressOptions
|
|
825
|
+
): Promise<void> {
|
|
826
|
+
// Initialize ZIP file
|
|
827
|
+
const writer = await this.initializeZipFile(outputPath);
|
|
828
|
+
|
|
829
|
+
try {
|
|
830
|
+
// Process each file
|
|
831
|
+
for (const filePath of filePaths) {
|
|
832
|
+
// Validate and create entry
|
|
833
|
+
const entry = await this.prepareEntryFromFile(filePath);
|
|
834
|
+
|
|
835
|
+
// Write entry to ZIP
|
|
836
|
+
await this.writeZipEntry(writer, entry, filePath, options);
|
|
837
|
+
}
|
|
838
|
+
|
|
839
|
+
// Write central directory
|
|
840
|
+
const entries = this.getDirectory();
|
|
841
|
+
const centralDirOffset = writer.currentPosition;
|
|
842
|
+
const centralDirSize = await this.writeCentralDirectory(writer, entries);
|
|
843
|
+
|
|
844
|
+
// Write EOCD
|
|
845
|
+
await this.writeEndOfCentralDirectory(
|
|
846
|
+
writer,
|
|
847
|
+
entries.length,
|
|
848
|
+
centralDirSize,
|
|
849
|
+
centralDirOffset
|
|
850
|
+
);
|
|
851
|
+
} finally {
|
|
852
|
+
await this.finalizeZipFile(writer);
|
|
853
|
+
}
|
|
854
|
+
}
|
|
855
|
+
|
|
856
|
+
/**
|
|
857
|
+
* Add a file to the current ZIP
|
|
858
|
+
*
|
|
859
|
+
* @param filePath - Path to the file to add
|
|
860
|
+
* @param entryName - Name to use in ZIP (defaults to filename)
|
|
861
|
+
* @param options - Optional compression options
|
|
862
|
+
* @returns Promise resolving to the created ZipEntry
|
|
863
|
+
*/
|
|
864
|
+
async addFileToZip(
|
|
865
|
+
filePath: string,
|
|
866
|
+
entryName?: string,
|
|
867
|
+
options?: CompressOptions
|
|
868
|
+
): Promise<ZipEntry> {
|
|
869
|
+
// Use provided entry name or derive from file path
|
|
870
|
+
const name = entryName || path.basename(filePath);
|
|
871
|
+
const entry = this.createZipEntry(name);
|
|
872
|
+
|
|
873
|
+
// Use ZipCompressNode.compressFile() which handles file I/O and compression
|
|
874
|
+
await this.getZipCompressNode().compressFile(filePath, entry, options);
|
|
875
|
+
|
|
876
|
+
// Add to entries
|
|
877
|
+
this.zipEntries.push(entry);
|
|
878
|
+
|
|
879
|
+
return entry;
|
|
880
|
+
}
|
|
881
|
+
|
|
882
|
+
// ============================================================================
|
|
883
|
+
// File Management Methods
|
|
884
|
+
// ============================================================================
|
|
885
|
+
|
|
886
|
+
/**
|
|
887
|
+
* Get underlying file handle for advanced operations
|
|
888
|
+
*
|
|
889
|
+
* @returns StreamingFileHandle if file is loaded
|
|
890
|
+
* @throws Error if file handle not available
|
|
891
|
+
*/
|
|
892
|
+
getFileHandle(): StreamingFileHandle {
|
|
893
|
+
if (!this.fileHandle) {
|
|
894
|
+
throw new Error('File handle not available');
|
|
895
|
+
}
|
|
896
|
+
return this.fileHandle;
|
|
897
|
+
}
|
|
898
|
+
|
|
899
|
+
/**
|
|
900
|
+
* Close file handle explicitly
|
|
901
|
+
*
|
|
902
|
+
* @returns Promise that resolves when file is closed
|
|
903
|
+
*/
|
|
904
|
+
async closeFile(): Promise<void> {
|
|
905
|
+
if (this.fileHandle) {
|
|
906
|
+
await this.fileHandle.close();
|
|
907
|
+
this.fileHandle = null;
|
|
908
|
+
}
|
|
909
|
+
}
|
|
910
|
+
|
|
911
|
+
  /**
   * Copy entry from another ZIP (compatibility method)
   * Reads the local header and compressed data from the file and returns it as a Buffer
   * This is used when updating an existing ZIP file to copy unchanged entries
   *
   * The whole local record (fixed header + filename + extra field +
   * optional encryption header + compressed data) is read into one buffer,
   * so memory use is proportional to the entry's compressed size.
   *
   * @param entry - ZIP entry to copy
   * @returns Promise resolving to Buffer containing local header + compressed data
   * @throws Error if file handle not available
   */
  async copyEntry(entry: ZipEntry): Promise<Buffer> {
    if (!this.fileHandle) {
      throw new Error('File handle not available');
    }

    // Read local file header (30 bytes)
    const localHeaderBuffer = Buffer.alloc(LOCAL_HDR.SIZE);
    await this.fileHandle.read(localHeaderBuffer, 0, LOCAL_HDR.SIZE, entry.localHdrOffset);

    // Verify signature
    if (localHeaderBuffer.readUInt32LE(0) !== LOCAL_HDR.SIGNATURE) {
      throw new Error(Errors.INVALID_CEN);
    }

    // Extract header information: variable-length field sizes and flags.
    const filenameLength = localHeaderBuffer.readUInt16LE(LOCAL_HDR.FNAME_LEN);
    const extraFieldLength = localHeaderBuffer.readUInt16LE(LOCAL_HDR.EXTRA_LEN);
    const bitFlags = localHeaderBuffer.readUInt16LE(LOCAL_HDR.FLAGS);

    // Check for encryption header
    let encryptionHeaderLength = 0;
    if (bitFlags & GP_FLAG.ENCRYPTED) {
      encryptionHeaderLength = ENCRYPT_HDR_SIZE;
    }

    // Calculate sizes
    // NOTE(review): per the ZIP spec (APPNOTE 6.1), the compressed-size field
    // of a traditionally-encrypted entry already INCLUDES the 12-byte
    // encryption header. If entry.compressedSize follows that convention,
    // adding encryptionHeaderLength here over-reads by 12 bytes — confirm
    // which convention this library uses for entry.compressedSize.
    const localHeaderSize = LOCAL_HDR.SIZE + filenameLength + extraFieldLength;
    const totalLocalEntrySize = localHeaderSize + encryptionHeaderLength + entry.compressedSize;

    // Read the entire local entry (header + filename + extra field + encryption header + compressed data)
    const entryBuffer = Buffer.alloc(totalLocalEntrySize);
    await this.fileHandle.read(entryBuffer, 0, totalLocalEntrySize, entry.localHdrOffset);

    return entryBuffer;
  }
|
|
955
|
+
|
|
956
|
+
// ============================================================================
|
|
957
|
+
// File Update Methods
|
|
958
|
+
// ============================================================================
|
|
959
|
+
|
|
960
|
+
/**
|
|
961
|
+
* Update existing ZIP file
|
|
962
|
+
*
|
|
963
|
+
* This is a placeholder for future implementation.
|
|
964
|
+
* Full implementation would require:
|
|
965
|
+
* - Reading existing ZIP structure
|
|
966
|
+
* - Identifying entries to update/add/remove
|
|
967
|
+
* - Writing updated ZIP file
|
|
968
|
+
*
|
|
969
|
+
* @param zipPath - Path to the ZIP file to update
|
|
970
|
+
* @param updates - Update operations (add, update, remove entries)
|
|
971
|
+
* @returns Promise that resolves when update is complete
|
|
972
|
+
*/
|
|
973
|
+
async updateZipFile(
|
|
974
|
+
zipPath: string,
|
|
975
|
+
updates: {
|
|
976
|
+
add?: Array<{ filePath: string; entryName?: string; options?: CompressOptions }>;
|
|
977
|
+
update?: Array<{ entryName: string; filePath: string; options?: CompressOptions }>;
|
|
978
|
+
remove?: string[]; // Entry names to remove
|
|
979
|
+
}
|
|
980
|
+
): Promise<void> {
|
|
981
|
+
// Placeholder for future implementation
|
|
982
|
+
// This would require significant ZIP file manipulation logic
|
|
983
|
+
throw new Error('updateZipFile() - Full implementation pending. Use neozip CLI for now.');
|
|
984
|
+
}
|
|
985
|
+
|
|
986
|
+
// ============================================================================
|
|
987
|
+
// File-based ZIP Loading Methods (merged from ZipLoadEntriesServer)
|
|
988
|
+
// ============================================================================
|
|
989
|
+
|
|
990
|
+
/**
|
|
991
|
+
* Open file handle for streaming mode
|
|
992
|
+
*/
|
|
993
|
+
private async openFileHandle(filePath: string): Promise<StreamingFileHandle> {
|
|
994
|
+
const handle = await fs.promises.open(filePath, 'r');
|
|
995
|
+
|
|
996
|
+
return {
|
|
997
|
+
async read(buffer: Buffer, offset: number, length: number, position: number): Promise<number> {
|
|
998
|
+
const result = await handle.read(buffer, offset, length, position);
|
|
999
|
+
return result.bytesRead;
|
|
1000
|
+
},
|
|
1001
|
+
async stat(): Promise<{ size: number }> {
|
|
1002
|
+
const stats = await handle.stat();
|
|
1003
|
+
return { size: stats.size };
|
|
1004
|
+
},
|
|
1005
|
+
async close(): Promise<void> {
|
|
1006
|
+
await handle.close();
|
|
1007
|
+
}
|
|
1008
|
+
};
|
|
1009
|
+
}
|
|
1010
|
+
|
|
1011
|
+
  /**
   * Load End of Central Directory (EOCD) in streaming mode
   *
   * Scans backward from the end of the file for the EOCD signature (the
   * record may be preceded by up to a 65535-byte archive comment), parses
   * the central directory size/offset, detects the ZIP64 sentinel, and
   * loads the archive comment if present.
   *
   * @throws Error if no file handle is open or no EOCD signature is found
   */
  private async loadEOCD(): Promise<void> {
    if (!this.fileHandle) {
      throw new Error('File handle not available');
    }

    // Read potential EOCD area (last 65KB + 22 bytes)
    // 22 = fixed EOCD size; 0xFFFF = maximum archive-comment length.
    const searchSize = Math.min(0xFFFF + 22, this.fileSize);
    const searchStart = this.fileSize - searchSize;
    const buffer = Buffer.alloc(searchSize);

    try {
      await this.fileHandle.read(buffer, 0, searchSize, searchStart);

      // Find EOCD signature
      // Scanning backward finds the LAST signature, which is the real EOCD
      // even if the comment happens to contain the signature bytes earlier.
      let eocdOffset = -1;
      for (let i = buffer.length - 22; i >= 0; i--) {
        if (buffer[i] === 0x50) { // Quick 'P' check
          if (buffer.readUInt32LE(i) === CENTRAL_END.SIGNATURE) {
            eocdOffset = searchStart + i;
            break;
          }
        }
      }

      if (eocdOffset === -1) {
        throw new Error(Errors.INVALID_FORMAT);
      }

      // Parse EOCD
      const eocdBuffer = Buffer.alloc(22);
      await this.fileHandle.read(eocdBuffer, 0, 22, eocdOffset);

      if (eocdBuffer.readUInt32LE(0) === CENTRAL_END.SIGNATURE) {
        // Standard ZIP format
        // centralDirSize/centralDirOffset live on a base class; the cast
        // bypasses visibility, not the type of the values written.
        const zipkit = this as any;
        zipkit.centralDirSize = eocdBuffer.readUInt32LE(CENTRAL_END.CENTRAL_DIR_SIZE);
        zipkit.centralDirOffset = eocdBuffer.readUInt32LE(CENTRAL_END.CENTRAL_DIR_OFFSET);

        // Handle ZIP64
        // 0xFFFFFFFF is the ZIP64 sentinel: the real offset is in the
        // ZIP64 EOCD record located via its locator.
        if (zipkit.centralDirOffset === 0xFFFFFFFF) {
          await this.loadZIP64EOCD(eocdOffset);
        }
      } else {
        throw new Error(Errors.INVALID_FORMAT);
      }

      // Load ZIP comment
      const commentLength = eocdBuffer.readUInt16LE(CENTRAL_END.ZIP_COMMENT_LEN);
      if (commentLength > 0) {
        const commentBuffer = Buffer.alloc(commentLength);
        await this.fileHandle.read(commentBuffer, 0, commentLength, eocdOffset + 22);
        const zipkitAny = this as any;
        zipkitAny.zipComment = commentBuffer.toString();
      }
    } finally {
      // Clean up search buffer to help GC (can be up to 65KB)
      // Note: Buffer will be GC'd when it goes out of scope, but explicit cleanup helps
    }
  }
|
|
1073
|
+
|
|
1074
|
+
/**
|
|
1075
|
+
* Load ZIP64 End of Central Directory
|
|
1076
|
+
*/
|
|
1077
|
+
private async loadZIP64EOCD(eocdOffset: number): Promise<void> {
|
|
1078
|
+
if (!this.fileHandle) {
|
|
1079
|
+
throw new Error('File handle not available');
|
|
1080
|
+
}
|
|
1081
|
+
|
|
1082
|
+
// Look for ZIP64 locator
|
|
1083
|
+
const locatorOffset = eocdOffset - 20;
|
|
1084
|
+
const locatorBuffer = Buffer.alloc(20);
|
|
1085
|
+
await this.fileHandle.read(locatorBuffer, 0, 20, locatorOffset);
|
|
1086
|
+
|
|
1087
|
+
if (locatorBuffer.readUInt32LE(0) === ZIP64_CENTRAL_END.SIGNATURE) {
|
|
1088
|
+
// Read ZIP64 EOCD
|
|
1089
|
+
const zip64Offset = locatorBuffer.readBigUInt64LE(8);
|
|
1090
|
+
const zip64Buffer = Buffer.alloc(56);
|
|
1091
|
+
await this.fileHandle.read(zip64Buffer, 0, 56, Number(zip64Offset));
|
|
1092
|
+
|
|
1093
|
+
const zipkit = this as any;
|
|
1094
|
+
zipkit.centralDirSize = Number(zip64Buffer.readBigUInt64LE(ZIP64_CENTRAL_DIR.CENTRAL_DIR_SIZE));
|
|
1095
|
+
zipkit.centralDirOffset = Number(zip64Buffer.readBigUInt64LE(ZIP64_CENTRAL_DIR.CENTRAL_DIR_OFFSET));
|
|
1096
|
+
}
|
|
1097
|
+
}
|
|
1098
|
+
|
|
1099
|
+
/**
|
|
1100
|
+
* Reset file-based ZIP data to initial state
|
|
1101
|
+
*/
|
|
1102
|
+
private resetFileData(): void {
|
|
1103
|
+
this.fileHandle = null;
|
|
1104
|
+
this.filePath = null;
|
|
1105
|
+
this.fileSize = 0;
|
|
1106
|
+
// Note: centralDirSize and centralDirOffset are reset in base class resetZipData()
|
|
1107
|
+
const zipkit = this as any;
|
|
1108
|
+
zipkit.centralDirSize = 0;
|
|
1109
|
+
zipkit.centralDirOffset = 0;
|
|
1110
|
+
}
|
|
1111
|
+
|
|
1112
|
+
// ============================================================================
|
|
1113
|
+
// ZIP File Extraction Subfunctions
|
|
1114
|
+
// ============================================================================
|
|
1115
|
+
|
|
1116
|
+
/**
|
|
1117
|
+
* Filter ZIP entries based on include/exclude patterns
|
|
1118
|
+
*
|
|
1119
|
+
* @param entries - Array of ZipEntry objects to filter
|
|
1120
|
+
* @param options - Optional filtering options
|
|
1121
|
+
* @returns Filtered array of ZipEntry objects
|
|
1122
|
+
*/
|
|
1123
|
+
filterEntries(
|
|
1124
|
+
entries: ZipEntry[],
|
|
1125
|
+
options?: {
|
|
1126
|
+
include?: string[];
|
|
1127
|
+
exclude?: string[];
|
|
1128
|
+
skipMetadata?: boolean; // Skip META-INF/* files (default: true)
|
|
1129
|
+
}
|
|
1130
|
+
): ZipEntry[] {
|
|
1131
|
+
const skipMetadata = options?.skipMetadata !== false;
|
|
1132
|
+
|
|
1133
|
+
return entries.filter(entry => {
|
|
1134
|
+
const filename = entry.filename || '';
|
|
1135
|
+
|
|
1136
|
+
// Skip metadata files if requested
|
|
1137
|
+
if (skipMetadata && (filename.startsWith('META-INF/') || filename === 'META-INF')) {
|
|
1138
|
+
return false;
|
|
1139
|
+
}
|
|
1140
|
+
|
|
1141
|
+
// Skip directories
|
|
1142
|
+
if (entry.isDirectory) {
|
|
1143
|
+
return false;
|
|
1144
|
+
}
|
|
1145
|
+
|
|
1146
|
+
// If no filtering patterns, include all
|
|
1147
|
+
if (!options?.include && !options?.exclude) {
|
|
1148
|
+
return true;
|
|
1149
|
+
}
|
|
1150
|
+
|
|
1151
|
+
const fileName = path.basename(filename);
|
|
1152
|
+
const relativePath = path.relative(process.cwd(), filename);
|
|
1153
|
+
|
|
1154
|
+
// Check include patterns first (if any)
|
|
1155
|
+
if (options.include && options.include.length > 0) {
|
|
1156
|
+
const matchesInclude = options.include.some(pattern =>
|
|
1157
|
+
minimatch(fileName, pattern) || minimatch(relativePath, pattern) || minimatch(filename, pattern)
|
|
1158
|
+
);
|
|
1159
|
+
if (!matchesInclude) {
|
|
1160
|
+
return false;
|
|
1161
|
+
}
|
|
1162
|
+
}
|
|
1163
|
+
|
|
1164
|
+
// Check exclude patterns
|
|
1165
|
+
if (options.exclude && options.exclude.length > 0) {
|
|
1166
|
+
const matchesExclude = options.exclude.some(pattern =>
|
|
1167
|
+
minimatch(fileName, pattern) || minimatch(relativePath, pattern) || minimatch(filename, pattern)
|
|
1168
|
+
);
|
|
1169
|
+
if (matchesExclude) {
|
|
1170
|
+
return false;
|
|
1171
|
+
}
|
|
1172
|
+
}
|
|
1173
|
+
|
|
1174
|
+
return true;
|
|
1175
|
+
});
|
|
1176
|
+
}
|
|
1177
|
+
|
|
1178
|
+
/**
|
|
1179
|
+
* Prepare extraction path for a ZIP entry
|
|
1180
|
+
*
|
|
1181
|
+
* @param entry - ZipEntry to extract
|
|
1182
|
+
* @param destination - Destination directory
|
|
1183
|
+
* @param options - Optional path options
|
|
1184
|
+
* @returns Absolute output path for the entry
|
|
1185
|
+
*/
|
|
1186
|
+
prepareExtractionPath(
|
|
1187
|
+
entry: ZipEntry,
|
|
1188
|
+
destination: string,
|
|
1189
|
+
options?: {
|
|
1190
|
+
junkPaths?: boolean; // Extract to flat structure (default: false)
|
|
1191
|
+
}
|
|
1192
|
+
): string {
|
|
1193
|
+
const filename = entry.filename || '';
|
|
1194
|
+
|
|
1195
|
+
// Determine output path
|
|
1196
|
+
let outputPath: string;
|
|
1197
|
+
if (options?.junkPaths) {
|
|
1198
|
+
// Extract to flat structure (filename only)
|
|
1199
|
+
outputPath = path.join(destination, path.basename(filename));
|
|
1200
|
+
} else {
|
|
1201
|
+
// Preserve directory structure
|
|
1202
|
+
outputPath = path.join(destination, filename);
|
|
1203
|
+
}
|
|
1204
|
+
|
|
1205
|
+
// Ensure parent directory exists
|
|
1206
|
+
const parentDir = path.dirname(outputPath);
|
|
1207
|
+
if (parentDir && parentDir !== '.' && !fs.existsSync(parentDir)) {
|
|
1208
|
+
fs.mkdirSync(parentDir, { recursive: true });
|
|
1209
|
+
}
|
|
1210
|
+
|
|
1211
|
+
return path.resolve(outputPath);
|
|
1212
|
+
}
|
|
1213
|
+
|
|
1214
|
+
/**
|
|
1215
|
+
* Extract timestamps from ZIP entry
|
|
1216
|
+
*
|
|
1217
|
+
* @param entry - ZipEntry to extract timestamps from
|
|
1218
|
+
* @returns Object with mtime, atime, ctime (Date objects or null)
|
|
1219
|
+
*/
|
|
1220
|
+
extractEntryTimestamps(entry: ZipEntry): { mtime: Date | null; atime: Date | null; ctime: Date | null } {
|
|
1221
|
+
let mtime: Date | null = null;
|
|
1222
|
+
let atime: Date | null = null;
|
|
1223
|
+
let ctime: Date | null = null;
|
|
1224
|
+
|
|
1225
|
+
// Try extended timestamps first (most accurate)
|
|
1226
|
+
if ((entry as any).ntfsTime) {
|
|
1227
|
+
const ntfs = (entry as any).ntfsTime;
|
|
1228
|
+
if (ntfs.mtime) mtime = new Date(ntfs.mtime);
|
|
1229
|
+
if (ntfs.atime) atime = new Date(ntfs.atime);
|
|
1230
|
+
if (ntfs.ctime) ctime = new Date(ntfs.ctime);
|
|
1231
|
+
} else if ((entry as any).extendedTime) {
|
|
1232
|
+
const ext = (entry as any).extendedTime;
|
|
1233
|
+
if (ext.mtime) mtime = new Date(ext.mtime);
|
|
1234
|
+
if (ext.atime) atime = new Date(ext.atime);
|
|
1235
|
+
if (ext.ctime) ctime = new Date(ext.ctime);
|
|
1236
|
+
}
|
|
1237
|
+
|
|
1238
|
+
// Fall back to standard DOS timestamps (ZIP stores packed 32-bit time+date, not Unix time)
|
|
1239
|
+
if (!mtime && (entry as any).parseDateTime) {
|
|
1240
|
+
const dosTimestamp = entry.lastModTimeDate || entry.timeDateDOS || 0;
|
|
1241
|
+
if (dosTimestamp) {
|
|
1242
|
+
mtime = (entry as any).parseDateTime(dosTimestamp);
|
|
1243
|
+
}
|
|
1244
|
+
}
|
|
1245
|
+
|
|
1246
|
+
return { mtime, atime, ctime };
|
|
1247
|
+
}
|
|
1248
|
+
|
|
1249
|
+
/**
|
|
1250
|
+
* Determine if an entry should be extracted based on overwrite logic
|
|
1251
|
+
*
|
|
1252
|
+
* @param entry - ZipEntry to check
|
|
1253
|
+
* @param outputPath - Path where file would be extracted
|
|
1254
|
+
* @param options - Optional overwrite options
|
|
1255
|
+
* @returns Promise resolving to decision object
|
|
1256
|
+
*/
|
|
1257
|
+
async shouldExtractEntry(
|
|
1258
|
+
entry: ZipEntry,
|
|
1259
|
+
outputPath: string,
|
|
1260
|
+
options?: {
|
|
1261
|
+
overwrite?: boolean; // Always overwrite (default: false)
|
|
1262
|
+
never?: boolean; // Never overwrite (default: false)
|
|
1263
|
+
freshenOnly?: boolean; // Only extract if newer (default: false)
|
|
1264
|
+
updateOnly?: boolean; // Only extract if newer or doesn't exist (default: false)
|
|
1265
|
+
onOverwritePrompt?: (filename: string) => Promise<'y' | 'n' | 'a' | 'q'>; // Interactive prompt callback
|
|
1266
|
+
}
|
|
1267
|
+
): Promise<{ shouldExtract: boolean; reason?: string }> {
|
|
1268
|
+
const fileExists = fs.existsSync(outputPath);
|
|
1269
|
+
|
|
1270
|
+
// If file doesn't exist, always extract (unless freshenOnly mode)
|
|
1271
|
+
if (!fileExists) {
|
|
1272
|
+
if (options?.freshenOnly) {
|
|
1273
|
+
return { shouldExtract: false, reason: 'not in destination' };
|
|
1274
|
+
}
|
|
1275
|
+
// For updateOnly or normal mode, extract new files
|
|
1276
|
+
return { shouldExtract: true };
|
|
1277
|
+
}
|
|
1278
|
+
|
|
1279
|
+
// File exists - apply overwrite logic
|
|
1280
|
+
if (options?.never) {
|
|
1281
|
+
return { shouldExtract: false, reason: 'never overwrite' };
|
|
1282
|
+
}
|
|
1283
|
+
|
|
1284
|
+
if (options?.overwrite) {
|
|
1285
|
+
return { shouldExtract: true };
|
|
1286
|
+
}
|
|
1287
|
+
|
|
1288
|
+
if (options?.freshenOnly || options?.updateOnly) {
|
|
1289
|
+
// Compare timestamps to determine if archive file is newer
|
|
1290
|
+
const existingStats = fs.statSync(outputPath);
|
|
1291
|
+
const timestamps = this.extractEntryTimestamps(entry);
|
|
1292
|
+
const archiveDate = timestamps.mtime || new Date(0);
|
|
1293
|
+
|
|
1294
|
+
if (archiveDate <= existingStats.mtime) {
|
|
1295
|
+
return { shouldExtract: false, reason: 'not newer' };
|
|
1296
|
+
}
|
|
1297
|
+
|
|
1298
|
+
// File in archive is newer, proceed with extraction
|
|
1299
|
+
return { shouldExtract: true };
|
|
1300
|
+
}
|
|
1301
|
+
|
|
1302
|
+
// Interactive mode - use callback if provided
|
|
1303
|
+
if (options?.onOverwritePrompt) {
|
|
1304
|
+
const response = await options.onOverwritePrompt(entry.filename || '');
|
|
1305
|
+
if (response === 'n') {
|
|
1306
|
+
return { shouldExtract: false, reason: 'user declined' };
|
|
1307
|
+
} else if (response === 'q') {
|
|
1308
|
+
return { shouldExtract: false, reason: 'user aborted' };
|
|
1309
|
+
} else if (response === 'y' || response === 'a') {
|
|
1310
|
+
return { shouldExtract: true };
|
|
1311
|
+
}
|
|
1312
|
+
}
|
|
1313
|
+
|
|
1314
|
+
// Default: skip if exists and no overwrite option
|
|
1315
|
+
return { shouldExtract: false, reason: 'file exists' };
|
|
1316
|
+
}
|
|
1317
|
+
|
|
1318
|
+
/**
|
|
1319
|
+
* Restore entry metadata (timestamps and permissions) to extracted file
|
|
1320
|
+
*
|
|
1321
|
+
* @param filePath - Path to the extracted file
|
|
1322
|
+
* @param entry - ZipEntry that was extracted
|
|
1323
|
+
* @param options - Optional metadata options
|
|
1324
|
+
*/
|
|
1325
|
+
restoreEntryMetadata(
|
|
1326
|
+
filePath: string,
|
|
1327
|
+
entry: ZipEntry,
|
|
1328
|
+
options?: {
|
|
1329
|
+
preserveTimestamps?: boolean; // Restore file timestamps (default: true)
|
|
1330
|
+
preservePermissions?: boolean; // Restore file permissions (default: false)
|
|
1331
|
+
}
|
|
1332
|
+
): void {
|
|
1333
|
+
const preserveTimestamps = options?.preserveTimestamps !== false;
|
|
1334
|
+
const preservePermissions = options?.preservePermissions === true;
|
|
1335
|
+
|
|
1336
|
+
// Restore timestamps
|
|
1337
|
+
if (preserveTimestamps) {
|
|
1338
|
+
try {
|
|
1339
|
+
const timestamps = this.extractEntryTimestamps(entry);
|
|
1340
|
+
|
|
1341
|
+
if (timestamps.mtime && timestamps.atime) {
|
|
1342
|
+
fs.utimesSync(filePath, timestamps.atime, timestamps.mtime);
|
|
1343
|
+
} else if (timestamps.mtime) {
|
|
1344
|
+
// If we only have modification time, use it for both
|
|
1345
|
+
fs.utimesSync(filePath, timestamps.mtime, timestamps.mtime);
|
|
1346
|
+
}
|
|
1347
|
+
} catch (error) {
|
|
1348
|
+
// Don't fail extraction if timestamp restoration fails
|
|
1349
|
+
// Some filesystems don't support timestamp modification
|
|
1350
|
+
}
|
|
1351
|
+
}
|
|
1352
|
+
|
|
1353
|
+
// Restore permissions (Unix only)
|
|
1354
|
+
if (preservePermissions && process.platform !== 'win32') {
|
|
1355
|
+
try {
|
|
1356
|
+
// Restore UID/GID if available
|
|
1357
|
+
if ((entry as any).uid !== null && (entry as any).uid !== undefined &&
|
|
1358
|
+
(entry as any).gid !== null && (entry as any).gid !== undefined) {
|
|
1359
|
+
// Only root can change ownership to different users
|
|
1360
|
+
if (process.getuid && process.getuid() === 0) {
|
|
1361
|
+
// Running as root - can change both UID and GID
|
|
1362
|
+
fs.chownSync(filePath, (entry as any).uid, (entry as any).gid);
|
|
1363
|
+
} else {
|
|
1364
|
+
// Not running as root - try to change group only if we're a member
|
|
1365
|
+
try {
|
|
1366
|
+
fs.chownSync(filePath, -1, (entry as any).gid); // -1 means don't change UID
|
|
1367
|
+
} catch (error) {
|
|
1368
|
+
// Ignore errors - insufficient privileges
|
|
1369
|
+
}
|
|
1370
|
+
}
|
|
1371
|
+
}
|
|
1372
|
+
|
|
1373
|
+
// Restore file mode if available
|
|
1374
|
+
if (entry.extFileAttr) {
|
|
1375
|
+
// Extract permission bits from external file attributes
|
|
1376
|
+
const permissions = (entry.extFileAttr >>> 16) & 0o777;
|
|
1377
|
+
if (permissions > 0) {
|
|
1378
|
+
fs.chmodSync(filePath, permissions);
|
|
1379
|
+
}
|
|
1380
|
+
}
|
|
1381
|
+
} catch (error) {
|
|
1382
|
+
// Don't fail extraction if permission restoration fails
|
|
1383
|
+
}
|
|
1384
|
+
}
|
|
1385
|
+
}
|
|
1386
|
+
|
|
1387
|
+
/**
|
|
1388
|
+
* Extract a single entry to a file path
|
|
1389
|
+
* Handles symlinks, hardlinks, timestamps, and permissions
|
|
1390
|
+
*
|
|
1391
|
+
* @param entry - ZipEntry to extract
|
|
1392
|
+
* @param outputPath - Path where file should be extracted
|
|
1393
|
+
* @param options - Optional extraction options
|
|
1394
|
+
* @returns Promise resolving to extraction result
|
|
1395
|
+
*/
|
|
1396
|
+
async extractEntryToPath(
|
|
1397
|
+
entry: ZipEntry,
|
|
1398
|
+
outputPath: string,
|
|
1399
|
+
options?: {
|
|
1400
|
+
skipHashCheck?: boolean; // Skip hash verification (default: false)
|
|
1401
|
+
preserveTimestamps?: boolean; // Restore file timestamps (default: true)
|
|
1402
|
+
preservePermissions?: boolean; // Restore file permissions (default: false)
|
|
1403
|
+
symlinks?: boolean; // Handle symbolic links (default: false)
|
|
1404
|
+
hardLinks?: boolean; // Handle hard links (default: false)
|
|
1405
|
+
onProgress?: (entry: ZipEntry, bytes: number) => void; // Progress callback
|
|
1406
|
+
}
|
|
1407
|
+
): Promise<{ success: boolean; bytesExtracted: number; error?: string }> {
|
|
1408
|
+
const filename = entry.filename || '';
|
|
1409
|
+
|
|
1410
|
+
try {
|
|
1411
|
+
// Check if entry is a symbolic link
|
|
1412
|
+
const isSymlink = (entry as any).isSymlink && (entry as any).linkTarget;
|
|
1413
|
+
const S_IFLNK = 0o120000;
|
|
1414
|
+
const fileType = entry.extFileAttr ? ((entry.extFileAttr >>> 16) & 0o170000) : 0;
|
|
1415
|
+
const isSymlinkByAttr = fileType === S_IFLNK;
|
|
1416
|
+
|
|
1417
|
+
if ((isSymlink || isSymlinkByAttr) && options?.symlinks) {
|
|
1418
|
+
// Handle symbolic link
|
|
1419
|
+
let linkTarget = (entry as any).linkTarget;
|
|
1420
|
+
|
|
1421
|
+
if (!linkTarget) {
|
|
1422
|
+
// Extract target from file content
|
|
1423
|
+
const bufferBased = !this.fileHandle;
|
|
1424
|
+
if (bufferBased) {
|
|
1425
|
+
const data = await this.extract(entry, options?.skipHashCheck);
|
|
1426
|
+
if (data) {
|
|
1427
|
+
linkTarget = data.toString('utf8');
|
|
1428
|
+
}
|
|
1429
|
+
} else {
|
|
1430
|
+
// For file-based, extract to temp file and read
|
|
1431
|
+
const tempPath = path.join(require('os').tmpdir(), `neozip-symlink-${Date.now()}-${process.pid}`);
|
|
1432
|
+
try {
|
|
1433
|
+
await this.extractToFile(entry, tempPath, {
|
|
1434
|
+
skipHashCheck: options?.skipHashCheck
|
|
1435
|
+
});
|
|
1436
|
+
const data = fs.readFileSync(tempPath, 'utf8');
|
|
1437
|
+
linkTarget = data;
|
|
1438
|
+
// Clean up temp file
|
|
1439
|
+
fs.unlinkSync(tempPath);
|
|
1440
|
+
} catch (error) {
|
|
1441
|
+
// Clean up temp file if it exists
|
|
1442
|
+
if (fs.existsSync(tempPath)) {
|
|
1443
|
+
try {
|
|
1444
|
+
fs.unlinkSync(tempPath);
|
|
1445
|
+
} catch (cleanupError) {
|
|
1446
|
+
// Ignore cleanup errors
|
|
1447
|
+
}
|
|
1448
|
+
}
|
|
1449
|
+
return { success: false, bytesExtracted: 0, error: `Could not extract symbolic link target: ${error instanceof Error ? error.message : String(error)}` };
|
|
1450
|
+
}
|
|
1451
|
+
}
|
|
1452
|
+
}
|
|
1453
|
+
|
|
1454
|
+
if (linkTarget && process.platform !== 'win32') {
|
|
1455
|
+
try {
|
|
1456
|
+
fs.symlinkSync(linkTarget, outputPath);
|
|
1457
|
+
return { success: true, bytesExtracted: Buffer.byteLength(linkTarget, 'utf8') };
|
|
1458
|
+
} catch (error) {
|
|
1459
|
+
return { success: false, bytesExtracted: 0, error: `Failed to create symbolic link: ${error instanceof Error ? error.message : String(error)}` };
|
|
1460
|
+
}
|
|
1461
|
+
} else {
|
|
1462
|
+
return { success: false, bytesExtracted: 0, error: 'Symbolic links not supported on this platform' };
|
|
1463
|
+
}
|
|
1464
|
+
}
|
|
1465
|
+
|
|
1466
|
+
// Check if entry is a hard link
|
|
1467
|
+
const isHardLink = (entry as any).isHardLink && (entry as any).originalEntry;
|
|
1468
|
+
|
|
1469
|
+
if (isHardLink && options?.hardLinks) {
|
|
1470
|
+
// Handle hard link
|
|
1471
|
+
const originalEntry = (entry as any).originalEntry;
|
|
1472
|
+
const outDir = path.dirname(outputPath);
|
|
1473
|
+
const originalPath = path.resolve(outDir, originalEntry);
|
|
1474
|
+
|
|
1475
|
+
if (fs.existsSync(originalPath) && process.platform !== 'win32') {
|
|
1476
|
+
try {
|
|
1477
|
+
fs.linkSync(originalPath, outputPath);
|
|
1478
|
+
return { success: true, bytesExtracted: 0 }; // No actual bytes extracted for hard links
|
|
1479
|
+
} catch (error) {
|
|
1480
|
+
return { success: false, bytesExtracted: 0, error: `Failed to create hard link: ${error instanceof Error ? error.message : String(error)}` };
|
|
1481
|
+
}
|
|
1482
|
+
} else {
|
|
1483
|
+
return { success: false, bytesExtracted: 0, error: 'Hard links not supported or original file not found' };
|
|
1484
|
+
}
|
|
1485
|
+
}
|
|
1486
|
+
|
|
1487
|
+
// Regular file extraction
|
|
1488
|
+
// Check if we're in buffer-based or file-based mode
|
|
1489
|
+
const bufferBased = !this.fileHandle;
|
|
1490
|
+
const fileBased = !!this.fileHandle;
|
|
1491
|
+
|
|
1492
|
+
// Use temp file for overwrite safety
|
|
1493
|
+
const fileExists = fs.existsSync(outputPath);
|
|
1494
|
+
const needsTempFile = fileExists;
|
|
1495
|
+
const tempPath = needsTempFile
|
|
1496
|
+
? path.join(require('os').tmpdir(), `neozip-extract-${Date.now()}-${process.pid}-${path.basename(outputPath).replace(/[^a-zA-Z0-9]/g, '_')}`)
|
|
1497
|
+
: outputPath;
|
|
1498
|
+
|
|
1499
|
+
let bytesExtracted = 0;
|
|
1500
|
+
let extractionSucceeded = false;
|
|
1501
|
+
|
|
1502
|
+
try {
|
|
1503
|
+
if (bufferBased) {
|
|
1504
|
+
// Buffer-based (in-memory) mode: extract to buffer, then write to file
|
|
1505
|
+
const data = await this.extract(entry, options?.skipHashCheck);
|
|
1506
|
+
|
|
1507
|
+
if (!data) {
|
|
1508
|
+
return { success: false, bytesExtracted: 0, error: 'Extraction returned no data' };
|
|
1509
|
+
}
|
|
1510
|
+
|
|
1511
|
+
// Write buffer to temp file
|
|
1512
|
+
fs.writeFileSync(tempPath, data);
|
|
1513
|
+
bytesExtracted = data.length;
|
|
1514
|
+
extractionSucceeded = true;
|
|
1515
|
+
|
|
1516
|
+
if (options?.onProgress) {
|
|
1517
|
+
options.onProgress(entry, bytesExtracted);
|
|
1518
|
+
}
|
|
1519
|
+
} else if (fileBased) {
|
|
1520
|
+
// File-based mode: use direct streaming extraction to temp file
|
|
1521
|
+
await this.extractToFile(entry, tempPath, {
|
|
1522
|
+
skipHashCheck: options?.skipHashCheck,
|
|
1523
|
+
onProgress: (bytes: number) => {
|
|
1524
|
+
bytesExtracted = bytes;
|
|
1525
|
+
if (options?.onProgress) {
|
|
1526
|
+
options.onProgress(entry, bytes);
|
|
1527
|
+
}
|
|
1528
|
+
}
|
|
1529
|
+
});
|
|
1530
|
+
|
|
1531
|
+
// If we get here, extraction succeeded
|
|
1532
|
+
extractionSucceeded = true;
|
|
1533
|
+
} else {
|
|
1534
|
+
return { success: false, bytesExtracted: 0, error: 'ZIP file not loaded or unknown backend type' };
|
|
1535
|
+
}
|
|
1536
|
+
|
|
1537
|
+
// If extraction succeeded and we used a temp file, replace the original
|
|
1538
|
+
if (extractionSucceeded && needsTempFile) {
|
|
1539
|
+
// Delete the original file
|
|
1540
|
+
fs.unlinkSync(outputPath);
|
|
1541
|
+
// Move temp file to final location
|
|
1542
|
+
fs.renameSync(tempPath, outputPath);
|
|
1543
|
+
}
|
|
1544
|
+
|
|
1545
|
+
// Restore metadata (timestamps and permissions)
|
|
1546
|
+
this.restoreEntryMetadata(outputPath, entry, {
|
|
1547
|
+
preserveTimestamps: options?.preserveTimestamps,
|
|
1548
|
+
preservePermissions: options?.preservePermissions
|
|
1549
|
+
});
|
|
1550
|
+
|
|
1551
|
+
return { success: true, bytesExtracted };
|
|
1552
|
+
} catch (error) {
|
|
1553
|
+
// Clean up temp file if it exists
|
|
1554
|
+
if (needsTempFile && fs.existsSync(tempPath)) {
|
|
1555
|
+
try {
|
|
1556
|
+
fs.unlinkSync(tempPath);
|
|
1557
|
+
} catch (cleanupError) {
|
|
1558
|
+
// Ignore cleanup errors
|
|
1559
|
+
}
|
|
1560
|
+
}
|
|
1561
|
+
|
|
1562
|
+
return {
|
|
1563
|
+
success: false,
|
|
1564
|
+
bytesExtracted: 0,
|
|
1565
|
+
error: error instanceof Error ? error.message : String(error)
|
|
1566
|
+
};
|
|
1567
|
+
}
|
|
1568
|
+
} catch (error) {
|
|
1569
|
+
return {
|
|
1570
|
+
success: false,
|
|
1571
|
+
bytesExtracted: 0,
|
|
1572
|
+
error: error instanceof Error ? error.message : String(error)
|
|
1573
|
+
};
|
|
1574
|
+
}
|
|
1575
|
+
}
|
|
1576
|
+
|
|
1577
|
+
/**
|
|
1578
|
+
* Extract all files from a ZIP archive to a destination directory
|
|
1579
|
+
* Simple API that uses the modular subfunctions
|
|
1580
|
+
*
|
|
1581
|
+
* @param archivePath - Path to the ZIP file
|
|
1582
|
+
* @param destination - Directory where files should be extracted (ignored if testOnly is true)
|
|
1583
|
+
* @param options - Optional extraction options
|
|
1584
|
+
* @returns Promise resolving to extraction statistics
|
|
1585
|
+
*/
|
|
1586
|
+
async extractZipFile(
|
|
1587
|
+
archivePath: string,
|
|
1588
|
+
destination: string,
|
|
1589
|
+
options?: {
|
|
1590
|
+
password?: string; // Password for encrypted archives
|
|
1591
|
+
overwrite?: boolean; // Always overwrite existing files
|
|
1592
|
+
junkPaths?: boolean; // Extract to flat structure
|
|
1593
|
+
include?: string[]; // Include patterns
|
|
1594
|
+
exclude?: string[]; // Exclude patterns
|
|
1595
|
+
preserveTimestamps?: boolean; // Restore file timestamps
|
|
1596
|
+
preservePermissions?: boolean; // Restore file permissions
|
|
1597
|
+
symlinks?: boolean; // Handle symbolic links
|
|
1598
|
+
hardLinks?: boolean; // Handle hard links
|
|
1599
|
+
skipHashCheck?: boolean; // Skip hash verification
|
|
1600
|
+
testOnly?: boolean; // Test integrity without extracting files
|
|
1601
|
+
onProgress?: (entry: ZipEntry, bytes: number) => void; // Progress callback
|
|
1602
|
+
onOverwritePrompt?: (filename: string) => Promise<'y' | 'n' | 'a' | 'q'>; // Overwrite prompt callback
|
|
1603
|
+
}
|
|
1604
|
+
): Promise<{ filesExtracted: number; bytesExtracted: number }> {
|
|
1605
|
+
// Ensure destination directory exists
|
|
1606
|
+
if (!fs.existsSync(destination)) {
|
|
1607
|
+
fs.mkdirSync(destination, { recursive: true });
|
|
1608
|
+
}
|
|
1609
|
+
|
|
1610
|
+
// Load ZIP file if not already loaded or if path changed
|
|
1611
|
+
if (!this.fileHandle || this.filePath !== archivePath) {
|
|
1612
|
+
await this.loadZipFile(archivePath);
|
|
1613
|
+
}
|
|
1614
|
+
|
|
1615
|
+
// Set password if provided (needed for decryption)
|
|
1616
|
+
if (options?.password) {
|
|
1617
|
+
(this as any).password = options.password;
|
|
1618
|
+
}
|
|
1619
|
+
|
|
1620
|
+
// Get all entries
|
|
1621
|
+
const entries = this.getDirectory();
|
|
1622
|
+
|
|
1623
|
+
// Filter entries
|
|
1624
|
+
const filteredEntries = this.filterEntries(entries, {
|
|
1625
|
+
include: options?.include,
|
|
1626
|
+
exclude: options?.exclude,
|
|
1627
|
+
skipMetadata: true
|
|
1628
|
+
});
|
|
1629
|
+
|
|
1630
|
+
// Extract each entry
|
|
1631
|
+
let filesExtracted = 0;
|
|
1632
|
+
let bytesExtracted = 0;
|
|
1633
|
+
let alwaysOverwrite = false; // Track "always" response from user
|
|
1634
|
+
|
|
1635
|
+
// If testOnly mode, validate entries without extracting
|
|
1636
|
+
if (options?.testOnly) {
|
|
1637
|
+
for (const entry of filteredEntries) {
|
|
1638
|
+
try {
|
|
1639
|
+
await this.testEntry(entry, {
|
|
1640
|
+
skipHashCheck: options?.skipHashCheck,
|
|
1641
|
+
onProgress: options?.onProgress ? (bytes: number) => options.onProgress!(entry, bytes) : undefined
|
|
1642
|
+
});
|
|
1643
|
+
// If we get here, validation passed
|
|
1644
|
+
filesExtracted++;
|
|
1645
|
+
bytesExtracted += (entry.uncompressedSize || 0);
|
|
1646
|
+
} catch (error) {
|
|
1647
|
+
// Validation failed - rethrow the error
|
|
1648
|
+
throw error;
|
|
1649
|
+
}
|
|
1650
|
+
}
|
|
1651
|
+
return { filesExtracted, bytesExtracted };
|
|
1652
|
+
}
|
|
1653
|
+
|
|
1654
|
+
// Normal extraction mode
|
|
1655
|
+
for (const entry of filteredEntries) {
|
|
1656
|
+
// Prepare output path
|
|
1657
|
+
const outputPath = this.prepareExtractionPath(entry, destination, {
|
|
1658
|
+
junkPaths: options?.junkPaths
|
|
1659
|
+
});
|
|
1660
|
+
|
|
1661
|
+
// Check if should extract
|
|
1662
|
+
const decision = await this.shouldExtractEntry(entry, outputPath, {
|
|
1663
|
+
overwrite: options?.overwrite || alwaysOverwrite,
|
|
1664
|
+
never: false,
|
|
1665
|
+
freshenOnly: false,
|
|
1666
|
+
updateOnly: false,
|
|
1667
|
+
onOverwritePrompt: async (filename: string) => {
|
|
1668
|
+
if (options?.onOverwritePrompt) {
|
|
1669
|
+
const response = await options.onOverwritePrompt(filename);
|
|
1670
|
+
if (response === 'a') {
|
|
1671
|
+
alwaysOverwrite = true;
|
|
1672
|
+
}
|
|
1673
|
+
return response;
|
|
1674
|
+
}
|
|
1675
|
+
return 'n'; // Default to no if no callback provided
|
|
1676
|
+
}
|
|
1677
|
+
});
|
|
1678
|
+
|
|
1679
|
+
if (!decision.shouldExtract) {
|
|
1680
|
+
// Check if user aborted
|
|
1681
|
+
if (decision.reason === 'user aborted') {
|
|
1682
|
+
break; // Stop extraction
|
|
1683
|
+
}
|
|
1684
|
+
continue;
|
|
1685
|
+
}
|
|
1686
|
+
|
|
1687
|
+
// Extract entry
|
|
1688
|
+
const result = await this.extractEntryToPath(entry, outputPath, {
|
|
1689
|
+
skipHashCheck: options?.skipHashCheck,
|
|
1690
|
+
preserveTimestamps: options?.preserveTimestamps !== false,
|
|
1691
|
+
preservePermissions: options?.preservePermissions,
|
|
1692
|
+
symlinks: options?.symlinks,
|
|
1693
|
+
hardLinks: options?.hardLinks,
|
|
1694
|
+
onProgress: options?.onProgress
|
|
1695
|
+
});
|
|
1696
|
+
|
|
1697
|
+
if (result.success) {
|
|
1698
|
+
filesExtracted++;
|
|
1699
|
+
bytesExtracted += result.bytesExtracted;
|
|
1700
|
+
}
|
|
1701
|
+
}
|
|
1702
|
+
|
|
1703
|
+
return { filesExtracted, bytesExtracted };
|
|
1704
|
+
}
|
|
1705
|
+
}
|
|
1706
|
+
|