@neoware_inc/neozipkit 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +134 -0
- package/dist/browser/ZipkitBrowser.d.ts +27 -0
- package/dist/browser/ZipkitBrowser.d.ts.map +1 -0
- package/dist/browser/ZipkitBrowser.js +303 -0
- package/dist/browser/ZipkitBrowser.js.map +1 -0
- package/dist/browser/index.d.ts +9 -0
- package/dist/browser/index.d.ts.map +1 -0
- package/dist/browser/index.esm.d.ts +12 -0
- package/dist/browser/index.esm.d.ts.map +1 -0
- package/dist/browser/index.esm.js +46 -0
- package/dist/browser/index.esm.js.map +1 -0
- package/dist/browser/index.js +38 -0
- package/dist/browser/index.js.map +1 -0
- package/dist/browser-esm/index.d.ts +9 -0
- package/dist/browser-esm/index.js +50211 -0
- package/dist/browser-esm/index.js.map +7 -0
- package/dist/browser-umd/index.d.ts +9 -0
- package/dist/browser-umd/index.js +50221 -0
- package/dist/browser-umd/index.js.map +7 -0
- package/dist/browser-umd/index.min.js +39 -0
- package/dist/browser.d.ts +9 -0
- package/dist/browser.js +38 -0
- package/dist/core/ZipCompress.d.ts +99 -0
- package/dist/core/ZipCompress.d.ts.map +1 -0
- package/dist/core/ZipCompress.js +287 -0
- package/dist/core/ZipCompress.js.map +1 -0
- package/dist/core/ZipCopy.d.ts +175 -0
- package/dist/core/ZipCopy.d.ts.map +1 -0
- package/dist/core/ZipCopy.js +310 -0
- package/dist/core/ZipCopy.js.map +1 -0
- package/dist/core/ZipDecompress.d.ts +57 -0
- package/dist/core/ZipDecompress.d.ts.map +1 -0
- package/dist/core/ZipDecompress.js +155 -0
- package/dist/core/ZipDecompress.js.map +1 -0
- package/dist/core/ZipEntry.d.ts +138 -0
- package/dist/core/ZipEntry.d.ts.map +1 -0
- package/dist/core/ZipEntry.js +829 -0
- package/dist/core/ZipEntry.js.map +1 -0
- package/dist/core/Zipkit.d.ts +315 -0
- package/dist/core/Zipkit.d.ts.map +1 -0
- package/dist/core/Zipkit.js +647 -0
- package/dist/core/Zipkit.js.map +1 -0
- package/dist/core/ZstdManager.d.ts +56 -0
- package/dist/core/ZstdManager.d.ts.map +1 -0
- package/dist/core/ZstdManager.js +144 -0
- package/dist/core/ZstdManager.js.map +1 -0
- package/dist/core/components/HashCalculator.d.ts +138 -0
- package/dist/core/components/HashCalculator.d.ts.map +1 -0
- package/dist/core/components/HashCalculator.js +360 -0
- package/dist/core/components/HashCalculator.js.map +1 -0
- package/dist/core/components/Logger.d.ts +73 -0
- package/dist/core/components/Logger.d.ts.map +1 -0
- package/dist/core/components/Logger.js +156 -0
- package/dist/core/components/Logger.js.map +1 -0
- package/dist/core/components/ProgressTracker.d.ts +43 -0
- package/dist/core/components/ProgressTracker.d.ts.map +1 -0
- package/dist/core/components/ProgressTracker.js +112 -0
- package/dist/core/components/ProgressTracker.js.map +1 -0
- package/dist/core/components/Support.d.ts +64 -0
- package/dist/core/components/Support.d.ts.map +1 -0
- package/dist/core/components/Support.js +71 -0
- package/dist/core/components/Support.js.map +1 -0
- package/dist/core/components/Util.d.ts +26 -0
- package/dist/core/components/Util.d.ts.map +1 -0
- package/dist/core/components/Util.js +95 -0
- package/dist/core/components/Util.js.map +1 -0
- package/dist/core/constants/Errors.d.ts +52 -0
- package/dist/core/constants/Errors.d.ts.map +1 -0
- package/dist/core/constants/Errors.js +67 -0
- package/dist/core/constants/Errors.js.map +1 -0
- package/dist/core/constants/Headers.d.ts +170 -0
- package/dist/core/constants/Headers.d.ts.map +1 -0
- package/dist/core/constants/Headers.js +194 -0
- package/dist/core/constants/Headers.js.map +1 -0
- package/dist/core/encryption/Manager.d.ts +58 -0
- package/dist/core/encryption/Manager.d.ts.map +1 -0
- package/dist/core/encryption/Manager.js +121 -0
- package/dist/core/encryption/Manager.js.map +1 -0
- package/dist/core/encryption/ZipCrypto.d.ts +172 -0
- package/dist/core/encryption/ZipCrypto.d.ts.map +1 -0
- package/dist/core/encryption/ZipCrypto.js +554 -0
- package/dist/core/encryption/ZipCrypto.js.map +1 -0
- package/dist/core/encryption/index.d.ts +9 -0
- package/dist/core/encryption/index.d.ts.map +1 -0
- package/dist/core/encryption/index.js +17 -0
- package/dist/core/encryption/index.js.map +1 -0
- package/dist/core/encryption/types.d.ts +29 -0
- package/dist/core/encryption/types.d.ts.map +1 -0
- package/dist/core/encryption/types.js +12 -0
- package/dist/core/encryption/types.js.map +1 -0
- package/dist/core/index.d.ts +27 -0
- package/dist/core/index.d.ts.map +1 -0
- package/dist/core/index.js +59 -0
- package/dist/core/index.js.map +1 -0
- package/dist/core/version.d.ts +5 -0
- package/dist/core/version.d.ts.map +1 -0
- package/dist/core/version.js +31 -0
- package/dist/core/version.js.map +1 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +38 -0
- package/dist/index.js.map +1 -0
- package/dist/node/ZipCompressNode.d.ts +123 -0
- package/dist/node/ZipCompressNode.d.ts.map +1 -0
- package/dist/node/ZipCompressNode.js +565 -0
- package/dist/node/ZipCompressNode.js.map +1 -0
- package/dist/node/ZipCopyNode.d.ts +165 -0
- package/dist/node/ZipCopyNode.d.ts.map +1 -0
- package/dist/node/ZipCopyNode.js +347 -0
- package/dist/node/ZipCopyNode.js.map +1 -0
- package/dist/node/ZipDecompressNode.d.ts +197 -0
- package/dist/node/ZipDecompressNode.d.ts.map +1 -0
- package/dist/node/ZipDecompressNode.js +678 -0
- package/dist/node/ZipDecompressNode.js.map +1 -0
- package/dist/node/ZipkitNode.d.ts +466 -0
- package/dist/node/ZipkitNode.d.ts.map +1 -0
- package/dist/node/ZipkitNode.js +1426 -0
- package/dist/node/ZipkitNode.js.map +1 -0
- package/dist/node/index.d.ts +25 -0
- package/dist/node/index.d.ts.map +1 -0
- package/dist/node/index.js +54 -0
- package/dist/node/index.js.map +1 -0
- package/dist/types/index.d.ts +45 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/index.js +11 -0
- package/dist/types/index.js.map +1 -0
- package/examples/README.md +261 -0
- package/examples/append-data.json +44 -0
- package/examples/copy-zip-append.ts +139 -0
- package/examples/copy-zip.ts +152 -0
- package/examples/create-zip.ts +172 -0
- package/examples/extract-zip.ts +118 -0
- package/examples/list-zip.ts +161 -0
- package/examples/test-files/data.json +116 -0
- package/examples/test-files/document.md +80 -0
- package/examples/test-files/document.txt +6 -0
- package/examples/test-files/file1.txt +48 -0
- package/examples/test-files/file2.txt +80 -0
- package/examples/tsconfig.json +44 -0
- package/package.json +167 -0
- package/src/browser/ZipkitBrowser.ts +305 -0
- package/src/browser/index.esm.ts +32 -0
- package/src/browser/index.ts +19 -0
- package/src/core/ZipCompress.ts +370 -0
- package/src/core/ZipCopy.ts +434 -0
- package/src/core/ZipDecompress.ts +191 -0
- package/src/core/ZipEntry.ts +917 -0
- package/src/core/Zipkit.ts +794 -0
- package/src/core/ZstdManager.ts +165 -0
- package/src/core/components/HashCalculator.ts +384 -0
- package/src/core/components/Logger.ts +180 -0
- package/src/core/components/ProgressTracker.ts +134 -0
- package/src/core/components/Support.ts +77 -0
- package/src/core/components/Util.ts +91 -0
- package/src/core/constants/Errors.ts +78 -0
- package/src/core/constants/Headers.ts +205 -0
- package/src/core/encryption/Manager.ts +137 -0
- package/src/core/encryption/ZipCrypto.ts +650 -0
- package/src/core/encryption/index.ts +15 -0
- package/src/core/encryption/types.ts +33 -0
- package/src/core/index.ts +42 -0
- package/src/core/version.ts +33 -0
- package/src/index.ts +19 -0
- package/src/node/ZipCompressNode.ts +618 -0
- package/src/node/ZipCopyNode.ts +437 -0
- package/src/node/ZipDecompressNode.ts +793 -0
- package/src/node/ZipkitNode.ts +1706 -0
- package/src/node/index.ts +40 -0
- package/src/types/index.ts +68 -0
- package/src/types/modules.d.ts +22 -0
- package/src/types/opentimestamps.d.ts +1 -0
|
@@ -0,0 +1,678 @@
|
|
|
1
|
+
"use strict";
// ======================================
// ZipDecompressNode.ts - Node.js File-Based Decompression
// Copyright (c) 2025 NeoWare, Inc. All rights reserved.
// ======================================
//
// LOGGING INSTRUCTIONS:
// ---------------------
// To enable/disable logging, set loggingEnabled to true/false in the class:
// private static loggingEnabled: boolean = true; // Enable logging
// private static loggingEnabled: boolean = false; // Disable logging
//
// Logging respects the global Logger level (debug, info, warn, error, silent).
// Logger level is automatically set to 'debug' when loggingEnabled is true.
//
// --- TypeScript-compiler-emitted CommonJS/ESM interop helpers (generated code; do not hand-edit) ---
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ZipDecompressNode = void 0;
// Runtime dependencies: pako (deflate/inflate), Zstd support, shared Logger,
// error-message constants, ZIP header constants, incremental hash calculator,
// ZipCrypto decryption, and Node's fs module.
const pako = require('pako');
const ZstdManager_1 = require("../core/ZstdManager");
const Logger_1 = require("../core/components/Logger");
const Errors_1 = __importDefault(require("../core/constants/Errors"));
const Headers_1 = require("../core/constants/Headers");
const HashCalculator_1 = require("../core/components/HashCalculator");
const ZipCrypto_1 = require("../core/encryption/ZipCrypto");
const types_1 = require("../core/encryption/types");
const fs = __importStar(require("fs"));
|
|
63
|
+
/**
|
|
64
|
+
* ZipDecompressNode - Node.js file-based decompression operations
|
|
65
|
+
*
|
|
66
|
+
* Independent decompression implementation for Node.js environments.
|
|
67
|
+
* All decompression logic is implemented directly without delegating to ZipDecompress.
|
|
68
|
+
*
|
|
69
|
+
* @example
|
|
70
|
+
* ```typescript
|
|
71
|
+
* const zipkitNode = new ZipkitNode();
|
|
72
|
+
* const decompressNode = new ZipDecompressNode(zipkitNode);
|
|
73
|
+
* await decompressNode.extractToFile(entry, './output/file.txt');
|
|
74
|
+
* ```
|
|
75
|
+
*/
|
|
76
|
+
class ZipDecompressNode {
|
|
77
|
+
/**
|
|
78
|
+
* Creates a new ZipDecompressNode instance
|
|
79
|
+
* @param zipkitNode - ZipkitNode instance to use for ZIP operations
|
|
80
|
+
*/
|
|
81
|
+
constructor(zipkitNode) {
|
|
82
|
+
this.zipkitNode = zipkitNode;
|
|
83
|
+
// If logging is enabled, ensure Logger level is set to debug
|
|
84
|
+
if (ZipDecompressNode.loggingEnabled) {
|
|
85
|
+
Logger_1.Logger.setLevel('debug');
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
/**
|
|
89
|
+
* Internal logging method - only logs if class logging is enabled
|
|
90
|
+
*/
|
|
91
|
+
log(...args) {
|
|
92
|
+
if (ZipDecompressNode.loggingEnabled) {
|
|
93
|
+
Logger_1.Logger.debug(`[ZipDecompressNode]`, ...args);
|
|
94
|
+
}
|
|
95
|
+
}
|
|
96
|
+
// ============================================================================
|
|
97
|
+
// File-Based Extraction Methods
|
|
98
|
+
// ============================================================================
|
|
99
|
+
/**
|
|
100
|
+
* Extract file directly to disk with true streaming (no memory buffering)
|
|
101
|
+
* Public method that validates file mode and extracts entry to file
|
|
102
|
+
*
|
|
103
|
+
* This method processes chunks as they are decompressed and writes them
|
|
104
|
+
* directly to disk, maintaining minimal memory footprint regardless of file size.
|
|
105
|
+
* This is the recommended method for file extraction to avoid memory issues.
|
|
106
|
+
*
|
|
107
|
+
* @param entry ZIP entry to extract
|
|
108
|
+
* @param outputPath Path where the file should be written
|
|
109
|
+
* @param options Optional extraction options including progress callback
|
|
110
|
+
* @throws Error if not a File-based ZIP
|
|
111
|
+
*/
|
|
112
|
+
async extractToFile(entry, outputPath, options) {
|
|
113
|
+
// Get fileHandle from zipkitNode (merged from ZipLoadEntriesServer)
|
|
114
|
+
const fileHandle = this.zipkitNode.getFileHandle();
|
|
115
|
+
// Call internal method with fileHandle
|
|
116
|
+
await this.extractToFileInternal(fileHandle, entry, outputPath, options);
|
|
117
|
+
}
|
|
118
|
+
/**
|
|
119
|
+
* Extract file to Buffer (in-memory) for file-based ZIP
|
|
120
|
+
*
|
|
121
|
+
* This method extracts a ZIP entry directly to a Buffer without writing to disk.
|
|
122
|
+
* This is ideal for reading metadata files (like NZIP.TOKEN) that don't need
|
|
123
|
+
* to be written to temporary files.
|
|
124
|
+
*
|
|
125
|
+
* @param entry ZIP entry to extract
|
|
126
|
+
* @param options Optional extraction options including progress callback
|
|
127
|
+
* @returns Promise that resolves to Buffer containing the extracted file data
|
|
128
|
+
* @throws Error if not a File-based ZIP or if extraction fails
|
|
129
|
+
*/
|
|
130
|
+
async extractToBuffer(entry, options) {
|
|
131
|
+
// Get fileHandle from zipkitNode
|
|
132
|
+
const fileHandle = this.zipkitNode.getFileHandle();
|
|
133
|
+
// Call internal extract to buffer method
|
|
134
|
+
return await this.extractToBufferInternal(fileHandle, entry, options);
|
|
135
|
+
}
|
|
136
|
+
/**
|
|
137
|
+
* Test entry integrity without extracting to disk
|
|
138
|
+
* Validates CRC-32 or SHA-256 hash without writing decompressed data
|
|
139
|
+
*
|
|
140
|
+
* This method processes chunks as they are decompressed and validates them,
|
|
141
|
+
* but discards the decompressed data instead of writing to disk. This is useful
|
|
142
|
+
* for verifying ZIP file integrity without extracting files.
|
|
143
|
+
*
|
|
144
|
+
* @param entry ZIP entry to test
|
|
145
|
+
* @param options Optional test options including progress callback
|
|
146
|
+
* @returns Promise that resolves to an object containing the verified hash (if SHA-256) or undefined
|
|
147
|
+
* @throws Error if validation fails (INVALID_CRC or INVALID_SHA256) or if not a File-based ZIP
|
|
148
|
+
*/
|
|
149
|
+
async testEntry(entry, options) {
|
|
150
|
+
// Get fileHandle from zipkitNode
|
|
151
|
+
const fileHandle = this.zipkitNode.getFileHandle();
|
|
152
|
+
// Call internal test method with fileHandle
|
|
153
|
+
return await this.testEntryInternal(fileHandle, entry, options);
|
|
154
|
+
}
|
|
155
|
+
// ============================================================================
|
|
156
|
+
// Internal File-Based Methods
|
|
157
|
+
// ============================================================================
|
|
158
|
+
/**
|
|
159
|
+
* Read compressed data from file and yield one block at a time
|
|
160
|
+
*
|
|
161
|
+
* MEMORY EFFICIENCY: Yields compressed data chunks one at a time without accumulation.
|
|
162
|
+
* Each chunk is read from disk and yielded immediately, allowing downstream processing
|
|
163
|
+
* (decryption, decompression) to handle one block at a time.
|
|
164
|
+
*
|
|
165
|
+
* @param fileHandle - File handle to read from
|
|
166
|
+
* @param entry - ZIP entry to read compressed data for
|
|
167
|
+
* @param chunkSize - Optional chunk size override (defaults to ZipkitServer's bufferSize)
|
|
168
|
+
* @returns Async generator yielding compressed data chunks one at a time
|
|
169
|
+
*/
|
|
170
|
+
async *readCompressedDataStream(fileHandle, entry, chunkSize) {
|
|
171
|
+
// Use provided chunkSize or ZipkitServer's default bufferSize
|
|
172
|
+
const effectiveChunkSize = chunkSize || this.zipkitNode.getBufferSize();
|
|
173
|
+
// Read local file header
|
|
174
|
+
const localHeaderBuffer = Buffer.alloc(30);
|
|
175
|
+
await fileHandle.read(localHeaderBuffer, 0, 30, entry.localHdrOffset);
|
|
176
|
+
if (localHeaderBuffer.readUInt32LE(0) !== 0x04034b50) { // LOCAL_HDR.SIGNATURE
|
|
177
|
+
throw new Error(Errors_1.default.INVALID_CEN);
|
|
178
|
+
}
|
|
179
|
+
// Calculate data start position
|
|
180
|
+
const filenameLength = localHeaderBuffer.readUInt16LE(26);
|
|
181
|
+
const extraFieldLength = localHeaderBuffer.readUInt16LE(28);
|
|
182
|
+
const dataStart = entry.localHdrOffset + 30 + filenameLength + extraFieldLength;
|
|
183
|
+
// Yield compressed data in chunks - one block at a time
|
|
184
|
+
let remaining = entry.compressedSize;
|
|
185
|
+
let position = dataStart;
|
|
186
|
+
while (remaining > 0) {
|
|
187
|
+
const currentChunkSize = Math.min(effectiveChunkSize, remaining);
|
|
188
|
+
const chunk = Buffer.alloc(currentChunkSize);
|
|
189
|
+
await fileHandle.read(chunk, 0, currentChunkSize, position);
|
|
190
|
+
this.log(`readCompressedDataStream: Yielding compressed chunk: ${chunk.length} bytes (${remaining} bytes remaining)`);
|
|
191
|
+
yield chunk;
|
|
192
|
+
position += currentChunkSize;
|
|
193
|
+
remaining -= currentChunkSize;
|
|
194
|
+
}
|
|
195
|
+
}
|
|
196
|
+
/**
|
|
197
|
+
* Handles: reading compressed data, optional decryption, decompression, hashing, and writing
|
|
198
|
+
* Internal method that takes fileHandle as parameter
|
|
199
|
+
*/
|
|
200
|
+
async extractToFileInternal(fileHandle, entry, outputPath, options) {
|
|
201
|
+
this.log(`extractToFileInternal called for entry: ${entry.filename}`);
|
|
202
|
+
this.log(`Entry isEncrypted: ${entry.isEncrypted}, has password: ${!!this.zipkitNode?.password}`);
|
|
203
|
+
try {
|
|
204
|
+
// Create output stream with overwrite flag to truncate existing files
|
|
205
|
+
const writeStream = fs.createWriteStream(outputPath, { flags: 'w' });
|
|
206
|
+
// Build compressed data stream - yields one block at a time
|
|
207
|
+
let dataStream = this.readCompressedDataStream(fileHandle, entry);
|
|
208
|
+
// Decrypt if needed using password on zipkitNode instance
|
|
209
|
+
// Decryption maintains state across blocks via updateKeys()
|
|
210
|
+
const isEncrypted = entry.isEncrypted && this.zipkitNode?.password;
|
|
211
|
+
if (isEncrypted) {
|
|
212
|
+
this.log(`Starting decryption for entry: ${entry.filename}`);
|
|
213
|
+
// Prepare entry for decryption by parsing local header
|
|
214
|
+
await ZipCrypto_1.DecryptionStream.prepareEntryForDecryption(fileHandle, entry);
|
|
215
|
+
const encryptionMethod = entry.encryptionMethod || types_1.EncryptionMethod.ZIP_CRYPTO;
|
|
216
|
+
this.log(`Creating DecryptionStream with method: ${encryptionMethod}`);
|
|
217
|
+
const decryptor = new ZipCrypto_1.DecryptionStream({
|
|
218
|
+
password: this.zipkitNode.password,
|
|
219
|
+
method: encryptionMethod,
|
|
220
|
+
entry: entry
|
|
221
|
+
});
|
|
222
|
+
this.log(`DecryptionStream created, calling decrypt()...`);
|
|
223
|
+
// Decryption processes one block at a time, maintaining state across blocks
|
|
224
|
+
dataStream = decryptor.decrypt(dataStream);
|
|
225
|
+
this.log(`decrypt() returned, dataStream is now a generator that yields one decrypted block at a time`);
|
|
226
|
+
}
|
|
227
|
+
// Pipeline: readCompressedDataStream() → DecryptionStream.decrypt() → decompressStream() → writeStream
|
|
228
|
+
// Each stage processes one block at a time without accumulation
|
|
229
|
+
await this.unCompressToFile(dataStream, entry, writeStream, {
|
|
230
|
+
skipHashCheck: options?.skipHashCheck,
|
|
231
|
+
onProgress: options?.onProgress,
|
|
232
|
+
outputPath
|
|
233
|
+
});
|
|
234
|
+
}
|
|
235
|
+
catch (error) {
|
|
236
|
+
throw error;
|
|
237
|
+
}
|
|
238
|
+
}
|
|
239
|
+
/**
|
|
240
|
+
* Extract file to Buffer (in-memory) for file-based ZIP
|
|
241
|
+
* Internal method that takes fileHandle as parameter
|
|
242
|
+
*
|
|
243
|
+
* MEMORY EFFICIENCY: Accumulates decompressed chunks into a Buffer.
|
|
244
|
+
* For small files (like metadata), this is acceptable. For large files,
|
|
245
|
+
* consider using extractToFile() instead.
|
|
246
|
+
*/
|
|
247
|
+
async extractToBufferInternal(fileHandle, entry, options) {
|
|
248
|
+
this.log(`extractToBufferInternal called for entry: ${entry.filename}`);
|
|
249
|
+
this.log(`Entry isEncrypted: ${entry.isEncrypted}, has password: ${!!this.zipkitNode?.password}`);
|
|
250
|
+
try {
|
|
251
|
+
// Build compressed data stream - yields one block at a time
|
|
252
|
+
let dataStream = this.readCompressedDataStream(fileHandle, entry);
|
|
253
|
+
// Decrypt if needed using password on zipkitNode instance
|
|
254
|
+
const isEncrypted = entry.isEncrypted && this.zipkitNode?.password;
|
|
255
|
+
if (isEncrypted) {
|
|
256
|
+
this.log(`Starting decryption for entry: ${entry.filename}`);
|
|
257
|
+
// Prepare entry for decryption by parsing local header
|
|
258
|
+
await ZipCrypto_1.DecryptionStream.prepareEntryForDecryption(fileHandle, entry);
|
|
259
|
+
const encryptionMethod = entry.encryptionMethod || types_1.EncryptionMethod.ZIP_CRYPTO;
|
|
260
|
+
this.log(`Creating DecryptionStream with method: ${encryptionMethod}`);
|
|
261
|
+
const decryptor = new ZipCrypto_1.DecryptionStream({
|
|
262
|
+
password: this.zipkitNode.password,
|
|
263
|
+
method: encryptionMethod,
|
|
264
|
+
entry: entry
|
|
265
|
+
});
|
|
266
|
+
this.log(`DecryptionStream created, calling decrypt()...`);
|
|
267
|
+
dataStream = decryptor.decrypt(dataStream);
|
|
268
|
+
this.log(`decrypt() returned, dataStream is now a generator that yields one decrypted block at a time`);
|
|
269
|
+
}
|
|
270
|
+
// Pipeline: readCompressedDataStream() → DecryptionStream.decrypt() → decompressStream() → accumulate to Buffer
|
|
271
|
+
return await this.unCompressToBuffer(dataStream, entry, {
|
|
272
|
+
skipHashCheck: options?.skipHashCheck,
|
|
273
|
+
onProgress: options?.onProgress
|
|
274
|
+
});
|
|
275
|
+
}
|
|
276
|
+
catch (error) {
|
|
277
|
+
throw error;
|
|
278
|
+
}
|
|
279
|
+
}
|
|
280
|
+
/**
|
|
281
|
+
* Test entry integrity without writing to disk
|
|
282
|
+
* Internal method that takes fileHandle as parameter
|
|
283
|
+
*/
|
|
284
|
+
async testEntryInternal(fileHandle, entry, options) {
|
|
285
|
+
this.log(`testEntryInternal called for entry: ${entry.filename}`);
|
|
286
|
+
this.log(`Entry isEncrypted: ${entry.isEncrypted}, has password: ${!!this.zipkitNode?.password}`);
|
|
287
|
+
try {
|
|
288
|
+
// Build compressed data stream - yields one block at a time
|
|
289
|
+
let dataStream = this.readCompressedDataStream(fileHandle, entry);
|
|
290
|
+
// Decrypt if needed using password on zipkitNode instance
|
|
291
|
+
const isEncrypted = entry.isEncrypted && this.zipkitNode?.password;
|
|
292
|
+
if (isEncrypted) {
|
|
293
|
+
this.log(`Starting decryption for entry: ${entry.filename}`);
|
|
294
|
+
// Prepare entry for decryption by parsing local header
|
|
295
|
+
await ZipCrypto_1.DecryptionStream.prepareEntryForDecryption(fileHandle, entry);
|
|
296
|
+
const encryptionMethod = entry.encryptionMethod || types_1.EncryptionMethod.ZIP_CRYPTO;
|
|
297
|
+
this.log(`Creating DecryptionStream with method: ${encryptionMethod}`);
|
|
298
|
+
const decryptor = new ZipCrypto_1.DecryptionStream({
|
|
299
|
+
password: this.zipkitNode.password,
|
|
300
|
+
method: encryptionMethod,
|
|
301
|
+
entry: entry
|
|
302
|
+
});
|
|
303
|
+
this.log(`DecryptionStream created, calling decrypt()...`);
|
|
304
|
+
dataStream = decryptor.decrypt(dataStream);
|
|
305
|
+
this.log(`decrypt() returned, dataStream is now a generator that yields one decrypted block at a time`);
|
|
306
|
+
}
|
|
307
|
+
// Pipeline: readCompressedDataStream() → DecryptionStream.decrypt() → decompressStream() → hash validation
|
|
308
|
+
// Data is discarded after validation, no file writing
|
|
309
|
+
return await this.unCompressToTest(dataStream, entry, {
|
|
310
|
+
skipHashCheck: options?.skipHashCheck,
|
|
311
|
+
onProgress: options?.onProgress
|
|
312
|
+
});
|
|
313
|
+
}
|
|
314
|
+
catch (error) {
|
|
315
|
+
throw error;
|
|
316
|
+
}
|
|
317
|
+
}
|
|
318
|
+
/**
|
|
319
|
+
* Decompress data stream and write to file
|
|
320
|
+
*
|
|
321
|
+
* MEMORY EFFICIENCY: Processes decompressed chunks one at a time.
|
|
322
|
+
* Pipeline: compressedStream → decompressStream() → hashCalc → writeStream
|
|
323
|
+
* - Each decompressed chunk is written immediately without accumulation
|
|
324
|
+
* - Hash calculation is incremental (HashCalculator)
|
|
325
|
+
* - Progress callbacks are invoked per chunk
|
|
326
|
+
*
|
|
327
|
+
* Handles decompression, hash calculation, file writing, and verification.
|
|
328
|
+
* Internal method only
|
|
329
|
+
*/
|
|
330
|
+
async unCompressToFile(compressedStream, entry, writeStream, // Node.js WriteStream
|
|
331
|
+
options) {
|
|
332
|
+
this.log(`unCompressToFile() called for entry: ${entry.filename}, method: ${entry.cmpMethod}`);
|
|
333
|
+
// Decompress stream - processes one block at a time
|
|
334
|
+
const decompressedStream = this.decompressStream(compressedStream, entry.cmpMethod);
|
|
335
|
+
// Process and write chunks - one block at a time
|
|
336
|
+
const hashCalc = new HashCalculator_1.HashCalculator({ useSHA256: !!entry.sha256 });
|
|
337
|
+
let totalBytes = 0;
|
|
338
|
+
try {
|
|
339
|
+
for await (const chunk of decompressedStream) {
|
|
340
|
+
this.log(`unCompressToFile: Processing decompressed chunk: ${chunk.length} bytes`);
|
|
341
|
+
hashCalc.update(chunk);
|
|
342
|
+
writeStream.write(chunk);
|
|
343
|
+
totalBytes += chunk.length;
|
|
344
|
+
if (options?.onProgress) {
|
|
345
|
+
options.onProgress(totalBytes);
|
|
346
|
+
}
|
|
347
|
+
}
|
|
348
|
+
// Close stream
|
|
349
|
+
await new Promise((resolve, reject) => {
|
|
350
|
+
writeStream.end(() => resolve(undefined));
|
|
351
|
+
writeStream.on('error', reject);
|
|
352
|
+
});
|
|
353
|
+
// Verify hash
|
|
354
|
+
if (!options?.skipHashCheck) {
|
|
355
|
+
if (entry.sha256) {
|
|
356
|
+
const calculatedHash = hashCalc.finalizeSHA256();
|
|
357
|
+
this.log(`SHA-256 comparison: calculated=${calculatedHash}, stored=${entry.sha256}`);
|
|
358
|
+
if (calculatedHash !== entry.sha256) {
|
|
359
|
+
if (options?.outputPath && fs) {
|
|
360
|
+
fs.unlinkSync(options.outputPath);
|
|
361
|
+
}
|
|
362
|
+
throw new Error(Errors_1.default.INVALID_SHA256);
|
|
363
|
+
}
|
|
364
|
+
this.log(`SHA-256 comparison: calculated=${calculatedHash}, stored=${entry.sha256}`);
|
|
365
|
+
}
|
|
366
|
+
else {
|
|
367
|
+
const calculatedCRC = hashCalc.finalizeCRC32();
|
|
368
|
+
this.log(`CRC-32 comparison: calculated=${calculatedCRC}, stored=${entry.crc}`);
|
|
369
|
+
if (calculatedCRC !== entry.crc) {
|
|
370
|
+
if (options?.outputPath && fs) {
|
|
371
|
+
fs.unlinkSync(options.outputPath);
|
|
372
|
+
}
|
|
373
|
+
throw new Error(Errors_1.default.INVALID_CRC);
|
|
374
|
+
}
|
|
375
|
+
this.log(`CRC-32 comparison: calculated=${calculatedCRC}, stored=${entry.crc}`);
|
|
376
|
+
}
|
|
377
|
+
}
|
|
378
|
+
}
|
|
379
|
+
catch (error) {
|
|
380
|
+
// Cleanup file on error
|
|
381
|
+
if (options?.outputPath && fs) {
|
|
382
|
+
try {
|
|
383
|
+
fs.unlinkSync(options.outputPath);
|
|
384
|
+
}
|
|
385
|
+
catch {
|
|
386
|
+
// Ignore cleanup errors
|
|
387
|
+
}
|
|
388
|
+
}
|
|
389
|
+
throw error;
|
|
390
|
+
}
|
|
391
|
+
}
|
|
392
|
+
// ============================================================================
|
|
393
|
+
// Decompression Methods
|
|
394
|
+
// ============================================================================
|
|
395
|
+
/**
|
|
396
|
+
* Decompress data stream and accumulate to Buffer
|
|
397
|
+
*
|
|
398
|
+
* MEMORY EFFICIENCY: Accumulates decompressed chunks into a Buffer.
|
|
399
|
+
* For small files (like metadata), this is acceptable. For large files,
|
|
400
|
+
* consider using unCompressToFile() instead.
|
|
401
|
+
*
|
|
402
|
+
* Pipeline: compressedStream → decompressStream() → hashCalc → Buffer accumulation
|
|
403
|
+
* - Each decompressed chunk is accumulated into a Buffer
|
|
404
|
+
* - Hash calculation is incremental (HashCalculator)
|
|
405
|
+
* - Progress callbacks are invoked per chunk
|
|
406
|
+
*
|
|
407
|
+
* Handles decompression, hash calculation, and Buffer accumulation.
|
|
408
|
+
* Internal method only
|
|
409
|
+
*/
|
|
410
|
+
async unCompressToBuffer(compressedStream, entry, options) {
|
|
411
|
+
this.log(`unCompressToBuffer() called for entry: ${entry.filename}, method: ${entry.cmpMethod}`);
|
|
412
|
+
// Decompress stream - processes one block at a time
|
|
413
|
+
const decompressedStream = this.decompressStream(compressedStream, entry.cmpMethod);
|
|
414
|
+
// Accumulate chunks into Buffer
|
|
415
|
+
const hashCalc = new HashCalculator_1.HashCalculator({ useSHA256: !!entry.sha256 });
|
|
416
|
+
const chunks = [];
|
|
417
|
+
let totalBytes = 0;
|
|
418
|
+
try {
|
|
419
|
+
for await (const chunk of decompressedStream) {
|
|
420
|
+
this.log(`unCompressToBuffer: Processing decompressed chunk: ${chunk.length} bytes`);
|
|
421
|
+
hashCalc.update(chunk);
|
|
422
|
+
chunks.push(chunk);
|
|
423
|
+
totalBytes += chunk.length;
|
|
424
|
+
if (options?.onProgress) {
|
|
425
|
+
options.onProgress(totalBytes);
|
|
426
|
+
}
|
|
427
|
+
}
|
|
428
|
+
// Concatenate all chunks into a single Buffer
|
|
429
|
+
const result = Buffer.concat(chunks);
|
|
430
|
+
// Verify hash
|
|
431
|
+
if (!options?.skipHashCheck) {
|
|
432
|
+
if (entry.sha256) {
|
|
433
|
+
const calculatedHash = hashCalc.finalizeSHA256();
|
|
434
|
+
this.log(`SHA-256 comparison: calculated=${calculatedHash}, stored=${entry.sha256}`);
|
|
435
|
+
if (calculatedHash !== entry.sha256) {
|
|
436
|
+
throw new Error(Errors_1.default.INVALID_SHA256);
|
|
437
|
+
}
|
|
438
|
+
this.log(`SHA-256 verification passed`);
|
|
439
|
+
}
|
|
440
|
+
else {
|
|
441
|
+
const calculatedCRC = hashCalc.finalizeCRC32();
|
|
442
|
+
this.log(`CRC-32 comparison: calculated=${calculatedCRC}, stored=${entry.crc}`);
|
|
443
|
+
if (calculatedCRC !== entry.crc) {
|
|
444
|
+
throw new Error(Errors_1.default.INVALID_CRC);
|
|
445
|
+
}
|
|
446
|
+
this.log(`CRC-32 verification passed`);
|
|
447
|
+
}
|
|
448
|
+
}
|
|
449
|
+
return result;
|
|
450
|
+
}
|
|
451
|
+
catch (error) {
|
|
452
|
+
throw error;
|
|
453
|
+
}
|
|
454
|
+
}
|
|
455
|
+
/**
|
|
456
|
+
* Decompress data stream and validate hash without writing to disk
|
|
457
|
+
*
|
|
458
|
+
* MEMORY EFFICIENCY: Processes decompressed chunks one at a time.
|
|
459
|
+
* Pipeline: compressedStream → decompressStream() → hashCalc → validation
|
|
460
|
+
* - Each decompressed chunk is validated immediately without accumulation
|
|
461
|
+
* - Hash calculation is incremental (HashCalculator)
|
|
462
|
+
* - Progress callbacks are invoked per chunk
|
|
463
|
+
* - No file writing - data is discarded after validation
|
|
464
|
+
*
|
|
465
|
+
* Handles decompression, hash calculation, and verification.
|
|
466
|
+
* Internal method only
|
|
467
|
+
*/
|
|
468
|
+
async unCompressToTest(compressedStream, entry, options) {
|
|
469
|
+
this.log(`unCompressToTest() called for entry: ${entry.filename}, method: ${entry.cmpMethod}`);
|
|
470
|
+
// Decompress stream - processes one block at a time
|
|
471
|
+
const decompressedStream = this.decompressStream(compressedStream, entry.cmpMethod);
|
|
472
|
+
// Process and validate chunks - one block at a time
|
|
473
|
+
const hashCalc = new HashCalculator_1.HashCalculator({ useSHA256: !!entry.sha256 });
|
|
474
|
+
let totalBytes = 0;
|
|
475
|
+
try {
|
|
476
|
+
for await (const chunk of decompressedStream) {
|
|
477
|
+
this.log(`unCompressToTest: Processing decompressed chunk: ${chunk.length} bytes`);
|
|
478
|
+
hashCalc.update(chunk);
|
|
479
|
+
// Discard chunk - don't write to disk
|
|
480
|
+
totalBytes += chunk.length;
|
|
481
|
+
if (options?.onProgress) {
|
|
482
|
+
options.onProgress(totalBytes);
|
|
483
|
+
}
|
|
484
|
+
}
|
|
485
|
+
// Verify hash and return verified hash if SHA-256
|
|
486
|
+
if (!options?.skipHashCheck) {
|
|
487
|
+
if (entry.sha256) {
|
|
488
|
+
const calculatedHash = hashCalc.finalizeSHA256();
|
|
489
|
+
this.log(`SHA-256 comparison: calculated=${calculatedHash}, stored=${entry.sha256}`);
|
|
490
|
+
if (calculatedHash !== entry.sha256) {
|
|
491
|
+
throw new Error(Errors_1.default.INVALID_SHA256);
|
|
492
|
+
}
|
|
493
|
+
this.log(`SHA-256 comparison: calculated=${calculatedHash}, stored=${entry.sha256}`);
|
|
494
|
+
// Return the verified hash
|
|
495
|
+
return { verifiedHash: calculatedHash };
|
|
496
|
+
}
|
|
497
|
+
else {
|
|
498
|
+
const calculatedCRC = hashCalc.finalizeCRC32();
|
|
499
|
+
this.log(`CRC-32 comparison: calculated=${calculatedCRC}, stored=${entry.crc}`);
|
|
500
|
+
if (calculatedCRC !== entry.crc) {
|
|
501
|
+
throw new Error(Errors_1.default.INVALID_CRC);
|
|
502
|
+
}
|
|
503
|
+
this.log(`CRC-32 comparison: calculated=${calculatedCRC}, stored=${entry.crc}`);
|
|
504
|
+
// No hash to return for CRC-32 only entries
|
|
505
|
+
return { verifiedHash: undefined };
|
|
506
|
+
}
|
|
507
|
+
}
|
|
508
|
+
else {
|
|
509
|
+
// Hash check skipped - return undefined
|
|
510
|
+
return { verifiedHash: undefined };
|
|
511
|
+
}
|
|
512
|
+
}
|
|
513
|
+
catch (error) {
|
|
514
|
+
throw error;
|
|
515
|
+
}
|
|
516
|
+
}
|
|
517
|
+
/**
|
|
518
|
+
* Decompress data stream chunk by chunk
|
|
519
|
+
*
|
|
520
|
+
* MEMORY EFFICIENCY: Processes compressed data one block at a time.
|
|
521
|
+
* - For STORED: Passes through chunks unchanged (no accumulation)
|
|
522
|
+
* - For DEFLATED: Uses pako streaming inflate (maintains state across chunks)
|
|
523
|
+
* - For ZSTD: Collects all chunks (ZSTD limitation - requires full buffer)
|
|
524
|
+
*
|
|
525
|
+
* Pipeline: readCompressedDataStream() → DecryptionStream.decrypt() → decompressStream() → writeStream
|
|
526
|
+
*
|
|
527
|
+
* Internal method only
|
|
528
|
+
*/
|
|
529
|
+
async *decompressStream(compressedStream, method, chunkSize) {
|
|
530
|
+
// chunkSize parameter is currently unused but kept for API consistency
|
|
531
|
+
if (method === Headers_1.CMP_METHOD.STORED) {
|
|
532
|
+
// Pass through unchanged - one block at a time
|
|
533
|
+
for await (const chunk of compressedStream) {
|
|
534
|
+
yield chunk;
|
|
535
|
+
}
|
|
536
|
+
}
|
|
537
|
+
else if (method === Headers_1.CMP_METHOD.DEFLATED) {
|
|
538
|
+
// Use pako streaming inflate - maintains state across chunks
|
|
539
|
+
yield* this.inflateStream(compressedStream);
|
|
540
|
+
}
|
|
541
|
+
else if (method === Headers_1.CMP_METHOD.ZSTD) {
|
|
542
|
+
// Use ZSTD streaming decompression - note: ZSTD requires full buffer
|
|
543
|
+
yield* this.zstdDecompressStream(compressedStream);
|
|
544
|
+
}
|
|
545
|
+
else {
|
|
546
|
+
throw new Error(`Unsupported compression method: ${method}`);
|
|
547
|
+
}
|
|
548
|
+
}
|
|
549
|
+
/**
|
|
550
|
+
* Streaming deflate decompression using pako
|
|
551
|
+
*
|
|
552
|
+
* MEMORY EFFICIENCY: Processes compressed chunks one at a time.
|
|
553
|
+
* - Inflator maintains decompression state across chunks
|
|
554
|
+
* - Decompressed chunks are yielded immediately after processing each compressed chunk
|
|
555
|
+
* - No accumulation of compressed data (except in pako's internal buffers)
|
|
556
|
+
*/
|
|
557
|
+
async *inflateStream(compressedStream) {
|
|
558
|
+
const inflator = new pako.Inflate({ raw: true });
|
|
559
|
+
const decompressedChunks = [];
|
|
560
|
+
inflator.onData = (chunk) => {
|
|
561
|
+
decompressedChunks.push(Buffer.from(chunk));
|
|
562
|
+
};
|
|
563
|
+
// Process each compressed chunk one at a time
|
|
564
|
+
for await (const compressedChunk of compressedStream) {
|
|
565
|
+
this.log(`inflateStream: Processing compressed chunk: ${compressedChunk.length} bytes`);
|
|
566
|
+
inflator.push(compressedChunk, false);
|
|
567
|
+
// Yield accumulated decompressed chunks immediately (no accumulation)
|
|
568
|
+
for (const chunk of decompressedChunks) {
|
|
569
|
+
yield chunk;
|
|
570
|
+
}
|
|
571
|
+
decompressedChunks.length = 0;
|
|
572
|
+
}
|
|
573
|
+
// Finalize decompression
|
|
574
|
+
inflator.push(new Uint8Array(0), true);
|
|
575
|
+
for (const chunk of decompressedChunks) {
|
|
576
|
+
yield chunk;
|
|
577
|
+
}
|
|
578
|
+
}
|
|
579
|
+
/**
|
|
580
|
+
* Streaming ZSTD decompression
|
|
581
|
+
*/
|
|
582
|
+
async *zstdDecompressStream(compressedStream) {
|
|
583
|
+
// ZSTD is guaranteed to be initialized via factory method
|
|
584
|
+
// Collect all compressed chunks first (ZSTD needs complete data)
|
|
585
|
+
const compressedChunks = [];
|
|
586
|
+
for await (const chunk of compressedStream) {
|
|
587
|
+
compressedChunks.push(chunk);
|
|
588
|
+
}
|
|
589
|
+
const compressedData = Buffer.concat(compressedChunks);
|
|
590
|
+
try {
|
|
591
|
+
// Use global ZstdManager for decompression
|
|
592
|
+
const decompressed = await ZstdManager_1.ZstdManager.decompress(compressedData);
|
|
593
|
+
const decompressedBuffer = Buffer.from(decompressed);
|
|
594
|
+
// Yield decompressed data in chunks using ZipkitServer's bufferSize
|
|
595
|
+
const chunkSize = this.zipkitNode.getBufferSize();
|
|
596
|
+
let offset = 0;
|
|
597
|
+
while (offset < decompressedBuffer.length) {
|
|
598
|
+
const end = Math.min(offset + chunkSize, decompressedBuffer.length);
|
|
599
|
+
yield decompressedBuffer.slice(offset, end);
|
|
600
|
+
offset = end;
|
|
601
|
+
}
|
|
602
|
+
}
|
|
603
|
+
catch (error) {
|
|
604
|
+
throw new Error(`ZSTD streaming decompression failed: ${error instanceof Error ? error.message : String(error)}`);
|
|
605
|
+
}
|
|
606
|
+
}
|
|
607
|
+
/**
|
|
608
|
+
* Inflate data using pako (internal use only)
|
|
609
|
+
*/
|
|
610
|
+
inflate(data) {
|
|
611
|
+
this.log(`inflate() called with ${data.length} bytes`);
|
|
612
|
+
const result = pako.inflateRaw(data);
|
|
613
|
+
return Buffer.from(result.buffer, result.byteOffset, result.byteLength);
|
|
614
|
+
}
|
|
615
|
+
/**
|
|
616
|
+
* Zstd decompress method (now async with ZstdManager)
|
|
617
|
+
* Internal method only
|
|
618
|
+
*/
|
|
619
|
+
async zstdDecompressSync(data) {
|
|
620
|
+
this.log(`zstdDecompressSync() called with ${data.length} bytes`);
|
|
621
|
+
try {
|
|
622
|
+
// Use global ZstdManager for decompression
|
|
623
|
+
const decompressed = await ZstdManager_1.ZstdManager.decompress(data);
|
|
624
|
+
this.log(`ZSTD decompression successful: ${data.length} bytes -> ${decompressed.length} bytes`);
|
|
625
|
+
return Buffer.from(decompressed);
|
|
626
|
+
}
|
|
627
|
+
catch (error) {
|
|
628
|
+
this.log(`ZSTD decompression failed: ${error}`);
|
|
629
|
+
throw new Error(`ZSTD decompression failed: ${error instanceof Error ? error.message : String(error)}`);
|
|
630
|
+
}
|
|
631
|
+
}
|
|
632
|
+
/**
|
|
633
|
+
* Uncompress compressed data buffer (now async for Zstd)
|
|
634
|
+
* Handles decompression and hash verification
|
|
635
|
+
* Internal method only
|
|
636
|
+
*/
|
|
637
|
+
async unCompress(compressedData, entry, skipHashCheck) {
|
|
638
|
+
this.log(`unCompress() called for entry: ${entry.filename}, method: ${entry.cmpMethod}, data length: ${compressedData.length}`);
|
|
639
|
+
if (compressedData.length === 0) {
|
|
640
|
+
return Buffer.alloc(0);
|
|
641
|
+
}
|
|
642
|
+
let outBuf;
|
|
643
|
+
if (entry.cmpMethod === Headers_1.CMP_METHOD.STORED) {
|
|
644
|
+
outBuf = compressedData;
|
|
645
|
+
}
|
|
646
|
+
else if (entry.cmpMethod === Headers_1.CMP_METHOD.DEFLATED) {
|
|
647
|
+
// Use synchronous inflate for deflate
|
|
648
|
+
outBuf = this.inflate(compressedData);
|
|
649
|
+
}
|
|
650
|
+
else if (entry.cmpMethod === Headers_1.CMP_METHOD.ZSTD) {
|
|
651
|
+
// Use ZSTD decompression (now async with ZstdManager)
|
|
652
|
+
outBuf = await this.zstdDecompressSync(compressedData);
|
|
653
|
+
}
|
|
654
|
+
else {
|
|
655
|
+
throw new Error(`Unsupported compression method: ${entry.cmpMethod}`);
|
|
656
|
+
}
|
|
657
|
+
// Verify hash
|
|
658
|
+
if (!skipHashCheck) {
|
|
659
|
+
if (entry.sha256) {
|
|
660
|
+
const isValid = this.zipkitNode.testSHA256(entry, outBuf);
|
|
661
|
+
if (!isValid) {
|
|
662
|
+
throw new Error(Errors_1.default.INVALID_SHA256);
|
|
663
|
+
}
|
|
664
|
+
}
|
|
665
|
+
else {
|
|
666
|
+
const isValid = this.zipkitNode.testCRC32(entry, outBuf);
|
|
667
|
+
if (!isValid) {
|
|
668
|
+
throw new Error(Errors_1.default.INVALID_CRC);
|
|
669
|
+
}
|
|
670
|
+
}
|
|
671
|
+
}
|
|
672
|
+
return outBuf;
|
|
673
|
+
}
|
|
674
|
+
}
|
|
675
|
+
// CommonJS export of the Node decompression implementation
exports.ZipDecompressNode = ZipDecompressNode;
// Class-level logging control - set to true to enable logging
ZipDecompressNode.loggingEnabled = false;
|
|
678
|
+
//# sourceMappingURL=ZipDecompressNode.js.map
|