@neoware_inc/neozipkit 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (171)
  1. package/README.md +134 -0
  2. package/dist/browser/ZipkitBrowser.d.ts +27 -0
  3. package/dist/browser/ZipkitBrowser.d.ts.map +1 -0
  4. package/dist/browser/ZipkitBrowser.js +303 -0
  5. package/dist/browser/ZipkitBrowser.js.map +1 -0
  6. package/dist/browser/index.d.ts +9 -0
  7. package/dist/browser/index.d.ts.map +1 -0
  8. package/dist/browser/index.esm.d.ts +12 -0
  9. package/dist/browser/index.esm.d.ts.map +1 -0
  10. package/dist/browser/index.esm.js +46 -0
  11. package/dist/browser/index.esm.js.map +1 -0
  12. package/dist/browser/index.js +38 -0
  13. package/dist/browser/index.js.map +1 -0
  14. package/dist/browser-esm/index.d.ts +9 -0
  15. package/dist/browser-esm/index.js +50211 -0
  16. package/dist/browser-esm/index.js.map +7 -0
  17. package/dist/browser-umd/index.d.ts +9 -0
  18. package/dist/browser-umd/index.js +50221 -0
  19. package/dist/browser-umd/index.js.map +7 -0
  20. package/dist/browser-umd/index.min.js +39 -0
  21. package/dist/browser.d.ts +9 -0
  22. package/dist/browser.js +38 -0
  23. package/dist/core/ZipCompress.d.ts +99 -0
  24. package/dist/core/ZipCompress.d.ts.map +1 -0
  25. package/dist/core/ZipCompress.js +287 -0
  26. package/dist/core/ZipCompress.js.map +1 -0
  27. package/dist/core/ZipCopy.d.ts +175 -0
  28. package/dist/core/ZipCopy.d.ts.map +1 -0
  29. package/dist/core/ZipCopy.js +310 -0
  30. package/dist/core/ZipCopy.js.map +1 -0
  31. package/dist/core/ZipDecompress.d.ts +57 -0
  32. package/dist/core/ZipDecompress.d.ts.map +1 -0
  33. package/dist/core/ZipDecompress.js +155 -0
  34. package/dist/core/ZipDecompress.js.map +1 -0
  35. package/dist/core/ZipEntry.d.ts +138 -0
  36. package/dist/core/ZipEntry.d.ts.map +1 -0
  37. package/dist/core/ZipEntry.js +829 -0
  38. package/dist/core/ZipEntry.js.map +1 -0
  39. package/dist/core/Zipkit.d.ts +315 -0
  40. package/dist/core/Zipkit.d.ts.map +1 -0
  41. package/dist/core/Zipkit.js +647 -0
  42. package/dist/core/Zipkit.js.map +1 -0
  43. package/dist/core/ZstdManager.d.ts +56 -0
  44. package/dist/core/ZstdManager.d.ts.map +1 -0
  45. package/dist/core/ZstdManager.js +144 -0
  46. package/dist/core/ZstdManager.js.map +1 -0
  47. package/dist/core/components/HashCalculator.d.ts +138 -0
  48. package/dist/core/components/HashCalculator.d.ts.map +1 -0
  49. package/dist/core/components/HashCalculator.js +360 -0
  50. package/dist/core/components/HashCalculator.js.map +1 -0
  51. package/dist/core/components/Logger.d.ts +73 -0
  52. package/dist/core/components/Logger.d.ts.map +1 -0
  53. package/dist/core/components/Logger.js +156 -0
  54. package/dist/core/components/Logger.js.map +1 -0
  55. package/dist/core/components/ProgressTracker.d.ts +43 -0
  56. package/dist/core/components/ProgressTracker.d.ts.map +1 -0
  57. package/dist/core/components/ProgressTracker.js +112 -0
  58. package/dist/core/components/ProgressTracker.js.map +1 -0
  59. package/dist/core/components/Support.d.ts +64 -0
  60. package/dist/core/components/Support.d.ts.map +1 -0
  61. package/dist/core/components/Support.js +71 -0
  62. package/dist/core/components/Support.js.map +1 -0
  63. package/dist/core/components/Util.d.ts +26 -0
  64. package/dist/core/components/Util.d.ts.map +1 -0
  65. package/dist/core/components/Util.js +95 -0
  66. package/dist/core/components/Util.js.map +1 -0
  67. package/dist/core/constants/Errors.d.ts +52 -0
  68. package/dist/core/constants/Errors.d.ts.map +1 -0
  69. package/dist/core/constants/Errors.js +67 -0
  70. package/dist/core/constants/Errors.js.map +1 -0
  71. package/dist/core/constants/Headers.d.ts +170 -0
  72. package/dist/core/constants/Headers.d.ts.map +1 -0
  73. package/dist/core/constants/Headers.js +194 -0
  74. package/dist/core/constants/Headers.js.map +1 -0
  75. package/dist/core/encryption/Manager.d.ts +58 -0
  76. package/dist/core/encryption/Manager.d.ts.map +1 -0
  77. package/dist/core/encryption/Manager.js +121 -0
  78. package/dist/core/encryption/Manager.js.map +1 -0
  79. package/dist/core/encryption/ZipCrypto.d.ts +172 -0
  80. package/dist/core/encryption/ZipCrypto.d.ts.map +1 -0
  81. package/dist/core/encryption/ZipCrypto.js +554 -0
  82. package/dist/core/encryption/ZipCrypto.js.map +1 -0
  83. package/dist/core/encryption/index.d.ts +9 -0
  84. package/dist/core/encryption/index.d.ts.map +1 -0
  85. package/dist/core/encryption/index.js +17 -0
  86. package/dist/core/encryption/index.js.map +1 -0
  87. package/dist/core/encryption/types.d.ts +29 -0
  88. package/dist/core/encryption/types.d.ts.map +1 -0
  89. package/dist/core/encryption/types.js +12 -0
  90. package/dist/core/encryption/types.js.map +1 -0
  91. package/dist/core/index.d.ts +27 -0
  92. package/dist/core/index.d.ts.map +1 -0
  93. package/dist/core/index.js +59 -0
  94. package/dist/core/index.js.map +1 -0
  95. package/dist/core/version.d.ts +5 -0
  96. package/dist/core/version.d.ts.map +1 -0
  97. package/dist/core/version.js +31 -0
  98. package/dist/core/version.js.map +1 -0
  99. package/dist/index.d.ts +9 -0
  100. package/dist/index.d.ts.map +1 -0
  101. package/dist/index.js +38 -0
  102. package/dist/index.js.map +1 -0
  103. package/dist/node/ZipCompressNode.d.ts +123 -0
  104. package/dist/node/ZipCompressNode.d.ts.map +1 -0
  105. package/dist/node/ZipCompressNode.js +565 -0
  106. package/dist/node/ZipCompressNode.js.map +1 -0
  107. package/dist/node/ZipCopyNode.d.ts +165 -0
  108. package/dist/node/ZipCopyNode.d.ts.map +1 -0
  109. package/dist/node/ZipCopyNode.js +347 -0
  110. package/dist/node/ZipCopyNode.js.map +1 -0
  111. package/dist/node/ZipDecompressNode.d.ts +197 -0
  112. package/dist/node/ZipDecompressNode.d.ts.map +1 -0
  113. package/dist/node/ZipDecompressNode.js +678 -0
  114. package/dist/node/ZipDecompressNode.js.map +1 -0
  115. package/dist/node/ZipkitNode.d.ts +466 -0
  116. package/dist/node/ZipkitNode.d.ts.map +1 -0
  117. package/dist/node/ZipkitNode.js +1426 -0
  118. package/dist/node/ZipkitNode.js.map +1 -0
  119. package/dist/node/index.d.ts +25 -0
  120. package/dist/node/index.d.ts.map +1 -0
  121. package/dist/node/index.js +54 -0
  122. package/dist/node/index.js.map +1 -0
  123. package/dist/types/index.d.ts +45 -0
  124. package/dist/types/index.d.ts.map +1 -0
  125. package/dist/types/index.js +11 -0
  126. package/dist/types/index.js.map +1 -0
  127. package/examples/README.md +261 -0
  128. package/examples/append-data.json +44 -0
  129. package/examples/copy-zip-append.ts +139 -0
  130. package/examples/copy-zip.ts +152 -0
  131. package/examples/create-zip.ts +172 -0
  132. package/examples/extract-zip.ts +118 -0
  133. package/examples/list-zip.ts +161 -0
  134. package/examples/test-files/data.json +116 -0
  135. package/examples/test-files/document.md +80 -0
  136. package/examples/test-files/document.txt +6 -0
  137. package/examples/test-files/file1.txt +48 -0
  138. package/examples/test-files/file2.txt +80 -0
  139. package/examples/tsconfig.json +44 -0
  140. package/package.json +167 -0
  141. package/src/browser/ZipkitBrowser.ts +305 -0
  142. package/src/browser/index.esm.ts +32 -0
  143. package/src/browser/index.ts +19 -0
  144. package/src/core/ZipCompress.ts +370 -0
  145. package/src/core/ZipCopy.ts +434 -0
  146. package/src/core/ZipDecompress.ts +191 -0
  147. package/src/core/ZipEntry.ts +917 -0
  148. package/src/core/Zipkit.ts +794 -0
  149. package/src/core/ZstdManager.ts +165 -0
  150. package/src/core/components/HashCalculator.ts +384 -0
  151. package/src/core/components/Logger.ts +180 -0
  152. package/src/core/components/ProgressTracker.ts +134 -0
  153. package/src/core/components/Support.ts +77 -0
  154. package/src/core/components/Util.ts +91 -0
  155. package/src/core/constants/Errors.ts +78 -0
  156. package/src/core/constants/Headers.ts +205 -0
  157. package/src/core/encryption/Manager.ts +137 -0
  158. package/src/core/encryption/ZipCrypto.ts +650 -0
  159. package/src/core/encryption/index.ts +15 -0
  160. package/src/core/encryption/types.ts +33 -0
  161. package/src/core/index.ts +42 -0
  162. package/src/core/version.ts +33 -0
  163. package/src/index.ts +19 -0
  164. package/src/node/ZipCompressNode.ts +618 -0
  165. package/src/node/ZipCopyNode.ts +437 -0
  166. package/src/node/ZipDecompressNode.ts +793 -0
  167. package/src/node/ZipkitNode.ts +1706 -0
  168. package/src/node/index.ts +40 -0
  169. package/src/types/index.ts +68 -0
  170. package/src/types/modules.d.ts +22 -0
  171. package/src/types/opentimestamps.d.ts +1 -0
@@ -0,0 +1,793 @@
1
+ // ======================================
2
+ // ZipDecompressNode.ts - Node.js File-Based Decompression
3
+ // Copyright (c) 2025 NeoWare, Inc. All rights reserved.
4
+ // ======================================
5
+ //
6
+ // LOGGING INSTRUCTIONS:
7
+ // ---------------------
8
+ // To enable/disable logging, set loggingEnabled to true/false in the class:
9
+ // private static loggingEnabled: boolean = true; // Enable logging
10
+ // private static loggingEnabled: boolean = false; // Disable logging
11
+ //
12
+ // Logging respects the global Logger level (debug, info, warn, error, silent).
13
+ // Logger level is automatically set to 'debug' when loggingEnabled is true.
14
+ //
15
+
16
+ const pako = require('pako');
17
+ import { ZstdManager } from '../core/ZstdManager';
18
+ import ZipkitNode from './ZipkitNode';
19
+ import { Logger } from '../core/components/Logger';
20
+ import ZipEntry from '../core/ZipEntry';
21
+ import Errors from '../core/constants/Errors';
22
+ import { CMP_METHOD } from '../core/constants/Headers';
23
+ import { HashCalculator } from '../core/components/HashCalculator';
24
+ import { DecryptionStream, ZipCrypto } from '../core/encryption/ZipCrypto';
25
+ import { EncryptionMethod } from '../core/encryption/types';
26
+ import { StreamingFileHandle } from '../core';
27
+ import * as fs from 'fs';
28
+
29
+ /**
30
+ * ZipDecompressNode - Node.js file-based decompression operations
31
+ *
32
+ * Independent decompression implementation for Node.js environments.
33
+ * All decompression logic is implemented directly without delegating to ZipDecompress.
34
+ *
35
+ * @example
36
+ * ```typescript
37
+ * const zipkitNode = new ZipkitNode();
38
+ * const decompressNode = new ZipDecompressNode(zipkitNode);
39
+ * await decompressNode.extractToFile(entry, './output/file.txt');
40
+ * ```
41
+ */
42
+ export class ZipDecompressNode {
43
+ private zipkitNode: ZipkitNode;
44
+
45
+ // Class-level logging control - set to true to enable logging
46
+ private static loggingEnabled: boolean = false;
47
+
48
+ /**
49
+ * Creates a new ZipDecompressNode instance
50
+ * @param zipkitNode - ZipkitNode instance to use for ZIP operations
51
+ */
52
+ constructor(zipkitNode: ZipkitNode) {
53
+ this.zipkitNode = zipkitNode;
54
+ // If logging is enabled, ensure Logger level is set to debug
55
+ if (ZipDecompressNode.loggingEnabled) {
56
+ Logger.setLevel('debug');
57
+ }
58
+ }
59
+
60
+ /**
61
+ * Internal logging method - only logs if class logging is enabled
62
+ */
63
+ private log(...args: any[]): void {
64
+ if (ZipDecompressNode.loggingEnabled) {
65
+ Logger.debug(`[ZipDecompressNode]`, ...args);
66
+ }
67
+ }
68
+
69
+ // ============================================================================
70
+ // File-Based Extraction Methods
71
+ // ============================================================================
72
+
73
+ /**
74
+ * Extract file directly to disk with true streaming (no memory buffering)
75
+ * Public method that validates file mode and extracts entry to file
76
+ *
77
+ * This method processes chunks as they are decompressed and writes them
78
+ * directly to disk, maintaining minimal memory footprint regardless of file size.
79
+ * This is the recommended method for file extraction to avoid memory issues.
80
+ *
81
+ * @param entry ZIP entry to extract
82
+ * @param outputPath Path where the file should be written
83
+ * @param options Optional extraction options including progress callback
84
+ * @throws Error if not a File-based ZIP
85
+ */
86
+ async extractToFile(
87
+ entry: ZipEntry,
88
+ outputPath: string,
89
+ options?: {
90
+ skipHashCheck?: boolean;
91
+ onProgress?: (bytes: number) => void;
92
+ }
93
+ ): Promise<void> {
94
+ // Get fileHandle from zipkitNode (merged from ZipLoadEntriesServer)
95
+ const fileHandle = (this.zipkitNode as any).getFileHandle();
96
+
97
+ // Call internal method with fileHandle
98
+ await this.extractToFileInternal(fileHandle, entry, outputPath, options);
99
+ }
100
+
101
+ /**
102
+ * Extract file to Buffer (in-memory) for file-based ZIP
103
+ *
104
+ * This method extracts a ZIP entry directly to a Buffer without writing to disk.
105
+ * This is ideal for reading metadata files (like NZIP.TOKEN) that don't need
106
+ * to be written to temporary files.
107
+ *
108
+ * @param entry ZIP entry to extract
109
+ * @param options Optional extraction options including progress callback
110
+ * @returns Promise that resolves to Buffer containing the extracted file data
111
+ * @throws Error if not a File-based ZIP or if extraction fails
112
+ */
113
+ async extractToBuffer(
114
+ entry: ZipEntry,
115
+ options?: {
116
+ skipHashCheck?: boolean;
117
+ onProgress?: (bytes: number) => void;
118
+ }
119
+ ): Promise<Buffer> {
120
+ // Get fileHandle from zipkitNode
121
+ const fileHandle = (this.zipkitNode as any).getFileHandle();
122
+
123
+ // Call internal extract to buffer method
124
+ return await this.extractToBufferInternal(fileHandle, entry, options);
125
+ }
126
+
127
+ /**
128
+ * Test entry integrity without extracting to disk
129
+ * Validates CRC-32 or SHA-256 hash without writing decompressed data
130
+ *
131
+ * This method processes chunks as they are decompressed and validates them,
132
+ * but discards the decompressed data instead of writing to disk. This is useful
133
+ * for verifying ZIP file integrity without extracting files.
134
+ *
135
+ * @param entry ZIP entry to test
136
+ * @param options Optional test options including progress callback
137
+ * @returns Promise that resolves to an object containing the verified hash (if SHA-256) or undefined
138
+ * @throws Error if validation fails (INVALID_CRC or INVALID_SHA256) or if not a File-based ZIP
139
+ */
140
+ async testEntry(
141
+ entry: ZipEntry,
142
+ options?: {
143
+ skipHashCheck?: boolean;
144
+ onProgress?: (bytes: number) => void;
145
+ }
146
+ ): Promise<{ verifiedHash?: string }> {
147
+ // Get fileHandle from zipkitNode
148
+ const fileHandle = (this.zipkitNode as any).getFileHandle();
149
+
150
+ // Call internal test method with fileHandle
151
+ return await this.testEntryInternal(fileHandle, entry, options);
152
+ }
153
+
154
+ // ============================================================================
155
+ // Internal File-Based Methods
156
+ // ============================================================================
157
+
158
+ /**
159
+ * Read compressed data from file and yield one block at a time
160
+ *
161
+ * MEMORY EFFICIENCY: Yields compressed data chunks one at a time without accumulation.
162
+ * Each chunk is read from disk and yielded immediately, allowing downstream processing
163
+ * (decryption, decompression) to handle one block at a time.
164
+ *
165
+ * @param fileHandle - File handle to read from
166
+ * @param entry - ZIP entry to read compressed data for
167
+ * @param chunkSize - Optional chunk size override (defaults to ZipkitServer's bufferSize)
168
+ * @returns Async generator yielding compressed data chunks one at a time
169
+ */
170
+ private async *readCompressedDataStream(
171
+ fileHandle: StreamingFileHandle,
172
+ entry: ZipEntry,
173
+ chunkSize?: number
174
+ ): AsyncGenerator<Buffer> {
175
+ // Use provided chunkSize or ZipkitServer's default bufferSize
176
+ const effectiveChunkSize = chunkSize || this.zipkitNode.getBufferSize();
177
+ // Read local file header
178
+ const localHeaderBuffer = Buffer.alloc(30);
179
+ await fileHandle.read(localHeaderBuffer, 0, 30, entry.localHdrOffset);
180
+
181
+ if (localHeaderBuffer.readUInt32LE(0) !== 0x04034b50) { // LOCAL_HDR.SIGNATURE
182
+ throw new Error(Errors.INVALID_CEN);
183
+ }
184
+
185
+ // Calculate data start position
186
+ const filenameLength = localHeaderBuffer.readUInt16LE(26);
187
+ const extraFieldLength = localHeaderBuffer.readUInt16LE(28);
188
+ const dataStart = entry.localHdrOffset + 30 + filenameLength + extraFieldLength;
189
+
190
+ // Yield compressed data in chunks - one block at a time
191
+ let remaining = entry.compressedSize;
192
+ let position = dataStart;
193
+
194
+ while (remaining > 0) {
195
+ const currentChunkSize = Math.min(effectiveChunkSize, remaining);
196
+ const chunk = Buffer.alloc(currentChunkSize);
197
+ await fileHandle.read(chunk, 0, currentChunkSize, position);
198
+
199
+ this.log(`readCompressedDataStream: Yielding compressed chunk: ${chunk.length} bytes (${remaining} bytes remaining)`);
200
+ yield chunk;
201
+
202
+ position += currentChunkSize;
203
+ remaining -= currentChunkSize;
204
+ }
205
+ }
206
+
207
+ /**
208
+ * Handles: reading compressed data, optional decryption, decompression, hashing, and writing
209
+ * Internal method that takes fileHandle as parameter
210
+ */
211
+ private async extractToFileInternal(
212
+ fileHandle: any,
213
+ entry: ZipEntry,
214
+ outputPath: string,
215
+ options?: {
216
+ skipHashCheck?: boolean;
217
+ onProgress?: (bytes: number) => void;
218
+ }
219
+ ): Promise<void> {
220
+ this.log(`extractToFileInternal called for entry: ${entry.filename}`);
221
+ this.log(`Entry isEncrypted: ${(entry as any).isEncrypted}, has password: ${!!(this.zipkitNode as any)?.password}`);
222
+
223
+ try {
224
+ // Create output stream with overwrite flag to truncate existing files
225
+ const writeStream = fs.createWriteStream(outputPath, { flags: 'w' });
226
+
227
+ // Build compressed data stream - yields one block at a time
228
+ let dataStream = this.readCompressedDataStream(fileHandle, entry);
229
+
230
+ // Decrypt if needed using password on zipkitNode instance
231
+ // Decryption maintains state across blocks via updateKeys()
232
+ const isEncrypted = (entry as any).isEncrypted && (this.zipkitNode as any)?.password;
233
+
234
+ if (isEncrypted) {
235
+ this.log(`Starting decryption for entry: ${entry.filename}`);
236
+
237
+ // Prepare entry for decryption by parsing local header
238
+ await DecryptionStream.prepareEntryForDecryption(fileHandle, entry);
239
+
240
+ const encryptionMethod = (entry as any).encryptionMethod || EncryptionMethod.ZIP_CRYPTO;
241
+
242
+ this.log(`Creating DecryptionStream with method: ${encryptionMethod}`);
243
+
244
+ const decryptor = new DecryptionStream({
245
+ password: (this.zipkitNode as any).password,
246
+ method: encryptionMethod,
247
+ entry: entry
248
+ });
249
+
250
+ this.log(`DecryptionStream created, calling decrypt()...`);
251
+ // Decryption processes one block at a time, maintaining state across blocks
252
+ dataStream = decryptor.decrypt(dataStream);
253
+ this.log(`decrypt() returned, dataStream is now a generator that yields one decrypted block at a time`);
254
+ }
255
+
256
+ // Pipeline: readCompressedDataStream() → DecryptionStream.decrypt() → decompressStream() → writeStream
257
+ // Each stage processes one block at a time without accumulation
258
+ await this.unCompressToFile(dataStream, entry, writeStream, {
259
+ skipHashCheck: options?.skipHashCheck,
260
+ onProgress: options?.onProgress,
261
+ outputPath
262
+ });
263
+ } catch (error) {
264
+ throw error;
265
+ }
266
+ }
267
+
268
+ /**
269
+ * Extract file to Buffer (in-memory) for file-based ZIP
270
+ * Internal method that takes fileHandle as parameter
271
+ *
272
+ * MEMORY EFFICIENCY: Accumulates decompressed chunks into a Buffer.
273
+ * For small files (like metadata), this is acceptable. For large files,
274
+ * consider using extractToFile() instead.
275
+ */
276
+ private async extractToBufferInternal(
277
+ fileHandle: any,
278
+ entry: ZipEntry,
279
+ options?: {
280
+ skipHashCheck?: boolean;
281
+ onProgress?: (bytes: number) => void;
282
+ }
283
+ ): Promise<Buffer> {
284
+ this.log(`extractToBufferInternal called for entry: ${entry.filename}`);
285
+ this.log(`Entry isEncrypted: ${(entry as any).isEncrypted}, has password: ${!!(this.zipkitNode as any)?.password}`);
286
+
287
+ try {
288
+ // Build compressed data stream - yields one block at a time
289
+ let dataStream = this.readCompressedDataStream(fileHandle, entry);
290
+
291
+ // Decrypt if needed using password on zipkitNode instance
292
+ const isEncrypted = (entry as any).isEncrypted && (this.zipkitNode as any)?.password;
293
+
294
+ if (isEncrypted) {
295
+ this.log(`Starting decryption for entry: ${entry.filename}`);
296
+
297
+ // Prepare entry for decryption by parsing local header
298
+ await DecryptionStream.prepareEntryForDecryption(fileHandle, entry);
299
+
300
+ const encryptionMethod = (entry as any).encryptionMethod || EncryptionMethod.ZIP_CRYPTO;
301
+
302
+ this.log(`Creating DecryptionStream with method: ${encryptionMethod}`);
303
+
304
+ const decryptor = new DecryptionStream({
305
+ password: (this.zipkitNode as any).password,
306
+ method: encryptionMethod,
307
+ entry: entry
308
+ });
309
+
310
+ this.log(`DecryptionStream created, calling decrypt()...`);
311
+ dataStream = decryptor.decrypt(dataStream);
312
+ this.log(`decrypt() returned, dataStream is now a generator that yields one decrypted block at a time`);
313
+ }
314
+
315
+ // Pipeline: readCompressedDataStream() → DecryptionStream.decrypt() → decompressStream() → accumulate to Buffer
316
+ return await this.unCompressToBuffer(dataStream, entry, {
317
+ skipHashCheck: options?.skipHashCheck,
318
+ onProgress: options?.onProgress
319
+ });
320
+ } catch (error) {
321
+ throw error;
322
+ }
323
+ }
324
+
325
+ /**
326
+ * Test entry integrity without writing to disk
327
+ * Internal method that takes fileHandle as parameter
328
+ */
329
+ private async testEntryInternal(
330
+ fileHandle: any,
331
+ entry: ZipEntry,
332
+ options?: {
333
+ skipHashCheck?: boolean;
334
+ onProgress?: (bytes: number) => void;
335
+ }
336
+ ): Promise<{ verifiedHash?: string }> {
337
+ this.log(`testEntryInternal called for entry: ${entry.filename}`);
338
+ this.log(`Entry isEncrypted: ${(entry as any).isEncrypted}, has password: ${!!(this.zipkitNode as any)?.password}`);
339
+
340
+ try {
341
+ // Build compressed data stream - yields one block at a time
342
+ let dataStream = this.readCompressedDataStream(fileHandle, entry);
343
+
344
+ // Decrypt if needed using password on zipkitNode instance
345
+ const isEncrypted = (entry as any).isEncrypted && (this.zipkitNode as any)?.password;
346
+
347
+ if (isEncrypted) {
348
+ this.log(`Starting decryption for entry: ${entry.filename}`);
349
+
350
+ // Prepare entry for decryption by parsing local header
351
+ await DecryptionStream.prepareEntryForDecryption(fileHandle, entry);
352
+
353
+ const encryptionMethod = (entry as any).encryptionMethod || EncryptionMethod.ZIP_CRYPTO;
354
+
355
+ this.log(`Creating DecryptionStream with method: ${encryptionMethod}`);
356
+
357
+ const decryptor = new DecryptionStream({
358
+ password: (this.zipkitNode as any).password,
359
+ method: encryptionMethod,
360
+ entry: entry
361
+ });
362
+
363
+ this.log(`DecryptionStream created, calling decrypt()...`);
364
+ dataStream = decryptor.decrypt(dataStream);
365
+ this.log(`decrypt() returned, dataStream is now a generator that yields one decrypted block at a time`);
366
+ }
367
+
368
+ // Pipeline: readCompressedDataStream() → DecryptionStream.decrypt() → decompressStream() → hash validation
369
+ // Data is discarded after validation, no file writing
370
+ return await this.unCompressToTest(dataStream, entry, {
371
+ skipHashCheck: options?.skipHashCheck,
372
+ onProgress: options?.onProgress
373
+ });
374
+ } catch (error) {
375
+ throw error;
376
+ }
377
+ }
378
+
379
+ /**
380
+ * Decompress data stream and write to file
381
+ *
382
+ * MEMORY EFFICIENCY: Processes decompressed chunks one at a time.
383
+ * Pipeline: compressedStream → decompressStream() → hashCalc → writeStream
384
+ * - Each decompressed chunk is written immediately without accumulation
385
+ * - Hash calculation is incremental (HashCalculator)
386
+ * - Progress callbacks are invoked per chunk
387
+ *
388
+ * Handles decompression, hash calculation, file writing, and verification.
389
+ * Internal method only
390
+ */
391
+ private async unCompressToFile(
392
+ compressedStream: AsyncGenerator<Buffer>,
393
+ entry: ZipEntry,
394
+ writeStream: any, // Node.js WriteStream
395
+ options?: {
396
+ skipHashCheck?: boolean;
397
+ onProgress?: (bytes: number) => void;
398
+ outputPath?: string; // For cleanup on error
399
+ }
400
+ ): Promise<void> {
401
+ this.log(`unCompressToFile() called for entry: ${entry.filename}, method: ${entry.cmpMethod}`);
402
+
403
+ // Decompress stream - processes one block at a time
404
+ const decompressedStream = this.decompressStream(compressedStream, entry.cmpMethod);
405
+
406
+ // Process and write chunks - one block at a time
407
+ const hashCalc = new HashCalculator({ useSHA256: !!entry.sha256 });
408
+ let totalBytes = 0;
409
+
410
+ try {
411
+ for await (const chunk of decompressedStream) {
412
+ this.log(`unCompressToFile: Processing decompressed chunk: ${chunk.length} bytes`);
413
+ hashCalc.update(chunk);
414
+ writeStream.write(chunk);
415
+ totalBytes += chunk.length;
416
+
417
+ if (options?.onProgress) {
418
+ options.onProgress(totalBytes);
419
+ }
420
+ }
421
+
422
+ // Close stream
423
+ await new Promise((resolve, reject) => {
424
+ writeStream.end(() => resolve(undefined));
425
+ writeStream.on('error', reject);
426
+ });
427
+
428
+ // Verify hash
429
+ if (!options?.skipHashCheck) {
430
+ if (entry.sha256) {
431
+ const calculatedHash = hashCalc.finalizeSHA256();
432
+ this.log(`SHA-256 comparison: calculated=${calculatedHash}, stored=${entry.sha256}`);
433
+ if (calculatedHash !== entry.sha256) {
434
+ if (options?.outputPath && fs) {
435
+ fs.unlinkSync(options.outputPath);
436
+ }
437
+ throw new Error(Errors.INVALID_SHA256);
438
+ }
439
+ this.log(`SHA-256 comparison: calculated=${calculatedHash}, stored=${entry.sha256}`);
440
+ } else {
441
+ const calculatedCRC = hashCalc.finalizeCRC32();
442
+ this.log(`CRC-32 comparison: calculated=${calculatedCRC}, stored=${entry.crc}`);
443
+ if (calculatedCRC !== entry.crc) {
444
+ if (options?.outputPath && fs) {
445
+ fs.unlinkSync(options.outputPath);
446
+ }
447
+ throw new Error(Errors.INVALID_CRC);
448
+ }
449
+ this.log(`CRC-32 comparison: calculated=${calculatedCRC}, stored=${entry.crc}`);
450
+ }
451
+ }
452
+ } catch (error) {
453
+ // Cleanup file on error
454
+ if (options?.outputPath && fs) {
455
+ try {
456
+ fs.unlinkSync(options.outputPath);
457
+ } catch {
458
+ // Ignore cleanup errors
459
+ }
460
+ }
461
+ throw error;
462
+ }
463
+ }
464
+
465
+ // ============================================================================
466
+ // Decompression Methods
467
+ // ============================================================================
468
+
469
+ /**
470
+ * Decompress data stream and accumulate to Buffer
471
+ *
472
+ * MEMORY EFFICIENCY: Accumulates decompressed chunks into a Buffer.
473
+ * For small files (like metadata), this is acceptable. For large files,
474
+ * consider using unCompressToFile() instead.
475
+ *
476
+ * Pipeline: compressedStream → decompressStream() → hashCalc → Buffer accumulation
477
+ * - Each decompressed chunk is accumulated into a Buffer
478
+ * - Hash calculation is incremental (HashCalculator)
479
+ * - Progress callbacks are invoked per chunk
480
+ *
481
+ * Handles decompression, hash calculation, and Buffer accumulation.
482
+ * Internal method only
483
+ */
484
+ private async unCompressToBuffer(
485
+ compressedStream: AsyncGenerator<Buffer>,
486
+ entry: ZipEntry,
487
+ options?: {
488
+ skipHashCheck?: boolean;
489
+ onProgress?: (bytes: number) => void;
490
+ }
491
+ ): Promise<Buffer> {
492
+ this.log(`unCompressToBuffer() called for entry: ${entry.filename}, method: ${entry.cmpMethod}`);
493
+
494
+ // Decompress stream - processes one block at a time
495
+ const decompressedStream = this.decompressStream(compressedStream, entry.cmpMethod);
496
+
497
+ // Accumulate chunks into Buffer
498
+ const hashCalc = new HashCalculator({ useSHA256: !!entry.sha256 });
499
+ const chunks: Buffer[] = [];
500
+ let totalBytes = 0;
501
+
502
+ try {
503
+ for await (const chunk of decompressedStream) {
504
+ this.log(`unCompressToBuffer: Processing decompressed chunk: ${chunk.length} bytes`);
505
+ hashCalc.update(chunk);
506
+ chunks.push(chunk);
507
+ totalBytes += chunk.length;
508
+
509
+ if (options?.onProgress) {
510
+ options.onProgress(totalBytes);
511
+ }
512
+ }
513
+
514
+ // Concatenate all chunks into a single Buffer
515
+ const result = Buffer.concat(chunks);
516
+
517
+ // Verify hash
518
+ if (!options?.skipHashCheck) {
519
+ if (entry.sha256) {
520
+ const calculatedHash = hashCalc.finalizeSHA256();
521
+ this.log(`SHA-256 comparison: calculated=${calculatedHash}, stored=${entry.sha256}`);
522
+ if (calculatedHash !== entry.sha256) {
523
+ throw new Error(Errors.INVALID_SHA256);
524
+ }
525
+ this.log(`SHA-256 verification passed`);
526
+ } else {
527
+ const calculatedCRC = hashCalc.finalizeCRC32();
528
+ this.log(`CRC-32 comparison: calculated=${calculatedCRC}, stored=${entry.crc}`);
529
+ if (calculatedCRC !== entry.crc) {
530
+ throw new Error(Errors.INVALID_CRC);
531
+ }
532
+ this.log(`CRC-32 verification passed`);
533
+ }
534
+ }
535
+
536
+ return result;
537
+ } catch (error) {
538
+ throw error;
539
+ }
540
+ }
541
+
542
+ /**
543
+ * Decompress data stream and validate hash without writing to disk
544
+ *
545
+ * MEMORY EFFICIENCY: Processes decompressed chunks one at a time.
546
+ * Pipeline: compressedStream → decompressStream() → hashCalc → validation
547
+ * - Each decompressed chunk is validated immediately without accumulation
548
+ * - Hash calculation is incremental (HashCalculator)
549
+ * - Progress callbacks are invoked per chunk
550
+ * - No file writing - data is discarded after validation
551
+ *
552
+ * Handles decompression, hash calculation, and verification.
553
+ * Internal method only
554
+ */
555
+ private async unCompressToTest(
556
+ compressedStream: AsyncGenerator<Buffer>,
557
+ entry: ZipEntry,
558
+ options?: {
559
+ skipHashCheck?: boolean;
560
+ onProgress?: (bytes: number) => void;
561
+ }
562
+ ): Promise<{ verifiedHash?: string }> {
563
+ this.log(`unCompressToTest() called for entry: ${entry.filename}, method: ${entry.cmpMethod}`);
564
+
565
+ // Decompress stream - processes one block at a time
566
+ const decompressedStream = this.decompressStream(compressedStream, entry.cmpMethod);
567
+
568
+ // Process and validate chunks - one block at a time
569
+ const hashCalc = new HashCalculator({ useSHA256: !!entry.sha256 });
570
+ let totalBytes = 0;
571
+
572
+ try {
573
+ for await (const chunk of decompressedStream) {
574
+ this.log(`unCompressToTest: Processing decompressed chunk: ${chunk.length} bytes`);
575
+ hashCalc.update(chunk);
576
+ // Discard chunk - don't write to disk
577
+ totalBytes += chunk.length;
578
+
579
+ if (options?.onProgress) {
580
+ options.onProgress(totalBytes);
581
+ }
582
+ }
583
+
584
+ // Verify hash and return verified hash if SHA-256
585
+ if (!options?.skipHashCheck) {
586
+ if (entry.sha256) {
587
+ const calculatedHash = hashCalc.finalizeSHA256();
588
+ this.log(`SHA-256 comparison: calculated=${calculatedHash}, stored=${entry.sha256}`);
589
+ if (calculatedHash !== entry.sha256) {
590
+ throw new Error(Errors.INVALID_SHA256);
591
+ }
592
+ this.log(`SHA-256 comparison: calculated=${calculatedHash}, stored=${entry.sha256}`);
593
+ // Return the verified hash
594
+ return { verifiedHash: calculatedHash };
595
+ } else {
596
+ const calculatedCRC = hashCalc.finalizeCRC32();
597
+ this.log(`CRC-32 comparison: calculated=${calculatedCRC}, stored=${entry.crc}`);
598
+ if (calculatedCRC !== entry.crc) {
599
+ throw new Error(Errors.INVALID_CRC);
600
+ }
601
+ this.log(`CRC-32 comparison: calculated=${calculatedCRC}, stored=${entry.crc}`);
602
+ // No hash to return for CRC-32 only entries
603
+ return { verifiedHash: undefined };
604
+ }
605
+ } else {
606
+ // Hash check skipped - return undefined
607
+ return { verifiedHash: undefined };
608
+ }
609
+ } catch (error) {
610
+ throw error;
611
+ }
612
+ }
613
+
614
+ /**
615
+ * Decompress data stream chunk by chunk
616
+ *
617
+ * MEMORY EFFICIENCY: Processes compressed data one block at a time.
618
+ * - For STORED: Passes through chunks unchanged (no accumulation)
619
+ * - For DEFLATED: Uses pako streaming inflate (maintains state across chunks)
620
+ * - For ZSTD: Collects all chunks (ZSTD limitation - requires full buffer)
621
+ *
622
+ * Pipeline: readCompressedDataStream() → DecryptionStream.decrypt() → decompressStream() → writeStream
623
+ *
624
+ * Internal method only
625
+ */
626
+ private async *decompressStream(
627
+ compressedStream: AsyncGenerator<Buffer>,
628
+ method: number,
629
+ chunkSize?: number
630
+ ): AsyncGenerator<Buffer> {
631
+ // chunkSize parameter is currently unused but kept for API consistency
632
+ if (method === CMP_METHOD.STORED) {
633
+ // Pass through unchanged - one block at a time
634
+ for await (const chunk of compressedStream) {
635
+ yield chunk;
636
+ }
637
+ } else if (method === CMP_METHOD.DEFLATED) {
638
+ // Use pako streaming inflate - maintains state across chunks
639
+ yield* this.inflateStream(compressedStream);
640
+ } else if (method === CMP_METHOD.ZSTD) {
641
+ // Use ZSTD streaming decompression - note: ZSTD requires full buffer
642
+ yield* this.zstdDecompressStream(compressedStream);
643
+ } else {
644
+ throw new Error(`Unsupported compression method: ${method}`);
645
+ }
646
+ }
647
+
648
+ /**
649
+ * Streaming deflate decompression using pako
650
+ *
651
+ * MEMORY EFFICIENCY: Processes compressed chunks one at a time.
652
+ * - Inflator maintains decompression state across chunks
653
+ * - Decompressed chunks are yielded immediately after processing each compressed chunk
654
+ * - No accumulation of compressed data (except in pako's internal buffers)
655
+ */
656
+ private async *inflateStream(
657
+ compressedStream: AsyncGenerator<Buffer>
658
+ ): AsyncGenerator<Buffer> {
659
+ const inflator = new pako.Inflate({ raw: true });
660
+ const decompressedChunks: Buffer[] = [];
661
+
662
+ inflator.onData = (chunk: Uint8Array) => {
663
+ decompressedChunks.push(Buffer.from(chunk));
664
+ };
665
+
666
+ // Process each compressed chunk one at a time
667
+ for await (const compressedChunk of compressedStream) {
668
+ this.log(`inflateStream: Processing compressed chunk: ${compressedChunk.length} bytes`);
669
+ inflator.push(compressedChunk, false);
670
+
671
+ // Yield accumulated decompressed chunks immediately (no accumulation)
672
+ for (const chunk of decompressedChunks) {
673
+ yield chunk;
674
+ }
675
+ decompressedChunks.length = 0;
676
+ }
677
+
678
+ // Finalize decompression
679
+ inflator.push(new Uint8Array(0), true);
680
+ for (const chunk of decompressedChunks) {
681
+ yield chunk;
682
+ }
683
+ }
684
+
685
+ /**
686
+ * Streaming ZSTD decompression
687
+ */
688
+ private async *zstdDecompressStream(
689
+ compressedStream: AsyncGenerator<Buffer>
690
+ ): AsyncGenerator<Buffer> {
691
+ // ZSTD is guaranteed to be initialized via factory method
692
+
693
+ // Collect all compressed chunks first (ZSTD needs complete data)
694
+ const compressedChunks: Buffer[] = [];
695
+ for await (const chunk of compressedStream) {
696
+ compressedChunks.push(chunk);
697
+ }
698
+
699
+ const compressedData = Buffer.concat(compressedChunks);
700
+
701
+ try {
702
+ // Use global ZstdManager for decompression
703
+ const decompressed = await ZstdManager.decompress(compressedData);
704
+ const decompressedBuffer = Buffer.from(decompressed);
705
+
706
+ // Yield decompressed data in chunks using ZipkitServer's bufferSize
707
+ const chunkSize = this.zipkitNode.getBufferSize();
708
+ let offset = 0;
709
+ while (offset < decompressedBuffer.length) {
710
+ const end = Math.min(offset + chunkSize, decompressedBuffer.length);
711
+ yield decompressedBuffer.slice(offset, end);
712
+ offset = end;
713
+ }
714
+ } catch (error) {
715
+ throw new Error(`ZSTD streaming decompression failed: ${error instanceof Error ? error.message : String(error)}`);
716
+ }
717
+ }
718
+
719
+ /**
720
+ * Inflate data using pako (internal use only)
721
+ */
722
+ private inflate(data: Buffer): Buffer {
723
+ this.log(`inflate() called with ${data.length} bytes`);
724
+ const result = pako.inflateRaw(data);
725
+ return Buffer.from(result.buffer, result.byteOffset, result.byteLength);
726
+ }
727
+
728
+ /**
729
+ * Zstd decompress method (now async with ZstdManager)
730
+ * Internal method only
731
+ */
732
+ private async zstdDecompressSync(data: Buffer): Promise<Buffer> {
733
+ this.log(`zstdDecompressSync() called with ${data.length} bytes`);
734
+
735
+ try {
736
+ // Use global ZstdManager for decompression
737
+ const decompressed = await ZstdManager.decompress(data);
738
+ this.log(`ZSTD decompression successful: ${data.length} bytes -> ${decompressed.length} bytes`);
739
+ return Buffer.from(decompressed);
740
+ } catch (error) {
741
+ this.log(`ZSTD decompression failed: ${error}`);
742
+ throw new Error(`ZSTD decompression failed: ${error instanceof Error ? error.message : String(error)}`);
743
+ }
744
+ }
745
+
746
+ /**
747
+ * Uncompress compressed data buffer (now async for Zstd)
748
+ * Handles decompression and hash verification
749
+ * Internal method only
750
+ */
751
+ private async unCompress(
752
+ compressedData: Buffer,
753
+ entry: ZipEntry,
754
+ skipHashCheck?: boolean
755
+ ): Promise<Buffer> {
756
+ this.log(`unCompress() called for entry: ${entry.filename}, method: ${entry.cmpMethod}, data length: ${compressedData.length}`);
757
+
758
+ if (compressedData.length === 0) {
759
+ return Buffer.alloc(0);
760
+ }
761
+
762
+ let outBuf: Buffer;
763
+ if (entry.cmpMethod === CMP_METHOD.STORED) {
764
+ outBuf = compressedData;
765
+ } else if (entry.cmpMethod === CMP_METHOD.DEFLATED) {
766
+ // Use synchronous inflate for deflate
767
+ outBuf = this.inflate(compressedData);
768
+ } else if (entry.cmpMethod === CMP_METHOD.ZSTD) {
769
+ // Use ZSTD decompression (now async with ZstdManager)
770
+ outBuf = await this.zstdDecompressSync(compressedData);
771
+ } else {
772
+ throw new Error(`Unsupported compression method: ${entry.cmpMethod}`);
773
+ }
774
+
775
+ // Verify hash
776
+ if (!skipHashCheck) {
777
+ if (entry.sha256) {
778
+ const isValid = this.zipkitNode.testSHA256(entry, outBuf);
779
+ if (!isValid) {
780
+ throw new Error(Errors.INVALID_SHA256);
781
+ }
782
+ } else {
783
+ const isValid = this.zipkitNode.testCRC32(entry, outBuf);
784
+ if (!isValid) {
785
+ throw new Error(Errors.INVALID_CRC);
786
+ }
787
+ }
788
+ }
789
+
790
+ return outBuf;
791
+ }
792
+ }
793
+