@neoware_inc/neozipkit 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (171)
  1. package/README.md +134 -0
  2. package/dist/browser/ZipkitBrowser.d.ts +27 -0
  3. package/dist/browser/ZipkitBrowser.d.ts.map +1 -0
  4. package/dist/browser/ZipkitBrowser.js +303 -0
  5. package/dist/browser/ZipkitBrowser.js.map +1 -0
  6. package/dist/browser/index.d.ts +9 -0
  7. package/dist/browser/index.d.ts.map +1 -0
  8. package/dist/browser/index.esm.d.ts +12 -0
  9. package/dist/browser/index.esm.d.ts.map +1 -0
  10. package/dist/browser/index.esm.js +46 -0
  11. package/dist/browser/index.esm.js.map +1 -0
  12. package/dist/browser/index.js +38 -0
  13. package/dist/browser/index.js.map +1 -0
  14. package/dist/browser-esm/index.d.ts +9 -0
  15. package/dist/browser-esm/index.js +50211 -0
  16. package/dist/browser-esm/index.js.map +7 -0
  17. package/dist/browser-umd/index.d.ts +9 -0
  18. package/dist/browser-umd/index.js +50221 -0
  19. package/dist/browser-umd/index.js.map +7 -0
  20. package/dist/browser-umd/index.min.js +39 -0
  21. package/dist/browser.d.ts +9 -0
  22. package/dist/browser.js +38 -0
  23. package/dist/core/ZipCompress.d.ts +99 -0
  24. package/dist/core/ZipCompress.d.ts.map +1 -0
  25. package/dist/core/ZipCompress.js +287 -0
  26. package/dist/core/ZipCompress.js.map +1 -0
  27. package/dist/core/ZipCopy.d.ts +175 -0
  28. package/dist/core/ZipCopy.d.ts.map +1 -0
  29. package/dist/core/ZipCopy.js +310 -0
  30. package/dist/core/ZipCopy.js.map +1 -0
  31. package/dist/core/ZipDecompress.d.ts +57 -0
  32. package/dist/core/ZipDecompress.d.ts.map +1 -0
  33. package/dist/core/ZipDecompress.js +155 -0
  34. package/dist/core/ZipDecompress.js.map +1 -0
  35. package/dist/core/ZipEntry.d.ts +138 -0
  36. package/dist/core/ZipEntry.d.ts.map +1 -0
  37. package/dist/core/ZipEntry.js +829 -0
  38. package/dist/core/ZipEntry.js.map +1 -0
  39. package/dist/core/Zipkit.d.ts +315 -0
  40. package/dist/core/Zipkit.d.ts.map +1 -0
  41. package/dist/core/Zipkit.js +647 -0
  42. package/dist/core/Zipkit.js.map +1 -0
  43. package/dist/core/ZstdManager.d.ts +56 -0
  44. package/dist/core/ZstdManager.d.ts.map +1 -0
  45. package/dist/core/ZstdManager.js +144 -0
  46. package/dist/core/ZstdManager.js.map +1 -0
  47. package/dist/core/components/HashCalculator.d.ts +138 -0
  48. package/dist/core/components/HashCalculator.d.ts.map +1 -0
  49. package/dist/core/components/HashCalculator.js +360 -0
  50. package/dist/core/components/HashCalculator.js.map +1 -0
  51. package/dist/core/components/Logger.d.ts +73 -0
  52. package/dist/core/components/Logger.d.ts.map +1 -0
  53. package/dist/core/components/Logger.js +156 -0
  54. package/dist/core/components/Logger.js.map +1 -0
  55. package/dist/core/components/ProgressTracker.d.ts +43 -0
  56. package/dist/core/components/ProgressTracker.d.ts.map +1 -0
  57. package/dist/core/components/ProgressTracker.js +112 -0
  58. package/dist/core/components/ProgressTracker.js.map +1 -0
  59. package/dist/core/components/Support.d.ts +64 -0
  60. package/dist/core/components/Support.d.ts.map +1 -0
  61. package/dist/core/components/Support.js +71 -0
  62. package/dist/core/components/Support.js.map +1 -0
  63. package/dist/core/components/Util.d.ts +26 -0
  64. package/dist/core/components/Util.d.ts.map +1 -0
  65. package/dist/core/components/Util.js +95 -0
  66. package/dist/core/components/Util.js.map +1 -0
  67. package/dist/core/constants/Errors.d.ts +52 -0
  68. package/dist/core/constants/Errors.d.ts.map +1 -0
  69. package/dist/core/constants/Errors.js +67 -0
  70. package/dist/core/constants/Errors.js.map +1 -0
  71. package/dist/core/constants/Headers.d.ts +170 -0
  72. package/dist/core/constants/Headers.d.ts.map +1 -0
  73. package/dist/core/constants/Headers.js +194 -0
  74. package/dist/core/constants/Headers.js.map +1 -0
  75. package/dist/core/encryption/Manager.d.ts +58 -0
  76. package/dist/core/encryption/Manager.d.ts.map +1 -0
  77. package/dist/core/encryption/Manager.js +121 -0
  78. package/dist/core/encryption/Manager.js.map +1 -0
  79. package/dist/core/encryption/ZipCrypto.d.ts +172 -0
  80. package/dist/core/encryption/ZipCrypto.d.ts.map +1 -0
  81. package/dist/core/encryption/ZipCrypto.js +554 -0
  82. package/dist/core/encryption/ZipCrypto.js.map +1 -0
  83. package/dist/core/encryption/index.d.ts +9 -0
  84. package/dist/core/encryption/index.d.ts.map +1 -0
  85. package/dist/core/encryption/index.js +17 -0
  86. package/dist/core/encryption/index.js.map +1 -0
  87. package/dist/core/encryption/types.d.ts +29 -0
  88. package/dist/core/encryption/types.d.ts.map +1 -0
  89. package/dist/core/encryption/types.js +12 -0
  90. package/dist/core/encryption/types.js.map +1 -0
  91. package/dist/core/index.d.ts +27 -0
  92. package/dist/core/index.d.ts.map +1 -0
  93. package/dist/core/index.js +59 -0
  94. package/dist/core/index.js.map +1 -0
  95. package/dist/core/version.d.ts +5 -0
  96. package/dist/core/version.d.ts.map +1 -0
  97. package/dist/core/version.js +31 -0
  98. package/dist/core/version.js.map +1 -0
  99. package/dist/index.d.ts +9 -0
  100. package/dist/index.d.ts.map +1 -0
  101. package/dist/index.js +38 -0
  102. package/dist/index.js.map +1 -0
  103. package/dist/node/ZipCompressNode.d.ts +123 -0
  104. package/dist/node/ZipCompressNode.d.ts.map +1 -0
  105. package/dist/node/ZipCompressNode.js +565 -0
  106. package/dist/node/ZipCompressNode.js.map +1 -0
  107. package/dist/node/ZipCopyNode.d.ts +165 -0
  108. package/dist/node/ZipCopyNode.d.ts.map +1 -0
  109. package/dist/node/ZipCopyNode.js +347 -0
  110. package/dist/node/ZipCopyNode.js.map +1 -0
  111. package/dist/node/ZipDecompressNode.d.ts +197 -0
  112. package/dist/node/ZipDecompressNode.d.ts.map +1 -0
  113. package/dist/node/ZipDecompressNode.js +678 -0
  114. package/dist/node/ZipDecompressNode.js.map +1 -0
  115. package/dist/node/ZipkitNode.d.ts +466 -0
  116. package/dist/node/ZipkitNode.d.ts.map +1 -0
  117. package/dist/node/ZipkitNode.js +1426 -0
  118. package/dist/node/ZipkitNode.js.map +1 -0
  119. package/dist/node/index.d.ts +25 -0
  120. package/dist/node/index.d.ts.map +1 -0
  121. package/dist/node/index.js +54 -0
  122. package/dist/node/index.js.map +1 -0
  123. package/dist/types/index.d.ts +45 -0
  124. package/dist/types/index.d.ts.map +1 -0
  125. package/dist/types/index.js +11 -0
  126. package/dist/types/index.js.map +1 -0
  127. package/examples/README.md +261 -0
  128. package/examples/append-data.json +44 -0
  129. package/examples/copy-zip-append.ts +139 -0
  130. package/examples/copy-zip.ts +152 -0
  131. package/examples/create-zip.ts +172 -0
  132. package/examples/extract-zip.ts +118 -0
  133. package/examples/list-zip.ts +161 -0
  134. package/examples/test-files/data.json +116 -0
  135. package/examples/test-files/document.md +80 -0
  136. package/examples/test-files/document.txt +6 -0
  137. package/examples/test-files/file1.txt +48 -0
  138. package/examples/test-files/file2.txt +80 -0
  139. package/examples/tsconfig.json +44 -0
  140. package/package.json +167 -0
  141. package/src/browser/ZipkitBrowser.ts +305 -0
  142. package/src/browser/index.esm.ts +32 -0
  143. package/src/browser/index.ts +19 -0
  144. package/src/core/ZipCompress.ts +370 -0
  145. package/src/core/ZipCopy.ts +434 -0
  146. package/src/core/ZipDecompress.ts +191 -0
  147. package/src/core/ZipEntry.ts +917 -0
  148. package/src/core/Zipkit.ts +794 -0
  149. package/src/core/ZstdManager.ts +165 -0
  150. package/src/core/components/HashCalculator.ts +384 -0
  151. package/src/core/components/Logger.ts +180 -0
  152. package/src/core/components/ProgressTracker.ts +134 -0
  153. package/src/core/components/Support.ts +77 -0
  154. package/src/core/components/Util.ts +91 -0
  155. package/src/core/constants/Errors.ts +78 -0
  156. package/src/core/constants/Headers.ts +205 -0
  157. package/src/core/encryption/Manager.ts +137 -0
  158. package/src/core/encryption/ZipCrypto.ts +650 -0
  159. package/src/core/encryption/index.ts +15 -0
  160. package/src/core/encryption/types.ts +33 -0
  161. package/src/core/index.ts +42 -0
  162. package/src/core/version.ts +33 -0
  163. package/src/index.ts +19 -0
  164. package/src/node/ZipCompressNode.ts +618 -0
  165. package/src/node/ZipCopyNode.ts +437 -0
  166. package/src/node/ZipDecompressNode.ts +793 -0
  167. package/src/node/ZipkitNode.ts +1706 -0
  168. package/src/node/index.ts +40 -0
  169. package/src/types/index.ts +68 -0
  170. package/src/types/modules.d.ts +22 -0
  171. package/src/types/opentimestamps.d.ts +1 -0
@@ -0,0 +1,618 @@
1
+ // ======================================
2
+ // ZipCompressNode.ts - Node.js File-Based Compression
3
+ // Copyright (c) 2025 NeoWare, Inc. All rights reserved.
4
+ // ======================================
5
+ //
6
+ // LOGGING INSTRUCTIONS:
7
+ // ---------------------
8
+ // To enable/disable logging, set loggingEnabled to true/false in the class:
9
+ // private static loggingEnabled: boolean = true; // Enable logging
10
+ // private static loggingEnabled: boolean = false; // Disable logging
11
+ //
12
+ // Logging respects the global Logger level (debug, info, warn, error, silent).
13
+ // Logger level is automatically set to 'debug' when loggingEnabled is true.
14
+ //
15
+
16
+ import { CompressOptions } from '../core/ZipCompress';
17
+ import ZipEntry from '../core/ZipEntry';
18
+ import ZipkitNode from './ZipkitNode';
19
+ import { Logger } from '../core/components/Logger';
20
+ import { CMP_METHOD, GP_FLAG, ENCRYPT_HDR_SIZE } from '../core/constants/Headers';
21
+ import { HashCalculator } from '../core/components/HashCalculator';
22
+ import { ZipCrypto } from '../core/encryption/ZipCrypto';
23
+ import { ZstdManager } from '../core/ZstdManager';
24
+ import Errors from '../core/constants/Errors';
25
+ import * as fs from 'fs';
26
+ import * as path from 'path';
27
+
28
+ const pako = require('pako');
29
+
30
+ // Re-export types from ZipCompress (from core module)
31
+ export type { CompressOptions } from '../core/ZipCompress';
32
+
33
+ /**
34
+ * ZipCompressNode - Node.js file-based compression operations
35
+ *
36
+ * Independent compression implementation for Node.js environments.
37
+ * All compression logic is implemented directly without delegating to ZipCompress.
38
+ *
39
+ * @example
40
+ * ```typescript
41
+ * const zipkitNode = new ZipkitNode();
42
+ * const compressNode = new ZipCompressNode(zipkitNode);
43
+ * const compressed = await compressNode.compressFile('/path/to/file.txt', entry);
44
+ * ```
45
+ */
46
+ export class ZipCompressNode {
47
+ private zipkitNode: ZipkitNode;
48
+
49
+ // Class-level logging control - set to true to enable logging
50
+ private static loggingEnabled: boolean = false;
51
+
52
+ /**
53
+ * Internal logging method - only logs if class logging is enabled
54
+ */
55
+ private log(...args: any[]): void {
56
+ if (ZipCompressNode.loggingEnabled) {
57
+ Logger.debug(`[ZipCompressNode]`, ...args);
58
+ }
59
+ }
60
+
61
+ /**
62
+ * Creates a new ZipCompressNode instance
63
+ * @param zipkitNode - ZipkitNode instance to use for ZIP operations
64
+ */
65
+ constructor(zipkitNode: ZipkitNode) {
66
+ this.zipkitNode = zipkitNode;
67
+ // If logging is enabled, ensure Logger level is set to debug
68
+ if (ZipCompressNode.loggingEnabled) {
69
+ Logger.setLevel('debug');
70
+ }
71
+ }
72
+
73
+ // ============================================================================
74
+ // Compression Methods
75
+ // ============================================================================
76
+
77
+ /**
78
+ * Compress data for a ZIP entry (Buffer-based only)
79
+ * @param entry - ZIP entry to compress
80
+ * @param data - Buffer containing data to compress
81
+ * @param options - Compression options
82
+ * @param onOutputBuffer - Optional callback for streaming output
83
+ * @returns Buffer containing compressed data
84
+ */
85
+ async compressData(entry: ZipEntry, data: Buffer, options?: CompressOptions, onOutputBuffer?: (data: Buffer) => Promise<void>): Promise<Buffer> {
86
+ // Set uncompressed size if not already set
87
+ if (!entry.uncompressedSize || entry.uncompressedSize === 0) {
88
+ entry.uncompressedSize = data.length;
89
+ }
90
+ const totalSize = data.length;
91
+ const bufferSize = options?.bufferSize || this.zipkitNode.getBufferSize();
92
+
93
+ // Determine compression method
94
+ let compressionMethod: number;
95
+
96
+ if (options?.level === 0) {
97
+ compressionMethod = CMP_METHOD.STORED;
98
+ } else if (options?.useZstd) {
99
+ // ZSTD fallback to STORED if file too small
100
+ if (totalSize < 100) {
101
+ compressionMethod = CMP_METHOD.STORED;
102
+ } else {
103
+ compressionMethod = CMP_METHOD.ZSTD;
104
+ }
105
+ } else {
106
+ compressionMethod = CMP_METHOD.DEFLATED;
107
+ }
108
+
109
+ entry.cmpMethod = compressionMethod;
110
+
111
+ // Initialize hash calculator
112
+ const needsHashCalculation = (!entry.crc || entry.crc === 0) || (options?.useSHA256 && !entry.sha256);
113
+ const hashCalculator = needsHashCalculation ? new HashCalculator({ useSHA256: options?.useSHA256 && !entry.sha256 || false }) : null;
114
+
115
+ // Calculate hashes if needed
116
+ let buffer: Buffer = Buffer.alloc(0);
117
+
118
+ if (hashCalculator) {
119
+ hashCalculator.update(data);
120
+ if (!entry.crc || entry.crc === 0) {
121
+ entry.crc = hashCalculator.finalizeCRC32();
122
+ }
123
+ if (options?.useSHA256 && !entry.sha256) {
124
+ entry.sha256 = hashCalculator.finalizeSHA256();
125
+ }
126
+ }
127
+
128
+ // Compress based on method
129
+ if (compressionMethod === CMP_METHOD.STORED) {
130
+ buffer = data;
131
+ entry.compressedSize = data.length;
132
+ } else if (compressionMethod === CMP_METHOD.ZSTD) {
133
+ buffer = await this.zstdCompress(data, options, bufferSize, entry, onOutputBuffer);
134
+ } else {
135
+ // DEFLATED
136
+ buffer = await this.deflateCompress(data, options, bufferSize, entry, onOutputBuffer);
137
+ }
138
+
139
+ // Only set compressed size if it hasn't been set already
140
+ if (entry.compressedSize === undefined || entry.compressedSize === 0) {
141
+ entry.compressedSize = buffer.length;
142
+ }
143
+
144
+ // Apply encryption if password is provided
145
+ if (options?.password && buffer.length > 0) {
146
+ buffer = this.encryptCompressedData(buffer, entry, options.password);
147
+ }
148
+
149
+ return buffer;
150
+ }
151
+
152
+
153
+ /**
154
+ * Compresses data using deflate algorithm with chunked processing
155
+ * @param data - Data to compress (Buffer or chunked reader)
156
+ * @param options - Compression options
157
+ * @param bufferSize - Size of buffer to read (default: 512KB)
158
+ * @param entry - Optional ZIP entry for hash calculation
159
+ * @param onOutputBuffer - Optional callback for streaming output
160
+ * @returns Compressed data buffer
161
+ */
162
+ async deflateCompress(
163
+ data: Buffer | { totalSize: number, onReadChunk: (position: number, size: number) => Buffer, onOutChunk: (chunk: Buffer) => void },
164
+ options?: CompressOptions,
165
+ bufferSize?: number,
166
+ entry?: ZipEntry,
167
+ onOutputBuffer?: (data: Buffer) => Promise<void>
168
+ ): Promise<Buffer> {
169
+ const effectiveBufferSize = bufferSize || options?.bufferSize || this.zipkitNode.getBufferSize();
170
+
171
+ // Initialize hash calculator for incremental hash calculation during chunk reads
172
+ const needsHashCalculation = entry && ((!entry.crc || entry.crc === 0) || (options?.useSHA256 && !entry.sha256));
173
+ const hashCalculator = needsHashCalculation ? new HashCalculator({ useSHA256: options?.useSHA256 && !entry.sha256 || false }) : null;
174
+
175
+ if (options?.level === 0) {
176
+ // Store without compression
177
+ if (Buffer.isBuffer(data)) {
178
+ // For buffer, calculate hashes if needed
179
+ if (hashCalculator && entry) {
180
+ hashCalculator.update(data);
181
+ if (!entry.crc || entry.crc === 0) {
182
+ entry.crc = hashCalculator.finalizeCRC32();
183
+ }
184
+ if (options?.useSHA256 && !entry.sha256) {
185
+ entry.sha256 = hashCalculator.finalizeSHA256();
186
+ }
187
+ }
188
+ return data;
189
+ } else {
190
+ // For chunked reader, process in chunks and call onOutputBuffer
191
+ let position = 0;
192
+ let totalProcessed = 0;
193
+
194
+ while (position < data.totalSize) {
195
+ const readSize = Math.min(effectiveBufferSize, data.totalSize - position);
196
+ const chunk = data.onReadChunk(position, readSize);
197
+
198
+ // [READ] -> [HASH] -> [OUTPUT] sequence
199
+ if (hashCalculator) {
200
+ hashCalculator.update(chunk);
201
+ }
202
+
203
+ if (onOutputBuffer) {
204
+ await onOutputBuffer(chunk);
205
+ }
206
+
207
+ totalProcessed += chunk.length;
208
+ position += chunk.length;
209
+ }
210
+
211
+ // Finalize hashes
212
+ if (hashCalculator && entry) {
213
+ if (!entry.crc || entry.crc === 0) {
214
+ entry.crc = hashCalculator.finalizeCRC32();
215
+ }
216
+ if (options?.useSHA256 && !entry.sha256) {
217
+ entry.sha256 = hashCalculator.finalizeSHA256();
218
+ }
219
+ }
220
+
221
+ return Buffer.alloc(0);
222
+ }
223
+ }
224
+
225
+ try {
226
+ const level = options?.level as 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | undefined;
227
+ const isBuffer = Buffer.isBuffer(data);
228
+ const totalSize = isBuffer ? data.length : data.totalSize;
229
+ const compressedChunks: Buffer[] = [];
230
+ let totalProcessed = 0;
231
+ let totalCompressedSize = 0;
232
+ let position = 0;
233
+
234
+ if (isBuffer) {
235
+ // Buffer-based processing
236
+ if (hashCalculator && entry) {
237
+ hashCalculator.update(data);
238
+ // Always set CRC if hash calculator was used (it calculated the correct CRC)
239
+ entry.crc = hashCalculator.finalizeCRC32();
240
+ if (options?.useSHA256 && !entry.sha256) {
241
+ entry.sha256 = hashCalculator.finalizeSHA256();
242
+ }
243
+ }
244
+
245
+ const result = pako.deflateRaw(data, { level: level ?? 6 });
246
+ const compressed = Buffer.from(result.buffer, result.byteOffset, result.byteLength);
247
+
248
+ if (onOutputBuffer) {
249
+ await onOutputBuffer(compressed);
250
+ }
251
+
252
+ return compressed;
253
+ } else {
254
+ // Chunked reader processing - use streaming deflator to maintain state across chunks
255
+ const deflator = new pako.Deflate({ level: level ?? 6, raw: true });
256
+ const compressedChunks: Buffer[] = [];
257
+ let resultOffset = 0; // Track how much of deflator.result we've already processed
258
+
259
+ while (position < totalSize) {
260
+ const readSize = Math.min(effectiveBufferSize, totalSize - position);
261
+ const chunk = data.onReadChunk(position, readSize);
262
+ const isLast = position + readSize >= totalSize;
263
+
264
+ // [READ] -> [HASH] sequence
265
+ if (hashCalculator) {
266
+ hashCalculator.update(chunk);
267
+ }
268
+
269
+ // Push chunk to streaming deflator (maintains state across chunks)
270
+ deflator.push(chunk, isLast);
271
+
272
+ // Collect compressed chunks from deflator
273
+ // deflator.result accumulates compressed data, so we need to process only new data
274
+ if (deflator.result && deflator.result.length > resultOffset) {
275
+ const newCompressed = Buffer.from(deflator.result.subarray(resultOffset));
276
+ compressedChunks.push(newCompressed);
277
+ totalCompressedSize += newCompressed.length;
278
+ resultOffset = deflator.result.length;
279
+
280
+ if (onOutputBuffer) {
281
+ await onOutputBuffer(newCompressed);
282
+ }
283
+ }
284
+
285
+ totalProcessed += chunk.length;
286
+ position += chunk.length;
287
+ }
288
+
289
+ // Finalize hashes after all chunks processed and compressed
290
+ if (hashCalculator && entry) {
291
+ // Always set CRC if hash calculator was used (it calculated the correct CRC)
292
+ entry.crc = hashCalculator.finalizeCRC32();
293
+ if (options?.useSHA256 && !entry.sha256) {
294
+ entry.sha256 = hashCalculator.finalizeSHA256();
295
+ }
296
+ }
297
+
298
+ // For chunked processing, return empty buffer (data already written via onOutputBuffer)
299
+ if (entry) {
300
+ entry.compressedSize = totalCompressedSize;
301
+ }
302
+ return Buffer.alloc(0);
303
+ }
304
+ } catch (e) {
305
+ Logger.error('Error during chunked deflate compression:', e);
306
+ throw new Error(Errors.COMPRESSION_ERROR);
307
+ }
308
+ }
309
+
310
+ /**
311
+ * Compresses data using deflate algorithm (legacy method for small buffers)
312
+ * @param inbuf - Data to compress
313
+ * @param options - Compression options
314
+ * @returns Compressed data buffer
315
+ */
316
+ deflate(inbuf: Buffer, options?: CompressOptions): Buffer {
317
+ if (options?.level == 0) {
318
+ return inbuf; // Store without compression
319
+ }
320
+ try {
321
+ const level = options?.level as 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | undefined;
322
+ const result = pako.deflateRaw(inbuf, {
323
+ level: level ?? 6
324
+ });
325
+ return Buffer.from(result.buffer, result.byteOffset, result.byteLength);
326
+ } catch (e) {
327
+ Logger.error('Error during compression:', e);
328
+ throw new Error(Errors.COMPRESSION_ERROR);
329
+ }
330
+ }
331
+
332
+ /**
333
+ * Compresses data using Zstandard (zstd) algorithm
334
+ * @param input - Buffer to compress OR chunked reader object with totalSize and readChunk callback
335
+ * @param options - Compression options
336
+ * @param bufferSize - Size of buffer to read if using chunked reader (default: 512KB)
337
+ * @param entry - Optional ZIP entry for hash calculation
338
+ * @param onOutputBuffer - Optional callback for streaming output
339
+ * @returns Compressed data buffer
340
+ */
341
+ async zstdCompress(
342
+ input: Buffer | { totalSize: number, readChunk: (position: number, size: number) => Buffer },
343
+ options?: CompressOptions,
344
+ bufferSize?: number,
345
+ entry?: ZipEntry,
346
+ onOutputBuffer?: (data: Buffer) => Promise<void>
347
+ ): Promise<Buffer> {
348
+ const effectiveBufferSize = bufferSize || options?.bufferSize || this.zipkitNode.getBufferSize();
349
+ const isBuffer = Buffer.isBuffer(input);
350
+ const totalSize = isBuffer ? input.length : input.totalSize;
351
+
352
+ if (options?.level == 0) {
353
+ // For store mode, return as-is
354
+ if (isBuffer) {
355
+ return input;
356
+ } else {
357
+ // Read all chunks
358
+ const chunks: Buffer[] = [];
359
+ let position = 0;
360
+ while (position < totalSize) {
361
+ const size = Math.min(effectiveBufferSize, totalSize - position);
362
+ const chunk = input.readChunk(position, size);
363
+ chunks.push(chunk);
364
+ position += size;
365
+ }
366
+ return Buffer.concat(chunks);
367
+ }
368
+ }
369
+
370
+ try {
371
+ // Zstd compression levels range from 1 (fastest) to 22 (highest compression)
372
+ // Map our 1-9 level to a reasonable zstd range (1-19)
373
+ const level = options?.level ?? 6;
374
+ const zstdLevel = Math.min(Math.max(1, Math.floor(level * 2.1)), 19);
375
+
376
+ // Get the full buffer (zstd doesn't support true streaming compression)
377
+ const inbuf = isBuffer ? input : (() => {
378
+ const chunks: Buffer[] = [];
379
+ let position = 0;
380
+ while (position < totalSize) {
381
+ const size = Math.min(effectiveBufferSize, totalSize - position);
382
+ const chunk = input.readChunk(position, size);
383
+ chunks.push(chunk);
384
+ position += size;
385
+ }
386
+ return Buffer.concat(chunks);
387
+ })();
388
+
389
+ // Validate input
390
+ if (!inbuf || inbuf.length === 0) {
391
+ throw new Error('ZSTD compression: empty input buffer');
392
+ }
393
+
394
+ // Convert Buffer to Uint8Array for WASM module
395
+ const inputArray = new Uint8Array(inbuf.buffer, inbuf.byteOffset, inbuf.byteLength);
396
+
397
+ // Compress the data with zstd using global ZstdManager
398
+ const compressedData = await ZstdManager.compress(inputArray, zstdLevel);
399
+ const compressedBuffer = Buffer.from(compressedData);
400
+
401
+ // Set the compressed size in the entry for ZIP file structure
402
+ if (entry) {
403
+ entry.compressedSize = compressedBuffer.length;
404
+ }
405
+
406
+ if (onOutputBuffer) {
407
+ await onOutputBuffer(compressedBuffer);
408
+ }
409
+
410
+ return compressedBuffer;
411
+ } catch (e) {
412
+ Logger.error('Error during zstd compression:', e);
413
+ throw new Error(Errors.COMPRESSION_ERROR);
414
+ }
415
+ }
416
+
417
+ /**
418
+ * Encrypt compressed data using PKZIP encryption
419
+ * Creates encryption header, encrypts compressed data, and updates entry flags
420
+ * @param compressedData - Compressed data to encrypt
421
+ * @param entry - ZIP entry to encrypt
422
+ * @param password - Password for encryption
423
+ * @returns Encrypted buffer (encrypted header + encrypted compressed data)
424
+ */
425
+ private encryptCompressedData(compressedData: Buffer, entry: ZipEntry, password: string): Buffer {
426
+ // Create ZipCrypto instance
427
+ const zipCrypto = new ZipCrypto();
428
+
429
+ // Encrypt the compressed data (includes header creation and encryption)
430
+ const encryptedData = zipCrypto.encryptBuffer(entry, compressedData, password);
431
+
432
+ // Set encryption flags on entry
433
+ entry.isEncrypted = true;
434
+ entry.bitFlags |= GP_FLAG.ENCRYPTED;
435
+
436
+ // Update compressed size to include encryption header (12 bytes)
437
+ entry.compressedSize = encryptedData.length;
438
+
439
+ return encryptedData;
440
+ }
441
+
442
+ // ============================================================================
443
+ // File-Based Compression Methods
444
+ // ============================================================================
445
+
446
+ /**
447
+ * Compress a file from disk
448
+ *
449
+ * Reads file from disk, sets entry metadata from file stats, and compresses the data.
450
+ *
451
+ * @param filePath - Path to the file to compress
452
+ * @param entry - ZIP entry to compress (filename should already be set)
453
+ * @param options - Optional compression options
454
+ * @returns Promise resolving to Buffer containing compressed data
455
+ * @throws Error if file not found or not a file
456
+ */
457
+ async compressFile(
458
+ filePath: string,
459
+ entry: ZipEntry,
460
+ options?: CompressOptions
461
+ ): Promise<Buffer> {
462
+ // Validate file exists
463
+ if (!fs.existsSync(filePath)) {
464
+ throw new Error(`File not found: ${filePath}`);
465
+ }
466
+
467
+ const stats = fs.statSync(filePath);
468
+ if (!stats.isFile()) {
469
+ throw new Error(`Path is not a file: ${filePath}`);
470
+ }
471
+
472
+ // Set entry metadata from file stats
473
+ entry.uncompressedSize = stats.size;
474
+ entry.timeDateDOS = entry.setDateTime(stats.mtime);
475
+
476
+ // Read file data
477
+ const fileData = fs.readFileSync(filePath);
478
+
479
+ // Compress the buffer using compressData (buffer-based compression)
480
+ return await this.compressData(entry, fileData, options);
481
+ }
482
+
483
+ /**
484
+ * Compress a file from disk using streaming for large files
485
+ *
486
+ * Streams file in chunks for memory-efficient compression of large files.
487
+ * All chunk reading logic is handled in this server class.
488
+ *
489
+ * @param filePath - Path to the file to compress
490
+ * @param entry - ZIP entry to compress (filename should already be set)
491
+ * @param options - Optional compression options
492
+ * @param onOutputBuffer - Optional callback for streaming output
493
+ * @returns Promise resolving to Buffer containing compressed data
494
+ * @throws Error if file not found or not a file
495
+ */
496
+ async compressFileStream(
497
+ filePath: string,
498
+ entry: ZipEntry,
499
+ options?: CompressOptions,
500
+ onOutputBuffer?: (data: Buffer) => Promise<void>
501
+ ): Promise<Buffer> {
502
+ // Validate file exists
503
+ if (!fs.existsSync(filePath)) {
504
+ throw new Error(`File not found: ${filePath}`);
505
+ }
506
+
507
+ const stats = fs.statSync(filePath);
508
+ if (!stats.isFile()) {
509
+ throw new Error(`Path is not a file: ${filePath}`);
510
+ }
511
+
512
+ // Set entry metadata from file stats
513
+ entry.uncompressedSize = stats.size;
514
+ entry.timeDateDOS = entry.setDateTime(stats.mtime);
515
+
516
+ // Determine buffer size for chunked reading
517
+ const bufferSize = options?.bufferSize || this.zipkitNode.getBufferSize();
518
+
519
+ // Create chunked reader for streaming compression
520
+ // All chunk reading logic is in this server class
521
+ const chunkedReader = {
522
+ totalSize: stats.size,
523
+ onReadChunk: (position: number, size: number): Buffer => {
524
+ const fd = fs.openSync(filePath, 'r');
525
+ try {
526
+ const buffer = Buffer.alloc(size);
527
+ const bytesRead = fs.readSync(fd, buffer, 0, size, position);
528
+ return buffer.subarray(0, bytesRead);
529
+ } finally {
530
+ fs.closeSync(fd);
531
+ }
532
+ },
533
+ onOutChunk: (chunk: Buffer): void => {
534
+ // Output chunks are accumulated in compression methods
535
+ }
536
+ };
537
+
538
+ // Determine compression method and call appropriate method
539
+ const compressionMethod = options?.level === 0 ? 'STORED' :
540
+ options?.useZstd ? 'ZSTD' : 'DEFLATED';
541
+
542
+ // Set entry compression method before compression
543
+ if (compressionMethod === 'STORED') {
544
+ entry.cmpMethod = CMP_METHOD.STORED;
545
+ } else if (compressionMethod === 'ZSTD') {
546
+ entry.cmpMethod = CMP_METHOD.ZSTD;
547
+ } else {
548
+ entry.cmpMethod = CMP_METHOD.DEFLATED;
549
+ }
550
+
551
+ if (compressionMethod === 'STORED') {
552
+ // For STORED, read file and pass as buffer
553
+ const fileData = fs.readFileSync(filePath);
554
+ return await this.compressData(entry, fileData, options, onOutputBuffer);
555
+ } else if (compressionMethod === 'ZSTD') {
556
+ // ZSTD requires full buffer, so read file first
557
+ const fileData = fs.readFileSync(filePath);
558
+ return await this.compressData(entry, fileData, options, onOutputBuffer);
559
+ } else {
560
+ // DEFLATED: Use deflateCompress with chunked reader
561
+ return await this.deflateCompress(chunkedReader, options, bufferSize, entry, onOutputBuffer);
562
+ }
563
+ }
564
+
565
+ /**
566
+ * Compress multiple files from disk to a ZIP file
567
+ *
568
+ * Batch compression from file paths. Creates entries for each file and writes
569
+ * to output ZIP file. This is a simplified implementation - full implementation
570
+ * would need to write ZIP structure incrementally.
571
+ *
572
+ * @param filePaths - Array of file paths to compress
573
+ * @param outputPath - Path where the ZIP file should be created
574
+ * @param options - Optional compression options
575
+ * @returns Promise that resolves when ZIP creation is complete
576
+ * @throws Error if any file not found
577
+ */
578
+ async compressFiles(
579
+ filePaths: string[],
580
+ outputPath: string,
581
+ options?: CompressOptions
582
+ ): Promise<void> {
583
+ // This is a placeholder for future implementation
584
+ // Full implementation would need to:
585
+ // 1. Create ZIP file structure
586
+ // 2. Write local headers and compressed data for each file
587
+ // 3. Write central directory
588
+ // 4. Write end of central directory record
589
+ // For now, this is a simplified version that compresses files but doesn't write ZIP structure
590
+
591
+ const entries: ZipEntry[] = [];
592
+ const compressedData: Buffer[] = [];
593
+
594
+ for (const filePath of filePaths) {
595
+ if (!fs.existsSync(filePath)) {
596
+ throw new Error(`File not found: ${filePath}`);
597
+ }
598
+
599
+ const stats = fs.statSync(filePath);
600
+ if (!stats.isFile()) {
601
+ continue; // Skip directories
602
+ }
603
+
604
+ // Create entry
605
+ const entryName = path.relative(process.cwd(), filePath) || path.basename(filePath);
606
+ const entry = this.zipkitNode.createZipEntry(entryName);
607
+
608
+ // Compress file
609
+ const compressed = await this.compressFile(filePath, entry, options);
610
+ entries.push(entry);
611
+ compressedData.push(compressed);
612
+ }
613
+
614
+ // For now, this is a placeholder
615
+ // Full implementation would write ZIP structure to outputPath
616
+ throw new Error('compressFiles() - Full implementation pending. Use neozip CLI for now.');
617
+ }
618
+ }