@rushstack/zipsync 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89)
  1. package/CHANGELOG.json +31 -0
  2. package/CHANGELOG.md +11 -0
  3. package/LICENSE +24 -0
  4. package/README.md +48 -0
  5. package/bin/zipsync +2 -0
  6. package/lib/ZipSyncCommandLineParser.d.ts +17 -0
  7. package/lib/ZipSyncCommandLineParser.d.ts.map +1 -0
  8. package/lib/ZipSyncCommandLineParser.js +97 -0
  9. package/lib/ZipSyncCommandLineParser.js.map +1 -0
  10. package/lib/benchmark.test.d.ts +2 -0
  11. package/lib/benchmark.test.d.ts.map +1 -0
  12. package/lib/benchmark.test.js.map +1 -0
  13. package/lib/compress.d.ts +8 -0
  14. package/lib/compress.d.ts.map +1 -0
  15. package/lib/compress.js +121 -0
  16. package/lib/compress.js.map +1 -0
  17. package/lib/crc32.d.ts +3 -0
  18. package/lib/crc32.d.ts.map +1 -0
  19. package/lib/crc32.js +69 -0
  20. package/lib/crc32.js.map +1 -0
  21. package/lib/crc32.test.d.ts +2 -0
  22. package/lib/crc32.test.d.ts.map +1 -0
  23. package/lib/crc32.test.js.map +1 -0
  24. package/lib/fs.d.ts +13 -0
  25. package/lib/fs.d.ts.map +1 -0
  26. package/lib/fs.js +57 -0
  27. package/lib/fs.js.map +1 -0
  28. package/lib/hash.d.ts +3 -0
  29. package/lib/hash.d.ts.map +1 -0
  30. package/lib/hash.js +43 -0
  31. package/lib/hash.js.map +1 -0
  32. package/lib/index.d.ts +3 -0
  33. package/lib/index.d.ts.map +1 -0
  34. package/lib/index.js +10 -0
  35. package/lib/index.js.map +1 -0
  36. package/lib/index.test.d.ts +2 -0
  37. package/lib/index.test.d.ts.map +1 -0
  38. package/lib/index.test.js.map +1 -0
  39. package/lib/pack.d.ts +55 -0
  40. package/lib/pack.d.ts.map +1 -0
  41. package/lib/pack.js +415 -0
  42. package/lib/pack.js.map +1 -0
  43. package/lib/packWorker.d.ts +31 -0
  44. package/lib/packWorker.d.ts.map +1 -0
  45. package/lib/packWorker.js +60 -0
  46. package/lib/packWorker.js.map +1 -0
  47. package/lib/packWorkerAsync.d.ts +4 -0
  48. package/lib/packWorkerAsync.d.ts.map +1 -0
  49. package/lib/packWorkerAsync.js +79 -0
  50. package/lib/packWorkerAsync.js.map +1 -0
  51. package/lib/perf.d.ts +7 -0
  52. package/lib/perf.d.ts.map +1 -0
  53. package/lib/perf.js +56 -0
  54. package/lib/perf.js.map +1 -0
  55. package/lib/start.d.ts +2 -0
  56. package/lib/start.d.ts.map +1 -0
  57. package/lib/start.js +19 -0
  58. package/lib/start.js.map +1 -0
  59. package/lib/start.test.d.ts +2 -0
  60. package/lib/start.test.d.ts.map +1 -0
  61. package/lib/start.test.js.map +1 -0
  62. package/lib/testUtils.d.ts +9 -0
  63. package/lib/testUtils.d.ts.map +1 -0
  64. package/lib/testUtils.js +77 -0
  65. package/lib/testUtils.js.map +1 -0
  66. package/lib/unpack.d.ts +41 -0
  67. package/lib/unpack.d.ts.map +1 -0
  68. package/lib/unpack.js +370 -0
  69. package/lib/unpack.js.map +1 -0
  70. package/lib/unpackWorker.d.ts +31 -0
  71. package/lib/unpackWorker.d.ts.map +1 -0
  72. package/lib/unpackWorker.js +56 -0
  73. package/lib/unpackWorker.js.map +1 -0
  74. package/lib/unpackWorkerAsync.d.ts +4 -0
  75. package/lib/unpackWorkerAsync.d.ts.map +1 -0
  76. package/lib/unpackWorkerAsync.js +79 -0
  77. package/lib/unpackWorkerAsync.js.map +1 -0
  78. package/lib/workerAsync.test.d.ts +2 -0
  79. package/lib/workerAsync.test.d.ts.map +1 -0
  80. package/lib/workerAsync.test.js.map +1 -0
  81. package/lib/zipSyncUtils.d.ts +20 -0
  82. package/lib/zipSyncUtils.d.ts.map +1 -0
  83. package/lib/zipSyncUtils.js +9 -0
  84. package/lib/zipSyncUtils.js.map +1 -0
  85. package/lib/zipUtils.d.ts +127 -0
  86. package/lib/zipUtils.d.ts.map +1 -0
  87. package/lib/zipUtils.js +304 -0
  88. package/lib/zipUtils.js.map +1 -0
  89. package/package.json +31 -0
package/lib/pack.js ADDED
@@ -0,0 +1,415 @@
1
+ "use strict";
2
+ // Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
3
+ // See LICENSE in the project root for license information.
4
// TypeScript emit helper: re-exports member `k` of module `m` onto object `o` under the
// name `k2` (defaults to `k`). When property descriptors are supported, a getter is
// installed so the re-export stays a *live* binding; the legacy branch copies by value.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    // Re-wrap in a getter when there is no descriptor, or when the existing descriptor
    // would not faithfully forward reads (plain CommonJS export, writable/configurable slot).
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    // Fallback for pre-ES5 environments: direct (non-live) property copy.
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
15
// TypeScript emit helper: attaches the interop `default` property to a synthetic
// namespace object `o`, pointing at the original CommonJS module `v`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
20
// TypeScript emit helper implementing `import * as ns from '...'` interop:
// a real ES module is returned as-is; a CommonJS module is wrapped in a namespace
// object whose members are re-exported via __createBinding, plus a `default` binding.
var __importStar = (this && this.__importStar) || (function () {
    // Lazily pick the key-enumeration strategy on first use, then memoize it.
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            // Pre-ES5 fallback: collect own enumerable keys manually.
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        // Copy every own member except 'default' (which is set separately below).
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
37
// TypeScript emit helper for `using`/`await using` declarations: registers `value` on the
// environment's disposal stack and returns it unchanged. Null/undefined values are tolerated
// (an async placeholder frame is still pushed so disposal ordering stays correct).
var __addDisposableResource = (this && this.__addDisposableResource) || function (env, value, async) {
    if (value !== null && value !== void 0) {
        // Only objects/functions can carry a [Symbol.dispose] method.
        if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected.");
        var dispose, inner;
        if (async) {
            // `await using`: prefer the async disposer when present.
            if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
            dispose = value[Symbol.asyncDispose];
        }
        if (dispose === void 0) {
            // Fall back to the sync disposer (also used by `await using` when no async one exists).
            if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
            dispose = value[Symbol.dispose];
            if (async) inner = dispose;
        }
        if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
        // Wrap a sync disposer used in async position so a throw becomes a rejected promise.
        if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } };
        env.stack.push({ value: value, dispose: dispose, async: async });
    }
    else if (async) {
        // Placeholder frame: keeps await sequencing for `await using x = null`.
        env.stack.push({ async: true });
    }
    return value;
};
59
// TypeScript emit helper: unwinds the `using` disposal stack in LIFO order. Errors raised
// during disposal are chained via SuppressedError (polyfilled below when the host lacks it),
// and the original error recorded in `env` is rethrown/rejected after all disposals run.
var __disposeResources = (this && this.__disposeResources) || (function (SuppressedError) {
    return function (env) {
        // Record a disposal failure, chaining onto any earlier error.
        function fail(e) {
            env.error = env.hasError ? new SuppressedError(e, env.error, "An error was suppressed during disposal.") : e;
            env.hasError = true;
        }
        // `s` is a small state bitfield: 1 => a needs-deferral marker was seen,
        // 2 => an async disposer is in flight; `next` is re-entered after each await.
        var r, s = 0;
        function next() {
            while (r = env.stack.pop()) {
                try {
                    // Defer a sync disposer encountered while in deferred mode to a microtask.
                    if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next);
                    if (r.dispose) {
                        var result = r.dispose.call(r.value);
                        // Async disposer: continue unwinding only after it settles.
                        if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); });
                    }
                    else s |= 1;
                }
                catch (e) {
                    fail(e);
                }
            }
            // Async path completed: settle with the accumulated error, if any.
            if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve();
            // Sync path: rethrow the recorded error so callers observe it.
            if (env.hasError) throw env.error;
        }
        return next();
    };
})(typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) {
    // Minimal SuppressedError polyfill matching the ES proposal's shape.
    var e = new Error(message);
    return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
});
89
+ Object.defineProperty(exports, "__esModule", { value: true });
90
+ exports.pack = pack;
91
+ const fs = __importStar(require("node:fs"));
92
+ const path = __importStar(require("node:path"));
93
+ const crypto = __importStar(require("node:crypto"));
94
+ const zlib = __importStar(require("node:zlib"));
95
+ const crc32_1 = require("./crc32");
96
+ const fs_1 = require("./fs");
97
+ const compress_1 = require("./compress");
98
+ const perf_1 = require("./perf");
99
+ const zipUtils_1 = require("./zipUtils");
100
+ const hash_1 = require("./hash");
101
+ const zipSyncUtils_1 = require("./zipSyncUtils");
102
+ /**
103
+ * File extensions for which additional DEFLATE/ZSTD compression is unlikely to help.
104
+ * Used by the 'auto' compression heuristic to avoid wasting CPU on data that is already
105
+ * compressed (images, media, existing archives, fonts, etc.).
106
+ */
107
+ const LIKELY_COMPRESSED_EXTENSION_REGEX = /\.(?:zip|gz|tgz|bz2|xz|7z|rar|jpg|jpeg|png|gif|webp|avif|mp4|m4v|mov|mkv|webm|mp3|ogg|aac|flac|pdf|woff|woff2)$/;
108
+ /**
109
+ * Basic heuristic: skip re-compressing file types that are already compressed.
110
+ */
111
+ function isLikelyAlreadyCompressed(filename) {
112
+ return LIKELY_COMPRESSED_EXTENSION_REGEX.test(filename.toLowerCase());
113
+ }
114
+ /**
115
+ * Map zip compression method code -> incremental zlib mode label
116
+ */
117
+ const zlibPackModes = {
118
+ [zipUtils_1.ZSTD_COMPRESSION]: 'zstd-compress',
119
+ [zipUtils_1.DEFLATE_COMPRESSION]: 'deflate',
120
+ [zipUtils_1.STORE_COMPRESSION]: undefined
121
+ };
122
+ /**
123
+ * Public facing CLI option -> actual zip method used for a file we decide to compress.
124
+ */
125
+ const zipSyncCompressionOptions = {
126
+ store: zipUtils_1.STORE_COMPRESSION,
127
+ deflate: zipUtils_1.DEFLATE_COMPRESSION,
128
+ zstd: zipUtils_1.ZSTD_COMPRESSION,
129
+ auto: zipUtils_1.DEFLATE_COMPRESSION
130
+ };
131
+ /**
132
+ * Create a zipsync archive by enumerating target directories, then streaming each file into the
133
+ * output zip using the local file header + (optional compressed data) + data descriptor pattern.
134
+ *
135
+ * Performance characteristics:
136
+ * - Single pass per file (no read-then-compress-then-write buffering). CRC32 + SHA-1 are computed
137
+ * while streaming so the metadata JSON can later be used for selective unpack.
138
+ * - Data descriptor usage (bit 3) allows writing headers before we know sizes or CRC32.
139
+ * - A single timestamp (captured once) is applied to all entries for determinism.
140
+ * - Metadata entry is added as a normal zip entry at the end (before central directory) so legacy
141
+ * tools can still list/extract it, while zipsync can quickly parse file hashes.
142
+ */
143
+ function pack({ archivePath, targetDirectories: rawTargetDirectories, baseDir: rawBaseDir, compression, terminal, inputBuffer = Buffer.allocUnsafeSlow(zipSyncUtils_1.defaultBufferSize), outputBuffer = Buffer.allocUnsafeSlow(zipSyncUtils_1.defaultBufferSize) }) {
144
+ const env_1 = { stack: [], error: void 0, hasError: false };
145
+ try {
146
+ const baseDir = path.resolve(rawBaseDir);
147
+ const targetDirectories = rawTargetDirectories.map((dir) => path.join(baseDir, dir));
148
+ terminal.writeLine(`Packing to ${archivePath} from ${rawTargetDirectories.join(', ')}`);
149
+ (0, perf_1.markStart)('pack.total');
150
+ terminal.writeDebugLine('Starting pack');
151
+ // Pass 1: enumerate files with a queue to avoid deep recursion
152
+ (0, perf_1.markStart)('pack.enumerate');
153
+ const filePaths = [];
154
+ const queue = targetDirectories.map((dir) => ({ dir, depth: 0 }));
155
+ while (queue.length) {
156
+ const { dir: currentDir, depth } = queue.shift();
157
+ terminal.writeDebugLine(`Enumerating directory: ${currentDir}`);
158
+ const padding = depth === 0 ? '' : '-↳'.repeat(depth);
159
+ let items;
160
+ try {
161
+ items = fs.readdirSync(currentDir, { withFileTypes: true });
162
+ }
163
+ catch (e) {
164
+ if (e &&
165
+ (e.code === 'ENOENT' || e.code === 'ENOTDIR')) {
166
+ terminal.writeWarningLine(`Failed to read directory: ${currentDir}. Ignoring.`);
167
+ continue;
168
+ }
169
+ else {
170
+ throw e;
171
+ }
172
+ }
173
+ for (const item of items) {
174
+ const fullPath = path.join(currentDir, item.name);
175
+ if (item.isFile()) {
176
+ const relativePath = path.relative(baseDir, fullPath).replace(/\\/g, '/');
177
+ terminal.writeVerboseLine(`${padding}${item.name}`);
178
+ filePaths.push(relativePath);
179
+ }
180
+ else if (item.isDirectory()) {
181
+ terminal.writeVerboseLine(`${padding}${item.name}/`);
182
+ queue.push({ dir: fullPath, depth: depth + 1 });
183
+ }
184
+ else {
185
+ throw new Error(`Unexpected item (not file or directory): ${fullPath}. Aborting.`);
186
+ }
187
+ }
188
+ }
189
+ terminal.writeLine(`Found ${filePaths.length} files to pack (enumerated)`);
190
+ (0, perf_1.markEnd)('pack.enumerate');
191
+ // Pass 2: stream each file: read chunks -> hash + (maybe) compress -> write local header + data descriptor.
192
+ (0, perf_1.markStart)('pack.prepareEntries');
193
+ terminal.writeDebugLine(`Opening archive for write: ${archivePath}`);
194
+ const zipFile = __addDisposableResource(env_1, (0, fs_1.getDisposableFileHandle)(archivePath, 'w'), false);
195
+ let currentOffset = 0;
196
+ /**
197
+ * Write a raw chunk to the archive file descriptor, updating current offset.
198
+ */
199
+ function writeChunkToZip(chunk, lengthBytes = chunk.byteLength) {
200
+ let offset = 0;
201
+ while (lengthBytes > 0 && offset < chunk.byteLength) {
202
+ // In practice this call always writes all data at once, but the spec says it is not an error
203
+ // for it to not do so. Possibly that situation comes up when writing to something that is not
204
+ // an ordinary file.
205
+ const written = fs.writeSync(zipFile.fd, chunk, offset, lengthBytes);
206
+ lengthBytes -= written;
207
+ offset += written;
208
+ }
209
+ currentOffset += offset;
210
+ }
211
+ /** Convenience wrapper for writing multiple buffers sequentially. */
212
+ function writeChunksToZip(chunks) {
213
+ for (const chunk of chunks) {
214
+ writeChunkToZip(chunk);
215
+ }
216
+ }
217
+ const dosDateTimeNow = (0, zipUtils_1.dosDateTime)(new Date());
218
+ /**
219
+ * Stream a single file into the archive.
220
+ * Steps:
221
+ * 1. Decide compression (based on user choice + heuristic).
222
+ * 2. Emit local file header (sizes/CRC zeroed because we use a data descriptor).
223
+ * 3. Read file in 32 MiB chunks: update SHA-1 + CRC32; optionally feed compressor or write raw.
224
+ * 4. Flush compressor (if any) and write trailing data descriptor containing sizes + CRC.
225
+ * 5. Return populated entry metadata for later central directory + JSON metadata.
226
+ */
227
+ function writeFileEntry(relativePath) {
228
+ const env_2 = { stack: [], error: void 0, hasError: false };
229
+ try {
230
+ const fullPath = path.join(baseDir, relativePath);
231
+ /**
232
+ * Read file in large fixed-size buffer; invoke callback for each filled chunk.
233
+ */
234
+ const readInputInChunks = (onChunk) => {
235
+ const env_3 = { stack: [], error: void 0, hasError: false };
236
+ try {
237
+ const inputDisposable = __addDisposableResource(env_3, (0, fs_1.getDisposableFileHandle)(fullPath, 'r'), false);
238
+ let bytesInInputBuffer = 0;
239
+ // The entire input buffer will be drained in each loop iteration
240
+ // So run until EOF
241
+ while (!isNaN(inputDisposable.fd)) {
242
+ bytesInInputBuffer = fs.readSync(inputDisposable.fd, inputBuffer, 0, inputBuffer.byteLength, -1);
243
+ if (bytesInInputBuffer <= 0) {
244
+ // EOF, close the input fd
245
+ inputDisposable[fs_1.DISPOSE_SYMBOL]();
246
+ }
247
+ onChunk(bytesInInputBuffer);
248
+ }
249
+ }
250
+ catch (e_3) {
251
+ env_3.error = e_3;
252
+ env_3.hasError = true;
253
+ }
254
+ finally {
255
+ __disposeResources(env_3);
256
+ }
257
+ };
258
+ let shouldCompress = false;
259
+ if (compression === 'deflate' || compression === 'zstd') {
260
+ shouldCompress = true;
261
+ }
262
+ else if (compression === 'auto') {
263
+ // Heuristic: skip compression for small files or likely-already-compressed files
264
+ if (!isLikelyAlreadyCompressed(relativePath)) {
265
+ shouldCompress = true;
266
+ }
267
+ else {
268
+ terminal.writeVerboseLine(`Skip compression heuristically (already-compressed) for ${relativePath} (size unknown at this point)`);
269
+ }
270
+ }
271
+ const compressionMethod = shouldCompress
272
+ ? zipSyncCompressionOptions[compression]
273
+ : zipSyncCompressionOptions.store;
274
+ const entry = {
275
+ filename: relativePath,
276
+ size: 0,
277
+ compressedSize: 0,
278
+ crc32: 0,
279
+ sha1Hash: '',
280
+ localHeaderOffset: currentOffset,
281
+ compressionMethod,
282
+ dosDateTime: dosDateTimeNow
283
+ };
284
+ writeChunksToZip((0, zipUtils_1.writeLocalFileHeader)(entry));
285
+ const sha1HashBuilder = crypto.createHash('sha1');
286
+ let crc32 = 0;
287
+ let uncompressedSize = 0;
288
+ let compressedSize = 0;
289
+ /**
290
+ * Compressor instance (deflate or zstd) created only if needed.
291
+ */
292
+ const incrementalZlib = __addDisposableResource(env_2, shouldCompress
293
+ ? (0, compress_1.createIncrementalZlib)(outputBuffer, (chunk, lengthBytes) => {
294
+ writeChunkToZip(chunk, lengthBytes);
295
+ compressedSize += lengthBytes;
296
+ }, zlibPackModes[compressionMethod])
297
+ : undefined, false);
298
+ // Read input file in chunks, update hashes, and either compress or write raw.
299
+ readInputInChunks((bytesInInputBuffer) => {
300
+ const slice = inputBuffer.subarray(0, bytesInInputBuffer);
301
+ sha1HashBuilder.update(slice);
302
+ crc32 = (0, crc32_1.crc32Builder)(slice, crc32);
303
+ if (incrementalZlib) {
304
+ incrementalZlib.update(slice);
305
+ }
306
+ else {
307
+ writeChunkToZip(slice, bytesInInputBuffer);
308
+ }
309
+ uncompressedSize += bytesInInputBuffer;
310
+ });
311
+ // finalize hashes, compression
312
+ incrementalZlib === null || incrementalZlib === void 0 ? void 0 : incrementalZlib.update(Buffer.alloc(0));
313
+ crc32 = crc32 >>> 0;
314
+ const sha1Hash = sha1HashBuilder.digest('hex');
315
+ if (!shouldCompress) {
316
+ compressedSize = uncompressedSize;
317
+ }
318
+ entry.size = uncompressedSize;
319
+ entry.compressedSize = compressedSize;
320
+ entry.crc32 = crc32;
321
+ entry.sha1Hash = sha1Hash;
322
+ // Trailing data descriptor now that final CRC/sizes are known.
323
+ writeChunkToZip((0, zipUtils_1.writeDataDescriptor)(entry));
324
+ terminal.writeVerboseLine(`${relativePath} (sha1=${entry.sha1Hash}, crc32=${entry.crc32.toString(16)}, size=${entry.size}, compressed=${entry.compressedSize}, method=${entry.compressionMethod}, compressed ${(100 -
325
+ (entry.compressedSize / entry.size) * 100).toFixed(1)}%)`);
326
+ return entry;
327
+ }
328
+ catch (e_2) {
329
+ env_2.error = e_2;
330
+ env_2.hasError = true;
331
+ }
332
+ finally {
333
+ __disposeResources(env_2);
334
+ }
335
+ }
336
+ const entries = [];
337
+ // Emit all file entries in enumeration order.
338
+ for (const relativePath of filePaths) {
339
+ entries.push(writeFileEntry(relativePath));
340
+ }
341
+ (0, perf_1.markEnd)('pack.prepareEntries');
342
+ terminal.writeLine(`Prepared ${entries.length} file entries`);
343
+ (0, perf_1.markStart)('pack.metadata.build');
344
+ const metadata = { version: zipSyncUtils_1.METADATA_VERSION, files: {} };
345
+ // Build metadata map used for selective unpack (size + SHA‑1 per file).
346
+ for (const entry of entries) {
347
+ metadata.files[entry.filename] = { size: entry.size, sha1Hash: entry.sha1Hash };
348
+ }
349
+ const metadataContent = JSON.stringify(metadata);
350
+ const metadataBuffer = Buffer.from(metadataContent, 'utf8');
351
+ terminal.writeDebugLine(`Metadata size=${metadataBuffer.length} bytes, fileCount=${Object.keys(metadata.files).length}`);
352
+ let metadataCompressionMethod = zipSyncCompressionOptions.store;
353
+ let metadataData = metadataBuffer;
354
+ let metadataCompressedSize = metadataBuffer.length;
355
+ // Compress metadata (deflate) iff user allowed compression and it helps (>64 bytes & smaller result).
356
+ if (compression !== 'store' && metadataBuffer.length > 64) {
357
+ const compressed = zlib.deflateRawSync(metadataBuffer, { level: 9 });
358
+ if (compressed.length < metadataBuffer.length) {
359
+ metadataCompressionMethod = zipSyncCompressionOptions.deflate;
360
+ metadataData = compressed;
361
+ metadataCompressedSize = compressed.length;
362
+ terminal.writeDebugLine(`Metadata compressed (orig=${metadataBuffer.length}, compressed=${compressed.length})`);
363
+ }
364
+ else {
365
+ terminal.writeDebugLine('Metadata compression skipped (not smaller)');
366
+ }
367
+ }
368
+ const metadataEntry = {
369
+ filename: zipSyncUtils_1.METADATA_FILENAME,
370
+ size: metadataBuffer.length,
371
+ compressedSize: metadataCompressedSize,
372
+ crc32: (0, crc32_1.crc32Builder)(metadataBuffer),
373
+ sha1Hash: (0, hash_1.calculateSHA1)(metadataBuffer),
374
+ localHeaderOffset: currentOffset,
375
+ compressionMethod: metadataCompressionMethod,
376
+ dosDateTime: dosDateTimeNow
377
+ };
378
+ writeChunksToZip((0, zipUtils_1.writeLocalFileHeader)(metadataEntry));
379
+ writeChunkToZip(metadataData, metadataCompressedSize);
380
+ writeChunkToZip((0, zipUtils_1.writeDataDescriptor)(metadataEntry));
381
+ entries.push(metadataEntry);
382
+ terminal.writeVerboseLine(`Total entries including metadata: ${entries.length}`);
383
+ (0, perf_1.markEnd)('pack.metadata.build');
384
+ (0, perf_1.markStart)('pack.write.entries');
385
+ const outputDir = path.dirname(archivePath);
386
+ fs.mkdirSync(outputDir, { recursive: true });
387
+ (0, perf_1.markEnd)('pack.write.entries');
388
+ (0, perf_1.markStart)('pack.write.centralDirectory');
389
+ const centralDirOffset = currentOffset;
390
+ // Emit central directory records.
391
+ for (const entry of entries) {
392
+ writeChunksToZip((0, zipUtils_1.writeCentralDirectoryHeader)(entry));
393
+ }
394
+ const centralDirSize = currentOffset - centralDirOffset;
395
+ (0, perf_1.markEnd)('pack.write.centralDirectory');
396
+ // Write end of central directory
397
+ (0, perf_1.markStart)('pack.write.eocd');
398
+ writeChunkToZip((0, zipUtils_1.writeEndOfCentralDirectory)(centralDirOffset, centralDirSize, entries.length));
399
+ terminal.writeDebugLine('EOCD record written');
400
+ (0, perf_1.markEnd)('pack.write.eocd');
401
+ (0, perf_1.markEnd)('pack.total');
402
+ const total = (0, perf_1.getDuration)('pack.total');
403
+ (0, perf_1.emitSummary)('pack', terminal);
404
+ terminal.writeLine(`Successfully packed ${entries.length} files in ${(0, perf_1.formatDuration)(total)}`);
405
+ return { filesPacked: entries.length, metadata };
406
+ }
407
+ catch (e_1) {
408
+ env_1.error = e_1;
409
+ env_1.hasError = true;
410
+ }
411
+ finally {
412
+ __disposeResources(env_1);
413
+ }
414
+ }
415
+ //# sourceMappingURL=pack.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"pack.js","sourceRoot":"","sources":["../src/pack.ts"],"names":[],"mappings":";AAAA,4FAA4F;AAC5F,2DAA2D;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0H3D,oBA6SC;AAraD,4CAA8B;AAC9B,gDAAkC;AAClC,oDAAsC;AACtC,gDAAkC;AAIlC,mCAAuC;AACvC,6BAA2F;AAC3F,yCAAoG;AACpG,iCAAsF;AACtF,yCAWoB;AACpB,iCAAuC;AACvC,iDAOwB;AAExB;;;;GAIG;AACH,MAAM,iCAAiC,GACrC,iHAAiH,CAAC;AAEpH;;GAEG;AACH,SAAS,yBAAyB,CAAC,QAAgB;IACjD,OAAO,iCAAiC,CAAC,IAAI,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAC;AACxE,CAAC;AAED;;GAEG;AACH,MAAM,aAAa,GAAsE;IACvF,CAAC,2BAAgB,CAAC,EAAE,eAAe;IACnC,CAAC,8BAAmB,CAAC,EAAE,SAAS;IAChC,CAAC,4BAAiB,CAAC,EAAE,SAAS;CACtB,CAAC;AAEX;;GAEG;AACH,MAAM,yBAAyB,GAA+D;IAC5F,KAAK,EAAE,4BAAiB;IACxB,OAAO,EAAE,8BAAmB;IAC5B,IAAI,EAAE,2BAAgB;IACtB,IAAI,EAAE,8BAAmB;CACjB,CAAC;AA2CX;;;;;;;;;;;GAWG;AACH,SAAgB,IAAI,CAAC,EACnB,WAAW,EACX,iBAAiB,EAAE,oBAAoB,EACvC,OAAO,EAAE,UAAU,EACnB,WAAW,EACX,QAAQ,EACR,WAAW,GAAG,MAAM,CAAC,eAAe,CAAC,gCAAiB,CAAC,EACvD,YAAY,GAAG,MAAM,CAAC,eAAe,CAAC,gCAAiB,CAAC,EACpC;;;QACpB,MAAM,OAAO,GAAW,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;QACjD,MAAM,iBAAiB,GAAa,oBAAoB,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC,CAAC;QAC/F,QAAQ,CAAC,SAAS,CAAC,cAAc,WAAW,SAAS,oBAAoB,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;QAExF,IAAA,gBAAS,EAAC,YAAY,CAAC,CAAC;QACxB,QAAQ,CAAC,cAAc,CAAC,eAAe,CAAC,CAAC;QACzC,+DAA+D;QAC/D,IAAA,gBAAS,EAAC,gBAAgB,CAAC,CAAC;QAE5B,MAAM,SAAS,GAAa,EAAE,CAAC;QAC/B,MAAM,KAAK,GAAoB,iBAAiB,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,CAAC,EAAE,GAAG,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;QAEnF,OAAO,KAAK,CAAC,MAAM,EAAE,CAAC;YACpB,MAAM,EAAE,GAAG,EAAE,UAAU,EAAE,KAAK,EAAE,GAAG,KAAK,CAAC,KAAK,EAAG,CAAC;YAClD,QAAQ,CAAC,cAAc,CAAC,0BAA0B,UAAU,EAAE,CAAC,CAAC;YAEhE,MAAM,OAAO,GAAW,KAAK,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAE9D,IAAI,KAAkB,CAAC;YACvB,IAAI,CAAC;gBACH,KAAK,GAAG,EAAE,CAAC,WAAW,CAAC,UAAU,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;YAC9D,CAAC;YAAC,OAAO,CAAC,EA
AE,CAAC;gBACX,IACE,CAAC;oBACD,CAAE,CAA2B,CAAC,IAAI,KAAK,QAAQ,IAAK,CAA2B,CAAC,IAAI,KAAK,SAAS,CAAC,EACnG,CAAC;oBACD,QAAQ,CAAC,gBAAgB,CAAC,6BAA6B,UAAU,aAAa,CAAC,CAAC;oBAChF,SAAS;gBACX,CAAC;qBAAM,CAAC;oBACN,MAAM,CAAC,CAAC;gBACV,CAAC;YACH,CAAC;YAED,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;gBACzB,MAAM,QAAQ,GAAW,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;gBAC1D,IAAI,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;oBAClB,MAAM,YAAY,GAAW,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;oBAClF,QAAQ,CAAC,gBAAgB,CAAC,GAAG,OAAO,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;oBACpD,SAAS,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBAC/B,CAAC;qBAAM,IAAI,IAAI,CAAC,WAAW,EAAE,EAAE,CAAC;oBAC9B,QAAQ,CAAC,gBAAgB,CAAC,GAAG,OAAO,GAAG,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC;oBACrD,KAAK,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,QAAQ,EAAE,KAAK,EAAE,KAAK,GAAG,CAAC,EAAE,CAAC,CAAC;gBAClD,CAAC;qBAAM,CAAC;oBACN,MAAM,IAAI,KAAK,CAAC,4CAA4C,QAAQ,aAAa,CAAC,CAAC;gBACrF,CAAC;YACH,CAAC;QACH,CAAC;QAED,QAAQ,CAAC,SAAS,CAAC,SAAS,SAAS,CAAC,MAAM,6BAA6B,CAAC,CAAC;QAC3E,IAAA,cAAO,EAAC,gBAAgB,CAAC,CAAC;QAE1B,4GAA4G;QAC5G,IAAA,gBAAS,EAAC,qBAAqB,CAAC,CAAC;QAEjC,QAAQ,CAAC,cAAc,CAAC,8BAA8B,WAAW,EAAE,CAAC,CAAC;QACrE,MAAM,OAAO,kCAA0B,IAAA,4BAAuB,EAAC,WAAW,EAAE,GAAG,CAAC,QAAA,CAAC;QACjF,IAAI,aAAa,GAAW,CAAC,CAAC;QAC9B;;WAEG;QACH,SAAS,eAAe,CAAC,KAAiB,EAAE,cAAsB,KAAK,CAAC,UAAU;YAChF,IAAI,MAAM,GAAW,CAAC,CAAC;YACvB,OAAO,WAAW,GAAG,CAAC,IAAI,MAAM,GAAG,KAAK,CAAC,UAAU,EAAE,CAAC;gBACpD,6FAA6F;gBAC7F,8FAA8F;gBAC9F,oBAAoB;gBACpB,MAAM,OAAO,GAAW,EAAE,CAAC,SAAS,CAAC,OAAO,CAAC,EAAE,EAAE,KAAK,EAAE,MAAM,EAAE,WAAW,CAAC,CAAC;gBAC7E,WAAW,IAAI,OAAO,CAAC;gBACvB,MAAM,IAAI,OAAO,CAAC;YACpB,CAAC;YACD,aAAa,IAAI,MAAM,CAAC;QAC1B,CAAC;QACD,qEAAqE;QACrE,SAAS,gBAAgB,CAAC,MAAoB;YAC5C,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;gBAC3B,eAAe,CAAC,KAAK,CAAC,CAAC;YACzB,CAAC;QACH,CAAC;QAED,MAAM,cAAc,GAAmC,IAAA,sBAAW,EAAC,IAAI,IAAI,EAAE,CAAC,CAAC;QAC/E;;;;;;;;WAQG;QACH,SAAS,cAAc,CAAC,YAAoB;;;gBAC1C,MAAM,QAAQ,GAAW,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;gBAE1D;;mBAEG;gBACH,MAAM,iBAAiB,GAA4D,CA
CjF,OAA6C,EACvC,EAAE;;;wBACR,MAAM,eAAe,kCAA0B,IAAA,4BAAuB,EAAC,QAAQ,EAAE,GAAG,CAAC,QAAA,CAAC;wBAEtF,IAAI,kBAAkB,GAAW,CAAC,CAAC;wBACnC,iEAAiE;wBACjE,mBAAmB;wBACnB,OAAO,CAAC,KAAK,CAAC,eAAe,CAAC,EAAE,CAAC,EAAE,CAAC;4BAClC,kBAAkB,GAAG,EAAE,CAAC,QAAQ,CAAC,eAAe,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,EAAE,WAAW,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC,CAAC;4BAEjG,IAAI,kBAAkB,IAAI,CAAC,EAAE,CAAC;gCAC5B,0BAA0B;gCAC1B,eAAe,CAAC,mBAAc,CAAC,EAAE,CAAC;4BACpC,CAAC;4BAED,OAAO,CAAC,kBAAkB,CAAC,CAAC;wBAC9B,CAAC;;;;;;;;;iBACF,CAAC;gBAEF,IAAI,cAAc,GAAY,KAAK,CAAC;gBACpC,IAAI,WAAW,KAAK,SAAS,IAAI,WAAW,KAAK,MAAM,EAAE,CAAC;oBACxD,cAAc,GAAG,IAAI,CAAC;gBACxB,CAAC;qBAAM,IAAI,WAAW,KAAK,MAAM,EAAE,CAAC;oBAClC,iFAAiF;oBACjF,IAAI,CAAC,yBAAyB,CAAC,YAAY,CAAC,EAAE,CAAC;wBAC7C,cAAc,GAAG,IAAI,CAAC;oBACxB,CAAC;yBAAM,CAAC;wBACN,QAAQ,CAAC,gBAAgB,CACvB,2DAA2D,YAAY,+BAA+B,CACvG,CAAC;oBACJ,CAAC;gBACH,CAAC;gBAED,MAAM,iBAAiB,GAA6B,cAAc;oBAChE,CAAC,CAAC,yBAAyB,CAAC,WAAW,CAAC;oBACxC,CAAC,CAAC,yBAAyB,CAAC,KAAK,CAAC;gBAEpC,MAAM,KAAK,GAAe;oBACxB,QAAQ,EAAE,YAAY;oBACtB,IAAI,EAAE,CAAC;oBACP,cAAc,EAAE,CAAC;oBACjB,KAAK,EAAE,CAAC;oBACR,QAAQ,EAAE,EAAE;oBACZ,iBAAiB,EAAE,aAAa;oBAChC,iBAAiB;oBACjB,WAAW,EAAE,cAAc;iBAC5B,CAAC;gBAEF,gBAAgB,CAAC,IAAA,+BAAoB,EAAC,KAAK,CAAC,CAAC,CAAC;gBAE9C,MAAM,eAAe,GAAgB,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;gBAC/D,IAAI,KAAK,GAAW,CAAC,CAAC;gBACtB,IAAI,gBAAgB,GAAW,CAAC,CAAC;gBACjC,IAAI,cAAc,GAAW,CAAC,CAAC;gBAE/B;;mBAEG;gBACH,MAAM,eAAe,kCAAiC,cAAc;oBAClE,CAAC,CAAC,IAAA,gCAAqB,EACnB,YAAY,EACZ,CAAC,KAAK,EAAE,WAAW,EAAE,EAAE;wBACrB,eAAe,CAAC,KAAK,EAAE,WAAW,CAAC,CAAC;wBACpC,cAAc,IAAI,WAAW,CAAC;oBAChC,CAAC,EACD,aAAa,CAAC,iBAAiB,CAAE,CAClC;oBACH,CAAC,CAAC,SAAS,QAAA,CAAC;gBAEd,8EAA8E;gBAC9E,iBAAiB,CAAC,CAAC,kBAA0B,EAAE,EAAE;oBAC/C,MAAM,KAAK,GAAW,WAAW,CAAC,QAAQ,CAAC,CAAC,EAAE,kBAAkB,CAAC,CAAC;oBAClE,eAAe,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;oBAC9B,KAAK,GAAG,IAAA,oBAAY,EAAC,KAAK,EAAE,KAAK,CAAC,CAAC;oBACnC,IAAI,eAAe,EAAE,CAAC;wBACpB,eAAe,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;oBAChC,CAAC;yBAAM,CAAC;wBACN,eAAe,CAAC,KAAK,EAAE,kBAAkB,CAAC,CAAC;oBAC7C
,CAAC;oBACD,gBAAgB,IAAI,kBAAkB,CAAC;gBACzC,CAAC,CAAC,CAAC;gBAEH,+BAA+B;gBAC/B,eAAe,aAAf,eAAe,uBAAf,eAAe,CAAE,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;gBACzC,KAAK,GAAG,KAAK,KAAK,CAAC,CAAC;gBACpB,MAAM,QAAQ,GAAW,eAAe,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;gBAEvD,IAAI,CAAC,cAAc,EAAE,CAAC;oBACpB,cAAc,GAAG,gBAAgB,CAAC;gBACpC,CAAC;gBAED,KAAK,CAAC,IAAI,GAAG,gBAAgB,CAAC;gBAC9B,KAAK,CAAC,cAAc,GAAG,cAAc,CAAC;gBACtC,KAAK,CAAC,KAAK,GAAG,KAAK,CAAC;gBACpB,KAAK,CAAC,QAAQ,GAAG,QAAQ,CAAC;gBAE1B,+DAA+D;gBAC/D,eAAe,CAAC,IAAA,8BAAmB,EAAC,KAAK,CAAC,CAAC,CAAC;gBAE5C,QAAQ,CAAC,gBAAgB,CACvB,GAAG,YAAY,UAAU,KAAK,CAAC,QAAQ,WAAW,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,UACxE,KAAK,CAAC,IACR,gBAAgB,KAAK,CAAC,cAAc,YAAY,KAAK,CAAC,iBAAiB,gBAAgB,CACrF,GAAG;oBACH,CAAC,KAAK,CAAC,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,GAAG,GAAG,CAC1C,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CACjB,CAAC;gBACF,OAAO,KAAK,CAAC;;;;;;;;;SACd;QAED,MAAM,OAAO,GAAiB,EAAE,CAAC;QACjC,8CAA8C;QAC9C,KAAK,MAAM,YAAY,IAAI,SAAS,EAAE,CAAC;YACrC,OAAO,CAAC,IAAI,CAAC,cAAc,CAAC,YAAY,CAAC,CAAC,CAAC;QAC7C,CAAC;QAED,IAAA,cAAO,EAAC,qBAAqB,CAAC,CAAC;QAC/B,QAAQ,CAAC,SAAS,CAAC,YAAY,OAAO,CAAC,MAAM,eAAe,CAAC,CAAC;QAE9D,IAAA,gBAAS,EAAC,qBAAqB,CAAC,CAAC;QACjC,MAAM,QAAQ,GAAc,EAAE,OAAO,EAAE,+BAAgB,EAAE,KAAK,EAAE,EAAE,EAAE,CAAC;QACrE,wEAAwE;QACxE,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE,CAAC;YAC5B,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,EAAE,IAAI,EAAE,KAAK,CAAC,IAAI,EAAE,QAAQ,EAAE,KAAK,CAAC,QAAQ,EAAE,CAAC;QAClF,CAAC;QAED,MAAM,eAAe,GAAW,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;QACzD,MAAM,cAAc,GAAW,MAAM,CAAC,IAAI,CAAC,eAAe,EAAE,MAAM,CAAC,CAAC;QACpE,QAAQ,CAAC,cAAc,CACrB,iBAAiB,cAAc,CAAC,MAAM,qBAAqB,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,MAAM,EAAE,CAChG,CAAC;QAEF,IAAI,yBAAyB,GAA6B,yBAAyB,CAAC,KAAK,CAAC;QAC1F,IAAI,YAAY,GAAW,cAAc,CAAC;QAC1C,IAAI,sBAAsB,GAAW,cAAc,CAAC,MAAM,CAAC;QAC3D,sGAAsG;QACtG,IAAI,WAAW,KAAK,OAAO,IAAI,cAAc,CAAC,MAAM,GAAG,EAAE,EAAE,CAAC;YAC1D,MAAM,UAAU,GAAW,IAAI,CAAC,cAAc,CAAC,cAAc,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC;YAC7E,IAAI,UAAU,CAAC,MAAM,GAAG,cAAc,CAAC,MAAM,EAAE,C
AAC;gBAC9C,yBAAyB,GAAG,yBAAyB,CAAC,OAAO,CAAC;gBAC9D,YAAY,GAAG,UAAU,CAAC;gBAC1B,sBAAsB,GAAG,UAAU,CAAC,MAAM,CAAC;gBAC3C,QAAQ,CAAC,cAAc,CACrB,6BAA6B,cAAc,CAAC,MAAM,gBAAgB,UAAU,CAAC,MAAM,GAAG,CACvF,CAAC;YACJ,CAAC;iBAAM,CAAC;gBACN,QAAQ,CAAC,cAAc,CAAC,4CAA4C,CAAC,CAAC;YACxE,CAAC;QACH,CAAC;QAED,MAAM,aAAa,GAAe;YAChC,QAAQ,EAAE,gCAAiB;YAC3B,IAAI,EAAE,cAAc,CAAC,MAAM;YAC3B,cAAc,EAAE,sBAAsB;YACtC,KAAK,EAAE,IAAA,oBAAY,EAAC,cAAc,CAAC;YACnC,QAAQ,EAAE,IAAA,oBAAa,EAAC,cAAc,CAAC;YACvC,iBAAiB,EAAE,aAAa;YAChC,iBAAiB,EAAE,yBAAyB;YAC5C,WAAW,EAAE,cAAc;SAC5B,CAAC;QAEF,gBAAgB,CAAC,IAAA,+BAAoB,EAAC,aAAa,CAAC,CAAC,CAAC;QACtD,eAAe,CAAC,YAAY,EAAE,sBAAsB,CAAC,CAAC;QACtD,eAAe,CAAC,IAAA,8BAAmB,EAAC,aAAa,CAAC,CAAC,CAAC;QAEpD,OAAO,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;QAC5B,QAAQ,CAAC,gBAAgB,CAAC,qCAAqC,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC;QAEjF,IAAA,cAAO,EAAC,qBAAqB,CAAC,CAAC;QAE/B,IAAA,gBAAS,EAAC,oBAAoB,CAAC,CAAC;QAChC,MAAM,SAAS,GAAW,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;QACpD,EAAE,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAE7C,IAAA,cAAO,EAAC,oBAAoB,CAAC,CAAC;QAE9B,IAAA,gBAAS,EAAC,6BAA6B,CAAC,CAAC;QACzC,MAAM,gBAAgB,GAAW,aAAa,CAAC;QAC/C,kCAAkC;QAClC,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE,CAAC;YAC5B,gBAAgB,CAAC,IAAA,sCAA2B,EAAC,KAAK,CAAC,CAAC,CAAC;QACvD,CAAC;QACD,MAAM,cAAc,GAAW,aAAa,GAAG,gBAAgB,CAAC;QAChE,IAAA,cAAO,EAAC,6BAA6B,CAAC,CAAC;QAEvC,iCAAiC;QACjC,IAAA,gBAAS,EAAC,iBAAiB,CAAC,CAAC;QAC7B,eAAe,CAAC,IAAA,qCAA0B,EAAC,gBAAgB,EAAE,cAAc,EAAE,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC;QAC9F,QAAQ,CAAC,cAAc,CAAC,qBAAqB,CAAC,CAAC;QAC/C,IAAA,cAAO,EAAC,iBAAiB,CAAC,CAAC;QAE3B,IAAA,cAAO,EAAC,YAAY,CAAC,CAAC;QACtB,MAAM,KAAK,GAAW,IAAA,kBAAW,EAAC,YAAY,CAAC,CAAC;QAChD,IAAA,kBAAW,EAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;QAC9B,QAAQ,CAAC,SAAS,CAAC,uBAAuB,OAAO,CAAC,MAAM,aAAa,IAAA,qBAAc,EAAC,KAAK,CAAC,EAAE,CAAC,CAAC;QAC9F,OAAO,EAAE,WAAW,EAAE,OAAO,CAAC,MAAM,EAAE,QAAQ,EAAE,CAAC;;;;;;;;;CAClD","sourcesContent":["// Copyright (c) Microsoft Corporation. All rights reserved. 
Licensed under the MIT license.\n// See LICENSE in the project root for license information.\n\nimport * as fs from 'node:fs';\nimport * as path from 'node:path';\nimport * as crypto from 'node:crypto';\nimport * as zlib from 'node:zlib';\n\nimport type { ITerminal } from '@rushstack/terminal/lib/ITerminal';\n\nimport { crc32Builder } from './crc32';\nimport { DISPOSE_SYMBOL, getDisposableFileHandle, type IDisposableFileHandle } from './fs';\nimport { type IIncrementalZlib, type IncrementalZlibMode, createIncrementalZlib } from './compress';\nimport { markStart, markEnd, getDuration, emitSummary, formatDuration } from './perf';\nimport {\n writeLocalFileHeader,\n writeDataDescriptor,\n writeCentralDirectoryHeader,\n writeEndOfCentralDirectory,\n ZSTD_COMPRESSION,\n DEFLATE_COMPRESSION,\n STORE_COMPRESSION,\n type ZipMetaCompressionMethod,\n type IFileEntry,\n dosDateTime\n} from './zipUtils';\nimport { calculateSHA1 } from './hash';\nimport {\n type ZipSyncOptionCompression,\n type IMetadata,\n type IDirQueueItem,\n METADATA_VERSION,\n METADATA_FILENAME,\n defaultBufferSize\n} from './zipSyncUtils';\n\n/**\n * File extensions for which additional DEFLATE/ZSTD compression is unlikely to help.\n * Used by the 'auto' compression heuristic to avoid wasting CPU on data that is already\n * compressed (images, media, existing archives, fonts, etc.).\n */\nconst LIKELY_COMPRESSED_EXTENSION_REGEX: RegExp =\n /\\.(?:zip|gz|tgz|bz2|xz|7z|rar|jpg|jpeg|png|gif|webp|avif|mp4|m4v|mov|mkv|webm|mp3|ogg|aac|flac|pdf|woff|woff2)$/;\n\n/**\n * Basic heuristic: skip re-compressing file types that are already compressed.\n */\nfunction isLikelyAlreadyCompressed(filename: string): boolean {\n return LIKELY_COMPRESSED_EXTENSION_REGEX.test(filename.toLowerCase());\n}\n\n/**\n * Map zip compression method code -> incremental zlib mode label\n */\nconst zlibPackModes: Record<ZipMetaCompressionMethod, IncrementalZlibMode | undefined> = {\n [ZSTD_COMPRESSION]: 'zstd-compress',\n 
[DEFLATE_COMPRESSION]: 'deflate',\n [STORE_COMPRESSION]: undefined\n} as const;\n\n/**\n * Public facing CLI option -> actual zip method used for a file we decide to compress.\n */\nconst zipSyncCompressionOptions: Record<ZipSyncOptionCompression, ZipMetaCompressionMethod> = {\n store: STORE_COMPRESSION,\n deflate: DEFLATE_COMPRESSION,\n zstd: ZSTD_COMPRESSION,\n auto: DEFLATE_COMPRESSION\n} as const;\n\n/**\n * @public\n * Options for zipsync\n */\nexport interface IZipSyncPackOptions {\n /**\n * \\@rushstack/terminal compatible terminal for logging\n */\n terminal: ITerminal;\n /**\n * Zip file path\n */\n archivePath: string;\n /**\n * Target directories to pack (relative to baseDir)\n */\n targetDirectories: ReadonlyArray<string>;\n /**\n * Base directory for relative paths within the archive (defaults to common parent of targetDirectories)\n */\n baseDir: string;\n /**\n * Compression mode. If set to 'deflate', file data will be compressed using raw DEFLATE (method 8) when this\n * produces a smaller result; otherwise it will fall back to 'store' per-file.\n */\n compression: ZipSyncOptionCompression;\n /**\n * Optional buffer that can be provided to avoid internal allocations.\n */\n inputBuffer?: Buffer<ArrayBuffer>;\n /**\n * Optional buffer that can be provided to avoid internal allocations.\n */\n outputBuffer?: Buffer<ArrayBuffer>;\n}\n\nexport interface IZipSyncPackResult {\n filesPacked: number;\n metadata: IMetadata;\n}\n\n/**\n * Create a zipsync archive by enumerating target directories, then streaming each file into the\n * output zip using the local file header + (optional compressed data) + data descriptor pattern.\n *\n * Performance characteristics:\n * - Single pass per file (no read-then-compress-then-write buffering). 
CRC32 + SHA-1 are computed\n * while streaming so the metadata JSON can later be used for selective unpack.\n * - Data descriptor usage (bit 3) allows writing headers before we know sizes or CRC32.\n * - A single timestamp (captured once) is applied to all entries for determinism.\n * - Metadata entry is added as a normal zip entry at the end (before central directory) so legacy\n * tools can still list/extract it, while zipsync can quickly parse file hashes.\n */\nexport function pack({\n archivePath,\n targetDirectories: rawTargetDirectories,\n baseDir: rawBaseDir,\n compression,\n terminal,\n inputBuffer = Buffer.allocUnsafeSlow(defaultBufferSize),\n outputBuffer = Buffer.allocUnsafeSlow(defaultBufferSize)\n}: IZipSyncPackOptions): IZipSyncPackResult {\n const baseDir: string = path.resolve(rawBaseDir);\n const targetDirectories: string[] = rawTargetDirectories.map((dir) => path.join(baseDir, dir));\n terminal.writeLine(`Packing to ${archivePath} from ${rawTargetDirectories.join(', ')}`);\n\n markStart('pack.total');\n terminal.writeDebugLine('Starting pack');\n // Pass 1: enumerate files with a queue to avoid deep recursion\n markStart('pack.enumerate');\n\n const filePaths: string[] = [];\n const queue: IDirQueueItem[] = targetDirectories.map((dir) => ({ dir, depth: 0 }));\n\n while (queue.length) {\n const { dir: currentDir, depth } = queue.shift()!;\n terminal.writeDebugLine(`Enumerating directory: ${currentDir}`);\n\n const padding: string = depth === 0 ? '' : '-↳'.repeat(depth);\n\n let items: fs.Dirent[];\n try {\n items = fs.readdirSync(currentDir, { withFileTypes: true });\n } catch (e) {\n if (\n e &&\n ((e as NodeJS.ErrnoException).code === 'ENOENT' || (e as NodeJS.ErrnoException).code === 'ENOTDIR')\n ) {\n terminal.writeWarningLine(`Failed to read directory: ${currentDir}. 
Ignoring.`);\n continue;\n } else {\n throw e;\n }\n }\n\n for (const item of items) {\n const fullPath: string = path.join(currentDir, item.name);\n if (item.isFile()) {\n const relativePath: string = path.relative(baseDir, fullPath).replace(/\\\\/g, '/');\n terminal.writeVerboseLine(`${padding}${item.name}`);\n filePaths.push(relativePath);\n } else if (item.isDirectory()) {\n terminal.writeVerboseLine(`${padding}${item.name}/`);\n queue.push({ dir: fullPath, depth: depth + 1 });\n } else {\n throw new Error(`Unexpected item (not file or directory): ${fullPath}. Aborting.`);\n }\n }\n }\n\n terminal.writeLine(`Found ${filePaths.length} files to pack (enumerated)`);\n markEnd('pack.enumerate');\n\n // Pass 2: stream each file: read chunks -> hash + (maybe) compress -> write local header + data descriptor.\n markStart('pack.prepareEntries');\n\n terminal.writeDebugLine(`Opening archive for write: ${archivePath}`);\n using zipFile: IDisposableFileHandle = getDisposableFileHandle(archivePath, 'w');\n let currentOffset: number = 0;\n /**\n * Write a raw chunk to the archive file descriptor, updating current offset.\n */\n function writeChunkToZip(chunk: Uint8Array, lengthBytes: number = chunk.byteLength): void {\n let offset: number = 0;\n while (lengthBytes > 0 && offset < chunk.byteLength) {\n // In practice this call always writes all data at once, but the spec says it is not an error\n // for it to not do so. Possibly that situation comes up when writing to something that is not\n // an ordinary file.\n const written: number = fs.writeSync(zipFile.fd, chunk, offset, lengthBytes);\n lengthBytes -= written;\n offset += written;\n }\n currentOffset += offset;\n }\n /** Convenience wrapper for writing multiple buffers sequentially. 
*/\n function writeChunksToZip(chunks: Uint8Array[]): void {\n for (const chunk of chunks) {\n writeChunkToZip(chunk);\n }\n }\n\n const dosDateTimeNow: { time: number; date: number } = dosDateTime(new Date());\n /**\n * Stream a single file into the archive.\n * Steps:\n * 1. Decide compression (based on user choice + heuristic).\n * 2. Emit local file header (sizes/CRC zeroed because we use a data descriptor).\n * 3. Read file in 32 MiB chunks: update SHA-1 + CRC32; optionally feed compressor or write raw.\n * 4. Flush compressor (if any) and write trailing data descriptor containing sizes + CRC.\n * 5. Return populated entry metadata for later central directory + JSON metadata.\n */\n function writeFileEntry(relativePath: string): IFileEntry {\n const fullPath: string = path.join(baseDir, relativePath);\n\n /**\n * Read file in large fixed-size buffer; invoke callback for each filled chunk.\n */\n const readInputInChunks: (onChunk: (bytesInInputBuffer: number) => void) => void = (\n onChunk: (bytesInInputBuffer: number) => void\n ): void => {\n using inputDisposable: IDisposableFileHandle = getDisposableFileHandle(fullPath, 'r');\n\n let bytesInInputBuffer: number = 0;\n // The entire input buffer will be drained in each loop iteration\n // So run until EOF\n while (!isNaN(inputDisposable.fd)) {\n bytesInInputBuffer = fs.readSync(inputDisposable.fd, inputBuffer, 0, inputBuffer.byteLength, -1);\n\n if (bytesInInputBuffer <= 0) {\n // EOF, close the input fd\n inputDisposable[DISPOSE_SYMBOL]();\n }\n\n onChunk(bytesInInputBuffer);\n }\n };\n\n let shouldCompress: boolean = false;\n if (compression === 'deflate' || compression === 'zstd') {\n shouldCompress = true;\n } else if (compression === 'auto') {\n // Heuristic: skip compression for small files or likely-already-compressed files\n if (!isLikelyAlreadyCompressed(relativePath)) {\n shouldCompress = true;\n } else {\n terminal.writeVerboseLine(\n `Skip compression heuristically (already-compressed) for 
${relativePath} (size unknown at this point)`\n );\n }\n }\n\n const compressionMethod: ZipMetaCompressionMethod = shouldCompress\n ? zipSyncCompressionOptions[compression]\n : zipSyncCompressionOptions.store;\n\n const entry: IFileEntry = {\n filename: relativePath,\n size: 0,\n compressedSize: 0,\n crc32: 0,\n sha1Hash: '',\n localHeaderOffset: currentOffset,\n compressionMethod,\n dosDateTime: dosDateTimeNow\n };\n\n writeChunksToZip(writeLocalFileHeader(entry));\n\n const sha1HashBuilder: crypto.Hash = crypto.createHash('sha1');\n let crc32: number = 0;\n let uncompressedSize: number = 0;\n let compressedSize: number = 0;\n\n /**\n * Compressor instance (deflate or zstd) created only if needed.\n */\n using incrementalZlib: IIncrementalZlib | undefined = shouldCompress\n ? createIncrementalZlib(\n outputBuffer,\n (chunk, lengthBytes) => {\n writeChunkToZip(chunk, lengthBytes);\n compressedSize += lengthBytes;\n },\n zlibPackModes[compressionMethod]!\n )\n : undefined;\n\n // Read input file in chunks, update hashes, and either compress or write raw.\n readInputInChunks((bytesInInputBuffer: number) => {\n const slice: Buffer = inputBuffer.subarray(0, bytesInInputBuffer);\n sha1HashBuilder.update(slice);\n crc32 = crc32Builder(slice, crc32);\n if (incrementalZlib) {\n incrementalZlib.update(slice);\n } else {\n writeChunkToZip(slice, bytesInInputBuffer);\n }\n uncompressedSize += bytesInInputBuffer;\n });\n\n // finalize hashes, compression\n incrementalZlib?.update(Buffer.alloc(0));\n crc32 = crc32 >>> 0;\n const sha1Hash: string = sha1HashBuilder.digest('hex');\n\n if (!shouldCompress) {\n compressedSize = uncompressedSize;\n }\n\n entry.size = uncompressedSize;\n entry.compressedSize = compressedSize;\n entry.crc32 = crc32;\n entry.sha1Hash = sha1Hash;\n\n // Trailing data descriptor now that final CRC/sizes are known.\n writeChunkToZip(writeDataDescriptor(entry));\n\n terminal.writeVerboseLine(\n `${relativePath} (sha1=${entry.sha1Hash}, 
crc32=${entry.crc32.toString(16)}, size=${\n entry.size\n }, compressed=${entry.compressedSize}, method=${entry.compressionMethod}, compressed ${(\n 100 -\n (entry.compressedSize / entry.size) * 100\n ).toFixed(1)}%)`\n );\n return entry;\n }\n\n const entries: IFileEntry[] = [];\n // Emit all file entries in enumeration order.\n for (const relativePath of filePaths) {\n entries.push(writeFileEntry(relativePath));\n }\n\n markEnd('pack.prepareEntries');\n terminal.writeLine(`Prepared ${entries.length} file entries`);\n\n markStart('pack.metadata.build');\n const metadata: IMetadata = { version: METADATA_VERSION, files: {} };\n // Build metadata map used for selective unpack (size + SHA‑1 per file).\n for (const entry of entries) {\n metadata.files[entry.filename] = { size: entry.size, sha1Hash: entry.sha1Hash };\n }\n\n const metadataContent: string = JSON.stringify(metadata);\n const metadataBuffer: Buffer = Buffer.from(metadataContent, 'utf8');\n terminal.writeDebugLine(\n `Metadata size=${metadataBuffer.length} bytes, fileCount=${Object.keys(metadata.files).length}`\n );\n\n let metadataCompressionMethod: ZipMetaCompressionMethod = zipSyncCompressionOptions.store;\n let metadataData: Buffer = metadataBuffer;\n let metadataCompressedSize: number = metadataBuffer.length;\n // Compress metadata (deflate) iff user allowed compression and it helps (>64 bytes & smaller result).\n if (compression !== 'store' && metadataBuffer.length > 64) {\n const compressed: Buffer = zlib.deflateRawSync(metadataBuffer, { level: 9 });\n if (compressed.length < metadataBuffer.length) {\n metadataCompressionMethod = zipSyncCompressionOptions.deflate;\n metadataData = compressed;\n metadataCompressedSize = compressed.length;\n terminal.writeDebugLine(\n `Metadata compressed (orig=${metadataBuffer.length}, compressed=${compressed.length})`\n );\n } else {\n terminal.writeDebugLine('Metadata compression skipped (not smaller)');\n }\n }\n\n const metadataEntry: IFileEntry = {\n filename: 
METADATA_FILENAME,\n size: metadataBuffer.length,\n compressedSize: metadataCompressedSize,\n crc32: crc32Builder(metadataBuffer),\n sha1Hash: calculateSHA1(metadataBuffer),\n localHeaderOffset: currentOffset,\n compressionMethod: metadataCompressionMethod,\n dosDateTime: dosDateTimeNow\n };\n\n writeChunksToZip(writeLocalFileHeader(metadataEntry));\n writeChunkToZip(metadataData, metadataCompressedSize);\n writeChunkToZip(writeDataDescriptor(metadataEntry));\n\n entries.push(metadataEntry);\n terminal.writeVerboseLine(`Total entries including metadata: ${entries.length}`);\n\n markEnd('pack.metadata.build');\n\n markStart('pack.write.entries');\n const outputDir: string = path.dirname(archivePath);\n fs.mkdirSync(outputDir, { recursive: true });\n\n markEnd('pack.write.entries');\n\n markStart('pack.write.centralDirectory');\n const centralDirOffset: number = currentOffset;\n // Emit central directory records.\n for (const entry of entries) {\n writeChunksToZip(writeCentralDirectoryHeader(entry));\n }\n const centralDirSize: number = currentOffset - centralDirOffset;\n markEnd('pack.write.centralDirectory');\n\n // Write end of central directory\n markStart('pack.write.eocd');\n writeChunkToZip(writeEndOfCentralDirectory(centralDirOffset, centralDirSize, entries.length));\n terminal.writeDebugLine('EOCD record written');\n markEnd('pack.write.eocd');\n\n markEnd('pack.total');\n const total: number = getDuration('pack.total');\n emitSummary('pack', terminal);\n terminal.writeLine(`Successfully packed ${entries.length} files in ${formatDuration(total)}`);\n return { filesPacked: entries.length, metadata };\n}\n"]}
@@ -0,0 +1,31 @@
1
import { type IZipSyncPackOptions, type IZipSyncPackResult } from './pack';
export { type IZipSyncPackOptions, type IZipSyncPackResult } from './pack';
/**
 * Startup data for a hash worker.
 * NOTE(review): not referenced by the visible worker code — confirm where this is consumed.
 */
export interface IHashWorkerData {
    basePath: string;
}
/**
 * Host-to-worker command requesting a pack operation.
 * The `terminal` option is omitted because the worker supplies its own
 * string-buffer terminal and returns the captured output in the result.
 */
export interface IZipSyncPackCommandMessage {
    /** Discriminator for the message union. */
    type: 'zipsync-pack';
    /** Correlation id; echoed back in the success or error reply. */
    id: number;
    /** Pack options, minus the terminal (provided by the worker). */
    options: Omit<IZipSyncPackOptions, 'terminal'>;
}
/**
 * Payload of a successful pack reply.
 */
export interface IZipSyncPackWorkerResult {
    /** The value returned by `pack()`. */
    zipSyncReturn: IZipSyncPackResult;
    /** All terminal output captured while `pack()` ran. */
    zipSyncLogs: string;
}
/**
 * Worker-to-host reply for a completed `zipsync-pack` command.
 */
interface IZipSyncSuccessMessage {
    /** Correlation id copied from the originating command. */
    id: number;
    /** Matches the command's discriminator. */
    type: 'zipsync-pack';
    result: IZipSyncPackWorkerResult;
}
/**
 * Worker-to-host reply sent when a command throws.
 */
export interface IZipSyncPackErrorMessage {
    type: 'error';
    /** Correlation id copied from the originating command. */
    id: number;
    args: {
        /** The thrown error's message. */
        message: string;
        /** The thrown error's stack, or '' when unavailable. */
        stack: string;
        /** Terminal output captured up to the point of failure. */
        zipSyncLogs: string;
    };
}
/** All messages the host may send to the worker. */
export type IHostToWorkerMessage = IZipSyncPackCommandMessage;
/** All messages the worker may send back to the host. */
export type IWorkerToHostMessage = IZipSyncSuccessMessage | IZipSyncPackErrorMessage;
31
+ //# sourceMappingURL=packWorker.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"packWorker.d.ts","sourceRoot":"","sources":["../src/packWorker.ts"],"names":[],"mappings":"AAQA,OAAO,EAAE,KAAK,mBAAmB,EAAE,KAAK,kBAAkB,EAAQ,MAAM,QAAQ,CAAC;AAGjF,OAAO,EAAE,KAAK,mBAAmB,EAAE,KAAK,kBAAkB,EAAE,MAAM,QAAQ,CAAC;AAE3E,MAAM,WAAW,eAAe;IAC9B,QAAQ,EAAE,MAAM,CAAC;CAClB;AAED,MAAM,WAAW,0BAA0B;IACzC,IAAI,EAAE,cAAc,CAAC;IACrB,EAAE,EAAE,MAAM,CAAC;IACX,OAAO,EAAE,IAAI,CAAC,mBAAmB,EAAE,UAAU,CAAC,CAAC;CAChD;AAED,MAAM,WAAW,wBAAwB;IACvC,aAAa,EAAE,kBAAkB,CAAC;IAClC,WAAW,EAAE,MAAM,CAAC;CACrB;AAED,UAAU,sBAAsB;IAC9B,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,cAAc,CAAC;IACrB,MAAM,EAAE,wBAAwB,CAAC;CAClC;AAED,MAAM,WAAW,wBAAwB;IACvC,IAAI,EAAE,OAAO,CAAC;IACd,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE;QACJ,OAAO,EAAE,MAAM,CAAC;QAChB,KAAK,EAAE,MAAM,CAAC;QACd,WAAW,EAAE,MAAM,CAAC;KACrB,CAAC;CACH;AAED,MAAM,MAAM,oBAAoB,GAAG,0BAA0B,CAAC;AAC9D,MAAM,MAAM,oBAAoB,GAAG,sBAAsB,GAAG,wBAAwB,CAAC"}
@@ -0,0 +1,60 @@
1
+ "use strict";
2
+ // Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
3
+ // See LICENSE in the project root for license information.
4
+ Object.defineProperty(exports, "__esModule", { value: true });
5
+ const node_worker_threads_1 = require("node:worker_threads");
6
+ const Terminal_1 = require("@rushstack/terminal/lib/Terminal");
7
+ const StringBufferTerminalProvider_1 = require("@rushstack/terminal/lib/StringBufferTerminalProvider");
8
+ const pack_1 = require("./pack");
9
+ const zipSyncUtils_1 = require("./zipSyncUtils");
10
// This module is only meaningful inside a worker thread; on the main thread
// `parentPort` is null, so fail fast with a clear error.
if (!node_worker_threads_1.parentPort) {
    throw new Error('This module must be run in a worker thread.');
}
// Non-null alias so the rest of the module can use `parentPort` directly.
const parentPort = node_worker_threads_1.parentPort;
// Scratch buffers for pack(); allocated lazily on the first command and
// reused for subsequent commands to avoid repeated large allocations.
let inputBuffer = undefined;
let outputBuffer = undefined;
16
/**
 * Dispatch a single message from the host.
 *
 * A literal `false` is the shutdown sentinel: the port is detached and closed.
 * A 'zipsync-pack' command runs pack() synchronously and posts back either a
 * success message (result + captured logs) or, if anything throws, an 'error'
 * message carrying the error details and the logs captured so far.
 */
function handleMessage(message) {
    // Shutdown sentinel: tear down the message port and stop.
    if (message === false) {
        parentPort.removeAllListeners();
        parentPort.close();
        return;
    }
    // Capture all terminal output in memory so it can be returned to the host.
    const logProvider = new StringBufferTerminalProvider_1.StringBufferTerminalProvider();
    const workerTerminal = new Terminal_1.Terminal(logProvider);
    try {
        if (message.type === 'zipsync-pack') {
            const { options } = message;
            // Allocate the reusable scratch buffers on first use only.
            inputBuffer = inputBuffer || Buffer.allocUnsafeSlow(zipSyncUtils_1.defaultBufferSize);
            outputBuffer = outputBuffer || Buffer.allocUnsafeSlow(zipSyncUtils_1.defaultBufferSize);
            const reply = {
                type: message.type,
                id: message.id,
                result: {
                    zipSyncReturn: (0, pack_1.pack)({ ...options, terminal: workerTerminal, inputBuffer, outputBuffer }),
                    zipSyncLogs: logProvider.getOutput()
                }
            };
            return parentPort.postMessage(reply);
        }
    }
    catch (err) {
        // Report the failure to the host along with whatever was logged.
        const failure = {
            type: 'error',
            id: message.id,
            args: {
                message: err.message,
                stack: err.stack || '',
                zipSyncLogs: logProvider.getOutput()
            }
        };
        parentPort.postMessage(failure);
    }
}
59
+ parentPort.on('message', handleMessage);
60
+ //# sourceMappingURL=packWorker.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"packWorker.js","sourceRoot":"","sources":["../src/packWorker.ts"],"names":[],"mappings":";AAAA,4FAA4F;AAC5F,2DAA2D;;AAE3D,6DAAoF;AAEpF,+DAA4D;AAC5D,uGAAoG;AAEpG,iCAAiF;AACjF,iDAAmD;AAsCnD,IAAI,CAAC,gCAAa,EAAE,CAAC;IACnB,MAAM,IAAI,KAAK,CAAC,6CAA6C,CAAC,CAAC;AACjE,CAAC;AACD,MAAM,UAAU,GAAgB,gCAAa,CAAC;AAE9C,IAAI,WAAW,GAAoC,SAAS,CAAC;AAC7D,IAAI,YAAY,GAAoC,SAAS,CAAC;AAE9D,SAAS,aAAa,CAAC,OAAqC;IAC1D,IAAI,OAAO,KAAK,KAAK,EAAE,CAAC;QACtB,UAAU,CAAC,kBAAkB,EAAE,CAAC;QAChC,UAAU,CAAC,KAAK,EAAE,CAAC;QACnB,OAAO;IACT,CAAC;IAED,MAAM,gBAAgB,GAAiC,IAAI,2DAA4B,EAAE,CAAC;IAC1F,MAAM,QAAQ,GAAa,IAAI,mBAAQ,CAAC,gBAAgB,CAAC,CAAC;IAE1D,IAAI,CAAC;QACH,QAAQ,OAAO,CAAC,IAAI,EAAE,CAAC;YACrB,KAAK,cAAc,CAAC,CAAC,CAAC;gBACpB,MAAM,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC;gBAC5B,IAAI,CAAC,WAAW,EAAE,CAAC;oBACjB,WAAW,GAAG,MAAM,CAAC,eAAe,CAAC,gCAAiB,CAAC,CAAC;gBAC1D,CAAC;gBACD,IAAI,CAAC,YAAY,EAAE,CAAC;oBAClB,YAAY,GAAG,MAAM,CAAC,eAAe,CAAC,gCAAiB,CAAC,CAAC;gBAC3D,CAAC;gBAED,MAAM,cAAc,GAA2B;oBAC7C,IAAI,EAAE,OAAO,CAAC,IAAI;oBAClB,EAAE,EAAE,OAAO,CAAC,EAAE;oBACd,MAAM,EAAE;wBACN,aAAa,EAAE,IAAA,WAAI,EAAC,EAAE,GAAG,OAAO,EAAE,QAAQ,EAAE,WAAW,EAAE,YAAY,EAAE,CAAC;wBACxE,WAAW,EAAE,gBAAgB,CAAC,SAAS,EAAE;qBAC1C;iBACF,CAAC;gBACF,OAAO,UAAU,CAAC,WAAW,CAAC,cAAc,CAAC,CAAC;YAChD,CAAC;QACH,CAAC;IACH,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACb,MAAM,YAAY,GAA6B;YAC7C,IAAI,EAAE,OAAO;YACb,EAAE,EAAE,OAAO,CAAC,EAAE;YACd,IAAI,EAAE;gBACJ,OAAO,EAAG,GAAa,CAAC,OAAO;gBAC/B,KAAK,EAAG,GAAa,CAAC,KAAK,IAAI,EAAE;gBACjC,WAAW,EAAE,gBAAgB,CAAC,SAAS,EAAE;aAC1C;SACF,CAAC;QACF,UAAU,CAAC,WAAW,CAAC,YAAY,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAED,UAAU,CAAC,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation. All rights reserved. 
Licensed under the MIT license.\n// See LICENSE in the project root for license information.\n\nimport { parentPort as rawParentPort, type MessagePort } from 'node:worker_threads';\n\nimport { Terminal } from '@rushstack/terminal/lib/Terminal';\nimport { StringBufferTerminalProvider } from '@rushstack/terminal/lib/StringBufferTerminalProvider';\n\nimport { type IZipSyncPackOptions, type IZipSyncPackResult, pack } from './pack';\nimport { defaultBufferSize } from './zipSyncUtils';\n\nexport { type IZipSyncPackOptions, type IZipSyncPackResult } from './pack';\n\nexport interface IHashWorkerData {\n basePath: string;\n}\n\nexport interface IZipSyncPackCommandMessage {\n type: 'zipsync-pack';\n id: number;\n options: Omit<IZipSyncPackOptions, 'terminal'>;\n}\n\nexport interface IZipSyncPackWorkerResult {\n zipSyncReturn: IZipSyncPackResult;\n zipSyncLogs: string;\n}\n\ninterface IZipSyncSuccessMessage {\n id: number;\n type: 'zipsync-pack';\n result: IZipSyncPackWorkerResult;\n}\n\nexport interface IZipSyncPackErrorMessage {\n type: 'error';\n id: number;\n args: {\n message: string;\n stack: string;\n zipSyncLogs: string;\n };\n}\n\nexport type IHostToWorkerMessage = IZipSyncPackCommandMessage;\nexport type IWorkerToHostMessage = IZipSyncSuccessMessage | IZipSyncPackErrorMessage;\n\nif (!rawParentPort) {\n throw new Error('This module must be run in a worker thread.');\n}\nconst parentPort: MessagePort = rawParentPort;\n\nlet inputBuffer: Buffer<ArrayBuffer> | undefined = undefined;\nlet outputBuffer: Buffer<ArrayBuffer> | undefined = undefined;\n\nfunction handleMessage(message: IHostToWorkerMessage | false): void {\n if (message === false) {\n parentPort.removeAllListeners();\n parentPort.close();\n return;\n }\n\n const terminalProvider: StringBufferTerminalProvider = new StringBufferTerminalProvider();\n const terminal: Terminal = new Terminal(terminalProvider);\n\n try {\n switch (message.type) {\n case 'zipsync-pack': {\n const { options } = message;\n if 
(!inputBuffer) {\n inputBuffer = Buffer.allocUnsafeSlow(defaultBufferSize);\n }\n if (!outputBuffer) {\n outputBuffer = Buffer.allocUnsafeSlow(defaultBufferSize);\n }\n\n const successMessage: IZipSyncSuccessMessage = {\n type: message.type,\n id: message.id,\n result: {\n zipSyncReturn: pack({ ...options, terminal, inputBuffer, outputBuffer }),\n zipSyncLogs: terminalProvider.getOutput()\n }\n };\n return parentPort.postMessage(successMessage);\n }\n }\n } catch (err) {\n const errorMessage: IZipSyncPackErrorMessage = {\n type: 'error',\n id: message.id,\n args: {\n message: (err as Error).message,\n stack: (err as Error).stack || '',\n zipSyncLogs: terminalProvider.getOutput()\n }\n };\n parentPort.postMessage(errorMessage);\n }\n}\n\nparentPort.on('message', handleMessage);\n"]}
@@ -0,0 +1,4 @@
1
import type { IZipSyncPackWorkerResult, IZipSyncPackOptions } from './packWorker';
export type { IZipSyncPackWorkerResult } from './packWorker';
/**
 * Run a zipsync pack operation with the given options (the `terminal` option is
 * excluded; log output is captured and returned as a string in the result).
 * Resolves with the pack result plus the captured logs.
 * NOTE(review): the name suggests execution on a worker thread — confirm against
 * the packWorkerAsync implementation, which is not visible here.
 */
export declare function packWorkerAsync(options: Omit<IZipSyncPackOptions, 'terminal'>): Promise<IZipSyncPackWorkerResult>;
4
+ //# sourceMappingURL=packWorkerAsync.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"packWorkerAsync.d.ts","sourceRoot":"","sources":["../src/packWorkerAsync.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,EAGV,wBAAwB,EACxB,mBAAmB,EACpB,MAAM,cAAc,CAAC;AAEtB,YAAY,EAAE,wBAAwB,EAAE,MAAM,cAAc,CAAC;AAE7D,wBAAsB,eAAe,CACnC,OAAO,EAAE,IAAI,CAAC,mBAAmB,EAAE,UAAU,CAAC,GAC7C,OAAO,CAAC,wBAAwB,CAAC,CA4CnC"}