@rushstack/zipsync 0.2.14 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (100)
  1. package/CHANGELOG.json +26 -0
  2. package/CHANGELOG.md +8 -1
  3. package/bin/zipsync +1 -1
  4. package/lib-esm/cli/ZipSyncCommandLineParser.js +93 -0
  5. package/lib-esm/cli/ZipSyncCommandLineParser.js.map +1 -0
  6. package/lib-esm/compress.js +115 -0
  7. package/lib-esm/compress.js.map +1 -0
  8. package/lib-esm/crc32.js +32 -0
  9. package/lib-esm/crc32.js.map +1 -0
  10. package/lib-esm/fs.js +48 -0
  11. package/lib-esm/fs.js.map +1 -0
  12. package/lib-esm/hash.js +39 -0
  13. package/lib-esm/hash.js.map +1 -0
  14. package/lib-esm/index.js +5 -0
  15. package/lib-esm/index.js.map +1 -0
  16. package/lib-esm/pack.js +379 -0
  17. package/lib-esm/pack.js.map +1 -0
  18. package/lib-esm/packWorker.js +58 -0
  19. package/lib-esm/packWorker.js.map +1 -0
  20. package/lib-esm/packWorkerAsync.js +43 -0
  21. package/lib-esm/packWorkerAsync.js.map +1 -0
  22. package/lib-esm/perf.js +49 -0
  23. package/lib-esm/perf.js.map +1 -0
  24. package/lib-esm/start.js +17 -0
  25. package/lib-esm/start.js.map +1 -0
  26. package/lib-esm/unpack.js +334 -0
  27. package/lib-esm/unpack.js.map +1 -0
  28. package/lib-esm/unpackWorker.js +54 -0
  29. package/lib-esm/unpackWorker.js.map +1 -0
  30. package/lib-esm/unpackWorkerAsync.js +43 -0
  31. package/lib-esm/unpackWorkerAsync.js.map +1 -0
  32. package/lib-esm/zipSyncUtils.js +6 -0
  33. package/lib-esm/zipSyncUtils.js.map +1 -0
  34. package/lib-esm/zipUtils.js +292 -0
  35. package/lib-esm/zipUtils.js.map +1 -0
  36. package/package.json +33 -6
  37. /package/{lib → lib-commonjs}/cli/ZipSyncCommandLineParser.js +0 -0
  38. /package/{lib → lib-commonjs}/cli/ZipSyncCommandLineParser.js.map +0 -0
  39. /package/{lib → lib-commonjs}/compress.js +0 -0
  40. /package/{lib → lib-commonjs}/compress.js.map +0 -0
  41. /package/{lib → lib-commonjs}/crc32.js +0 -0
  42. /package/{lib → lib-commonjs}/crc32.js.map +0 -0
  43. /package/{lib → lib-commonjs}/fs.js +0 -0
  44. /package/{lib → lib-commonjs}/fs.js.map +0 -0
  45. /package/{lib → lib-commonjs}/hash.js +0 -0
  46. /package/{lib → lib-commonjs}/hash.js.map +0 -0
  47. /package/{lib → lib-commonjs}/index.js +0 -0
  48. /package/{lib → lib-commonjs}/index.js.map +0 -0
  49. /package/{lib → lib-commonjs}/pack.js +0 -0
  50. /package/{lib → lib-commonjs}/pack.js.map +0 -0
  51. /package/{lib → lib-commonjs}/packWorker.js +0 -0
  52. /package/{lib → lib-commonjs}/packWorker.js.map +0 -0
  53. /package/{lib → lib-commonjs}/packWorkerAsync.js +0 -0
  54. /package/{lib → lib-commonjs}/packWorkerAsync.js.map +0 -0
  55. /package/{lib → lib-commonjs}/perf.js +0 -0
  56. /package/{lib → lib-commonjs}/perf.js.map +0 -0
  57. /package/{lib → lib-commonjs}/start.js +0 -0
  58. /package/{lib → lib-commonjs}/start.js.map +0 -0
  59. /package/{lib → lib-commonjs}/unpack.js +0 -0
  60. /package/{lib → lib-commonjs}/unpack.js.map +0 -0
  61. /package/{lib → lib-commonjs}/unpackWorker.js +0 -0
  62. /package/{lib → lib-commonjs}/unpackWorker.js.map +0 -0
  63. /package/{lib → lib-commonjs}/unpackWorkerAsync.js +0 -0
  64. /package/{lib → lib-commonjs}/unpackWorkerAsync.js.map +0 -0
  65. /package/{lib → lib-commonjs}/zipSyncUtils.js +0 -0
  66. /package/{lib → lib-commonjs}/zipSyncUtils.js.map +0 -0
  67. /package/{lib → lib-commonjs}/zipUtils.js +0 -0
  68. /package/{lib → lib-commonjs}/zipUtils.js.map +0 -0
  69. /package/{lib → lib-dts}/cli/ZipSyncCommandLineParser.d.ts +0 -0
  70. /package/{lib → lib-dts}/cli/ZipSyncCommandLineParser.d.ts.map +0 -0
  71. /package/{lib → lib-dts}/compress.d.ts +0 -0
  72. /package/{lib → lib-dts}/compress.d.ts.map +0 -0
  73. /package/{lib → lib-dts}/crc32.d.ts +0 -0
  74. /package/{lib → lib-dts}/crc32.d.ts.map +0 -0
  75. /package/{lib → lib-dts}/fs.d.ts +0 -0
  76. /package/{lib → lib-dts}/fs.d.ts.map +0 -0
  77. /package/{lib → lib-dts}/hash.d.ts +0 -0
  78. /package/{lib → lib-dts}/hash.d.ts.map +0 -0
  79. /package/{lib → lib-dts}/index.d.ts +0 -0
  80. /package/{lib → lib-dts}/index.d.ts.map +0 -0
  81. /package/{lib → lib-dts}/pack.d.ts +0 -0
  82. /package/{lib → lib-dts}/pack.d.ts.map +0 -0
  83. /package/{lib → lib-dts}/packWorker.d.ts +0 -0
  84. /package/{lib → lib-dts}/packWorker.d.ts.map +0 -0
  85. /package/{lib → lib-dts}/packWorkerAsync.d.ts +0 -0
  86. /package/{lib → lib-dts}/packWorkerAsync.d.ts.map +0 -0
  87. /package/{lib → lib-dts}/perf.d.ts +0 -0
  88. /package/{lib → lib-dts}/perf.d.ts.map +0 -0
  89. /package/{lib → lib-dts}/start.d.ts +0 -0
  90. /package/{lib → lib-dts}/start.d.ts.map +0 -0
  91. /package/{lib → lib-dts}/unpack.d.ts +0 -0
  92. /package/{lib → lib-dts}/unpack.d.ts.map +0 -0
  93. /package/{lib → lib-dts}/unpackWorker.d.ts +0 -0
  94. /package/{lib → lib-dts}/unpackWorker.d.ts.map +0 -0
  95. /package/{lib → lib-dts}/unpackWorkerAsync.d.ts +0 -0
  96. /package/{lib → lib-dts}/unpackWorkerAsync.d.ts.map +0 -0
  97. /package/{lib → lib-dts}/zipSyncUtils.d.ts +0 -0
  98. /package/{lib → lib-dts}/zipSyncUtils.d.ts.map +0 -0
  99. /package/{lib → lib-dts}/zipUtils.d.ts +0 -0
  100. /package/{lib → lib-dts}/zipUtils.d.ts.map +0 -0
@@ -0,0 +1,379 @@
1
+ // Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
2
+ // See LICENSE in the project root for license information.
3
+ var __addDisposableResource = (this && this.__addDisposableResource) || function (env, value, async) {
4
+ if (value !== null && value !== void 0) {
5
+ if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected.");
6
+ var dispose, inner;
7
+ if (async) {
8
+ if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
9
+ dispose = value[Symbol.asyncDispose];
10
+ }
11
+ if (dispose === void 0) {
12
+ if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
13
+ dispose = value[Symbol.dispose];
14
+ if (async) inner = dispose;
15
+ }
16
+ if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
17
+ if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } };
18
+ env.stack.push({ value: value, dispose: dispose, async: async });
19
+ }
20
+ else if (async) {
21
+ env.stack.push({ async: true });
22
+ }
23
+ return value;
24
+ };
25
+ var __disposeResources = (this && this.__disposeResources) || (function (SuppressedError) {
26
+ return function (env) {
27
+ function fail(e) {
28
+ env.error = env.hasError ? new SuppressedError(e, env.error, "An error was suppressed during disposal.") : e;
29
+ env.hasError = true;
30
+ }
31
+ var r, s = 0;
32
+ function next() {
33
+ while (r = env.stack.pop()) {
34
+ try {
35
+ if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next);
36
+ if (r.dispose) {
37
+ var result = r.dispose.call(r.value);
38
+ if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); });
39
+ }
40
+ else s |= 1;
41
+ }
42
+ catch (e) {
43
+ fail(e);
44
+ }
45
+ }
46
+ if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve();
47
+ if (env.hasError) throw env.error;
48
+ }
49
+ return next();
50
+ };
51
+ })(typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) {
52
+ var e = new Error(message);
53
+ return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
54
+ });
55
+ import * as fs from 'node:fs';
56
+ import * as path from 'node:path';
57
+ import * as crypto from 'node:crypto';
58
+ import * as zlib from 'node:zlib';
59
+ import { crc32Builder } from './crc32';
60
+ import { DISPOSE_SYMBOL, getDisposableFileHandle } from './fs';
61
+ import { createIncrementalZlib } from './compress';
62
+ import { markStart, markEnd, getDuration, emitSummary, formatDuration } from './perf';
63
+ import { writeLocalFileHeader, writeDataDescriptor, writeCentralDirectoryHeader, writeEndOfCentralDirectory, ZSTD_COMPRESSION, DEFLATE_COMPRESSION, STORE_COMPRESSION, dosDateTime } from './zipUtils';
64
+ import { calculateSHA1 } from './hash';
65
+ import { METADATA_VERSION, METADATA_FILENAME, defaultBufferSize } from './zipSyncUtils';
66
+ /**
67
+ * File extensions for which additional DEFLATE/ZSTD compression is unlikely to help.
68
+ * Used by the 'auto' compression heuristic to avoid wasting CPU on data that is already
69
+ * compressed (images, media, existing archives, fonts, etc.).
70
+ */
71
+ const LIKELY_COMPRESSED_EXTENSION_REGEX = /\.(?:zip|gz|tgz|bz2|xz|7z|rar|jpg|jpeg|png|gif|webp|avif|mp4|m4v|mov|mkv|webm|mp3|ogg|aac|flac|pdf|woff|woff2)$/;
72
+ /**
73
+ * Basic heuristic: skip re-compressing file types that are already compressed.
74
+ */
75
+ function isLikelyAlreadyCompressed(filename) {
76
+ return LIKELY_COMPRESSED_EXTENSION_REGEX.test(filename.toLowerCase());
77
+ }
78
+ /**
79
+ * Map zip compression method code -> incremental zlib mode label
80
+ */
81
+ const zlibPackModes = {
82
+ [ZSTD_COMPRESSION]: 'zstd-compress',
83
+ [DEFLATE_COMPRESSION]: 'deflate',
84
+ [STORE_COMPRESSION]: undefined
85
+ };
86
+ /**
87
+ * Public facing CLI option -> actual zip method used for a file we decide to compress.
88
+ */
89
+ const zipSyncCompressionOptions = {
90
+ store: STORE_COMPRESSION,
91
+ deflate: DEFLATE_COMPRESSION,
92
+ zstd: ZSTD_COMPRESSION,
93
+ auto: DEFLATE_COMPRESSION
94
+ };
95
+ /**
96
+ * Create a zipsync archive by enumerating target directories, then streaming each file into the
97
+ * output zip using the local file header + (optional compressed data) + data descriptor pattern.
98
+ *
99
+ * Performance characteristics:
100
+ * - Single pass per file (no read-then-compress-then-write buffering). CRC32 + SHA-1 are computed
101
+ * while streaming so the metadata JSON can later be used for selective unpack.
102
+ * - Data descriptor usage (bit 3) allows writing headers before we know sizes or CRC32.
103
+ * - A single timestamp (captured once) is applied to all entries for determinism.
104
+ * - Metadata entry is added as a normal zip entry at the end (before central directory) so legacy
105
+ * tools can still list/extract it, while zipsync can quickly parse file hashes.
106
+ */
107
+ export function pack({ archivePath, targetDirectories: rawTargetDirectories, baseDir: rawBaseDir, compression, terminal, inputBuffer = Buffer.allocUnsafeSlow(defaultBufferSize), outputBuffer = Buffer.allocUnsafeSlow(defaultBufferSize) }) {
108
+ const env_1 = { stack: [], error: void 0, hasError: false };
109
+ try {
110
+ const baseDir = path.resolve(rawBaseDir);
111
+ const targetDirectories = rawTargetDirectories.map((dir) => path.join(baseDir, dir));
112
+ terminal.writeLine(`Packing to ${archivePath} from ${rawTargetDirectories.join(', ')}`);
113
+ markStart('pack.total');
114
+ terminal.writeDebugLine('Starting pack');
115
+ // Pass 1: enumerate files with a queue to avoid deep recursion
116
+ markStart('pack.enumerate');
117
+ const filePaths = [];
118
+ const queue = targetDirectories.map((dir) => ({ dir, depth: 0 }));
119
+ while (queue.length) {
120
+ const { dir: currentDir, depth } = queue.shift();
121
+ terminal.writeDebugLine(`Enumerating directory: ${currentDir}`);
122
+ const padding = depth === 0 ? '' : '-↳'.repeat(depth);
123
+ let items;
124
+ try {
125
+ items = fs.readdirSync(currentDir, { withFileTypes: true });
126
+ }
127
+ catch (e) {
128
+ if (e &&
129
+ (e.code === 'ENOENT' || e.code === 'ENOTDIR')) {
130
+ terminal.writeWarningLine(`Failed to read directory: ${currentDir}. Ignoring.`);
131
+ continue;
132
+ }
133
+ else {
134
+ throw e;
135
+ }
136
+ }
137
+ for (const item of items) {
138
+ const fullPath = path.join(currentDir, item.name);
139
+ if (item.isFile()) {
140
+ const relativePath = path.relative(baseDir, fullPath).replace(/\\/g, '/');
141
+ terminal.writeVerboseLine(`${padding}${item.name}`);
142
+ filePaths.push(relativePath);
143
+ }
144
+ else if (item.isDirectory()) {
145
+ terminal.writeVerboseLine(`${padding}${item.name}/`);
146
+ queue.push({ dir: fullPath, depth: depth + 1 });
147
+ }
148
+ else {
149
+ throw new Error(`Unexpected item (not file or directory): ${fullPath}. Aborting.`);
150
+ }
151
+ }
152
+ }
153
+ terminal.writeLine(`Found ${filePaths.length} files to pack (enumerated)`);
154
+ markEnd('pack.enumerate');
155
+ // Pass 2: stream each file: read chunks -> hash + (maybe) compress -> write local header + data descriptor.
156
+ markStart('pack.prepareEntries');
157
+ terminal.writeDebugLine(`Opening archive for write: ${archivePath}`);
158
+ const zipFile = __addDisposableResource(env_1, getDisposableFileHandle(archivePath, 'w'), false);
159
+ let currentOffset = 0;
160
+ /**
161
+ * Write a raw chunk to the archive file descriptor, updating current offset.
162
+ */
163
+ function writeChunkToZip(chunk, lengthBytes = chunk.byteLength) {
164
+ let offset = 0;
165
+ while (lengthBytes > 0 && offset < chunk.byteLength) {
166
+ // In practice this call always writes all data at once, but the spec says it is not an error
167
+ // for it to not do so. Possibly that situation comes up when writing to something that is not
168
+ // an ordinary file.
169
+ const written = fs.writeSync(zipFile.fd, chunk, offset, lengthBytes);
170
+ lengthBytes -= written;
171
+ offset += written;
172
+ }
173
+ currentOffset += offset;
174
+ }
175
+ /** Convenience wrapper for writing multiple buffers sequentially. */
176
+ function writeChunksToZip(chunks) {
177
+ for (const chunk of chunks) {
178
+ writeChunkToZip(chunk);
179
+ }
180
+ }
181
+ const dosDateTimeNow = dosDateTime(new Date());
182
+ /**
183
+ * Stream a single file into the archive.
184
+ * Steps:
185
+ * 1. Decide compression (based on user choice + heuristic).
186
+ * 2. Emit local file header (sizes/CRC zeroed because we use a data descriptor).
187
+ * 3. Read file in 32 MiB chunks: update SHA-1 + CRC32; optionally feed compressor or write raw.
188
+ * 4. Flush compressor (if any) and write trailing data descriptor containing sizes + CRC.
189
+ * 5. Return populated entry metadata for later central directory + JSON metadata.
190
+ */
191
+ function writeFileEntry(relativePath) {
192
+ const env_2 = { stack: [], error: void 0, hasError: false };
193
+ try {
194
+ const fullPath = path.join(baseDir, relativePath);
195
+ /**
196
+ * Read file in large fixed-size buffer; invoke callback for each filled chunk.
197
+ */
198
+ const readInputInChunks = (onChunk) => {
199
+ const env_3 = { stack: [], error: void 0, hasError: false };
200
+ try {
201
+ const inputDisposable = __addDisposableResource(env_3, getDisposableFileHandle(fullPath, 'r'), false);
202
+ let bytesInInputBuffer = 0;
203
+ // The entire input buffer will be drained in each loop iteration
204
+ // So run until EOF
205
+ while (!isNaN(inputDisposable.fd)) {
206
+ bytesInInputBuffer = fs.readSync(inputDisposable.fd, inputBuffer, 0, inputBuffer.byteLength, -1);
207
+ if (bytesInInputBuffer <= 0) {
208
+ // EOF, close the input fd
209
+ inputDisposable[DISPOSE_SYMBOL]();
210
+ }
211
+ onChunk(bytesInInputBuffer);
212
+ }
213
+ }
214
+ catch (e_3) {
215
+ env_3.error = e_3;
216
+ env_3.hasError = true;
217
+ }
218
+ finally {
219
+ __disposeResources(env_3);
220
+ }
221
+ };
222
+ let shouldCompress = false;
223
+ if (compression === 'deflate' || compression === 'zstd') {
224
+ shouldCompress = true;
225
+ }
226
+ else if (compression === 'auto') {
227
+ // Heuristic: skip compression for small files or likely-already-compressed files
228
+ if (!isLikelyAlreadyCompressed(relativePath)) {
229
+ shouldCompress = true;
230
+ }
231
+ else {
232
+ terminal.writeVerboseLine(`Skip compression heuristically (already-compressed) for ${relativePath} (size unknown at this point)`);
233
+ }
234
+ }
235
+ const compressionMethod = shouldCompress
236
+ ? zipSyncCompressionOptions[compression]
237
+ : zipSyncCompressionOptions.store;
238
+ const entry = {
239
+ filename: relativePath,
240
+ size: 0,
241
+ compressedSize: 0,
242
+ crc32: 0,
243
+ sha1Hash: '',
244
+ localHeaderOffset: currentOffset,
245
+ compressionMethod,
246
+ dosDateTime: dosDateTimeNow
247
+ };
248
+ writeChunksToZip(writeLocalFileHeader(entry));
249
+ const sha1HashBuilder = crypto.createHash('sha1');
250
+ let crc32 = 0;
251
+ let uncompressedSize = 0;
252
+ let compressedSize = 0;
253
+ /**
254
+ * Compressor instance (deflate or zstd) created only if needed.
255
+ */
256
+ const incrementalZlib = __addDisposableResource(env_2, shouldCompress
257
+ ? createIncrementalZlib(outputBuffer, (chunk, lengthBytes) => {
258
+ writeChunkToZip(chunk, lengthBytes);
259
+ compressedSize += lengthBytes;
260
+ }, zlibPackModes[compressionMethod])
261
+ : undefined, false);
262
+ // Read input file in chunks, update hashes, and either compress or write raw.
263
+ readInputInChunks((bytesInInputBuffer) => {
264
+ const slice = inputBuffer.subarray(0, bytesInInputBuffer);
265
+ sha1HashBuilder.update(slice);
266
+ crc32 = crc32Builder(slice, crc32);
267
+ if (incrementalZlib) {
268
+ incrementalZlib.update(slice);
269
+ }
270
+ else {
271
+ writeChunkToZip(slice, bytesInInputBuffer);
272
+ }
273
+ uncompressedSize += bytesInInputBuffer;
274
+ });
275
+ // finalize hashes, compression
276
+ incrementalZlib === null || incrementalZlib === void 0 ? void 0 : incrementalZlib.update(Buffer.alloc(0));
277
+ crc32 = crc32 >>> 0;
278
+ const sha1Hash = sha1HashBuilder.digest('hex');
279
+ if (!shouldCompress) {
280
+ compressedSize = uncompressedSize;
281
+ }
282
+ entry.size = uncompressedSize;
283
+ entry.compressedSize = compressedSize;
284
+ entry.crc32 = crc32;
285
+ entry.sha1Hash = sha1Hash;
286
+ // Trailing data descriptor now that final CRC/sizes are known.
287
+ writeChunkToZip(writeDataDescriptor(entry));
288
+ terminal.writeVerboseLine(`${relativePath} (sha1=${entry.sha1Hash}, crc32=${entry.crc32.toString(16)}, size=${entry.size}, compressed=${entry.compressedSize}, method=${entry.compressionMethod}, compressed ${(100 -
289
+ (entry.compressedSize / entry.size) * 100).toFixed(1)}%)`);
290
+ return entry;
291
+ }
292
+ catch (e_2) {
293
+ env_2.error = e_2;
294
+ env_2.hasError = true;
295
+ }
296
+ finally {
297
+ __disposeResources(env_2);
298
+ }
299
+ }
300
+ const entries = [];
301
+ // Emit all file entries in enumeration order.
302
+ for (const relativePath of filePaths) {
303
+ entries.push(writeFileEntry(relativePath));
304
+ }
305
+ markEnd('pack.prepareEntries');
306
+ terminal.writeLine(`Prepared ${entries.length} file entries`);
307
+ markStart('pack.metadata.build');
308
+ const metadata = { version: METADATA_VERSION, files: {} };
309
+ // Build metadata map used for selective unpack (size + SHA-1 per file).
310
+ for (const entry of entries) {
311
+ metadata.files[entry.filename] = { size: entry.size, sha1Hash: entry.sha1Hash };
312
+ }
313
+ const metadataContent = JSON.stringify(metadata);
314
+ const metadataBuffer = Buffer.from(metadataContent, 'utf8');
315
+ terminal.writeDebugLine(`Metadata size=${metadataBuffer.length} bytes, fileCount=${Object.keys(metadata.files).length}`);
316
+ let metadataCompressionMethod = zipSyncCompressionOptions.store;
317
+ let metadataData = metadataBuffer;
318
+ let metadataCompressedSize = metadataBuffer.length;
319
+ // Compress metadata (deflate) iff user allowed compression and it helps (>64 bytes & smaller result).
320
+ if (compression !== 'store' && metadataBuffer.length > 64) {
321
+ const compressed = zlib.deflateRawSync(metadataBuffer, { level: 9 });
322
+ if (compressed.length < metadataBuffer.length) {
323
+ metadataCompressionMethod = zipSyncCompressionOptions.deflate;
324
+ metadataData = compressed;
325
+ metadataCompressedSize = compressed.length;
326
+ terminal.writeDebugLine(`Metadata compressed (orig=${metadataBuffer.length}, compressed=${compressed.length})`);
327
+ }
328
+ else {
329
+ terminal.writeDebugLine('Metadata compression skipped (not smaller)');
330
+ }
331
+ }
332
+ const metadataEntry = {
333
+ filename: METADATA_FILENAME,
334
+ size: metadataBuffer.length,
335
+ compressedSize: metadataCompressedSize,
336
+ crc32: crc32Builder(metadataBuffer),
337
+ sha1Hash: calculateSHA1(metadataBuffer),
338
+ localHeaderOffset: currentOffset,
339
+ compressionMethod: metadataCompressionMethod,
340
+ dosDateTime: dosDateTimeNow
341
+ };
342
+ writeChunksToZip(writeLocalFileHeader(metadataEntry));
343
+ writeChunkToZip(metadataData, metadataCompressedSize);
344
+ writeChunkToZip(writeDataDescriptor(metadataEntry));
345
+ entries.push(metadataEntry);
346
+ terminal.writeVerboseLine(`Total entries including metadata: ${entries.length}`);
347
+ markEnd('pack.metadata.build');
348
+ markStart('pack.write.entries');
349
+ const outputDir = path.dirname(archivePath);
350
+ fs.mkdirSync(outputDir, { recursive: true });
351
+ markEnd('pack.write.entries');
352
+ markStart('pack.write.centralDirectory');
353
+ const centralDirOffset = currentOffset;
354
+ // Emit central directory records.
355
+ for (const entry of entries) {
356
+ writeChunksToZip(writeCentralDirectoryHeader(entry));
357
+ }
358
+ const centralDirSize = currentOffset - centralDirOffset;
359
+ markEnd('pack.write.centralDirectory');
360
+ // Write end of central directory
361
+ markStart('pack.write.eocd');
362
+ writeChunkToZip(writeEndOfCentralDirectory(centralDirOffset, centralDirSize, entries.length));
363
+ terminal.writeDebugLine('EOCD record written');
364
+ markEnd('pack.write.eocd');
365
+ markEnd('pack.total');
366
+ const total = getDuration('pack.total');
367
+ emitSummary('pack', terminal);
368
+ terminal.writeLine(`Successfully packed ${entries.length} files in ${formatDuration(total)}`);
369
+ return { filesPacked: entries.length, metadata };
370
+ }
371
+ catch (e_1) {
372
+ env_1.error = e_1;
373
+ env_1.hasError = true;
374
+ }
375
+ finally {
376
+ __disposeResources(env_1);
377
+ }
378
+ }
379
+ //# sourceMappingURL=pack.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"pack.js","sourceRoot":"","sources":["../src/pack.ts"],"names":[],"mappings":"AAAA,4FAA4F;AAC5F,2DAA2D;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAE3D,OAAO,KAAK,EAAE,MAAM,SAAS,CAAC;AAC9B,OAAO,KAAK,IAAI,MAAM,WAAW,CAAC;AAClC,OAAO,KAAK,MAAM,MAAM,aAAa,CAAC;AACtC,OAAO,KAAK,IAAI,MAAM,WAAW,CAAC;AAIlC,OAAO,EAAE,YAAY,EAAE,MAAM,SAAS,CAAC;AACvC,OAAO,EAAE,cAAc,EAAE,uBAAuB,EAA8B,MAAM,MAAM,CAAC;AAC3F,OAAO,EAAmD,qBAAqB,EAAE,MAAM,YAAY,CAAC;AACpG,OAAO,EAAE,SAAS,EAAE,OAAO,EAAE,WAAW,EAAE,WAAW,EAAE,cAAc,EAAE,MAAM,QAAQ,CAAC;AACtF,OAAO,EACL,oBAAoB,EACpB,mBAAmB,EACnB,2BAA2B,EAC3B,0BAA0B,EAC1B,gBAAgB,EAChB,mBAAmB,EACnB,iBAAiB,EAGjB,WAAW,EACZ,MAAM,YAAY,CAAC;AACpB,OAAO,EAAE,aAAa,EAAE,MAAM,QAAQ,CAAC;AACvC,OAAO,EAIL,gBAAgB,EAChB,iBAAiB,EACjB,iBAAiB,EAClB,MAAM,gBAAgB,CAAC;AAExB;;;;GAIG;AACH,MAAM,iCAAiC,GACrC,iHAAiH,CAAC;AAEpH;;GAEG;AACH,SAAS,yBAAyB,CAAC,QAAgB;IACjD,OAAO,iCAAiC,CAAC,IAAI,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAC;AACxE,CAAC;AAED;;GAEG;AACH,MAAM,aAAa,GAAsE;IACvF,CAAC,gBAAgB,CAAC,EAAE,eAAe;IACnC,CAAC,mBAAmB,CAAC,EAAE,SAAS;IAChC,CAAC,iBAAiB,CAAC,EAAE,SAAS;CACtB,CAAC;AAEX;;GAEG;AACH,MAAM,yBAAyB,GAA+D;IAC5F,KAAK,EAAE,iBAAiB;IACxB,OAAO,EAAE,mBAAmB;IAC5B,IAAI,EAAE,gBAAgB;IACtB,IAAI,EAAE,mBAAmB;CACjB,CAAC;AA2CX;;;;;;;;;;;GAWG;AACH,MAAM,UAAU,IAAI,CAAC,EACnB,WAAW,EACX,iBAAiB,EAAE,oBAAoB,EACvC,OAAO,EAAE,UAAU,EACnB,WAAW,EACX,QAAQ,EACR,WAAW,GAAG,MAAM,CAAC,eAAe,CAAC,iBAAiB,CAAC,EACvD,YAAY,GAAG,MAAM,CAAC,eAAe,CAAC,iBAAiB,CAAC,EACpC;;;QACpB,MAAM,OAAO,GAAW,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;QACjD,MAAM,iBAAiB,GAAa,oBAAoB,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC,CAAC;QAC/F,QAAQ,CAAC,SAAS,CAAC,cAAc,WAAW,SAAS,oBAAoB,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;QAExF,SAAS,CAAC,YAAY,CAAC,CAAC;QACxB,QAAQ,CAAC,cAAc,CAAC,eAAe,CAAC,CAAC;QACzC,+DAA+D;QAC/D,SAAS,CAAC,gBAAgB,CAAC,CAAC;QAE5B,MAAM,SAAS,GAAa,EAAE,CAAC;QAC/B,MAAM,KAAK,GAAoB,iBAAiB,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,CAAC,EAAE,GAAG,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;QAEnF,OAA
O,KAAK,CAAC,MAAM,EAAE,CAAC;YACpB,MAAM,EAAE,GAAG,EAAE,UAAU,EAAE,KAAK,EAAE,GAAG,KAAK,CAAC,KAAK,EAAG,CAAC;YAClD,QAAQ,CAAC,cAAc,CAAC,0BAA0B,UAAU,EAAE,CAAC,CAAC;YAEhE,MAAM,OAAO,GAAW,KAAK,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAE9D,IAAI,KAAkB,CAAC;YACvB,IAAI,CAAC;gBACH,KAAK,GAAG,EAAE,CAAC,WAAW,CAAC,UAAU,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;YAC9D,CAAC;YAAC,OAAO,CAAC,EAAE,CAAC;gBACX,IACE,CAAC;oBACD,CAAE,CAA2B,CAAC,IAAI,KAAK,QAAQ,IAAK,CAA2B,CAAC,IAAI,KAAK,SAAS,CAAC,EACnG,CAAC;oBACD,QAAQ,CAAC,gBAAgB,CAAC,6BAA6B,UAAU,aAAa,CAAC,CAAC;oBAChF,SAAS;gBACX,CAAC;qBAAM,CAAC;oBACN,MAAM,CAAC,CAAC;gBACV,CAAC;YACH,CAAC;YAED,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;gBACzB,MAAM,QAAQ,GAAW,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;gBAC1D,IAAI,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;oBAClB,MAAM,YAAY,GAAW,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;oBAClF,QAAQ,CAAC,gBAAgB,CAAC,GAAG,OAAO,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;oBACpD,SAAS,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBAC/B,CAAC;qBAAM,IAAI,IAAI,CAAC,WAAW,EAAE,EAAE,CAAC;oBAC9B,QAAQ,CAAC,gBAAgB,CAAC,GAAG,OAAO,GAAG,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC;oBACrD,KAAK,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,QAAQ,EAAE,KAAK,EAAE,KAAK,GAAG,CAAC,EAAE,CAAC,CAAC;gBAClD,CAAC;qBAAM,CAAC;oBACN,MAAM,IAAI,KAAK,CAAC,4CAA4C,QAAQ,aAAa,CAAC,CAAC;gBACrF,CAAC;YACH,CAAC;QACH,CAAC;QAED,QAAQ,CAAC,SAAS,CAAC,SAAS,SAAS,CAAC,MAAM,6BAA6B,CAAC,CAAC;QAC3E,OAAO,CAAC,gBAAgB,CAAC,CAAC;QAE1B,4GAA4G;QAC5G,SAAS,CAAC,qBAAqB,CAAC,CAAC;QAEjC,QAAQ,CAAC,cAAc,CAAC,8BAA8B,WAAW,EAAE,CAAC,CAAC;QACrE,MAAM,OAAO,kCAA0B,uBAAuB,CAAC,WAAW,EAAE,GAAG,CAAC,QAAA,CAAC;QACjF,IAAI,aAAa,GAAW,CAAC,CAAC;QAC9B;;WAEG;QACH,SAAS,eAAe,CAAC,KAAiB,EAAE,cAAsB,KAAK,CAAC,UAAU;YAChF,IAAI,MAAM,GAAW,CAAC,CAAC;YACvB,OAAO,WAAW,GAAG,CAAC,IAAI,MAAM,GAAG,KAAK,CAAC,UAAU,EAAE,CAAC;gBACpD,6FAA6F;gBAC7F,8FAA8F;gBAC9F,oBAAoB;gBACpB,MAAM,OAAO,GAAW,EAAE,CAAC,SAAS,CAAC,OAAO,CAAC,EAAE,EAAE,KAAK,EAAE,MAAM,EAAE,WAAW,CAAC,CAAC;gBAC7E,WAAW,IAAI,OAAO,CAAC;gBACvB,MAAM,IAAI,OAAO,CAAC;YACp
B,CAAC;YACD,aAAa,IAAI,MAAM,CAAC;QAC1B,CAAC;QACD,qEAAqE;QACrE,SAAS,gBAAgB,CAAC,MAAoB;YAC5C,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;gBAC3B,eAAe,CAAC,KAAK,CAAC,CAAC;YACzB,CAAC;QACH,CAAC;QAED,MAAM,cAAc,GAAmC,WAAW,CAAC,IAAI,IAAI,EAAE,CAAC,CAAC;QAC/E;;;;;;;;WAQG;QACH,SAAS,cAAc,CAAC,YAAoB;;;gBAC1C,MAAM,QAAQ,GAAW,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;gBAE1D;;mBAEG;gBACH,MAAM,iBAAiB,GAA4D,CACjF,OAA6C,EACvC,EAAE;;;wBACR,MAAM,eAAe,kCAA0B,uBAAuB,CAAC,QAAQ,EAAE,GAAG,CAAC,QAAA,CAAC;wBAEtF,IAAI,kBAAkB,GAAW,CAAC,CAAC;wBACnC,iEAAiE;wBACjE,mBAAmB;wBACnB,OAAO,CAAC,KAAK,CAAC,eAAe,CAAC,EAAE,CAAC,EAAE,CAAC;4BAClC,kBAAkB,GAAG,EAAE,CAAC,QAAQ,CAAC,eAAe,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,EAAE,WAAW,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC,CAAC;4BAEjG,IAAI,kBAAkB,IAAI,CAAC,EAAE,CAAC;gCAC5B,0BAA0B;gCAC1B,eAAe,CAAC,cAAc,CAAC,EAAE,CAAC;4BACpC,CAAC;4BAED,OAAO,CAAC,kBAAkB,CAAC,CAAC;wBAC9B,CAAC;;;;;;;;;iBACF,CAAC;gBAEF,IAAI,cAAc,GAAY,KAAK,CAAC;gBACpC,IAAI,WAAW,KAAK,SAAS,IAAI,WAAW,KAAK,MAAM,EAAE,CAAC;oBACxD,cAAc,GAAG,IAAI,CAAC;gBACxB,CAAC;qBAAM,IAAI,WAAW,KAAK,MAAM,EAAE,CAAC;oBAClC,iFAAiF;oBACjF,IAAI,CAAC,yBAAyB,CAAC,YAAY,CAAC,EAAE,CAAC;wBAC7C,cAAc,GAAG,IAAI,CAAC;oBACxB,CAAC;yBAAM,CAAC;wBACN,QAAQ,CAAC,gBAAgB,CACvB,2DAA2D,YAAY,+BAA+B,CACvG,CAAC;oBACJ,CAAC;gBACH,CAAC;gBAED,MAAM,iBAAiB,GAA6B,cAAc;oBAChE,CAAC,CAAC,yBAAyB,CAAC,WAAW,CAAC;oBACxC,CAAC,CAAC,yBAAyB,CAAC,KAAK,CAAC;gBAEpC,MAAM,KAAK,GAAe;oBACxB,QAAQ,EAAE,YAAY;oBACtB,IAAI,EAAE,CAAC;oBACP,cAAc,EAAE,CAAC;oBACjB,KAAK,EAAE,CAAC;oBACR,QAAQ,EAAE,EAAE;oBACZ,iBAAiB,EAAE,aAAa;oBAChC,iBAAiB;oBACjB,WAAW,EAAE,cAAc;iBAC5B,CAAC;gBAEF,gBAAgB,CAAC,oBAAoB,CAAC,KAAK,CAAC,CAAC,CAAC;gBAE9C,MAAM,eAAe,GAAgB,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;gBAC/D,IAAI,KAAK,GAAW,CAAC,CAAC;gBACtB,IAAI,gBAAgB,GAAW,CAAC,CAAC;gBACjC,IAAI,cAAc,GAAW,CAAC,CAAC;gBAE/B;;mBAEG;gBACH,MAAM,eAAe,kCAAiC,cAAc;oBAClE,CAAC,CAAC,qBAAqB,CACnB,YAAY,EACZ,CAAC,KAAK,EAAE,WAAW,EAAE,EAAE;wBACrB,eAAe,CAAC,KAAK,EAAE,WAAW,CAAC,CAAC;wBACpC,cAAc,IAAI,WAAW,CAAC;oBAChC,CAAC,EACD,aAAa,CAAC,iBAAiB,CAAE,CAClC;oBACH,CAAC,CAAC,SAAS,QAA
A,CAAC;gBAEd,8EAA8E;gBAC9E,iBAAiB,CAAC,CAAC,kBAA0B,EAAE,EAAE;oBAC/C,MAAM,KAAK,GAAW,WAAW,CAAC,QAAQ,CAAC,CAAC,EAAE,kBAAkB,CAAC,CAAC;oBAClE,eAAe,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;oBAC9B,KAAK,GAAG,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;oBACnC,IAAI,eAAe,EAAE,CAAC;wBACpB,eAAe,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;oBAChC,CAAC;yBAAM,CAAC;wBACN,eAAe,CAAC,KAAK,EAAE,kBAAkB,CAAC,CAAC;oBAC7C,CAAC;oBACD,gBAAgB,IAAI,kBAAkB,CAAC;gBACzC,CAAC,CAAC,CAAC;gBAEH,+BAA+B;gBAC/B,eAAe,aAAf,eAAe,uBAAf,eAAe,CAAE,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;gBACzC,KAAK,GAAG,KAAK,KAAK,CAAC,CAAC;gBACpB,MAAM,QAAQ,GAAW,eAAe,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;gBAEvD,IAAI,CAAC,cAAc,EAAE,CAAC;oBACpB,cAAc,GAAG,gBAAgB,CAAC;gBACpC,CAAC;gBAED,KAAK,CAAC,IAAI,GAAG,gBAAgB,CAAC;gBAC9B,KAAK,CAAC,cAAc,GAAG,cAAc,CAAC;gBACtC,KAAK,CAAC,KAAK,GAAG,KAAK,CAAC;gBACpB,KAAK,CAAC,QAAQ,GAAG,QAAQ,CAAC;gBAE1B,+DAA+D;gBAC/D,eAAe,CAAC,mBAAmB,CAAC,KAAK,CAAC,CAAC,CAAC;gBAE5C,QAAQ,CAAC,gBAAgB,CACvB,GAAG,YAAY,UAAU,KAAK,CAAC,QAAQ,WAAW,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,UACxE,KAAK,CAAC,IACR,gBAAgB,KAAK,CAAC,cAAc,YAAY,KAAK,CAAC,iBAAiB,gBAAgB,CACrF,GAAG;oBACH,CAAC,KAAK,CAAC,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,GAAG,GAAG,CAC1C,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CACjB,CAAC;gBACF,OAAO,KAAK,CAAC;;;;;;;;;SACd;QAED,MAAM,OAAO,GAAiB,EAAE,CAAC;QACjC,8CAA8C;QAC9C,KAAK,MAAM,YAAY,IAAI,SAAS,EAAE,CAAC;YACrC,OAAO,CAAC,IAAI,CAAC,cAAc,CAAC,YAAY,CAAC,CAAC,CAAC;QAC7C,CAAC;QAED,OAAO,CAAC,qBAAqB,CAAC,CAAC;QAC/B,QAAQ,CAAC,SAAS,CAAC,YAAY,OAAO,CAAC,MAAM,eAAe,CAAC,CAAC;QAE9D,SAAS,CAAC,qBAAqB,CAAC,CAAC;QACjC,MAAM,QAAQ,GAAc,EAAE,OAAO,EAAE,gBAAgB,EAAE,KAAK,EAAE,EAAE,EAAE,CAAC;QACrE,wEAAwE;QACxE,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE,CAAC;YAC5B,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,EAAE,IAAI,EAAE,KAAK,CAAC,IAAI,EAAE,QAAQ,EAAE,KAAK,CAAC,QAAQ,EAAE,CAAC;QAClF,CAAC;QAED,MAAM,eAAe,GAAW,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;QACzD,MAAM,cAAc,GAAW,MAAM,CAAC,IAAI,CAAC,eAAe,EAAE,MAAM,CAAC,CAAC;QACpE,QAAQ,CAAC,cAAc,CACrB,iBAAiB,cAAc,CAAC,MAAM,qBAAqB,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,MAAM,EAA
E,CAChG,CAAC;QAEF,IAAI,yBAAyB,GAA6B,yBAAyB,CAAC,KAAK,CAAC;QAC1F,IAAI,YAAY,GAAW,cAAc,CAAC;QAC1C,IAAI,sBAAsB,GAAW,cAAc,CAAC,MAAM,CAAC;QAC3D,sGAAsG;QACtG,IAAI,WAAW,KAAK,OAAO,IAAI,cAAc,CAAC,MAAM,GAAG,EAAE,EAAE,CAAC;YAC1D,MAAM,UAAU,GAAW,IAAI,CAAC,cAAc,CAAC,cAAc,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC;YAC7E,IAAI,UAAU,CAAC,MAAM,GAAG,cAAc,CAAC,MAAM,EAAE,CAAC;gBAC9C,yBAAyB,GAAG,yBAAyB,CAAC,OAAO,CAAC;gBAC9D,YAAY,GAAG,UAAU,CAAC;gBAC1B,sBAAsB,GAAG,UAAU,CAAC,MAAM,CAAC;gBAC3C,QAAQ,CAAC,cAAc,CACrB,6BAA6B,cAAc,CAAC,MAAM,gBAAgB,UAAU,CAAC,MAAM,GAAG,CACvF,CAAC;YACJ,CAAC;iBAAM,CAAC;gBACN,QAAQ,CAAC,cAAc,CAAC,4CAA4C,CAAC,CAAC;YACxE,CAAC;QACH,CAAC;QAED,MAAM,aAAa,GAAe;YAChC,QAAQ,EAAE,iBAAiB;YAC3B,IAAI,EAAE,cAAc,CAAC,MAAM;YAC3B,cAAc,EAAE,sBAAsB;YACtC,KAAK,EAAE,YAAY,CAAC,cAAc,CAAC;YACnC,QAAQ,EAAE,aAAa,CAAC,cAAc,CAAC;YACvC,iBAAiB,EAAE,aAAa;YAChC,iBAAiB,EAAE,yBAAyB;YAC5C,WAAW,EAAE,cAAc;SAC5B,CAAC;QAEF,gBAAgB,CAAC,oBAAoB,CAAC,aAAa,CAAC,CAAC,CAAC;QACtD,eAAe,CAAC,YAAY,EAAE,sBAAsB,CAAC,CAAC;QACtD,eAAe,CAAC,mBAAmB,CAAC,aAAa,CAAC,CAAC,CAAC;QAEpD,OAAO,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;QAC5B,QAAQ,CAAC,gBAAgB,CAAC,qCAAqC,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC;QAEjF,OAAO,CAAC,qBAAqB,CAAC,CAAC;QAE/B,SAAS,CAAC,oBAAoB,CAAC,CAAC;QAChC,MAAM,SAAS,GAAW,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;QACpD,EAAE,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAE7C,OAAO,CAAC,oBAAoB,CAAC,CAAC;QAE9B,SAAS,CAAC,6BAA6B,CAAC,CAAC;QACzC,MAAM,gBAAgB,GAAW,aAAa,CAAC;QAC/C,kCAAkC;QAClC,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE,CAAC;YAC5B,gBAAgB,CAAC,2BAA2B,CAAC,KAAK,CAAC,CAAC,CAAC;QACvD,CAAC;QACD,MAAM,cAAc,GAAW,aAAa,GAAG,gBAAgB,CAAC;QAChE,OAAO,CAAC,6BAA6B,CAAC,CAAC;QAEvC,iCAAiC;QACjC,SAAS,CAAC,iBAAiB,CAAC,CAAC;QAC7B,eAAe,CAAC,0BAA0B,CAAC,gBAAgB,EAAE,cAAc,EAAE,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC;QAC9F,QAAQ,CAAC,cAAc,CAAC,qBAAqB,CAAC,CAAC;QAC/C,OAAO,CAAC,iBAAiB,CAAC,CAAC;QAE3B,OAAO,CAAC,YAAY,CAAC,CAAC;QACtB,MAAM,KAAK,GAAW,WAAW,CAAC,YAAY,CAAC,CAAC;QAChD,WAAW,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;QAC9B,QAAQ,CAAC,SAAS,CAAC,uBAAuB,OAAO,CAAC,MAAM,aAAa,cAAc,CAAC,KAAK,CA
AC,EAAE,CAAC,CAAC;QAC9F,OAAO,EAAE,WAAW,EAAE,OAAO,CAAC,MAAM,EAAE,QAAQ,EAAE,CAAC;;;;;;;;;CAClD","sourcesContent":["// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.\n// See LICENSE in the project root for license information.\n\nimport * as fs from 'node:fs';\nimport * as path from 'node:path';\nimport * as crypto from 'node:crypto';\nimport * as zlib from 'node:zlib';\n\nimport type { ITerminal } from '@rushstack/terminal/lib/ITerminal';\n\nimport { crc32Builder } from './crc32';\nimport { DISPOSE_SYMBOL, getDisposableFileHandle, type IDisposableFileHandle } from './fs';\nimport { type IIncrementalZlib, type IncrementalZlibMode, createIncrementalZlib } from './compress';\nimport { markStart, markEnd, getDuration, emitSummary, formatDuration } from './perf';\nimport {\n writeLocalFileHeader,\n writeDataDescriptor,\n writeCentralDirectoryHeader,\n writeEndOfCentralDirectory,\n ZSTD_COMPRESSION,\n DEFLATE_COMPRESSION,\n STORE_COMPRESSION,\n type ZipMetaCompressionMethod,\n type IFileEntry,\n dosDateTime\n} from './zipUtils';\nimport { calculateSHA1 } from './hash';\nimport {\n type ZipSyncOptionCompression,\n type IMetadata,\n type IDirQueueItem,\n METADATA_VERSION,\n METADATA_FILENAME,\n defaultBufferSize\n} from './zipSyncUtils';\n\n/**\n * File extensions for which additional DEFLATE/ZSTD compression is unlikely to help.\n * Used by the 'auto' compression heuristic to avoid wasting CPU on data that is already\n * compressed (images, media, existing archives, fonts, etc.).\n */\nconst LIKELY_COMPRESSED_EXTENSION_REGEX: RegExp =\n /\\.(?:zip|gz|tgz|bz2|xz|7z|rar|jpg|jpeg|png|gif|webp|avif|mp4|m4v|mov|mkv|webm|mp3|ogg|aac|flac|pdf|woff|woff2)$/;\n\n/**\n * Basic heuristic: skip re-compressing file types that are already compressed.\n */\nfunction isLikelyAlreadyCompressed(filename: string): boolean {\n return LIKELY_COMPRESSED_EXTENSION_REGEX.test(filename.toLowerCase());\n}\n\n/**\n * Map zip compression method code -> 
incremental zlib mode label\n */\nconst zlibPackModes: Record<ZipMetaCompressionMethod, IncrementalZlibMode | undefined> = {\n [ZSTD_COMPRESSION]: 'zstd-compress',\n [DEFLATE_COMPRESSION]: 'deflate',\n [STORE_COMPRESSION]: undefined\n} as const;\n\n/**\n * Public facing CLI option -> actual zip method used for a file we decide to compress.\n */\nconst zipSyncCompressionOptions: Record<ZipSyncOptionCompression, ZipMetaCompressionMethod> = {\n store: STORE_COMPRESSION,\n deflate: DEFLATE_COMPRESSION,\n zstd: ZSTD_COMPRESSION,\n auto: DEFLATE_COMPRESSION\n} as const;\n\n/**\n * @public\n * Options for zipsync\n */\nexport interface IZipSyncPackOptions {\n /**\n * \\@rushstack/terminal compatible terminal for logging\n */\n terminal: ITerminal;\n /**\n * Zip file path\n */\n archivePath: string;\n /**\n * Target directories to pack (relative to baseDir)\n */\n targetDirectories: ReadonlyArray<string>;\n /**\n * Base directory for relative paths within the archive (defaults to common parent of targetDirectories)\n */\n baseDir: string;\n /**\n * Compression mode. If set to 'deflate', file data will be compressed using raw DEFLATE (method 8) when this\n * produces a smaller result; otherwise it will fall back to 'store' per-file.\n */\n compression: ZipSyncOptionCompression;\n /**\n * Optional buffer that can be provided to avoid internal allocations.\n */\n inputBuffer?: Buffer<ArrayBuffer>;\n /**\n * Optional buffer that can be provided to avoid internal allocations.\n */\n outputBuffer?: Buffer<ArrayBuffer>;\n}\n\nexport interface IZipSyncPackResult {\n filesPacked: number;\n metadata: IMetadata;\n}\n\n/**\n * Create a zipsync archive by enumerating target directories, then streaming each file into the\n * output zip using the local file header + (optional compressed data) + data descriptor pattern.\n *\n * Performance characteristics:\n * - Single pass per file (no read-then-compress-then-write buffering). 
CRC32 + SHA-1 are computed\n * while streaming so the metadata JSON can later be used for selective unpack.\n * - Data descriptor usage (bit 3) allows writing headers before we know sizes or CRC32.\n * - A single timestamp (captured once) is applied to all entries for determinism.\n * - Metadata entry is added as a normal zip entry at the end (before central directory) so legacy\n * tools can still list/extract it, while zipsync can quickly parse file hashes.\n */\nexport function pack({\n archivePath,\n targetDirectories: rawTargetDirectories,\n baseDir: rawBaseDir,\n compression,\n terminal,\n inputBuffer = Buffer.allocUnsafeSlow(defaultBufferSize),\n outputBuffer = Buffer.allocUnsafeSlow(defaultBufferSize)\n}: IZipSyncPackOptions): IZipSyncPackResult {\n const baseDir: string = path.resolve(rawBaseDir);\n const targetDirectories: string[] = rawTargetDirectories.map((dir) => path.join(baseDir, dir));\n terminal.writeLine(`Packing to ${archivePath} from ${rawTargetDirectories.join(', ')}`);\n\n markStart('pack.total');\n terminal.writeDebugLine('Starting pack');\n // Pass 1: enumerate files with a queue to avoid deep recursion\n markStart('pack.enumerate');\n\n const filePaths: string[] = [];\n const queue: IDirQueueItem[] = targetDirectories.map((dir) => ({ dir, depth: 0 }));\n\n while (queue.length) {\n const { dir: currentDir, depth } = queue.shift()!;\n terminal.writeDebugLine(`Enumerating directory: ${currentDir}`);\n\n const padding: string = depth === 0 ? '' : '-↳'.repeat(depth);\n\n let items: fs.Dirent[];\n try {\n items = fs.readdirSync(currentDir, { withFileTypes: true });\n } catch (e) {\n if (\n e &&\n ((e as NodeJS.ErrnoException).code === 'ENOENT' || (e as NodeJS.ErrnoException).code === 'ENOTDIR')\n ) {\n terminal.writeWarningLine(`Failed to read directory: ${currentDir}. 
Ignoring.`);\n continue;\n } else {\n throw e;\n }\n }\n\n for (const item of items) {\n const fullPath: string = path.join(currentDir, item.name);\n if (item.isFile()) {\n const relativePath: string = path.relative(baseDir, fullPath).replace(/\\\\/g, '/');\n terminal.writeVerboseLine(`${padding}${item.name}`);\n filePaths.push(relativePath);\n } else if (item.isDirectory()) {\n terminal.writeVerboseLine(`${padding}${item.name}/`);\n queue.push({ dir: fullPath, depth: depth + 1 });\n } else {\n throw new Error(`Unexpected item (not file or directory): ${fullPath}. Aborting.`);\n }\n }\n }\n\n terminal.writeLine(`Found ${filePaths.length} files to pack (enumerated)`);\n markEnd('pack.enumerate');\n\n // Pass 2: stream each file: read chunks -> hash + (maybe) compress -> write local header + data descriptor.\n markStart('pack.prepareEntries');\n\n terminal.writeDebugLine(`Opening archive for write: ${archivePath}`);\n using zipFile: IDisposableFileHandle = getDisposableFileHandle(archivePath, 'w');\n let currentOffset: number = 0;\n /**\n * Write a raw chunk to the archive file descriptor, updating current offset.\n */\n function writeChunkToZip(chunk: Uint8Array, lengthBytes: number = chunk.byteLength): void {\n let offset: number = 0;\n while (lengthBytes > 0 && offset < chunk.byteLength) {\n // In practice this call always writes all data at once, but the spec says it is not an error\n // for it to not do so. Possibly that situation comes up when writing to something that is not\n // an ordinary file.\n const written: number = fs.writeSync(zipFile.fd, chunk, offset, lengthBytes);\n lengthBytes -= written;\n offset += written;\n }\n currentOffset += offset;\n }\n /** Convenience wrapper for writing multiple buffers sequentially. 
*/\n function writeChunksToZip(chunks: Uint8Array[]): void {\n for (const chunk of chunks) {\n writeChunkToZip(chunk);\n }\n }\n\n const dosDateTimeNow: { time: number; date: number } = dosDateTime(new Date());\n /**\n * Stream a single file into the archive.\n * Steps:\n * 1. Decide compression (based on user choice + heuristic).\n * 2. Emit local file header (sizes/CRC zeroed because we use a data descriptor).\n * 3. Read file in 32 MiB chunks: update SHA-1 + CRC32; optionally feed compressor or write raw.\n * 4. Flush compressor (if any) and write trailing data descriptor containing sizes + CRC.\n * 5. Return populated entry metadata for later central directory + JSON metadata.\n */\n function writeFileEntry(relativePath: string): IFileEntry {\n const fullPath: string = path.join(baseDir, relativePath);\n\n /**\n * Read file in large fixed-size buffer; invoke callback for each filled chunk.\n */\n const readInputInChunks: (onChunk: (bytesInInputBuffer: number) => void) => void = (\n onChunk: (bytesInInputBuffer: number) => void\n ): void => {\n using inputDisposable: IDisposableFileHandle = getDisposableFileHandle(fullPath, 'r');\n\n let bytesInInputBuffer: number = 0;\n // The entire input buffer will be drained in each loop iteration\n // So run until EOF\n while (!isNaN(inputDisposable.fd)) {\n bytesInInputBuffer = fs.readSync(inputDisposable.fd, inputBuffer, 0, inputBuffer.byteLength, -1);\n\n if (bytesInInputBuffer <= 0) {\n // EOF, close the input fd\n inputDisposable[DISPOSE_SYMBOL]();\n }\n\n onChunk(bytesInInputBuffer);\n }\n };\n\n let shouldCompress: boolean = false;\n if (compression === 'deflate' || compression === 'zstd') {\n shouldCompress = true;\n } else if (compression === 'auto') {\n // Heuristic: skip compression for small files or likely-already-compressed files\n if (!isLikelyAlreadyCompressed(relativePath)) {\n shouldCompress = true;\n } else {\n terminal.writeVerboseLine(\n `Skip compression heuristically (already-compressed) for 
${relativePath} (size unknown at this point)`\n );\n }\n }\n\n const compressionMethod: ZipMetaCompressionMethod = shouldCompress\n ? zipSyncCompressionOptions[compression]\n : zipSyncCompressionOptions.store;\n\n const entry: IFileEntry = {\n filename: relativePath,\n size: 0,\n compressedSize: 0,\n crc32: 0,\n sha1Hash: '',\n localHeaderOffset: currentOffset,\n compressionMethod,\n dosDateTime: dosDateTimeNow\n };\n\n writeChunksToZip(writeLocalFileHeader(entry));\n\n const sha1HashBuilder: crypto.Hash = crypto.createHash('sha1');\n let crc32: number = 0;\n let uncompressedSize: number = 0;\n let compressedSize: number = 0;\n\n /**\n * Compressor instance (deflate or zstd) created only if needed.\n */\n using incrementalZlib: IIncrementalZlib | undefined = shouldCompress\n ? createIncrementalZlib(\n outputBuffer,\n (chunk, lengthBytes) => {\n writeChunkToZip(chunk, lengthBytes);\n compressedSize += lengthBytes;\n },\n zlibPackModes[compressionMethod]!\n )\n : undefined;\n\n // Read input file in chunks, update hashes, and either compress or write raw.\n readInputInChunks((bytesInInputBuffer: number) => {\n const slice: Buffer = inputBuffer.subarray(0, bytesInInputBuffer);\n sha1HashBuilder.update(slice);\n crc32 = crc32Builder(slice, crc32);\n if (incrementalZlib) {\n incrementalZlib.update(slice);\n } else {\n writeChunkToZip(slice, bytesInInputBuffer);\n }\n uncompressedSize += bytesInInputBuffer;\n });\n\n // finalize hashes, compression\n incrementalZlib?.update(Buffer.alloc(0));\n crc32 = crc32 >>> 0;\n const sha1Hash: string = sha1HashBuilder.digest('hex');\n\n if (!shouldCompress) {\n compressedSize = uncompressedSize;\n }\n\n entry.size = uncompressedSize;\n entry.compressedSize = compressedSize;\n entry.crc32 = crc32;\n entry.sha1Hash = sha1Hash;\n\n // Trailing data descriptor now that final CRC/sizes are known.\n writeChunkToZip(writeDataDescriptor(entry));\n\n terminal.writeVerboseLine(\n `${relativePath} (sha1=${entry.sha1Hash}, 
crc32=${entry.crc32.toString(16)}, size=${\n entry.size\n }, compressed=${entry.compressedSize}, method=${entry.compressionMethod}, compressed ${(\n 100 -\n (entry.compressedSize / entry.size) * 100\n ).toFixed(1)}%)`\n );\n return entry;\n }\n\n const entries: IFileEntry[] = [];\n // Emit all file entries in enumeration order.\n for (const relativePath of filePaths) {\n entries.push(writeFileEntry(relativePath));\n }\n\n markEnd('pack.prepareEntries');\n terminal.writeLine(`Prepared ${entries.length} file entries`);\n\n markStart('pack.metadata.build');\n const metadata: IMetadata = { version: METADATA_VERSION, files: {} };\n // Build metadata map used for selective unpack (size + SHA‑1 per file).\n for (const entry of entries) {\n metadata.files[entry.filename] = { size: entry.size, sha1Hash: entry.sha1Hash };\n }\n\n const metadataContent: string = JSON.stringify(metadata);\n const metadataBuffer: Buffer = Buffer.from(metadataContent, 'utf8');\n terminal.writeDebugLine(\n `Metadata size=${metadataBuffer.length} bytes, fileCount=${Object.keys(metadata.files).length}`\n );\n\n let metadataCompressionMethod: ZipMetaCompressionMethod = zipSyncCompressionOptions.store;\n let metadataData: Buffer = metadataBuffer;\n let metadataCompressedSize: number = metadataBuffer.length;\n // Compress metadata (deflate) iff user allowed compression and it helps (>64 bytes & smaller result).\n if (compression !== 'store' && metadataBuffer.length > 64) {\n const compressed: Buffer = zlib.deflateRawSync(metadataBuffer, { level: 9 });\n if (compressed.length < metadataBuffer.length) {\n metadataCompressionMethod = zipSyncCompressionOptions.deflate;\n metadataData = compressed;\n metadataCompressedSize = compressed.length;\n terminal.writeDebugLine(\n `Metadata compressed (orig=${metadataBuffer.length}, compressed=${compressed.length})`\n );\n } else {\n terminal.writeDebugLine('Metadata compression skipped (not smaller)');\n }\n }\n\n const metadataEntry: IFileEntry = {\n filename: 
METADATA_FILENAME,\n size: metadataBuffer.length,\n compressedSize: metadataCompressedSize,\n crc32: crc32Builder(metadataBuffer),\n sha1Hash: calculateSHA1(metadataBuffer),\n localHeaderOffset: currentOffset,\n compressionMethod: metadataCompressionMethod,\n dosDateTime: dosDateTimeNow\n };\n\n writeChunksToZip(writeLocalFileHeader(metadataEntry));\n writeChunkToZip(metadataData, metadataCompressedSize);\n writeChunkToZip(writeDataDescriptor(metadataEntry));\n\n entries.push(metadataEntry);\n terminal.writeVerboseLine(`Total entries including metadata: ${entries.length}`);\n\n markEnd('pack.metadata.build');\n\n markStart('pack.write.entries');\n const outputDir: string = path.dirname(archivePath);\n fs.mkdirSync(outputDir, { recursive: true });\n\n markEnd('pack.write.entries');\n\n markStart('pack.write.centralDirectory');\n const centralDirOffset: number = currentOffset;\n // Emit central directory records.\n for (const entry of entries) {\n writeChunksToZip(writeCentralDirectoryHeader(entry));\n }\n const centralDirSize: number = currentOffset - centralDirOffset;\n markEnd('pack.write.centralDirectory');\n\n // Write end of central directory\n markStart('pack.write.eocd');\n writeChunkToZip(writeEndOfCentralDirectory(centralDirOffset, centralDirSize, entries.length));\n terminal.writeDebugLine('EOCD record written');\n markEnd('pack.write.eocd');\n\n markEnd('pack.total');\n const total: number = getDuration('pack.total');\n emitSummary('pack', terminal);\n terminal.writeLine(`Successfully packed ${entries.length} files in ${formatDuration(total)}`);\n return { filesPacked: entries.length, metadata };\n}\n"]}
@@ -0,0 +1,58 @@
1
+ // Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
2
+ // See LICENSE in the project root for license information.
3
+ import { parentPort as rawParentPort } from 'node:worker_threads';
4
+ import { Terminal } from '@rushstack/terminal/lib/Terminal';
5
+ import { StringBufferTerminalProvider } from '@rushstack/terminal/lib/StringBufferTerminalProvider';
6
+ import { pack } from './pack';
7
+ import { defaultBufferSize } from './zipSyncUtils';
8
// Guard: this module only makes sense on a worker thread, where
// node:worker_threads exposes a non-null parentPort.
if (rawParentPort === null) {
    throw new Error('This module must be run in a worker thread.');
}
// Non-null handle to the communication port with the host thread.
const parentPort = rawParentPort;
// Scratch buffers reused across pack invocations; allocated lazily on first use.
let inputBuffer;
let outputBuffer;
14
/**
 * Handle one command message from the host thread.
 *
 * A literal `false` is the shutdown signal: detach all listeners and close the
 * port so the worker can exit. Any other message is a command object; for
 * 'zipsync-pack' we run `pack`, capturing terminal output into a string buffer
 * so the logs travel back to the host alongside the result — or, on failure,
 * alongside the error details.
 */
function handleMessage(message) {
    // Shutdown sentinel from the host.
    if (message === false) {
        parentPort.removeAllListeners();
        parentPort.close();
        return;
    }
    // Fresh capture buffer per request so each response carries only its own logs.
    const terminalProvider = new StringBufferTerminalProvider();
    const terminal = new Terminal(terminalProvider);
    try {
        if (message.type === 'zipsync-pack') {
            const { options } = message;
            // Lazily allocate the reusable scratch buffers on first use.
            if (!inputBuffer) {
                inputBuffer = Buffer.allocUnsafeSlow(defaultBufferSize);
            }
            if (!outputBuffer) {
                outputBuffer = Buffer.allocUnsafeSlow(defaultBufferSize);
            }
            const zipSyncReturn = pack({ ...options, terminal, inputBuffer, outputBuffer });
            const response = {
                type: message.type,
                id: message.id,
                result: {
                    zipSyncReturn,
                    zipSyncLogs: terminalProvider.getOutput()
                }
            };
            return parentPort.postMessage(response);
        }
    }
    catch (err) {
        // Marshal the failure (plus any captured logs) back to the host instead
        // of letting the exception crash the worker thread.
        parentPort.postMessage({
            type: 'error',
            id: message.id,
            args: {
                message: err.message,
                stack: err.stack || '',
                zipSyncLogs: terminalProvider.getOutput()
            }
        });
    }
}
parentPort.on('message', handleMessage);
58
+ //# sourceMappingURL=packWorker.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"packWorker.js","sourceRoot":"","sources":["../src/packWorker.ts"],"names":[],"mappings":"AAAA,4FAA4F;AAC5F,2DAA2D;AAE3D,OAAO,EAAE,UAAU,IAAI,aAAa,EAAoB,MAAM,qBAAqB,CAAC;AAEpF,OAAO,EAAE,QAAQ,EAAE,MAAM,kCAAkC,CAAC;AAC5D,OAAO,EAAE,4BAA4B,EAAE,MAAM,sDAAsD,CAAC;AAEpG,OAAO,EAAqD,IAAI,EAAE,MAAM,QAAQ,CAAC;AACjF,OAAO,EAAE,iBAAiB,EAAE,MAAM,gBAAgB,CAAC;AAsCnD,IAAI,CAAC,aAAa,EAAE,CAAC;IACnB,MAAM,IAAI,KAAK,CAAC,6CAA6C,CAAC,CAAC;AACjE,CAAC;AACD,MAAM,UAAU,GAAgB,aAAa,CAAC;AAE9C,IAAI,WAAW,GAAoC,SAAS,CAAC;AAC7D,IAAI,YAAY,GAAoC,SAAS,CAAC;AAE9D,SAAS,aAAa,CAAC,OAAqC;IAC1D,IAAI,OAAO,KAAK,KAAK,EAAE,CAAC;QACtB,UAAU,CAAC,kBAAkB,EAAE,CAAC;QAChC,UAAU,CAAC,KAAK,EAAE,CAAC;QACnB,OAAO;IACT,CAAC;IAED,MAAM,gBAAgB,GAAiC,IAAI,4BAA4B,EAAE,CAAC;IAC1F,MAAM,QAAQ,GAAa,IAAI,QAAQ,CAAC,gBAAgB,CAAC,CAAC;IAE1D,IAAI,CAAC;QACH,QAAQ,OAAO,CAAC,IAAI,EAAE,CAAC;YACrB,KAAK,cAAc,CAAC,CAAC,CAAC;gBACpB,MAAM,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC;gBAC5B,IAAI,CAAC,WAAW,EAAE,CAAC;oBACjB,WAAW,GAAG,MAAM,CAAC,eAAe,CAAC,iBAAiB,CAAC,CAAC;gBAC1D,CAAC;gBACD,IAAI,CAAC,YAAY,EAAE,CAAC;oBAClB,YAAY,GAAG,MAAM,CAAC,eAAe,CAAC,iBAAiB,CAAC,CAAC;gBAC3D,CAAC;gBAED,MAAM,cAAc,GAA2B;oBAC7C,IAAI,EAAE,OAAO,CAAC,IAAI;oBAClB,EAAE,EAAE,OAAO,CAAC,EAAE;oBACd,MAAM,EAAE;wBACN,aAAa,EAAE,IAAI,CAAC,EAAE,GAAG,OAAO,EAAE,QAAQ,EAAE,WAAW,EAAE,YAAY,EAAE,CAAC;wBACxE,WAAW,EAAE,gBAAgB,CAAC,SAAS,EAAE;qBAC1C;iBACF,CAAC;gBACF,OAAO,UAAU,CAAC,WAAW,CAAC,cAAc,CAAC,CAAC;YAChD,CAAC;QACH,CAAC;IACH,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACb,MAAM,YAAY,GAA6B;YAC7C,IAAI,EAAE,OAAO;YACb,EAAE,EAAE,OAAO,CAAC,EAAE;YACd,IAAI,EAAE;gBACJ,OAAO,EAAG,GAAa,CAAC,OAAO;gBAC/B,KAAK,EAAG,GAAa,CAAC,KAAK,IAAI,EAAE;gBACjC,WAAW,EAAE,gBAAgB,CAAC,SAAS,EAAE;aAC1C;SACF,CAAC;QACF,UAAU,CAAC,WAAW,CAAC,YAAY,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAED,UAAU,CAAC,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation. All rights reserved. 
Licensed under the MIT license.\n// See LICENSE in the project root for license information.\n\nimport { parentPort as rawParentPort, type MessagePort } from 'node:worker_threads';\n\nimport { Terminal } from '@rushstack/terminal/lib/Terminal';\nimport { StringBufferTerminalProvider } from '@rushstack/terminal/lib/StringBufferTerminalProvider';\n\nimport { type IZipSyncPackOptions, type IZipSyncPackResult, pack } from './pack';\nimport { defaultBufferSize } from './zipSyncUtils';\n\nexport { type IZipSyncPackOptions, type IZipSyncPackResult } from './pack';\n\nexport interface IHashWorkerData {\n basePath: string;\n}\n\nexport interface IZipSyncPackCommandMessage {\n type: 'zipsync-pack';\n id: number;\n options: Omit<IZipSyncPackOptions, 'terminal'>;\n}\n\nexport interface IZipSyncPackWorkerResult {\n zipSyncReturn: IZipSyncPackResult;\n zipSyncLogs: string;\n}\n\ninterface IZipSyncSuccessMessage {\n id: number;\n type: 'zipsync-pack';\n result: IZipSyncPackWorkerResult;\n}\n\nexport interface IZipSyncPackErrorMessage {\n type: 'error';\n id: number;\n args: {\n message: string;\n stack: string;\n zipSyncLogs: string;\n };\n}\n\nexport type IHostToWorkerMessage = IZipSyncPackCommandMessage;\nexport type IWorkerToHostMessage = IZipSyncSuccessMessage | IZipSyncPackErrorMessage;\n\nif (!rawParentPort) {\n throw new Error('This module must be run in a worker thread.');\n}\nconst parentPort: MessagePort = rawParentPort;\n\nlet inputBuffer: Buffer<ArrayBuffer> | undefined = undefined;\nlet outputBuffer: Buffer<ArrayBuffer> | undefined = undefined;\n\nfunction handleMessage(message: IHostToWorkerMessage | false): void {\n if (message === false) {\n parentPort.removeAllListeners();\n parentPort.close();\n return;\n }\n\n const terminalProvider: StringBufferTerminalProvider = new StringBufferTerminalProvider();\n const terminal: Terminal = new Terminal(terminalProvider);\n\n try {\n switch (message.type) {\n case 'zipsync-pack': {\n const { options } = message;\n if 
(!inputBuffer) {\n inputBuffer = Buffer.allocUnsafeSlow(defaultBufferSize);\n }\n if (!outputBuffer) {\n outputBuffer = Buffer.allocUnsafeSlow(defaultBufferSize);\n }\n\n const successMessage: IZipSyncSuccessMessage = {\n type: message.type,\n id: message.id,\n result: {\n zipSyncReturn: pack({ ...options, terminal, inputBuffer, outputBuffer }),\n zipSyncLogs: terminalProvider.getOutput()\n }\n };\n return parentPort.postMessage(successMessage);\n }\n }\n } catch (err) {\n const errorMessage: IZipSyncPackErrorMessage = {\n type: 'error',\n id: message.id,\n args: {\n message: (err as Error).message,\n stack: (err as Error).stack || '',\n zipSyncLogs: terminalProvider.getOutput()\n }\n };\n parentPort.postMessage(errorMessage);\n }\n}\n\nparentPort.on('message', handleMessage);\n"]}
@@ -0,0 +1,43 @@
1
+ // Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
2
+ // See LICENSE in the project root for license information.
3
+ export async function packWorkerAsync(options) {
4
+ const { Worker } = await import('node:worker_threads');
5
+ const worker = new Worker(require.resolve('./packWorker'));
6
+ return new Promise((resolve, reject) => {
7
+ worker.on('message', (message) => {
8
+ switch (message.type) {
9
+ case 'zipsync-pack': {
10
+ resolve(message.result);
11
+ break;
12
+ }
13
+ case 'error': {
14
+ const error = new Error(message.args.message);
15
+ error.stack = message.args.stack;
16
+ reject(error);
17
+ break;
18
+ }
19
+ default: {
20
+ const exhaustiveCheck = message;
21
+ throw new Error(`Unexpected message type: ${JSON.stringify(exhaustiveCheck)}`);
22
+ }
23
+ }
24
+ });
25
+ worker.on('error', (err) => {
26
+ reject(err);
27
+ });
28
+ worker.on('exit', (code) => {
29
+ if (code !== 0) {
30
+ reject(new Error(`Worker stopped with exit code ${code}`));
31
+ }
32
+ });
33
+ const commandMessage = {
34
+ type: 'zipsync-pack',
35
+ id: 0,
36
+ options
37
+ };
38
+ worker.postMessage(commandMessage);
39
+ }).finally(() => {
40
+ worker.postMessage(false);
41
+ });
42
+ }
43
+ //# sourceMappingURL=packWorkerAsync.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"packWorkerAsync.js","sourceRoot":"","sources":["../src/packWorkerAsync.ts"],"names":[],"mappings":"AAAA,4FAA4F;AAC5F,2DAA2D;AAa3D,MAAM,CAAC,KAAK,UAAU,eAAe,CACnC,OAA8C;IAE9C,MAAM,EAAE,MAAM,EAAE,GAAG,MAAM,MAAM,CAAC,qBAAqB,CAAC,CAAC;IAEvD,MAAM,MAAM,GAAW,IAAI,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC;IAEnE,OAAO,IAAI,OAAO,CAA2B,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAC/D,MAAM,CAAC,EAAE,CAAC,SAAS,EAAE,CAAC,OAA6B,EAAE,EAAE;YACrD,QAAQ,OAAO,CAAC,IAAI,EAAE,CAAC;gBACrB,KAAK,cAAc,CAAC,CAAC,CAAC;oBACpB,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;oBACxB,MAAM;gBACR,CAAC;gBACD,KAAK,OAAO,CAAC,CAAC,CAAC;oBACb,MAAM,KAAK,GAAU,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;oBACrD,KAAK,CAAC,KAAK,GAAG,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC;oBACjC,MAAM,CAAC,KAAK,CAAC,CAAC;oBACd,MAAM;gBACR,CAAC;gBACD,OAAO,CAAC,CAAC,CAAC;oBACR,MAAM,eAAe,GAAU,OAAO,CAAC;oBACvC,MAAM,IAAI,KAAK,CAAC,4BAA4B,IAAI,CAAC,SAAS,CAAC,eAAe,CAAC,EAAE,CAAC,CAAC;gBACjF,CAAC;YACH,CAAC;QACH,CAAC,CAAC,CAAC;QAEH,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;YACzB,MAAM,CAAC,GAAG,CAAC,CAAC;QACd,CAAC,CAAC,CAAC;QAEH,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;YACzB,IAAI,IAAI,KAAK,CAAC,EAAE,CAAC;gBACf,MAAM,CAAC,IAAI,KAAK,CAAC,iCAAiC,IAAI,EAAE,CAAC,CAAC,CAAC;YAC7D,CAAC;QACH,CAAC,CAAC,CAAC;QAEH,MAAM,cAAc,GAAyB;YAC3C,IAAI,EAAE,cAAc;YACpB,EAAE,EAAE,CAAC;YACL,OAAO;SACR,CAAC;QACF,MAAM,CAAC,WAAW,CAAC,cAAc,CAAC,CAAC;IACrC,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE;QACd,MAAM,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;IAC5B,CAAC,CAAC,CAAC;AACL,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation. All rights reserved. 
Licensed under the MIT license.\n// See LICENSE in the project root for license information.\n\nimport type { Worker } from 'node:worker_threads';\n\nimport type {\n IWorkerToHostMessage,\n IHostToWorkerMessage,\n IZipSyncPackWorkerResult,\n IZipSyncPackOptions\n} from './packWorker';\n\nexport type { IZipSyncPackWorkerResult } from './packWorker';\n\nexport async function packWorkerAsync(\n options: Omit<IZipSyncPackOptions, 'terminal'>\n): Promise<IZipSyncPackWorkerResult> {\n const { Worker } = await import('node:worker_threads');\n\n const worker: Worker = new Worker(require.resolve('./packWorker'));\n\n return new Promise<IZipSyncPackWorkerResult>((resolve, reject) => {\n worker.on('message', (message: IWorkerToHostMessage) => {\n switch (message.type) {\n case 'zipsync-pack': {\n resolve(message.result);\n break;\n }\n case 'error': {\n const error: Error = new Error(message.args.message);\n error.stack = message.args.stack;\n reject(error);\n break;\n }\n default: {\n const exhaustiveCheck: never = message;\n throw new Error(`Unexpected message type: ${JSON.stringify(exhaustiveCheck)}`);\n }\n }\n });\n\n worker.on('error', (err) => {\n reject(err);\n });\n\n worker.on('exit', (code) => {\n if (code !== 0) {\n reject(new Error(`Worker stopped with exit code ${code}`));\n }\n });\n\n const commandMessage: IHostToWorkerMessage = {\n type: 'zipsync-pack',\n id: 0,\n options\n };\n worker.postMessage(commandMessage);\n }).finally(() => {\n worker.postMessage(false);\n });\n}\n"]}
@@ -0,0 +1,49 @@
1
+ // Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
2
+ // See LICENSE in the project root for license information.
3
+ import { performance } from 'node:perf_hooks';
4
+ export function markStart(name) {
5
+ performance.mark(`zipsync:${name}:start`);
6
+ }
7
+ export function markEnd(name) {
8
+ const base = `zipsync:${name}`;
9
+ performance.mark(`${base}:end`);
10
+ performance.measure(base, `${base}:start`, `${base}:end`);
11
+ }
12
+ export function getDuration(name) {
13
+ const measures = performance.getEntriesByName(`zipsync:${name}`);
14
+ if (measures.length === 0)
15
+ return 0;
16
+ return measures[measures.length - 1].duration;
17
+ }
18
+ export function formatDuration(ms) {
19
+ return ms >= 1000 ? (ms / 1000).toFixed(2) + 's' : ms.toFixed(2) + 'ms';
20
+ }
21
+ export function emitSummary(operation, term) {
22
+ const totalName = `${operation}.total`;
23
+ // Ensure total is measured
24
+ markEnd(totalName);
25
+ const totalDuration = getDuration(totalName);
26
+ const prefix = `zipsync:${operation}.`;
27
+ const measures = performance.getEntriesByType('measure');
28
+ const rows = [];
29
+ for (const m of measures) {
30
+ if (!m.name.startsWith(prefix))
31
+ continue;
32
+ if (m.name === `zipsync:${totalName}`)
33
+ continue;
34
+ // Extract segment name (remove prefix)
35
+ const segment = m.name.substring(prefix.length);
36
+ rows.push({ name: segment, dur: m.duration });
37
+ }
38
+ rows.sort((a, b) => b.dur - a.dur);
39
+ const lines = rows.map((r) => {
40
+ const pct = totalDuration ? (r.dur / totalDuration) * 100 : 0;
41
+ return ` ${r.name}: ${formatDuration(r.dur)} (${pct.toFixed(1)}%)`;
42
+ });
43
+ lines.push(` TOTAL ${operation}.total: ${formatDuration(totalDuration)}`);
44
+ term.writeVerboseLine(`Performance summary (${operation}):\n` + lines.join('\n'));
45
+ // Cleanup marks/measures to avoid unbounded growth
46
+ performance.clearMarks();
47
+ performance.clearMeasures();
48
+ }
49
+ //# sourceMappingURL=perf.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"perf.js","sourceRoot":"","sources":["../src/perf.ts"],"names":[],"mappings":"AAAA,4FAA4F;AAC5F,2DAA2D;AAG3D,OAAO,EAAE,WAAW,EAAE,MAAM,iBAAiB,CAAC;AAI9C,MAAM,UAAU,SAAS,CAAC,IAAY;IACpC,WAAW,CAAC,IAAI,CAAC,WAAW,IAAI,QAAQ,CAAC,CAAC;AAC5C,CAAC;AACD,MAAM,UAAU,OAAO,CAAC,IAAY;IAClC,MAAM,IAAI,GAAW,WAAW,IAAI,EAAE,CAAC;IACvC,WAAW,CAAC,IAAI,CAAC,GAAG,IAAI,MAAM,CAAC,CAAC;IAChC,WAAW,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,IAAI,QAAQ,EAAE,GAAG,IAAI,MAAM,CAAC,CAAC;AAC5D,CAAC;AACD,MAAM,UAAU,WAAW,CAAC,IAAY;IACtC,MAAM,QAAQ,GAAuB,WAAW,CAAC,gBAAgB,CAC/D,WAAW,IAAI,EAAE,CACe,CAAC;IACnC,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC;QAAE,OAAO,CAAC,CAAC;IACpC,OAAO,QAAQ,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,QAAQ,CAAC;AAChD,CAAC;AACD,MAAM,UAAU,cAAc,CAAC,EAAU;IACvC,OAAO,EAAE,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;AAC1E,CAAC;AACD,MAAM,UAAU,WAAW,CAAC,SAA4B,EAAE,IAAe;IACvE,MAAM,SAAS,GAAW,GAAG,SAAS,QAAQ,CAAC;IAC/C,2BAA2B;IAC3B,OAAO,CAAC,SAAS,CAAC,CAAC;IACnB,MAAM,aAAa,GAAW,WAAW,CAAC,SAAS,CAAC,CAAC;IACrD,MAAM,MAAM,GAAW,WAAW,SAAS,GAAG,CAAC;IAC/C,MAAM,QAAQ,GAAuB,WAAW,CAAC,gBAAgB,CAC/D,SAAS,CACuB,CAAC;IACnC,MAAM,IAAI,GAAyC,EAAE,CAAC;IACtD,KAAK,MAAM,CAAC,IAAI,QAAQ,EAAE,CAAC;QACzB,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;YAAE,SAAS;QACzC,IAAI,CAAC,CAAC,IAAI,KAAK,WAAW,SAAS,EAAE;YAAE,SAAS;QAChD,uCAAuC;QACvC,MAAM,OAAO,GAAW,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;QACxD,IAAI,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,EAAE,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;IAChD,CAAC;IACD,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;IACnC,MAAM,KAAK,GAAa,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE;QACrC,MAAM,GAAG,GAAW,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG,aAAa,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QACtE,OAAO,KAAK,CAAC,CAAC,IAAI,KAAK,cAAc,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC;IACtE,CAAC,CAAC,CAAC;IACH,KAAK,CAAC,IAAI,C
AAC,WAAW,SAAS,WAAW,cAAc,CAAC,aAAa,CAAC,EAAE,CAAC,CAAC;IAC3E,IAAI,CAAC,gBAAgB,CAAC,wBAAwB,SAAS,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;IAClF,mDAAmD;IACnD,WAAW,CAAC,UAAU,EAAE,CAAC;IACzB,WAAW,CAAC,aAAa,EAAE,CAAC;AAC9B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.\n// See LICENSE in the project root for license information.\n\nimport type { PerformanceEntry } from 'node:perf_hooks';\nimport { performance } from 'node:perf_hooks';\n\nimport type { ITerminal } from '@rushstack/terminal/lib/ITerminal';\n\nexport function markStart(name: string): void {\n performance.mark(`zipsync:${name}:start`);\n}\nexport function markEnd(name: string): void {\n const base: string = `zipsync:${name}`;\n performance.mark(`${base}:end`);\n performance.measure(base, `${base}:start`, `${base}:end`);\n}\nexport function getDuration(name: string): number {\n const measures: PerformanceEntry[] = performance.getEntriesByName(\n `zipsync:${name}`\n ) as unknown as PerformanceEntry[];\n if (measures.length === 0) return 0;\n return measures[measures.length - 1].duration;\n}\nexport function formatDuration(ms: number): string {\n return ms >= 1000 ? 
(ms / 1000).toFixed(2) + 's' : ms.toFixed(2) + 'ms';\n}\nexport function emitSummary(operation: 'pack' | 'unpack', term: ITerminal): void {\n const totalName: string = `${operation}.total`;\n // Ensure total is measured\n markEnd(totalName);\n const totalDuration: number = getDuration(totalName);\n const prefix: string = `zipsync:${operation}.`;\n const measures: PerformanceEntry[] = performance.getEntriesByType(\n 'measure'\n ) as unknown as PerformanceEntry[];\n const rows: Array<{ name: string; dur: number }> = [];\n for (const m of measures) {\n if (!m.name.startsWith(prefix)) continue;\n if (m.name === `zipsync:${totalName}`) continue;\n // Extract segment name (remove prefix)\n const segment: string = m.name.substring(prefix.length);\n rows.push({ name: segment, dur: m.duration });\n }\n rows.sort((a, b) => b.dur - a.dur);\n const lines: string[] = rows.map((r) => {\n const pct: number = totalDuration ? (r.dur / totalDuration) * 100 : 0;\n return ` ${r.name}: ${formatDuration(r.dur)} (${pct.toFixed(1)}%)`;\n });\n lines.push(` TOTAL ${operation}.total: ${formatDuration(totalDuration)}`);\n term.writeVerboseLine(`Performance summary (${operation}):\\n` + lines.join('\\n'));\n // Cleanup marks/measures to avoid unbounded growth\n performance.clearMarks();\n performance.clearMeasures();\n}\n"]}
@@ -0,0 +1,17 @@
1
+ // Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
2
+ // See LICENSE in the project root for license information.
3
+ import { ConsoleTerminalProvider } from '@rushstack/terminal/lib/ConsoleTerminalProvider';
4
+ import { Terminal } from '@rushstack/terminal/lib/Terminal';
5
+ import { version } from '../package.json';
6
+ import { ZipSyncCommandLineParser } from './cli/ZipSyncCommandLineParser';
7
+ const toolVersion = version;
8
+ const consoleTerminalProvider = new ConsoleTerminalProvider();
9
+ const terminal = new Terminal(consoleTerminalProvider);
10
+ terminal.writeLine();
11
+ terminal.writeLine(`zipsync ${toolVersion} - https://rushstack.io`);
12
+ terminal.writeLine();
13
+ const commandLine = new ZipSyncCommandLineParser(consoleTerminalProvider, terminal);
14
+ commandLine.executeAsync().catch((error) => {
15
+ terminal.writeError(error);
16
+ });
17
+ //# sourceMappingURL=start.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"start.js","sourceRoot":"","sources":["../src/start.ts"],"names":[],"mappings":"AAAA,4FAA4F;AAC5F,2DAA2D;AAE3D,OAAO,EAAE,uBAAuB,EAAE,MAAM,iDAAiD,CAAC;AAC1F,OAAO,EAAE,QAAQ,EAAE,MAAM,kCAAkC,CAAC;AAE5D,OAAO,EAAE,OAAO,EAAE,MAAM,iBAAiB,CAAC;AAC1C,OAAO,EAAE,wBAAwB,EAAE,MAAM,gCAAgC,CAAC;AAE1E,MAAM,WAAW,GAAW,OAAO,CAAC;AAEpC,MAAM,uBAAuB,GAA4B,IAAI,uBAAuB,EAAE,CAAC;AACvF,MAAM,QAAQ,GAAa,IAAI,QAAQ,CAAC,uBAAuB,CAAC,CAAC;AAEjE,QAAQ,CAAC,SAAS,EAAE,CAAC;AACrB,QAAQ,CAAC,SAAS,CAAC,WAAW,WAAW,yBAAyB,CAAC,CAAC;AACpE,QAAQ,CAAC,SAAS,EAAE,CAAC;AAErB,MAAM,WAAW,GAA6B,IAAI,wBAAwB,CAAC,uBAAuB,EAAE,QAAQ,CAAC,CAAC;AAC9G,WAAW,CAAC,YAAY,EAAE,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE;IACzC,QAAQ,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;AAC7B,CAAC,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.\n// See LICENSE in the project root for license information.\n\nimport { ConsoleTerminalProvider } from '@rushstack/terminal/lib/ConsoleTerminalProvider';\nimport { Terminal } from '@rushstack/terminal/lib/Terminal';\n\nimport { version } from '../package.json';\nimport { ZipSyncCommandLineParser } from './cli/ZipSyncCommandLineParser';\n\nconst toolVersion: string = version;\n\nconst consoleTerminalProvider: ConsoleTerminalProvider = new ConsoleTerminalProvider();\nconst terminal: Terminal = new Terminal(consoleTerminalProvider);\n\nterminal.writeLine();\nterminal.writeLine(`zipsync ${toolVersion} - https://rushstack.io`);\nterminal.writeLine();\n\nconst commandLine: ZipSyncCommandLineParser = new ZipSyncCommandLineParser(consoleTerminalProvider, terminal);\ncommandLine.executeAsync().catch((error) => {\n terminal.writeError(error);\n});\n"]}