neozip-cli 0.75.0-beta → 0.75.1-beta

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,924 @@
1
+ "use strict";
2
+ /**
3
+ * File operations for NeoZip CLI
4
+ */
5
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
6
+ if (k2 === undefined) k2 = k;
7
+ var desc = Object.getOwnPropertyDescriptor(m, k);
8
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
9
+ desc = { enumerable: true, get: function() { return m[k]; } };
10
+ }
11
+ Object.defineProperty(o, k2, desc);
12
+ }) : (function(o, m, k, k2) {
13
+ if (k2 === undefined) k2 = k;
14
+ o[k2] = m[k];
15
+ }));
16
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
17
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
18
+ }) : function(o, v) {
19
+ o["default"] = v;
20
+ });
21
+ var __importStar = (this && this.__importStar) || (function () {
22
+ var ownKeys = function(o) {
23
+ ownKeys = Object.getOwnPropertyNames || function (o) {
24
+ var ar = [];
25
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
26
+ return ar;
27
+ };
28
+ return ownKeys(o);
29
+ };
30
+ return function (mod) {
31
+ if (mod && mod.__esModule) return mod;
32
+ var result = {};
33
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
34
+ __setModuleDefault(result, mod);
35
+ return result;
36
+ };
37
+ })();
38
+ var __importDefault = (this && this.__importDefault) || function (mod) {
39
+ return (mod && mod.__esModule) ? mod : { "default": mod };
40
+ };
41
+ Object.defineProperty(exports, "__esModule", { value: true });
42
+ exports.shouldUpdateFile = shouldUpdateFile;
43
+ exports.loadExistingArchive = loadExistingArchive;
44
+ exports.collectFilesRecursively = collectFilesRecursively;
45
+ exports.filterFiles = filterFiles;
46
+ exports.findOldestEntryTime = findOldestEntryTime;
47
+ exports.moveFiles = moveFiles;
48
+ exports.deleteFromArchive = deleteFromArchive;
49
+ exports.loadExistingArchiveLarge = loadExistingArchiveLarge;
50
+ const fs = __importStar(require("fs"));
51
+ const path = __importStar(require("path"));
52
+ const minimatch_1 = require("minimatch");
53
+ const neozipkit_1 = __importDefault(require("neozipkit"));
54
+ const neozipkit_2 = require("neozipkit");
55
+ const node_1 = __importDefault(require("neozipkit/node"));
56
+ const utils_1 = require("./utils");
57
+ const exit_codes_1 = require("../exit-codes");
58
/**
 * Decide whether a file on disk should replace its counterpart in the archive.
 *
 * Semantics (zip -u / -f):
 * - update mode (isFreshen === false): add files missing from the archive and
 *   replace files that are newer on disk.
 * - freshen mode (isFreshen === true): only replace files already present in
 *   the archive that are newer on disk; never add new entries.
 *
 * @param entryName       Archive entry name to look up.
 * @param fileStat        fs.Stats of the on-disk file (only mtimeMs is read).
 * @param existingArchive Loaded archive exposing getDirectory(), or null/undefined.
 * @param isFreshen       True for freshen (-f) semantics, false for update (-u).
 * @returns true when the on-disk file should be (re)added to the archive.
 */
async function shouldUpdateFile(entryName, fileStat, existingArchive, isFreshen = false) {
    // With no archive to compare against: update-mode adds everything,
    // freshen-mode adds nothing.
    if (!existingArchive) {
        return !isFreshen;
    }
    try {
        const directory = existingArchive.getDirectory() || [];
        const archived = directory.find((e) => e.filename === entryName);
        if (!archived) {
            // Entry absent from the archive: add for update, skip for freshen.
            return !isFreshen;
        }
        // Resolve the archived entry's mtime in milliseconds since 1970,
        // preferring high-resolution extended timestamps over DOS-era fields.
        const resolveArchivedTime = () => {
            if (archived.ntfsTime && archived.ntfsTime.mtime) {
                return archived.ntfsTime.mtime;
            }
            if (archived.extendedTime && archived.extendedTime.mtime) {
                return archived.extendedTime.mtime;
            }
            if (archived.parseDateTime && archived.lastModTimeDate) {
                // Delegate to the entry's own parser when available.
                const parsed = archived.parseDateTime(archived.lastModTimeDate);
                return parsed ? parsed.getTime() : 0;
            }
            if (archived.lastModTimeDate) {
                return new Date(archived.lastModTimeDate).getTime();
            }
            if (archived.timeDateDOS) {
                // NOTE(review): treated here as seconds since 1970, but
                // findOldestEntryTime unpacks the same field as packed DOS
                // bit-fields — one interpretation is presumably wrong; confirm
                // against neozipkit's ZipEntry definition.
                return archived.timeDateDOS * 1000;
            }
            return null; // no usable timestamp on the entry
        };
        const existingTime = resolveArchivedTime();
        if (existingTime === null) {
            // Cannot determine the archived time: err on the side of updating.
            return true;
        }
        const fileTime = fileStat.mtimeMs;
        // Opt-in diagnostics: DEBUG_UPDATE=1 traces the comparison inputs.
        if (process.env.DEBUG_UPDATE) {
            console.log(`Date comparison for ${entryName}:`);
            console.log(` File time: ${new Date(fileTime).toISOString()}`);
            console.log(` Archive time: ${new Date(existingTime).toISOString()}`);
            console.log(` File newer: ${fileTime > existingTime}`);
            console.log(` Timestamp sources:`, {
                ntfsTime: archived.ntfsTime,
                extendedTime: archived.extendedTime,
                timeDateDOS: archived.timeDateDOS,
                lastModTimeDate: archived.lastModTimeDate,
                hasParseDateTime: !!archived.parseDateTime
            });
        }
        // Compare at whole-second precision so sub-second drift between the
        // filesystem clock and ZIP timestamps never forces a rewrite.
        return Math.floor(fileTime / 1000) > Math.floor(existingTime / 1000);
    }
    catch (error) {
        // Any lookup/parse failure: assume the file needs updating.
        return true;
    }
}
122
/**
 * Load an existing ZIP archive from disk, or return null when the file is
 * missing or unreadable — callers then treat the archive as brand new.
 *
 * @param archiveName Path to the archive file.
 * @param options     Optional; options.inMemory selects the EOCD-based
 *                    large-file loader instead of reading the whole file.
 * @returns Loaded Zipkit instance, or null.
 */
function loadExistingArchive(archiveName, options) {
    if (!fs.existsSync(archiveName)) {
        return null;
    }
    try {
        if (options?.inMemory) {
            // Large-archive path: read only the central directory via EOCD.
            try {
                const loaded = loadExistingArchiveLarge(archiveName);
                // A null result means "no usable archive" — treat as new.
                return loaded || null;
            }
            catch (error) {
                // NOTE: this throw is caught by the outer catch below, so the
                // net effect is still a null return (archive treated as new).
                throw new Error(`Failed to load large ZIP archive ${archiveName} using optimized EOCD loading. The archive may be corrupted or in an unsupported format.`);
            }
        }
        // Default path: pull the entire archive into memory and parse it.
        const bytes = fs.readFileSync(archiveName);
        const archive = new neozipkit_1.default();
        archive.loadZip(bytes);
        return archive;
    }
    catch (error) {
        // Unreadable/corrupt archive: behave as if it does not exist yet.
        return null;
    }
}
156
/**
 * Recursively collect files (and the contents of directories) to be archived.
 *
 * For each input path this walks the filesystem and produces one descriptor
 * object per file: { entryName, absPath, displayPath, stat, isDirectory,
 * [isSymlink, linkTarget], [isHardLink, originalEntry], [inode] }.
 *
 * Behavior notes (all visible in the code below):
 * - options.junkPaths   : entry names use only the basename (zip -j).
 * - options.symlinks    : store the link itself (lstat + readlink) instead of
 *                         following it.
 * - options.recurse     : descend into directories; without it a directory
 *                         argument is an error and exitZip() is called.
 * - options.hardLinks   : on non-Windows platforms, files with nlink > 1 are
 *                         deduplicated by inode; the second and later sightings
 *                         are marked isHardLink with a back-reference.
 * - Top-level files keep the raw input path as entryName, while files found
 *   inside directories get a path relative to process.cwd() — an asymmetry
 *   to be aware of when absolute paths are passed in.
 *
 * NOTE(review): when symlinks are followed (default), a symlinked directory is
 * recursed into with no cycle detection — a symlink loop would presumably
 * recurse forever; confirm whether upstream guards against this.
 *
 * @param inputPaths Array of file/directory paths from the command line.
 * @param options    CLI options (junkPaths, symlinks, recurse, hardLinks, debug).
 * @returns Array of collected file descriptors.
 */
function collectFilesRecursively(inputPaths, options) {
    const collectedFiles = [];
    // Track inodes to detect hard links (Unix-like systems only).
    const inodeMap = new Map(); // inode -> first entry name
    /**
     * Record one regular file, classifying it as a hard link when its inode
     * has already been seen and hard-link tracking is enabled.
     */
    function processFile(filePath, entryName, lstat) {
        // Hard-link detection is skipped on Windows and for nlink <= 1.
        if (options.hardLinks && process.platform !== 'win32' && lstat.nlink > 1) {
            const inode = lstat.ino; // inode number
            if (inodeMap.has(inode)) {
                // Second or later sighting of this inode: store as a hard link
                // that references the first entry carrying the actual data.
                const originalEntry = inodeMap.get(inode);
                collectedFiles.push({
                    entryName: entryName,
                    absPath: filePath,
                    displayPath: filePath,
                    stat: lstat,
                    isDirectory: false,
                    isHardLink: true,
                    originalEntry: originalEntry,
                    inode: inode
                });
                if (options.debug) {
                    console.log(`🔗 Hard link detected: ${entryName} -> ${originalEntry} (inode: ${inode})`);
                }
            }
            else {
                // First occurrence of this inode: remember it for later links.
                inodeMap.set(inode, entryName);
                collectedFiles.push({
                    entryName: entryName,
                    absPath: filePath,
                    displayPath: filePath,
                    stat: lstat,
                    isDirectory: false,
                    inode: inode
                });
            }
        }
        else {
            // Regular file (hard-link tracking disabled or not applicable).
            collectedFiles.push({
                entryName: entryName,
                absPath: filePath,
                displayPath: filePath,
                stat: lstat,
                isDirectory: false
            });
        }
    }
    for (const inputPath of inputPaths) {
        if (!fs.existsSync(inputPath)) {
            console.error(`Error: Path not found: ${inputPath}`);
            // exitZip presumably terminates the process — TODO confirm.
            (0, exit_codes_1.exitZip)(exit_codes_1.ZIP_EXIT_CODES.CANT_FIND_ARCHIVE);
        }
        // lstat (not stat) so symbolic links are visible as links.
        const lstat = fs.lstatSync(inputPath);
        if (lstat.isSymbolicLink()) {
            if (options.symlinks) {
                // Store the symbolic link itself, with its target path.
                const linkTarget = fs.readlinkSync(inputPath);
                collectedFiles.push({
                    entryName: options.junkPaths ? path.basename(inputPath) : inputPath,
                    absPath: inputPath,
                    displayPath: inputPath,
                    stat: lstat,
                    isDirectory: false,
                    isSymlink: true,
                    linkTarget: linkTarget
                });
            }
            else {
                // Default: follow the symbolic link to its target.
                try {
                    const stat = fs.statSync(inputPath); // This follows the link
                    if (stat.isFile()) {
                        collectedFiles.push({
                            entryName: options.junkPaths ? path.basename(inputPath) : inputPath,
                            absPath: inputPath,
                            displayPath: inputPath,
                            stat: stat,
                            isDirectory: false
                        });
                    }
                    else if (stat.isDirectory() && options.recurse) {
                        // Recurse into the directory the symlink points to.
                        // (A symlinked directory without --recurse is silently
                        // skipped here, unlike a real directory below.)
                        const subFiles = collectFilesRecursively([inputPath], options);
                        collectedFiles.push(...subFiles);
                    }
                }
                catch (error) {
                    // Broken link: warn and continue with the remaining inputs.
                    console.error(`Warning: Cannot follow symbolic link ${inputPath}: ${error instanceof Error ? error.message : String(error)}`);
                }
            }
        }
        else if (lstat.isFile()) {
            // Regular file: route through hard-link-aware bookkeeping.
            const entryName = options.junkPaths ? path.basename(inputPath) : inputPath;
            processFile(inputPath, entryName, lstat);
        }
        else if (lstat.isDirectory()) {
            if (options.recurse) {
                // Walk the directory one level, recursing for subdirectories.
                const dirContents = fs.readdirSync(inputPath);
                for (const item of dirContents) {
                    const itemPath = path.join(inputPath, item);
                    const itemLstat = fs.lstatSync(itemPath);
                    if (itemLstat.isSymbolicLink()) {
                        if (options.symlinks) {
                            // Store the symbolic link itself.
                            const linkTarget = fs.readlinkSync(itemPath);
                            const relativePath = path.relative(process.cwd(), itemPath);
                            collectedFiles.push({
                                entryName: options.junkPaths ? path.basename(itemPath) : relativePath,
                                absPath: itemPath,
                                displayPath: itemPath,
                                stat: itemLstat,
                                isDirectory: false,
                                isSymlink: true,
                                linkTarget: linkTarget
                            });
                        }
                        else {
                            // Default: follow the symbolic link.
                            try {
                                const itemStat = fs.statSync(itemPath); // This follows the link
                                if (itemStat.isFile()) {
                                    const relativePath = path.relative(process.cwd(), itemPath);
                                    collectedFiles.push({
                                        entryName: options.junkPaths ? path.basename(itemPath) : relativePath,
                                        absPath: itemPath,
                                        displayPath: itemPath,
                                        stat: itemStat,
                                        isDirectory: false
                                    });
                                }
                                else if (itemStat.isDirectory()) {
                                    // Recurse into the symlinked directory
                                    // (no cycle guard — see note above).
                                    const subFiles = collectFilesRecursively([itemPath], options);
                                    collectedFiles.push(...subFiles);
                                }
                            }
                            catch (error) {
                                console.error(`Warning: Cannot follow symbolic link ${itemPath}: ${error instanceof Error ? error.message : String(error)}`);
                            }
                        }
                    }
                    else if (itemLstat.isFile()) {
                        // Files inside directories are named relative to cwd.
                        const relativePath = path.relative(process.cwd(), itemPath);
                        const entryName = options.junkPaths ? path.basename(itemPath) : relativePath;
                        processFile(itemPath, entryName, itemLstat);
                    }
                    else if (itemLstat.isDirectory()) {
                        // Recurse into the subdirectory.
                        const subFiles = collectFilesRecursively([itemPath], options);
                        collectedFiles.push(...subFiles);
                    }
                }
            }
            else {
                // Directory argument without -r/--recurse is a hard error.
                console.error(`Error: ${inputPath} is a directory. Use -r/--recurse to include directories.`);
                (0, exit_codes_1.exitZip)(exit_codes_1.ZIP_EXIT_CODES.CANT_CREATE_FILE);
            }
        }
    }
    return collectedFiles;
}
330
/**
 * Filter the collected file paths through the include/exclude glob patterns
 * and the suffix list. A path survives only if it matches an include pattern
 * (when any are given), matches no exclude pattern, and does not carry one of
 * the listed suffixes. Globs are tested against both the basename and the
 * cwd-relative path.
 *
 * NOTE(review): paths matching options.suffixes are dropped from the result
 * entirely, even though the surrounding comments describe the option as
 * "don't compress" — confirm the intended CLI semantics.
 *
 * @param files   Array of file path strings.
 * @param options { include?, exclude?, suffixes? } pattern lists.
 * @returns Filtered array (the original array when no filters are set).
 */
function filterFiles(files, options) {
    const { include, exclude, suffixes } = options;
    // Fast path: nothing to filter on.
    if (!include && !exclude && !suffixes) {
        return files;
    }
    // A pattern list "hits" when any pattern matches the basename or the
    // cwd-relative path.
    const matchesAny = (patterns, baseName, relPath) => patterns.some((pattern) => (0, minimatch_1.minimatch)(baseName, pattern) || (0, minimatch_1.minimatch)(relPath, pattern));
    return files.filter((candidate) => {
        const baseName = path.basename(candidate);
        const relPath = path.relative(process.cwd(), candidate);
        // Include patterns act as an allow-list when present.
        if (include && include.length > 0 && !matchesAny(include, baseName, relPath)) {
            return false;
        }
        // Exclude patterns always veto.
        if (exclude && exclude.length > 0 && matchesAny(exclude, baseName, relPath)) {
            return false;
        }
        // Suffix comparison is case-insensitive; a leading dot on the
        // configured suffix is optional.
        if (suffixes && suffixes.length > 0) {
            const ext = path.extname(baseName).toLowerCase();
            const excludedBySuffix = suffixes.some((suffix) => {
                const bare = suffix.startsWith('.') ? suffix.substring(1) : suffix;
                return ext === `.${bare.toLowerCase()}`;
            });
            if (excludedBySuffix) {
                return false;
            }
        }
        return true;
    });
}
369
/**
 * Returns the oldest modification time among all entries of a ZIP archive,
 * or null when the archive is empty, has no usable timestamps, or the
 * directory cannot be read.
 *
 * The packed timeDateDOS value is decoded with the date in the low 16 bits
 * (day/month/year-since-1980) and the time in the high 16 bits
 * (2-second-units/minutes/hours).
 * NOTE(review): shouldUpdateFile treats the same timeDateDOS field as seconds
 * since 1970 — one of the two interpretations is presumably wrong; confirm
 * against neozipkit's entry layout.
 *
 * @param zip Archive exposing an awaitable getDirectory(false).
 * @returns Oldest entry Date, or null.
 */
async function findOldestEntryTime(zip) {
    try {
        const entries = await zip.getDirectory(false) || [];
        if (entries.length === 0) {
            return null;
        }
        let oldest = null;
        for (const entry of entries) {
            const packed = entry.timeDateDOS || 0;
            if (packed === 0)
                continue; // entries without a timestamp are ignored
            // Low word: date fields.
            const day = packed & 0x1F;
            const month = (packed >> 5) & 0x0F;
            const year = ((packed >> 9) & 0x7F) + 1980;
            // High word: time fields (seconds stored at 2-second granularity).
            const twoSecondUnits = (packed >> 16) & 0x1F;
            const minutes = (packed >> 21) & 0x3F;
            const hours = (packed >> 27) & 0x1F;
            const when = new Date(year, month - 1, day, hours, minutes, twoSecondUnits * 2);
            if (oldest === null || when < oldest) {
                oldest = when;
            }
        }
        return oldest;
    }
    catch (error) {
        // Unreadable directory: report "no timestamp available".
        return null;
    }
}
403
/**
 * Delete the original files/directories after the archive has been written
 * successfully (zip -m "move" semantics).
 *
 * Bug fix: the previous version returned immediately when options.quiet was
 * set, so combining -m with quiet mode silently skipped deleting the
 * originals. Quiet mode must only suppress output, not the move itself;
 * output is already routed through utils' log(), which receives the options
 * object (presumably honoring quiet — confirm in ./utils).
 *
 * @param files   Paths that were added to the archive.
 * @param options CLI options (verbose enables per-file deletion messages).
 */
async function moveFiles(files, options) {
    (0, utils_1.log)('🗑️ Moving files (deleting originals)...', options);
    for (const file of files) {
        try {
            const stat = fs.statSync(file);
            if (stat.isFile()) {
                fs.unlinkSync(file);
                if (options.verbose) {
                    (0, utils_1.log)(` deleted: ${file}`, options);
                }
            }
            else if (stat.isDirectory()) {
                // Depth-first removal so each directory is empty before rmdir.
                const deleteRecursively = (dirPath) => {
                    for (const entry of fs.readdirSync(dirPath)) {
                        const fullPath = path.join(dirPath, entry);
                        const entryStat = fs.statSync(fullPath);
                        if (entryStat.isDirectory()) {
                            deleteRecursively(fullPath);
                        }
                        else {
                            fs.unlinkSync(fullPath);
                            if (options.verbose) {
                                (0, utils_1.log)(` deleted: ${fullPath}`, options);
                            }
                        }
                    }
                    fs.rmdirSync(dirPath);
                    if (options.verbose) {
                        (0, utils_1.log)(` deleted: ${dirPath}/`, options);
                    }
                };
                deleteRecursively(file);
            }
        }
        catch (error) {
            // Best-effort: a file we cannot delete is reported, not fatal.
            (0, utils_1.logError)(`Warning: Could not delete ${file}: ${error instanceof Error ? error.message : String(error)}`);
        }
    }
    (0, utils_1.log)('✅ Files moved successfully', options);
}
450
/**
 * Delete the named entries from an existing archive by rewriting it.
 *
 * Strategy (visible below): load the whole archive, partition entries into
 * "keep" and "delete", re-extract every kept entry's data, re-compress it,
 * and write a brand-new ZIP (local records + central directory + EOCD).
 *
 * NOTE(review) — behaviors worth confirming against the library/spec:
 * - Kept entries are RE-compressed with hardcoded settings (level 6, zstd),
 *   so their compression method/size may differ from the original archive.
 * - If extraction of a kept entry fails, the entry is still pushed into
 *   newEntries without a fileBuffer; it then gets no local record but its
 *   central-directory record is still emitted, leaving a dangling
 *   localHdrOffset in the output.
 * - The EOCD is written in the legacy 22-byte form with 16/32-bit counters,
 *   so archives with >= 65535 entries or >= 4 GiB of data are not supported
 *   by this path (no ZIP64 records are produced).
 *
 * @param archiveName   Path to the archive to modify.
 * @param filesToDelete Entry names to remove.
 * @param options       CLI options (debug/verbose control logging).
 */
async function deleteFromArchive(archiveName, filesToDelete, options) {
    if (!fs.existsSync(archiveName)) {
        console.error(`Error: Archive not found: ${archiveName}`);
        // exitZip presumably terminates the process — TODO confirm.
        (0, exit_codes_1.exitZip)(exit_codes_1.ZIP_EXIT_CODES.CANT_FIND_ARCHIVE);
    }
    (0, utils_1.log)(`🗑️ Deleting files from: ${archiveName}`, options);
    (0, utils_1.log)(`📁 Files to delete: ${filesToDelete.length} file(s)`, options);
    try {
        // Read the existing archive fully into memory.
        const zipData = fs.readFileSync(archiveName);
        const zip = new neozipkit_1.default();
        zip.loadZip(zipData);
        const entries = zip.getDirectory() || [];
        const originalCount = entries.length;
        if (options.debug) {
            (0, utils_1.logDebug)(`Original archive has ${originalCount} entries`, options);
            (0, utils_1.logDebug)(`Files to delete: ${filesToDelete.join(', ')}`, options);
        }
        // Partition the requested names into found / not-found.
        const entriesToDelete = [];
        const entriesNotFound = [];
        for (const fileToDelete of filesToDelete) {
            const found = entries.find((entry) => entry.filename === fileToDelete);
            if (found) {
                entriesToDelete.push(fileToDelete);
                if (options.verbose) {
                    (0, utils_1.log)(` Found: ${fileToDelete}`, options);
                }
            }
            else {
                entriesNotFound.push(fileToDelete);
                if (options.verbose) {
                    (0, utils_1.log)(` Not found: ${fileToDelete}`, options);
                }
            }
        }
        if (entriesNotFound.length > 0) {
            console.error(`Warning: ${entriesNotFound.length} file(s) not found in archive:`);
            entriesNotFound.forEach(file => console.error(` ${file}`));
        }
        if (entriesToDelete.length === 0) {
            console.error('Error: No files found to delete');
            (0, exit_codes_1.exitZip)(exit_codes_1.ZIP_EXIT_CODES.NO_FILES_MATCHED);
        }
        if (entriesToDelete.length === originalCount) {
            // Removing every entry: delete the archive file outright.
            fs.unlinkSync(archiveName);
            (0, utils_1.log)('✅ Archive deleted (all files removed)', options);
            return;
        }
        // Build the replacement archive from the surviving entries.
        const newZip = new node_1.default();
        const outParts = []; // NOTE(review): never used below.
        const newEntries = [];
        for (const entry of entries) {
            if (!entriesToDelete.includes(entry.filename)) {
                // Clone the entry's metadata onto a fresh ZipEntry.
                const newEntry = new neozipkit_2.ZipEntry(entry.filename);
                Object.assign(newEntry, entry);
                // Pull the entry's data out of the original archive bytes.
                try {
                    const existingData = await (async () => {
                        try {
                            // Fresh kit per entry; extraction failures yield null.
                            const kit = new node_1.default();
                            kit.loadZip(zipData);
                            return await kit.extract(entry);
                        }
                        catch {
                            return null;
                        }
                    })();
                    if (existingData) {
                        newEntry.fileBuffer = existingData;
                        newEntry.isUpdated = true;
                    }
                }
                catch (error) {
                    if (options.verbose) {
                        (0, utils_1.log)(`warning: Could not copy entry ${entry.filename}`, options);
                    }
                }
                newEntries.push(newEntry);
            }
        }
        // Emit local records: compress each buffered entry and track offsets.
        let currentOffset = 0;
        const zipDataChunks = [];
        for (const entry of newEntries) {
            if (entry.fileBuffer) {
                // Entry has pre-loaded buffer.
                const fileData = entry.fileBuffer;
                // Re-compress with fixed settings (see NOTE above).
                const compressedData = await newZip.compressFileBuffer(entry, fileData, {
                    level: 6,
                    useZstd: true,
                    useSHA256: false
                });
                // Record where this entry's local header begins.
                entry.localHdrOffset = currentOffset;
                zipDataChunks.push(compressedData);
                currentOffset += compressedData.length;
            }
        }
        // Central directory: one record per surviving entry.
        const centralDirChunks = [];
        for (const entry of newEntries) {
            const centralDirEntry = entry.centralDirEntry();
            centralDirChunks.push(centralDirEntry);
        }
        const centralDir = Buffer.concat(centralDirChunks);
        // Legacy 22-byte End Of Central Directory record (no ZIP64).
        const endOfCentralDir = Buffer.alloc(22);
        endOfCentralDir.writeUInt32LE(0x06054b50, 0); // Signature
        endOfCentralDir.writeUInt16LE(0, 4); // This disk
        endOfCentralDir.writeUInt16LE(0, 6); // Disk with CD
        endOfCentralDir.writeUInt16LE(newEntries.length, 8); // Entries on this disk
        endOfCentralDir.writeUInt16LE(newEntries.length, 10); // Total entries
        endOfCentralDir.writeUInt32LE(centralDir.length, 12); // CD size
        endOfCentralDir.writeUInt32LE(currentOffset, 16); // CD offset
        endOfCentralDir.writeUInt16LE(0, 20); // Comment length
        // Assemble and overwrite the archive atomically from memory.
        const compressedZipData = Buffer.concat(zipDataChunks);
        const newZipData = Buffer.concat([compressedZipData, centralDir, endOfCentralDir]);
        fs.writeFileSync(archiveName, newZipData);
        const remainingCount = originalCount - entriesToDelete.length;
        (0, utils_1.log)(`✅ Deleted ${entriesToDelete.length} file(s) from archive`, options);
        (0, utils_1.log)(`📁 Remaining files: ${remainingCount}`, options);
    }
    catch (error) {
        console.error(`Error: Failed to delete files from archive: ${error instanceof Error ? error.message : String(error)}`);
        (0, exit_codes_1.exitZip)(exit_codes_1.ZIP_EXIT_CODES.CANT_WRITE_ARCHIVE);
    }
}
588
// ============================================================================
// Large ZIP File EOCD Loading Functions
// ============================================================================
/**
 * Finds the End of Central Directory (EOCD) record by scanning backwards
 * from the end of the file, the way Info-ZIP does (the EOCD may be followed
 * by an archive comment of up to 65535 bytes).
 * @param fd - Open file descriptor (read access)
 * @param fileSize - Total file size in bytes
 * @returns { offset, isZip64 } of the EOCD record, or null when not found
 */
function locateEOCD(fd, fileSize) {
    const EOCD_SIG = 0x06054b50; // standard EOCD signature
    const ZIP64_LOCATOR_SIG = 0x07064b50; // ZIP64 EOCD locator signature
    const EOCD_MIN_SIZE = 22; // EOCD record without comment
    const MAX_COMMENT = 65535;
    // Only the tail of the file can contain the EOCD: record + max comment.
    const windowSize = Math.min(MAX_COMMENT + EOCD_MIN_SIZE, fileSize);
    const windowStart = Math.max(0, fileSize - windowSize);
    const tail = Buffer.alloc(windowSize);
    try {
        fs.readSync(fd, tail, 0, windowSize, windowStart);
    }
    catch (error) {
        (0, utils_1.logError)(`Failed to read file for EOCD search: ${error instanceof Error ? error.message : String(error)}`);
        return null;
    }
    // Scan backwards so the last (authoritative) EOCD wins.
    for (let i = tail.length - EOCD_MIN_SIZE; i >= 0; i--) {
        try {
            if (tail.readUInt32LE(i) !== EOCD_SIG) {
                continue;
            }
            const eocdOffset = windowStart + i;
            // A ZIP64 EOCD locator, when present, sits 20 bytes before the EOCD.
            const locatorOffset = eocdOffset - 20;
            if (locatorOffset >= 0) {
                try {
                    const locator = Buffer.alloc(20);
                    fs.readSync(fd, locator, 0, 20, locatorOffset);
                    if (locator.readUInt32LE(0) === ZIP64_LOCATOR_SIG) {
                        return { offset: eocdOffset, isZip64: true };
                    }
                }
                catch (error) {
                    // No readable ZIP64 locator; fall through to standard EOCD.
                }
            }
            return { offset: eocdOffset, isZip64: false };
        }
        catch (error) {
            // Unreadable position; keep scanning backwards.
        }
    }
    return null;
}
645
/**
 * Scans a central-directory buffer and parses every entry found.
 *
 * Fix: the resynchronisation step after a non-matching or unparsable
 * position previously advanced 4 bytes at a time, which could jump past a
 * valid signature sitting at a non-4-aligned offset in a damaged archive.
 * Scanning now advances one byte at a time; for intact archives the behavior
 * is unchanged because entries are contiguous and each parsed entry advances
 * the cursor by its exact record size.
 *
 * @param cdBuffer - Buffer containing central directory data
 * @returns Array of ZipEntry objects (empty when nothing parses)
 */
function parseCentralDirectoryEntries(cdBuffer) {
    const entries = [];
    try {
        // Central Directory File Header signature.
        const CD_SIGNATURE = 0x02014b50;
        let offset = 0;
        while (offset < cdBuffer.length - 4) {
            if (cdBuffer.readUInt32LE(offset) === CD_SIGNATURE) {
                try {
                    const entry = parseCentralDirectoryEntry(cdBuffer, offset);
                    if (entry) {
                        entries.push(entry);
                    }
                    // Record size = fixed 46-byte header + three variable fields.
                    const filenameLength = cdBuffer.readUInt16LE(offset + 28);
                    const extraFieldLength = cdBuffer.readUInt16LE(offset + 30);
                    const commentLength = cdBuffer.readUInt16LE(offset + 32);
                    offset += 46 + filenameLength + extraFieldLength + commentLength;
                }
                catch (error) {
                    // Entry could not be parsed; resync one byte at a time.
                    offset += 1;
                }
            }
            else {
                offset += 1;
            }
        }
    }
    catch (error) {
        (0, utils_1.logError)(`Error parsing central directory entries: ${error instanceof Error ? error.message : String(error)}`);
    }
    return entries;
}
688
/**
 * Parses a single Central Directory File Header into a ZipEntry.
 *
 * Field offsets follow the fixed 46-byte header layout (PKWARE APPNOTE
 * section 4.3.12); the filename immediately follows the fixed portion.
 *
 * NOTE(review): the filename is always decoded as UTF-8 — the general-
 * purpose bit 11 (language encoding flag) read into generalPurposeBitFlag is
 * not consulted, so legacy CP437 names would be mis-decoded; confirm intent.
 * NOTE(review): ZIP64 extra fields are not parsed here, so 0xFFFFFFFF
 * size/offset sentinels are stored verbatim on the entry.
 *
 * @param data - Buffer containing central directory data
 * @param offset - Offset of the entry's signature within the buffer
 * @returns ZipEntry object or null if parsing fails
 */
function parseCentralDirectoryEntry(data, offset) {
    try {
        // Central Directory File Header structure (46 bytes + variable length fields)
        const versionMadeBy = data.readUInt16LE(offset + 4); // who wrote it + host OS
        const versionNeeded = data.readUInt16LE(offset + 6); // min version to extract
        const generalPurposeBitFlag = data.readUInt16LE(offset + 8);
        const compressionMethod = data.readUInt16LE(offset + 10);
        const lastModTime = data.readUInt16LE(offset + 12); // DOS time word
        const lastModDate = data.readUInt16LE(offset + 14); // DOS date word
        const crc32 = data.readUInt32LE(offset + 16);
        const compressedSize = data.readUInt32LE(offset + 20);
        const uncompressedSize = data.readUInt32LE(offset + 24);
        const filenameLength = data.readUInt16LE(offset + 28);
        const extraFieldLength = data.readUInt16LE(offset + 30);
        const commentLength = data.readUInt16LE(offset + 32);
        const diskNumberStart = data.readUInt16LE(offset + 34);
        const internalFileAttributes = data.readUInt16LE(offset + 36);
        const externalFileAttributes = data.readUInt32LE(offset + 38);
        const localHeaderOffset = data.readUInt32LE(offset + 42);
        // Filename immediately follows the 46-byte fixed header.
        const filename = data.subarray(offset + 46, offset + 46 + filenameLength).toString('utf8');
        // Convert the DOS date/time pair into a millisecond timestamp.
        const modifiedTime = dosDateTimeToTimestamp(lastModDate, lastModTime);
        // Populate a ZipEntry with the parsed metadata (no file data here;
        // the entry only references the local header via localHdrOffset).
        const entry = new neozipkit_2.ZipEntry(filename);
        entry.filename = filename;
        entry.uncompressedSize = uncompressedSize;
        entry.compressedSize = compressedSize;
        entry.cmpMethod = compressionMethod;
        entry.crc = crc32;
        entry.lastModTimeDate = modifiedTime;
        entry.verMadeBy = versionMadeBy;
        entry.verExtract = versionNeeded;
        entry.bitFlags = generalPurposeBitFlag;
        entry.intFileAttr = internalFileAttributes;
        entry.extFileAttr = externalFileAttributes;
        entry.localHdrOffset = localHeaderOffset;
        entry.volNumber = diskNumberStart;
        return entry;
    }
    catch (error) {
        (0, utils_1.logError)(`Error parsing central directory entry at offset ${offset}: ${error instanceof Error ? error.message : String(error)}`);
        return null;
    }
}
739
/**
 * Converts an MS-DOS date/time pair to a JavaScript timestamp (milliseconds
 * since the epoch, interpreted in local time).
 *
 * Date word layout: bits 0-4 day (1-31), bits 5-8 month (1-12),
 * bits 9-15 years since 1980.
 * Time word layout: bits 0-4 seconds/2, bits 5-10 minutes, bits 11-15 hours.
 *
 * @param dosDate - DOS date value
 * @param dosTime - DOS time value
 * @returns JavaScript timestamp in milliseconds
 */
function dosDateTimeToTimestamp(dosDate, dosTime) {
    const day = dosDate & 0x1F;
    const month = (dosDate >> 5) & 0x0F;
    const year = 1980 + ((dosDate >> 9) & 0x7F);
    const seconds = (dosTime & 0x1F) * 2; // stored at 2-second granularity
    const minutes = (dosTime >> 5) & 0x3F;
    const hours = (dosTime >> 11) & 0x1F;
    return new Date(year, month - 1, day, hours, minutes, seconds).getTime();
}
758
/**
 * Reads and parses the standard End of Central Directory record.
 * @param fd - File descriptor
 * @param eocdOffset - Byte offset of the EOCD record
 * @returns Parsed EOCD fields, or null on a bad signature / read failure
 */
function parseEOCDRecord(fd, eocdOffset) {
    try {
        const record = Buffer.alloc(22); // fixed EOCD size, comment excluded
        fs.readSync(fd, record, 0, 22, eocdOffset);
        if (record.readUInt32LE(0) !== 0x06054b50) {
            return null;
        }
        const entriesOnDisk = record.readUInt16LE(8);
        const totalEntries = record.readUInt16LE(10);
        const cdSize = record.readUInt32LE(12);
        const cdOffset = record.readUInt32LE(16);
        const commentLength = record.readUInt16LE(20);
        // All-ones sentinel fields mean the real values live in a ZIP64 EOCD.
        const isZip64Marker = (totalEntries === 0xFFFF || cdSize === 0xFFFFFFFF || cdOffset === 0xFFFFFFFF);
        return {
            entriesOnDisk,
            totalEntries,
            cdSize,
            cdOffset,
            commentLength,
            isZip64Marker
        };
    }
    catch (error) {
        (0, utils_1.logError)(`Failed to parse EOCD record: ${error instanceof Error ? error.message : String(error)}`);
        return null;
    }
}
794
/**
 * Reads the ZIP64 EOCD locator (which sits 20 bytes before the standard
 * EOCD) and then the ZIP64 EOCD record it points to.
 * @param fd - File descriptor
 * @param eocdOffset - Offset of the standard EOCD record
 * @returns { totalEntries, cdSize, cdOffset } as BigInt values, or null
 */
function parseZip64EOCD(fd, eocdOffset) {
    try {
        // Locator: 20 bytes immediately preceding the standard EOCD.
        const locator = Buffer.alloc(20);
        fs.readSync(fd, locator, 0, 20, eocdOffset - 20);
        if (locator.readUInt32LE(0) !== 0x07064b50) {
            return null;
        }
        // Bytes 8-15 of the locator hold the ZIP64 EOCD record's offset.
        const recordOffset = Number(locator.readBigUInt64LE(8));
        // Fixed portion of the ZIP64 EOCD record is 56 bytes.
        const record = Buffer.alloc(56);
        fs.readSync(fd, record, 0, 56, recordOffset);
        if (record.readUInt32LE(0) !== 0x06064b50) {
            return null;
        }
        return {
            totalEntries: record.readBigUInt64LE(24),
            cdSize: record.readBigUInt64LE(40),
            cdOffset: record.readBigUInt64LE(48)
        };
    }
    catch (error) {
        (0, utils_1.logError)(`Failed to parse ZIP64 EOCD record: ${error instanceof Error ? error.message : String(error)}`);
        return null;
    }
}
833
/**
 * Loads an existing ZIP archive using optimized large-file handling:
 * locate the EOCD by backwards scan, read ONLY the central directory plus
 * the EOCD/comment tail, and feed that synthesized buffer to neozipkit.
 * The bulk of the file (local records + compressed data) is never read.
 *
 * NOTE(review): the synthesized buffer places the central directory at
 * offset 0, but the EOCD's "CD offset" field still holds the ORIGINAL file
 * position — this relies on neozipkit's loadZip resolving the CD relative to
 * the EOCD (or rescanning) rather than trusting that field; confirm.
 * NOTE(review): totalEntries is computed below but never used.
 *
 * @param archiveName - Path to the ZIP archive
 * @returns Zipkit instance with directory metadata, or null if loading fails
 */
function loadExistingArchiveLarge(archiveName) {
    if (!fs.existsSync(archiveName)) {
        return null;
    }
    let fd = null;
    try {
        // Open for random access so we can seek straight to the tail.
        fd = fs.openSync(archiveName, 'r');
        const stats = fs.fstatSync(fd);
        const fileSize = stats.size;
        if (fileSize < 22) {
            // Smaller than a bare EOCD record: cannot be a valid ZIP.
            return null;
        }
        // Step 1: find the EOCD record (and whether a ZIP64 locator precedes it).
        const eocdLocation = locateEOCD(fd, fileSize);
        if (!eocdLocation) {
            (0, utils_1.logError)(`EOCD signature not found in ${archiveName}`);
            return null;
        }
        // Step 2: parse the standard EOCD fields.
        const eocdInfo = parseEOCDRecord(fd, eocdLocation.offset);
        if (!eocdInfo) {
            (0, utils_1.logError)(`Failed to parse EOCD record in ${archiveName}`);
            return null;
        }
        let cdSize;
        let cdOffset;
        let totalEntries;
        // Step 3: when both the sentinel values AND a ZIP64 locator are
        // present, take the authoritative values from the ZIP64 EOCD record.
        if (eocdInfo.isZip64Marker && eocdLocation.isZip64) {
            const zip64Info = parseZip64EOCD(fd, eocdLocation.offset);
            if (!zip64Info) {
                (0, utils_1.logError)(`Failed to parse ZIP64 EOCD record in ${archiveName}`);
                return null;
            }
            // BigInt -> Number; safe for sizes below 2^53 bytes.
            cdSize = Number(zip64Info.cdSize);
            cdOffset = Number(zip64Info.cdOffset);
            totalEntries = Number(zip64Info.totalEntries);
        }
        else {
            // Standard 32/16-bit EOCD values.
            cdSize = eocdInfo.cdSize;
            cdOffset = eocdInfo.cdOffset;
            totalEntries = eocdInfo.totalEntries;
        }
        // Step 4: sanity-check that the CD actually fits inside the file.
        if (cdOffset < 0 || cdSize <= 0 || cdOffset + cdSize > fileSize) {
            (0, utils_1.logError)(`Invalid central directory bounds in ${archiveName}: offset=${cdOffset}, size=${cdSize}, fileSize=${fileSize}`);
            return null;
        }
        // Step 5: read just the central directory.
        const cdBuffer = Buffer.alloc(cdSize);
        fs.readSync(fd, cdBuffer, 0, cdSize, cdOffset);
        // First record must carry the CD file-header signature.
        if (cdBuffer.readUInt32LE(0) !== 0x02014b50) {
            (0, utils_1.logError)(`Invalid central directory signature in ${archiveName}`);
            return null;
        }
        // Step 6: read the EOCD record plus any trailing comment verbatim.
        const eocdAndCommentSize = fileSize - eocdLocation.offset;
        const eocdAndCommentBuffer = Buffer.alloc(eocdAndCommentSize);
        fs.readSync(fd, eocdAndCommentBuffer, 0, eocdAndCommentSize, eocdLocation.offset);
        // Step 7: hand neozipkit a minimal "archive" of CD + EOCD + comment
        // (see the offset-field caveat in the function comment above).
        const completeZipBuffer = Buffer.concat([cdBuffer, eocdAndCommentBuffer]);
        const zip = new neozipkit_1.default();
        zip.loadZip(completeZipBuffer);
        return zip;
    }
    catch (error) {
        (0, utils_1.logError)(`Failed to load large ZIP archive ${archiveName}: ${error instanceof Error ? error.message : String(error)}`);
        return null;
    }
    finally {
        // Always release the descriptor, even on early returns/throws.
        if (fd !== null) {
            try {
                fs.closeSync(fd);
            }
            catch (error) {
                // Ignore close errors
            }
        }
    }
}
924
+ //# sourceMappingURL=file-operations.js.map