neozip-cli 0.70.0-alpha
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +72 -0
- package/DOCUMENTATION.md +194 -0
- package/LICENSE +22 -0
- package/README.md +504 -0
- package/WHY_NEOZIP.md +212 -0
- package/bin/neolist +16 -0
- package/bin/neounzip +16 -0
- package/bin/neozip +15 -0
- package/dist/neozipkit-bundles/blockchain.js +13091 -0
- package/dist/neozipkit-bundles/browser.js +5733 -0
- package/dist/neozipkit-bundles/core.js +3766 -0
- package/dist/neozipkit-bundles/server.js +14996 -0
- package/dist/neozipkit-wrappers/blockchain/core/contracts.js +16 -0
- package/dist/neozipkit-wrappers/blockchain/index.js +2 -0
- package/dist/neozipkit-wrappers/core/ZipDecompress.js +2 -0
- package/dist/neozipkit-wrappers/core/components/HashCalculator.js +2 -0
- package/dist/neozipkit-wrappers/core/components/Logger.js +2 -0
- package/dist/neozipkit-wrappers/core/constants/Errors.js +2 -0
- package/dist/neozipkit-wrappers/core/constants/Headers.js +2 -0
- package/dist/neozipkit-wrappers/core/encryption/ZipCrypto.js +7 -0
- package/dist/neozipkit-wrappers/core/index.js +3 -0
- package/dist/neozipkit-wrappers/index.js +13 -0
- package/dist/neozipkit-wrappers/server/index.js +2 -0
- package/dist/src/config/ConfigSetup.js +455 -0
- package/dist/src/config/ConfigStore.js +373 -0
- package/dist/src/config/ConfigWizard.js +453 -0
- package/dist/src/config/WalletConfig.js +372 -0
- package/dist/src/exit-codes.js +210 -0
- package/dist/src/index.js +141 -0
- package/dist/src/neolist.js +1194 -0
- package/dist/src/neounzip.js +2177 -0
- package/dist/src/neozip/CommentManager.js +240 -0
- package/dist/src/neozip/blockchain.js +383 -0
- package/dist/src/neozip/createZip.js +2273 -0
- package/dist/src/neozip/file-operations.js +920 -0
- package/dist/src/neozip/types.js +6 -0
- package/dist/src/neozip/user-interaction.js +256 -0
- package/dist/src/neozip/utils.js +96 -0
- package/dist/src/neozip.js +785 -0
- package/dist/src/server/CommentManager.js +240 -0
- package/dist/src/version.js +59 -0
- package/env.example +101 -0
- package/package.json +175 -0
|
@@ -0,0 +1,920 @@
|
|
|
1
|
+
"use strict";
/**
 * File operations for NeoZip CLI
 *
 * Compiled CommonJS output. The interop helpers below are emitted by the
 * TypeScript compiler to let `import * as ns` work over CommonJS modules.
 */
// TS-emitted helper: re-export property `k` of module `m` as `k2` on
// namespace object `o`, preserving live-getter semantics where possible.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// TS-emitted helper: install `v` as the `default` export of namespace `o`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// TS-emitted helper: wrap a CommonJS module so it can be consumed with
// ES-module `import * as ns` semantics (copies own keys, adds `default`).
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
// Public API of this module (all are hoisted function declarations below).
exports.shouldUpdateFile = shouldUpdateFile;
exports.loadExistingArchive = loadExistingArchive;
exports.collectFilesRecursively = collectFilesRecursively;
exports.filterFiles = filterFiles;
exports.findOldestEntryTime = findOldestEntryTime;
exports.moveFiles = moveFiles;
exports.deleteFromArchive = deleteFromArchive;
exports.loadExistingArchiveLarge = loadExistingArchiveLarge;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const minimatch_1 = require("minimatch"); // glob matching for include/exclude filters
const src_1 = __importStar(require('../../neozipkit-wrappers')); // zip kit (default export) + ZipEntry
const server_1 = require('../../neozipkit-wrappers/server'); // ZipkitServer: compress/extract
const utils_1 = require("./utils"); // log / logError / logDebug helpers
const exit_codes_1 = require("../exit-codes"); // InfoZip-compatible exit codes
|
|
54
|
+
/**
 * Decide whether a file on disk should replace its counterpart in an archive.
 *
 * Semantics follow InfoZip's update/freshen modes:
 * - update (`isFreshen` false): add files missing from the archive, and
 *   replace entries whose on-disk file is strictly newer;
 * - freshen (`isFreshen` true): only replace entries already present that are
 *   strictly newer on disk — never add new ones.
 *
 * @param {string} entryName - Archive-relative entry name to look up.
 * @param {fs.Stats} fileStat - Stats of the on-disk file (uses `mtimeMs`).
 * @param {object|null} existingArchive - Loaded archive exposing `getDirectory()`, or null.
 * @param {boolean} [isFreshen=false] - Freshen mode flag (see above).
 * @returns {Promise<boolean>} true when the file should be (re)added.
 */
async function shouldUpdateFile(entryName, fileStat, existingArchive, isFreshen = false) {
    // No archive yet: update mode adds everything, freshen mode adds nothing.
    if (!existingArchive) {
        return !isFreshen;
    }
    try {
        const directory = existingArchive.getDirectory() || [];
        const archived = directory.find((candidate) => candidate.filename === entryName);
        if (!archived) {
            // Entry absent from the archive: add it for update, skip for freshen.
            return !isFreshen;
        }
        // Resolve the archived entry's modification time as ms since 1970,
        // preferring high-resolution extended timestamps over DOS-era fields.
        const resolveArchiveMtime = () => {
            if (archived.ntfsTime?.mtime) {
                return archived.ntfsTime.mtime;
            }
            if (archived.extendedTime?.mtime) {
                return archived.extendedTime.mtime;
            }
            if (archived.parseDateTime && archived.lastModTimeDate) {
                // Same decoding path neolist uses.
                const parsed = archived.parseDateTime(archived.lastModTimeDate);
                return parsed ? parsed.getTime() : 0;
            }
            if (archived.lastModTimeDate) {
                return new Date(archived.lastModTimeDate).getTime();
            }
            if (archived.timeDateDOS) {
                // NOTE(review): treated here as seconds since 1970, yet
                // findOldestEntryTime decodes the same field as a packed DOS
                // bitfield — confirm the library's timeDateDOS convention.
                return archived.timeDateDOS * 1000;
            }
            return null; // No usable timestamp on the entry.
        };
        const existingTime = resolveArchiveMtime();
        if (existingTime === null) {
            // Cannot compare — err on the side of updating.
            return true;
        }
        const fileTime = fileStat.mtimeMs;
        // Optional diagnostics, enabled with DEBUG_UPDATE=1.
        if (process.env.DEBUG_UPDATE) {
            console.log(`Date comparison for ${entryName}:`);
            console.log(` File time: ${new Date(fileTime).toISOString()}`);
            console.log(` Archive time: ${new Date(existingTime).toISOString()}`);
            console.log(` File newer: ${fileTime > existingTime}`);
            console.log(` Timestamp sources:`, {
                ntfsTime: archived.ntfsTime,
                extendedTime: archived.extendedTime,
                timeDateDOS: archived.timeDateDOS,
                lastModTimeDate: archived.lastModTimeDate,
                hasParseDateTime: !!archived.parseDateTime
            });
        }
        // Compare at whole-second precision so sub-second jitter between the
        // filesystem and the archive format is ignored.
        const fileSeconds = Math.floor(fileTime / 1000);
        const archiveSeconds = Math.floor(existingTime / 1000);
        return fileSeconds > archiveSeconds;
    }
    catch (error) {
        // On any lookup failure, assume the file needs updating.
        return true;
    }
}
|
|
118
|
+
/**
 * Load an existing archive from disk.
 *
 * @param {string} archiveName - Path to the ZIP file.
 * @param {object} [options] - Optional flags; `inMemory` selects the
 *   EOCD-based loader optimized for large archives.
 * @returns {object|null} A loaded zip instance, or null when the file is
 *   missing or unparseable (callers then treat the archive as new).
 * @throws {Error} When the optimized large-file loader itself fails: that
 *   indicates a corrupt/unsupported archive rather than a missing one.
 */
function loadExistingArchive(archiveName, options) {
    if (!fs.existsSync(archiveName)) {
        return null;
    }
    // For the in-memory option, use the optimized large-file (EOCD) loader.
    // BUG FIX: this branch used to live inside the outer try/catch below, so
    // the deliberate abort `throw` was swallowed and the function silently
    // returned null; it is now outside so the error actually propagates.
    if (options?.inMemory) {
        try {
            // A null result means "not an existing/valid archive" — treat as new.
            return loadExistingArchiveLarge(archiveName) || null;
        }
        catch (error) {
            // Abort only when the large loader actually throws, and surface
            // the underlying cause so the corruption can be diagnosed.
            const reason = error instanceof Error ? error.message : String(error);
            throw new Error(`Failed to load large ZIP archive ${archiveName} using optimized EOCD loading. The archive may be corrupted or in an unsupported format. Cause: ${reason}`);
        }
    }
    try {
        // Standard loader: read the whole file and parse it in memory.
        const data = fs.readFileSync(archiveName);
        const zip = new src_1.default();
        zip.loadZip(data);
        return zip;
    }
    catch (error) {
        // If we can't load the existing archive, treat it as new.
        return null;
    }
}
|
|
152
|
+
/**
 * Recursively collect files (and symbolic/hard links) from the given paths.
 *
 * BUG FIX: the previous implementation followed a symlink pointing at a
 * directory by calling `collectFilesRecursively([sameSymlinkPath])`, which
 * re-entered the same symlink branch and recursed forever (stack overflow).
 * Followed directories are now walked by content, and a set of resolved real
 * paths guards against symlink cycles.
 *
 * @param {string[]} inputPaths - Files/directories/symlinks to collect.
 * @param {object} options - Flags: `recurse`, `junkPaths`, `symlinks`
 *   (store links instead of following), `hardLinks`, `debug`.
 * @param {Set<string>} [visitedDirs] - Resolved directories already walked
 *   (internal; new optional parameter, defaults keep the old call signature).
 * @returns {Array<object>} Collected entries with entryName/absPath/stat/etc.
 */
function collectFilesRecursively(inputPaths, options, visitedDirs = new Set()) {
    const collectedFiles = [];
    // Track inodes to collapse hard links (Unix-like systems only).
    // Shared across the whole walk so links in different subtrees are matched.
    const inodeMap = new Map(); // inode -> entry name of first occurrence

    // Archive entry name for a path: basename when junking paths, otherwise
    // the relative path when one was computed (top-level inputs keep the
    // path exactly as given, matching the original behavior).
    const entryNameFor = (p, relative) => (options.junkPaths ? path.basename(p) : (relative ?? p));

    /** Record a regular file, collapsing hard links when requested. */
    function processFile(filePath, entryName, lstat) {
        if (options.hardLinks && process.platform !== 'win32' && lstat.nlink > 1) {
            const inode = lstat.ino;
            if (inodeMap.has(inode)) {
                // Hard link to a file we already collected.
                const originalEntry = inodeMap.get(inode);
                collectedFiles.push({
                    entryName: entryName,
                    absPath: filePath,
                    displayPath: filePath,
                    stat: lstat,
                    isDirectory: false,
                    isHardLink: true,
                    originalEntry: originalEntry,
                    inode: inode
                });
                if (options.debug) {
                    console.log(`🔗 Hard link detected: ${entryName} -> ${originalEntry} (inode: ${inode})`);
                }
            }
            else {
                // First occurrence of this inode.
                inodeMap.set(inode, entryName);
                collectedFiles.push({
                    entryName: entryName,
                    absPath: filePath,
                    displayPath: filePath,
                    stat: lstat,
                    isDirectory: false,
                    inode: inode
                });
            }
        }
        else {
            // Regular file (hard-link collapsing off or unsupported platform).
            collectedFiles.push({
                entryName: entryName,
                absPath: filePath,
                displayPath: filePath,
                stat: lstat,
                isDirectory: false
            });
        }
    }

    /** Record a symlink entry itself (used when options.symlinks is set). */
    function recordSymlink(linkPath, entryName, lstat) {
        const linkTarget = fs.readlinkSync(linkPath);
        collectedFiles.push({
            entryName: entryName,
            absPath: linkPath,
            displayPath: linkPath,
            stat: lstat,
            isDirectory: false,
            isSymlink: true,
            linkTarget: linkTarget
        });
    }

    /**
     * Follow a symlink to whatever it points at (default behavior when the
     * symlinks option is off). Warns and skips broken links.
     * `requireRecurse` preserves the original rule that top-level symlinked
     * directories are only descended into when --recurse is given.
     */
    function followSymlink(linkPath, relativePath, requireRecurse) {
        try {
            const stat = fs.statSync(linkPath); // resolves the link
            if (stat.isFile()) {
                collectedFiles.push({
                    entryName: entryNameFor(linkPath, relativePath),
                    absPath: linkPath,
                    displayPath: linkPath,
                    stat: stat,
                    isDirectory: false
                });
            }
            else if (stat.isDirectory() && (!requireRecurse || options.recurse)) {
                // Walk the target's contents instead of recursing on the
                // symlink path itself (the old infinite-recursion bug).
                walkDirectory(linkPath);
            }
        }
        catch (error) {
            console.error(`Warning: Cannot follow symbolic link ${linkPath}: ${error instanceof Error ? error.message : String(error)}`);
        }
    }

    /**
     * Walk the contents of a directory (a real one, or the target of a
     * followed symlink). A resolved-path visited set breaks symlink cycles.
     */
    function walkDirectory(dirPath) {
        let realDir;
        try {
            realDir = fs.realpathSync(dirPath);
        }
        catch {
            realDir = dirPath; // unresolvable: fall back to the literal path
        }
        if (visitedDirs.has(realDir)) {
            return; // already walked this physical directory — cycle guard
        }
        visitedDirs.add(realDir);
        for (const item of fs.readdirSync(dirPath)) {
            const itemPath = path.join(dirPath, item);
            const itemLstat = fs.lstatSync(itemPath);
            const relativePath = path.relative(process.cwd(), itemPath);
            if (itemLstat.isSymbolicLink()) {
                if (options.symlinks) {
                    recordSymlink(itemPath, entryNameFor(itemPath, relativePath), itemLstat);
                }
                else {
                    followSymlink(itemPath, relativePath, false);
                }
            }
            else if (itemLstat.isFile()) {
                processFile(itemPath, entryNameFor(itemPath, relativePath), itemLstat);
            }
            else if (itemLstat.isDirectory()) {
                walkDirectory(itemPath);
            }
        }
    }

    for (const inputPath of inputPaths) {
        if (!fs.existsSync(inputPath)) {
            console.error(`Error: Path not found: ${inputPath}`);
            (0, exit_codes_1.exitZip)(exit_codes_1.ZIP_EXIT_CODES.CANT_FIND_ARCHIVE);
        }
        // Use lstat so symbolic links are seen as links, not their targets.
        const lstat = fs.lstatSync(inputPath);
        if (lstat.isSymbolicLink()) {
            if (options.symlinks) {
                recordSymlink(inputPath, entryNameFor(inputPath), lstat);
            }
            else {
                followSymlink(inputPath, undefined, true);
            }
        }
        else if (lstat.isFile()) {
            processFile(inputPath, entryNameFor(inputPath), lstat);
        }
        else if (lstat.isDirectory()) {
            if (options.recurse) {
                walkDirectory(inputPath);
            }
            else {
                console.error(`Error: ${inputPath} is a directory. Use -r/--recurse to include directories.`);
                (0, exit_codes_1.exitZip)(exit_codes_1.ZIP_EXIT_CODES.CANT_CREATE_FILE);
            }
        }
    }
    return collectedFiles;
}
|
|
326
|
+
/**
 * Filter a list of file paths against include/exclude glob patterns and a
 * list of excluded filename suffixes.
 *
 * Globs are matched (via minimatch) against both the bare filename and the
 * path relative to the current working directory; suffix comparison is
 * case-insensitive and tolerates a leading dot on each suffix.
 *
 * @param {string[]} files - Candidate file paths.
 * @param {object} options - `include`, `exclude` (glob arrays) and
 *   `suffixes` (extensions to skip compressing).
 * @returns {string[]} The paths that survive all three checks.
 */
function filterFiles(files, options) {
    const { include, exclude, suffixes } = options;
    // Fast path: nothing to filter on.
    if (!include && !exclude && !suffixes) {
        return files;
    }
    // A path matches a pattern set when any pattern hits either the bare
    // filename or the cwd-relative path.
    const matchesAny = (patterns, fileName, relativePath) => patterns.some((pattern) => (0, minimatch_1.minimatch)(fileName, pattern) || (0, minimatch_1.minimatch)(relativePath, pattern));
    // Pre-normalize suffixes once: strip an optional leading dot, lowercase,
    // and re-add the dot so they compare directly against path.extname().
    const normalizedSuffixes = (suffixes ?? []).map((suffix) => `.${(suffix.startsWith('.') ? suffix.substring(1) : suffix).toLowerCase()}`);
    return files.filter((filePath) => {
        const fileName = path.basename(filePath);
        const relativePath = path.relative(process.cwd(), filePath);
        // Include patterns act as an allow-list when present.
        if (include && include.length > 0 && !matchesAny(include, fileName, relativePath)) {
            return false;
        }
        // Exclude patterns veto anything they match.
        if (exclude && exclude.length > 0 && matchesAny(exclude, fileName, relativePath)) {
            return false;
        }
        // Suffix exclusion (files whose extension should not be compressed).
        if (normalizedSuffixes.length > 0) {
            const fileExtension = path.extname(fileName).toLowerCase();
            if (normalizedSuffixes.includes(fileExtension)) {
                return false;
            }
        }
        return true;
    });
}
|
|
365
|
+
/**
 * Scan an archive's directory and return the modification Date of its
 * oldest entry.
 *
 * @param {object} zip - Archive exposing `getDirectory(false)` (awaited).
 * @returns {Promise<Date|null>} Oldest entry time, or null when the archive
 *   is empty, has no usable timestamps, or the directory cannot be read.
 */
async function findOldestEntryTime(zip) {
    try {
        const entries = await zip.getDirectory(false) || [];
        let oldest = null;
        for (const entry of entries) {
            const dosTime = entry.timeDateDOS || 0;
            if (dosTime === 0) {
                continue; // no timestamp recorded for this entry
            }
            // Unpack the packed DOS date/time field.
            // NOTE(review): this layout places the date in the low 16 bits and
            // the time in the high 16 bits — the reverse of the classic
            // MS-DOS/ZIP packing (time low, date high). Confirm against the
            // library's timeDateDOS convention before relying on the values.
            const day = dosTime & 0x1F;
            const month = (dosTime >> 5) & 0x0F;
            const year = ((dosTime >> 9) & 0x7F) + 1980; // years since 1980
            const halfSeconds = (dosTime >> 16) & 0x1F;  // stored as seconds / 2
            const minutes = (dosTime >> 21) & 0x3F;
            const hours = (dosTime >> 27) & 0x1F;
            const candidate = new Date(year, month - 1, day, hours, minutes, halfSeconds * 2);
            if (oldest === null || candidate < oldest) {
                oldest = candidate;
            }
        }
        return oldest;
    }
    catch (error) {
        // Unreadable directory: no oldest time available.
        return null;
    }
}
|
|
399
|
+
/**
 * "Move" files into the archive: delete the originals after a successful
 * ZIP creation. Directories are removed recursively, depth-first.
 *
 * BUG FIX: the function previously began with `if (options.quiet) return;`,
 * so --move combined with --quiet silently left the originals in place.
 * Quiet mode should only suppress output — log() receives `options` and is
 * presumably responsible for that (verify it honours options.quiet) — so
 * the deletion now always runs.
 *
 * @param {string[]} files - Paths (files or directories) to delete.
 * @param {object} options - Flags: `verbose`, `quiet` (output only).
 * @returns {Promise<void>}
 */
async function moveFiles(files, options) {
    (0, utils_1.log)('🗑️ Moving files (deleting originals)...', options);
    /** Depth-first removal of a directory tree, logging each deletion when verbose. */
    const deleteRecursively = (dirPath) => {
        for (const entry of fs.readdirSync(dirPath)) {
            const fullPath = path.join(dirPath, entry);
            if (fs.statSync(fullPath).isDirectory()) {
                deleteRecursively(fullPath);
            }
            else {
                fs.unlinkSync(fullPath);
                if (options.verbose) {
                    (0, utils_1.log)(` deleted: ${fullPath}`, options);
                }
            }
        }
        // Children are gone; the directory itself can now be removed.
        fs.rmdirSync(dirPath);
        if (options.verbose) {
            (0, utils_1.log)(` deleted: ${dirPath}/`, options);
        }
    };
    for (const file of files) {
        try {
            const stat = fs.statSync(file);
            if (stat.isFile()) {
                fs.unlinkSync(file);
                if (options.verbose) {
                    (0, utils_1.log)(` deleted: ${file}`, options);
                }
            }
            else if (stat.isDirectory()) {
                deleteRecursively(file);
            }
        }
        catch (error) {
            // Best-effort: a failed deletion is reported but never fatal.
            (0, utils_1.logError)(`Warning: Could not delete ${file}: ${error instanceof Error ? error.message : String(error)}`);
        }
    }
    (0, utils_1.log)('✅ Files moved successfully', options);
}
|
|
446
|
+
/**
 * Delete the named entries from an existing archive by rebuilding it.
 *
 * The surviving entries are extracted, re-compressed, and written back as a
 * brand-new ZIP (local records, central directory, then a classic 22-byte
 * EOCD). Deleting every entry simply removes the archive file.
 *
 * @param archiveName - Path to the archive on disk.
 * @param filesToDelete - Exact entry names (as stored) to remove.
 * @param options - Flags: verbose, debug, quiet (logging only).
 * @returns Resolves on success; exits the process via exitZip on failure.
 *
 * NOTE(review): the rebuild re-loads the whole archive once per surviving
 * entry (see the per-entry ZipkitServer below) — O(n · archiveSize); and the
 * hand-built EOCD has no ZIP64 support, so archives with >65535 entries or
 * >4 GiB offsets would be written with truncated fields. Both are worth
 * revisiting.
 */
async function deleteFromArchive(archiveName, filesToDelete, options) {
    if (!fs.existsSync(archiveName)) {
        console.error(`Error: Archive not found: ${archiveName}`);
        (0, exit_codes_1.exitZip)(exit_codes_1.ZIP_EXIT_CODES.CANT_FIND_ARCHIVE);
    }
    (0, utils_1.log)(`🗑️ Deleting files from: ${archiveName}`, options);
    (0, utils_1.log)(`📁 Files to delete: ${filesToDelete.length} file(s)`, options);
    try {
        // Read and parse the existing archive once up front.
        const zipData = fs.readFileSync(archiveName);
        const zip = new src_1.default();
        zip.loadZip(zipData);
        const entries = zip.getDirectory() || [];
        const originalCount = entries.length;
        if (options.debug) {
            (0, utils_1.logDebug)(`Original archive has ${originalCount} entries`, options);
            (0, utils_1.logDebug)(`Files to delete: ${filesToDelete.join(', ')}`, options);
        }
        // Split the requested names into those present and those missing.
        const entriesToDelete = [];
        const entriesNotFound = [];
        for (const fileToDelete of filesToDelete) {
            const found = entries.find((entry) => entry.filename === fileToDelete);
            if (found) {
                entriesToDelete.push(fileToDelete);
                if (options.verbose) {
                    (0, utils_1.log)(` Found: ${fileToDelete}`, options);
                }
            }
            else {
                entriesNotFound.push(fileToDelete);
                if (options.verbose) {
                    (0, utils_1.log)(` Not found: ${fileToDelete}`, options);
                }
            }
        }
        if (entriesNotFound.length > 0) {
            console.error(`Warning: ${entriesNotFound.length} file(s) not found in archive:`);
            entriesNotFound.forEach(file => console.error(` ${file}`));
        }
        if (entriesToDelete.length === 0) {
            console.error('Error: No files found to delete');
            (0, exit_codes_1.exitZip)(exit_codes_1.ZIP_EXIT_CODES.NO_FILES_MATCHED);
        }
        if (entriesToDelete.length === originalCount) {
            // If we're deleting all files, just delete the archive.
            fs.unlinkSync(archiveName);
            (0, utils_1.log)('✅ Archive deleted (all files removed)', options);
            return;
        }
        // Create new archive without the deleted files.
        const newZip = new server_1.ZipkitServer();
        const outParts = []; // NOTE(review): never used below — dead variable.
        // Build entries list for the new archive by copying the survivors.
        const newEntries = [];
        for (const entry of entries) {
            // NOTE(review): includes() inside this loop is O(n·m); a Set of
            // names would make it linear for large delete lists.
            if (!entriesToDelete.includes(entry.filename)) {
                // Clone the directory entry wholesale, then reattach its data.
                const newEntry = new src_1.ZipEntry(entry.filename);
                Object.assign(newEntry, entry);
                try {
                    // Extract this entry's decompressed bytes from the old
                    // archive. A fresh ZipkitServer is loaded per entry here
                    // (see the perf note in the function doc).
                    const existingData = await (async () => {
                        try {
                            const kit = new server_1.ZipkitServer();
                            kit.loadZip(zipData);
                            return await kit.extract(entry);
                        }
                        catch {
                            return null;
                        }
                    })();
                    if (existingData) {
                        newEntry.fileBuffer = existingData;
                        newEntry.isUpdated = true;
                    }
                }
                catch (error) {
                    // Best-effort: an uncopyable entry is kept in the directory
                    // without data (see fileBuffer check below).
                    if (options.verbose) {
                        (0, utils_1.log)(`warning: Could not copy entry ${entry.filename}`, options);
                    }
                }
                newEntries.push(newEntry);
            }
        }
        // Re-compress each survivor and lay out the local records in order,
        // tracking the running byte offset for the central directory.
        let currentOffset = 0;
        const zipDataChunks = [];
        for (const entry of newEntries) {
            if (entry.fileBuffer) {
                // Entry has pre-loaded data; entries whose extraction failed
                // above have no fileBuffer and are silently skipped here —
                // NOTE(review): their directory records are still emitted below.
                const fileData = entry.fileBuffer;
                // Fixed compression settings for the rebuild — presumably
                // mirroring the CLI defaults; confirm against createZip.
                const compressedData = await newZip.compressFileBuffer(entry, fileData, {
                    level: 6,
                    useZstd: true,
                    useSHA256: false
                });
                // Record where this entry's local header starts.
                entry.localHdrOffset = currentOffset;
                zipDataChunks.push(compressedData);
                currentOffset += compressedData.length;
            }
        }
        // Build the central directory from each entry's own serializer.
        const centralDirChunks = [];
        for (const entry of newEntries) {
            const centralDirEntry = entry.centralDirEntry();
            centralDirChunks.push(centralDirEntry);
        }
        const centralDir = Buffer.concat(centralDirChunks);
        // Build the classic (non-ZIP64) End Of Central Directory record.
        const endOfCentralDir = Buffer.alloc(22);
        endOfCentralDir.writeUInt32LE(0x06054b50, 0); // Signature
        endOfCentralDir.writeUInt16LE(0, 4); // This disk
        endOfCentralDir.writeUInt16LE(0, 6); // Disk with CD
        endOfCentralDir.writeUInt16LE(newEntries.length, 8); // Entries on this disk
        endOfCentralDir.writeUInt16LE(newEntries.length, 10); // Total entries
        endOfCentralDir.writeUInt32LE(centralDir.length, 12); // CD size
        endOfCentralDir.writeUInt32LE(currentOffset, 16); // CD offset
        endOfCentralDir.writeUInt16LE(0, 20); // Comment length
        // Combine local records + central directory + EOCD and overwrite the
        // archive in place.
        const compressedZipData = Buffer.concat(zipDataChunks);
        const newZipData = Buffer.concat([compressedZipData, centralDir, endOfCentralDir]);
        fs.writeFileSync(archiveName, newZipData);
        const remainingCount = originalCount - entriesToDelete.length;
        (0, utils_1.log)(`✅ Deleted ${entriesToDelete.length} file(s) from archive`, options);
        (0, utils_1.log)(`📁 Remaining files: ${remainingCount}`, options);
    }
    catch (error) {
        console.error(`Error: Failed to delete files from archive: ${error instanceof Error ? error.message : String(error)}`);
        (0, exit_codes_1.exitZip)(exit_codes_1.ZIP_EXIT_CODES.CANT_WRITE_ARCHIVE);
    }
}
|
|
584
|
+
// ============================================================================
|
|
585
|
+
// Large ZIP File EOCD Loading Functions
|
|
586
|
+
// ============================================================================
|
|
587
|
+
/**
 * Locate the End of Central Directory (EOCD) record of a ZIP file.
 *
 * Scans backwards through the file's final 64 KB + 22 bytes — the longest
 * span a trailing archive comment can push the record back — the same
 * strategy InfoZip uses. When a ZIP64 EOCD locator sits immediately before
 * the record, the result is flagged as ZIP64.
 *
 * @param {number} fd - Open file descriptor for the archive.
 * @param {number} fileSize - Total size of the file in bytes.
 * @returns {{offset: number, isZip64: boolean}|null} EOCD position, or null
 *   when the signature is not found or the tail cannot be read.
 */
function locateEOCD(fd, fileSize) {
    const EOCD_SIGNATURE = 0x06054b50;
    const ZIP64_EOCD_SIGNATURE = 0x07064b50; // ZIP64 EOCD *locator* signature
    const MAX_COMMENT_SIZE = 65535; // the comment-length field is 16 bits
    const EOCD_MIN_SIZE = 22;
    // Read only the window of the file that can legally contain the record.
    const windowSize = Math.min(MAX_COMMENT_SIZE + EOCD_MIN_SIZE, fileSize);
    const windowStart = Math.max(0, fileSize - windowSize);
    const window = Buffer.alloc(windowSize);
    try {
        fs.readSync(fd, window, 0, windowSize, windowStart);
    }
    catch (error) {
        (0, utils_1.logError)(`Failed to read file for EOCD search: ${error instanceof Error ? error.message : String(error)}`);
        return null;
    }
    // Walk from the tail towards the head; the record nearest the end wins.
    let pos = window.length - EOCD_MIN_SIZE;
    while (pos >= 0) {
        try {
            if (window.readUInt32LE(pos) === EOCD_SIGNATURE) {
                const eocdOffset = windowStart + pos;
                // A ZIP64 EOCD locator, when present, occupies the 20 bytes
                // immediately before the classic record.
                const locatorOffset = eocdOffset - 20;
                if (locatorOffset >= 0) {
                    try {
                        const locator = Buffer.alloc(20);
                        fs.readSync(fd, locator, 0, 20, locatorOffset);
                        if (locator.readUInt32LE(0) === ZIP64_EOCD_SIGNATURE) {
                            return { offset: eocdOffset, isZip64: true };
                        }
                    }
                    catch (error) {
                        // No readable ZIP64 locator — treat as a plain EOCD.
                    }
                }
                return { offset: eocdOffset, isZip64: false };
            }
        }
        catch (error) {
            // Unreadable position — keep scanning.
        }
        pos--;
    }
    return null; // no EOCD signature within the legal window
}
|
|
641
|
+
/**
 * Parse every central-directory file header found in a buffer.
 *
 * Entries are normally contiguous, but the scan resynchronizes after a
 * mismatch or a failed parse by advancing one byte at a time.
 * (BUG FIX: the previous code advanced 4 bytes on mismatch/failure, which
 * could step over headers that are not 4-byte aligned and silently drop
 * entries.)
 *
 * @param {Buffer} cdBuffer - Buffer containing central directory data.
 * @returns {Array<object>} Parsed entry objects; malformed headers are skipped.
 */
function parseCentralDirectoryEntries(cdBuffer) {
    const entries = [];
    try {
        // Central Directory File Header signature
        const CD_SIGNATURE = 0x02014b50;
        let offset = 0;
        while (offset <= cdBuffer.length - 4) {
            if (cdBuffer.readUInt32LE(offset) !== CD_SIGNATURE) {
                offset += 1; // resync byte-by-byte
                continue;
            }
            try {
                const entry = parseCentralDirectoryEntry(cdBuffer, offset);
                if (entry) {
                    entries.push(entry);
                }
                // The fixed header is 46 bytes, followed by three variable-
                // length fields whose sizes live at offsets 28/30/32.
                const filenameLength = cdBuffer.readUInt16LE(offset + 28);
                const extraFieldLength = cdBuffer.readUInt16LE(offset + 30);
                const commentLength = cdBuffer.readUInt16LE(offset + 32);
                offset += 46 + filenameLength + extraFieldLength + commentLength;
            }
            catch (error) {
                // Malformed header: skip one byte and keep scanning.
                offset += 1;
            }
        }
    }
    catch (error) {
        (0, utils_1.logError)(`Error parsing central directory entries: ${error instanceof Error ? error.message : String(error)}`);
    }
    return entries;
}
|
|
684
|
+
/**
 * Parses a single central directory entry
 * @param data - Buffer containing central directory data
 * @param offset - Offset of the entry in the buffer
 * @returns ZipEntry object or null if parsing fails
 */
function parseCentralDirectoryEntry(data, offset) {
    try {
        // Central Directory File Header: 46-byte fixed portion followed by
        // variable-length filename / extra field / comment.
        const u16 = (rel) => data.readUInt16LE(offset + rel);
        const u32 = (rel) => data.readUInt32LE(offset + rel);
        // Read every fixed field up front so a truncated buffer throws
        // before any ZipEntry is constructed.
        const verMadeBy = u16(4);
        const verExtract = u16(6);
        const bitFlags = u16(8);
        const cmpMethod = u16(10);
        const modTime = u16(12);
        const modDate = u16(14);
        const crc = u32(16);
        const compressedSize = u32(20);
        const uncompressedSize = u32(24);
        const nameLen = u16(28);
        const volNumber = u16(34);
        const intFileAttr = u16(36);
        const extFileAttr = u32(38);
        const localHdrOffset = u32(42);
        // Decode the stored filename (always decoded as UTF-8 here; the
        // general-purpose bit flag's encoding bit is not consulted -- TODO confirm)
        const name = data.subarray(offset + 46, offset + 46 + nameLen).toString('utf8');
        // DOS-packed modification date/time -> JS timestamp
        const modifiedTime = dosDateTimeToTimestamp(modDate, modTime);
        // Populate the ZipEntry with the parsed header fields
        const entry = new src_1.ZipEntry(name);
        entry.filename = name;
        entry.uncompressedSize = uncompressedSize;
        entry.compressedSize = compressedSize;
        entry.cmpMethod = cmpMethod;
        entry.crc = crc;
        entry.lastModTimeDate = modifiedTime;
        entry.verMadeBy = verMadeBy;
        entry.verExtract = verExtract;
        entry.bitFlags = bitFlags;
        entry.intFileAttr = intFileAttr;
        entry.extFileAttr = extFileAttr;
        entry.localHdrOffset = localHdrOffset;
        entry.volNumber = volNumber;
        return entry;
    }
    catch (error) {
        (0, utils_1.logError)(`Error parsing central directory entry at offset ${offset}: ${error instanceof Error ? error.message : String(error)}`);
        return null;
    }
}
/**
 * Converts a DOS (FAT) packed date/time pair to a JavaScript timestamp.
 *
 * Field layout (MS-DOS date/time format, as used by ZIP headers):
 *   dosDate: bits 9-15 = years since 1980, bits 5-8 = month (1-12), bits 0-4 = day (1-31)
 *   dosTime: bits 11-15 = hour, bits 5-10 = minute, bits 0-4 = seconds / 2
 *
 * @param dosDate - DOS date value
 * @param dosTime - DOS time value
 * @returns JavaScript timestamp (milliseconds since epoch, interpreted in local time)
 */
function dosDateTimeToTimestamp(dosDate, dosTime) {
    // A zeroed date field is common in ZIP entries with no recorded
    // timestamp; map it to the DOS epoch (1980-01-01) instead of letting
    // month=0 / day=0 roll the Date back into November 1979.
    if (dosDate === 0) {
        return new Date(1980, 0, 1, 0, 0, 0).getTime();
    }
    // Extract date components
    const year = ((dosDate >> 9) & 0x7F) + 1980;
    const month = (dosDate >> 5) & 0x0F;
    const day = dosDate & 0x1F;
    // Extract time components
    const hour = (dosTime >> 11) & 0x1F;
    const minute = (dosTime >> 5) & 0x3F;
    // Seconds are stored with 2-second granularity
    const second = (dosTime & 0x1F) * 2;
    // JS Date months are 0-indexed, hence month - 1
    const date = new Date(year, month - 1, day, hour, minute, second);
    return date.getTime();
}
/**
 * Parses a standard End of Central Directory record
 * @param fd - File descriptor
 * @param eocdOffset - Offset of the EOCD record
 * @returns Parsed EOCD information, or null if the record is absent or invalid
 */
function parseEOCDRecord(fd, eocdOffset) {
    try {
        // Read EOCD record (fixed portion is 22 bytes)
        const eocdBuffer = Buffer.alloc(22);
        const bytesRead = fs.readSync(fd, eocdBuffer, 0, 22, eocdOffset);
        // A short read means the record is truncated; bail out rather than
        // parsing zero-filled padding.
        if (bytesRead < 22) {
            return null;
        }
        // Verify signature ("PK\x05\x06")
        if (eocdBuffer.readUInt32LE(0) !== 0x06054b50) {
            return null;
        }
        const entriesOnDisk = eocdBuffer.readUInt16LE(8);
        const totalEntries = eocdBuffer.readUInt16LE(10);
        const cdSize = eocdBuffer.readUInt32LE(12);
        const cdOffset = eocdBuffer.readUInt32LE(16);
        const commentLength = eocdBuffer.readUInt16LE(20);
        // Any field saturated at its maximum means the real value lives in
        // the ZIP64 EOCD record (PKWARE APPNOTE section 4.4); include the
        // per-disk entry count in that check.
        const isZip64Marker = (entriesOnDisk === 0xFFFF ||
            totalEntries === 0xFFFF ||
            cdSize === 0xFFFFFFFF ||
            cdOffset === 0xFFFFFFFF);
        return {
            entriesOnDisk,
            totalEntries,
            cdSize,
            cdOffset,
            commentLength,
            isZip64Marker
        };
    }
    catch (error) {
        (0, utils_1.logError)(`Failed to parse EOCD record: ${error instanceof Error ? error.message : String(error)}`);
        return null;
    }
}
/**
 * Parses ZIP64 End of Central Directory record
 * @param fd - File descriptor
 * @param eocdOffset - Offset of the standard EOCD record
 * @returns Parsed ZIP64 EOCD information (BigInt fields), or null if absent/invalid
 */
function parseZip64EOCD(fd, eocdOffset) {
    try {
        // The 20-byte ZIP64 EOCD locator immediately precedes the standard
        // EOCD record.
        const zip64LocatorOffset = eocdOffset - 20;
        const locatorBuffer = Buffer.alloc(20);
        if (fs.readSync(fd, locatorBuffer, 0, 20, zip64LocatorOffset) < 20) {
            return null;
        }
        // Verify ZIP64 EOCD locator signature ("PK\x06\x07")
        if (locatorBuffer.readUInt32LE(0) !== 0x07064b50) {
            return null;
        }
        // Absolute file offset of the ZIP64 EOCD record itself
        const zip64EOCDOffset = Number(locatorBuffer.readBigUInt64LE(8));
        // Read ZIP64 EOCD record (fixed portion is 56 bytes)
        const zip64EOCDBuffer = Buffer.alloc(56);
        if (fs.readSync(fd, zip64EOCDBuffer, 0, 56, zip64EOCDOffset) < 56) {
            return null;
        }
        // Verify ZIP64 EOCD signature ("PK\x06\x06")
        if (zip64EOCDBuffer.readUInt32LE(0) !== 0x06064b50) {
            return null;
        }
        // Field offsets per PKWARE APPNOTE 4.3.14: offset 24 is "entries on
        // this disk"; the grand total lives at offset 32. The previous code
        // read offset 24, which is wrong for multi-disk archives.
        const totalEntries = zip64EOCDBuffer.readBigUInt64LE(32);
        const cdSize = zip64EOCDBuffer.readBigUInt64LE(40);
        const cdOffset = zip64EOCDBuffer.readBigUInt64LE(48);
        return {
            totalEntries,
            cdSize,
            cdOffset
        };
    }
    catch (error) {
        (0, utils_1.logError)(`Failed to parse ZIP64 EOCD record: ${error instanceof Error ? error.message : String(error)}`);
        return null;
    }
}
/**
 * Loads an existing ZIP archive using optimized large file handling.
 * Instead of reading the whole archive into memory, it locates the EOCD by
 * scanning backwards, reads only the central directory plus the EOCD/comment
 * tail, and hands that synthesized buffer to Zipkit.
 * @param archiveName - Path to the ZIP archive
 * @returns Zipkit instance or null if loading fails
 */
function loadExistingArchiveLarge(archiveName) {
    if (!fs.existsSync(archiveName)) {
        return null;
    }
    let fd = null;
    try {
        // Open file for random access
        fd = fs.openSync(archiveName, 'r');
        const stats = fs.fstatSync(fd);
        const fileSize = stats.size;
        if (fileSize < 22) {
            // File too small to hold even an empty EOCD record (22 bytes)
            return null;
        }
        // Locate EOCD signature (backwards search from end of file)
        const eocdLocation = locateEOCD(fd, fileSize);
        if (!eocdLocation) {
            (0, utils_1.logError)(`EOCD signature not found in ${archiveName}`);
            return null;
        }
        // Parse EOCD record
        const eocdInfo = parseEOCDRecord(fd, eocdLocation.offset);
        if (!eocdInfo) {
            (0, utils_1.logError)(`Failed to parse EOCD record in ${archiveName}`);
            return null;
        }
        let cdSize;
        let cdOffset;
        let totalEntries;
        // ZIP64 values are trusted only when the EOCD fields are saturated
        // AND the locator scan actually confirmed a ZIP64 record.
        if (eocdInfo.isZip64Marker && eocdLocation.isZip64) {
            // Use ZIP64 values
            const zip64Info = parseZip64EOCD(fd, eocdLocation.offset);
            if (!zip64Info) {
                (0, utils_1.logError)(`Failed to parse ZIP64 EOCD record in ${archiveName}`);
                return null;
            }
            // BigInt -> Number conversion; assumes sizes fit in 2^53 bytes
            cdSize = Number(zip64Info.cdSize);
            cdOffset = Number(zip64Info.cdOffset);
            totalEntries = Number(zip64Info.totalEntries);
        }
        else {
            // Use standard values
            cdSize = eocdInfo.cdSize;
            cdOffset = eocdInfo.cdOffset;
            totalEntries = eocdInfo.totalEntries;
        }
        // Validate central directory bounds before allocating/reading
        if (cdOffset < 0 || cdSize <= 0 || cdOffset + cdSize > fileSize) {
            (0, utils_1.logError)(`Invalid central directory bounds in ${archiveName}: offset=${cdOffset}, size=${cdSize}, fileSize=${fileSize}`);
            return null;
        }
        // Read central directory
        const cdBuffer = Buffer.alloc(cdSize);
        fs.readSync(fd, cdBuffer, 0, cdSize, cdOffset);
        // Verify central directory signature (first entry's header)
        if (cdBuffer.readUInt32LE(0) !== 0x02014b50) {
            (0, utils_1.logError)(`Invalid central directory signature in ${archiveName}`);
            return null;
        }
        // Create a complete ZIP file buffer by combining CD with EOCD
        // Read the EOCD and comment (everything from the EOCD to end of file)
        const eocdAndCommentSize = fileSize - eocdLocation.offset;
        const eocdAndCommentBuffer = Buffer.alloc(eocdAndCommentSize);
        fs.readSync(fd, eocdAndCommentBuffer, 0, eocdAndCommentSize, eocdLocation.offset);
        // Create complete ZIP buffer: CD + EOCD + comment
        // NOTE(review): in this synthesized buffer the central directory sits
        // at offset 0, but the EOCD's cd-offset field still holds the
        // original file offset. This only works if Zipkit.loadZip relocates
        // or re-scans for the CD rather than trusting that field -- confirm
        // against the Zipkit implementation. For ZIP64 archives the ZIP64
        // EOCD record/locator are also dropped from the tail -- verify.
        const completeZipBuffer = Buffer.concat([cdBuffer, eocdAndCommentBuffer]);
        const zip = new src_1.default();
        zip.loadZip(completeZipBuffer);
        return zip;
    }
    catch (error) {
        (0, utils_1.logError)(`Failed to load large ZIP archive ${archiveName}: ${error instanceof Error ? error.message : String(error)}`);
        return null;
    }
    finally {
        if (fd !== null) {
            try {
                fs.closeSync(fd);
            }
            catch (error) {
                // Ignore close errors; the descriptor is released either way
            }
        }
    }
}
|
|
920
|
+
//# sourceMappingURL=file-operations.js.map
|