mohyung 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,804 @@
+ // src/core/store.ts
+ import Database from "better-sqlite3";
+ var SCHEMA_VERSION = "1";
+ var CREATE_TABLES_SQL = `
+   -- Metadata
+   CREATE TABLE IF NOT EXISTS metadata (
+     key TEXT PRIMARY KEY,
+     value TEXT
+   );
+
+   -- Package information
+   CREATE TABLE IF NOT EXISTS packages (
+     id INTEGER PRIMARY KEY AUTOINCREMENT,
+     name TEXT NOT NULL,
+     version TEXT NOT NULL,
+     path TEXT NOT NULL,
+     UNIQUE(name, version, path)
+   );
+
+   -- Content-addressable blob storage
+   CREATE TABLE IF NOT EXISTS blobs (
+     hash TEXT PRIMARY KEY,
+     content BLOB NOT NULL,
+     original_size INTEGER,
+     compressed_size INTEGER
+   );
+
+   -- Files per package
+   CREATE TABLE IF NOT EXISTS files (
+     id INTEGER PRIMARY KEY AUTOINCREMENT,
+     package_id INTEGER REFERENCES packages(id),
+     relative_path TEXT NOT NULL,
+     blob_hash TEXT REFERENCES blobs(hash),
+     mode INTEGER,
+     mtime INTEGER,
+     UNIQUE(package_id, relative_path)
+   );
+
+   -- Indexes
+   CREATE INDEX IF NOT EXISTS idx_files_package ON files(package_id);
+   CREATE INDEX IF NOT EXISTS idx_files_blob ON files(blob_hash);
+ `;
+ var Store = class {
+   /** better-sqlite3 database instance */
+   db;
+   /**
+    * Create Store instance
+    * @param dbPath - SQLite database file path
+    */
+   constructor(dbPath) {
+     this.db = new Database(dbPath);
+     this.db.pragma("journal_mode = WAL");
+     this.db.pragma("synchronous = NORMAL");
+     this.initSchema();
+   }
+   /** Initialize schema (create tables and set version) */
+   initSchema() {
+     this.db.exec(CREATE_TABLES_SQL);
+     this.setMetadata("schema_version", SCHEMA_VERSION);
+   }
+   /**
+    * Save metadata
+    * @param key - Metadata key
+    * @param value - Value to save
+    */
+   setMetadata(key, value) {
+     const stmt = this.db.prepare(`
+       INSERT OR REPLACE INTO metadata (key, value) VALUES (?, ?)
+     `);
+     stmt.run(key, value);
+   }
+   /**
+    * Get metadata
+    * @param key - Metadata key
+    * @returns Stored value or null
+    */
+   getMetadata(key) {
+     const stmt = this.db.prepare(`SELECT value FROM metadata WHERE key = ?`);
+     const row = stmt.get(key);
+     return row?.value ?? null;
+   }
+   /**
+    * Insert package (if it already exists, just return its ID)
+    * @param pkg - Package information
+    * @returns Inserted or existing package ID
+    */
+   insertPackage(pkg) {
+     const stmt = this.db.prepare(`
+       INSERT INTO packages (name, version, path) VALUES (?, ?, ?)
+       ON CONFLICT(name, version, path) DO UPDATE SET name = name
+       RETURNING id
+     `);
+     const result = stmt.get(pkg.name, pkg.version, pkg.path);
+     return result.id;
+   }
+   /**
+    * Get package by ID
+    * @param id - Package ID
+    * @returns Package info or null
+    */
+   getPackageById(id) {
+     const stmt = this.db.prepare(`SELECT * FROM packages WHERE id = ?`);
+     const row = stmt.get(id);
+     if (!row) return null;
+     return {
+       id: row.id,
+       name: row.name,
+       version: row.version,
+       path: row.path
+     };
+   }
+   /**
+    * Get all packages
+    * @returns All package list
+    */
+   getAllPackages() {
+     const stmt = this.db.prepare(`SELECT * FROM packages`);
+     const rows = stmt.all();
+     return rows.map((row) => ({
+       id: row.id,
+       name: row.name,
+       version: row.version,
+       path: row.path
+     }));
+   }
+   /**
+    * Check if blob exists
+    * @param hash - Blob hash
+    * @returns Whether blob exists
+    */
+   hasBlob(hash) {
+     const stmt = this.db.prepare(`SELECT 1 FROM blobs WHERE hash = ?`);
+     return stmt.get(hash) !== void 0;
+   }
+   /**
+    * Insert blob (ignore duplicates)
+    * @param blob - Blob information
+    */
+   insertBlob(blob) {
+     const stmt = this.db.prepare(`
+       INSERT OR IGNORE INTO blobs (hash, content, original_size, compressed_size)
+       VALUES (?, ?, ?, ?)
+     `);
+     stmt.run(blob.hash, blob.content, blob.originalSize, blob.compressedSize);
+   }
+   /**
+    * Get blob content by hash
+    * @param hash - Blob hash
+    * @returns Compressed blob content or null
+    */
+   getBlob(hash) {
+     const stmt = this.db.prepare(`SELECT content FROM blobs WHERE hash = ?`);
+     const row = stmt.get(hash);
+     return row?.content ?? null;
+   }
+   /**
+    * Get blob storage statistics
+    * @returns Blob count and size statistics
+    */
+   getBlobStats() {
+     const stmt = this.db.prepare(`
+       SELECT
+         COUNT(*) as count,
+         COALESCE(SUM(original_size), 0) as original,
+         COALESCE(SUM(compressed_size), 0) as compressed
+       FROM blobs
+     `);
+     const row = stmt.get();
+     return {
+       totalBlobs: row.count,
+       totalOriginalSize: row.original,
+       totalCompressedSize: row.compressed
+     };
+   }
+   /**
+    * Insert file record (update on conflict)
+    * @param file - File record information
+    */
+   insertFile(file) {
+     const stmt = this.db.prepare(`
+       INSERT INTO files (package_id, relative_path, blob_hash, mode, mtime)
+       VALUES (?, ?, ?, ?, ?)
+       ON CONFLICT(package_id, relative_path) DO UPDATE SET
+         blob_hash = excluded.blob_hash,
+         mode = excluded.mode,
+         mtime = excluded.mtime
+     `);
+     stmt.run(
+       file.packageId,
+       file.relativePath,
+       file.blobHash,
+       file.mode,
+       file.mtime
+     );
+   }
+   /**
+    * Get files by package ID
+    * @param packageId - Package ID
+    * @returns File records for the package
+    */
+   getFilesByPackage(packageId) {
+     const stmt = this.db.prepare(`
+       SELECT id, package_id, relative_path, blob_hash, mode, mtime
+       FROM files WHERE package_id = ?
+     `);
+     const rows = stmt.all(packageId);
+     return rows.map((row) => ({
+       id: row.id,
+       packageId: row.package_id,
+       relativePath: row.relative_path,
+       blobHash: row.blob_hash,
+       mode: row.mode,
+       mtime: row.mtime
+     }));
+   }
+   /**
+    * Get all files (with package path)
+    * @returns All file records
+    */
+   getAllFiles() {
+     const stmt = this.db.prepare(`
+       SELECT f.id, f.package_id, f.relative_path, f.blob_hash, f.mode, f.mtime, p.path as package_path
+       FROM files f
+       JOIN packages p ON f.package_id = p.id
+     `);
+     const rows = stmt.all();
+     return rows.map((row) => ({
+       id: row.id,
+       packageId: row.package_id,
+       relativePath: row.relative_path,
+       blobHash: row.blob_hash,
+       mode: row.mode,
+       mtime: row.mtime,
+       packagePath: row.package_path
+     }));
+   }
+   /**
+    * Get total file count
+    * @returns Total number of files
+    */
+   getTotalFileCount() {
+     const stmt = this.db.prepare(`SELECT COUNT(*) as count FROM files`);
+     const row = stmt.get();
+     return row.count;
+   }
+   /**
+    * Execute work within a transaction
+    * @param fn - Function to execute within the transaction
+    * @returns Function execution result
+    */
+   transaction(fn) {
+     return this.db.transaction(fn)();
+   }
+   /**
+    * Get prepared statement for bulk file insertion
+    * @returns Prepared statement for file insertion
+    */
+   prepareInsertFile() {
+     return this.db.prepare(`
+       INSERT INTO files (package_id, relative_path, blob_hash, mode, mtime)
+       VALUES (?, ?, ?, ?, ?)
+       ON CONFLICT(package_id, relative_path) DO UPDATE SET
+         blob_hash = excluded.blob_hash,
+         mode = excluded.mode,
+         mtime = excluded.mtime
+     `);
+   }
+   /**
+    * Get prepared statement for bulk blob insertion
+    * @returns Prepared statement for blob insertion
+    */
+   prepareInsertBlob() {
+     return this.db.prepare(`
+       INSERT OR IGNORE INTO blobs (hash, content, original_size, compressed_size)
+       VALUES (?, ?, ?, ?)
+     `);
+   }
+   /** Close database connection */
+   close() {
+     this.db.close();
+   }
+ };
+
+ // src/core/scanner.ts
+ import { readdir, stat, readFile } from "fs/promises";
+ import { join, relative } from "path";
+ import { existsSync } from "fs";
+ async function parsePackageJson(pkgJsonPath) {
+   try {
+     const content = await readFile(pkgJsonPath, "utf8");
+     const pkg = JSON.parse(content);
+     return {
+       name: pkg.name || "unknown",
+       version: pkg.version || "0.0.0"
+     };
+   } catch {
+     return null;
+   }
+ }
+ function isPnpmStructure(nodeModulesPath) {
+   return existsSync(join(nodeModulesPath, ".pnpm"));
+ }
+ async function* walkDir(dir, baseDir) {
+   const entries = await readdir(dir, { withFileTypes: true });
+   for (const entry of entries) {
+     const fullPath = join(dir, entry.name);
+     if (entry.isDirectory()) {
+       yield* walkDir(fullPath, baseDir);
+     } else if (entry.isFile()) {
+       const stats = await stat(fullPath);
+       yield {
+         relativePath: relative(baseDir, fullPath),
+         absolutePath: fullPath,
+         mode: stats.mode,
+         size: stats.size,
+         mtime: stats.mtimeMs
+       };
+     }
+   }
+ }
+ async function* findPackageDirs(nodeModulesPath) {
+   const entries = await readdir(nodeModulesPath, { withFileTypes: true });
+   for (const entry of entries) {
+     if (!entry.isDirectory()) continue;
+     if (entry.name === ".bin" || entry.name === ".cache" || entry.name === ".pnpm")
+       continue;
+     const fullPath = join(nodeModulesPath, entry.name);
+     if (entry.name.startsWith("@")) {
+       const scopedEntries = await readdir(fullPath, { withFileTypes: true });
+       for (const scopedEntry of scopedEntries) {
+         if (!scopedEntry.isDirectory()) continue;
+         yield {
+           path: join(fullPath, scopedEntry.name),
+           relativePath: join(entry.name, scopedEntry.name)
+         };
+       }
+     } else {
+       yield {
+         path: fullPath,
+         relativePath: entry.name
+       };
+     }
+   }
+ }
+ async function* findPnpmPackageDirs(nodeModulesPath) {
+   const pnpmPath = join(nodeModulesPath, ".pnpm");
+   const entries = await readdir(pnpmPath, { withFileTypes: true });
+   for (const entry of entries) {
+     if (!entry.isDirectory()) continue;
+     if (entry.name === "node_modules" || entry.name.startsWith(".")) continue;
+     const fullPath = join(pnpmPath, entry.name);
+     const innerNodeModules = join(fullPath, "node_modules");
+     if (!existsSync(innerNodeModules)) continue;
+     const innerEntries = await readdir(innerNodeModules, {
+       withFileTypes: true
+     });
+     for (const innerEntry of innerEntries) {
+       if (!innerEntry.isDirectory()) continue;
+       if (innerEntry.name === ".bin") continue;
+       const pkgPath = join(innerNodeModules, innerEntry.name);
+       if (innerEntry.name.startsWith("@")) {
+         const scopedEntries = await readdir(pkgPath, {
+           withFileTypes: true
+         });
+         for (const scopedEntry of scopedEntries) {
+           if (!scopedEntry.isDirectory()) continue;
+           yield {
+             path: join(pkgPath, scopedEntry.name),
+             relativePath: `.pnpm/${entry.name}/node_modules/${innerEntry.name}/${scopedEntry.name}`
+           };
+         }
+       } else {
+         yield {
+           path: pkgPath,
+           relativePath: `.pnpm/${entry.name}/node_modules/${innerEntry.name}`
+         };
+       }
+     }
+   }
+ }
+ async function scanNodeModules(nodeModulesPath, onProgress) {
+   const usePnpm = isPnpmStructure(nodeModulesPath);
+   const packageDirs = [];
+   if (usePnpm) {
+     for await (const dir of findPnpmPackageDirs(nodeModulesPath)) {
+       packageDirs.push(dir);
+     }
+   } else {
+     for await (const dir of findPackageDirs(nodeModulesPath)) {
+       packageDirs.push(dir);
+     }
+   }
+   const scanPackages = async () => {
+     const packages = [];
+     let totalFiles = 0;
+     let totalSize = 0;
+     let packageCount = 0;
+     for (const { path: pkgPath, relativePath } of packageDirs) {
+       packageCount++;
+       onProgress?.(packageCount, packageDirs.length, relativePath);
+       const pkgJsonPath = join(pkgPath, "package.json");
+       const pkgInfo = await parsePackageJson(pkgJsonPath);
+       if (!pkgInfo) continue;
+       const files = [];
+       for await (const file of walkDir(pkgPath, pkgPath)) {
+         files.push(file);
+         totalFiles++;
+         totalSize += file.size;
+       }
+       packages.push({
+         name: pkgInfo.name,
+         version: pkgInfo.version,
+         path: relativePath,
+         files
+       });
+     }
+     return { packages, totalFiles, totalSize };
+   };
+   return scanPackages();
+ }
+ async function countFiles(nodeModulesPath) {
+   const countDir = async (dir) => {
+     const entries = await readdir(dir, { withFileTypes: true });
+     let count = 0;
+     for (const entry of entries) {
+       const fullPath = join(dir, entry.name);
+       if (entry.isDirectory()) {
+         count += await countDir(fullPath);
+       } else if (entry.isFile()) {
+         count++;
+       }
+     }
+     return count;
+   };
+   return countDir(nodeModulesPath);
+ }
+
+ // src/core/extractor.ts
+ import { mkdir, writeFile, chmod } from "fs/promises";
+ import { dirname, join as join2 } from "path";
+
+ // src/utils/compression.ts
+ import { gzipSync, gunzipSync } from "zlib";
+ function compress(data, level = 6) {
+   return gzipSync(data, { level });
+ }
+ function decompress(data) {
+   return gunzipSync(data);
+ }
+
+ // src/utils/logger.ts
+ import { consola } from "consola";
+ var logger = {
+   pack: consola.withTag("pack"),
+   unpack: consola.withTag("unpack"),
+   status: consola.withTag("status"),
+   scan: consola.withTag("scan")
+ };
+ function createLogger(tag) {
+   return consola.withTag(tag);
+ }
+
+ // src/core/extractor.ts
+ var log = logger.unpack;
+ async function extractFiles(store, outputPath, onProgress) {
+   const files = store.getAllFiles();
+   const totalFiles = files.length;
+   const blobCache = /* @__PURE__ */ new Map();
+   const getContent = (blobHash) => {
+     if (blobCache.has(blobHash)) {
+       return blobCache.get(blobHash);
+     }
+     const compressed = store.getBlob(blobHash);
+     if (!compressed) return null;
+     const content = decompress(compressed);
+     if (content.length < 100 * 1024) {
+       blobCache.set(blobHash, content);
+     }
+     return content;
+   };
+   const iterateFiles = async () => {
+     let processedFiles = 0;
+     let totalSize2 = 0;
+     for (const file of files) {
+       processedFiles++;
+       onProgress?.(processedFiles, totalFiles, file.relativePath.slice(0, 40));
+       const fullPath = join2(outputPath, file.packagePath, file.relativePath);
+       await mkdir(dirname(fullPath), { recursive: true });
+       const content = getContent(file.blobHash);
+       if (!content) {
+         log.warn(`Blob not found: ${file.relativePath}`);
+         continue;
+       }
+       await writeFile(fullPath, content);
+       totalSize2 += content.length;
+       if (file.mode) {
+         try {
+           await chmod(fullPath, file.mode & 511);
+         } catch {
+         }
+       }
+     }
+     return { totalSize: totalSize2 };
+   };
+   const { totalSize } = await iterateFiles();
+   return {
+     totalFiles,
+     totalSize
+   };
+ }
+
+ // src/core/hasher.ts
+ import { createHash } from "crypto";
+ function hashBuffer(data) {
+   return createHash("sha256").update(data).digest("hex");
+ }
+ function hashString(data) {
+   return createHash("sha256").update(data, "utf8").digest("hex");
+ }
+
+ // src/utils/progress.ts
+ function createProgressBar(total) {
+   const startTime = Date.now();
+   return (current, actualTotal, message) => {
+     const t = actualTotal || total;
+     const ratio = t > 0 ? Math.min(current / t, 1) : 0;
+     const percent = Math.round(ratio * 100);
+     const elapsed = ((Date.now() - startTime) / 1e3).toFixed(1);
+     const barWidth = 30;
+     const filled = Math.max(0, Math.round(ratio * barWidth));
+     const empty = Math.max(0, barWidth - filled);
+     const bar = "\u2588".repeat(filled) + "\u2591".repeat(empty);
+     const line = `\r[${bar}] ${percent}% (${current}/${t}) ${elapsed}s${message ? ` - ${message}` : ""}`;
+     process.stdout.write(line);
+     if (current >= t) {
+       process.stdout.write("\n");
+     }
+   };
+ }
+ function formatBytes(bytes) {
+   const units = ["B", "KB", "MB", "GB"];
+   let size = bytes;
+   let unitIndex = 0;
+   while (size >= 1024 && unitIndex < units.length - 1) {
+     size /= 1024;
+     unitIndex++;
+   }
+   return `${size.toFixed(1)} ${units[unitIndex]}`;
+ }
+
+ // src/commands/pack.ts
+ import { readFile as readFile2, stat as stat2, rm } from "fs/promises";
+ import { existsSync as existsSync2, readFileSync } from "fs";
+ import { join as join3, resolve } from "path";
+ var log2 = logger.pack;
+ async function pack(options) {
+   const { source, output, compressionLevel, includeLockfile } = options;
+   const nodeModulesPath = resolve(source);
+   const dbPath = resolve(output);
+   if (!existsSync2(nodeModulesPath)) {
+     throw new Error(`node_modules not found: ${nodeModulesPath}`);
+   }
+   log2.start(`Scanning ${nodeModulesPath}`);
+   const scanProgress = createProgressBar(100);
+   const scanResult = await scanNodeModules(
+     nodeModulesPath,
+     (current, total, msg) => {
+       scanProgress(current, total, msg);
+     }
+   );
+   log2.success(
+     `Found ${scanResult.packages.length} packages, ${scanResult.totalFiles} files (${formatBytes(scanResult.totalSize)})`
+   );
+   if (existsSync2(dbPath)) {
+     await rm(dbPath);
+     if (existsSync2(dbPath + "-wal")) await rm(dbPath + "-wal");
+     if (existsSync2(dbPath + "-shm")) await rm(dbPath + "-shm");
+   }
+   const store = new Store(dbPath);
+   store.setMetadata("created_at", (/* @__PURE__ */ new Date()).toISOString());
+   store.setMetadata("node_version", process.version);
+   store.setMetadata("source_path", nodeModulesPath);
+   if (includeLockfile) {
+     const lockfilePath = join3(nodeModulesPath, "..", "package-lock.json");
+     if (existsSync2(lockfilePath)) {
+       const lockfileContent = await readFile2(lockfilePath, "utf8");
+       store.setMetadata("lockfile_hash", hashString(lockfileContent));
+     }
+   }
+   log2.start("Packing files...");
+   const packProgress = createProgressBar(scanResult.totalFiles);
+   const insertBlob = store.prepareInsertBlob();
+   const insertFile = store.prepareInsertFile();
+   const packFiles = () => {
+     let processedFiles = 0;
+     let deduplicatedCount2 = 0;
+     for (const pkg of scanResult.packages) {
+       const packageId = store.insertPackage({
+         name: pkg.name,
+         version: pkg.version,
+         path: pkg.path
+       });
+       for (const file of pkg.files) {
+         processedFiles++;
+         packProgress(
+           processedFiles,
+           scanResult.totalFiles,
+           file.relativePath.slice(0, 40)
+         );
+         const content = readFileSync(file.absolutePath);
+         const hash = hashBuffer(content);
+         if (!store.hasBlob(hash)) {
+           const compressed = compress(content, compressionLevel);
+           insertBlob.run(hash, compressed, content.length, compressed.length);
+         } else {
+           deduplicatedCount2++;
+         }
+         insertFile.run(
+           packageId,
+           file.relativePath,
+           hash,
+           file.mode,
+           file.mtime
+         );
+       }
+     }
+     return { deduplicatedCount: deduplicatedCount2 };
+   };
+   const { deduplicatedCount } = store.transaction(packFiles);
+   store.close();
+   const dbStats = await stat2(dbPath);
+   const compressionRatio = ((1 - dbStats.size / scanResult.totalSize) * 100).toFixed(1);
+   log2.box({
+     title: "Pack Complete",
+     message: [
+       `Output: ${dbPath}`,
+       `Original: ${formatBytes(scanResult.totalSize)}`,
+       `DB size: ${formatBytes(dbStats.size)}`,
+       `Compression: ${compressionRatio}%`,
+       `Deduplicated: ${deduplicatedCount}`
+     ].join("\n"),
+     style: {
+       borderColor: "green"
+     }
+   });
+ }
+
+ // src/commands/unpack.ts
+ import { existsSync as existsSync3 } from "fs";
+ import { rm as rm2 } from "fs/promises";
+ import { resolve as resolve2 } from "path";
+ var log3 = logger.unpack;
+ async function unpack(options) {
+   const { input, output, force } = options;
+   const dbPath = resolve2(input);
+   const outputPath = resolve2(output);
+   if (!existsSync3(dbPath)) {
+     throw new Error(`Database not found: ${dbPath}`);
+   }
+   if (existsSync3(outputPath)) {
+     if (!force) {
+       throw new Error(
+         `Output directory already exists: ${outputPath}. Use --force to overwrite.`
+       );
+     }
+     log3.warn(`Removing existing ${outputPath}...`);
+     await rm2(outputPath, { recursive: true, force: true });
+   }
+   log3.info(`Opening ${dbPath}`);
+   const store = new Store(dbPath);
+   const createdAt = store.getMetadata("created_at");
+   const nodeVersion = store.getMetadata("node_version");
+   const totalFileCount = store.getTotalFileCount();
+   const blobStats = store.getBlobStats();
+   log3.box({
+     title: "Database Info",
+     message: [
+       `Created: ${createdAt ?? "unknown"}`,
+       `Node version: ${nodeVersion ?? "unknown"}`,
+       `Files: ${totalFileCount}`,
+       `Original size: ${formatBytes(blobStats.totalOriginalSize)}`,
+       `Compressed size: ${formatBytes(blobStats.totalCompressedSize)}`
+     ].join("\n")
+   });
+   log3.start(`Extracting to ${outputPath}`);
+   const progress = createProgressBar(totalFileCount);
+   const startTime = Date.now();
+   const result = await extractFiles(store, outputPath, progress);
+   const elapsed = ((Date.now() - startTime) / 1e3).toFixed(1);
+   store.close();
+   log3.box({
+     title: "Unpack Complete",
+     message: [
+       `Extracted: ${result.totalFiles} files (${formatBytes(
+         result.totalSize
+       )})`,
+       `Time: ${elapsed}s`
+     ].join("\n"),
+     style: {
+       borderColor: "green"
+     }
+   });
+ }
+
+ // src/commands/status.ts
+ import { existsSync as existsSync4 } from "fs";
+ import { readFile as readFile3 } from "fs/promises";
+ import { resolve as resolve3, join as join4 } from "path";
+ var log4 = logger.status;
+ async function status(options) {
+   const { db, nodeModules } = options;
+   const dbPath = resolve3(db);
+   const nodeModulesPath = resolve3(nodeModules);
+   if (!existsSync4(dbPath)) {
+     throw new Error(`Database not found: ${dbPath}`);
+   }
+   if (!existsSync4(nodeModulesPath)) {
+     log4.warn(`node_modules not found: ${nodeModulesPath}`);
+     log4.info('Run "mohyung unpack" to restore from database.');
+     return { onlyInDb: [], onlyInFs: [], modified: [], unchanged: 0 };
+   }
+   log4.start("Comparing...");
+   log4.info(`DB: ${dbPath}`);
+   log4.info(`node_modules: ${nodeModulesPath}`);
+   const store = new Store(dbPath);
+   const files = store.getAllFiles();
+   const result = {
+     onlyInDb: [],
+     onlyInFs: [],
+     modified: [],
+     unchanged: 0
+   };
+   const progress = createProgressBar(files.length);
+   const dbPaths = /* @__PURE__ */ new Set();
+   for (const [index, file] of files.entries()) {
+     const relativePath = join4(file.packagePath, file.relativePath);
+     const fullPath = join4(nodeModulesPath, relativePath);
+     dbPaths.add(relativePath);
+     progress(index + 1, files.length, relativePath.slice(0, 40));
+     if (!existsSync4(fullPath)) {
+       result.onlyInDb.push(relativePath);
+       continue;
+     }
+     try {
+       const fsContent = await readFile3(fullPath);
+       const fsHash = hashBuffer(fsContent);
+       if (fsHash !== file.blobHash) {
+         result.modified.push(relativePath);
+       } else {
+         result.unchanged++;
+       }
+     } catch {
+       result.modified.push(relativePath);
+     }
+   }
+   store.close();
+   const summaryLines = [
+     `Unchanged: ${result.unchanged}`,
+     `Modified: ${result.modified.length}`,
+     `Only in DB: ${result.onlyInDb.length}`
+   ];
+   if (result.modified.length > 0 && result.modified.length <= 10) {
+     summaryLines.push("", "Modified files:");
+     result.modified.forEach((f) => summaryLines.push(` M ${f}`));
+   }
+   if (result.onlyInDb.length > 0 && result.onlyInDb.length <= 10) {
+     summaryLines.push("", "Only in DB (deleted locally):");
+     result.onlyInDb.forEach((f) => summaryLines.push(` D ${f}`));
+   }
+   if (result.modified.length > 10 || result.onlyInDb.length > 10) {
+     summaryLines.push("", "(Use verbose mode for full list)");
+   }
+   const isClean = result.modified.length === 0 && result.onlyInDb.length === 0;
+   log4.box({
+     title: "Status",
+     message: summaryLines.join("\n"),
+     style: {
+       borderColor: isClean ? "green" : "yellow"
+     }
+   });
+   if (isClean) {
+     log4.success("All files match!");
+   }
+   return result;
+ }
+ export {
+   Store,
+   compress,
+   consola,
+   countFiles,
+   createLogger,
+   createProgressBar,
+   decompress,
+   extractFiles,
+   formatBytes,
+   hashBuffer,
+   hashString,
+   logger,
+   pack,
+   scanNodeModules,
+   status,
+   unpack
+ };
+ //# sourceMappingURL=index.js.map
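
Taken together, dist/index.js amounts to a small snapshot tool: pack() walks a node_modules tree into a single SQLite file, unpack() restores it, and status() diffs the live tree against the snapshot. A minimal usage sketch of the exported API follows; it assumes the package's entry point resolves to this bundle, and the paths and option values are illustrative, not taken from the package:

    import { pack, unpack, status } from "mohyung";

    // Snapshot ./node_modules into one SQLite file.
    await pack({
      source: "./node_modules",
      output: "./deps.sqlite",
      compressionLevel: 6,    // forwarded to zlib gzipSync via compress()
      includeLockfile: true   // records a hash of ../package-lock.json as metadata
    });

    // Restore the tree elsewhere; refuses to overwrite unless force is set.
    await unpack({ input: "./deps.sqlite", output: "./node_modules", force: true });

    // Compare the live tree against the snapshot by re-hashing each file.
    const report = await status({ db: "./deps.sqlite", nodeModules: "./node_modules" });
    console.log(report.unchanged, report.modified.length, report.onlyInDb.length);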
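
The content-addressable layer can also be exercised on its own through the exported Store and the hashing/compression helpers. A sketch of the write and read path, assuming standalone use of these exports (the example.sqlite path and sample buffer are hypothetical):

    import { Store, hashBuffer, compress, decompress } from "mohyung";

    const store = new Store("example.sqlite"); // creates tables, enables WAL
    const content = Buffer.from("console.log('hi');\n");

    // Identical file contents share one sha256 key, so the blob is
    // written once no matter how many files reference it.
    const hash = hashBuffer(content);
    if (!store.hasBlob(hash)) {
      const gz = compress(content); // gzip, level 6 by default
      store.insertBlob({
        hash,
        content: gz,
        originalSize: content.length,
        compressedSize: gz.length
      });
    }

    // getBlob() returns the compressed bytes; decompress() restores them.
    const roundTrip = decompress(store.getBlob(hash));
    console.log(roundTrip.equals(content)); // true
    store.close();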
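
The scanner is exported as well and can be driven directly. It probes node_modules/.pnpm to choose between the flat npm/yarn layout and pnpm's store layout, and skips .bin, .cache, and similar housekeeping directories. A sketch, with the progress wiring purely illustrative:

    import { scanNodeModules, countFiles, formatBytes } from "mohyung";

    const { packages, totalFiles, totalSize } = await scanNodeModules(
      "./node_modules",
      (current, total, name) => {
        // e.g. feed these into createProgressBar(total)
      }
    );
    console.log(`${packages.length} packages, ${totalFiles} files, ${formatBytes(totalSize)}`);

    // countFiles() is a cheaper pre-pass when only the file total is needed.
    console.log(await countFiles("./node_modules"));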
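
The progress and formatting utilities are self-contained, so they work outside the pack/unpack flow too. A brief sketch of their observable behavior, inferred from the implementations above:

    import { createProgressBar, formatBytes } from "mohyung";

    const tick = createProgressBar(3);
    tick(1, 3, "step 1");
    tick(2, 3, "step 2");
    tick(3, 3, "step 3"); // prints a newline once current >= total

    console.log(formatBytes(1536));       // "1.5 KB"
    console.log(formatBytes(1073741824)); // "1.0 GB"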