mohyung 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.js ADDED
@@ -0,0 +1,816 @@
1
+ #!/usr/bin/env node
2
+
3
+ // src/cli.ts
4
+ import { Command } from "commander";
5
+
6
+ // src/commands/pack.ts
7
+ import { readFile as readFile2, stat as stat2, rm } from "fs/promises";
8
+ import { existsSync as existsSync2, readFileSync } from "fs";
9
+ import { join as join2, resolve } from "path";
10
+
11
+ // src/core/store.ts
12
+ import Database from "better-sqlite3";
13
var SCHEMA_VERSION = "1";

// Full DDL for the snapshot database. All statements are idempotent
// (IF NOT EXISTS) so re-opening an existing database is safe.
var CREATE_TABLES_SQL = `
-- Metadata
CREATE TABLE IF NOT EXISTS metadata (
  key TEXT PRIMARY KEY,
  value TEXT
);

-- Package information
CREATE TABLE IF NOT EXISTS packages (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  name TEXT NOT NULL,
  version TEXT NOT NULL,
  path TEXT NOT NULL,
  UNIQUE(name, version, path)
);

-- Content-addressable blob storage
CREATE TABLE IF NOT EXISTS blobs (
  hash TEXT PRIMARY KEY,
  content BLOB NOT NULL,
  original_size INTEGER,
  compressed_size INTEGER
);

-- Files per package
CREATE TABLE IF NOT EXISTS files (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  package_id INTEGER REFERENCES packages(id),
  relative_path TEXT NOT NULL,
  blob_hash TEXT REFERENCES blobs(hash),
  mode INTEGER,
  mtime INTEGER,
  UNIQUE(package_id, relative_path)
);

-- Indexes
CREATE INDEX IF NOT EXISTS idx_files_package ON files(package_id);
CREATE INDEX IF NOT EXISTS idx_files_blob ON files(blob_hash);
`;
53
var Store = class {
  /** Underlying better-sqlite3 connection. */
  db;

  /**
   * Open (or create) the snapshot database and ensure the schema exists.
   * @param dbPath - SQLite database file path
   */
  constructor(dbPath) {
    this.db = new Database(dbPath);
    // WAL + NORMAL sync favors bulk-write throughput over strict durability.
    this.db.pragma("journal_mode = WAL");
    this.db.pragma("synchronous = NORMAL");
    this.initSchema();
  }

  /** Create tables/indexes if missing and record the schema version. */
  initSchema() {
    this.db.exec(CREATE_TABLES_SQL);
    this.setMetadata("schema_version", SCHEMA_VERSION);
  }

  /**
   * Store a metadata key/value pair, replacing any existing value.
   * @param key - Metadata key
   * @param value - Value to save
   */
  setMetadata(key, value) {
    this.db
      .prepare(`INSERT OR REPLACE INTO metadata (key, value) VALUES (?, ?)`)
      .run(key, value);
  }

  /**
   * Read a metadata value.
   * @param key - Metadata key
   * @returns Stored value or null when absent
   */
  getMetadata(key) {
    const record = this.db
      .prepare(`SELECT value FROM metadata WHERE key = ?`)
      .get(key);
    return record?.value ?? null;
  }

  /**
   * Insert a package; when the same (name, version, path) tuple already
   * exists, the no-op UPDATE makes RETURNING yield the existing row id.
   * @param pkg - Package information
   * @returns Package row id
   */
  insertPackage(pkg) {
    const query = this.db.prepare(`
      INSERT INTO packages (name, version, path) VALUES (?, ?, ?)
      ON CONFLICT(name, version, path) DO UPDATE SET name = name
      RETURNING id
    `);
    return query.get(pkg.name, pkg.version, pkg.path).id;
  }

  /**
   * Look up a package by row id.
   * @param id - Package ID
   * @returns Package info or null
   */
  getPackageById(id) {
    const record = this.db.prepare(`SELECT * FROM packages WHERE id = ?`).get(id);
    if (!record) return null;
    return {
      id: record.id,
      name: record.name,
      version: record.version,
      path: record.path
    };
  }

  /**
   * List every package in the database.
   * @returns All package rows
   */
  getAllPackages() {
    return this.db
      .prepare(`SELECT * FROM packages`)
      .all()
      .map(({ id, name, version, path }) => ({ id, name, version, path }));
  }

  /**
   * Test whether a blob is already stored.
   * @param hash - Blob hash
   * @returns true when a row with that hash exists
   */
  hasBlob(hash) {
    const hit = this.db.prepare(`SELECT 1 FROM blobs WHERE hash = ?`).get(hash);
    return hit !== undefined;
  }

  /**
   * Insert a blob; silently skipped when the hash is already present.
   * @param blob - Blob information
   */
  insertBlob(blob) {
    this.db
      .prepare(`
        INSERT OR IGNORE INTO blobs (hash, content, original_size, compressed_size)
        VALUES (?, ?, ?, ?)
      `)
      .run(blob.hash, blob.content, blob.originalSize, blob.compressedSize);
  }

  /**
   * Fetch a blob's (still compressed) content.
   * @param hash - Blob hash
   * @returns Compressed bytes or null when absent
   */
  getBlob(hash) {
    const record = this.db
      .prepare(`SELECT content FROM blobs WHERE hash = ?`)
      .get(hash);
    return record?.content ?? null;
  }

  /**
   * Aggregate blob storage statistics.
   * @returns Blob count plus original/compressed byte totals
   */
  getBlobStats() {
    const record = this.db
      .prepare(`
        SELECT
          COUNT(*) as count,
          COALESCE(SUM(original_size), 0) as original,
          COALESCE(SUM(compressed_size), 0) as compressed
        FROM blobs
      `)
      .get();
    return {
      totalBlobs: record.count,
      totalOriginalSize: record.original,
      totalCompressedSize: record.compressed
    };
  }

  /**
   * Insert a file record, updating hash/mode/mtime on path conflict.
   * @param file - File record information
   */
  insertFile(file) {
    this.db
      .prepare(`
        INSERT INTO files (package_id, relative_path, blob_hash, mode, mtime)
        VALUES (?, ?, ?, ?, ?)
        ON CONFLICT(package_id, relative_path) DO UPDATE SET
          blob_hash = excluded.blob_hash,
          mode = excluded.mode,
          mtime = excluded.mtime
      `)
      .run(file.packageId, file.relativePath, file.blobHash, file.mode, file.mtime);
  }

  /**
   * List all file records for one package.
   * @param packageId - Package ID
   * @returns File records for the package
   */
  getFilesByPackage(packageId) {
    const records = this.db
      .prepare(`
        SELECT id, package_id, relative_path, blob_hash, mode, mtime
        FROM files WHERE package_id = ?
      `)
      .all(packageId);
    return records.map((record) => ({
      id: record.id,
      packageId: record.package_id,
      relativePath: record.relative_path,
      blobHash: record.blob_hash,
      mode: record.mode,
      mtime: record.mtime
    }));
  }

  /**
   * List every file record joined with its package's install path.
   * @returns All file records, each carrying packagePath
   */
  getAllFiles() {
    const records = this.db
      .prepare(`
        SELECT f.id, f.package_id, f.relative_path, f.blob_hash, f.mode, f.mtime, p.path as package_path
        FROM files f
        JOIN packages p ON f.package_id = p.id
      `)
      .all();
    return records.map((record) => ({
      id: record.id,
      packageId: record.package_id,
      relativePath: record.relative_path,
      blobHash: record.blob_hash,
      mode: record.mode,
      mtime: record.mtime,
      packagePath: record.package_path
    }));
  }

  /**
   * Count all file rows.
   * @returns Total number of files
   */
  getTotalFileCount() {
    return this.db.prepare(`SELECT COUNT(*) as count FROM files`).get().count;
  }

  /**
   * Run a function inside a single SQLite transaction.
   * @param fn - Function to execute within the transaction
   * @returns The function's return value
   */
  transaction(fn) {
    return this.db.transaction(fn)();
  }

  /**
   * Prepared statement for bulk file insertion (upsert on path conflict).
   * @returns Prepared statement
   */
  prepareInsertFile() {
    return this.db.prepare(`
      INSERT INTO files (package_id, relative_path, blob_hash, mode, mtime)
      VALUES (?, ?, ?, ?, ?)
      ON CONFLICT(package_id, relative_path) DO UPDATE SET
        blob_hash = excluded.blob_hash,
        mode = excluded.mode,
        mtime = excluded.mtime
    `);
  }

  /**
   * Prepared statement for bulk blob insertion (duplicates ignored).
   * @returns Prepared statement
   */
  prepareInsertBlob() {
    return this.db.prepare(`
      INSERT OR IGNORE INTO blobs (hash, content, original_size, compressed_size)
      VALUES (?, ?, ?, ?)
    `);
  }

  /** Close the database connection. */
  close() {
    this.db.close();
  }
};
293
+
294
+ // src/core/scanner.ts
295
+ import { readdir, stat, readFile } from "fs/promises";
296
+ import { join, relative } from "path";
297
+ import { existsSync } from "fs";
298
/**
 * Read and parse a package.json, returning just its identity.
 * Any failure (missing file, invalid JSON) yields null rather than throwing.
 * Falsy name/version (including empty strings) fall back to placeholders,
 * matching the original `||` semantics.
 * @param pkgJsonPath - Absolute path to a package.json file
 * @returns {{name: string, version: string} | null}
 */
async function parsePackageJson(pkgJsonPath) {
  try {
    const parsed = JSON.parse(await readFile(pkgJsonPath, "utf8"));
    return {
      name: parsed.name || "unknown",
      version: parsed.version || "0.0.0"
    };
  } catch {
    // Treat unreadable/corrupt manifests as "not a package".
    return null;
  }
}
310
/**
 * Detect a pnpm-style node_modules layout by the presence of a `.pnpm` dir.
 * @param nodeModulesPath - Path to a node_modules directory
 * @returns {boolean} true when `.pnpm` exists inside it
 */
function isPnpmStructure(nodeModulesPath) {
  const pnpmDir = join(nodeModulesPath, ".pnpm");
  return existsSync(pnpmDir);
}
313
/**
 * Recursively yield every regular file under `dir`.
 * Symlinks and other non-file entries are skipped silently.
 * @param dir - Directory currently being walked
 * @param baseDir - Root used to compute each entry's relativePath
 * @yields {{relativePath, absolutePath, mode, size, mtime}}
 */
async function* walkDir(dir, baseDir) {
  for (const entry of await readdir(dir, { withFileTypes: true })) {
    const absolutePath = join(dir, entry.name);
    if (entry.isDirectory()) {
      yield* walkDir(absolutePath, baseDir);
      continue;
    }
    if (!entry.isFile()) continue;
    const stats = await stat(absolutePath);
    yield {
      relativePath: relative(baseDir, absolutePath),
      absolutePath,
      mode: stats.mode,
      size: stats.size,
      mtime: stats.mtimeMs
    };
  }
}
331
/**
 * Yield package directories in a flat (npm/yarn) node_modules layout.
 * Service directories (.bin, .cache, .pnpm) are skipped; scoped packages
 * (@scope/name) are descended one level to yield each member.
 * @param nodeModulesPath - Path to the node_modules directory
 * @yields {{path: string, relativePath: string}}
 */
async function* findPackageDirs(nodeModulesPath) {
  for (const entry of await readdir(nodeModulesPath, { withFileTypes: true })) {
    if (!entry.isDirectory()) continue;
    if ([".bin", ".cache", ".pnpm"].includes(entry.name)) continue;
    const fullPath = join(nodeModulesPath, entry.name);
    if (!entry.name.startsWith("@")) {
      yield { path: fullPath, relativePath: entry.name };
      continue;
    }
    // Scope directory: each child directory is an individual package.
    for (const scoped of await readdir(fullPath, { withFileTypes: true })) {
      if (!scoped.isDirectory()) continue;
      yield {
        path: join(fullPath, scoped.name),
        relativePath: join(entry.name, scoped.name)
      };
    }
  }
}
355
/**
 * Yield package directories in a pnpm node_modules layout.
 * Walks `.pnpm/<hashDir>/node_modules/<pkg>`, descending one extra level
 * for scoped packages. relativePath is always emitted with forward slashes
 * to mirror pnpm's on-disk naming.
 * @param nodeModulesPath - Path to the node_modules directory
 * @yields {{path: string, relativePath: string}}
 */
async function* findPnpmPackageDirs(nodeModulesPath) {
  const pnpmPath = join(nodeModulesPath, ".pnpm");
  for (const entry of await readdir(pnpmPath, { withFileTypes: true })) {
    if (!entry.isDirectory()) continue;
    if (entry.name === "node_modules" || entry.name.startsWith(".")) continue;
    const innerNodeModules = join(pnpmPath, entry.name, "node_modules");
    // Some .pnpm entries are bare metadata dirs without a node_modules.
    if (!existsSync(innerNodeModules)) continue;
    for (const inner of await readdir(innerNodeModules, { withFileTypes: true })) {
      if (!inner.isDirectory()) continue;
      if (inner.name === ".bin") continue;
      const pkgPath = join(innerNodeModules, inner.name);
      if (!inner.name.startsWith("@")) {
        yield {
          path: pkgPath,
          relativePath: `.pnpm/${entry.name}/node_modules/${inner.name}`
        };
        continue;
      }
      for (const scoped of await readdir(pkgPath, { withFileTypes: true })) {
        if (!scoped.isDirectory()) continue;
        yield {
          path: join(pkgPath, scoped.name),
          relativePath: `.pnpm/${entry.name}/node_modules/${inner.name}/${scoped.name}`
        };
      }
    }
  }
}
391
/**
 * Scan a node_modules tree and collect every package with its file list.
 * Auto-detects flat (npm/yarn) vs pnpm layout. Directories without a
 * parseable package.json are skipped.
 * @param nodeModulesPath - Path to the node_modules directory
 * @param onProgress - Optional (current, total, label) callback per package
 * @returns {{packages, totalFiles, totalSize}}
 */
async function scanNodeModules(nodeModulesPath, onProgress) {
  // Materialize the directory list up front so progress can report a total.
  const packageDirs = [];
  const source = isPnpmStructure(nodeModulesPath)
    ? findPnpmPackageDirs(nodeModulesPath)
    : findPackageDirs(nodeModulesPath);
  for await (const dir of source) {
    packageDirs.push(dir);
  }
  const packages = [];
  let totalFiles = 0;
  let totalSize = 0;
  let processed = 0;
  for (const { path: pkgPath, relativePath } of packageDirs) {
    processed++;
    onProgress?.(processed, packageDirs.length, relativePath);
    const pkgInfo = await parsePackageJson(join(pkgPath, "package.json"));
    if (!pkgInfo) continue;
    const files = [];
    for await (const file of walkDir(pkgPath, pkgPath)) {
      files.push(file);
      totalFiles++;
      totalSize += file.size;
    }
    packages.push({
      name: pkgInfo.name,
      version: pkgInfo.version,
      path: relativePath,
      files
    });
  }
  return { packages, totalFiles, totalSize };
}
431
+
432
+ // src/core/hasher.ts
433
+ import { createHash } from "crypto";
434
/**
 * SHA-256 of a buffer, as lowercase hex.
 * @param data - Bytes to hash
 * @returns {string} 64-char hex digest
 */
function hashBuffer(data) {
  const digest = createHash("sha256");
  digest.update(data);
  return digest.digest("hex");
}
437
/**
 * SHA-256 of a UTF-8 string, as lowercase hex.
 * @param data - String to hash (encoded as UTF-8)
 * @returns {string} 64-char hex digest
 */
function hashString(data) {
  const hasher = createHash("sha256");
  hasher.update(data, "utf8");
  return hasher.digest("hex");
}
440
+
441
+ // src/utils/compression.ts
442
+ import { gzipSync, gunzipSync } from "zlib";
443
/**
 * Gzip-compress a buffer.
 * @param data - Bytes to compress
 * @param level - zlib level 1-9 (default 6, zlib's own default tradeoff)
 * @returns {Buffer} gzip stream
 */
function compress(data, level = 6) {
  const options = { level };
  return gzipSync(data, options);
}
446
/**
 * Inflate a gzip-compressed buffer back to its original bytes.
 * @param data - gzip stream produced by compress()
 * @returns {Buffer} decompressed bytes
 */
function decompress(data) {
  const inflated = gunzipSync(data);
  return inflated;
}
449
+
450
+ // src/utils/progress.ts
451
/**
 * Build a terminal progress-bar reporter.
 * The returned function redraws a single line on stdout via `\r` and emits a
 * trailing newline once current reaches the total.
 * @param total - Default total, used when the caller passes a falsy actualTotal
 * @returns {(current: number, actualTotal?: number, message?: string) => void}
 */
function createProgressBar(total) {
  const startedAt = Date.now();
  return (current, actualTotal, message) => {
    const denom = actualTotal || total;
    const fraction = denom > 0 ? Math.min(current / denom, 1) : 0;
    const percent = Math.round(fraction * 100);
    const elapsed = ((Date.now() - startedAt) / 1e3).toFixed(1);
    const width = 30;
    const filledCols = Math.max(0, Math.round(fraction * width));
    const emptyCols = Math.max(0, width - filledCols);
    const bar = "\u2588".repeat(filledCols) + "\u2591".repeat(emptyCols);
    const suffix = message ? ` - ${message}` : "";
    process.stdout.write(`\r[${bar}] ${percent}% (${current}/${denom}) ${elapsed}s${suffix}`);
    if (current >= denom) {
      process.stdout.write("\n");
    }
  };
}
469
/**
 * Render a byte count as a human-readable string with one decimal place.
 * Caps at GB (values past GB keep growing numerically, e.g. "1024.0 GB").
 * @param bytes - Byte count
 * @returns {string} e.g. "1.5 KB"
 */
function formatBytes(bytes) {
  const units = ["B", "KB", "MB", "GB"];
  let value = bytes;
  let idx = 0;
  for (; value >= 1024 && idx < units.length - 1; idx++) {
    value /= 1024;
  }
  return `${value.toFixed(1)} ${units[idx]}`;
}
479
+
480
+ // src/utils/logger.ts
481
+ import { consola } from "consola";
482
// One tagged consola instance per CLI sub-command, so log lines identify
// which phase produced them.
var logger = Object.fromEntries(
  ["pack", "unpack", "status", "scan"].map((tag) => [tag, consola.withTag(tag)])
);
488
+
489
+ // src/commands/pack.ts
490
var log = logger.pack;

/**
 * Pack a node_modules tree into a single SQLite database file.
 *
 * Scans the tree (flat or pnpm layout), deduplicates file contents by
 * SHA-256, gzip-compresses each unique blob, and records package/file
 * metadata. Any existing database (plus WAL/SHM side files) at the output
 * path is removed first.
 *
 * @param options - { source, output, compressionLevel, includeLockfile }
 * @throws Error when the source node_modules directory does not exist
 */
async function pack(options) {
  const { source, output, compressionLevel, includeLockfile } = options;
  const nodeModulesPath = resolve(source);
  const dbPath = resolve(output);
  if (!existsSync2(nodeModulesPath)) {
    throw new Error(`node_modules not found: ${nodeModulesPath}`);
  }
  log.start(`Scanning ${nodeModulesPath}`);
  const scanProgress = createProgressBar(100);
  const scanResult = await scanNodeModules(
    nodeModulesPath,
    (current, total, msg) => {
      scanProgress(current, total, msg);
    }
  );
  log.success(
    `Found ${scanResult.packages.length} packages, ${scanResult.totalFiles} files (${formatBytes(scanResult.totalSize)})`
  );
  // Start from a clean slate, including WAL side files from a previous run.
  if (existsSync2(dbPath)) {
    await rm(dbPath);
    if (existsSync2(dbPath + "-wal")) await rm(dbPath + "-wal");
    if (existsSync2(dbPath + "-shm")) await rm(dbPath + "-shm");
  }
  const store = new Store(dbPath);
  let deduplicatedCount;
  try {
    store.setMetadata("created_at", new Date().toISOString());
    store.setMetadata("node_version", process.version);
    store.setMetadata("source_path", nodeModulesPath);
    if (includeLockfile) {
      const lockfilePath = join2(nodeModulesPath, "..", "package-lock.json");
      if (existsSync2(lockfilePath)) {
        const lockfileContent = await readFile2(lockfilePath, "utf8");
        store.setMetadata("lockfile_hash", hashString(lockfileContent));
      }
    }
    log.start("Packing files...");
    const packProgress = createProgressBar(scanResult.totalFiles);
    const insertBlob = store.prepareInsertBlob();
    const insertFile = store.prepareInsertFile();
    const packFiles = () => {
      let processedFiles = 0;
      let dedupes = 0;
      for (const pkg of scanResult.packages) {
        const packageId = store.insertPackage({
          name: pkg.name,
          version: pkg.version,
          path: pkg.path
        });
        for (const file of pkg.files) {
          processedFiles++;
          packProgress(
            processedFiles,
            scanResult.totalFiles,
            file.relativePath.slice(0, 40)
          );
          // Synchronous read: the whole pack runs inside one SQLite transaction.
          const content = readFileSync(file.absolutePath);
          const hash = hashBuffer(content);
          if (!store.hasBlob(hash)) {
            const compressed = compress(content, compressionLevel);
            insertBlob.run(hash, compressed, content.length, compressed.length);
          } else {
            dedupes++;
          }
          insertFile.run(packageId, file.relativePath, hash, file.mode, file.mtime);
        }
      }
      return { deduplicatedCount: dedupes };
    };
    // Single transaction: dramatically faster than per-row commits.
    ({ deduplicatedCount } = store.transaction(packFiles));
  } finally {
    // Always release the database handle, even if packing fails midway.
    store.close();
  }
  const dbStats = await stat2(dbPath);
  // Guard against division by zero when the tree contains no files.
  const compressionRatio = scanResult.totalSize > 0
    ? ((1 - dbStats.size / scanResult.totalSize) * 100).toFixed(1)
    : "0.0";
  log.box({
    title: "Pack Complete",
    message: [
      `Output: ${dbPath}`,
      `Original: ${formatBytes(scanResult.totalSize)}`,
      `DB size: ${formatBytes(dbStats.size)}`,
      `Compression: ${compressionRatio}%`,
      `Deduplicated: ${deduplicatedCount}`
    ].join("\n"),
    style: {
      borderColor: "green"
    }
  });
}
582
+
583
+ // src/commands/unpack.ts
584
+ import { existsSync as existsSync3 } from "fs";
585
+ import { rm as rm2 } from "fs/promises";
586
+ import { resolve as resolve2 } from "path";
587
+
588
+ // src/core/extractor.ts
589
+ import { mkdir, writeFile, chmod } from "fs/promises";
590
+ import { dirname, join as join3 } from "path";
591
var log2 = logger.unpack;

/**
 * Write every file recorded in the store out to outputPath, recreating
 * the package directory structure and (best-effort) file permissions.
 * @param store - Open Store instance
 * @param outputPath - Destination root directory
 * @param onProgress - Optional (current, total, label) callback per file
 * @returns {{totalFiles: number, totalSize: number}}
 */
async function extractFiles(store, outputPath, onProgress) {
  const files = store.getAllFiles();
  const totalFiles = files.length;
  // Decompressed blobs under 100 KiB are cached so duplicated files
  // (same hash) are only inflated once.
  const blobCache = new Map();
  const getContent = (blobHash) => {
    const cached = blobCache.get(blobHash);
    if (cached !== undefined) return cached;
    const compressed = store.getBlob(blobHash);
    if (!compressed) return null;
    const content = decompress(compressed);
    if (content.length < 100 * 1024) {
      blobCache.set(blobHash, content);
    }
    return content;
  };
  let processed = 0;
  let totalSize = 0;
  for (const file of files) {
    processed++;
    onProgress?.(processed, totalFiles, file.relativePath.slice(0, 40));
    const fullPath = join3(outputPath, file.packagePath, file.relativePath);
    await mkdir(dirname(fullPath), { recursive: true });
    const content = getContent(file.blobHash);
    if (!content) {
      // A missing blob means the DB is inconsistent; warn and keep going.
      log2.warn(`Blob not found: ${file.relativePath}`);
      continue;
    }
    await writeFile(fullPath, content);
    totalSize += content.length;
    if (file.mode) {
      try {
        // 511 === 0o777: restore only the permission bits.
        await chmod(fullPath, file.mode & 511);
      } catch {
        // Permission restoration is best-effort (e.g. unsupported FS).
      }
    }
  }
  return {
    totalFiles,
    totalSize
  };
}
638
+
639
+ // src/commands/unpack.ts
640
var log3 = logger.unpack;

/**
 * Restore a node_modules tree from a SQLite database produced by `pack`.
 * Refuses to overwrite an existing output directory unless `force` is set.
 *
 * @param options - { input, output, force }
 * @throws Error when the database is missing, or when the output directory
 *   exists and force is not set
 */
async function unpack(options) {
  const { input, output, force } = options;
  const dbPath = resolve2(input);
  const outputPath = resolve2(output);
  if (!existsSync3(dbPath)) {
    throw new Error(`Database not found: ${dbPath}`);
  }
  if (existsSync3(outputPath)) {
    if (!force) {
      throw new Error(
        `Output directory already exists: ${outputPath}. Use --force to overwrite.`
      );
    }
    log3.warn(`Removing existing ${outputPath}...`);
    await rm2(outputPath, { recursive: true, force: true });
  }
  log3.info(`Opening ${dbPath}`);
  const store = new Store(dbPath);
  let result;
  let elapsed;
  try {
    const createdAt = store.getMetadata("created_at");
    const nodeVersion = store.getMetadata("node_version");
    const totalFileCount = store.getTotalFileCount();
    const blobStats = store.getBlobStats();
    log3.box({
      title: "Database Info",
      message: [
        `Created: ${createdAt ?? "unknown"}`,
        `Node version: ${nodeVersion ?? "unknown"}`,
        `Files: ${totalFileCount}`,
        `Original size: ${formatBytes(blobStats.totalOriginalSize)}`,
        `Compressed size: ${formatBytes(blobStats.totalCompressedSize)}`
      ].join("\n")
    });
    log3.start(`Extracting to ${outputPath}`);
    const progress = createProgressBar(totalFileCount);
    const startTime = Date.now();
    result = await extractFiles(store, outputPath, progress);
    elapsed = ((Date.now() - startTime) / 1e3).toFixed(1);
  } finally {
    // Release the database handle even if extraction fails midway.
    store.close();
  }
  log3.box({
    title: "Unpack Complete",
    message: [
      `Extracted: ${result.totalFiles} files (${formatBytes(result.totalSize)})`,
      `Time: ${elapsed}s`
    ].join("\n"),
    style: {
      borderColor: "green"
    }
  });
}
692
+
693
+ // src/commands/status.ts
694
+ import { existsSync as existsSync4 } from "fs";
695
+ import { readFile as readFile3 } from "fs/promises";
696
+ import { resolve as resolve3, join as join4 } from "path";
697
var log4 = logger.status;

/**
 * Compare the snapshot database against the on-disk node_modules tree.
 *
 * Each file recorded in the DB is re-hashed from disk and classified as
 * unchanged, modified (hash mismatch or unreadable), or only-in-DB (missing
 * from disk).
 *
 * NOTE(review): `onlyInFs` is part of the result shape but is never
 * populated — files that exist only on disk are not detected, since no
 * filesystem walk is performed. Confirm whether that is intentional.
 *
 * @param options - { db, nodeModules }
 * @returns {{onlyInDb: string[], onlyInFs: string[], modified: string[], unchanged: number}}
 * @throws Error when the database file does not exist
 */
async function status(options) {
  const { db, nodeModules } = options;
  const dbPath = resolve3(db);
  const nodeModulesPath = resolve3(nodeModules);
  if (!existsSync4(dbPath)) {
    throw new Error(`Database not found: ${dbPath}`);
  }
  if (!existsSync4(nodeModulesPath)) {
    log4.warn(`node_modules not found: ${nodeModulesPath}`);
    log4.info('Run "mohyung unpack" to restore from database.');
    return { onlyInDb: [], onlyInFs: [], modified: [], unchanged: 0 };
  }
  log4.start("Comparing...");
  log4.info(`DB: ${dbPath}`);
  log4.info(`node_modules: ${nodeModulesPath}`);
  const store = new Store(dbPath);
  const result = {
    onlyInDb: [],
    onlyInFs: [],
    modified: [],
    unchanged: 0
  };
  try {
    const files = store.getAllFiles();
    const progress = createProgressBar(files.length);
    for (const [index, file] of files.entries()) {
      const relativePath = join4(file.packagePath, file.relativePath);
      const fullPath = join4(nodeModulesPath, relativePath);
      progress(index + 1, files.length, relativePath.slice(0, 40));
      if (!existsSync4(fullPath)) {
        result.onlyInDb.push(relativePath);
        continue;
      }
      try {
        const fsContent = await readFile3(fullPath);
        if (hashBuffer(fsContent) !== file.blobHash) {
          result.modified.push(relativePath);
        } else {
          result.unchanged++;
        }
      } catch {
        // Unreadable files are reported as modified rather than aborting.
        result.modified.push(relativePath);
      }
    }
  } finally {
    // Always release the database handle, even if the comparison fails.
    store.close();
  }
  const summaryLines = [
    `Unchanged: ${result.unchanged}`,
    `Modified: ${result.modified.length}`,
    `Only in DB: ${result.onlyInDb.length}`
  ];
  if (result.modified.length > 0 && result.modified.length <= 10) {
    summaryLines.push("", "Modified files:");
    result.modified.forEach((f) => summaryLines.push(`  M ${f}`));
  }
  if (result.onlyInDb.length > 0 && result.onlyInDb.length <= 10) {
    summaryLines.push("", "Only in DB (deleted locally):");
    result.onlyInDb.forEach((f) => summaryLines.push(`  D ${f}`));
  }
  if (result.modified.length > 10 || result.onlyInDb.length > 10) {
    summaryLines.push("", "(Use verbose mode for full list)");
  }
  const isClean = result.modified.length === 0 && result.onlyInDb.length === 0;
  log4.box({
    title: "Status",
    message: summaryLines.join("\n"),
    style: {
      borderColor: isClean ? "green" : "yellow"
    }
  });
  if (isClean) {
    log4.success("All files match!");
  }
  return result;
}
774
+
775
+ // src/cli.ts
776
// CLI wiring: three sub-commands (pack / unpack / status), each mapping
// parsed options onto its command function and converting thrown errors
// into a logged message plus exit code 1.
var program = new Command();
program
  .name("mohyung")
  .description("Snapshot and restore node_modules as a single SQLite file")
  .version("0.1.0");
program
  .command("pack")
  .description("Pack node_modules into SQLite DB")
  .option("-o, --output <path>", "output file path", "./node_modules.db")
  .option("-s, --source <path>", "node_modules path", "./node_modules")
  .option("-c, --compression <level>", "compression level (1-9)", "6")
  .option("--include-lockfile", "include package-lock.json", false)
  .action(async (options) => {
    try {
      // Validate up front: an invalid level would otherwise surface as a
      // cryptic zlib error deep inside pack().
      const compressionLevel = Number.parseInt(options.compression, 10);
      if (Number.isNaN(compressionLevel) || compressionLevel < 1 || compressionLevel > 9) {
        throw new Error(`Invalid compression level: ${options.compression} (expected 1-9)`);
      }
      await pack({
        output: options.output,
        source: options.source,
        compressionLevel,
        includeLockfile: options.includeLockfile
      });
    } catch (error) {
      consola.error(error instanceof Error ? error.message : error);
      process.exit(1);
    }
  });
program
  .command("unpack")
  .description("Restore node_modules from SQLite DB")
  .option("-i, --input <path>", "input DB file path", "./node_modules.db")
  .option("-o, --output <path>", "output directory", "./node_modules")
  .option("--cache <path>", "cache directory")
  .option("-f, --force", "overwrite existing node_modules", false)
  .action(async (options) => {
    try {
      await unpack({
        input: options.input,
        output: options.output,
        cache: options.cache,
        force: options.force
      });
    } catch (error) {
      consola.error(error instanceof Error ? error.message : error);
      process.exit(1);
    }
  });
program
  .command("status")
  .description("Compare DB with current node_modules")
  .option("--db <path>", "DB file path", "./node_modules.db")
  .option("-n, --node-modules <path>", "node_modules path", "./node_modules")
  .action(async (options) => {
    try {
      await status({
        db: options.db,
        nodeModules: options.nodeModules
      });
    } catch (error) {
      consola.error(error instanceof Error ? error.message : error);
      process.exit(1);
    }
  });
program.parse();
816
+ //# sourceMappingURL=cli.js.map