mohyung 0.1.0

This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/cli.ts","../src/commands/pack.ts","../src/core/store.ts","../src/core/scanner.ts","../src/core/hasher.ts","../src/utils/compression.ts","../src/utils/progress.ts","../src/utils/logger.ts","../src/commands/unpack.ts","../src/core/extractor.ts","../src/commands/status.ts"],"sourcesContent":["import { Command } from 'commander'\nimport { pack } from './commands/pack.js'\nimport { unpack } from './commands/unpack.js'\nimport { status } from './commands/status.js'\nimport { consola } from './utils/logger.js'\n\nconst program = new Command()\n\nprogram\n .name('mohyung')\n .description('Snapshot and restore node_modules as a single SQLite file')\n .version('0.1.0')\n\nprogram\n .command('pack')\n .description('Pack node_modules into SQLite DB')\n .option('-o, --output <path>', 'output file path', './node_modules.db')\n .option('-s, --source <path>', 'node_modules path', './node_modules')\n .option('-c, --compression <level>', 'compression level (1-9)', '6')\n .option('--include-lockfile', 'include package-lock.json', false)\n .action(async (options) => {\n try {\n await pack({\n output: options.output,\n source: options.source,\n compressionLevel: parseInt(options.compression, 10),\n includeLockfile: options.includeLockfile,\n })\n } catch (error) {\n consola.error(error instanceof Error ? error.message : error)\n process.exit(1)\n }\n })\n\nprogram\n .command('unpack')\n .description('Restore node_modules from SQLite DB')\n .option('-i, --input <path>', 'input DB file path', './node_modules.db')\n .option('-o, --output <path>', 'output directory', './node_modules')\n .option('--cache <path>', 'cache directory')\n .option('-f, --force', 'overwrite existing node_modules', false)\n .action(async (options) => {\n try {\n await unpack({\n input: options.input,\n output: options.output,\n cache: options.cache,\n force: options.force,\n })\n } catch (error) {\n consola.error(error instanceof Error ? error.message : error)\n process.exit(1)\n }\n })\n\nprogram\n .command('status')\n .description('Compare DB with current node_modules')\n .option('--db <path>', 'DB file path', './node_modules.db')\n .option('-n, --node-modules <path>', 'node_modules path', './node_modules')\n .action(async (options) => {\n try {\n await status({\n db: options.db,\n nodeModules: options.nodeModules,\n })\n } catch (error) {\n consola.error(error instanceof Error ? 
error.message : error)\n process.exit(1)\n }\n })\n\nprogram.parse()\n","import { readFile, stat, rm } from 'node:fs/promises'\nimport { existsSync, readFileSync } from 'node:fs'\nimport { join, resolve } from 'node:path'\nimport { Store } from '../core/store.js'\nimport { scanNodeModules } from '../core/scanner.js'\nimport { hashBuffer, hashString } from '../core/hasher.js'\nimport { compress } from '../utils/compression.js'\nimport { createProgressBar, formatBytes } from '../utils/progress.js'\nimport { logger } from '../utils/logger.js'\nimport type { PackOptions } from '../types.js'\n\nconst log = logger.pack\n\n/**\n * Pack node_modules into SQLite DB\n * @param options - Pack command options\n */\nexport async function pack(options: PackOptions): Promise<void> {\n const { source, output, compressionLevel, includeLockfile } = options\n\n const nodeModulesPath = resolve(source)\n const dbPath = resolve(output)\n\n if (!existsSync(nodeModulesPath)) {\n throw new Error(`node_modules not found: ${nodeModulesPath}`)\n }\n\n log.start(`Scanning ${nodeModulesPath}`)\n\n const scanProgress = createProgressBar(100)\n const scanResult = await scanNodeModules(\n nodeModulesPath,\n (current, total, msg) => {\n scanProgress(current, total, msg)\n }\n )\n\n log.success(\n `Found ${scanResult.packages.length} packages, ${\n scanResult.totalFiles\n } files (${formatBytes(scanResult.totalSize)})`\n )\n\n // @fn cleanupDbFiles - cleanup existing DB and WAL/SHM files\n if (existsSync(dbPath)) {\n await rm(dbPath)\n if (existsSync(dbPath + '-wal')) await rm(dbPath + '-wal')\n if (existsSync(dbPath + '-shm')) await rm(dbPath + '-shm')\n }\n\n const store = new Store(dbPath)\n\n store.setMetadata('created_at', new Date().toISOString())\n store.setMetadata('node_version', process.version)\n store.setMetadata('source_path', nodeModulesPath)\n\n if (includeLockfile) {\n const lockfilePath = join(nodeModulesPath, '..', 'package-lock.json')\n\n if (existsSync(lockfilePath)) {\n const lockfileContent = await readFile(lockfilePath, 'utf8')\n store.setMetadata('lockfile_hash', hashString(lockfileContent))\n }\n }\n\n log.start('Packing files...')\n const packProgress = createProgressBar(scanResult.totalFiles)\n\n const insertBlob = store.prepareInsertBlob()\n const insertFile = store.prepareInsertFile()\n\n // @fn packFiles - iterate packages and pack files\n const packFiles = () => {\n let processedFiles = 0\n let deduplicatedCount = 0\n\n for (const pkg of scanResult.packages) {\n const packageId = store.insertPackage({\n name: pkg.name,\n version: pkg.version,\n path: pkg.path,\n })\n\n for (const file of pkg.files) {\n processedFiles++\n packProgress(\n processedFiles,\n scanResult.totalFiles,\n file.relativePath.slice(0, 40)\n )\n\n const content = readFileSync(file.absolutePath)\n const hash = hashBuffer(content)\n\n if (!store.hasBlob(hash)) {\n const compressed = compress(content, compressionLevel)\n insertBlob.run(hash, compressed, content.length, compressed.length)\n } else {\n deduplicatedCount++\n }\n\n insertFile.run(\n packageId,\n file.relativePath,\n hash,\n file.mode,\n file.mtime\n )\n }\n }\n\n return { deduplicatedCount }\n }\n\n const { deduplicatedCount } = store.transaction(packFiles)\n\n store.close()\n\n // @fn printPackSummary - print pack result summary\n const dbStats = await stat(dbPath)\n const compressionRatio = (\n (1 - dbStats.size / scanResult.totalSize) *\n 100\n ).toFixed(1)\n\n log.box({\n title: 'Pack Complete',\n message: [\n `Output: ${dbPath}`,\n `Original: 
${formatBytes(scanResult.totalSize)}`,\n `DB size: ${formatBytes(dbStats.size)}`,\n `Compression: ${compressionRatio}%`,\n `Deduplicated: ${deduplicatedCount}`,\n ].join('\\n'),\n style: {\n borderColor: 'green',\n },\n })\n}\n","import Database from 'better-sqlite3'\nimport type {\n PackageInfo,\n BlobInfo,\n FileRecord,\n MetadataKey,\n} from '../types.js'\n\n/** Current schema version */\nconst SCHEMA_VERSION = '1'\n\n/** Table creation SQL */\nconst CREATE_TABLES_SQL = `\n-- Metadata\nCREATE TABLE IF NOT EXISTS metadata (\n key TEXT PRIMARY KEY,\n value TEXT\n);\n\n-- Package information\nCREATE TABLE IF NOT EXISTS packages (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n name TEXT NOT NULL,\n version TEXT NOT NULL,\n path TEXT NOT NULL,\n UNIQUE(name, version, path)\n);\n\n-- Content-addressable blob storage\nCREATE TABLE IF NOT EXISTS blobs (\n hash TEXT PRIMARY KEY,\n content BLOB NOT NULL,\n original_size INTEGER,\n compressed_size INTEGER\n);\n\n-- Files per package\nCREATE TABLE IF NOT EXISTS files (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n package_id INTEGER REFERENCES packages(id),\n relative_path TEXT NOT NULL,\n blob_hash TEXT REFERENCES blobs(hash),\n mode INTEGER,\n mtime INTEGER,\n UNIQUE(package_id, relative_path)\n);\n\n-- Indexes\nCREATE INDEX IF NOT EXISTS idx_files_package ON files(package_id);\nCREATE INDEX IF NOT EXISTS idx_files_blob ON files(blob_hash);\n`\n\n/**\n * SQLite database store\n */\nexport class Store {\n /** better-sqlite3 database instance */\n private db: Database.Database\n\n /**\n * Create Store instance\n * @param dbPath - SQLite database file path\n */\n constructor(dbPath: string) {\n this.db = new Database(dbPath)\n this.db.pragma('journal_mode = WAL')\n this.db.pragma('synchronous = NORMAL')\n\n this.initSchema()\n }\n\n /** Initialize schema (create tables and set version) */\n private initSchema(): void {\n this.db.exec(CREATE_TABLES_SQL)\n\n this.setMetadata('schema_version', SCHEMA_VERSION)\n }\n\n /**\n * Save metadata\n * @param key - Metadata key\n * @param value - Value to save\n */\n setMetadata(key: MetadataKey, value: string): void {\n const stmt = this.db.prepare(`\n INSERT OR REPLACE INTO metadata (key, value) VALUES (?, ?)\n `)\n\n stmt.run(key, value)\n }\n\n /**\n * Get metadata\n * @param key - Metadata key\n * @returns Stored value or null\n */\n getMetadata(key: MetadataKey): string | null {\n const stmt = this.db.prepare(`SELECT value FROM metadata WHERE key = ?`)\n const row = stmt.get(key) as { value: string } | undefined\n\n return row?.value ?? 
null\n }\n\n /**\n * Insert package (returns ID only if already exists)\n * @param pkg - Package information\n * @returns Inserted package ID\n */\n insertPackage(pkg: PackageInfo): number {\n const stmt = this.db.prepare(`\n INSERT INTO packages (name, version, path) VALUES (?, ?, ?)\n ON CONFLICT(name, version, path) DO UPDATE SET name = name\n RETURNING id\n `)\n const result = stmt.get(pkg.name, pkg.version, pkg.path) as { id: number }\n\n return result.id\n }\n\n /**\n * Get package by ID\n * @param id - Package ID\n * @returns Package info or null\n */\n getPackageById(id: number): PackageInfo | null {\n const stmt = this.db.prepare(`SELECT * FROM packages WHERE id = ?`)\n const row = stmt.get(id) as\n | { id: number; name: string; version: string; path: string }\n | undefined\n\n if (!row) return null\n\n return {\n id: row.id,\n name: row.name,\n version: row.version,\n path: row.path,\n }\n }\n\n /**\n * Get all packages\n * @returns All package list\n */\n getAllPackages(): PackageInfo[] {\n const stmt = this.db.prepare(`SELECT * FROM packages`)\n const rows = stmt.all() as Array<{\n id: number\n name: string\n version: string\n path: string\n }>\n\n return rows.map((row) => ({\n id: row.id,\n name: row.name,\n version: row.version,\n path: row.path,\n }))\n }\n\n /**\n * Check if blob exists\n * @param hash - Blob hash\n * @returns Whether blob exists\n */\n hasBlob(hash: string): boolean {\n const stmt = this.db.prepare(`SELECT 1 FROM blobs WHERE hash = ?`)\n\n return stmt.get(hash) !== undefined\n }\n\n /**\n * Insert blob (ignore duplicates)\n * @param blob - Blob information\n */\n insertBlob(blob: BlobInfo): void {\n const stmt = this.db.prepare(`\n INSERT OR IGNORE INTO blobs (hash, content, original_size, compressed_size)\n VALUES (?, ?, ?, ?)\n `)\n\n stmt.run(blob.hash, blob.content, blob.originalSize, blob.compressedSize)\n }\n\n /**\n * Get blob content by hash\n * @param hash - Blob hash\n * @returns Compressed blob content or null\n */\n getBlob(hash: string): Buffer | null {\n const stmt = this.db.prepare(`SELECT content FROM blobs WHERE hash = ?`)\n const row = stmt.get(hash) as { content: Buffer } | undefined\n\n return row?.content ?? 
null\n }\n\n /**\n * Get blob storage statistics\n * @returns Blob count and size statistics\n */\n getBlobStats(): {\n totalBlobs: number\n totalOriginalSize: number\n totalCompressedSize: number\n } {\n const stmt = this.db.prepare(`\n SELECT\n COUNT(*) as count,\n COALESCE(SUM(original_size), 0) as original,\n COALESCE(SUM(compressed_size), 0) as compressed\n FROM blobs\n `)\n const row = stmt.get() as {\n count: number\n original: number\n compressed: number\n }\n\n return {\n totalBlobs: row.count,\n totalOriginalSize: row.original,\n totalCompressedSize: row.compressed,\n }\n }\n\n /**\n * Insert file record (update on conflict)\n * @param file - File record information\n */\n insertFile(file: FileRecord): void {\n const stmt = this.db.prepare(`\n INSERT INTO files (package_id, relative_path, blob_hash, mode, mtime)\n VALUES (?, ?, ?, ?, ?)\n ON CONFLICT(package_id, relative_path) DO UPDATE SET\n blob_hash = excluded.blob_hash,\n mode = excluded.mode,\n mtime = excluded.mtime\n `)\n\n stmt.run(\n file.packageId,\n file.relativePath,\n file.blobHash,\n file.mode,\n file.mtime\n )\n }\n\n /**\n * Get files by package ID\n * @param packageId - Package ID\n * @returns File records for the package\n */\n getFilesByPackage(packageId: number): FileRecord[] {\n const stmt = this.db.prepare(`\n SELECT id, package_id, relative_path, blob_hash, mode, mtime\n FROM files WHERE package_id = ?\n `)\n const rows = stmt.all(packageId) as Array<{\n id: number\n package_id: number\n relative_path: string\n blob_hash: string\n mode: number\n mtime: number\n }>\n\n return rows.map((row) => ({\n id: row.id,\n packageId: row.package_id,\n relativePath: row.relative_path,\n blobHash: row.blob_hash,\n mode: row.mode,\n mtime: row.mtime,\n }))\n }\n\n /**\n * Get all files (with package path)\n * @returns All file records\n */\n getAllFiles(): Array<FileRecord & { packagePath: string }> {\n const stmt = this.db.prepare(`\n SELECT f.id, f.package_id, f.relative_path, f.blob_hash, f.mode, f.mtime, p.path as package_path\n FROM files f\n JOIN packages p ON f.package_id = p.id\n `)\n const rows = stmt.all() as Array<{\n id: number\n package_id: number\n relative_path: string\n blob_hash: string\n mode: number\n mtime: number\n package_path: string\n }>\n\n return rows.map((row) => ({\n id: row.id,\n packageId: row.package_id,\n relativePath: row.relative_path,\n blobHash: row.blob_hash,\n mode: row.mode,\n mtime: row.mtime,\n packagePath: row.package_path,\n }))\n }\n\n /**\n * Get total file count\n * @returns Total number of files\n */\n getTotalFileCount(): number {\n const stmt = this.db.prepare(`SELECT COUNT(*) as count FROM files`)\n const row = stmt.get() as { count: number }\n\n return row.count\n }\n\n /**\n * Execute work within transaction\n * @param fn - Function to execute within transaction\n * @returns Function execution result\n */\n transaction<T>(fn: () => T): T {\n return this.db.transaction(fn)()\n }\n\n /**\n * Get prepared statement for bulk file insertion\n * @returns Prepared statement for file insertion\n */\n prepareInsertFile(): Database.Statement {\n return this.db.prepare(`\n INSERT INTO files (package_id, relative_path, blob_hash, mode, mtime)\n VALUES (?, ?, ?, ?, ?)\n ON CONFLICT(package_id, relative_path) DO UPDATE SET\n blob_hash = excluded.blob_hash,\n mode = excluded.mode,\n mtime = excluded.mtime\n `)\n }\n\n /**\n * Get prepared statement for bulk blob insertion\n * @returns Prepared statement for blob insertion\n */\n prepareInsertBlob(): Database.Statement {\n return 
this.db.prepare(`\n INSERT OR IGNORE INTO blobs (hash, content, original_size, compressed_size)\n VALUES (?, ?, ?, ?)\n `)\n }\n\n /** Close database connection */\n close(): void {\n this.db.close()\n }\n}\n","import { readdir, stat, readFile } from 'node:fs/promises'\nimport { join, relative } from 'node:path'\nimport { existsSync } from 'node:fs'\nimport type { FileEntry, PackageInfo, ProgressCallback } from '../types.js'\n\n/**\n * Extract package info from package.json\n * @param pkgJsonPath - Path to package.json file\n * @returns Package name and version, or null if parse failed\n */\nasync function parsePackageJson(\n pkgJsonPath: string\n): Promise<{ name: string; version: string } | null> {\n try {\n const content = await readFile(pkgJsonPath, 'utf8')\n const pkg = JSON.parse(content)\n\n return {\n name: pkg.name || 'unknown',\n version: pkg.version || '0.0.0',\n }\n } catch {\n return null\n }\n}\n\n/**\n * Check if pnpm structure\n * @param nodeModulesPath - Path to node_modules directory\n * @returns True if .pnpm directory exists\n */\nfunction isPnpmStructure(nodeModulesPath: string): boolean {\n return existsSync(join(nodeModulesPath, '.pnpm'))\n}\n\n/**\n * Recursively scan directory to collect all files\n * @param dir - Directory to scan\n * @param baseDir - Base directory for relative path calculation\n * @yields FileEntry for each file found\n */\nasync function* walkDir(\n dir: string,\n baseDir: string\n): AsyncGenerator<FileEntry> {\n const entries = await readdir(dir, { withFileTypes: true })\n\n for (const entry of entries) {\n const fullPath = join(dir, entry.name)\n\n if (entry.isDirectory()) {\n yield* walkDir(fullPath, baseDir)\n } else if (entry.isFile()) {\n const stats = await stat(fullPath)\n\n yield {\n relativePath: relative(baseDir, fullPath),\n absolutePath: fullPath,\n mode: stats.mode,\n size: stats.size,\n mtime: stats.mtimeMs,\n }\n }\n }\n}\n\n/**\n * Find package directories (handles scoped packages)\n * @param nodeModulesPath - Path to node_modules directory\n * @yields Package path and relative path\n */\nasync function* findPackageDirs(nodeModulesPath: string): AsyncGenerator<{\n path: string\n relativePath: string\n}> {\n const entries = await readdir(nodeModulesPath, { withFileTypes: true })\n\n for (const entry of entries) {\n if (!entry.isDirectory()) continue\n if (\n entry.name === '.bin' ||\n entry.name === '.cache' ||\n entry.name === '.pnpm'\n )\n continue\n\n const fullPath = join(nodeModulesPath, entry.name)\n\n if (entry.name.startsWith('@')) {\n // Scoped package (e.g., @types/node)\n const scopedEntries = await readdir(fullPath, { withFileTypes: true })\n\n for (const scopedEntry of scopedEntries) {\n if (!scopedEntry.isDirectory()) continue\n\n yield {\n path: join(fullPath, scopedEntry.name),\n relativePath: join(entry.name, scopedEntry.name),\n }\n }\n } else {\n yield {\n path: fullPath,\n relativePath: entry.name,\n }\n }\n }\n}\n\n/**\n * Find packages in pnpm's .pnpm directory\n * @param nodeModulesPath - Path to node_modules directory\n * @yields Package path and relative path\n */\nasync function* findPnpmPackageDirs(nodeModulesPath: string): AsyncGenerator<{\n path: string\n relativePath: string\n}> {\n const pnpmPath = join(nodeModulesPath, '.pnpm')\n const entries = await readdir(pnpmPath, { withFileTypes: true })\n\n for (const entry of entries) {\n if (!entry.isDirectory()) continue\n if (entry.name === 'node_modules' || entry.name.startsWith('.')) continue\n\n const fullPath = join(pnpmPath, entry.name)\n\n // pnpm 
structure: .pnpm/package-name@version/node_modules/package-name\n const innerNodeModules = join(fullPath, 'node_modules')\n\n if (!existsSync(innerNodeModules)) continue\n\n const innerEntries = await readdir(innerNodeModules, {\n withFileTypes: true,\n })\n\n for (const innerEntry of innerEntries) {\n if (!innerEntry.isDirectory()) continue\n if (innerEntry.name === '.bin') continue\n\n const pkgPath = join(innerNodeModules, innerEntry.name)\n\n if (innerEntry.name.startsWith('@')) {\n // Scoped package\n const scopedEntries = await readdir(pkgPath, {\n withFileTypes: true,\n })\n\n for (const scopedEntry of scopedEntries) {\n if (!scopedEntry.isDirectory()) continue\n\n yield {\n path: join(pkgPath, scopedEntry.name),\n relativePath: `.pnpm/${entry.name}/node_modules/${innerEntry.name}/${scopedEntry.name}`,\n }\n }\n } else {\n yield {\n path: pkgPath,\n relativePath: `.pnpm/${entry.name}/node_modules/${innerEntry.name}`,\n }\n }\n }\n }\n}\n\n/**\n * node_modules scan result\n */\nexport interface ScanResult {\n /** Scanned package list (with file info) */\n packages: Array<PackageInfo & { files: FileEntry[] }>\n /** Total file count */\n totalFiles: number\n /** Total file size (bytes) */\n totalSize: number\n}\n\n/**\n * Scan node_modules directory\n * @param nodeModulesPath - Path to node_modules directory\n * @param onProgress - Optional progress callback\n * @returns Scan result with packages and file statistics\n */\nexport async function scanNodeModules(\n nodeModulesPath: string,\n onProgress?: ProgressCallback\n): Promise<ScanResult> {\n // Check if pnpm structure\n const usePnpm = isPnpmStructure(nodeModulesPath)\n\n // Collect package directories first\n const packageDirs: Array<{ path: string; relativePath: string }> = []\n\n if (usePnpm) {\n for await (const dir of findPnpmPackageDirs(nodeModulesPath)) {\n packageDirs.push(dir)\n }\n } else {\n for await (const dir of findPackageDirs(nodeModulesPath)) {\n packageDirs.push(dir)\n }\n }\n\n // @fn scanPackages - iterate packages and scan files\n const scanPackages = async () => {\n const packages: Array<PackageInfo & { files: FileEntry[] }> = []\n\n let totalFiles = 0\n let totalSize = 0\n let packageCount = 0\n\n for (const { path: pkgPath, relativePath } of packageDirs) {\n packageCount++\n\n onProgress?.(packageCount, packageDirs.length, relativePath)\n\n const pkgJsonPath = join(pkgPath, 'package.json')\n const pkgInfo = await parsePackageJson(pkgJsonPath)\n\n if (!pkgInfo) continue\n\n const files: FileEntry[] = []\n\n for await (const file of walkDir(pkgPath, pkgPath)) {\n files.push(file)\n totalFiles++\n totalSize += file.size\n }\n\n packages.push({\n name: pkgInfo.name,\n version: pkgInfo.version,\n path: relativePath,\n files,\n })\n }\n\n return { packages, totalFiles, totalSize }\n }\n\n return scanPackages()\n}\n\n/**\n * Quick file count only\n * @param nodeModulesPath - Path to node_modules directory\n * @returns Total number of files\n */\nexport async function countFiles(nodeModulesPath: string): Promise<number> {\n const countDir = async (dir: string): Promise<number> => {\n const entries = await readdir(dir, { withFileTypes: true })\n\n let count = 0\n\n for (const entry of entries) {\n const fullPath = join(dir, entry.name)\n\n if (entry.isDirectory()) {\n count += await countDir(fullPath)\n } else if (entry.isFile()) {\n count++\n }\n }\n\n return count\n }\n\n return countDir(nodeModulesPath)\n}\n","import { createHash } from 'node:crypto'\n\n/**\n * Calculate SHA256 hash of a Buffer\n * @param data - 
Buffer to hash\n * @returns Hexadecimal hash string\n */\nexport function hashBuffer(data: Buffer): string {\n return createHash('sha256').update(data).digest('hex')\n}\n\n/**\n * Calculate SHA256 hash of a string\n * @param data - String to hash\n * @returns Hexadecimal hash string\n */\nexport function hashString(data: string): string {\n return createHash('sha256').update(data, 'utf8').digest('hex')\n}\n","import { gzipSync, gunzipSync } from 'node:zlib'\n\n/**\n * Compress data using gzip\n * @param data - Data to compress\n * @param level - Compression level (1-9, default: 6)\n * @returns Compressed buffer\n */\nexport function compress(data: Buffer, level: number = 6): Buffer {\n return gzipSync(data, { level })\n}\n\n/**\n * Decompress gzip data\n * @param data - Compressed data\n * @returns Decompressed buffer\n */\nexport function decompress(data: Buffer): Buffer {\n return gunzipSync(data)\n}\n","import type { ProgressCallback } from '../types.js'\n\n/**\n * Create a progress bar callback\n * @param total - Expected total count for progress calculation\n * @returns Progress callback function\n */\nexport function createProgressBar(total: number): ProgressCallback {\n const startTime = Date.now()\n\n return (current: number, actualTotal: number, message?: string) => {\n // @fn calculateProgress - calculate progress and render bar\n const t = actualTotal || total\n const ratio = t > 0 ? Math.min(current / t, 1) : 0\n const percent = Math.round(ratio * 100)\n const elapsed = ((Date.now() - startTime) / 1000).toFixed(1)\n const barWidth = 30\n const filled = Math.max(0, Math.round(ratio * barWidth))\n const empty = Math.max(0, barWidth - filled)\n const bar = '█'.repeat(filled) + '░'.repeat(empty)\n\n const line = `\\r[${bar}] ${percent}% (${current}/${t}) ${elapsed}s${\n message ? 
` - ${message}` : ''\n }`\n\n process.stdout.write(line)\n\n if (current >= t) {\n process.stdout.write('\\n')\n }\n }\n}\n\n/**\n * Format bytes to human-readable string\n * @param bytes - Number of bytes\n * @returns Formatted string (e.g., \"1.5 MB\")\n */\nexport function formatBytes(bytes: number): string {\n const units = ['B', 'KB', 'MB', 'GB']\n\n let size = bytes\n let unitIndex = 0\n\n while (size >= 1024 && unitIndex < units.length - 1) {\n size /= 1024\n unitIndex++\n }\n\n return `${size.toFixed(1)} ${units[unitIndex]}`\n}\n","import { consola, type ConsolaInstance } from 'consola'\n\n/** Default logger instances */\nexport const logger = {\n pack: consola.withTag('pack'),\n unpack: consola.withTag('unpack'),\n status: consola.withTag('status'),\n scan: consola.withTag('scan'),\n}\n\n/**\n * Create a new logger with tag\n * @param tag - Tag to identify log source\n * @returns Consola instance with tag\n */\nexport function createLogger(tag: string): ConsolaInstance {\n return consola.withTag(tag)\n}\n\n/** Re-export consola instance */\nexport { consola }\n","import { existsSync } from 'node:fs'\nimport { rm } from 'node:fs/promises'\nimport { resolve } from 'node:path'\nimport { Store } from '../core/store.js'\nimport { extractFiles } from '../core/extractor.js'\nimport { createProgressBar, formatBytes } from '../utils/progress.js'\nimport { logger } from '../utils/logger.js'\nimport type { UnpackOptions } from '../types.js'\n\nconst log = logger.unpack\n\n/**\n * Restore node_modules from SQLite DB\n * @param options - Unpack command options\n */\nexport async function unpack(options: UnpackOptions): Promise<void> {\n const { input, output, force } = options\n\n const dbPath = resolve(input)\n const outputPath = resolve(output)\n\n if (!existsSync(dbPath)) {\n throw new Error(`Database not found: ${dbPath}`)\n }\n\n if (existsSync(outputPath)) {\n if (!force) {\n throw new Error(\n `Output directory already exists: ${outputPath}. Use --force to overwrite.`\n )\n }\n\n log.warn(`Removing existing ${outputPath}...`)\n\n await rm(outputPath, { recursive: true, force: true })\n }\n\n log.info(`Opening ${dbPath}`)\n const store = new Store(dbPath)\n\n const createdAt = store.getMetadata('created_at')\n const nodeVersion = store.getMetadata('node_version')\n const totalFileCount = store.getTotalFileCount()\n const blobStats = store.getBlobStats()\n\n log.box({\n title: 'Database Info',\n message: [\n `Created: ${createdAt ?? 'unknown'}`,\n `Node version: ${nodeVersion ?? 
'unknown'}`,\n `Files: ${totalFileCount}`,\n `Original size: ${formatBytes(blobStats.totalOriginalSize)}`,\n `Compressed size: ${formatBytes(blobStats.totalCompressedSize)}`,\n ].join('\\n'),\n })\n\n log.start(`Extracting to ${outputPath}`)\n const progress = createProgressBar(totalFileCount)\n\n const startTime = Date.now()\n const result = await extractFiles(store, outputPath, progress)\n const elapsed = ((Date.now() - startTime) / 1000).toFixed(1)\n\n store.close()\n\n log.box({\n title: 'Unpack Complete',\n message: [\n `Extracted: ${result.totalFiles} files (${formatBytes(\n result.totalSize\n )})`,\n `Time: ${elapsed}s`,\n ].join('\\n'),\n style: {\n borderColor: 'green',\n },\n })\n}\n","import { mkdir, writeFile, chmod } from 'node:fs/promises'\nimport { dirname, join } from 'node:path'\nimport { Store } from './store.js'\nimport { decompress } from '../utils/compression.js'\nimport { logger } from '../utils/logger.js'\nimport type { ProgressCallback } from '../types.js'\n\nconst log = logger.unpack\n\n/**\n * Extract files from SQLite DB\n * @param store - Store instance to read from\n * @param outputPath - Output directory path\n * @param onProgress - Optional progress callback\n * @returns Total files and size extracted\n */\nexport async function extractFiles(\n store: Store,\n outputPath: string,\n onProgress?: ProgressCallback\n): Promise<{ totalFiles: number; totalSize: number }> {\n const files = store.getAllFiles()\n const totalFiles = files.length\n\n // Cache to prevent duplicate blob decompression\n const blobCache = new Map<string, Buffer>()\n\n // @fn getContent - get blob from cache or decompress\n const getContent = (blobHash: string): Buffer | null => {\n if (blobCache.has(blobHash)) {\n return blobCache.get(blobHash)!\n }\n\n const compressed = store.getBlob(blobHash)\n\n if (!compressed) return null\n\n const content = decompress(compressed)\n\n // Only cache files under 100KB (memory optimization)\n if (content.length < 100 * 1024) {\n blobCache.set(blobHash, content)\n }\n\n return content\n }\n\n // @fn iterateFiles - iterate and extract files\n const iterateFiles = async () => {\n let processedFiles = 0\n let totalSize = 0\n\n for (const file of files) {\n processedFiles++\n\n onProgress?.(processedFiles, totalFiles, file.relativePath.slice(0, 40))\n\n const fullPath = join(outputPath, file.packagePath, file.relativePath)\n\n await mkdir(dirname(fullPath), { recursive: true })\n\n const content = getContent(file.blobHash)\n\n if (!content) {\n log.warn(`Blob not found: ${file.relativePath}`)\n continue\n }\n\n await writeFile(fullPath, content)\n\n totalSize += content.length\n\n if (file.mode) {\n try {\n await chmod(fullPath, file.mode & 0o777)\n } catch {\n // Ignore permission errors (e.g., Windows)\n }\n }\n }\n\n return { totalSize }\n }\n\n const { totalSize } = await iterateFiles()\n\n return {\n totalFiles,\n totalSize,\n }\n}\n","import { existsSync } from 'node:fs'\nimport { readFile } from 'node:fs/promises'\nimport { resolve, join } from 'node:path'\nimport { Store } from '../core/store.js'\nimport { hashBuffer } from '../core/hasher.js'\nimport { createProgressBar } from '../utils/progress.js'\nimport { logger } from '../utils/logger.js'\nimport type { StatusResult } from '../types.js'\n\nconst log = logger.status\n\n/**\n * Status command options\n */\ninterface StatusOptions {\n /** Database file path */\n db: string\n /** node_modules directory path */\n nodeModules: string\n}\n\n/**\n * Compare DB with current node_modules\n * @param 
options - Status command options\n * @returns Comparison result with modified/missing files\n */\nexport async function status(options: StatusOptions): Promise<StatusResult> {\n const { db, nodeModules } = options\n\n const dbPath = resolve(db)\n const nodeModulesPath = resolve(nodeModules)\n\n if (!existsSync(dbPath)) {\n throw new Error(`Database not found: ${dbPath}`)\n }\n\n if (!existsSync(nodeModulesPath)) {\n log.warn(`node_modules not found: ${nodeModulesPath}`)\n log.info('Run \"mohyung unpack\" to restore from database.')\n\n return { onlyInDb: [], onlyInFs: [], modified: [], unchanged: 0 }\n }\n\n log.start('Comparing...')\n log.info(`DB: ${dbPath}`)\n log.info(`node_modules: ${nodeModulesPath}`)\n\n const store = new Store(dbPath)\n const files = store.getAllFiles()\n\n const result: StatusResult = {\n onlyInDb: [],\n onlyInFs: [],\n modified: [],\n unchanged: 0,\n }\n\n const progress = createProgressBar(files.length)\n const dbPaths = new Set<string>()\n\n for (const [index, file] of files.entries()) {\n const relativePath = join(file.packagePath, file.relativePath)\n const fullPath = join(nodeModulesPath, relativePath)\n\n dbPaths.add(relativePath)\n\n progress(index + 1, files.length, relativePath.slice(0, 40))\n\n if (!existsSync(fullPath)) {\n result.onlyInDb.push(relativePath)\n continue\n }\n\n // @fn compareFileHash - compare filesystem and DB hashes\n try {\n const fsContent = await readFile(fullPath)\n const fsHash = hashBuffer(fsContent)\n\n if (fsHash !== file.blobHash) {\n result.modified.push(relativePath)\n } else {\n result.unchanged++\n }\n } catch {\n result.modified.push(relativePath)\n }\n }\n\n store.close()\n\n const summaryLines = [\n `Unchanged: ${result.unchanged}`,\n `Modified: ${result.modified.length}`,\n `Only in DB: ${result.onlyInDb.length}`,\n ]\n\n if (result.modified.length > 0 && result.modified.length <= 10) {\n summaryLines.push('', 'Modified files:')\n result.modified.forEach((f) => summaryLines.push(` M ${f}`))\n }\n\n if (result.onlyInDb.length > 0 && result.onlyInDb.length <= 10) {\n summaryLines.push('', 'Only in DB (deleted locally):')\n result.onlyInDb.forEach((f) => summaryLines.push(` D ${f}`))\n }\n\n if (result.modified.length > 10 || result.onlyInDb.length > 10) {\n summaryLines.push('', '(Use verbose mode for full list)')\n }\n\n const isClean = result.modified.length === 0 && result.onlyInDb.length === 0\n\n log.box({\n title: 'Status',\n message: summaryLines.join('\\n'),\n style: {\n borderColor: isClean ? 
'green' : 'yellow',\n },\n })\n\n if (isClean) {\n log.success('All files match!')\n }\n\n return result\n}\n"],"mappings":";;;AAAA,SAAS,eAAe;;;ACAxB,SAAS,YAAAA,WAAU,QAAAC,OAAM,UAAU;AACnC,SAAS,cAAAC,aAAY,oBAAoB;AACzC,SAAS,QAAAC,OAAM,eAAe;;;ACF9B,OAAO,cAAc;AASrB,IAAM,iBAAiB;AAGvB,IAAM,oBAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA2CnB,IAAM,QAAN,MAAY;AAAA;AAAA,EAET;AAAA;AAAA;AAAA;AAAA;AAAA,EAMR,YAAY,QAAgB;AAC1B,SAAK,KAAK,IAAI,SAAS,MAAM;AAC7B,SAAK,GAAG,OAAO,oBAAoB;AACnC,SAAK,GAAG,OAAO,sBAAsB;AAErC,SAAK,WAAW;AAAA,EAClB;AAAA;AAAA,EAGQ,aAAmB;AACzB,SAAK,GAAG,KAAK,iBAAiB;AAE9B,SAAK,YAAY,kBAAkB,cAAc;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,YAAY,KAAkB,OAAqB;AACjD,UAAM,OAAO,KAAK,GAAG,QAAQ;AAAA;AAAA,KAE5B;AAED,SAAK,IAAI,KAAK,KAAK;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,YAAY,KAAiC;AAC3C,UAAM,OAAO,KAAK,GAAG,QAAQ,0CAA0C;AACvE,UAAM,MAAM,KAAK,IAAI,GAAG;AAExB,WAAO,KAAK,SAAS;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,cAAc,KAA0B;AACtC,UAAM,OAAO,KAAK,GAAG,QAAQ;AAAA;AAAA;AAAA;AAAA,KAI5B;AACD,UAAM,SAAS,KAAK,IAAI,IAAI,MAAM,IAAI,SAAS,IAAI,IAAI;AAEvD,WAAO,OAAO;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,eAAe,IAAgC;AAC7C,UAAM,OAAO,KAAK,GAAG,QAAQ,qCAAqC;AAClE,UAAM,MAAM,KAAK,IAAI,EAAE;AAIvB,QAAI,CAAC,IAAK,QAAO;AAEjB,WAAO;AAAA,MACL,IAAI,IAAI;AAAA,MACR,MAAM,IAAI;AAAA,MACV,SAAS,IAAI;AAAA,MACb,MAAM,IAAI;AAAA,IACZ;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,iBAAgC;AAC9B,UAAM,OAAO,KAAK,GAAG,QAAQ,wBAAwB;AACrD,UAAM,OAAO,KAAK,IAAI;AAOtB,WAAO,KAAK,IAAI,CAAC,SAAS;AAAA,MACxB,IAAI,IAAI;AAAA,MACR,MAAM,IAAI;AAAA,MACV,SAAS,IAAI;AAAA,MACb,MAAM,IAAI;AAAA,IACZ,EAAE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,QAAQ,MAAuB;AAC7B,UAAM,OAAO,KAAK,GAAG,QAAQ,oCAAoC;AAEjE,WAAO,KAAK,IAAI,IAAI,MAAM;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,WAAW,MAAsB;AAC/B,UAAM,OAAO,KAAK,GAAG,QAAQ;AAAA;AAAA;AAAA,KAG5B;AAED,SAAK,IAAI,KAAK,MAAM,KAAK,SAAS,KAAK,cAAc,KAAK,cAAc;AAAA,EAC1E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,QAAQ,MAA6B;AACnC,UAAM,OAAO,KAAK,GAAG,QAAQ,0CAA0C;AACvE,UAAM,MAAM,KAAK,IAAI,IAAI;AAEzB,WAAO,KAAK,WAAW;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,eAIE;AACA,UAAM,OAAO,KAAK,GAAG,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAM5B;AACD,UAAM,MAAM,KAAK,IAAI;AAMrB,WAAO;AAAA,MACL,YAAY,IAAI;AAAA,MAChB,mBAAmB,IAAI;AAAA,MACvB,qBAAqB,IAAI;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,WAAW,MAAwB;AACjC,UAAM,OAAO,KAAK,GAAG,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAO5B;AAED,SAAK;AAAA,MACH,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,kBAAkB,WAAiC;AACjD,UAAM,OAAO,KAAK,GAAG,QAAQ;AAAA;AAAA;AAAA,KAG5B;AACD,UAAM,OAAO,KAAK,IAAI,SAAS;AAS/B,WAAO,KAAK,IAAI,CAAC,SAAS;AAAA,MACxB,IAAI,IAAI;AAAA,MACR,WAAW,IAAI;AAAA,MACf,cAAc,IAAI;AAAA,MAClB,UAAU,IAAI;AAAA,MACd,MAAM,IAAI;AAAA,MACV,OAAO,IAAI;AAAA,IACb,EAAE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,cAA2D;AACzD,UAAM,OAAO,KAAK,GAAG,QAAQ;AAAA;AAAA;AAAA;AAAA,KAI5B;AACD,UAAM,OAAO,KAAK,IAAI;AAUtB,WAAO,KAAK,IAAI,CAAC,SAAS;AAAA,MACxB,IAAI,IAAI;AAAA,MACR,WAAW,IAAI;AAAA,MACf,cAAc,IAAI;AAAA,MAClB,UAAU,IAAI;AAAA,MACd,MAAM,IAAI;AAAA,MACV,OAAO,IAAI;AAAA,MACX,aAAa,IAAI;AAAA,IACnB,EAAE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,oBAA4B;AAC1B,UAAM,OAAO,KAAK,GAAG,QAAQ,qCAAqC;AAClE,UAAM,MAAM,KAAK,IAAI;AAErB,WAAO,IAAI;AAAA,EACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,YAAe,IAAgB;AAC7B,WAAO,KAAK,GAAG,YAAY,EAAE,EAAE;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,oBAAwC;AACtC,WAAO,KAAK,GAAG,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAOtB;AAAA,EACH;AAA
A;AAAA;AAAA;AAAA;AAAA,EAMA,oBAAwC;AACtC,WAAO,KAAK,GAAG,QAAQ;AAAA;AAAA;AAAA,KAGtB;AAAA,EACH;AAAA;AAAA,EAGA,QAAc;AACZ,SAAK,GAAG,MAAM;AAAA,EAChB;AACF;;;ACvWA,SAAS,SAAS,MAAM,gBAAgB;AACxC,SAAS,MAAM,gBAAgB;AAC/B,SAAS,kBAAkB;AAQ3B,eAAe,iBACb,aACmD;AACnD,MAAI;AACF,UAAM,UAAU,MAAM,SAAS,aAAa,MAAM;AAClD,UAAM,MAAM,KAAK,MAAM,OAAO;AAE9B,WAAO;AAAA,MACL,MAAM,IAAI,QAAQ;AAAA,MAClB,SAAS,IAAI,WAAW;AAAA,IAC1B;AAAA,EACF,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAOA,SAAS,gBAAgB,iBAAkC;AACzD,SAAO,WAAW,KAAK,iBAAiB,OAAO,CAAC;AAClD;AAQA,gBAAgB,QACd,KACA,SAC2B;AAC3B,QAAM,UAAU,MAAM,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAE1D,aAAW,SAAS,SAAS;AAC3B,UAAM,WAAW,KAAK,KAAK,MAAM,IAAI;AAErC,QAAI,MAAM,YAAY,GAAG;AACvB,aAAO,QAAQ,UAAU,OAAO;AAAA,IAClC,WAAW,MAAM,OAAO,GAAG;AACzB,YAAM,QAAQ,MAAM,KAAK,QAAQ;AAEjC,YAAM;AAAA,QACJ,cAAc,SAAS,SAAS,QAAQ;AAAA,QACxC,cAAc;AAAA,QACd,MAAM,MAAM;AAAA,QACZ,MAAM,MAAM;AAAA,QACZ,OAAO,MAAM;AAAA,MACf;AAAA,IACF;AAAA,EACF;AACF;AAOA,gBAAgB,gBAAgB,iBAG7B;AACD,QAAM,UAAU,MAAM,QAAQ,iBAAiB,EAAE,eAAe,KAAK,CAAC;AAEtE,aAAW,SAAS,SAAS;AAC3B,QAAI,CAAC,MAAM,YAAY,EAAG;AAC1B,QACE,MAAM,SAAS,UACf,MAAM,SAAS,YACf,MAAM,SAAS;AAEf;AAEF,UAAM,WAAW,KAAK,iBAAiB,MAAM,IAAI;AAEjD,QAAI,MAAM,KAAK,WAAW,GAAG,GAAG;AAE9B,YAAM,gBAAgB,MAAM,QAAQ,UAAU,EAAE,eAAe,KAAK,CAAC;AAErE,iBAAW,eAAe,eAAe;AACvC,YAAI,CAAC,YAAY,YAAY,EAAG;AAEhC,cAAM;AAAA,UACJ,MAAM,KAAK,UAAU,YAAY,IAAI;AAAA,UACrC,cAAc,KAAK,MAAM,MAAM,YAAY,IAAI;AAAA,QACjD;AAAA,MACF;AAAA,IACF,OAAO;AACL,YAAM;AAAA,QACJ,MAAM;AAAA,QACN,cAAc,MAAM;AAAA,MACtB;AAAA,IACF;AAAA,EACF;AACF;AAOA,gBAAgB,oBAAoB,iBAGjC;AACD,QAAM,WAAW,KAAK,iBAAiB,OAAO;AAC9C,QAAM,UAAU,MAAM,QAAQ,UAAU,EAAE,eAAe,KAAK,CAAC;AAE/D,aAAW,SAAS,SAAS;AAC3B,QAAI,CAAC,MAAM,YAAY,EAAG;AAC1B,QAAI,MAAM,SAAS,kBAAkB,MAAM,KAAK,WAAW,GAAG,EAAG;AAEjE,UAAM,WAAW,KAAK,UAAU,MAAM,IAAI;AAG1C,UAAM,mBAAmB,KAAK,UAAU,cAAc;AAEtD,QAAI,CAAC,WAAW,gBAAgB,EAAG;AAEnC,UAAM,eAAe,MAAM,QAAQ,kBAAkB;AAAA,MACnD,eAAe;AAAA,IACjB,CAAC;AAED,eAAW,cAAc,cAAc;AACrC,UAAI,CAAC,WAAW,YAAY,EAAG;AAC/B,UAAI,WAAW,SAAS,OAAQ;AAEhC,YAAM,UAAU,KAAK,kBAAkB,WAAW,IAAI;AAEtD,UAAI,WAAW,KAAK,WAAW,GAAG,GAAG;AAEnC,cAAM,gBAAgB,MAAM,QAAQ,SAAS;AAAA,UAC3C,eAAe;AAAA,QACjB,CAAC;AAED,mBAAW,eAAe,eAAe;AACvC,cAAI,CAAC,YAAY,YAAY,EAAG;AAEhC,gBAAM;AAAA,YACJ,MAAM,KAAK,SAAS,YAAY,IAAI;AAAA,YACpC,cAAc,SAAS,MAAM,IAAI,iBAAiB,WAAW,IAAI,IAAI,YAAY,IAAI;AAAA,UACvF;AAAA,QACF;AAAA,MACF,OAAO;AACL,cAAM;AAAA,UACJ,MAAM;AAAA,UACN,cAAc,SAAS,MAAM,IAAI,iBAAiB,WAAW,IAAI;AAAA,QACnE;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAoBA,eAAsB,gBACpB,iBACA,YACqB;AAErB,QAAM,UAAU,gBAAgB,eAAe;AAG/C,QAAM,cAA6D,CAAC;AAEpE,MAAI,SAAS;AACX,qBAAiB,OAAO,oBAAoB,eAAe,GAAG;AAC5D,kBAAY,KAAK,GAAG;AAAA,IACtB;AAAA,EACF,OAAO;AACL,qBAAiB,OAAO,gBAAgB,eAAe,GAAG;AACxD,kBAAY,KAAK,GAAG;AAAA,IACtB;AAAA,EACF;AAGA,QAAM,eAAe,YAAY;AAC/B,UAAM,WAAwD,CAAC;AAE/D,QAAI,aAAa;AACjB,QAAI,YAAY;AAChB,QAAI,eAAe;AAEnB,eAAW,EAAE,MAAM,SAAS,aAAa,KAAK,aAAa;AACzD;AAEA,mBAAa,cAAc,YAAY,QAAQ,YAAY;AAE3D,YAAM,cAAc,KAAK,SAAS,cAAc;AAChD,YAAM,UAAU,MAAM,iBAAiB,WAAW;AAElD,UAAI,CAAC,QAAS;AAEd,YAAM,QAAqB,CAAC;AAE5B,uBAAiB,QAAQ,QAAQ,SAAS,OAAO,GAAG;AAClD,cAAM,KAAK,IAAI;AACf;AACA,qBAAa,KAAK;AAAA,MACpB;AAEA,eAAS,KAAK;AAAA,QACZ,MAAM,QAAQ;AAAA,QACd,SAAS,QAAQ;AAAA,QACjB,MAAM;AAAA,QACN;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO,EAAE,UAAU,YAAY,UAAU;AAAA,EAC3C;AAEA,SAAO,aAAa;AACtB;;;AClPA,SAAS,kBAAkB;AAOpB,SAAS,WAAW,MAAsB;AAC/C,SAAO,WAAW,QAAQ,EAAE,OAAO,IAAI,EAAE,OAAO,KAAK;AACvD;AAOO,SAAS,WAAW,MAAsB;AAC/C,SAAO,WAAW,QAAQ,EAAE,OAAO,MAAM,MAAM,EAAE,OAAO,KAAK;AAC/D;;;AClBA,SAAS,UAAU,kBAAkB;AAQ9B,SAAS,SAAS,MAAc,QAAgB,GAAW;AAChE,SAAO,SAAS,MAAM,EAAE,MAAM,CAAC;AACjC;AAOO,SAAS,WAAW,MAAsB;AAC/C,SAAO,WAAW,IAAI;AACxB;;;ACZO,SAAS,kBAAkB,OAAiC;AACjE,QAAM,YAAY,KAAK,IAAI;AAE
3B,SAAO,CAAC,SAAiB,aAAqB,YAAqB;AAEjE,UAAM,IAAI,eAAe;AACzB,UAAM,QAAQ,IAAI,IAAI,KAAK,IAAI,UAAU,GAAG,CAAC,IAAI;AACjD,UAAM,UAAU,KAAK,MAAM,QAAQ,GAAG;AACtC,UAAM,YAAY,KAAK,IAAI,IAAI,aAAa,KAAM,QAAQ,CAAC;AAC3D,UAAM,WAAW;AACjB,UAAM,SAAS,KAAK,IAAI,GAAG,KAAK,MAAM,QAAQ,QAAQ,CAAC;AACvD,UAAM,QAAQ,KAAK,IAAI,GAAG,WAAW,MAAM;AAC3C,UAAM,MAAM,SAAI,OAAO,MAAM,IAAI,SAAI,OAAO,KAAK;AAEjD,UAAM,OAAO,MAAM,GAAG,KAAK,OAAO,MAAM,OAAO,IAAI,CAAC,KAAK,OAAO,IAC9D,UAAU,MAAM,OAAO,KAAK,EAC9B;AAEA,YAAQ,OAAO,MAAM,IAAI;AAEzB,QAAI,WAAW,GAAG;AAChB,cAAQ,OAAO,MAAM,IAAI;AAAA,IAC3B;AAAA,EACF;AACF;AAOO,SAAS,YAAY,OAAuB;AACjD,QAAM,QAAQ,CAAC,KAAK,MAAM,MAAM,IAAI;AAEpC,MAAI,OAAO;AACX,MAAI,YAAY;AAEhB,SAAO,QAAQ,QAAQ,YAAY,MAAM,SAAS,GAAG;AACnD,YAAQ;AACR;AAAA,EACF;AAEA,SAAO,GAAG,KAAK,QAAQ,CAAC,CAAC,IAAI,MAAM,SAAS,CAAC;AAC/C;;;AClDA,SAAS,eAAqC;AAGvC,IAAM,SAAS;AAAA,EACpB,MAAM,QAAQ,QAAQ,MAAM;AAAA,EAC5B,QAAQ,QAAQ,QAAQ,QAAQ;AAAA,EAChC,QAAQ,QAAQ,QAAQ,QAAQ;AAAA,EAChC,MAAM,QAAQ,QAAQ,MAAM;AAC9B;;;ANGA,IAAM,MAAM,OAAO;AAMnB,eAAsB,KAAK,SAAqC;AAC9D,QAAM,EAAE,QAAQ,QAAQ,kBAAkB,gBAAgB,IAAI;AAE9D,QAAM,kBAAkB,QAAQ,MAAM;AACtC,QAAM,SAAS,QAAQ,MAAM;AAE7B,MAAI,CAACC,YAAW,eAAe,GAAG;AAChC,UAAM,IAAI,MAAM,2BAA2B,eAAe,EAAE;AAAA,EAC9D;AAEA,MAAI,MAAM,YAAY,eAAe,EAAE;AAEvC,QAAM,eAAe,kBAAkB,GAAG;AAC1C,QAAM,aAAa,MAAM;AAAA,IACvB;AAAA,IACA,CAAC,SAAS,OAAO,QAAQ;AACvB,mBAAa,SAAS,OAAO,GAAG;AAAA,IAClC;AAAA,EACF;AAEA,MAAI;AAAA,IACF,SAAS,WAAW,SAAS,MAAM,cACjC,WAAW,UACb,WAAW,YAAY,WAAW,SAAS,CAAC;AAAA,EAC9C;AAGA,MAAIA,YAAW,MAAM,GAAG;AACtB,UAAM,GAAG,MAAM;AACf,QAAIA,YAAW,SAAS,MAAM,EAAG,OAAM,GAAG,SAAS,MAAM;AACzD,QAAIA,YAAW,SAAS,MAAM,EAAG,OAAM,GAAG,SAAS,MAAM;AAAA,EAC3D;AAEA,QAAM,QAAQ,IAAI,MAAM,MAAM;AAE9B,QAAM,YAAY,eAAc,oBAAI,KAAK,GAAE,YAAY,CAAC;AACxD,QAAM,YAAY,gBAAgB,QAAQ,OAAO;AACjD,QAAM,YAAY,eAAe,eAAe;AAEhD,MAAI,iBAAiB;AACnB,UAAM,eAAeC,MAAK,iBAAiB,MAAM,mBAAmB;AAEpE,QAAID,YAAW,YAAY,GAAG;AAC5B,YAAM,kBAAkB,MAAME,UAAS,cAAc,MAAM;AAC3D,YAAM,YAAY,iBAAiB,WAAW,eAAe,CAAC;AAAA,IAChE;AAAA,EACF;AAEA,MAAI,MAAM,kBAAkB;AAC5B,QAAM,eAAe,kBAAkB,WAAW,UAAU;AAE5D,QAAM,aAAa,MAAM,kBAAkB;AAC3C,QAAM,aAAa,MAAM,kBAAkB;AAG3C,QAAM,YAAY,MAAM;AACtB,QAAI,iBAAiB;AACrB,QAAIC,qBAAoB;AAExB,eAAW,OAAO,WAAW,UAAU;AACrC,YAAM,YAAY,MAAM,cAAc;AAAA,QACpC,MAAM,IAAI;AAAA,QACV,SAAS,IAAI;AAAA,QACb,MAAM,IAAI;AAAA,MACZ,CAAC;AAED,iBAAW,QAAQ,IAAI,OAAO;AAC5B;AACA;AAAA,UACE;AAAA,UACA,WAAW;AAAA,UACX,KAAK,aAAa,MAAM,GAAG,EAAE;AAAA,QAC/B;AAEA,cAAM,UAAU,aAAa,KAAK,YAAY;AAC9C,cAAM,OAAO,WAAW,OAAO;AAE/B,YAAI,CAAC,MAAM,QAAQ,IAAI,GAAG;AACxB,gBAAM,aAAa,SAAS,SAAS,gBAAgB;AACrD,qBAAW,IAAI,MAAM,YAAY,QAAQ,QAAQ,WAAW,MAAM;AAAA,QACpE,OAAO;AACL,UAAAA;AAAA,QACF;AAEA,mBAAW;AAAA,UACT;AAAA,UACA,KAAK;AAAA,UACL;AAAA,UACA,KAAK;AAAA,UACL,KAAK;AAAA,QACP;AAAA,MACF;AAAA,IACF;AAEA,WAAO,EAAE,mBAAAA,mBAAkB;AAAA,EAC7B;AAEA,QAAM,EAAE,kBAAkB,IAAI,MAAM,YAAY,SAAS;AAEzD,QAAM,MAAM;AAGZ,QAAM,UAAU,MAAMC,MAAK,MAAM;AACjC,QAAM,qBACH,IAAI,QAAQ,OAAO,WAAW,aAC/B,KACA,QAAQ,CAAC;AAEX,MAAI,IAAI;AAAA,IACN,OAAO;AAAA,IACP,SAAS;AAAA,MACP,WAAW,MAAM;AAAA,MACjB,aAAa,YAAY,WAAW,SAAS,CAAC;AAAA,MAC9C,YAAY,YAAY,QAAQ,IAAI,CAAC;AAAA,MACrC,gBAAgB,gBAAgB;AAAA,MAChC,iBAAiB,iBAAiB;AAAA,IACpC,EAAE,KAAK,IAAI;AAAA,IACX,OAAO;AAAA,MACL,aAAa;AAAA,IACf;AAAA,EACF,CAAC;AACH;;;AO1IA,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,MAAAC,WAAU;AACnB,SAAS,WAAAC,gBAAe;;;ACFxB,SAAS,OAAO,WAAW,aAAa;AACxC,SAAS,SAAS,QAAAC,aAAY;AAM9B,IAAMC,OAAM,OAAO;AASnB,eAAsB,aACpB,OACA,YACA,YACoD;AACpD,QAAM,QAAQ,MAAM,YAAY;AAChC,QAAM,aAAa,MAAM;AAGzB,QAAM,YAAY,oBAAI,IAAoB;AAG1C,QAAM,aAAa,CAAC,aAAoC;AACtD,QAAI,UAAU,IAAI,QAAQ,GAAG;AAC3B,aAAO,UAAU,IAAI,QAAQ;AAAA,IAC/B;AAEA,UAAM,aAAa,MAAM,QAAQ,QAAQ;AAEzC,QAAI,CAAC,WAAY,QAAO;AAExB,UAAM,UAAU,WAAW,UAAU;AAGrC,QAAI,QAAQ,SAAS,MAAM,MAAM;AA
C/B,gBAAU,IAAI,UAAU,OAAO;AAAA,IACjC;AAEA,WAAO;AAAA,EACT;AAGA,QAAM,eAAe,YAAY;AAC/B,QAAI,iBAAiB;AACrB,QAAIC,aAAY;AAEhB,eAAW,QAAQ,OAAO;AACxB;AAEA,mBAAa,gBAAgB,YAAY,KAAK,aAAa,MAAM,GAAG,EAAE,CAAC;AAEvE,YAAM,WAAWC,MAAK,YAAY,KAAK,aAAa,KAAK,YAAY;AAErE,YAAM,MAAM,QAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AAElD,YAAM,UAAU,WAAW,KAAK,QAAQ;AAExC,UAAI,CAAC,SAAS;AACZ,QAAAF,KAAI,KAAK,mBAAmB,KAAK,YAAY,EAAE;AAC/C;AAAA,MACF;AAEA,YAAM,UAAU,UAAU,OAAO;AAEjC,MAAAC,cAAa,QAAQ;AAErB,UAAI,KAAK,MAAM;AACb,YAAI;AACF,gBAAM,MAAM,UAAU,KAAK,OAAO,GAAK;AAAA,QACzC,QAAQ;AAAA,QAER;AAAA,MACF;AAAA,IACF;AAEA,WAAO,EAAE,WAAAA,WAAU;AAAA,EACrB;AAEA,QAAM,EAAE,UAAU,IAAI,MAAM,aAAa;AAEzC,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;;;ADjFA,IAAME,OAAM,OAAO;AAMnB,eAAsB,OAAO,SAAuC;AAClE,QAAM,EAAE,OAAO,QAAQ,MAAM,IAAI;AAEjC,QAAM,SAASC,SAAQ,KAAK;AAC5B,QAAM,aAAaA,SAAQ,MAAM;AAEjC,MAAI,CAACC,YAAW,MAAM,GAAG;AACvB,UAAM,IAAI,MAAM,uBAAuB,MAAM,EAAE;AAAA,EACjD;AAEA,MAAIA,YAAW,UAAU,GAAG;AAC1B,QAAI,CAAC,OAAO;AACV,YAAM,IAAI;AAAA,QACR,oCAAoC,UAAU;AAAA,MAChD;AAAA,IACF;AAEA,IAAAF,KAAI,KAAK,qBAAqB,UAAU,KAAK;AAE7C,UAAMG,IAAG,YAAY,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,EACvD;AAEA,EAAAH,KAAI,KAAK,WAAW,MAAM,EAAE;AAC5B,QAAM,QAAQ,IAAI,MAAM,MAAM;AAE9B,QAAM,YAAY,MAAM,YAAY,YAAY;AAChD,QAAM,cAAc,MAAM,YAAY,cAAc;AACpD,QAAM,iBAAiB,MAAM,kBAAkB;AAC/C,QAAM,YAAY,MAAM,aAAa;AAErC,EAAAA,KAAI,IAAI;AAAA,IACN,OAAO;AAAA,IACP,SAAS;AAAA,MACP,YAAY,aAAa,SAAS;AAAA,MAClC,iBAAiB,eAAe,SAAS;AAAA,MACzC,UAAU,cAAc;AAAA,MACxB,kBAAkB,YAAY,UAAU,iBAAiB,CAAC;AAAA,MAC1D,oBAAoB,YAAY,UAAU,mBAAmB,CAAC;AAAA,IAChE,EAAE,KAAK,IAAI;AAAA,EACb,CAAC;AAED,EAAAA,KAAI,MAAM,iBAAiB,UAAU,EAAE;AACvC,QAAM,WAAW,kBAAkB,cAAc;AAEjD,QAAM,YAAY,KAAK,IAAI;AAC3B,QAAM,SAAS,MAAM,aAAa,OAAO,YAAY,QAAQ;AAC7D,QAAM,YAAY,KAAK,IAAI,IAAI,aAAa,KAAM,QAAQ,CAAC;AAE3D,QAAM,MAAM;AAEZ,EAAAA,KAAI,IAAI;AAAA,IACN,OAAO;AAAA,IACP,SAAS;AAAA,MACP,cAAc,OAAO,UAAU,WAAW;AAAA,QACxC,OAAO;AAAA,MACT,CAAC;AAAA,MACD,SAAS,OAAO;AAAA,IAClB,EAAE,KAAK,IAAI;AAAA,IACX,OAAO;AAAA,MACL,aAAa;AAAA,IACf;AAAA,EACF,CAAC;AACH;;;AE7EA,SAAS,cAAAI,mBAAkB;AAC3B,SAAS,YAAAC,iBAAgB;AACzB,SAAS,WAAAC,UAAS,QAAAC,aAAY;AAO9B,IAAMC,OAAM,OAAO;AAiBnB,eAAsB,OAAO,SAA+C;AAC1E,QAAM,EAAE,IAAI,YAAY,IAAI;AAE5B,QAAM,SAASC,SAAQ,EAAE;AACzB,QAAM,kBAAkBA,SAAQ,WAAW;AAE3C,MAAI,CAACC,YAAW,MAAM,GAAG;AACvB,UAAM,IAAI,MAAM,uBAAuB,MAAM,EAAE;AAAA,EACjD;AAEA,MAAI,CAACA,YAAW,eAAe,GAAG;AAChC,IAAAF,KAAI,KAAK,2BAA2B,eAAe,EAAE;AACrD,IAAAA,KAAI,KAAK,gDAAgD;AAEzD,WAAO,EAAE,UAAU,CAAC,GAAG,UAAU,CAAC,GAAG,UAAU,CAAC,GAAG,WAAW,EAAE;AAAA,EAClE;AAEA,EAAAA,KAAI,MAAM,cAAc;AACxB,EAAAA,KAAI,KAAK,OAAO,MAAM,EAAE;AACxB,EAAAA,KAAI,KAAK,iBAAiB,eAAe,EAAE;AAE3C,QAAM,QAAQ,IAAI,MAAM,MAAM;AAC9B,QAAM,QAAQ,MAAM,YAAY;AAEhC,QAAM,SAAuB;AAAA,IAC3B,UAAU,CAAC;AAAA,IACX,UAAU,CAAC;AAAA,IACX,UAAU,CAAC;AAAA,IACX,WAAW;AAAA,EACb;AAEA,QAAM,WAAW,kBAAkB,MAAM,MAAM;AAC/C,QAAM,UAAU,oBAAI,IAAY;AAEhC,aAAW,CAAC,OAAO,IAAI,KAAK,MAAM,QAAQ,GAAG;AAC3C,UAAM,eAAeG,MAAK,KAAK,aAAa,KAAK,YAAY;AAC7D,UAAM,WAAWA,MAAK,iBAAiB,YAAY;AAEnD,YAAQ,IAAI,YAAY;AAExB,aAAS,QAAQ,GAAG,MAAM,QAAQ,aAAa,MAAM,GAAG,EAAE,CAAC;AAE3D,QAAI,CAACD,YAAW,QAAQ,GAAG;AACzB,aAAO,SAAS,KAAK,YAAY;AACjC;AAAA,IACF;AAGA,QAAI;AACF,YAAM,YAAY,MAAME,UAAS,QAAQ;AACzC,YAAM,SAAS,WAAW,SAAS;AAEnC,UAAI,WAAW,KAAK,UAAU;AAC5B,eAAO,SAAS,KAAK,YAAY;AAAA,MACnC,OAAO;AACL,eAAO;AAAA,MACT;AAAA,IACF,QAAQ;AACN,aAAO,SAAS,KAAK,YAAY;AAAA,IACnC;AAAA,EACF;AAEA,QAAM,MAAM;AAEZ,QAAM,eAAe;AAAA,IACnB,cAAc,OAAO,SAAS;AAAA,IAC9B,aAAa,OAAO,SAAS,MAAM;AAAA,IACnC,eAAe,OAAO,SAAS,MAAM;AAAA,EACvC;AAEA,MAAI,OAAO,SAAS,SAAS,KAAK,OAAO,SAAS,UAAU,IAAI;AAC9D,iBAAa,KAAK,IAAI,iBAAiB;AACvC,WAAO,SAAS,QAAQ,CAAC,MAAM,aAAa,KAAK,OAAO,CAAC,EAAE,CAAC;AAAA,EAC9D;AAEA,MAAI,OAAO,SAAS,SAAS,KAAK,OAAO,
SAAS,UAAU,IAAI;AAC9D,iBAAa,KAAK,IAAI,+BAA+B;AACrD,WAAO,SAAS,QAAQ,CAAC,MAAM,aAAa,KAAK,OAAO,CAAC,EAAE,CAAC;AAAA,EAC9D;AAEA,MAAI,OAAO,SAAS,SAAS,MAAM,OAAO,SAAS,SAAS,IAAI;AAC9D,iBAAa,KAAK,IAAI,kCAAkC;AAAA,EAC1D;AAEA,QAAM,UAAU,OAAO,SAAS,WAAW,KAAK,OAAO,SAAS,WAAW;AAE3E,EAAAJ,KAAI,IAAI;AAAA,IACN,OAAO;AAAA,IACP,SAAS,aAAa,KAAK,IAAI;AAAA,IAC/B,OAAO;AAAA,MACL,aAAa,UAAU,UAAU;AAAA,IACnC;AAAA,EACF,CAAC;AAED,MAAI,SAAS;AACX,IAAAA,KAAI,QAAQ,kBAAkB;AAAA,EAChC;AAEA,SAAO;AACT;;;AVvHA,IAAM,UAAU,IAAI,QAAQ;AAE5B,QACG,KAAK,SAAS,EACd,YAAY,2DAA2D,EACvE,QAAQ,OAAO;AAElB,QACG,QAAQ,MAAM,EACd,YAAY,kCAAkC,EAC9C,OAAO,uBAAuB,oBAAoB,mBAAmB,EACrE,OAAO,uBAAuB,qBAAqB,gBAAgB,EACnE,OAAO,6BAA6B,2BAA2B,GAAG,EAClE,OAAO,sBAAsB,6BAA6B,KAAK,EAC/D,OAAO,OAAO,YAAY;AACzB,MAAI;AACF,UAAM,KAAK;AAAA,MACT,QAAQ,QAAQ;AAAA,MAChB,QAAQ,QAAQ;AAAA,MAChB,kBAAkB,SAAS,QAAQ,aAAa,EAAE;AAAA,MAClD,iBAAiB,QAAQ;AAAA,IAC3B,CAAC;AAAA,EACH,SAAS,OAAO;AACd,YAAQ,MAAM,iBAAiB,QAAQ,MAAM,UAAU,KAAK;AAC5D,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF,CAAC;AAEH,QACG,QAAQ,QAAQ,EAChB,YAAY,qCAAqC,EACjD,OAAO,sBAAsB,sBAAsB,mBAAmB,EACtE,OAAO,uBAAuB,oBAAoB,gBAAgB,EAClE,OAAO,kBAAkB,iBAAiB,EAC1C,OAAO,eAAe,mCAAmC,KAAK,EAC9D,OAAO,OAAO,YAAY;AACzB,MAAI;AACF,UAAM,OAAO;AAAA,MACX,OAAO,QAAQ;AAAA,MACf,QAAQ,QAAQ;AAAA,MAChB,OAAO,QAAQ;AAAA,MACf,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA,EACH,SAAS,OAAO;AACd,YAAQ,MAAM,iBAAiB,QAAQ,MAAM,UAAU,KAAK;AAC5D,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF,CAAC;AAEH,QACG,QAAQ,QAAQ,EAChB,YAAY,sCAAsC,EAClD,OAAO,eAAe,gBAAgB,mBAAmB,EACzD,OAAO,6BAA6B,qBAAqB,gBAAgB,EACzE,OAAO,OAAO,YAAY;AACzB,MAAI;AACF,UAAM,OAAO;AAAA,MACX,IAAI,QAAQ;AAAA,MACZ,aAAa,QAAQ;AAAA,IACvB,CAAC;AAAA,EACH,SAAS,OAAO;AACd,YAAQ,MAAM,iBAAiB,QAAQ,MAAM,UAAU,KAAK;AAC5D,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF,CAAC;AAEH,QAAQ,MAAM;","names":["readFile","stat","existsSync","join","existsSync","join","readFile","deduplicatedCount","stat","existsSync","rm","resolve","join","log","totalSize","join","log","resolve","existsSync","rm","existsSync","readFile","resolve","join","log","resolve","existsSync","join","readFile"]}
@@ -0,0 +1,340 @@
+ import Database from 'better-sqlite3';
+ import { ConsolaInstance } from 'consola';
+ export { consola } from 'consola';
+
+ /**
+  * File system entry information
+  */
+ interface FileEntry {
+     /** Relative path from node_modules */
+     relativePath: string;
+     /** Absolute path */
+     absolutePath: string;
+     /** File permission (e.g., 0o755) */
+     mode: number;
+     /** File size (bytes) */
+     size: number;
+     /** Modification time (Unix timestamp ms) */
+     mtime: number;
+ }
+ /**
+  * Package information (packages table record)
+  */
+ interface PackageInfo {
+     id?: number;
+     /** Package name (e.g., lodash) */
+     name: string;
+     /** Version (e.g., 4.17.21) */
+     version: string;
+     /** Path within node_modules (e.g., node_modules/lodash) */
+     path: string;
+ }
+ /**
+  * Blob information (blobs table record)
+  */
+ interface BlobInfo {
+     /** SHA256 hash */
+     hash: string;
+     /** gzip compressed content */
+     content: Buffer;
+     /** Original size */
+     originalSize: number;
+     /** Compressed size */
+     compressedSize: number;
+ }
+ /**
+  * File record (files table record)
+  */
+ interface FileRecord {
+     id?: number;
+     packageId: number;
+     /** Relative path within package */
+     relativePath: string;
+     /** Blob hash */
+     blobHash: string;
+     /** File permission */
+     mode: number;
+     /** Modification time */
+     mtime: number;
+ }
+ /**
+  * Pack options
+  */
+ interface PackOptions {
+     /** Output file path */
+     output: string;
+     /** node_modules path */
+     source: string;
+     /** Compression level (1-9) */
+     compressionLevel: number;
+     /** Include lockfile */
+     includeLockfile: boolean;
+ }
+ /**
+  * Unpack options
+  */
+ interface UnpackOptions {
+     /** Input DB file path */
+     input: string;
+     /** Output directory */
+     output: string;
+     /** Cache directory */
+     cache?: string;
+     /** Overwrite existing node_modules */
+     force: boolean;
+ }
+ /**
+  * Status result
+  */
+ interface StatusResult {
+     /** Files only in DB */
+     onlyInDb: string[];
+     /** Files only in filesystem */
+     onlyInFs: string[];
+     /** Files with different content */
+     modified: string[];
+     /** Number of identical files */
+     unchanged: number;
+ }
+ /**
+  * Metadata keys
+  */
+ type MetadataKey = 'schema_version' | 'created_at' | 'node_version' | 'lockfile_hash' | 'source_path';
+ /**
+  * Progress callback
+  */
+ type ProgressCallback = (current: number, total: number, message?: string) => void;
+
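ProgressCallback is the hook accepted by scanNodeModules, extractFiles, and createProgressBar below. A minimal custom callback, illustrative only and not shipped by the package (the 'mohyung' import specifier is assumed):

import type { ProgressCallback } from 'mohyung'

// Log every 1000th item instead of drawing a terminal bar.
const logEvery1000: ProgressCallback = (current, total, message) => {
  if (current % 1000 === 0 || current === total) {
    console.log(`${current}/${total}${message ? ` - ${message}` : ''}`)
  }
}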
+ /**
+  * SQLite database store
+  */
+ declare class Store {
+     /** better-sqlite3 database instance */
+     private db;
+     /**
+      * Create Store instance
+      * @param dbPath - SQLite database file path
+      */
+     constructor(dbPath: string);
+     /** Initialize schema (create tables and set version) */
+     private initSchema;
+     /**
+      * Save metadata
+      * @param key - Metadata key
+      * @param value - Value to save
+      */
+     setMetadata(key: MetadataKey, value: string): void;
+     /**
+      * Get metadata
+      * @param key - Metadata key
+      * @returns Stored value or null
+      */
+     getMetadata(key: MetadataKey): string | null;
+     /**
+      * Insert package (returns the existing ID if the package is already stored)
+      * @param pkg - Package information
+      * @returns Inserted or existing package ID
+      */
+     insertPackage(pkg: PackageInfo): number;
+     /**
+      * Get package by ID
+      * @param id - Package ID
+      * @returns Package info or null
+      */
+     getPackageById(id: number): PackageInfo | null;
+     /**
+      * Get all packages
+      * @returns All package list
+      */
+     getAllPackages(): PackageInfo[];
+     /**
+      * Check if blob exists
+      * @param hash - Blob hash
+      * @returns Whether blob exists
+      */
+     hasBlob(hash: string): boolean;
+     /**
+      * Insert blob (ignore duplicates)
+      * @param blob - Blob information
+      */
+     insertBlob(blob: BlobInfo): void;
+     /**
+      * Get blob content by hash
+      * @param hash - Blob hash
+      * @returns Compressed blob content or null
+      */
+     getBlob(hash: string): Buffer | null;
+     /**
+      * Get blob storage statistics
+      * @returns Blob count and size statistics
+      */
+     getBlobStats(): {
+         totalBlobs: number;
+         totalOriginalSize: number;
+         totalCompressedSize: number;
+     };
+     /**
+      * Insert file record (update on conflict)
+      * @param file - File record information
+      */
+     insertFile(file: FileRecord): void;
+     /**
+      * Get files by package ID
+      * @param packageId - Package ID
+      * @returns File records for the package
+      */
+     getFilesByPackage(packageId: number): FileRecord[];
+     /**
+      * Get all files (with package path)
+      * @returns All file records
+      */
+     getAllFiles(): Array<FileRecord & {
+         packagePath: string;
+     }>;
+     /**
+      * Get total file count
+      * @returns Total number of files
+      */
+     getTotalFileCount(): number;
+     /**
+      * Execute work within a transaction
+      * @param fn - Function to execute within transaction
+      * @returns Function execution result
+      */
+     transaction<T>(fn: () => T): T;
+     /**
+      * Get prepared statement for bulk file insertion
+      * @returns Prepared statement for file insertion
+      */
+     prepareInsertFile(): Database.Statement;
+     /**
+      * Get prepared statement for bulk blob insertion
+      * @returns Prepared statement for blob insertion
+      */
+     prepareInsertBlob(): Database.Statement;
+     /** Close database connection */
+     close(): void;
+ }
+
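The Store methods above, together with hashBuffer and compress declared further down, form a content-addressed write path: each file's SHA256 hash keys a gzip-compressed blob, and file records reference blobs by hash, so identical file contents are stored once. A rough sketch of that flow, assuming the package entry point re-exports these declarations (the 'mohyung' specifier is an assumption):

import { readFileSync } from 'node:fs'
import { Store, hashBuffer, compress } from 'mohyung'

const store = new Store('./node_modules.db')
store.setMetadata('created_at', new Date().toISOString())

const packageId = store.insertPackage({ name: 'lodash', version: '4.17.21', path: 'lodash' })

const content = readFileSync('./node_modules/lodash/package.json')
const hash = hashBuffer(content)

store.transaction(() => {
  // Content-addressed: only insert the blob if this hash is new.
  if (!store.hasBlob(hash)) {
    const compressed = compress(content, 6)
    store.insertBlob({ hash, content: compressed, originalSize: content.length, compressedSize: compressed.length })
  }
  store.insertFile({ packageId, relativePath: 'package.json', blobHash: hash, mode: 0o644, mtime: Date.now() })
})

store.close()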
+ /**
+  * node_modules scan result
+  */
+ interface ScanResult {
+     /** Scanned package list (with file info) */
+     packages: Array<PackageInfo & {
+         files: FileEntry[];
+     }>;
+     /** Total file count */
+     totalFiles: number;
+     /** Total file size (bytes) */
+     totalSize: number;
+ }
+ /**
+  * Scan node_modules directory
+  * @param nodeModulesPath - Path to node_modules directory
+  * @param onProgress - Optional progress callback
+  * @returns Scan result with packages and file statistics
+  */
+ declare function scanNodeModules(nodeModulesPath: string, onProgress?: ProgressCallback): Promise<ScanResult>;
+ /**
+  * Count files quickly, without collecting package or file metadata
+  * @param nodeModulesPath - Path to node_modules directory
+  * @returns Total number of files
+  */
+ declare function countFiles(nodeModulesPath: string): Promise<number>;
+
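A small sketch of driving scanNodeModules directly, mirroring what the pack command does before writing to the store (again assuming the 'mohyung' entry point re-exports these names):

import { scanNodeModules, createProgressBar, formatBytes } from 'mohyung'

const result = await scanNodeModules('./node_modules', createProgressBar(100))

console.log(`${result.packages.length} packages, ${result.totalFiles} files (${formatBytes(result.totalSize)})`)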
+ /**
+  * Extract files from SQLite DB
+  * @param store - Store instance to read from
+  * @param outputPath - Output directory path
+  * @param onProgress - Optional progress callback
+  * @returns Total files and size extracted
+  */
+ declare function extractFiles(store: Store, outputPath: string, onProgress?: ProgressCallback): Promise<{
+     totalFiles: number;
+     totalSize: number;
+ }>;
+
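Paired with a Store opened on an existing snapshot, extractFiles restores every file record to disk; a minimal sketch (import specifier assumed):

import { Store, extractFiles, createProgressBar } from 'mohyung'

const store = new Store('./node_modules.db')
const { totalFiles, totalSize } = await extractFiles(store, './node_modules', createProgressBar(store.getTotalFileCount()))
store.close()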
+ /**
+  * Calculate SHA256 hash of a Buffer
+  * @param data - Buffer to hash
+  * @returns Hexadecimal hash string
+  */
+ declare function hashBuffer(data: Buffer): string;
+ /**
+  * Calculate SHA256 hash of a string
+  * @param data - String to hash
+  * @returns Hexadecimal hash string
+  */
+ declare function hashString(data: string): string;
+
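Both hashers return lowercase hex SHA256 digests, so a string and a Buffer holding the same bytes hash to the same value; for example (import specifier assumed):

import { hashBuffer, hashString } from 'mohyung'

const a = hashBuffer(Buffer.from('hello'))  // 64-char hex digest
const b = hashString('hello')               // same digest: identical UTF-8 bytes
console.log(a === b)                        // true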
+ /**
+  * Compress data using gzip
+  * @param data - Data to compress
+  * @param level - Compression level (1-9, default: 6)
+  * @returns Compressed buffer
+  */
+ declare function compress(data: Buffer, level?: number): Buffer;
+ /**
+  * Decompress gzip data
+  * @param data - Compressed data
+  * @returns Decompressed buffer
+  */
+ declare function decompress(data: Buffer): Buffer;
+
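compress/decompress are a plain gzip round trip over Buffers; for instance (import specifier assumed):

import { compress, decompress } from 'mohyung'

const original = Buffer.from(JSON.stringify({ hello: 'world' }))
const packed = compress(original, 9)   // gzip level 1-9, default 6
const restored = decompress(packed)

console.log(original.equals(restored)) // true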
+ /**
+  * Create a progress bar callback
+  * @param total - Expected total count for progress calculation
+  * @returns Progress callback function
+  */
+ declare function createProgressBar(total: number): ProgressCallback;
+ /**
+  * Format bytes to human-readable string
+  * @param bytes - Number of bytes
+  * @returns Formatted string (e.g., "1.5 MB")
+  */
+ declare function formatBytes(bytes: number): string;
+
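createProgressBar returns a ProgressCallback that redraws an in-place terminal bar and prints a newline when the count completes, while formatBytes scales byte counts into B/KB/MB/GB; e.g. (import specifier assumed):

import { createProgressBar, formatBytes } from 'mohyung'

const progress = createProgressBar(3)
progress(1, 3, 'lodash')
progress(2, 3, 'react')
progress(3, 3, 'done')          // bar reaches 100% and a newline is printed

console.log(formatBytes(1536))  // "1.5 KB"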
+ /** Default logger instances */
+ declare const logger: {
+     pack: ConsolaInstance;
+     unpack: ConsolaInstance;
+     status: ConsolaInstance;
+     scan: ConsolaInstance;
+ };
+ /**
+  * Create a new logger with tag
+  * @param tag - Tag to identify log source
+  * @returns Consola instance with tag
+  */
+ declare function createLogger(tag: string): ConsolaInstance;
+
+ /**
+  * Pack node_modules into SQLite DB
+  * @param options - Pack command options
+  */
+ declare function pack(options: PackOptions): Promise<void>;
+
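pack can also be invoked programmatically with the same options the CLI parses; a sketch (import specifier assumed):

import { pack } from 'mohyung'

await pack({
  source: './node_modules',
  output: './node_modules.db',
  compressionLevel: 6,
  includeLockfile: true,
})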
+ /**
+  * Restore node_modules from SQLite DB
+  * @param options - Unpack command options
+  */
+ declare function unpack(options: UnpackOptions): Promise<void>;
+
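Likewise for unpack; per the options above, force must be true to overwrite an existing output directory (import specifier assumed):

import { unpack } from 'mohyung'

await unpack({
  input: './node_modules.db',
  output: './node_modules',
  force: true,
})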
+ /**
+  * Status command options
+  */
+ interface StatusOptions {
+     /** Database file path */
+     db: string;
+     /** node_modules directory path */
+     nodeModules: string;
+ }
+ /**
+  * Compare DB with current node_modules
+  * @param options - Status command options
+  * @returns Comparison result with modified/missing files
+  */
+ declare function status(options: StatusOptions): Promise<StatusResult>;
+
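status resolves to the StatusResult declared earlier, so callers can verify a snapshot without parsing CLI output; a sketch (import specifier assumed):

import { status } from 'mohyung'

const result = await status({ db: './node_modules.db', nodeModules: './node_modules' })

if (result.modified.length === 0 && result.onlyInDb.length === 0) {
  console.log('node_modules matches the snapshot')
}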
+ export { type BlobInfo, type FileEntry, type FileRecord, type MetadataKey, type PackOptions, type PackageInfo, type ProgressCallback, type StatusResult, Store, type UnpackOptions, compress, countFiles, createLogger, createProgressBar, decompress, extractFiles, formatBytes, hashBuffer, hashString, logger, pack, scanNodeModules, status, unpack };