portapack 0.2.1 → 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -1 +0,0 @@
- {"version":3,"sources":["../../src/types.ts","../../src/cli/options.ts","../../src/utils/mime.ts","../../src/core/parser.ts","../../src/core/extractor.ts","../../src/core/minifier.ts","../../src/core/packer.ts","../../src/utils/logger.ts","../../src/utils/slugify.ts","../../src/core/bundler.ts","../../src/core/web-fetcher.ts","../../src/utils/meta.ts","../../src/index.ts","../../src/cli/cli.ts","../../src/cli/cli-entry.ts"],"sourcesContent":["/**\n * @file types.ts\n *\n * @description\n * Centralized types used across the PortaPack CLI, API, core modules, and bundling pipeline.\n *\n * This file defines:\n * - Asset structure\n * - HTML parsing result\n * - Bundling options and metadata\n * - Page structures for recursive bundling\n * - CLI execution output format\n */\n\n/**\n * Represents a single discovered, downloaded, or embedded asset.\n * This includes JS, CSS, images, fonts, etc.\n */\nexport interface Asset {\n type: 'css' | 'js' | 'image' | 'font' | 'video' | 'audio' | 'other'; // Add video and audio\n \n /** The resolved or original URL of the asset */\n url: string;\n\n /** Inlined or fetched content */\n content?: string; // Content is optional as it might not be embedded\n\n /** Font-specific metadata for font-face usage */\n fontMeta?: {\n familyName: string;\n weight?: number;\n style?: 'normal' | 'italic' | 'oblique';\n format?: string;\n };\n}\n\n/**\n * Represents raw HTML and any linked/discovered assets.\n * Result of the parsing stage.\n */\nexport interface ParsedHTML {\n htmlContent: string;\n assets: Asset[]; // List of assets found in the HTML\n}\n\n/**\n * Represents a single page crawled during recursive bundling.\n * Used as input for the multi-page bundler.\n */\nexport interface PageEntry {\n /** Full resolved URL of the crawled page */\n url: string;\n\n /** Raw HTML content of the crawled page */\n html: string;\n}\n\n/**\n * Configuration options provided by the user via CLI or API call.\n * Controls various aspects of the bundling process.\n */\nexport interface BundleOptions {\n /** Embed all discovered assets as data URIs (default: true) */\n embedAssets?: boolean;\n\n /** Enable HTML minification using html-minifier-terser (default: true) */\n minifyHtml?: boolean;\n\n /** Enable CSS minification using clean-css (default: true) */\n minifyCss?: boolean;\n\n /** Enable JavaScript minification using terser (default: true) */\n minifyJs?: boolean;\n\n /** Base URL for resolving relative links, especially for remote fetches or complex local structures */\n baseUrl?: string;\n\n /** Enable verbose logging during CLI execution */\n verbose?: boolean;\n\n /** Skip writing output file to disk (CLI dry-run mode) */\n dryRun?: boolean;\n\n /** Enable recursive crawling. If a number, specifies max depth. If true, uses default depth. 
*/\n recursive?: number | boolean;\n\n /** Optional output file path override (CLI uses this) */\n output?: string;\n\n /** Log level for the internal logger */\n logLevel?: LogLevel;\n}\n\n// --- LogLevel Enum ---\n// Defines available log levels as a numeric enum for comparisons.\nexport enum LogLevel {\n NONE = 0, // No logging (equivalent to 'silent')\n ERROR = 1, // Only errors\n WARN = 2, // Errors and warnings\n INFO = 3, // Errors, warnings, and info (Default)\n DEBUG = 4 // All messages (Verbose)\n}\n\n// --- String Literal Type for LogLevel Names (Optional, useful for CLI parsing) ---\nexport type LogLevelName = 'debug' | 'info' | 'warn' | 'error' | 'silent' | 'none';\n\n\n/**\n * Summary statistics and metadata returned after the packing/bundling process completes.\n */\nexport interface BundleMetadata {\n /** Source HTML file path or URL */\n input: string;\n\n /** Total number of unique assets discovered (CSS, JS, images, fonts etc.) */\n assetCount: number; // Kept as required - should always be calculated or defaulted (e.g., to 0)\n\n /** Final output HTML size in bytes */\n outputSize: number;\n\n /** Elapsed build time in milliseconds */\n buildTimeMs: number;\n\n /** If recursive bundling was performed, the number of pages successfully crawled and included */\n pagesBundled?: number; // Optional, only relevant for recursive mode\n\n /** Any non-critical errors or warnings encountered during bundling (e.g., asset fetch failure) */\n errors?: string[]; // Optional array of error/warning messages\n}\n\n/**\n * Standard result object returned from the main public API functions.\n */\nexport interface BuildResult {\n /** The final generated HTML string */\n html: string;\n /** Metadata summarizing the build process */\n metadata: BundleMetadata;\n}\n\n\n/** CLI-specific options extending BundleOptions. */\nexport interface CLIOptions extends BundleOptions {\n /** Input file or URL (positional). */\n input?: string;\n /** Max depth for recursive crawling (numeric alias for recursive). 
*/\n maxDepth?: number; // Used by commander, then merged into 'recursive'\n minify?: boolean; // Minify assets (defaults to true)\n}\n\n/**\n * Result object specifically for the CLI runner, capturing output streams and exit code.\n */\nexport interface CLIResult {\n /** Captured content written to stdout */\n stdout?: string;\n\n /** Captured content written to stderr */\n stderr?: string;\n\n /** Final exit code intended for the process (0 for success, non-zero for errors) */\n exitCode: number;\n}","/**\n * @file src/cli/options.ts\n * @description Centralized CLI argument parser for PortaPack using Commander.\n * Returns strongly typed options object including the determined LogLevel.\n */\n\nimport { Command, Option } from 'commander';\n// Import LogLevel enum and names type from the central types file\n// Ensure CLIOptions is imported correctly if defined in types.ts\nimport { LogLevel, type LogLevelName, type CLIOptions } from '../types';\n\n\n// Define valid choices for the --log-level option\nconst logLevels: LogLevelName[] = ['debug', 'info', 'warn', 'error', 'silent', 'none'];\n\n/**\n * Custom parser for the --recursive option value.\n * Treats flag without value, non-numeric value, or negative value as true.\n *\n * @param {string | undefined} val - The value passed to the option.\n * @returns {boolean | number} True if flag only/invalid number, otherwise the parsed depth.\n */\nfunction parseRecursiveValue(val: string | undefined): boolean | number {\n if (val === undefined) return true; // Flag only\n const parsed = parseInt(val, 10);\n // Invalid number (NaN) or negative depth treated as simple boolean 'true'\n return isNaN(parsed) || parsed < 0 ? true : parsed;\n}\n\n/**\n * Parses CLI arguments using Commander and returns a typed CLIOptions object.\n * Handles mapping --verbose and --log-level flags to the appropriate LogLevel enum value.\n * Handles mapping --no-minify to individual minification flags.\n * Ensures flags like --no-embed-assets correctly override their positive counterparts.\n *\n * @param {string[]} [argv=process.argv] - Command-line arguments array (e.g., process.argv).\n * @returns {CLIOptions} Parsed and structured options object.\n * @throws {Error} Throws errors if Commander encounters parsing/validation issues.\n */\nexport function parseOptions(argv: string[] = process.argv): CLIOptions {\n const program = new Command();\n\n program\n .name('portapack')\n .version('0.0.0') // Version updated dynamically by cli.ts\n .description('📦 Bundle HTML and its dependencies into a portable file')\n .argument('[input]', 'Input HTML file or URL')\n .option('-o, --output <file>', 'Output file path')\n .option('-m, --minify', 'Enable all minification (HTML, CSS, JS)') // Presence enables default true below\n .option('--no-minify', 'Disable all minification') // Global disable flag\n .option('--no-minify-html', 'Disable HTML minification')\n .option('--no-minify-css', 'Disable CSS minification')\n .option('--no-minify-js', 'Disable JavaScript minification')\n .option('-e, --embed-assets', 'Embed assets as data URIs') // Presence enables default true below\n .option('--no-embed-assets', 'Keep asset links relative/absolute') // Disable flag\n .option('-r, --recursive [depth]', 'Recursively crawl site (optional depth)', parseRecursiveValue)\n .option('--max-depth <n>', 'Set max depth for recursive crawl (alias for -r <n>)', parseInt)\n .option('-b, --base-url <url>', 'Base URL for resolving relative links')\n .option('-d, --dry-run', 'Run without writing output 
file')\n .option('-v, --verbose', 'Enable verbose (debug) logging')\n .addOption(new Option('--log-level <level>', 'Set logging level')\n .choices(logLevels));\n\n // Prevent commander from exiting on error during tests (optional)\n // program.exitOverride();\n\n program.parse(argv);\n\n // Raw options object from Commander's parsing\n const opts = program.opts<CLIOptions>();\n // Get the positional argument (input) if provided\n const inputArg = program.args.length > 0 ? program.args[0] : undefined;\n\n // --- Determine Effective LogLevel ---\n let finalLogLevel: LogLevel;\n const cliLogLevel = opts.logLevel as unknown as LogLevelName | undefined; // Commander stores choice string\n if (cliLogLevel) {\n // Map string choice to LogLevel enum value\n switch (cliLogLevel) {\n case 'debug': finalLogLevel = LogLevel.DEBUG; break;\n case 'info': finalLogLevel = LogLevel.INFO; break;\n case 'warn': finalLogLevel = LogLevel.WARN; break;\n case 'error': finalLogLevel = LogLevel.ERROR; break;\n case 'silent': case 'none': finalLogLevel = LogLevel.NONE; break;\n default: finalLogLevel = LogLevel.INFO; // Fallback, though choices() should prevent this\n }\n } else if (opts.verbose) {\n // --verbose is shorthand for debug level if --log-level not set\n finalLogLevel = LogLevel.DEBUG;\n } else {\n // Default log level\n finalLogLevel = LogLevel.INFO;\n }\n\n // --- Handle Embedding ---\n // Default is true. --no-embed-assets flag sets opts.embedAssets to false.\n // Check argv directly to ensure --no- wins regardless of order.\n let embedAssets = true; // Start with default\n if (argv.includes('--no-embed-assets')) {\n embedAssets = false; // Explicit negation flag takes precedence\n } else if (opts.embedAssets === true) {\n embedAssets = true; // Positive flag enables it if negation wasn't present\n }\n // If neither flag is present, it remains the default 'true'.\n\n // --- Handle Minification ---\n // Default to true unless specifically disabled by --no-minify-<type>\n let minifyHtml = opts.minifyHtml !== false;\n let minifyCss = opts.minifyCss !== false;\n let minifyJs = opts.minifyJs !== false;\n\n // Global --no-minify flag overrides all individual settings\n // Commander sets opts.minify to false if --no-minify is used.\n if (opts.minify === false) {\n minifyHtml = false;\n minifyCss = false;\n minifyJs = false;\n }\n // Note: Positive flags (-m or individual --minify-<type>) don't need extra handling\n // as the initial state is true, and negations correctly turn them off.\n\n // --- Handle Recursive/MaxDepth ---\n // Start with the value parsed from -r/--recursive\n let recursiveOpt = opts.recursive;\n // If --max-depth was provided and is a valid non-negative number, it overrides -r\n if (opts.maxDepth !== undefined && !isNaN(opts.maxDepth) && opts.maxDepth >= 0) {\n recursiveOpt = opts.maxDepth;\n }\n\n // Return the final structured options object\n return {\n // Pass through directly parsed options\n baseUrl: opts.baseUrl,\n dryRun: opts.dryRun ?? false, // Ensure boolean, default false\n output: opts.output,\n verbose: opts.verbose ?? 
false, // Ensure boolean, default false\n\n // Set calculated/processed options\n input: inputArg,\n logLevel: finalLogLevel,\n recursive: recursiveOpt, // Final calculated value for recursion\n embedAssets: embedAssets, // Final calculated value\n minifyHtml: minifyHtml, // Final calculated value\n minifyCss: minifyCss, // Final calculated value\n minifyJs: minifyJs, // Final calculated value\n\n // Exclude intermediate commander properties like:\n // minify, logLevel (string version), maxDepth,\n // minifyHtml, minifyCss, minifyJs (commander's raw boolean flags)\n };\n}","/**\n * @file src/utils/mime.ts\n * @description Utilities for guessing MIME types and asset types from URLs/paths.\n */\n\nimport path from 'path';\nimport type { Asset } from '../types'; // Assuming types are in ../types\n\n/**\n * Maps common file extensions to their corresponding MIME types and general Asset types.\n */\nconst MIME_MAP: Record<string, { mime: string; assetType: Asset['type'] }> = {\n // CSS\n '.css': { mime: 'text/css', assetType: 'css' },\n // JavaScript\n '.js': { mime: 'application/javascript', assetType: 'js' },\n '.mjs': { mime: 'application/javascript', assetType: 'js' },\n // Images\n '.png': { mime: 'image/png', assetType: 'image' },\n '.jpg': { mime: 'image/jpeg', assetType: 'image' },\n '.jpeg': { mime: 'image/jpeg', assetType: 'image' },\n '.gif': { mime: 'image/gif', assetType: 'image' },\n '.svg': { mime: 'image/svg+xml', assetType: 'image' },\n '.webp': { mime: 'image/webp', assetType: 'image' },\n '.ico': { mime: 'image/x-icon', assetType: 'image' },\n '.avif': { mime: 'image/avif', assetType: 'image' },\n // Fonts\n '.woff': { mime: 'font/woff', assetType: 'font' },\n '.woff2': { mime: 'font/woff2', assetType: 'font' },\n '.ttf': { mime: 'font/ttf', assetType: 'font' },\n '.otf': { mime: 'font/otf', assetType: 'font' },\n '.eot': { mime: 'application/vnd.ms-fontobject', assetType: 'font' },\n // Audio/Video (add more as needed)\n '.mp3': { mime: 'audio/mpeg', assetType: 'other' },\n '.ogg': { mime: 'audio/ogg', assetType: 'other' },\n '.wav': { mime: 'audio/wav', assetType: 'other' },\n '.mp4': { mime: 'video/mp4', assetType: 'other' },\n '.webm': { mime: 'video/webm', assetType: 'other' },\n // Other common web types\n '.json': { mime: 'application/json', assetType: 'other' },\n '.webmanifest': { mime: 'application/manifest+json', assetType: 'other' },\n '.xml': { mime: 'application/xml', assetType: 'other' },\n '.html': { mime: 'text/html', assetType: 'other' }, // Usually not needed as asset, but for completeness\n '.txt': { mime: 'text/plain', assetType: 'other' },\n};\n\n/**\n * Default MIME type and Asset type for unknown file extensions.\n */\nconst DEFAULT_MIME_TYPE = {\n mime: 'application/octet-stream',\n assetType: 'other' as Asset['type'] // Explicit cast needed\n};\n\n/**\n * Guesses the MIME type and general Asset type based on a URL or file path's extension.\n *\n * @param {string} urlOrPath - The URL or file path string.\n * @returns {{ mime: string; assetType: Asset['type'] }} An object containing the guessed MIME type\n * and the corresponding Asset type (e.g., 'image', 'font', 'css', 'js', 'other'). 
Returns a default\n * if the extension is unknown.\n */\nexport function guessMimeType(urlOrPath: string): { mime: string; assetType: Asset['type'] } {\n if (!urlOrPath) {\n return DEFAULT_MIME_TYPE;\n }\n // Extract the extension, handling potential query parameters or fragments\n let ext = '';\n try {\n // Use URL parsing first to handle URLs correctly\n const parsedUrl = new URL(urlOrPath);\n ext = path.extname(parsedUrl.pathname).toLowerCase();\n } catch {\n // If it's not a valid URL, treat it as a path\n ext = path.extname(urlOrPath).toLowerCase();\n }\n\n return MIME_MAP[ext] || DEFAULT_MIME_TYPE;\n}\n\n/**\n * Gets the appropriate font MIME type based on the file extension.\n * Deprecated: Prefer `guessMimeType`.\n * @deprecated Use guessMimeType instead.\n * @param {string} fontUrl - The URL or path of the font file.\n * @returns {string} The corresponding font MIME type or a default.\n */\nexport function getFontMimeType(fontUrl: string): string {\n return guessMimeType(fontUrl).mime; // Delegate to the main function\n}","/**\n * @file src/core/parser.ts\n * @description\n * Parses an HTML file using Cheerio to extract the basic structure\n * and identify top-level linked assets (CSS, JS, images, fonts, video, audio etc.).\n * It relies on tag names, link relations, and file extensions to guess asset types.\n * It does *not* fetch or analyze the content of linked assets. Inline styles/scripts\n * and data URIs are ignored. Duplicate asset URLs are ignored.\n */\n\n// FIX: Use only the named import for readFile\nimport { readFile } from 'fs/promises';\n// NOTE: 'path' module was imported but not used, so removed. Add back if needed later.\n// import path from 'path';\nimport * as cheerio from 'cheerio';\nimport type { CheerioAPI } from 'cheerio';\nimport type { Asset, ParsedHTML } from '../types.js';\nimport { Logger } from '../utils/logger.js';\nimport { guessMimeType } from '../utils/mime.js';\n\n/**\n * Parses an HTML file from the given path using Cheerio.\n * Extracts references to external assets like CSS, JS, images, fonts, video, audio\n * found in common HTML tags (<link>, <script>, <img>, <source>, <video>, <audio>, <input type=\"image\">).\n * Does not extract assets linked *within* CSS (like @import, fonts or background images).\n * Data URIs and empty URLs are ignored. 
Duplicate URLs are ignored.\n *\n * @async\n * @function parseHTML\n * @param {string} entryFilePath - Absolute or relative path to the input HTML file.\n * @param {Logger} [logger] - Optional logger instance.\n * @returns {Promise<ParsedHTML>} A promise that resolves to the parsed HTML content\n * and a list of discovered asset URLs with their inferred types.\n * @throws {Error} Throws an error with cause if the file cannot be read.\n */\nexport async function parseHTML(entryFilePath: string, logger?: Logger): Promise<ParsedHTML> {\n logger?.debug(`Parsing HTML file: ${entryFilePath}`);\n let htmlContent: string;\n try {\n // FIX: Use the correctly imported 'readFile' function directly\n htmlContent = await readFile(entryFilePath, 'utf-8');\n logger?.debug(`Successfully read HTML file (${Buffer.byteLength(htmlContent)} bytes).`);\n } catch (err: any) {\n logger?.error(`Failed to read HTML file \"${entryFilePath}\": ${err.message}`);\n throw new Error(`Could not read input HTML file: ${entryFilePath}`, { cause: err });\n }\n\n const $: CheerioAPI = cheerio.load(htmlContent);\n const assets: Asset[] = [];\n const addedUrls = new Set<string>();\n\n /** Helper to add unique assets */\n const addAsset = (url?: string, forcedType?: Asset['type']): void => {\n if (!url || url.trim() === '' || url.startsWith('data:')) {\n return;\n }\n if (!addedUrls.has(url)) {\n addedUrls.add(url);\n const mimeInfo = guessMimeType(url);\n const type = forcedType ?? mimeInfo.assetType;\n assets.push({ type, url });\n logger?.debug(`Discovered asset: Type='${type}', URL='${url}'`);\n } else {\n logger?.debug(`Skipping duplicate asset URL: ${url}`);\n }\n };\n\n logger?.debug('Extracting assets from HTML tags...');\n\n // --- Extract Assets from Various Tags ---\n // Stylesheets: <link rel=\"stylesheet\" href=\"...\">\n $('link[rel=\"stylesheet\"][href]').each((_, el) => {\n addAsset($(el).attr('href'), 'css');\n });\n // JavaScript: <script src=\"...\">\n $('script[src]').each((_, el) => {\n addAsset($(el).attr('src'), 'js');\n });\n // Images: <img src=\"...\">, <input type=\"image\" src=\"...\">\n $('img[src]').each((_, el) => addAsset($(el).attr('src'), 'image'));\n $('input[type=\"image\"][src]').each((_, el) => addAsset($(el).attr('src'), 'image'));\n // Image srcset: <img srcset=\"...\">, <source srcset=\"...\"> (within picture)\n $('img[srcset], picture source[srcset]').each((_, el) => {\n const srcset = $(el).attr('srcset');\n srcset?.split(',').forEach(entry => {\n const [url] = entry.trim().split(/\\s+/);\n addAsset(url, 'image');\n });\n });\n // Video: <video src=\"...\">, <video poster=\"...\">\n $('video[src]').each((_, el) => addAsset($(el).attr('src'), 'video'));\n $('video[poster]').each((_, el) => addAsset($(el).attr('poster'), 'image'));\n // Audio: <audio src=\"...\">\n $('audio[src]').each((_, el) => addAsset($(el).attr('src'), 'audio'));\n // Media Sources: <source src=\"...\"> within <video> or <audio>\n $('video > source[src]').each((_, el) => addAsset($(el).attr('src'), 'video'));\n $('audio > source[src]').each((_, el) => addAsset($(el).attr('src'), 'audio'));\n // Icons and Manifest: <link rel=\"icon/shortcut icon/apple-touch-icon/manifest\" href=\"...\">\n $('link[href]').filter((_, el) => {\n const rel = $(el).attr('rel')?.toLowerCase() ?? '';\n return ['icon', 'shortcut icon', 'apple-touch-icon', 'manifest'].includes(rel);\n }).each((_, el) => {\n const rel = $(el).attr('rel')?.toLowerCase() ?? 
'';\n const isIcon = ['icon', 'shortcut icon', 'apple-touch-icon'].includes(rel);\n addAsset($(el).attr('href'), isIcon ? 'image' : undefined);\n });\n // Preloaded Fonts: <link rel=\"preload\" as=\"font\" href=\"...\">\n $('link[rel=\"preload\"][as=\"font\"][href]').each((_, el) => {\n addAsset($(el).attr('href'), 'font');\n });\n\n // --- Parsing Complete ---\n logger?.info(`HTML parsing complete. Discovered ${assets.length} unique asset links.`);\n return { htmlContent, assets };\n}","/**\n * @file src/core/extractor.ts\n * @description Handles discovery, resolution, fetching, and optional embedding of assets\n * linked from HTML and recursively within CSS (@import, url()). This is the heart of finding EVERYTHING.\n * @version 1.1.3 - Fixed CSS path resolution and handling of 'other' asset types.\n */\n\n// === Node.js Core Imports ===\nimport { readFile } from 'fs/promises';\nimport * as fs from 'fs'; // Required for statSync for sync directory check\nimport type { FileHandle } from 'fs/promises';\nimport path from 'path';\nimport { fileURLToPath, URL } from 'url'; // Crucial for file path/URL conversion\n\n// === External Dependencies ===\nimport * as axios from 'axios'; // Using namespace import for clarity\nimport type { AxiosError, AxiosRequestConfig, AxiosResponse, InternalAxiosRequestConfig } from 'axios';\n\n// === Project Imports ===\nimport type { Asset, ParsedHTML } from '../types';\nimport { guessMimeType } from '../utils/mime';\nimport { Logger } from '../utils/logger';\n\n// === Constants ===\n/** Set of asset types defined in Asset['type'] generally considered text-based */\nconst TEXT_ASSET_TYPES: Set<Asset['type']> = new Set(['css', 'js']);\n/** Set of asset types defined in Asset['type'] generally considered binary and embedded via Base64 Data URI */\nconst BINARY_ASSET_TYPES: Set<Asset['type']> = new Set(['image', 'font', 'video', 'audio']);\n/** Maximum number of iterations for the asset discovery loop to prevent infinite cycles. 
*/\nconst MAX_ASSET_EXTRACTION_ITERATIONS = 1000;\n\n// === Helper Functions ===\n\n/**\n * Checks if decoding a buffer as UTF-8 and re-encoding is lossy.\n * @param {Buffer} originalBuffer The original binary buffer.\n * @param {string} decodedString The string resulting from toString('utf-8').\n * @returns {boolean} True if re-encoding doesn't match original buffer (lossy), false otherwise.\n */\nfunction isUtf8DecodingLossy(originalBuffer: Buffer, decodedString: string): boolean {\n try {\n const reEncodedBuffer = Buffer.from(decodedString, 'utf-8');\n return !originalBuffer.equals(reEncodedBuffer);\n } catch (e) {\n return true;\n }\n}\n\n/**\n * Determines the absolute base directory URL (http://, https://, or file:///) ending in '/'.\n * @param {string} inputPathOrUrl - The original source HTML file path or a full HTTP/HTTPS URL.\n * @param {Logger} [logger] - Optional logger instance.\n * @returns {string | undefined} The absolute base URL string ending in '/', or undefined if determination fails.\n */\nfunction determineBaseUrl(inputPathOrUrl: string, logger?: Logger): string | undefined {\n logger?.debug(`Determining base URL for input: ${inputPathOrUrl}`);\n if (!inputPathOrUrl) {\n logger?.warn('Cannot determine base URL: inputPathOrUrl is empty or invalid.');\n return undefined;\n }\n\n try {\n if (/^https?:\\/\\//i.test(inputPathOrUrl)) {\n const url = new URL(inputPathOrUrl);\n url.pathname = url.pathname.substring(0, url.pathname.lastIndexOf('/') + 1);\n url.search = ''; url.hash = '';\n const baseUrl = url.href;\n logger?.debug(`Determined remote base URL: ${baseUrl}`);\n return baseUrl;\n }\n else if (inputPathOrUrl.includes('://') && !inputPathOrUrl.startsWith('file:')) {\n logger?.warn(`Input \"${inputPathOrUrl}\" looks like a URL but uses an unsupported protocol. Cannot determine base URL.`);\n return undefined;\n }\n else {\n let absolutePath: string;\n if (inputPathOrUrl.startsWith('file:')) {\n try { absolutePath = fileURLToPath(inputPathOrUrl); }\n catch (e: any) { logger?.error(`💀 Failed to convert file URL \"${inputPathOrUrl}\" to path: ${e.message}`); return undefined; }\n } else {\n absolutePath = path.resolve(inputPathOrUrl);\n }\n let isDirectory = false;\n try { isDirectory = fs.statSync(absolutePath).isDirectory(); }\n catch (statError: unknown) {\n if (statError instanceof Error && (statError as NodeJS.ErrnoException).code === 'ENOENT') {\n logger?.debug(`Path \"${absolutePath}\" not found. Assuming input represents a file, using its parent directory as base.`);\n } else {\n logger?.warn(`Could not stat local path \"${absolutePath}\" during base URL determination: ${statError instanceof Error ? statError.message : String(statError)}. Assuming input represents a file.`);\n }\n isDirectory = false;\n }\n const dirPath = isDirectory ? absolutePath : path.dirname(absolutePath);\n let normalizedPathForURL = dirPath.replace(/\\\\/g, '/');\n if (/^[A-Z]:\\//i.test(normalizedPathForURL) && !normalizedPathForURL.startsWith('/')) {\n normalizedPathForURL = '/' + normalizedPathForURL;\n }\n const fileUrl = new URL('file://' + normalizedPathForURL);\n let fileUrlString = fileUrl.href;\n if (!fileUrlString.endsWith('/')) { fileUrlString += '/'; }\n logger?.debug(`Determined local base URL: ${fileUrlString} (from: ${inputPathOrUrl}, resolved dir: ${dirPath}, isDir: ${isDirectory})`);\n return fileUrlString;\n }\n } catch (error: unknown) {\n const message = error instanceof Error ? 
error.message : String(error);\n logger?.error(`💀 Failed to determine base URL for \"${inputPathOrUrl}\": ${message}${error instanceof Error ? ` - Stack: ${error.stack}` : ''}`);\n return undefined;\n }\n}\n\n/**\n * Resolves an asset URL relative to a base URL context.\n * @param {string} assetUrl - The raw URL string found in the source.\n * @param {string} [baseContextUrl] - The absolute base URL of the containing document.\n * @param {Logger} [logger] - Optional logger instance.\n * @returns {URL | null} A validated, absolute URL object or null.\n */\nfunction resolveAssetUrl(assetUrl: string, baseContextUrl?: string, logger?: Logger): URL | null {\n const trimmedUrl = assetUrl?.trim();\n if (!trimmedUrl || trimmedUrl.startsWith('data:') || trimmedUrl.startsWith('#')) {\n return null;\n }\n let resolvableUrl = trimmedUrl;\n if (resolvableUrl.startsWith('//') && baseContextUrl) {\n try {\n const base = new URL(baseContextUrl);\n resolvableUrl = base.protocol + resolvableUrl;\n } catch (e) {\n logger?.warn(`Could not extract protocol from base \"${baseContextUrl}\" for protocol-relative URL \"${trimmedUrl}\". Skipping.`);\n return null;\n }\n }\n try {\n const resolved = new URL(resolvableUrl, baseContextUrl);\n return resolved;\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n if (!/^[a-z]+:/i.test(resolvableUrl) && !resolvableUrl.startsWith('/') && !baseContextUrl) {\n logger?.warn(`Cannot resolve relative URL \"${resolvableUrl}\" - Base context URL was not provided or determined.`);\n } else {\n logger?.warn(`⚠️ Failed to parse/resolve URL \"${resolvableUrl}\" ${baseContextUrl ? 'against base \"' + baseContextUrl + '\"' : '(no base provided)'}: ${message}`);\n }\n return null;\n }\n}\n\n/**\n * Properly resolves CSS relative paths, handling \"../\" correctly.\n * This is critical for properly resolving paths in CSS like \"../images/bg.png\".\n * \n * @param {string} relativeUrl - The relative URL from CSS (e.g., \"../images/bg.png\")\n * @param {string} cssBaseUrl - The base URL of the CSS file\n * @param {Logger} [logger] - Optional logger instance\n * @returns {string | null} The resolved absolute URL or null if resolution fails\n */\nfunction resolveCssRelativeUrl(\n relativeUrl: string,\n cssBaseContextUrl: string,\n logger?: Logger\n): string | null {\n // Skip empty or data URLs\n if (!relativeUrl || relativeUrl.startsWith('data:')) {\n return null;\n }\n\n try {\n if (cssBaseContextUrl.startsWith('file:')) {\n // Turn the CSS base URL into a filesystem path\n const basePath = fileURLToPath(cssBaseContextUrl);\n\n // If that base path is actually a directory, use it directly;\n // otherwise, use its dirname. 
This prevents us from dropping\n // the final directory name when we already have a trailing slash.\n let cssDir: string;\n try {\n const stat = fs.statSync(basePath);\n if (stat.isDirectory()) {\n cssDir = basePath;\n } else {\n cssDir = path.dirname(basePath);\n }\n } catch {\n // If stat fails, assume it's a file path\n cssDir = path.dirname(basePath);\n }\n\n // Resolve relativeUrl against this directory\n let resolvedPath = path.resolve(cssDir, relativeUrl);\n resolvedPath = resolvedPath.replace(/\\\\/g, '/'); // Normalize to forward slashes\n\n // On Windows, ensure file:///C:/something\n if (/^[A-Z]:/i.test(resolvedPath) && !resolvedPath.startsWith('/')) {\n resolvedPath = '/' + resolvedPath;\n }\n return `file://${resolvedPath}`;\n } else {\n // For http/https etc., do standard resolution\n return new URL(relativeUrl, cssBaseContextUrl).href;\n }\n } catch (error) {\n logger?.warn(\n `Failed to resolve CSS URL: \"${relativeUrl}\" against \"${cssBaseContextUrl}\": ${String(error)}`\n );\n return null;\n }\n}\n\n\n/**\n * Asynchronously fetches the content of a resolved asset URL.\n * @async\n * @param {URL} resolvedUrl - The absolute URL object of the asset to fetch.\n * @param {Logger} [logger] - Optional logger instance.\n * @param {number} [timeout=10000] - Network timeout in milliseconds.\n * @returns {Promise<Buffer | null>} Asset content as a Buffer, or null on failure.\n */\n/**\n * Asynchronously fetches the content of a resolved asset URL.\n * @async\n * @param {URL} resolvedUrl - The absolute URL object of the asset to fetch.\n * @param {Logger} [logger] - Optional logger instance.\n * @param {number} [timeout=10000] - Network timeout in milliseconds.\n * @returns {Promise<Buffer | null>} Asset content as a Buffer, or null on failure.\n */\nasync function fetchAsset(resolvedUrl: URL, logger?: Logger, timeout: number = 10000): Promise<Buffer | null> {\n logger?.debug(`Attempting to fetch asset: ${resolvedUrl.href}`);\n const protocol = resolvedUrl.protocol;\n\n try {\n if (protocol === 'http:' || protocol === 'https:') {\n const response: AxiosResponse<ArrayBuffer> = await axios.default.get(resolvedUrl.href, {\n responseType: 'arraybuffer', timeout: timeout,\n });\n logger?.debug(`Workspaceed remote asset ${resolvedUrl.href} (Status: ${response.status}, Type: ${response.headers['content-type'] || 'N/A'}, Size: ${response.data.byteLength} bytes)`);\n return Buffer.from(response.data);\n } else if (protocol === 'file:') {\n let filePath: string;\n try {\n filePath = fileURLToPath(resolvedUrl);\n } catch (e: any) {\n // Log error specifically for path conversion failure\n logger?.error(`Could not convert file URL to path: ${resolvedUrl.href}. Error: ${e.message}`);\n return null; // Cannot proceed without a valid path\n }\n // This section will now only be reached if fileURLToPath succeeded\n const data = await readFile(filePath); // This might throw ENOENT, EACCES etc.\n logger?.debug(`Read local file ${filePath} (${data.byteLength} bytes)`);\n return data;\n } else {\n logger?.warn(`Unsupported protocol \"${protocol}\" in URL: ${resolvedUrl.href}`);\n return null;\n }\n } catch (error: unknown) {\n // --- Handle Errors Based on Protocol/Context ---\n\n // Check for AxiosError FIRST (only relevant if protocol was http/https)\n if ((protocol === 'http:' || protocol === 'https:') && axios.default.isAxiosError(error)) {\n const status = error.response?.status ?? 'N/A';\n const statusText = error.response?.statusText ?? 'Error';\n const code = error.code ?? 
'N/A';\n const message = error.message;\n // Construct the message matching test expectation\n const logMessage = `⚠️ Failed to fetch remote asset ${resolvedUrl.href}: Status ${status} - ${statusText}. Code: ${code}, Message: ${message}`;\n logger?.warn(logMessage);\n }\n // Check for specific FS errors (only relevant if protocol was file:)\n else if (protocol === 'file:') {\n // Determine the file path again for logging, handling potential errors\n let failedPath = resolvedUrl.href;\n try { failedPath = fileURLToPath(resolvedUrl); } catch { /* ignore if conversion fails here, use original href */ }\n\n if (error instanceof Error && (error as NodeJS.ErrnoException).code === 'ENOENT') {\n logger?.warn(`⚠️ File not found (ENOENT) for asset: ${failedPath}.`);\n } else if (error instanceof Error && (error as NodeJS.ErrnoException).code === 'EACCES') {\n logger?.warn(`⚠️ Permission denied (EACCES) reading asset: ${failedPath}.`);\n } else if (error instanceof Error) { // Catch other errors during file reading (but not path conversion which is handled above)\n logger?.warn(`⚠️ Failed to read local asset ${failedPath}: ${error.message}`);\n } else {\n logger?.warn(`⚠️ An unknown error occurred while reading local asset ${failedPath}: ${String(error)}`);\n }\n }\n // Check for other specific errors like invalid URL types if necessary (ERR_INVALID_URL handled above mostly)\n // else if (error instanceof TypeError && error.message.includes('ERR_INVALID_URL')) { ... }\n\n // Generic fallback for truly unexpected errors during fetch/read\n else if (error instanceof Error) {\n logger?.warn(`⚠️ An unexpected error occurred processing asset ${resolvedUrl.href}: ${error.message}`);\n } else {\n logger?.warn(`⚠️ An unknown and unexpected error occurred processing asset ${resolvedUrl.href}: ${String(error)}`);\n }\n return null; // Return null on ANY fetch/read error caught here\n }\n}\n\n/**\n * Extracts URLs from CSS content and resolves them against the CSS base URL.\n * @param {string} cssContent - The CSS content to parse\n * @param {string} cssBaseContextUrl - The base URL of the CSS file\n * @param {Asset[]} discoveredAssets - Array to push newly discovered assets to\n * @param {Set<string>} visitedUrls - Set of already visited URLs to avoid duplicates\n * @param {Logger} [logger] - Optional logger instance\n */\n/**\n * Extracts URLs from CSS content and resolves them against the CSS base URL.\n * Returns an array of *potentially* new Asset objects with resolved URLs.\n */\nfunction extractUrlsFromCSS(\n cssContent: string,\n cssBaseContextUrl: string,\n // discoveredAssets: Asset[], // REMOVE: This function will now RETURN the assets\n // visitedUrls: Set<string>, // REMOVE\n logger?: Logger\n): Asset[] { // RETURN the discovered assets\n const newlyDiscovered: Asset[] = []; // Internal list for this parse\n const processedInThisParse = new Set<string>(); // Track URLs found in *this specific* CSS file to avoid duplicates from the same file\n\n const urlRegex = /url\\(\\s*(['\"]?)(.*?)\\1\\s*\\)/gi;\n const importRegex = /@import\\s+(?:url\\(\\s*(['\"]?)(.*?)\\1\\s*\\)|(['\"])(.*?)\\3)\\s*;/gi;\n\n const processFoundUrl = (rawUrl: string | undefined, ruleType: '@import' | 'url()') => {\n if (!rawUrl || rawUrl.trim() === '' || rawUrl.startsWith('data:')) return;\n\n const resolvedUrl = resolveCssRelativeUrl(rawUrl, cssBaseContextUrl, logger);\n\n // Check if resolved AND not already processed within *this* CSS file\n if (resolvedUrl && !processedInThisParse.has(resolvedUrl)) {\n 
processedInThisParse.add(resolvedUrl); // Mark as found in this file\n const { assetType } = guessMimeType(resolvedUrl);\n\n // Add to the list to be returned\n newlyDiscovered.push({\n type: assetType,\n url: resolvedUrl, // The resolved URL string\n content: undefined\n });\n logger?.debug(`Discovered nested ${assetType} asset (${ruleType}) in CSS ${cssBaseContextUrl}: ${resolvedUrl}`);\n }\n };\n\n // ... (run regex loops calling processFoundUrl) ...\n urlRegex.lastIndex = 0;\n importRegex.lastIndex = 0;\n let match;\n while ((match = urlRegex.exec(cssContent)) !== null) {\n processFoundUrl(match[2], 'url()');\n }\n importRegex.lastIndex = 0;\n while ((match = importRegex.exec(cssContent)) !== null) {\n processFoundUrl(match[2] || match[4], '@import');\n }\n\n return newlyDiscovered; // Return the list\n}\n\n/**\n * Extracts all discoverable assets recursively from HTML and CSS.\n * @async\n * @export\n * @param {ParsedHTML} parsed - Initial parsed HTML data.\n * @param {boolean} [embedAssets=true] - Whether to embed content.\n * @param {string} [inputPathOrUrl] - Original HTML source location.\n * @param {Logger} [logger] - Optional logger instance.\n * @returns {Promise<ParsedHTML>} Processed data with all assets.\n */\n/**\n * Extracts all discoverable assets recursively from HTML and CSS.\n * Fetches assets if embedAssets is true or if the asset is CSS (to parse for more assets).\n * Resolves URLs relative to their context (HTML base or CSS file location).\n * @async\n * @export\n * @param {ParsedHTML} parsed - Initial parsed HTML data containing `htmlContent` and an initial `assets` array.\n * @param {boolean} [embedAssets=true] - Whether to fetch asset content and store it (usually as a data URI or text). If false, content remains undefined, but assets are still discovered.\n * @param {string} [inputPathOrUrl] - The original source location (file path or URL) of the HTML. Used to determine the base context for resolving relative paths in the HTML.\n * @param {Logger} [logger] - Optional logger instance for detailed logging.\n * @returns {Promise<ParsedHTML>} Processed data with `htmlContent` and the final `assets` array containing all discovered assets (with content if `embedAssets` was true and fetch succeeded).\n */\nexport async function extractAssets(\n parsed: ParsedHTML,\n embedAssets = true,\n inputPathOrUrl?: string,\n logger?: Logger\n): Promise<ParsedHTML> {\n logger?.info(`🚀 Starting asset extraction! Embed: ${embedAssets}. Input: ${inputPathOrUrl || '(HTML content only)'}`);\n\n const initialAssets: Asset[] = parsed.assets || [];\n // Stores the final result: Map<resolved URL string, Asset object>\n const finalAssetsMap = new Map<string, Asset>();\n // Queue holds assets to be processed: { url: string (resolved), type: ..., content?: ... }\n let assetsToProcess: Asset[] = [];\n\n // Determine the base URL context for resolving relative paths FROM THE HTML\n const htmlBaseContextUrl = determineBaseUrl(inputPathOrUrl || '', logger);\n if (!htmlBaseContextUrl && initialAssets.some(a => !/^[a-z]+:/i.test(a.url) && !a.url.startsWith('data:') && !a.url.startsWith('#') && !a.url.startsWith('/'))) {\n logger?.warn(\"🚨 No valid base path/URL determined for the HTML source! 
Resolution of relative asset paths from HTML may fail.\");\n } else if (htmlBaseContextUrl) {\n logger?.debug(`Using HTML base context URL: ${htmlBaseContextUrl}`);\n }\n\n // --- CORRECTED: Define processedOrQueuedUrls HERE in the main function scope ---\n // Set to track URLs that are already processed (in finalAssetsMap) OR currently in the queue (assetsToProcess)\n // This prevents adding the same asset to the queue multiple times.\n const processedOrQueuedUrls = new Set<string>();\n\n // --- Initial Queue Population ---\n logger?.debug(`Queueing ${initialAssets.length} initial assets parsed from HTML...`);\n for (const asset of initialAssets) {\n // Resolve the initial asset URL against the HTML base context\n const resolvedUrlObj = resolveAssetUrl(asset.url, htmlBaseContextUrl, logger);\n // Use the resolved URL string if resolution succeeded, otherwise use the original\n const urlToQueue = resolvedUrlObj ? resolvedUrlObj.href : asset.url;\n\n // Skip data URIs and check if this URL is already tracked\n if (!urlToQueue.startsWith('data:') && !processedOrQueuedUrls.has(urlToQueue)) {\n processedOrQueuedUrls.add(urlToQueue); // Mark as queued\n\n // Guess type from the resolved/original URL if not provided initially\n const { assetType: guessedType } = guessMimeType(urlToQueue);\n const initialType = asset.type ?? guessedType;\n\n // Add to the processing queue\n assetsToProcess.push({\n url: urlToQueue,\n type: initialType,\n content: undefined\n });\n logger?.debug(` -> Queued initial asset: ${urlToQueue} (Original raw: ${asset.url})`);\n } else if (urlToQueue.startsWith('data:')) {\n logger?.debug(` -> Skipping data URI: ${urlToQueue.substring(0, 50)}...`);\n } else {\n logger?.debug(` -> Skipping already queued initial asset: ${urlToQueue}`);\n }\n }\n\n // --- Main processing loop ---\n let iterationCount = 0;\n while (assetsToProcess.length > 0) {\n iterationCount++;\n if (iterationCount > MAX_ASSET_EXTRACTION_ITERATIONS) {\n logger?.error(`🛑 Asset extraction loop limit hit (${MAX_ASSET_EXTRACTION_ITERATIONS})! 
Aborting.`);\n const remainingUrls = assetsToProcess.map(a => a.url).slice(0, 10).join(', ');\n logger?.error(`Remaining queue sample (${assetsToProcess.length} items): ${remainingUrls}...`);\n // Add assets remaining in queue to final map without content before breaking\n assetsToProcess.forEach(asset => {\n if (!finalAssetsMap.has(asset.url)) {\n finalAssetsMap.set(asset.url, { ...asset, content: undefined });\n }\n });\n assetsToProcess = []; // Clear queue\n break; // Exit loop\n }\n\n // Process assets in batches\n const currentBatch = [...assetsToProcess];\n assetsToProcess = []; // Clear queue for the next batch discovered in this iteration\n\n logger?.debug(`--- Processing batch ${iterationCount}: ${currentBatch.length} asset(s) ---`);\n\n for (const asset of currentBatch) {\n // Skip if already fully processed\n if (finalAssetsMap.has(asset.url)) {\n logger?.debug(`Skipping asset already in final map: ${asset.url}`);\n continue;\n }\n\n let assetContentBuffer: Buffer | null = null;\n let finalContent: string | undefined = undefined; // For embedding\n let cssContentForParsing: string | undefined = undefined; // For CSS parsing\n\n // --- Determine if fetching is needed ---\n const needsFetching = embedAssets || asset.type === 'css';\n let assetUrlObj: URL | null = null;\n\n if (needsFetching) {\n // --- Create URL object for fetching ---\n try {\n assetUrlObj = new URL(asset.url);\n } catch (urlError) {\n logger?.warn(`Cannot create URL object for \"${asset.url}\", skipping fetch. Error: ${urlError instanceof Error ? urlError.message : String(urlError)}`);\n finalAssetsMap.set(asset.url, { ...asset, content: undefined });\n continue; // Skip to next asset in batch\n }\n\n // --- Fetch Asset ---\n if (assetUrlObj) {\n assetContentBuffer = await fetchAsset(assetUrlObj, logger);\n }\n } // End if(needsFetching)\n\n // --- If fetching was needed but failed, add to map without content and skip ---\n if (needsFetching && assetContentBuffer === null) {\n logger?.debug(`Storing asset ${asset.url} without content due to fetch failure.`);\n finalAssetsMap.set(asset.url, { ...asset, content: undefined });\n continue; // Skip to next asset in batch\n }\n\n // --- Prepare Content for Storing/Embedding (if fetched successfully) ---\n if (assetContentBuffer) { // Only proceed if content was fetched\n const mimeInfo = guessMimeType(asset.url);\n const effectiveMime = mimeInfo.mime || 'application/octet-stream';\n\n // Try to decode TEXT types as UTF-8\n if (TEXT_ASSET_TYPES.has(asset.type)) {\n let textContent: string | undefined;\n let wasLossy = false;\n try {\n textContent = assetContentBuffer.toString('utf-8');\n wasLossy = isUtf8DecodingLossy(assetContentBuffer, textContent);\n } catch (e) { textContent = undefined; wasLossy = true; }\n\n if (!wasLossy && textContent !== undefined) {\n // Store the decoded text content if embedding or it's CSS (for parsing)\n if (embedAssets) {\n finalContent = textContent;\n } else {\n finalContent = undefined; // Not embedding text\n }\n // If it's CSS, store it for parsing later regardless of embedding\n if (asset.type === 'css') {\n cssContentForParsing = textContent;\n }\n } else {\n // Decoding failed or was lossy\n logger?.warn(`Could not decode ${asset.type} ${asset.url} as valid UTF-8 text.${embedAssets ? ' Falling back to base64 data URI.' 
: ''}`);\n cssContentForParsing = undefined; // Cannot parse if decoding failed\n // Embed as base64 if requested\n if (embedAssets) {\n finalContent = `data:${effectiveMime};base64,${assetContentBuffer.toString('base64')}`;\n } else {\n finalContent = undefined; // Not embedding, content remains undefined\n }\n }\n }\n // Embed BINARY types as base64 if requested\n else if (BINARY_ASSET_TYPES.has(asset.type)) {\n if (embedAssets) {\n finalContent = `data:${effectiveMime};base64,${assetContentBuffer.toString('base64')}`;\n } else {\n finalContent = undefined; // Not embedding\n }\n cssContentForParsing = undefined; // Not CSS\n }\n // Handle 'other' types: try text, fallback to base64 if embedding\n else { // asset.type === 'other' or unknown\n cssContentForParsing = undefined; // Not CSS\n if (embedAssets) {\n try {\n const attemptedTextContent = assetContentBuffer.toString('utf-8');\n if (isUtf8DecodingLossy(assetContentBuffer, attemptedTextContent)) {\n logger?.warn(`Couldn't embed unclassified asset ${asset.url} as text due to invalid UTF-8 sequences. Falling back to base64 (octet-stream).`);\n finalContent = `data:application/octet-stream;base64,${assetContentBuffer.toString('base64')}`;\n } else {\n finalContent = attemptedTextContent;\n logger?.debug(`Successfully embedded unclassified asset ${asset.url} as text.`);\n }\n } catch (decodeError) {\n logger?.warn(`Error during text decoding for unclassified asset ${asset.url}: ${decodeError instanceof Error ? decodeError.message : String(decodeError)}. Falling back to base64.`);\n finalContent = `data:application/octet-stream;base64,${assetContentBuffer.toString('base64')}`;\n }\n } else {\n finalContent = undefined; // Not embedding\n }\n }\n } else {\n // Content was not fetched\n finalContent = undefined;\n cssContentForParsing = undefined;\n }\n\n // --- Store the final asset ---\n // Use the resolved URL as the key and in the asset object itself\n finalAssetsMap.set(asset.url, { ...asset, url: asset.url, content: finalContent });\n // Note: URL is already marked in processedOrQueuedUrls\n\n // --- Process CSS for nested assets ---\n // Only if it's CSS and we successfully decoded its content for parsing\n if (asset.type === 'css' && cssContentForParsing) {\n // Determine the base URL *for this specific CSS file*\n const cssBaseContextUrl = determineBaseUrl(asset.url, logger);\n logger?.debug(`CSS base context for resolving nested assets within ${asset.url}: ${cssBaseContextUrl}`);\n\n if (cssBaseContextUrl) {\n // Get the list of *potentially* new assets discovered in this CSS\n const newlyDiscoveredAssets = extractUrlsFromCSS(\n cssContentForParsing,\n cssBaseContextUrl,\n logger\n );\n\n if (newlyDiscoveredAssets.length > 0) {\n logger?.debug(`Discovered ${newlyDiscoveredAssets.length} nested assets in CSS ${asset.url}. Checking against queue...`);\n for (const newAsset of newlyDiscoveredAssets) {\n // CHECK: Add to queue only if this resolved URL hasn't been processed OR queued before.\n // Use the 'processedOrQueuedUrls' Set which tracks both.\n if (!processedOrQueuedUrls.has(newAsset.url)) {\n processedOrQueuedUrls.add(newAsset.url); // Mark as queued now\n assetsToProcess.push(newAsset); // Add to the main queue for the *next* iteration\n logger?.debug(` -> Queued new nested asset: ${newAsset.url}`);\n } else {\n logger?.debug(` -> Skipping already processed/queued nested asset: ${newAsset.url}`);\n }\n }\n }\n } else {\n logger?.warn(`Could not determine base URL context for CSS file ${asset.url}. 
Cannot resolve nested relative paths within it.`);\n }\n } // End if(asset.type === 'css' && cssContentForParsing)\n } // End for loop over currentBatch\n } // End while loop\n\n const finalIterationCount = iterationCount > MAX_ASSET_EXTRACTION_ITERATIONS ? 'MAX+' : iterationCount;\n logger?.info(`✅ Asset extraction COMPLETE! Found ${finalAssetsMap.size} unique assets in ${finalIterationCount} iterations.`);\n\n // Return the original HTML content and the final list of processed assets\n return {\n htmlContent: parsed.htmlContent,\n assets: Array.from(finalAssetsMap.values())\n };\n}","/**\n * @file src/core/minifier.ts\n * @description\n * Provides the core functionality for minifying HTML, CSS, and JavaScript content\n * within the PortaPack bundling process. Uses `html-minifier-terser`, `clean-css`,\n * and `terser` libraries. Handles errors gracefully by logging warnings and returning\n * original content for the specific asset that failed minification.\n * Includes workarounds for apparent issues in @types/clean-css definitions.\n */\n\n// --- Imports ---\nimport { minify as htmlMinify } from 'html-minifier-terser';\nimport type { Options as HtmlMinifyOptions } from 'html-minifier-terser';\nimport CleanCSS from 'clean-css';\n// Import specific types from clean-css. Note: Using these directly caused issues.\nimport type { Options as CleanCSSOptions } from 'clean-css';\nimport { minify as jsMinify } from 'terser';\nimport type { MinifyOptions, MinifyOutput } from 'terser';\n// Import necessary types from project - ensure these paths are correct and use .js extension\nimport type { ParsedHTML, BundleOptions, Asset } from '../types.js';\nimport { Logger } from '../utils/logger.js';\n\n// --- Helper Interface for Workaround ---\n\n/**\n * Represents the expected structure of the synchronous output from clean-css.\n * Used with type assertion as a workaround for problematic official type definitions.\n */\nexport interface CleanCSSSyncResult { // <<< MUST HAVE 'export'\n styles?: string;\n errors?: string[];\n warnings?: string[];\n stats?: {\n originalSize: number;\n minifiedSize: number;\n };\n}\n\n// --- Default Minification Options Constants ---\n\n/**\n * Default options for html-minifier-terser.\n */\nconst HTML_MINIFY_OPTIONS: HtmlMinifyOptions = {\n collapseWhitespace: true,\n removeComments: true,\n conservativeCollapse: true,\n minifyCSS: false, // Handled separately\n minifyJS: false, // Handled separately\n removeAttributeQuotes: false,\n removeRedundantAttributes: true,\n removeScriptTypeAttributes: true,\n removeStyleLinkTypeAttributes: true,\n useShortDoctype: true,\n};\n\n/**\n * Default options for clean-css.\n * Explicitly set returnPromise to false to ensure synchronous operation.\n */\nconst CSS_MINIFY_OPTIONS: CleanCSSOptions = {\n returnPromise: false, // <<< *** Ensures sync operation at runtime ***\n level: {\n 1: { // Level 1 optimizations (safe transformations)\n optimizeBackground: true,\n optimizeBorderRadius: true,\n optimizeFilter: true,\n optimizeFontWeight: true,\n optimizeOutline: true,\n },\n 2: { // Level 2 optimizations (structural changes, generally safe)\n mergeMedia: true,\n mergeNonAdjacentRules: true,\n removeDuplicateFontRules: true,\n removeDuplicateMediaBlocks: true,\n removeDuplicateRules: true,\n restructureRules: true,\n }\n }\n // Note: Type checking based on these options seems problematic with current @types/clean-css\n};\n\n/**\n * Default options for terser (JavaScript minifier).\n */\nconst JS_MINIFY_OPTIONS: MinifyOptions = {\n compress: 
{\n dead_code: true,\n drop_console: false,\n drop_debugger: true,\n ecma: 2020,\n keep_classnames: true,\n keep_fnames: true\n },\n mangle: {\n keep_classnames: true,\n keep_fnames: true\n },\n format: { comments: false }\n};\n\n// --- Main Minification Function ---\n\n/**\n * Applies HTML, CSS, and JS minification conditionally based on BundleOptions.\n * Uses type assertion for clean-css result and @ts-ignore for its constructor\n * due to persistent type definition issues.\n * Creates and returns a *new* ParsedHTML object containing the potentially minified content.\n *\n * @param {ParsedHTML} parsed - Input ParsedHTML object.\n * @param {BundleOptions} [options={}] - Options controlling minification.\n * @param {Logger} [logger] - Optional logger instance.\n * @returns {Promise<ParsedHTML>} A Promise resolving to a new ParsedHTML object.\n */\nexport async function minifyAssets(\n parsed: ParsedHTML,\n options: BundleOptions = {},\n logger?: Logger\n): Promise<ParsedHTML> {\n const { htmlContent, assets } = parsed;\n\n // Use optional chaining and nullish coalescing for safer access\n const currentHtmlContent = htmlContent ?? '';\n const currentAssets = assets ?? [];\n\n\n if (!currentHtmlContent && currentAssets.length === 0) {\n logger?.debug('Minification skipped: No content.');\n return { htmlContent: currentHtmlContent, assets: currentAssets };\n }\n\n const minifyFlags = {\n minifyHtml: options.minifyHtml !== false,\n minifyCss: options.minifyCss !== false,\n minifyJs: options.minifyJs !== false\n };\n\n logger?.debug(`Minification flags: ${JSON.stringify(minifyFlags)}`);\n\n const minifiedAssets: Asset[] = await Promise.all(\n currentAssets.map(async (asset): Promise<Asset> => {\n // Make a shallow copy to avoid modifying the original asset object\n let processedAsset = { ...asset };\n\n if (typeof processedAsset.content !== 'string' || processedAsset.content.length === 0) {\n return processedAsset; // Return the copy\n }\n\n let newContent = processedAsset.content; // Work with the content of the copy\n const assetIdentifier = processedAsset.url || `inline ${processedAsset.type}`;\n\n try {\n // --- Minify CSS (Synchronous Call with Type Assertion Workaround) ---\n if (minifyFlags.minifyCss && processedAsset.type === 'css') {\n logger?.debug(`Minifying CSS: ${assetIdentifier}`);\n\n // @ts-ignore - Suppress error TS2769 due to likely faulty @types/clean-css constructor overload definitions for sync mode.\n const cssMinifier = new CleanCSS(CSS_MINIFY_OPTIONS); // <<< @ts-ignore HERE\n\n // WORKAROUND using Type Assertion\n const result = cssMinifier.minify(processedAsset.content) as CleanCSSSyncResult;\n\n // Access properties based on the asserted type\n if (result.errors && result.errors.length > 0) {\n logger?.warn(`⚠️ CleanCSS failed for ${assetIdentifier}: ${result.errors.join(', ')}`);\n } else {\n if (result.warnings && result.warnings.length > 0) {\n logger?.debug(`CleanCSS warnings for ${assetIdentifier}: ${result.warnings.join(', ')}`);\n }\n if (result.styles) {\n newContent = result.styles; // Update newContent\n logger?.debug(`CSS minified successfully: ${assetIdentifier}`);\n } else {\n logger?.warn(`⚠️ CleanCSS produced no styles but reported no errors for ${assetIdentifier}. 
Keeping original.`);\n }\n }\n }\n\n // --- Minify JS (Asynchronous Call) ---\n if (minifyFlags.minifyJs && processedAsset.type === 'js') {\n logger?.debug(`Minifying JS: ${assetIdentifier}`);\n const result: MinifyOutput = await jsMinify(processedAsset.content, JS_MINIFY_OPTIONS);\n if (result.code) {\n newContent = result.code; // Update newContent\n logger?.debug(`JS minified successfully: ${assetIdentifier}`);\n } else {\n const terserError = (result as any).error;\n if (terserError) {\n logger?.warn(`⚠️ Terser failed for ${assetIdentifier}: ${terserError.message || terserError}`);\n } else {\n logger?.warn(`⚠️ Terser produced no code but reported no errors for ${assetIdentifier}. Keeping original.`);\n }\n }\n }\n } catch (err: unknown) {\n const errorMessage = err instanceof Error ? err.message : String(err);\n logger?.warn(`⚠️ Failed to minify asset ${assetIdentifier} (${processedAsset.type}): ${errorMessage}`);\n // Keep original content if error occurs (newContent remains unchanged)\n }\n\n // Update the content property of the copied asset\n processedAsset.content = newContent;\n return processedAsset; // Return the modified copy\n })\n );\n\n // --- Minify the main HTML content itself ---\n let finalHtml = currentHtmlContent; // Start with potentially empty original HTML\n if (minifyFlags.minifyHtml && finalHtml.length > 0) {\n logger?.debug('Minifying HTML content...');\n try {\n finalHtml = await htmlMinify(finalHtml, {\n ...HTML_MINIFY_OPTIONS,\n minifyCSS: minifyFlags.minifyCss,\n minifyJS: minifyFlags.minifyJs\n });\n logger?.debug('HTML minified successfully.');\n } catch (err: unknown) {\n const errorMessage = err instanceof Error ? err.message : String(err);\n logger?.warn(`⚠️ HTML minification failed: ${errorMessage}`);\n // Keep original HTML (finalHtml already holds it)\n }\n } else if (finalHtml.length > 0) {\n logger?.debug('HTML minification skipped (disabled).');\n }\n\n\n // --- Return the final result object ---\n return {\n htmlContent: finalHtml,\n assets: minifiedAssets // The array of processed asset copies\n };\n}","/**\n * @file src/core/packer.ts\n * @description Inlines CSS, JS, and images into an HTML document for full portability.\n * Uses Cheerio for safe DOM manipulation.\n */\n\nimport * as cheerio from 'cheerio';\n// Import CheerioAPI type\nimport type { CheerioAPI } from 'cheerio';\nimport type { ParsedHTML, Asset } from '../types'; // Assuming correct path\nimport { Logger } from '../utils/logger'; // Assuming correct path\nimport { guessMimeType } from '../utils/mime'; // Assuming correct path\n\n/**\n * Escapes characters potentially problematic within inline `<script>` tags.\n */\nfunction escapeScriptContent(code: string): string {\n return code.replace(/<\\/(script)/gi, '<\\\\/$1');\n}\n\n/**\n * Ensures a `<base href=\"./\">` tag exists within the `<head>` of the HTML.\n * Creates <head> or even <html> if necessary using Cheerio.\n *\n * @param {CheerioAPI} $ - The Cheerio instance representing the HTML document.\n * @param {Logger} [logger] - Optional logger instance.\n */\nfunction ensureBaseTag($: CheerioAPI, logger?: Logger): void {\n let head = $('head');\n\n // If <head> doesn't exist, create it, ensuring <html> exists first.\n if (head.length === 0) {\n logger?.debug('No <head> tag found. Creating <head> and ensuring <html> exists.');\n let htmlElement = $('html');\n\n // If <html> doesn't exist, create it and wrap the existing content.\n if (htmlElement.length === 0) {\n logger?.debug('No <html> tag found. 
Wrapping content in <html><body>...');\n const bodyContent = $.root().html() || '';\n $.root().empty();\n // FIX: Use 'as any' for type assertion\n htmlElement = $('<html>').appendTo($.root()) as any;\n // FIX: Use 'as any' for type assertion\n head = $('<head>').appendTo(htmlElement) as any;\n $('<body>').html(bodyContent).appendTo(htmlElement);\n } else {\n // If <html> exists but <head> doesn't, prepend <head> to <html>\n // FIX: Use 'as any' for type assertion\n head = $('<head>').prependTo(htmlElement) as any;\n }\n }\n\n // Now head should represent the head element selection.\n // Check if <base> exists within the guaranteed <head>.\n // Use type guard just in case head couldn't be created properly\n if (head && head.length > 0 && head.find('base[href]').length === 0) {\n logger?.debug('Prepending <base href=\"./\"> to <head>.');\n head.prepend('<base href=\"./\">');\n }\n}\n\n\n/**\n * Inlines assets into the HTML document using Cheerio for safe DOM manipulation.\n */\nfunction inlineAssets($: CheerioAPI, assets: Asset[], logger?: Logger): void {\n logger?.debug(`Inlining ${assets.filter(a => a.content).length} assets with content...`);\n const assetMap = new Map<string, Asset>(assets.map(asset => [asset.url, asset]));\n\n // 1. Inline CSS (<link rel=\"stylesheet\" href=\"...\">)\n $('link[rel=\"stylesheet\"][href]').each((_, el) => {\n const link = $(el);\n const href = link.attr('href');\n const asset = href ? assetMap.get(href) : undefined;\n if (asset?.content && typeof asset.content === 'string') {\n if (asset.content.startsWith('data:')) {\n logger?.debug(`Replacing link with style tag using existing data URI: ${asset.url}`);\n const styleTag = $('<style>').text(`@import url(\"${asset.content}\");`);\n link.replaceWith(styleTag);\n } else {\n logger?.debug(`Inlining CSS: ${asset.url}`);\n const styleTag = $('<style>').text(asset.content);\n link.replaceWith(styleTag);\n }\n } else if (href) {\n logger?.warn(`Could not inline CSS: ${href}. Content missing or invalid.`);\n }\n });\n\n // 2. Inline JS (<script src=\"...\">)\n $('script[src]').each((_, el) => {\n const script = $(el);\n const src = script.attr('src');\n const asset = src ? assetMap.get(src) : undefined;\n if (asset?.content && typeof asset.content === 'string') {\n logger?.debug(`Inlining JS: ${asset.url}`);\n const inlineScript = $('<script>');\n inlineScript.text(escapeScriptContent(asset.content));\n Object.entries(script.attr() || {}).forEach(([key, value]) => {\n if (key.toLowerCase() !== 'src') inlineScript.attr(key, value);\n });\n script.replaceWith(inlineScript);\n } else if (src) {\n logger?.warn(`Could not inline JS: ${src}. Content missing or not string.`);\n }\n });\n\n // 3. Inline Images (<img src=\"...\">, <video poster=\"...\">, etc.)\n $('img[src], video[poster], input[type=\"image\"][src]').each((_, el) => {\n const element = $(el);\n const srcAttr = element.is('video') ? 'poster' : 'src';\n const src = element.attr(srcAttr);\n const asset = src ? assetMap.get(src) : undefined;\n if (asset?.content && typeof asset.content === 'string' && asset.content.startsWith('data:')) {\n logger?.debug(`Inlining image via ${srcAttr}: ${asset.url}`);\n element.attr(srcAttr, asset.content);\n } else if (src) {\n logger?.warn(`Could not inline image via ${srcAttr}: ${src}. Content missing or not a data URI.`);\n }\n });\n\n // 4. 
Inline srcset attributes (<img srcset=\"...\">, <source srcset=\"...\">)\n $('img[srcset], source[srcset]').each((_, el) => {\n const element = $(el);\n const srcset = element.attr('srcset');\n if (!srcset) return;\n const newSrcsetParts: string[] = [];\n let changed = false;\n srcset.split(',').forEach(part => {\n const trimmedPart = part.trim();\n const [url, descriptor] = trimmedPart.split(/\\s+/, 2);\n const asset = url ? assetMap.get(url) : undefined;\n if (asset?.content && typeof asset.content === 'string' && asset.content.startsWith('data:')) {\n newSrcsetParts.push(`${asset.content}${descriptor ? ' ' + descriptor : ''}`);\n changed = true;\n } else {\n newSrcsetParts.push(trimmedPart);\n }\n });\n if (changed) {\n element.attr('srcset', newSrcsetParts.join(', '));\n }\n });\n\n // 5. Inline other asset types (video, audio sources)\n $('video[src], audio[src], video > source[src], audio > source[src]').each((_, el) => {\n const element = $(el);\n const src = element.attr('src');\n const asset = src ? assetMap.get(src) : undefined;\n if (asset?.content && typeof asset.content === 'string' && asset.content.startsWith('data:')) {\n logger?.debug(`Inlining media source: ${asset.url}`);\n element.attr('src', asset.content);\n }\n });\n\n logger?.debug('Asset inlining process complete.');\n}\n\n\n/**\n * Packs a ParsedHTML object into a single, self-contained HTML string.\n * This involves ensuring a base tag exists and inlining all assets\n * that have content available. Uses Cheerio for safe DOM manipulation.\n *\n * @export\n * @param {ParsedHTML} parsed - The parsed HTML document object, including its list of assets (which may have content).\n * @param {Logger} [logger] - Optional logger instance.\n * @returns {string} The packed HTML string with assets inlined. Returns a minimal HTML structure if input is invalid.\n */\nexport function packHTML(parsed: ParsedHTML, logger?: Logger): string {\n const { htmlContent, assets } = parsed;\n if (!htmlContent || typeof htmlContent !== 'string') {\n logger?.warn('Packer received empty or invalid htmlContent. Returning minimal HTML shell.');\n return '<!DOCTYPE html><html><head><base href=\"./\"></head><body></body></html>';\n }\n\n logger?.debug('Loading HTML content into Cheerio for packing...');\n const $ = cheerio.load(htmlContent);\n\n logger?.debug('Ensuring <base> tag exists...');\n ensureBaseTag($, logger); // Ensure base tag safely\n\n logger?.debug('Starting asset inlining...');\n inlineAssets($, assets, logger); // Inline assets safely\n\n logger?.debug('Generating final packed HTML string...');\n const finalHtml = $.html();\n\n logger?.debug(`Packing complete. 
Final size: ${Buffer.byteLength(finalHtml)} bytes.`);\n return finalHtml;\n}","/**\n * @file src/utils/logger.ts\n * @description Provides a standardized logging utility with configurable levels (based on an enum)\n * to control output verbosity throughout the application (core, API, CLI).\n */\n\n// FIX: Use a regular import for the enum, not 'import type'\nimport { LogLevel } from '../types';\n// Assuming LogLevel enum is defined and exported in '../types' like:\n// export enum LogLevel { NONE = 0, ERROR = 1, WARN = 2, INFO = 3, DEBUG = 4 }\n\n/**\n * Optional configuration for creating a Logger instance.\n * (Note: Currently constructor only accepts LogLevel directly)\n */\nexport interface LoggerOptions {\n level?: LogLevel;\n}\n\n/**\n * A simple logger class that allows filtering messages based on severity levels.\n * Uses standard console methods (debug, info, warn, error) for output.\n */\nexport class Logger {\n /** The current minimum log level required for a message to be output. */\n public level: LogLevel;\n\n /**\n * Creates a new Logger instance.\n * Defaults to LogLevel.INFO if no level is provided.\n *\n * @param {LogLevel} [level=LogLevel.INFO] - The initial log level for this logger instance.\n * Must be one of the values from the LogLevel enum.\n */\n constructor(level: LogLevel = LogLevel.INFO) { // Defaulting to INFO level using the enum value\n // Ensure a valid LogLevel enum member is provided or default correctly\n this.level = (level !== undefined && LogLevel[level] !== undefined)\n ? level\n : LogLevel.INFO; // Use the enum value for default\n }\n\n /**\n * Updates the logger's current level. Messages below this level will be suppressed.\n *\n * @param {LogLevel} level - The new log level to set. Must be a LogLevel enum member.\n */\n setLevel(level: LogLevel): void {\n this.level = level;\n }\n\n /**\n * Logs a debug message if the current log level is DEBUG or higher.\n *\n * @param {string} message - The debug message string.\n */\n debug(message: string): void {\n // Use enum member for comparison\n if (this.level >= LogLevel.DEBUG) {\n console.debug(`[DEBUG] ${message}`);\n }\n }\n\n /**\n * Logs an informational message if the current log level is INFO or higher.\n *\n * @param {string} message - The informational message string.\n */\n info(message: string): void {\n // Use enum member for comparison\n if (this.level >= LogLevel.INFO) {\n console.info(`[INFO] ${message}`);\n }\n }\n\n /**\n * Logs a warning message if the current log level is WARN or higher.\n *\n * @param {string} message - The warning message string.\n */\n warn(message: string): void {\n // Use enum member for comparison\n if (this.level >= LogLevel.WARN) {\n console.warn(`[WARN] ${message}`);\n }\n }\n\n /**\n * Logs an error message if the current log level is ERROR or higher.\n *\n * @param {string} message - The error message string.\n */\n error(message: string): void {\n // Use enum member for comparison\n if (this.level >= LogLevel.ERROR) {\n console.error(`[ERROR] ${message}`);\n }\n }\n\n /**\n * Static factory method to create a Logger instance based on a simple boolean `verbose` flag.\n *\n * @static\n * @param {{ verbose?: boolean }} [options={}] - An object potentially containing a `verbose` flag.\n * @returns {Logger} A new Logger instance set to LogLevel.DEBUG if options.verbose is true,\n * otherwise set to LogLevel.INFO.\n */\n static fromVerboseFlag(options: { verbose?: boolean } = {}): Logger {\n // Use enum members for assignment\n return new Logger(options.verbose ? 
LogLevel.DEBUG : LogLevel.INFO);\n }\n\n /**\n * Static factory method to create a Logger instance based on a LogLevel string name.\n * Useful for creating a logger from config files or environments variables.\n *\n * @static\n * @param {string | undefined} levelName - The name of the log level (e.g., 'debug', 'info', 'warn', 'error', 'silent'/'none'). Case-insensitive.\n * @param {LogLevel} [defaultLevel=LogLevel.INFO] - The level to use if levelName is invalid or undefined.\n * @returns {Logger} A new Logger instance set to the corresponding LogLevel.\n */\n static fromLevelName(levelName?: string, defaultLevel: LogLevel = LogLevel.INFO): Logger {\n if (!levelName) {\n return new Logger(defaultLevel);\n }\n switch (levelName.toLowerCase()) {\n // Return enum members\n case 'debug': return new Logger(LogLevel.DEBUG);\n case 'info': return new Logger(LogLevel.INFO);\n case 'warn': return new Logger(LogLevel.WARN);\n case 'error': return new Logger(LogLevel.ERROR);\n case 'silent':\n case 'none': return new Logger(LogLevel.NONE);\n default:\n // Use console.warn directly here as logger might not be ready\n console.warn(`[Logger] Invalid log level name \"${levelName}\". Defaulting to ${LogLevel[defaultLevel]}.`);\n return new Logger(defaultLevel);\n }\n }\n}","/**\n * @file src/utils/slugify.ts\n * @description Converts any URL or string to a safe HTML slug usable in IDs, hashes, filenames, etc.\n */\n\n/**\n * Converts a URL or path string into a clean slug suitable for use as an HTML ID or filename segment.\n * - Handles relative and absolute URLs.\n * - Removes common file extensions (.html, .htm, .php, etc.).\n * - Removes URL fragments (#...).\n * - Attempts to parse pathname and search parameters.\n * - Replaces spaces, slashes, and other unsafe characters with hyphens.\n * - Converts to lowercase.\n * - Collapses and trims hyphens.\n * - Returns 'index' for empty or invalid input.\n *\n * @param url - The raw URL or string to slugify.\n * @returns A safe, lowercase slug string.\n */\nexport function slugify(url: string): string {\n if (!url || typeof url !== 'string') return 'index';\n\n let cleaned = url.trim();\n let pathAndSearch = '';\n\n try {\n const urlObj = new URL(url, 'https://placeholder.base');\n pathAndSearch = (urlObj.pathname ?? '') + (urlObj.search ?? '');\n } catch {\n pathAndSearch = cleaned.split('#')[0]; // Remove fragment\n }\n\n // Decode URI components AFTER parsing from URL to handle %20 etc.\n try {\n cleaned = decodeURIComponent(pathAndSearch);\n } catch (e) {\n cleaned = pathAndSearch; // Proceed if decoding fails\n }\n\n cleaned = cleaned\n // Remove common web extensions FIRST\n .replace(/\\.(html?|php|aspx?|jsp)$/i, '')\n // Replace path separators and common separators/spaces with a hyphen\n .replace(/[\\s/?=&\\\\]+/g, '-') // Target spaces, /, ?, =, &, \\\n // Remove any remaining characters that are not alphanumeric, hyphen, underscore, or period\n .replace(/[^\\w._-]+/g, '') // Allow word chars, '.', '_', '-'\n // Collapse consecutive hyphens\n .replace(/-+/g, '-')\n // Trim leading/trailing hyphens\n .replace(/^-+|-+$/g, '')\n // Convert to lowercase\n .toLowerCase();\n\n // Return 'index' if the process results in an empty string\n return cleaned || 'index';\n}\n\n\n/**\n * Converts a URL or path string into a clean slug suitable for use as an HTML ID.\n * Note: This implementation might be very similar or identical to slugify depending on exact needs.\n * This example uses the refined slugify logic. 
Consider consolidating if appropriate.\n *\n * @param rawUrl - The raw page URL or path.\n * @returns A safe, lowercase slug string (e.g. \"products-item-1\", \"search-q-test-page-2\")\n */\nexport function sanitizeSlug(rawUrl: string): string {\n // Re-use the improved slugify logic for consistency\n return slugify(rawUrl);\n}","/**\n * @file bundler.ts\n * @description Core bundling functions to handle both single and multi-page HTML documents. This includes asset extraction, optional minification, and full inlining into a self-contained HTML file.\n * @version 1.3.0\n */\n\nimport { dirname, resolve } from 'path';\nimport { pathToFileURL, URL } from 'url';\nimport { extractAssets } from './extractor.js';\nimport { minifyAssets } from './minifier.js';\nimport { packHTML } from './packer.js';\nimport { Logger } from '../utils/logger.js';\nimport { ParsedHTML, BundleOptions, PageEntry } from '../types.js';\nimport { sanitizeSlug, slugify } from '../utils/slugify.js';\n\n/**\n * Determines the appropriate base URL for resolving relative assets\n * based on input HTML file path or URL.\n *\n * @param input - The original HTML path or URL.\n * @param logger - Optional logger instance.\n * @returns The resolved base URL, ending in a trailing slash.\n */\nfunction determineBaseUrl(input: string, logger?: Logger): string {\n try {\n if (input.startsWith('http://') || input.startsWith('https://')) {\n const url = new URL(input);\n url.pathname = url.pathname.substring(0, url.pathname.lastIndexOf('/') + 1);\n url.search = '';\n url.hash = '';\n const baseUrl = url.toString();\n logger?.debug(`Determined remote base URL: ${baseUrl}`);\n return baseUrl;\n } else {\n const absoluteDir = dirname(resolve(input));\n const baseUrl = pathToFileURL(absoluteDir + '/').href;\n logger?.debug(`Determined local base URL: ${baseUrl}`);\n return baseUrl;\n }\n } catch (error: any) {\n logger?.error(`Failed to determine base URL for \"${input}\": ${error.message}`);\n return './';\n }\n}\n\n/**\n * Creates a self-contained HTML file from a parsed HTML structure and options.\n *\n * @param parsedHtml - The parsed HTML document.\n * @param inputPath - The original input file or URL for base URL calculation.\n * @param options - Optional bundling options.\n * @param logger - Optional logger instance.\n * @returns A fully inlined and bundled HTML string.\n */\nexport async function bundleSingleHTML(\n parsedHtml: ParsedHTML,\n inputPath: string,\n options: BundleOptions = {},\n logger?: Logger\n): Promise<string> {\n try {\n const defaultOptions: Required<BundleOptions> = {\n embedAssets: true,\n minifyHtml: true,\n minifyJs: true,\n minifyCss: true,\n baseUrl: '',\n verbose: false,\n dryRun: false,\n recursive: false,\n output: '',\n logLevel: logger?.level ?? 
3,\n };\n\n const mergedOptions = { ...defaultOptions, ...options };\n\n if (!mergedOptions.baseUrl) {\n mergedOptions.baseUrl = determineBaseUrl(inputPath, logger);\n }\n\n logger?.debug(`Starting HTML bundling for ${inputPath}`);\n logger?.debug(`Effective options: ${JSON.stringify(mergedOptions, null, 2)}`);\n\n const extracted = await extractAssets(parsedHtml, mergedOptions.embedAssets, mergedOptions.baseUrl, logger);\n const minified = await minifyAssets(extracted, mergedOptions, logger);\n const result = packHTML(minified, logger);\n\n logger?.info(`Single HTML bundling complete for: ${inputPath}`);\n return result;\n } catch (error: any) {\n logger?.error(`Error during single HTML bundling: ${error.message}`);\n throw error;\n }\n}\n\n/**\n * Combines multiple HTML pages into a single HTML file with client-side routing.\n *\n * @param pages - An array of PageEntry objects (each with a URL and HTML content).\n * @param logger - Optional logger for diagnostics.\n * @returns A complete HTML document as a string.\n * @throws {Error} If the input is invalid or contains no usable pages.\n */\nexport function bundleMultiPageHTML(pages: PageEntry[], logger?: Logger): string {\n if (!Array.isArray(pages)) {\n const errorMsg = 'Input pages must be an array of PageEntry objects';\n logger?.error(errorMsg);\n throw new Error(errorMsg);\n }\n\n logger?.info(`Bundling ${pages.length} pages into a multi-page HTML document.`);\n\n const validPages = pages.filter(page => {\n const isValid = page && typeof page === 'object' && typeof page.url === 'string' && typeof page.html === 'string';\n if (!isValid) logger?.warn('Skipping invalid page entry');\n return isValid;\n });\n\n if (validPages.length === 0) {\n const errorMsg = 'No valid page entries found in input array';\n logger?.error(errorMsg);\n throw new Error(errorMsg);\n }\n\n const slugMap = new Map<string, string>();\n const usedSlugs = new Set<string>();\n\n for (const page of validPages) {\n const baseSlug = sanitizeSlug(page.url);\n let slug = baseSlug;\n let counter = 1;\n while (usedSlugs.has(slug)) {\n slug = `${baseSlug}-${counter++}`;\n logger?.warn(`Slug collision detected for \"${page.url}\". 
Using \"${slug}\" instead.`);\n }\n usedSlugs.add(slug);\n slugMap.set(page.url, slug);\n }\n\n const defaultPageSlug = slugMap.get(validPages[0].url);\n\n let output = `<!DOCTYPE html>\n<html lang=\"en\">\n<head>\n <meta charset=\"UTF-8\">\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">\n <title>Multi-Page Bundle</title>\n</head>\n<body>\n <nav id=\"main-nav\">\n ${validPages.map(p => {\n const slug = slugMap.get(p.url)!;\n const label = p.url.split('/').pop()?.split('.')[0] || 'Page';\n return `<a href=\"#${slug}\" data-page=\"${slug}\">${label}</a>`;\n }).join('\\n')}\n </nav>\n <div id=\"page-container\"></div>\n ${validPages.map(p => {\n const slug = slugMap.get(p.url)!;\n return `<template id=\"page-${slug}\">${p.html}</template>`;\n }).join('\\n')}\n <script id=\"router-script\">\n document.addEventListener('DOMContentLoaded', function() {\n function navigateTo(slug) {\n const template = document.getElementById('page-' + slug);\n const container = document.getElementById('page-container');\n if (!template || !container) return;\n container.innerHTML = '';\n container.appendChild(template.content.cloneNode(true));\n document.querySelectorAll('#main-nav a').forEach(link => {\n if (link.getAttribute('data-page') === slug) link.classList.add('active');\n else link.classList.remove('active');\n });\n if (window.location.hash.substring(1) !== slug) {\n history.pushState(null, '', '#' + slug);\n }\n }\n\n window.addEventListener('hashchange', () => {\n const slug = window.location.hash.substring(1);\n if (document.getElementById('page-' + slug)) navigateTo(slug);\n });\n\n document.querySelectorAll('#main-nav a').forEach(link => {\n link.addEventListener('click', function(e) {\n e.preventDefault();\n const slug = this.getAttribute('data-page');\n navigateTo(slug);\n });\n });\n\n const initial = window.location.hash.substring(1);\n navigateTo(document.getElementById('page-' + initial) ? initial : '${defaultPageSlug}');\n });\n </script>\n</body>\n</html>`;\n\n logger?.info(`Multi-page bundle generated. 
Size: ${Buffer.byteLength(output, 'utf-8')} bytes.`);\n return output;\n}\n","/**\n * @file src/core/web-fetcher.ts\n * @description Provides functions for fetching web page content using Puppeteer,\n * including recursive site crawling capabilities.\n */\n\nimport * as puppeteer from 'puppeteer';\nimport * as fs from 'fs/promises';\nimport { Logger } from '../utils/logger'; // Assuming logger is in ../utils\nimport { BuildResult, PageEntry } from '../types'; // Assuming types are defined here\nimport { bundleMultiPageHTML } from './bundler'; // Assuming bundler is here\n\n/**\n * @typedef {object} CrawlResult\n * @property {string} url - The URL of the crawled page.\n * @property {string} html - The HTML content of the crawled page.\n */\n\n/**\n * Fetches the rendered HTML content and basic metadata for a single web page URL.\n * Manages its own browser instance lifecycle (launch and close).\n *\n * @param {string} url - The fully qualified URL to fetch.\n * @param {Logger} [logger] - Optional logger instance for debug/info messages.\n * @param {number} [timeout=30000] - Navigation timeout in milliseconds.\n * @returns {Promise<BuildResult>} A promise that resolves with the fetched HTML\n * and metadata, or rejects on critical errors.\n * @throws {Error} Throws errors from Puppeteer launch, page creation, or navigation failures.\n */\nexport async function fetchAndPackWebPage(\n url: string,\n logger?: Logger,\n timeout: number = 30000\n): Promise<BuildResult> {\n let browser: puppeteer.Browser | null = null; // Initialize browser to null\n const start = Date.now();\n logger?.debug(`Initiating fetch for single page: ${url}`);\n\n try {\n browser = await puppeteer.launch({ headless: true });\n logger?.debug(`Browser launched for ${url}`);\n const page = await browser.newPage();\n logger?.debug(`Page created for ${url}`);\n\n try {\n logger?.debug(`Navigating to ${url} with timeout ${timeout}ms`);\n await page.goto(url, { waitUntil: 'networkidle2', timeout: timeout });\n logger?.debug(`Navigation successful for ${url}`);\n const html = await page.content();\n logger?.debug(`Content retrieved for ${url}`);\n\n const metadata: BuildResult['metadata'] = {\n input: url,\n outputSize: Buffer.byteLength(html, 'utf-8'),\n assetCount: 0, // Basic fetch doesn't track assets\n buildTimeMs: Date.now() - start,\n errors: [], // No errors if we reached this point\n };\n\n await page.close(); // Close the page specifically\n logger?.debug(`Page closed for ${url}`);\n // await browser.close(); // Close the browser instance\n logger?.debug(`Browser closed for ${url}`);\n browser = null; // Ensure browser is marked as closed\n\n return { html, metadata };\n\n } catch (pageError: any) {\n logger?.error(`Error during page processing for ${url}: ${pageError.message}`);\n // Ensure page is closed even if an error occurred during processing\n try { await page.close();\n\n } catch (closeErr) { \n throw closeErr;\n }\n throw pageError; // Re-throw the original page processing error\n }\n } catch (launchError: any) {\n logger?.error(`Critical error during browser launch or page creation for ${url}: ${launchError.message}`);\n // Ensure browser is closed if launch succeeded but newPage failed, etc.\n // Although if launch fails, browser might be null.\n if (browser) {\n try { await browser.close(); } catch (closeErr) { /* Ignore browser close error */ }\n }\n throw launchError; // Re-throw the original launch/setup error\n } finally {\n // Final check: If browser somehow wasn't closed and isn't null, attempt 
closure.\n // This handles edge cases where errors might bypass earlier closes.\n if (browser) {\n logger?.warn(`Closing browser in final cleanup for ${url}. This might indicate an unusual error path.`);\n try { await browser.close(); } catch (closeErr) { /* Ignore final browser close error */ }\n }\n }\n}\n\n/**\n * Internal function to recursively crawl a website starting from a given URL.\n * Uses a single browser instance and manages pages for efficiency during crawl.\n * Implements Breadth-First Search (BFS) using a queue.\n *\n * @private\n * @param {string} startUrl - The initial URL to start crawling from.\n * @param {number} maxDepth - The maximum depth of links to follow (1 means only the start URL).\n * @param {Logger} [logger] - Optional logger instance.\n * @returns {Promise<PageEntry[]>} A promise resolving to an array of PageEntry objects\n * containing the URL and HTML for each successfully crawled page.\n */\nasync function crawlWebsite(\n startUrl: string,\n maxDepth: number,\n logger?: Logger\n): Promise<PageEntry[]> {\n logger?.info(`Starting crawl for ${startUrl} with maxDepth ${maxDepth}`);\n \n // Don't even start a browser if maxDepth is 0\n if (maxDepth <= 0) {\n logger?.info('maxDepth is 0 or negative, no pages will be crawled.');\n return [];\n }\n \n const browser = await puppeteer.launch({ headless: true });\n const visited = new Set<string>();\n const results: PageEntry[] = [];\n // Queue stores URLs to visit and their corresponding depth\n const queue: { url: string; depth: number }[] = [];\n \n // Initialize startOrigin for same-origin check\n let startOrigin: string;\n try {\n startOrigin = new URL(startUrl).origin;\n } catch (e: any) {\n logger?.error(`Invalid start URL: ${startUrl}. ${e.message}`);\n await browser.close();\n return []; // Cannot start crawl with invalid URL\n }\n\n // Normalize start URL (remove fragment) and add to queue/visited if depth allows\n let normalizedStartUrl: string;\n try {\n const parsedStartUrl = new URL(startUrl);\n parsedStartUrl.hash = ''; // Remove fragment for consistent visited checks\n normalizedStartUrl = parsedStartUrl.href;\n } catch (e: any) {\n logger?.error(`Invalid start URL: ${startUrl}. 
${e.message}`);\n await browser.close();\n return []; // Cannot start crawl with invalid URL\n }\n\n visited.add(normalizedStartUrl);\n queue.push({ url: normalizedStartUrl, depth: 1 });\n logger?.debug(`Queued initial URL: ${normalizedStartUrl} (depth 1)`);\n\n while (queue.length > 0) {\n const { url, depth } = queue.shift()!; // Non-null assertion ok due to queue.length check\n logger?.info(`Processing: ${url} (depth ${depth})`);\n let page: puppeteer.Page | null = null;\n\n try {\n page = await browser.newPage();\n // Set a reasonable viewport, sometimes helps with rendering/layout dependent scripts\n await page.setViewport({ width: 1280, height: 800 });\n await page.goto(url, { waitUntil: 'networkidle2', timeout: 30000 });\n const html = await page.content();\n\n // Add successfully fetched page to results\n // Ensure the object structure matches your PageEntry type definition\n results.push({ url, html });\n logger?.debug(`Successfully fetched content for ${url}`);\n\n // --- Link Discovery ---\n // Only look for more links if we haven't reached the maximum depth\n if (depth < maxDepth) {\n logger?.debug(`Discovering links on ${url} (current depth ${depth}, maxDepth ${maxDepth})`);\n // Use page.evaluate to get all href attributes directly from the DOM\n const hrefs = await page.evaluate(() =>\n Array.from(document.querySelectorAll('a[href]'), a => a.getAttribute('href'))\n );\n logger?.debug(`Found ${hrefs.length} potential hrefs on ${url}`);\n\n let linksAdded = 0;\n for (const href of hrefs) {\n if (!href) continue; // Skip empty hrefs like href=\"\"\n\n let absoluteUrl: string;\n try {\n // Resolve the href relative to the current page's URL\n const resolved = new URL(href, url);\n // Remove fragment (#) for visited checks and queueing consistency\n resolved.hash = '';\n absoluteUrl = resolved.href;\n } catch (e) {\n // Ignore URLs that fail to parse (e.g., \"javascript:void(0)\")\n logger?.debug(`Ignoring invalid URL syntax: \"${href}\" on page ${url}`);\n continue;\n }\n\n // --- Filtering and Queueing ---\n // 1. Check if it belongs to the same origin as the start URL\n // 2. Check if it has already been visited (or is in the queue)\n if (absoluteUrl.startsWith(startOrigin) && !visited.has(absoluteUrl)) {\n visited.add(absoluteUrl); // Mark as visited *before* adding to queue\n queue.push({ url: absoluteUrl, depth: depth + 1 });\n linksAdded++;\n // logger?.debug(`Queueing: ${absoluteUrl} (depth ${depth + 1})`); // Verbose\n } else {\n // logger?.debug(`Skipping (external, visited, or invalid): ${absoluteUrl}`); // Verbose\n }\n }\n logger?.debug(`Added ${linksAdded} new unique internal links to queue from ${url}`);\n } else {\n logger?.debug(`Max depth (${maxDepth}) reached, not discovering links on ${url}`);\n }\n\n } catch (err: any) {\n // Log errors encountered during page processing (goto, content, evaluate)\n logger?.warn(`❌ Failed to process ${url}: ${err.message}`);\n // Optionally add error details to results or a separate error list if needed\n } finally {\n // Ensure the page is closed reliably after processing or error\n if (page) {\n try {\n await page.close();\n } catch (pageCloseError: any) {\n // Log if closing the page fails, but don't let it stop the crawl\n logger?.error(`Failed to close page for ${url}: ${pageCloseError.message}`);\n }\n }\n }\n } // End while loop\n\n logger?.info(`Crawl finished. 
Closing browser.`);\n await browser.close();\n logger?.info(`Found ${results.length} pages.`);\n return results;\n}\n\n/**\n * Fetches all internal pages of a website recursively starting from a given URL,\n * bundles them into a single HTML string using the bundler module, and writes\n * the result to a file.\n *\n * @export\n * @param {string} startUrl - The fully qualified URL to begin crawling from.\n * @param {string} outputFile - The path where the bundled HTML file should be saved.\n * @param {number} [maxDepth=1] - The maximum depth to crawl links (default: 1, only the start page).\n * @returns {Promise<{ pages: number; html: string }>} A promise resolving to an object containing\n * the number of pages successfully crawled and the final bundled HTML string.\n * @throws {Error} Throws errors if the crawl initiation fails, bundling fails, or file writing fails.\n */\nexport async function recursivelyBundleSite(\n startUrl: string,\n outputFile: string,\n maxDepth = 1\n): Promise<{ pages: number; html: string }> {\n // Create a logger instance specifically for this operation\n const logger = new Logger();\n logger.info(`Starting recursive site bundle for ${startUrl} to ${outputFile} (maxDepth: ${maxDepth})`);\n\n try {\n // Step 1: Crawl the website\n const pages: PageEntry[] = await crawlWebsite(startUrl, maxDepth, logger);\n\n if (pages.length === 0) {\n logger.warn(\"Crawl completed but found 0 pages. Output file may be empty or reflect an empty bundle.\");\n } else {\n logger.info(`Crawl successful, found ${pages.length} pages. Starting bundling.`);\n }\n\n // Step 2: Bundle the HTML content\n const bundledHtml = bundleMultiPageHTML(pages, logger); // Passing logger for consistency\n logger.info(`Bundling complete. Output size: ${Buffer.byteLength(bundledHtml, 'utf-8')} bytes.`);\n\n // Step 3: Write the bundled HTML to the output file\n logger.info(`Writing bundled HTML to ${outputFile}`);\n await fs.writeFile(outputFile, bundledHtml, 'utf-8');\n logger.info(`Successfully wrote bundled output to ${outputFile}`);\n\n // Step 4: Return the results\n return {\n pages: pages.length,\n html: bundledHtml\n };\n } catch (error: any) {\n logger.error(`Error during recursive site bundle: ${error.message}`);\n // Log the stack trace for better debugging if available\n if (error.stack) {\n logger.error(`Stack trace: ${error.stack}`);\n }\n // Re-throw the error to signal failure to the caller\n throw error;\n }\n}","/**\n * @file src/utils/meta.ts\n * @description Utility class for tracking bundle statistics like size, time,\n * asset counts, page counts, and errors during the build process.\n * Used by both CLI and API to return metadata consistently.\n */\n\nimport type { BundleMetadata } from '../types'; // Assuming types are in ../types\n\n/**\n * Tracks build performance (timing, output size) and collects metadata\n * (asset counts, page counts, errors) during the HTML bundling process.\n */\nexport class BuildTimer {\n private startTime: number;\n private input: string;\n private pagesBundled?: number; // Tracks pages for recursive bundles\n private assetCount: number = 0; // Tracks discovered/processed assets\n private errors: string[] = []; // Collects warnings/errors\n\n /**\n * Creates and starts a build timer session for a given input.\n *\n * @param {string} input - The source file path or URL being processed.\n */\n constructor(input: string) {\n this.startTime = Date.now();\n this.input = input;\n }\n\n /**\n * Explicitly sets the number of assets discovered or processed.\n * 
This might be called after asset extraction/minification.\n *\n * @param {number} count - The total number of assets.\n */\n setAssetCount(count: number): void {\n this.assetCount = count;\n }\n\n /**\n * Records a warning or error message encountered during the build.\n * These are added to the final metadata.\n *\n * @param {string} message - The warning or error description.\n */\n addError(message: string): void {\n this.errors.push(message);\n }\n\n /**\n * Sets the number of pages bundled, typically used in multi-page\n * or recursive bundling scenarios.\n *\n * @param {number} count - The number of HTML pages included in the bundle.\n */\n setPageCount(count: number): void {\n this.pagesBundled = count;\n }\n\n /**\n * Stops the timer, calculates final metrics, and returns the complete\n * BundleMetadata object. Merges any explicitly provided metadata\n * (like assetCount calculated elsewhere) with the timer's tracked data.\n *\n * @param {string} finalHtml - The final generated HTML string, used to calculate output size.\n * @param {Partial<BundleMetadata>} [extra] - Optional object containing metadata fields\n * (like assetCount or pre-calculated errors) that should override the timer's internal values.\n * @returns {BundleMetadata} The finalized metadata object for the build process.\n */\n finish(html: string, extra?: Partial<BundleMetadata>): BundleMetadata {\n const buildTimeMs = Date.now() - this.startTime;\n const outputSize = Buffer.byteLength(html || '', 'utf-8');\n\n // Combine internal errors with any errors passed in 'extra', avoiding duplicates\n // FIX: Ensure extra.errors is treated as an empty array if undefined/null\n const combinedErrors = Array.from(new Set([...this.errors, ...(extra?.errors ?? [])]));\n\n const finalMetadata: BundleMetadata = {\n input: this.input,\n outputSize,\n buildTimeMs,\n assetCount: extra?.assetCount ?? this.assetCount,\n pagesBundled: extra?.pagesBundled ?? 
this.pagesBundled,\n // Assign the combined errors array\n errors: combinedErrors,\n };\n\n // Clean up optional fields if they weren't set/provided or are empty\n if (finalMetadata.pagesBundled === undefined) {\n delete finalMetadata.pagesBundled;\n }\n // Delete errors only if the *combined* array is empty\n if (finalMetadata.errors?.length === 0) {\n delete finalMetadata.errors;\n }\n\n return finalMetadata;\n }\n}","/**\n * @file src/index.ts\n * @description\n * Main public API for the PortaPack library.\n * Provides functions to create portable HTML files from local paths or URLs,\n * including single-page fetching, recursive site crawling, and multi-page bundling.\n * It coordinates calls to various core modules (parser, extractor, minifier, packer, web-fetcher, bundler).\n */\n\n// Core processing modules\nimport { parseHTML } from './core/parser';\nimport { extractAssets } from './core/extractor';\nimport { minifyAssets } from './core/minifier';\nimport { packHTML } from './core/packer';\n// Core web fetching modules (imported with aliases)\nimport {\n fetchAndPackWebPage as coreFetchAndPack,\n recursivelyBundleSite as coreRecursivelyBundleSite\n} from './core/web-fetcher';\n// Core bundler module (for multi-page)\nimport { bundleMultiPageHTML as coreBundleMultiPageHTML } from './core/bundler';\n// Utilities\nimport { BuildTimer } from './utils/meta';\nimport { Logger } from './utils/logger';\n\n// Types\nimport type {\n BundleOptions,\n BuildResult,\n PageEntry,\n BundleMetadata // Type used in return values\n} from './types';\n\n/**\n * Generates a single, portable HTML file from a local file path or a remote URL.\n *\n * - **For local files:** Reads the file, parses it, discovers linked assets (CSS, JS, images, fonts),\n * fetches/reads asset content, optionally embeds assets as data URIs (default),\n * optionally minifies HTML/CSS/JS (default), and packs everything into a single HTML string.\n * - **For remote URLs:** Fetches the HTML content of the single specified URL using the core web-fetcher.\n * *Note: This does not process/embed assets for single remote URLs; it returns the fetched HTML as-is.*\n *\n * @export\n * @param {string} input - The local file path or remote http(s) URL of the HTML document.\n * @param {BundleOptions} [options={}] - Configuration options controlling embedding, minification,\n * base URL, logging level, etc. See `BundleOptions` type for details.\n * @param {Logger} [loggerInstance] - Optional pre-configured logger instance to use.\n * @returns {Promise<BuildResult>} A promise resolving to an object containing the final HTML string\n * and metadata (`BundleMetadata`) about the bundling process (input, size, time, assets, errors).\n * @throws {Error} Throws errors if file reading, parsing, required asset fetching, or processing fails critically.\n */\nexport async function generatePortableHTML(\n input: string,\n options: BundleOptions = {},\n loggerInstance?: Logger // Allow passing logger\n): Promise<BuildResult> {\n // Use passed logger or create one based on options. Defaults to LogLevel.INFO.\n const logger = loggerInstance || new Logger(options.logLevel);\n logger.info(`Generating portable HTML for: ${input}`);\n const timer = new BuildTimer(input); // Start timing\n\n // --- Handle Remote URLs ---\n const isRemote = /^https?:\\/\\//i.test(input);\n if (isRemote) {\n logger.info(`Input is a remote URL. 
Fetching page content directly...`);\n try {\n // Call the specific public API wrapper for fetching, passing logger and options\n const result = await fetchAndPackWebPage(input, options, logger);\n logger.info(`Remote fetch complete. Input: ${input}, Size: ${result.metadata.outputSize} bytes, Time: ${result.metadata.buildTimeMs}ms`);\n // Forward the result (which includes metadata finalized by fetchAndPackWebPage)\n return result;\n } catch (error: any) {\n logger.error(`Failed to fetch remote URL ${input}: ${error.message}`);\n throw error; // Re-throw to signal failure\n }\n }\n\n // --- Handle Local Files ---\n logger.info(`Input is a local file path. Starting local processing pipeline...`);\n // Determine base path for resolving relative assets. Default to input file's path.\n const basePath = options.baseUrl || input;\n logger.debug(`Using base path for asset resolution: ${basePath}`);\n\n try {\n // Execute the core processing steps sequentially, passing the logger\n const parsed = await parseHTML(input, logger);\n const enriched = await extractAssets(parsed, options.embedAssets ?? true, basePath, logger);\n const minified = await minifyAssets(enriched, options, logger); // Pass full options\n const finalHtml = packHTML(minified, logger);\n\n // Finalize metadata using the timer.\n // Pass assetCount calculated from the final list of processed assets.\n const metadata = timer.finish(finalHtml, {\n assetCount: minified.assets.length\n // FIX: Removed incorrect attempt to get errors from logger\n // Errors collected by the timer itself (via timer.addError) will be included automatically.\n });\n logger.info(`Local processing complete. Input: ${input}, Size: ${metadata.outputSize} bytes, Assets: ${metadata.assetCount}, Time: ${metadata.buildTimeMs}ms`);\n if (metadata.errors && metadata.errors.length > 0) {\n logger.warn(`Completed with ${metadata.errors.length} warning(s) logged in metadata.`);\n }\n\n // Include any errors collected *by the timer* in the result\n return { html: finalHtml, metadata };\n\n } catch (error: any) {\n logger.error(`Error during local processing for ${input}: ${error.message}`);\n throw error; // Re-throw critical errors\n }\n}\n\n/**\n * Crawls a website starting from a given URL up to a specified depth,\n * bundles all discovered internal HTML pages into a single multi-page file,\n * and returns the result.\n *\n * @export\n * @param {string} url - The entry point URL to start crawling. Must be http or https.\n * @param {number} [depth=1] - The maximum link depth to crawl (1 means only the starting page).\n * @param {BundleOptions} [options={}] - Configuration options. 
Primarily used for `logLevel`.\n * @param {Logger} [loggerInstance] - Optional pre-configured logger instance to use.\n * @returns {Promise<BuildResult>} A promise resolving to an object containing the bundled multi-page HTML string\n * and metadata (`BundleMetadata`) about the crawl and bundling process.\n * @throws {Error} Throws errors if the initial URL is invalid, crawling fails, or bundling fails.\n */\nexport async function generateRecursivePortableHTML(\n url: string,\n depth = 1,\n options: BundleOptions = {},\n loggerInstance?: Logger // Allow passing logger\n): Promise<BuildResult> {\n // Use passed logger or create one\n const logger = loggerInstance || new Logger(options.logLevel);\n logger.info(`Generating recursive portable HTML for: ${url}, Max Depth: ${depth}`);\n const timer = new BuildTimer(url);\n\n if (!/^https?:\\/\\//i.test(url)) {\n const errMsg = `Invalid input URL for recursive bundling: ${url}. Must start with http(s)://`;\n logger.error(errMsg);\n throw new Error(errMsg);\n }\n\n // Placeholder output path for core function (consider removing if core doesn't need it)\n const internalOutputPathPlaceholder = `${new URL(url).hostname}_recursive.html`;\n\n try {\n // Call the CORE recursive site function\n // Assuming coreRecursivelyBundleSite accepts logger as an optional argument\n const { html, pages } = await coreRecursivelyBundleSite(url, internalOutputPathPlaceholder, depth); // Pass logger if accepted\n logger.info(`Recursive crawl complete. Discovered and bundled ${pages} pages.`);\n\n // Finalize metadata\n timer.setPageCount(pages); // Store page count\n const metadata = timer.finish(html, {\n assetCount: 0, // NOTE: Asset count across multiple pages is not currently aggregated.\n pagesBundled: pages\n // TODO: Potentially collect errors from the core function if it returns them\n });\n logger.info(`Recursive bundling complete. 
Input: ${url}, Size: ${metadata.outputSize} bytes, Pages: ${metadata.pagesBundled}, Time: ${metadata.buildTimeMs}ms`);\n if (metadata.errors && metadata.errors.length > 0) {\n logger.warn(`Completed with ${metadata.errors.length} warning(s) logged in metadata.`);\n }\n\n return { html, metadata };\n\n } catch (error: any) {\n logger.error(`Error during recursive generation for ${url}: ${error.message}`);\n if (error.cause instanceof Error) { // Log cause if it's an Error\n logger.error(`Cause: ${error.cause.message}`);\n }\n throw error; // Re-throw\n }\n}\n\n/**\n * Fetches the HTML content of a single remote URL using the core web-fetcher.\n * This function acts as a public wrapper, primarily adding standardized timing and metadata.\n * It does *not* process assets within the fetched HTML.\n *\n * @export\n * @param {string} url - The remote http(s) URL to fetch.\n * @param {BundleOptions} [options={}] - Configuration options, mainly for `logLevel`.\n * @param {Logger} [loggerInstance] - Optional pre-configured logger instance to use.\n * @returns {Promise<BuildResult>} A promise resolving to the BuildResult containing the fetched HTML\n * and metadata from the fetch operation.\n * @throws {Error} Propagates errors directly from the core fetching function or if URL is invalid.\n */\nexport async function fetchAndPackWebPage(\n url: string,\n options: BundleOptions = {},\n loggerInstance?: Logger // Allow passing an existing logger\n): Promise<BuildResult> {\n // Use the passed logger or create a new one based on options\n const logger = loggerInstance || new Logger(options.logLevel);\n logger.info(`Workspaceing single remote page: ${url}`);\n const timer = new BuildTimer(url);\n\n if (!/^https?:\\/\\//i.test(url)) {\n const errMsg = `Invalid input URL for fetchAndPackWebPage: ${url}. Must start with http(s)://`;\n logger.error(errMsg);\n throw new Error(errMsg);\n }\n\n try {\n // Call the CORE fetcher function, passing the logger\n // Assuming coreFetchAndPack accepts logger as an optional second argument\n const result = await coreFetchAndPack(url, logger);\n\n // Finalize metadata using timer and data from the core result\n const metadata = timer.finish(result.html, {\n // Use assetCount and errors from core metadata if available\n assetCount: result.metadata?.assetCount ?? 0,\n errors: result.metadata?.errors ?? [] // Ensure errors array exists\n });\n logger.info(`Single page fetch complete. 
Input: ${url}, Size: ${metadata.outputSize} bytes, Assets: ${metadata.assetCount}, Time: ${metadata.buildTimeMs}ms`);\n if (metadata.errors && metadata.errors.length > 0) {\n logger.warn(`Completed with ${metadata.errors.length} warning(s) logged in metadata.`);\n }\n\n // Return HTML from core result, but use metadata finalized by this wrapper\n return { html: result.html, metadata };\n } catch (error: any) {\n logger.error(`Error during single page fetch for ${url}: ${error.message}`);\n throw error; // Re-throw original error\n }\n}\n\n/**\n * Bundles an array of pre-fetched/generated HTML pages into a single static HTML file\n * using `<template>` tags and a simple client-side hash-based router.\n * This function does not perform any asset processing on the input HTML strings.\n *\n * @export\n * @param {PageEntry[]} pages - An array of page objects, where each object has a `url` (for slug generation)\n * and `html` (the content for that page).\n * @param {BundleOptions} [options={}] - Configuration options, primarily used for `logLevel`.\n * @param {Logger} [loggerInstance] - Optional pre-configured logger instance.\n * @returns {string} A single HTML string representing the bundled multi-page document.\n */\nexport function bundleMultiPageHTML(\n pages: PageEntry[],\n options: BundleOptions = {},\n loggerInstance?: Logger // Allow passing an existing logger\n): string {\n // Use passed logger or create a new one\n const logger = loggerInstance || new Logger(options.logLevel);\n logger.info(`Bundling ${pages.length} provided pages into multi-page HTML...`);\n\n try {\n // Directly call the CORE multi-page bundler function, passing the logger\n // Assuming coreBundleMultiPageHTML accepts logger as an optional second argument\n const bundledHtml = coreBundleMultiPageHTML(pages, logger);\n logger.info(`Multi-page bundling complete.`);\n return bundledHtml;\n } catch (error: any) {\n logger.error(`Error during multi-page bundling: ${error.message}`);\n throw error; // Re-throw error\n }\n}\n\n// Optional: Export core types directly from index for easier consumption?\nexport * from './types';","/**\n * @file cli.ts\n * @description\n * Main CLI runner for PortaPack. 
Handles parsing CLI args, executing the HTML bundler,\n * writing output to disk, logging metadata, and returning structured results.\n */\n\nimport fs from 'fs'; // Use default import if mocking default below\nimport path from 'path';\nimport { fileURLToPath } from 'url';\n\nimport { parseOptions } from './options.js';\nimport { generatePortableHTML, generateRecursivePortableHTML } from '../index';\nimport type { CLIResult } from '../types';\n\nimport { LogLevel } from '../types';\n\n/**\n * Dynamically loads package.json metadata.\n */\nfunction getPackageJson(): Record<string, any> {\n try {\n const __filename = fileURLToPath(import.meta.url);\n const __dirname = path.dirname(__filename);\n const pkgPath = path.resolve(__dirname, '../../package.json');\n\n // Use fs directly, assuming mock works or it's okay in non-test env\n if (fs.existsSync(pkgPath)) {\n return JSON.parse(fs.readFileSync(pkgPath, 'utf-8'));\n }\n } catch (_) {\n // Ignore and fallback\n }\n return { version: '0.1.0' }; // Default fallback version\n}\n\n/**\n * Entry function for running the CLI.\n */\nexport async function runCli(argv: string[] = process.argv): Promise<CLIResult> {\n let stdout = '';\n let stderr = '';\n let exitCode = 0;\n\n // Capture console output for result object\n const originalLog = console.log;\n const originalErr = console.error;\n const originalWarn = console.warn;\n console.log = (...args) => { stdout += args.join(' ') + '\\n'; };\n console.error = (...args) => { stderr += args.join(' ') + '\\n'; };\n console.warn = (...args) => { stderr += args.join(' ') + '\\n'; }; // Capture warnings in stderr too\n\n let opts: ReturnType<typeof parseOptions> | undefined;\n try {\n opts = parseOptions(argv);\n const version = getPackageJson().version || '0.1.0';\n\n if (opts.verbose) {\n console.log(`📦 PortaPack v${version}`);\n }\n\n if (!opts.input) {\n console.error('❌ Missing input file or URL');\n // Restore console before returning\n console.log = originalLog; console.error = originalErr; console.warn = originalWarn;\n return { stdout, stderr, exitCode: 1 };\n }\n\n // Determine output path using nullish coalescing\n const outputPath = opts.output ?? `${path.basename(opts.input).split('.')[0] || 'output'}.packed.html`;\n\n if (opts.verbose) {\n console.log(`📥 Input: ${opts.input}`);\n console.log(`📤 Output: ${outputPath}`);\n // Log other effective options if verbose\n console.log(` Recursive: ${opts.recursive ?? false}`);\n console.log(` Embed Assets: ${opts.embedAssets}`);\n console.log(` Minify HTML: ${opts.minifyHtml}`);\n console.log(` Minify CSS: ${opts.minifyCss}`);\n console.log(` Minify JS: ${opts.minifyJs}`);\n console.log(` Log Level: ${LogLevel[opts.logLevel ?? LogLevel.INFO]}`);\n }\n\n if (opts.dryRun) {\n console.log('💡 Dry run mode — no output will be written');\n // Restore console before returning\n console.log = originalLog; console.error = originalErr; console.warn = originalWarn;\n return { stdout, stderr, exitCode: 0 };\n }\n\n // --- FIX: Pass 'opts' object to generate functions ---\n const result = opts.recursive\n // Convert boolean recursive flag to depth 1 if needed, otherwise use number\n ? await generateRecursivePortableHTML(opts.input, typeof opts.recursive === 'boolean' ? 
1 : opts.recursive, opts)\n : await generatePortableHTML(opts.input, opts);\n // ----------------------------------------------------\n\n // Use fs directly - ensure mock is working in tests\n fs.writeFileSync(outputPath, result.html, 'utf-8');\n\n const meta = result.metadata;\n console.log(`✅ Packed: ${meta.input} → ${outputPath}`);\n console.log(`📦 Size: ${(meta.outputSize / 1024).toFixed(2)} KB`);\n console.log(`⏱️ Time: ${meta.buildTimeMs} ms`); // Use alternative emoji\n console.log(`🖼️ Assets: ${meta.assetCount}`); // Add asset count log\n\n if (meta.pagesBundled && meta.pagesBundled > 0) { // Check > 0 for clarity\n console.log(`🧩 Pages: ${meta.pagesBundled}`);\n }\n\n if (meta.errors && meta.errors.length > 0) {\n console.warn(`\\n⚠️ ${meta.errors.length} warning(s):`); // Add newline for separation\n for (const err of meta.errors) {\n console.warn(` - ${err}`);\n }\n }\n } catch (err: any) {\n console.error(`\\n💥 Error: ${err?.message || 'Unknown failure'}`); // Add newline\n if (err?.stack && opts?.verbose) { // Show stack only if verbose\n console.error(err.stack);\n }\n exitCode = 1;\n } finally {\n // Restore original console methods\n console.log = originalLog;\n console.error = originalErr;\n console.warn = originalWarn;\n }\n\n return { stdout, stderr, exitCode };\n}\n\n// Optional: Define main export if this file is intended to be run directly\nexport const main = runCli;\n\n// Example direct execution (usually handled by bin entry in package.json)\n// if (require.main === module) {\n// runCli();\n// }","/**\n * @file cli-entry.ts\n * @description\n * Node.js entry point for PortaPack CLI (compatible with ESM).\n * \n * Supports:\n * - Direct execution: `node cli-entry.js`\n * - Programmatic import for testing: `import { startCLI } from './cli-entry'`\n */\n\nimport type { CLIResult } from '../types';\n\n/**\n * Starts the CLI by importing and invoking the main CLI logic.\n * \n * @returns {Promise<CLIResult>} - Exit code and any captured output\n */\nconst startCLI = async (): Promise<CLIResult> => {\n const { main } = await import('./cli.js');\n return await main(process.argv);\n};\n\n// If executed directly from the command line, run and exit.\nif (import.meta.url === `file://${process.argv[1]}`) {\n startCLI().then(({ exitCode }) => process.exit(Number(exitCode))); // Cast exitCode to Number\n}\n\nexport { startCLI 
};\n"],"mappings":";;;;;;;;;;;;AAAA,IA+FY;AA/FZ;AAAA;AAAA;AA+FO,IAAK,WAAL,kBAAKA,cAAL;AACL,MAAAA,oBAAA,UAAO,KAAP;AACA,MAAAA,oBAAA,WAAQ,KAAR;AACA,MAAAA,oBAAA,UAAO,KAAP;AACA,MAAAA,oBAAA,UAAO,KAAP;AACA,MAAAA,oBAAA,WAAQ,KAAR;AALU,aAAAA;AAAA,OAAA;AAAA;AAAA;;;ACzFZ,SAAS,SAAS,cAAc;AAgBhC,SAAS,oBAAoB,KAA2C;AACpE,MAAI,QAAQ,OAAW,QAAO;AAC9B,QAAM,SAAS,SAAS,KAAK,EAAE;AAE/B,SAAO,MAAM,MAAM,KAAK,SAAS,IAAI,OAAO;AAChD;AAYO,SAAS,aAAa,OAAiB,QAAQ,MAAkB;AACpE,QAAM,UAAU,IAAI,QAAQ;AAE5B,UACK,KAAK,WAAW,EAChB,QAAQ,OAAO,EACf,YAAY,iEAA0D,EACtE,SAAS,WAAW,wBAAwB,EAC5C,OAAO,uBAAuB,kBAAkB,EAChD,OAAO,gBAAgB,yCAAyC,EAChE,OAAO,eAAe,0BAA0B,EAChD,OAAO,oBAAoB,2BAA2B,EACtD,OAAO,mBAAmB,0BAA0B,EACpD,OAAO,kBAAkB,iCAAiC,EAC1D,OAAO,sBAAsB,2BAA2B,EACxD,OAAO,qBAAqB,oCAAoC,EAChE,OAAO,2BAA2B,2CAA2C,mBAAmB,EAChG,OAAO,mBAAmB,wDAAwD,QAAQ,EAC1F,OAAO,wBAAwB,uCAAuC,EACtE,OAAO,iBAAiB,iCAAiC,EACzD,OAAO,iBAAiB,gCAAgC,EACxD,UAAU,IAAI,OAAO,uBAAuB,mBAAmB,EAC3D,QAAQ,SAAS,CAAC;AAK3B,UAAQ,MAAM,IAAI;AAGlB,QAAM,OAAO,QAAQ,KAAiB;AAEtC,QAAM,WAAW,QAAQ,KAAK,SAAS,IAAI,QAAQ,KAAK,CAAC,IAAI;AAG7D,MAAI;AACJ,QAAM,cAAc,KAAK;AACzB,MAAI,aAAa;AAEb,YAAQ,aAAa;AAAA,MACjB,KAAK;AAAS;AAAgC;AAAA,MAC9C,KAAK;AAAQ;AAA+B;AAAA,MAC5C,KAAK;AAAQ;AAA+B;AAAA,MAC5C,KAAK;AAAS;AAAgC;AAAA,MAC9C,KAAK;AAAA,MAAU,KAAK;AAAQ;AAA+B;AAAA,MAC3D;AAAS;AAAA,IACb;AAAA,EACJ,WAAW,KAAK,SAAS;AAErB;AAAA,EACJ,OAAO;AAEH;AAAA,EACJ;AAKA,MAAI,cAAc;AAClB,MAAI,KAAK,SAAS,mBAAmB,GAAG;AACnC,kBAAc;AAAA,EACnB,WAAW,KAAK,gBAAgB,MAAM;AACjC,kBAAc;AAAA,EACnB;AAKA,MAAI,aAAa,KAAK,eAAe;AACrC,MAAI,YAAY,KAAK,cAAc;AACnC,MAAI,WAAW,KAAK,aAAa;AAIjC,MAAI,KAAK,WAAW,OAAO;AACvB,iBAAa;AACb,gBAAY;AACZ,eAAW;AAAA,EACf;AAMA,MAAI,eAAe,KAAK;AAExB,MAAI,KAAK,aAAa,UAAa,CAAC,MAAM,KAAK,QAAQ,KAAK,KAAK,YAAY,GAAG;AAC5E,mBAAe,KAAK;AAAA,EACxB;AAGA,SAAO;AAAA;AAAA,IAEH,SAAS,KAAK;AAAA,IACd,QAAQ,KAAK,UAAU;AAAA;AAAA,IACvB,QAAQ,KAAK;AAAA,IACb,SAAS,KAAK,WAAW;AAAA;AAAA;AAAA,IAGzB,OAAO;AAAA,IACP,UAAU;AAAA,IACV,WAAW;AAAA;AAAA,IACX;AAAA;AAAA,IACA;AAAA;AAAA,IACA;AAAA;AAAA,IACA;AAAA;AAAA;AAAA;AAAA;AAAA,EAKJ;AACJ;AAtJA,IAaM;AAbN;AAAA;AAAA;AASA;AAIA,IAAM,YAA4B,CAAC,SAAS,QAAQ,QAAQ,SAAS,UAAU,MAAM;AAAA;AAAA;;;ACRrF,OAAO,UAAU;AAyDV,SAAS,cAAc,WAA+D;AACzF,MAAI,CAAC,WAAW;AACZ,WAAO;AAAA,EACX;AAEA,MAAI,MAAM;AACV,MAAI;AAEA,UAAM,YAAY,IAAI,IAAI,SAAS;AACnC,UAAM,KAAK,QAAQ,UAAU,QAAQ,EAAE,YAAY;AAAA,EACvD,QAAQ;AAEJ,UAAM,KAAK,QAAQ,SAAS,EAAE,YAAY;AAAA,EAC9C;AAEA,SAAO,SAAS,GAAG,KAAK;AAC5B;AA9EA,IAWM,UAsCA;AAjDN;AAAA;AAAA;AAWA,IAAM,WAAuE;AAAA;AAAA,MAEzE,QAAQ,EAAE,MAAM,YAAY,WAAW,MAAM;AAAA;AAAA,MAE7C,OAAO,EAAE,MAAM,0BAA0B,WAAW,KAAK;AAAA,MACzD,QAAQ,EAAE,MAAM,0BAA0B,WAAW,KAAK;AAAA;AAAA,MAE1D,QAAQ,EAAE,MAAM,aAAa,WAAW,QAAQ;AAAA,MAChD,QAAQ,EAAE,MAAM,cAAc,WAAW,QAAQ;AAAA,MACjD,SAAS,EAAE,MAAM,cAAc,WAAW,QAAQ;AAAA,MAClD,QAAQ,EAAE,MAAM,aAAa,WAAW,QAAQ;AAAA,MAChD,QAAQ,EAAE,MAAM,iBAAiB,WAAW,QAAQ;AAAA,MACpD,SAAS,EAAE,MAAM,cAAc,WAAW,QAAQ;AAAA,MAClD,QAAQ,EAAE,MAAM,gBAAgB,WAAW,QAAQ;AAAA,MACnD,SAAS,EAAE,MAAM,cAAc,WAAW,QAAQ;AAAA;AAAA,MAElD,SAAS,EAAE,MAAM,aAAa,WAAW,OAAO;AAAA,MAChD,UAAU,EAAE,MAAM,cAAc,WAAW,OAAO;AAAA,MAClD,QAAQ,EAAE,MAAM,YAAY,WAAW,OAAO;AAAA,MAC9C,QAAQ,EAAE,MAAM,YAAY,WAAW,OAAO;AAAA,MAC9C,QAAQ,EAAE,MAAM,iCAAiC,WAAW,OAAO;AAAA;AAAA,MAEnE,QAAQ,EAAE,MAAM,cAAc,WAAW,QAAQ;AAAA,MACjD,QAAQ,EAAE,MAAM,aAAa,WAAW,QAAQ;AAAA,MAChD,QAAQ,EAAE,MAAM,aAAa,WAAW,QAAQ;AAAA,MAChD,QAAQ,EAAE,MAAM,aAAa,WAAW,QAAQ;AAAA,MAChD,SAAS,EAAE,MAAM,cAAc,WAAW,QAAQ;AAAA;AAAA,MAElD,SAAS,EAAE,MAAM,oBAAoB,WAAW,QAAQ;AAAA,MACxD,gBAAgB,EAAE,MAAM,6BAA6B,WAAW,QAAQ;AAAA,MACxE,QAAQ,EAAE,MAAM,mBAAmB,WAAW,QAAQ;AAAA,MACtD,SAAS,EAAE,MAAM,aAAa,WAAW,QAAQ;AAAA;AAAA,MACjD,QAAQ,EAAE,MAAM,cAAc,WAAW,QAAQ;AAAA,IACrD;AAKA,IAAM,oBAAoB;AAAA,MACtB,MAAM;AAAA,MACN,
WAAW;AAAA;AAAA,IACf;AAAA;AAAA;;;ACzCA,SAAS,gBAAgB;AAGzB,YAAY,aAAa;AAqBzB,eAAsB,UAAU,eAAuB,QAAsC;AACzF,UAAQ,MAAM,sBAAsB,aAAa,EAAE;AACnD,MAAI;AACJ,MAAI;AAEA,kBAAc,MAAM,SAAS,eAAe,OAAO;AACnD,YAAQ,MAAM,gCAAgC,OAAO,WAAW,WAAW,CAAC,UAAU;AAAA,EAC1F,SAAS,KAAU;AACf,YAAQ,MAAM,6BAA6B,aAAa,MAAM,IAAI,OAAO,EAAE;AAC3E,UAAM,IAAI,MAAM,mCAAmC,aAAa,IAAI,EAAE,OAAO,IAAI,CAAC;AAAA,EACtF;AAEA,QAAM,IAAwB,aAAK,WAAW;AAC9C,QAAM,SAAkB,CAAC;AACzB,QAAM,YAAY,oBAAI,IAAY;AAGlC,QAAM,WAAW,CAAC,KAAc,eAAqC;AACjE,QAAI,CAAC,OAAO,IAAI,KAAK,MAAM,MAAM,IAAI,WAAW,OAAO,GAAG;AACtD;AAAA,IACJ;AACA,QAAI,CAAC,UAAU,IAAI,GAAG,GAAG;AACrB,gBAAU,IAAI,GAAG;AACjB,YAAM,WAAW,cAAc,GAAG;AAClC,YAAM,OAAO,cAAc,SAAS;AACpC,aAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AACzB,cAAQ,MAAM,2BAA2B,IAAI,WAAW,GAAG,GAAG;AAAA,IAClE,OAAO;AACF,cAAQ,MAAM,iCAAiC,GAAG,EAAE;AAAA,IACzD;AAAA,EACJ;AAEA,UAAQ,MAAM,qCAAqC;AAInD,IAAE,8BAA8B,EAAE,KAAK,CAAC,GAAG,OAAO;AAC9C,aAAS,EAAE,EAAE,EAAE,KAAK,MAAM,GAAG,KAAK;AAAA,EACtC,CAAC;AAED,IAAE,aAAa,EAAE,KAAK,CAAC,GAAG,OAAO;AAC7B,aAAS,EAAE,EAAE,EAAE,KAAK,KAAK,GAAG,IAAI;AAAA,EACpC,CAAC;AAED,IAAE,UAAU,EAAE,KAAK,CAAC,GAAG,OAAO,SAAS,EAAE,EAAE,EAAE,KAAK,KAAK,GAAG,OAAO,CAAC;AAClE,IAAE,0BAA0B,EAAE,KAAK,CAAC,GAAG,OAAO,SAAS,EAAE,EAAE,EAAE,KAAK,KAAK,GAAG,OAAO,CAAC;AAElF,IAAE,qCAAqC,EAAE,KAAK,CAAC,GAAG,OAAO;AACrD,UAAM,SAAS,EAAE,EAAE,EAAE,KAAK,QAAQ;AAClC,YAAQ,MAAM,GAAG,EAAE,QAAQ,WAAS;AAChC,YAAM,CAAC,GAAG,IAAI,MAAM,KAAK,EAAE,MAAM,KAAK;AACtC,eAAS,KAAK,OAAO;AAAA,IACzB,CAAC;AAAA,EACL,CAAC;AAED,IAAE,YAAY,EAAE,KAAK,CAAC,GAAG,OAAO,SAAS,EAAE,EAAE,EAAE,KAAK,KAAK,GAAG,OAAO,CAAC;AACpE,IAAE,eAAe,EAAE,KAAK,CAAC,GAAG,OAAO,SAAS,EAAE,EAAE,EAAE,KAAK,QAAQ,GAAG,OAAO,CAAC;AAE1E,IAAE,YAAY,EAAE,KAAK,CAAC,GAAG,OAAO,SAAS,EAAE,EAAE,EAAE,KAAK,KAAK,GAAG,OAAO,CAAC;AAEpE,IAAE,qBAAqB,EAAE,KAAK,CAAC,GAAG,OAAO,SAAS,EAAE,EAAE,EAAE,KAAK,KAAK,GAAG,OAAO,CAAC;AAC7E,IAAE,qBAAqB,EAAE,KAAK,CAAC,GAAG,OAAO,SAAS,EAAE,EAAE,EAAE,KAAK,KAAK,GAAG,OAAO,CAAC;AAE7E,IAAE,YAAY,EAAE,OAAO,CAAC,GAAG,OAAO;AAC9B,UAAM,MAAM,EAAE,EAAE,EAAE,KAAK,KAAK,GAAG,YAAY,KAAK;AAChD,WAAO,CAAC,QAAQ,iBAAiB,oBAAoB,UAAU,EAAE,SAAS,GAAG;AAAA,EACjF,CAAC,EAAE,KAAK,CAAC,GAAG,OAAO;AACd,UAAM,MAAM,EAAE,EAAE,EAAE,KAAK,KAAK,GAAG,YAAY,KAAK;AAChD,UAAM,SAAS,CAAC,QAAQ,iBAAiB,kBAAkB,EAAE,SAAS,GAAG;AACzE,aAAS,EAAE,EAAE,EAAE,KAAK,MAAM,GAAG,SAAS,UAAU,MAAS;AAAA,EAC7D,CAAC;AAEF,IAAE,sCAAsC,EAAE,KAAK,CAAC,GAAG,OAAO;AACtD,aAAS,EAAE,EAAE,EAAE,KAAK,MAAM,GAAG,MAAM;AAAA,EACvC,CAAC;AAGD,UAAQ,KAAK,qCAAqC,OAAO,MAAM,sBAAsB;AACrF,SAAO,EAAE,aAAa,OAAO;AACjC;AAlHA;AAAA;AAAA;AAkBA;AAAA;AAAA;;;ACVA,SAAS,YAAAC,iBAAgB;AACzB,YAAY,QAAQ;AAEpB,OAAOC,WAAU;AACjB,SAAS,eAAe,OAAAC,YAAW;AAGnC,YAAY,WAAW;AAwBvB,SAAS,oBAAoB,gBAAwB,eAAgC;AACjF,MAAI;AACA,UAAM,kBAAkB,OAAO,KAAK,eAAe,OAAO;AAC1D,WAAO,CAAC,eAAe,OAAO,eAAe;AAAA,EACjD,SAAS,GAAG;AACR,WAAO;AAAA,EACX;AACJ;AAQA,SAAS,iBAAiB,gBAAwB,QAAqC;AACnF,UAAQ,MAAM,mCAAmC,cAAc,EAAE;AACjE,MAAI,CAAC,gBAAgB;AACjB,YAAQ,KAAK,gEAAgE;AAC7E,WAAO;AAAA,EACX;AAEA,MAAI;AACA,QAAI,gBAAgB,KAAK,cAAc,GAAG;AACtC,YAAM,MAAM,IAAIA,KAAI,cAAc;AAClC,UAAI,WAAW,IAAI,SAAS,UAAU,GAAG,IAAI,SAAS,YAAY,GAAG,IAAI,CAAC;AAC1E,UAAI,SAAS;AAAI,UAAI,OAAO;AAC5B,YAAM,UAAU,IAAI;AACpB,cAAQ,MAAM,+BAA+B,OAAO,EAAE;AACtD,aAAO;AAAA,IACX,WACS,eAAe,SAAS,KAAK,KAAK,CAAC,eAAe,WAAW,OAAO,GAAG;AAC5E,cAAQ,KAAK,UAAU,cAAc,iFAAiF;AACtH,aAAO;AAAA,IACX,OACK;AACD,UAAI;AACJ,UAAI,eAAe,WAAW,OAAO,GAAG;AACpC,YAAI;AAAE,yBAAe,cAAc,cAAc;AAAA,QAAG,SAC7C,GAAQ;AAAE,kBAAQ,MAAM,yCAAkC,cAAc,cAAc,EAAE,OAAO,EAAE;AAAG,iBAAO;AAAA,QAAW;AAAA,MACjI,OAAO;AACH,uBAAeD,MAAK,QAAQ,cAAc;AAAA,MAC9C;AACA,UAAI,cAAc;AAClB,UAAI;AAAE,sBAAiB,YAAS,YAAY,EAAE,YAAY;AAAA,MAAG,SACtD,WAAoB;AACvB,YAAI,qBAAqB,SAAU,UAAoC,SAAS,UAAU;AACtF,kBAAQ,MAAM,SAAS,YAAY,oFA
AoF;AAAA,QAC3H,OAAO;AACH,kBAAQ,KAAK,8BAA8B,YAAY,oCAAoC,qBAAqB,QAAQ,UAAU,UAAU,OAAO,SAAS,CAAC,qCAAqC;AAAA,QACtM;AACA,sBAAc;AAAA,MAClB;AACA,YAAM,UAAU,cAAc,eAAeA,MAAK,QAAQ,YAAY;AACtE,UAAI,uBAAuB,QAAQ,QAAQ,OAAO,GAAG;AACrD,UAAI,aAAa,KAAK,oBAAoB,KAAK,CAAC,qBAAqB,WAAW,GAAG,GAAG;AAClF,+BAAuB,MAAM;AAAA,MACjC;AACA,YAAM,UAAU,IAAIC,KAAI,YAAY,oBAAoB;AACxD,UAAI,gBAAgB,QAAQ;AAC5B,UAAI,CAAC,cAAc,SAAS,GAAG,GAAG;AAAE,yBAAiB;AAAA,MAAK;AAC1D,cAAQ,MAAM,8BAA8B,aAAa,WAAW,cAAc,mBAAmB,OAAO,YAAY,WAAW,GAAG;AACtI,aAAO;AAAA,IACX;AAAA,EACJ,SAAS,OAAgB;AACrB,UAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,YAAQ,MAAM,+CAAwC,cAAc,MAAM,OAAO,GAAG,iBAAiB,QAAQ,aAAa,MAAM,KAAK,KAAK,EAAE,EAAE;AAC9I,WAAO;AAAA,EACX;AACJ;AASA,SAAS,gBAAgB,UAAkB,gBAAyB,QAA6B;AAC7F,QAAM,aAAa,UAAU,KAAK;AAClC,MAAI,CAAC,cAAc,WAAW,WAAW,OAAO,KAAK,WAAW,WAAW,GAAG,GAAG;AAC7E,WAAO;AAAA,EACX;AACA,MAAI,gBAAgB;AACpB,MAAI,cAAc,WAAW,IAAI,KAAK,gBAAgB;AAClD,QAAI;AACA,YAAM,OAAO,IAAIA,KAAI,cAAc;AACnC,sBAAgB,KAAK,WAAW;AAAA,IACpC,SAAS,GAAG;AACR,cAAQ,KAAK,yCAAyC,cAAc,gCAAgC,UAAU,cAAc;AAC5H,aAAO;AAAA,IACX;AAAA,EACJ;AACA,MAAI;AACA,UAAM,WAAW,IAAIA,KAAI,eAAe,cAAc;AACtD,WAAO;AAAA,EACX,SAAS,OAAgB;AACrB,UAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,QAAI,CAAC,YAAY,KAAK,aAAa,KAAK,CAAC,cAAc,WAAW,GAAG,KAAK,CAAC,gBAAgB;AACvF,cAAQ,KAAK,gCAAgC,aAAa,sDAAsD;AAAA,IACpH,OAAO;AACH,cAAQ,KAAK,6CAAmC,aAAa,KAAK,iBAAiB,mBAAmB,iBAAiB,MAAM,oBAAoB,KAAK,OAAO,EAAE;AAAA,IACnK;AACA,WAAO;AAAA,EACX;AACJ;AAWA,SAAS,sBACL,aACA,mBACA,QACa;AAEb,MAAI,CAAC,eAAe,YAAY,WAAW,OAAO,GAAG;AACjD,WAAO;AAAA,EACX;AAEA,MAAI;AACA,QAAI,kBAAkB,WAAW,OAAO,GAAG;AAEvC,YAAM,WAAW,cAAc,iBAAiB;AAKhD,UAAI;AACJ,UAAI;AACA,cAAM,OAAU,YAAS,QAAQ;AACjC,YAAI,KAAK,YAAY,GAAG;AACpB,mBAAS;AAAA,QACb,OAAO;AACH,mBAASD,MAAK,QAAQ,QAAQ;AAAA,QAClC;AAAA,MACJ,QAAQ;AAEJ,iBAASA,MAAK,QAAQ,QAAQ;AAAA,MAClC;AAGA,UAAI,eAAeA,MAAK,QAAQ,QAAQ,WAAW;AACnD,qBAAe,aAAa,QAAQ,OAAO,GAAG;AAG9C,UAAI,WAAW,KAAK,YAAY,KAAK,CAAC,aAAa,WAAW,GAAG,GAAG;AAChE,uBAAe,MAAM;AAAA,MACzB;AACA,aAAO,UAAU,YAAY;AAAA,IACjC,OAAO;AAEH,aAAO,IAAIC,KAAI,aAAa,iBAAiB,EAAE;AAAA,IACnD;AAAA,EACJ,SAAS,OAAO;AACZ,YAAQ;AAAA,MACJ,+BAA+B,WAAW,cAAc,iBAAiB,MAAM,OAAO,KAAK,CAAC;AAAA,IAChG;AACA,WAAO;AAAA,EACX;AACJ;AAmBA,eAAe,WAAW,aAAkB,QAAiB,UAAkB,KAA+B;AAC1G,UAAQ,MAAM,8BAA8B,YAAY,IAAI,EAAE;AAC9D,QAAM,WAAW,YAAY;AAE7B,MAAI;AACA,QAAI,aAAa,WAAW,aAAa,UAAU;AAC/C,YAAM,WAAuC,MAAY,cAAQ,IAAI,YAAY,MAAM;AAAA,QACnF,cAAc;AAAA,QAAe;AAAA,MACjC,CAAC;AACD,cAAQ,MAAM,4BAA4B,YAAY,IAAI,aAAa,SAAS,MAAM,WAAW,SAAS,QAAQ,cAAc,KAAK,KAAK,WAAW,SAAS,KAAK,UAAU,SAAS;AACtL,aAAO,OAAO,KAAK,SAAS,IAAI;AAAA,IACpC,WAAW,aAAa,SAAS;AAC7B,UAAI;AACJ,UAAI;AACC,mBAAW,cAAc,WAAW;AAAA,MACxC,SAAS,GAAQ;AAEb,gBAAQ,MAAM,uCAAuC,YAAY,IAAI,YAAY,EAAE,OAAO,EAAE;AAC5F,eAAO;AAAA,MACX;AAED,YAAM,OAAO,MAAMF,UAAS,QAAQ;AACpC,cAAQ,MAAM,mBAAmB,QAAQ,KAAK,KAAK,UAAU,SAAS;AACtE,aAAO;AAAA,IACX,OAAO;AACH,cAAQ,KAAK,yBAAyB,QAAQ,aAAa,YAAY,IAAI,EAAE;AAC7E,aAAO;AAAA,IACX;AAAA,EACJ,SAAS,OAAgB;AAIrB,SAAK,aAAa,WAAW,aAAa,aAAmB,cAAQ,aAAa,KAAK,GAAG;AACtF,YAAM,SAAS,MAAM,UAAU,UAAU;AACzC,YAAM,aAAa,MAAM,UAAU,cAAc;AACjD,YAAM,OAAO,MAAM,QAAQ;AAC3B,YAAM,UAAU,MAAM;AAEtB,YAAM,aAAa,6CAAmC,YAAY,IAAI,YAAY,MAAM,MAAM,UAAU,WAAW,IAAI,cAAc,OAAO;AAC5I,cAAQ,KAAK,UAAU;AAAA,IAC3B,WAES,aAAa,SAAS;AAE3B,UAAI,aAAa,YAAY;AAC7B,UAAI;AAAE,qBAAa,cAAc,WAAW;AAAA,MAAG,QAAQ;AAAA,MAA2D;AAElH,UAAI,iBAAiB,SAAU,MAAgC,SAAS,UAAU;AAC9E,gBAAQ,KAAK,mDAAyC,UAAU,GAAG;AAAA,MACvE,WAAW,iBAAiB,SAAU,MAAgC,SAAS,UAAU;AACrF,gBAAQ,KAAK,0DAAgD,UAAU,GAAG;AAAA,MAC9E,WAAW,iBAAiB,OAAO;AAC9B,gBAAQ,KAAK,2CAAiC,UAAU,KAAK,MAAM,OAAO,EAAE;AAAA,MACjF,OAAO;AACF,gBAAQ,KAAK,oEAA0D,UAAU,KAAK,OAAO,KAAK,CAAC,EAAE;AAAA,MAC1G;AAAA,IACJ,WAKS,iBAAiB,OAAO;AAC7B,cAAQ,KAAK,8DAAoD,YA
AY,IAAI,KAAK,MAAM,OAAO,EAAE;AAAA,IACzG,OAAO;AACH,cAAQ,KAAK,0EAAgE,YAAY,IAAI,KAAK,OAAO,KAAK,CAAC,EAAE;AAAA,IACrH;AACA,WAAO;AAAA,EACX;AACJ;AAcA,SAAS,mBACL,YACA,mBAGA,QACO;AACP,QAAM,kBAA2B,CAAC;AAClC,QAAM,uBAAuB,oBAAI,IAAY;AAE7C,QAAM,WAAW;AACjB,QAAM,cAAc;AAEpB,QAAM,kBAAkB,CAAC,QAA4B,aAAkC;AACnF,QAAI,CAAC,UAAU,OAAO,KAAK,MAAM,MAAM,OAAO,WAAW,OAAO,EAAG;AAEnE,UAAM,cAAc,sBAAsB,QAAQ,mBAAmB,MAAM;AAG3E,QAAI,eAAe,CAAC,qBAAqB,IAAI,WAAW,GAAG;AACvD,2BAAqB,IAAI,WAAW;AACpC,YAAM,EAAE,UAAU,IAAI,cAAc,WAAW;AAG/C,sBAAgB,KAAK;AAAA,QACjB,MAAM;AAAA,QACN,KAAK;AAAA;AAAA,QACL,SAAS;AAAA,MACb,CAAC;AACD,cAAQ,MAAM,qBAAqB,SAAS,WAAW,QAAQ,YAAY,iBAAiB,KAAK,WAAW,EAAE;AAAA,IAClH;AAAA,EACJ;AAGA,WAAS,YAAY;AACrB,cAAY,YAAY;AACxB,MAAI;AACJ,UAAQ,QAAQ,SAAS,KAAK,UAAU,OAAO,MAAM;AACjD,oBAAgB,MAAM,CAAC,GAAG,OAAO;AAAA,EACrC;AACA,cAAY,YAAY;AACxB,UAAQ,QAAQ,YAAY,KAAK,UAAU,OAAO,MAAM;AACpD,oBAAgB,MAAM,CAAC,KAAK,MAAM,CAAC,GAAG,SAAS;AAAA,EACnD;AAEA,SAAO;AACX;AAwBA,eAAsB,cAClB,QACA,cAAc,MACd,gBACA,QACmB;AACnB,UAAQ,KAAK,+CAAwC,WAAW,YAAY,kBAAkB,qBAAqB,EAAE;AAErH,QAAM,gBAAyB,OAAO,UAAU,CAAC;AAEjD,QAAM,iBAAiB,oBAAI,IAAmB;AAE9C,MAAI,kBAA2B,CAAC;AAGhC,QAAM,qBAAqB,iBAAiB,kBAAkB,IAAI,MAAM;AACxE,MAAI,CAAC,sBAAsB,cAAc,KAAK,OAAK,CAAC,YAAY,KAAK,EAAE,GAAG,KAAK,CAAC,EAAE,IAAI,WAAW,OAAO,KAAK,CAAC,EAAE,IAAI,WAAW,GAAG,KAAK,CAAC,EAAE,IAAI,WAAW,GAAG,CAAC,GAAG;AAC5J,YAAQ,KAAK,yHAAkH;AAAA,EACnI,WAAW,oBAAoB;AAC3B,YAAQ,MAAM,gCAAgC,kBAAkB,EAAE;AAAA,EACtE;AAKA,QAAM,wBAAwB,oBAAI,IAAY;AAG9C,UAAQ,MAAM,YAAY,cAAc,MAAM,qCAAqC;AACnF,aAAW,SAAS,eAAe;AAE/B,UAAM,iBAAiB,gBAAgB,MAAM,KAAK,oBAAoB,MAAM;AAE5E,UAAM,aAAa,iBAAiB,eAAe,OAAO,MAAM;AAGhE,QAAI,CAAC,WAAW,WAAW,OAAO,KAAK,CAAC,sBAAsB,IAAI,UAAU,GAAG;AAC3E,4BAAsB,IAAI,UAAU;AAGpC,YAAM,EAAE,WAAW,YAAY,IAAI,cAAc,UAAU;AAC3D,YAAM,cAAc,MAAM,QAAQ;AAGlC,sBAAgB,KAAK;AAAA,QACjB,KAAK;AAAA,QACL,MAAM;AAAA,QACN,SAAS;AAAA,MACb,CAAC;AACD,cAAQ,MAAM,6BAA6B,UAAU,mBAAmB,MAAM,GAAG,GAAG;AAAA,IACxF,WAAW,WAAW,WAAW,OAAO,GAAG;AACtC,cAAQ,MAAM,0BAA0B,WAAW,UAAU,GAAG,EAAE,CAAC,KAAK;AAAA,IAC7E,OAAO;AACF,cAAQ,MAAM,8CAA8C,UAAU,EAAE;AAAA,IAC7E;AAAA,EACJ;AAGA,MAAI,iBAAiB;AACrB,SAAO,gBAAgB,SAAS,GAAG;AAC/B;AACA,QAAI,iBAAiB,iCAAiC;AAClD,cAAQ,MAAM,8CAAuC,+BAA+B,cAAc;AAClG,YAAM,gBAAgB,gBAAgB,IAAI,OAAK,EAAE,GAAG,EAAE,MAAM,GAAG,EAAE,EAAE,KAAK,IAAI;AAC5E,cAAQ,MAAM,2BAA2B,gBAAgB,MAAM,YAAY,aAAa,KAAK;AAE7F,sBAAgB,QAAQ,WAAS;AAC7B,YAAI,CAAC,eAAe,IAAI,MAAM,GAAG,GAAG;AAC/B,yBAAe,IAAI,MAAM,KAAK,EAAE,GAAG,OAAO,SAAS,OAAU,CAAC;AAAA,QACnE;AAAA,MACJ,CAAC;AACD,wBAAkB,CAAC;AACnB;AAAA,IACJ;AAGA,UAAM,eAAe,CAAC,GAAG,eAAe;AACxC,sBAAkB,CAAC;AAEnB,YAAQ,MAAM,wBAAwB,cAAc,KAAK,aAAa,MAAM,eAAe;AAE3F,eAAW,SAAS,cAAc;AAE9B,UAAI,eAAe,IAAI,MAAM,GAAG,GAAG;AAC/B,gBAAQ,MAAM,wCAAwC,MAAM,GAAG,EAAE;AACjE;AAAA,MACJ;AAEA,UAAI,qBAAoC;AACxC,UAAI,eAAmC;AACvC,UAAI,uBAA2C;AAG/C,YAAM,gBAAgB,eAAe,MAAM,SAAS;AACpD,UAAI,cAA0B;AAE9B,UAAI,eAAe;AAEf,YAAI;AACA,wBAAc,IAAIE,KAAI,MAAM,GAAG;AAAA,QACnC,SAAS,UAAU;AACf,kBAAQ,KAAK,iCAAiC,MAAM,GAAG,6BAA6B,oBAAoB,QAAQ,SAAS,UAAU,OAAO,QAAQ,CAAC,EAAE;AACrJ,yBAAe,IAAI,MAAM,KAAK,EAAE,GAAG,OAAO,SAAS,OAAU,CAAC;AAC9D;AAAA,QACJ;AAGA,YAAI,aAAa;AACb,+BAAqB,MAAM,WAAW,aAAa,MAAM;AAAA,QAC7D;AAAA,MACJ;AAGA,UAAI,iBAAiB,uBAAuB,MAAM;AAC9C,gBAAQ,MAAM,iBAAiB,MAAM,GAAG,wCAAwC;AAChF,uBAAe,IAAI,MAAM,KAAK,EAAE,GAAG,OAAO,SAAS,OAAU,CAAC;AAC9D;AAAA,MACJ;AAGA,UAAI,oBAAoB;AACnB,cAAM,WAAW,cAAc,MAAM,GAAG;AACxC,cAAM,gBAAgB,SAAS,QAAQ;AAGvC,YAAI,iBAAiB,IAAI,MAAM,IAAI,GAAG;AAClC,cAAI;AACJ,cAAI,WAAW;AACf,cAAI;AACA,0BAAc,mBAAmB,SAAS,OAAO;AACjD,uBAAW,oBAAoB,oBAAoB,WAAW;AAAA,UAClE,SAAS,GAAG;AAAE,0BAAc;AAAW,uBAAW;AAAA,UAAM;AAExD,cAAI,CAAC,YAAY,gBAAgB,QAAW;AAExC,gBAAI,aAAa;AACb,6BAAe;AAAA,YACnB,OAAO;AACH,6BAAe;AAAA,YACnB;AAEA,gBAAI,MA
AM,SAAS,OAAO;AACtB,qCAAuB;AAAA,YAC3B;AAAA,UACJ,OAAO;AAEH,oBAAQ,KAAK,oBAAoB,MAAM,IAAI,IAAI,MAAM,GAAG,wBAAwB,cAAc,sCAAsC,EAAE,EAAE;AACxI,mCAAuB;AAEvB,gBAAI,aAAa;AACb,6BAAe,QAAQ,aAAa,WAAW,mBAAmB,SAAS,QAAQ,CAAC;AAAA,YACxF,OAAO;AACH,6BAAe;AAAA,YACnB;AAAA,UACJ;AAAA,QACJ,WAES,mBAAmB,IAAI,MAAM,IAAI,GAAG;AACzC,cAAI,aAAa;AACb,2BAAe,QAAQ,aAAa,WAAW,mBAAmB,SAAS,QAAQ,CAAC;AAAA,UACxF,OAAO;AACH,2BAAe;AAAA,UACnB;AACA,iCAAuB;AAAA,QAC3B,OAEK;AACD,iCAAuB;AACvB,cAAI,aAAa;AACb,gBAAI;AACA,oBAAM,uBAAuB,mBAAmB,SAAS,OAAO;AAChE,kBAAI,oBAAoB,oBAAoB,oBAAoB,GAAG;AAC9D,wBAAQ,KAAK,qCAAqC,MAAM,GAAG,iFAAiF;AAC5I,+BAAe,wCAAwC,mBAAmB,SAAS,QAAQ,CAAC;AAAA,cACjG,OAAO;AACF,+BAAe;AACf,wBAAQ,MAAM,4CAA4C,MAAM,GAAG,WAAW;AAAA,cACnF;AAAA,YACJ,SAAS,aAAa;AACjB,sBAAQ,KAAK,qDAAqD,MAAM,GAAG,KAAK,uBAAuB,QAAQ,YAAY,UAAU,OAAO,WAAW,CAAC,2BAA2B;AACnL,6BAAe,wCAAwC,mBAAmB,SAAS,QAAQ,CAAC;AAAA,YACjG;AAAA,UACJ,OAAO;AACH,2BAAe;AAAA,UACnB;AAAA,QACJ;AAAA,MACL,OAAO;AAEF,uBAAe;AACf,+BAAuB;AAAA,MAC5B;AAIA,qBAAe,IAAI,MAAM,KAAK,EAAE,GAAG,OAAO,KAAK,MAAM,KAAK,SAAS,aAAa,CAAC;AAKjF,UAAI,MAAM,SAAS,SAAS,sBAAsB;AAE9C,cAAM,oBAAoB,iBAAiB,MAAM,KAAK,MAAM;AAC3D,gBAAQ,MAAM,uDAAuD,MAAM,GAAG,KAAK,iBAAiB,EAAE;AAEvG,YAAI,mBAAmB;AAEnB,gBAAM,wBAAwB;AAAA,YAC1B;AAAA,YACA;AAAA,YACA;AAAA,UACJ;AAEA,cAAI,sBAAsB,SAAS,GAAG;AAClC,oBAAQ,MAAM,cAAc,sBAAsB,MAAM,yBAAyB,MAAM,GAAG,6BAA6B;AACvH,uBAAW,YAAY,uBAAuB;AAG1C,kBAAI,CAAC,sBAAsB,IAAI,SAAS,GAAG,GAAG;AAC1C,sCAAsB,IAAI,SAAS,GAAG;AACtC,gCAAgB,KAAK,QAAQ;AAC5B,wBAAQ,MAAM,gCAAgC,SAAS,GAAG,EAAE;AAAA,cACjE,OAAO;AACF,wBAAQ,MAAM,uDAAuD,SAAS,GAAG,EAAE;AAAA,cACxF;AAAA,YACJ;AAAA,UACJ;AAAA,QACJ,OAAO;AACF,kBAAQ,KAAK,qDAAqD,MAAM,GAAG,mDAAmD;AAAA,QACnI;AAAA,MACJ;AAAA,IACJ;AAAA,EACJ;AAEA,QAAM,sBAAsB,iBAAiB,kCAAkC,SAAS;AACxF,UAAQ,KAAK,2CAAsC,eAAe,IAAI,qBAAqB,mBAAmB,cAAc;AAG5H,SAAO;AAAA,IACH,aAAa,OAAO;AAAA,IACpB,QAAQ,MAAM,KAAK,eAAe,OAAO,CAAC;AAAA,EAC9C;AACJ;AAzmBA,IAyBM,kBAEA,oBAEA;AA7BN;AAAA;AAAA;AAoBA;AAKA,IAAM,mBAAuC,oBAAI,IAAI,CAAC,OAAO,IAAI,CAAC;AAElE,IAAM,qBAAyC,oBAAI,IAAI,CAAC,SAAS,QAAQ,SAAS,OAAO,CAAC;AAE1F,IAAM,kCAAkC;AAAA;AAAA;;;AClBxC,SAAS,UAAU,kBAAkB;AAErC,OAAO,cAAc;AAGrB,SAAS,UAAU,gBAAgB;AAkGnC,eAAsB,aAClB,QACA,UAAyB,CAAC,GAC1B,QACmB;AACnB,QAAM,EAAE,aAAa,OAAO,IAAI;AAGhC,QAAM,qBAAqB,eAAe;AAC1C,QAAM,gBAAgB,UAAU,CAAC;AAGjC,MAAI,CAAC,sBAAsB,cAAc,WAAW,GAAG;AACnD,YAAQ,MAAM,mCAAmC;AACjD,WAAO,EAAE,aAAa,oBAAoB,QAAQ,cAAc;AAAA,EACpE;AAEA,QAAM,cAAc;AAAA,IAChB,YAAY,QAAQ,eAAe;AAAA,IACnC,WAAW,QAAQ,cAAc;AAAA,IACjC,UAAU,QAAQ,aAAa;AAAA,EACnC;AAEA,UAAQ,MAAM,uBAAuB,KAAK,UAAU,WAAW,CAAC,EAAE;AAElE,QAAM,iBAA0B,MAAM,QAAQ;AAAA,IAC1C,cAAc,IAAI,OAAO,UAA0B;AAE9C,UAAI,iBAAiB,EAAE,GAAG,MAAM;AAEjC,UAAI,OAAO,eAAe,YAAY,YAAY,eAAe,QAAQ,WAAW,GAAG;AACnF,eAAO;AAAA,MACX;AAEA,UAAI,aAAa,eAAe;AAChC,YAAM,kBAAkB,eAAe,OAAO,UAAU,eAAe,IAAI;AAE3E,UAAI;AAEA,YAAI,YAAY,aAAa,eAAe,SAAS,OAAO;AACxD,kBAAQ,MAAM,kBAAkB,eAAe,EAAE;AAGjD,gBAAM,cAAc,IAAI,SAAS,kBAAkB;AAGnD,gBAAM,SAAS,YAAY,OAAO,eAAe,OAAO;AAGxD,cAAI,OAAO,UAAU,OAAO,OAAO,SAAS,GAAG;AAC3C,oBAAQ,KAAK,oCAA0B,eAAe,KAAK,OAAO,OAAO,KAAK,IAAI,CAAC,EAAE;AAAA,UACzF,OAAO;AACH,gBAAI,OAAO,YAAY,OAAO,SAAS,SAAS,GAAG;AAC/C,sBAAQ,MAAM,yBAAyB,eAAe,KAAK,OAAO,SAAS,KAAK,IAAI,CAAC,EAAE;AAAA,YAC3F;AACA,gBAAI,OAAO,QAAQ;AAChB,2BAAa,OAAO;AACpB,sBAAQ,MAAM,8BAA8B,eAAe,EAAE;AAAA,YAChE,OAAO;AACJ,sBAAQ,KAAK,uEAA6D,eAAe,qBAAqB;AAAA,YACjH;AAAA,UACJ;AAAA,QACJ;AAGA,YAAI,YAAY,YAAY,eAAe,SAAS,MAAM;AACtD,kBAAQ,MAAM,iBAAiB,eAAe,EAAE;AAChD,gBAAM,SAAuB,MAAM,SAAS,eAAe,SAAS,iBAAiB;AACrF,cAAI,OAAO,MAAM;AACb,yBAAa,OAAO;AACpB,oBAAQ,MAAM,6BAA6B,eAAe,EAAE;AAAA,UAChE,OAAO;AACH,kBAAM,cAAe,OAAe;AACpC,gBAAI,aAAa;AACb,sBAAQ,KAAK,kCAAwB,eAAe,KAAK,YAAY,WAAW,WAAW,EAAE;AAAA,YACjG,OAAO;AACH,sBAAQ,KAAK,mEA
AyD,eAAe,qBAAqB;AAAA,YAC9G;AAAA,UACJ;AAAA,QACJ;AAAA,MACJ,SAAS,KAAc;AACnB,cAAM,eAAe,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AACpE,gBAAQ,KAAK,uCAA6B,eAAe,KAAK,eAAe,IAAI,MAAM,YAAY,EAAE;AAAA,MAEzG;AAGA,qBAAe,UAAU;AACzB,aAAO;AAAA,IACX,CAAC;AAAA,EACL;AAGA,MAAI,YAAY;AAChB,MAAI,YAAY,cAAc,UAAU,SAAS,GAAG;AAChD,YAAQ,MAAM,2BAA2B;AACzC,QAAI;AACA,kBAAY,MAAM,WAAW,WAAW;AAAA,QACpC,GAAG;AAAA,QACH,WAAW,YAAY;AAAA,QACvB,UAAU,YAAY;AAAA,MAC1B,CAAC;AACD,cAAQ,MAAM,6BAA6B;AAAA,IAC/C,SAAS,KAAc;AACnB,YAAM,eAAe,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AACpE,cAAQ,KAAK,0CAAgC,YAAY,EAAE;AAAA,IAE/D;AAAA,EACJ,WAAW,UAAU,SAAS,GAAG;AAC7B,YAAQ,MAAM,uCAAuC;AAAA,EACzD;AAIA,SAAO;AAAA,IACH,aAAa;AAAA,IACb,QAAQ;AAAA;AAAA,EACZ;AACJ;AAxOA,IA2CM,qBAiBA,oBAyBA;AArFN;AAAA;AAAA;AA2CA,IAAM,sBAAyC;AAAA,MAC3C,oBAAoB;AAAA,MACpB,gBAAgB;AAAA,MAChB,sBAAsB;AAAA,MACtB,WAAW;AAAA;AAAA,MACX,UAAU;AAAA;AAAA,MACV,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,MAC3B,4BAA4B;AAAA,MAC5B,+BAA+B;AAAA,MAC/B,iBAAiB;AAAA,IACrB;AAMA,IAAM,qBAAsC;AAAA,MACxC,eAAe;AAAA;AAAA,MACf,OAAO;AAAA,QACH,GAAG;AAAA;AAAA,UACC,oBAAoB;AAAA,UACpB,sBAAsB;AAAA,UACtB,gBAAgB;AAAA,UAChB,oBAAoB;AAAA,UACpB,iBAAiB;AAAA,QACrB;AAAA,QACA,GAAG;AAAA;AAAA,UACC,YAAY;AAAA,UACZ,uBAAuB;AAAA,UACvB,0BAA0B;AAAA,UAC1B,4BAA4B;AAAA,UAC5B,sBAAsB;AAAA,UACtB,kBAAkB;AAAA,QACtB;AAAA,MACJ;AAAA;AAAA,IAEJ;AAKA,IAAM,oBAAmC;AAAA,MACrC,UAAU;AAAA,QACN,WAAW;AAAA,QACX,cAAc;AAAA,QACd,eAAe;AAAA,QACf,MAAM;AAAA,QACN,iBAAiB;AAAA,QACjB,aAAa;AAAA,MACjB;AAAA,MACA,QAAQ;AAAA,QACJ,iBAAiB;AAAA,QACjB,aAAa;AAAA,MACjB;AAAA,MACA,QAAQ,EAAE,UAAU,MAAM;AAAA,IAC9B;AAAA;AAAA;;;AC7FA,YAAYC,cAAa;AAUzB,SAAS,oBAAoB,MAAsB;AAC/C,SAAO,KAAK,QAAQ,iBAAiB,QAAQ;AACjD;AASA,SAAS,cAAc,GAAe,QAAuB;AACzD,MAAI,OAAO,EAAE,MAAM;AAGnB,MAAI,KAAK,WAAW,GAAG;AACnB,YAAQ,MAAM,kEAAkE;AAChF,QAAI,cAAc,EAAE,MAAM;AAG1B,QAAI,YAAY,WAAW,GAAG;AAC1B,cAAQ,MAAM,0DAA0D;AACxE,YAAM,cAAc,EAAE,KAAK,EAAE,KAAK,KAAK;AACvC,QAAE,KAAK,EAAE,MAAM;AAEf,oBAAc,EAAE,QAAQ,EAAE,SAAS,EAAE,KAAK,CAAC;AAE3C,aAAO,EAAE,QAAQ,EAAE,SAAS,WAAW;AACvC,QAAE,QAAQ,EAAE,KAAK,WAAW,EAAE,SAAS,WAAW;AAAA,IACtD,OAAO;AAGH,aAAO,EAAE,QAAQ,EAAE,UAAU,WAAW;AAAA,IAC5C;AAAA,EACJ;AAKA,MAAI,QAAQ,KAAK,SAAS,KAAK,KAAK,KAAK,YAAY,EAAE,WAAW,GAAG;AACjE,YAAQ,MAAM,wCAAwC;AACtD,SAAK,QAAQ,kBAAkB;AAAA,EACnC;AACJ;AAMA,SAAS,aAAa,GAAe,QAAiB,QAAuB;AACzE,UAAQ,MAAM,YAAY,OAAO,OAAO,OAAK,EAAE,OAAO,EAAE,MAAM,yBAAyB;AACvF,QAAM,WAAW,IAAI,IAAmB,OAAO,IAAI,WAAS,CAAC,MAAM,KAAK,KAAK,CAAC,CAAC;AAG/E,IAAE,8BAA8B,EAAE,KAAK,CAAC,GAAG,OAAO;AAC9C,UAAM,OAAO,EAAE,EAAE;AACjB,UAAM,OAAO,KAAK,KAAK,MAAM;AAC7B,UAAM,QAAQ,OAAO,SAAS,IAAI,IAAI,IAAI;AAC1C,QAAI,OAAO,WAAW,OAAO,MAAM,YAAY,UAAU;AACrD,UAAI,MAAM,QAAQ,WAAW,OAAO,GAAG;AAClC,gBAAQ,MAAM,0DAA0D,MAAM,GAAG,EAAE;AACnF,cAAM,WAAW,EAAE,SAAS,EAAE,KAAK,gBAAgB,MAAM,OAAO,KAAK;AACrE,aAAK,YAAY,QAAQ;AAAA,MAC9B,OAAO;AACF,gBAAQ,MAAM,iBAAiB,MAAM,GAAG,EAAE;AAC1C,cAAM,WAAW,EAAE,SAAS,EAAE,KAAK,MAAM,OAAO;AAChD,aAAK,YAAY,QAAQ;AAAA,MAC9B;AAAA,IACJ,WAAW,MAAM;AACZ,cAAQ,KAAK,yBAAyB,IAAI,+BAA+B;AAAA,IAC9E;AAAA,EACJ,CAAC;AAGD,IAAE,aAAa,EAAE,KAAK,CAAC,GAAG,OAAO;AAC7B,UAAM,SAAS,EAAE,EAAE;AACnB,UAAM,MAAM,OAAO,KAAK,KAAK;AAC7B,UAAM,QAAQ,MAAM,SAAS,IAAI,GAAG,IAAI;AACxC,QAAI,OAAO,WAAW,OAAO,MAAM,YAAY,UAAU;AACrD,cAAQ,MAAM,gBAAgB,MAAM,GAAG,EAAE;AACzC,YAAM,eAAe,EAAE,UAAU;AACjC,mBAAa,KAAK,oBAAoB,MAAM,OAAO,CAAC;AACpD,aAAO,QAAQ,OAAO,KAAK,KAAK,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,KAAK,KAAK,MAAM;AACzD,YAAI,IAAI,YAAY,MAAM,MAAO,cAAa,KAAK,KAAK,KAAK;AAAA,MAClE,CAAC;AACD,aAAO,YAAY,YAAY;AAAA,IACnC,WAAW,KAAK;AACZ,cAAQ,KAAK,wBAAwB,GAAG,kCAAkC;AAAA,IAC9E;AAAA,EACJ,CAAC;AAGD,IAAE,mDAAmD,EAAE,KAAK,CAAC,GAAG,OAAO;AACnE,UAAM,UAAU,EAAE,EAAE;AACpB,UAAM,UAAU,QAAQ,GAAG,OAAO,IAAI,WAAW;AACjD,UAAM,MAAM,QAAQ,KAAK,OAAO;AAChC,UAAM,QAAQ,
MAAM,SAAS,IAAI,GAAG,IAAI;AACxC,QAAI,OAAO,WAAW,OAAO,MAAM,YAAY,YAAY,MAAM,QAAQ,WAAW,OAAO,GAAG;AAC1F,cAAQ,MAAM,sBAAsB,OAAO,KAAK,MAAM,GAAG,EAAE;AAC3D,cAAQ,KAAK,SAAS,MAAM,OAAO;AAAA,IACvC,WAAW,KAAK;AACZ,cAAQ,KAAK,8BAA8B,OAAO,KAAK,GAAG,sCAAsC;AAAA,IACpG;AAAA,EACJ,CAAC;AAGA,IAAE,6BAA6B,EAAE,KAAK,CAAC,GAAG,OAAO;AAC7C,UAAM,UAAU,EAAE,EAAE;AACpB,UAAM,SAAS,QAAQ,KAAK,QAAQ;AACpC,QAAI,CAAC,OAAQ;AACb,UAAM,iBAA2B,CAAC;AAClC,QAAI,UAAU;AACd,WAAO,MAAM,GAAG,EAAE,QAAQ,UAAQ;AAC9B,YAAM,cAAc,KAAK,KAAK;AAC9B,YAAM,CAAC,KAAK,UAAU,IAAI,YAAY,MAAM,OAAO,CAAC;AACpD,YAAM,QAAQ,MAAM,SAAS,IAAI,GAAG,IAAI;AACxC,UAAI,OAAO,WAAW,OAAO,MAAM,YAAY,YAAY,MAAM,QAAQ,WAAW,OAAO,GAAG;AAC1F,uBAAe,KAAK,GAAG,MAAM,OAAO,GAAG,aAAa,MAAM,aAAa,EAAE,EAAE;AAC3E,kBAAU;AAAA,MACd,OAAO;AACH,uBAAe,KAAK,WAAW;AAAA,MACnC;AAAA,IACJ,CAAC;AACD,QAAI,SAAS;AACR,cAAQ,KAAK,UAAU,eAAe,KAAK,IAAI,CAAC;AAAA,IACrD;AAAA,EACJ,CAAC;AAGA,IAAE,kEAAkE,EAAE,KAAK,CAAC,GAAG,OAAO;AACnF,UAAM,UAAU,EAAE,EAAE;AACpB,UAAM,MAAM,QAAQ,KAAK,KAAK;AAC9B,UAAM,QAAQ,MAAM,SAAS,IAAI,GAAG,IAAI;AACxC,QAAI,OAAO,WAAW,OAAO,MAAM,YAAY,YAAY,MAAM,QAAQ,WAAW,OAAO,GAAG;AAC1F,cAAQ,MAAM,0BAA0B,MAAM,GAAG,EAAE;AACnD,cAAQ,KAAK,OAAO,MAAM,OAAO;AAAA,IACrC;AAAA,EACJ,CAAC;AAEF,UAAQ,MAAM,kCAAkC;AACpD;AAaO,SAAS,SAAS,QAAoB,QAAyB;AAClE,QAAM,EAAE,aAAa,OAAO,IAAI;AAChC,MAAI,CAAC,eAAe,OAAO,gBAAgB,UAAU;AACjD,YAAQ,KAAK,6EAA6E;AAC1F,WAAO;AAAA,EACX;AAEA,UAAQ,MAAM,kDAAkD;AAChE,QAAM,IAAY,cAAK,WAAW;AAElC,UAAQ,MAAM,+BAA+B;AAC7C,gBAAc,GAAG,MAAM;AAEvB,UAAQ,MAAM,4BAA4B;AAC1C,eAAa,GAAG,QAAQ,MAAM;AAE9B,UAAQ,MAAM,wCAAwC;AACtD,QAAM,YAAY,EAAE,KAAK;AAEzB,UAAQ,MAAM,iCAAiC,OAAO,WAAW,SAAS,CAAC,SAAS;AACpF,SAAO;AACX;AA9LA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAuBa;AAvBb;AAAA;AAAA;AAOA;AAgBO,IAAM,SAAN,MAAM,QAAO;AAAA;AAAA,MAET;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MASP,YAAY,sBAAiC;AAEzC,aAAK,QAAS,UAAU,UAAa,SAAS,KAAK,MAAM,SACnD;AAAA,MAEV;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA,SAAS,OAAuB;AAC5B,aAAK,QAAQ;AAAA,MACjB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA,MAAM,SAAuB;AAEzB,YAAI,KAAK,wBAAyB;AAC9B,kBAAQ,MAAM,WAAW,OAAO,EAAE;AAAA,QACtC;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA,KAAK,SAAuB;AAExB,YAAI,KAAK,uBAAwB;AAC7B,kBAAQ,KAAK,UAAU,OAAO,EAAE;AAAA,QACpC;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA,KAAK,SAAuB;AAExB,YAAI,KAAK,uBAAwB;AAC7B,kBAAQ,KAAK,UAAU,OAAO,EAAE;AAAA,QACpC;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA,MAAM,SAAuB;AAEzB,YAAI,KAAK,wBAAyB;AAC9B,kBAAQ,MAAM,WAAW,OAAO,EAAE;AAAA,QACtC;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAUA,OAAO,gBAAgB,UAAiC,CAAC,GAAW;AAEhE,eAAO,IAAI,QAAO,QAAQ,sCAAwC;AAAA,MACtE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAWC,OAAO,cAAc,WAAoB,6BAAgD;AACrF,YAAI,CAAC,WAAW;AACZ,iBAAO,IAAI,QAAO,YAAY;AAAA,QAClC;AACA,gBAAQ,UAAU,YAAY,GAAG;AAAA;AAAA,UAE7B,KAAK;AAAS,mBAAO,IAAI,qBAAqB;AAAA,UAC9C,KAAK;AAAQ,mBAAO,IAAI,oBAAoB;AAAA,UAC5C,KAAK;AAAQ,mBAAO,IAAI,oBAAoB;AAAA,UAC5C,KAAK;AAAS,mBAAO,IAAI,qBAAqB;AAAA,UAC9C,KAAK;AAAA,UACL,KAAK;AAAQ,mBAAO,IAAI,oBAAoB;AAAA,UAC5C;AAEI,oBAAQ,KAAK,oCAAoC,SAAS,oBAAoB,SAAS,YAAY,CAAC,GAAG;AACvG,mBAAO,IAAI,QAAO,YAAY;AAAA,QACtC;AAAA,MACJ;AAAA,IACL;AAAA;AAAA;;;ACvHO,SAAS,QAAQ,KAAqB;AACzC,MAAI,CAAC,OAAO,OAAO,QAAQ,SAAU,QAAO;AAE5C,MAAI,UAAU,IAAI,KAAK;AACvB,MAAI,gBAAgB;AAEpB,MAAI;AACA,UAAM,SAAS,IAAI,IAAI,KAAK,0BAA0B;AACtD,qBAAiB,OAAO,YAAY,OAAO,OAAO,UAAU;AAAA,EAChE,QAAQ;AACJ,oBAAgB,QAAQ,MAAM,GAAG,EAAE,CAAC;AAAA,EACxC;AAGA,MAAI;AACA,cAAU,mBAAmB,aAAa;AAAA,EAC9C,SAAS,GAAG;AACR,cAAU;AAAA,EACd;AAEA,YAAU,QAEL,QAAQ,6BAA6B,EAAE,EAEvC,QAAQ,gBAAgB,GAAG,EAE3B,QAAQ,cAAc,EAAE,EAExB,QAAQ,OAAO,GAAG,EAElB,QAAQ,YAAY,EAAE,EAEtB,YAAY;AAGjB,SAAO,WAAW;AACtB;AAWO,SAAS,aAAa,QAAwB;AAEjD,SAAO,QAAQ,MAAM;AACzB;AArEA;AAAA;AAAA;AAAA;AAAA;;;ACuGO,SA
AS,oBAAoB,OAAoB,QAAyB;AAC7E,MAAI,CAAC,MAAM,QAAQ,KAAK,GAAG;AACvB,UAAM,WAAW;AACjB,YAAQ,MAAM,QAAQ;AACtB,UAAM,IAAI,MAAM,QAAQ;AAAA,EAC5B;AAEA,UAAQ,KAAK,YAAY,MAAM,MAAM,yCAAyC;AAE9E,QAAM,aAAa,MAAM,OAAO,UAAQ;AACpC,UAAM,UAAU,QAAQ,OAAO,SAAS,YAAY,OAAO,KAAK,QAAQ,YAAY,OAAO,KAAK,SAAS;AACzG,QAAI,CAAC,QAAS,SAAQ,KAAK,6BAA6B;AACxD,WAAO;AAAA,EACX,CAAC;AAED,MAAI,WAAW,WAAW,GAAG;AACzB,UAAM,WAAW;AACjB,YAAQ,MAAM,QAAQ;AACtB,UAAM,IAAI,MAAM,QAAQ;AAAA,EAC5B;AAEA,QAAM,UAAU,oBAAI,IAAoB;AACxC,QAAM,YAAY,oBAAI,IAAY;AAElC,aAAW,QAAQ,YAAY;AAC3B,UAAM,WAAW,aAAa,KAAK,GAAG;AACtC,QAAI,OAAO;AACX,QAAI,UAAU;AACd,WAAO,UAAU,IAAI,IAAI,GAAG;AACxB,aAAO,GAAG,QAAQ,IAAI,SAAS;AAC/B,cAAQ,KAAK,gCAAgC,KAAK,GAAG,aAAa,IAAI,YAAY;AAAA,IACtF;AACA,cAAU,IAAI,IAAI;AAClB,YAAQ,IAAI,KAAK,KAAK,IAAI;AAAA,EAC9B;AAEA,QAAM,kBAAkB,QAAQ,IAAI,WAAW,CAAC,EAAE,GAAG;AAErD,MAAI,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UASP,WAAW,IAAI,OAAK;AAClB,UAAM,OAAO,QAAQ,IAAI,EAAE,GAAG;AAC9B,UAAM,QAAQ,EAAE,IAAI,MAAM,GAAG,EAAE,IAAI,GAAG,MAAM,GAAG,EAAE,CAAC,KAAK;AACvD,WAAO,aAAa,IAAI,gBAAgB,IAAI,KAAK,KAAK;AAAA,EAC1D,CAAC,EAAE,KAAK,IAAI,CAAC;AAAA;AAAA;AAAA,MAGf,WAAW,IAAI,OAAK;AAClB,UAAM,OAAO,QAAQ,IAAI,EAAE,GAAG;AAC9B,WAAO,sBAAsB,IAAI,KAAK,EAAE,IAAI;AAAA,EAChD,CAAC,EAAE,KAAK,IAAI,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iFAgCgE,eAAe;AAAA;AAAA;AAAA;AAAA;AAM5F,UAAQ,KAAK,sCAAsC,OAAO,WAAW,QAAQ,OAAO,CAAC,SAAS;AAC9F,SAAO;AACX;AAxMA;AAAA;AAAA;AAQA;AACA;AACA;AAGA;AAAA;AAAA;;;ACPA,YAAY,eAAe;AAC3B,YAAYC,SAAQ;AAsBpB,eAAsB,oBAClB,KACA,QACA,UAAkB,KACE;AACpB,MAAI,UAAoC;AACxC,QAAM,QAAQ,KAAK,IAAI;AACvB,UAAQ,MAAM,qCAAqC,GAAG,EAAE;AAExD,MAAI;AACA,cAAU,MAAgB,iBAAO,EAAE,UAAU,KAAK,CAAC;AACnD,YAAQ,MAAM,wBAAwB,GAAG,EAAE;AAC3C,UAAM,OAAO,MAAM,QAAQ,QAAQ;AACnC,YAAQ,MAAM,oBAAoB,GAAG,EAAE;AAEvC,QAAI;AACA,cAAQ,MAAM,iBAAiB,GAAG,iBAAiB,OAAO,IAAI;AAC9D,YAAM,KAAK,KAAK,KAAK,EAAE,WAAW,gBAAgB,QAAiB,CAAC;AACpE,cAAQ,MAAM,6BAA6B,GAAG,EAAE;AAChD,YAAM,OAAO,MAAM,KAAK,QAAQ;AAChC,cAAQ,MAAM,yBAAyB,GAAG,EAAE;AAE5C,YAAM,WAAoC;AAAA,QACtC,OAAO;AAAA,QACP,YAAY,OAAO,WAAW,MAAM,OAAO;AAAA,QAC3C,YAAY;AAAA;AAAA,QACZ,aAAa,KAAK,IAAI,IAAI;AAAA,QAC1B,QAAQ,CAAC;AAAA;AAAA,MACb;AAEA,YAAM,KAAK,MAAM;AACjB,cAAQ,MAAM,mBAAmB,GAAG,EAAE;AAEtC,cAAQ,MAAM,sBAAsB,GAAG,EAAE;AACzC,gBAAU;AAEV,aAAO,EAAE,MAAM,SAAS;AAAA,IAE5B,SAAS,WAAgB;AACrB,cAAQ,MAAM,oCAAoC,GAAG,KAAK,UAAU,OAAO,EAAE;AAE7E,UAAI;AAAE,cAAM,KAAK,MAAM;AAAA,MAEtB,SAAS,UAAU;AAChB,cAAM;AAAA,MACV;AACA,YAAM;AAAA,IACV;AAAA,EACJ,SAAS,aAAkB;AACvB,YAAQ,MAAM,6DAA6D,GAAG,KAAK,YAAY,OAAO,EAAE;AAGxG,QAAI,SAAS;AACT,UAAI;AAAE,cAAM,QAAQ,MAAM;AAAA,MAAG,SAAS,UAAU;AAAA,MAAmC;AAAA,IACvF;AACA,UAAM;AAAA,EACV,UAAE;AAGE,QAAI,SAAS;AACR,cAAQ,KAAK,wCAAwC,GAAG,8CAA8C;AACtG,UAAI;AAAE,cAAM,QAAQ,MAAM;AAAA,MAAG,SAAS,UAAU;AAAA,MAAyC;AAAA,IAC9F;AAAA,EACJ;AACJ;AAcA,eAAe,aACX,UACA,UACA,QACoB;AACpB,UAAQ,KAAK,sBAAsB,QAAQ,kBAAkB,QAAQ,EAAE;AAGvE,MAAI,YAAY,GAAG;AACf,YAAQ,KAAK,sDAAsD;AACnE,WAAO,CAAC;AAAA,EACZ;AAEA,QAAM,UAAU,MAAgB,iBAAO,EAAE,UAAU,KAAK,CAAC;AACzD,QAAM,UAAU,oBAAI,IAAY;AAChC,QAAM,UAAuB,CAAC;AAE9B,QAAM,QAA0C,CAAC;AAGjD,MAAI;AACJ,MAAI;AACA,kBAAc,IAAI,IAAI,QAAQ,EAAE;AAAA,EACpC,SAAS,GAAQ;AACb,YAAQ,MAAM,sBAAsB,QAAQ,KAAK,EAAE,OAAO,EAAE;AAC5D,UAAM,QAAQ,MAAM;AACpB,WAAO,CAAC;AAAA,EACZ;AAGA,MAAI;AACJ,MAAI;AACA,UAAM,iBAAiB,IAAI,IAAI,QAAQ;AACvC,mBAAe,OAAO;AACtB,yBAAqB,eAAe;AAAA,EACxC,SAAS,GAAQ;AACb,YAAQ,MAAM,sBAAsB,QAAQ,KAAK,EAAE,OAAO,EAAE;AAC5D,UAAM,QAAQ,MAAM;AACpB,WAAO,CAAC;AAAA,EACZ;AAEA,UAAQ,IAAI,kBAAkB;AAC9B,QAAM,KAAK,EAAE,KAAK,oBAAoB,OAAO,EAAE,CAAC;AAChD,UAAQ,MAAM,uBAAuB,kBAAkB,YAAY;AAEnE,SAAO,MAAM,SAAS,GAAG;
AACrB,UAAM,EAAE,KAAK,MAAM,IAAI,MAAM,MAAM;AACnC,YAAQ,KAAK,eAAe,GAAG,WAAW,KAAK,GAAG;AAClD,QAAI,OAA8B;AAElC,QAAI;AACA,aAAO,MAAM,QAAQ,QAAQ;AAE7B,YAAM,KAAK,YAAY,EAAE,OAAO,MAAM,QAAQ,IAAI,CAAC;AACnD,YAAM,KAAK,KAAK,KAAK,EAAE,WAAW,gBAAgB,SAAS,IAAM,CAAC;AAClE,YAAM,OAAO,MAAM,KAAK,QAAQ;AAIhC,cAAQ,KAAK,EAAE,KAAK,KAAK,CAAC;AAC1B,cAAQ,MAAM,oCAAoC,GAAG,EAAE;AAIvD,UAAI,QAAQ,UAAU;AAClB,gBAAQ,MAAM,wBAAwB,GAAG,mBAAmB,KAAK,cAAc,QAAQ,GAAG;AAE1F,cAAM,QAAQ,MAAM,KAAK;AAAA,UAAS,MAC9B,MAAM,KAAK,SAAS,iBAAiB,SAAS,GAAG,OAAK,EAAE,aAAa,MAAM,CAAC;AAAA,QAChF;AACA,gBAAQ,MAAM,SAAS,MAAM,MAAM,uBAAuB,GAAG,EAAE;AAE/D,YAAI,aAAa;AACjB,mBAAW,QAAQ,OAAO;AACtB,cAAI,CAAC,KAAM;AAEX,cAAI;AACJ,cAAI;AAEA,kBAAM,WAAW,IAAI,IAAI,MAAM,GAAG;AAElC,qBAAS,OAAO;AAChB,0BAAc,SAAS;AAAA,UAC3B,SAAS,GAAG;AAER,oBAAQ,MAAM,iCAAiC,IAAI,aAAa,GAAG,EAAE;AACrE;AAAA,UACJ;AAKA,cAAI,YAAY,WAAW,WAAW,KAAK,CAAC,QAAQ,IAAI,WAAW,GAAG;AAClE,oBAAQ,IAAI,WAAW;AACvB,kBAAM,KAAK,EAAE,KAAK,aAAa,OAAO,QAAQ,EAAE,CAAC;AACjD;AAAA,UAEJ,OAAO;AAAA,UAEP;AAAA,QACJ;AACA,gBAAQ,MAAM,SAAS,UAAU,4CAA4C,GAAG,EAAE;AAAA,MACtF,OAAO;AACH,gBAAQ,MAAM,cAAc,QAAQ,uCAAuC,GAAG,EAAE;AAAA,MACpF;AAAA,IAEJ,SAAS,KAAU;AAEf,cAAQ,KAAK,4BAAuB,GAAG,KAAK,IAAI,OAAO,EAAE;AAAA,IAE7D,UAAE;AAEE,UAAI,MAAM;AACN,YAAI;AACA,gBAAM,KAAK,MAAM;AAAA,QACrB,SAAS,gBAAqB;AAE1B,kBAAQ,MAAM,4BAA4B,GAAG,KAAK,eAAe,OAAO,EAAE;AAAA,QAC9E;AAAA,MACJ;AAAA,IACJ;AAAA,EACJ;AAEA,UAAQ,KAAK,kCAAkC;AAC/C,QAAM,QAAQ,MAAM;AACpB,UAAQ,KAAK,SAAS,QAAQ,MAAM,SAAS;AAC7C,SAAO;AACX;AAeA,eAAsB,sBAClB,UACA,YACA,WAAW,GAC6B;AAExC,QAAM,SAAS,IAAI,OAAO;AAC1B,SAAO,KAAK,sCAAsC,QAAQ,OAAO,UAAU,eAAe,QAAQ,GAAG;AAErG,MAAI;AAEA,UAAM,QAAqB,MAAM,aAAa,UAAU,UAAU,MAAM;AAExE,QAAI,MAAM,WAAW,GAAG;AACpB,aAAO,KAAK,yFAAyF;AAAA,IACzG,OAAO;AACH,aAAO,KAAK,2BAA2B,MAAM,MAAM,4BAA4B;AAAA,IACnF;AAGA,UAAM,cAAc,oBAAoB,OAAO,MAAM;AACrD,WAAO,KAAK,mCAAmC,OAAO,WAAW,aAAa,OAAO,CAAC,SAAS;AAG/F,WAAO,KAAK,2BAA2B,UAAU,EAAE;AACnD,UAAS,cAAU,YAAY,aAAa,OAAO;AACnD,WAAO,KAAK,wCAAwC,UAAU,EAAE;AAGhE,WAAO;AAAA,MACH,OAAO,MAAM;AAAA,MACb,MAAM;AAAA,IACV;AAAA,EACJ,SAAS,OAAY;AACjB,WAAO,MAAM,uCAAuC,MAAM,OAAO,EAAE;AAEnE,QAAI,MAAM,OAAO;AACb,aAAO,MAAM,gBAAgB,MAAM,KAAK,EAAE;AAAA,IAC9C;AAEA,UAAM;AAAA,EACV;AACJ;AAnSA;AAAA;AAAA;AAQA;AAEA;AAAA;AAAA;;;ACVA,IAaa;AAbb;AAAA;AAAA;AAaO,IAAM,aAAN,MAAiB;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MACA,aAAqB;AAAA;AAAA,MACrB,SAAmB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAO5B,YAAY,OAAe;AACvB,aAAK,YAAY,KAAK,IAAI;AAC1B,aAAK,QAAQ;AAAA,MACjB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,cAAc,OAAqB;AAC/B,aAAK,aAAa;AAAA,MACtB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,SAAS,SAAuB;AAC5B,aAAK,OAAO,KAAK,OAAO;AAAA,MAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQA,aAAa,OAAqB;AAC9B,aAAK,eAAe;AAAA,MACxB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAYA,OAAO,MAAc,OAAiD;AACpE,cAAM,cAAc,KAAK,IAAI,IAAI,KAAK;AACtC,cAAM,aAAa,OAAO,WAAW,QAAQ,IAAI,OAAO;AAIxD,cAAM,iBAAiB,MAAM,KAAK,oBAAI,IAAI,CAAC,GAAG,KAAK,QAAQ,GAAI,OAAO,UAAU,CAAC,CAAE,CAAC,CAAC;AAErF,cAAM,gBAAgC;AAAA,UAClC,OAAO,KAAK;AAAA,UACZ;AAAA,UACA;AAAA,UACA,YAAY,OAAO,cAAc,KAAK;AAAA,UACtC,cAAc,OAAO,gBAAgB,KAAK;AAAA;AAAA,UAE1C,QAAQ;AAAA,QACZ;AAGA,YAAI,cAAc,iBAAiB,QAAW;AAC1C,iBAAO,cAAc;AAAA,QACzB;AAEA,YAAI,cAAc,QAAQ,WAAW,GAAG;AACpC,iBAAO,cAAc;AAAA,QACzB;AAEA,eAAO;AAAA,MACX;AAAA,IACF;AAAA;AAAA;;;AChDA,eAAsB,qBAClB,OACA,UAAyB,CAAC,GAC1B,gBACoB;AAEpB,QAAM,SAAS,kBAAkB,IAAI,OAAO,QAAQ,QAAQ;AAC5D,SAAO,KAAK,iCAAiC,KAAK,EAAE;AACpD,QAAM,QAAQ,IAAI,WAAW,KAAK;AAGlC,QAAM,WAAW,gBAAgB,KAAK,KAAK;AAC3C,MAAI,UAAU;AACV,WAAO,KAAK,0DAA0D;AACtE,QAAI;AAEA,YAAM,SAAS,MAAMC,qBAAoB,OAAO,SAAS,MAAM;AAC/D,aAAO,KAAK,iCAAiC,KAAK,WAAW,OAAO,SAAS,UAAU,iBAAiB,OAAO,SAAS,WAAW,IAAI;AAEvI,aAAO;AAAA,IACX,SAAS,OAAY;A
ACjB,aAAO,MAAM,8BAA8B,KAAK,KAAK,MAAM,OAAO,EAAE;AACpE,YAAM;AAAA,IACV;AAAA,EACJ;AAGA,SAAO,KAAK,mEAAmE;AAE/E,QAAM,WAAW,QAAQ,WAAW;AACpC,SAAO,MAAM,yCAAyC,QAAQ,EAAE;AAEhE,MAAI;AAEA,UAAM,SAAS,MAAM,UAAU,OAAO,MAAM;AAC5C,UAAM,WAAW,MAAM,cAAc,QAAQ,QAAQ,eAAe,MAAM,UAAU,MAAM;AAC1F,UAAM,WAAW,MAAM,aAAa,UAAU,SAAS,MAAM;AAC7D,UAAM,YAAY,SAAS,UAAU,MAAM;AAI3C,UAAM,WAAW,MAAM,OAAO,WAAW;AAAA,MACrC,YAAY,SAAS,OAAO;AAAA;AAAA;AAAA,IAGhC,CAAC;AACD,WAAO,KAAK,qCAAqC,KAAK,WAAW,SAAS,UAAU,mBAAmB,SAAS,UAAU,WAAW,SAAS,WAAW,IAAI;AAC7J,QAAI,SAAS,UAAU,SAAS,OAAO,SAAS,GAAG;AAC9C,aAAO,KAAK,kBAAkB,SAAS,OAAO,MAAM,iCAAiC;AAAA,IAC1F;AAGA,WAAO,EAAE,MAAM,WAAW,SAAS;AAAA,EAEvC,SAAS,OAAY;AACjB,WAAO,MAAM,qCAAqC,KAAK,KAAK,MAAM,OAAO,EAAE;AAC3E,UAAM;AAAA,EACV;AACJ;AAgBA,eAAsB,8BAClB,KACA,QAAQ,GACR,UAAyB,CAAC,GAC1B,gBACoB;AAEpB,QAAM,SAAS,kBAAkB,IAAI,OAAO,QAAQ,QAAQ;AAC5D,SAAO,KAAK,2CAA2C,GAAG,gBAAgB,KAAK,EAAE;AACjF,QAAM,QAAQ,IAAI,WAAW,GAAG;AAEhC,MAAI,CAAC,gBAAgB,KAAK,GAAG,GAAG;AAC5B,UAAM,SAAS,6CAA6C,GAAG;AAC/D,WAAO,MAAM,MAAM;AACnB,UAAM,IAAI,MAAM,MAAM;AAAA,EAC1B;AAGA,QAAM,gCAAgC,GAAG,IAAI,IAAI,GAAG,EAAE,QAAQ;AAE9D,MAAI;AAGA,UAAM,EAAE,MAAM,MAAM,IAAI,MAAM,sBAA0B,KAAK,+BAA+B,KAAK;AACjG,WAAO,KAAK,oDAAoD,KAAK,SAAS;AAG9E,UAAM,aAAa,KAAK;AACxB,UAAM,WAAW,MAAM,OAAO,MAAM;AAAA,MAChC,YAAY;AAAA;AAAA,MACZ,cAAc;AAAA;AAAA,IAElB,CAAC;AACD,WAAO,KAAK,uCAAuC,GAAG,WAAW,SAAS,UAAU,kBAAkB,SAAS,YAAY,WAAW,SAAS,WAAW,IAAI;AAC9J,QAAI,SAAS,UAAU,SAAS,OAAO,SAAS,GAAG;AAC9C,aAAO,KAAK,kBAAkB,SAAS,OAAO,MAAM,iCAAiC;AAAA,IAC1F;AAEA,WAAO,EAAE,MAAM,SAAS;AAAA,EAE5B,SAAS,OAAY;AACjB,WAAO,MAAM,yCAAyC,GAAG,KAAK,MAAM,OAAO,EAAE;AAC7E,QAAI,MAAM,iBAAiB,OAAO;AAC9B,aAAO,MAAM,UAAU,MAAM,MAAM,OAAO,EAAE;AAAA,IAChD;AACA,UAAM;AAAA,EACV;AACJ;AAeA,eAAsBA,qBAClB,KACA,UAAyB,CAAC,GAC1B,gBACoB;AAEpB,QAAM,SAAS,kBAAkB,IAAI,OAAO,QAAQ,QAAQ;AAC5D,SAAO,KAAK,oCAAoC,GAAG,EAAE;AACrD,QAAM,QAAQ,IAAI,WAAW,GAAG;AAEhC,MAAI,CAAC,gBAAgB,KAAK,GAAG,GAAG;AAC5B,UAAM,SAAS,8CAA8C,GAAG;AAChE,WAAO,MAAM,MAAM;AACnB,UAAM,IAAI,MAAM,MAAM;AAAA,EAC1B;AAEA,MAAI;AAGA,UAAM,SAAS,MAAM,oBAAiB,KAAK,MAAM;AAGjD,UAAM,WAAW,MAAM,OAAO,OAAO,MAAM;AAAA;AAAA,MAEvC,YAAY,OAAO,UAAU,cAAc;AAAA,MAC3C,QAAQ,OAAO,UAAU,UAAU,CAAC;AAAA;AAAA,IACxC,CAAC;AACD,WAAO,KAAK,sCAAsC,GAAG,WAAW,SAAS,UAAU,mBAAmB,SAAS,UAAU,WAAW,SAAS,WAAW,IAAI;AAC5J,QAAI,SAAS,UAAU,SAAS,OAAO,SAAS,GAAG;AAC9C,aAAO,KAAK,kBAAkB,SAAS,OAAO,MAAM,iCAAiC;AAAA,IAC1F;AAGA,WAAO,EAAE,MAAM,OAAO,MAAM,SAAS;AAAA,EACzC,SAAS,OAAY;AACjB,WAAO,MAAM,sCAAsC,GAAG,KAAK,MAAM,OAAO,EAAE;AAC1E,UAAM;AAAA,EACV;AACJ;AAjOA;AAAA;AAAA;AAUA;AACA;AACA;AACA;AAEA;AAKA;AAEA;AACA;AA8OA;AAAA;AAAA;;;ACrQA;AAAA;AAAA;AAAA;AAAA;AAOA,OAAOC,SAAQ;AACf,OAAOC,WAAU;AACjB,SAAS,iBAAAC,sBAAqB;AAW9B,SAAS,iBAAsC;AAC3C,MAAI;AACA,UAAM,aAAaA,eAAc,YAAY,GAAG;AAChD,UAAM,YAAYD,MAAK,QAAQ,UAAU;AACzC,UAAM,UAAUA,MAAK,QAAQ,WAAW,oBAAoB;AAG5D,QAAID,IAAG,WAAW,OAAO,GAAG;AACxB,aAAO,KAAK,MAAMA,IAAG,aAAa,SAAS,OAAO,CAAC;AAAA,IACvD;AAAA,EACJ,SAAS,GAAG;AAAA,EAEZ;AACA,SAAO,EAAE,SAAS,QAAQ;AAC9B;AAKA,eAAsB,OAAO,OAAiB,QAAQ,MAA0B;AAC5E,MAAI,SAAS;AACb,MAAI,SAAS;AACb,MAAI,WAAW;AAGf,QAAM,cAAc,QAAQ;AAC5B,QAAM,cAAc,QAAQ;AAC5B,QAAM,eAAe,QAAQ;AAC7B,UAAQ,MAAM,IAAI,SAAS;AAAE,cAAU,KAAK,KAAK,GAAG,IAAI;AAAA,EAAM;AAC9D,UAAQ,QAAQ,IAAI,SAAS;AAAE,cAAU,KAAK,KAAK,GAAG,IAAI;AAAA,EAAM;AAChE,UAAQ,OAAO,IAAI,SAAS;AAAE,cAAU,KAAK,KAAK,GAAG,IAAI;AAAA,EAAM;AAE/D,MAAI;AACJ,MAAI;AACA,WAAO,aAAa,IAAI;AACxB,UAAM,UAAU,eAAe,EAAE,WAAW;AAE5C,QAAI,KAAK,SAAS;AACd,cAAQ,IAAI,wBAAiB,OAAO,EAAE;AAAA,IAC1C;AAEA,QAAI,CAAC,KAAK,OAAO;AACb,cAAQ,MAAM,kCAA6B;AAE3C,cAAQ,MAAM;AAAa,cAAQ,QAAQ;AAAa,cAAQ,OAAO;AACvE,aAAO,EAAE,QAAQ,QAAQ,UAAU,EAAE;AAAA,IACzC;AAGA,UAAM,aAAa,KAAK,UAAU,GAAGC,MAAK,SAAS,KAAK,KAAK,EAAE,MAAM,GAAG,EAAE,CAAC,KAAK,QAAQ;AA
ExF,QAAI,KAAK,SAAS;AACd,cAAQ,IAAI,oBAAa,KAAK,KAAK,EAAE;AACrC,cAAQ,IAAI,qBAAc,UAAU,EAAE;AAEtC,cAAQ,IAAI,iBAAiB,KAAK,aAAa,KAAK,EAAE;AACtD,cAAQ,IAAI,oBAAoB,KAAK,WAAW,EAAE;AAClD,cAAQ,IAAI,mBAAmB,KAAK,UAAU,EAAE;AAChD,cAAQ,IAAI,kBAAkB,KAAK,SAAS,EAAE;AAC9C,cAAQ,IAAI,iBAAiB,KAAK,QAAQ,EAAE;AAC5C,cAAQ,IAAI,iBAAiB,SAAS,KAAK,wBAAyB,CAAC,EAAE;AAAA,IAC3E;AAEA,QAAI,KAAK,QAAQ;AACb,cAAQ,IAAI,yDAA6C;AAEzD,cAAQ,MAAM;AAAa,cAAQ,QAAQ;AAAa,cAAQ,OAAO;AACvE,aAAO,EAAE,QAAQ,QAAQ,UAAU,EAAE;AAAA,IACzC;AAGA,UAAM,SAAS,KAAK,YAEd,MAAM,8BAA8B,KAAK,OAAO,OAAO,KAAK,cAAc,YAAY,IAAI,KAAK,WAAW,IAAI,IAC9G,MAAM,qBAAqB,KAAK,OAAO,IAAI;AAIjD,IAAAD,IAAG,cAAc,YAAY,OAAO,MAAM,OAAO;AAEjD,UAAM,OAAO,OAAO;AACpB,YAAQ,IAAI,kBAAa,KAAK,KAAK,WAAM,UAAU,EAAE;AACrD,YAAQ,IAAI,oBAAa,KAAK,aAAa,MAAM,QAAQ,CAAC,CAAC,KAAK;AAChE,YAAQ,IAAI,sBAAY,KAAK,WAAW,KAAK;AAC7C,YAAQ,IAAI,2BAAe,KAAK,UAAU,EAAE;AAE5C,QAAI,KAAK,gBAAgB,KAAK,eAAe,GAAG;AAC5C,cAAQ,IAAI,oBAAa,KAAK,YAAY,EAAE;AAAA,IAChD;AAEA,QAAI,KAAK,UAAU,KAAK,OAAO,SAAS,GAAG;AACvC,cAAQ,KAAK;AAAA,gBAAS,KAAK,OAAO,MAAM,cAAc;AACtD,iBAAW,OAAO,KAAK,QAAQ;AAC3B,gBAAQ,KAAK,OAAO,GAAG,EAAE;AAAA,MAC7B;AAAA,IACJ;AAAA,EACJ,SAAS,KAAU;AACf,YAAQ,MAAM;AAAA,mBAAe,KAAK,WAAW,iBAAiB,EAAE;AAChE,QAAI,KAAK,SAAS,MAAM,SAAS;AAC7B,cAAQ,MAAM,IAAI,KAAK;AAAA,IAC3B;AACA,eAAW;AAAA,EACf,UAAE;AAEE,YAAQ,MAAM;AACd,YAAQ,QAAQ;AAChB,YAAQ,OAAO;AAAA,EACnB;AAEA,SAAO,EAAE,QAAQ,QAAQ,SAAS;AACtC;AAlIA,IAqIa;AArIb;AAAA;AAAA;AAWA;AACA;AAGA;AAsHO,IAAM,OAAO;AAAA;AAAA;;;ACpHpB,IAAM,WAAW,YAAgC;AAC/C,QAAM,EAAE,MAAAG,MAAK,IAAI,MAAM;AACvB,SAAO,MAAMA,MAAK,QAAQ,IAAI;AAChC;AAGA,IAAI,YAAY,QAAQ,UAAU,QAAQ,KAAK,CAAC,CAAC,IAAI;AACnD,WAAS,EAAE,KAAK,CAAC,EAAE,SAAS,MAAM,QAAQ,KAAK,OAAO,QAAQ,CAAC,CAAC;AAClE;","names":["LogLevel","readFile","path","URL","cheerio","fs","fetchAndPackWebPage","fs","path","fileURLToPath","main"]}
package/docs/demo.md DELETED
@@ -1,46 +0,0 @@
- ---
- # 🌐 Live Demo
-
- ## What You’ll See
-
- - A fully portable HTML site
- - Every internal page inlined
- - No external requests
-
- ---
-
- ## Example Output
-
- > Download this page and open it offline. It works!
-
- <!-- [Download Demo Portable HTML](./bootstrap-packed.html) -->
-
- ---
-
- ## How It Was Generated
-
- ```bash
- portapack -i https://getbootstrap.com --recursive --max-depth 1 -o bootstrap-packed.html
- ```
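
As a quick, hypothetical spot-check of the "no external requests" claim above (not part of PortaPack itself), a small Node script can scan the packed file produced by that command for leftover absolute `http(s)` URLs in `src`/`href` attributes. The filename matches the `-o` value above; the regex is a rough heuristic, not a full HTML parse.

```js
// sanity-check.mjs — hypothetical helper, not shipped with PortaPack.
// Scans the packed output for src/href attributes that still point to
// absolute http(s) URLs, which would indicate a non-inlined asset.
import { readFile } from 'node:fs/promises';

const html = await readFile('bootstrap-packed.html', 'utf8');
const external = html.match(/\b(?:src|href)=["']https?:\/\/[^"']+/g) ?? [];

console.log(
  external.length === 0
    ? 'No external asset references found.'
    : `Found ${external.length} external reference(s):\n${external.join('\n')}`
);
```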
-
- ---
-
- ## Client-Side Navigation
-
- Recursively packed pages are wrapped in `<template>` blocks with an embedded router.
-
- ```html
- <template id="page-home">
-   <h1>Homepage</h1>
- </template>
- <template id="page-about">
-   <h1>About</h1>
- </template>
- ```
-
- ```js
- window.addEventListener('hashchange', () => {
-   const id = location.hash.replace('#', '') || 'home';
-   showPage(id);
- });
- ```
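
The `showPage()` helper referenced in that snippet is not shown in the demo doc; below is a minimal sketch of what such a helper could look like. It assumes the packed page exposes a container element (here `#content`, an assumed id) into which the matching `<template>` content is cloned — the router PortaPack actually injects may differ.

```js
// Minimal sketch of a showPage() helper matching the hashchange handler above.
// Assumes a <div id="content"> placeholder; the real injected router may differ.
function showPage(id) {
  const tpl = document.getElementById(`page-${id}`);
  const target = document.getElementById('content');
  if (!tpl || !target) return;                       // unknown page or missing container: do nothing
  target.innerHTML = '';                             // clear the previous page
  target.appendChild(tpl.content.cloneNode(true));   // deep-clone the template's content
}

// Render the initial page on load, mirroring the default used by the handler.
showPage(location.hash.replace('#', '') || 'home');
```

Because every page lives in the single packed file, navigation stays hash-based and never triggers a network request.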
package/output.html DELETED
@@ -1 +0,0 @@
1
- <html>Warning</html>
package/site-packed.html DELETED
@@ -1 +0,0 @@
1
- <html>Generated Recursive Number</html>
package/test-output.html DELETED
File without changes