@enslo/sd-metadata 1.3.0 → 1.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/types.ts","../src/parsers/a1111.ts","../src/utils/entries.ts","../src/utils/json.ts","../src/parsers/comfyui.ts","../src/parsers/detect.ts","../src/parsers/easydiffusion.ts","../src/parsers/fooocus.ts","../src/parsers/hf-space.ts","../src/parsers/invokeai.ts","../src/parsers/novelai.ts","../src/parsers/ruined-fooocus.ts","../src/parsers/stability-matrix.ts","../src/parsers/swarmui.ts","../src/parsers/tensorart.ts","../src/parsers/index.ts","../src/utils/binary.ts","../src/utils/exif-constants.ts","../src/readers/exif.ts","../src/readers/jpeg.ts","../src/readers/png.ts","../src/readers/webp.ts","../src/utils/convert.ts","../src/api/read.ts","../src/converters/utils.ts","../src/converters/chunk-encoding.ts","../src/converters/a1111.ts","../src/converters/blind.ts","../src/converters/comfyui.ts","../src/converters/easydiffusion.ts","../src/converters/invokeai.ts","../src/converters/novelai.ts","../src/converters/simple-chunk.ts","../src/converters/swarmui.ts","../src/converters/index.ts","../src/writers/exif.ts","../src/writers/jpeg.ts","../src/writers/png.ts","../src/writers/webp.ts","../src/api/write.ts","../src/serializers/a1111.ts","../src/api/write-webui.ts","../src/serializers/raw.ts"],"sourcesContent":["/**\n * Result type for explicit error handling\n */\nexport type Result<T, E> = { ok: true; value: T } | { ok: false; error: E };\n\n/**\n * Helper functions for Result type\n */\nexport const Result = {\n ok: <T, E>(value: T): Result<T, E> => ({ ok: true, value }),\n error: <T, E>(error: E): Result<T, E> => ({ ok: false, error }),\n};\n\n// ============================================================================\n// PNG Metadata Types\n// ============================================================================\n\n/**\n * Error types for PNG reading\n */\nexport type PngReadError =\n | { type: 'invalidSignature' }\n | { type: 'corruptedChunk'; message: string };\n\n/**\n * Result type for PNG metadata reading\n */\nexport type PngMetadataResult = Result<PngTextChunk[], PngReadError>;\n\n/**\n * Error types for PNG writing\n */\ntype PngWriteError = { type: 'invalidSignature' } | { type: 'noIhdrChunk' };\n\n/**\n * Result type for PNG metadata writing\n */\nexport type PngWriteResult = Result<Uint8Array, PngWriteError>;\n\n// ============================================================================\n// JPEG Writer Types\n// ============================================================================\n\n/**\n * Error types for JPEG writing\n */\ntype JpegWriteError =\n | { type: 'invalidSignature' }\n | { type: 'corruptedStructure'; message: string };\n\n/**\n * Result type for JPEG metadata writing\n */\nexport type JpegWriteResult = Result<Uint8Array, JpegWriteError>;\n\n// ============================================================================\n// WebP Writer Types\n// ============================================================================\n\n/**\n * Error types for WebP writing\n */\ntype WebpWriteError =\n | { type: 'invalidSignature' }\n | { type: 'invalidRiffStructure'; message: string };\n\n/**\n * Result type for WebP metadata writing\n */\nexport type WebpWriteResult = Result<Uint8Array, WebpWriteError>;\n\n/**\n * PNG text chunk (tEXt or iTXt)\n */\nexport type PngTextChunk = TExtChunk | ITXtChunk;\n\n// ============================================================================\n// Exif Metadata Types (shared between JPEG/WebP)\n// ============================================================================\n\n/**\n * 
Source location of a metadata segment.\n * Used for round-tripping: reading and writing back to the correct location.\n */\nexport type MetadataSegmentSource =\n | { type: 'exifUserComment' }\n | { type: 'exifImageDescription'; prefix?: string }\n | { type: 'exifMake'; prefix?: string }\n | { type: 'jpegCom' };\n\n/**\n * A single metadata segment with source tracking\n */\nexport interface MetadataSegment {\n /** Source location of this segment */\n source: MetadataSegmentSource;\n /** Raw data string */\n data: string;\n}\n\n// ============================================================================\n// Format-Agnostic Metadata Types\n// ============================================================================\n\n/**\n * A single metadata entry (keyword + text)\n *\n * This is a format-agnostic representation used by parsers.\n */\nexport interface MetadataEntry {\n /** Entry keyword (e.g., 'parameters', 'Comment', 'prompt') */\n keyword: string;\n /** Text content */\n text: string;\n}\n\n/**\n * Raw metadata for write-back (preserves original format)\n */\nexport type RawMetadata =\n | { format: 'png'; chunks: PngTextChunk[] }\n | { format: 'jpeg'; segments: MetadataSegment[] }\n | { format: 'webp'; segments: MetadataSegment[] };\n\n/**\n * Error types for JPEG reading\n */\ntype JpegReadError =\n | { type: 'invalidSignature' }\n | { type: 'parseError'; message: string };\n\n/**\n * Result type for JPEG metadata reading\n */\nexport type JpegMetadataResult = Result<MetadataSegment[], JpegReadError>;\n\n// ============================================================================\n// WebP Metadata Types\n// ============================================================================\n\n/**\n * Error types for WebP reading\n */\ntype WebpReadError =\n | { type: 'invalidSignature' }\n | { type: 'parseError'; message: string };\n\n/**\n * Result type for WebP metadata reading\n */\nexport type WebpMetadataResult = Result<MetadataSegment[], WebpReadError>;\n\n/**\n * tEXt chunk (Latin-1 encoded text)\n */\nexport interface TExtChunk {\n type: 'tEXt';\n /** Chunk keyword (e.g., 'parameters', 'Comment') */\n keyword: string;\n /** Text content */\n text: string;\n}\n\n/**\n * iTXt chunk (UTF-8 encoded international text)\n */\nexport interface ITXtChunk {\n type: 'iTXt';\n /** Chunk keyword */\n keyword: string;\n /** Compression flag (0=uncompressed, 1=compressed) */\n compressionFlag: number;\n /** Compression method (0=zlib/deflate) */\n compressionMethod: number;\n /** Language tag (BCP 47) */\n languageTag: string;\n /** Translated keyword */\n translatedKeyword: string;\n /** Text content */\n text: string;\n}\n\n/**\n * Known AI image generation software\n */\nexport type GenerationSoftware =\n | 'novelai'\n | 'comfyui'\n | 'swarmui'\n | 'tensorart'\n | 'stability-matrix'\n | 'invokeai'\n | 'forge-neo'\n | 'forge'\n | 'sd-webui'\n | 'sd-next'\n | 'civitai'\n | 'hf-space'\n | 'easydiffusion'\n | 'fooocus'\n | 'ruined-fooocus';\n\n// ============================================================================\n// Unified Metadata Types\n// ============================================================================\n\n/**\n * Base metadata fields shared by all tools\n */\ninterface BaseMetadata {\n /** Positive prompt */\n prompt: string;\n /** Negative prompt */\n negativePrompt: string;\n /** Model settings */\n model?: ModelSettings;\n /** Sampling settings */\n sampling?: SamplingSettings;\n /** Hires.fix settings (if applied) */\n hires?: HiresSettings;\n /** Upscale 
settings (if applied) */\n upscale?: UpscaleSettings;\n /** Image width */\n width: number;\n /** Image height */\n height: number;\n}\n\n/**\n * NovelAI-specific metadata\n */\nexport interface NovelAIMetadata extends BaseMetadata {\n software: 'novelai';\n /** V4 character prompts (when using character placement) */\n characterPrompts?: CharacterPrompt[];\n /** Use character coordinates for placement */\n useCoords?: boolean;\n /** Use character order */\n useOrder?: boolean;\n}\n\n/**\n * Character prompt with position (NovelAI V4)\n */\nexport interface CharacterPrompt {\n /** Character-specific prompt */\n prompt: string;\n /** Character position (normalized 0-1) */\n center?: { x: number; y: number };\n}\n\n/**\n * ComfyUI-format metadata (ComfyUI, TensorArt, Stability Matrix)\n *\n * These tools use ComfyUI-compatible workflow format.\n */\n/**\n * ComfyUI node reference (for node outputs)\n *\n * Format: [nodeId, outputIndex]\n * Example: [\"CheckpointLoader_Base\", 0]\n */\nexport type ComfyNodeReference = [nodeId: string, outputIndex: number];\n\n/**\n * ComfyUI node input value\n */\nexport type ComfyNodeInputValue =\n | string\n | number\n | boolean\n | ComfyNodeReference\n | ComfyNodeInputValue[];\n\n/**\n * ComfyUI node structure\n */\nexport interface ComfyNode {\n /** Node class type (e.g., \"CheckpointLoaderSimple\", \"KSampler\") */\n class_type: string;\n /** Node inputs */\n inputs: Record<string, ComfyNodeInputValue>;\n /** Node metadata (ComfyUI only) */\n _meta?: {\n /** Node title for display */\n title?: string;\n };\n /** Change detection hash (rare, for caching) */\n is_changed?: string[] | null;\n}\n\n/**\n * ComfyUI node graph\n *\n * Maps node IDs to their corresponding node data.\n */\nexport type ComfyNodeGraph = Record<string, ComfyNode>;\n\n/**\n * ComfyUI-format metadata (ComfyUI, TensorArt, Stability Matrix)\n *\n * These tools always have nodes in all formats.\n */\nexport interface BasicComfyUIMetadata extends BaseMetadata {\n software: 'comfyui' | 'tensorart' | 'stability-matrix';\n /**\n * ComfyUI node graph (required)\n *\n * Always present in all image formats (PNG, JPEG, WebP).\n * Structure: Record<nodeId, ComfyNode> where ComfyNode contains inputs and class_type.\n */\n nodes: ComfyNodeGraph;\n}\n\n/**\n * SwarmUI-specific metadata\n *\n * SwarmUI uses ComfyUI workflow format but nodes are only present in PNG.\n */\nexport interface SwarmUIMetadata extends BaseMetadata {\n software: 'swarmui';\n /**\n * ComfyUI node graph (optional for SwarmUI)\n *\n * Only present in PNG format. 
JPEG/WebP contain SwarmUI parameters only.\n * Structure: Record<nodeId, ComfyNode> where ComfyNode contains inputs and class_type.\n */\n nodes?: ComfyNodeGraph;\n}\n\n/**\n * ComfyUI-format metadata (union of BasicComfyUI and SwarmUI)\n *\n * This is a union type to handle different node graph requirements:\n * - ComfyUI/TensorArt/Stability Matrix: nodes are always present\n * - SwarmUI: nodes are only present in PNG format\n */\nexport type ComfyUIMetadata = BasicComfyUIMetadata | SwarmUIMetadata;\n\n/**\n * Standard metadata (SD WebUI, Forge, InvokeAI, and others)\n *\n * Baseline generation metadata without tool-specific extensions.\n * Used by most SD tools that don't require special features like\n * NovelAI's character prompts or ComfyUI's node graphs.\n */\nexport interface StandardMetadata extends BaseMetadata {\n software:\n | 'sd-webui'\n | 'sd-next'\n | 'forge'\n | 'forge-neo'\n | 'invokeai'\n | 'civitai'\n | 'hf-space'\n | 'easydiffusion'\n | 'fooocus'\n | 'ruined-fooocus';\n}\n\n/**\n * Unified generation metadata (discriminated union)\n *\n * Use `metadata.software` to narrow by specific tool:\n * ```typescript\n * if (metadata.software === 'comfyui' ||\n * metadata.software === 'tensorart' ||\n * metadata.software === 'stability-matrix' ||\n * metadata.software === 'swarmui') {\n * // TypeScript knows metadata is ComfyUIMetadata\n * if (metadata.nodes) {\n * // Access workflow graph\n * }\n * }\n * ```\n */\nexport type GenerationMetadata =\n | NovelAIMetadata\n | ComfyUIMetadata\n | StandardMetadata;\n\n/**\n * Model settings\n */\nexport interface ModelSettings {\n /** Model name */\n name?: string;\n /** Model hash */\n hash?: string;\n /** VAE name */\n vae?: string;\n}\n\n/**\n * Sampling settings\n */\nexport interface SamplingSettings {\n /** Sampler name */\n sampler?: string;\n /** Scheduler (sometimes included in sampler, sometimes separate) */\n scheduler?: string;\n /** Sampling steps */\n steps?: number;\n /** CFG scale */\n cfg?: number;\n /** Random seed */\n seed?: number;\n /** CLIP skip layers */\n clipSkip?: number;\n}\n\n/**\n * Hires.fix settings\n */\nexport interface HiresSettings {\n /** Upscale factor */\n scale?: number;\n /** Upscaler name */\n upscaler?: string;\n /** Hires steps */\n steps?: number;\n /** Hires denoising strength */\n denoise?: number;\n}\n\n/**\n * Upscale settings (post-generation)\n */\nexport interface UpscaleSettings {\n /** Upscaler name */\n upscaler?: string;\n /** Scale factor */\n scale?: number;\n}\n\n/**\n * Parse error types\n */\ntype ParseError =\n | { type: 'unsupportedFormat' }\n | { type: 'parseError'; message: string };\n\n/**\n * Result type for internal parsers\n */\nexport type InternalParseResult = Result<GenerationMetadata, ParseError>;\n\n/**\n * Parse result with 4-status design\n *\n * - `success`: Parsing succeeded, metadata and raw data available\n * - `empty`: No metadata found in the file\n * - `unrecognized`: Metadata exists but format is not recognized\n * - `invalid`: File is corrupted or not a valid image\n */\nexport type ParseResult =\n | { status: 'success'; metadata: GenerationMetadata; raw: RawMetadata }\n | { status: 'empty' }\n | { status: 'unrecognized'; raw: RawMetadata }\n | { status: 'invalid'; message?: string };\n\n// ============================================================================\n// Metadata Conversion Types\n// ============================================================================\n\n/**\n * Target format for metadata conversion\n */\nexport type 
ConversionTargetFormat = 'png' | 'jpeg' | 'webp';\n\n/**\n * Conversion error types\n */\ntype ConversionError =\n | { type: 'unsupportedSoftware'; software: string }\n | { type: 'invalidParseResult'; status: string }\n | { type: 'missingRawData' }\n | { type: 'parseError'; message: string };\n\n/**\n * Result type for metadata conversion\n */\nexport type ConversionResult = Result<RawMetadata, ConversionError>;\n","import type {\n InternalParseResult,\n MetadataEntry,\n StandardMetadata,\n} from '../types';\nimport { Result } from '../types';\n\n/**\n * Parse A1111-format metadata from entries\n *\n * A1111 format is used by:\n * - Stable Diffusion WebUI (AUTOMATIC1111)\n * - Forge\n * - Forge Neo\n * - Civitai\n * - Animagine\n *\n * Format:\n * ```\n * positive prompt\n * Negative prompt: negative prompt\n * Steps: 20, Sampler: Euler a, Schedule type: Automatic, CFG scale: 7, ...\n * ```\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseA1111(entries: MetadataEntry[]): InternalParseResult {\n // Find parameters entry (PNG uses 'parameters', JPEG/WebP uses 'Comment')\n const parametersEntry = entries.find(\n (e) => e.keyword === 'parameters' || e.keyword === 'Comment',\n );\n if (!parametersEntry) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n const text = parametersEntry.text;\n\n // Validate that this is AI-generated metadata by checking for typical markers\n // This prevents false positives from retouch software or other non-AI tools\n const hasAIMarkers =\n text.includes('Steps:') ||\n text.includes('Sampler:') ||\n text.includes('Negative prompt:');\n\n if (!hasAIMarkers) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Parse the text into sections\n const { prompt, negativePrompt, settings } = parseParametersText(text);\n\n // Parse settings key-value pairs\n const settingsMap = parseSettings(settings);\n\n // Extract dimensions (optional, defaults to \"0x0\" like SD Prompt Reader)\n const size = settingsMap.get('Size') ?? '0x0';\n const [width, height] = parseSize(size);\n\n // Determine software variant\n const version = settingsMap.get('Version');\n const app = settingsMap.get('App');\n const software = detectSoftwareVariant(version, app);\n\n // Build metadata\n const metadata: Omit<StandardMetadata, 'raw'> = {\n software,\n prompt,\n negativePrompt,\n width,\n height,\n };\n\n // Add model settings\n const modelName = settingsMap.get('Model');\n const modelHash = settingsMap.get('Model hash');\n if (modelName || modelHash) {\n metadata.model = {\n name: modelName,\n hash: modelHash,\n };\n }\n\n // Add sampling settings\n const sampler = settingsMap.get('Sampler');\n const scheduler = settingsMap.get('Schedule type');\n const steps = parseNumber(settingsMap.get('Steps'));\n const cfg = parseNumber(\n settingsMap.get('CFG scale') ?? 
settingsMap.get('CFG Scale'),\n );\n const seed = parseNumber(settingsMap.get('Seed'));\n const clipSkip = parseNumber(settingsMap.get('Clip skip'));\n\n if (\n sampler !== undefined ||\n scheduler !== undefined ||\n steps !== undefined ||\n cfg !== undefined ||\n seed !== undefined ||\n clipSkip !== undefined\n ) {\n metadata.sampling = {\n sampler,\n scheduler,\n steps,\n cfg,\n seed,\n clipSkip,\n };\n }\n\n // Add hires settings\n const hiresScale = parseNumber(settingsMap.get('Hires upscale'));\n const upscaler = settingsMap.get('Hires upscaler');\n const hiresSteps = parseNumber(settingsMap.get('Hires steps'));\n const denoise = parseNumber(settingsMap.get('Denoising strength'));\n const hiresSize = settingsMap.get('Hires size');\n\n if (\n [hiresScale, hiresSize, upscaler, hiresSteps, denoise].some(\n (v) => v !== undefined,\n )\n ) {\n const [hiresWidth] = parseSize(hiresSize ?? '');\n const scale = hiresScale ?? hiresWidth / width;\n metadata.hires = { scale, upscaler, steps: hiresSteps, denoise };\n }\n\n return Result.ok(metadata);\n}\n\n/**\n * Parse parameters text into prompt, negative prompt, and settings\n */\nfunction parseParametersText(text: string): {\n prompt: string;\n negativePrompt: string;\n settings: string;\n} {\n // Find \"Negative prompt:\" marker\n const negativeIndex = text.indexOf('Negative prompt:');\n\n // Find the settings line (starts after the last newline before \"Steps:\")\n const stepsIndex = text.indexOf('Steps:');\n\n if (negativeIndex === -1 && stepsIndex === -1) {\n // No negative prompt, no settings - just prompt\n return { prompt: text.trim(), negativePrompt: '', settings: '' };\n }\n\n if (negativeIndex === -1) {\n // No negative prompt\n const settingsStart = text.lastIndexOf('\\n', stepsIndex);\n return {\n prompt: text.slice(0, settingsStart).trim(),\n negativePrompt: '',\n settings: text.slice(settingsStart).trim(),\n };\n }\n\n if (stepsIndex === -1) {\n // No settings (unusual)\n return {\n prompt: text.slice(0, negativeIndex).trim(),\n negativePrompt: text.slice(negativeIndex + 16).trim(),\n settings: '',\n };\n }\n\n // Both exist: find where negative prompt ends and settings begin\n const settingsStart = text.lastIndexOf('\\n', stepsIndex);\n\n return {\n prompt: text.slice(0, negativeIndex).trim(),\n negativePrompt: text.slice(negativeIndex + 16, settingsStart).trim(),\n settings: text.slice(settingsStart).trim(),\n };\n}\n\n/**\n * Parse settings line into key-value map\n *\n * Format: \"Key1: value1, Key2: value2, ...\"\n * Note: Values may contain commas (e.g., model names), so we parse carefully\n */\nfunction parseSettings(settings: string): Map<string, string> {\n const result = new Map<string, string>();\n if (!settings) return result;\n\n // Match \"Key: value\" pairs\n // Key is word characters (including spaces before colon)\n // Value continues until next \"Key:\" pattern or end\n const regex =\n /([A-Za-z][A-Za-z0-9 ]*?):\\s*([^,]+?)(?=,\\s*[A-Za-z][A-Za-z0-9 ]*?:|$)/g;\n\n // Use matchAll for functional iteration\n const matches = Array.from(settings.matchAll(regex));\n\n for (const match of matches) {\n const key = (match[1] ?? '').trim();\n const value = (match[2] ?? '').trim();\n result.set(key, value);\n }\n\n return result;\n}\n\n/**\n * Parse \"WxH\" size string\n */\nfunction parseSize(size: string): [number, number] {\n const match = size.match(/(\\d+)x(\\d+)/);\n if (!match) return [0, 0];\n return [\n Number.parseInt(match[1] ?? '0', 10),\n Number.parseInt(match[2] ?? 
'0', 10),\n ];\n}\n\n/**\n * Parse number from string, returning undefined if invalid\n */\nfunction parseNumber(value: string | undefined): number | undefined {\n if (value === undefined) return undefined;\n const num = Number.parseFloat(value);\n return Number.isNaN(num) ? undefined : num;\n}\n\n/**\n * Detect software variant from Version and App strings\n */\nfunction detectSoftwareVariant(\n version: string | undefined,\n app: string | undefined,\n): 'sd-webui' | 'sd-next' | 'forge' | 'forge-neo' {\n // Check App field first (SD.Next uses this)\n if (app === 'SD.Next') return 'sd-next';\n\n // Check Version field\n if (!version) return 'sd-webui';\n if (version === 'neo') return 'forge-neo';\n // Forge uses 'classic' or 'fX.Y.Z' versions (semantic version format)\n if (version === 'classic') return 'forge';\n if (/^f\\d+\\.\\d+/.test(version)) return 'forge';\n return 'sd-webui';\n}\n","import type { MetadataEntry } from '../types';\n\n/**\n * Entry record type (readonly for immutability)\n */\nexport type EntryRecord = Readonly<Record<string, string>>;\n\n/**\n * Build an immutable record from metadata entries for keyword lookup\n *\n * @param entries - Array of metadata entries\n * @returns Readonly record mapping keyword to text\n *\n * @example\n * ```typescript\n * const record = buildEntryRecord(entries);\n * const comment = record['Comment']; // string | undefined\n * ```\n */\nexport function buildEntryRecord(entries: MetadataEntry[]): EntryRecord {\n return Object.freeze(\n Object.fromEntries(entries.map((e) => [e.keyword, e.text])),\n );\n}\n","/**\n * Type-safe JSON parsing utilities\n */\n\nimport { Result } from '../types';\n\n/**\n * Type-safe JSON parse with Result\n *\n * Wraps JSON.parse to return a Result type instead of throwing.\n * This enables const-only code without try-catch blocks.\n *\n * @param text - JSON string to parse\n * @returns Result with parsed value or parse error\n *\n * @example\n * const parsed = parseJson<MyType>(text);\n * if (!parsed.ok) return parsed;\n * const data = parsed.value;\n */\nexport function parseJson<T>(\n text: string,\n): Result<T, { type: 'parseError'; message: string }> {\n try {\n return Result.ok(JSON.parse(text) as T);\n } catch {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON',\n });\n }\n}\n","/**\n * ComfyUI metadata parser\n *\n * Parses ComfyUI-format prompt data from node graphs.\n * Also handles Civitai extraMetadata fallbacks for upscale workflows.\n */\n\nimport type {\n BasicComfyUIMetadata,\n ComfyNodeGraph,\n InternalParseResult,\n MetadataEntry,\n} from '../types';\nimport { Result } from '../types';\nimport { type EntryRecord, buildEntryRecord } from '../utils/entries';\nimport { parseJson } from '../utils/json';\n\n// =============================================================================\n// Types\n// =============================================================================\n\n/**\n * ComfyUI node structure\n */\ninterface ComfyNode {\n inputs: Record<string, unknown>;\n class_type: string;\n _meta?: { title?: string };\n}\n\n/**\n * ComfyUI prompt structure (node ID -> node)\n */\ntype ComfyPrompt = Record<string, ComfyNode>;\n\n/**\n * Civitai extraMetadata structure (nested JSON in prompt)\n */\ninterface CivitaiExtraMetadata {\n prompt?: string;\n negativePrompt?: string;\n cfgScale?: number;\n sampler?: string;\n clipSkip?: number;\n steps?: number;\n seed?: number;\n width?: number;\n height?: number;\n baseModel?: string;\n transformations?: Array<{\n type?: 
string;\n upscaleWidth?: number;\n upscaleHeight?: number;\n }>;\n}\n\n// =============================================================================\n// Main Parser\n// =============================================================================\n\n/**\n * Parse ComfyUI metadata from entries\n *\n * ComfyUI stores metadata with:\n * - prompt: JSON containing node graph with inputs\n * - workflow: JSON containing the full workflow (stored in raw, not parsed)\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseComfyUI(entries: MetadataEntry[]): InternalParseResult {\n const entryRecord = buildEntryRecord(entries);\n\n // Find prompt JSON from various possible locations\n const promptText = findPromptJson(entryRecord);\n if (!promptText) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Parse prompt JSON\n const parsed = parseJson<ComfyPrompt>(promptText);\n if (!parsed.ok) {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON in prompt entry',\n });\n }\n const prompt = parsed.value;\n\n // Verify it's ComfyUI format (has class_type)\n const nodes = Object.values(prompt);\n if (!nodes.some((node) => 'class_type' in node)) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Find key nodes\n const ksampler = findNode(prompt, ['Sampler']);\n\n // Extract prompts from CLIP nodes\n const positiveClip = findNode(prompt, ['PositiveCLIP_Base']);\n const negativeClip = findNode(prompt, ['NegativeCLIP_Base']);\n const clipPositiveText = extractText(positiveClip);\n const clipNegativeText = extractText(negativeClip);\n\n // Extract dimensions\n const latentImage = findNode(prompt, ['EmptyLatentImage']);\n const latentWidth = latentImage ? Number(latentImage.inputs.width) || 0 : 0;\n const latentHeight = latentImage ? 
Number(latentImage.inputs.height) || 0 : 0;\n\n // Apply Civitai extraMetadata fallbacks\n const extraMeta = extractExtraMetadata(prompt);\n const positiveText = clipPositiveText || extraMeta?.prompt || '';\n const negativeText = clipNegativeText || extraMeta?.negativePrompt || '';\n const width = latentWidth || extraMeta?.width || 0;\n const height = latentHeight || extraMeta?.height || 0;\n\n // Build metadata\n const metadata: Omit<BasicComfyUIMetadata, 'raw'> = {\n software: 'comfyui',\n prompt: positiveText,\n negativePrompt: negativeText,\n width,\n height,\n nodes: prompt as ComfyNodeGraph, // Store the parsed node graph\n };\n\n // Add model settings\n const checkpoint = findNode(prompt, ['CheckpointLoader_Base'])?.inputs\n ?.ckpt_name;\n\n if (checkpoint) {\n metadata.model = { name: String(checkpoint) };\n } else if (extraMeta?.baseModel) {\n metadata.model = { name: extraMeta.baseModel };\n }\n\n // Add sampling settings\n if (ksampler) {\n metadata.sampling = {\n seed: ksampler.inputs.seed as number,\n steps: ksampler.inputs.steps as number,\n cfg: ksampler.inputs.cfg as number,\n sampler: ksampler.inputs.sampler_name as string,\n scheduler: ksampler.inputs.scheduler as string,\n };\n } else if (extraMeta) {\n metadata.sampling = {\n seed: extraMeta.seed,\n steps: extraMeta.steps,\n cfg: extraMeta.cfgScale,\n sampler: extraMeta.sampler,\n };\n }\n\n // Add HiresFix/Upscaler settings\n const hiresModel = findNode(prompt, [\n 'HiresFix_ModelUpscale_UpscaleModelLoader',\n 'PostUpscale_ModelUpscale_UpscaleModelLoader',\n ])?.inputs;\n const hiresScale = findNode(prompt, [\n 'HiresFix_ImageScale',\n 'PostUpscale_ImageScale',\n ])?.inputs;\n const hiresSampler = findNode(prompt, ['HiresFix_Sampler'])?.inputs;\n\n if (hiresModel && hiresScale) {\n // Calculate scale from HiresFix_ImageScale node\n const hiresWidth = hiresScale.width as number;\n const scale =\n latentWidth > 0\n ? Math.round((hiresWidth / latentWidth) * 100) / 100\n : undefined;\n\n if (hiresSampler) {\n metadata.hires = {\n upscaler: hiresModel.model_name as string,\n scale,\n steps: hiresSampler.steps as number,\n denoise: hiresSampler.denoise as number,\n };\n } else {\n metadata.upscale = {\n upscaler: hiresModel.model_name as string,\n scale,\n };\n }\n }\n\n // Add upscale settings from Civitai extraMetadata\n if (extraMeta?.transformations) {\n const upscaleTransform = extraMeta.transformations.find(\n (t) => t.type === 'upscale',\n );\n if (upscaleTransform) {\n const originalWidth = extraMeta.width ?? 
width;\n if (originalWidth > 0 && upscaleTransform.upscaleWidth) {\n const scale = upscaleTransform.upscaleWidth / originalWidth;\n metadata.upscale = {\n scale: Math.round(scale * 100) / 100,\n };\n }\n }\n }\n\n return Result.ok(metadata);\n}\n\n// =============================================================================\n// Prompt Finding\n// =============================================================================\n\n/**\n * Find ComfyUI prompt JSON from entry record\n *\n * PNG uses 'prompt', JPEG/WebP may use Comment, Description, or Make.\n */\nfunction findPromptJson(entryRecord: EntryRecord): string | undefined {\n // PNG format: prompt entry\n if (entryRecord.prompt) {\n // Clean invalid JSON values that ComfyUI may include\n // - NaN is not valid in JSON spec (JavaScript only)\n // Replace NaN with null to make it parseable\n return entryRecord.prompt.replace(/:\\s*NaN\\b/g, ': null');\n }\n\n // JPEG/WebP format: may be in various entries\n const candidates = [\n entryRecord.Comment,\n entryRecord.Description,\n entryRecord.Make,\n entryRecord.Prompt, // save-image-extended uses this\n entryRecord.Workflow, // Not a prompt, but may contain nodes info\n ];\n\n for (const candidate of candidates) {\n if (!candidate) continue;\n\n // Check if it's JSON that looks like ComfyUI prompt\n if (candidate.startsWith('{')) {\n // Clean invalid JSON values\n // - Remove null terminators that some tools append\n // - Replace NaN with null (NaN is not valid in JSON spec)\n const cleaned = candidate\n .replace(/\\0+$/, '')\n .replace(/:\\s*NaN\\b/g, ': null');\n const parsed = parseJson<Record<string, unknown>>(cleaned);\n if (!parsed.ok) continue;\n\n // Check if it's wrapped in {\"prompt\": {...}} format\n if (parsed.value.prompt && typeof parsed.value.prompt === 'object') {\n return JSON.stringify(parsed.value.prompt);\n }\n // Check for nodes with class_type\n const values = Object.values(parsed.value);\n if (values.some((v) => v && typeof v === 'object' && 'class_type' in v)) {\n return cleaned; // Return cleaned JSON, not original candidate\n }\n }\n }\n\n return undefined;\n}\n\n// =============================================================================\n// Node Finding\n// =============================================================================\n\n/**\n * Find a node by key name (first match)\n */\nfunction findNode(prompt: ComfyPrompt, keys: string[]): ComfyNode | undefined {\n return Object.entries(prompt).find(([key]) => keys.includes(key))?.[1];\n}\n\n// =============================================================================\n// Text Extraction\n// =============================================================================\n\n/**\n * Extract text from CLIP text encode node\n */\nfunction extractText(node: ComfyNode | undefined): string {\n return typeof node?.inputs.text === 'string' ? 
node.inputs.text : '';\n}\n\n// =============================================================================\n// Civitai Extra Metadata\n// =============================================================================\n\n/**\n * Extract extraMetadata from ComfyUI prompt\n *\n * Civitai upscale workflows embed original generation params in extraMetadata field\n */\nfunction extractExtraMetadata(\n prompt: ComfyPrompt,\n): CivitaiExtraMetadata | undefined {\n const extraMetaField = (prompt as Record<string, unknown>).extraMetadata;\n if (typeof extraMetaField !== 'string') return undefined;\n\n const parsed = parseJson<CivitaiExtraMetadata>(extraMetaField);\n return parsed.ok ? parsed.value : undefined;\n}\n","import type { GenerationSoftware, MetadataEntry } from '../types';\nimport { type EntryRecord, buildEntryRecord } from '../utils/entries';\n\n/**\n * Detect generation software from metadata entries\n *\n * Analyzes entry keywords and content to identify the software that\n * generated the image. This centralized detection allows parsers to\n * focus on extracting structured data.\n *\n * @param entries - Metadata entries to analyze\n * @returns Detected software or null if unknown\n */\nexport function detectSoftware(\n entries: MetadataEntry[],\n): GenerationSoftware | null {\n const entryRecord = buildEntryRecord(entries);\n\n // Tier 1: Fastest - unique keywords\n const uniqueResult = detectUniqueKeywords(entryRecord);\n if (uniqueResult) return uniqueResult;\n\n // Tier 2: Format-specific structured detection\n const comfyResult = detectComfyUIEntries(entryRecord);\n if (comfyResult) return comfyResult;\n\n // Tier 3: Content analysis\n const text = entryRecord.parameters ?? entryRecord.Comment ?? '';\n if (text) {\n return detectFromTextContent(text);\n }\n\n return null;\n}\n\n/**\n * Detect software from unique keywords (Tier 1)\n *\n * Fast path: checks for presence of specific keywords that uniquely\n * identify each software. 
These are the most reliable indicators.\n *\n * Includes:\n * - Unique PNG chunk keywords\n * - Unique content patterns in parameters\n * - JPEG/WebP Comment JSON parsing (conversion cases)\n */\nfunction detectUniqueKeywords(\n entryRecord: EntryRecord,\n): GenerationSoftware | null {\n // ========================================\n // PNG Chunk Keywords\n // ========================================\n\n // NovelAI: Uses \"Software\" chunk with \"NovelAI\" value\n if (entryRecord.Software === 'NovelAI') {\n return 'novelai';\n }\n\n // InvokeAI: Has unique \"invokeai_metadata\" chunk\n if ('invokeai_metadata' in entryRecord) {\n return 'invokeai';\n }\n\n // TensorArt: Has unique \"generation_data\" chunk\n if ('generation_data' in entryRecord) {\n return 'tensorart';\n }\n\n // Stability Matrix: Has unique \"smproj\" chunk\n if ('smproj' in entryRecord) {\n return 'stability-matrix';\n }\n\n // Easy Diffusion: Has \"negative_prompt\" or \"Negative Prompt\" keyword\n if ('negative_prompt' in entryRecord || 'Negative Prompt' in entryRecord) {\n return 'easydiffusion';\n }\n\n // ========================================\n // Parameters Content Patterns\n // ========================================\n\n // SwarmUI: Check parameters for \"sui_image_params\"\n // MUST check here to catch it before ComfyUI detection\n const parameters = entryRecord.parameters;\n if (parameters?.includes('sui_image_params')) {\n return 'swarmui';\n }\n\n // ========================================\n // JPEG/WebP Comment JSON\n // ========================================\n\n const comment = entryRecord.Comment;\n if (comment?.startsWith('{')) {\n return detectFromCommentJson(comment);\n }\n\n return null;\n}\n\n/**\n * Detect software from Comment JSON (conversion cases)\n *\n * Handles PNG→JPEG/WebP conversions where chunks become JSON.\n */\nfunction detectFromCommentJson(comment: string): GenerationSoftware | null {\n try {\n const parsed = JSON.parse(comment) as Record<string, unknown>;\n\n // InvokeAI: Same as PNG chunk check, but from JSON\n if ('invokeai_metadata' in parsed) {\n return 'invokeai';\n }\n\n // ComfyUI: Has both prompt and workflow in JSON\n if ('prompt' in parsed && 'workflow' in parsed) {\n const workflow = parsed.workflow;\n const prompt = parsed.prompt;\n\n const isObject =\n typeof workflow === 'object' || typeof prompt === 'object';\n const isJsonString =\n (typeof workflow === 'string' && workflow.startsWith('{')) ||\n (typeof prompt === 'string' && prompt.startsWith('{'));\n\n if (isObject || isJsonString) {\n return 'comfyui';\n }\n }\n\n // SwarmUI: Same as parameters check, but from Comment JSON\n if ('sui_image_params' in parsed) {\n return 'swarmui';\n }\n\n // SwarmUI alternative format\n if ('prompt' in parsed && 'parameters' in parsed) {\n const params = String(parsed.parameters || '');\n if (\n params.includes('sui_image_params') ||\n params.includes('swarm_version')\n ) {\n return 'swarmui';\n }\n }\n } catch {\n // Invalid JSON\n }\n\n return null;\n}\n\n/**\n * Detect ComfyUI from specific entry combinations (Tier 2)\n *\n * ComfyUI has unique entry combinations that can be detected\n * before analyzing text content.\n */\nfunction detectComfyUIEntries(\n entryRecord: EntryRecord,\n): GenerationSoftware | null {\n // ComfyUI: Both prompt AND workflow chunks exist\n if ('prompt' in entryRecord && 'workflow' in entryRecord) {\n return 'comfyui';\n }\n\n // ComfyUI: Workflow chunk only (rare, but valid)\n if ('workflow' in entryRecord) {\n return 'comfyui';\n }\n\n // ComfyUI: 
Prompt chunk with workflow JSON data\n // IMPORTANT: Check SwarmUI FIRST\n if ('prompt' in entryRecord) {\n const promptText = entryRecord.prompt;\n if (promptText?.startsWith('{')) {\n // SwarmUI: Must check FIRST\n if (promptText.includes('sui_image_params')) {\n return 'swarmui';\n }\n\n // ComfyUI: Has class_type in prompt JSON\n if (promptText.includes('class_type')) {\n return 'comfyui';\n }\n }\n }\n\n return null;\n}\n\n/**\n * Detect software from text content (Tier 3)\n *\n * Analyzes text content which can be either JSON format or A1111 text format.\n * This is the slowest but most thorough detection path.\n */\nfunction detectFromTextContent(text: string): GenerationSoftware | null {\n // JSON format detection\n if (text.startsWith('{')) {\n return detectFromJsonFormat(text);\n }\n\n // A1111-style text format detection\n return detectFromA1111Format(text);\n}\n\n/**\n * Detect software from JSON-formatted metadata\n *\n * Priority order:\n * 1. Unique string patterns (most specific)\n * 2. Multi-field combinations (moderately specific)\n * 3. Generic patterns (least specific, fallback)\n */\nfunction detectFromJsonFormat(json: string): GenerationSoftware | null {\n // ========================================\n // Tier 1: Unique String Identifiers\n // ========================================\n\n // SwarmUI: Has \"sui_image_params\" (unique identifier)\n if (json.includes('sui_image_params')) {\n return 'swarmui';\n }\n\n // Ruined Fooocus: Has explicit software field\n if (\n json.includes('\"software\":\"RuinedFooocus\"') ||\n json.includes('\"software\": \"RuinedFooocus\"')\n ) {\n return 'ruined-fooocus';\n }\n\n // Easy Diffusion: Has unique field name\n if (json.includes('\"use_stable_diffusion_model\"')) {\n return 'easydiffusion';\n }\n\n // Civitai: Has distinctive namespace or field\n if (json.includes('civitai:') || json.includes('\"resource-stack\"')) {\n return 'civitai';\n }\n\n // ========================================\n // Tier 2: Multi-Field Combinations\n // ========================================\n\n // NovelAI: Has distinctive v4_prompt or noise_schedule fields\n if (\n json.includes('\"v4_prompt\"') ||\n json.includes('\"noise_schedule\"') ||\n json.includes('\"uncond_scale\"') ||\n json.includes('\"Software\":\"NovelAI\"') ||\n json.includes('\\\\\"noise_schedule\\\\\"') ||\n json.includes('\\\\\"v4_prompt\\\\\"')\n ) {\n return 'novelai';\n }\n\n // HuggingFace Space: Combination of Model + resolution\n if (json.includes('\"Model\"') && json.includes('\"resolution\"')) {\n return 'hf-space';\n }\n\n // Fooocus: Has prompt + base_model combination\n if (json.includes('\"prompt\"') && json.includes('\"base_model\"')) {\n return 'fooocus';\n }\n\n // ========================================\n // Tier 3: Generic Fallback Patterns\n // ========================================\n\n // ComfyUI: Has \"prompt\" or \"nodes\" (very generic, last resort)\n if (json.includes('\"prompt\"') || json.includes('\"nodes\"')) {\n return 'comfyui';\n }\n\n return null;\n}\n\n/**\n * Detect software from A1111-style text format\n *\n * Priority order:\n * 1. SwarmUI indicators (check first as it has unique markers)\n * 2. Version field analysis (forge, forge-neo, comfyui variants)\n * 3. App field (SD.Next)\n * 4. Resource markers (Civitai)\n * 5. 
Default A1111 format (steps + sampler)\n */\nfunction detectFromA1111Format(text: string): GenerationSoftware | null {\n // ========================================\n // Tier 1: SwarmUI Detection\n // ========================================\n\n // SwarmUI: Has sui_image_params or swarm_version\n if (text.includes('sui_image_params') || text.includes('swarm_version')) {\n return 'swarmui';\n }\n\n // ========================================\n // Tier 2: Version Field Analysis\n // ========================================\n\n const versionMatch = text.match(/Version:\\s*([^\\s,]+)/);\n if (versionMatch) {\n const version = versionMatch[1];\n\n // Forge Neo: Version starts with \"neo\"\n if (version === 'neo' || version?.startsWith('neo')) {\n return 'forge-neo';\n }\n\n // Forge: Version starts with \"f\" followed by a digit\n if (version?.startsWith('f') && /^f\\d/.test(version)) {\n return 'forge';\n }\n\n // ComfyUI: Version explicitly says \"ComfyUI\"\n if (version === 'ComfyUI') {\n return 'comfyui';\n }\n }\n\n // ========================================\n // Tier 3: Other Unique Text Markers\n // ========================================\n\n // SD.Next: Has App field with SD.Next value\n if (text.includes('App: SD.Next') || text.includes('App:SD.Next')) {\n return 'sd-next';\n }\n\n // Civitai: Has resource list marker\n if (text.includes('Civitai resources:')) {\n return 'civitai';\n }\n\n // ========================================\n // Tier 4: Default A1111 Format\n // ========================================\n\n // SD-WebUI (default): Has typical A1111 parameters\n if (text.includes('Steps:') && text.includes('Sampler:')) {\n return 'sd-webui';\n }\n\n return null;\n}\n","import type {\n InternalParseResult,\n MetadataEntry,\n StandardMetadata,\n} from '../types';\nimport { Result } from '../types';\nimport { buildEntryRecord } from '../utils/entries';\nimport { parseJson } from '../utils/json';\n\n/**\n * Easy Diffusion JSON metadata structure\n *\n * ⚠️ UNVERIFIED: This parser has not been verified with actual Easy Diffusion samples.\n * The implementation is based on reference code from other libraries but may not be\n * fully accurate. Please report any issues if you encounter problems with Easy Diffusion\n * metadata parsing.\n *\n * Easy Diffusion stores metadata as JSON in various entries:\n * - PNG: negative_prompt or Negative Prompt entry\n * - JPEG/WebP: Exif UserComment\n */\ninterface EasyDiffusionJsonMetadata {\n prompt?: string;\n negative_prompt?: string;\n Prompt?: string;\n 'Negative Prompt'?: string;\n seed?: number;\n Seed?: number;\n use_stable_diffusion_model?: string;\n 'Stable Diffusion model'?: string;\n sampler_name?: string;\n Sampler?: string;\n num_inference_steps?: number;\n Steps?: number;\n guidance_scale?: number;\n 'Guidance Scale'?: number;\n width?: number;\n Width?: number;\n height?: number;\n Height?: number;\n clip_skip?: number;\n 'Clip Skip'?: number;\n use_vae_model?: string;\n 'VAE model'?: string;\n}\n\n/**\n * Get value from JSON with fallback for different key formats\n *\n * Easy Diffusion uses two different key formats:\n * - Format A: prompt, negative_prompt, seed (snake_case)\n * - Format B: Prompt, Negative Prompt, Seed (capitalized)\n */\nfunction getValue<T>(\n json: EasyDiffusionJsonMetadata,\n keyA: keyof EasyDiffusionJsonMetadata,\n keyB: keyof EasyDiffusionJsonMetadata,\n): T | undefined {\n return (json[keyA] ?? 
json[keyB]) as T | undefined;\n}\n\n/**\n * Extract model name from path\n *\n * Easy Diffusion stores full path like \"path/to/model.safetensors\"\n */\nfunction extractModelName(path: string | undefined): string | undefined {\n if (!path) return undefined;\n // Handle both Windows and POSIX paths\n const parts = path.replace(/\\\\/g, '/').split('/');\n return parts[parts.length - 1];\n}\n\n/**\n * Parse Easy Diffusion metadata from entries\n *\n * Easy Diffusion stores metadata as JSON in:\n * - PNG: info dict with negative_prompt or Negative Prompt key\n * - JPEG/WebP: Exif UserComment\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseEasyDiffusion(\n entries: MetadataEntry[],\n): InternalParseResult {\n const entryRecord = buildEntryRecord(entries);\n\n // Check for standalone entries (PNG format)\n if (entryRecord.negative_prompt || entryRecord['Negative Prompt']) {\n // The entire info dict is what we need to process\n // Try to reconstruct from individual entries or find a JSON source\n // For PNG, Easy Diffusion stores each field as a separate chunk\n return parseFromEntries(entryRecord);\n }\n\n // Find JSON in various possible locations\n const jsonText =\n (entryRecord.parameters?.startsWith('{')\n ? entryRecord.parameters\n : undefined) ??\n (entryRecord.Comment?.startsWith('{') ? entryRecord.Comment : undefined);\n\n if (!jsonText) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Parse JSON\n const parsed = parseJson<EasyDiffusionJsonMetadata>(jsonText);\n if (!parsed.ok) {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON in Easy Diffusion metadata',\n });\n }\n\n return parseFromJson(parsed.value);\n}\n\n/**\n * Parse from individual PNG entries\n */\nfunction parseFromEntries(\n entryRecord: Record<string, string | undefined>,\n): InternalParseResult {\n const prompt = entryRecord.prompt ?? entryRecord.Prompt ?? '';\n const negativePrompt =\n entryRecord.negative_prompt ??\n entryRecord['Negative Prompt'] ??\n entryRecord.negative_prompt ??\n '';\n\n const modelPath =\n entryRecord.use_stable_diffusion_model ??\n entryRecord['Stable Diffusion model'];\n\n const width = Number(entryRecord.width ?? entryRecord.Width) || 0;\n const height = Number(entryRecord.height ?? entryRecord.Height) || 0;\n\n const metadata: Omit<StandardMetadata, 'raw'> = {\n software: 'easydiffusion',\n prompt: prompt.trim(),\n negativePrompt: negativePrompt.trim(),\n width,\n height,\n model: {\n name: extractModelName(modelPath),\n vae: entryRecord.use_vae_model ?? entryRecord['VAE model'],\n },\n sampling: {\n sampler: entryRecord.sampler_name ?? entryRecord.Sampler,\n steps:\n Number(entryRecord.num_inference_steps ?? entryRecord.Steps) ||\n undefined,\n cfg:\n Number(entryRecord.guidance_scale ?? entryRecord['Guidance Scale']) ||\n undefined,\n seed: Number(entryRecord.seed ?? entryRecord.Seed) || undefined,\n clipSkip:\n Number(entryRecord.clip_skip ?? entryRecord['Clip Skip']) || undefined,\n },\n };\n\n return Result.ok(metadata);\n}\n\n/**\n * Parse from JSON object\n */\nfunction parseFromJson(json: EasyDiffusionJsonMetadata): InternalParseResult {\n const prompt = getValue<string>(json, 'prompt', 'Prompt') ?? '';\n const negativePrompt =\n getValue<string>(json, 'negative_prompt', 'Negative Prompt') ?? '';\n\n const modelPath = getValue<string>(\n json,\n 'use_stable_diffusion_model',\n 'Stable Diffusion model',\n );\n\n const width = getValue<number>(json, 'width', 'Width') ?? 
0;\n const height = getValue<number>(json, 'height', 'Height') ?? 0;\n\n const metadata: Omit<StandardMetadata, 'raw'> = {\n software: 'easydiffusion',\n prompt: prompt.trim(),\n negativePrompt: negativePrompt.trim(),\n width,\n height,\n model: {\n name: extractModelName(modelPath),\n vae: getValue<string>(json, 'use_vae_model', 'VAE model'),\n },\n sampling: {\n sampler: getValue<string>(json, 'sampler_name', 'Sampler'),\n steps: getValue<number>(json, 'num_inference_steps', 'Steps'),\n cfg: getValue<number>(json, 'guidance_scale', 'Guidance Scale'),\n seed: getValue<number>(json, 'seed', 'Seed'),\n clipSkip: getValue<number>(json, 'clip_skip', 'Clip Skip'),\n },\n };\n\n return Result.ok(metadata);\n}\n","import type {\n InternalParseResult,\n MetadataEntry,\n StandardMetadata,\n} from '../types';\nimport { Result } from '../types';\nimport { buildEntryRecord } from '../utils/entries';\nimport { parseJson } from '../utils/json';\n\n/**\n * Fooocus JSON metadata structure\n *\n * ⚠️ UNVERIFIED: This parser has not been verified with actual Fooocus samples.\n * The implementation is based on reference code from other libraries but may not be\n * fully accurate. Please report any issues if you encounter problems with Fooocus\n * metadata parsing.\n *\n * Fooocus stores metadata as JSON in:\n * - PNG: Comment chunk\n * - JPEG: comment field\n */\ninterface FooocusJsonMetadata {\n prompt?: string;\n negative_prompt?: string;\n base_model?: string;\n refiner_model?: string;\n sampler?: string;\n scheduler?: string;\n seed?: number;\n cfg?: number;\n steps?: number;\n width?: number;\n height?: number;\n loras?: Array<{ name: string; weight: number }>;\n style_selection?: string[];\n performance?: string;\n}\n\n/**\n * Parse Fooocus metadata from entries\n *\n * Fooocus stores metadata as JSON in the Comment chunk (PNG) or\n * comment field (JPEG).\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseFooocus(entries: MetadataEntry[]): InternalParseResult {\n const entryRecord = buildEntryRecord(entries);\n\n // Find JSON in Comment entry (PNG uses Comment, JPEG uses comment)\n const jsonText = entryRecord.Comment ?? entryRecord.comment;\n\n if (!jsonText || !jsonText.startsWith('{')) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Parse JSON\n const parsed = parseJson<FooocusJsonMetadata>(jsonText);\n if (!parsed.ok) {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON in Fooocus metadata',\n });\n }\n const json = parsed.value;\n\n // Verify it's Fooocus format (has base_model)\n if (!json.base_model && !json.prompt) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n const metadata: Omit<StandardMetadata, 'raw'> = {\n software: 'fooocus',\n prompt: json.prompt?.trim() ?? '',\n negativePrompt: json.negative_prompt?.trim() ?? '',\n width: json.width ?? 0,\n height: json.height ?? 
0,\n model: {\n name: json.base_model,\n },\n sampling: {\n sampler: json.sampler,\n scheduler: json.scheduler,\n steps: json.steps,\n cfg: json.cfg,\n seed: json.seed,\n },\n };\n\n return Result.ok(metadata);\n}\n","import type {\n InternalParseResult,\n MetadataEntry,\n StandardMetadata,\n} from '../types';\nimport { Result } from '../types';\nimport { buildEntryRecord } from '../utils/entries';\nimport { parseJson } from '../utils/json';\n\n/**\n * HuggingFace Space JSON metadata structure\n */\ninterface HfSpaceJsonMetadata {\n prompt?: string;\n negative_prompt?: string;\n resolution?: string;\n guidance_scale?: number;\n num_inference_steps?: number;\n style_preset?: string;\n seed?: number;\n sampler?: string;\n Model?: string;\n 'Model hash'?: string;\n use_upscaler?: unknown;\n}\n\n/**\n * Parse HuggingFace Space metadata from entries\n *\n * HuggingFace Spaces using Gradio + Diffusers store metadata as JSON\n * in the parameters chunk.\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseHfSpace(entries: MetadataEntry[]): InternalParseResult {\n const entryRecord = buildEntryRecord(entries);\n\n // Find parameters entry\n const parametersText = entryRecord.parameters;\n if (!parametersText) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Parse JSON\n const parsed = parseJson<HfSpaceJsonMetadata>(parametersText);\n if (!parsed.ok) {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON in parameters entry',\n });\n }\n const json = parsed.value;\n\n // Parse resolution (format: \"832 x 1216\")\n const parseResolution = (res?: string) => {\n const match = res?.match(/(\\d+)\\s*x\\s*(\\d+)/);\n return match?.[1] && match?.[2]\n ? {\n width: Number.parseInt(match[1], 10),\n height: Number.parseInt(match[2], 10),\n }\n : { width: 0, height: 0 };\n };\n const { width, height } = parseResolution(json.resolution);\n\n // Build metadata\n const metadata: Omit<StandardMetadata, 'raw'> = {\n software: 'hf-space',\n prompt: json.prompt ?? '',\n negativePrompt: json.negative_prompt ?? 
'',\n width,\n height,\n model: {\n name: json.Model,\n hash: json['Model hash'],\n },\n sampling: {\n sampler: json.sampler,\n steps: json.num_inference_steps,\n cfg: json.guidance_scale,\n seed: json.seed,\n },\n };\n\n return Result.ok(metadata);\n}\n","import type {\n InternalParseResult,\n MetadataEntry,\n StandardMetadata,\n} from '../types';\nimport { Result } from '../types';\nimport { buildEntryRecord } from '../utils/entries';\nimport { parseJson } from '../utils/json';\n\n/**\n * InvokeAI metadata JSON structure\n */\ninterface InvokeAIMetadataJson {\n positive_prompt?: string;\n negative_prompt?: string;\n width?: number;\n height?: number;\n seed?: number;\n steps?: number;\n cfg_scale?: number;\n scheduler?: string;\n model?: {\n name?: string;\n hash?: string;\n };\n}\n\n/**\n * Extract InvokeAI metadata from entry record\n *\n * Checks direct 'invokeai_metadata' entry first, then tries to extract from Comment JSON\n */\nfunction extractInvokeAIMetadata(\n entryRecord: Record<string, string | undefined>,\n): string | undefined {\n // Direct invokeai_metadata entry (PNG format)\n if (entryRecord.invokeai_metadata) {\n return entryRecord.invokeai_metadata;\n }\n\n // Try to extract from Comment JSON (JPEG/WebP format)\n if (!entryRecord.Comment) {\n return undefined;\n }\n\n const commentParsed = parseJson<Record<string, unknown>>(entryRecord.Comment);\n if (!commentParsed.ok || !('invokeai_metadata' in commentParsed.value)) {\n return undefined;\n }\n\n return JSON.stringify(commentParsed.value.invokeai_metadata);\n}\n\n/**\n * Parse InvokeAI metadata from entries\n *\n * InvokeAI stores metadata with:\n * - invokeai_metadata: JSON containing generation parameters\n * - invokeai_graph: JSON containing the full node graph (not parsed here)\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseInvokeAI(entries: MetadataEntry[]): InternalParseResult {\n // Build entry record for easy access\n const entryRecord = buildEntryRecord(entries);\n\n // Find invokeai_metadata entry\n // For PNG: direct keyword\n // For JPEG/WebP: inside Comment JSON\n const metadataText = extractInvokeAIMetadata(entryRecord);\n\n if (!metadataText) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Parse metadata JSON\n const parsed = parseJson<InvokeAIMetadataJson>(metadataText);\n if (!parsed.ok) {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON in invokeai_metadata entry',\n });\n }\n const data = parsed.value;\n\n // Extract dimensions (fallback to 0 for IHDR extraction)\n const width = data.width ?? 0;\n const height = data.height ?? 0;\n\n // Build metadata\n const metadata: Omit<StandardMetadata, 'raw'> = {\n software: 'invokeai',\n prompt: data.positive_prompt ?? '',\n negativePrompt: data.negative_prompt ?? 
'',\n width,\n height,\n };\n\n // Add model settings\n if (data.model?.name || data.model?.hash) {\n metadata.model = {\n name: data.model.name,\n hash: data.model.hash,\n };\n }\n\n // Add sampling settings\n if (\n data.seed !== undefined ||\n data.steps !== undefined ||\n data.cfg_scale !== undefined ||\n data.scheduler !== undefined\n ) {\n metadata.sampling = {\n seed: data.seed,\n steps: data.steps,\n cfg: data.cfg_scale,\n sampler: data.scheduler,\n };\n }\n\n return Result.ok(metadata);\n}\n","import type {\n CharacterPrompt,\n InternalParseResult,\n MetadataEntry,\n NovelAIMetadata,\n} from '../types';\nimport { Result } from '../types';\nimport { buildEntryRecord } from '../utils/entries';\nimport { parseJson } from '../utils/json';\n\n/**\n * NovelAI Comment JSON structure\n */\ninterface NovelAIComment {\n prompt: string;\n uc?: string;\n steps?: number;\n height?: number;\n width?: number;\n scale?: number;\n seed?: number;\n noise_schedule?: string;\n sampler?: string;\n /** V4 prompt structure */\n v4_prompt?: V4Prompt;\n /** V4 negative prompt structure */\n v4_negative_prompt?: V4Prompt;\n}\n\n/**\n * NovelAI V4 prompt structure\n */\ninterface V4Prompt {\n caption?: {\n base_caption?: string;\n char_captions?: Array<{\n char_caption?: string;\n centers?: Array<{ x: number; y: number }>;\n }>;\n };\n use_coords?: boolean;\n use_order?: boolean;\n}\n\n/**\n * Parse NovelAI metadata from entries\n *\n * NovelAI stores metadata with:\n * - Software: \"NovelAI\"\n * - Comment: JSON containing generation parameters\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseNovelAI(entries: MetadataEntry[]): InternalParseResult {\n // Build entry record for easy access\n const entryRecord = buildEntryRecord(entries);\n\n // Verify NovelAI format\n if (entryRecord.Software !== 'NovelAI') {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Parse Comment JSON\n const commentText = entryRecord.Comment;\n if (!commentText) {\n return Result.error({\n type: 'parseError',\n message: 'Missing Comment entry',\n });\n }\n\n const parsed = parseJson<NovelAIComment>(commentText);\n if (!parsed.ok) {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON in Comment entry',\n });\n }\n const comment = parsed.value;\n\n // Extract dimensions (fallback to 0 for IHDR extraction)\n const width = comment.width ?? 0;\n const height = comment.height ?? 0;\n\n // Extract prompt - prefer V4 base_caption if available\n const prompt =\n comment.v4_prompt?.caption?.base_caption ?? comment.prompt ?? '';\n const negativePrompt =\n comment.v4_negative_prompt?.caption?.base_caption ?? comment.uc ?? 
'';\n\n // Build metadata\n const metadata: Omit<NovelAIMetadata, 'raw'> = {\n software: 'novelai',\n prompt,\n negativePrompt,\n width,\n height,\n };\n\n // Add sampling settings if present\n if (\n comment.steps !== undefined ||\n comment.scale !== undefined ||\n comment.seed !== undefined ||\n comment.noise_schedule !== undefined ||\n comment.sampler !== undefined\n ) {\n metadata.sampling = {\n steps: comment.steps,\n cfg: comment.scale,\n seed: comment.seed,\n sampler: comment.sampler,\n scheduler: comment.noise_schedule,\n };\n }\n\n // Extract V4 character prompts\n const charCaptions = comment.v4_prompt?.caption?.char_captions;\n if (charCaptions && charCaptions.length > 0) {\n metadata.characterPrompts = charCaptions\n .map((cc): CharacterPrompt | null => {\n if (!cc.char_caption) return null;\n return {\n prompt: cc.char_caption,\n center: cc.centers?.[0],\n };\n })\n .filter((cp): cp is CharacterPrompt => cp !== null);\n\n metadata.useCoords = comment.v4_prompt?.use_coords;\n metadata.useOrder = comment.v4_prompt?.use_order;\n }\n\n return Result.ok(metadata);\n}\n","import type {\n InternalParseResult,\n MetadataEntry,\n StandardMetadata,\n} from '../types';\nimport { Result } from '../types';\nimport { buildEntryRecord } from '../utils/entries';\nimport { parseJson } from '../utils/json';\n\n/**\n * Ruined Fooocus JSON metadata structure\n *\n * Ruined Fooocus stores metadata as JSON in the `parameters` chunk.\n * It has a `software` field set to \"RuinedFooocus\" for identification.\n */\ninterface RuinedFooocusJsonMetadata {\n Prompt?: string;\n Negative?: string;\n steps?: number;\n cfg?: number;\n width?: number;\n height?: number;\n seed?: number;\n sampler_name?: string;\n scheduler?: string;\n base_model_name?: string;\n base_model_hash?: string;\n loras?: Array<{ name: string; weight: number }>;\n clip_skip?: number;\n software?: string;\n}\n\n/**\n * Parse Ruined Fooocus metadata from entries\n *\n * Ruined Fooocus stores metadata as JSON in the `parameters` chunk,\n * with a `software` field set to \"RuinedFooocus\".\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseRuinedFooocus(\n entries: MetadataEntry[],\n): InternalParseResult {\n const entryRecord = buildEntryRecord(entries);\n\n // Find JSON in parameters entry\n const jsonText = entryRecord.parameters;\n\n if (!jsonText || !jsonText.startsWith('{')) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Parse JSON\n const parsed = parseJson<RuinedFooocusJsonMetadata>(jsonText);\n if (!parsed.ok) {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON in Ruined Fooocus metadata',\n });\n }\n const json = parsed.value;\n\n // Verify it's Ruined Fooocus format\n if (json.software !== 'RuinedFooocus') {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n const metadata: Omit<StandardMetadata, 'raw'> = {\n software: 'ruined-fooocus',\n prompt: json.Prompt?.trim() ?? '',\n negativePrompt: json.Negative?.trim() ?? '',\n width: json.width ?? 0,\n height: json.height ?? 
0,\n model: {\n name: json.base_model_name,\n hash: json.base_model_hash,\n },\n sampling: {\n sampler: json.sampler_name,\n scheduler: json.scheduler,\n steps: json.steps,\n cfg: json.cfg,\n seed: json.seed,\n clipSkip: json.clip_skip,\n },\n };\n\n return Result.ok(metadata);\n}\n","import type {\n BasicComfyUIMetadata,\n InternalParseResult,\n MetadataEntry,\n} from '../types';\nimport { Result } from '../types';\nimport { buildEntryRecord } from '../utils/entries';\nimport { parseJson } from '../utils/json';\nimport { parseComfyUI } from './comfyui';\n\n/**\n * Stability Matrix parameters-json structure\n */\ninterface StabilityMatrixJson {\n PositivePrompt?: string;\n NegativePrompt?: string;\n Width?: number;\n Height?: number;\n Seed?: number;\n Steps?: number;\n CfgScale?: number;\n Sampler?: string;\n ModelName?: string;\n ModelHash?: string;\n}\n\n/**\n * Parse Stability Matrix metadata from entries\n *\n * Stability Matrix stores metadata with:\n * - prompt: ComfyUI-compatible workflow JSON (primary source)\n * - parameters-json: JSON containing generation parameters\n * - Used to override prompts (more complete than workflow)\n * - parameters: A1111-style text (fallback)\n * - smproj: Project data (not parsed here)\n *\n * Strategy:\n * 1. Parse as ComfyUI workflow (workflow, model, sampling, etc.)\n * 2. Override prompts from parameters-json (more complete)\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseStabilityMatrix(\n entries: MetadataEntry[],\n): InternalParseResult {\n // Build entry record for easy access\n const entryRecord = buildEntryRecord(entries);\n\n // First, parse as ComfyUI workflow to get base metadata\n const comfyResult = parseComfyUI(entries);\n if (!comfyResult.ok || comfyResult.value.software !== 'comfyui') {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Override software to stability-matrix\n const metadata: Omit<BasicComfyUIMetadata, 'raw'> = {\n ...comfyResult.value,\n software: 'stability-matrix',\n };\n\n // Find parameters-json entry for prompt override\n const jsonText = entryRecord['parameters-json'];\n if (jsonText) {\n const parsed = parseJson<StabilityMatrixJson>(jsonText);\n if (parsed.ok) {\n const data = parsed.value;\n\n // Override prompts from parameters-json (more complete than workflow)\n if (data.PositivePrompt !== undefined) {\n metadata.prompt = data.PositivePrompt;\n }\n if (data.NegativePrompt !== undefined) {\n metadata.negativePrompt = data.NegativePrompt;\n }\n\n // Override model information from parameters-json\n if (data.ModelName !== undefined || data.ModelHash !== undefined) {\n metadata.model = {\n name: data.ModelName,\n hash: data.ModelHash,\n };\n }\n }\n }\n\n return Result.ok(metadata);\n}\n","import type {\n ComfyNodeGraph,\n InternalParseResult,\n MetadataEntry,\n SwarmUIMetadata,\n} from '../types';\nimport { Result } from '../types';\nimport { buildEntryRecord } from '../utils/entries';\nimport { parseJson } from '../utils/json';\n\n/**\n * SwarmUI parameters JSON structure\n */\ninterface SwarmUIParameters {\n sui_image_params?: {\n prompt?: string;\n negativeprompt?: string;\n model?: string;\n seed?: number;\n steps?: number;\n cfgscale?: number;\n width?: number;\n height?: number;\n sampler?: string;\n scheduler?: string;\n // Refiner/Upscale settings\n refinerupscale?: number;\n refinerupscalemethod?: string;\n refinercontrolpercentage?: number;\n };\n}\n\n/**\n * Extract SwarmUI parameters from entry record\n *\n * Checks direct 
'parameters' entry first, then tries to extract from Comment JSON.\n * After converter fix, Comment JSON contains direct sui_image_params (native WebP format).\n */\nfunction extractSwarmUIParameters(\n entryRecord: Record<string, string | undefined>,\n): string | undefined {\n // Direct parameters entry (PNG format)\n if (entryRecord.parameters) {\n return entryRecord.parameters;\n }\n\n // Try to extract from Comment JSON (JPEG/WebP format)\n if (!entryRecord.Comment) {\n return undefined;\n }\n\n const commentParsed = parseJson<Record<string, unknown>>(entryRecord.Comment);\n if (!commentParsed.ok) {\n return undefined;\n }\n\n // Native WebP format: direct sui_image_params\n if ('sui_image_params' in commentParsed.value) {\n return entryRecord.Comment; // Return as-is to preserve full structure\n }\n\n return undefined;\n}\n\n/**\n * Parse SwarmUI metadata from entries\n *\n * SwarmUI stores metadata with:\n * - parameters: JSON containing sui_image_params\n * - prompt: ComfyUI-style node graph (fallback)\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseSwarmUI(entries: MetadataEntry[]): InternalParseResult {\n // Build entry record for easy access\n const entryRecord = buildEntryRecord(entries);\n\n // Find parameters entry\n // For PNG: direct keyword 'parameters'\n // For JPEG/WebP: inside Comment JSON\n const parametersText = extractSwarmUIParameters(entryRecord);\n\n if (!parametersText) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Parse parameters JSON\n const parsed = parseJson<SwarmUIParameters>(parametersText);\n if (!parsed.ok) {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON in parameters entry',\n });\n }\n\n // Verify SwarmUI format (has sui_image_params)\n const params = parsed.value.sui_image_params;\n if (!params) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Extract dimensions (fallback to 0 for IHDR extraction)\n const width = params.width ?? 0;\n const height = params.height ?? 0;\n\n // Build metadata\n const metadata: Omit<SwarmUIMetadata, 'raw'> = {\n software: 'swarmui',\n prompt: params.prompt ?? '',\n negativePrompt: params.negativeprompt ?? 
'',\n width,\n height,\n };\n\n // Add nodes from prompt chunk (PNG format) or Make field (JPEG/WebP extended format)\n const promptSource = entryRecord.prompt || entryRecord.Make;\n if (promptSource) {\n const promptParsed = parseJson(promptSource);\n if (promptParsed.ok) {\n metadata.nodes = promptParsed.value as ComfyNodeGraph;\n }\n }\n\n // Add model settings\n if (params.model) {\n metadata.model = {\n name: params.model,\n };\n }\n\n // Add sampling settings\n if (\n params.seed !== undefined ||\n params.steps !== undefined ||\n params.cfgscale !== undefined ||\n params.sampler !== undefined ||\n params.scheduler !== undefined\n ) {\n metadata.sampling = {\n seed: params.seed,\n steps: params.steps,\n cfg: params.cfgscale,\n sampler: params.sampler,\n scheduler: params.scheduler,\n };\n }\n\n // Add hires/upscale settings\n if (\n params.refinerupscale !== undefined ||\n params.refinerupscalemethod !== undefined ||\n params.refinercontrolpercentage !== undefined\n ) {\n metadata.hires = {\n scale: params.refinerupscale,\n upscaler: params.refinerupscalemethod,\n denoise: params.refinercontrolpercentage,\n };\n }\n\n return Result.ok(metadata);\n}\n","import type {\n BasicComfyUIMetadata,\n ComfyNodeGraph,\n InternalParseResult,\n MetadataEntry,\n} from '../types';\nimport { Result } from '../types';\nimport { buildEntryRecord } from '../utils/entries';\nimport { parseJson } from '../utils/json';\n\n/**\n * TensorArt generation_data JSON structure\n */\ninterface TensorArtGenerationData {\n prompt?: string;\n negativePrompt?: string;\n width?: number;\n height?: number;\n seed?: string;\n steps?: number;\n cfgScale?: number;\n clipSkip?: number;\n baseModel?: {\n modelFileName?: string;\n hash?: string;\n };\n}\n\n/**\n * Parse TensorArt metadata from entries\n *\n * TensorArt stores metadata with:\n * - generation_data: JSON containing generation parameters\n * - prompt: ComfyUI-style node graph (workflow)\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseTensorArt(entries: MetadataEntry[]): InternalParseResult {\n // Build entry record for easy access\n const entryRecord = buildEntryRecord(entries);\n\n // Find generation_data entry\n const dataText = entryRecord.generation_data;\n if (!dataText) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Parse JSON (TensorArt appends NUL characters)\n const cleanedText = dataText.replace(/\\0+$/, '');\n const parsed = parseJson<TensorArtGenerationData>(cleanedText);\n if (!parsed.ok) {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON in generation_data entry',\n });\n }\n const data = parsed.value;\n\n // Extract dimensions (fallback to 0 for IHDR extraction)\n const width = data.width ?? 0;\n const height = data.height ?? 0;\n\n // Parse nodes from prompt chunk (required for TensorArt)\n const promptChunk = entryRecord.prompt;\n if (!promptChunk) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n const promptParsed = parseJson(promptChunk);\n if (!promptParsed.ok) {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON in prompt chunk',\n });\n }\n\n // Build metadata\n const metadata: Omit<BasicComfyUIMetadata, 'raw'> = {\n software: 'tensorart',\n prompt: data.prompt ?? '',\n negativePrompt: data.negativePrompt ?? 
'',\n width,\n height,\n nodes: promptParsed.value as ComfyNodeGraph,\n };\n\n // Add model settings\n if (data.baseModel?.modelFileName || data.baseModel?.hash) {\n metadata.model = {\n name: data.baseModel.modelFileName,\n hash: data.baseModel.hash,\n };\n }\n\n // Add sampling settings\n if (\n data.seed !== undefined ||\n data.steps !== undefined ||\n data.cfgScale !== undefined ||\n data.clipSkip !== undefined\n ) {\n const baseSeed = data.seed ? Number(data.seed) : undefined;\n\n metadata.sampling = {\n seed:\n baseSeed === -1\n ? findActualSeed(promptParsed.value as ComfyNodeGraph)\n : baseSeed,\n steps: data.steps,\n cfg: data.cfgScale,\n clipSkip: data.clipSkip,\n };\n }\n\n return Result.ok(metadata);\n}\n\n/**\n * Find actual seed value from KSampler node in ComfyUI node graph\n *\n * @param nodes - ComfyUI node graph\n * @returns Actual seed value, or -1 if not found\n */\nfunction findActualSeed(nodes: ComfyNodeGraph): number {\n const samplerNode = findSamplerNode(nodes);\n return samplerNode && typeof samplerNode.inputs.seed === 'number'\n ? samplerNode.inputs.seed\n : -1;\n}\n\n/**\n * Find KSampler node in ComfyUI node graph\n *\n * @param nodes - ComfyUI node graph\n * @returns KSampler node or undefined\n */\nfunction findSamplerNode(\n nodes: ComfyNodeGraph,\n): { inputs: Record<string, unknown>; class_type: string } | undefined {\n return Object.values(nodes).find(\n (node) =>\n node.class_type === 'KSampler' ||\n node.class_type.toLowerCase().includes('sampler'),\n );\n}\n","import type { InternalParseResult, MetadataEntry } from '../types';\nimport { Result } from '../types';\nimport { parseA1111 } from './a1111';\nimport { parseComfyUI } from './comfyui';\nimport { detectSoftware } from './detect';\nimport { parseEasyDiffusion } from './easydiffusion';\nimport { parseFooocus } from './fooocus';\nimport { parseHfSpace } from './hf-space';\nimport { parseInvokeAI } from './invokeai';\nimport { parseNovelAI } from './novelai';\nimport { parseRuinedFooocus } from './ruined-fooocus';\nimport { parseStabilityMatrix } from './stability-matrix';\nimport { parseSwarmUI } from './swarmui';\nimport { parseTensorArt } from './tensorart';\n\n/**\n * Parse metadata entries to unified format\n *\n * Automatically detects the generation software and applies the appropriate parser.\n * This function returns metadata WITHOUT the `raw` field; callers should attach it.\n *\n * @param entries - Format-agnostic metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseMetadata(entries: MetadataEntry[]): InternalParseResult {\n // Detect software from entries\n const software = detectSoftware(entries);\n\n // Route to appropriate parser based on detected software\n switch (software) {\n case 'novelai':\n return parseNovelAI(entries);\n\n case 'sd-webui':\n case 'sd-next':\n case 'forge':\n case 'forge-neo':\n return parseA1111(entries);\n\n case 'hf-space':\n return parseHfSpace(entries);\n\n case 'civitai': {\n // Civitai can use either ComfyUI JSON or A1111 text format\n const comfyResult = parseComfyUI(entries);\n if (comfyResult.ok) return comfyResult;\n return parseA1111(entries);\n }\n\n case 'comfyui': {\n // ComfyUI can use either JSON or A1111 text format (e.g., comfy-image-saver)\n const comfyResult = parseComfyUI(entries);\n if (comfyResult.ok) return comfyResult;\n return parseA1111(entries);\n }\n\n case 'invokeai':\n return parseInvokeAI(entries);\n\n case 'swarmui':\n return parseSwarmUI(entries);\n\n case 'tensorart':\n return 
parseTensorArt(entries);\n\n case 'stability-matrix':\n return parseStabilityMatrix(entries);\n\n case 'easydiffusion':\n return parseEasyDiffusion(entries);\n\n case 'fooocus':\n return parseFooocus(entries);\n\n case 'ruined-fooocus':\n return parseRuinedFooocus(entries);\n\n default: {\n // Try each parser in order\n // First try A1111 format (most common)\n const a1111Result = parseA1111(entries);\n if (a1111Result.ok) return a1111Result;\n\n // Then try ComfyUI\n const comfyResult = parseComfyUI(entries);\n if (comfyResult.ok) return comfyResult;\n\n // Then try InvokeAI\n const invokeResult = parseInvokeAI(entries);\n if (invokeResult.ok) return invokeResult;\n\n // Then try SwarmUI\n const swarmResult = parseSwarmUI(entries);\n if (swarmResult.ok) return swarmResult;\n\n // Then try TensorArt\n const tensorResult = parseTensorArt(entries);\n if (tensorResult.ok) return tensorResult;\n\n // Then try Stability Matrix\n const stabilityResult = parseStabilityMatrix(entries);\n if (stabilityResult.ok) return stabilityResult;\n\n // Finally try NovelAI\n const novelaiResult = parseNovelAI(entries);\n if (novelaiResult.ok) return novelaiResult;\n\n return Result.error({ type: 'unsupportedFormat' });\n }\n }\n}\n","/**\n * Binary data utilities for reading/writing multi-byte integers\n */\n\n/**\n * Read 3-byte little-endian unsigned integer\n *\n * @param data - Byte array\n * @param offset - Offset to start reading from\n * @returns 24-bit unsigned integer\n */\nexport function readUint24LE(data: Uint8Array, offset: number): number {\n return (\n (data[offset] ?? 0) |\n ((data[offset + 1] ?? 0) << 8) |\n ((data[offset + 2] ?? 0) << 16)\n );\n}\n\n/**\n * Read 4-byte big-endian unsigned integer\n *\n * @param data - Byte array\n * @param offset - Offset to start reading from\n * @returns 32-bit unsigned integer\n */\nexport function readUint32BE(data: Uint8Array, offset: number): number {\n return (\n ((data[offset] ?? 0) << 24) |\n ((data[offset + 1] ?? 0) << 16) |\n ((data[offset + 2] ?? 0) << 8) |\n (data[offset + 3] ?? 0)\n );\n}\n\n/**\n * Read 4-byte little-endian unsigned integer\n *\n * @param data - Byte array\n * @param offset - Offset to start reading from\n * @returns 32-bit unsigned integer\n */\nexport function readUint32LE(data: Uint8Array, offset: number): number {\n return (\n (data[offset] ?? 0) |\n ((data[offset + 1] ?? 0) << 8) |\n ((data[offset + 2] ?? 0) << 16) |\n ((data[offset + 3] ?? 0) << 24)\n );\n}\n\n/**\n * Write 4-byte big-endian unsigned integer\n *\n * @param data - Byte array to write to\n * @param offset - Offset to start writing at\n * @param value - 32-bit unsigned integer value\n */\nexport function writeUint32BE(\n data: Uint8Array,\n offset: number,\n value: number,\n): void {\n data[offset] = (value >>> 24) & 0xff;\n data[offset + 1] = (value >>> 16) & 0xff;\n data[offset + 2] = (value >>> 8) & 0xff;\n data[offset + 3] = value & 0xff;\n}\n\n/**\n * Read 4-byte chunk type as ASCII string\n *\n * @param data - Byte array\n * @param offset - Offset to start reading from\n * @returns 4-character ASCII string\n */\nexport function readChunkType(data: Uint8Array, offset: number): string {\n return String.fromCharCode(\n data[offset] ?? 0,\n data[offset + 1] ?? 0,\n data[offset + 2] ?? 0,\n data[offset + 3] ?? 
0,\n );\n}\n\n/**\n * Read 2-byte unsigned integer with endianness support\n *\n * @param data - Byte array\n * @param offset - Offset to start reading from\n * @param isLittleEndian - If true, read as little-endian\n * @returns 16-bit unsigned integer\n */\nexport function readUint16(\n data: Uint8Array,\n offset: number,\n isLittleEndian: boolean,\n): number {\n if (isLittleEndian) {\n return (data[offset] ?? 0) | ((data[offset + 1] ?? 0) << 8);\n }\n return ((data[offset] ?? 0) << 8) | (data[offset + 1] ?? 0);\n}\n\n/**\n * Read 4-byte unsigned integer with endianness support\n *\n * @param data - Byte array\n * @param offset - Offset to start reading from\n * @param isLittleEndian - If true, read as little-endian\n * @returns 32-bit unsigned integer\n */\nexport function readUint32(\n data: Uint8Array,\n offset: number,\n isLittleEndian: boolean,\n): number {\n if (isLittleEndian) {\n return (\n (data[offset] ?? 0) |\n ((data[offset + 1] ?? 0) << 8) |\n ((data[offset + 2] ?? 0) << 16) |\n ((data[offset + 3] ?? 0) << 24)\n );\n }\n return (\n ((data[offset] ?? 0) << 24) |\n ((data[offset + 1] ?? 0) << 16) |\n ((data[offset + 2] ?? 0) << 8) |\n (data[offset + 3] ?? 0)\n );\n}\n\n/**\n * Compare two Uint8Arrays for equality\n *\n * @param a - First array\n * @param b - Second array\n * @returns true if arrays have same length and all elements match\n */\nexport function arraysEqual(a: Uint8Array, b: Uint8Array): boolean {\n if (a.length !== b.length) return false;\n for (let i = 0; i < a.length; i++) {\n if (a[i] !== b[i]) return false;\n }\n return true;\n}\n\n/**\n * Write 2-byte unsigned integer with endianness support\n *\n * @param data - Byte array to write to\n * @param offset - Offset to start writing at\n * @param value - 16-bit unsigned integer value\n * @param isLittleEndian - If true, write as little-endian\n */\nexport function writeUint16(\n data: Uint8Array,\n offset: number,\n value: number,\n isLittleEndian: boolean,\n): void {\n if (isLittleEndian) {\n data[offset] = value & 0xff;\n data[offset + 1] = (value >>> 8) & 0xff;\n } else {\n data[offset] = (value >>> 8) & 0xff;\n data[offset + 1] = value & 0xff;\n }\n}\n\n/**\n * Write 4-byte unsigned integer with endianness support\n *\n * @param data - Byte array to write to\n * @param offset - Offset to start writing at\n * @param value - 32-bit unsigned integer value\n * @param isLittleEndian - If true, write as little-endian\n */\nexport function writeUint32(\n data: Uint8Array,\n offset: number,\n value: number,\n isLittleEndian: boolean,\n): void {\n if (isLittleEndian) {\n data[offset] = value & 0xff;\n data[offset + 1] = (value >>> 8) & 0xff;\n data[offset + 2] = (value >>> 16) & 0xff;\n data[offset + 3] = (value >>> 24) & 0xff;\n } else {\n data[offset] = (value >>> 24) & 0xff;\n data[offset + 1] = (value >>> 16) & 0xff;\n data[offset + 2] = (value >>> 8) & 0xff;\n data[offset + 3] = value & 0xff;\n }\n}\n\n/**\n * Write 4-byte little-endian unsigned integer\n *\n * @param data - Byte array to write to\n * @param offset - Offset to start writing at\n * @param value - 32-bit unsigned integer value\n */\nexport function writeUint32LE(\n data: Uint8Array,\n offset: number,\n value: number,\n): void {\n data[offset] = value & 0xff;\n data[offset + 1] = (value >>> 8) & 0xff;\n data[offset + 2] = (value >>> 16) & 0xff;\n data[offset + 3] = (value >>> 24) & 0xff;\n}\n\n/**\n * Supported image formats\n */\nexport type ImageFormat = 'png' | 'jpeg' | 'webp';\n\n/**\n * Validates if data starts with PNG signature\n */\nexport 
function isPng(data: Uint8Array): boolean {\n if (data.length < 8) return false;\n return (\n data[0] === 0x89 &&\n data[1] === 0x50 &&\n data[2] === 0x4e &&\n data[3] === 0x47 &&\n data[4] === 0x0d &&\n data[5] === 0x0a &&\n data[6] === 0x1a &&\n data[7] === 0x0a\n );\n}\n\n/**\n * Validates if data starts with JPEG signature\n */\nexport function isJpeg(data: Uint8Array): boolean {\n if (data.length < 2) return false;\n return data[0] === 0xff && data[1] === 0xd8;\n}\n\n/**\n * Validates if data starts with WebP signature\n */\nexport function isWebp(data: Uint8Array): boolean {\n if (data.length < 12) return false;\n return (\n data[0] === 0x52 && // R\n data[1] === 0x49 && // I\n data[2] === 0x46 && // F\n data[3] === 0x46 && // F\n data[8] === 0x57 && // W\n data[9] === 0x45 && // E\n data[10] === 0x42 && // B\n data[11] === 0x50 // P\n );\n}\n\n/**\n * Detect image format from magic bytes\n */\nexport function detectFormat(data: Uint8Array): ImageFormat | null {\n if (isPng(data)) return 'png';\n if (isJpeg(data)) return 'jpeg';\n if (isWebp(data)) return 'webp';\n return null;\n}\n","/**\n * Exif/TIFF tag constants\n *\n * Shared between readers and writers for consistent tag handling.\n */\n\n/** UserComment tag ID in Exif */\nexport const USER_COMMENT_TAG = 0x9286;\n\n/** ImageDescription tag ID */\nexport const IMAGE_DESCRIPTION_TAG = 0x010e;\n\n/** Make tag ID */\nexport const MAKE_TAG = 0x010f;\n\n/** Exif IFD pointer tag */\nexport const EXIF_IFD_POINTER_TAG = 0x8769;\n","/**\n * Exif reading utilities\n *\n * Functions for parsing Exif/TIFF structures and extracting metadata segments.\n */\n\nimport type { MetadataSegment } from '../types';\nimport { readUint16, readUint32 } from '../utils/binary';\nimport {\n EXIF_IFD_POINTER_TAG,\n IMAGE_DESCRIPTION_TAG,\n MAKE_TAG,\n USER_COMMENT_TAG,\n} from '../utils/exif-constants';\n\n/**\n * Parse Exif TIFF structure and extract all metadata segments\n *\n * Extracts metadata from:\n * - ImageDescription (0x010E) - Used by ComfyUI Save Image Extended (with \"Workflow:\" prefix)\n * - Make (0x010F) - Used by ComfyUI Save Image Extended (with \"Prompt:\" prefix)\n * - UserComment (0x9286) - Used by most tools\n *\n * @param exifData - TIFF data (starting with II/MM byte order marker)\n * @returns Array of metadata segments found\n */\nexport function parseExifMetadataSegments(\n exifData: Uint8Array,\n): MetadataSegment[] {\n if (exifData.length < 8) return [];\n\n // Check TIFF byte order\n const isLittleEndian = exifData[0] === 0x49 && exifData[1] === 0x49; // \"II\"\n const isBigEndian = exifData[0] === 0x4d && exifData[1] === 0x4d; // \"MM\"\n\n if (!isLittleEndian && !isBigEndian) return [];\n\n // Verify TIFF magic number (42)\n const magic = readUint16(exifData, 2, isLittleEndian);\n if (magic !== 42) return [];\n\n // Get IFD0 offset\n const ifd0Offset = readUint32(exifData, 4, isLittleEndian);\n\n // Extract all tags from IFD0\n const ifd0Segments = extractTagsFromIfd(exifData, ifd0Offset, isLittleEndian);\n\n // Find Exif IFD and extract UserComment from there\n const exifIfdOffset = findExifIfdOffset(exifData, ifd0Offset, isLittleEndian);\n const exifIfdSegments =\n exifIfdOffset !== null\n ? 
extractTagsFromIfd(exifData, exifIfdOffset, isLittleEndian)\n : [];\n\n return [...ifd0Segments, ...exifIfdSegments];\n}\n\n/**\n * Extract metadata tags from an IFD\n */\nfunction extractTagsFromIfd(\n data: Uint8Array,\n ifdOffset: number,\n isLittleEndian: boolean,\n): MetadataSegment[] {\n const segments: MetadataSegment[] = [];\n\n if (ifdOffset + 2 > data.length) return segments;\n\n const entryCount = readUint16(data, ifdOffset, isLittleEndian);\n let offset = ifdOffset + 2;\n\n for (let i = 0; i < entryCount; i++) {\n if (offset + 12 > data.length) return segments;\n\n const tag = readUint16(data, offset, isLittleEndian);\n const type = readUint16(data, offset + 2, isLittleEndian);\n const count = readUint32(data, offset + 4, isLittleEndian);\n\n // Calculate data size based on type\n const typeSize = getTypeSize(type);\n const dataSize = count * typeSize;\n\n let valueOffset: number;\n if (dataSize <= 4) {\n valueOffset = offset + 8;\n } else {\n valueOffset = readUint32(data, offset + 8, isLittleEndian);\n }\n\n if (valueOffset + dataSize > data.length) {\n offset += 12;\n continue;\n }\n\n const tagData = data.slice(valueOffset, valueOffset + dataSize);\n\n // Process known tags\n if (tag === IMAGE_DESCRIPTION_TAG) {\n const text = decodeAsciiString(tagData);\n if (text) {\n const prefix = extractPrefix(text);\n segments.push({\n source: { type: 'exifImageDescription', prefix: prefix ?? undefined },\n data: prefix ? text.slice(prefix.length + 2) : text,\n });\n }\n } else if (tag === MAKE_TAG) {\n const text = decodeAsciiString(tagData);\n if (text) {\n const prefix = extractPrefix(text);\n segments.push({\n source: { type: 'exifMake', prefix: prefix ?? undefined },\n data: prefix ? text.slice(prefix.length + 2) : text,\n });\n }\n } else if (tag === USER_COMMENT_TAG) {\n const text = decodeUserComment(tagData);\n if (text) {\n segments.push({\n source: { type: 'exifUserComment' },\n data: text,\n });\n }\n }\n\n offset += 12;\n }\n\n return segments;\n}\n\n/**\n * Extract prefix from text like \"Workflow: {...}\" -> \"Workflow\"\n */\nfunction extractPrefix(text: string): string | null {\n const match = text.match(/^([A-Za-z]+):\\s/);\n return match?.[1] ?? 
null;\n}\n\n/**\n * Get size in bytes for TIFF data type\n */\nfunction getTypeSize(type: number): number {\n switch (type) {\n case 1:\n return 1; // BYTE\n case 2:\n return 1; // ASCII\n case 3:\n return 2; // SHORT\n case 4:\n return 4; // LONG\n case 5:\n return 8; // RATIONAL\n case 7:\n return 1; // UNDEFINED\n default:\n return 1;\n }\n}\n\n/**\n * Decode ASCII/UTF-8 string from tag data\n */\nfunction decodeAsciiString(data: Uint8Array): string | null {\n try {\n const decoder = new TextDecoder('utf-8', { fatal: false });\n let text = decoder.decode(data);\n // Remove null terminator if present\n if (text.endsWith('\\0')) {\n text = text.slice(0, -1);\n }\n return text.trim() || null;\n } catch {\n return null;\n }\n}\n\n/**\n * Find Exif IFD offset from IFD0\n */\nfunction findExifIfdOffset(\n data: Uint8Array,\n ifdOffset: number,\n isLittleEndian: boolean,\n): number | null {\n if (ifdOffset + 2 > data.length) return null;\n\n const entryCount = readUint16(data, ifdOffset, isLittleEndian);\n let offset = ifdOffset + 2;\n\n for (let i = 0; i < entryCount; i++) {\n if (offset + 12 > data.length) return null;\n\n const tag = readUint16(data, offset, isLittleEndian);\n\n if (tag === EXIF_IFD_POINTER_TAG) {\n // Exif IFD pointer found\n return readUint32(data, offset + 8, isLittleEndian);\n }\n\n offset += 12;\n }\n\n return null;\n}\n\n/**\n * Decode UserComment based on encoding prefix\n *\n * @param data - UserComment data including encoding prefix\n * @returns Decoded string\n */\nexport function decodeUserComment(data: Uint8Array): string | null {\n if (data.length < 8) return null;\n\n // Check for UNICODE prefix\n if (\n data[0] === 0x55 && // U\n data[1] === 0x4e && // N\n data[2] === 0x49 && // I\n data[3] === 0x43 && // C\n data[4] === 0x4f && // O\n data[5] === 0x44 && // D\n data[6] === 0x45 && // E\n data[7] === 0x00 // NULL\n ) {\n // UTF-16 encoded - detect byte order by looking at first character\n const textData = data.slice(8);\n if (textData.length >= 2) {\n const isLikelyLE = textData[0] !== 0x00 && textData[1] === 0x00;\n return isLikelyLE ? decodeUtf16LE(textData) : decodeUtf16BE(textData);\n }\n return decodeUtf16BE(textData);\n }\n\n // Check for ASCII prefix\n if (\n data[0] === 0x41 && // A\n data[1] === 0x53 && // S\n data[2] === 0x43 && // C\n data[3] === 0x49 && // I\n data[4] === 0x49 && // I\n data[5] === 0x00 && // NULL\n data[6] === 0x00 && // NULL\n data[7] === 0x00 // NULL\n ) {\n // ASCII encoded\n return decodeAscii(data.slice(8));\n }\n\n // Try UTF-8 (for ComfyUI JSON format without prefix)\n try {\n const decoder = new TextDecoder('utf-8', { fatal: true });\n let result = decoder.decode(data);\n // Strip null terminator if present\n if (result.endsWith('\\0')) {\n result = result.slice(0, -1);\n }\n return result;\n } catch {\n return null;\n }\n}\n\n/**\n * Decode UTF-16BE string\n */\nfunction decodeUtf16BE(data: Uint8Array): string {\n const chars: string[] = [];\n\n for (let i = 0; i < data.length - 1; i += 2) {\n const code = ((data[i] ?? 0) << 8) | (data[i + 1] ?? 0);\n if (code === 0) break;\n chars.push(String.fromCharCode(code));\n }\n\n return chars.join('');\n}\n\n/**\n * Decode UTF-16LE string\n */\nfunction decodeUtf16LE(data: Uint8Array): string {\n const chars: string[] = [];\n\n for (let i = 0; i < data.length - 1; i += 2) {\n const code = (data[i] ?? 0) | ((data[i + 1] ?? 
0) << 8);\n if (code === 0) break;\n chars.push(String.fromCharCode(code));\n }\n\n return chars.join('');\n}\n\n/**\n * Decode ASCII string\n */\nfunction decodeAscii(data: Uint8Array): string {\n const chars: string[] = [];\n\n for (let i = 0; i < data.length; i++) {\n if (data[i] === 0) break;\n chars.push(String.fromCharCode(data[i] ?? 0));\n }\n\n return chars.join('');\n}\n","import type { JpegMetadataResult, MetadataSegment } from '../types';\nimport { Result } from '../types';\nimport { arraysEqual } from '../utils/binary';\nimport { parseExifMetadataSegments } from './exif';\n\nimport { isJpeg } from '../utils/binary';\n\n/** APP1 marker */\nconst APP1_MARKER = 0xe1;\n\n/** COM (Comment) marker */\nconst COM_MARKER = 0xfe;\n\n/** Exif header: \"Exif\\0\\0\" */\nconst EXIF_HEADER = new Uint8Array([0x45, 0x78, 0x69, 0x66, 0x00, 0x00]);\n\n/**\n * Read JPEG metadata from binary data\n *\n * Collects metadata from multiple sources:\n * - Exif tags (APP1 segment): UserComment, ImageDescription, Make\n * - COM segment - Used by NovelAI\n *\n * @param data - JPEG file data as Uint8Array\n * @returns Result containing all metadata segments or error\n */\nexport function readJpegMetadata(data: Uint8Array): JpegMetadataResult {\n if (!isJpeg(data)) {\n return Result.error({ type: 'invalidSignature' });\n }\n\n const segments: MetadataSegment[] = [];\n\n // Extract all Exif metadata (UserComment, ImageDescription, Make)\n const app1 = findApp1Segment(data);\n if (app1) {\n const exifData = data.slice(app1.offset, app1.offset + app1.length);\n const exifSegments = parseExifMetadataSegments(exifData);\n segments.push(...exifSegments);\n }\n\n // Try COM segment (NovelAI uses this)\n const comSegment = findComSegment(data);\n if (comSegment) {\n const comData = data.slice(\n comSegment.offset,\n comSegment.offset + comSegment.length,\n );\n const comText = decodeComSegment(comData);\n\n if (comText !== null) {\n segments.push({\n source: { type: 'jpegCom' },\n data: comText,\n });\n }\n }\n\n return Result.ok(segments);\n}\n\n/**\n * Find APP1 segment containing Exif data\n *\n * @param data - JPEG file data\n * @returns Offset and length of APP1 segment data, or null if not found\n */\nexport function findApp1Segment(\n data: Uint8Array,\n): { offset: number; length: number } | null {\n let offset = 2; // Skip SOI marker\n\n while (offset < data.length - 4) {\n // Check for marker\n if (data[offset] !== 0xff) {\n offset++;\n continue;\n }\n\n const marker = data[offset + 1];\n\n // Skip padding bytes\n if (marker === 0xff) {\n offset++;\n continue;\n }\n\n // Get segment length (big-endian, includes length bytes)\n const length = ((data[offset + 2] ?? 0) << 8) | (data[offset + 3] ?? 
0);\n\n // Check for APP1 marker\n if (marker === APP1_MARKER) {\n // Verify Exif header\n const headerStart = offset + 4;\n if (headerStart + 6 <= data.length) {\n const header = data.slice(headerStart, headerStart + 6);\n if (arraysEqual(header, EXIF_HEADER)) {\n // Return offset to TIFF data (after Exif header)\n return {\n offset: headerStart + 6,\n length: length - 8, // Subtract length bytes and Exif header\n };\n }\n }\n }\n\n // Move to next segment\n offset += 2 + length;\n\n // Stop at SOS (Start of Scan) or EOI\n if (marker === 0xda || marker === 0xd9) {\n break;\n }\n }\n\n return null;\n}\n\n/**\n * Find COM (Comment) segment\n *\n * COM segments are used by NovelAI to store metadata as UTF-8 JSON.\n *\n * @param data - JPEG file data\n * @returns Offset and length of COM segment data, or null if not found\n */\nfunction findComSegment(\n data: Uint8Array,\n): { offset: number; length: number } | null {\n let offset = 2; // Skip SOI marker\n\n while (offset < data.length - 4) {\n // Check for marker\n if (data[offset] !== 0xff) {\n offset++;\n continue;\n }\n\n const marker = data[offset + 1];\n\n // Skip padding bytes\n if (marker === 0xff) {\n offset++;\n continue;\n }\n\n // Get segment length (big-endian, includes length bytes)\n const length = ((data[offset + 2] ?? 0) << 8) | (data[offset + 3] ?? 0);\n\n // Check for COM marker\n if (marker === COM_MARKER) {\n // Return offset to comment data (after marker and length)\n return {\n offset: offset + 4,\n length: length - 2, // Subtract length bytes only\n };\n }\n\n // Move to next segment\n offset += 2 + length;\n\n // Stop at SOS (Start of Scan) or EOI\n if (marker === 0xda || marker === 0xd9) {\n break;\n }\n }\n\n return null;\n}\n\n/**\n * Decode COM segment data as UTF-8 string\n *\n * @param data - COM segment data\n * @returns Decoded string or null if invalid\n */\nfunction decodeComSegment(data: Uint8Array): string | null {\n try {\n const decoder = new TextDecoder('utf-8', { fatal: true });\n return decoder.decode(data);\n } catch {\n return null;\n }\n}\n","import type {\n ITXtChunk,\n PngMetadataResult,\n PngReadError,\n PngTextChunk,\n TExtChunk,\n} from '../types';\nimport { Result } from '../types';\nimport { readChunkType, readUint32BE } from '../utils/binary';\n\nimport { isPng } from '../utils/binary';\n\n/**\n * Read PNG metadata from binary data\n * @param data - PNG file data as Uint8Array\n * @returns Result containing metadata or error\n */\nexport function readPngMetadata(data: Uint8Array): PngMetadataResult {\n // Validate PNG signature\n if (!isPng(data)) {\n return Result.error({ type: 'invalidSignature' });\n }\n\n // Extract text chunks\n const chunksResult = extractTextChunks(data);\n if (!chunksResult.ok) {\n return chunksResult;\n }\n\n return Result.ok(chunksResult.value);\n}\n\n/**\n * Extract tEXt and iTXt chunks from PNG data\n */\n// 8 bytes for PNG signature\nconst PNG_SIGNATURE_LENGTH = 8;\n\n/**\n * Extract tEXt and iTXt chunks from PNG data\n */\nfunction extractTextChunks(\n data: Uint8Array,\n): Result<PngTextChunk[], PngReadError> {\n const chunks: PngTextChunk[] = [];\n let offset = PNG_SIGNATURE_LENGTH;\n\n while (offset < data.length) {\n // Read chunk length (4 bytes, big-endian)\n if (offset + 4 > data.length) {\n return Result.error({\n type: 'corruptedChunk',\n message: 'Unexpected end of file while reading chunk length',\n });\n }\n const length = readUint32BE(data, offset);\n offset += 4;\n\n // Read chunk type (4 bytes)\n if (offset + 4 > data.length) {\n return 
Result.error({\n type: 'corruptedChunk',\n message: 'Unexpected end of file while reading chunk type',\n });\n }\n const chunkType = readChunkType(data, offset);\n offset += 4;\n\n // Read chunk data\n if (offset + length > data.length) {\n return Result.error({\n type: 'corruptedChunk',\n message: `Unexpected end of file while reading chunk data (${chunkType})`,\n });\n }\n const chunkData = data.slice(offset, offset + length);\n offset += length;\n\n // Skip CRC (4 bytes)\n offset += 4;\n\n // Parse text chunks\n if (chunkType === 'tEXt') {\n const parsed = parseTExtChunk(chunkData);\n if (parsed) {\n chunks.push(parsed);\n }\n } else if (chunkType === 'iTXt') {\n const parsed = parseITXtChunk(chunkData);\n if (parsed) {\n chunks.push(parsed);\n }\n }\n\n // Stop at IEND\n if (chunkType === 'IEND') {\n break;\n }\n }\n\n return Result.ok(chunks);\n}\n\n/**\n * Parse tEXt chunk data\n *\n * Per PNG specification, tEXt chunks use Latin-1 (ISO-8859-1) encoding.\n * However, some tools (notably TensorArt) incorrectly write UTF-8 bytes\n * directly into tEXt chunks. To handle these non-compliant tools, we\n * attempt UTF-8 decoding first and fall back to Latin-1 if that fails.\n */\nfunction parseTExtChunk(data: Uint8Array): TExtChunk | null {\n // Find null separator\n const nullIndex = data.indexOf(0);\n if (nullIndex === -1) {\n return null;\n }\n\n // Keyword is Latin-1 encoded (per spec, keywords are ASCII-safe)\n const keyword = latin1Decode(data.slice(0, nullIndex));\n\n // Text: Try UTF-8 first (for non-compliant tools), fallback to Latin-1\n const textData = data.slice(nullIndex + 1);\n const text = tryUtf8Decode(textData) ?? latin1Decode(textData);\n\n return { type: 'tEXt', keyword, text };\n}\n\n/**\n * Try to decode data as UTF-8, return null if invalid\n */\nfunction tryUtf8Decode(data: Uint8Array): string | null {\n try {\n return new TextDecoder('utf-8', { fatal: true }).decode(data);\n } catch {\n return null;\n }\n}\n\n/**\n * Parse iTXt chunk data\n */\nfunction parseITXtChunk(data: Uint8Array): ITXtChunk | null {\n let offset = 0;\n\n // Read keyword (null-terminated)\n const keywordEnd = findNull(data, offset);\n if (keywordEnd === -1) return null;\n const keyword = utf8Decode(data.slice(offset, keywordEnd));\n offset = keywordEnd + 1;\n\n // Read compression flag (1 byte)\n if (offset >= data.length) return null;\n const compressionFlag = data[offset] ?? 0;\n offset += 1;\n\n // Read compression method (1 byte)\n if (offset >= data.length) return null;\n const compressionMethod = data[offset] ?? 
0;\n offset += 1;\n\n // Read language tag (null-terminated)\n const langEnd = findNull(data, offset);\n if (langEnd === -1) return null;\n const languageTag = utf8Decode(data.slice(offset, langEnd));\n offset = langEnd + 1;\n\n // Read translated keyword (null-terminated)\n const transEnd = findNull(data, offset);\n if (transEnd === -1) return null;\n const translatedKeyword = utf8Decode(data.slice(offset, transEnd));\n offset = transEnd + 1;\n\n // Read text (rest of data)\n let text: string;\n if (compressionFlag === 1) {\n // Compressed with zlib\n const decompressed = decompressZlib(data.slice(offset));\n if (!decompressed) return null;\n text = utf8Decode(decompressed);\n } else {\n text = utf8Decode(data.slice(offset));\n }\n\n return {\n type: 'iTXt',\n keyword,\n compressionFlag,\n compressionMethod,\n languageTag,\n translatedKeyword,\n text,\n };\n}\n\n/**\n * Find null byte in data starting from offset\n */\nfunction findNull(data: Uint8Array, offset: number): number {\n for (let i = offset; i < data.length; i++) {\n if (data[i] === 0) {\n return i;\n }\n }\n return -1;\n}\n\n/**\n * Decode Latin-1 (ISO-8859-1) bytes to string\n */\nfunction latin1Decode(data: Uint8Array): string {\n let result = '';\n for (let i = 0; i < data.length; i++) {\n result += String.fromCharCode(data[i] ?? 0);\n }\n return result;\n}\n\n/**\n * Decode UTF-8 bytes to string\n */\nfunction utf8Decode(data: Uint8Array): string {\n return new TextDecoder('utf-8').decode(data);\n}\n\n/**\n * Decompress zlib-compressed data\n *\n * Currently unimplemented: All surveyed sample images use uncompressed iTXt.\n * When a sample with compressed iTXt is found, implement using pako library.\n *\n * @see https://www.npmjs.com/package/pako\n */\nfunction decompressZlib(_data: Uint8Array): Uint8Array | null {\n // Not yet implemented - no compressed iTXt samples encountered\n return null;\n}\n","import type { WebpMetadataResult } from '../types';\nimport { Result } from '../types';\nimport { arraysEqual, readUint32LE } from '../utils/binary';\nimport { parseExifMetadataSegments } from './exif';\n\nimport { isWebp } from '../utils/binary';\n\n/** EXIF chunk type */\nconst EXIF_CHUNK_TYPE = new Uint8Array([0x45, 0x58, 0x49, 0x46]);\n\n/**\n * Read WebP metadata from binary data\n *\n * Extracts metadata from EXIF chunk in WebP files.\n * The EXIF chunk contains TIFF-formatted data identical to JPEG Exif.\n *\n * @param data - WebP file data as Uint8Array\n * @returns Result containing all metadata segments or error\n */\nexport function readWebpMetadata(data: Uint8Array): WebpMetadataResult {\n if (!isWebp(data)) {\n return Result.error({ type: 'invalidSignature' });\n }\n\n const exifChunk = findExifChunk(data);\n if (!exifChunk) {\n return Result.ok([]);\n }\n\n const exifData = data.slice(\n exifChunk.offset,\n exifChunk.offset + exifChunk.length,\n );\n\n // Parse all EXIF metadata segments (UserComment, ImageDescription, Make)\n const segments = parseExifMetadataSegments(exifData);\n\n return Result.ok(segments);\n}\n\n/**\n * Find EXIF chunk in WebP file\n *\n * WebP uses RIFF container format with named chunks.\n * EXIF chunk contains TIFF data starting with \"II\" or \"MM\" byte order marker.\n *\n * @param data - WebP file data\n * @returns Offset and length of EXIF chunk data, or null if not found\n */\nexport function findExifChunk(\n data: Uint8Array,\n): { offset: number; length: number } | null {\n // Start after RIFF header (12 bytes: \"RIFF\" + size + \"WEBP\")\n let offset = 12;\n\n while (offset < 
data.length - 8) {\n // Read chunk type (4 bytes)\n const chunkType = data.slice(offset, offset + 4);\n\n // Read chunk size (4 bytes, little-endian)\n const chunkSize = readUint32LE(data, offset + 4);\n\n // Check for EXIF chunk\n if (arraysEqual(chunkType, EXIF_CHUNK_TYPE)) {\n // EXIF chunk data starts after type and size\n return {\n offset: offset + 8,\n length: chunkSize,\n };\n }\n\n // Move to next chunk (chunk size + type + size fields)\n // RIFF chunks are padded to even byte boundaries\n const paddedSize = chunkSize + (chunkSize % 2);\n offset += 8 + paddedSize;\n }\n\n return null;\n}\n","import type {\n MetadataEntry,\n MetadataSegment,\n MetadataSegmentSource,\n PngTextChunk,\n} from '../types';\nimport { parseJson } from './json';\n\n/**\n * Convert PNG text chunks to format-agnostic metadata entries\n *\n * @param chunks - PNG tEXt/iTXt chunks\n * @returns Array of metadata entries\n */\nexport function pngChunksToEntries(chunks: PngTextChunk[]): MetadataEntry[] {\n return chunks.map((chunk) => ({\n keyword: chunk.keyword,\n text: chunk.text,\n }));\n}\n\n/**\n * Convert JPEG/WebP metadata segments to format-agnostic entries\n *\n * Maps segment sources to conventional keywords:\n * - jpegCom → 'Comment'\n * - exifUserComment → 'Comment' (or expand if NovelAI WebP format)\n * - exifImageDescription → prefix or 'Description'\n * - exifMake → prefix or 'Make'\n *\n * Special handling for NovelAI WebP format where metadata is stored as:\n * {\"Comment\": \"{...inner JSON...}\", \"Software\": \"NovelAI\", ...}\n *\n * @param segments - Metadata segments from JPEG/WebP reader\n * @returns Array of metadata entries\n */\nexport function segmentsToEntries(\n segments: MetadataSegment[],\n): MetadataEntry[] {\n const entries: MetadataEntry[] = [];\n\n for (const segment of segments) {\n const keyword = sourceToKeyword(segment.source);\n const text = segment.data;\n\n // Try to detect and expand NovelAI WebP format\n // Format: {\"Comment\": \"{...}\", \"Software\": \"NovelAI\", ...}\n if (segment.source.type === 'exifUserComment' && text.startsWith('{')) {\n const expanded = tryExpandNovelAIWebpFormat(text);\n if (expanded) {\n entries.push(...expanded);\n continue;\n }\n }\n\n entries.push({ keyword, text });\n }\n\n return entries;\n}\n\n/**\n * Try to expand NovelAI WebP format metadata\n *\n * NovelAI WebP stores metadata as outer JSON with:\n * - Software: \"NovelAI\"\n * - Comment: inner JSON string with actual parameters\n *\n * @param text - JSON text to try to expand\n * @returns Array of entries if NovelAI format, null otherwise\n */\nfunction tryExpandNovelAIWebpFormat(text: string): MetadataEntry[] | null {\n const outerParsed = parseJson<Record<string, unknown>>(text);\n if (!outerParsed.ok) {\n return null;\n }\n\n const outer = outerParsed.value;\n\n // Check if this is NovelAI WebP format\n if (\n typeof outer !== 'object' ||\n outer === null ||\n outer.Software !== 'NovelAI' ||\n typeof outer.Comment !== 'string'\n ) {\n return null;\n }\n\n const entries: MetadataEntry[] = [{ keyword: 'Software', text: 'NovelAI' }];\n\n // Parse and add inner Comment as Comment entry\n const innerParsed = parseJson<unknown>(outer.Comment);\n\n return [\n ...entries,\n innerParsed.ok\n ? 
{ keyword: 'Comment', text: JSON.stringify(innerParsed.value) }\n : { keyword: 'Comment', text: outer.Comment },\n ];\n}\n\n/**\n * Map metadata segment source to keyword\n */\nfunction sourceToKeyword(source: MetadataSegmentSource): string {\n switch (source.type) {\n case 'jpegCom':\n return 'Comment';\n case 'exifUserComment':\n return 'Comment';\n case 'exifImageDescription':\n return source.prefix ?? 'Description';\n case 'exifMake':\n return source.prefix ?? 'Make';\n }\n}\n","/**\n * Read API for sd-metadata\n *\n * Handles reading and parsing metadata from images.\n * Automatically detects image format and extracts embedded generation metadata.\n */\n\nimport { parseMetadata } from '../parsers';\nimport { readJpegMetadata } from '../readers/jpeg';\nimport { readPngMetadata } from '../readers/png';\nimport { readWebpMetadata } from '../readers/webp';\nimport type {\n MetadataSegment,\n ParseResult,\n PngTextChunk,\n RawMetadata,\n} from '../types';\nimport {\n type ImageFormat,\n detectFormat,\n readChunkType,\n readUint24LE,\n readUint32BE,\n readUint32LE,\n} from '../utils/binary';\nimport { pngChunksToEntries, segmentsToEntries } from '../utils/convert';\n\n/**\n * Read and parse metadata from an image\n *\n * Automatically detects the image format (PNG, JPEG, WebP) and parses\n * any embedded generation metadata.\n *\n * @param data - Image file data\n * @returns Parse result containing metadata and raw data\n */\nexport function read(data: Uint8Array): ParseResult {\n const format = detectFormat(data);\n\n if (!format) {\n return { status: 'invalid', message: 'Unknown image format' };\n }\n\n // 1. Read raw metadata based on format\n const rawResult = readRawMetadata(data, format);\n if (rawResult.status !== 'success') {\n return rawResult;\n }\n const raw = rawResult.raw;\n\n // 2. Convert to agnostic entries\n const entries =\n raw.format === 'png'\n ? pngChunksToEntries(raw.chunks)\n : segmentsToEntries(raw.segments);\n\n // 3. Parse metadata\n const parseResult = parseMetadata(entries);\n if (!parseResult.ok) {\n return { status: 'unrecognized', raw };\n }\n\n const metadata = parseResult.value;\n\n // 4. 
Fallback for dimensions if missing\n if (metadata.width === 0 || metadata.height === 0) {\n const dims = HELPERS[format].readDimensions(data);\n\n if (dims) {\n metadata.width = metadata.width || dims.width;\n metadata.height = metadata.height || dims.height;\n }\n }\n\n return { status: 'success', metadata, raw };\n}\n\n// ============================================================================\n// Helpers\n// ============================================================================\n\n/** Format-specific helper functions */\nconst HELPERS = {\n png: {\n readMetadata: readPngMetadata,\n readDimensions: readPngDimensions,\n createRaw: (chunks: PngTextChunk[]) => ({ format: 'png' as const, chunks }),\n },\n jpeg: {\n readMetadata: readJpegMetadata,\n readDimensions: readJpegDimensions,\n createRaw: (segments: MetadataSegment[]) => ({\n format: 'jpeg' as const,\n segments,\n }),\n },\n webp: {\n readMetadata: readWebpMetadata,\n readDimensions: readWebpDimensions,\n createRaw: (segments: MetadataSegment[]) => ({\n format: 'webp' as const,\n segments,\n }),\n },\n} as const satisfies Record<ImageFormat, unknown>;\n\n/** Result type for readRawMetadata */\ntype RawReadResult =\n | { status: 'success'; raw: RawMetadata }\n | { status: 'empty' }\n | { status: 'invalid'; message: string };\n\n/**\n * Read raw metadata from image data\n */\nfunction readRawMetadata(data: Uint8Array, format: ImageFormat): RawReadResult {\n const result = HELPERS[format].readMetadata(data);\n\n if (!result.ok) {\n const message =\n result.error.type === 'invalidSignature'\n ? `Invalid ${format.toUpperCase()} signature`\n : result.error.message;\n return { status: 'invalid', message };\n }\n\n if (result.value.length === 0) return { status: 'empty' };\n\n // PNG uses PngTextChunk[], JPEG/WebP use MetadataSegment[]\n if (format === 'png') {\n return {\n status: 'success',\n raw: HELPERS.png.createRaw(result.value as PngTextChunk[]),\n };\n }\n return {\n status: 'success',\n raw: HELPERS[format].createRaw(result.value as MetadataSegment[]),\n };\n}\n\n/**\n * Read width and height from PNG IHDR chunk\n */\nfunction readPngDimensions(\n data: Uint8Array,\n): { width: number; height: number } | null {\n const PNG_SIGNATURE_LENGTH = 8;\n if (data.length < 24) return null;\n // IHDR data starts at offset 16 (8 sig + 4 len + 4 type)\n // Check if it is indeed IHDR?\n // We assume valid PNG if detectFormat passed, and IHDR is always first.\n return {\n width: readUint32BE(data, PNG_SIGNATURE_LENGTH + 8),\n height: readUint32BE(data, PNG_SIGNATURE_LENGTH + 12),\n };\n}\n\n/**\n * Read width and height from JPEG chunks\n */\nfunction readJpegDimensions(\n data: Uint8Array,\n): { width: number; height: number } | null {\n // Use a SafeView-like approach or just manual parsing\n let offset = 2;\n while (offset < data.length - 4) {\n // Check validation\n if (data[offset] !== 0xff) {\n // Should handle scanning for FF, but in valid JPEG segments start with FF\n offset++;\n continue;\n }\n\n const marker = data[offset + 1] ?? 0;\n if (marker === 0xff) {\n offset++;\n continue; // Padding\n }\n\n // Read length (16-bit BE)\n const length = ((data[offset + 2] ?? 0) << 8) | (data[offset + 3] ?? 0);\n\n // SOF0 (C0) ... SOF15 (CF), except C4 (DHT), C8 (JPG), CC (DAC)\n if (\n marker >= 0xc0 &&\n marker <= 0xcf &&\n marker !== 0xc4 &&\n marker !== 0xc8 &&\n marker !== 0xcc\n ) {\n // Structure: Precision(1), Height(2), Width(2)\n // Offset: Marker(2) + Length(2) + Precision(1) = 5\n const height = ((data[offset + 5] ?? 
0) << 8) | (data[offset + 6] ?? 0);\n const width = ((data[offset + 7] ?? 0) << 8) | (data[offset + 8] ?? 0);\n return { width, height };\n }\n\n offset += 2 + length;\n if (marker === 0xda) break; // SOS\n }\n return null;\n}\n\n/**\n * Read width and height from WebP chunks\n */\nfunction readWebpDimensions(\n data: Uint8Array,\n): { width: number; height: number } | null {\n // RIFF(4) + Size(4) + WEBP(4) = 12 bytes\n let offset = 12;\n\n while (offset < data.length) {\n if (offset + 8 > data.length) break;\n\n const chunkType = readChunkType(data, offset);\n const chunkSize = readUint32LE(data, offset + 4);\n const paddedSize = chunkSize + (chunkSize % 2);\n\n if (chunkType === 'VP8X') {\n // VP8X: Width (3 bytes @ offset 12) + Height (3 bytes @ offset 15)\n // Both are 1-based (stored value is width-1)\n const wMinus1 = readUint24LE(data, offset + 12);\n const hMinus1 = readUint24LE(data, offset + 15);\n return { width: wMinus1 + 1, height: hMinus1 + 1 };\n }\n\n if (chunkType === 'VP8 ') {\n // VP8 (lossy): Check keyframe\n // Frame tag (3 bytes @ offset 8+0)\n // Keyframe if bit 0 is 0\n const start = offset + 8;\n const tag =\n (data[start] ?? 0) |\n ((data[start + 1] ?? 0) << 8) |\n ((data[start + 2] ?? 0) << 16);\n const keyFrame = !(tag & 1);\n\n if (keyFrame) {\n // Validation code: 0x9d 0x01 0x2a bytes @ start+3\n if (\n data[start + 3] === 0x9d &&\n data[start + 4] === 0x01 &&\n data[start + 5] === 0x2a\n ) {\n // Width: 2 bytes @ start+6 (14 bits)\n // Height: 2 bytes @ start+8 (14 bits)\n const wRaw = (data[start + 6] ?? 0) | ((data[start + 7] ?? 0) << 8);\n const hRaw = (data[start + 8] ?? 0) | ((data[start + 9] ?? 0) << 8);\n return { width: wRaw & 0x3fff, height: hRaw & 0x3fff };\n }\n }\n }\n\n if (chunkType === 'VP8L') {\n // VP8L (lossless)\n // Signature 0x2f @ offset + 8\n if (data[offset + 8] === 0x2f) {\n // 4 bytes @ offset + 9 containing W (14 bits), H (14 bits)\n const bits = readUint32LE(data, offset + 9);\n const width = (bits & 0x3fff) + 1;\n const height = ((bits >> 14) & 0x3fff) + 1;\n return { width, height };\n }\n }\n\n offset += 8 + paddedSize;\n }\n return null;\n}\n","/**\n * Shared utilities for metadata converters\n */\n\nimport type { MetadataSegment, PngTextChunk } from '../types';\n\n/**\n * Create a tEXt chunk, returns empty array if text is undefined\n *\n * @param keyword - Chunk keyword\n * @param text - Chunk text, if undefined returns empty array\n * @returns Array with one chunk or empty array\n */\nexport const createTextChunk = (\n keyword: string,\n text: string | undefined,\n): PngTextChunk[] =>\n text !== undefined ? [{ type: 'tEXt', keyword, text }] : [];\n\n/**\n * Create an iTXt chunk, returns empty array if text is undefined\n *\n * @param keyword - Chunk keyword\n * @param text - Chunk text, if undefined returns empty array\n * @returns Array with one chunk or empty array\n */\nexport const createITxtChunk = (\n keyword: string,\n text: string | undefined,\n): PngTextChunk[] =>\n text !== undefined\n ? 
[\n {\n type: 'iTXt',\n keyword,\n compressionFlag: 0,\n compressionMethod: 0,\n languageTag: '',\n translatedKeyword: '',\n text,\n },\n ]\n : [];\n\n/**\n * Find a segment by source type\n *\n * @param segments - Array of metadata segments\n * @param type - Source type to find\n * @returns Matching segment or undefined\n */\nexport const findSegment = (\n segments: MetadataSegment[],\n type: string,\n): MetadataSegment | undefined => segments.find((s) => s.source.type === type);\n\n/**\n * Stringify value, returns undefined if value is undefined\n *\n * @param value - Value to stringify\n * @returns Stringified value or undefined\n */\nexport const stringify = (value: unknown): string | undefined => {\n if (value === undefined) return undefined;\n return typeof value === 'string' ? value : JSON.stringify(value);\n};\n","/**\n * Unified chunk encoding strategy for PNG converters\n *\n * Handles three different encoding strategies:\n * 1. dynamic: Choose tEXt/iTXt based on content (for tools like A1111, InvokeAI)\n * 2. text-unicode-escape: tEXt with Unicode escaping (for ComfyUI, SwarmUI)\n * 3. text-utf8-raw: tEXt with raw UTF-8 (for Stability Matrix, TensorArt)\n */\n\nimport type { PngTextChunk } from '../types';\nimport { createITxtChunk, createTextChunk } from './utils';\n\n/**\n * Chunk encoding strategy for PNG converters\n */\nexport type ChunkEncodingStrategy =\n | 'dynamic' // Choose tEXt/iTXt based on content\n | 'text-unicode-escape' // tEXt with Unicode escape (spec-compliant)\n | 'text-utf8-raw'; // tEXt with raw UTF-8 (non-compliant but compatible)\n\n/**\n * Tool-specific chunk encoding strategies\n */\nconst CHUNK_ENCODING_STRATEGIES: Record<string, ChunkEncodingStrategy> = {\n // Dynamic selection tools\n a1111: 'dynamic',\n forge: 'dynamic',\n 'forge-neo': 'dynamic',\n 'sd-webui': 'dynamic',\n invokeai: 'dynamic',\n novelai: 'dynamic',\n 'sd-next': 'dynamic',\n easydiffusion: 'dynamic',\n blind: 'dynamic',\n\n // Unicode escape tools (spec-compliant)\n comfyui: 'text-unicode-escape',\n swarmui: 'text-unicode-escape',\n fooocus: 'text-unicode-escape',\n 'ruined-fooocus': 'text-unicode-escape',\n 'hf-space': 'text-unicode-escape',\n\n // Raw UTF-8 tools (non-compliant but compatible)\n 'stability-matrix': 'text-utf8-raw',\n tensorart: 'text-utf8-raw',\n};\n\n/**\n * Get encoding strategy for a tool\n *\n * @param tool - Tool name\n * @returns Encoding strategy (defaults to 'text-unicode-escape')\n */\nexport function getEncodingStrategy(tool: string): ChunkEncodingStrategy {\n return CHUNK_ENCODING_STRATEGIES[tool] ?? 
'text-unicode-escape';\n}\n\n/**\n * Escape Unicode characters beyond Latin-1 for tEXt chunk\n *\n * Converts characters beyond Latin-1 to Unicode escape sequences.\n * Latin-1 range (0x00-0xFF) is left as-is since tEXt supports it.\n * Example: テスト → \\u30c6\\u30b9\\u30c8\n *\n * @param text - Text to escape\n * @returns Text with non-Latin-1 characters escaped\n */\nexport function escapeUnicode(text: string): string {\n return text.replace(/[\\u0100-\\uffff]/g, (char) => {\n const code = char.charCodeAt(0).toString(16).padStart(4, '0');\n return `\\\\u${code}`;\n });\n}\n\n/**\n * Check if text contains characters beyond Latin-1 range\n *\n * PNG tEXt chunks support Latin-1 (ISO 8859-1) encoding (0x00-0xFF).\n * Characters beyond this range require iTXt chunks for UTF-8 support.\n *\n * @param text - Text to check\n * @returns True if text contains characters outside Latin-1 range (>= 0x100)\n */\nfunction hasNonLatin1(text: string): boolean {\n // biome-ignore lint/suspicious/noControlCharactersInRegex: checking for non-Latin-1 characters\n return /[^\\x00-\\xFF]/.test(text);\n}\n\n/**\n * Create PNG chunk with appropriate encoding strategy\n *\n * @param keyword - Chunk keyword\n * @param text - Chunk text (undefined returns empty array)\n * @param strategy - Encoding strategy to use\n * @returns Array of PNG text chunks (empty if text is undefined)\n */\nexport function createEncodedChunk(\n keyword: string,\n text: string | undefined,\n strategy: ChunkEncodingStrategy,\n): PngTextChunk[] {\n if (text === undefined) return [];\n\n switch (strategy) {\n case 'dynamic': {\n // Choose based on content: tEXt for Latin-1, iTXt for beyond\n const chunkType = hasNonLatin1(text) ? 'iTXt' : 'tEXt';\n return chunkType === 'iTXt'\n ? createITxtChunk(keyword, text)\n : createTextChunk(keyword, text);\n }\n\n case 'text-unicode-escape': {\n // tEXt with Unicode escaping (spec-compliant)\n const escaped = escapeUnicode(text);\n return createTextChunk(keyword, escaped);\n }\n\n case 'text-utf8-raw': {\n // tEXt with raw UTF-8 (non-compliant but compatible)\n return createTextChunk(keyword, text);\n }\n }\n}\n","/**\n * A1111-format metadata conversion utilities\n *\n * Handles conversion for sd-webui, forge, forge-neo, and civitai.\n * A1111 format stores metadata as plain text in:\n * - PNG: `parameters` tEXt/iTXt chunk (dynamic selection)\n * - JPEG/WebP: Exif UserComment\n */\n\nimport type { MetadataSegment, PngTextChunk } from '../types';\nimport { createEncodedChunk, getEncodingStrategy } from './chunk-encoding';\n\n/**\n * Convert A1111-format PNG chunks to JPEG/WebP segments\n *\n * @param chunks - PNG text chunks\n * @returns Metadata segments for JPEG/WebP\n */\nexport function convertA1111PngToSegments(\n chunks: PngTextChunk[],\n): MetadataSegment[] {\n // Find parameters chunk\n const parameters = chunks.find((c) => c.keyword === 'parameters');\n if (!parameters) {\n return [];\n }\n\n //Simply copy to exifUserComment\n return [\n {\n source: { type: 'exifUserComment' },\n data: parameters.text,\n },\n ];\n}\n\n/**\n * Convert JPEG/WebP segments to A1111-format PNG chunks\n *\n * @param segments - Metadata segments from JPEG/WebP\n * @returns PNG text chunks\n */\nexport function convertA1111SegmentsToPng(\n segments: MetadataSegment[],\n): PngTextChunk[] {\n // Find exifUserComment segment\n const userComment = segments.find((s) => s.source.type === 'exifUserComment');\n if (!userComment) {\n return [];\n }\n\n // Use dynamic selection (tEXt for ASCII, iTXt for non-ASCII)\n return 
createEncodedChunk(\n 'parameters',\n userComment.data,\n getEncodingStrategy('a1111'),\n );\n}\n","/**\n * Blind metadata conversion for unrecognized formats\n *\n * Converts all chunks/segments between formats without understanding content.\n * Uses JSON to combine multiple chunks into single exifUserComment (NovelAI strategy).\n */\n\nimport type { MetadataSegment, PngTextChunk } from '../types';\nimport { parseJson } from '../utils/json';\nimport { createEncodedChunk, getEncodingStrategy } from './chunk-encoding';\n\n/**\n * Convert ALL PNG chunks to SINGLE exifUserComment segment with JSON\n *\n * Uses NovelAI strategy: combines all chunks into JSON object\n * to work around Exif's single UserComment limitation.\n *\n * @param chunks - All PNG text chunks from image\n * @returns Single exifUserComment segment with JSON data\n */\nexport function blindPngToSegments(chunks: PngTextChunk[]): MetadataSegment[] {\n if (chunks.length === 0) return [];\n\n // Create object: { keyword: text, ... }\n const chunkMap = Object.fromEntries(\n chunks.map((chunk) => [chunk.keyword, chunk.text]),\n );\n\n // Return single UserComment with JSON\n return [\n {\n source: { type: 'exifUserComment' },\n data: JSON.stringify(chunkMap),\n },\n ];\n}\n\n/**\n * Convert exifUserComment JSON back to PNG chunks\n *\n * Parses NovelAI-style JSON format and converts back to chunks.\n *\n * @param segments - Metadata segments from JPEG/WebP\n * @returns PNG text chunks\n */\nexport function blindSegmentsToPng(\n segments: MetadataSegment[],\n): PngTextChunk[] {\n const userComment = segments.find((s) => s.source.type === 'exifUserComment');\n if (!userComment) return [];\n\n // Try to parse as JSON\n const parsed = parseJson<Record<string, unknown>>(userComment.data);\n if (parsed.ok) {\n // Reconstruct individual chunks with dynamic selection\n return Object.entries(parsed.value).flatMap(([keyword, value]) => {\n const text = typeof value === 'string' ? 
value : JSON.stringify(value);\n if (!text) return [];\n return createEncodedChunk(keyword, text, getEncodingStrategy('blind'));\n });\n }\n\n // Not JSON: create single metadata chunk with dynamic selection\n return createEncodedChunk(\n 'metadata',\n userComment.data,\n getEncodingStrategy('blind'),\n );\n}\n","/**\n * ComfyUI metadata conversion utilities\n *\n * ComfyUI stores metadata as:\n * - PNG: `prompt` + `workflow` tEXt chunks (both JSON)\n * - JPEG/WebP: exifUserComment with {\"prompt\": {...}, \"workflow\": {...}} (saveimage-plus format)\n *\n * Also handles: tensorart, stability-matrix (same format)\n */\n\nimport type { MetadataSegment, PngTextChunk } from '../types';\nimport { parseJson } from '../utils/json';\nimport { createEncodedChunk, getEncodingStrategy } from './chunk-encoding';\nimport { findSegment, stringify } from './utils';\n\n/**\n * Convert ComfyUI PNG chunks to JPEG/WebP segments\n *\n * Uses saveimage-plus format: stores chunk keywords as JSON keys.\n * For chunks that contain JSON strings (prompt, workflow), parse them\n * and store as objects to match saveimage-plus format.\n *\n * @param chunks - PNG text chunks\n * @returns Metadata segments for JPEG/WebP\n */\nexport function convertComfyUIPngToSegments(\n chunks: PngTextChunk[],\n): MetadataSegment[] {\n // Parse JSON chunks and convert to objects\n const data: Record<string, unknown> = {};\n\n for (const chunk of chunks) {\n // Try to parse as JSON\n const parsed = parseJson<unknown>(chunk.text);\n if (parsed.ok) {\n // Store as object (matches saveimage-plus format)\n data[chunk.keyword] = parsed.value;\n } else {\n // Not JSON, store as string\n data[chunk.keyword] = chunk.text;\n }\n }\n\n return [\n {\n source: { type: 'exifUserComment' },\n data: JSON.stringify(data),\n },\n ];\n}\n\n/**\n * Try save-image-extended format (exifImageDescription + exifMake)\n *\n * @returns PNG chunks if format matches, null otherwise\n */\nconst tryParseExtendedFormat = (\n segments: MetadataSegment[],\n): PngTextChunk[] | null => {\n const imageDescription = findSegment(segments, 'exifImageDescription');\n const make = findSegment(segments, 'exifMake');\n\n if (!imageDescription && !make) {\n return null;\n }\n\n return [\n ...createEncodedChunk('prompt', make?.data, getEncodingStrategy('comfyui')),\n ...createEncodedChunk(\n 'workflow',\n imageDescription?.data,\n getEncodingStrategy('comfyui'),\n ),\n ];\n};\n\n/**\n * Try saveimage-plus format (exifUserComment with JSON)\n *\n * @returns PNG chunks if format matches, null otherwise\n */\nconst tryParseSaveImagePlusFormat = (\n segments: MetadataSegment[],\n): PngTextChunk[] | null => {\n const userComment = findSegment(segments, 'exifUserComment');\n if (!userComment) {\n return null;\n }\n\n const parsed = parseJson<Record<string, unknown>>(userComment.data);\n if (!parsed.ok) {\n // Not valid JSON, return as prompt fallback\n return createEncodedChunk(\n 'prompt',\n userComment.data,\n getEncodingStrategy('comfyui'),\n );\n }\n\n // Convert all keys to PNG chunks with Unicode escaping\n return Object.entries(parsed.value).flatMap(([keyword, value]) =>\n createEncodedChunk(\n keyword,\n stringify(value),\n getEncodingStrategy('comfyui'),\n ),\n );\n};\n\n/**\n * Convert JPEG/WebP segments to ComfyUI PNG chunks\n *\n * Supports:\n * - save-image-extended format: exifImageDescription (workflow) + exifMake (prompt)\n * - saveimage-plus format: exifUserComment with {\"prompt\": {...}, \"workflow\": {...}}\n *\n * @param segments - Metadata segments from 
JPEG/WebP\n * @returns PNG text chunks\n */\nexport function convertComfyUISegmentsToPng(\n segments: MetadataSegment[],\n): PngTextChunk[] {\n // Try each format in order of priority\n return (\n tryParseExtendedFormat(segments) ??\n tryParseSaveImagePlusFormat(segments) ??\n []\n );\n}\n","/**\n * Easy Diffusion metadata conversion utilities\n *\n * Easy Diffusion format stores metadata as JSON in various locations:\n * - PNG: Each field as separate chunks (negative_prompt, Negative Prompt, etc.)\n * - JPEG/WebP: JSON in exifUserComment\n */\n\nimport type { MetadataSegment, PngTextChunk } from '../types';\nimport { parseJson } from '../utils/json';\nimport { createEncodedChunk, getEncodingStrategy } from './chunk-encoding';\nimport { findSegment } from './utils';\n\n/**\n * Convert Easy Diffusion PNG chunks to JPEG/WebP segments\n *\n * Easy Diffusion PNG stores metadata as individual chunks.\n * We combine them into a JSON object for JPEG/WebP storage.\n *\n * @param chunks - PNG text chunks\n * @returns Metadata segments for JPEG/WebP\n */\nexport function convertEasyDiffusionPngToSegments(\n chunks: PngTextChunk[],\n): MetadataSegment[] {\n const json = Object.fromEntries(\n chunks.map((chunk) => [chunk.keyword, chunk.text]),\n );\n\n return [\n {\n source: { type: 'exifUserComment' },\n data: JSON.stringify(json),\n },\n ];\n}\n\n/**\n * Convert JPEG/WebP segments to Easy Diffusion PNG chunks\n *\n * @param segments - Metadata segments from JPEG/WebP\n * @returns PNG text chunks\n */\nexport function convertEasyDiffusionSegmentsToPng(\n segments: MetadataSegment[],\n): PngTextChunk[] {\n const userComment = findSegment(segments, 'exifUserComment');\n if (!userComment) {\n return [];\n }\n\n const parsed = parseJson<Record<string, unknown>>(userComment.data);\n if (!parsed.ok) {\n return [];\n }\n\n // Convert each key-value pair to a chunk with dynamic selection\n return Object.entries(parsed.value).flatMap(([keyword, value]) => {\n const text =\n value != null\n ? typeof value === 'string'\n ? 
value\n : String(value)\n : undefined;\n if (!text) return [];\n return createEncodedChunk(\n keyword,\n text,\n getEncodingStrategy('easydiffusion'),\n );\n });\n}\n","/**\n * InvokeAI metadata conversion utilities\n *\n * InvokeAI stores metadata as:\n * - PNG: `invokeai_metadata` + `invokeai_graph` iTXt/tEXt chunks (both JSON, dynamic selection)\n * - JPEG/WebP: Not officially supported by InvokeAI\n *\n * For conversion, we use a JSON format similar to ComfyUI saveimage-plus:\n * {\"invokeai_metadata\": {...}, \"invokeai_graph\": {...}}\n */\n\nimport type { MetadataSegment, PngTextChunk } from '../types';\nimport { parseJson } from '../utils/json';\nimport { createEncodedChunk, getEncodingStrategy } from './chunk-encoding';\nimport { findSegment, stringify } from './utils';\n\n/**\n * Convert InvokeAI PNG chunks to JPEG/WebP segments\n *\n * Parses JSON chunks and stores them as objects.\n *\n * @param chunks - PNG text chunks\n * @returns Metadata segments for JPEG/WebP\n */\nexport function convertInvokeAIPngToSegments(\n chunks: PngTextChunk[],\n): MetadataSegment[] {\n const data: Record<string, unknown> = {};\n\n for (const chunk of chunks) {\n const parsed = parseJson<unknown>(chunk.text);\n if (parsed.ok) {\n data[chunk.keyword] = parsed.value;\n } else {\n data[chunk.keyword] = chunk.text;\n }\n }\n\n return [\n {\n source: { type: 'exifUserComment' },\n data: JSON.stringify(data),\n },\n ];\n}\n\n/**\n * Convert JPEG/WebP segments to InvokeAI PNG chunks\n *\n * @param segments - Metadata segments from JPEG/WebP\n * @returns PNG text chunks\n */\nexport function convertInvokeAISegmentsToPng(\n segments: MetadataSegment[],\n): PngTextChunk[] {\n const userComment = findSegment(segments, 'exifUserComment');\n if (!userComment) {\n return [];\n }\n\n const parsed = parseJson<Record<string, unknown>>(userComment.data);\n if (!parsed.ok) {\n // Not valid JSON, store as single chunk with dynamic selection\n return createEncodedChunk(\n 'invokeai_metadata',\n userComment.data,\n getEncodingStrategy('invokeai'),\n );\n }\n\n // Parse saved chunks\n const metadataText = stringify(parsed.value.invokeai_metadata);\n const graphText = stringify(parsed.value.invokeai_graph);\n\n // Create chunks with dynamic selection\n const chunks = [\n ...createEncodedChunk(\n 'invokeai_metadata',\n metadataText,\n getEncodingStrategy('invokeai'),\n ),\n ...createEncodedChunk(\n 'invokeai_graph',\n graphText,\n getEncodingStrategy('invokeai'),\n ),\n ];\n\n if (chunks.length > 0) {\n return chunks;\n }\n\n // Fallback: return as invokeai_metadata chunk\n return createEncodedChunk(\n 'invokeai_metadata',\n userComment.data,\n getEncodingStrategy('invokeai'),\n );\n}\n","/**\n * NovelAI metadata conversion utilities\n *\n * Converts NovelAI metadata between PNG chunks and JPEG/WebP segments.\n */\n\nimport type { MetadataSegment, PngTextChunk } from '../types';\nimport { parseJson } from '../utils/json';\nimport { createEncodedChunk, getEncodingStrategy } from './chunk-encoding';\nimport { createTextChunk, findSegment, stringify } from './utils';\n\n/** Fixed values for NovelAI PNG chunks */\nconst NOVELAI_TITLE = 'NovelAI generated image';\nconst NOVELAI_SOFTWARE = 'NovelAI';\n\n/**\n * Convert NovelAI PNG chunks to JPEG/WebP segments\n *\n * PNG structure:\n * - Title: \\\"NovelAI generated image\\\"\n * - Description: short prompt\n * - Software: \\\"NovelAI\\\"\n * - Source: version info\n * - Generation time: time\n * - Comment: full JSON parameters\n *\n * @param chunks - PNG text chunks\n * 
@returns Metadata segments for JPEG/WebP\n */\nexport function convertNovelaiPngToSegments(\n chunks: PngTextChunk[],\n): MetadataSegment[] {\n const comment = chunks.find((c) => c.keyword === 'Comment');\n if (!comment) {\n return [];\n }\n\n const description = chunks.find((c) => c.keyword === 'Description');\n const data = buildUserCommentJson(chunks);\n\n // Build segments array declaratively\n const descriptionSegment: MetadataSegment[] = description\n ? [\n {\n source: { type: 'exifImageDescription' },\n data: `\\0\\0\\0\\0${description.text}`,\n },\n ]\n : [];\n\n const userCommentSegment: MetadataSegment = {\n source: { type: 'exifUserComment' },\n data: JSON.stringify(data),\n };\n\n return [...descriptionSegment, userCommentSegment];\n}\n\n/**\n * Build UserComment JSON from PNG chunks in NovelAI's standard key order\n */\nfunction buildUserCommentJson(chunks: PngTextChunk[]): Record<string, string> {\n return NOVELAI_KEY_ORDER.map((key) => {\n const chunk = chunks.find((c) => c.keyword === key);\n return chunk ? { [key]: chunk.text } : null;\n })\n .filter((entry): entry is Record<string, string> => entry !== null)\n .reduce(\n (acc, entry) => Object.assign(acc, entry),\n {} as Record<string, string>,\n );\n}\n\n/**\n * NovelAI standard key order for UserComment JSON\n */\nconst NOVELAI_KEY_ORDER = [\n 'Comment',\n 'Description',\n 'Generation time',\n 'Software',\n 'Source',\n 'Title',\n] as const;\n\n/**\n * Convert JPEG/WebP segments to NovelAI PNG chunks\n *\n * @param segments - Metadata segments from JPEG/WebP\n * @returns PNG text chunks\n */\nexport function convertNovelaiSegmentsToPng(\n segments: MetadataSegment[],\n): PngTextChunk[] {\n const userCommentSeg = findSegment(segments, 'exifUserComment');\n const descriptionSeg = findSegment(segments, 'exifImageDescription');\n\n return parseSegments(userCommentSeg, descriptionSeg);\n}\n\n/**\n * Parse UserComment JSON and convert to PNG chunks\n */\nfunction parseSegments(\n userCommentSeg: MetadataSegment | undefined,\n descriptionSeg: MetadataSegment | undefined,\n): PngTextChunk[] {\n if (!userCommentSeg || !descriptionSeg) {\n return [];\n }\n\n const parsed = parseJson<Record<string, unknown>>(userCommentSeg.data);\n if (!parsed.ok) {\n // If parsing fails, treat the whole thing as Comment\n return createTextChunk('Comment', userCommentSeg.data);\n }\n\n const jsonData = parsed.value;\n\n // Extract Description text (prefer exifImageDescription over corrupted JSON)\n const descriptionText = extractDescriptionText(\n descriptionSeg,\n stringify(jsonData.Description),\n );\n\n const descriptionChunks = descriptionText\n ? createEncodedChunk(\n 'Description',\n descriptionText,\n getEncodingStrategy('novelai'),\n )\n : [];\n\n return [\n // Title (required, use default if missing)\n createTextChunk('Title', stringify(jsonData.Title) ?? NOVELAI_TITLE),\n // Description (optional, prefer exifImageDescription over JSON)\n ...descriptionChunks,\n // Software (required, use default if missing)\n createTextChunk(\n 'Software',\n stringify(jsonData.Software) ?? 
NOVELAI_SOFTWARE,\n ),\n // Source (optional)\n createTextChunk('Source', stringify(jsonData.Source)),\n // Generation time (optional)\n createTextChunk('Generation time', stringify(jsonData['Generation time'])),\n // Comment (optional)\n createTextChunk('Comment', stringify(jsonData.Comment)),\n ].flat();\n}\n\n/**\n * Extract Description text from exifImageDescription or UserComment JSON\n *\n * NovelAI WebP has corrupted UTF-8 in UserComment JSON Description,\n * so we prefer the clean exifImageDescription segment when available.\n */\nfunction extractDescriptionText(\n descriptionSeg: MetadataSegment | undefined,\n jsonDescription: string | undefined,\n): string | undefined {\n // First, try exifImageDescription segment (strip 4-byte null prefix)\n if (descriptionSeg?.data) {\n const data = descriptionSeg.data;\n // NovelAI WebP format has 4-byte null prefix before ImageDescription\n return data.startsWith('\\0\\0\\0\\0') ? data.slice(4) : data;\n }\n\n // Fallback: use JSON value (for non-NovelAI WebP sources)\n if (jsonDescription) {\n // Strip 4-byte null prefix if present\n return jsonDescription.startsWith('\\0\\0\\0\\0')\n ? jsonDescription.slice(4)\n : jsonDescription;\n }\n\n return undefined;\n}\n","/**\n * Simple chunk converter utilities\n *\n * Factory functions for converters that simply copy a single chunk keyword\n * between PNG and JPEG/WebP formats, with encoding based on tool strategy.\n */\n\nimport type { MetadataSegment, PngTextChunk } from '../types';\nimport { createEncodedChunk, getEncodingStrategy } from './chunk-encoding';\n\n/**\n * Create a PNG-to-segments converter that extracts a single chunk by keyword\n *\n * @param keyword - The PNG chunk keyword to extract\n * @returns Converter function\n */\nexport function createPngToSegments(\n keyword: string,\n): (chunks: PngTextChunk[]) => MetadataSegment[] {\n return (chunks) => {\n const chunk = chunks.find((c) => c.keyword === keyword);\n return !chunk\n ? 
[]\n : [{ source: { type: 'exifUserComment' }, data: chunk.text }];\n };\n}\n\n/**\n * Create a segments-to-PNG converter that writes to a single chunk keyword\n *\n * Uses getEncodingStrategy to determine encoding based on keyword (tool name).\n *\n * @param keyword - The PNG chunk keyword to write (also used as tool name for strategy)\n * @returns Converter function\n */\nexport function createSegmentsToPng(\n keyword: string,\n): (segments: MetadataSegment[]) => PngTextChunk[] {\n return (segments) => {\n const userComment = segments.find(\n (s) => s.source.type === 'exifUserComment',\n );\n if (!userComment) return [];\n\n // Use keyword as tool name for strategy lookup\n return createEncodedChunk(\n keyword,\n userComment.data,\n getEncodingStrategy(keyword),\n );\n };\n}\n","/**\n * SwarmUI metadata conversion utilities\n *\n * SwarmUI stores metadata as:\n * - PNG: `parameters` chunk containing sui_image_params JSON\n * - JPEG/WebP: exifUserComment contains sui_image_params JSON directly\n *\n * The converter extracts/wraps the content appropriately for each format.\n */\n\nimport type { MetadataSegment, PngTextChunk } from '../types';\nimport { parseJson } from '../utils/json';\nimport { createEncodedChunk, getEncodingStrategy } from './chunk-encoding';\nimport { findSegment } from './utils';\n\n/**\n * Convert SwarmUI PNG chunks to JPEG/WebP segments\n *\n * Extracts the 'parameters' chunk and optionally preserves 'prompt' chunk (ComfyUI workflow).\n * - parameters chunk → exifUserComment (matches native SwarmUI format)\n * - prompt chunk → exifMake (preserves ComfyUI node graph for round-trip)\n *\n * @param chunks - PNG text chunks\n * @returns Metadata segments for JPEG/WebP\n */\nexport function convertSwarmUIPngToSegments(\n chunks: PngTextChunk[],\n): MetadataSegment[] {\n // Find 'parameters' chunk\n const parametersChunk = chunks.find((c) => c.keyword === 'parameters');\n if (!parametersChunk) {\n return [];\n }\n\n // Parse and return the JSON directly (no wrapping in parameters key)\n const parsed = parseJson<unknown>(parametersChunk.text);\n const data = parsed.ok ? parsed.value : parametersChunk.text;\n\n const segments: MetadataSegment[] = [\n {\n source: { type: 'exifUserComment' },\n data: typeof data === 'string' ? 
data : JSON.stringify(data),\n },\n ];\n\n // Preserve node graph if present (prompt chunk contains ComfyUI node graph)\n const promptChunk = chunks.find((c) => c.keyword === 'prompt');\n if (promptChunk) {\n segments.push({\n source: { type: 'exifMake' },\n data: promptChunk.text,\n });\n }\n\n return segments;\n}\n\n/**\n * Convert JPEG/WebP segments to SwarmUI PNG chunks\n *\n * Handles both native SwarmUI format and extended format with node graph:\n * - exifUserComment → parameters chunk (always present)\n * - exifMake → prompt chunk (optional, contains ComfyUI node graph)\n *\n * Chunk order matches original SwarmUI format: [prompt, parameters]\n *\n * @param segments - Metadata segments from JPEG/WebP\n * @returns PNG text chunks\n */\nexport function convertSwarmUISegmentsToPng(\n segments: MetadataSegment[],\n): PngTextChunk[] {\n const userComment = findSegment(segments, 'exifUserComment');\n if (!userComment) {\n return [];\n }\n\n const chunks: PngTextChunk[] = [];\n\n // Restore node graph first if present (extended format)\n const make = findSegment(segments, 'exifMake');\n if (make) {\n chunks.push(\n ...createEncodedChunk(\n 'prompt',\n make.data,\n getEncodingStrategy('swarmui'),\n ),\n );\n }\n\n // Add parameters chunk second (always present)\n chunks.push(\n ...createEncodedChunk(\n 'parameters',\n userComment.data,\n getEncodingStrategy('swarmui'),\n ),\n );\n\n return chunks;\n}\n","/**\n * Metadata conversion utilities\n *\n * Provides functions to convert metadata between different image formats.\n */\n\nimport type {\n ConversionResult,\n ConversionTargetFormat,\n ParseResult,\n RawMetadata,\n} from '../types';\nimport { Result } from '../types';\nimport { convertA1111PngToSegments, convertA1111SegmentsToPng } from './a1111';\nimport { blindPngToSegments, blindSegmentsToPng } from './blind';\nimport {\n convertComfyUIPngToSegments,\n convertComfyUISegmentsToPng,\n} from './comfyui';\nimport {\n convertEasyDiffusionPngToSegments,\n convertEasyDiffusionSegmentsToPng,\n} from './easydiffusion';\nimport {\n convertInvokeAIPngToSegments,\n convertInvokeAISegmentsToPng,\n} from './invokeai';\nimport {\n convertNovelaiPngToSegments,\n convertNovelaiSegmentsToPng,\n} from './novelai';\nimport { createPngToSegments, createSegmentsToPng } from './simple-chunk';\nimport {\n convertSwarmUIPngToSegments,\n convertSwarmUISegmentsToPng,\n} from './swarmui';\n\n/**\n * Convert metadata from one format to another\n *\n * Takes a ParseResult and converts the raw metadata to the target format.\n * Conversion strategy is determined by the detected software.\n *\n * @param parseResult - Result from parsePng, parseJpeg, or parseWebp\n * @param targetFormat - Target format ('png', 'jpeg', or 'webp')\n * @param force - Force blind conversion for unrecognized formats (default: false)\n * @returns Converted RawMetadata or error\n *\n * @example\n * ```typescript\n * const pngResult = parsePng(pngData);\n * const converted = convertMetadata(pngResult, 'webp');\n * if (converted.ok) {\n * const webpWithMetadata = writeWebpMetadata(webpData, converted.value.segments);\n * }\n * ```\n */\nexport function convertMetadata(\n parseResult: ParseResult,\n targetFormat: ConversionTargetFormat,\n force = false,\n): ConversionResult {\n // Handle non-success statuses\n if (parseResult.status === 'empty') {\n return Result.error({ type: 'missingRawData' });\n }\n\n if (parseResult.status === 'invalid') {\n return Result.error({\n type: 'invalidParseResult',\n status: parseResult.status,\n });\n }\n\n 
const raw = parseResult.raw;\n\n // If source and target are the same format, return as-is\n if (\n (raw.format === 'png' && targetFormat === 'png') ||\n (raw.format === 'jpeg' && targetFormat === 'jpeg') ||\n (raw.format === 'webp' && targetFormat === 'webp')\n ) {\n return Result.ok(raw);\n }\n\n const software =\n parseResult.status === 'success' ? parseResult.metadata.software : null;\n\n // If software is unknown, use blind conversion if force is enabled\n if (!software) {\n return force\n ? convertBlind(raw, targetFormat)\n : Result.error({\n type: 'unsupportedSoftware',\n software: 'unknown',\n });\n }\n\n // Get converter for detected software\n const converter = softwareConverters[software];\n if (!converter) {\n // This should never happen if software is a valid GenerationSoftware\n return Result.error({\n type: 'unsupportedSoftware',\n software,\n });\n }\n\n return converter(raw, targetFormat);\n}\n\n// Type for converter function\ntype ConverterFn = (\n raw: RawMetadata,\n targetFormat: ConversionTargetFormat,\n) => ConversionResult;\n\n// Type for PNG↔segment conversion functions\ntype PngToSegmentsFn = (\n chunks: import('../types').PngTextChunk[],\n) => import('../types').MetadataSegment[];\ntype SegmentsToPngFn = (\n segments: import('../types').MetadataSegment[],\n) => import('../types').PngTextChunk[];\n\n/**\n * Factory function to create format converters\n *\n * All converters follow the same pattern:\n * - PNG → JPEG/WebP: convert chunks to segments\n * - JPEG/WebP → PNG: convert segments to chunks\n * - Same format: return as-is\n */\nfunction createFormatConverter(\n pngToSegments: PngToSegmentsFn,\n segmentsToPng: SegmentsToPngFn,\n): ConverterFn {\n return (raw, targetFormat) => {\n if (raw.format === 'png') {\n // PNG → same format: return as-is\n if (targetFormat === 'png') {\n return Result.ok(raw);\n }\n // PNG → JPEG/WebP\n const segments = pngToSegments(raw.chunks);\n return Result.ok({ format: targetFormat, segments });\n }\n\n // JPEG/WebP → JPEG/WebP: just copy segments\n if (targetFormat === 'jpeg' || targetFormat === 'webp') {\n return Result.ok({ format: targetFormat, segments: raw.segments });\n }\n\n // JPEG/WebP → PNG\n const chunks = segmentsToPng(raw.segments);\n return Result.ok({ format: 'png', chunks });\n };\n}\n\n// Create converters using factory\nconst convertNovelai = createFormatConverter(\n convertNovelaiPngToSegments,\n convertNovelaiSegmentsToPng,\n);\n\nconst convertA1111 = createFormatConverter(\n convertA1111PngToSegments,\n convertA1111SegmentsToPng,\n);\n\nconst convertComfyUI = createFormatConverter(\n convertComfyUIPngToSegments,\n convertComfyUISegmentsToPng,\n);\n\nconst convertEasyDiffusion = createFormatConverter(\n convertEasyDiffusionPngToSegments,\n convertEasyDiffusionSegmentsToPng,\n);\n\nconst convertFooocus = createFormatConverter(\n createPngToSegments('Comment'),\n createSegmentsToPng('Comment'),\n);\n\nconst convertRuinedFooocus = createFormatConverter(\n createPngToSegments('parameters'),\n createSegmentsToPng('parameters'),\n);\n\nconst convertSwarmUI = createFormatConverter(\n convertSwarmUIPngToSegments,\n convertSwarmUISegmentsToPng,\n);\n\nconst convertInvokeAI = createFormatConverter(\n convertInvokeAIPngToSegments,\n convertInvokeAISegmentsToPng,\n);\n\nconst convertHfSpace = createFormatConverter(\n createPngToSegments('parameters'),\n createSegmentsToPng('parameters'),\n);\n\nconst convertBlind = createFormatConverter(\n blindPngToSegments,\n blindSegmentsToPng,\n);\n\n/**\n * Lookup table: software name 
→ converter function\n */\nconst softwareConverters = {\n // NovelAI\n novelai: convertNovelai,\n // A1111-format (sd-webui, forge, forge-neo, civitai, sd-next)\n 'sd-webui': convertA1111,\n 'sd-next': convertA1111,\n forge: convertA1111,\n 'forge-neo': convertA1111,\n civitai: convertA1111,\n // ComfyUI-format (comfyui, tensorart, stability-matrix)\n comfyui: convertComfyUI,\n tensorart: convertComfyUI,\n 'stability-matrix': convertComfyUI,\n // Easy Diffusion\n easydiffusion: convertEasyDiffusion,\n // Fooocus variants\n fooocus: convertFooocus,\n 'ruined-fooocus': convertRuinedFooocus,\n // SwarmUI\n swarmui: convertSwarmUI,\n // InvokeAI\n invokeai: convertInvokeAI,\n // HuggingFace Space\n 'hf-space': convertHfSpace,\n} as const;\n","/**\n * Exif writing utilities\n *\n * Functions for building Exif/TIFF structures from metadata segments.\n */\n\nimport type { MetadataSegment } from '../types';\nimport { writeUint16, writeUint32 } from '../utils/binary';\nimport {\n EXIF_IFD_POINTER_TAG,\n IMAGE_DESCRIPTION_TAG,\n MAKE_TAG,\n USER_COMMENT_TAG,\n} from '../utils/exif-constants';\n\n/**\n * Build Exif TIFF data from MetadataSegments\n *\n * Creates a complete TIFF structure with IFD0, Exif IFD, and all tag data.\n * Uses little-endian (Intel) byte order for maximum compatibility.\n *\n * @param segments - Metadata segments to encode\n * @returns TIFF data (starts with \"II\" byte order marker)\n */\nexport function buildExifTiffData(segments: MetadataSegment[]): Uint8Array {\n // Separate segments by destination IFD\n const ifd0Segments = segments.filter(\n (s) =>\n s.source.type === 'exifImageDescription' || s.source.type === 'exifMake',\n );\n const exifIfdSegments = segments.filter(\n (s) => s.source.type === 'exifUserComment',\n );\n\n // No Exif-type segments\n if (ifd0Segments.length === 0 && exifIfdSegments.length === 0) {\n return new Uint8Array(0);\n }\n\n const isLittleEndian = true;\n\n // Build tag data for each segment\n const ifd0Tags: Array<{ tag: number; type: number; data: Uint8Array }> = [];\n const exifTags: Array<{ tag: number; type: number; data: Uint8Array }> = [];\n\n for (const seg of ifd0Segments) {\n if (seg.source.type === 'exifImageDescription') {\n const data = encodeAsciiTag(seg.data, seg.source.prefix);\n ifd0Tags.push({ tag: IMAGE_DESCRIPTION_TAG, type: 2, data });\n } else if (seg.source.type === 'exifMake') {\n const data = encodeAsciiTag(seg.data, seg.source.prefix);\n ifd0Tags.push({ tag: MAKE_TAG, type: 2, data });\n }\n }\n\n for (const seg of exifIfdSegments) {\n if (seg.source.type === 'exifUserComment') {\n const data = encodeUserComment(seg.data);\n exifTags.push({ tag: USER_COMMENT_TAG, type: 7, data });\n }\n }\n\n const hasExifIfd = exifTags.length > 0;\n if (hasExifIfd) {\n ifd0Tags.push({\n tag: EXIF_IFD_POINTER_TAG,\n type: 4,\n data: new Uint8Array(4),\n });\n }\n\n // Sort tags by tag number (required by TIFF spec)\n ifd0Tags.sort((a, b) => a.tag - b.tag);\n exifTags.sort((a, b) => a.tag - b.tag);\n\n // Calculate sizes and offsets\n const headerSize = 8;\n const ifd0EntryCount = ifd0Tags.length;\n const ifd0Size = 2 + 12 * ifd0EntryCount + 4;\n const exifEntryCount = exifTags.length;\n const exifIfdSize = hasExifIfd ? 
2 + 12 * exifEntryCount + 4 : 0;\n\n const ifd0Offset = headerSize;\n const exifIfdOffset = ifd0Offset + ifd0Size;\n let dataOffset = exifIfdOffset + exifIfdSize;\n\n // Update Exif IFD pointer in IFD0\n if (hasExifIfd) {\n const exifPtrTag = ifd0Tags.find((t) => t.tag === EXIF_IFD_POINTER_TAG);\n if (exifPtrTag) {\n writeUint32(exifPtrTag.data, 0, exifIfdOffset, isLittleEndian);\n }\n }\n\n // Assign data offsets for each tag\n const tagDataOffsets = new Map<\n { tag: number; type: number; data: Uint8Array },\n number\n >();\n\n for (const tag of [...ifd0Tags, ...exifTags]) {\n if (tag.data.length > 4) {\n tagDataOffsets.set(tag, dataOffset);\n dataOffset += tag.data.length;\n if (tag.data.length % 2 !== 0) {\n dataOffset += 1;\n }\n }\n }\n\n // Build result\n const totalSize = dataOffset;\n const result = new Uint8Array(totalSize);\n\n // Write TIFF header\n result[0] = 0x49; // I\n result[1] = 0x49; // I (little-endian)\n writeUint16(result, 2, 42, isLittleEndian);\n writeUint32(result, 4, ifd0Offset, isLittleEndian);\n\n // Write IFD0\n let offset = ifd0Offset;\n writeUint16(result, offset, ifd0EntryCount, isLittleEndian);\n offset += 2;\n\n for (const tag of ifd0Tags) {\n writeIfdEntry(result, offset, tag, tagDataOffsets.get(tag), isLittleEndian);\n offset += 12;\n }\n\n writeUint32(result, offset, 0, isLittleEndian);\n offset += 4;\n\n // Write Exif IFD\n if (hasExifIfd) {\n writeUint16(result, offset, exifEntryCount, isLittleEndian);\n offset += 2;\n\n for (const tag of exifTags) {\n writeIfdEntry(\n result,\n offset,\n tag,\n tagDataOffsets.get(tag),\n isLittleEndian,\n );\n offset += 12;\n }\n\n writeUint32(result, offset, 0, isLittleEndian);\n }\n\n // Write tag data values\n for (const [tag, dataOff] of tagDataOffsets) {\n result.set(tag.data, dataOff);\n }\n\n return result;\n}\n\n/**\n * Write an IFD entry\n */\nfunction writeIfdEntry(\n data: Uint8Array,\n offset: number,\n tag: { tag: number; type: number; data: Uint8Array },\n dataOffset: number | undefined,\n isLittleEndian: boolean,\n): void {\n writeUint16(data, offset, tag.tag, isLittleEndian);\n writeUint16(data, offset + 2, tag.type, isLittleEndian);\n writeUint32(data, offset + 4, tag.data.length, isLittleEndian);\n\n if (tag.data.length <= 4) {\n data.set(tag.data, offset + 8);\n } else {\n writeUint32(data, offset + 8, dataOffset ?? 0, isLittleEndian);\n }\n}\n\n/**\n * Encode string as UserComment with UTF-16LE encoding\n *\n * Uses UNICODE prefix followed by UTF-16LE encoded text.\n *\n * @param text - Text to encode\n * @returns Encoded UserComment data (8-byte prefix + UTF-16LE text)\n */\nfunction encodeUserComment(text: string): Uint8Array {\n const utf16Data: number[] = [];\n for (let i = 0; i < text.length; i++) {\n const code = text.charCodeAt(i);\n utf16Data.push(code & 0xff);\n utf16Data.push((code >> 8) & 0xff);\n }\n\n const result = new Uint8Array(8 + utf16Data.length);\n\n // UNICODE encoding prefix\n result[0] = 0x55; // U\n result[1] = 0x4e; // N\n result[2] = 0x49; // I\n result[3] = 0x43; // C\n result[4] = 0x4f; // O\n result[5] = 0x44; // D\n result[6] = 0x45; // E\n result[7] = 0x00; // NULL\n\n result.set(new Uint8Array(utf16Data), 8);\n return result;\n}\n\n/**\n * Encode ASCII tag data with optional prefix\n *\n * @param text - Text content\n * @param prefix - Optional prefix (e.g., \"Workflow\")\n * @returns Null-terminated ASCII bytes\n */\nfunction encodeAsciiTag(text: string, prefix?: string): Uint8Array {\n const fullText = prefix ? 
`${prefix}: ${text}` : text;\n const textBytes = new TextEncoder().encode(fullText);\n const result = new Uint8Array(textBytes.length + 1);\n result.set(textBytes, 0);\n result[textBytes.length] = 0;\n return result;\n}\n","import type { JpegWriteResult, MetadataSegment } from '../types';\nimport { Result } from '../types';\nimport { buildExifTiffData } from './exif';\n\nimport { isJpeg } from '../utils/binary';\n\n/** APP1 marker */\nconst APP1_MARKER = 0xe1;\n\n/** COM (Comment) marker */\nconst COM_MARKER = 0xfe;\n\n/** SOS (Start of Scan) marker */\nconst SOS_MARKER = 0xda;\n\n/** EOI (End of Image) marker */\nconst EOI_MARKER = 0xd9;\n\n/** Exif header: \"Exif\\0\\0\" */\nconst EXIF_HEADER = new Uint8Array([0x45, 0x78, 0x69, 0x66, 0x00, 0x00]);\n\n/**\n * Write JPEG metadata to binary data\n *\n * Replaces existing metadata segments with the provided segments.\n * Each segment is written to its original location based on source type:\n * - jpegCom -> COM segment (before SOS)\n * - exifUserComment/exifImageDescription/exifMake -> APP1 Exif segment (after SOI)\n *\n * @param data - Original JPEG file data as Uint8Array\n * @param segments - Metadata segments to embed\n * @returns Result containing new JPEG data with embedded metadata\n */\nexport function writeJpegMetadata(\n data: Uint8Array,\n segments: MetadataSegment[],\n): JpegWriteResult {\n // Validate JPEG signature\n if (!isJpeg(data)) {\n return Result.error({ type: 'invalidSignature' });\n }\n\n // Separate segments by destination\n const comSegments = segments.filter((s) => s.source.type === 'jpegCom');\n const exifSegments = segments.filter(\n (s) =>\n s.source.type === 'exifUserComment' ||\n s.source.type === 'exifImageDescription' ||\n s.source.type === 'exifMake',\n );\n\n // Collect non-metadata segments from original JPEG\n const collectResult = collectNonMetadataSegments(data);\n if (!collectResult.ok) {\n return collectResult;\n }\n\n const { beforeSos, scanData } = collectResult.value;\n\n // Build new APP1 Exif segment\n const app1Segment =\n exifSegments.length > 0 ? 
buildApp1Segment(exifSegments) : null;\n\n // Build new COM segments\n const comSegmentData = comSegments.map((s) => buildComSegment(s.data));\n\n // Calculate total size\n let totalSize = 2; // SOI\n if (app1Segment) {\n totalSize += app1Segment.length;\n }\n for (const seg of beforeSos) {\n totalSize += seg.length;\n }\n for (const com of comSegmentData) {\n totalSize += com.length;\n }\n totalSize += scanData.length;\n\n // Build output\n const output = new Uint8Array(totalSize);\n let offset = 0;\n\n // Write SOI\n output[offset++] = 0xff;\n output[offset++] = 0xd8;\n\n // Write APP1 Exif (immediately after SOI)\n if (app1Segment) {\n output.set(app1Segment, offset);\n offset += app1Segment.length;\n }\n\n // Write original non-metadata segments\n for (const seg of beforeSos) {\n output.set(seg, offset);\n offset += seg.length;\n }\n\n // Write COM segments (before SOS)\n for (const com of comSegmentData) {\n output.set(com, offset);\n offset += com.length;\n }\n\n // Write scan data (SOS to EOI)\n output.set(scanData, offset);\n\n return Result.ok(output);\n}\n\n/**\n * Collect non-metadata segments from JPEG\n *\n * Returns segments that are not APP1 Exif or COM, plus the scan data (SOS to EOI)\n */\nfunction collectNonMetadataSegments(\n data: Uint8Array,\n): Result<\n { beforeSos: Uint8Array[]; scanData: Uint8Array },\n { type: 'corruptedStructure'; message: string }\n> {\n const beforeSos: Uint8Array[] = [];\n let offset = 2; // Skip SOI\n\n while (offset < data.length - 1) {\n // Check for marker\n if (data[offset] !== 0xff) {\n return Result.error({\n type: 'corruptedStructure',\n message: `Expected marker at offset ${offset}`,\n });\n }\n\n // Skip padding bytes\n while (data[offset] === 0xff && offset < data.length - 1) {\n offset++;\n }\n\n const marker = data[offset];\n offset++;\n\n // Check for SOS - everything after this is scan data\n if (marker === SOS_MARKER) {\n // Include SOS marker in scan data\n const scanData = data.slice(offset - 2);\n return Result.ok({ beforeSos, scanData });\n }\n\n // Check for EOI (shouldn't happen before SOS but handle it)\n if (marker === EOI_MARKER) {\n return Result.ok({ beforeSos, scanData: new Uint8Array([0xff, 0xd9]) });\n }\n\n // Get segment length (big-endian, includes length bytes)\n if (offset + 2 > data.length) {\n return Result.error({\n type: 'corruptedStructure',\n message: 'Unexpected end of file',\n });\n }\n\n const length = ((data[offset] ?? 0) << 8) | (data[offset + 1] ?? 
0);\n const segmentStart = offset - 2; // Include marker\n const segmentEnd = offset + length;\n\n if (segmentEnd > data.length) {\n return Result.error({\n type: 'corruptedStructure',\n message: 'Segment extends beyond file',\n });\n }\n\n // Check if this is a metadata segment we want to strip\n const isExifApp1 =\n marker === APP1_MARKER &&\n offset + 2 + 6 <= data.length &&\n data[offset + 2] === 0x45 && // E\n data[offset + 3] === 0x78 && // x\n data[offset + 4] === 0x69 && // i\n data[offset + 5] === 0x66 && // f\n data[offset + 6] === 0x00 && // NULL\n data[offset + 7] === 0x00; // NULL\n\n const isCom = marker === COM_MARKER;\n\n // Keep non-metadata segments\n if (!isExifApp1 && !isCom) {\n beforeSos.push(data.slice(segmentStart, segmentEnd));\n }\n\n offset = segmentEnd;\n }\n\n // If we reach here without finding SOS, the JPEG is malformed\n return Result.error({\n type: 'corruptedStructure',\n message: 'No SOS marker found',\n });\n}\n\n/**\n * Build APP1 Exif segment from metadata segments\n */\nfunction buildApp1Segment(segments: MetadataSegment[]): Uint8Array {\n const tiffData = buildExifTiffData(segments);\n\n if (tiffData.length === 0) {\n return new Uint8Array(0);\n }\n\n // APP1 segment: marker (2) + length (2) + Exif header (6) + TIFF data\n const segmentLength = 2 + EXIF_HEADER.length + tiffData.length;\n const segment = new Uint8Array(2 + segmentLength);\n\n segment[0] = 0xff;\n segment[1] = APP1_MARKER;\n segment[2] = (segmentLength >> 8) & 0xff;\n segment[3] = segmentLength & 0xff;\n segment.set(EXIF_HEADER, 4);\n segment.set(tiffData, 4 + EXIF_HEADER.length);\n\n return segment;\n}\n\n/**\n * Build COM segment from text\n */\nfunction buildComSegment(text: string): Uint8Array {\n const textBytes = new TextEncoder().encode(text);\n const segmentLength = 2 + textBytes.length; // length field includes itself\n\n const segment = new Uint8Array(2 + segmentLength);\n segment[0] = 0xff;\n segment[1] = COM_MARKER;\n segment[2] = (segmentLength >> 8) & 0xff;\n segment[3] = segmentLength & 0xff;\n segment.set(textBytes, 4);\n\n return segment;\n}\n","import type {\n ITXtChunk,\n PngTextChunk,\n PngWriteResult,\n TExtChunk,\n} from '../types';\nimport { Result } from '../types';\nimport { readChunkType, readUint32BE, writeUint32BE } from '../utils/binary';\n\nimport { isPng } from '../utils/binary';\n\n/** PNG file signature (magic bytes) */\nconst PNG_SIGNATURE = new Uint8Array([137, 80, 78, 71, 13, 10, 26, 10]);\n\n/**\n * Write PNG metadata to binary data\n *\n * Replaces all existing tEXt and iTXt chunks with the provided chunks.\n * Chunks are inserted immediately after the IHDR chunk (PNG spec recommended).\n *\n * @param data - Original PNG file data as Uint8Array\n * @param chunks - Text chunks to embed\n * @returns Result containing new PNG data with embedded metadata\n */\nexport function writePngMetadata(\n data: Uint8Array,\n chunks: PngTextChunk[],\n): PngWriteResult {\n // Validate PNG signature\n if (!isPng(data)) {\n return Result.error({ type: 'invalidSignature' });\n }\n\n // Find IHDR chunk end position\n const ihdrEnd = findIhdrChunkEnd(data);\n if (ihdrEnd === -1) {\n return Result.error({ type: 'noIhdrChunk' });\n }\n\n // Collect non-text chunks from original data\n const originalChunks = collectNonTextChunks(data);\n\n // Serialize new text chunks\n const serializedTextChunks = chunks.map((chunk) =>\n chunk.type === 'tEXt'\n ? 
serializeTExtChunk(chunk)\n : serializeITXtChunk(chunk),\n );\n\n // Calculate total output size\n const totalSize =\n PNG_SIGNATURE.length +\n originalChunks.ihdr.length +\n serializedTextChunks.reduce((sum, chunk) => sum + chunk.length, 0) +\n originalChunks.others.reduce((sum, chunk) => sum + chunk.length, 0);\n\n // Build output\n const output = new Uint8Array(totalSize);\n let offset = 0;\n\n // Write signature\n output.set(PNG_SIGNATURE, offset);\n offset += PNG_SIGNATURE.length;\n\n // Write IHDR\n output.set(originalChunks.ihdr, offset);\n offset += originalChunks.ihdr.length;\n\n // Write text chunks (immediately after IHDR)\n for (const chunk of serializedTextChunks) {\n output.set(chunk, offset);\n offset += chunk.length;\n }\n\n // Write other chunks\n for (const chunk of originalChunks.others) {\n output.set(chunk, offset);\n offset += chunk.length;\n }\n\n return Result.ok(output);\n}\n\n/**\n * Find the end position of IHDR chunk (including CRC)\n * @returns End offset or -1 if not found\n */\nfunction findIhdrChunkEnd(data: Uint8Array): number {\n const offset = PNG_SIGNATURE.length;\n\n if (offset + 8 > data.length) {\n return -1;\n }\n\n const length = readUint32BE(data, offset);\n const chunkType = readChunkType(data, offset + 4);\n\n if (chunkType !== 'IHDR') {\n return -1;\n }\n\n // Return end position: length(4) + type(4) + data(length) + crc(4)\n return offset + 4 + 4 + length + 4;\n}\n\n/**\n * Collect chunks from PNG, separating IHDR and non-text chunks\n */\nfunction collectNonTextChunks(data: Uint8Array): {\n ihdr: Uint8Array;\n others: Uint8Array[];\n} {\n const others: Uint8Array[] = [];\n let offset = PNG_SIGNATURE.length;\n let ihdr: Uint8Array = new Uint8Array(0);\n\n while (offset < data.length) {\n const chunkStart = offset;\n\n // Read chunk length\n if (offset + 4 > data.length) break;\n const length = readUint32BE(data, offset);\n offset += 4;\n\n // Read chunk type\n if (offset + 4 > data.length) break;\n const chunkType = readChunkType(data, offset);\n offset += 4;\n\n // Skip chunk data\n offset += length;\n\n // Skip CRC\n offset += 4;\n\n const chunkEnd = offset;\n const chunkData = data.slice(chunkStart, chunkEnd);\n\n if (chunkType === 'IHDR') {\n ihdr = chunkData;\n } else if (chunkType !== 'tEXt' && chunkType !== 'iTXt') {\n others.push(chunkData);\n }\n\n if (chunkType === 'IEND') {\n break;\n }\n }\n\n return { ihdr, others };\n}\n\n/**\n * Serialize a tEXt chunk to binary\n *\n * Per PNG spec, tEXt uses Latin-1 encoding. However, to maintain round-trip\n * compatibility with tools that incorrectly write UTF-8 (e.g., TensorArt),\n * we encode the text as UTF-8 bytes. 
This allows non-ASCII characters to\n * survive the read-write cycle.\n */\nfunction serializeTExtChunk(chunk: TExtChunk): Uint8Array {\n // Encode keyword as Latin-1 (keywords are ASCII-safe)\n const keyword = latin1Encode(chunk.keyword);\n // Encode text as UTF-8 (for round-trip compatibility with non-compliant tools)\n const text = utf8Encode(chunk.text);\n\n // Data: keyword + null + text\n const chunkData = new Uint8Array(keyword.length + 1 + text.length);\n chunkData.set(keyword, 0);\n chunkData[keyword.length] = 0; // null separator\n chunkData.set(text, keyword.length + 1);\n\n return buildChunk('tEXt', chunkData);\n}\n\n/**\n * Serialize an iTXt chunk to binary\n */\nfunction serializeITXtChunk(chunk: ITXtChunk): Uint8Array {\n // Encode strings\n const keyword = utf8Encode(chunk.keyword);\n const languageTag = utf8Encode(chunk.languageTag);\n const translatedKeyword = utf8Encode(chunk.translatedKeyword);\n const text = utf8Encode(chunk.text);\n\n // Calculate data size\n const dataSize =\n keyword.length +\n 1 + // null\n 1 + // compression flag\n 1 + // compression method\n languageTag.length +\n 1 + // null\n translatedKeyword.length +\n 1 + // null\n text.length;\n\n const chunkData = new Uint8Array(dataSize);\n let offset = 0;\n\n // Write keyword\n chunkData.set(keyword, offset);\n offset += keyword.length;\n chunkData[offset++] = 0; // null\n\n // Write compression flag and method\n chunkData[offset++] = chunk.compressionFlag;\n chunkData[offset++] = chunk.compressionMethod;\n\n // Write language tag\n chunkData.set(languageTag, offset);\n offset += languageTag.length;\n chunkData[offset++] = 0; // null\n\n // Write translated keyword\n chunkData.set(translatedKeyword, offset);\n offset += translatedKeyword.length;\n chunkData[offset++] = 0; // null\n\n // Write text\n chunkData.set(text, offset);\n\n return buildChunk('iTXt', chunkData);\n}\n\n/**\n * Build a complete PNG chunk with length, type, data, and CRC\n */\nfunction buildChunk(type: string, data: Uint8Array): Uint8Array {\n const chunk = new Uint8Array(4 + 4 + data.length + 4);\n\n // Write length (4 bytes, big-endian)\n writeUint32BE(chunk, 0, data.length);\n\n // Write type (4 bytes)\n for (let i = 0; i < 4; i++) {\n chunk[4 + i] = type.charCodeAt(i);\n }\n\n // Write data\n chunk.set(data, 8);\n\n // Calculate and write CRC (over type + data)\n const crcData = chunk.slice(4, 8 + data.length);\n const crc = calculateCrc32(crcData);\n writeUint32BE(chunk, 8 + data.length, crc);\n\n return chunk;\n}\n\n/**\n * Encode string as Latin-1 bytes\n */\nfunction latin1Encode(str: string): Uint8Array {\n const bytes = new Uint8Array(str.length);\n for (let i = 0; i < str.length; i++) {\n bytes[i] = str.charCodeAt(i) & 0xff;\n }\n return bytes;\n}\n\n/**\n * Encode string as UTF-8 bytes\n */\nfunction utf8Encode(str: string): Uint8Array {\n return new TextEncoder().encode(str);\n}\n\n// ============================================================================\n// CRC-32 Implementation (IEEE polynomial)\n// ============================================================================\n\n/** CRC-32 lookup table */\nconst CRC_TABLE = makeCrcTable();\n\n/**\n * Generate CRC-32 lookup table\n */\nfunction makeCrcTable(): Uint32Array {\n const table = new Uint32Array(256);\n for (let n = 0; n < 256; n++) {\n let c = n;\n for (let k = 0; k < 8; k++) {\n if (c & 1) {\n c = 0xedb88320 ^ (c >>> 1);\n } else {\n c = c >>> 1;\n }\n }\n table[n] = c >>> 0;\n }\n return table;\n}\n\n/**\n * Calculate CRC-32 checksum\n */\nfunction 
calculateCrc32(data: Uint8Array): number {\n let crc = 0xffffffff;\n for (let i = 0; i < data.length; i++) {\n crc = (CRC_TABLE[(crc ^ (data[i] ?? 0)) & 0xff] ?? 0) ^ (crc >>> 8);\n }\n return (crc ^ 0xffffffff) >>> 0;\n}\n","import type { MetadataSegment, WebpWriteResult } from '../types';\nimport { Result } from '../types';\nimport { arraysEqual, writeUint32LE } from '../utils/binary';\nimport { buildExifTiffData } from './exif';\n\nimport { isWebp } from '../utils/binary';\n\n/** WebP file signature: \"RIFF\" */\nconst RIFF_SIGNATURE = new Uint8Array([0x52, 0x49, 0x46, 0x46]);\n\n/** WebP format marker: \"WEBP\" */\nconst WEBP_MARKER = new Uint8Array([0x57, 0x45, 0x42, 0x50]);\n\n/** EXIF chunk type */\nconst EXIF_CHUNK_TYPE = new Uint8Array([0x45, 0x58, 0x49, 0x46]);\n\n/**\n * Write WebP metadata to binary data\n *\n * Replaces existing EXIF chunk with new metadata.\n * All segments are written to the EXIF chunk based on their source type.\n *\n * @param data - Original WebP file data as Uint8Array\n * @param segments - Metadata segments to embed\n * @returns Result containing new WebP data with embedded metadata\n */\nexport function writeWebpMetadata(\n data: Uint8Array,\n segments: MetadataSegment[],\n): WebpWriteResult {\n // Validate WebP signature\n if (!isWebp(data)) {\n return Result.error({ type: 'invalidSignature' });\n }\n\n // Collect all chunks except EXIF\n const collectResult = collectNonExifChunks(data);\n if (!collectResult.ok) {\n return collectResult;\n }\n\n const { chunks } = collectResult.value;\n\n // Build new EXIF chunk from segments\n const exifChunk = buildExifChunk(segments);\n\n // Calculate new file size (excluding RIFF header)\n let newFileSize = 4; // \"WEBP\"\n for (const chunk of chunks) {\n newFileSize += chunk.length;\n }\n if (exifChunk) {\n newFileSize += exifChunk.length;\n }\n\n // Build output\n const output = new Uint8Array(8 + newFileSize);\n let offset = 0;\n\n // Write RIFF header\n output.set(RIFF_SIGNATURE, offset);\n offset += 4;\n writeUint32LE(output, offset, newFileSize);\n offset += 4;\n\n // Write WEBP marker\n output.set(WEBP_MARKER, offset);\n offset += 4;\n\n // Write EXIF chunk first if we have one (after VP8/VP8L/VP8X)\n // EXIF should come after the image chunk for best compatibility\n let exifWritten = false;\n\n for (const chunk of chunks) {\n // Write chunks in original order\n output.set(chunk, offset);\n offset += chunk.length;\n\n // Write EXIF after first image-related chunk (VP8, VP8L, VP8X)\n if (!exifWritten && exifChunk && isImageChunk(chunk)) {\n output.set(exifChunk, offset);\n offset += exifChunk.length;\n exifWritten = true;\n }\n }\n\n // If EXIF wasn't written yet (no VP8* chunk found), append it\n if (!exifWritten && exifChunk) {\n output.set(exifChunk, offset);\n }\n\n return Result.ok(output);\n}\n\n/**\n * Check if chunk is an image-related chunk (VP8, VP8L, VP8X)\n */\nfunction isImageChunk(chunk: Uint8Array): boolean {\n if (chunk.length < 4) return false;\n const type = String.fromCharCode(\n chunk[0] ?? 0,\n chunk[1] ?? 0,\n chunk[2] ?? 0,\n chunk[3] ?? 
0,\n );\n return type === 'VP8 ' || type === 'VP8L' || type === 'VP8X';\n}\n\n/**\n * Collect all chunks except EXIF\n */\nfunction collectNonExifChunks(\n data: Uint8Array,\n): Result<\n { chunks: Uint8Array[]; firstChunkType: string },\n { type: 'invalidRiffStructure'; message: string }\n> {\n const chunks: Uint8Array[] = [];\n let firstChunkType = '';\n\n // Start after RIFF header (12 bytes: \"RIFF\" + size + \"WEBP\")\n let offset = 12;\n\n while (offset < data.length - 8) {\n // Read chunk type (4 bytes)\n const chunkType = data.slice(offset, offset + 4);\n const typeStr = String.fromCharCode(\n chunkType[0] ?? 0,\n chunkType[1] ?? 0,\n chunkType[2] ?? 0,\n chunkType[3] ?? 0,\n );\n\n if (!firstChunkType) {\n firstChunkType = typeStr;\n }\n\n // Read chunk size (4 bytes, little-endian)\n const chunkSize =\n (data[offset + 4] ?? 0) |\n ((data[offset + 5] ?? 0) << 8) |\n ((data[offset + 6] ?? 0) << 16) |\n ((data[offset + 7] ?? 0) << 24);\n\n // Validate chunk\n if (offset + 8 + chunkSize > data.length) {\n return Result.error({\n type: 'invalidRiffStructure',\n message: `Chunk extends beyond file at offset ${offset}`,\n });\n }\n\n // Keep all chunks except EXIF\n if (!arraysEqual(chunkType, EXIF_CHUNK_TYPE)) {\n // Include type + size + data (+ padding if odd)\n const paddedSize = chunkSize + (chunkSize % 2);\n const chunkData = data.slice(offset, offset + 8 + paddedSize);\n chunks.push(chunkData);\n }\n\n // Move to next chunk (chunk size + type + size fields)\n // RIFF chunks are padded to even byte boundaries\n const paddedSize = chunkSize + (chunkSize % 2);\n offset += 8 + paddedSize;\n }\n\n return Result.ok({ chunks, firstChunkType });\n}\n\n/**\n * Build EXIF chunk from metadata segments\n */\nfunction buildExifChunk(segments: MetadataSegment[]): Uint8Array | null {\n // Filter Exif-compatible segments\n const exifSegments = segments.filter(\n (s) =>\n s.source.type === 'exifUserComment' ||\n s.source.type === 'exifImageDescription' ||\n s.source.type === 'exifMake',\n );\n\n if (exifSegments.length === 0) {\n return null;\n }\n\n const tiffData = buildExifTiffData(exifSegments);\n\n if (tiffData.length === 0) {\n return null;\n }\n\n // Build EXIF chunk: type (4) + size (4) + TIFF data\n const chunkSize = tiffData.length;\n const paddedSize = chunkSize + (chunkSize % 2);\n const chunk = new Uint8Array(8 + paddedSize);\n\n chunk.set(EXIF_CHUNK_TYPE, 0);\n writeUint32LE(chunk, 4, chunkSize);\n chunk.set(tiffData, 8);\n\n return chunk;\n}\n","/**\n * Write API for sd-metadata\n *\n * Handles writing metadata to images with automatic format conversion.\n * Supports PNG, JPEG, and WebP formats.\n */\n\nimport { convertMetadata } from '../converters';\nimport type { ParseResult } from '../types';\nimport { Result } from '../types';\nimport type { ImageFormat } from '../utils/binary';\nimport { detectFormat } from '../utils/binary';\nimport { writeJpegMetadata } from '../writers/jpeg';\nimport { writePngMetadata } from '../writers/png';\nimport { writeWebpMetadata } from '../writers/webp';\n\n// ============================================================================\n// Public API\n// ============================================================================\n\n/**\n * Result of the write operation\n */\nexport type WriteResult = Result<\n Uint8Array,\n | { type: 'unsupportedFormat' }\n | { type: 'conversionFailed'; message: string }\n | { type: 'writeFailed'; message: string }\n>;\n\n/**\n * Options for write operation\n */\nexport interface WriteOptions {\n /**\n * Force 
blind conversion for unrecognized formats\n *\n * When true, converts raw chunks/segments between formats even when\n * the generating software is unknown. Enables format conversion for\n * unknown/future tools without parser implementation.\n *\n * When false (default), returns error for unrecognized formats.\n *\n * @default false\n */\n force?: boolean;\n}\n\n/**\n * Write metadata to an image\n *\n * Automatically detects the target image format and converts the metadata\n * if necessary.\n *\n * @param data - Target image file data\n * @param metadata - ParseResult from `read()` (must be 'success' or contain raw data)\n * @param options - Write options (e.g., { force: true } for blind conversion)\n * @returns New image data with embedded metadata\n */\nexport function write(\n data: Uint8Array,\n metadata: ParseResult,\n options?: WriteOptions,\n): WriteResult {\n const targetFormat = detectFormat(data);\n if (!targetFormat) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Handle empty or invalid metadata\n if (metadata.status === 'empty') {\n // Strip metadata (write empty segments/chunks)\n const result = HELPERS[targetFormat].writeEmpty(data, []);\n if (!result.ok) {\n return Result.error({ type: 'writeFailed', message: result.error.type });\n }\n return Result.ok(result.value);\n }\n\n if (metadata.status === 'invalid') {\n return Result.error({\n type: 'writeFailed',\n message: 'Cannot write invalid metadata',\n });\n }\n\n // Conversion logic handled by convertMetadata\n // If source == target, convertMetadata returns raw as-is.\n // If source != target, it tries to convert.\n // If force option is set, enables blind conversion for unrecognized formats.\n const conversionResult = convertMetadata(\n metadata,\n targetFormat,\n options?.force ?? 
false,\n );\n\n if (!conversionResult.ok) {\n return Result.error({\n type: 'conversionFailed',\n message: `Failed to convert metadata: ${conversionResult.error.type}`,\n });\n }\n\n const newRaw = conversionResult.value;\n\n // Dispatch to writer\n if (targetFormat === 'png' && newRaw.format === 'png') {\n const result = writePngMetadata(data, newRaw.chunks);\n if (!result.ok)\n return Result.error({ type: 'writeFailed', message: result.error.type });\n return Result.ok(result.value);\n }\n\n if (targetFormat === 'jpeg' && newRaw.format === 'jpeg') {\n const result = writeJpegMetadata(data, newRaw.segments);\n if (!result.ok)\n return Result.error({ type: 'writeFailed', message: result.error.type });\n return Result.ok(result.value);\n }\n\n if (targetFormat === 'webp' && newRaw.format === 'webp') {\n const result = writeWebpMetadata(data, newRaw.segments);\n if (!result.ok)\n return Result.error({ type: 'writeFailed', message: result.error.type });\n return Result.ok(result.value);\n }\n\n return Result.error({\n type: 'writeFailed',\n message: 'Internal error: format mismatch after conversion',\n });\n}\n\n// ============================================================================\n// Format Helpers\n// ============================================================================\n\n/** Format-specific helper functions */\nconst HELPERS = {\n png: {\n writeEmpty: writePngMetadata,\n },\n jpeg: {\n writeEmpty: writeJpegMetadata,\n },\n webp: {\n writeEmpty: writeWebpMetadata,\n },\n} as const satisfies Record<ImageFormat, unknown>;\n","/**\n * A1111-format metadata serialization utilities\n *\n * Converts GenerationMetadata to A1111 (SD WebUI) plain text format.\n */\n\nimport type {\n GenerationMetadata,\n HiresSettings,\n NovelAIMetadata,\n UpscaleSettings,\n} from '../types';\n\n/**\n * Normalize line endings to LF (\\n)\n *\n * Ensures consistent line endings across different platforms.\n * Converts CRLF (\\r\\n) and CR (\\r) to LF (\\n).\n *\n * @param text - Text with potentially mixed line endings\n * @returns Text with normalized line endings (LF only)\n */\nfunction normalizeLineEndings(text: string): string {\n return text.replace(/\\r\\n/g, '\\n').replace(/\\r/g, '\\n');\n}\n\n/**\n * Merge upscale and hires settings\n *\n * A1111 format does not have separate upscale settings.\n * If both exist, hires takes priority.\n *\n * @param hires - Hires settings\n * @param upscale - Upscale settings\n * @returns Merged hires settings\n */\nfunction mergeUpscaleHires(\n hires?: HiresSettings,\n upscale?: UpscaleSettings,\n): HiresSettings | undefined {\n // If hires exists, use it as-is (priority)\n if (hires) {\n return hires;\n }\n\n // If only upscale exists, convert to hires format\n if (upscale) {\n return {\n scale: upscale.scale,\n upscaler: upscale.upscaler,\n // steps and denoise are not available from upscale\n };\n }\n\n return undefined;\n}\n\n/**\n * Build settings line from metadata\n *\n * Generates the \"Steps: X, Sampler: Y, ...\" line.\n *\n * @param metadata - Generation metadata\n * @returns Settings line string\n */\nfunction buildSettingsLine(metadata: GenerationMetadata): string {\n const parts: string[] = [];\n\n // Core settings\n if (metadata.sampling?.steps !== undefined) {\n parts.push(`Steps: ${metadata.sampling.steps}`);\n }\n\n if (metadata.sampling?.sampler) {\n parts.push(`Sampler: ${metadata.sampling.sampler}`);\n }\n\n if (metadata.sampling?.scheduler) {\n parts.push(`Schedule type: ${metadata.sampling.scheduler}`);\n }\n\n if 
(metadata.sampling?.cfg !== undefined) {\n parts.push(`CFG scale: ${metadata.sampling.cfg}`);\n }\n\n if (metadata.sampling?.seed !== undefined) {\n parts.push(`Seed: ${metadata.sampling.seed}`);\n }\n\n // Size (only if both width and height are positive)\n if (metadata.width > 0 && metadata.height > 0) {\n parts.push(`Size: ${metadata.width}x${metadata.height}`);\n }\n\n // Model\n if (metadata.model?.hash) {\n parts.push(`Model hash: ${metadata.model.hash}`);\n }\n\n if (metadata.model?.name) {\n parts.push(`Model: ${metadata.model.name}`);\n }\n\n // Optional: Clip skip\n if (metadata.sampling?.clipSkip !== undefined) {\n parts.push(`Clip skip: ${metadata.sampling.clipSkip}`);\n }\n\n // Hires.fix / Upscale (merged)\n const mergedHires = mergeUpscaleHires(metadata.hires, metadata.upscale);\n\n if (mergedHires) {\n if (mergedHires.denoise !== undefined) {\n parts.push(`Denoising strength: ${mergedHires.denoise}`);\n }\n\n if (mergedHires.scale !== undefined) {\n parts.push(`Hires upscale: ${mergedHires.scale}`);\n }\n\n if (mergedHires.steps !== undefined) {\n parts.push(`Hires steps: ${mergedHires.steps}`);\n }\n\n if (mergedHires.upscaler) {\n parts.push(`Hires upscaler: ${mergedHires.upscaler}`);\n }\n }\n\n return parts.join(', ');\n}\n\n/**\n * Build NovelAI character prompts section\n *\n * Generates character prompts delimited by comment lines.\n * Format: # Character N [x, y]:\\n[prompt]\n *\n * @param metadata - NovelAI metadata\n * @returns Array of lines (including both header and prompt lines)\n */\nfunction buildCharacterPromptsSection(metadata: NovelAIMetadata): string[] {\n if (!metadata.characterPrompts || metadata.characterPrompts.length === 0) {\n return [];\n }\n\n const lines: string[] = [];\n\n for (const [index, cp] of metadata.characterPrompts.entries()) {\n const characterNum = index + 1;\n const coords = cp.center ? ` [${cp.center.x}, ${cp.center.y}]` : '';\n\n // Header line: # Character N [x, y]:\n lines.push(`# Character ${characterNum}${coords}:`);\n\n // Prompt line (normalized)\n lines.push(normalizeLineEndings(cp.prompt));\n }\n\n return lines;\n}\n\n/**\n * Format metadata as SD WebUI (A1111) plain text\n *\n * Converts GenerationMetadata to human-readable text in the SD WebUI format.\n * This provides a standard, tool-agnostic way to display generation metadata\n * without needing to manually read individual properties.\n *\n * The output format follows the A1111/SD WebUI convention:\n * ```\n * positive prompt\n * [character prompts for NovelAI]\n * Negative prompt: negative prompt\n * Steps: 20, Sampler: Euler a, CFG scale: 7, Seed: 12345, ...\n * ```\n *\n * @param metadata - Generation metadata from any tool\n * @returns Human-readable text in SD WebUI format\n *\n * @example\n * ```typescript\n * import { read, formatAsWebUI } from '@enslo/sd-metadata';\n *\n * const result = read(imageData);\n * if (result.status === 'success') {\n * const text = formatAsWebUI(result.metadata);\n * console.log(text);\n * // Output:\n * // masterpiece, 1girl\n * // Negative prompt: low quality, bad anatomy\n * // Steps: 20, Sampler: Euler a, CFG scale: 7, Seed: 12345, Size: 512x768, Model: model.safetensors\n * }\n * ```\n */\nexport function formatAsWebUI(metadata: GenerationMetadata): string {\n const sections: string[] = [];\n\n // 1. Positive prompt (always present, normalized)\n sections.push(normalizeLineEndings(metadata.prompt));\n\n // 2. 
Character prompts (NovelAI only)\n if (metadata.software === 'novelai') {\n const characterLines = buildCharacterPromptsSection(metadata);\n if (characterLines.length > 0) {\n sections.push(characterLines.join('\\n'));\n }\n }\n\n // 3. Negative prompt (if present, normalized)\n if (metadata.negativePrompt) {\n sections.push(\n `Negative prompt: ${normalizeLineEndings(metadata.negativePrompt)}`,\n );\n }\n\n // 4. Settings line\n const settingsLine = buildSettingsLine(metadata);\n if (settingsLine) {\n sections.push(settingsLine);\n }\n\n // Join all sections with newlines\n return sections.join('\\n');\n}\n","/**\n * WebUI (A1111) format writer for sd-metadata\n *\n * Converts any GenerationMetadata to SD WebUI (A1111) plain text format\n * and writes it to PNG, JPEG, or WebP images.\n */\n\nimport {\n createEncodedChunk,\n getEncodingStrategy,\n} from '../converters/chunk-encoding';\nimport { formatAsWebUI } from '../serializers/a1111';\nimport type {\n GenerationMetadata,\n MetadataSegment,\n PngTextChunk,\n} from '../types';\nimport { Result } from '../types';\nimport { detectFormat } from '../utils/binary';\nimport { writeJpegMetadata } from '../writers/jpeg';\nimport { writePngMetadata } from '../writers/png';\nimport { writeWebpMetadata } from '../writers/webp';\nimport type { WriteResult } from './write';\n\n/**\n * Write metadata to an image in SD WebUI format\n *\n * Converts the provided GenerationMetadata to SD WebUI (A1111) plain text\n * format and embeds it into the image. This allows you to:\n * - Create custom metadata from scratch\n * - Modify existing metadata\n * - Convert metadata from any tool to SD WebUI-compatible format\n *\n * The metadata is stored differently based on image format:\n * - PNG: `parameters` tEXt/iTXt chunk (encoding auto-selected based on content)\n * - JPEG/WebP: Exif UserComment field\n *\n * @param data - Target image file data (PNG, JPEG, or WebP)\n * @param metadata - Generation metadata to embed\n * @returns New image data with embedded metadata, or error\n *\n * @example\n * ```typescript\n * import { writeAsWebUI } from '@enslo/sd-metadata';\n *\n * // Create custom metadata\n * const metadata = {\n * software: 'sd-webui',\n * prompt: 'masterpiece, 1girl',\n * negativePrompt: 'lowres, bad quality',\n * width: 512,\n * height: 768,\n * sampling: { steps: 20, sampler: 'Euler a', cfg: 7, seed: 12345 },\n * model: { name: 'model.safetensors' },\n * };\n *\n * // Embed into image\n * const result = writeAsWebUI(imageData, metadata);\n * if (result.ok) {\n * writeFileSync('output.png', result.value);\n * }\n * ```\n */\nexport function writeAsWebUI(\n data: Uint8Array,\n metadata: GenerationMetadata,\n): WriteResult {\n // Detect image format\n const format = detectFormat(data);\n if (!format) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Convert metadata to A1111 plain text format\n const text = formatAsWebUI(metadata);\n\n // Create format-specific metadata structures\n let writeResult:\n | import('../types').PngWriteResult\n | import('../types').JpegWriteResult\n | import('../types').WebpWriteResult;\n\n if (format === 'png') {\n // PNG: Create parameters chunk with dynamic encoding\n const chunks = createPngChunks(text);\n writeResult = writePngMetadata(data, chunks);\n } else if (format === 'jpeg') {\n // JPEG: Create Exif UserComment segment\n const segments = createExifSegments(text);\n writeResult = writeJpegMetadata(data, segments);\n } else if (format === 'webp') {\n // WebP: Create Exif UserComment segment\n const 
segments = createExifSegments(text);\n writeResult = writeWebpMetadata(data, segments);\n } else {\n // Shouldn't reach here due to detectFormat check above\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Handle write errors\n if (!writeResult.ok) {\n return Result.error({\n type: 'writeFailed',\n message: writeResult.error.type,\n });\n }\n\n return Result.ok(writeResult.value);\n}\n\n/**\n * Create PNG text chunks for SD WebUI format\n *\n * Uses dynamic encoding strategy (tEXt for ASCII, iTXt for non-ASCII).\n *\n * @param text - A1111-format plain text\n * @returns PNG text chunks\n */\nfunction createPngChunks(text: string): PngTextChunk[] {\n const strategy = getEncodingStrategy('a1111');\n return createEncodedChunk('parameters', text, strategy);\n}\n\n/**\n * Create Exif UserComment segment for JPEG/WebP\n *\n * @param text - A1111-format plain text\n * @returns Metadata segment array\n */\nfunction createExifSegments(text: string): MetadataSegment[] {\n return [\n {\n source: { type: 'exifUserComment' },\n data: text,\n },\n ];\n}\n","/**\n * Raw metadata serialization utilities\n *\n * Formats RawMetadata as human-readable plain text.\n */\n\nimport type { RawMetadata } from '../types';\n\n/**\n * Format raw metadata as plain text\n *\n * Extracts text content from RawMetadata and returns it as a simple string.\n * Multiple entries are separated by double newlines.\n *\n * This is useful for displaying unrecognized metadata to end users\n * without needing to manually iterate over chunks or segments.\n *\n * @param raw - Raw metadata from ParseResult\n * @returns Plain text content from the metadata\n *\n * @example\n * ```typescript\n * import { read, formatRaw } from '@enslo/sd-metadata';\n *\n * const result = read(imageData);\n * if (result.status === 'unrecognized') {\n * console.log(formatRaw(result.raw));\n * // Output: the raw text content without prefixes\n * }\n * ```\n */\nexport function formatRaw(raw: RawMetadata): string {\n switch (raw.format) {\n case 'png':\n return raw.chunks.map((chunk) => chunk.text).join('\\n\\n');\n\n case 'jpeg':\n case 'webp':\n return raw.segments.map((segment) => segment.data).join('\\n\\n');\n 
}\n}\n"],"mappings":";AAQO,IAAM,SAAS;AAAA,EACpB,IAAI,CAAO,WAA4B,EAAE,IAAI,MAAM,MAAM;AAAA,EACzD,OAAO,CAAO,WAA4B,EAAE,IAAI,OAAO,MAAM;AAC/D;;;ACgBO,SAAS,WAAW,SAA+C;AAExE,QAAM,kBAAkB,QAAQ;AAAA,IAC9B,CAAC,MAAM,EAAE,YAAY,gBAAgB,EAAE,YAAY;AAAA,EACrD;AACA,MAAI,CAAC,iBAAiB;AACpB,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAEA,QAAM,OAAO,gBAAgB;AAI7B,QAAM,eACJ,KAAK,SAAS,QAAQ,KACtB,KAAK,SAAS,UAAU,KACxB,KAAK,SAAS,kBAAkB;AAElC,MAAI,CAAC,cAAc;AACjB,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,EAAE,QAAQ,gBAAgB,SAAS,IAAI,oBAAoB,IAAI;AAGrE,QAAM,cAAc,cAAc,QAAQ;AAG1C,QAAM,OAAO,YAAY,IAAI,MAAM,KAAK;AACxC,QAAM,CAAC,OAAO,MAAM,IAAI,UAAU,IAAI;AAGtC,QAAM,UAAU,YAAY,IAAI,SAAS;AACzC,QAAM,MAAM,YAAY,IAAI,KAAK;AACjC,QAAM,WAAW,sBAAsB,SAAS,GAAG;AAGnD,QAAM,WAA0C;AAAA,IAC9C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,QAAM,YAAY,YAAY,IAAI,OAAO;AACzC,QAAM,YAAY,YAAY,IAAI,YAAY;AAC9C,MAAI,aAAa,WAAW;AAC1B,aAAS,QAAQ;AAAA,MACf,MAAM;AAAA,MACN,MAAM;AAAA,IACR;AAAA,EACF;AAGA,QAAM,UAAU,YAAY,IAAI,SAAS;AACzC,QAAM,YAAY,YAAY,IAAI,eAAe;AACjD,QAAM,QAAQ,YAAY,YAAY,IAAI,OAAO,CAAC;AAClD,QAAM,MAAM;AAAA,IACV,YAAY,IAAI,WAAW,KAAK,YAAY,IAAI,WAAW;AAAA,EAC7D;AACA,QAAM,OAAO,YAAY,YAAY,IAAI,MAAM,CAAC;AAChD,QAAM,WAAW,YAAY,YAAY,IAAI,WAAW,CAAC;AAEzD,MACE,YAAY,UACZ,cAAc,UACd,UAAU,UACV,QAAQ,UACR,SAAS,UACT,aAAa,QACb;AACA,aAAS,WAAW;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAGA,QAAM,aAAa,YAAY,YAAY,IAAI,eAAe,CAAC;AAC/D,QAAM,WAAW,YAAY,IAAI,gBAAgB;AACjD,QAAM,aAAa,YAAY,YAAY,IAAI,aAAa,CAAC;AAC7D,QAAM,UAAU,YAAY,YAAY,IAAI,oBAAoB,CAAC;AACjE,QAAM,YAAY,YAAY,IAAI,YAAY;AAE9C,MACE,CAAC,YAAY,WAAW,UAAU,YAAY,OAAO,EAAE;AAAA,IACrD,CAAC,MAAM,MAAM;AAAA,EACf,GACA;AACA,UAAM,CAAC,UAAU,IAAI,UAAU,aAAa,EAAE;AAC9C,UAAM,QAAQ,cAAc,aAAa;AACzC,aAAS,QAAQ,EAAE,OAAO,UAAU,OAAO,YAAY,QAAQ;AAAA,EACjE;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;AAKA,SAAS,oBAAoB,MAI3B;AAEA,QAAM,gBAAgB,KAAK,QAAQ,kBAAkB;AAGrD,QAAM,aAAa,KAAK,QAAQ,QAAQ;AAExC,MAAI,kBAAkB,MAAM,eAAe,IAAI;AAE7C,WAAO,EAAE,QAAQ,KAAK,KAAK,GAAG,gBAAgB,IAAI,UAAU,GAAG;AAAA,EACjE;AAEA,MAAI,kBAAkB,IAAI;AAExB,UAAMA,iBAAgB,KAAK,YAAY,MAAM,UAAU;AACvD,WAAO;AAAA,MACL,QAAQ,KAAK,MAAM,GAAGA,cAAa,EAAE,KAAK;AAAA,MAC1C,gBAAgB;AAAA,MAChB,UAAU,KAAK,MAAMA,cAAa,EAAE,KAAK;AAAA,IAC3C;AAAA,EACF;AAEA,MAAI,eAAe,IAAI;AAErB,WAAO;AAAA,MACL,QAAQ,KAAK,MAAM,GAAG,aAAa,EAAE,KAAK;AAAA,MAC1C,gBAAgB,KAAK,MAAM,gBAAgB,EAAE,EAAE,KAAK;AAAA,MACpD,UAAU;AAAA,IACZ;AAAA,EACF;AAGA,QAAM,gBAAgB,KAAK,YAAY,MAAM,UAAU;AAEvD,SAAO;AAAA,IACL,QAAQ,KAAK,MAAM,GAAG,aAAa,EAAE,KAAK;AAAA,IAC1C,gBAAgB,KAAK,MAAM,gBAAgB,IAAI,aAAa,EAAE,KAAK;AAAA,IACnE,UAAU,KAAK,MAAM,aAAa,EAAE,KAAK;AAAA,EAC3C;AACF;AAQA,SAAS,cAAc,UAAuC;AAC5D,QAAM,SAAS,oBAAI,IAAoB;AACvC,MAAI,CAAC,SAAU,QAAO;AAKtB,QAAM,QACJ;AAGF,QAAM,UAAU,MAAM,KAAK,SAAS,SAAS,KAAK,CAAC;AAEnD,aAAW,SAAS,SAAS;AAC3B,UAAM,OAAO,MAAM,CAAC,KAAK,IAAI,KAAK;AAClC,UAAM,SAAS,MAAM,CAAC,KAAK,IAAI,KAAK;AACpC,WAAO,IAAI,KAAK,KAAK;AAAA,EACvB;AAEA,SAAO;AACT;AAKA,SAAS,UAAU,MAAgC;AACjD,QAAM,QAAQ,KAAK,MAAM,aAAa;AACtC,MAAI,CAAC,MAAO,QAAO,CAAC,GAAG,CAAC;AACxB,SAAO;AAAA,IACL,OAAO,SAAS,MAAM,CAAC,KAAK,KAAK,EAAE;AAAA,IACnC,OAAO,SAAS,MAAM,CAAC,KAAK,KAAK,EAAE;AAAA,EACrC;AACF;AAKA,SAAS,YAAY,OAA+C;AAClE,MAAI,UAAU,OAAW,QAAO;AAChC,QAAM,MAAM,OAAO,WAAW,KAAK;AACnC,SAAO,OAAO,MAAM,GAAG,IAAI,SAAY;AACzC;AAKA,SAAS,sBACP,SACA,KACgD;AAEhD,MAAI,QAAQ,UAAW,QAAO;AAG9B,MAAI,CAAC,QAAS,QAAO;AACrB,MAAI,YAAY,MAAO,QAAO;AAE9B,MAAI,YAAY,UAAW,QAAO;AAClC,MAAI,aAAa,KAAK,OAAO,EAAG,QAAO;AACvC,SAAO;AACT;;;AClOO,SAAS,iBAAiB,SAAuC;AACtE,SAAO,OAAO;AAAA,IACZ,OAAO,YAAY,QAAQ,IAAI,CAAC,MAAM,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,CAAC;AAAA,EAC5D;AACF;;;ACHO,SAAS,UACd,MACoD;AACpD,MAAI;AACF,WAAO,OA
AO,GAAG,KAAK,MAAM,IAAI,CAAM;AAAA,EACxC,QAAQ;AACN,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AACF;;;ACuCO,SAAS,aAAa,SAA+C;AAC1E,QAAM,cAAc,iBAAiB,OAAO;AAG5C,QAAM,aAAa,eAAe,WAAW;AAC7C,MAAI,CAAC,YAAY;AACf,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,SAAS,UAAuB,UAAU;AAChD,MAAI,CAAC,OAAO,IAAI;AACd,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AACA,QAAM,SAAS,OAAO;AAGtB,QAAM,QAAQ,OAAO,OAAO,MAAM;AAClC,MAAI,CAAC,MAAM,KAAK,CAAC,SAAS,gBAAgB,IAAI,GAAG;AAC/C,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,WAAW,SAAS,QAAQ,CAAC,SAAS,CAAC;AAG7C,QAAM,eAAe,SAAS,QAAQ,CAAC,mBAAmB,CAAC;AAC3D,QAAM,eAAe,SAAS,QAAQ,CAAC,mBAAmB,CAAC;AAC3D,QAAM,mBAAmB,YAAY,YAAY;AACjD,QAAM,mBAAmB,YAAY,YAAY;AAGjD,QAAM,cAAc,SAAS,QAAQ,CAAC,kBAAkB,CAAC;AACzD,QAAM,cAAc,cAAc,OAAO,YAAY,OAAO,KAAK,KAAK,IAAI;AAC1E,QAAM,eAAe,cAAc,OAAO,YAAY,OAAO,MAAM,KAAK,IAAI;AAG5E,QAAM,YAAY,qBAAqB,MAAM;AAC7C,QAAM,eAAe,oBAAoB,WAAW,UAAU;AAC9D,QAAM,eAAe,oBAAoB,WAAW,kBAAkB;AACtE,QAAM,QAAQ,eAAe,WAAW,SAAS;AACjD,QAAM,SAAS,gBAAgB,WAAW,UAAU;AAGpD,QAAM,WAA8C;AAAA,IAClD,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,gBAAgB;AAAA,IAChB;AAAA,IACA;AAAA,IACA,OAAO;AAAA;AAAA,EACT;AAGA,QAAM,aAAa,SAAS,QAAQ,CAAC,uBAAuB,CAAC,GAAG,QAC5D;AAEJ,MAAI,YAAY;AACd,aAAS,QAAQ,EAAE,MAAM,OAAO,UAAU,EAAE;AAAA,EAC9C,WAAW,WAAW,WAAW;AAC/B,aAAS,QAAQ,EAAE,MAAM,UAAU,UAAU;AAAA,EAC/C;AAGA,MAAI,UAAU;AACZ,aAAS,WAAW;AAAA,MAClB,MAAM,SAAS,OAAO;AAAA,MACtB,OAAO,SAAS,OAAO;AAAA,MACvB,KAAK,SAAS,OAAO;AAAA,MACrB,SAAS,SAAS,OAAO;AAAA,MACzB,WAAW,SAAS,OAAO;AAAA,IAC7B;AAAA,EACF,WAAW,WAAW;AACpB,aAAS,WAAW;AAAA,MAClB,MAAM,UAAU;AAAA,MAChB,OAAO,UAAU;AAAA,MACjB,KAAK,UAAU;AAAA,MACf,SAAS,UAAU;AAAA,IACrB;AAAA,EACF;AAGA,QAAM,aAAa,SAAS,QAAQ;AAAA,IAClC;AAAA,IACA;AAAA,EACF,CAAC,GAAG;AACJ,QAAM,aAAa,SAAS,QAAQ;AAAA,IAClC;AAAA,IACA;AAAA,EACF,CAAC,GAAG;AACJ,QAAM,eAAe,SAAS,QAAQ,CAAC,kBAAkB,CAAC,GAAG;AAE7D,MAAI,cAAc,YAAY;AAE5B,UAAM,aAAa,WAAW;AAC9B,UAAM,QACJ,cAAc,IACV,KAAK,MAAO,aAAa,cAAe,GAAG,IAAI,MAC/C;AAEN,QAAI,cAAc;AAChB,eAAS,QAAQ;AAAA,QACf,UAAU,WAAW;AAAA,QACrB;AAAA,QACA,OAAO,aAAa;AAAA,QACpB,SAAS,aAAa;AAAA,MACxB;AAAA,IACF,OAAO;AACL,eAAS,UAAU;AAAA,QACjB,UAAU,WAAW;AAAA,QACrB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,MAAI,WAAW,iBAAiB;AAC9B,UAAM,mBAAmB,UAAU,gBAAgB;AAAA,MACjD,CAAC,MAAM,EAAE,SAAS;AAAA,IACpB;AACA,QAAI,kBAAkB;AACpB,YAAM,gBAAgB,UAAU,SAAS;AACzC,UAAI,gBAAgB,KAAK,iBAAiB,cAAc;AACtD,cAAM,QAAQ,iBAAiB,eAAe;AAC9C,iBAAS,UAAU;AAAA,UACjB,OAAO,KAAK,MAAM,QAAQ,GAAG,IAAI;AAAA,QACnC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;AAWA,SAAS,eAAe,aAA8C;AAEpE,MAAI,YAAY,QAAQ;AAItB,WAAO,YAAY,OAAO,QAAQ,cAAc,QAAQ;AAAA,EAC1D;AAGA,QAAM,aAAa;AAAA,IACjB,YAAY;AAAA,IACZ,YAAY;AAAA,IACZ,YAAY;AAAA,IACZ,YAAY;AAAA;AAAA,IACZ,YAAY;AAAA;AAAA,EACd;AAEA,aAAW,aAAa,YAAY;AAClC,QAAI,CAAC,UAAW;AAGhB,QAAI,UAAU,WAAW,GAAG,GAAG;AAI7B,YAAM,UAAU,UACb,QAAQ,QAAQ,EAAE,EAClB,QAAQ,cAAc,QAAQ;AACjC,YAAM,SAAS,UAAmC,OAAO;AACzD,UAAI,CAAC,OAAO,GAAI;AAGhB,UAAI,OAAO,MAAM,UAAU,OAAO,OAAO,MAAM,WAAW,UAAU;AAClE,eAAO,KAAK,UAAU,OAAO,MAAM,MAAM;AAAA,MAC3C;AAEA,YAAM,SAAS,OAAO,OAAO,OAAO,KAAK;AACzC,UAAI,OAAO,KAAK,CAAC,MAAM,KAAK,OAAO,MAAM,YAAY,gBAAgB,CAAC,GAAG;AACvE,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AASA,SAAS,SAAS,QAAqB,MAAuC;AAC5E,SAAO,OAAO,QAAQ,MAAM,EAAE,KAAK,CAAC,CAAC,GAAG,MAAM,KAAK,SAAS,GAAG,CAAC,IAAI,CAAC;AACvE;AASA,SAAS,YAAY,MAAqC;AACxD,SAAO,OAAO,MAAM,OAAO,SAAS,WAAW,KAAK,OAAO,OAAO;AACpE;AAWA,SAAS,qBACP,QACkC;AAClC,QAAM,iBAAkB,OAAmC;AAC3D,MAAI,OAAO,mBAAmB,SAAU,QAAO;AAE/C,QAAM,SAAS,UAAgC,cAAc;AAC7D,SAAO,OAAO,KAAK,OAAO,QAAQ;AACpC;;;ACjSO,SAAS,eACd,SAC2B;AAC3B,QAAM,cAAc,iBAAiB,OAAO;AAG5C,QAAM,eAAe,qBAAqB,WAAW;AACrD,MAAI,aAAc,Q
AAO;AAGzB,QAAM,cAAc,qBAAqB,WAAW;AACpD,MAAI,YAAa,QAAO;AAGxB,QAAM,OAAO,YAAY,cAAc,YAAY,WAAW;AAC9D,MAAI,MAAM;AACR,WAAO,sBAAsB,IAAI;AAAA,EACnC;AAEA,SAAO;AACT;AAaA,SAAS,qBACP,aAC2B;AAM3B,MAAI,YAAY,aAAa,WAAW;AACtC,WAAO;AAAA,EACT;AAGA,MAAI,uBAAuB,aAAa;AACtC,WAAO;AAAA,EACT;AAGA,MAAI,qBAAqB,aAAa;AACpC,WAAO;AAAA,EACT;AAGA,MAAI,YAAY,aAAa;AAC3B,WAAO;AAAA,EACT;AAGA,MAAI,qBAAqB,eAAe,qBAAqB,aAAa;AACxE,WAAO;AAAA,EACT;AAQA,QAAM,aAAa,YAAY;AAC/B,MAAI,YAAY,SAAS,kBAAkB,GAAG;AAC5C,WAAO;AAAA,EACT;AAMA,QAAM,UAAU,YAAY;AAC5B,MAAI,SAAS,WAAW,GAAG,GAAG;AAC5B,WAAO,sBAAsB,OAAO;AAAA,EACtC;AAEA,SAAO;AACT;AAOA,SAAS,sBAAsB,SAA4C;AACzE,MAAI;AACF,UAAM,SAAS,KAAK,MAAM,OAAO;AAGjC,QAAI,uBAAuB,QAAQ;AACjC,aAAO;AAAA,IACT;AAGA,QAAI,YAAY,UAAU,cAAc,QAAQ;AAC9C,YAAM,WAAW,OAAO;AACxB,YAAM,SAAS,OAAO;AAEtB,YAAM,WACJ,OAAO,aAAa,YAAY,OAAO,WAAW;AACpD,YAAM,eACH,OAAO,aAAa,YAAY,SAAS,WAAW,GAAG,KACvD,OAAO,WAAW,YAAY,OAAO,WAAW,GAAG;AAEtD,UAAI,YAAY,cAAc;AAC5B,eAAO;AAAA,MACT;AAAA,IACF;AAGA,QAAI,sBAAsB,QAAQ;AAChC,aAAO;AAAA,IACT;AAGA,QAAI,YAAY,UAAU,gBAAgB,QAAQ;AAChD,YAAM,SAAS,OAAO,OAAO,cAAc,EAAE;AAC7C,UACE,OAAO,SAAS,kBAAkB,KAClC,OAAO,SAAS,eAAe,GAC/B;AACA,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO;AACT;AAQA,SAAS,qBACP,aAC2B;AAE3B,MAAI,YAAY,eAAe,cAAc,aAAa;AACxD,WAAO;AAAA,EACT;AAGA,MAAI,cAAc,aAAa;AAC7B,WAAO;AAAA,EACT;AAIA,MAAI,YAAY,aAAa;AAC3B,UAAM,aAAa,YAAY;AAC/B,QAAI,YAAY,WAAW,GAAG,GAAG;AAE/B,UAAI,WAAW,SAAS,kBAAkB,GAAG;AAC3C,eAAO;AAAA,MACT;AAGA,UAAI,WAAW,SAAS,YAAY,GAAG;AACrC,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAQA,SAAS,sBAAsB,MAAyC;AAEtE,MAAI,KAAK,WAAW,GAAG,GAAG;AACxB,WAAO,qBAAqB,IAAI;AAAA,EAClC;AAGA,SAAO,sBAAsB,IAAI;AACnC;AAUA,SAAS,qBAAqB,MAAyC;AAMrE,MAAI,KAAK,SAAS,kBAAkB,GAAG;AACrC,WAAO;AAAA,EACT;AAGA,MACE,KAAK,SAAS,4BAA4B,KAC1C,KAAK,SAAS,6BAA6B,GAC3C;AACA,WAAO;AAAA,EACT;AAGA,MAAI,KAAK,SAAS,8BAA8B,GAAG;AACjD,WAAO;AAAA,EACT;AAGA,MAAI,KAAK,SAAS,UAAU,KAAK,KAAK,SAAS,kBAAkB,GAAG;AAClE,WAAO;AAAA,EACT;AAOA,MACE,KAAK,SAAS,aAAa,KAC3B,KAAK,SAAS,kBAAkB,KAChC,KAAK,SAAS,gBAAgB,KAC9B,KAAK,SAAS,sBAAsB,KACpC,KAAK,SAAS,sBAAsB,KACpC,KAAK,SAAS,iBAAiB,GAC/B;AACA,WAAO;AAAA,EACT;AAGA,MAAI,KAAK,SAAS,SAAS,KAAK,KAAK,SAAS,cAAc,GAAG;AAC7D,WAAO;AAAA,EACT;AAGA,MAAI,KAAK,SAAS,UAAU,KAAK,KAAK,SAAS,cAAc,GAAG;AAC9D,WAAO;AAAA,EACT;AAOA,MAAI,KAAK,SAAS,UAAU,KAAK,KAAK,SAAS,SAAS,GAAG;AACzD,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAYA,SAAS,sBAAsB,MAAyC;AAMtE,MAAI,KAAK,SAAS,kBAAkB,KAAK,KAAK,SAAS,eAAe,GAAG;AACvE,WAAO;AAAA,EACT;AAMA,QAAM,eAAe,KAAK,MAAM,sBAAsB;AACtD,MAAI,cAAc;AAChB,UAAM,UAAU,aAAa,CAAC;AAG9B,QAAI,YAAY,SAAS,SAAS,WAAW,KAAK,GAAG;AACnD,aAAO;AAAA,IACT;AAGA,QAAI,SAAS,WAAW,GAAG,KAAK,OAAO,KAAK,OAAO,GAAG;AACpD,aAAO;AAAA,IACT;AAGA,QAAI,YAAY,WAAW;AACzB,aAAO;AAAA,IACT;AAAA,EACF;AAOA,MAAI,KAAK,SAAS,cAAc,KAAK,KAAK,SAAS,aAAa,GAAG;AACjE,WAAO;AAAA,EACT;AAGA,MAAI,KAAK,SAAS,oBAAoB,GAAG;AACvC,WAAO;AAAA,EACT;AAOA,MAAI,KAAK,SAAS,QAAQ,KAAK,KAAK,SAAS,UAAU,GAAG;AACxD,WAAO;AAAA,EACT;AAEA,SAAO;AACT;;;ACzSA,SAAS,SACP,MACA,MACA,MACe;AACf,SAAQ,KAAK,IAAI,KAAK,KAAK,IAAI;AACjC;AAOA,SAAS,iBAAiB,MAA8C;AACtE,MAAI,CAAC,KAAM,QAAO;AAElB,QAAM,QAAQ,KAAK,QAAQ,OAAO,GAAG,EAAE,MAAM,GAAG;AAChD,SAAO,MAAM,MAAM,SAAS,CAAC;AAC/B;AAYO,SAAS,mBACd,SACqB;AACrB,QAAM,cAAc,iBAAiB,OAAO;AAG5C,MAAI,YAAY,mBAAmB,YAAY,iBAAiB,GAAG;AAIjE,WAAO,iBAAiB,WAAW;AAAA,EACrC;AAGA,QAAM,YACH,YAAY,YAAY,WAAW,GAAG,IACnC,YAAY,aACZ,YACH,YAAY,SAAS,WAAW,GAAG,IAAI,YAAY,UAAU;AAEhE,MAAI,CAAC,UAAU;AACb,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,SAAS,UAAqC,QAAQ;AAC5D,MAAI,CAAC,OAAO,IAAI;AACd,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAEA,SAAO,cAAc,OAAO,KAAK;AACnC;AAKA,SAAS,iBACP,aACqB;AACrB,QAAM,SAAS,YAAY,UAA
U,YAAY,UAAU;AAC3D,QAAM,iBACJ,YAAY,mBACZ,YAAY,iBAAiB,KAC7B,YAAY,mBACZ;AAEF,QAAM,YACJ,YAAY,8BACZ,YAAY,wBAAwB;AAEtC,QAAM,QAAQ,OAAO,YAAY,SAAS,YAAY,KAAK,KAAK;AAChE,QAAM,SAAS,OAAO,YAAY,UAAU,YAAY,MAAM,KAAK;AAEnE,QAAM,WAA0C;AAAA,IAC9C,UAAU;AAAA,IACV,QAAQ,OAAO,KAAK;AAAA,IACpB,gBAAgB,eAAe,KAAK;AAAA,IACpC;AAAA,IACA;AAAA,IACA,OAAO;AAAA,MACL,MAAM,iBAAiB,SAAS;AAAA,MAChC,KAAK,YAAY,iBAAiB,YAAY,WAAW;AAAA,IAC3D;AAAA,IACA,UAAU;AAAA,MACR,SAAS,YAAY,gBAAgB,YAAY;AAAA,MACjD,OACE,OAAO,YAAY,uBAAuB,YAAY,KAAK,KAC3D;AAAA,MACF,KACE,OAAO,YAAY,kBAAkB,YAAY,gBAAgB,CAAC,KAClE;AAAA,MACF,MAAM,OAAO,YAAY,QAAQ,YAAY,IAAI,KAAK;AAAA,MACtD,UACE,OAAO,YAAY,aAAa,YAAY,WAAW,CAAC,KAAK;AAAA,IACjE;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;AAKA,SAAS,cAAc,MAAsD;AAC3E,QAAM,SAAS,SAAiB,MAAM,UAAU,QAAQ,KAAK;AAC7D,QAAM,iBACJ,SAAiB,MAAM,mBAAmB,iBAAiB,KAAK;AAElE,QAAM,YAAY;AAAA,IAChB;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,QAAM,QAAQ,SAAiB,MAAM,SAAS,OAAO,KAAK;AAC1D,QAAM,SAAS,SAAiB,MAAM,UAAU,QAAQ,KAAK;AAE7D,QAAM,WAA0C;AAAA,IAC9C,UAAU;AAAA,IACV,QAAQ,OAAO,KAAK;AAAA,IACpB,gBAAgB,eAAe,KAAK;AAAA,IACpC;AAAA,IACA;AAAA,IACA,OAAO;AAAA,MACL,MAAM,iBAAiB,SAAS;AAAA,MAChC,KAAK,SAAiB,MAAM,iBAAiB,WAAW;AAAA,IAC1D;AAAA,IACA,UAAU;AAAA,MACR,SAAS,SAAiB,MAAM,gBAAgB,SAAS;AAAA,MACzD,OAAO,SAAiB,MAAM,uBAAuB,OAAO;AAAA,MAC5D,KAAK,SAAiB,MAAM,kBAAkB,gBAAgB;AAAA,MAC9D,MAAM,SAAiB,MAAM,QAAQ,MAAM;AAAA,MAC3C,UAAU,SAAiB,MAAM,aAAa,WAAW;AAAA,IAC3D;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;;;AC5JO,SAAS,aAAa,SAA+C;AAC1E,QAAM,cAAc,iBAAiB,OAAO;AAG5C,QAAM,WAAW,YAAY,WAAW,YAAY;AAEpD,MAAI,CAAC,YAAY,CAAC,SAAS,WAAW,GAAG,GAAG;AAC1C,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,SAAS,UAA+B,QAAQ;AACtD,MAAI,CAAC,OAAO,IAAI;AACd,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AACA,QAAM,OAAO,OAAO;AAGpB,MAAI,CAAC,KAAK,cAAc,CAAC,KAAK,QAAQ;AACpC,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAEA,QAAM,WAA0C;AAAA,IAC9C,UAAU;AAAA,IACV,QAAQ,KAAK,QAAQ,KAAK,KAAK;AAAA,IAC/B,gBAAgB,KAAK,iBAAiB,KAAK,KAAK;AAAA,IAChD,OAAO,KAAK,SAAS;AAAA,IACrB,QAAQ,KAAK,UAAU;AAAA,IACvB,OAAO;AAAA,MACL,MAAM,KAAK;AAAA,IACb;AAAA,IACA,UAAU;AAAA,MACR,SAAS,KAAK;AAAA,MACd,WAAW,KAAK;AAAA,MAChB,OAAO,KAAK;AAAA,MACZ,KAAK,KAAK;AAAA,MACV,MAAM,KAAK;AAAA,IACb;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;;;ACxDO,SAAS,aAAa,SAA+C;AAC1E,QAAM,cAAc,iBAAiB,OAAO;AAG5C,QAAM,iBAAiB,YAAY;AACnC,MAAI,CAAC,gBAAgB;AACnB,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,SAAS,UAA+B,cAAc;AAC5D,MAAI,CAAC,OAAO,IAAI;AACd,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AACA,QAAM,OAAO,OAAO;AAGpB,QAAM,kBAAkB,CAAC,QAAiB;AACxC,UAAM,QAAQ,KAAK,MAAM,mBAAmB;AAC5C,WAAO,QAAQ,CAAC,KAAK,QAAQ,CAAC,IAC1B;AAAA,MACE,OAAO,OAAO,SAAS,MAAM,CAAC,GAAG,EAAE;AAAA,MACnC,QAAQ,OAAO,SAAS,MAAM,CAAC,GAAG,EAAE;AAAA,IACtC,IACA,EAAE,OAAO,GAAG,QAAQ,EAAE;AAAA,EAC5B;AACA,QAAM,EAAE,OAAO,OAAO,IAAI,gBAAgB,KAAK,UAAU;AAGzD,QAAM,WAA0C;AAAA,IAC9C,UAAU;AAAA,IACV,QAAQ,KAAK,UAAU;AAAA,IACvB,gBAAgB,KAAK,mBAAmB;AAAA,IACxC;AAAA,IACA;AAAA,IACA,OAAO;AAAA,MACL,MAAM,KAAK;AAAA,MACX,MAAM,KAAK,YAAY;AAAA,IACzB;AAAA,IACA,UAAU;AAAA,MACR,SAAS,KAAK;AAAA,MACd,OAAO,KAAK;AAAA,MACZ,KAAK,KAAK;AAAA,MACV,MAAM,KAAK;AAAA,IACb;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;;;ACtDA,SAAS,wBACP,aACoB;AAEpB,MAAI,YAAY,mBAAmB;AACjC,WAAO,YAAY;AAAA,EACrB;AAGA,MAAI,CAAC,YAAY,SAAS;AACxB,WAAO;AAAA,EACT;AAEA,QAAM,gBAAgB,UAAmC,YAAY,OAAO;AAC5E,MAAI,CAAC,cAAc,MAAM,EAAE,uBAAuB,cAAc,QAAQ;AACtE,WAAO;AAAA,EACT;AAEA,SAAO,KAAK,UAAU,cAAc,MAAM,iBAAiB;AAC7D;AAYO,SAAS,cAAc,SAA+C;AAE3E,QAAM,cAAc,iBAAiB,OAAO;AAK5C,QAAM,eAAe,wBAAwB,WAAW;AAExD,MAAI,CAAC,cAAc;AACjB,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,SAAS,
UAAgC,YAAY;AAC3D,MAAI,CAAC,OAAO,IAAI;AACd,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AACA,QAAM,OAAO,OAAO;AAGpB,QAAM,QAAQ,KAAK,SAAS;AAC5B,QAAM,SAAS,KAAK,UAAU;AAG9B,QAAM,WAA0C;AAAA,IAC9C,UAAU;AAAA,IACV,QAAQ,KAAK,mBAAmB;AAAA,IAChC,gBAAgB,KAAK,mBAAmB;AAAA,IACxC;AAAA,IACA;AAAA,EACF;AAGA,MAAI,KAAK,OAAO,QAAQ,KAAK,OAAO,MAAM;AACxC,aAAS,QAAQ;AAAA,MACf,MAAM,KAAK,MAAM;AAAA,MACjB,MAAM,KAAK,MAAM;AAAA,IACnB;AAAA,EACF;AAGA,MACE,KAAK,SAAS,UACd,KAAK,UAAU,UACf,KAAK,cAAc,UACnB,KAAK,cAAc,QACnB;AACA,aAAS,WAAW;AAAA,MAClB,MAAM,KAAK;AAAA,MACX,OAAO,KAAK;AAAA,MACZ,KAAK,KAAK;AAAA,MACV,SAAS,KAAK;AAAA,IAChB;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;;;ACrEO,SAAS,aAAa,SAA+C;AAE1E,QAAM,cAAc,iBAAiB,OAAO;AAG5C,MAAI,YAAY,aAAa,WAAW;AACtC,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,cAAc,YAAY;AAChC,MAAI,CAAC,aAAa;AAChB,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAEA,QAAM,SAAS,UAA0B,WAAW;AACpD,MAAI,CAAC,OAAO,IAAI;AACd,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AACA,QAAM,UAAU,OAAO;AAGvB,QAAM,QAAQ,QAAQ,SAAS;AAC/B,QAAM,SAAS,QAAQ,UAAU;AAGjC,QAAM,SACJ,QAAQ,WAAW,SAAS,gBAAgB,QAAQ,UAAU;AAChE,QAAM,iBACJ,QAAQ,oBAAoB,SAAS,gBAAgB,QAAQ,MAAM;AAGrE,QAAM,WAAyC;AAAA,IAC7C,UAAU;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,MACE,QAAQ,UAAU,UAClB,QAAQ,UAAU,UAClB,QAAQ,SAAS,UACjB,QAAQ,mBAAmB,UAC3B,QAAQ,YAAY,QACpB;AACA,aAAS,WAAW;AAAA,MAClB,OAAO,QAAQ;AAAA,MACf,KAAK,QAAQ;AAAA,MACb,MAAM,QAAQ;AAAA,MACd,SAAS,QAAQ;AAAA,MACjB,WAAW,QAAQ;AAAA,IACrB;AAAA,EACF;AAGA,QAAM,eAAe,QAAQ,WAAW,SAAS;AACjD,MAAI,gBAAgB,aAAa,SAAS,GAAG;AAC3C,aAAS,mBAAmB,aACzB,IAAI,CAAC,OAA+B;AACnC,UAAI,CAAC,GAAG,aAAc,QAAO;AAC7B,aAAO;AAAA,QACL,QAAQ,GAAG;AAAA,QACX,QAAQ,GAAG,UAAU,CAAC;AAAA,MACxB;AAAA,IACF,CAAC,EACA,OAAO,CAAC,OAA8B,OAAO,IAAI;AAEpD,aAAS,YAAY,QAAQ,WAAW;AACxC,aAAS,WAAW,QAAQ,WAAW;AAAA,EACzC;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;;;AC9FO,SAAS,mBACd,SACqB;AACrB,QAAM,cAAc,iBAAiB,OAAO;AAG5C,QAAM,WAAW,YAAY;AAE7B,MAAI,CAAC,YAAY,CAAC,SAAS,WAAW,GAAG,GAAG;AAC1C,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,SAAS,UAAqC,QAAQ;AAC5D,MAAI,CAAC,OAAO,IAAI;AACd,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AACA,QAAM,OAAO,OAAO;AAGpB,MAAI,KAAK,aAAa,iBAAiB;AACrC,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAEA,QAAM,WAA0C;AAAA,IAC9C,UAAU;AAAA,IACV,QAAQ,KAAK,QAAQ,KAAK,KAAK;AAAA,IAC/B,gBAAgB,KAAK,UAAU,KAAK,KAAK;AAAA,IACzC,OAAO,KAAK,SAAS;AAAA,IACrB,QAAQ,KAAK,UAAU;AAAA,IACvB,OAAO;AAAA,MACL,MAAM,KAAK;AAAA,MACX,MAAM,KAAK;AAAA,IACb;AAAA,IACA,UAAU;AAAA,MACR,SAAS,KAAK;AAAA,MACd,WAAW,KAAK;AAAA,MAChB,OAAO,KAAK;AAAA,MACZ,KAAK,KAAK;AAAA,MACV,MAAM,KAAK;AAAA,MACX,UAAU,KAAK;AAAA,IACjB;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;;;AC9CO,SAAS,qBACd,SACqB;AAErB,QAAM,cAAc,iBAAiB,OAAO;AAG5C,QAAM,cAAc,aAAa,OAAO;AACxC,MAAI,CAAC,YAAY,MAAM,YAAY,MAAM,aAAa,WAAW;AAC/D,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,WAA8C;AAAA,IAClD,GAAG,YAAY;AAAA,IACf,UAAU;AAAA,EACZ;AAGA,QAAM,WAAW,YAAY,iBAAiB;AAC9C,MAAI,UAAU;AACZ,UAAM,SAAS,UAA+B,QAAQ;AACtD,QAAI,OAAO,IAAI;AACb,YAAM,OAAO,OAAO;AAGpB,UAAI,KAAK,mBAAmB,QAAW;AACrC,iBAAS,SAAS,KAAK;AAAA,MACzB;AACA,UAAI,KAAK,mBAAmB,QAAW;AACrC,iBAAS,iBAAiB,KAAK;AAAA,MACjC;AAGA,UAAI,KAAK,cAAc,UAAa,KAAK,cAAc,QAAW;AAChE,iBAAS,QAAQ;AAAA,UACf,MAAM,KAAK;AAAA,UACX,MAAM,KAAK;AAAA,QACb;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;;;ACjDA,SAAS,yBACP,aACoB;AAEpB,MAAI,YAAY,YAAY;AAC1B,WAAO,YAAY;AAAA,EACrB;AAGA,MAAI,CAAC,YAAY,SAAS;AACxB,WAAO;AAAA,EACT;AAEA,QAAM,gBAAgB,UAAmC,YAAY,OAAO;AAC5E,MAAI,CAAC,cAAc,IAAI;AACrB,WAAO;AAAA,EACT;AAGA,MAAI,sBAAsB,cAAc,OAAO;AAC7C,WAAO
,YAAY;AAAA,EACrB;AAEA,SAAO;AACT;AAYO,SAAS,aAAa,SAA+C;AAE1E,QAAM,cAAc,iBAAiB,OAAO;AAK5C,QAAM,iBAAiB,yBAAyB,WAAW;AAE3D,MAAI,CAAC,gBAAgB;AACnB,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,SAAS,UAA6B,cAAc;AAC1D,MAAI,CAAC,OAAO,IAAI;AACd,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAGA,QAAM,SAAS,OAAO,MAAM;AAC5B,MAAI,CAAC,QAAQ;AACX,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,QAAQ,OAAO,SAAS;AAC9B,QAAM,SAAS,OAAO,UAAU;AAGhC,QAAM,WAAyC;AAAA,IAC7C,UAAU;AAAA,IACV,QAAQ,OAAO,UAAU;AAAA,IACzB,gBAAgB,OAAO,kBAAkB;AAAA,IACzC;AAAA,IACA;AAAA,EACF;AAGA,QAAM,eAAe,YAAY,UAAU,YAAY;AACvD,MAAI,cAAc;AAChB,UAAM,eAAe,UAAU,YAAY;AAC3C,QAAI,aAAa,IAAI;AACnB,eAAS,QAAQ,aAAa;AAAA,IAChC;AAAA,EACF;AAGA,MAAI,OAAO,OAAO;AAChB,aAAS,QAAQ;AAAA,MACf,MAAM,OAAO;AAAA,IACf;AAAA,EACF;AAGA,MACE,OAAO,SAAS,UAChB,OAAO,UAAU,UACjB,OAAO,aAAa,UACpB,OAAO,YAAY,UACnB,OAAO,cAAc,QACrB;AACA,aAAS,WAAW;AAAA,MAClB,MAAM,OAAO;AAAA,MACb,OAAO,OAAO;AAAA,MACd,KAAK,OAAO;AAAA,MACZ,SAAS,OAAO;AAAA,MAChB,WAAW,OAAO;AAAA,IACpB;AAAA,EACF;AAGA,MACE,OAAO,mBAAmB,UAC1B,OAAO,yBAAyB,UAChC,OAAO,6BAA6B,QACpC;AACA,aAAS,QAAQ;AAAA,MACf,OAAO,OAAO;AAAA,MACd,UAAU,OAAO;AAAA,MACjB,SAAS,OAAO;AAAA,IAClB;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;;;AC5HO,SAAS,eAAe,SAA+C;AAE5E,QAAM,cAAc,iBAAiB,OAAO;AAG5C,QAAM,WAAW,YAAY;AAC7B,MAAI,CAAC,UAAU;AACb,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,cAAc,SAAS,QAAQ,QAAQ,EAAE;AAC/C,QAAM,SAAS,UAAmC,WAAW;AAC7D,MAAI,CAAC,OAAO,IAAI;AACd,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AACA,QAAM,OAAO,OAAO;AAGpB,QAAM,QAAQ,KAAK,SAAS;AAC5B,QAAM,SAAS,KAAK,UAAU;AAG9B,QAAM,cAAc,YAAY;AAChC,MAAI,CAAC,aAAa;AAChB,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AACA,QAAM,eAAe,UAAU,WAAW;AAC1C,MAAI,CAAC,aAAa,IAAI;AACpB,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAGA,QAAM,WAA8C;AAAA,IAClD,UAAU;AAAA,IACV,QAAQ,KAAK,UAAU;AAAA,IACvB,gBAAgB,KAAK,kBAAkB;AAAA,IACvC;AAAA,IACA;AAAA,IACA,OAAO,aAAa;AAAA,EACtB;AAGA,MAAI,KAAK,WAAW,iBAAiB,KAAK,WAAW,MAAM;AACzD,aAAS,QAAQ;AAAA,MACf,MAAM,KAAK,UAAU;AAAA,MACrB,MAAM,KAAK,UAAU;AAAA,IACvB;AAAA,EACF;AAGA,MACE,KAAK,SAAS,UACd,KAAK,UAAU,UACf,KAAK,aAAa,UAClB,KAAK,aAAa,QAClB;AACA,UAAM,WAAW,KAAK,OAAO,OAAO,KAAK,IAAI,IAAI;AAEjD,aAAS,WAAW;AAAA,MAClB,MACE,aAAa,KACT,eAAe,aAAa,KAAuB,IACnD;AAAA,MACN,OAAO,KAAK;AAAA,MACZ,KAAK,KAAK;AAAA,MACV,UAAU,KAAK;AAAA,IACjB;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;AAQA,SAAS,eAAe,OAA+B;AACrD,QAAM,cAAc,gBAAgB,KAAK;AACzC,SAAO,eAAe,OAAO,YAAY,OAAO,SAAS,WACrD,YAAY,OAAO,OACnB;AACN;AAQA,SAAS,gBACP,OACqE;AACrE,SAAO,OAAO,OAAO,KAAK,EAAE;AAAA,IAC1B,CAAC,SACC,KAAK,eAAe,cACpB,KAAK,WAAW,YAAY,EAAE,SAAS,SAAS;AAAA,EACpD;AACF;;;ACxHO,SAAS,cAAc,SAA+C;AAE3E,QAAM,WAAW,eAAe,OAAO;AAGvC,UAAQ,UAAU;AAAA,IAChB,KAAK;AACH,aAAO,aAAa,OAAO;AAAA,IAE7B,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,WAAW,OAAO;AAAA,IAE3B,KAAK;AACH,aAAO,aAAa,OAAO;AAAA,IAE7B,KAAK,WAAW;AAEd,YAAM,cAAc,aAAa,OAAO;AACxC,UAAI,YAAY,GAAI,QAAO;AAC3B,aAAO,WAAW,OAAO;AAAA,IAC3B;AAAA,IAEA,KAAK,WAAW;AAEd,YAAM,cAAc,aAAa,OAAO;AACxC,UAAI,YAAY,GAAI,QAAO;AAC3B,aAAO,WAAW,OAAO;AAAA,IAC3B;AAAA,IAEA,KAAK;AACH,aAAO,cAAc,OAAO;AAAA,IAE9B,KAAK;AACH,aAAO,aAAa,OAAO;AAAA,IAE7B,KAAK;AACH,aAAO,eAAe,OAAO;AAAA,IAE/B,KAAK;AACH,aAAO,qBAAqB,OAAO;AAAA,IAErC,KAAK;AACH,aAAO,mBAAmB,OAAO;AAAA,IAEnC,KAAK;AACH,aAAO,aAAa,OAAO;AAAA,IAE7B,KAAK;AACH,aAAO,mBAAmB,OAAO;AAAA,IAEnC,SAAS;AAGP,YAAM,cAAc,WAAW,OAAO;AACtC,UAAI,YAAY,GAAI,QAAO;AAG3B,YAAM,cAAc,aAAa,OAAO;AACxC,UAAI,YAAY,GAAI,QAAO;AAG3B,YAAM,eAAe,cAAc,OAAO;AAC1C,UAAI,aAAa,GAAI,QAAO;AAG5B,YAAM,cAAc,aAAa,OAAO;AACxC,UAAI,YAAY,GAAI,QAAO;AAG3B,YAAM,eAAe,
eAAe,OAAO;AAC3C,UAAI,aAAa,GAAI,QAAO;AAG5B,YAAM,kBAAkB,qBAAqB,OAAO;AACpD,UAAI,gBAAgB,GAAI,QAAO;AAG/B,YAAM,gBAAgB,aAAa,OAAO;AAC1C,UAAI,cAAc,GAAI,QAAO;AAE7B,aAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,IACnD;AAAA,EACF;AACF;;;ACnGO,SAAS,aAAa,MAAkB,QAAwB;AACrE,UACG,KAAK,MAAM,KAAK,MACf,KAAK,SAAS,CAAC,KAAK,MAAM,KAC1B,KAAK,SAAS,CAAC,KAAK,MAAM;AAEhC;AASO,SAAS,aAAa,MAAkB,QAAwB;AACrE,UACI,KAAK,MAAM,KAAK,MAAM,MACtB,KAAK,SAAS,CAAC,KAAK,MAAM,MAC1B,KAAK,SAAS,CAAC,KAAK,MAAM,KAC3B,KAAK,SAAS,CAAC,KAAK;AAEzB;AASO,SAAS,aAAa,MAAkB,QAAwB;AACrE,UACG,KAAK,MAAM,KAAK,MACf,KAAK,SAAS,CAAC,KAAK,MAAM,KAC1B,KAAK,SAAS,CAAC,KAAK,MAAM,MAC1B,KAAK,SAAS,CAAC,KAAK,MAAM;AAEhC;AASO,SAAS,cACd,MACA,QACA,OACM;AACN,OAAK,MAAM,IAAK,UAAU,KAAM;AAChC,OAAK,SAAS,CAAC,IAAK,UAAU,KAAM;AACpC,OAAK,SAAS,CAAC,IAAK,UAAU,IAAK;AACnC,OAAK,SAAS,CAAC,IAAI,QAAQ;AAC7B;AASO,SAAS,cAAc,MAAkB,QAAwB;AACtE,SAAO,OAAO;AAAA,IACZ,KAAK,MAAM,KAAK;AAAA,IAChB,KAAK,SAAS,CAAC,KAAK;AAAA,IACpB,KAAK,SAAS,CAAC,KAAK;AAAA,IACpB,KAAK,SAAS,CAAC,KAAK;AAAA,EACtB;AACF;AAUO,SAAS,WACd,MACA,QACA,gBACQ;AACR,MAAI,gBAAgB;AAClB,YAAQ,KAAK,MAAM,KAAK,MAAO,KAAK,SAAS,CAAC,KAAK,MAAM;AAAA,EAC3D;AACA,UAAS,KAAK,MAAM,KAAK,MAAM,KAAM,KAAK,SAAS,CAAC,KAAK;AAC3D;AAUO,SAAS,WACd,MACA,QACA,gBACQ;AACR,MAAI,gBAAgB;AAClB,YACG,KAAK,MAAM,KAAK,MACf,KAAK,SAAS,CAAC,KAAK,MAAM,KAC1B,KAAK,SAAS,CAAC,KAAK,MAAM,MAC1B,KAAK,SAAS,CAAC,KAAK,MAAM;AAAA,EAEhC;AACA,UACI,KAAK,MAAM,KAAK,MAAM,MACtB,KAAK,SAAS,CAAC,KAAK,MAAM,MAC1B,KAAK,SAAS,CAAC,KAAK,MAAM,KAC3B,KAAK,SAAS,CAAC,KAAK;AAEzB;AASO,SAAS,YAAY,GAAe,GAAwB;AACjE,MAAI,EAAE,WAAW,EAAE,OAAQ,QAAO;AAClC,WAAS,IAAI,GAAG,IAAI,EAAE,QAAQ,KAAK;AACjC,QAAI,EAAE,CAAC,MAAM,EAAE,CAAC,EAAG,QAAO;AAAA,EAC5B;AACA,SAAO;AACT;AAUO,SAAS,YACd,MACA,QACA,OACA,gBACM;AACN,MAAI,gBAAgB;AAClB,SAAK,MAAM,IAAI,QAAQ;AACvB,SAAK,SAAS,CAAC,IAAK,UAAU,IAAK;AAAA,EACrC,OAAO;AACL,SAAK,MAAM,IAAK,UAAU,IAAK;AAC/B,SAAK,SAAS,CAAC,IAAI,QAAQ;AAAA,EAC7B;AACF;AAUO,SAAS,YACd,MACA,QACA,OACA,gBACM;AACN,MAAI,gBAAgB;AAClB,SAAK,MAAM,IAAI,QAAQ;AACvB,SAAK,SAAS,CAAC,IAAK,UAAU,IAAK;AACnC,SAAK,SAAS,CAAC,IAAK,UAAU,KAAM;AACpC,SAAK,SAAS,CAAC,IAAK,UAAU,KAAM;AAAA,EACtC,OAAO;AACL,SAAK,MAAM,IAAK,UAAU,KAAM;AAChC,SAAK,SAAS,CAAC,IAAK,UAAU,KAAM;AACpC,SAAK,SAAS,CAAC,IAAK,UAAU,IAAK;AACnC,SAAK,SAAS,CAAC,IAAI,QAAQ;AAAA,EAC7B;AACF;AASO,SAAS,cACd,MACA,QACA,OACM;AACN,OAAK,MAAM,IAAI,QAAQ;AACvB,OAAK,SAAS,CAAC,IAAK,UAAU,IAAK;AACnC,OAAK,SAAS,CAAC,IAAK,UAAU,KAAM;AACpC,OAAK,SAAS,CAAC,IAAK,UAAU,KAAM;AACtC;AAUO,SAAS,MAAM,MAA2B;AAC/C,MAAI,KAAK,SAAS,EAAG,QAAO;AAC5B,SACE,KAAK,CAAC,MAAM,OACZ,KAAK,CAAC,MAAM,MACZ,KAAK,CAAC,MAAM,MACZ,KAAK,CAAC,MAAM,MACZ,KAAK,CAAC,MAAM,MACZ,KAAK,CAAC,MAAM,MACZ,KAAK,CAAC,MAAM,MACZ,KAAK,CAAC,MAAM;AAEhB;AAKO,SAAS,OAAO,MAA2B;AAChD,MAAI,KAAK,SAAS,EAAG,QAAO;AAC5B,SAAO,KAAK,CAAC,MAAM,OAAQ,KAAK,CAAC,MAAM;AACzC;AAKO,SAAS,OAAO,MAA2B;AAChD,MAAI,KAAK,SAAS,GAAI,QAAO;AAC7B,SACE,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,EAAE,MAAM;AAAA,EACb,KAAK,EAAE,MAAM;AAEjB;AAKO,SAAS,aAAa,MAAsC;AACjE,MAAI,MAAM,IAAI,EAAG,QAAO;AACxB,MAAI,OAAO,IAAI,EAAG,QAAO;AACzB,MAAI,OAAO,IAAI,EAAG,QAAO;AACzB,SAAO;AACT;;;ACxQO,IAAM,mBAAmB;AAGzB,IAAM,wBAAwB;AAG9B,IAAM,WAAW;AAGjB,IAAM,uBAAuB;;;ACU7B,SAAS,0BACd,UACmB;AACnB,MAAI,SAAS,SAAS,EAAG,QAAO,CAAC;AAGjC,QAAM,iBAAiB,SAAS,CAAC,MAAM,MAAQ,SAAS,CAAC,MAAM;AAC/D,QAAM,cAAc,SAAS,CAAC,MAAM,MAAQ,SAAS,CAAC,MAAM;AAE5D,MAAI,CAAC,kBAAkB,CAAC,YAAa,QAAO,CAAC;AAG7C,QAAM,QAAQ,WAAW,UAAU,GAAG,cAAc;AACpD,MAAI,UAAU,GAAI,QAAO,CAAC;AAG1B,QAAM,aAAa,WAAW,UAAU,GAAG,cAAc;AAGzD,QAAM,eAAe,mBAAmB,UAAU,YAAY,cAAc;AAG5E,QAAM,gBAAgB,kBAAkB,UAAU,YAAY,cAAc;AAC5E,Q
AAM,kBACJ,kBAAkB,OACd,mBAAmB,UAAU,eAAe,cAAc,IAC1D,CAAC;AAEP,SAAO,CAAC,GAAG,cAAc,GAAG,eAAe;AAC7C;AAKA,SAAS,mBACP,MACA,WACA,gBACmB;AACnB,QAAM,WAA8B,CAAC;AAErC,MAAI,YAAY,IAAI,KAAK,OAAQ,QAAO;AAExC,QAAM,aAAa,WAAW,MAAM,WAAW,cAAc;AAC7D,MAAI,SAAS,YAAY;AAEzB,WAAS,IAAI,GAAG,IAAI,YAAY,KAAK;AACnC,QAAI,SAAS,KAAK,KAAK,OAAQ,QAAO;AAEtC,UAAM,MAAM,WAAW,MAAM,QAAQ,cAAc;AACnD,UAAM,OAAO,WAAW,MAAM,SAAS,GAAG,cAAc;AACxD,UAAM,QAAQ,WAAW,MAAM,SAAS,GAAG,cAAc;AAGzD,UAAM,WAAW,YAAY,IAAI;AACjC,UAAM,WAAW,QAAQ;AAEzB,QAAI;AACJ,QAAI,YAAY,GAAG;AACjB,oBAAc,SAAS;AAAA,IACzB,OAAO;AACL,oBAAc,WAAW,MAAM,SAAS,GAAG,cAAc;AAAA,IAC3D;AAEA,QAAI,cAAc,WAAW,KAAK,QAAQ;AACxC,gBAAU;AACV;AAAA,IACF;AAEA,UAAM,UAAU,KAAK,MAAM,aAAa,cAAc,QAAQ;AAG9D,QAAI,QAAQ,uBAAuB;AACjC,YAAM,OAAO,kBAAkB,OAAO;AACtC,UAAI,MAAM;AACR,cAAM,SAAS,cAAc,IAAI;AACjC,iBAAS,KAAK;AAAA,UACZ,QAAQ,EAAE,MAAM,wBAAwB,QAAQ,UAAU,OAAU;AAAA,UACpE,MAAM,SAAS,KAAK,MAAM,OAAO,SAAS,CAAC,IAAI;AAAA,QACjD,CAAC;AAAA,MACH;AAAA,IACF,WAAW,QAAQ,UAAU;AAC3B,YAAM,OAAO,kBAAkB,OAAO;AACtC,UAAI,MAAM;AACR,cAAM,SAAS,cAAc,IAAI;AACjC,iBAAS,KAAK;AAAA,UACZ,QAAQ,EAAE,MAAM,YAAY,QAAQ,UAAU,OAAU;AAAA,UACxD,MAAM,SAAS,KAAK,MAAM,OAAO,SAAS,CAAC,IAAI;AAAA,QACjD,CAAC;AAAA,MACH;AAAA,IACF,WAAW,QAAQ,kBAAkB;AACnC,YAAM,OAAO,kBAAkB,OAAO;AACtC,UAAI,MAAM;AACR,iBAAS,KAAK;AAAA,UACZ,QAAQ,EAAE,MAAM,kBAAkB;AAAA,UAClC,MAAM;AAAA,QACR,CAAC;AAAA,MACH;AAAA,IACF;AAEA,cAAU;AAAA,EACZ;AAEA,SAAO;AACT;AAKA,SAAS,cAAc,MAA6B;AAClD,QAAM,QAAQ,KAAK,MAAM,iBAAiB;AAC1C,SAAO,QAAQ,CAAC,KAAK;AACvB;AAKA,SAAS,YAAY,MAAsB;AACzC,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO;AAAA;AAAA,IACT,KAAK;AACH,aAAO;AAAA;AAAA,IACT,KAAK;AACH,aAAO;AAAA;AAAA,IACT,KAAK;AACH,aAAO;AAAA;AAAA,IACT,KAAK;AACH,aAAO;AAAA;AAAA,IACT,KAAK;AACH,aAAO;AAAA;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;AAKA,SAAS,kBAAkB,MAAiC;AAC1D,MAAI;AACF,UAAM,UAAU,IAAI,YAAY,SAAS,EAAE,OAAO,MAAM,CAAC;AACzD,QAAI,OAAO,QAAQ,OAAO,IAAI;AAE9B,QAAI,KAAK,SAAS,IAAI,GAAG;AACvB,aAAO,KAAK,MAAM,GAAG,EAAE;AAAA,IACzB;AACA,WAAO,KAAK,KAAK,KAAK;AAAA,EACxB,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,SAAS,kBACP,MACA,WACA,gBACe;AACf,MAAI,YAAY,IAAI,KAAK,OAAQ,QAAO;AAExC,QAAM,aAAa,WAAW,MAAM,WAAW,cAAc;AAC7D,MAAI,SAAS,YAAY;AAEzB,WAAS,IAAI,GAAG,IAAI,YAAY,KAAK;AACnC,QAAI,SAAS,KAAK,KAAK,OAAQ,QAAO;AAEtC,UAAM,MAAM,WAAW,MAAM,QAAQ,cAAc;AAEnD,QAAI,QAAQ,sBAAsB;AAEhC,aAAO,WAAW,MAAM,SAAS,GAAG,cAAc;AAAA,IACpD;AAEA,cAAU;AAAA,EACZ;AAEA,SAAO;AACT;AAQO,SAAS,kBAAkB,MAAiC;AACjE,MAAI,KAAK,SAAS,EAAG,QAAO;AAG5B,MACE,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM,GACZ;AAEA,UAAM,WAAW,KAAK,MAAM,CAAC;AAC7B,QAAI,SAAS,UAAU,GAAG;AACxB,YAAM,aAAa,SAAS,CAAC,MAAM,KAAQ,SAAS,CAAC,MAAM;AAC3D,aAAO,aAAa,cAAc,QAAQ,IAAI,cAAc,QAAQ;AAAA,IACtE;AACA,WAAO,cAAc,QAAQ;AAAA,EAC/B;AAGA,MACE,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM,GACZ;AAEA,WAAO,YAAY,KAAK,MAAM,CAAC,CAAC;AAAA,EAClC;AAGA,MAAI;AACF,UAAM,UAAU,IAAI,YAAY,SAAS,EAAE,OAAO,KAAK,CAAC;AACxD,QAAI,SAAS,QAAQ,OAAO,IAAI;AAEhC,QAAI,OAAO,SAAS,IAAI,GAAG;AACzB,eAAS,OAAO,MAAM,GAAG,EAAE;AAAA,IAC7B;AACA,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,SAAS,cAAc,MAA0B;AAC/C,QAAM,QAAkB,CAAC;AAEzB,WAAS,IAAI,GAAG,IAAI,KAAK,SAAS,GAAG,KAAK,GAAG;AAC3C,UAAM,QAAS,KAAK,CAAC,KAAK,MAAM,KAAM,KAAK,IAAI,CAAC,KAAK;AACrD,QAAI,SAAS,EAAG;AAChB,UAAM,KAAK,OAAO,aAAa,IAAI,CAAC;AAAA,EACtC;AAEA,SAAO,MAAM,KAAK,EAAE;AACtB;AAKA,SAAS,cAAc,MAA0B;AAC/C,QAAM,QAAkB,CAAC;AAEzB,WAAS,IAAI,GAAG,IAAI,KAAK,SAAS,GAAG,KAAK,GAAG;AAC
3C,UAAM,QAAQ,KAAK,CAAC,KAAK,MAAO,KAAK,IAAI,CAAC,KAAK,MAAM;AACrD,QAAI,SAAS,EAAG;AAChB,UAAM,KAAK,OAAO,aAAa,IAAI,CAAC;AAAA,EACtC;AAEA,SAAO,MAAM,KAAK,EAAE;AACtB;AAKA,SAAS,YAAY,MAA0B;AAC7C,QAAM,QAAkB,CAAC;AAEzB,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,QAAI,KAAK,CAAC,MAAM,EAAG;AACnB,UAAM,KAAK,OAAO,aAAa,KAAK,CAAC,KAAK,CAAC,CAAC;AAAA,EAC9C;AAEA,SAAO,MAAM,KAAK,EAAE;AACtB;;;AC5SA,IAAM,cAAc;AAGpB,IAAM,aAAa;AAGnB,IAAM,cAAc,IAAI,WAAW,CAAC,IAAM,KAAM,KAAM,KAAM,GAAM,CAAI,CAAC;AAYhE,SAAS,iBAAiB,MAAsC;AACrE,MAAI,CAAC,OAAO,IAAI,GAAG;AACjB,WAAO,OAAO,MAAM,EAAE,MAAM,mBAAmB,CAAC;AAAA,EAClD;AAEA,QAAM,WAA8B,CAAC;AAGrC,QAAM,OAAO,gBAAgB,IAAI;AACjC,MAAI,MAAM;AACR,UAAM,WAAW,KAAK,MAAM,KAAK,QAAQ,KAAK,SAAS,KAAK,MAAM;AAClE,UAAM,eAAe,0BAA0B,QAAQ;AACvD,aAAS,KAAK,GAAG,YAAY;AAAA,EAC/B;AAGA,QAAM,aAAa,eAAe,IAAI;AACtC,MAAI,YAAY;AACd,UAAM,UAAU,KAAK;AAAA,MACnB,WAAW;AAAA,MACX,WAAW,SAAS,WAAW;AAAA,IACjC;AACA,UAAM,UAAU,iBAAiB,OAAO;AAExC,QAAI,YAAY,MAAM;AACpB,eAAS,KAAK;AAAA,QACZ,QAAQ,EAAE,MAAM,UAAU;AAAA,QAC1B,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;AAQO,SAAS,gBACd,MAC2C;AAC3C,MAAI,SAAS;AAEb,SAAO,SAAS,KAAK,SAAS,GAAG;AAE/B,QAAI,KAAK,MAAM,MAAM,KAAM;AACzB;AACA;AAAA,IACF;AAEA,UAAM,SAAS,KAAK,SAAS,CAAC;AAG9B,QAAI,WAAW,KAAM;AACnB;AACA;AAAA,IACF;AAGA,UAAM,UAAW,KAAK,SAAS,CAAC,KAAK,MAAM,KAAM,KAAK,SAAS,CAAC,KAAK;AAGrE,QAAI,WAAW,aAAa;AAE1B,YAAM,cAAc,SAAS;AAC7B,UAAI,cAAc,KAAK,KAAK,QAAQ;AAClC,cAAM,SAAS,KAAK,MAAM,aAAa,cAAc,CAAC;AACtD,YAAI,YAAY,QAAQ,WAAW,GAAG;AAEpC,iBAAO;AAAA,YACL,QAAQ,cAAc;AAAA,YACtB,QAAQ,SAAS;AAAA;AAAA,UACnB;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,cAAU,IAAI;AAGd,QAAI,WAAW,OAAQ,WAAW,KAAM;AACtC;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAUA,SAAS,eACP,MAC2C;AAC3C,MAAI,SAAS;AAEb,SAAO,SAAS,KAAK,SAAS,GAAG;AAE/B,QAAI,KAAK,MAAM,MAAM,KAAM;AACzB;AACA;AAAA,IACF;AAEA,UAAM,SAAS,KAAK,SAAS,CAAC;AAG9B,QAAI,WAAW,KAAM;AACnB;AACA;AAAA,IACF;AAGA,UAAM,UAAW,KAAK,SAAS,CAAC,KAAK,MAAM,KAAM,KAAK,SAAS,CAAC,KAAK;AAGrE,QAAI,WAAW,YAAY;AAEzB,aAAO;AAAA,QACL,QAAQ,SAAS;AAAA,QACjB,QAAQ,SAAS;AAAA;AAAA,MACnB;AAAA,IACF;AAGA,cAAU,IAAI;AAGd,QAAI,WAAW,OAAQ,WAAW,KAAM;AACtC;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAQA,SAAS,iBAAiB,MAAiC;AACzD,MAAI;AACF,UAAM,UAAU,IAAI,YAAY,SAAS,EAAE,OAAO,KAAK,CAAC;AACxD,WAAO,QAAQ,OAAO,IAAI;AAAA,EAC5B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;ACtKO,SAAS,gBAAgB,MAAqC;AAEnE,MAAI,CAAC,MAAM,IAAI,GAAG;AAChB,WAAO,OAAO,MAAM,EAAE,MAAM,mBAAmB,CAAC;AAAA,EAClD;AAGA,QAAM,eAAe,kBAAkB,IAAI;AAC3C,MAAI,CAAC,aAAa,IAAI;AACpB,WAAO;AAAA,EACT;AAEA,SAAO,OAAO,GAAG,aAAa,KAAK;AACrC;AAMA,IAAM,uBAAuB;AAK7B,SAAS,kBACP,MACsC;AACtC,QAAM,SAAyB,CAAC;AAChC,MAAI,SAAS;AAEb,SAAO,SAAS,KAAK,QAAQ;AAE3B,QAAI,SAAS,IAAI,KAAK,QAAQ;AAC5B,aAAO,OAAO,MAAM;AAAA,QAClB,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AACA,UAAM,SAAS,aAAa,MAAM,MAAM;AACxC,cAAU;AAGV,QAAI,SAAS,IAAI,KAAK,QAAQ;AAC5B,aAAO,OAAO,MAAM;AAAA,QAClB,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AACA,UAAM,YAAY,cAAc,MAAM,MAAM;AAC5C,cAAU;AAGV,QAAI,SAAS,SAAS,KAAK,QAAQ;AACjC,aAAO,OAAO,MAAM;AAAA,QAClB,MAAM;AAAA,QACN,SAAS,oDAAoD,SAAS;AAAA,MACxE,CAAC;AAAA,IACH;AACA,UAAM,YAAY,KAAK,MAAM,QAAQ,SAAS,MAAM;AACpD,cAAU;AAGV,cAAU;AAGV,QAAI,cAAc,QAAQ;AACxB,YAAM,SAAS,eAAe,SAAS;AACvC,UAAI,QAAQ;AACV,eAAO,KAAK,MAAM;AAAA,MACpB;AAAA,IACF,WAAW,cAAc,QAAQ;AAC/B,YAAM,SAAS,eAAe,SAAS;AACvC,UAAI,QAAQ;AACV,eAAO,KAAK,MAAM;AAAA,MACpB;AAAA,IACF;AAGA,QAAI,cAAc,QAAQ;AACxB;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,MAAM;AACzB;AAUA,SAAS,eAAe,MAAoC;AAE1D,QAAM,YAAY,KAAK,QAAQ,CAAC;AAChC,MAAI,cAAc,IAAI;AACpB,WAAO;AAAA,EACT;AAGA,QAAM,UAAU,aAAa,KAAK,MAAM,GAAG,SAAS,CAAC;AAGrD,QAAM,WAAW,KAAK,MAAM,YAAY,CAAC;AACzC,QAAM,OAAO,cAAc,QAAQ,KAAK,aAAa,QAAQ;AAE7D,SAAO,EAAE,MAAM,QAAQ,SAAS,K
AAK;AACvC;AAKA,SAAS,cAAc,MAAiC;AACtD,MAAI;AACF,WAAO,IAAI,YAAY,SAAS,EAAE,OAAO,KAAK,CAAC,EAAE,OAAO,IAAI;AAAA,EAC9D,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,SAAS,eAAe,MAAoC;AAC1D,MAAI,SAAS;AAGb,QAAM,aAAa,SAAS,MAAM,MAAM;AACxC,MAAI,eAAe,GAAI,QAAO;AAC9B,QAAM,UAAU,WAAW,KAAK,MAAM,QAAQ,UAAU,CAAC;AACzD,WAAS,aAAa;AAGtB,MAAI,UAAU,KAAK,OAAQ,QAAO;AAClC,QAAM,kBAAkB,KAAK,MAAM,KAAK;AACxC,YAAU;AAGV,MAAI,UAAU,KAAK,OAAQ,QAAO;AAClC,QAAM,oBAAoB,KAAK,MAAM,KAAK;AAC1C,YAAU;AAGV,QAAM,UAAU,SAAS,MAAM,MAAM;AACrC,MAAI,YAAY,GAAI,QAAO;AAC3B,QAAM,cAAc,WAAW,KAAK,MAAM,QAAQ,OAAO,CAAC;AAC1D,WAAS,UAAU;AAGnB,QAAM,WAAW,SAAS,MAAM,MAAM;AACtC,MAAI,aAAa,GAAI,QAAO;AAC5B,QAAM,oBAAoB,WAAW,KAAK,MAAM,QAAQ,QAAQ,CAAC;AACjE,WAAS,WAAW;AAGpB,MAAI;AACJ,MAAI,oBAAoB,GAAG;AAEzB,UAAM,eAAe,eAAe,KAAK,MAAM,MAAM,CAAC;AACtD,QAAI,CAAC,aAAc,QAAO;AAC1B,WAAO,WAAW,YAAY;AAAA,EAChC,OAAO;AACL,WAAO,WAAW,KAAK,MAAM,MAAM,CAAC;AAAA,EACtC;AAEA,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAKA,SAAS,SAAS,MAAkB,QAAwB;AAC1D,WAAS,IAAI,QAAQ,IAAI,KAAK,QAAQ,KAAK;AACzC,QAAI,KAAK,CAAC,MAAM,GAAG;AACjB,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAKA,SAAS,aAAa,MAA0B;AAC9C,MAAI,SAAS;AACb,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,cAAU,OAAO,aAAa,KAAK,CAAC,KAAK,CAAC;AAAA,EAC5C;AACA,SAAO;AACT;AAKA,SAAS,WAAW,MAA0B;AAC5C,SAAO,IAAI,YAAY,OAAO,EAAE,OAAO,IAAI;AAC7C;AAUA,SAAS,eAAe,OAAsC;AAE5D,SAAO;AACT;;;ACpOA,IAAM,kBAAkB,IAAI,WAAW,CAAC,IAAM,IAAM,IAAM,EAAI,CAAC;AAWxD,SAAS,iBAAiB,MAAsC;AACrE,MAAI,CAAC,OAAO,IAAI,GAAG;AACjB,WAAO,OAAO,MAAM,EAAE,MAAM,mBAAmB,CAAC;AAAA,EAClD;AAEA,QAAM,YAAY,cAAc,IAAI;AACpC,MAAI,CAAC,WAAW;AACd,WAAO,OAAO,GAAG,CAAC,CAAC;AAAA,EACrB;AAEA,QAAM,WAAW,KAAK;AAAA,IACpB,UAAU;AAAA,IACV,UAAU,SAAS,UAAU;AAAA,EAC/B;AAGA,QAAM,WAAW,0BAA0B,QAAQ;AAEnD,SAAO,OAAO,GAAG,QAAQ;AAC3B;AAWO,SAAS,cACd,MAC2C;AAE3C,MAAI,SAAS;AAEb,SAAO,SAAS,KAAK,SAAS,GAAG;AAE/B,UAAM,YAAY,KAAK,MAAM,QAAQ,SAAS,CAAC;AAG/C,UAAM,YAAY,aAAa,MAAM,SAAS,CAAC;AAG/C,QAAI,YAAY,WAAW,eAAe,GAAG;AAE3C,aAAO;AAAA,QACL,QAAQ,SAAS;AAAA,QACjB,QAAQ;AAAA,MACV;AAAA,IACF;AAIA,UAAM,aAAa,YAAa,YAAY;AAC5C,cAAU,IAAI;AAAA,EAChB;AAEA,SAAO;AACT;;;AChEO,SAAS,mBAAmB,QAAyC;AAC1E,SAAO,OAAO,IAAI,CAAC,WAAW;AAAA,IAC5B,SAAS,MAAM;AAAA,IACf,MAAM,MAAM;AAAA,EACd,EAAE;AACJ;AAiBO,SAAS,kBACd,UACiB;AACjB,QAAM,UAA2B,CAAC;AAElC,aAAW,WAAW,UAAU;AAC9B,UAAM,UAAU,gBAAgB,QAAQ,MAAM;AAC9C,UAAM,OAAO,QAAQ;AAIrB,QAAI,QAAQ,OAAO,SAAS,qBAAqB,KAAK,WAAW,GAAG,GAAG;AACrE,YAAM,WAAW,2BAA2B,IAAI;AAChD,UAAI,UAAU;AACZ,gBAAQ,KAAK,GAAG,QAAQ;AACxB;AAAA,MACF;AAAA,IACF;AAEA,YAAQ,KAAK,EAAE,SAAS,KAAK,CAAC;AAAA,EAChC;AAEA,SAAO;AACT;AAYA,SAAS,2BAA2B,MAAsC;AACxE,QAAM,cAAc,UAAmC,IAAI;AAC3D,MAAI,CAAC,YAAY,IAAI;AACnB,WAAO;AAAA,EACT;AAEA,QAAM,QAAQ,YAAY;AAG1B,MACE,OAAO,UAAU,YACjB,UAAU,QACV,MAAM,aAAa,aACnB,OAAO,MAAM,YAAY,UACzB;AACA,WAAO;AAAA,EACT;AAEA,QAAM,UAA2B,CAAC,EAAE,SAAS,YAAY,MAAM,UAAU,CAAC;AAG1E,QAAM,cAAc,UAAmB,MAAM,OAAO;AAEpD,SAAO;AAAA,IACL,GAAG;AAAA,IACH,YAAY,KACR,EAAE,SAAS,WAAW,MAAM,KAAK,UAAU,YAAY,KAAK,EAAE,IAC9D,EAAE,SAAS,WAAW,MAAM,MAAM,QAAQ;AAAA,EAChD;AACF;AAKA,SAAS,gBAAgB,QAAuC;AAC9D,UAAQ,OAAO,MAAM;AAAA,IACnB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO,OAAO,UAAU;AAAA,IAC1B,KAAK;AACH,aAAO,OAAO,UAAU;AAAA,EAC5B;AACF;;;AChFO,SAAS,KAAK,MAA+B;AAClD,QAAM,SAAS,aAAa,IAAI;AAEhC,MAAI,CAAC,QAAQ;AACX,WAAO,EAAE,QAAQ,WAAW,SAAS,uBAAuB;AAAA,EAC9D;AAGA,QAAM,YAAY,gBAAgB,MAAM,MAAM;AAC9C,MAAI,UAAU,WAAW,WAAW;AAClC,WAAO;AAAA,EACT;AACA,QAAM,MAAM,UAAU;AAGtB,QAAM,UACJ,IAAI,WAAW,QACX,mBAAmB,IAAI,MAAM,IAC7B,kBAAkB,IAAI,QAAQ;AAGpC,QAAM,cAAc,cAAc,OAAO;AACzC,MAAI,CAAC,YAAY,IAAI;AACnB,WAAO,EAAE,QAAQ,gBAAgB,IAAI;AAAA,EACvC;AAEA,QAAM,WAAW,YAAY;AAG7B,MAAI,SAAS,UAAU,KAA
K,SAAS,WAAW,GAAG;AACjD,UAAM,OAAO,QAAQ,MAAM,EAAE,eAAe,IAAI;AAEhD,QAAI,MAAM;AACR,eAAS,QAAQ,SAAS,SAAS,KAAK;AACxC,eAAS,SAAS,SAAS,UAAU,KAAK;AAAA,IAC5C;AAAA,EACF;AAEA,SAAO,EAAE,QAAQ,WAAW,UAAU,IAAI;AAC5C;AAOA,IAAM,UAAU;AAAA,EACd,KAAK;AAAA,IACH,cAAc;AAAA,IACd,gBAAgB;AAAA,IAChB,WAAW,CAAC,YAA4B,EAAE,QAAQ,OAAgB,OAAO;AAAA,EAC3E;AAAA,EACA,MAAM;AAAA,IACJ,cAAc;AAAA,IACd,gBAAgB;AAAA,IAChB,WAAW,CAAC,cAAiC;AAAA,MAC3C,QAAQ;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA,EACA,MAAM;AAAA,IACJ,cAAc;AAAA,IACd,gBAAgB;AAAA,IAChB,WAAW,CAAC,cAAiC;AAAA,MAC3C,QAAQ;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACF;AAWA,SAAS,gBAAgB,MAAkB,QAAoC;AAC7E,QAAM,SAAS,QAAQ,MAAM,EAAE,aAAa,IAAI;AAEhD,MAAI,CAAC,OAAO,IAAI;AACd,UAAM,UACJ,OAAO,MAAM,SAAS,qBAClB,WAAW,OAAO,YAAY,CAAC,eAC/B,OAAO,MAAM;AACnB,WAAO,EAAE,QAAQ,WAAW,QAAQ;AAAA,EACtC;AAEA,MAAI,OAAO,MAAM,WAAW,EAAG,QAAO,EAAE,QAAQ,QAAQ;AAGxD,MAAI,WAAW,OAAO;AACpB,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,KAAK,QAAQ,IAAI,UAAU,OAAO,KAAuB;AAAA,IAC3D;AAAA,EACF;AACA,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,KAAK,QAAQ,MAAM,EAAE,UAAU,OAAO,KAA0B;AAAA,EAClE;AACF;AAKA,SAAS,kBACP,MAC0C;AAC1C,QAAMC,wBAAuB;AAC7B,MAAI,KAAK,SAAS,GAAI,QAAO;AAI7B,SAAO;AAAA,IACL,OAAO,aAAa,MAAMA,wBAAuB,CAAC;AAAA,IAClD,QAAQ,aAAa,MAAMA,wBAAuB,EAAE;AAAA,EACtD;AACF;AAKA,SAAS,mBACP,MAC0C;AAE1C,MAAI,SAAS;AACb,SAAO,SAAS,KAAK,SAAS,GAAG;AAE/B,QAAI,KAAK,MAAM,MAAM,KAAM;AAEzB;AACA;AAAA,IACF;AAEA,UAAM,SAAS,KAAK,SAAS,CAAC,KAAK;AACnC,QAAI,WAAW,KAAM;AACnB;AACA;AAAA,IACF;AAGA,UAAM,UAAW,KAAK,SAAS,CAAC,KAAK,MAAM,KAAM,KAAK,SAAS,CAAC,KAAK;AAGrE,QACE,UAAU,OACV,UAAU,OACV,WAAW,OACX,WAAW,OACX,WAAW,KACX;AAGA,YAAM,UAAW,KAAK,SAAS,CAAC,KAAK,MAAM,KAAM,KAAK,SAAS,CAAC,KAAK;AACrE,YAAM,SAAU,KAAK,SAAS,CAAC,KAAK,MAAM,KAAM,KAAK,SAAS,CAAC,KAAK;AACpE,aAAO,EAAE,OAAO,OAAO;AAAA,IACzB;AAEA,cAAU,IAAI;AACd,QAAI,WAAW,IAAM;AAAA,EACvB;AACA,SAAO;AACT;AAKA,SAAS,mBACP,MAC0C;AAE1C,MAAI,SAAS;AAEb,SAAO,SAAS,KAAK,QAAQ;AAC3B,QAAI,SAAS,IAAI,KAAK,OAAQ;AAE9B,UAAM,YAAY,cAAc,MAAM,MAAM;AAC5C,UAAM,YAAY,aAAa,MAAM,SAAS,CAAC;AAC/C,UAAM,aAAa,YAAa,YAAY;AAE5C,QAAI,cAAc,QAAQ;AAGxB,YAAM,UAAU,aAAa,MAAM,SAAS,EAAE;AAC9C,YAAM,UAAU,aAAa,MAAM,SAAS,EAAE;AAC9C,aAAO,EAAE,OAAO,UAAU,GAAG,QAAQ,UAAU,EAAE;AAAA,IACnD;AAEA,QAAI,cAAc,QAAQ;AAIxB,YAAM,QAAQ,SAAS;AACvB,YAAM,OACH,KAAK,KAAK,KAAK,MACd,KAAK,QAAQ,CAAC,KAAK,MAAM,KACzB,KAAK,QAAQ,CAAC,KAAK,MAAM;AAC7B,YAAM,WAAW,EAAE,MAAM;AAEzB,UAAI,UAAU;AAEZ,YACE,KAAK,QAAQ,CAAC,MAAM,OACpB,KAAK,QAAQ,CAAC,MAAM,KACpB,KAAK,QAAQ,CAAC,MAAM,IACpB;AAGA,gBAAM,QAAQ,KAAK,QAAQ,CAAC,KAAK,MAAO,KAAK,QAAQ,CAAC,KAAK,MAAM;AACjE,gBAAM,QAAQ,KAAK,QAAQ,CAAC,KAAK,MAAO,KAAK,QAAQ,CAAC,KAAK,MAAM;AACjE,iBAAO,EAAE,OAAO,OAAO,OAAQ,QAAQ,OAAO,MAAO;AAAA,QACvD;AAAA,MACF;AAAA,IACF;AAEA,QAAI,cAAc,QAAQ;AAGxB,UAAI,KAAK,SAAS,CAAC,MAAM,IAAM;AAE7B,cAAM,OAAO,aAAa,MAAM,SAAS,CAAC;AAC1C,cAAM,SAAS,OAAO,SAAU;AAChC,cAAM,UAAW,QAAQ,KAAM,SAAU;AACzC,eAAO,EAAE,OAAO,OAAO;AAAA,MACzB;AAAA,IACF;AAEA,cAAU,IAAI;AAAA,EAChB;AACA,SAAO;AACT;;;ACjQO,IAAM,kBAAkB,CAC7B,SACA,SAEA,SAAS,SAAY,CAAC,EAAE,MAAM,QAAQ,SAAS,KAAK,CAAC,IAAI,CAAC;AASrD,IAAM,kBAAkB,CAC7B,SACA,SAEA,SAAS,SACL;AAAA,EACE;AAAA,IACE,MAAM;AAAA,IACN;AAAA,IACA,iBAAiB;AAAA,IACjB,mBAAmB;AAAA,IACnB,aAAa;AAAA,IACb,mBAAmB;AAAA,IACnB;AAAA,EACF;AACF,IACA,CAAC;AASA,IAAM,cAAc,CACzB,UACA,SACgC,SAAS,KAAK,CAAC,MAAM,EAAE,OAAO,SAAS,IAAI;AAQtE,IAAM,YAAY,CAAC,UAAuC;AAC/D,MAAI,UAAU,OAAW,QAAO;AAChC,SAAO,OAAO,UAAU,WAAW,QAAQ,KAAK,UAAU,KAAK;AACjE;;;AC1CA,IAAM,4BAAmE;AAAA;AAAA,EAEvE,OAAO;AAAA,EACP,OAAO;AAAA,EACP,aAAa;AAAA,EACb,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,SAAS;AAAA,EACT,WAAW;AAAA,EACX,eAAe;AAAA,EACf,OAAO;AAAA;AAAA,EAGP,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,kBAAkB;AAAA,EAClB,YAAY;AAAA;AAAA,EAGZ,oBAAoB;AAAA,EACpB,WAAW;AACb;AAQO,SAAS,oBAAoB,MAAq
C;AACvE,SAAO,0BAA0B,IAAI,KAAK;AAC5C;AAYO,SAAS,cAAc,MAAsB;AAClD,SAAO,KAAK,QAAQ,oBAAoB,CAAC,SAAS;AAChD,UAAM,OAAO,KAAK,WAAW,CAAC,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG;AAC5D,WAAO,MAAM,IAAI;AAAA,EACnB,CAAC;AACH;AAWA,SAAS,aAAa,MAAuB;AAE3C,SAAO,eAAe,KAAK,IAAI;AACjC;AAUO,SAAS,mBACd,SACA,MACA,UACgB;AAChB,MAAI,SAAS,OAAW,QAAO,CAAC;AAEhC,UAAQ,UAAU;AAAA,IAChB,KAAK,WAAW;AAEd,YAAM,YAAY,aAAa,IAAI,IAAI,SAAS;AAChD,aAAO,cAAc,SACjB,gBAAgB,SAAS,IAAI,IAC7B,gBAAgB,SAAS,IAAI;AAAA,IACnC;AAAA,IAEA,KAAK,uBAAuB;AAE1B,YAAM,UAAU,cAAc,IAAI;AAClC,aAAO,gBAAgB,SAAS,OAAO;AAAA,IACzC;AAAA,IAEA,KAAK,iBAAiB;AAEpB,aAAO,gBAAgB,SAAS,IAAI;AAAA,IACtC;AAAA,EACF;AACF;;;ACzGO,SAAS,0BACd,QACmB;AAEnB,QAAM,aAAa,OAAO,KAAK,CAAC,MAAM,EAAE,YAAY,YAAY;AAChE,MAAI,CAAC,YAAY;AACf,WAAO,CAAC;AAAA,EACV;AAGA,SAAO;AAAA,IACL;AAAA,MACE,QAAQ,EAAE,MAAM,kBAAkB;AAAA,MAClC,MAAM,WAAW;AAAA,IACnB;AAAA,EACF;AACF;AAQO,SAAS,0BACd,UACgB;AAEhB,QAAM,cAAc,SAAS,KAAK,CAAC,MAAM,EAAE,OAAO,SAAS,iBAAiB;AAC5E,MAAI,CAAC,aAAa;AAChB,WAAO,CAAC;AAAA,EACV;AAGA,SAAO;AAAA,IACL;AAAA,IACA,YAAY;AAAA,IACZ,oBAAoB,OAAO;AAAA,EAC7B;AACF;;;ACrCO,SAAS,mBAAmB,QAA2C;AAC5E,MAAI,OAAO,WAAW,EAAG,QAAO,CAAC;AAGjC,QAAM,WAAW,OAAO;AAAA,IACtB,OAAO,IAAI,CAAC,UAAU,CAAC,MAAM,SAAS,MAAM,IAAI,CAAC;AAAA,EACnD;AAGA,SAAO;AAAA,IACL;AAAA,MACE,QAAQ,EAAE,MAAM,kBAAkB;AAAA,MAClC,MAAM,KAAK,UAAU,QAAQ;AAAA,IAC/B;AAAA,EACF;AACF;AAUO,SAAS,mBACd,UACgB;AAChB,QAAM,cAAc,SAAS,KAAK,CAAC,MAAM,EAAE,OAAO,SAAS,iBAAiB;AAC5E,MAAI,CAAC,YAAa,QAAO,CAAC;AAG1B,QAAM,SAAS,UAAmC,YAAY,IAAI;AAClE,MAAI,OAAO,IAAI;AAEb,WAAO,OAAO,QAAQ,OAAO,KAAK,EAAE,QAAQ,CAAC,CAAC,SAAS,KAAK,MAAM;AAChE,YAAM,OAAO,OAAO,UAAU,WAAW,QAAQ,KAAK,UAAU,KAAK;AACrE,UAAI,CAAC,KAAM,QAAO,CAAC;AACnB,aAAO,mBAAmB,SAAS,MAAM,oBAAoB,OAAO,CAAC;AAAA,IACvE,CAAC;AAAA,EACH;AAGA,SAAO;AAAA,IACL;AAAA,IACA,YAAY;AAAA,IACZ,oBAAoB,OAAO;AAAA,EAC7B;AACF;;;AC3CO,SAAS,4BACd,QACmB;AAEnB,QAAM,OAAgC,CAAC;AAEvC,aAAW,SAAS,QAAQ;AAE1B,UAAM,SAAS,UAAmB,MAAM,IAAI;AAC5C,QAAI,OAAO,IAAI;AAEb,WAAK,MAAM,OAAO,IAAI,OAAO;AAAA,IAC/B,OAAO;AAEL,WAAK,MAAM,OAAO,IAAI,MAAM;AAAA,IAC9B;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,MACE,QAAQ,EAAE,MAAM,kBAAkB;AAAA,MAClC,MAAM,KAAK,UAAU,IAAI;AAAA,IAC3B;AAAA,EACF;AACF;AAOA,IAAM,yBAAyB,CAC7B,aAC0B;AAC1B,QAAM,mBAAmB,YAAY,UAAU,sBAAsB;AACrE,QAAM,OAAO,YAAY,UAAU,UAAU;AAE7C,MAAI,CAAC,oBAAoB,CAAC,MAAM;AAC9B,WAAO;AAAA,EACT;AAEA,SAAO;AAAA,IACL,GAAG,mBAAmB,UAAU,MAAM,MAAM,oBAAoB,SAAS,CAAC;AAAA,IAC1E,GAAG;AAAA,MACD;AAAA,MACA,kBAAkB;AAAA,MAClB,oBAAoB,SAAS;AAAA,IAC/B;AAAA,EACF;AACF;AAOA,IAAM,8BAA8B,CAClC,aAC0B;AAC1B,QAAM,cAAc,YAAY,UAAU,iBAAiB;AAC3D,MAAI,CAAC,aAAa;AAChB,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,UAAmC,YAAY,IAAI;AAClE,MAAI,CAAC,OAAO,IAAI;AAEd,WAAO;AAAA,MACL;AAAA,MACA,YAAY;AAAA,MACZ,oBAAoB,SAAS;AAAA,IAC/B;AAAA,EACF;AAGA,SAAO,OAAO,QAAQ,OAAO,KAAK,EAAE;AAAA,IAAQ,CAAC,CAAC,SAAS,KAAK,MAC1D;AAAA,MACE;AAAA,MACA,UAAU,KAAK;AAAA,MACf,oBAAoB,SAAS;AAAA,IAC/B;AAAA,EACF;AACF;AAYO,SAAS,4BACd,UACgB;AAEhB,SACE,uBAAuB,QAAQ,KAC/B,4BAA4B,QAAQ,KACpC,CAAC;AAEL;;;AC1GO,SAAS,kCACd,QACmB;AACnB,QAAM,OAAO,OAAO;AAAA,IAClB,OAAO,IAAI,CAAC,UAAU,CAAC,MAAM,SAAS,MAAM,IAAI,CAAC;AAAA,EACnD;AAEA,SAAO;AAAA,IACL;AAAA,MACE,QAAQ,EAAE,MAAM,kBAAkB;AAAA,MAClC,MAAM,KAAK,UAAU,IAAI;AAAA,IAC3B;AAAA,EACF;AACF;AAQO,SAAS,kCACd,UACgB;AAChB,QAAM,cAAc,YAAY,UAAU,iBAAiB;AAC3D,MAAI,CAAC,aAAa;AAChB,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,SAAS,UAAmC,YAAY,IAAI;AAClE,MAAI,CAAC,OAAO,IAAI;AACd,WAAO,CAAC;AAAA,EACV;AAGA,SAAO,OAAO,QAAQ,OAAO,KAAK,EAAE,QAAQ,CAAC,CAAC,SAAS,KAAK,MAAM;AAChE,UAAM,OACJ,SAAS,OACL,OAAO,UAAU,WACf,QACA,OAAO,KAAK,IACd;AACN,QAAI,CAAC,KAAM,QAAO,CAAC;AACnB,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA,oBAAoB,eAAe;AAAA,IACrC;AAAA,EACF,CAAC;AACH;;;AC/CO,SAAS,6BACd,QACmB;AACnB,QAAM,OA
AgC,CAAC;AAEvC,aAAW,SAAS,QAAQ;AAC1B,UAAM,SAAS,UAAmB,MAAM,IAAI;AAC5C,QAAI,OAAO,IAAI;AACb,WAAK,MAAM,OAAO,IAAI,OAAO;AAAA,IAC/B,OAAO;AACL,WAAK,MAAM,OAAO,IAAI,MAAM;AAAA,IAC9B;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,MACE,QAAQ,EAAE,MAAM,kBAAkB;AAAA,MAClC,MAAM,KAAK,UAAU,IAAI;AAAA,IAC3B;AAAA,EACF;AACF;AAQO,SAAS,6BACd,UACgB;AAChB,QAAM,cAAc,YAAY,UAAU,iBAAiB;AAC3D,MAAI,CAAC,aAAa;AAChB,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,SAAS,UAAmC,YAAY,IAAI;AAClE,MAAI,CAAC,OAAO,IAAI;AAEd,WAAO;AAAA,MACL;AAAA,MACA,YAAY;AAAA,MACZ,oBAAoB,UAAU;AAAA,IAChC;AAAA,EACF;AAGA,QAAM,eAAe,UAAU,OAAO,MAAM,iBAAiB;AAC7D,QAAM,YAAY,UAAU,OAAO,MAAM,cAAc;AAGvD,QAAM,SAAS;AAAA,IACb,GAAG;AAAA,MACD;AAAA,MACA;AAAA,MACA,oBAAoB,UAAU;AAAA,IAChC;AAAA,IACA,GAAG;AAAA,MACD;AAAA,MACA;AAAA,MACA,oBAAoB,UAAU;AAAA,IAChC;AAAA,EACF;AAEA,MAAI,OAAO,SAAS,GAAG;AACrB,WAAO;AAAA,EACT;AAGA,SAAO;AAAA,IACL;AAAA,IACA,YAAY;AAAA,IACZ,oBAAoB,UAAU;AAAA,EAChC;AACF;;;ACtFA,IAAM,gBAAgB;AACtB,IAAM,mBAAmB;AAgBlB,SAAS,4BACd,QACmB;AACnB,QAAM,UAAU,OAAO,KAAK,CAAC,MAAM,EAAE,YAAY,SAAS;AAC1D,MAAI,CAAC,SAAS;AACZ,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,cAAc,OAAO,KAAK,CAAC,MAAM,EAAE,YAAY,aAAa;AAClE,QAAM,OAAO,qBAAqB,MAAM;AAGxC,QAAM,qBAAwC,cAC1C;AAAA,IACE;AAAA,MACE,QAAQ,EAAE,MAAM,uBAAuB;AAAA,MACvC,MAAM,WAAW,YAAY,IAAI;AAAA,IACnC;AAAA,EACF,IACA,CAAC;AAEL,QAAM,qBAAsC;AAAA,IAC1C,QAAQ,EAAE,MAAM,kBAAkB;AAAA,IAClC,MAAM,KAAK,UAAU,IAAI;AAAA,EAC3B;AAEA,SAAO,CAAC,GAAG,oBAAoB,kBAAkB;AACnD;AAKA,SAAS,qBAAqB,QAAgD;AAC5E,SAAO,kBAAkB,IAAI,CAAC,QAAQ;AACpC,UAAM,QAAQ,OAAO,KAAK,CAAC,MAAM,EAAE,YAAY,GAAG;AAClD,WAAO,QAAQ,EAAE,CAAC,GAAG,GAAG,MAAM,KAAK,IAAI;AAAA,EACzC,CAAC,EACE,OAAO,CAAC,UAA2C,UAAU,IAAI,EACjE;AAAA,IACC,CAAC,KAAK,UAAU,OAAO,OAAO,KAAK,KAAK;AAAA,IACxC,CAAC;AAAA,EACH;AACJ;AAKA,IAAM,oBAAoB;AAAA,EACxB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAQO,SAAS,4BACd,UACgB;AAChB,QAAM,iBAAiB,YAAY,UAAU,iBAAiB;AAC9D,QAAM,iBAAiB,YAAY,UAAU,sBAAsB;AAEnE,SAAO,cAAc,gBAAgB,cAAc;AACrD;AAKA,SAAS,cACP,gBACA,gBACgB;AAChB,MAAI,CAAC,kBAAkB,CAAC,gBAAgB;AACtC,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,SAAS,UAAmC,eAAe,IAAI;AACrE,MAAI,CAAC,OAAO,IAAI;AAEd,WAAO,gBAAgB,WAAW,eAAe,IAAI;AAAA,EACvD;AAEA,QAAM,WAAW,OAAO;AAGxB,QAAM,kBAAkB;AAAA,IACtB;AAAA,IACA,UAAU,SAAS,WAAW;AAAA,EAChC;AAEA,QAAM,oBAAoB,kBACtB;AAAA,IACE;AAAA,IACA;AAAA,IACA,oBAAoB,SAAS;AAAA,EAC/B,IACA,CAAC;AAEL,SAAO;AAAA;AAAA,IAEL,gBAAgB,SAAS,UAAU,SAAS,KAAK,KAAK,aAAa;AAAA;AAAA,IAEnE,GAAG;AAAA;AAAA,IAEH;AAAA,MACE;AAAA,MACA,UAAU,SAAS,QAAQ,KAAK;AAAA,IAClC;AAAA;AAAA,IAEA,gBAAgB,UAAU,UAAU,SAAS,MAAM,CAAC;AAAA;AAAA,IAEpD,gBAAgB,mBAAmB,UAAU,SAAS,iBAAiB,CAAC,CAAC;AAAA;AAAA,IAEzE,gBAAgB,WAAW,UAAU,SAAS,OAAO,CAAC;AAAA,EACxD,EAAE,KAAK;AACT;AAQA,SAAS,uBACP,gBACA,iBACoB;AAEpB,MAAI,gBAAgB,MAAM;AACxB,UAAM,OAAO,eAAe;AAE5B,WAAO,KAAK,WAAW,UAAU,IAAI,KAAK,MAAM,CAAC,IAAI;AAAA,EACvD;AAGA,MAAI,iBAAiB;AAEnB,WAAO,gBAAgB,WAAW,UAAU,IACxC,gBAAgB,MAAM,CAAC,IACvB;AAAA,EACN;AAEA,SAAO;AACT;;;AClKO,SAAS,oBACd,SAC+C;AAC/C,SAAO,CAAC,WAAW;AACjB,UAAM,QAAQ,OAAO,KAAK,CAAC,MAAM,EAAE,YAAY,OAAO;AACtD,WAAO,CAAC,QACJ,CAAC,IACD,CAAC,EAAE,QAAQ,EAAE,MAAM,kBAAkB,GAAG,MAAM,MAAM,KAAK,CAAC;AAAA,EAChE;AACF;AAUO,SAAS,oBACd,SACiD;AACjD,SAAO,CAAC,aAAa;AACnB,UAAM,cAAc,SAAS;AAAA,MAC3B,CAAC,MAAM,EAAE,OAAO,SAAS;AAAA,IAC3B;AACA,QAAI,CAAC,YAAa,QAAO,CAAC;AAG1B,WAAO;AAAA,MACL;AAAA,MACA,YAAY;AAAA,MACZ,oBAAoB,OAAO;AAAA,IAC7B;AAAA,EACF;AACF;;;AC1BO,SAAS,4BACd,QACmB;AAEnB,QAAM,kBAAkB,OAAO,KAAK,CAAC,MAAM,EAAE,YAAY,YAAY;AACrE,MAAI,CAAC,iBAAiB;AACpB,WAAO,CAAC;AAAA,EACV;AAGA,QAAM,SAAS,UAAmB,gBAAgB,IAAI;AACtD,QAAM,OAAO,OAAO,KAAK,OAAO,QAAQ,gBAAgB;AAExD,QAAM,WAA8B;AAAA,IAClC;AAAA,MACE,QAAQ,EAAE,MAAM,kBAAkB;AAAA,MAClC,MAAM,OAAO,SAAS,WAAW,OAAO,KAAK,UAAU,IAAI;AAAA,IAC7D;AAAA,EACF
;AAGA,QAAM,cAAc,OAAO,KAAK,CAAC,MAAM,EAAE,YAAY,QAAQ;AAC7D,MAAI,aAAa;AACf,aAAS,KAAK;AAAA,MACZ,QAAQ,EAAE,MAAM,WAAW;AAAA,MAC3B,MAAM,YAAY;AAAA,IACpB,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAcO,SAAS,4BACd,UACgB;AAChB,QAAM,cAAc,YAAY,UAAU,iBAAiB;AAC3D,MAAI,CAAC,aAAa;AAChB,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,SAAyB,CAAC;AAGhC,QAAM,OAAO,YAAY,UAAU,UAAU;AAC7C,MAAI,MAAM;AACR,WAAO;AAAA,MACL,GAAG;AAAA,QACD;AAAA,QACA,KAAK;AAAA,QACL,oBAAoB,SAAS;AAAA,MAC/B;AAAA,IACF;AAAA,EACF;AAGA,SAAO;AAAA,IACL,GAAG;AAAA,MACD;AAAA,MACA,YAAY;AAAA,MACZ,oBAAoB,SAAS;AAAA,IAC/B;AAAA,EACF;AAEA,SAAO;AACT;;;AC5CO,SAAS,gBACd,aACA,cACA,QAAQ,OACU;AAElB,MAAI,YAAY,WAAW,SAAS;AAClC,WAAO,OAAO,MAAM,EAAE,MAAM,iBAAiB,CAAC;AAAA,EAChD;AAEA,MAAI,YAAY,WAAW,WAAW;AACpC,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,QAAQ,YAAY;AAAA,IACtB,CAAC;AAAA,EACH;AAEA,QAAM,MAAM,YAAY;AAGxB,MACG,IAAI,WAAW,SAAS,iBAAiB,SACzC,IAAI,WAAW,UAAU,iBAAiB,UAC1C,IAAI,WAAW,UAAU,iBAAiB,QAC3C;AACA,WAAO,OAAO,GAAG,GAAG;AAAA,EACtB;AAEA,QAAM,WACJ,YAAY,WAAW,YAAY,YAAY,SAAS,WAAW;AAGrE,MAAI,CAAC,UAAU;AACb,WAAO,QACH,aAAa,KAAK,YAAY,IAC9B,OAAO,MAAM;AAAA,MACX,MAAM;AAAA,MACN,UAAU;AAAA,IACZ,CAAC;AAAA,EACP;AAGA,QAAM,YAAY,mBAAmB,QAAQ;AAC7C,MAAI,CAAC,WAAW;AAEd,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO,UAAU,KAAK,YAAY;AACpC;AAwBA,SAAS,sBACP,eACA,eACa;AACb,SAAO,CAAC,KAAK,iBAAiB;AAC5B,QAAI,IAAI,WAAW,OAAO;AAExB,UAAI,iBAAiB,OAAO;AAC1B,eAAO,OAAO,GAAG,GAAG;AAAA,MACtB;AAEA,YAAM,WAAW,cAAc,IAAI,MAAM;AACzC,aAAO,OAAO,GAAG,EAAE,QAAQ,cAAc,SAAS,CAAC;AAAA,IACrD;AAGA,QAAI,iBAAiB,UAAU,iBAAiB,QAAQ;AACtD,aAAO,OAAO,GAAG,EAAE,QAAQ,cAAc,UAAU,IAAI,SAAS,CAAC;AAAA,IACnE;AAGA,UAAM,SAAS,cAAc,IAAI,QAAQ;AACzC,WAAO,OAAO,GAAG,EAAE,QAAQ,OAAO,OAAO,CAAC;AAAA,EAC5C;AACF;AAGA,IAAM,iBAAiB;AAAA,EACrB;AAAA,EACA;AACF;AAEA,IAAM,eAAe;AAAA,EACnB;AAAA,EACA;AACF;AAEA,IAAM,iBAAiB;AAAA,EACrB;AAAA,EACA;AACF;AAEA,IAAM,uBAAuB;AAAA,EAC3B;AAAA,EACA;AACF;AAEA,IAAM,iBAAiB;AAAA,EACrB,oBAAoB,SAAS;AAAA,EAC7B,oBAAoB,SAAS;AAC/B;AAEA,IAAM,uBAAuB;AAAA,EAC3B,oBAAoB,YAAY;AAAA,EAChC,oBAAoB,YAAY;AAClC;AAEA,IAAM,iBAAiB;AAAA,EACrB;AAAA,EACA;AACF;AAEA,IAAM,kBAAkB;AAAA,EACtB;AAAA,EACA;AACF;AAEA,IAAM,iBAAiB;AAAA,EACrB,oBAAoB,YAAY;AAAA,EAChC,oBAAoB,YAAY;AAClC;AAEA,IAAM,eAAe;AAAA,EACnB;AAAA,EACA;AACF;AAKA,IAAM,qBAAqB;AAAA;AAAA,EAEzB,SAAS;AAAA;AAAA,EAET,YAAY;AAAA,EACZ,WAAW;AAAA,EACX,OAAO;AAAA,EACP,aAAa;AAAA,EACb,SAAS;AAAA;AAAA,EAET,SAAS;AAAA,EACT,WAAW;AAAA,EACX,oBAAoB;AAAA;AAAA,EAEpB,eAAe;AAAA;AAAA,EAEf,SAAS;AAAA,EACT,kBAAkB;AAAA;AAAA,EAElB,SAAS;AAAA;AAAA,EAET,UAAU;AAAA;AAAA,EAEV,YAAY;AACd;;;ACrNO,SAAS,kBAAkB,UAAyC;AAEzE,QAAM,eAAe,SAAS;AAAA,IAC5B,CAAC,MACC,EAAE,OAAO,SAAS,0BAA0B,EAAE,OAAO,SAAS;AAAA,EAClE;AACA,QAAM,kBAAkB,SAAS;AAAA,IAC/B,CAAC,MAAM,EAAE,OAAO,SAAS;AAAA,EAC3B;AAGA,MAAI,aAAa,WAAW,KAAK,gBAAgB,WAAW,GAAG;AAC7D,WAAO,IAAI,WAAW,CAAC;AAAA,EACzB;AAEA,QAAM,iBAAiB;AAGvB,QAAM,WAAmE,CAAC;AAC1E,QAAM,WAAmE,CAAC;AAE1E,aAAW,OAAO,cAAc;AAC9B,QAAI,IAAI,OAAO,SAAS,wBAAwB;AAC9C,YAAM,OAAO,eAAe,IAAI,MAAM,IAAI,OAAO,MAAM;AACvD,eAAS,KAAK,EAAE,KAAK,uBAAuB,MAAM,GAAG,KAAK,CAAC;AAAA,IAC7D,WAAW,IAAI,OAAO,SAAS,YAAY;AACzC,YAAM,OAAO,eAAe,IAAI,MAAM,IAAI,OAAO,MAAM;AACvD,eAAS,KAAK,EAAE,KAAK,UAAU,MAAM,GAAG,KAAK,CAAC;AAAA,IAChD;AAAA,EACF;AAEA,aAAW,OAAO,iBAAiB;AACjC,QAAI,IAAI,OAAO,SAAS,mBAAmB;AACzC,YAAM,OAAO,kBAAkB,IAAI,IAAI;AACvC,eAAS,KAAK,EAAE,KAAK,kBAAkB,MAAM,GAAG,KAAK,CAAC;AAAA,IACxD;AAAA,EACF;AAEA,QAAM,aAAa,SAAS,SAAS;AACrC,MAAI,YAAY;AACd,aAAS,KAAK;AAAA,MACZ,KAAK;AAAA,MACL,MAAM;AAAA,MACN,MAAM,IAAI,WAAW,CAAC;AAAA,IACxB,CAAC;AAAA,EACH;AAGA,WAAS,KAAK,CAAC,GAAG,MAAM,EAAE,MAAM,EAAE,GAAG;AACrC,WAAS,KAAK,CAAC,GAAG,MAAM,EAAE,MAAM,EAAE,GAAG;AAGrC,QAAM,aAAa;AACnB,QAAM,iBAAiB,SAAS;AAChC,QAAM,
WAAW,IAAI,KAAK,iBAAiB;AAC3C,QAAM,iBAAiB,SAAS;AAChC,QAAM,cAAc,aAAa,IAAI,KAAK,iBAAiB,IAAI;AAE/D,QAAM,aAAa;AACnB,QAAM,gBAAgB,aAAa;AACnC,MAAI,aAAa,gBAAgB;AAGjC,MAAI,YAAY;AACd,UAAM,aAAa,SAAS,KAAK,CAAC,MAAM,EAAE,QAAQ,oBAAoB;AACtE,QAAI,YAAY;AACd,kBAAY,WAAW,MAAM,GAAG,eAAe,cAAc;AAAA,IAC/D;AAAA,EACF;AAGA,QAAM,iBAAiB,oBAAI,IAGzB;AAEF,aAAW,OAAO,CAAC,GAAG,UAAU,GAAG,QAAQ,GAAG;AAC5C,QAAI,IAAI,KAAK,SAAS,GAAG;AACvB,qBAAe,IAAI,KAAK,UAAU;AAClC,oBAAc,IAAI,KAAK;AACvB,UAAI,IAAI,KAAK,SAAS,MAAM,GAAG;AAC7B,sBAAc;AAAA,MAChB;AAAA,IACF;AAAA,EACF;AAGA,QAAM,YAAY;AAClB,QAAM,SAAS,IAAI,WAAW,SAAS;AAGvC,SAAO,CAAC,IAAI;AACZ,SAAO,CAAC,IAAI;AACZ,cAAY,QAAQ,GAAG,IAAI,cAAc;AACzC,cAAY,QAAQ,GAAG,YAAY,cAAc;AAGjD,MAAI,SAAS;AACb,cAAY,QAAQ,QAAQ,gBAAgB,cAAc;AAC1D,YAAU;AAEV,aAAW,OAAO,UAAU;AAC1B,kBAAc,QAAQ,QAAQ,KAAK,eAAe,IAAI,GAAG,GAAG,cAAc;AAC1E,cAAU;AAAA,EACZ;AAEA,cAAY,QAAQ,QAAQ,GAAG,cAAc;AAC7C,YAAU;AAGV,MAAI,YAAY;AACd,gBAAY,QAAQ,QAAQ,gBAAgB,cAAc;AAC1D,cAAU;AAEV,eAAW,OAAO,UAAU;AAC1B;AAAA,QACE;AAAA,QACA;AAAA,QACA;AAAA,QACA,eAAe,IAAI,GAAG;AAAA,QACtB;AAAA,MACF;AACA,gBAAU;AAAA,IACZ;AAEA,gBAAY,QAAQ,QAAQ,GAAG,cAAc;AAAA,EAC/C;AAGA,aAAW,CAAC,KAAK,OAAO,KAAK,gBAAgB;AAC3C,WAAO,IAAI,IAAI,MAAM,OAAO;AAAA,EAC9B;AAEA,SAAO;AACT;AAKA,SAAS,cACP,MACA,QACA,KACA,YACA,gBACM;AACN,cAAY,MAAM,QAAQ,IAAI,KAAK,cAAc;AACjD,cAAY,MAAM,SAAS,GAAG,IAAI,MAAM,cAAc;AACtD,cAAY,MAAM,SAAS,GAAG,IAAI,KAAK,QAAQ,cAAc;AAE7D,MAAI,IAAI,KAAK,UAAU,GAAG;AACxB,SAAK,IAAI,IAAI,MAAM,SAAS,CAAC;AAAA,EAC/B,OAAO;AACL,gBAAY,MAAM,SAAS,GAAG,cAAc,GAAG,cAAc;AAAA,EAC/D;AACF;AAUA,SAAS,kBAAkB,MAA0B;AACnD,QAAM,YAAsB,CAAC;AAC7B,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,UAAM,OAAO,KAAK,WAAW,CAAC;AAC9B,cAAU,KAAK,OAAO,GAAI;AAC1B,cAAU,KAAM,QAAQ,IAAK,GAAI;AAAA,EACnC;AAEA,QAAM,SAAS,IAAI,WAAW,IAAI,UAAU,MAAM;AAGlD,SAAO,CAAC,IAAI;AACZ,SAAO,CAAC,IAAI;AACZ,SAAO,CAAC,IAAI;AACZ,SAAO,CAAC,IAAI;AACZ,SAAO,CAAC,IAAI;AACZ,SAAO,CAAC,IAAI;AACZ,SAAO,CAAC,IAAI;AACZ,SAAO,CAAC,IAAI;AAEZ,SAAO,IAAI,IAAI,WAAW,SAAS,GAAG,CAAC;AACvC,SAAO;AACT;AASA,SAAS,eAAe,MAAc,QAA6B;AACjE,QAAM,WAAW,SAAS,GAAG,MAAM,KAAK,IAAI,KAAK;AACjD,QAAM,YAAY,IAAI,YAAY,EAAE,OAAO,QAAQ;AACnD,QAAM,SAAS,IAAI,WAAW,UAAU,SAAS,CAAC;AAClD,SAAO,IAAI,WAAW,CAAC;AACvB,SAAO,UAAU,MAAM,IAAI;AAC3B,SAAO;AACT;;;AC5NA,IAAMC,eAAc;AAGpB,IAAMC,cAAa;AAGnB,IAAM,aAAa;AAGnB,IAAM,aAAa;AAGnB,IAAMC,eAAc,IAAI,WAAW,CAAC,IAAM,KAAM,KAAM,KAAM,GAAM,CAAI,CAAC;AAchE,SAAS,kBACd,MACA,UACiB;AAEjB,MAAI,CAAC,OAAO,IAAI,GAAG;AACjB,WAAO,OAAO,MAAM,EAAE,MAAM,mBAAmB,CAAC;AAAA,EAClD;AAGA,QAAM,cAAc,SAAS,OAAO,CAAC,MAAM,EAAE,OAAO,SAAS,SAAS;AACtE,QAAM,eAAe,SAAS;AAAA,IAC5B,CAAC,MACC,EAAE,OAAO,SAAS,qBAClB,EAAE,OAAO,SAAS,0BAClB,EAAE,OAAO,SAAS;AAAA,EACtB;AAGA,QAAM,gBAAgB,2BAA2B,IAAI;AACrD,MAAI,CAAC,cAAc,IAAI;AACrB,WAAO;AAAA,EACT;AAEA,QAAM,EAAE,WAAW,SAAS,IAAI,cAAc;AAG9C,QAAM,cACJ,aAAa,SAAS,IAAI,iBAAiB,YAAY,IAAI;AAG7D,QAAM,iBAAiB,YAAY,IAAI,CAAC,MAAM,gBAAgB,EAAE,IAAI,CAAC;AAGrE,MAAI,YAAY;AAChB,MAAI,aAAa;AACf,iBAAa,YAAY;AAAA,EAC3B;AACA,aAAW,OAAO,WAAW;AAC3B,iBAAa,IAAI;AAAA,EACnB;AACA,aAAW,OAAO,gBAAgB;AAChC,iBAAa,IAAI;AAAA,EACnB;AACA,eAAa,SAAS;AAGtB,QAAM,SAAS,IAAI,WAAW,SAAS;AACvC,MAAI,SAAS;AAGb,SAAO,QAAQ,IAAI;AACnB,SAAO,QAAQ,IAAI;AAGnB,MAAI,aAAa;AACf,WAAO,IAAI,aAAa,MAAM;AAC9B,cAAU,YAAY;AAAA,EACxB;AAGA,aAAW,OAAO,WAAW;AAC3B,WAAO,IAAI,KAAK,MAAM;AACtB,cAAU,IAAI;AAAA,EAChB;AAGA,aAAW,OAAO,gBAAgB;AAChC,WAAO,IAAI,KAAK,MAAM;AACtB,cAAU,IAAI;AAAA,EAChB;AAGA,SAAO,IAAI,UAAU,MAAM;AAE3B,SAAO,OAAO,GAAG,MAAM;AACzB;AAOA,SAAS,2BACP,MAIA;AACA,QAAM,YAA0B,CAAC;AACjC,MAAI,SAAS;AAEb,SAAO,SAAS,KAAK,SAAS,GAAG;AAE/B,QAAI,KAAK,MAAM,MAAM,KAAM;AACzB,aAAO,OAAO,MAAM;AAAA,QAClB,MAAM;AAAA,QACN,SAAS,6BAA6B,MAAM;AAAA,MAC9C,CAAC;AAAA,IACH;AAGA,WAAO,KAAK,MAAM,MAAM,OAAQ
,SAAS,KAAK,SAAS,GAAG;AACxD;AAAA,IACF;AAEA,UAAM,SAAS,KAAK,MAAM;AAC1B;AAGA,QAAI,WAAW,YAAY;AAEzB,YAAM,WAAW,KAAK,MAAM,SAAS,CAAC;AACtC,aAAO,OAAO,GAAG,EAAE,WAAW,SAAS,CAAC;AAAA,IAC1C;AAGA,QAAI,WAAW,YAAY;AACzB,aAAO,OAAO,GAAG,EAAE,WAAW,UAAU,IAAI,WAAW,CAAC,KAAM,GAAI,CAAC,EAAE,CAAC;AAAA,IACxE;AAGA,QAAI,SAAS,IAAI,KAAK,QAAQ;AAC5B,aAAO,OAAO,MAAM;AAAA,QAClB,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,UAAW,KAAK,MAAM,KAAK,MAAM,KAAM,KAAK,SAAS,CAAC,KAAK;AACjE,UAAM,eAAe,SAAS;AAC9B,UAAM,aAAa,SAAS;AAE5B,QAAI,aAAa,KAAK,QAAQ;AAC5B,aAAO,OAAO,MAAM;AAAA,QAClB,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAGA,UAAM,aACJ,WAAWF,gBACX,SAAS,IAAI,KAAK,KAAK,UACvB,KAAK,SAAS,CAAC,MAAM;AAAA,IACrB,KAAK,SAAS,CAAC,MAAM;AAAA,IACrB,KAAK,SAAS,CAAC,MAAM;AAAA,IACrB,KAAK,SAAS,CAAC,MAAM;AAAA,IACrB,KAAK,SAAS,CAAC,MAAM;AAAA,IACrB,KAAK,SAAS,CAAC,MAAM;AAEvB,UAAM,QAAQ,WAAWC;AAGzB,QAAI,CAAC,cAAc,CAAC,OAAO;AACzB,gBAAU,KAAK,KAAK,MAAM,cAAc,UAAU,CAAC;AAAA,IACrD;AAEA,aAAS;AAAA,EACX;AAGA,SAAO,OAAO,MAAM;AAAA,IAClB,MAAM;AAAA,IACN,SAAS;AAAA,EACX,CAAC;AACH;AAKA,SAAS,iBAAiB,UAAyC;AACjE,QAAM,WAAW,kBAAkB,QAAQ;AAE3C,MAAI,SAAS,WAAW,GAAG;AACzB,WAAO,IAAI,WAAW,CAAC;AAAA,EACzB;AAGA,QAAM,gBAAgB,IAAIC,aAAY,SAAS,SAAS;AACxD,QAAM,UAAU,IAAI,WAAW,IAAI,aAAa;AAEhD,UAAQ,CAAC,IAAI;AACb,UAAQ,CAAC,IAAIF;AACb,UAAQ,CAAC,IAAK,iBAAiB,IAAK;AACpC,UAAQ,CAAC,IAAI,gBAAgB;AAC7B,UAAQ,IAAIE,cAAa,CAAC;AAC1B,UAAQ,IAAI,UAAU,IAAIA,aAAY,MAAM;AAE5C,SAAO;AACT;AAKA,SAAS,gBAAgB,MAA0B;AACjD,QAAM,YAAY,IAAI,YAAY,EAAE,OAAO,IAAI;AAC/C,QAAM,gBAAgB,IAAI,UAAU;AAEpC,QAAM,UAAU,IAAI,WAAW,IAAI,aAAa;AAChD,UAAQ,CAAC,IAAI;AACb,UAAQ,CAAC,IAAID;AACb,UAAQ,CAAC,IAAK,iBAAiB,IAAK;AACpC,UAAQ,CAAC,IAAI,gBAAgB;AAC7B,UAAQ,IAAI,WAAW,CAAC;AAExB,SAAO;AACT;;;ACpOA,IAAM,gBAAgB,IAAI,WAAW,CAAC,KAAK,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,EAAE,CAAC;AAY/D,SAAS,iBACd,MACA,QACgB;AAEhB,MAAI,CAAC,MAAM,IAAI,GAAG;AAChB,WAAO,OAAO,MAAM,EAAE,MAAM,mBAAmB,CAAC;AAAA,EAClD;AAGA,QAAM,UAAU,iBAAiB,IAAI;AACrC,MAAI,YAAY,IAAI;AAClB,WAAO,OAAO,MAAM,EAAE,MAAM,cAAc,CAAC;AAAA,EAC7C;AAGA,QAAM,iBAAiB,qBAAqB,IAAI;AAGhD,QAAM,uBAAuB,OAAO;AAAA,IAAI,CAAC,UACvC,MAAM,SAAS,SACX,mBAAmB,KAAK,IACxB,mBAAmB,KAAK;AAAA,EAC9B;AAGA,QAAM,YACJ,cAAc,SACd,eAAe,KAAK,SACpB,qBAAqB,OAAO,CAAC,KAAK,UAAU,MAAM,MAAM,QAAQ,CAAC,IACjE,eAAe,OAAO,OAAO,CAAC,KAAK,UAAU,MAAM,MAAM,QAAQ,CAAC;AAGpE,QAAM,SAAS,IAAI,WAAW,SAAS;AACvC,MAAI,SAAS;AAGb,SAAO,IAAI,eAAe,MAAM;AAChC,YAAU,cAAc;AAGxB,SAAO,IAAI,eAAe,MAAM,MAAM;AACtC,YAAU,eAAe,KAAK;AAG9B,aAAW,SAAS,sBAAsB;AACxC,WAAO,IAAI,OAAO,MAAM;AACxB,cAAU,MAAM;AAAA,EAClB;AAGA,aAAW,SAAS,eAAe,QAAQ;AACzC,WAAO,IAAI,OAAO,MAAM;AACxB,cAAU,MAAM;AAAA,EAClB;AAEA,SAAO,OAAO,GAAG,MAAM;AACzB;AAMA,SAAS,iBAAiB,MAA0B;AAClD,QAAM,SAAS,cAAc;AAE7B,MAAI,SAAS,IAAI,KAAK,QAAQ;AAC5B,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,aAAa,MAAM,MAAM;AACxC,QAAM,YAAY,cAAc,MAAM,SAAS,CAAC;AAEhD,MAAI,cAAc,QAAQ;AACxB,WAAO;AAAA,EACT;AAGA,SAAO,SAAS,IAAI,IAAI,SAAS;AACnC;AAKA,SAAS,qBAAqB,MAG5B;AACA,QAAM,SAAuB,CAAC;AAC9B,MAAI,SAAS,cAAc;AAC3B,MAAI,OAAmB,IAAI,WAAW,CAAC;AAEvC,SAAO,SAAS,KAAK,QAAQ;AAC3B,UAAM,aAAa;AAGnB,QAAI,SAAS,IAAI,KAAK,OAAQ;AAC9B,UAAM,SAAS,aAAa,MAAM,MAAM;AACxC,cAAU;AAGV,QAAI,SAAS,IAAI,KAAK,OAAQ;AAC9B,UAAM,YAAY,cAAc,MAAM,MAAM;AAC5C,cAAU;AAGV,cAAU;AAGV,cAAU;AAEV,UAAM,WAAW;AACjB,UAAM,YAAY,KAAK,MAAM,YAAY,QAAQ;AAEjD,QAAI,cAAc,QAAQ;AACxB,aAAO;AAAA,IACT,WAAW,cAAc,UAAU,cAAc,QAAQ;AACvD,aAAO,KAAK,SAAS;AAAA,IACvB;AAEA,QAAI,cAAc,QAAQ;AACxB;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,MAAM,OAAO;AACxB;AAUA,SAAS,mBAAmB,OAA8B;AAExD,QAAM,UAAU,aAAa,MAAM,OAAO;AAE1C,QAAM,OAAO,WAAW,MAAM,IAAI;AAGlC,QAAM,YAAY,IAAI,WAAW,QAAQ,SAAS,IAAI,KAAK,MAAM;AACjE,YAAU,IAAI,SAAS,CAAC;AACxB,YAAU,QAAQ,MAAM,IAAI;AAC5B,YAAU,IAAI,MAAM,QAAQ,SAAS,CAAC;AAEtC,S
AAO,WAAW,QAAQ,SAAS;AACrC;AAKA,SAAS,mBAAmB,OAA8B;AAExD,QAAM,UAAU,WAAW,MAAM,OAAO;AACxC,QAAM,cAAc,WAAW,MAAM,WAAW;AAChD,QAAM,oBAAoB,WAAW,MAAM,iBAAiB;AAC5D,QAAM,OAAO,WAAW,MAAM,IAAI;AAGlC,QAAM,WACJ,QAAQ,SACR;AAAA,EACA;AAAA,EACA;AAAA,EACA,YAAY,SACZ;AAAA,EACA,kBAAkB,SAClB;AAAA,EACA,KAAK;AAEP,QAAM,YAAY,IAAI,WAAW,QAAQ;AACzC,MAAI,SAAS;AAGb,YAAU,IAAI,SAAS,MAAM;AAC7B,YAAU,QAAQ;AAClB,YAAU,QAAQ,IAAI;AAGtB,YAAU,QAAQ,IAAI,MAAM;AAC5B,YAAU,QAAQ,IAAI,MAAM;AAG5B,YAAU,IAAI,aAAa,MAAM;AACjC,YAAU,YAAY;AACtB,YAAU,QAAQ,IAAI;AAGtB,YAAU,IAAI,mBAAmB,MAAM;AACvC,YAAU,kBAAkB;AAC5B,YAAU,QAAQ,IAAI;AAGtB,YAAU,IAAI,MAAM,MAAM;AAE1B,SAAO,WAAW,QAAQ,SAAS;AACrC;AAKA,SAAS,WAAW,MAAc,MAA8B;AAC9D,QAAM,QAAQ,IAAI,WAAW,IAAI,IAAI,KAAK,SAAS,CAAC;AAGpD,gBAAc,OAAO,GAAG,KAAK,MAAM;AAGnC,WAAS,IAAI,GAAG,IAAI,GAAG,KAAK;AAC1B,UAAM,IAAI,CAAC,IAAI,KAAK,WAAW,CAAC;AAAA,EAClC;AAGA,QAAM,IAAI,MAAM,CAAC;AAGjB,QAAM,UAAU,MAAM,MAAM,GAAG,IAAI,KAAK,MAAM;AAC9C,QAAM,MAAM,eAAe,OAAO;AAClC,gBAAc,OAAO,IAAI,KAAK,QAAQ,GAAG;AAEzC,SAAO;AACT;AAKA,SAAS,aAAa,KAAyB;AAC7C,QAAM,QAAQ,IAAI,WAAW,IAAI,MAAM;AACvC,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK;AACnC,UAAM,CAAC,IAAI,IAAI,WAAW,CAAC,IAAI;AAAA,EACjC;AACA,SAAO;AACT;AAKA,SAAS,WAAW,KAAyB;AAC3C,SAAO,IAAI,YAAY,EAAE,OAAO,GAAG;AACrC;AAOA,IAAM,YAAY,aAAa;AAK/B,SAAS,eAA4B;AACnC,QAAM,QAAQ,IAAI,YAAY,GAAG;AACjC,WAAS,IAAI,GAAG,IAAI,KAAK,KAAK;AAC5B,QAAI,IAAI;AACR,aAAS,IAAI,GAAG,IAAI,GAAG,KAAK;AAC1B,UAAI,IAAI,GAAG;AACT,YAAI,aAAc,MAAM;AAAA,MAC1B,OAAO;AACL,YAAI,MAAM;AAAA,MACZ;AAAA,IACF;AACA,UAAM,CAAC,IAAI,MAAM;AAAA,EACnB;AACA,SAAO;AACT;AAKA,SAAS,eAAe,MAA0B;AAChD,MAAI,MAAM;AACV,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,WAAO,WAAW,OAAO,KAAK,CAAC,KAAK,MAAM,GAAI,KAAK,KAAM,QAAQ;AAAA,EACnE;AACA,UAAQ,MAAM,gBAAgB;AAChC;;;ACvSA,IAAM,iBAAiB,IAAI,WAAW,CAAC,IAAM,IAAM,IAAM,EAAI,CAAC;AAG9D,IAAM,cAAc,IAAI,WAAW,CAAC,IAAM,IAAM,IAAM,EAAI,CAAC;AAG3D,IAAME,mBAAkB,IAAI,WAAW,CAAC,IAAM,IAAM,IAAM,EAAI,CAAC;AAYxD,SAAS,kBACd,MACA,UACiB;AAEjB,MAAI,CAAC,OAAO,IAAI,GAAG;AACjB,WAAO,OAAO,MAAM,EAAE,MAAM,mBAAmB,CAAC;AAAA,EAClD;AAGA,QAAM,gBAAgB,qBAAqB,IAAI;AAC/C,MAAI,CAAC,cAAc,IAAI;AACrB,WAAO;AAAA,EACT;AAEA,QAAM,EAAE,OAAO,IAAI,cAAc;AAGjC,QAAM,YAAY,eAAe,QAAQ;AAGzC,MAAI,cAAc;AAClB,aAAW,SAAS,QAAQ;AAC1B,mBAAe,MAAM;AAAA,EACvB;AACA,MAAI,WAAW;AACb,mBAAe,UAAU;AAAA,EAC3B;AAGA,QAAM,SAAS,IAAI,WAAW,IAAI,WAAW;AAC7C,MAAI,SAAS;AAGb,SAAO,IAAI,gBAAgB,MAAM;AACjC,YAAU;AACV,gBAAc,QAAQ,QAAQ,WAAW;AACzC,YAAU;AAGV,SAAO,IAAI,aAAa,MAAM;AAC9B,YAAU;AAIV,MAAI,cAAc;AAElB,aAAW,SAAS,QAAQ;AAE1B,WAAO,IAAI,OAAO,MAAM;AACxB,cAAU,MAAM;AAGhB,QAAI,CAAC,eAAe,aAAa,aAAa,KAAK,GAAG;AACpD,aAAO,IAAI,WAAW,MAAM;AAC5B,gBAAU,UAAU;AACpB,oBAAc;AAAA,IAChB;AAAA,EACF;AAGA,MAAI,CAAC,eAAe,WAAW;AAC7B,WAAO,IAAI,WAAW,MAAM;AAAA,EAC9B;AAEA,SAAO,OAAO,GAAG,MAAM;AACzB;AAKA,SAAS,aAAa,OAA4B;AAChD,MAAI,MAAM,SAAS,EAAG,QAAO;AAC7B,QAAM,OAAO,OAAO;AAAA,IAClB,MAAM,CAAC,KAAK;AAAA,IACZ,MAAM,CAAC,KAAK;AAAA,IACZ,MAAM,CAAC,KAAK;AAAA,IACZ,MAAM,CAAC,KAAK;AAAA,EACd;AACA,SAAO,SAAS,UAAU,SAAS,UAAU,SAAS;AACxD;AAKA,SAAS,qBACP,MAIA;AACA,QAAM,SAAuB,CAAC;AAC9B,MAAI,iBAAiB;AAGrB,MAAI,SAAS;AAEb,SAAO,SAAS,KAAK,SAAS,GAAG;AAE/B,UAAM,YAAY,KAAK,MAAM,QAAQ,SAAS,CAAC;AAC/C,UAAM,UAAU,OAAO;AAAA,MACrB,UAAU,CAAC,KAAK;AAAA,MAChB,UAAU,CAAC,KAAK;AAAA,MAChB,UAAU,CAAC,KAAK;AAAA,MAChB,UAAU,CAAC,KAAK;AAAA,IAClB;AAEA,QAAI,CAAC,gBAAgB;AACnB,uBAAiB;AAAA,IACnB;AAGA,UAAM,aACH,KAAK,SAAS,CAAC,KAAK,MACnB,KAAK,SAAS,CAAC,KAAK,MAAM,KAC1B,KAAK,SAAS,CAAC,KAAK,MAAM,MAC1B,KAAK,SAAS,CAAC,KAAK,MAAM;AAG9B,QAAI,SAAS,IAAI,YAAY,KAAK,QAAQ;AACxC,aAAO,OAAO,MAAM;AAAA,QAClB,MAAM;AAAA,QACN,SAAS,uCAAuC,MAAM;AAAA,MACxD,CAAC;AAAA,IACH;AAGA,QAAI,CAAC,YAAY,WAAWA,gBAAe,GAAG;AAE5C,YAAMC,cAAa,YAAa,YAAY;AAC5C,YAAM,YAAY,KAAK,MAAM,QA
AQ,SAAS,IAAIA,WAAU;AAC5D,aAAO,KAAK,SAAS;AAAA,IACvB;AAIA,UAAM,aAAa,YAAa,YAAY;AAC5C,cAAU,IAAI;AAAA,EAChB;AAEA,SAAO,OAAO,GAAG,EAAE,QAAQ,eAAe,CAAC;AAC7C;AAKA,SAAS,eAAe,UAAgD;AAEtE,QAAM,eAAe,SAAS;AAAA,IAC5B,CAAC,MACC,EAAE,OAAO,SAAS,qBAClB,EAAE,OAAO,SAAS,0BAClB,EAAE,OAAO,SAAS;AAAA,EACtB;AAEA,MAAI,aAAa,WAAW,GAAG;AAC7B,WAAO;AAAA,EACT;AAEA,QAAM,WAAW,kBAAkB,YAAY;AAE/C,MAAI,SAAS,WAAW,GAAG;AACzB,WAAO;AAAA,EACT;AAGA,QAAM,YAAY,SAAS;AAC3B,QAAM,aAAa,YAAa,YAAY;AAC5C,QAAM,QAAQ,IAAI,WAAW,IAAI,UAAU;AAE3C,QAAM,IAAID,kBAAiB,CAAC;AAC5B,gBAAc,OAAO,GAAG,SAAS;AACjC,QAAM,IAAI,UAAU,CAAC;AAErB,SAAO;AACT;;;AC9IO,SAAS,MACd,MACA,UACA,SACa;AACb,QAAM,eAAe,aAAa,IAAI;AACtC,MAAI,CAAC,cAAc;AACjB,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,MAAI,SAAS,WAAW,SAAS;AAE/B,UAAM,SAASE,SAAQ,YAAY,EAAE,WAAW,MAAM,CAAC,CAAC;AACxD,QAAI,CAAC,OAAO,IAAI;AACd,aAAO,OAAO,MAAM,EAAE,MAAM,eAAe,SAAS,OAAO,MAAM,KAAK,CAAC;AAAA,IACzE;AACA,WAAO,OAAO,GAAG,OAAO,KAAK;AAAA,EAC/B;AAEA,MAAI,SAAS,WAAW,WAAW;AACjC,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAMA,QAAM,mBAAmB;AAAA,IACvB;AAAA,IACA;AAAA,IACA,SAAS,SAAS;AAAA,EACpB;AAEA,MAAI,CAAC,iBAAiB,IAAI;AACxB,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS,+BAA+B,iBAAiB,MAAM,IAAI;AAAA,IACrE,CAAC;AAAA,EACH;AAEA,QAAM,SAAS,iBAAiB;AAGhC,MAAI,iBAAiB,SAAS,OAAO,WAAW,OAAO;AACrD,UAAM,SAAS,iBAAiB,MAAM,OAAO,MAAM;AACnD,QAAI,CAAC,OAAO;AACV,aAAO,OAAO,MAAM,EAAE,MAAM,eAAe,SAAS,OAAO,MAAM,KAAK,CAAC;AACzE,WAAO,OAAO,GAAG,OAAO,KAAK;AAAA,EAC/B;AAEA,MAAI,iBAAiB,UAAU,OAAO,WAAW,QAAQ;AACvD,UAAM,SAAS,kBAAkB,MAAM,OAAO,QAAQ;AACtD,QAAI,CAAC,OAAO;AACV,aAAO,OAAO,MAAM,EAAE,MAAM,eAAe,SAAS,OAAO,MAAM,KAAK,CAAC;AACzE,WAAO,OAAO,GAAG,OAAO,KAAK;AAAA,EAC/B;AAEA,MAAI,iBAAiB,UAAU,OAAO,WAAW,QAAQ;AACvD,UAAM,SAAS,kBAAkB,MAAM,OAAO,QAAQ;AACtD,QAAI,CAAC,OAAO;AACV,aAAO,OAAO,MAAM,EAAE,MAAM,eAAe,SAAS,OAAO,MAAM,KAAK,CAAC;AACzE,WAAO,OAAO,GAAG,OAAO,KAAK;AAAA,EAC/B;AAEA,SAAO,OAAO,MAAM;AAAA,IAClB,MAAM;AAAA,IACN,SAAS;AAAA,EACX,CAAC;AACH;AAOA,IAAMA,WAAU;AAAA,EACd,KAAK;AAAA,IACH,YAAY;AAAA,EACd;AAAA,EACA,MAAM;AAAA,IACJ,YAAY;AAAA,EACd;AAAA,EACA,MAAM;AAAA,IACJ,YAAY;AAAA,EACd;AACF;;;AC9HA,SAAS,qBAAqB,MAAsB;AAClD,SAAO,KAAK,QAAQ,SAAS,IAAI,EAAE,QAAQ,OAAO,IAAI;AACxD;AAYA,SAAS,kBACP,OACA,SAC2B;AAE3B,MAAI,OAAO;AACT,WAAO;AAAA,EACT;AAGA,MAAI,SAAS;AACX,WAAO;AAAA,MACL,OAAO,QAAQ;AAAA,MACf,UAAU,QAAQ;AAAA;AAAA,IAEpB;AAAA,EACF;AAEA,SAAO;AACT;AAUA,SAAS,kBAAkB,UAAsC;AAC/D,QAAM,QAAkB,CAAC;AAGzB,MAAI,SAAS,UAAU,UAAU,QAAW;AAC1C,UAAM,KAAK,UAAU,SAAS,SAAS,KAAK,EAAE;AAAA,EAChD;AAEA,MAAI,SAAS,UAAU,SAAS;AAC9B,UAAM,KAAK,YAAY,SAAS,SAAS,OAAO,EAAE;AAAA,EACpD;AAEA,MAAI,SAAS,UAAU,WAAW;AAChC,UAAM,KAAK,kBAAkB,SAAS,SAAS,SAAS,EAAE;AAAA,EAC5D;AAEA,MAAI,SAAS,UAAU,QAAQ,QAAW;AACxC,UAAM,KAAK,cAAc,SAAS,SAAS,GAAG,EAAE;AAAA,EAClD;AAEA,MAAI,SAAS,UAAU,SAAS,QAAW;AACzC,UAAM,KAAK,SAAS,SAAS,SAAS,IAAI,EAAE;AAAA,EAC9C;AAGA,MAAI,SAAS,QAAQ,KAAK,SAAS,SAAS,GAAG;AAC7C,UAAM,KAAK,SAAS,SAAS,KAAK,IAAI,SAAS,MAAM,EAAE;AAAA,EACzD;AAGA,MAAI,SAAS,OAAO,MAAM;AACxB,UAAM,KAAK,eAAe,SAAS,MAAM,IAAI,EAAE;AAAA,EACjD;AAEA,MAAI,SAAS,OAAO,MAAM;AACxB,UAAM,KAAK,UAAU,SAAS,MAAM,IAAI,EAAE;AAAA,EAC5C;AAGA,MAAI,SAAS,UAAU,aAAa,QAAW;AAC7C,UAAM,KAAK,cAAc,SAAS,SAAS,QAAQ,EAAE;AAAA,EACvD;AAGA,QAAM,cAAc,kBAAkB,SAAS,OAAO,SAAS,OAAO;AAEtE,MAAI,aAAa;AACf,QAAI,YAAY,YAAY,QAAW;AACrC,YAAM,KAAK,uBAAuB,YAAY,OAAO,EAAE;AAAA,IACzD;AAEA,QAAI,YAAY,UAAU,QAAW;AACnC,YAAM,KAAK,kBAAkB,YAAY,KAAK,EAAE;AAAA,IAClD;AAEA,QAAI,YAAY,UAAU,QAAW;AACnC,YAAM,KAAK,gBAAgB,YAAY,KAAK,EAAE;AAAA,IAChD;AAEA,QAAI,YAAY,UAAU;AACxB,YAAM,KAAK,mBAAmB,YAAY,QAAQ,EAAE;AAAA,IACtD;AAAA,EACF;AAEA,SAAO,MAAM,KAAK,IAAI;AACxB;AAWA,SAAS,6BAA6B,UAAqC;AACzE,MAAI,CAAC,SAAS,oBAAoB,SAAS,iBAAiB,WAAW,
GAAG;AACxE,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,QAAkB,CAAC;AAEzB,aAAW,CAAC,OAAO,EAAE,KAAK,SAAS,iBAAiB,QAAQ,GAAG;AAC7D,UAAM,eAAe,QAAQ;AAC7B,UAAM,SAAS,GAAG,SAAS,KAAK,GAAG,OAAO,CAAC,KAAK,GAAG,OAAO,CAAC,MAAM;AAGjE,UAAM,KAAK,eAAe,YAAY,GAAG,MAAM,GAAG;AAGlD,UAAM,KAAK,qBAAqB,GAAG,MAAM,CAAC;AAAA,EAC5C;AAEA,SAAO;AACT;AAmCO,SAAS,cAAc,UAAsC;AAClE,QAAM,WAAqB,CAAC;AAG5B,WAAS,KAAK,qBAAqB,SAAS,MAAM,CAAC;AAGnD,MAAI,SAAS,aAAa,WAAW;AACnC,UAAM,iBAAiB,6BAA6B,QAAQ;AAC5D,QAAI,eAAe,SAAS,GAAG;AAC7B,eAAS,KAAK,eAAe,KAAK,IAAI,CAAC;AAAA,IACzC;AAAA,EACF;AAGA,MAAI,SAAS,gBAAgB;AAC3B,aAAS;AAAA,MACP,oBAAoB,qBAAqB,SAAS,cAAc,CAAC;AAAA,IACnE;AAAA,EACF;AAGA,QAAM,eAAe,kBAAkB,QAAQ;AAC/C,MAAI,cAAc;AAChB,aAAS,KAAK,YAAY;AAAA,EAC5B;AAGA,SAAO,SAAS,KAAK,IAAI;AAC3B;;;ACjKO,SAAS,aACd,MACA,UACa;AAEb,QAAM,SAAS,aAAa,IAAI;AAChC,MAAI,CAAC,QAAQ;AACX,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,OAAO,cAAc,QAAQ;AAGnC,MAAI;AAKJ,MAAI,WAAW,OAAO;AAEpB,UAAM,SAAS,gBAAgB,IAAI;AACnC,kBAAc,iBAAiB,MAAM,MAAM;AAAA,EAC7C,WAAW,WAAW,QAAQ;AAE5B,UAAM,WAAW,mBAAmB,IAAI;AACxC,kBAAc,kBAAkB,MAAM,QAAQ;AAAA,EAChD,WAAW,WAAW,QAAQ;AAE5B,UAAM,WAAW,mBAAmB,IAAI;AACxC,kBAAc,kBAAkB,MAAM,QAAQ;AAAA,EAChD,OAAO;AAEL,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,MAAI,CAAC,YAAY,IAAI;AACnB,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS,YAAY,MAAM;AAAA,IAC7B,CAAC;AAAA,EACH;AAEA,SAAO,OAAO,GAAG,YAAY,KAAK;AACpC;AAUA,SAAS,gBAAgB,MAA8B;AACrD,QAAM,WAAW,oBAAoB,OAAO;AAC5C,SAAO,mBAAmB,cAAc,MAAM,QAAQ;AACxD;AAQA,SAAS,mBAAmB,MAAiC;AAC3D,SAAO;AAAA,IACL;AAAA,MACE,QAAQ,EAAE,MAAM,kBAAkB;AAAA,MAClC,MAAM;AAAA,IACR;AAAA,EACF;AACF;;;ACzGO,SAAS,UAAU,KAA0B;AAClD,UAAQ,IAAI,QAAQ;AAAA,IAClB,KAAK;AACH,aAAO,IAAI,OAAO,IAAI,CAAC,UAAU,MAAM,IAAI,EAAE,KAAK,MAAM;AAAA,IAE1D,KAAK;AAAA,IACL,KAAK;AACH,aAAO,IAAI,SAAS,IAAI,CAAC,YAAY,QAAQ,IAAI,EAAE,KAAK,MAAM;AAAA,EAClE;AACF;","names":["settingsStart","PNG_SIGNATURE_LENGTH","APP1_MARKER","COM_MARKER","EXIF_HEADER","EXIF_CHUNK_TYPE","paddedSize","HELPERS"]}
1
+ {"version":3,"sources":["../src/types.ts","../src/parsers/a1111.ts","../src/utils/entries.ts","../src/utils/json.ts","../src/parsers/comfyui.ts","../src/parsers/detect.ts","../src/parsers/easydiffusion.ts","../src/parsers/fooocus.ts","../src/parsers/hf-space.ts","../src/parsers/invokeai.ts","../src/parsers/novelai.ts","../src/parsers/ruined-fooocus.ts","../src/parsers/stability-matrix.ts","../src/parsers/swarmui.ts","../src/parsers/tensorart.ts","../src/parsers/index.ts","../src/utils/binary.ts","../src/utils/exif-constants.ts","../src/readers/exif.ts","../src/readers/jpeg.ts","../src/readers/png.ts","../src/readers/webp.ts","../src/utils/convert.ts","../src/api/read.ts","../src/converters/utils.ts","../src/converters/chunk-encoding.ts","../src/converters/a1111.ts","../src/converters/base-json.ts","../src/converters/comfyui.ts","../src/converters/easydiffusion.ts","../src/converters/invokeai.ts","../src/converters/novelai.ts","../src/converters/simple-chunk.ts","../src/converters/swarmui.ts","../src/converters/index.ts","../src/writers/exif.ts","../src/writers/jpeg.ts","../src/writers/png.ts","../src/writers/webp.ts","../src/api/write.ts","../src/serializers/a1111.ts","../src/api/write-webui.ts","../src/serializers/raw.ts"],"sourcesContent":["/**\n * Result type for explicit error handling\n */\nexport type Result<T, E> = { ok: true; value: T } | { ok: false; error: E };\n\n/**\n * Helper functions for Result type\n */\nexport const Result = {\n ok: <T, E>(value: T): Result<T, E> => ({ ok: true, value }),\n error: <T, E>(error: E): Result<T, E> => ({ ok: false, error }),\n};\n\n// ============================================================================\n// PNG Metadata Types\n// ============================================================================\n\n/**\n * Error types for PNG reading\n */\nexport type PngReadError =\n | { type: 'invalidSignature' }\n | { type: 'corruptedChunk'; message: string };\n\n/**\n * Result type for PNG metadata reading\n */\nexport type PngMetadataResult = Result<PngTextChunk[], PngReadError>;\n\n/**\n * Error types for PNG writing\n */\ntype PngWriteError = { type: 'invalidSignature' } | { type: 'noIhdrChunk' };\n\n/**\n * Result type for PNG metadata writing\n */\nexport type PngWriteResult = Result<Uint8Array, PngWriteError>;\n\n// ============================================================================\n// JPEG Writer Types\n// ============================================================================\n\n/**\n * Error types for JPEG writing\n */\ntype JpegWriteError =\n | { type: 'invalidSignature' }\n | { type: 'corruptedStructure'; message: string };\n\n/**\n * Result type for JPEG metadata writing\n */\nexport type JpegWriteResult = Result<Uint8Array, JpegWriteError>;\n\n// ============================================================================\n// WebP Writer Types\n// ============================================================================\n\n/**\n * Error types for WebP writing\n */\ntype WebpWriteError =\n | { type: 'invalidSignature' }\n | { type: 'invalidRiffStructure'; message: string };\n\n/**\n * Result type for WebP metadata writing\n */\nexport type WebpWriteResult = Result<Uint8Array, WebpWriteError>;\n\n/**\n * PNG text chunk (tEXt or iTXt)\n */\nexport type PngTextChunk = TExtChunk | ITXtChunk;\n\n// ============================================================================\n// Exif Metadata Types (shared between JPEG/WebP)\n// ============================================================================\n\n/**\n 
* Source location of a metadata segment.\n * Used for round-tripping: reading and writing back to the correct location.\n */\nexport type MetadataSegmentSource =\n | { type: 'exifUserComment' }\n | { type: 'exifImageDescription'; prefix?: string }\n | { type: 'exifMake'; prefix?: string }\n | { type: 'exifSoftware' }\n | { type: 'exifDocumentName' }\n | { type: 'jpegCom' };\n\n/**\n * A single metadata segment with source tracking\n */\nexport interface MetadataSegment {\n /** Source location of this segment */\n source: MetadataSegmentSource;\n /** Raw data string */\n data: string;\n}\n\n// ============================================================================\n// Format-Agnostic Metadata Types\n// ============================================================================\n\n/**\n * A single metadata entry (keyword + text)\n *\n * This is a format-agnostic representation used by parsers.\n */\nexport interface MetadataEntry {\n /** Entry keyword (e.g., 'parameters', 'Comment', 'prompt') */\n keyword: string;\n /** Text content */\n text: string;\n}\n\n/**\n * Raw metadata for write-back (preserves original format)\n */\nexport type RawMetadata =\n | { format: 'png'; chunks: PngTextChunk[] }\n | { format: 'jpeg'; segments: MetadataSegment[] }\n | { format: 'webp'; segments: MetadataSegment[] };\n\n/**\n * Error types for JPEG reading\n */\ntype JpegReadError =\n | { type: 'invalidSignature' }\n | { type: 'parseError'; message: string };\n\n/**\n * Result type for JPEG metadata reading\n */\nexport type JpegMetadataResult = Result<MetadataSegment[], JpegReadError>;\n\n// ============================================================================\n// WebP Metadata Types\n// ============================================================================\n\n/**\n * Error types for WebP reading\n */\ntype WebpReadError =\n | { type: 'invalidSignature' }\n | { type: 'parseError'; message: string };\n\n/**\n * Result type for WebP metadata reading\n */\nexport type WebpMetadataResult = Result<MetadataSegment[], WebpReadError>;\n\n/**\n * tEXt chunk (Latin-1 encoded text)\n */\nexport interface TExtChunk {\n type: 'tEXt';\n /** Chunk keyword (e.g., 'parameters', 'Comment') */\n keyword: string;\n /** Text content */\n text: string;\n}\n\n/**\n * iTXt chunk (UTF-8 encoded international text)\n */\nexport interface ITXtChunk {\n type: 'iTXt';\n /** Chunk keyword */\n keyword: string;\n /** Compression flag (0=uncompressed, 1=compressed) */\n compressionFlag: number;\n /** Compression method (0=zlib/deflate) */\n compressionMethod: number;\n /** Language tag (BCP 47) */\n languageTag: string;\n /** Translated keyword */\n translatedKeyword: string;\n /** Text content */\n text: string;\n}\n\n/**\n * Known AI image generation software\n */\nexport type GenerationSoftware =\n | 'novelai'\n | 'comfyui'\n | 'swarmui'\n | 'tensorart'\n | 'stability-matrix'\n | 'invokeai'\n | 'forge-neo'\n | 'forge'\n | 'sd-webui'\n | 'sd-next'\n | 'civitai'\n | 'hf-space'\n | 'easydiffusion'\n | 'fooocus'\n | 'ruined-fooocus';\n\n// ============================================================================\n// Unified Metadata Types\n// ============================================================================\n\n/**\n * Base metadata fields shared by all tools\n */\ninterface BaseMetadata {\n /** Positive prompt */\n prompt: string;\n /** Negative prompt */\n negativePrompt: string;\n /** Model settings */\n model?: ModelSettings;\n /** Sampling settings */\n sampling?: SamplingSettings;\n /** Hires.fix 
settings (if applied) */\n hires?: HiresSettings;\n /** Upscale settings (if applied) */\n upscale?: UpscaleSettings;\n /** Image width */\n width: number;\n /** Image height */\n height: number;\n}\n\n/**\n * NovelAI-specific metadata\n */\nexport interface NovelAIMetadata extends BaseMetadata {\n software: 'novelai';\n /** V4 character prompts (when using character placement) */\n characterPrompts?: CharacterPrompt[];\n /** Use character coordinates for placement */\n useCoords?: boolean;\n /** Use character order */\n useOrder?: boolean;\n}\n\n/**\n * Character prompt with position (NovelAI V4)\n */\nexport interface CharacterPrompt {\n /** Character-specific prompt */\n prompt: string;\n /** Character position (normalized 0-1) */\n center?: { x: number; y: number };\n}\n\n/**\n * ComfyUI-format metadata (ComfyUI, TensorArt, Stability Matrix)\n *\n * These tools use ComfyUI-compatible workflow format.\n */\n/**\n * ComfyUI node reference (for node outputs)\n *\n * Format: [nodeId, outputIndex]\n * Example: [\"CheckpointLoader_Base\", 0]\n */\nexport type ComfyNodeReference = [nodeId: string, outputIndex: number];\n\n/**\n * ComfyUI node input value\n */\nexport type ComfyNodeInputValue =\n | string\n | number\n | boolean\n | ComfyNodeReference\n | ComfyNodeInputValue[];\n\n/**\n * ComfyUI node structure\n */\nexport interface ComfyNode {\n /** Node class type (e.g., \"CheckpointLoaderSimple\", \"KSampler\") */\n class_type: string;\n /** Node inputs */\n inputs: Record<string, ComfyNodeInputValue>;\n /** Node metadata (ComfyUI only) */\n _meta?: {\n /** Node title for display */\n title?: string;\n };\n /** Change detection hash (rare, for caching) */\n is_changed?: string[] | null;\n}\n\n/**\n * ComfyUI node graph\n *\n * Maps node IDs to their corresponding node data.\n */\nexport type ComfyNodeGraph = Record<string, ComfyNode>;\n\n/**\n * ComfyUI-format metadata (ComfyUI, TensorArt, Stability Matrix)\n *\n * These tools always have nodes in all formats.\n */\nexport interface BasicComfyUIMetadata extends BaseMetadata {\n software: 'comfyui' | 'tensorart' | 'stability-matrix';\n /**\n * ComfyUI node graph (required)\n *\n * Always present in all image formats (PNG, JPEG, WebP).\n * Structure: Record<nodeId, ComfyNode> where ComfyNode contains inputs and class_type.\n */\n nodes: ComfyNodeGraph;\n}\n\n/**\n * SwarmUI-specific metadata\n *\n * SwarmUI uses ComfyUI workflow format but nodes are only present in PNG.\n */\nexport interface SwarmUIMetadata extends BaseMetadata {\n software: 'swarmui';\n /**\n * ComfyUI node graph (optional for SwarmUI)\n *\n * Only present in PNG format. 
JPEG/WebP contain SwarmUI parameters only.\n * Structure: Record<nodeId, ComfyNode> where ComfyNode contains inputs and class_type.\n */\n nodes?: ComfyNodeGraph;\n}\n\n/**\n * ComfyUI-format metadata (union of BasicComfyUI and SwarmUI)\n *\n * This is a union type to handle different node graph requirements:\n * - ComfyUI/TensorArt/Stability Matrix: nodes are always present\n * - SwarmUI: nodes are only present in PNG format\n */\nexport type ComfyUIMetadata = BasicComfyUIMetadata | SwarmUIMetadata;\n\n/**\n * Standard metadata (SD WebUI, Forge, InvokeAI, and others)\n *\n * Baseline generation metadata without tool-specific extensions.\n * Used by most SD tools that don't require special features like\n * NovelAI's character prompts or ComfyUI's node graphs.\n */\nexport interface StandardMetadata extends BaseMetadata {\n software:\n | 'sd-webui'\n | 'sd-next'\n | 'forge'\n | 'forge-neo'\n | 'invokeai'\n | 'civitai'\n | 'hf-space'\n | 'easydiffusion'\n | 'fooocus'\n | 'ruined-fooocus';\n}\n\n/**\n * Unified generation metadata (discriminated union)\n *\n * Use `metadata.software` to narrow by specific tool:\n * ```typescript\n * if (metadata.software === 'comfyui' ||\n * metadata.software === 'tensorart' ||\n * metadata.software === 'stability-matrix' ||\n * metadata.software === 'swarmui') {\n * // TypeScript knows metadata is ComfyUIMetadata\n * if (metadata.nodes) {\n * // Access workflow graph\n * }\n * }\n * ```\n */\nexport type GenerationMetadata =\n | NovelAIMetadata\n | ComfyUIMetadata\n | StandardMetadata;\n\n/**\n * Model settings\n */\nexport interface ModelSettings {\n /** Model name */\n name?: string;\n /** Model hash */\n hash?: string;\n /** VAE name */\n vae?: string;\n}\n\n/**\n * Sampling settings\n */\nexport interface SamplingSettings {\n /** Sampler name */\n sampler?: string;\n /** Scheduler (sometimes included in sampler, sometimes separate) */\n scheduler?: string;\n /** Sampling steps */\n steps?: number;\n /** CFG scale */\n cfg?: number;\n /** Random seed */\n seed?: number;\n /** CLIP skip layers */\n clipSkip?: number;\n}\n\n/**\n * Hires.fix settings\n */\nexport interface HiresSettings {\n /** Upscale factor */\n scale?: number;\n /** Upscaler name */\n upscaler?: string;\n /** Hires steps */\n steps?: number;\n /** Hires denoising strength */\n denoise?: number;\n}\n\n/**\n * Upscale settings (post-generation)\n */\nexport interface UpscaleSettings {\n /** Upscaler name */\n upscaler?: string;\n /** Scale factor */\n scale?: number;\n}\n\n/**\n * Parse error types\n */\ntype ParseError =\n | { type: 'unsupportedFormat' }\n | { type: 'parseError'; message: string };\n\n/**\n * Result type for internal parsers\n */\nexport type InternalParseResult = Result<GenerationMetadata, ParseError>;\n\n/**\n * Parse result with 4-status design\n *\n * - `success`: Parsing succeeded, metadata and raw data available\n * - `empty`: No metadata found in the file\n * - `unrecognized`: Metadata exists but format is not recognized\n * - `invalid`: File is corrupted or not a valid image\n */\nexport type ParseResult =\n | { status: 'success'; metadata: GenerationMetadata; raw: RawMetadata }\n | { status: 'empty' }\n | { status: 'unrecognized'; raw: RawMetadata }\n | { status: 'invalid'; message?: string };\n\n// ============================================================================\n// Metadata Conversion Types\n// ============================================================================\n\n/**\n * Target format for metadata conversion\n */\nexport type 
ConversionTargetFormat = 'png' | 'jpeg' | 'webp';\n\n/**\n * Conversion error types\n */\ntype ConversionError =\n | { type: 'unsupportedSoftware'; software: string }\n | { type: 'invalidParseResult'; status: string }\n | { type: 'missingRawData' }\n | { type: 'parseError'; message: string };\n\n/**\n * Result type for metadata conversion\n */\nexport type ConversionResult = Result<RawMetadata, ConversionError>;\n","import type {\n InternalParseResult,\n MetadataEntry,\n StandardMetadata,\n} from '../types';\nimport { Result } from '../types';\n\n/**\n * Parse A1111-format metadata from entries\n *\n * A1111 format is used by:\n * - Stable Diffusion WebUI (AUTOMATIC1111)\n * - Forge\n * - Forge Neo\n * - Civitai\n * - Animagine\n *\n * Format:\n * ```\n * positive prompt\n * Negative prompt: negative prompt\n * Steps: 20, Sampler: Euler a, Schedule type: Automatic, CFG scale: 7, ...\n * ```\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseA1111(entries: MetadataEntry[]): InternalParseResult {\n // Find parameters entry (PNG uses 'parameters', JPEG/WebP uses 'Comment')\n const parametersEntry = entries.find(\n (e) => e.keyword === 'parameters' || e.keyword === 'Comment',\n );\n if (!parametersEntry) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n const text = parametersEntry.text;\n\n // Validate that this is AI-generated metadata by checking for typical markers\n // This prevents false positives from retouch software or other non-AI tools\n const hasAIMarkers =\n text.includes('Steps:') ||\n text.includes('Sampler:') ||\n text.includes('Negative prompt:');\n\n if (!hasAIMarkers) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Parse the text into sections\n const { prompt, negativePrompt, settings } = parseParametersText(text);\n\n // Parse settings key-value pairs\n const settingsMap = parseSettings(settings);\n\n // Extract dimensions (optional, defaults to \"0x0\" like SD Prompt Reader)\n const size = settingsMap.get('Size') ?? '0x0';\n const [width, height] = parseSize(size);\n\n // Determine software variant\n const version = settingsMap.get('Version');\n const app = settingsMap.get('App');\n const software = detectSoftwareVariant(version, app);\n\n // Build metadata\n const metadata: Omit<StandardMetadata, 'raw'> = {\n software,\n prompt,\n negativePrompt,\n width,\n height,\n };\n\n // Add model settings\n const modelName = settingsMap.get('Model');\n const modelHash = settingsMap.get('Model hash');\n if (modelName || modelHash) {\n metadata.model = {\n name: modelName,\n hash: modelHash,\n };\n }\n\n // Add sampling settings\n const sampler = settingsMap.get('Sampler');\n const scheduler = settingsMap.get('Schedule type');\n const steps = parseNumber(settingsMap.get('Steps'));\n const cfg = parseNumber(\n settingsMap.get('CFG scale') ?? 
settingsMap.get('CFG Scale'),\n );\n const seed = parseNumber(settingsMap.get('Seed'));\n const clipSkip = parseNumber(settingsMap.get('Clip skip'));\n\n if (\n sampler !== undefined ||\n scheduler !== undefined ||\n steps !== undefined ||\n cfg !== undefined ||\n seed !== undefined ||\n clipSkip !== undefined\n ) {\n metadata.sampling = {\n sampler,\n scheduler,\n steps,\n cfg,\n seed,\n clipSkip,\n };\n }\n\n // Add hires settings\n const hiresScale = parseNumber(settingsMap.get('Hires upscale'));\n const upscaler = settingsMap.get('Hires upscaler');\n const hiresSteps = parseNumber(settingsMap.get('Hires steps'));\n const denoise = parseNumber(settingsMap.get('Denoising strength'));\n const hiresSize = settingsMap.get('Hires size');\n\n if (\n [hiresScale, hiresSize, upscaler, hiresSteps, denoise].some(\n (v) => v !== undefined,\n )\n ) {\n const [hiresWidth] = parseSize(hiresSize ?? '');\n const scale = hiresScale ?? hiresWidth / width;\n metadata.hires = { scale, upscaler, steps: hiresSteps, denoise };\n }\n\n return Result.ok(metadata);\n}\n\n/**\n * Parse parameters text into prompt, negative prompt, and settings\n */\nfunction parseParametersText(text: string): {\n prompt: string;\n negativePrompt: string;\n settings: string;\n} {\n // Find \"Negative prompt:\" marker\n const negativeIndex = text.indexOf('Negative prompt:');\n\n // Find the settings line (starts after the last newline before \"Steps:\")\n const stepsIndex = text.indexOf('Steps:');\n\n if (negativeIndex === -1 && stepsIndex === -1) {\n // No negative prompt, no settings - just prompt\n return { prompt: text.trim(), negativePrompt: '', settings: '' };\n }\n\n if (negativeIndex === -1) {\n // No negative prompt\n const settingsStart = text.lastIndexOf('\\n', stepsIndex);\n return {\n prompt: text.slice(0, settingsStart).trim(),\n negativePrompt: '',\n settings: text.slice(settingsStart).trim(),\n };\n }\n\n if (stepsIndex === -1) {\n // No settings (unusual)\n return {\n prompt: text.slice(0, negativeIndex).trim(),\n negativePrompt: text.slice(negativeIndex + 16).trim(),\n settings: '',\n };\n }\n\n // Both exist: find where negative prompt ends and settings begin\n const settingsStart = text.lastIndexOf('\\n', stepsIndex);\n\n return {\n prompt: text.slice(0, negativeIndex).trim(),\n negativePrompt: text.slice(negativeIndex + 16, settingsStart).trim(),\n settings: text.slice(settingsStart).trim(),\n };\n}\n\n/**\n * Parse settings line into key-value map\n *\n * Format: \"Key1: value1, Key2: value2, ...\"\n * Note: Values may contain commas (e.g., model names), so we parse carefully\n */\nfunction parseSettings(settings: string): Map<string, string> {\n const result = new Map<string, string>();\n if (!settings) return result;\n\n // Match \"Key: value\" pairs\n // Key is word characters (including spaces before colon)\n // Value continues until next \"Key:\" pattern or end\n const regex =\n /([A-Za-z][A-Za-z0-9 ]*?):\\s*([^,]+?)(?=,\\s*[A-Za-z][A-Za-z0-9 ]*?:|$)/g;\n\n // Use matchAll for functional iteration\n const matches = Array.from(settings.matchAll(regex));\n\n for (const match of matches) {\n const key = (match[1] ?? '').trim();\n const value = (match[2] ?? '').trim();\n result.set(key, value);\n }\n\n return result;\n}\n\n/**\n * Parse \"WxH\" size string\n */\nfunction parseSize(size: string): [number, number] {\n const match = size.match(/(\\d+)x(\\d+)/);\n if (!match) return [0, 0];\n return [\n Number.parseInt(match[1] ?? '0', 10),\n Number.parseInt(match[2] ?? 
'0', 10),\n ];\n}\n\n/**\n * Parse number from string, returning undefined if invalid\n */\nfunction parseNumber(value: string | undefined): number | undefined {\n if (value === undefined) return undefined;\n const num = Number.parseFloat(value);\n return Number.isNaN(num) ? undefined : num;\n}\n\n/**\n * Detect software variant from Version and App strings\n */\nfunction detectSoftwareVariant(\n version: string | undefined,\n app: string | undefined,\n): 'sd-webui' | 'sd-next' | 'forge' | 'forge-neo' {\n // Check App field first (SD.Next uses this)\n if (app === 'SD.Next') return 'sd-next';\n\n // Check Version field\n if (!version) return 'sd-webui';\n if (version === 'neo') return 'forge-neo';\n // Forge uses 'classic' or 'fX.Y.Z' versions (semantic version format)\n if (version === 'classic') return 'forge';\n if (/^f\\d+\\.\\d+/.test(version)) return 'forge';\n return 'sd-webui';\n}\n","import type { MetadataEntry } from '../types';\n\n/**\n * Entry record type (readonly for immutability)\n */\nexport type EntryRecord = Readonly<Record<string, string>>;\n\n/**\n * Build an immutable record from metadata entries for keyword lookup\n *\n * @param entries - Array of metadata entries\n * @returns Readonly record mapping keyword to text\n *\n * @example\n * ```typescript\n * const record = buildEntryRecord(entries);\n * const comment = record['Comment']; // string | undefined\n * ```\n */\nexport function buildEntryRecord(entries: MetadataEntry[]): EntryRecord {\n return Object.freeze(\n Object.fromEntries(entries.map((e) => [e.keyword, e.text])),\n );\n}\n","/**\n * Type-safe JSON parsing utilities\n */\n\nimport { Result } from '../types';\n\n/**\n * Type-safe JSON parse with Result\n *\n * Wraps JSON.parse to return a Result type instead of throwing.\n * This enables const-only code without try-catch blocks.\n *\n * @param text - JSON string to parse\n * @returns Result with parsed value or parse error\n *\n * @example\n * const parsed = parseJson<MyType>(text);\n * if (!parsed.ok) return parsed;\n * const data = parsed.value;\n */\nexport function parseJson<T>(\n text: string,\n): Result<T, { type: 'parseError'; message: string }> {\n try {\n return Result.ok(JSON.parse(text) as T);\n } catch {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON',\n });\n }\n}\n","/**\n * ComfyUI metadata parser\n *\n * Parses ComfyUI-format prompt data from node graphs.\n * Also handles Civitai extraMetadata fallbacks for upscale workflows.\n */\n\nimport type {\n BasicComfyUIMetadata,\n ComfyNodeGraph,\n InternalParseResult,\n MetadataEntry,\n} from '../types';\nimport { Result } from '../types';\nimport { type EntryRecord, buildEntryRecord } from '../utils/entries';\nimport { parseJson } from '../utils/json';\n\n// =============================================================================\n// Types\n// =============================================================================\n\n/**\n * ComfyUI node structure\n */\ninterface ComfyNode {\n inputs: Record<string, unknown>;\n class_type: string;\n _meta?: { title?: string };\n}\n\n/**\n * ComfyUI prompt structure (node ID -> node)\n */\ntype ComfyPrompt = Record<string, ComfyNode>;\n\n/**\n * Civitai extraMetadata structure (nested JSON in prompt)\n */\ninterface CivitaiExtraMetadata {\n prompt?: string;\n negativePrompt?: string;\n cfgScale?: number;\n sampler?: string;\n clipSkip?: number;\n steps?: number;\n seed?: number;\n width?: number;\n height?: number;\n baseModel?: string;\n transformations?: Array<{\n type?: 
string;\n upscaleWidth?: number;\n upscaleHeight?: number;\n }>;\n}\n\n// =============================================================================\n// Main Parser\n// =============================================================================\n\n/**\n * Parse ComfyUI metadata from entries\n *\n * ComfyUI stores metadata with:\n * - prompt: JSON containing node graph with inputs\n * - workflow: JSON containing the full workflow (stored in raw, not parsed)\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseComfyUI(entries: MetadataEntry[]): InternalParseResult {\n const entryRecord = buildEntryRecord(entries);\n\n // Find prompt JSON from various possible locations\n const promptText = findPromptJson(entryRecord);\n if (!promptText) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Parse prompt JSON\n const parsed = parseJson<ComfyPrompt>(promptText);\n if (!parsed.ok) {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON in prompt entry',\n });\n }\n const prompt = parsed.value;\n\n // Verify it's ComfyUI format (has class_type)\n const nodes = Object.values(prompt);\n if (!nodes.some((node) => 'class_type' in node)) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Find key nodes\n const ksampler = findNode(prompt, ['Sampler']);\n\n // Extract prompts from CLIP nodes\n const positiveClip = findNode(prompt, ['PositiveCLIP_Base']);\n const negativeClip = findNode(prompt, ['NegativeCLIP_Base']);\n const clipPositiveText = extractText(positiveClip);\n const clipNegativeText = extractText(negativeClip);\n\n // Extract dimensions\n const latentImage = findNode(prompt, ['EmptyLatentImage']);\n const latentWidth = latentImage ? Number(latentImage.inputs.width) || 0 : 0;\n const latentHeight = latentImage ? 
Number(latentImage.inputs.height) || 0 : 0;\n\n // Apply Civitai extraMetadata fallbacks\n const extraMeta = extractExtraMetadata(prompt);\n const positiveText = clipPositiveText || extraMeta?.prompt || '';\n const negativeText = clipNegativeText || extraMeta?.negativePrompt || '';\n const width = latentWidth || extraMeta?.width || 0;\n const height = latentHeight || extraMeta?.height || 0;\n\n // Build metadata\n const metadata: Omit<BasicComfyUIMetadata, 'raw'> = {\n software: 'comfyui',\n prompt: positiveText,\n negativePrompt: negativeText,\n width,\n height,\n nodes: prompt as ComfyNodeGraph, // Store the parsed node graph\n };\n\n // Add model settings\n const checkpoint = findNode(prompt, ['CheckpointLoader_Base'])?.inputs\n ?.ckpt_name;\n\n if (checkpoint) {\n metadata.model = { name: String(checkpoint) };\n } else if (extraMeta?.baseModel) {\n metadata.model = { name: extraMeta.baseModel };\n }\n\n // Add sampling settings\n if (ksampler) {\n metadata.sampling = {\n seed: ksampler.inputs.seed as number,\n steps: ksampler.inputs.steps as number,\n cfg: ksampler.inputs.cfg as number,\n sampler: ksampler.inputs.sampler_name as string,\n scheduler: ksampler.inputs.scheduler as string,\n };\n } else if (extraMeta) {\n metadata.sampling = {\n seed: extraMeta.seed,\n steps: extraMeta.steps,\n cfg: extraMeta.cfgScale,\n sampler: extraMeta.sampler,\n };\n }\n\n // Add HiresFix/Upscaler settings\n const hiresModel = findNode(prompt, [\n 'HiresFix_ModelUpscale_UpscaleModelLoader',\n 'PostUpscale_ModelUpscale_UpscaleModelLoader',\n ])?.inputs;\n const hiresScale = findNode(prompt, [\n 'HiresFix_ImageScale',\n 'PostUpscale_ImageScale',\n ])?.inputs;\n const hiresSampler = findNode(prompt, ['HiresFix_Sampler'])?.inputs;\n\n if (hiresModel && hiresScale) {\n // Calculate scale from HiresFix_ImageScale node\n const hiresWidth = hiresScale.width as number;\n const scale =\n latentWidth > 0\n ? Math.round((hiresWidth / latentWidth) * 100) / 100\n : undefined;\n\n if (hiresSampler) {\n metadata.hires = {\n upscaler: hiresModel.model_name as string,\n scale,\n steps: hiresSampler.steps as number,\n denoise: hiresSampler.denoise as number,\n };\n } else {\n metadata.upscale = {\n upscaler: hiresModel.model_name as string,\n scale,\n };\n }\n }\n\n // Add upscale settings from Civitai extraMetadata\n if (extraMeta?.transformations) {\n const upscaleTransform = extraMeta.transformations.find(\n (t) => t.type === 'upscale',\n );\n if (upscaleTransform) {\n const originalWidth = extraMeta.width ?? 
width;\n if (originalWidth > 0 && upscaleTransform.upscaleWidth) {\n const scale = upscaleTransform.upscaleWidth / originalWidth;\n metadata.upscale = {\n scale: Math.round(scale * 100) / 100,\n };\n }\n }\n }\n\n return Result.ok(metadata);\n}\n\n// =============================================================================\n// Prompt Finding\n// =============================================================================\n\n/**\n * Find ComfyUI prompt JSON from entry record\n *\n * PNG uses 'prompt', JPEG/WebP may use Comment, Description, or Make.\n */\nfunction findPromptJson(entryRecord: EntryRecord): string | undefined {\n // PNG format: prompt entry\n if (entryRecord.prompt) {\n // Clean invalid JSON values that ComfyUI may include\n // - NaN is not valid in JSON spec (JavaScript only)\n // Replace NaN with null to make it parseable\n return entryRecord.prompt.replace(/:\\s*NaN\\b/g, ': null');\n }\n\n // JPEG/WebP format: may be in various entries\n const candidates = [\n entryRecord.Comment,\n entryRecord.Description,\n entryRecord.Make,\n entryRecord.Prompt, // save-image-extended uses this\n entryRecord.Workflow, // Not a prompt, but may contain nodes info\n ];\n\n for (const candidate of candidates) {\n if (!candidate) continue;\n\n // Check if it's JSON that looks like ComfyUI prompt\n if (candidate.startsWith('{')) {\n // Clean invalid JSON values\n // - Remove null terminators that some tools append\n // - Replace NaN with null (NaN is not valid in JSON spec)\n const cleaned = candidate\n .replace(/\\0+$/, '')\n .replace(/:\\s*NaN\\b/g, ': null');\n const parsed = parseJson<Record<string, unknown>>(cleaned);\n if (!parsed.ok) continue;\n\n // Check if it's wrapped in {\"prompt\": {...}} format\n if (parsed.value.prompt && typeof parsed.value.prompt === 'object') {\n return JSON.stringify(parsed.value.prompt);\n }\n // Check for nodes with class_type\n const values = Object.values(parsed.value);\n if (values.some((v) => v && typeof v === 'object' && 'class_type' in v)) {\n return cleaned; // Return cleaned JSON, not original candidate\n }\n }\n }\n\n return undefined;\n}\n\n// =============================================================================\n// Node Finding\n// =============================================================================\n\n/**\n * Find a node by key name (first match)\n */\nfunction findNode(prompt: ComfyPrompt, keys: string[]): ComfyNode | undefined {\n return Object.entries(prompt).find(([key]) => keys.includes(key))?.[1];\n}\n\n// =============================================================================\n// Text Extraction\n// =============================================================================\n\n/**\n * Extract text from CLIP text encode node\n */\nfunction extractText(node: ComfyNode | undefined): string {\n return typeof node?.inputs.text === 'string' ? 
node.inputs.text : '';\n}\n\n// =============================================================================\n// Civitai Extra Metadata\n// =============================================================================\n\n/**\n * Extract extraMetadata from ComfyUI prompt\n *\n * Civitai upscale workflows embed original generation params in extraMetadata field\n */\nfunction extractExtraMetadata(\n prompt: ComfyPrompt,\n): CivitaiExtraMetadata | undefined {\n const extraMetaField = (prompt as Record<string, unknown>).extraMetadata;\n if (typeof extraMetaField !== 'string') return undefined;\n\n const parsed = parseJson<CivitaiExtraMetadata>(extraMetaField);\n return parsed.ok ? parsed.value : undefined;\n}\n","import type { GenerationSoftware, MetadataEntry } from '../types';\nimport { type EntryRecord, buildEntryRecord } from '../utils/entries';\n\n/**\n * Detect generation software from metadata entries\n *\n * Analyzes entry keywords and content to identify the software that\n * generated the image. This centralized detection allows parsers to\n * focus on extracting structured data.\n *\n * @param entries - Metadata entries to analyze\n * @returns Detected software or null if unknown\n */\nexport function detectSoftware(\n entries: MetadataEntry[],\n): GenerationSoftware | null {\n const entryRecord = buildEntryRecord(entries);\n\n // Tier 1: Fastest - unique keywords\n const uniqueResult = detectUniqueKeywords(entryRecord);\n if (uniqueResult) return uniqueResult;\n\n // Tier 2: Format-specific structured detection\n const comfyResult = detectComfyUIEntries(entryRecord);\n if (comfyResult) return comfyResult;\n\n // Tier 3: Content analysis\n const text = entryRecord.parameters ?? entryRecord.Comment ?? '';\n if (text) {\n return detectFromTextContent(text);\n }\n\n return null;\n}\n\n/**\n * Detect software from unique keywords (Tier 1)\n *\n * Fast path: checks for presence of specific keywords that uniquely\n * identify each software. 
These are the most reliable indicators.\n *\n * Includes:\n * - Unique PNG chunk keywords\n * - Unique content patterns in parameters\n * - JPEG/WebP Comment JSON parsing (conversion cases)\n */\nfunction detectUniqueKeywords(\n entryRecord: EntryRecord,\n): GenerationSoftware | null {\n // ========================================\n // PNG Chunk Keywords\n // ========================================\n\n // NovelAI: Uses \"Software\" chunk with \"NovelAI\" value\n if (entryRecord.Software?.startsWith('NovelAI')) {\n return 'novelai';\n }\n\n // InvokeAI: Has unique \"invokeai_metadata\" chunk\n if ('invokeai_metadata' in entryRecord) {\n return 'invokeai';\n }\n\n // TensorArt: Has unique \"generation_data\" chunk\n if ('generation_data' in entryRecord) {\n return 'tensorart';\n }\n\n // Stability Matrix: Has unique \"smproj\" chunk\n if ('smproj' in entryRecord) {\n return 'stability-matrix';\n }\n\n // Easy Diffusion: Has \"negative_prompt\" or \"Negative Prompt\" keyword\n if ('negative_prompt' in entryRecord || 'Negative Prompt' in entryRecord) {\n return 'easydiffusion';\n }\n\n // ========================================\n // Parameters Content Patterns\n // ========================================\n\n // SwarmUI: Check parameters for \"sui_image_params\"\n // MUST check here to catch it before ComfyUI detection\n const parameters = entryRecord.parameters;\n if (parameters?.includes('sui_image_params')) {\n return 'swarmui';\n }\n\n // ========================================\n // JPEG/WebP Comment JSON\n // ========================================\n\n const comment = entryRecord.Comment;\n if (comment?.startsWith('{')) {\n return detectFromCommentJson(comment);\n }\n\n return null;\n}\n\n/**\n * Detect software from Comment JSON (conversion cases)\n *\n * Handles PNG→JPEG/WebP conversions where chunks become JSON.\n */\nfunction detectFromCommentJson(comment: string): GenerationSoftware | null {\n try {\n const parsed = JSON.parse(comment) as Record<string, unknown>;\n\n // InvokeAI: Same as PNG chunk check, but from JSON\n if ('invokeai_metadata' in parsed) {\n return 'invokeai';\n }\n\n // ComfyUI: Has both prompt and workflow in JSON\n if ('prompt' in parsed && 'workflow' in parsed) {\n const workflow = parsed.workflow;\n const prompt = parsed.prompt;\n\n const isObject =\n typeof workflow === 'object' || typeof prompt === 'object';\n const isJsonString =\n (typeof workflow === 'string' && workflow.startsWith('{')) ||\n (typeof prompt === 'string' && prompt.startsWith('{'));\n\n if (isObject || isJsonString) {\n return 'comfyui';\n }\n }\n\n // SwarmUI: Same as parameters check, but from Comment JSON\n if ('sui_image_params' in parsed) {\n return 'swarmui';\n }\n\n // SwarmUI alternative format\n if ('prompt' in parsed && 'parameters' in parsed) {\n const params = String(parsed.parameters || '');\n if (\n params.includes('sui_image_params') ||\n params.includes('swarm_version')\n ) {\n return 'swarmui';\n }\n }\n } catch {\n // Invalid JSON\n }\n\n return null;\n}\n\n/**\n * Detect ComfyUI from specific entry combinations (Tier 2)\n *\n * ComfyUI has unique entry combinations that can be detected\n * before analyzing text content.\n */\nfunction detectComfyUIEntries(\n entryRecord: EntryRecord,\n): GenerationSoftware | null {\n // ComfyUI: Both prompt AND workflow chunks exist\n if ('prompt' in entryRecord && 'workflow' in entryRecord) {\n return 'comfyui';\n }\n\n // ComfyUI: Workflow chunk only (rare, but valid)\n if ('workflow' in entryRecord) {\n return 'comfyui';\n }\n\n // 
ComfyUI: Prompt chunk with workflow JSON data\n // IMPORTANT: Check SwarmUI FIRST\n if ('prompt' in entryRecord) {\n const promptText = entryRecord.prompt;\n if (promptText?.startsWith('{')) {\n // SwarmUI: Must check FIRST\n if (promptText.includes('sui_image_params')) {\n return 'swarmui';\n }\n\n // ComfyUI: Has class_type in prompt JSON\n if (promptText.includes('class_type')) {\n return 'comfyui';\n }\n }\n }\n\n return null;\n}\n\n/**\n * Detect software from text content (Tier 3)\n *\n * Analyzes text content which can be either JSON format or A1111 text format.\n * This is the slowest but most thorough detection path.\n */\nfunction detectFromTextContent(text: string): GenerationSoftware | null {\n // JSON format detection\n if (text.startsWith('{')) {\n return detectFromJsonFormat(text);\n }\n\n // A1111-style text format detection\n return detectFromA1111Format(text);\n}\n\n/**\n * Detect software from JSON-formatted metadata\n *\n * Priority order:\n * 1. Unique string patterns (most specific)\n * 2. Multi-field combinations (moderately specific)\n * 3. Generic patterns (least specific, fallback)\n */\nfunction detectFromJsonFormat(json: string): GenerationSoftware | null {\n // ========================================\n // Tier 1: Unique String Identifiers\n // ========================================\n\n // SwarmUI: Has \"sui_image_params\" (unique identifier)\n if (json.includes('sui_image_params')) {\n return 'swarmui';\n }\n\n // Ruined Fooocus: Has explicit software field\n if (\n json.includes('\"software\":\"RuinedFooocus\"') ||\n json.includes('\"software\": \"RuinedFooocus\"')\n ) {\n return 'ruined-fooocus';\n }\n\n // Easy Diffusion: Has unique field name\n if (json.includes('\"use_stable_diffusion_model\"')) {\n return 'easydiffusion';\n }\n\n // Civitai: Has distinctive namespace or field\n if (json.includes('civitai:') || json.includes('\"resource-stack\"')) {\n return 'civitai';\n }\n\n // ========================================\n // Tier 2: Multi-Field Combinations\n // ========================================\n\n // NovelAI: Has distinctive v4_prompt or noise_schedule fields\n if (\n json.includes('\"v4_prompt\"') ||\n json.includes('\"noise_schedule\"') ||\n json.includes('\"uncond_scale\"') ||\n json.includes('\"Software\":\"NovelAI\"') ||\n json.includes('\\\\\"noise_schedule\\\\\"') ||\n json.includes('\\\\\"v4_prompt\\\\\"')\n ) {\n return 'novelai';\n }\n\n // HuggingFace Space: Combination of Model + resolution\n if (json.includes('\"Model\"') && json.includes('\"resolution\"')) {\n return 'hf-space';\n }\n\n // Fooocus: Has prompt + base_model combination\n if (json.includes('\"prompt\"') && json.includes('\"base_model\"')) {\n return 'fooocus';\n }\n\n // ========================================\n // Tier 3: Generic Fallback Patterns\n // ========================================\n\n // ComfyUI: Has \"prompt\" or \"nodes\" (very generic, last resort)\n if (json.includes('\"prompt\"') || json.includes('\"nodes\"')) {\n return 'comfyui';\n }\n\n return null;\n}\n\n/**\n * Detect software from A1111-style text format\n *\n * Priority order:\n * 1. SwarmUI indicators (check first as it has unique markers)\n * 2. Version field analysis (forge, forge-neo, comfyui variants)\n * 3. App field (SD.Next)\n * 4. Resource markers (Civitai)\n * 5. 
Default A1111 format (steps + sampler)\n */\nfunction detectFromA1111Format(text: string): GenerationSoftware | null {\n // ========================================\n // Tier 1: SwarmUI Detection\n // ========================================\n\n // SwarmUI: Has sui_image_params or swarm_version\n if (text.includes('sui_image_params') || text.includes('swarm_version')) {\n return 'swarmui';\n }\n\n // ========================================\n // Tier 2: Version Field Analysis\n // ========================================\n\n const versionMatch = text.match(/Version:\\s*([^\\s,]+)/);\n if (versionMatch) {\n const version = versionMatch[1];\n\n // Forge Neo: Version starts with \"neo\"\n if (version === 'neo' || version?.startsWith('neo')) {\n return 'forge-neo';\n }\n\n // Forge: Version starts with \"f\" followed by a digit\n if (version?.startsWith('f') && /^f\\d/.test(version)) {\n return 'forge';\n }\n\n // ComfyUI: Version explicitly says \"ComfyUI\"\n if (version === 'ComfyUI') {\n return 'comfyui';\n }\n }\n\n // ========================================\n // Tier 3: Other Unique Text Markers\n // ========================================\n\n // SD.Next: Has App field with SD.Next value\n if (text.includes('App: SD.Next') || text.includes('App:SD.Next')) {\n return 'sd-next';\n }\n\n // Civitai: Has resource list marker\n if (text.includes('Civitai resources:')) {\n return 'civitai';\n }\n\n // ========================================\n // Tier 4: Default A1111 Format\n // ========================================\n\n // SD-WebUI (default): Has typical A1111 parameters\n if (text.includes('Steps:') && text.includes('Sampler:')) {\n return 'sd-webui';\n }\n\n return null;\n}\n","import type {\n InternalParseResult,\n MetadataEntry,\n StandardMetadata,\n} from '../types';\nimport { Result } from '../types';\nimport { buildEntryRecord } from '../utils/entries';\nimport { parseJson } from '../utils/json';\n\n/**\n * Easy Diffusion JSON metadata structure\n *\n * ⚠️ UNVERIFIED: This parser has not been verified with actual Easy Diffusion samples.\n * The implementation is based on reference code from other libraries but may not be\n * fully accurate. Please report any issues if you encounter problems with Easy Diffusion\n * metadata parsing.\n *\n * Easy Diffusion stores metadata as JSON in various entries:\n * - PNG: negative_prompt or Negative Prompt entry\n * - JPEG/WebP: Exif UserComment\n */\ninterface EasyDiffusionJsonMetadata {\n prompt?: string;\n negative_prompt?: string;\n Prompt?: string;\n 'Negative Prompt'?: string;\n seed?: number;\n Seed?: number;\n use_stable_diffusion_model?: string;\n 'Stable Diffusion model'?: string;\n sampler_name?: string;\n Sampler?: string;\n num_inference_steps?: number;\n Steps?: number;\n guidance_scale?: number;\n 'Guidance Scale'?: number;\n width?: number;\n Width?: number;\n height?: number;\n Height?: number;\n clip_skip?: number;\n 'Clip Skip'?: number;\n use_vae_model?: string;\n 'VAE model'?: string;\n}\n\n/**\n * Get value from JSON with fallback for different key formats\n *\n * Easy Diffusion uses two different key formats:\n * - Format A: prompt, negative_prompt, seed (snake_case)\n * - Format B: Prompt, Negative Prompt, Seed (capitalized)\n */\nfunction getValue<T>(\n json: EasyDiffusionJsonMetadata,\n keyA: keyof EasyDiffusionJsonMetadata,\n keyB: keyof EasyDiffusionJsonMetadata,\n): T | undefined {\n return (json[keyA] ?? 
json[keyB]) as T | undefined;\n}\n\n/**\n * Extract model name from path\n *\n * Easy Diffusion stores full path like \"path/to/model.safetensors\"\n */\nfunction extractModelName(path: string | undefined): string | undefined {\n if (!path) return undefined;\n // Handle both Windows and POSIX paths\n const parts = path.replace(/\\\\/g, '/').split('/');\n return parts[parts.length - 1];\n}\n\n/**\n * Parse Easy Diffusion metadata from entries\n *\n * Easy Diffusion stores metadata as JSON in:\n * - PNG: info dict with negative_prompt or Negative Prompt key\n * - JPEG/WebP: Exif UserComment\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseEasyDiffusion(\n entries: MetadataEntry[],\n): InternalParseResult {\n const entryRecord = buildEntryRecord(entries);\n\n // Check for standalone entries (PNG format)\n if (entryRecord.negative_prompt || entryRecord['Negative Prompt']) {\n // The entire info dict is what we need to process\n // Try to reconstruct from individual entries or find a JSON source\n // For PNG, Easy Diffusion stores each field as a separate chunk\n return parseFromEntries(entryRecord);\n }\n\n // Find JSON in various possible locations\n const jsonText =\n (entryRecord.parameters?.startsWith('{')\n ? entryRecord.parameters\n : undefined) ??\n (entryRecord.Comment?.startsWith('{') ? entryRecord.Comment : undefined);\n\n if (!jsonText) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Parse JSON\n const parsed = parseJson<EasyDiffusionJsonMetadata>(jsonText);\n if (!parsed.ok) {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON in Easy Diffusion metadata',\n });\n }\n\n return parseFromJson(parsed.value);\n}\n\n/**\n * Parse from individual PNG entries\n */\nfunction parseFromEntries(\n entryRecord: Record<string, string | undefined>,\n): InternalParseResult {\n const prompt = entryRecord.prompt ?? entryRecord.Prompt ?? '';\n const negativePrompt =\n entryRecord.negative_prompt ??\n entryRecord['Negative Prompt'] ??\n entryRecord.negative_prompt ??\n '';\n\n const modelPath =\n entryRecord.use_stable_diffusion_model ??\n entryRecord['Stable Diffusion model'];\n\n const width = Number(entryRecord.width ?? entryRecord.Width) || 0;\n const height = Number(entryRecord.height ?? entryRecord.Height) || 0;\n\n const metadata: Omit<StandardMetadata, 'raw'> = {\n software: 'easydiffusion',\n prompt: prompt.trim(),\n negativePrompt: negativePrompt.trim(),\n width,\n height,\n model: {\n name: extractModelName(modelPath),\n vae: entryRecord.use_vae_model ?? entryRecord['VAE model'],\n },\n sampling: {\n sampler: entryRecord.sampler_name ?? entryRecord.Sampler,\n steps:\n Number(entryRecord.num_inference_steps ?? entryRecord.Steps) ||\n undefined,\n cfg:\n Number(entryRecord.guidance_scale ?? entryRecord['Guidance Scale']) ||\n undefined,\n seed: Number(entryRecord.seed ?? entryRecord.Seed) || undefined,\n clipSkip:\n Number(entryRecord.clip_skip ?? entryRecord['Clip Skip']) || undefined,\n },\n };\n\n return Result.ok(metadata);\n}\n\n/**\n * Parse from JSON object\n */\nfunction parseFromJson(json: EasyDiffusionJsonMetadata): InternalParseResult {\n const prompt = getValue<string>(json, 'prompt', 'Prompt') ?? '';\n const negativePrompt =\n getValue<string>(json, 'negative_prompt', 'Negative Prompt') ?? '';\n\n const modelPath = getValue<string>(\n json,\n 'use_stable_diffusion_model',\n 'Stable Diffusion model',\n );\n\n const width = getValue<number>(json, 'width', 'Width') ?? 
0;\n const height = getValue<number>(json, 'height', 'Height') ?? 0;\n\n const metadata: Omit<StandardMetadata, 'raw'> = {\n software: 'easydiffusion',\n prompt: prompt.trim(),\n negativePrompt: negativePrompt.trim(),\n width,\n height,\n model: {\n name: extractModelName(modelPath),\n vae: getValue<string>(json, 'use_vae_model', 'VAE model'),\n },\n sampling: {\n sampler: getValue<string>(json, 'sampler_name', 'Sampler'),\n steps: getValue<number>(json, 'num_inference_steps', 'Steps'),\n cfg: getValue<number>(json, 'guidance_scale', 'Guidance Scale'),\n seed: getValue<number>(json, 'seed', 'Seed'),\n clipSkip: getValue<number>(json, 'clip_skip', 'Clip Skip'),\n },\n };\n\n return Result.ok(metadata);\n}\n","import type {\n InternalParseResult,\n MetadataEntry,\n StandardMetadata,\n} from '../types';\nimport { Result } from '../types';\nimport { buildEntryRecord } from '../utils/entries';\nimport { parseJson } from '../utils/json';\n\n/**\n * Fooocus JSON metadata structure\n *\n * ⚠️ UNVERIFIED: This parser has not been verified with actual Fooocus samples.\n * The implementation is based on reference code from other libraries but may not be\n * fully accurate. Please report any issues if you encounter problems with Fooocus\n * metadata parsing.\n *\n * Fooocus stores metadata as JSON in:\n * - PNG: Comment chunk\n * - JPEG: comment field\n */\ninterface FooocusJsonMetadata {\n prompt?: string;\n negative_prompt?: string;\n base_model?: string;\n refiner_model?: string;\n sampler?: string;\n scheduler?: string;\n seed?: number;\n cfg?: number;\n steps?: number;\n width?: number;\n height?: number;\n loras?: Array<{ name: string; weight: number }>;\n style_selection?: string[];\n performance?: string;\n}\n\n/**\n * Parse Fooocus metadata from entries\n *\n * Fooocus stores metadata as JSON in the Comment chunk (PNG) or\n * comment field (JPEG).\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseFooocus(entries: MetadataEntry[]): InternalParseResult {\n const entryRecord = buildEntryRecord(entries);\n\n // Find JSON in Comment entry (PNG uses Comment, JPEG uses comment)\n const jsonText = entryRecord.Comment ?? entryRecord.comment;\n\n if (!jsonText || !jsonText.startsWith('{')) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Parse JSON\n const parsed = parseJson<FooocusJsonMetadata>(jsonText);\n if (!parsed.ok) {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON in Fooocus metadata',\n });\n }\n const json = parsed.value;\n\n // Verify it's Fooocus format (has base_model)\n if (!json.base_model && !json.prompt) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n const metadata: Omit<StandardMetadata, 'raw'> = {\n software: 'fooocus',\n prompt: json.prompt?.trim() ?? '',\n negativePrompt: json.negative_prompt?.trim() ?? '',\n width: json.width ?? 0,\n height: json.height ?? 
0,\n model: {\n name: json.base_model,\n },\n sampling: {\n sampler: json.sampler,\n scheduler: json.scheduler,\n steps: json.steps,\n cfg: json.cfg,\n seed: json.seed,\n },\n };\n\n return Result.ok(metadata);\n}\n","import type {\n InternalParseResult,\n MetadataEntry,\n StandardMetadata,\n} from '../types';\nimport { Result } from '../types';\nimport { buildEntryRecord } from '../utils/entries';\nimport { parseJson } from '../utils/json';\n\n/**\n * HuggingFace Space JSON metadata structure\n */\ninterface HfSpaceJsonMetadata {\n prompt?: string;\n negative_prompt?: string;\n resolution?: string;\n guidance_scale?: number;\n num_inference_steps?: number;\n style_preset?: string;\n seed?: number;\n sampler?: string;\n Model?: string;\n 'Model hash'?: string;\n use_upscaler?: unknown;\n}\n\n/**\n * Parse HuggingFace Space metadata from entries\n *\n * HuggingFace Spaces using Gradio + Diffusers store metadata as JSON\n * in the parameters chunk.\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseHfSpace(entries: MetadataEntry[]): InternalParseResult {\n const entryRecord = buildEntryRecord(entries);\n\n // Find parameters entry\n const parametersText = entryRecord.parameters;\n if (!parametersText) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Parse JSON\n const parsed = parseJson<HfSpaceJsonMetadata>(parametersText);\n if (!parsed.ok) {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON in parameters entry',\n });\n }\n const json = parsed.value;\n\n // Parse resolution (format: \"832 x 1216\")\n const parseResolution = (res?: string) => {\n const match = res?.match(/(\\d+)\\s*x\\s*(\\d+)/);\n return match?.[1] && match?.[2]\n ? {\n width: Number.parseInt(match[1], 10),\n height: Number.parseInt(match[2], 10),\n }\n : { width: 0, height: 0 };\n };\n const { width, height } = parseResolution(json.resolution);\n\n // Build metadata\n const metadata: Omit<StandardMetadata, 'raw'> = {\n software: 'hf-space',\n prompt: json.prompt ?? '',\n negativePrompt: json.negative_prompt ?? 
'',\n width,\n height,\n model: {\n name: json.Model,\n hash: json['Model hash'],\n },\n sampling: {\n sampler: json.sampler,\n steps: json.num_inference_steps,\n cfg: json.guidance_scale,\n seed: json.seed,\n },\n };\n\n return Result.ok(metadata);\n}\n","import type {\n InternalParseResult,\n MetadataEntry,\n StandardMetadata,\n} from '../types';\nimport { Result } from '../types';\nimport { buildEntryRecord } from '../utils/entries';\nimport { parseJson } from '../utils/json';\n\n/**\n * InvokeAI metadata JSON structure\n */\ninterface InvokeAIMetadataJson {\n positive_prompt?: string;\n negative_prompt?: string;\n width?: number;\n height?: number;\n seed?: number;\n steps?: number;\n cfg_scale?: number;\n scheduler?: string;\n model?: {\n name?: string;\n hash?: string;\n };\n}\n\n/**\n * Extract InvokeAI metadata from entry record\n *\n * Checks direct 'invokeai_metadata' entry first, then tries to extract from Comment JSON\n */\nfunction extractInvokeAIMetadata(\n entryRecord: Record<string, string | undefined>,\n): string | undefined {\n // Direct invokeai_metadata entry (PNG format)\n if (entryRecord.invokeai_metadata) {\n return entryRecord.invokeai_metadata;\n }\n\n // Try to extract from Comment JSON (JPEG/WebP format)\n if (!entryRecord.Comment) {\n return undefined;\n }\n\n const commentParsed = parseJson<Record<string, unknown>>(entryRecord.Comment);\n if (!commentParsed.ok || !('invokeai_metadata' in commentParsed.value)) {\n return undefined;\n }\n\n return JSON.stringify(commentParsed.value.invokeai_metadata);\n}\n\n/**\n * Parse InvokeAI metadata from entries\n *\n * InvokeAI stores metadata with:\n * - invokeai_metadata: JSON containing generation parameters\n * - invokeai_graph: JSON containing the full node graph (not parsed here)\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseInvokeAI(entries: MetadataEntry[]): InternalParseResult {\n // Build entry record for easy access\n const entryRecord = buildEntryRecord(entries);\n\n // Find invokeai_metadata entry\n // For PNG: direct keyword\n // For JPEG/WebP: inside Comment JSON\n const metadataText = extractInvokeAIMetadata(entryRecord);\n\n if (!metadataText) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Parse metadata JSON\n const parsed = parseJson<InvokeAIMetadataJson>(metadataText);\n if (!parsed.ok) {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON in invokeai_metadata entry',\n });\n }\n const data = parsed.value;\n\n // Extract dimensions (fallback to 0 for IHDR extraction)\n const width = data.width ?? 0;\n const height = data.height ?? 0;\n\n // Build metadata\n const metadata: Omit<StandardMetadata, 'raw'> = {\n software: 'invokeai',\n prompt: data.positive_prompt ?? '',\n negativePrompt: data.negative_prompt ?? 
'',\n width,\n height,\n };\n\n // Add model settings\n if (data.model?.name || data.model?.hash) {\n metadata.model = {\n name: data.model.name,\n hash: data.model.hash,\n };\n }\n\n // Add sampling settings\n if (\n data.seed !== undefined ||\n data.steps !== undefined ||\n data.cfg_scale !== undefined ||\n data.scheduler !== undefined\n ) {\n metadata.sampling = {\n seed: data.seed,\n steps: data.steps,\n cfg: data.cfg_scale,\n sampler: data.scheduler,\n };\n }\n\n return Result.ok(metadata);\n}\n","import type {\n CharacterPrompt,\n InternalParseResult,\n MetadataEntry,\n NovelAIMetadata,\n} from '../types';\nimport { Result } from '../types';\nimport { buildEntryRecord } from '../utils/entries';\nimport { parseJson } from '../utils/json';\n\n/**\n * NovelAI Comment JSON structure\n */\ninterface NovelAIComment {\n prompt: string;\n uc?: string;\n steps?: number;\n height?: number;\n width?: number;\n scale?: number;\n seed?: number;\n noise_schedule?: string;\n sampler?: string;\n /** V4 prompt structure */\n v4_prompt?: V4Prompt;\n /** V4 negative prompt structure */\n v4_negative_prompt?: V4Prompt;\n}\n\n/**\n * NovelAI V4 prompt structure\n */\ninterface V4Prompt {\n caption?: {\n base_caption?: string;\n char_captions?: Array<{\n char_caption?: string;\n centers?: Array<{ x: number; y: number }>;\n }>;\n };\n use_coords?: boolean;\n use_order?: boolean;\n}\n\n/**\n * Parse NovelAI metadata from entries\n *\n * NovelAI stores metadata with:\n * - Software: \"NovelAI\"\n * - Comment: JSON containing generation parameters\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseNovelAI(entries: MetadataEntry[]): InternalParseResult {\n // Build entry record for easy access\n const entryRecord = buildEntryRecord(entries);\n\n // Verify NovelAI format\n if (!entryRecord.Software?.startsWith('NovelAI')) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Parse Comment JSON\n const commentText = entryRecord.Comment;\n if (!commentText) {\n return Result.error({\n type: 'parseError',\n message: 'Missing Comment entry',\n });\n }\n\n const parsed = parseJson<NovelAIComment>(commentText);\n if (!parsed.ok) {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON in Comment entry',\n });\n }\n const comment = parsed.value;\n\n // Extract dimensions (fallback to 0 for IHDR extraction)\n const width = comment.width ?? 0;\n const height = comment.height ?? 0;\n\n // Extract prompt - prefer V4 base_caption if available\n const prompt =\n comment.v4_prompt?.caption?.base_caption ?? comment.prompt ?? '';\n const negativePrompt =\n comment.v4_negative_prompt?.caption?.base_caption ?? comment.uc ?? 
'';\n\n // Build metadata\n const metadata: Omit<NovelAIMetadata, 'raw'> = {\n software: 'novelai',\n prompt,\n negativePrompt,\n width,\n height,\n };\n\n // Add sampling settings if present\n if (\n comment.steps !== undefined ||\n comment.scale !== undefined ||\n comment.seed !== undefined ||\n comment.noise_schedule !== undefined ||\n comment.sampler !== undefined\n ) {\n metadata.sampling = {\n steps: comment.steps,\n cfg: comment.scale,\n seed: comment.seed,\n sampler: comment.sampler,\n scheduler: comment.noise_schedule,\n };\n }\n\n // Extract V4 character prompts\n const charCaptions = comment.v4_prompt?.caption?.char_captions;\n if (charCaptions && charCaptions.length > 0) {\n metadata.characterPrompts = charCaptions\n .map((cc): CharacterPrompt | null => {\n if (!cc.char_caption) return null;\n return {\n prompt: cc.char_caption,\n center: cc.centers?.[0],\n };\n })\n .filter((cp): cp is CharacterPrompt => cp !== null);\n\n metadata.useCoords = comment.v4_prompt?.use_coords;\n metadata.useOrder = comment.v4_prompt?.use_order;\n }\n\n return Result.ok(metadata);\n}\n","import type {\n InternalParseResult,\n MetadataEntry,\n StandardMetadata,\n} from '../types';\nimport { Result } from '../types';\nimport { buildEntryRecord } from '../utils/entries';\nimport { parseJson } from '../utils/json';\n\n/**\n * Ruined Fooocus JSON metadata structure\n *\n * Ruined Fooocus stores metadata as JSON in the `parameters` chunk.\n * It has a `software` field set to \"RuinedFooocus\" for identification.\n */\ninterface RuinedFooocusJsonMetadata {\n Prompt?: string;\n Negative?: string;\n steps?: number;\n cfg?: number;\n width?: number;\n height?: number;\n seed?: number;\n sampler_name?: string;\n scheduler?: string;\n base_model_name?: string;\n base_model_hash?: string;\n loras?: Array<{ name: string; weight: number }>;\n clip_skip?: number;\n software?: string;\n}\n\n/**\n * Parse Ruined Fooocus metadata from entries\n *\n * Ruined Fooocus stores metadata as JSON in the `parameters` chunk,\n * with a `software` field set to \"RuinedFooocus\".\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseRuinedFooocus(\n entries: MetadataEntry[],\n): InternalParseResult {\n const entryRecord = buildEntryRecord(entries);\n\n // Find JSON in parameters entry\n const jsonText = entryRecord.parameters;\n\n if (!jsonText || !jsonText.startsWith('{')) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Parse JSON\n const parsed = parseJson<RuinedFooocusJsonMetadata>(jsonText);\n if (!parsed.ok) {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON in Ruined Fooocus metadata',\n });\n }\n const json = parsed.value;\n\n // Verify it's Ruined Fooocus format\n if (json.software !== 'RuinedFooocus') {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n const metadata: Omit<StandardMetadata, 'raw'> = {\n software: 'ruined-fooocus',\n prompt: json.Prompt?.trim() ?? '',\n negativePrompt: json.Negative?.trim() ?? '',\n width: json.width ?? 0,\n height: json.height ?? 
0,\n model: {\n name: json.base_model_name,\n hash: json.base_model_hash,\n },\n sampling: {\n sampler: json.sampler_name,\n scheduler: json.scheduler,\n steps: json.steps,\n cfg: json.cfg,\n seed: json.seed,\n clipSkip: json.clip_skip,\n },\n };\n\n return Result.ok(metadata);\n}\n","import type {\n BasicComfyUIMetadata,\n InternalParseResult,\n MetadataEntry,\n} from '../types';\nimport { Result } from '../types';\nimport { buildEntryRecord } from '../utils/entries';\nimport { parseJson } from '../utils/json';\nimport { parseComfyUI } from './comfyui';\n\n/**\n * Stability Matrix parameters-json structure\n */\ninterface StabilityMatrixJson {\n PositivePrompt?: string;\n NegativePrompt?: string;\n Width?: number;\n Height?: number;\n Seed?: number;\n Steps?: number;\n CfgScale?: number;\n Sampler?: string;\n ModelName?: string;\n ModelHash?: string;\n}\n\n/**\n * Parse Stability Matrix metadata from entries\n *\n * Stability Matrix stores metadata with:\n * - prompt: ComfyUI-compatible workflow JSON (primary source)\n * - parameters-json: JSON containing generation parameters\n * - Used to override prompts (more complete than workflow)\n * - parameters: A1111-style text (fallback)\n * - smproj: Project data (not parsed here)\n *\n * Strategy:\n * 1. Parse as ComfyUI workflow (workflow, model, sampling, etc.)\n * 2. Override prompts from parameters-json (more complete)\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseStabilityMatrix(\n entries: MetadataEntry[],\n): InternalParseResult {\n // Build entry record for easy access\n const entryRecord = buildEntryRecord(entries);\n\n // First, parse as ComfyUI workflow to get base metadata\n const comfyResult = parseComfyUI(entries);\n if (!comfyResult.ok || comfyResult.value.software !== 'comfyui') {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Override software to stability-matrix\n const metadata: Omit<BasicComfyUIMetadata, 'raw'> = {\n ...comfyResult.value,\n software: 'stability-matrix',\n };\n\n // Find parameters-json entry for prompt override\n const jsonText = entryRecord['parameters-json'];\n if (jsonText) {\n const parsed = parseJson<StabilityMatrixJson>(jsonText);\n if (parsed.ok) {\n const data = parsed.value;\n\n // Override prompts from parameters-json (more complete than workflow)\n if (data.PositivePrompt !== undefined) {\n metadata.prompt = data.PositivePrompt;\n }\n if (data.NegativePrompt !== undefined) {\n metadata.negativePrompt = data.NegativePrompt;\n }\n\n // Override model information from parameters-json\n if (data.ModelName !== undefined || data.ModelHash !== undefined) {\n metadata.model = {\n name: data.ModelName,\n hash: data.ModelHash,\n };\n }\n }\n }\n\n return Result.ok(metadata);\n}\n","import type {\n ComfyNodeGraph,\n InternalParseResult,\n MetadataEntry,\n SwarmUIMetadata,\n} from '../types';\nimport { Result } from '../types';\nimport { buildEntryRecord } from '../utils/entries';\nimport { parseJson } from '../utils/json';\n\n/**\n * SwarmUI parameters JSON structure\n */\ninterface SwarmUIParameters {\n sui_image_params?: {\n prompt?: string;\n negativeprompt?: string;\n model?: string;\n seed?: number;\n steps?: number;\n cfgscale?: number;\n width?: number;\n height?: number;\n sampler?: string;\n scheduler?: string;\n // Refiner/Upscale settings\n refinerupscale?: number;\n refinerupscalemethod?: string;\n refinercontrolpercentage?: number;\n };\n}\n\n/**\n * Extract SwarmUI parameters from entry record\n *\n * Checks direct 
'parameters' entry first, then tries to extract from Comment JSON.\n * After converter fix, Comment JSON contains direct sui_image_params (native WebP format).\n */\nfunction extractSwarmUIParameters(\n entryRecord: Record<string, string | undefined>,\n): string | undefined {\n // Direct parameters entry (PNG format)\n if (entryRecord.parameters) {\n return entryRecord.parameters;\n }\n\n // Try to extract from Comment JSON (JPEG/WebP format)\n if (!entryRecord.Comment) {\n return undefined;\n }\n\n const commentParsed = parseJson<Record<string, unknown>>(entryRecord.Comment);\n if (!commentParsed.ok) {\n return undefined;\n }\n\n // Native WebP format: direct sui_image_params\n if ('sui_image_params' in commentParsed.value) {\n return entryRecord.Comment; // Return as-is to preserve full structure\n }\n\n return undefined;\n}\n\n/**\n * Parse SwarmUI metadata from entries\n *\n * SwarmUI stores metadata with:\n * - parameters: JSON containing sui_image_params\n * - prompt: ComfyUI-style node graph (fallback)\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseSwarmUI(entries: MetadataEntry[]): InternalParseResult {\n // Build entry record for easy access\n const entryRecord = buildEntryRecord(entries);\n\n // Find parameters entry\n // For PNG: direct keyword 'parameters'\n // For JPEG/WebP: inside Comment JSON\n const parametersText = extractSwarmUIParameters(entryRecord);\n\n if (!parametersText) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Parse parameters JSON\n const parsed = parseJson<SwarmUIParameters>(parametersText);\n if (!parsed.ok) {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON in parameters entry',\n });\n }\n\n // Verify SwarmUI format (has sui_image_params)\n const params = parsed.value.sui_image_params;\n if (!params) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Extract dimensions (fallback to 0 for IHDR extraction)\n const width = params.width ?? 0;\n const height = params.height ?? 0;\n\n // Build metadata\n const metadata: Omit<SwarmUIMetadata, 'raw'> = {\n software: 'swarmui',\n prompt: params.prompt ?? '',\n negativePrompt: params.negativeprompt ?? 
'',\n width,\n height,\n };\n\n // Add nodes from prompt chunk (PNG format) or Make field (JPEG/WebP extended format)\n const promptSource = entryRecord.prompt || entryRecord.Make;\n if (promptSource) {\n const promptParsed = parseJson(promptSource);\n if (promptParsed.ok) {\n metadata.nodes = promptParsed.value as ComfyNodeGraph;\n }\n }\n\n // Add model settings\n if (params.model) {\n metadata.model = {\n name: params.model,\n };\n }\n\n // Add sampling settings\n if (\n params.seed !== undefined ||\n params.steps !== undefined ||\n params.cfgscale !== undefined ||\n params.sampler !== undefined ||\n params.scheduler !== undefined\n ) {\n metadata.sampling = {\n seed: params.seed,\n steps: params.steps,\n cfg: params.cfgscale,\n sampler: params.sampler,\n scheduler: params.scheduler,\n };\n }\n\n // Add hires/upscale settings\n if (\n params.refinerupscale !== undefined ||\n params.refinerupscalemethod !== undefined ||\n params.refinercontrolpercentage !== undefined\n ) {\n metadata.hires = {\n scale: params.refinerupscale,\n upscaler: params.refinerupscalemethod,\n denoise: params.refinercontrolpercentage,\n };\n }\n\n return Result.ok(metadata);\n}\n","import type {\n BasicComfyUIMetadata,\n ComfyNodeGraph,\n InternalParseResult,\n MetadataEntry,\n} from '../types';\nimport { Result } from '../types';\nimport { buildEntryRecord } from '../utils/entries';\nimport { parseJson } from '../utils/json';\n\n/**\n * TensorArt generation_data JSON structure\n */\ninterface TensorArtGenerationData {\n prompt?: string;\n negativePrompt?: string;\n width?: number;\n height?: number;\n seed?: string;\n steps?: number;\n cfgScale?: number;\n clipSkip?: number;\n baseModel?: {\n modelFileName?: string;\n hash?: string;\n };\n}\n\n/**\n * Parse TensorArt metadata from entries\n *\n * TensorArt stores metadata with:\n * - generation_data: JSON containing generation parameters\n * - prompt: ComfyUI-style node graph (workflow)\n *\n * @param entries - Metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseTensorArt(entries: MetadataEntry[]): InternalParseResult {\n // Build entry record for easy access\n const entryRecord = buildEntryRecord(entries);\n\n // Find generation_data entry\n const dataText = entryRecord.generation_data;\n if (!dataText) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Parse JSON (TensorArt appends NUL characters)\n const cleanedText = dataText.replace(/\\0+$/, '');\n const parsed = parseJson<TensorArtGenerationData>(cleanedText);\n if (!parsed.ok) {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON in generation_data entry',\n });\n }\n const data = parsed.value;\n\n // Extract dimensions (fallback to 0 for IHDR extraction)\n const width = data.width ?? 0;\n const height = data.height ?? 0;\n\n // Parse nodes from prompt chunk (required for TensorArt)\n const promptChunk = entryRecord.prompt;\n if (!promptChunk) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n const promptParsed = parseJson(promptChunk);\n if (!promptParsed.ok) {\n return Result.error({\n type: 'parseError',\n message: 'Invalid JSON in prompt chunk',\n });\n }\n\n // Build metadata\n const metadata: Omit<BasicComfyUIMetadata, 'raw'> = {\n software: 'tensorart',\n prompt: data.prompt ?? '',\n negativePrompt: data.negativePrompt ?? 
'',\n width,\n height,\n nodes: promptParsed.value as ComfyNodeGraph,\n };\n\n // Add model settings\n if (data.baseModel?.modelFileName || data.baseModel?.hash) {\n metadata.model = {\n name: data.baseModel.modelFileName,\n hash: data.baseModel.hash,\n };\n }\n\n // Add sampling settings\n if (\n data.seed !== undefined ||\n data.steps !== undefined ||\n data.cfgScale !== undefined ||\n data.clipSkip !== undefined\n ) {\n const baseSeed = data.seed ? Number(data.seed) : undefined;\n\n metadata.sampling = {\n seed:\n baseSeed === -1\n ? findActualSeed(promptParsed.value as ComfyNodeGraph)\n : baseSeed,\n steps: data.steps,\n cfg: data.cfgScale,\n clipSkip: data.clipSkip,\n };\n }\n\n return Result.ok(metadata);\n}\n\n/**\n * Find actual seed value from KSampler node in ComfyUI node graph\n *\n * @param nodes - ComfyUI node graph\n * @returns Actual seed value, or -1 if not found\n */\nfunction findActualSeed(nodes: ComfyNodeGraph): number {\n const samplerNode = findSamplerNode(nodes);\n return samplerNode && typeof samplerNode.inputs.seed === 'number'\n ? samplerNode.inputs.seed\n : -1;\n}\n\n/**\n * Find KSampler node in ComfyUI node graph\n *\n * @param nodes - ComfyUI node graph\n * @returns KSampler node or undefined\n */\nfunction findSamplerNode(\n nodes: ComfyNodeGraph,\n): { inputs: Record<string, unknown>; class_type: string } | undefined {\n return Object.values(nodes).find(\n (node) =>\n node.class_type === 'KSampler' ||\n node.class_type.toLowerCase().includes('sampler'),\n );\n}\n","import type { InternalParseResult, MetadataEntry } from '../types';\nimport { Result } from '../types';\nimport { parseA1111 } from './a1111';\nimport { parseComfyUI } from './comfyui';\nimport { detectSoftware } from './detect';\nimport { parseEasyDiffusion } from './easydiffusion';\nimport { parseFooocus } from './fooocus';\nimport { parseHfSpace } from './hf-space';\nimport { parseInvokeAI } from './invokeai';\nimport { parseNovelAI } from './novelai';\nimport { parseRuinedFooocus } from './ruined-fooocus';\nimport { parseStabilityMatrix } from './stability-matrix';\nimport { parseSwarmUI } from './swarmui';\nimport { parseTensorArt } from './tensorart';\n\n/**\n * Parse metadata entries to unified format\n *\n * Automatically detects the generation software and applies the appropriate parser.\n * This function returns metadata WITHOUT the `raw` field; callers should attach it.\n *\n * @param entries - Format-agnostic metadata entries\n * @returns Parsed metadata or error\n */\nexport function parseMetadata(entries: MetadataEntry[]): InternalParseResult {\n // Detect software from entries\n const software = detectSoftware(entries);\n\n // Route to appropriate parser based on detected software\n switch (software) {\n case 'novelai':\n return parseNovelAI(entries);\n\n case 'sd-webui':\n case 'sd-next':\n case 'forge':\n case 'forge-neo':\n return parseA1111(entries);\n\n case 'hf-space':\n return parseHfSpace(entries);\n\n case 'civitai': {\n // Civitai can use either ComfyUI JSON or A1111 text format\n const comfyResult = parseComfyUI(entries);\n if (comfyResult.ok) return comfyResult;\n return parseA1111(entries);\n }\n\n case 'comfyui': {\n // ComfyUI can use either JSON or A1111 text format (e.g., comfy-image-saver)\n const comfyResult = parseComfyUI(entries);\n if (comfyResult.ok) return comfyResult;\n return parseA1111(entries);\n }\n\n case 'invokeai':\n return parseInvokeAI(entries);\n\n case 'swarmui':\n return parseSwarmUI(entries);\n\n case 'tensorart':\n return 
parseTensorArt(entries);\n\n case 'stability-matrix':\n return parseStabilityMatrix(entries);\n\n case 'easydiffusion':\n return parseEasyDiffusion(entries);\n\n case 'fooocus':\n return parseFooocus(entries);\n\n case 'ruined-fooocus':\n return parseRuinedFooocus(entries);\n\n default: {\n // Try each parser in order\n // First try A1111 format (most common)\n const a1111Result = parseA1111(entries);\n if (a1111Result.ok) return a1111Result;\n\n // Then try ComfyUI\n const comfyResult = parseComfyUI(entries);\n if (comfyResult.ok) return comfyResult;\n\n // Then try InvokeAI\n const invokeResult = parseInvokeAI(entries);\n if (invokeResult.ok) return invokeResult;\n\n // Then try SwarmUI\n const swarmResult = parseSwarmUI(entries);\n if (swarmResult.ok) return swarmResult;\n\n // Then try TensorArt\n const tensorResult = parseTensorArt(entries);\n if (tensorResult.ok) return tensorResult;\n\n // Then try Stability Matrix\n const stabilityResult = parseStabilityMatrix(entries);\n if (stabilityResult.ok) return stabilityResult;\n\n // Finally try NovelAI\n const novelaiResult = parseNovelAI(entries);\n if (novelaiResult.ok) return novelaiResult;\n\n return Result.error({ type: 'unsupportedFormat' });\n }\n }\n}\n","/**\n * Binary data utilities for reading/writing multi-byte integers\n */\n\n/**\n * Read 3-byte little-endian unsigned integer\n *\n * @param data - Byte array\n * @param offset - Offset to start reading from\n * @returns 24-bit unsigned integer\n */\nexport function readUint24LE(data: Uint8Array, offset: number): number {\n return (\n (data[offset] ?? 0) |\n ((data[offset + 1] ?? 0) << 8) |\n ((data[offset + 2] ?? 0) << 16)\n );\n}\n\n/**\n * Read 4-byte big-endian unsigned integer\n *\n * @param data - Byte array\n * @param offset - Offset to start reading from\n * @returns 32-bit unsigned integer\n */\nexport function readUint32BE(data: Uint8Array, offset: number): number {\n return (\n ((data[offset] ?? 0) << 24) |\n ((data[offset + 1] ?? 0) << 16) |\n ((data[offset + 2] ?? 0) << 8) |\n (data[offset + 3] ?? 0)\n );\n}\n\n/**\n * Read 4-byte little-endian unsigned integer\n *\n * @param data - Byte array\n * @param offset - Offset to start reading from\n * @returns 32-bit unsigned integer\n */\nexport function readUint32LE(data: Uint8Array, offset: number): number {\n return (\n (data[offset] ?? 0) |\n ((data[offset + 1] ?? 0) << 8) |\n ((data[offset + 2] ?? 0) << 16) |\n ((data[offset + 3] ?? 0) << 24)\n );\n}\n\n/**\n * Write 4-byte big-endian unsigned integer\n *\n * @param data - Byte array to write to\n * @param offset - Offset to start writing at\n * @param value - 32-bit unsigned integer value\n */\nexport function writeUint32BE(\n data: Uint8Array,\n offset: number,\n value: number,\n): void {\n data[offset] = (value >>> 24) & 0xff;\n data[offset + 1] = (value >>> 16) & 0xff;\n data[offset + 2] = (value >>> 8) & 0xff;\n data[offset + 3] = value & 0xff;\n}\n\n/**\n * Read 4-byte chunk type as ASCII string\n *\n * @param data - Byte array\n * @param offset - Offset to start reading from\n * @returns 4-character ASCII string\n */\nexport function readChunkType(data: Uint8Array, offset: number): string {\n return String.fromCharCode(\n data[offset] ?? 0,\n data[offset + 1] ?? 0,\n data[offset + 2] ?? 0,\n data[offset + 3] ?? 
0,\n );\n}\n\n/**\n * Read 2-byte unsigned integer with endianness support\n *\n * @param data - Byte array\n * @param offset - Offset to start reading from\n * @param isLittleEndian - If true, read as little-endian\n * @returns 16-bit unsigned integer\n */\nexport function readUint16(\n data: Uint8Array,\n offset: number,\n isLittleEndian: boolean,\n): number {\n if (isLittleEndian) {\n return (data[offset] ?? 0) | ((data[offset + 1] ?? 0) << 8);\n }\n return ((data[offset] ?? 0) << 8) | (data[offset + 1] ?? 0);\n}\n\n/**\n * Read 4-byte unsigned integer with endianness support\n *\n * @param data - Byte array\n * @param offset - Offset to start reading from\n * @param isLittleEndian - If true, read as little-endian\n * @returns 32-bit unsigned integer\n */\nexport function readUint32(\n data: Uint8Array,\n offset: number,\n isLittleEndian: boolean,\n): number {\n if (isLittleEndian) {\n return (\n (data[offset] ?? 0) |\n ((data[offset + 1] ?? 0) << 8) |\n ((data[offset + 2] ?? 0) << 16) |\n ((data[offset + 3] ?? 0) << 24)\n );\n }\n return (\n ((data[offset] ?? 0) << 24) |\n ((data[offset + 1] ?? 0) << 16) |\n ((data[offset + 2] ?? 0) << 8) |\n (data[offset + 3] ?? 0)\n );\n}\n\n/**\n * Compare two Uint8Arrays for equality\n *\n * @param a - First array\n * @param b - Second array\n * @returns true if arrays have same length and all elements match\n */\nexport function arraysEqual(a: Uint8Array, b: Uint8Array): boolean {\n if (a.length !== b.length) return false;\n for (let i = 0; i < a.length; i++) {\n if (a[i] !== b[i]) return false;\n }\n return true;\n}\n\n/**\n * Write 2-byte unsigned integer with endianness support\n *\n * @param data - Byte array to write to\n * @param offset - Offset to start writing at\n * @param value - 16-bit unsigned integer value\n * @param isLittleEndian - If true, write as little-endian\n */\nexport function writeUint16(\n data: Uint8Array,\n offset: number,\n value: number,\n isLittleEndian: boolean,\n): void {\n if (isLittleEndian) {\n data[offset] = value & 0xff;\n data[offset + 1] = (value >>> 8) & 0xff;\n } else {\n data[offset] = (value >>> 8) & 0xff;\n data[offset + 1] = value & 0xff;\n }\n}\n\n/**\n * Write 4-byte unsigned integer with endianness support\n *\n * @param data - Byte array to write to\n * @param offset - Offset to start writing at\n * @param value - 32-bit unsigned integer value\n * @param isLittleEndian - If true, write as little-endian\n */\nexport function writeUint32(\n data: Uint8Array,\n offset: number,\n value: number,\n isLittleEndian: boolean,\n): void {\n if (isLittleEndian) {\n data[offset] = value & 0xff;\n data[offset + 1] = (value >>> 8) & 0xff;\n data[offset + 2] = (value >>> 16) & 0xff;\n data[offset + 3] = (value >>> 24) & 0xff;\n } else {\n data[offset] = (value >>> 24) & 0xff;\n data[offset + 1] = (value >>> 16) & 0xff;\n data[offset + 2] = (value >>> 8) & 0xff;\n data[offset + 3] = value & 0xff;\n }\n}\n\n/**\n * Write 4-byte little-endian unsigned integer\n *\n * @param data - Byte array to write to\n * @param offset - Offset to start writing at\n * @param value - 32-bit unsigned integer value\n */\nexport function writeUint32LE(\n data: Uint8Array,\n offset: number,\n value: number,\n): void {\n data[offset] = value & 0xff;\n data[offset + 1] = (value >>> 8) & 0xff;\n data[offset + 2] = (value >>> 16) & 0xff;\n data[offset + 3] = (value >>> 24) & 0xff;\n}\n\n/**\n * Supported image formats\n */\nexport type ImageFormat = 'png' | 'jpeg' | 'webp';\n\n/**\n * Validates if data starts with PNG signature\n */\nexport 
function isPng(data: Uint8Array): boolean {\n if (data.length < 8) return false;\n return (\n data[0] === 0x89 &&\n data[1] === 0x50 &&\n data[2] === 0x4e &&\n data[3] === 0x47 &&\n data[4] === 0x0d &&\n data[5] === 0x0a &&\n data[6] === 0x1a &&\n data[7] === 0x0a\n );\n}\n\n/**\n * Validates if data starts with JPEG signature\n */\nexport function isJpeg(data: Uint8Array): boolean {\n if (data.length < 2) return false;\n return data[0] === 0xff && data[1] === 0xd8;\n}\n\n/**\n * Validates if data starts with WebP signature\n */\nexport function isWebp(data: Uint8Array): boolean {\n if (data.length < 12) return false;\n return (\n data[0] === 0x52 && // R\n data[1] === 0x49 && // I\n data[2] === 0x46 && // F\n data[3] === 0x46 && // F\n data[8] === 0x57 && // W\n data[9] === 0x45 && // E\n data[10] === 0x42 && // B\n data[11] === 0x50 // P\n );\n}\n\n/**\n * Detect image format from magic bytes\n */\nexport function detectFormat(data: Uint8Array): ImageFormat | null {\n if (isPng(data)) return 'png';\n if (isJpeg(data)) return 'jpeg';\n if (isWebp(data)) return 'webp';\n return null;\n}\n","/**\n * Exif/TIFF tag constants\n *\n * Shared between readers and writers for consistent tag handling.\n */\n\n/** UserComment tag ID in Exif */\nexport const USER_COMMENT_TAG = 0x9286;\n\n/** ImageDescription tag ID */\nexport const IMAGE_DESCRIPTION_TAG = 0x010e;\n\n/** DocumentName tag ID */\nexport const DOCUMENT_NAME_TAG = 0x010d;\n\n/** Make tag ID */\nexport const MAKE_TAG = 0x010f;\n\n/** Software tag ID */\nexport const SOFTWARE_TAG = 0x0131;\n\n/** Exif IFD pointer tag */\nexport const EXIF_IFD_POINTER_TAG = 0x8769;\n","/**\n * Exif reading utilities\n *\n * Functions for parsing Exif/TIFF structures and extracting metadata segments.\n */\n\nimport type { MetadataSegment } from '../types';\nimport { readUint16, readUint32 } from '../utils/binary';\nimport {\n DOCUMENT_NAME_TAG,\n EXIF_IFD_POINTER_TAG,\n IMAGE_DESCRIPTION_TAG,\n MAKE_TAG,\n SOFTWARE_TAG,\n USER_COMMENT_TAG,\n} from '../utils/exif-constants';\n\n/**\n * Parse Exif TIFF structure and extract all metadata segments\n *\n * Extracts metadata from:\n * - ImageDescription (0x010E) - Used by ComfyUI Save Image Extended (with \"Workflow:\" prefix)\n * - Make (0x010F) - Used by ComfyUI Save Image Extended (with \"Prompt:\" prefix)\n * - UserComment (0x9286) - Used by most tools\n *\n * @param exifData - TIFF data (starting with II/MM byte order marker)\n * @returns Array of metadata segments found\n */\nexport function parseExifMetadataSegments(\n exifData: Uint8Array,\n): MetadataSegment[] {\n if (exifData.length < 8) return [];\n\n // Check TIFF byte order\n const isLittleEndian = exifData[0] === 0x49 && exifData[1] === 0x49; // \"II\"\n const isBigEndian = exifData[0] === 0x4d && exifData[1] === 0x4d; // \"MM\"\n\n if (!isLittleEndian && !isBigEndian) return [];\n\n // Verify TIFF magic number (42)\n const magic = readUint16(exifData, 2, isLittleEndian);\n if (magic !== 42) return [];\n\n // Get IFD0 offset\n const ifd0Offset = readUint32(exifData, 4, isLittleEndian);\n\n // Extract all tags from IFD0\n const ifd0Segments = extractTagsFromIfd(exifData, ifd0Offset, isLittleEndian);\n\n // Find Exif IFD and extract UserComment from there\n const exifIfdOffset = findExifIfdOffset(exifData, ifd0Offset, isLittleEndian);\n const exifIfdSegments =\n exifIfdOffset !== null\n ? 
extractTagsFromIfd(exifData, exifIfdOffset, isLittleEndian)\n : [];\n\n return [...ifd0Segments, ...exifIfdSegments];\n}\n\n/**\n * Extract metadata tags from an IFD\n */\nfunction extractTagsFromIfd(\n data: Uint8Array,\n ifdOffset: number,\n isLittleEndian: boolean,\n): MetadataSegment[] {\n const segments: MetadataSegment[] = [];\n\n if (ifdOffset + 2 > data.length) return segments;\n\n const entryCount = readUint16(data, ifdOffset, isLittleEndian);\n let offset = ifdOffset + 2;\n\n for (let i = 0; i < entryCount; i++) {\n if (offset + 12 > data.length) return segments;\n\n const tag = readUint16(data, offset, isLittleEndian);\n const type = readUint16(data, offset + 2, isLittleEndian);\n const count = readUint32(data, offset + 4, isLittleEndian);\n\n // Calculate data size based on type\n const typeSize = getTypeSize(type);\n const dataSize = count * typeSize;\n\n let valueOffset: number;\n if (dataSize <= 4) {\n valueOffset = offset + 8;\n } else {\n valueOffset = readUint32(data, offset + 8, isLittleEndian);\n }\n\n if (valueOffset + dataSize > data.length) {\n offset += 12;\n continue;\n }\n\n const tagData = data.slice(valueOffset, valueOffset + dataSize);\n\n // Process known tags\n if (tag === IMAGE_DESCRIPTION_TAG) {\n const text = decodeAsciiString(tagData);\n if (text) {\n const prefix = extractPrefix(text);\n segments.push({\n source: { type: 'exifImageDescription', prefix: prefix ?? undefined },\n data: prefix ? text.slice(prefix.length + 2) : text,\n });\n }\n } else if (tag === MAKE_TAG) {\n const text = decodeAsciiString(tagData);\n if (text) {\n const prefix = extractPrefix(text);\n segments.push({\n source: { type: 'exifMake', prefix: prefix ?? undefined },\n data: prefix ? text.slice(prefix.length + 2) : text,\n });\n }\n } else if (tag === SOFTWARE_TAG) {\n const text = decodeAsciiString(tagData);\n if (text) {\n segments.push({\n source: { type: 'exifSoftware' },\n data: text,\n });\n }\n } else if (tag === DOCUMENT_NAME_TAG) {\n const text = decodeAsciiString(tagData);\n if (text) {\n segments.push({\n source: { type: 'exifDocumentName' },\n data: text,\n });\n }\n } else if (tag === USER_COMMENT_TAG) {\n const text = decodeUserComment(tagData);\n if (text) {\n segments.push({\n source: { type: 'exifUserComment' },\n data: text,\n });\n }\n }\n\n offset += 12;\n }\n\n return segments;\n}\n\n/**\n * Extract prefix from text like \"Workflow: {...}\" -> \"Workflow\"\n */\nfunction extractPrefix(text: string): string | null {\n const match = text.match(/^([A-Za-z]+):\\s/);\n return match?.[1] ?? 
null;\n}\n\n/**\n * Get size in bytes for TIFF data type\n */\nfunction getTypeSize(type: number): number {\n switch (type) {\n case 1:\n return 1; // BYTE\n case 2:\n return 1; // ASCII\n case 3:\n return 2; // SHORT\n case 4:\n return 4; // LONG\n case 5:\n return 8; // RATIONAL\n case 7:\n return 1; // UNDEFINED\n default:\n return 1;\n }\n}\n\n/**\n * Decode ASCII/UTF-8 string from tag data\n */\nfunction decodeAsciiString(data: Uint8Array): string | null {\n try {\n const decoder = new TextDecoder('utf-8', { fatal: false });\n let text = decoder.decode(data);\n // Remove null terminator if present\n if (text.endsWith('\\0')) {\n text = text.slice(0, -1);\n }\n return text.trim() || null;\n } catch {\n return null;\n }\n}\n\n/**\n * Find Exif IFD offset from IFD0\n */\nfunction findExifIfdOffset(\n data: Uint8Array,\n ifdOffset: number,\n isLittleEndian: boolean,\n): number | null {\n if (ifdOffset + 2 > data.length) return null;\n\n const entryCount = readUint16(data, ifdOffset, isLittleEndian);\n let offset = ifdOffset + 2;\n\n for (let i = 0; i < entryCount; i++) {\n if (offset + 12 > data.length) return null;\n\n const tag = readUint16(data, offset, isLittleEndian);\n\n if (tag === EXIF_IFD_POINTER_TAG) {\n // Exif IFD pointer found\n return readUint32(data, offset + 8, isLittleEndian);\n }\n\n offset += 12;\n }\n\n return null;\n}\n\n/**\n * Decode UserComment based on encoding prefix\n *\n * @param data - UserComment data including encoding prefix\n * @returns Decoded string\n */\nexport function decodeUserComment(data: Uint8Array): string | null {\n if (data.length < 8) return null;\n\n // Check for UNICODE prefix\n if (\n data[0] === 0x55 && // U\n data[1] === 0x4e && // N\n data[2] === 0x49 && // I\n data[3] === 0x43 && // C\n data[4] === 0x4f && // O\n data[5] === 0x44 && // D\n data[6] === 0x45 && // E\n data[7] === 0x00 // NULL\n ) {\n // UTF-16 encoded - detect byte order by looking at first character\n const textData = data.slice(8);\n if (textData.length >= 2) {\n const isLikelyLE = textData[0] !== 0x00 && textData[1] === 0x00;\n return isLikelyLE ? decodeUtf16LE(textData) : decodeUtf16BE(textData);\n }\n return decodeUtf16BE(textData);\n }\n\n // Check for ASCII prefix\n if (\n data[0] === 0x41 && // A\n data[1] === 0x53 && // S\n data[2] === 0x43 && // C\n data[3] === 0x49 && // I\n data[4] === 0x49 && // I\n data[5] === 0x00 && // NULL\n data[6] === 0x00 && // NULL\n data[7] === 0x00 // NULL\n ) {\n // ASCII encoded\n return decodeAscii(data.slice(8));\n }\n\n // Try UTF-8 (for ComfyUI JSON format without prefix)\n try {\n const decoder = new TextDecoder('utf-8', { fatal: true });\n let result = decoder.decode(data);\n // Strip null terminator if present\n if (result.endsWith('\\0')) {\n result = result.slice(0, -1);\n }\n return result;\n } catch {\n return null;\n }\n}\n\n/**\n * Decode UTF-16BE string\n */\nfunction decodeUtf16BE(data: Uint8Array): string {\n const chars: string[] = [];\n\n for (let i = 0; i < data.length - 1; i += 2) {\n const code = ((data[i] ?? 0) << 8) | (data[i + 1] ?? 0);\n if (code === 0) break;\n chars.push(String.fromCharCode(code));\n }\n\n return chars.join('');\n}\n\n/**\n * Decode UTF-16LE string\n */\nfunction decodeUtf16LE(data: Uint8Array): string {\n const chars: string[] = [];\n\n for (let i = 0; i < data.length - 1; i += 2) {\n const code = (data[i] ?? 0) | ((data[i + 1] ?? 
0) << 8);\n if (code === 0) break;\n chars.push(String.fromCharCode(code));\n }\n\n return chars.join('');\n}\n\n/**\n * Decode ASCII string\n */\nfunction decodeAscii(data: Uint8Array): string {\n const chars: string[] = [];\n\n for (let i = 0; i < data.length; i++) {\n if (data[i] === 0) break;\n chars.push(String.fromCharCode(data[i] ?? 0));\n }\n\n return chars.join('');\n}\n","import type { JpegMetadataResult, MetadataSegment } from '../types';\nimport { Result } from '../types';\nimport { arraysEqual } from '../utils/binary';\nimport { parseExifMetadataSegments } from './exif';\n\nimport { isJpeg } from '../utils/binary';\n\n/** APP1 marker */\nconst APP1_MARKER = 0xe1;\n\n/** COM (Comment) marker */\nconst COM_MARKER = 0xfe;\n\n/** Exif header: \"Exif\\0\\0\" */\nconst EXIF_HEADER = new Uint8Array([0x45, 0x78, 0x69, 0x66, 0x00, 0x00]);\n\n/**\n * Read JPEG metadata from binary data\n *\n * Collects metadata from multiple sources:\n * - Exif tags (APP1 segment): UserComment, ImageDescription, Make\n * - COM segment - Used by NovelAI\n *\n * @param data - JPEG file data as Uint8Array\n * @returns Result containing all metadata segments or error\n */\nexport function readJpegMetadata(data: Uint8Array): JpegMetadataResult {\n if (!isJpeg(data)) {\n return Result.error({ type: 'invalidSignature' });\n }\n\n const segments: MetadataSegment[] = [];\n\n // Extract all Exif metadata (UserComment, ImageDescription, Make)\n const app1 = findApp1Segment(data);\n if (app1) {\n const exifData = data.slice(app1.offset, app1.offset + app1.length);\n const exifSegments = parseExifMetadataSegments(exifData);\n segments.push(...exifSegments);\n }\n\n // Try COM segment (NovelAI uses this)\n const comSegment = findComSegment(data);\n if (comSegment) {\n const comData = data.slice(\n comSegment.offset,\n comSegment.offset + comSegment.length,\n );\n const comText = decodeComSegment(comData);\n\n if (comText !== null) {\n segments.push({\n source: { type: 'jpegCom' },\n data: comText,\n });\n }\n }\n\n return Result.ok(segments);\n}\n\n/**\n * Find APP1 segment containing Exif data\n *\n * @param data - JPEG file data\n * @returns Offset and length of APP1 segment data, or null if not found\n */\nexport function findApp1Segment(\n data: Uint8Array,\n): { offset: number; length: number } | null {\n let offset = 2; // Skip SOI marker\n\n while (offset < data.length - 4) {\n // Check for marker\n if (data[offset] !== 0xff) {\n offset++;\n continue;\n }\n\n const marker = data[offset + 1];\n\n // Skip padding bytes\n if (marker === 0xff) {\n offset++;\n continue;\n }\n\n // Get segment length (big-endian, includes length bytes)\n const length = ((data[offset + 2] ?? 0) << 8) | (data[offset + 3] ?? 
0);\n\n // Check for APP1 marker\n if (marker === APP1_MARKER) {\n // Verify Exif header\n const headerStart = offset + 4;\n if (headerStart + 6 <= data.length) {\n const header = data.slice(headerStart, headerStart + 6);\n if (arraysEqual(header, EXIF_HEADER)) {\n // Return offset to TIFF data (after Exif header)\n return {\n offset: headerStart + 6,\n length: length - 8, // Subtract length bytes and Exif header\n };\n }\n }\n }\n\n // Move to next segment\n offset += 2 + length;\n\n // Stop at SOS (Start of Scan) or EOI\n if (marker === 0xda || marker === 0xd9) {\n break;\n }\n }\n\n return null;\n}\n\n/**\n * Find COM (Comment) segment\n *\n * COM segments are used by NovelAI to store metadata as UTF-8 JSON.\n *\n * @param data - JPEG file data\n * @returns Offset and length of COM segment data, or null if not found\n */\nfunction findComSegment(\n data: Uint8Array,\n): { offset: number; length: number } | null {\n let offset = 2; // Skip SOI marker\n\n while (offset < data.length - 4) {\n // Check for marker\n if (data[offset] !== 0xff) {\n offset++;\n continue;\n }\n\n const marker = data[offset + 1];\n\n // Skip padding bytes\n if (marker === 0xff) {\n offset++;\n continue;\n }\n\n // Get segment length (big-endian, includes length bytes)\n const length = ((data[offset + 2] ?? 0) << 8) | (data[offset + 3] ?? 0);\n\n // Check for COM marker\n if (marker === COM_MARKER) {\n // Return offset to comment data (after marker and length)\n return {\n offset: offset + 4,\n length: length - 2, // Subtract length bytes only\n };\n }\n\n // Move to next segment\n offset += 2 + length;\n\n // Stop at SOS (Start of Scan) or EOI\n if (marker === 0xda || marker === 0xd9) {\n break;\n }\n }\n\n return null;\n}\n\n/**\n * Decode COM segment data as UTF-8 string\n *\n * @param data - COM segment data\n * @returns Decoded string or null if invalid\n */\nfunction decodeComSegment(data: Uint8Array): string | null {\n try {\n const decoder = new TextDecoder('utf-8', { fatal: true });\n return decoder.decode(data);\n } catch {\n return null;\n }\n}\n","import type {\n ITXtChunk,\n PngMetadataResult,\n PngReadError,\n PngTextChunk,\n TExtChunk,\n} from '../types';\nimport { Result } from '../types';\nimport { readChunkType, readUint32BE } from '../utils/binary';\n\nimport { isPng } from '../utils/binary';\n\n/**\n * Read PNG metadata from binary data\n * @param data - PNG file data as Uint8Array\n * @returns Result containing metadata or error\n */\nexport function readPngMetadata(data: Uint8Array): PngMetadataResult {\n // Validate PNG signature\n if (!isPng(data)) {\n return Result.error({ type: 'invalidSignature' });\n }\n\n // Extract text chunks\n const chunksResult = extractTextChunks(data);\n if (!chunksResult.ok) {\n return chunksResult;\n }\n\n return Result.ok(chunksResult.value);\n}\n\n/**\n * Extract tEXt and iTXt chunks from PNG data\n */\n// 8 bytes for PNG signature\nconst PNG_SIGNATURE_LENGTH = 8;\n\n/**\n * Extract tEXt and iTXt chunks from PNG data\n */\nfunction extractTextChunks(\n data: Uint8Array,\n): Result<PngTextChunk[], PngReadError> {\n const chunks: PngTextChunk[] = [];\n let offset = PNG_SIGNATURE_LENGTH;\n\n while (offset < data.length) {\n // Read chunk length (4 bytes, big-endian)\n if (offset + 4 > data.length) {\n return Result.error({\n type: 'corruptedChunk',\n message: 'Unexpected end of file while reading chunk length',\n });\n }\n const length = readUint32BE(data, offset);\n offset += 4;\n\n // Read chunk type (4 bytes)\n if (offset + 4 > data.length) {\n return 
Result.error({\n type: 'corruptedChunk',\n message: 'Unexpected end of file while reading chunk type',\n });\n }\n const chunkType = readChunkType(data, offset);\n offset += 4;\n\n // Read chunk data\n if (offset + length > data.length) {\n return Result.error({\n type: 'corruptedChunk',\n message: `Unexpected end of file while reading chunk data (${chunkType})`,\n });\n }\n const chunkData = data.slice(offset, offset + length);\n offset += length;\n\n // Skip CRC (4 bytes)\n offset += 4;\n\n // Parse text chunks\n if (chunkType === 'tEXt') {\n const parsed = parseTExtChunk(chunkData);\n if (parsed) {\n chunks.push(parsed);\n }\n } else if (chunkType === 'iTXt') {\n const parsed = parseITXtChunk(chunkData);\n if (parsed) {\n chunks.push(parsed);\n }\n }\n\n // Stop at IEND\n if (chunkType === 'IEND') {\n break;\n }\n }\n\n return Result.ok(chunks);\n}\n\n/**\n * Parse tEXt chunk data\n *\n * Per PNG specification, tEXt chunks use Latin-1 (ISO-8859-1) encoding.\n * However, some tools (notably TensorArt) incorrectly write UTF-8 bytes\n * directly into tEXt chunks. To handle these non-compliant tools, we\n * attempt UTF-8 decoding first and fall back to Latin-1 if that fails.\n */\nfunction parseTExtChunk(data: Uint8Array): TExtChunk | null {\n // Find null separator\n const nullIndex = data.indexOf(0);\n if (nullIndex === -1) {\n return null;\n }\n\n // Keyword is Latin-1 encoded (per spec, keywords are ASCII-safe)\n const keyword = latin1Decode(data.slice(0, nullIndex));\n\n // Text: Try UTF-8 first (for non-compliant tools), fallback to Latin-1\n const textData = data.slice(nullIndex + 1);\n const text = tryUtf8Decode(textData) ?? latin1Decode(textData);\n\n return { type: 'tEXt', keyword, text };\n}\n\n/**\n * Try to decode data as UTF-8, return null if invalid\n */\nfunction tryUtf8Decode(data: Uint8Array): string | null {\n try {\n return new TextDecoder('utf-8', { fatal: true }).decode(data);\n } catch {\n return null;\n }\n}\n\n/**\n * Parse iTXt chunk data\n */\nfunction parseITXtChunk(data: Uint8Array): ITXtChunk | null {\n let offset = 0;\n\n // Read keyword (null-terminated)\n const keywordEnd = findNull(data, offset);\n if (keywordEnd === -1) return null;\n const keyword = utf8Decode(data.slice(offset, keywordEnd));\n offset = keywordEnd + 1;\n\n // Read compression flag (1 byte)\n if (offset >= data.length) return null;\n const compressionFlag = data[offset] ?? 0;\n offset += 1;\n\n // Read compression method (1 byte)\n if (offset >= data.length) return null;\n const compressionMethod = data[offset] ?? 
0;\n offset += 1;\n\n // Read language tag (null-terminated)\n const langEnd = findNull(data, offset);\n if (langEnd === -1) return null;\n const languageTag = utf8Decode(data.slice(offset, langEnd));\n offset = langEnd + 1;\n\n // Read translated keyword (null-terminated)\n const transEnd = findNull(data, offset);\n if (transEnd === -1) return null;\n const translatedKeyword = utf8Decode(data.slice(offset, transEnd));\n offset = transEnd + 1;\n\n // Read text (rest of data)\n let text: string;\n if (compressionFlag === 1) {\n // Compressed with zlib\n const decompressed = decompressZlib(data.slice(offset));\n if (!decompressed) return null;\n text = utf8Decode(decompressed);\n } else {\n text = utf8Decode(data.slice(offset));\n }\n\n return {\n type: 'iTXt',\n keyword,\n compressionFlag,\n compressionMethod,\n languageTag,\n translatedKeyword,\n text,\n };\n}\n\n/**\n * Find null byte in data starting from offset\n */\nfunction findNull(data: Uint8Array, offset: number): number {\n for (let i = offset; i < data.length; i++) {\n if (data[i] === 0) {\n return i;\n }\n }\n return -1;\n}\n\n/**\n * Decode Latin-1 (ISO-8859-1) bytes to string\n */\nfunction latin1Decode(data: Uint8Array): string {\n let result = '';\n for (let i = 0; i < data.length; i++) {\n result += String.fromCharCode(data[i] ?? 0);\n }\n return result;\n}\n\n/**\n * Decode UTF-8 bytes to string\n */\nfunction utf8Decode(data: Uint8Array): string {\n return new TextDecoder('utf-8').decode(data);\n}\n\n/**\n * Decompress zlib-compressed data\n *\n * Currently unimplemented: All surveyed sample images use uncompressed iTXt.\n * When a sample with compressed iTXt is found, implement using pako library.\n *\n * @see https://www.npmjs.com/package/pako\n */\nfunction decompressZlib(_data: Uint8Array): Uint8Array | null {\n // Not yet implemented - no compressed iTXt samples encountered\n return null;\n}\n","import type { WebpMetadataResult } from '../types';\nimport { Result } from '../types';\nimport { arraysEqual, readUint32LE } from '../utils/binary';\nimport { parseExifMetadataSegments } from './exif';\n\nimport { isWebp } from '../utils/binary';\n\n/** EXIF chunk type */\nconst EXIF_CHUNK_TYPE = new Uint8Array([0x45, 0x58, 0x49, 0x46]);\n\n/**\n * Read WebP metadata from binary data\n *\n * Extracts metadata from EXIF chunk in WebP files.\n * The EXIF chunk contains TIFF-formatted data identical to JPEG Exif.\n *\n * @param data - WebP file data as Uint8Array\n * @returns Result containing all metadata segments or error\n */\nexport function readWebpMetadata(data: Uint8Array): WebpMetadataResult {\n if (!isWebp(data)) {\n return Result.error({ type: 'invalidSignature' });\n }\n\n const exifChunk = findExifChunk(data);\n if (!exifChunk) {\n return Result.ok([]);\n }\n\n const exifData = data.slice(\n exifChunk.offset,\n exifChunk.offset + exifChunk.length,\n );\n\n // Parse all EXIF metadata segments (UserComment, ImageDescription, Make)\n const segments = parseExifMetadataSegments(exifData);\n\n return Result.ok(segments);\n}\n\n/**\n * Find EXIF chunk in WebP file\n *\n * WebP uses RIFF container format with named chunks.\n * EXIF chunk contains TIFF data starting with \"II\" or \"MM\" byte order marker.\n *\n * @param data - WebP file data\n * @returns Offset and length of EXIF chunk data, or null if not found\n */\nexport function findExifChunk(\n data: Uint8Array,\n): { offset: number; length: number } | null {\n // Start after RIFF header (12 bytes: \"RIFF\" + size + \"WEBP\")\n let offset = 12;\n\n while (offset < 
data.length - 8) {\n // Read chunk type (4 bytes)\n const chunkType = data.slice(offset, offset + 4);\n\n // Read chunk size (4 bytes, little-endian)\n const chunkSize = readUint32LE(data, offset + 4);\n\n // Check for EXIF chunk\n if (arraysEqual(chunkType, EXIF_CHUNK_TYPE)) {\n // EXIF chunk data starts after type and size\n return {\n offset: offset + 8,\n length: chunkSize,\n };\n }\n\n // Move to next chunk (chunk size + type + size fields)\n // RIFF chunks are padded to even byte boundaries\n const paddedSize = chunkSize + (chunkSize % 2);\n offset += 8 + paddedSize;\n }\n\n return null;\n}\n","import type {\n MetadataEntry,\n MetadataSegment,\n MetadataSegmentSource,\n PngTextChunk,\n} from '../types';\nimport { parseJson } from './json';\n\n/**\n * Convert PNG text chunks to format-agnostic metadata entries\n *\n * @param chunks - PNG tEXt/iTXt chunks\n * @returns Array of metadata entries\n */\nexport function pngChunksToEntries(chunks: PngTextChunk[]): MetadataEntry[] {\n return chunks.map((chunk) => ({\n keyword: chunk.keyword,\n text: chunk.text,\n }));\n}\n\n/**\n * Convert JPEG/WebP metadata segments to format-agnostic entries\n *\n * Maps segment sources to conventional keywords:\n * - jpegCom → 'Comment'\n * - exifUserComment → 'Comment' (or expand if NovelAI WebP format)\n * - exifImageDescription → prefix or 'Description'\n * - exifMake → prefix or 'Make'\n *\n * Special handling for NovelAI WebP format where metadata is stored as:\n * {\"Comment\": \"{...inner JSON...}\", \"Software\": \"NovelAI\", ...}\n *\n * @param segments - Metadata segments from JPEG/WebP reader\n * @returns Array of metadata entries\n */\nexport function segmentsToEntries(\n segments: MetadataSegment[],\n): MetadataEntry[] {\n const entries: MetadataEntry[] = [];\n\n for (const segment of segments) {\n const keyword = sourceToKeyword(segment.source);\n const text = segment.data;\n\n // Try to detect and expand NovelAI WebP format\n // Format: {\"Comment\": \"{...}\", \"Software\": \"NovelAI\", ...}\n if (segment.source.type === 'exifUserComment' && text.startsWith('{')) {\n const expanded = tryExpandNovelAIWebpFormat(text);\n if (expanded) {\n entries.push(...expanded);\n continue;\n }\n }\n\n entries.push({ keyword, text });\n }\n\n return entries;\n}\n\n/**\n * Try to expand NovelAI WebP format metadata\n *\n * NovelAI WebP stores metadata as outer JSON with:\n * - Software: \"NovelAI\"\n * - Comment: inner JSON string with actual parameters\n *\n * @param text - JSON text to try to expand\n * @returns Array of entries if NovelAI format, null otherwise\n */\nfunction tryExpandNovelAIWebpFormat(text: string): MetadataEntry[] | null {\n const outerParsed = parseJson<Record<string, unknown>>(text);\n if (!outerParsed.ok) {\n return null;\n }\n\n const outer = outerParsed.value;\n\n // Check if this is NovelAI WebP format\n if (\n typeof outer !== 'object' ||\n outer === null ||\n (typeof outer.Software === 'string' &&\n !outer.Software.startsWith('NovelAI')) ||\n typeof outer.Comment !== 'string'\n ) {\n return null;\n }\n\n const entries: MetadataEntry[] = [{ keyword: 'Software', text: 'NovelAI' }];\n\n // Parse and add inner Comment as Comment entry\n const innerParsed = parseJson<unknown>(outer.Comment);\n\n return [\n ...entries,\n innerParsed.ok\n ? 
{ keyword: 'Comment', text: JSON.stringify(innerParsed.value) }\n : { keyword: 'Comment', text: outer.Comment },\n ];\n}\n\n/**\n * Map metadata segment source to keyword\n */\nfunction sourceToKeyword(source: MetadataSegmentSource): string {\n switch (source.type) {\n case 'jpegCom':\n return 'Comment';\n case 'exifUserComment':\n return 'Comment';\n case 'exifImageDescription':\n return source.prefix ?? 'Description';\n case 'exifMake':\n return source.prefix ?? 'Make';\n case 'exifSoftware':\n return 'Software';\n case 'exifDocumentName':\n return 'Title';\n }\n}\n","/**\n * Read API for sd-metadata\n *\n * Handles reading and parsing metadata from images.\n * Automatically detects image format and extracts embedded generation metadata.\n */\n\nimport { parseMetadata } from '../parsers';\nimport { readJpegMetadata } from '../readers/jpeg';\nimport { readPngMetadata } from '../readers/png';\nimport { readWebpMetadata } from '../readers/webp';\nimport type {\n MetadataSegment,\n ParseResult,\n PngTextChunk,\n RawMetadata,\n} from '../types';\nimport {\n type ImageFormat,\n detectFormat,\n readChunkType,\n readUint24LE,\n readUint32BE,\n readUint32LE,\n} from '../utils/binary';\nimport { pngChunksToEntries, segmentsToEntries } from '../utils/convert';\n\n/**\n * Read and parse metadata from an image\n *\n * Automatically detects the image format (PNG, JPEG, WebP) and parses\n * any embedded generation metadata.\n *\n * @param data - Image file data\n * @returns Parse result containing metadata and raw data\n */\nexport function read(data: Uint8Array): ParseResult {\n const format = detectFormat(data);\n\n if (!format) {\n return { status: 'invalid', message: 'Unknown image format' };\n }\n\n // 1. Read raw metadata based on format\n const rawResult = readRawMetadata(data, format);\n if (rawResult.status !== 'success') {\n return rawResult;\n }\n const raw = rawResult.raw;\n\n // 2. Convert to agnostic entries\n const entries =\n raw.format === 'png'\n ? pngChunksToEntries(raw.chunks)\n : segmentsToEntries(raw.segments);\n\n // 3. Parse metadata\n const parseResult = parseMetadata(entries);\n if (!parseResult.ok) {\n return { status: 'unrecognized', raw };\n }\n\n const metadata = parseResult.value;\n\n // 4. 
Fallback for dimensions if missing\n if (metadata.width === 0 || metadata.height === 0) {\n const dims = HELPERS[format].readDimensions(data);\n\n if (dims) {\n metadata.width = metadata.width || dims.width;\n metadata.height = metadata.height || dims.height;\n }\n }\n\n return { status: 'success', metadata, raw };\n}\n\n// ============================================================================\n// Helpers\n// ============================================================================\n\n/** Format-specific helper functions */\nconst HELPERS = {\n png: {\n readMetadata: readPngMetadata,\n readDimensions: readPngDimensions,\n createRaw: (chunks: PngTextChunk[]) => ({ format: 'png' as const, chunks }),\n },\n jpeg: {\n readMetadata: readJpegMetadata,\n readDimensions: readJpegDimensions,\n createRaw: (segments: MetadataSegment[]) => ({\n format: 'jpeg' as const,\n segments,\n }),\n },\n webp: {\n readMetadata: readWebpMetadata,\n readDimensions: readWebpDimensions,\n createRaw: (segments: MetadataSegment[]) => ({\n format: 'webp' as const,\n segments,\n }),\n },\n} as const satisfies Record<ImageFormat, unknown>;\n\n/** Result type for readRawMetadata */\ntype RawReadResult =\n | { status: 'success'; raw: RawMetadata }\n | { status: 'empty' }\n | { status: 'invalid'; message: string };\n\n/**\n * Read raw metadata from image data\n */\nfunction readRawMetadata(data: Uint8Array, format: ImageFormat): RawReadResult {\n const result = HELPERS[format].readMetadata(data);\n\n if (!result.ok) {\n const message =\n result.error.type === 'invalidSignature'\n ? `Invalid ${format.toUpperCase()} signature`\n : result.error.message;\n return { status: 'invalid', message };\n }\n\n if (result.value.length === 0) return { status: 'empty' };\n\n // PNG uses PngTextChunk[], JPEG/WebP use MetadataSegment[]\n if (format === 'png') {\n return {\n status: 'success',\n raw: HELPERS.png.createRaw(result.value as PngTextChunk[]),\n };\n }\n return {\n status: 'success',\n raw: HELPERS[format].createRaw(result.value as MetadataSegment[]),\n };\n}\n\n/**\n * Read width and height from PNG IHDR chunk\n */\nfunction readPngDimensions(\n data: Uint8Array,\n): { width: number; height: number } | null {\n const PNG_SIGNATURE_LENGTH = 8;\n if (data.length < 24) return null;\n // IHDR data starts at offset 16 (8 sig + 4 len + 4 type)\n // Check if it is indeed IHDR?\n // We assume valid PNG if detectFormat passed, and IHDR is always first.\n return {\n width: readUint32BE(data, PNG_SIGNATURE_LENGTH + 8),\n height: readUint32BE(data, PNG_SIGNATURE_LENGTH + 12),\n };\n}\n\n/**\n * Read width and height from JPEG chunks\n */\nfunction readJpegDimensions(\n data: Uint8Array,\n): { width: number; height: number } | null {\n // Use a SafeView-like approach or just manual parsing\n let offset = 2;\n while (offset < data.length - 4) {\n // Check validation\n if (data[offset] !== 0xff) {\n // Should handle scanning for FF, but in valid JPEG segments start with FF\n offset++;\n continue;\n }\n\n const marker = data[offset + 1] ?? 0;\n if (marker === 0xff) {\n offset++;\n continue; // Padding\n }\n\n // Read length (16-bit BE)\n const length = ((data[offset + 2] ?? 0) << 8) | (data[offset + 3] ?? 0);\n\n // SOF0 (C0) ... SOF15 (CF), except C4 (DHT), C8 (JPG), CC (DAC)\n if (\n marker >= 0xc0 &&\n marker <= 0xcf &&\n marker !== 0xc4 &&\n marker !== 0xc8 &&\n marker !== 0xcc\n ) {\n // Structure: Precision(1), Height(2), Width(2)\n // Offset: Marker(2) + Length(2) + Precision(1) = 5\n const height = ((data[offset + 5] ?? 
0) << 8) | (data[offset + 6] ?? 0);\n const width = ((data[offset + 7] ?? 0) << 8) | (data[offset + 8] ?? 0);\n return { width, height };\n }\n\n offset += 2 + length;\n if (marker === 0xda) break; // SOS\n }\n return null;\n}\n\n/**\n * Read width and height from WebP chunks\n */\nfunction readWebpDimensions(\n data: Uint8Array,\n): { width: number; height: number } | null {\n // RIFF(4) + Size(4) + WEBP(4) = 12 bytes\n let offset = 12;\n\n while (offset < data.length) {\n if (offset + 8 > data.length) break;\n\n const chunkType = readChunkType(data, offset);\n const chunkSize = readUint32LE(data, offset + 4);\n const paddedSize = chunkSize + (chunkSize % 2);\n\n if (chunkType === 'VP8X') {\n // VP8X: Width (3 bytes @ offset 12) + Height (3 bytes @ offset 15)\n // Both are 1-based (stored value is width-1)\n const wMinus1 = readUint24LE(data, offset + 12);\n const hMinus1 = readUint24LE(data, offset + 15);\n return { width: wMinus1 + 1, height: hMinus1 + 1 };\n }\n\n if (chunkType === 'VP8 ') {\n // VP8 (lossy): Check keyframe\n // Frame tag (3 bytes @ offset 8+0)\n // Keyframe if bit 0 is 0\n const start = offset + 8;\n const tag =\n (data[start] ?? 0) |\n ((data[start + 1] ?? 0) << 8) |\n ((data[start + 2] ?? 0) << 16);\n const keyFrame = !(tag & 1);\n\n if (keyFrame) {\n // Validation code: 0x9d 0x01 0x2a bytes @ start+3\n if (\n data[start + 3] === 0x9d &&\n data[start + 4] === 0x01 &&\n data[start + 5] === 0x2a\n ) {\n // Width: 2 bytes @ start+6 (14 bits)\n // Height: 2 bytes @ start+8 (14 bits)\n const wRaw = (data[start + 6] ?? 0) | ((data[start + 7] ?? 0) << 8);\n const hRaw = (data[start + 8] ?? 0) | ((data[start + 9] ?? 0) << 8);\n return { width: wRaw & 0x3fff, height: hRaw & 0x3fff };\n }\n }\n }\n\n if (chunkType === 'VP8L') {\n // VP8L (lossless)\n // Signature 0x2f @ offset + 8\n if (data[offset + 8] === 0x2f) {\n // 4 bytes @ offset + 9 containing W (14 bits), H (14 bits)\n const bits = readUint32LE(data, offset + 9);\n const width = (bits & 0x3fff) + 1;\n const height = ((bits >> 14) & 0x3fff) + 1;\n return { width, height };\n }\n }\n\n offset += 8 + paddedSize;\n }\n return null;\n}\n","/**\n * Shared utilities for metadata converters\n */\n\nimport type { MetadataSegment, PngTextChunk } from '../types';\n\n/**\n * Create a tEXt chunk, returns empty array if text is undefined\n *\n * @param keyword - Chunk keyword\n * @param text - Chunk text, if undefined returns empty array\n * @returns Array with one chunk or empty array\n */\nexport const createTextChunk = (\n keyword: string,\n text: string | undefined,\n): PngTextChunk[] =>\n text !== undefined ? [{ type: 'tEXt', keyword, text }] : [];\n\n/**\n * Create an iTXt chunk, returns empty array if text is undefined\n *\n * @param keyword - Chunk keyword\n * @param text - Chunk text, if undefined returns empty array\n * @returns Array with one chunk or empty array\n */\nexport const createITxtChunk = (\n keyword: string,\n text: string | undefined,\n): PngTextChunk[] =>\n text !== undefined\n ? 
[\n {\n type: 'iTXt',\n keyword,\n compressionFlag: 0,\n compressionMethod: 0,\n languageTag: '',\n translatedKeyword: '',\n text,\n },\n ]\n : [];\n\n/**\n * Find a segment by source type\n *\n * @param segments - Array of metadata segments\n * @param type - Source type to find\n * @returns Matching segment or undefined\n */\nexport const findSegment = (\n segments: MetadataSegment[],\n type: string,\n): MetadataSegment | undefined => segments.find((s) => s.source.type === type);\n\n/**\n * Stringify value, returns undefined if value is undefined\n *\n * @param value - Value to stringify\n * @returns Stringified value or undefined\n */\nexport const stringify = (value: unknown): string | undefined => {\n if (value === undefined) return undefined;\n return typeof value === 'string' ? value : JSON.stringify(value);\n};\n","/**\n * Unified chunk encoding strategy for PNG converters\n *\n * Handles three different encoding strategies:\n * 1. dynamic: Choose tEXt/iTXt based on content (for tools like A1111, InvokeAI)\n * 2. text-unicode-escape: tEXt with Unicode escaping (for ComfyUI, SwarmUI)\n * 3. text-utf8-raw: tEXt with raw UTF-8 (for Stability Matrix, TensorArt)\n */\n\nimport type { PngTextChunk } from '../types';\nimport { createITxtChunk, createTextChunk } from './utils';\n\n/**\n * Chunk encoding strategy for PNG converters\n */\nexport type ChunkEncodingStrategy =\n | 'dynamic' // Choose tEXt/iTXt based on content\n | 'text-unicode-escape' // tEXt with Unicode escape (spec-compliant)\n | 'text-utf8-raw'; // tEXt with raw UTF-8 (non-compliant but compatible)\n\n/**\n * Tool-specific chunk encoding strategies\n */\n\n/**\n * Escape Unicode characters beyond Latin-1 for tEXt chunk\n *\n * Converts characters beyond Latin-1 to Unicode escape sequences.\n * Latin-1 range (0x00-0xFF) is left as-is since tEXt supports it.\n * Example: テスト → \\u30c6\\u30b9\\u30c8\n *\n * @param text - Text to escape\n * @returns Text with non-Latin-1 characters escaped\n */\nexport function escapeUnicode(text: string): string {\n return text.replace(/[\\u0100-\\uffff]/g, (char) => {\n const code = char.charCodeAt(0).toString(16).padStart(4, '0');\n return `\\\\u${code}`;\n });\n}\n\n/**\n * Check if text contains characters beyond Latin-1 range\n *\n * PNG tEXt chunks support Latin-1 (ISO 8859-1) encoding (0x00-0xFF).\n * Characters beyond this range require iTXt chunks for UTF-8 support.\n *\n * @param text - Text to check\n * @returns True if text contains characters outside Latin-1 range (>= 0x100)\n */\nfunction hasNonLatin1(text: string): boolean {\n // biome-ignore lint/suspicious/noControlCharactersInRegex: checking for non-Latin-1 characters\n return /[^\\x00-\\xFF]/.test(text);\n}\n\n/**\n * Create PNG chunk with appropriate encoding strategy\n *\n * @param keyword - Chunk keyword\n * @param text - Chunk text (undefined returns empty array)\n * @param strategy - Encoding strategy to use\n * @returns Array of PNG text chunks (empty if text is undefined)\n */\nexport function createEncodedChunk(\n keyword: string,\n text: string | undefined,\n strategy: ChunkEncodingStrategy,\n): PngTextChunk[] {\n if (text === undefined) return [];\n\n switch (strategy) {\n case 'dynamic': {\n // Choose based on content: tEXt for Latin-1, iTXt for beyond\n const chunkType = hasNonLatin1(text) ? 'iTXt' : 'tEXt';\n return chunkType === 'iTXt'\n ? 
createITxtChunk(keyword, text)\n : createTextChunk(keyword, text);\n }\n\n case 'text-unicode-escape': {\n // tEXt with Unicode escaping (spec-compliant)\n const escaped = escapeUnicode(text);\n return createTextChunk(keyword, escaped);\n }\n\n case 'text-utf8-raw': {\n // tEXt with raw UTF-8 (non-compliant but compatible)\n return createTextChunk(keyword, text);\n }\n }\n}\n","/**\n * A1111-format metadata conversion utilities\n *\n * Handles conversion for sd-webui, forge, forge-neo, and civitai.\n * A1111 format stores metadata as plain text in:\n * - PNG: `parameters` tEXt/iTXt chunk (dynamic selection)\n * - JPEG/WebP: Exif UserComment\n */\n\nimport type { MetadataSegment, PngTextChunk } from '../types';\nimport { createEncodedChunk } from './chunk-encoding';\nimport { createTextChunk, findSegment } from './utils';\n\n/**\n * Convert A1111-format PNG chunks to JPEG/WebP segments\n *\n * @param chunks - PNG text chunks\n * @returns Metadata segments for JPEG/WebP\n */\nexport function convertA1111PngToSegments(\n chunks: PngTextChunk[],\n): MetadataSegment[] {\n // Find parameters chunk\n const parameters = chunks.find((c) => c.keyword === 'parameters');\n if (!parameters) {\n return [];\n }\n\n //Simply copy to exifUserComment\n const segments: MetadataSegment[] = [\n {\n source: { type: 'exifUserComment' },\n data: parameters.text,\n },\n ];\n\n const software = chunks.find((c) => c.keyword === 'Software');\n if (software) {\n segments.push({\n source: { type: 'exifSoftware' },\n data: software.text,\n });\n }\n\n const title = chunks.find((c) => c.keyword === 'Title');\n if (title) {\n segments.push({\n source: { type: 'exifDocumentName' },\n data: title.text,\n });\n }\n\n const description = chunks.find((c) => c.keyword === 'Description');\n if (description) {\n segments.push({\n source: { type: 'exifImageDescription' },\n data: description.text,\n });\n }\n\n const make = chunks.find((c) => c.keyword === 'Make');\n if (make) {\n segments.push({\n source: { type: 'exifMake' },\n data: make.text,\n });\n }\n\n return segments;\n}\n\n/**\n * Convert JPEG/WebP segments to A1111-format PNG chunks\n *\n * @param segments - Metadata segments from JPEG/WebP\n * @returns PNG text chunks\n */\nexport function convertA1111SegmentsToPng(\n segments: MetadataSegment[],\n): PngTextChunk[] {\n // Find exifUserComment segment\n const userComment = segments.find((s) => s.source.type === 'exifUserComment');\n if (!userComment) {\n return [];\n }\n\n // Use dynamic selection (tEXt for ASCII, iTXt for non-ASCII)\n const parametersChunks = createEncodedChunk(\n 'parameters',\n userComment.data,\n 'dynamic',\n );\n\n // Preserve other standard Exif tags if present\n const chunks: PngTextChunk[] = [...parametersChunks];\n\n const software = findSegment(segments, 'exifSoftware');\n if (software) {\n chunks.push(...createTextChunk('Software', software.data));\n }\n\n const title = findSegment(segments, 'exifDocumentName');\n if (title) {\n chunks.push(...createTextChunk('Title', title.data));\n }\n\n const description = findSegment(segments, 'exifImageDescription');\n if (description) {\n // A1111 usually puts description in UserComment parameters but if Exif has it, preserve it\n chunks.push(...createTextChunk('Description', description.data));\n }\n\n const make = findSegment(segments, 'exifMake');\n if (make) {\n chunks.push(...createTextChunk('Make', make.data));\n }\n\n return chunks;\n}\n","/**\n * Shared logic for JSON-based metadata converters\n *\n * Many formats (ComfyUI, InvokeAI) roughly 
follow a \"Key-Value\" pattern:\n * - PNG: Multiple chunks where Keyword=Key, Text=Value (Value might be JSON string)\n * - JPEG/WebP: A single UserComment containing a JSON object { Key: Value, ... }\n */\n\nimport type { MetadataSegment, PngTextChunk } from '../types';\nimport { parseJson } from '../utils/json';\nimport {\n type ChunkEncodingStrategy,\n createEncodedChunk,\n} from './chunk-encoding';\nimport { findSegment, stringify } from './utils';\n\n/**\n * Convert KV-style PNG chunks to a unified JSON object in a segment\n *\n * @param chunks - Raw PNG chunks\n * @returns Array containing a single UserComment segment with JSON data\n */\nexport function convertKvPngToSegments(\n chunks: PngTextChunk[],\n): MetadataSegment[] {\n const data: Record<string, unknown> = {};\n\n for (const chunk of chunks) {\n const parsed = parseJson<unknown>(chunk.text);\n if (parsed.ok) {\n data[chunk.keyword] = parsed.value;\n } else {\n data[chunk.keyword] = chunk.text;\n }\n }\n\n return [\n {\n source: { type: 'exifUserComment' },\n data: JSON.stringify(data),\n },\n ];\n}\n\n/**\n * Convert a unified JSON object in a segment back to KV-style PNG chunks\n *\n * @param segments - Metadata segments\n * @param encodingStrategy - Encoding strategy to use for chunks\n * @returns Array of PNG chunks\n */\nexport function convertKvSegmentsToPng(\n segments: MetadataSegment[],\n encodingStrategy: ChunkEncodingStrategy,\n): PngTextChunk[] {\n const userComment = findSegment(segments, 'exifUserComment');\n if (!userComment) {\n return [];\n }\n\n const parsed = parseJson<Record<string, unknown>>(userComment.data);\n if (!parsed.ok) {\n // If not JSON, we can't blindly map keys.\n // Return empty here, letting specific converters handle fallback if needed.\n return [];\n }\n\n // Map each key back to a chunk\n // Value is stringified if it's an object/array\n return Object.entries(parsed.value).flatMap(([keyword, value]) =>\n createEncodedChunk(keyword, stringify(value), encodingStrategy),\n );\n}\n","/**\n * ComfyUI metadata conversion utilities\n *\n * ComfyUI stores metadata as:\n * - PNG: `prompt` + `workflow` tEXt chunks (both JSON)\n * - JPEG/WebP: exifUserComment with {\"prompt\": {...}, \"workflow\": {...}} (saveimage-plus format)\n *\n * Also handles: tensorart, stability-matrix (same format)\n */\n\nimport type { MetadataSegment, PngTextChunk } from '../types';\nimport { convertKvPngToSegments, convertKvSegmentsToPng } from './base-json';\nimport { createEncodedChunk } from './chunk-encoding';\nimport { findSegment } from './utils';\n\n/**\n * Convert ComfyUI PNG chunks to JPEG/WebP segments\n *\n * Uses saveimage-plus format: stores chunk keywords as JSON keys.\n * For chunks that contain JSON strings (prompt, workflow), parse them\n * and store as objects to match saveimage-plus format.\n *\n * @param chunks - PNG text chunks\n * @returns Metadata segments for JPEG/WebP\n */\nexport function convertComfyUIPngToSegments(\n chunks: PngTextChunk[],\n): MetadataSegment[] {\n // Use generic KV converter\n return convertKvPngToSegments(chunks);\n}\n\n/**\n * Try save-image-extended format (exifImageDescription + exifMake)\n *\n * @returns PNG chunks if format matches, null otherwise\n */\nconst tryParseExtendedFormat = (\n segments: MetadataSegment[],\n): PngTextChunk[] | null => {\n const imageDescription = findSegment(segments, 'exifImageDescription');\n const make = findSegment(segments, 'exifMake');\n\n if (!imageDescription && !make) {\n return null;\n }\n\n return [\n 
...createEncodedChunk('prompt', make?.data, 'text-unicode-escape'),\n ...createEncodedChunk(\n 'workflow',\n imageDescription?.data,\n 'text-unicode-escape',\n ),\n ];\n};\n\n/**\n * Try saveimage-plus format (exifUserComment with JSON)\n *\n * @returns PNG chunks if format matches, null otherwise\n */\nconst tryParseSaveImagePlusFormat = (\n segments: MetadataSegment[],\n): PngTextChunk[] | null => {\n const chunks = convertKvSegmentsToPng(segments, 'text-unicode-escape');\n return chunks.length > 0 ? chunks : null;\n};\n\n/**\n * Convert JPEG/WebP segments to ComfyUI PNG chunks\n *\n * Supports:\n * - save-image-extended format: exifImageDescription (workflow) + exifMake (prompt)\n * - saveimage-plus format: exifUserComment with {\"prompt\": {...}, \"workflow\": {...}}\n *\n * @param segments - Metadata segments from JPEG/WebP\n * @returns PNG text chunks\n */\nexport function convertComfyUISegmentsToPng(\n segments: MetadataSegment[],\n): PngTextChunk[] {\n // Try each format in order of priority\n return (\n tryParseExtendedFormat(segments) ??\n tryParseSaveImagePlusFormat(segments) ??\n []\n );\n}\n","/**\n * Easy Diffusion metadata conversion utilities\n *\n * Easy Diffusion format stores metadata as JSON in various locations:\n * - PNG: Each field as separate chunks (negative_prompt, Negative Prompt, etc.)\n * - JPEG/WebP: JSON in exifUserComment\n */\n\nimport type { MetadataSegment, PngTextChunk } from '../types';\nimport { parseJson } from '../utils/json';\nimport { createEncodedChunk } from './chunk-encoding';\nimport { findSegment, stringify } from './utils';\n\n/**\n * Convert Easy Diffusion PNG chunks to JPEG/WebP segments\n *\n * Easy Diffusion PNG stores metadata as individual chunks.\n * We combine them into a JSON object for JPEG/WebP storage.\n *\n * @param chunks - PNG text chunks\n * @returns Metadata segments for JPEG/WebP\n */\nexport function convertEasyDiffusionPngToSegments(\n chunks: PngTextChunk[],\n): MetadataSegment[] {\n const json = Object.fromEntries(\n chunks.map((chunk) => [chunk.keyword, chunk.text]),\n );\n\n return [\n {\n source: { type: 'exifUserComment' },\n data: JSON.stringify(json),\n },\n ];\n}\n\n/**\n * Convert JPEG/WebP segments to Easy Diffusion PNG chunks\n *\n * @param segments - Metadata segments from JPEG/WebP\n * @returns PNG text chunks\n */\nexport function convertEasyDiffusionSegmentsToPng(\n segments: MetadataSegment[],\n): PngTextChunk[] {\n const userComment = findSegment(segments, 'exifUserComment');\n if (!userComment) {\n return [];\n }\n\n const parsed = parseJson<Record<string, unknown>>(userComment.data);\n if (!parsed.ok) {\n return [];\n }\n\n // Convert each key-value pair to a chunk with dynamic selection\n return Object.entries(parsed.value).flatMap(([keyword, value]) =>\n createEncodedChunk(keyword, stringify(value), 'dynamic'),\n );\n}\n","/**\n * InvokeAI metadata conversion utilities\n *\n * InvokeAI stores metadata as:\n * - PNG: `invokeai_metadata` + `invokeai_graph` iTXt/tEXt chunks (both JSON, dynamic selection)\n * - JPEG/WebP: Not officially supported by InvokeAI\n *\n * For conversion, we use a JSON format similar to ComfyUI saveimage-plus:\n * {\"invokeai_metadata\": {...}, \"invokeai_graph\": {...}}\n */\n\nimport type { MetadataSegment, PngTextChunk } from '../types';\nimport { convertKvPngToSegments, convertKvSegmentsToPng } from './base-json';\n\n/**\n * Convert InvokeAI PNG chunks to JPEG/WebP segments\n *\n * Parses JSON chunks and stores them as objects.\n *\n * @param chunks - PNG text chunks\n * 
@returns Metadata segments for JPEG/WebP\n */\nexport function convertInvokeAIPngToSegments(\n chunks: PngTextChunk[],\n): MetadataSegment[] {\n // Use generic KV converter\n return convertKvPngToSegments(chunks);\n}\n\n/**\n * Convert JPEG/WebP segments to InvokeAI PNG chunks\n *\n * @param segments - Metadata segments from JPEG/WebP\n * @returns PNG text chunks\n */\nexport function convertInvokeAISegmentsToPng(\n segments: MetadataSegment[],\n): PngTextChunk[] {\n // Use generic KV converter with dynamic encoding strategy\n return convertKvSegmentsToPng(segments, 'dynamic');\n}\n","/**\n * NovelAI metadata conversion utilities\n *\n * Converts NovelAI metadata between PNG chunks and JPEG/WebP segments.\n */\n\nimport type { MetadataSegment, PngTextChunk } from '../types';\nimport { parseJson } from '../utils/json';\nimport { createEncodedChunk } from './chunk-encoding';\nimport { createTextChunk, findSegment, stringify } from './utils';\n\n/** Fixed values for NovelAI PNG chunks */\nconst NOVELAI_SOFTWARE = 'NovelAI';\nconst NOVELAI_TITLE = 'NovelAI generated image';\n\n/**\n * Convert NovelAI PNG chunks to JPEG/WebP segments\n *\n * PNG structure:\n * - Title: \\\"NovelAI generated image\\\"\n * - Description: short prompt\n * - Software: \\\"NovelAI\\\"\n * - Source: version info\n * - Generation time: time\n * - Comment: full JSON parameters\n *\n * @param chunks - PNG text chunks\n * @returns Metadata segments for JPEG/WebP\n */\nexport function convertNovelaiPngToSegments(\n chunks: PngTextChunk[],\n): MetadataSegment[] {\n const data = buildUserCommentJson(chunks);\n const userCommentSegment: MetadataSegment = {\n source: { type: 'exifUserComment' },\n data: JSON.stringify(data),\n };\n\n // Build segments array declaratively\n const description = chunks.find((c) => c.keyword === 'Description');\n const descriptionSegment: MetadataSegment | undefined = description && {\n source: { type: 'exifImageDescription' },\n data: `\\0\\0\\0\\0${description.text}`,\n };\n\n const software = chunks.find((c) => c.keyword === 'Software');\n const softwareSegment: MetadataSegment | undefined = software && {\n source: { type: 'exifSoftware' },\n data: software.text,\n };\n\n const title = chunks.find((c) => c.keyword === 'Title');\n const titleSegment: MetadataSegment | undefined = title && {\n source: { type: 'exifDocumentName' },\n data: title.text,\n };\n\n return [\n userCommentSegment,\n descriptionSegment,\n softwareSegment,\n titleSegment,\n ].filter((segment): segment is MetadataSegment => Boolean(segment));\n}\n\n/**\n * Build UserComment JSON from PNG chunks in NovelAI's standard key order\n */\nfunction buildUserCommentJson(chunks: PngTextChunk[]): Record<string, string> {\n return NOVELAI_KEY_ORDER.map((key) => {\n const chunk = chunks.find((c) => c.keyword === key);\n return chunk ? 
{ [key]: chunk.text } : null;\n })\n .filter((entry): entry is Record<string, string> => entry !== null)\n .reduce(\n (acc, entry) => Object.assign(acc, entry),\n {} as Record<string, string>,\n );\n}\n\n/**\n * NovelAI standard key order for UserComment JSON\n */\nconst NOVELAI_KEY_ORDER = [\n 'Comment',\n 'Description',\n 'Generation time',\n 'Software',\n 'Source',\n 'Title',\n] as const;\n\n/**\n * Convert JPEG/WebP segments to NovelAI PNG chunks\n *\n * @param segments - Metadata segments from JPEG/WebP\n * @returns PNG text chunks\n */\nexport function convertNovelaiSegmentsToPng(\n segments: MetadataSegment[],\n): PngTextChunk[] {\n const userCommentSeg = findSegment(segments, 'exifUserComment');\n const descriptionSeg = findSegment(segments, 'exifImageDescription');\n const softwareSeg = findSegment(segments, 'exifSoftware');\n const titleSeg = findSegment(segments, 'exifDocumentName');\n\n return parseSegments(userCommentSeg, descriptionSeg, softwareSeg, titleSeg);\n}\n\n/**\n * Parse UserComment JSON and convert to PNG chunks\n */\nfunction parseSegments(\n userCommentSeg: MetadataSegment | undefined,\n descriptionSeg: MetadataSegment | undefined,\n softwareSeg: MetadataSegment | undefined,\n titleSeg: MetadataSegment | undefined,\n): PngTextChunk[] {\n if (!userCommentSeg || !descriptionSeg) {\n return [];\n }\n\n const parsed = parseJson<Record<string, unknown>>(userCommentSeg.data);\n if (!parsed.ok) {\n // If parsing fails, treat the whole thing as Comment\n return createTextChunk('Comment', userCommentSeg.data);\n }\n\n const jsonData = parsed.value;\n\n // Extract Description text (prefer exifImageDescription over corrupted JSON)\n const descriptionText = extractDescriptionText(\n descriptionSeg,\n stringify(jsonData.Description),\n );\n\n return [\n // Title (required, use default if missing)\n createTextChunk(\n 'Title',\n titleSeg?.data ?? stringify(jsonData.Title) ?? NOVELAI_TITLE,\n ),\n // Description (optional, prefer exifImageDescription over JSON)\n createEncodedChunk('Description', descriptionText, 'dynamic'),\n // Software (required, use default if missing)\n createTextChunk(\n 'Software',\n softwareSeg?.data ?? stringify(jsonData.Software) ?? NOVELAI_SOFTWARE,\n ),\n // Source (optional)\n createTextChunk('Source', stringify(jsonData.Source)),\n // Generation time (optional)\n createTextChunk('Generation time', stringify(jsonData['Generation time'])),\n // Comment (optional)\n createTextChunk('Comment', stringify(jsonData.Comment)),\n ].flat();\n}\n\n/**\n * Extract Description text from exifImageDescription or UserComment JSON\n *\n * NovelAI WebP has corrupted UTF-8 in UserComment JSON Description,\n * so we prefer the clean exifImageDescription segment when available.\n */\nfunction extractDescriptionText(\n descriptionSeg: MetadataSegment | undefined,\n jsonDescription: string | undefined,\n): string | undefined {\n // First, try exifImageDescription segment (strip 4-byte null prefix)\n if (descriptionSeg?.data) {\n const data = descriptionSeg.data;\n // NovelAI WebP format has 4-byte null prefix before ImageDescription\n return data.startsWith('\\0\\0\\0\\0') ? data.slice(4) : data;\n }\n\n // Fallback: use JSON value (for non-NovelAI WebP sources)\n if (jsonDescription) {\n // Strip 4-byte null prefix if present\n return jsonDescription.startsWith('\\0\\0\\0\\0')\n ? 
jsonDescription.slice(4)\n : jsonDescription;\n }\n\n return undefined;\n}\n","/**\n * Simple chunk converter utilities\n *\n * Factory functions for converters that simply copy a single chunk keyword\n * between PNG and JPEG/WebP formats, with encoding based on tool strategy.\n */\n\nimport type { MetadataSegment, PngTextChunk } from '../types';\nimport {\n type ChunkEncodingStrategy,\n createEncodedChunk,\n} from './chunk-encoding';\n\n/**\n * Create a PNG-to-segments converter that extracts a single chunk by keyword\n *\n * @param keyword - The PNG chunk keyword to extract\n * @returns Converter function\n */\nexport function createPngToSegments(\n keyword: string,\n): (chunks: PngTextChunk[]) => MetadataSegment[] {\n return (chunks) => {\n const chunk = chunks.find((c) => c.keyword === keyword);\n return !chunk\n ? []\n : [{ source: { type: 'exifUserComment' }, data: chunk.text }];\n };\n}\n\n/**\n * Create a segments-to-PNG converter that writes to a single chunk keyword\n *\n * @param keyword - The PNG chunk keyword to write\n * @param encodingStrategy - Encoding strategy to use\n * @returns Converter function\n */\nexport function createSegmentsToPng(\n keyword: string,\n encodingStrategy: ChunkEncodingStrategy,\n): (segments: MetadataSegment[]) => PngTextChunk[] {\n return (segments) => {\n const userComment = segments.find(\n (s) => s.source.type === 'exifUserComment',\n );\n if (!userComment) return [];\n\n return createEncodedChunk(keyword, userComment.data, encodingStrategy);\n };\n}\n","/**\n * SwarmUI metadata conversion utilities\n *\n * SwarmUI stores metadata as:\n * - PNG: `parameters` chunk containing sui_image_params JSON\n * - JPEG/WebP: exifUserComment contains sui_image_params JSON directly\n *\n * The converter extracts/wraps the content appropriately for each format.\n */\n\nimport type { MetadataSegment, PngTextChunk } from '../types';\nimport { parseJson } from '../utils/json';\nimport { createEncodedChunk } from './chunk-encoding';\nimport { findSegment } from './utils';\n\n/**\n * Convert SwarmUI PNG chunks to JPEG/WebP segments\n *\n * Extracts the 'parameters' chunk and optionally preserves 'prompt' chunk (ComfyUI workflow).\n * - parameters chunk → exifUserComment (matches native SwarmUI format)\n * - prompt chunk → exifMake (preserves ComfyUI node graph for round-trip)\n *\n * @param chunks - PNG text chunks\n * @returns Metadata segments for JPEG/WebP\n */\nexport function convertSwarmUIPngToSegments(\n chunks: PngTextChunk[],\n): MetadataSegment[] {\n // Find 'parameters' chunk\n const parametersChunk = chunks.find((c) => c.keyword === 'parameters');\n if (!parametersChunk) {\n return [];\n }\n\n // Parse and return the JSON directly (no wrapping in parameters key)\n const parsed = parseJson<unknown>(parametersChunk.text);\n const segments: MetadataSegment[] = [\n {\n source: { type: 'exifUserComment' },\n data: parsed.ok ? 
JSON.stringify(parsed.value) : parametersChunk.text,\n },\n ];\n\n // Preserve node graph if present (prompt chunk contains ComfyUI node graph)\n const promptChunk = chunks.find((c) => c.keyword === 'prompt');\n if (promptChunk) {\n segments.push({\n source: { type: 'exifMake' },\n data: promptChunk.text,\n });\n }\n\n return segments;\n}\n\n/**\n * Convert JPEG/WebP segments to SwarmUI PNG chunks\n *\n * Handles both native SwarmUI format and extended format with node graph:\n * - exifUserComment → parameters chunk (always present)\n * - exifMake → prompt chunk (optional, contains ComfyUI node graph)\n *\n * Chunk order matches original SwarmUI format: [prompt, parameters]\n *\n * @param segments - Metadata segments from JPEG/WebP\n * @returns PNG text chunks\n */\nexport function convertSwarmUISegmentsToPng(\n segments: MetadataSegment[],\n): PngTextChunk[] {\n const userComment = findSegment(segments, 'exifUserComment');\n const make = findSegment(segments, 'exifMake');\n\n const chunks: PngTextChunk[] = [\n // Restore node graph first if present (extended format)\n createEncodedChunk('prompt', make?.data, 'text-unicode-escape'),\n // Add parameters chunk second (always present)\n createEncodedChunk('parameters', userComment?.data, 'text-unicode-escape'),\n ].flat();\n\n return chunks;\n}\n","/**\n * Metadata conversion utilities\n *\n * Provides functions to convert metadata between different image formats.\n */\n\nimport type {\n ConversionResult,\n ConversionTargetFormat,\n ParseResult,\n RawMetadata,\n} from '../types';\nimport { Result } from '../types';\nimport { convertA1111PngToSegments, convertA1111SegmentsToPng } from './a1111';\nimport {\n convertComfyUIPngToSegments,\n convertComfyUISegmentsToPng,\n} from './comfyui';\nimport {\n convertEasyDiffusionPngToSegments,\n convertEasyDiffusionSegmentsToPng,\n} from './easydiffusion';\nimport {\n convertInvokeAIPngToSegments,\n convertInvokeAISegmentsToPng,\n} from './invokeai';\nimport {\n convertNovelaiPngToSegments,\n convertNovelaiSegmentsToPng,\n} from './novelai';\nimport { createPngToSegments, createSegmentsToPng } from './simple-chunk';\nimport {\n convertSwarmUIPngToSegments,\n convertSwarmUISegmentsToPng,\n} from './swarmui';\n\n/**\n * Convert metadata from one format to another\n *\n * Takes a ParseResult and converts the raw metadata to the target format.\n * Conversion strategy is determined by the detected software.\n *\n * @param parseResult - Result from parsePng, parseJpeg, or parseWebp\n * @param targetFormat - Target format ('png', 'jpeg', or 'webp')\n * @returns Converted RawMetadata or error\n *\n * @example\n * ```typescript\n * const pngResult = parsePng(pngData);\n * const converted = convertMetadata(pngResult, 'webp');\n * if (converted.ok) {\n * const webpWithMetadata = writeWebpMetadata(webpData, converted.value.segments);\n * }\n * ```\n */\nexport function convertMetadata(\n parseResult: ParseResult,\n targetFormat: ConversionTargetFormat,\n): ConversionResult {\n // Handle non-success statuses\n if (parseResult.status === 'empty') {\n return Result.error({ type: 'missingRawData' });\n }\n\n if (parseResult.status === 'invalid') {\n return Result.error({\n type: 'invalidParseResult',\n status: parseResult.status,\n });\n }\n\n // Handle unrecognized - should not reach here in normal flow\n // since write() handles unrecognized separately\n if (parseResult.status === 'unrecognized') {\n return Result.error({\n type: 'unsupportedSoftware',\n software: 'unknown',\n });\n }\n\n const raw = 
parseResult.raw;\n\n // If source and target are the same format, return as-is\n if (\n (raw.format === 'png' && targetFormat === 'png') ||\n (raw.format === 'jpeg' && targetFormat === 'jpeg') ||\n (raw.format === 'webp' && targetFormat === 'webp')\n ) {\n return Result.ok(raw);\n }\n\n const software = parseResult.metadata.software;\n\n // Get converter for detected software\n const converter = softwareConverters[software];\n if (!converter) {\n // This should never happen if software is a valid GenerationSoftware\n return Result.error({\n type: 'unsupportedSoftware',\n software,\n });\n }\n\n return converter(raw, targetFormat);\n}\n\n// Type for converter function\ntype ConverterFn = (\n raw: RawMetadata,\n targetFormat: ConversionTargetFormat,\n) => ConversionResult;\n\n// Type for PNG↔segment conversion functions\ntype PngToSegmentsFn = (\n chunks: import('../types').PngTextChunk[],\n) => import('../types').MetadataSegment[];\ntype SegmentsToPngFn = (\n segments: import('../types').MetadataSegment[],\n) => import('../types').PngTextChunk[];\n\n/**\n * Factory function to create format converters\n *\n * All converters follow the same pattern:\n * - PNG → JPEG/WebP: convert chunks to segments\n * - JPEG/WebP → PNG: convert segments to chunks\n * - Same format: return as-is\n */\nfunction createFormatConverter(\n pngToSegments: PngToSegmentsFn,\n segmentsToPng: SegmentsToPngFn,\n): ConverterFn {\n return (raw, targetFormat) => {\n if (raw.format === 'png') {\n // PNG → same format: return as-is\n if (targetFormat === 'png') {\n return Result.ok(raw);\n }\n // PNG → JPEG/WebP\n const segments = pngToSegments(raw.chunks);\n return Result.ok({ format: targetFormat, segments });\n }\n\n // JPEG/WebP → JPEG/WebP: just copy segments\n if (targetFormat === 'jpeg' || targetFormat === 'webp') {\n return Result.ok({ format: targetFormat, segments: raw.segments });\n }\n\n // JPEG/WebP → PNG\n const chunks = segmentsToPng(raw.segments);\n return Result.ok({ format: 'png', chunks });\n };\n}\n\n// Create converters using factory\nconst convertNovelai = createFormatConverter(\n convertNovelaiPngToSegments,\n convertNovelaiSegmentsToPng,\n);\n\nconst convertA1111 = createFormatConverter(\n convertA1111PngToSegments,\n convertA1111SegmentsToPng,\n);\n\nconst convertComfyUI = createFormatConverter(\n convertComfyUIPngToSegments,\n convertComfyUISegmentsToPng,\n);\n\nconst convertEasyDiffusion = createFormatConverter(\n convertEasyDiffusionPngToSegments,\n convertEasyDiffusionSegmentsToPng,\n);\n\nconst convertFooocus = createFormatConverter(\n createPngToSegments('Comment'),\n createSegmentsToPng('Comment', 'text-unicode-escape'),\n);\n\nconst convertRuinedFooocus = createFormatConverter(\n createPngToSegments('parameters'),\n createSegmentsToPng('parameters', 'text-unicode-escape'),\n);\n\nconst convertSwarmUI = createFormatConverter(\n convertSwarmUIPngToSegments,\n convertSwarmUISegmentsToPng,\n);\n\nconst convertInvokeAI = createFormatConverter(\n convertInvokeAIPngToSegments,\n convertInvokeAISegmentsToPng,\n);\n\nconst convertHfSpace = createFormatConverter(\n createPngToSegments('parameters'),\n createSegmentsToPng('parameters', 'text-unicode-escape'),\n);\n\n/**\n * Lookup table: software name → converter function\n */\nconst softwareConverters = {\n // NovelAI\n novelai: convertNovelai,\n // A1111-format (sd-webui, forge, forge-neo, civitai, sd-next)\n 'sd-webui': convertA1111,\n 'sd-next': convertA1111,\n forge: convertA1111,\n 'forge-neo': convertA1111,\n civitai: convertA1111,\n // 
ComfyUI-format (comfyui, tensorart, stability-matrix)\n comfyui: convertComfyUI,\n tensorart: convertComfyUI,\n 'stability-matrix': convertComfyUI,\n // Easy Diffusion\n easydiffusion: convertEasyDiffusion,\n // Fooocus variants\n fooocus: convertFooocus,\n 'ruined-fooocus': convertRuinedFooocus,\n // SwarmUI\n swarmui: convertSwarmUI,\n // InvokeAI\n invokeai: convertInvokeAI,\n // HuggingFace Space\n 'hf-space': convertHfSpace,\n} as const;\n","/**\n * Exif writing utilities\n *\n * Functions for building Exif/TIFF structures from metadata segments.\n */\n\nimport type { MetadataSegment } from '../types';\nimport { writeUint16, writeUint32 } from '../utils/binary';\nimport {\n DOCUMENT_NAME_TAG,\n EXIF_IFD_POINTER_TAG,\n IMAGE_DESCRIPTION_TAG,\n MAKE_TAG,\n SOFTWARE_TAG,\n USER_COMMENT_TAG,\n} from '../utils/exif-constants';\n\n/**\n * Build Exif TIFF data from MetadataSegments\n *\n * Creates a complete TIFF structure with IFD0, Exif IFD, and all tag data.\n * Uses little-endian (Intel) byte order for maximum compatibility.\n *\n * @param segments - Metadata segments to encode\n * @returns TIFF data (starts with \"II\" byte order marker)\n */\nexport function buildExifTiffData(segments: MetadataSegment[]): Uint8Array {\n // Separate segments by destination IFD\n const ifd0Segments = segments.filter(\n (s) =>\n s.source.type === 'exifImageDescription' ||\n s.source.type === 'exifMake' ||\n s.source.type === 'exifSoftware' ||\n s.source.type === 'exifDocumentName',\n );\n const exifIfdSegments = segments.filter(\n (s) => s.source.type === 'exifUserComment',\n );\n\n // No Exif-type segments\n if (ifd0Segments.length === 0 && exifIfdSegments.length === 0) {\n return new Uint8Array(0);\n }\n\n const isLittleEndian = true;\n\n // Build tag data for each segment\n const ifd0Tags: Array<{ tag: number; type: number; data: Uint8Array }> = [];\n const exifTags: Array<{ tag: number; type: number; data: Uint8Array }> = [];\n\n for (const seg of ifd0Segments) {\n if (seg.source.type === 'exifImageDescription') {\n const data = encodeAsciiTag(seg.data, seg.source.prefix);\n ifd0Tags.push({ tag: IMAGE_DESCRIPTION_TAG, type: 2, data });\n } else if (seg.source.type === 'exifMake') {\n const data = encodeAsciiTag(seg.data, seg.source.prefix);\n ifd0Tags.push({ tag: MAKE_TAG, type: 2, data });\n } else if (seg.source.type === 'exifSoftware') {\n const data = encodeAsciiTag(seg.data);\n ifd0Tags.push({ tag: SOFTWARE_TAG, type: 2, data });\n } else if (seg.source.type === 'exifDocumentName') {\n const data = encodeAsciiTag(seg.data);\n ifd0Tags.push({ tag: DOCUMENT_NAME_TAG, type: 2, data });\n }\n }\n\n for (const seg of exifIfdSegments) {\n if (seg.source.type === 'exifUserComment') {\n const data = encodeUserComment(seg.data);\n exifTags.push({ tag: USER_COMMENT_TAG, type: 7, data });\n }\n }\n\n const hasExifIfd = exifTags.length > 0;\n if (hasExifIfd) {\n ifd0Tags.push({\n tag: EXIF_IFD_POINTER_TAG,\n type: 4,\n data: new Uint8Array(4),\n });\n }\n\n // Sort tags by tag number (required by TIFF spec)\n ifd0Tags.sort((a, b) => a.tag - b.tag);\n exifTags.sort((a, b) => a.tag - b.tag);\n\n // Calculate sizes and offsets\n const headerSize = 8;\n const ifd0EntryCount = ifd0Tags.length;\n const ifd0Size = 2 + 12 * ifd0EntryCount + 4;\n const exifEntryCount = exifTags.length;\n const exifIfdSize = hasExifIfd ? 
2 + 12 * exifEntryCount + 4 : 0;\n\n const ifd0Offset = headerSize;\n const exifIfdOffset = ifd0Offset + ifd0Size;\n let dataOffset = exifIfdOffset + exifIfdSize;\n\n // Update Exif IFD pointer in IFD0\n if (hasExifIfd) {\n const exifPtrTag = ifd0Tags.find((t) => t.tag === EXIF_IFD_POINTER_TAG);\n if (exifPtrTag) {\n writeUint32(exifPtrTag.data, 0, exifIfdOffset, isLittleEndian);\n }\n }\n\n // Assign data offsets for each tag\n const tagDataOffsets = new Map<\n { tag: number; type: number; data: Uint8Array },\n number\n >();\n\n for (const tag of [...ifd0Tags, ...exifTags]) {\n if (tag.data.length > 4) {\n tagDataOffsets.set(tag, dataOffset);\n dataOffset += tag.data.length;\n if (tag.data.length % 2 !== 0) {\n dataOffset += 1;\n }\n }\n }\n\n // Build result\n const totalSize = dataOffset;\n const result = new Uint8Array(totalSize);\n\n // Write TIFF header\n result[0] = 0x49; // I\n result[1] = 0x49; // I (little-endian)\n writeUint16(result, 2, 42, isLittleEndian);\n writeUint32(result, 4, ifd0Offset, isLittleEndian);\n\n // Write IFD0\n let offset = ifd0Offset;\n writeUint16(result, offset, ifd0EntryCount, isLittleEndian);\n offset += 2;\n\n for (const tag of ifd0Tags) {\n writeIfdEntry(result, offset, tag, tagDataOffsets.get(tag), isLittleEndian);\n offset += 12;\n }\n\n writeUint32(result, offset, 0, isLittleEndian);\n offset += 4;\n\n // Write Exif IFD\n if (hasExifIfd) {\n writeUint16(result, offset, exifEntryCount, isLittleEndian);\n offset += 2;\n\n for (const tag of exifTags) {\n writeIfdEntry(\n result,\n offset,\n tag,\n tagDataOffsets.get(tag),\n isLittleEndian,\n );\n offset += 12;\n }\n\n writeUint32(result, offset, 0, isLittleEndian);\n }\n\n // Write tag data values\n for (const [tag, dataOff] of tagDataOffsets) {\n result.set(tag.data, dataOff);\n }\n\n return result;\n}\n\n/**\n * Write an IFD entry\n */\nfunction writeIfdEntry(\n data: Uint8Array,\n offset: number,\n tag: { tag: number; type: number; data: Uint8Array },\n dataOffset: number | undefined,\n isLittleEndian: boolean,\n): void {\n writeUint16(data, offset, tag.tag, isLittleEndian);\n writeUint16(data, offset + 2, tag.type, isLittleEndian);\n writeUint32(data, offset + 4, tag.data.length, isLittleEndian);\n\n if (tag.data.length <= 4) {\n data.set(tag.data, offset + 8);\n } else {\n writeUint32(data, offset + 8, dataOffset ?? 0, isLittleEndian);\n }\n}\n\n/**\n * Encode string as UserComment with UTF-16LE encoding\n *\n * Uses UNICODE prefix followed by UTF-16LE encoded text.\n *\n * @param text - Text to encode\n * @returns Encoded UserComment data (8-byte prefix + UTF-16LE text)\n */\nfunction encodeUserComment(text: string): Uint8Array {\n const utf16Data: number[] = [];\n for (let i = 0; i < text.length; i++) {\n const code = text.charCodeAt(i);\n utf16Data.push(code & 0xff);\n utf16Data.push((code >> 8) & 0xff);\n }\n\n const result = new Uint8Array(8 + utf16Data.length);\n\n // UNICODE encoding prefix\n result[0] = 0x55; // U\n result[1] = 0x4e; // N\n result[2] = 0x49; // I\n result[3] = 0x43; // C\n result[4] = 0x4f; // O\n result[5] = 0x44; // D\n result[6] = 0x45; // E\n result[7] = 0x00; // NULL\n\n result.set(new Uint8Array(utf16Data), 8);\n return result;\n}\n\n/**\n * Encode ASCII tag data with optional prefix\n *\n * @param text - Text content\n * @param prefix - Optional prefix (e.g., \"Workflow\")\n * @returns Null-terminated ASCII bytes\n */\nfunction encodeAsciiTag(text: string, prefix?: string): Uint8Array {\n const fullText = prefix ? 
`${prefix}: ${text}` : text;\n const textBytes = new TextEncoder().encode(fullText);\n const result = new Uint8Array(textBytes.length + 1);\n result.set(textBytes, 0);\n result[textBytes.length] = 0;\n return result;\n}\n","import type { JpegWriteResult, MetadataSegment } from '../types';\nimport { Result } from '../types';\nimport { buildExifTiffData } from './exif';\n\nimport { isJpeg } from '../utils/binary';\n\n/** APP1 marker */\nconst APP1_MARKER = 0xe1;\n\n/** COM (Comment) marker */\nconst COM_MARKER = 0xfe;\n\n/** SOS (Start of Scan) marker */\nconst SOS_MARKER = 0xda;\n\n/** EOI (End of Image) marker */\nconst EOI_MARKER = 0xd9;\n\n/** Exif header: \"Exif\\0\\0\" */\nconst EXIF_HEADER = new Uint8Array([0x45, 0x78, 0x69, 0x66, 0x00, 0x00]);\n\n/**\n * Write JPEG metadata to binary data\n *\n * Replaces existing metadata segments with the provided segments.\n * Each segment is written to its original location based on source type:\n * - jpegCom -> COM segment (before SOS)\n * - exifUserComment/exifImageDescription/exifMake -> APP1 Exif segment (after SOI)\n *\n * @param data - Original JPEG file data as Uint8Array\n * @param segments - Metadata segments to embed\n * @returns Result containing new JPEG data with embedded metadata\n */\nexport function writeJpegMetadata(\n data: Uint8Array,\n segments: MetadataSegment[],\n): JpegWriteResult {\n // Validate JPEG signature\n if (!isJpeg(data)) {\n return Result.error({ type: 'invalidSignature' });\n }\n\n // Separate segments by destination\n const comSegments = segments.filter((s) => s.source.type === 'jpegCom');\n const exifSegments = segments.filter(\n (s) =>\n s.source.type === 'exifUserComment' ||\n s.source.type === 'exifImageDescription' ||\n s.source.type === 'exifMake' ||\n s.source.type === 'exifSoftware' ||\n s.source.type === 'exifDocumentName',\n );\n\n // Collect non-metadata segments from original JPEG\n const collectResult = collectNonMetadataSegments(data);\n if (!collectResult.ok) {\n return collectResult;\n }\n\n const { beforeSos, scanData } = collectResult.value;\n\n // Build new APP1 Exif segment\n const app1Segment =\n exifSegments.length > 0 ? 
buildApp1Segment(exifSegments) : null;\n\n // Build new COM segments\n const comSegmentData = comSegments.map((s) => buildComSegment(s.data));\n\n // Calculate total size\n let totalSize = 2; // SOI\n if (app1Segment) {\n totalSize += app1Segment.length;\n }\n for (const seg of beforeSos) {\n totalSize += seg.length;\n }\n for (const com of comSegmentData) {\n totalSize += com.length;\n }\n totalSize += scanData.length;\n\n // Build output\n const output = new Uint8Array(totalSize);\n let offset = 0;\n\n // Write SOI\n output[offset++] = 0xff;\n output[offset++] = 0xd8;\n\n // Write APP1 Exif (immediately after SOI)\n if (app1Segment) {\n output.set(app1Segment, offset);\n offset += app1Segment.length;\n }\n\n // Write original non-metadata segments\n for (const seg of beforeSos) {\n output.set(seg, offset);\n offset += seg.length;\n }\n\n // Write COM segments (before SOS)\n for (const com of comSegmentData) {\n output.set(com, offset);\n offset += com.length;\n }\n\n // Write scan data (SOS to EOI)\n output.set(scanData, offset);\n\n return Result.ok(output);\n}\n\n/**\n * Collect non-metadata segments from JPEG\n *\n * Returns segments that are not APP1 Exif or COM, plus the scan data (SOS to EOI)\n */\nfunction collectNonMetadataSegments(\n data: Uint8Array,\n): Result<\n { beforeSos: Uint8Array[]; scanData: Uint8Array },\n { type: 'corruptedStructure'; message: string }\n> {\n const beforeSos: Uint8Array[] = [];\n let offset = 2; // Skip SOI\n\n while (offset < data.length - 1) {\n // Check for marker\n if (data[offset] !== 0xff) {\n return Result.error({\n type: 'corruptedStructure',\n message: `Expected marker at offset ${offset}`,\n });\n }\n\n // Skip padding bytes\n while (data[offset] === 0xff && offset < data.length - 1) {\n offset++;\n }\n\n const marker = data[offset];\n offset++;\n\n // Check for SOS - everything after this is scan data\n if (marker === SOS_MARKER) {\n // Include SOS marker in scan data\n const scanData = data.slice(offset - 2);\n return Result.ok({ beforeSos, scanData });\n }\n\n // Check for EOI (shouldn't happen before SOS but handle it)\n if (marker === EOI_MARKER) {\n return Result.ok({ beforeSos, scanData: new Uint8Array([0xff, 0xd9]) });\n }\n\n // Get segment length (big-endian, includes length bytes)\n if (offset + 2 > data.length) {\n return Result.error({\n type: 'corruptedStructure',\n message: 'Unexpected end of file',\n });\n }\n\n const length = ((data[offset] ?? 0) << 8) | (data[offset + 1] ?? 
0);\n const segmentStart = offset - 2; // Include marker\n const segmentEnd = offset + length;\n\n if (segmentEnd > data.length) {\n return Result.error({\n type: 'corruptedStructure',\n message: 'Segment extends beyond file',\n });\n }\n\n // Check if this is a metadata segment we want to strip\n const isExifApp1 =\n marker === APP1_MARKER &&\n offset + 2 + 6 <= data.length &&\n data[offset + 2] === 0x45 && // E\n data[offset + 3] === 0x78 && // x\n data[offset + 4] === 0x69 && // i\n data[offset + 5] === 0x66 && // f\n data[offset + 6] === 0x00 && // NULL\n data[offset + 7] === 0x00; // NULL\n\n const isCom = marker === COM_MARKER;\n\n // Keep non-metadata segments\n if (!isExifApp1 && !isCom) {\n beforeSos.push(data.slice(segmentStart, segmentEnd));\n }\n\n offset = segmentEnd;\n }\n\n // If we reach here without finding SOS, the JPEG is malformed\n return Result.error({\n type: 'corruptedStructure',\n message: 'No SOS marker found',\n });\n}\n\n/**\n * Build APP1 Exif segment from metadata segments\n */\nfunction buildApp1Segment(segments: MetadataSegment[]): Uint8Array {\n const tiffData = buildExifTiffData(segments);\n\n if (tiffData.length === 0) {\n return new Uint8Array(0);\n }\n\n // APP1 segment: marker (2) + length (2) + Exif header (6) + TIFF data\n const segmentLength = 2 + EXIF_HEADER.length + tiffData.length;\n const segment = new Uint8Array(2 + segmentLength);\n\n segment[0] = 0xff;\n segment[1] = APP1_MARKER;\n segment[2] = (segmentLength >> 8) & 0xff;\n segment[3] = segmentLength & 0xff;\n segment.set(EXIF_HEADER, 4);\n segment.set(tiffData, 4 + EXIF_HEADER.length);\n\n return segment;\n}\n\n/**\n * Build COM segment from text\n */\nfunction buildComSegment(text: string): Uint8Array {\n const textBytes = new TextEncoder().encode(text);\n const segmentLength = 2 + textBytes.length; // length field includes itself\n\n const segment = new Uint8Array(2 + segmentLength);\n segment[0] = 0xff;\n segment[1] = COM_MARKER;\n segment[2] = (segmentLength >> 8) & 0xff;\n segment[3] = segmentLength & 0xff;\n segment.set(textBytes, 4);\n\n return segment;\n}\n","import type {\n ITXtChunk,\n PngTextChunk,\n PngWriteResult,\n TExtChunk,\n} from '../types';\nimport { Result } from '../types';\nimport { readChunkType, readUint32BE, writeUint32BE } from '../utils/binary';\n\nimport { isPng } from '../utils/binary';\n\n/** PNG file signature (magic bytes) */\nconst PNG_SIGNATURE = new Uint8Array([137, 80, 78, 71, 13, 10, 26, 10]);\n\n/**\n * Write PNG metadata to binary data\n *\n * Replaces all existing tEXt and iTXt chunks with the provided chunks.\n * Chunks are inserted immediately after the IHDR chunk (PNG spec recommended).\n *\n * @param data - Original PNG file data as Uint8Array\n * @param chunks - Text chunks to embed\n * @returns Result containing new PNG data with embedded metadata\n */\nexport function writePngMetadata(\n data: Uint8Array,\n chunks: PngTextChunk[],\n): PngWriteResult {\n // Validate PNG signature\n if (!isPng(data)) {\n return Result.error({ type: 'invalidSignature' });\n }\n\n // Find IHDR chunk end position\n const ihdrEnd = findIhdrChunkEnd(data);\n if (ihdrEnd === -1) {\n return Result.error({ type: 'noIhdrChunk' });\n }\n\n // Collect non-text chunks from original data\n const originalChunks = collectNonTextChunks(data);\n\n // Serialize new text chunks\n const serializedTextChunks = chunks.map((chunk) =>\n chunk.type === 'tEXt'\n ? 
serializeTExtChunk(chunk)\n : serializeITXtChunk(chunk),\n );\n\n // Calculate total output size\n const totalSize =\n PNG_SIGNATURE.length +\n originalChunks.ihdr.length +\n serializedTextChunks.reduce((sum, chunk) => sum + chunk.length, 0) +\n originalChunks.others.reduce((sum, chunk) => sum + chunk.length, 0);\n\n // Build output\n const output = new Uint8Array(totalSize);\n let offset = 0;\n\n // Write signature\n output.set(PNG_SIGNATURE, offset);\n offset += PNG_SIGNATURE.length;\n\n // Write IHDR\n output.set(originalChunks.ihdr, offset);\n offset += originalChunks.ihdr.length;\n\n // Write text chunks (immediately after IHDR)\n for (const chunk of serializedTextChunks) {\n output.set(chunk, offset);\n offset += chunk.length;\n }\n\n // Write other chunks\n for (const chunk of originalChunks.others) {\n output.set(chunk, offset);\n offset += chunk.length;\n }\n\n return Result.ok(output);\n}\n\n/**\n * Find the end position of IHDR chunk (including CRC)\n * @returns End offset or -1 if not found\n */\nfunction findIhdrChunkEnd(data: Uint8Array): number {\n const offset = PNG_SIGNATURE.length;\n\n if (offset + 8 > data.length) {\n return -1;\n }\n\n const length = readUint32BE(data, offset);\n const chunkType = readChunkType(data, offset + 4);\n\n if (chunkType !== 'IHDR') {\n return -1;\n }\n\n // Return end position: length(4) + type(4) + data(length) + crc(4)\n return offset + 4 + 4 + length + 4;\n}\n\n/**\n * Collect chunks from PNG, separating IHDR and non-text chunks\n */\nfunction collectNonTextChunks(data: Uint8Array): {\n ihdr: Uint8Array;\n others: Uint8Array[];\n} {\n const others: Uint8Array[] = [];\n let offset = PNG_SIGNATURE.length;\n let ihdr: Uint8Array = new Uint8Array(0);\n\n while (offset < data.length) {\n const chunkStart = offset;\n\n // Read chunk length\n if (offset + 4 > data.length) break;\n const length = readUint32BE(data, offset);\n offset += 4;\n\n // Read chunk type\n if (offset + 4 > data.length) break;\n const chunkType = readChunkType(data, offset);\n offset += 4;\n\n // Skip chunk data\n offset += length;\n\n // Skip CRC\n offset += 4;\n\n const chunkEnd = offset;\n const chunkData = data.slice(chunkStart, chunkEnd);\n\n if (chunkType === 'IHDR') {\n ihdr = chunkData;\n } else if (chunkType !== 'tEXt' && chunkType !== 'iTXt') {\n others.push(chunkData);\n }\n\n if (chunkType === 'IEND') {\n break;\n }\n }\n\n return { ihdr, others };\n}\n\n/**\n * Serialize a tEXt chunk to binary\n *\n * Per PNG spec, tEXt uses Latin-1 encoding. However, to maintain round-trip\n * compatibility with tools that incorrectly write UTF-8 (e.g., TensorArt),\n * we encode the text as UTF-8 bytes. 
This allows non-ASCII characters to\n * survive the read-write cycle.\n */\nfunction serializeTExtChunk(chunk: TExtChunk): Uint8Array {\n // Encode keyword as Latin-1 (keywords are ASCII-safe)\n const keyword = latin1Encode(chunk.keyword);\n // Encode text as UTF-8 (for round-trip compatibility with non-compliant tools)\n const text = utf8Encode(chunk.text);\n\n // Data: keyword + null + text\n const chunkData = new Uint8Array(keyword.length + 1 + text.length);\n chunkData.set(keyword, 0);\n chunkData[keyword.length] = 0; // null separator\n chunkData.set(text, keyword.length + 1);\n\n return buildChunk('tEXt', chunkData);\n}\n\n/**\n * Serialize an iTXt chunk to binary\n */\nfunction serializeITXtChunk(chunk: ITXtChunk): Uint8Array {\n // Encode strings\n const keyword = utf8Encode(chunk.keyword);\n const languageTag = utf8Encode(chunk.languageTag);\n const translatedKeyword = utf8Encode(chunk.translatedKeyword);\n const text = utf8Encode(chunk.text);\n\n // Calculate data size\n const dataSize =\n keyword.length +\n 1 + // null\n 1 + // compression flag\n 1 + // compression method\n languageTag.length +\n 1 + // null\n translatedKeyword.length +\n 1 + // null\n text.length;\n\n const chunkData = new Uint8Array(dataSize);\n let offset = 0;\n\n // Write keyword\n chunkData.set(keyword, offset);\n offset += keyword.length;\n chunkData[offset++] = 0; // null\n\n // Write compression flag and method\n chunkData[offset++] = chunk.compressionFlag;\n chunkData[offset++] = chunk.compressionMethod;\n\n // Write language tag\n chunkData.set(languageTag, offset);\n offset += languageTag.length;\n chunkData[offset++] = 0; // null\n\n // Write translated keyword\n chunkData.set(translatedKeyword, offset);\n offset += translatedKeyword.length;\n chunkData[offset++] = 0; // null\n\n // Write text\n chunkData.set(text, offset);\n\n return buildChunk('iTXt', chunkData);\n}\n\n/**\n * Build a complete PNG chunk with length, type, data, and CRC\n */\nfunction buildChunk(type: string, data: Uint8Array): Uint8Array {\n const chunk = new Uint8Array(4 + 4 + data.length + 4);\n\n // Write length (4 bytes, big-endian)\n writeUint32BE(chunk, 0, data.length);\n\n // Write type (4 bytes)\n for (let i = 0; i < 4; i++) {\n chunk[4 + i] = type.charCodeAt(i);\n }\n\n // Write data\n chunk.set(data, 8);\n\n // Calculate and write CRC (over type + data)\n const crcData = chunk.slice(4, 8 + data.length);\n const crc = calculateCrc32(crcData);\n writeUint32BE(chunk, 8 + data.length, crc);\n\n return chunk;\n}\n\n/**\n * Encode string as Latin-1 bytes\n */\nfunction latin1Encode(str: string): Uint8Array {\n const bytes = new Uint8Array(str.length);\n for (let i = 0; i < str.length; i++) {\n bytes[i] = str.charCodeAt(i) & 0xff;\n }\n return bytes;\n}\n\n/**\n * Encode string as UTF-8 bytes\n */\nfunction utf8Encode(str: string): Uint8Array {\n return new TextEncoder().encode(str);\n}\n\n// ============================================================================\n// CRC-32 Implementation (IEEE polynomial)\n// ============================================================================\n\n/** CRC-32 lookup table */\nconst CRC_TABLE = makeCrcTable();\n\n/**\n * Generate CRC-32 lookup table\n */\nfunction makeCrcTable(): Uint32Array {\n const table = new Uint32Array(256);\n for (let n = 0; n < 256; n++) {\n let c = n;\n for (let k = 0; k < 8; k++) {\n if (c & 1) {\n c = 0xedb88320 ^ (c >>> 1);\n } else {\n c = c >>> 1;\n }\n }\n table[n] = c >>> 0;\n }\n return table;\n}\n\n/**\n * Calculate CRC-32 checksum\n */\nfunction 
calculateCrc32(data: Uint8Array): number {\n let crc = 0xffffffff;\n for (let i = 0; i < data.length; i++) {\n crc = (CRC_TABLE[(crc ^ (data[i] ?? 0)) & 0xff] ?? 0) ^ (crc >>> 8);\n }\n return (crc ^ 0xffffffff) >>> 0;\n}\n","import type { MetadataSegment, WebpWriteResult } from '../types';\nimport { Result } from '../types';\nimport { arraysEqual, writeUint32LE } from '../utils/binary';\nimport { buildExifTiffData } from './exif';\n\nimport { isWebp } from '../utils/binary';\n\n/** WebP file signature: \"RIFF\" */\nconst RIFF_SIGNATURE = new Uint8Array([0x52, 0x49, 0x46, 0x46]);\n\n/** WebP format marker: \"WEBP\" */\nconst WEBP_MARKER = new Uint8Array([0x57, 0x45, 0x42, 0x50]);\n\n/** EXIF chunk type */\nconst EXIF_CHUNK_TYPE = new Uint8Array([0x45, 0x58, 0x49, 0x46]);\n\n/**\n * Write WebP metadata to binary data\n *\n * Replaces existing EXIF chunk with new metadata.\n * All segments are written to the EXIF chunk based on their source type.\n *\n * @param data - Original WebP file data as Uint8Array\n * @param segments - Metadata segments to embed\n * @returns Result containing new WebP data with embedded metadata\n */\nexport function writeWebpMetadata(\n data: Uint8Array,\n segments: MetadataSegment[],\n): WebpWriteResult {\n // Validate WebP signature\n if (!isWebp(data)) {\n return Result.error({ type: 'invalidSignature' });\n }\n\n // Collect all chunks except EXIF\n const collectResult = collectNonExifChunks(data);\n if (!collectResult.ok) {\n return collectResult;\n }\n\n const { chunks } = collectResult.value;\n\n // Build new EXIF chunk from segments\n const exifChunk = buildExifChunk(segments);\n\n // Calculate new file size (excluding RIFF header)\n let newFileSize = 4; // \"WEBP\"\n for (const chunk of chunks) {\n newFileSize += chunk.length;\n }\n if (exifChunk) {\n newFileSize += exifChunk.length;\n }\n\n // Build output\n const output = new Uint8Array(8 + newFileSize);\n let offset = 0;\n\n // Write RIFF header\n output.set(RIFF_SIGNATURE, offset);\n offset += 4;\n writeUint32LE(output, offset, newFileSize);\n offset += 4;\n\n // Write WEBP marker\n output.set(WEBP_MARKER, offset);\n offset += 4;\n\n // Write EXIF chunk first if we have one (after VP8/VP8L/VP8X)\n // EXIF should come after the image chunk for best compatibility\n let exifWritten = false;\n\n for (const chunk of chunks) {\n // Write chunks in original order\n output.set(chunk, offset);\n offset += chunk.length;\n\n // Write EXIF after first image-related chunk (VP8, VP8L, VP8X)\n if (!exifWritten && exifChunk && isImageChunk(chunk)) {\n output.set(exifChunk, offset);\n offset += exifChunk.length;\n exifWritten = true;\n }\n }\n\n // If EXIF wasn't written yet (no VP8* chunk found), append it\n if (!exifWritten && exifChunk) {\n output.set(exifChunk, offset);\n }\n\n return Result.ok(output);\n}\n\n/**\n * Check if chunk is an image-related chunk (VP8, VP8L, VP8X)\n */\nfunction isImageChunk(chunk: Uint8Array): boolean {\n if (chunk.length < 4) return false;\n const type = String.fromCharCode(\n chunk[0] ?? 0,\n chunk[1] ?? 0,\n chunk[2] ?? 0,\n chunk[3] ?? 
0,\n );\n return type === 'VP8 ' || type === 'VP8L' || type === 'VP8X';\n}\n\n/**\n * Collect all chunks except EXIF\n */\nfunction collectNonExifChunks(\n data: Uint8Array,\n): Result<\n { chunks: Uint8Array[]; firstChunkType: string },\n { type: 'invalidRiffStructure'; message: string }\n> {\n const chunks: Uint8Array[] = [];\n let firstChunkType = '';\n\n // Start after RIFF header (12 bytes: \"RIFF\" + size + \"WEBP\")\n let offset = 12;\n\n while (offset < data.length - 8) {\n // Read chunk type (4 bytes)\n const chunkType = data.slice(offset, offset + 4);\n const typeStr = String.fromCharCode(\n chunkType[0] ?? 0,\n chunkType[1] ?? 0,\n chunkType[2] ?? 0,\n chunkType[3] ?? 0,\n );\n\n if (!firstChunkType) {\n firstChunkType = typeStr;\n }\n\n // Read chunk size (4 bytes, little-endian)\n const chunkSize =\n (data[offset + 4] ?? 0) |\n ((data[offset + 5] ?? 0) << 8) |\n ((data[offset + 6] ?? 0) << 16) |\n ((data[offset + 7] ?? 0) << 24);\n\n // Validate chunk\n if (offset + 8 + chunkSize > data.length) {\n return Result.error({\n type: 'invalidRiffStructure',\n message: `Chunk extends beyond file at offset ${offset}`,\n });\n }\n\n // Keep all chunks except EXIF\n if (!arraysEqual(chunkType, EXIF_CHUNK_TYPE)) {\n // Include type + size + data (+ padding if odd)\n const paddedSize = chunkSize + (chunkSize % 2);\n const chunkData = data.slice(offset, offset + 8 + paddedSize);\n chunks.push(chunkData);\n }\n\n // Move to next chunk (chunk size + type + size fields)\n // RIFF chunks are padded to even byte boundaries\n const paddedSize = chunkSize + (chunkSize % 2);\n offset += 8 + paddedSize;\n }\n\n return Result.ok({ chunks, firstChunkType });\n}\n\n/**\n * Build EXIF chunk from metadata segments\n */\nfunction buildExifChunk(segments: MetadataSegment[]): Uint8Array | null {\n // Filter Exif-compatible segments\n const exifSegments = segments.filter(\n (s) =>\n s.source.type === 'exifUserComment' ||\n s.source.type === 'exifImageDescription' ||\n s.source.type === 'exifMake' ||\n s.source.type === 'exifSoftware' ||\n s.source.type === 'exifDocumentName',\n );\n\n if (exifSegments.length === 0) {\n return null;\n }\n\n const tiffData = buildExifTiffData(exifSegments);\n\n if (tiffData.length === 0) {\n return null;\n }\n\n // Build EXIF chunk: type (4) + size (4) + TIFF data\n const chunkSize = tiffData.length;\n const paddedSize = chunkSize + (chunkSize % 2);\n const chunk = new Uint8Array(8 + paddedSize);\n\n chunk.set(EXIF_CHUNK_TYPE, 0);\n writeUint32LE(chunk, 4, chunkSize);\n chunk.set(tiffData, 8);\n\n return chunk;\n}\n","/**\n * Write API for sd-metadata\n *\n * Handles writing metadata to images with automatic format conversion.\n * Supports PNG, JPEG, and WebP formats.\n */\n\nimport { convertMetadata } from '../converters';\nimport type { ParseResult } from '../types';\n\nimport type { ImageFormat } from '../utils/binary';\nimport { detectFormat } from '../utils/binary';\nimport { writeJpegMetadata } from '../writers/jpeg';\nimport { writePngMetadata } from '../writers/png';\nimport { writeWebpMetadata } from '../writers/webp';\n\n// ============================================================================\n// Public API\n// ============================================================================\n\n/**\n * Warning types for write operations\n */\nexport type WriteWarning = {\n type: 'metadataDropped';\n reason: 'unrecognizedCrossFormat';\n};\n\n/**\n * Error types for write operations\n */\ntype WriteError =\n | { type: 'unsupportedFormat' }\n | { type: 
'conversionFailed'; message: string }\n | { type: 'writeFailed'; message: string };\n\n/**\n * Result of the write operation\n *\n * Success case may include a warning when metadata was intentionally dropped.\n */\nexport type WriteResult =\n | { ok: true; value: Uint8Array; warning?: WriteWarning }\n | { ok: false; error: WriteError };\n\n/**\n * Write metadata to an image\n *\n * Automatically detects the target image format and converts the metadata\n * if necessary. For unrecognized metadata with cross-format conversion,\n * metadata is dropped and a warning is returned.\n *\n * @param data - Target image file data\n * @param metadata - ParseResult from `read()`\n * @returns New image data with embedded metadata (or warning if metadata was dropped)\n */\nexport function write(data: Uint8Array, metadata: ParseResult): WriteResult {\n const targetFormat = detectFormat(data);\n if (!targetFormat) {\n return { ok: false, error: { type: 'unsupportedFormat' } };\n }\n\n // Handle empty metadata: strip all metadata\n if (metadata.status === 'empty') {\n const result = HELPERS[targetFormat].writeEmpty(data, []);\n if (!result.ok) {\n return {\n ok: false,\n error: { type: 'writeFailed', message: result.error.type },\n };\n }\n return { ok: true, value: result.value };\n }\n\n // Handle invalid metadata\n if (metadata.status === 'invalid') {\n return {\n ok: false,\n error: { type: 'writeFailed', message: 'Cannot write invalid metadata' },\n };\n }\n\n // Handle unrecognized metadata\n if (metadata.status === 'unrecognized') {\n const sourceFormat = metadata.raw.format;\n\n // Same format: write as-is\n if (sourceFormat === targetFormat) {\n return writeRaw(data, targetFormat, metadata.raw);\n }\n\n // Cross-format: drop metadata and return with warning\n const result = HELPERS[targetFormat].writeEmpty(data, []);\n if (!result.ok) {\n return {\n ok: false,\n error: { type: 'writeFailed', message: result.error.type },\n };\n }\n return {\n ok: true,\n value: result.value,\n warning: { type: 'metadataDropped', reason: 'unrecognizedCrossFormat' },\n };\n }\n\n // Handle success metadata: convert if needed\n const conversionResult = convertMetadata(metadata, targetFormat);\n\n if (!conversionResult.ok) {\n return {\n ok: false,\n error: {\n type: 'conversionFailed',\n message: `Failed to convert metadata: ${conversionResult.error.type}`,\n },\n };\n }\n\n return writeRaw(data, targetFormat, conversionResult.value);\n}\n\n/**\n * Write raw metadata to image\n */\nfunction writeRaw(\n data: Uint8Array,\n targetFormat: ImageFormat,\n raw: import('../types').RawMetadata,\n): WriteResult {\n if (targetFormat === 'png' && raw.format === 'png') {\n const result = writePngMetadata(data, raw.chunks);\n if (!result.ok) {\n return {\n ok: false,\n error: { type: 'writeFailed', message: result.error.type },\n };\n }\n return { ok: true, value: result.value };\n }\n\n if (targetFormat === 'jpeg' && raw.format === 'jpeg') {\n const result = writeJpegMetadata(data, raw.segments);\n if (!result.ok) {\n return {\n ok: false,\n error: { type: 'writeFailed', message: result.error.type },\n };\n }\n return { ok: true, value: result.value };\n }\n\n if (targetFormat === 'webp' && raw.format === 'webp') {\n const result = writeWebpMetadata(data, raw.segments);\n if (!result.ok) {\n return {\n ok: false,\n error: { type: 'writeFailed', message: result.error.type },\n };\n }\n return { ok: true, value: result.value };\n }\n\n return {\n ok: false,\n error: {\n type: 'writeFailed',\n message: 'Internal error: format 
mismatch after conversion',\n },\n };\n}\n\n// ============================================================================\n// Format Helpers\n// ============================================================================\n\n/** Format-specific helper functions */\nconst HELPERS = {\n png: {\n writeEmpty: writePngMetadata,\n },\n jpeg: {\n writeEmpty: writeJpegMetadata,\n },\n webp: {\n writeEmpty: writeWebpMetadata,\n },\n} as const satisfies Record<ImageFormat, unknown>;\n","/**\n * A1111-format metadata serialization utilities\n *\n * Converts GenerationMetadata to A1111 (SD WebUI) plain text format.\n */\n\nimport type {\n GenerationMetadata,\n HiresSettings,\n NovelAIMetadata,\n UpscaleSettings,\n} from '../types';\n\n/**\n * Normalize line endings to LF (\\n)\n *\n * Ensures consistent line endings across different platforms.\n * Converts CRLF (\\r\\n) and CR (\\r) to LF (\\n).\n *\n * @param text - Text with potentially mixed line endings\n * @returns Text with normalized line endings (LF only)\n */\nfunction normalizeLineEndings(text: string): string {\n return text.replace(/\\r\\n/g, '\\n').replace(/\\r/g, '\\n');\n}\n\n/**\n * Merge upscale and hires settings\n *\n * A1111 format does not have separate upscale settings.\n * If both exist, hires takes priority.\n *\n * @param hires - Hires settings\n * @param upscale - Upscale settings\n * @returns Merged hires settings\n */\nfunction mergeUpscaleHires(\n hires?: HiresSettings,\n upscale?: UpscaleSettings,\n): HiresSettings | undefined {\n // If hires exists, use it as-is (priority)\n if (hires) {\n return hires;\n }\n\n // If only upscale exists, convert to hires format\n if (upscale) {\n return {\n scale: upscale.scale,\n upscaler: upscale.upscaler,\n // steps and denoise are not available from upscale\n };\n }\n\n return undefined;\n}\n\n/**\n * Build settings line from metadata\n *\n * Generates the \"Steps: X, Sampler: Y, ...\" line.\n *\n * @param metadata - Generation metadata\n * @returns Settings line string\n */\nfunction buildSettingsLine(metadata: GenerationMetadata): string {\n const parts: string[] = [];\n\n // Core settings\n if (metadata.sampling?.steps !== undefined) {\n parts.push(`Steps: ${metadata.sampling.steps}`);\n }\n\n if (metadata.sampling?.sampler) {\n parts.push(`Sampler: ${metadata.sampling.sampler}`);\n }\n\n if (metadata.sampling?.scheduler) {\n parts.push(`Schedule type: ${metadata.sampling.scheduler}`);\n }\n\n if (metadata.sampling?.cfg !== undefined) {\n parts.push(`CFG scale: ${metadata.sampling.cfg}`);\n }\n\n if (metadata.sampling?.seed !== undefined) {\n parts.push(`Seed: ${metadata.sampling.seed}`);\n }\n\n // Size (only if both width and height are positive)\n if (metadata.width > 0 && metadata.height > 0) {\n parts.push(`Size: ${metadata.width}x${metadata.height}`);\n }\n\n // Model\n if (metadata.model?.hash) {\n parts.push(`Model hash: ${metadata.model.hash}`);\n }\n\n if (metadata.model?.name) {\n parts.push(`Model: ${metadata.model.name}`);\n }\n\n // Optional: Clip skip\n if (metadata.sampling?.clipSkip !== undefined) {\n parts.push(`Clip skip: ${metadata.sampling.clipSkip}`);\n }\n\n // Hires.fix / Upscale (merged)\n const mergedHires = mergeUpscaleHires(metadata.hires, metadata.upscale);\n\n if (mergedHires) {\n if (mergedHires.denoise !== undefined) {\n parts.push(`Denoising strength: ${mergedHires.denoise}`);\n }\n\n if (mergedHires.scale !== undefined) {\n parts.push(`Hires upscale: ${mergedHires.scale}`);\n }\n\n if (mergedHires.steps !== undefined) {\n parts.push(`Hires 
steps: ${mergedHires.steps}`);\n }\n\n if (mergedHires.upscaler) {\n parts.push(`Hires upscaler: ${mergedHires.upscaler}`);\n }\n }\n\n return parts.join(', ');\n}\n\n/**\n * Build NovelAI character prompts section\n *\n * Generates character prompts delimited by comment lines.\n * Format: # Character N [x, y]:\\n[prompt]\n *\n * @param metadata - NovelAI metadata\n * @returns Array of lines (including both header and prompt lines)\n */\nfunction buildCharacterPromptsSection(metadata: NovelAIMetadata): string[] {\n if (!metadata.characterPrompts || metadata.characterPrompts.length === 0) {\n return [];\n }\n\n const lines: string[] = [];\n\n for (const [index, cp] of metadata.characterPrompts.entries()) {\n const characterNum = index + 1;\n const coords = cp.center ? ` [${cp.center.x}, ${cp.center.y}]` : '';\n\n // Header line: # Character N [x, y]:\n lines.push(`# Character ${characterNum}${coords}:`);\n\n // Prompt line (normalized)\n lines.push(normalizeLineEndings(cp.prompt));\n }\n\n return lines;\n}\n\n/**\n * Format metadata as SD WebUI (A1111) plain text\n *\n * Converts GenerationMetadata to human-readable text in the SD WebUI format.\n * This provides a standard, tool-agnostic way to display generation metadata\n * without needing to manually read individual properties.\n *\n * The output format follows the A1111/SD WebUI convention:\n * ```\n * positive prompt\n * [character prompts for NovelAI]\n * Negative prompt: negative prompt\n * Steps: 20, Sampler: Euler a, CFG scale: 7, Seed: 12345, ...\n * ```\n *\n * @param metadata - Generation metadata from any tool\n * @returns Human-readable text in SD WebUI format\n *\n * @example\n * ```typescript\n * import { read, formatAsWebUI } from '@enslo/sd-metadata';\n *\n * const result = read(imageData);\n * if (result.status === 'success') {\n * const text = formatAsWebUI(result.metadata);\n * console.log(text);\n * // Output:\n * // masterpiece, 1girl\n * // Negative prompt: low quality, bad anatomy\n * // Steps: 20, Sampler: Euler a, CFG scale: 7, Seed: 12345, Size: 512x768, Model: model.safetensors\n * }\n * ```\n */\nexport function formatAsWebUI(metadata: GenerationMetadata): string {\n const sections: string[] = [];\n\n // 1. Positive prompt (always present, normalized)\n sections.push(normalizeLineEndings(metadata.prompt));\n\n // 2. Character prompts (NovelAI only)\n if (metadata.software === 'novelai') {\n const characterLines = buildCharacterPromptsSection(metadata);\n if (characterLines.length > 0) {\n sections.push(characterLines.join('\\n'));\n }\n }\n\n // 3. Negative prompt (if present, normalized)\n if (metadata.negativePrompt) {\n sections.push(\n `Negative prompt: ${normalizeLineEndings(metadata.negativePrompt)}`,\n );\n }\n\n // 4. 
Settings line\n const settingsLine = buildSettingsLine(metadata);\n if (settingsLine) {\n sections.push(settingsLine);\n }\n\n // Join all sections with newlines\n return sections.join('\\n');\n}\n","/**\n * WebUI (A1111) format writer for sd-metadata\n *\n * Converts any GenerationMetadata to SD WebUI (A1111) plain text format\n * and writes it to PNG, JPEG, or WebP images.\n */\n\nimport { createEncodedChunk } from '../converters/chunk-encoding';\nimport { formatAsWebUI } from '../serializers/a1111';\nimport type {\n GenerationMetadata,\n MetadataSegment,\n PngTextChunk,\n} from '../types';\nimport { Result } from '../types';\nimport { detectFormat } from '../utils/binary';\nimport { writeJpegMetadata } from '../writers/jpeg';\nimport { writePngMetadata } from '../writers/png';\nimport { writeWebpMetadata } from '../writers/webp';\nimport type { WriteResult } from './write';\n\n/**\n * Write metadata to an image in SD WebUI format\n *\n * Converts the provided GenerationMetadata to SD WebUI (A1111) plain text\n * format and embeds it into the image. This allows you to:\n * - Create custom metadata from scratch\n * - Modify existing metadata\n * - Convert metadata from any tool to SD WebUI-compatible format\n *\n * The metadata is stored differently based on image format:\n * - PNG: `parameters` tEXt/iTXt chunk (encoding auto-selected based on content)\n * - JPEG/WebP: Exif UserComment field\n *\n * @param data - Target image file data (PNG, JPEG, or WebP)\n * @param metadata - Generation metadata to embed\n * @returns New image data with embedded metadata, or error\n *\n * @example\n * ```typescript\n * import { writeAsWebUI } from '@enslo/sd-metadata';\n *\n * // Create custom metadata\n * const metadata = {\n * software: 'sd-webui',\n * prompt: 'masterpiece, 1girl',\n * negativePrompt: 'lowres, bad quality',\n * width: 512,\n * height: 768,\n * sampling: { steps: 20, sampler: 'Euler a', cfg: 7, seed: 12345 },\n * model: { name: 'model.safetensors' },\n * };\n *\n * // Embed into image\n * const result = writeAsWebUI(imageData, metadata);\n * if (result.ok) {\n * writeFileSync('output.png', result.value);\n * }\n * ```\n */\nexport function writeAsWebUI(\n data: Uint8Array,\n metadata: GenerationMetadata,\n): WriteResult {\n // Detect image format\n const format = detectFormat(data);\n if (!format) {\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Convert metadata to A1111 plain text format\n const text = formatAsWebUI(metadata);\n\n // Create format-specific metadata structures\n let writeResult:\n | import('../types').PngWriteResult\n | import('../types').JpegWriteResult\n | import('../types').WebpWriteResult;\n\n if (format === 'png') {\n // PNG: Create parameters chunk with dynamic encoding\n const chunks = createPngChunks(text);\n writeResult = writePngMetadata(data, chunks);\n } else if (format === 'jpeg') {\n // JPEG: Create Exif UserComment segment\n const segments = createExifSegments(text);\n writeResult = writeJpegMetadata(data, segments);\n } else if (format === 'webp') {\n // WebP: Create Exif UserComment segment\n const segments = createExifSegments(text);\n writeResult = writeWebpMetadata(data, segments);\n } else {\n // Shouldn't reach here due to detectFormat check above\n return Result.error({ type: 'unsupportedFormat' });\n }\n\n // Handle write errors\n if (!writeResult.ok) {\n return Result.error({\n type: 'writeFailed',\n message: writeResult.error.type,\n });\n }\n\n return Result.ok(writeResult.value);\n}\n\n/**\n * Create PNG text chunks for SD 
WebUI format\n *\n * Uses dynamic encoding strategy (tEXt for ASCII, iTXt for non-ASCII).\n *\n * @param text - A1111-format plain text\n * @returns PNG text chunks\n */\nfunction createPngChunks(text: string): PngTextChunk[] {\n return createEncodedChunk('parameters', text, 'dynamic');\n}\n\n/**\n * Create Exif UserComment segment for JPEG/WebP\n *\n * @param text - A1111-format plain text\n * @returns Metadata segment array\n */\nfunction createExifSegments(text: string): MetadataSegment[] {\n return [\n {\n source: { type: 'exifUserComment' },\n data: text,\n },\n ];\n}\n","/**\n * Raw metadata serialization utilities\n *\n * Formats RawMetadata as human-readable plain text.\n */\n\nimport type { RawMetadata } from '../types';\n\n/**\n * Format raw metadata as plain text\n *\n * Extracts text content from RawMetadata and returns it as a simple string.\n * Multiple entries are separated by double newlines.\n *\n * This is useful for displaying unrecognized metadata to end users\n * without needing to manually iterate over chunks or segments.\n *\n * @param raw - Raw metadata from ParseResult\n * @returns Plain text content from the metadata\n *\n * @example\n * ```typescript\n * import { read, formatRaw } from '@enslo/sd-metadata';\n *\n * const result = read(imageData);\n * if (result.status === 'unrecognized') {\n * console.log(formatRaw(result.raw));\n * // Output: the raw text content without prefixes\n * }\n * ```\n */\nexport function formatRaw(raw: RawMetadata): string {\n switch (raw.format) {\n case 'png':\n return raw.chunks.map((chunk) => chunk.text).join('\\n\\n');\n\n case 'jpeg':\n case 'webp':\n return raw.segments.map((segment) => segment.data).join('\\n\\n');\n }\n}\n"],"mappings":";AAQO,IAAM,SAAS;AAAA,EACpB,IAAI,CAAO,WAA4B,EAAE,IAAI,MAAM,MAAM;AAAA,EACzD,OAAO,CAAO,WAA4B,EAAE,IAAI,OAAO,MAAM;AAC/D;;;ACgBO,SAAS,WAAW,SAA+C;AAExE,QAAM,kBAAkB,QAAQ;AAAA,IAC9B,CAAC,MAAM,EAAE,YAAY,gBAAgB,EAAE,YAAY;AAAA,EACrD;AACA,MAAI,CAAC,iBAAiB;AACpB,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAEA,QAAM,OAAO,gBAAgB;AAI7B,QAAM,eACJ,KAAK,SAAS,QAAQ,KACtB,KAAK,SAAS,UAAU,KACxB,KAAK,SAAS,kBAAkB;AAElC,MAAI,CAAC,cAAc;AACjB,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,EAAE,QAAQ,gBAAgB,SAAS,IAAI,oBAAoB,IAAI;AAGrE,QAAM,cAAc,cAAc,QAAQ;AAG1C,QAAM,OAAO,YAAY,IAAI,MAAM,KAAK;AACxC,QAAM,CAAC,OAAO,MAAM,IAAI,UAAU,IAAI;AAGtC,QAAM,UAAU,YAAY,IAAI,SAAS;AACzC,QAAM,MAAM,YAAY,IAAI,KAAK;AACjC,QAAM,WAAW,sBAAsB,SAAS,GAAG;AAGnD,QAAM,WAA0C;AAAA,IAC9C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,QAAM,YAAY,YAAY,IAAI,OAAO;AACzC,QAAM,YAAY,YAAY,IAAI,YAAY;AAC9C,MAAI,aAAa,WAAW;AAC1B,aAAS,QAAQ;AAAA,MACf,MAAM;AAAA,MACN,MAAM;AAAA,IACR;AAAA,EACF;AAGA,QAAM,UAAU,YAAY,IAAI,SAAS;AACzC,QAAM,YAAY,YAAY,IAAI,eAAe;AACjD,QAAM,QAAQ,YAAY,YAAY,IAAI,OAAO,CAAC;AAClD,QAAM,MAAM;AAAA,IACV,YAAY,IAAI,WAAW,KAAK,YAAY,IAAI,WAAW;AAAA,EAC7D;AACA,QAAM,OAAO,YAAY,YAAY,IAAI,MAAM,CAAC;AAChD,QAAM,WAAW,YAAY,YAAY,IAAI,WAAW,CAAC;AAEzD,MACE,YAAY,UACZ,cAAc,UACd,UAAU,UACV,QAAQ,UACR,SAAS,UACT,aAAa,QACb;AACA,aAAS,WAAW;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAGA,QAAM,aAAa,YAAY,YAAY,IAAI,eAAe,CAAC;AAC/D,QAAM,WAAW,YAAY,IAAI,gBAAgB;AACjD,QAAM,aAAa,YAAY,YAAY,IAAI,aAAa,CAAC;AAC7D,QAAM,UAAU,YAAY,YAAY,IAAI,oBAAoB,CAAC;AACjE,QAAM,YAAY,YAAY,IAAI,YAAY;AAE9C,MACE,CAAC,YAAY,WAAW,UAAU,YAAY,OAAO,EAAE;AAAA,IACrD,CAAC,MAAM,MAAM;AAAA,EACf,GACA;AACA,UAAM,CAAC,UAAU,IAAI,UAAU,aAAa,EAAE;AAC9C,UAAM,QAAQ,cAAc,aAAa;AACzC,aAAS,QAAQ,EAAE,OAAO,UAAU,OAAO,YAAY,QAAQ;AAAA,EACjE;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;AAKA,SAAS,oBAAoB,MAI3B;AAEA,QAA
M,gBAAgB,KAAK,QAAQ,kBAAkB;AAGrD,QAAM,aAAa,KAAK,QAAQ,QAAQ;AAExC,MAAI,kBAAkB,MAAM,eAAe,IAAI;AAE7C,WAAO,EAAE,QAAQ,KAAK,KAAK,GAAG,gBAAgB,IAAI,UAAU,GAAG;AAAA,EACjE;AAEA,MAAI,kBAAkB,IAAI;AAExB,UAAMA,iBAAgB,KAAK,YAAY,MAAM,UAAU;AACvD,WAAO;AAAA,MACL,QAAQ,KAAK,MAAM,GAAGA,cAAa,EAAE,KAAK;AAAA,MAC1C,gBAAgB;AAAA,MAChB,UAAU,KAAK,MAAMA,cAAa,EAAE,KAAK;AAAA,IAC3C;AAAA,EACF;AAEA,MAAI,eAAe,IAAI;AAErB,WAAO;AAAA,MACL,QAAQ,KAAK,MAAM,GAAG,aAAa,EAAE,KAAK;AAAA,MAC1C,gBAAgB,KAAK,MAAM,gBAAgB,EAAE,EAAE,KAAK;AAAA,MACpD,UAAU;AAAA,IACZ;AAAA,EACF;AAGA,QAAM,gBAAgB,KAAK,YAAY,MAAM,UAAU;AAEvD,SAAO;AAAA,IACL,QAAQ,KAAK,MAAM,GAAG,aAAa,EAAE,KAAK;AAAA,IAC1C,gBAAgB,KAAK,MAAM,gBAAgB,IAAI,aAAa,EAAE,KAAK;AAAA,IACnE,UAAU,KAAK,MAAM,aAAa,EAAE,KAAK;AAAA,EAC3C;AACF;AAQA,SAAS,cAAc,UAAuC;AAC5D,QAAM,SAAS,oBAAI,IAAoB;AACvC,MAAI,CAAC,SAAU,QAAO;AAKtB,QAAM,QACJ;AAGF,QAAM,UAAU,MAAM,KAAK,SAAS,SAAS,KAAK,CAAC;AAEnD,aAAW,SAAS,SAAS;AAC3B,UAAM,OAAO,MAAM,CAAC,KAAK,IAAI,KAAK;AAClC,UAAM,SAAS,MAAM,CAAC,KAAK,IAAI,KAAK;AACpC,WAAO,IAAI,KAAK,KAAK;AAAA,EACvB;AAEA,SAAO;AACT;AAKA,SAAS,UAAU,MAAgC;AACjD,QAAM,QAAQ,KAAK,MAAM,aAAa;AACtC,MAAI,CAAC,MAAO,QAAO,CAAC,GAAG,CAAC;AACxB,SAAO;AAAA,IACL,OAAO,SAAS,MAAM,CAAC,KAAK,KAAK,EAAE;AAAA,IACnC,OAAO,SAAS,MAAM,CAAC,KAAK,KAAK,EAAE;AAAA,EACrC;AACF;AAKA,SAAS,YAAY,OAA+C;AAClE,MAAI,UAAU,OAAW,QAAO;AAChC,QAAM,MAAM,OAAO,WAAW,KAAK;AACnC,SAAO,OAAO,MAAM,GAAG,IAAI,SAAY;AACzC;AAKA,SAAS,sBACP,SACA,KACgD;AAEhD,MAAI,QAAQ,UAAW,QAAO;AAG9B,MAAI,CAAC,QAAS,QAAO;AACrB,MAAI,YAAY,MAAO,QAAO;AAE9B,MAAI,YAAY,UAAW,QAAO;AAClC,MAAI,aAAa,KAAK,OAAO,EAAG,QAAO;AACvC,SAAO;AACT;;;AClOO,SAAS,iBAAiB,SAAuC;AACtE,SAAO,OAAO;AAAA,IACZ,OAAO,YAAY,QAAQ,IAAI,CAAC,MAAM,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,CAAC;AAAA,EAC5D;AACF;;;ACHO,SAAS,UACd,MACoD;AACpD,MAAI;AACF,WAAO,OAAO,GAAG,KAAK,MAAM,IAAI,CAAM;AAAA,EACxC,QAAQ;AACN,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AACF;;;ACuCO,SAAS,aAAa,SAA+C;AAC1E,QAAM,cAAc,iBAAiB,OAAO;AAG5C,QAAM,aAAa,eAAe,WAAW;AAC7C,MAAI,CAAC,YAAY;AACf,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,SAAS,UAAuB,UAAU;AAChD,MAAI,CAAC,OAAO,IAAI;AACd,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AACA,QAAM,SAAS,OAAO;AAGtB,QAAM,QAAQ,OAAO,OAAO,MAAM;AAClC,MAAI,CAAC,MAAM,KAAK,CAAC,SAAS,gBAAgB,IAAI,GAAG;AAC/C,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,WAAW,SAAS,QAAQ,CAAC,SAAS,CAAC;AAG7C,QAAM,eAAe,SAAS,QAAQ,CAAC,mBAAmB,CAAC;AAC3D,QAAM,eAAe,SAAS,QAAQ,CAAC,mBAAmB,CAAC;AAC3D,QAAM,mBAAmB,YAAY,YAAY;AACjD,QAAM,mBAAmB,YAAY,YAAY;AAGjD,QAAM,cAAc,SAAS,QAAQ,CAAC,kBAAkB,CAAC;AACzD,QAAM,cAAc,cAAc,OAAO,YAAY,OAAO,KAAK,KAAK,IAAI;AAC1E,QAAM,eAAe,cAAc,OAAO,YAAY,OAAO,MAAM,KAAK,IAAI;AAG5E,QAAM,YAAY,qBAAqB,MAAM;AAC7C,QAAM,eAAe,oBAAoB,WAAW,UAAU;AAC9D,QAAM,eAAe,oBAAoB,WAAW,kBAAkB;AACtE,QAAM,QAAQ,eAAe,WAAW,SAAS;AACjD,QAAM,SAAS,gBAAgB,WAAW,UAAU;AAGpD,QAAM,WAA8C;AAAA,IAClD,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,gBAAgB;AAAA,IAChB;AAAA,IACA;AAAA,IACA,OAAO;AAAA;AAAA,EACT;AAGA,QAAM,aAAa,SAAS,QAAQ,CAAC,uBAAuB,CAAC,GAAG,QAC5D;AAEJ,MAAI,YAAY;AACd,aAAS,QAAQ,EAAE,MAAM,OAAO,UAAU,EAAE;AAAA,EAC9C,WAAW,WAAW,WAAW;AAC/B,aAAS,QAAQ,EAAE,MAAM,UAAU,UAAU;AAAA,EAC/C;AAGA,MAAI,UAAU;AACZ,aAAS,WAAW;AAAA,MAClB,MAAM,SAAS,OAAO;AAAA,MACtB,OAAO,SAAS,OAAO;AAAA,MACvB,KAAK,SAAS,OAAO;AAAA,MACrB,SAAS,SAAS,OAAO;AAAA,MACzB,WAAW,SAAS,OAAO;AAAA,IAC7B;AAAA,EACF,WAAW,WAAW;AACpB,aAAS,WAAW;AAAA,MAClB,MAAM,UAAU;AAAA,MAChB,OAAO,UAAU;AAAA,MACjB,KAAK,UAAU;AAAA,MACf,SAAS,UAAU;AAAA,IACrB;AAAA,EACF;AAGA,QAAM,aAAa,SAAS,QAAQ;AAAA,IAClC;AAAA,IACA;AAAA,EACF,CAAC,GAAG;AACJ,QAAM,aAAa,SAAS,QAAQ;AAAA,IAClC;AAAA,IACA;AAAA,EACF,CAAC,GAAG;AACJ,QAAM,eAAe,SAAS,QAAQ,CAAC,kBAAkB,CAAC,GAAG;AAE7D,MAA
I,cAAc,YAAY;AAE5B,UAAM,aAAa,WAAW;AAC9B,UAAM,QACJ,cAAc,IACV,KAAK,MAAO,aAAa,cAAe,GAAG,IAAI,MAC/C;AAEN,QAAI,cAAc;AAChB,eAAS,QAAQ;AAAA,QACf,UAAU,WAAW;AAAA,QACrB;AAAA,QACA,OAAO,aAAa;AAAA,QACpB,SAAS,aAAa;AAAA,MACxB;AAAA,IACF,OAAO;AACL,eAAS,UAAU;AAAA,QACjB,UAAU,WAAW;AAAA,QACrB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,MAAI,WAAW,iBAAiB;AAC9B,UAAM,mBAAmB,UAAU,gBAAgB;AAAA,MACjD,CAAC,MAAM,EAAE,SAAS;AAAA,IACpB;AACA,QAAI,kBAAkB;AACpB,YAAM,gBAAgB,UAAU,SAAS;AACzC,UAAI,gBAAgB,KAAK,iBAAiB,cAAc;AACtD,cAAM,QAAQ,iBAAiB,eAAe;AAC9C,iBAAS,UAAU;AAAA,UACjB,OAAO,KAAK,MAAM,QAAQ,GAAG,IAAI;AAAA,QACnC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;AAWA,SAAS,eAAe,aAA8C;AAEpE,MAAI,YAAY,QAAQ;AAItB,WAAO,YAAY,OAAO,QAAQ,cAAc,QAAQ;AAAA,EAC1D;AAGA,QAAM,aAAa;AAAA,IACjB,YAAY;AAAA,IACZ,YAAY;AAAA,IACZ,YAAY;AAAA,IACZ,YAAY;AAAA;AAAA,IACZ,YAAY;AAAA;AAAA,EACd;AAEA,aAAW,aAAa,YAAY;AAClC,QAAI,CAAC,UAAW;AAGhB,QAAI,UAAU,WAAW,GAAG,GAAG;AAI7B,YAAM,UAAU,UACb,QAAQ,QAAQ,EAAE,EAClB,QAAQ,cAAc,QAAQ;AACjC,YAAM,SAAS,UAAmC,OAAO;AACzD,UAAI,CAAC,OAAO,GAAI;AAGhB,UAAI,OAAO,MAAM,UAAU,OAAO,OAAO,MAAM,WAAW,UAAU;AAClE,eAAO,KAAK,UAAU,OAAO,MAAM,MAAM;AAAA,MAC3C;AAEA,YAAM,SAAS,OAAO,OAAO,OAAO,KAAK;AACzC,UAAI,OAAO,KAAK,CAAC,MAAM,KAAK,OAAO,MAAM,YAAY,gBAAgB,CAAC,GAAG;AACvE,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AASA,SAAS,SAAS,QAAqB,MAAuC;AAC5E,SAAO,OAAO,QAAQ,MAAM,EAAE,KAAK,CAAC,CAAC,GAAG,MAAM,KAAK,SAAS,GAAG,CAAC,IAAI,CAAC;AACvE;AASA,SAAS,YAAY,MAAqC;AACxD,SAAO,OAAO,MAAM,OAAO,SAAS,WAAW,KAAK,OAAO,OAAO;AACpE;AAWA,SAAS,qBACP,QACkC;AAClC,QAAM,iBAAkB,OAAmC;AAC3D,MAAI,OAAO,mBAAmB,SAAU,QAAO;AAE/C,QAAM,SAAS,UAAgC,cAAc;AAC7D,SAAO,OAAO,KAAK,OAAO,QAAQ;AACpC;;;ACjSO,SAAS,eACd,SAC2B;AAC3B,QAAM,cAAc,iBAAiB,OAAO;AAG5C,QAAM,eAAe,qBAAqB,WAAW;AACrD,MAAI,aAAc,QAAO;AAGzB,QAAM,cAAc,qBAAqB,WAAW;AACpD,MAAI,YAAa,QAAO;AAGxB,QAAM,OAAO,YAAY,cAAc,YAAY,WAAW;AAC9D,MAAI,MAAM;AACR,WAAO,sBAAsB,IAAI;AAAA,EACnC;AAEA,SAAO;AACT;AAaA,SAAS,qBACP,aAC2B;AAM3B,MAAI,YAAY,UAAU,WAAW,SAAS,GAAG;AAC/C,WAAO;AAAA,EACT;AAGA,MAAI,uBAAuB,aAAa;AACtC,WAAO;AAAA,EACT;AAGA,MAAI,qBAAqB,aAAa;AACpC,WAAO;AAAA,EACT;AAGA,MAAI,YAAY,aAAa;AAC3B,WAAO;AAAA,EACT;AAGA,MAAI,qBAAqB,eAAe,qBAAqB,aAAa;AACxE,WAAO;AAAA,EACT;AAQA,QAAM,aAAa,YAAY;AAC/B,MAAI,YAAY,SAAS,kBAAkB,GAAG;AAC5C,WAAO;AAAA,EACT;AAMA,QAAM,UAAU,YAAY;AAC5B,MAAI,SAAS,WAAW,GAAG,GAAG;AAC5B,WAAO,sBAAsB,OAAO;AAAA,EACtC;AAEA,SAAO;AACT;AAOA,SAAS,sBAAsB,SAA4C;AACzE,MAAI;AACF,UAAM,SAAS,KAAK,MAAM,OAAO;AAGjC,QAAI,uBAAuB,QAAQ;AACjC,aAAO;AAAA,IACT;AAGA,QAAI,YAAY,UAAU,cAAc,QAAQ;AAC9C,YAAM,WAAW,OAAO;AACxB,YAAM,SAAS,OAAO;AAEtB,YAAM,WACJ,OAAO,aAAa,YAAY,OAAO,WAAW;AACpD,YAAM,eACH,OAAO,aAAa,YAAY,SAAS,WAAW,GAAG,KACvD,OAAO,WAAW,YAAY,OAAO,WAAW,GAAG;AAEtD,UAAI,YAAY,cAAc;AAC5B,eAAO;AAAA,MACT;AAAA,IACF;AAGA,QAAI,sBAAsB,QAAQ;AAChC,aAAO;AAAA,IACT;AAGA,QAAI,YAAY,UAAU,gBAAgB,QAAQ;AAChD,YAAM,SAAS,OAAO,OAAO,cAAc,EAAE;AAC7C,UACE,OAAO,SAAS,kBAAkB,KAClC,OAAO,SAAS,eAAe,GAC/B;AACA,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO;AACT;AAQA,SAAS,qBACP,aAC2B;AAE3B,MAAI,YAAY,eAAe,cAAc,aAAa;AACxD,WAAO;AAAA,EACT;AAGA,MAAI,cAAc,aAAa;AAC7B,WAAO;AAAA,EACT;AAIA,MAAI,YAAY,aAAa;AAC3B,UAAM,aAAa,YAAY;AAC/B,QAAI,YAAY,WAAW,GAAG,GAAG;AAE/B,UAAI,WAAW,SAAS,kBAAkB,GAAG;AAC3C,eAAO;AAAA,MACT;AAGA,UAAI,WAAW,SAAS,YAAY,GAAG;AACrC,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAQA,SAAS,sBAAsB,MAAyC;AAEtE,MAAI,KAAK,WAAW,GAAG,GAAG;AACxB,WAAO,qBAAqB,IAAI;AAAA,EAClC;AAGA,SAAO,sBAAsB,IAAI;AACnC;AAUA,SAAS,qBAAqB,MAAyC;AAMrE,MAAI,KAAK,SAAS,kBAAkB,GAAG;AACrC,WAAO;AAAA,EACT;AAGA,MACE,KAAK,SAAS,4BAA4B,KAC1C,KAAK,SAAS,6BAA6B,GAC3C;AACA,WAAO;AAAA,EACT;AAGA,MAAI,KAAK,SAAS,8BAA8B,GAAG;AACjD,WAAO;AAAA,EACT;AAGA,MAAI,KAAK,SAA
S,UAAU,KAAK,KAAK,SAAS,kBAAkB,GAAG;AAClE,WAAO;AAAA,EACT;AAOA,MACE,KAAK,SAAS,aAAa,KAC3B,KAAK,SAAS,kBAAkB,KAChC,KAAK,SAAS,gBAAgB,KAC9B,KAAK,SAAS,sBAAsB,KACpC,KAAK,SAAS,sBAAsB,KACpC,KAAK,SAAS,iBAAiB,GAC/B;AACA,WAAO;AAAA,EACT;AAGA,MAAI,KAAK,SAAS,SAAS,KAAK,KAAK,SAAS,cAAc,GAAG;AAC7D,WAAO;AAAA,EACT;AAGA,MAAI,KAAK,SAAS,UAAU,KAAK,KAAK,SAAS,cAAc,GAAG;AAC9D,WAAO;AAAA,EACT;AAOA,MAAI,KAAK,SAAS,UAAU,KAAK,KAAK,SAAS,SAAS,GAAG;AACzD,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAYA,SAAS,sBAAsB,MAAyC;AAMtE,MAAI,KAAK,SAAS,kBAAkB,KAAK,KAAK,SAAS,eAAe,GAAG;AACvE,WAAO;AAAA,EACT;AAMA,QAAM,eAAe,KAAK,MAAM,sBAAsB;AACtD,MAAI,cAAc;AAChB,UAAM,UAAU,aAAa,CAAC;AAG9B,QAAI,YAAY,SAAS,SAAS,WAAW,KAAK,GAAG;AACnD,aAAO;AAAA,IACT;AAGA,QAAI,SAAS,WAAW,GAAG,KAAK,OAAO,KAAK,OAAO,GAAG;AACpD,aAAO;AAAA,IACT;AAGA,QAAI,YAAY,WAAW;AACzB,aAAO;AAAA,IACT;AAAA,EACF;AAOA,MAAI,KAAK,SAAS,cAAc,KAAK,KAAK,SAAS,aAAa,GAAG;AACjE,WAAO;AAAA,EACT;AAGA,MAAI,KAAK,SAAS,oBAAoB,GAAG;AACvC,WAAO;AAAA,EACT;AAOA,MAAI,KAAK,SAAS,QAAQ,KAAK,KAAK,SAAS,UAAU,GAAG;AACxD,WAAO;AAAA,EACT;AAEA,SAAO;AACT;;;ACzSA,SAAS,SACP,MACA,MACA,MACe;AACf,SAAQ,KAAK,IAAI,KAAK,KAAK,IAAI;AACjC;AAOA,SAAS,iBAAiB,MAA8C;AACtE,MAAI,CAAC,KAAM,QAAO;AAElB,QAAM,QAAQ,KAAK,QAAQ,OAAO,GAAG,EAAE,MAAM,GAAG;AAChD,SAAO,MAAM,MAAM,SAAS,CAAC;AAC/B;AAYO,SAAS,mBACd,SACqB;AACrB,QAAM,cAAc,iBAAiB,OAAO;AAG5C,MAAI,YAAY,mBAAmB,YAAY,iBAAiB,GAAG;AAIjE,WAAO,iBAAiB,WAAW;AAAA,EACrC;AAGA,QAAM,YACH,YAAY,YAAY,WAAW,GAAG,IACnC,YAAY,aACZ,YACH,YAAY,SAAS,WAAW,GAAG,IAAI,YAAY,UAAU;AAEhE,MAAI,CAAC,UAAU;AACb,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,SAAS,UAAqC,QAAQ;AAC5D,MAAI,CAAC,OAAO,IAAI;AACd,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAEA,SAAO,cAAc,OAAO,KAAK;AACnC;AAKA,SAAS,iBACP,aACqB;AACrB,QAAM,SAAS,YAAY,UAAU,YAAY,UAAU;AAC3D,QAAM,iBACJ,YAAY,mBACZ,YAAY,iBAAiB,KAC7B,YAAY,mBACZ;AAEF,QAAM,YACJ,YAAY,8BACZ,YAAY,wBAAwB;AAEtC,QAAM,QAAQ,OAAO,YAAY,SAAS,YAAY,KAAK,KAAK;AAChE,QAAM,SAAS,OAAO,YAAY,UAAU,YAAY,MAAM,KAAK;AAEnE,QAAM,WAA0C;AAAA,IAC9C,UAAU;AAAA,IACV,QAAQ,OAAO,KAAK;AAAA,IACpB,gBAAgB,eAAe,KAAK;AAAA,IACpC;AAAA,IACA;AAAA,IACA,OAAO;AAAA,MACL,MAAM,iBAAiB,SAAS;AAAA,MAChC,KAAK,YAAY,iBAAiB,YAAY,WAAW;AAAA,IAC3D;AAAA,IACA,UAAU;AAAA,MACR,SAAS,YAAY,gBAAgB,YAAY;AAAA,MACjD,OACE,OAAO,YAAY,uBAAuB,YAAY,KAAK,KAC3D;AAAA,MACF,KACE,OAAO,YAAY,kBAAkB,YAAY,gBAAgB,CAAC,KAClE;AAAA,MACF,MAAM,OAAO,YAAY,QAAQ,YAAY,IAAI,KAAK;AAAA,MACtD,UACE,OAAO,YAAY,aAAa,YAAY,WAAW,CAAC,KAAK;AAAA,IACjE;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;AAKA,SAAS,cAAc,MAAsD;AAC3E,QAAM,SAAS,SAAiB,MAAM,UAAU,QAAQ,KAAK;AAC7D,QAAM,iBACJ,SAAiB,MAAM,mBAAmB,iBAAiB,KAAK;AAElE,QAAM,YAAY;AAAA,IAChB;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,QAAM,QAAQ,SAAiB,MAAM,SAAS,OAAO,KAAK;AAC1D,QAAM,SAAS,SAAiB,MAAM,UAAU,QAAQ,KAAK;AAE7D,QAAM,WAA0C;AAAA,IAC9C,UAAU;AAAA,IACV,QAAQ,OAAO,KAAK;AAAA,IACpB,gBAAgB,eAAe,KAAK;AAAA,IACpC;AAAA,IACA;AAAA,IACA,OAAO;AAAA,MACL,MAAM,iBAAiB,SAAS;AAAA,MAChC,KAAK,SAAiB,MAAM,iBAAiB,WAAW;AAAA,IAC1D;AAAA,IACA,UAAU;AAAA,MACR,SAAS,SAAiB,MAAM,gBAAgB,SAAS;AAAA,MACzD,OAAO,SAAiB,MAAM,uBAAuB,OAAO;AAAA,MAC5D,KAAK,SAAiB,MAAM,kBAAkB,gBAAgB;AAAA,MAC9D,MAAM,SAAiB,MAAM,QAAQ,MAAM;AAAA,MAC3C,UAAU,SAAiB,MAAM,aAAa,WAAW;AAAA,IAC3D;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;;;AC5JO,SAAS,aAAa,SAA+C;AAC1E,QAAM,cAAc,iBAAiB,OAAO;AAG5C,QAAM,WAAW,YAAY,WAAW,YAAY;AAEpD,MAAI,CAAC,YAAY,CAAC,SAAS,WAAW,GAAG,GAAG;AAC1C,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,SAAS,UAA+B,QAAQ;AACtD,MAAI,CAAC,OAAO,IAAI;AACd,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AACA,QAAM,OAAO,OAAO;AAGpB,MAAI,CAAC,KAAK,cAAc,CAAC,KAAK,QAAQ;AACpC,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAEA,Q
AAM,WAA0C;AAAA,IAC9C,UAAU;AAAA,IACV,QAAQ,KAAK,QAAQ,KAAK,KAAK;AAAA,IAC/B,gBAAgB,KAAK,iBAAiB,KAAK,KAAK;AAAA,IAChD,OAAO,KAAK,SAAS;AAAA,IACrB,QAAQ,KAAK,UAAU;AAAA,IACvB,OAAO;AAAA,MACL,MAAM,KAAK;AAAA,IACb;AAAA,IACA,UAAU;AAAA,MACR,SAAS,KAAK;AAAA,MACd,WAAW,KAAK;AAAA,MAChB,OAAO,KAAK;AAAA,MACZ,KAAK,KAAK;AAAA,MACV,MAAM,KAAK;AAAA,IACb;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;;;ACxDO,SAAS,aAAa,SAA+C;AAC1E,QAAM,cAAc,iBAAiB,OAAO;AAG5C,QAAM,iBAAiB,YAAY;AACnC,MAAI,CAAC,gBAAgB;AACnB,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,SAAS,UAA+B,cAAc;AAC5D,MAAI,CAAC,OAAO,IAAI;AACd,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AACA,QAAM,OAAO,OAAO;AAGpB,QAAM,kBAAkB,CAAC,QAAiB;AACxC,UAAM,QAAQ,KAAK,MAAM,mBAAmB;AAC5C,WAAO,QAAQ,CAAC,KAAK,QAAQ,CAAC,IAC1B;AAAA,MACE,OAAO,OAAO,SAAS,MAAM,CAAC,GAAG,EAAE;AAAA,MACnC,QAAQ,OAAO,SAAS,MAAM,CAAC,GAAG,EAAE;AAAA,IACtC,IACA,EAAE,OAAO,GAAG,QAAQ,EAAE;AAAA,EAC5B;AACA,QAAM,EAAE,OAAO,OAAO,IAAI,gBAAgB,KAAK,UAAU;AAGzD,QAAM,WAA0C;AAAA,IAC9C,UAAU;AAAA,IACV,QAAQ,KAAK,UAAU;AAAA,IACvB,gBAAgB,KAAK,mBAAmB;AAAA,IACxC;AAAA,IACA;AAAA,IACA,OAAO;AAAA,MACL,MAAM,KAAK;AAAA,MACX,MAAM,KAAK,YAAY;AAAA,IACzB;AAAA,IACA,UAAU;AAAA,MACR,SAAS,KAAK;AAAA,MACd,OAAO,KAAK;AAAA,MACZ,KAAK,KAAK;AAAA,MACV,MAAM,KAAK;AAAA,IACb;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;;;ACtDA,SAAS,wBACP,aACoB;AAEpB,MAAI,YAAY,mBAAmB;AACjC,WAAO,YAAY;AAAA,EACrB;AAGA,MAAI,CAAC,YAAY,SAAS;AACxB,WAAO;AAAA,EACT;AAEA,QAAM,gBAAgB,UAAmC,YAAY,OAAO;AAC5E,MAAI,CAAC,cAAc,MAAM,EAAE,uBAAuB,cAAc,QAAQ;AACtE,WAAO;AAAA,EACT;AAEA,SAAO,KAAK,UAAU,cAAc,MAAM,iBAAiB;AAC7D;AAYO,SAAS,cAAc,SAA+C;AAE3E,QAAM,cAAc,iBAAiB,OAAO;AAK5C,QAAM,eAAe,wBAAwB,WAAW;AAExD,MAAI,CAAC,cAAc;AACjB,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,SAAS,UAAgC,YAAY;AAC3D,MAAI,CAAC,OAAO,IAAI;AACd,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AACA,QAAM,OAAO,OAAO;AAGpB,QAAM,QAAQ,KAAK,SAAS;AAC5B,QAAM,SAAS,KAAK,UAAU;AAG9B,QAAM,WAA0C;AAAA,IAC9C,UAAU;AAAA,IACV,QAAQ,KAAK,mBAAmB;AAAA,IAChC,gBAAgB,KAAK,mBAAmB;AAAA,IACxC;AAAA,IACA;AAAA,EACF;AAGA,MAAI,KAAK,OAAO,QAAQ,KAAK,OAAO,MAAM;AACxC,aAAS,QAAQ;AAAA,MACf,MAAM,KAAK,MAAM;AAAA,MACjB,MAAM,KAAK,MAAM;AAAA,IACnB;AAAA,EACF;AAGA,MACE,KAAK,SAAS,UACd,KAAK,UAAU,UACf,KAAK,cAAc,UACnB,KAAK,cAAc,QACnB;AACA,aAAS,WAAW;AAAA,MAClB,MAAM,KAAK;AAAA,MACX,OAAO,KAAK;AAAA,MACZ,KAAK,KAAK;AAAA,MACV,SAAS,KAAK;AAAA,IAChB;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;;;ACrEO,SAAS,aAAa,SAA+C;AAE1E,QAAM,cAAc,iBAAiB,OAAO;AAG5C,MAAI,CAAC,YAAY,UAAU,WAAW,SAAS,GAAG;AAChD,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,cAAc,YAAY;AAChC,MAAI,CAAC,aAAa;AAChB,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAEA,QAAM,SAAS,UAA0B,WAAW;AACpD,MAAI,CAAC,OAAO,IAAI;AACd,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AACA,QAAM,UAAU,OAAO;AAGvB,QAAM,QAAQ,QAAQ,SAAS;AAC/B,QAAM,SAAS,QAAQ,UAAU;AAGjC,QAAM,SACJ,QAAQ,WAAW,SAAS,gBAAgB,QAAQ,UAAU;AAChE,QAAM,iBACJ,QAAQ,oBAAoB,SAAS,gBAAgB,QAAQ,MAAM;AAGrE,QAAM,WAAyC;AAAA,IAC7C,UAAU;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,MACE,QAAQ,UAAU,UAClB,QAAQ,UAAU,UAClB,QAAQ,SAAS,UACjB,QAAQ,mBAAmB,UAC3B,QAAQ,YAAY,QACpB;AACA,aAAS,WAAW;AAAA,MAClB,OAAO,QAAQ;AAAA,MACf,KAAK,QAAQ;AAAA,MACb,MAAM,QAAQ;AAAA,MACd,SAAS,QAAQ;AAAA,MACjB,WAAW,QAAQ;AAAA,IACrB;AAAA,EACF;AAGA,QAAM,eAAe,QAAQ,WAAW,SAAS;AACjD,MAAI,gBAAgB,aAAa,SAAS,GAAG;AAC3C,aAAS,mBAAmB,aACzB,IAAI,CAAC,OAA+B;AACnC,UAAI,CAAC,GAAG,aAAc,QAAO;AAC7B,aAAO;AAAA,QACL,QAAQ,GAAG;AAAA,QACX,QAAQ,GAAG,UAAU,CAAC;AAAA,MACxB;AAAA,IACF,CAAC,EACA,OAAO,CAAC,OAA8B,OAAO,IAAI;AAEpD,aAAS,YAAY,QAAQ,WAAW;AACxC,aAAS,WAAW,QAAQ,WAAW;A
AAA,EACzC;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;;;AC9FO,SAAS,mBACd,SACqB;AACrB,QAAM,cAAc,iBAAiB,OAAO;AAG5C,QAAM,WAAW,YAAY;AAE7B,MAAI,CAAC,YAAY,CAAC,SAAS,WAAW,GAAG,GAAG;AAC1C,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,SAAS,UAAqC,QAAQ;AAC5D,MAAI,CAAC,OAAO,IAAI;AACd,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AACA,QAAM,OAAO,OAAO;AAGpB,MAAI,KAAK,aAAa,iBAAiB;AACrC,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAEA,QAAM,WAA0C;AAAA,IAC9C,UAAU;AAAA,IACV,QAAQ,KAAK,QAAQ,KAAK,KAAK;AAAA,IAC/B,gBAAgB,KAAK,UAAU,KAAK,KAAK;AAAA,IACzC,OAAO,KAAK,SAAS;AAAA,IACrB,QAAQ,KAAK,UAAU;AAAA,IACvB,OAAO;AAAA,MACL,MAAM,KAAK;AAAA,MACX,MAAM,KAAK;AAAA,IACb;AAAA,IACA,UAAU;AAAA,MACR,SAAS,KAAK;AAAA,MACd,WAAW,KAAK;AAAA,MAChB,OAAO,KAAK;AAAA,MACZ,KAAK,KAAK;AAAA,MACV,MAAM,KAAK;AAAA,MACX,UAAU,KAAK;AAAA,IACjB;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;;;AC9CO,SAAS,qBACd,SACqB;AAErB,QAAM,cAAc,iBAAiB,OAAO;AAG5C,QAAM,cAAc,aAAa,OAAO;AACxC,MAAI,CAAC,YAAY,MAAM,YAAY,MAAM,aAAa,WAAW;AAC/D,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,WAA8C;AAAA,IAClD,GAAG,YAAY;AAAA,IACf,UAAU;AAAA,EACZ;AAGA,QAAM,WAAW,YAAY,iBAAiB;AAC9C,MAAI,UAAU;AACZ,UAAM,SAAS,UAA+B,QAAQ;AACtD,QAAI,OAAO,IAAI;AACb,YAAM,OAAO,OAAO;AAGpB,UAAI,KAAK,mBAAmB,QAAW;AACrC,iBAAS,SAAS,KAAK;AAAA,MACzB;AACA,UAAI,KAAK,mBAAmB,QAAW;AACrC,iBAAS,iBAAiB,KAAK;AAAA,MACjC;AAGA,UAAI,KAAK,cAAc,UAAa,KAAK,cAAc,QAAW;AAChE,iBAAS,QAAQ;AAAA,UACf,MAAM,KAAK;AAAA,UACX,MAAM,KAAK;AAAA,QACb;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;;;ACjDA,SAAS,yBACP,aACoB;AAEpB,MAAI,YAAY,YAAY;AAC1B,WAAO,YAAY;AAAA,EACrB;AAGA,MAAI,CAAC,YAAY,SAAS;AACxB,WAAO;AAAA,EACT;AAEA,QAAM,gBAAgB,UAAmC,YAAY,OAAO;AAC5E,MAAI,CAAC,cAAc,IAAI;AACrB,WAAO;AAAA,EACT;AAGA,MAAI,sBAAsB,cAAc,OAAO;AAC7C,WAAO,YAAY;AAAA,EACrB;AAEA,SAAO;AACT;AAYO,SAAS,aAAa,SAA+C;AAE1E,QAAM,cAAc,iBAAiB,OAAO;AAK5C,QAAM,iBAAiB,yBAAyB,WAAW;AAE3D,MAAI,CAAC,gBAAgB;AACnB,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,SAAS,UAA6B,cAAc;AAC1D,MAAI,CAAC,OAAO,IAAI;AACd,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAGA,QAAM,SAAS,OAAO,MAAM;AAC5B,MAAI,CAAC,QAAQ;AACX,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,QAAQ,OAAO,SAAS;AAC9B,QAAM,SAAS,OAAO,UAAU;AAGhC,QAAM,WAAyC;AAAA,IAC7C,UAAU;AAAA,IACV,QAAQ,OAAO,UAAU;AAAA,IACzB,gBAAgB,OAAO,kBAAkB;AAAA,IACzC;AAAA,IACA;AAAA,EACF;AAGA,QAAM,eAAe,YAAY,UAAU,YAAY;AACvD,MAAI,cAAc;AAChB,UAAM,eAAe,UAAU,YAAY;AAC3C,QAAI,aAAa,IAAI;AACnB,eAAS,QAAQ,aAAa;AAAA,IAChC;AAAA,EACF;AAGA,MAAI,OAAO,OAAO;AAChB,aAAS,QAAQ;AAAA,MACf,MAAM,OAAO;AAAA,IACf;AAAA,EACF;AAGA,MACE,OAAO,SAAS,UAChB,OAAO,UAAU,UACjB,OAAO,aAAa,UACpB,OAAO,YAAY,UACnB,OAAO,cAAc,QACrB;AACA,aAAS,WAAW;AAAA,MAClB,MAAM,OAAO;AAAA,MACb,OAAO,OAAO;AAAA,MACd,KAAK,OAAO;AAAA,MACZ,SAAS,OAAO;AAAA,MAChB,WAAW,OAAO;AAAA,IACpB;AAAA,EACF;AAGA,MACE,OAAO,mBAAmB,UAC1B,OAAO,yBAAyB,UAChC,OAAO,6BAA6B,QACpC;AACA,aAAS,QAAQ;AAAA,MACf,OAAO,OAAO;AAAA,MACd,UAAU,OAAO;AAAA,MACjB,SAAS,OAAO;AAAA,IAClB;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;;;AC5HO,SAAS,eAAe,SAA+C;AAE5E,QAAM,cAAc,iBAAiB,OAAO;AAG5C,QAAM,WAAW,YAAY;AAC7B,MAAI,CAAC,UAAU;AACb,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,cAAc,SAAS,QAAQ,QAAQ,EAAE;AAC/C,QAAM,SAAS,UAAmC,WAAW;AAC7D,MAAI,CAAC,OAAO,IAAI;AACd,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AACA,QAAM,OAAO,OAAO;AAGpB,QAAM,QAAQ,KAAK,SAAS;AAC5B,QAAM,SAAS,KAAK,UAAU;AAG9B,QAAM,cAAc,YAAY;AAChC,MAAI,CAAC,aAAa;AAChB,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AACA,QAAM,eAAe,UAAU,WAAW;AAC1C,MAAI,CAAC,aAAa,IAAI;AACpB,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EAC
H;AAGA,QAAM,WAA8C;AAAA,IAClD,UAAU;AAAA,IACV,QAAQ,KAAK,UAAU;AAAA,IACvB,gBAAgB,KAAK,kBAAkB;AAAA,IACvC;AAAA,IACA;AAAA,IACA,OAAO,aAAa;AAAA,EACtB;AAGA,MAAI,KAAK,WAAW,iBAAiB,KAAK,WAAW,MAAM;AACzD,aAAS,QAAQ;AAAA,MACf,MAAM,KAAK,UAAU;AAAA,MACrB,MAAM,KAAK,UAAU;AAAA,IACvB;AAAA,EACF;AAGA,MACE,KAAK,SAAS,UACd,KAAK,UAAU,UACf,KAAK,aAAa,UAClB,KAAK,aAAa,QAClB;AACA,UAAM,WAAW,KAAK,OAAO,OAAO,KAAK,IAAI,IAAI;AAEjD,aAAS,WAAW;AAAA,MAClB,MACE,aAAa,KACT,eAAe,aAAa,KAAuB,IACnD;AAAA,MACN,OAAO,KAAK;AAAA,MACZ,KAAK,KAAK;AAAA,MACV,UAAU,KAAK;AAAA,IACjB;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;AAQA,SAAS,eAAe,OAA+B;AACrD,QAAM,cAAc,gBAAgB,KAAK;AACzC,SAAO,eAAe,OAAO,YAAY,OAAO,SAAS,WACrD,YAAY,OAAO,OACnB;AACN;AAQA,SAAS,gBACP,OACqE;AACrE,SAAO,OAAO,OAAO,KAAK,EAAE;AAAA,IAC1B,CAAC,SACC,KAAK,eAAe,cACpB,KAAK,WAAW,YAAY,EAAE,SAAS,SAAS;AAAA,EACpD;AACF;;;ACxHO,SAAS,cAAc,SAA+C;AAE3E,QAAM,WAAW,eAAe,OAAO;AAGvC,UAAQ,UAAU;AAAA,IAChB,KAAK;AACH,aAAO,aAAa,OAAO;AAAA,IAE7B,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,WAAW,OAAO;AAAA,IAE3B,KAAK;AACH,aAAO,aAAa,OAAO;AAAA,IAE7B,KAAK,WAAW;AAEd,YAAM,cAAc,aAAa,OAAO;AACxC,UAAI,YAAY,GAAI,QAAO;AAC3B,aAAO,WAAW,OAAO;AAAA,IAC3B;AAAA,IAEA,KAAK,WAAW;AAEd,YAAM,cAAc,aAAa,OAAO;AACxC,UAAI,YAAY,GAAI,QAAO;AAC3B,aAAO,WAAW,OAAO;AAAA,IAC3B;AAAA,IAEA,KAAK;AACH,aAAO,cAAc,OAAO;AAAA,IAE9B,KAAK;AACH,aAAO,aAAa,OAAO;AAAA,IAE7B,KAAK;AACH,aAAO,eAAe,OAAO;AAAA,IAE/B,KAAK;AACH,aAAO,qBAAqB,OAAO;AAAA,IAErC,KAAK;AACH,aAAO,mBAAmB,OAAO;AAAA,IAEnC,KAAK;AACH,aAAO,aAAa,OAAO;AAAA,IAE7B,KAAK;AACH,aAAO,mBAAmB,OAAO;AAAA,IAEnC,SAAS;AAGP,YAAM,cAAc,WAAW,OAAO;AACtC,UAAI,YAAY,GAAI,QAAO;AAG3B,YAAM,cAAc,aAAa,OAAO;AACxC,UAAI,YAAY,GAAI,QAAO;AAG3B,YAAM,eAAe,cAAc,OAAO;AAC1C,UAAI,aAAa,GAAI,QAAO;AAG5B,YAAM,cAAc,aAAa,OAAO;AACxC,UAAI,YAAY,GAAI,QAAO;AAG3B,YAAM,eAAe,eAAe,OAAO;AAC3C,UAAI,aAAa,GAAI,QAAO;AAG5B,YAAM,kBAAkB,qBAAqB,OAAO;AACpD,UAAI,gBAAgB,GAAI,QAAO;AAG/B,YAAM,gBAAgB,aAAa,OAAO;AAC1C,UAAI,cAAc,GAAI,QAAO;AAE7B,aAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,IACnD;AAAA,EACF;AACF;;;ACnGO,SAAS,aAAa,MAAkB,QAAwB;AACrE,UACG,KAAK,MAAM,KAAK,MACf,KAAK,SAAS,CAAC,KAAK,MAAM,KAC1B,KAAK,SAAS,CAAC,KAAK,MAAM;AAEhC;AASO,SAAS,aAAa,MAAkB,QAAwB;AACrE,UACI,KAAK,MAAM,KAAK,MAAM,MACtB,KAAK,SAAS,CAAC,KAAK,MAAM,MAC1B,KAAK,SAAS,CAAC,KAAK,MAAM,KAC3B,KAAK,SAAS,CAAC,KAAK;AAEzB;AASO,SAAS,aAAa,MAAkB,QAAwB;AACrE,UACG,KAAK,MAAM,KAAK,MACf,KAAK,SAAS,CAAC,KAAK,MAAM,KAC1B,KAAK,SAAS,CAAC,KAAK,MAAM,MAC1B,KAAK,SAAS,CAAC,KAAK,MAAM;AAEhC;AASO,SAAS,cACd,MACA,QACA,OACM;AACN,OAAK,MAAM,IAAK,UAAU,KAAM;AAChC,OAAK,SAAS,CAAC,IAAK,UAAU,KAAM;AACpC,OAAK,SAAS,CAAC,IAAK,UAAU,IAAK;AACnC,OAAK,SAAS,CAAC,IAAI,QAAQ;AAC7B;AASO,SAAS,cAAc,MAAkB,QAAwB;AACtE,SAAO,OAAO;AAAA,IACZ,KAAK,MAAM,KAAK;AAAA,IAChB,KAAK,SAAS,CAAC,KAAK;AAAA,IACpB,KAAK,SAAS,CAAC,KAAK;AAAA,IACpB,KAAK,SAAS,CAAC,KAAK;AAAA,EACtB;AACF;AAUO,SAAS,WACd,MACA,QACA,gBACQ;AACR,MAAI,gBAAgB;AAClB,YAAQ,KAAK,MAAM,KAAK,MAAO,KAAK,SAAS,CAAC,KAAK,MAAM;AAAA,EAC3D;AACA,UAAS,KAAK,MAAM,KAAK,MAAM,KAAM,KAAK,SAAS,CAAC,KAAK;AAC3D;AAUO,SAAS,WACd,MACA,QACA,gBACQ;AACR,MAAI,gBAAgB;AAClB,YACG,KAAK,MAAM,KAAK,MACf,KAAK,SAAS,CAAC,KAAK,MAAM,KAC1B,KAAK,SAAS,CAAC,KAAK,MAAM,MAC1B,KAAK,SAAS,CAAC,KAAK,MAAM;AAAA,EAEhC;AACA,UACI,KAAK,MAAM,KAAK,MAAM,MACtB,KAAK,SAAS,CAAC,KAAK,MAAM,MAC1B,KAAK,SAAS,CAAC,KAAK,MAAM,KAC3B,KAAK,SAAS,CAAC,KAAK;AAEzB;AASO,SAAS,YAAY,GAAe,GAAwB;AACjE,MAAI,EAAE,WAAW,EAAE,OAAQ,QAAO;AAClC,WAAS,IAAI,GAAG,IAAI,EAAE,QAAQ,KAAK;AACjC,QAAI,EAAE,CAAC,MAAM,EAAE,CAAC,EAAG,QAAO;AAAA,EAC5B;AACA,SAAO;AACT;AAUO,SAAS,YACd,MACA,QACA,OACA,gBACM;AACN,MAAI,gBAAgB;AAClB,SAAK,MAAM,IAAI,QAAQ;AACvB,SAAK,SAAS,CAAC,IAAK,UAAU,IAAK;AAAA,EACrC,OAAO;AACL,SAAK,MAAM,IAAK,UAAU,IAAK;AAC/B,SAAK,SAAS,CAA
C,IAAI,QAAQ;AAAA,EAC7B;AACF;AAUO,SAAS,YACd,MACA,QACA,OACA,gBACM;AACN,MAAI,gBAAgB;AAClB,SAAK,MAAM,IAAI,QAAQ;AACvB,SAAK,SAAS,CAAC,IAAK,UAAU,IAAK;AACnC,SAAK,SAAS,CAAC,IAAK,UAAU,KAAM;AACpC,SAAK,SAAS,CAAC,IAAK,UAAU,KAAM;AAAA,EACtC,OAAO;AACL,SAAK,MAAM,IAAK,UAAU,KAAM;AAChC,SAAK,SAAS,CAAC,IAAK,UAAU,KAAM;AACpC,SAAK,SAAS,CAAC,IAAK,UAAU,IAAK;AACnC,SAAK,SAAS,CAAC,IAAI,QAAQ;AAAA,EAC7B;AACF;AASO,SAAS,cACd,MACA,QACA,OACM;AACN,OAAK,MAAM,IAAI,QAAQ;AACvB,OAAK,SAAS,CAAC,IAAK,UAAU,IAAK;AACnC,OAAK,SAAS,CAAC,IAAK,UAAU,KAAM;AACpC,OAAK,SAAS,CAAC,IAAK,UAAU,KAAM;AACtC;AAUO,SAAS,MAAM,MAA2B;AAC/C,MAAI,KAAK,SAAS,EAAG,QAAO;AAC5B,SACE,KAAK,CAAC,MAAM,OACZ,KAAK,CAAC,MAAM,MACZ,KAAK,CAAC,MAAM,MACZ,KAAK,CAAC,MAAM,MACZ,KAAK,CAAC,MAAM,MACZ,KAAK,CAAC,MAAM,MACZ,KAAK,CAAC,MAAM,MACZ,KAAK,CAAC,MAAM;AAEhB;AAKO,SAAS,OAAO,MAA2B;AAChD,MAAI,KAAK,SAAS,EAAG,QAAO;AAC5B,SAAO,KAAK,CAAC,MAAM,OAAQ,KAAK,CAAC,MAAM;AACzC;AAKO,SAAS,OAAO,MAA2B;AAChD,MAAI,KAAK,SAAS,GAAI,QAAO;AAC7B,SACE,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,EAAE,MAAM;AAAA,EACb,KAAK,EAAE,MAAM;AAEjB;AAKO,SAAS,aAAa,MAAsC;AACjE,MAAI,MAAM,IAAI,EAAG,QAAO;AACxB,MAAI,OAAO,IAAI,EAAG,QAAO;AACzB,MAAI,OAAO,IAAI,EAAG,QAAO;AACzB,SAAO;AACT;;;ACxQO,IAAM,mBAAmB;AAGzB,IAAM,wBAAwB;AAG9B,IAAM,oBAAoB;AAG1B,IAAM,WAAW;AAGjB,IAAM,eAAe;AAGrB,IAAM,uBAAuB;;;ACM7B,SAAS,0BACd,UACmB;AACnB,MAAI,SAAS,SAAS,EAAG,QAAO,CAAC;AAGjC,QAAM,iBAAiB,SAAS,CAAC,MAAM,MAAQ,SAAS,CAAC,MAAM;AAC/D,QAAM,cAAc,SAAS,CAAC,MAAM,MAAQ,SAAS,CAAC,MAAM;AAE5D,MAAI,CAAC,kBAAkB,CAAC,YAAa,QAAO,CAAC;AAG7C,QAAM,QAAQ,WAAW,UAAU,GAAG,cAAc;AACpD,MAAI,UAAU,GAAI,QAAO,CAAC;AAG1B,QAAM,aAAa,WAAW,UAAU,GAAG,cAAc;AAGzD,QAAM,eAAe,mBAAmB,UAAU,YAAY,cAAc;AAG5E,QAAM,gBAAgB,kBAAkB,UAAU,YAAY,cAAc;AAC5E,QAAM,kBACJ,kBAAkB,OACd,mBAAmB,UAAU,eAAe,cAAc,IAC1D,CAAC;AAEP,SAAO,CAAC,GAAG,cAAc,GAAG,eAAe;AAC7C;AAKA,SAAS,mBACP,MACA,WACA,gBACmB;AACnB,QAAM,WAA8B,CAAC;AAErC,MAAI,YAAY,IAAI,KAAK,OAAQ,QAAO;AAExC,QAAM,aAAa,WAAW,MAAM,WAAW,cAAc;AAC7D,MAAI,SAAS,YAAY;AAEzB,WAAS,IAAI,GAAG,IAAI,YAAY,KAAK;AACnC,QAAI,SAAS,KAAK,KAAK,OAAQ,QAAO;AAEtC,UAAM,MAAM,WAAW,MAAM,QAAQ,cAAc;AACnD,UAAM,OAAO,WAAW,MAAM,SAAS,GAAG,cAAc;AACxD,UAAM,QAAQ,WAAW,MAAM,SAAS,GAAG,cAAc;AAGzD,UAAM,WAAW,YAAY,IAAI;AACjC,UAAM,WAAW,QAAQ;AAEzB,QAAI;AACJ,QAAI,YAAY,GAAG;AACjB,oBAAc,SAAS;AAAA,IACzB,OAAO;AACL,oBAAc,WAAW,MAAM,SAAS,GAAG,cAAc;AAAA,IAC3D;AAEA,QAAI,cAAc,WAAW,KAAK,QAAQ;AACxC,gBAAU;AACV;AAAA,IACF;AAEA,UAAM,UAAU,KAAK,MAAM,aAAa,cAAc,QAAQ;AAG9D,QAAI,QAAQ,uBAAuB;AACjC,YAAM,OAAO,kBAAkB,OAAO;AACtC,UAAI,MAAM;AACR,cAAM,SAAS,cAAc,IAAI;AACjC,iBAAS,KAAK;AAAA,UACZ,QAAQ,EAAE,MAAM,wBAAwB,QAAQ,UAAU,OAAU;AAAA,UACpE,MAAM,SAAS,KAAK,MAAM,OAAO,SAAS,CAAC,IAAI;AAAA,QACjD,CAAC;AAAA,MACH;AAAA,IACF,WAAW,QAAQ,UAAU;AAC3B,YAAM,OAAO,kBAAkB,OAAO;AACtC,UAAI,MAAM;AACR,cAAM,SAAS,cAAc,IAAI;AACjC,iBAAS,KAAK;AAAA,UACZ,QAAQ,EAAE,MAAM,YAAY,QAAQ,UAAU,OAAU;AAAA,UACxD,MAAM,SAAS,KAAK,MAAM,OAAO,SAAS,CAAC,IAAI;AAAA,QACjD,CAAC;AAAA,MACH;AAAA,IACF,WAAW,QAAQ,cAAc;AAC/B,YAAM,OAAO,kBAAkB,OAAO;AACtC,UAAI,MAAM;AACR,iBAAS,KAAK;AAAA,UACZ,QAAQ,EAAE,MAAM,eAAe;AAAA,UAC/B,MAAM;AAAA,QACR,CAAC;AAAA,MACH;AAAA,IACF,WAAW,QAAQ,mBAAmB;AACpC,YAAM,OAAO,kBAAkB,OAAO;AACtC,UAAI,MAAM;AACR,iBAAS,KAAK;AAAA,UACZ,QAAQ,EAAE,MAAM,mBAAmB;AAAA,UACnC,MAAM;AAAA,QACR,CAAC;AAAA,MACH;AAAA,IACF,WAAW,QAAQ,kBAAkB;AACnC,YAAM,OAAO,kBAAkB,OAAO;AACtC,UAAI,MAAM;AACR,iBAAS,KAAK;AAAA,UACZ,QAAQ,EAAE,MAAM,kBAAkB;AAAA,UAClC,MAAM;AAAA,QACR,CAAC;AAAA,MACH;AAAA,IACF;AAEA,cAAU;AAAA,EACZ;AAEA,SAAO;AACT;AAKA,SAAS,cAAc,MAA6B;AAClD,QAAM,QAAQ,KAAK,MAAM,iBAAiB;AAC1C,SAAO,QAAQ,CAAC,KAAK;AACvB;AAKA,SAAS,YAAY,MAAsB;AACz
C,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO;AAAA;AAAA,IACT,KAAK;AACH,aAAO;AAAA;AAAA,IACT,KAAK;AACH,aAAO;AAAA;AAAA,IACT,KAAK;AACH,aAAO;AAAA;AAAA,IACT,KAAK;AACH,aAAO;AAAA;AAAA,IACT,KAAK;AACH,aAAO;AAAA;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;AAKA,SAAS,kBAAkB,MAAiC;AAC1D,MAAI;AACF,UAAM,UAAU,IAAI,YAAY,SAAS,EAAE,OAAO,MAAM,CAAC;AACzD,QAAI,OAAO,QAAQ,OAAO,IAAI;AAE9B,QAAI,KAAK,SAAS,IAAI,GAAG;AACvB,aAAO,KAAK,MAAM,GAAG,EAAE;AAAA,IACzB;AACA,WAAO,KAAK,KAAK,KAAK;AAAA,EACxB,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,SAAS,kBACP,MACA,WACA,gBACe;AACf,MAAI,YAAY,IAAI,KAAK,OAAQ,QAAO;AAExC,QAAM,aAAa,WAAW,MAAM,WAAW,cAAc;AAC7D,MAAI,SAAS,YAAY;AAEzB,WAAS,IAAI,GAAG,IAAI,YAAY,KAAK;AACnC,QAAI,SAAS,KAAK,KAAK,OAAQ,QAAO;AAEtC,UAAM,MAAM,WAAW,MAAM,QAAQ,cAAc;AAEnD,QAAI,QAAQ,sBAAsB;AAEhC,aAAO,WAAW,MAAM,SAAS,GAAG,cAAc;AAAA,IACpD;AAEA,cAAU;AAAA,EACZ;AAEA,SAAO;AACT;AAQO,SAAS,kBAAkB,MAAiC;AACjE,MAAI,KAAK,SAAS,EAAG,QAAO;AAG5B,MACE,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM,GACZ;AAEA,UAAM,WAAW,KAAK,MAAM,CAAC;AAC7B,QAAI,SAAS,UAAU,GAAG;AACxB,YAAM,aAAa,SAAS,CAAC,MAAM,KAAQ,SAAS,CAAC,MAAM;AAC3D,aAAO,aAAa,cAAc,QAAQ,IAAI,cAAc,QAAQ;AAAA,IACtE;AACA,WAAO,cAAc,QAAQ;AAAA,EAC/B;AAGA,MACE,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM;AAAA,EACZ,KAAK,CAAC,MAAM,GACZ;AAEA,WAAO,YAAY,KAAK,MAAM,CAAC,CAAC;AAAA,EAClC;AAGA,MAAI;AACF,UAAM,UAAU,IAAI,YAAY,SAAS,EAAE,OAAO,KAAK,CAAC;AACxD,QAAI,SAAS,QAAQ,OAAO,IAAI;AAEhC,QAAI,OAAO,SAAS,IAAI,GAAG;AACzB,eAAS,OAAO,MAAM,GAAG,EAAE;AAAA,IAC7B;AACA,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,SAAS,cAAc,MAA0B;AAC/C,QAAM,QAAkB,CAAC;AAEzB,WAAS,IAAI,GAAG,IAAI,KAAK,SAAS,GAAG,KAAK,GAAG;AAC3C,UAAM,QAAS,KAAK,CAAC,KAAK,MAAM,KAAM,KAAK,IAAI,CAAC,KAAK;AACrD,QAAI,SAAS,EAAG;AAChB,UAAM,KAAK,OAAO,aAAa,IAAI,CAAC;AAAA,EACtC;AAEA,SAAO,MAAM,KAAK,EAAE;AACtB;AAKA,SAAS,cAAc,MAA0B;AAC/C,QAAM,QAAkB,CAAC;AAEzB,WAAS,IAAI,GAAG,IAAI,KAAK,SAAS,GAAG,KAAK,GAAG;AAC3C,UAAM,QAAQ,KAAK,CAAC,KAAK,MAAO,KAAK,IAAI,CAAC,KAAK,MAAM;AACrD,QAAI,SAAS,EAAG;AAChB,UAAM,KAAK,OAAO,aAAa,IAAI,CAAC;AAAA,EACtC;AAEA,SAAO,MAAM,KAAK,EAAE;AACtB;AAKA,SAAS,YAAY,MAA0B;AAC7C,QAAM,QAAkB,CAAC;AAEzB,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,QAAI,KAAK,CAAC,MAAM,EAAG;AACnB,UAAM,KAAK,OAAO,aAAa,KAAK,CAAC,KAAK,CAAC,CAAC;AAAA,EAC9C;AAEA,SAAO,MAAM,KAAK,EAAE;AACtB;;;AC9TA,IAAM,cAAc;AAGpB,IAAM,aAAa;AAGnB,IAAM,cAAc,IAAI,WAAW,CAAC,IAAM,KAAM,KAAM,KAAM,GAAM,CAAI,CAAC;AAYhE,SAAS,iBAAiB,MAAsC;AACrE,MAAI,CAAC,OAAO,IAAI,GAAG;AACjB,WAAO,OAAO,MAAM,EAAE,MAAM,mBAAmB,CAAC;AAAA,EAClD;AAEA,QAAM,WAA8B,CAAC;AAGrC,QAAM,OAAO,gBAAgB,IAAI;AACjC,MAAI,MAAM;AACR,UAAM,WAAW,KAAK,MAAM,KAAK,QAAQ,KAAK,SAAS,KAAK,MAAM;AAClE,UAAM,eAAe,0BAA0B,QAAQ;AACvD,aAAS,KAAK,GAAG,YAAY;AAAA,EAC/B;AAGA,QAAM,aAAa,eAAe,IAAI;AACtC,MAAI,YAAY;AACd,UAAM,UAAU,KAAK;AAAA,MACnB,WAAW;AAAA,MACX,WAAW,SAAS,WAAW;AAAA,IACjC;AACA,UAAM,UAAU,iBAAiB,OAAO;AAExC,QAAI,YAAY,MAAM;AACpB,eAAS,KAAK;AAAA,QACZ,QAAQ,EAAE,MAAM,UAAU;AAAA,QAC1B,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,QAAQ;AAC3B;AAQO,SAAS,gBACd,MAC2C;AAC3C,MAAI,SAAS;AAEb,SAAO,SAAS,KAAK,SAAS,GAAG;AAE/B,QAAI,KAAK,MAAM,MAAM,KAAM;AACzB;AACA;AAAA,IACF;AAEA,UAAM,SAAS,KAAK,SAAS,CAAC;AAG9B,QAAI,WAAW,KAAM;AACnB;AACA;AAAA,IACF;AAGA,UAAM,UAAW,KAAK,SAAS,CAAC,KAAK,MAAM,KAAM,KAAK,SAAS,CAAC,KAAK;AAGrE,QAAI,WAAW,aAAa;AAE1B,YAAM,cAAc,SAAS;AAC7B,UAAI,cAAc,KAAK,KAAK,QAAQ;AAClC,cAAM,SAAS,KAAK,MAAM,aAAa,cAAc,CAAC;AACtD,YAAI,YAAY,QAAQ,WAAW,GAAG;AAEpC,iBAAO;AAAA,YACL
,QAAQ,cAAc;AAAA,YACtB,QAAQ,SAAS;AAAA;AAAA,UACnB;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,cAAU,IAAI;AAGd,QAAI,WAAW,OAAQ,WAAW,KAAM;AACtC;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAUA,SAAS,eACP,MAC2C;AAC3C,MAAI,SAAS;AAEb,SAAO,SAAS,KAAK,SAAS,GAAG;AAE/B,QAAI,KAAK,MAAM,MAAM,KAAM;AACzB;AACA;AAAA,IACF;AAEA,UAAM,SAAS,KAAK,SAAS,CAAC;AAG9B,QAAI,WAAW,KAAM;AACnB;AACA;AAAA,IACF;AAGA,UAAM,UAAW,KAAK,SAAS,CAAC,KAAK,MAAM,KAAM,KAAK,SAAS,CAAC,KAAK;AAGrE,QAAI,WAAW,YAAY;AAEzB,aAAO;AAAA,QACL,QAAQ,SAAS;AAAA,QACjB,QAAQ,SAAS;AAAA;AAAA,MACnB;AAAA,IACF;AAGA,cAAU,IAAI;AAGd,QAAI,WAAW,OAAQ,WAAW,KAAM;AACtC;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAQA,SAAS,iBAAiB,MAAiC;AACzD,MAAI;AACF,UAAM,UAAU,IAAI,YAAY,SAAS,EAAE,OAAO,KAAK,CAAC;AACxD,WAAO,QAAQ,OAAO,IAAI;AAAA,EAC5B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;ACtKO,SAAS,gBAAgB,MAAqC;AAEnE,MAAI,CAAC,MAAM,IAAI,GAAG;AAChB,WAAO,OAAO,MAAM,EAAE,MAAM,mBAAmB,CAAC;AAAA,EAClD;AAGA,QAAM,eAAe,kBAAkB,IAAI;AAC3C,MAAI,CAAC,aAAa,IAAI;AACpB,WAAO;AAAA,EACT;AAEA,SAAO,OAAO,GAAG,aAAa,KAAK;AACrC;AAMA,IAAM,uBAAuB;AAK7B,SAAS,kBACP,MACsC;AACtC,QAAM,SAAyB,CAAC;AAChC,MAAI,SAAS;AAEb,SAAO,SAAS,KAAK,QAAQ;AAE3B,QAAI,SAAS,IAAI,KAAK,QAAQ;AAC5B,aAAO,OAAO,MAAM;AAAA,QAClB,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AACA,UAAM,SAAS,aAAa,MAAM,MAAM;AACxC,cAAU;AAGV,QAAI,SAAS,IAAI,KAAK,QAAQ;AAC5B,aAAO,OAAO,MAAM;AAAA,QAClB,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AACA,UAAM,YAAY,cAAc,MAAM,MAAM;AAC5C,cAAU;AAGV,QAAI,SAAS,SAAS,KAAK,QAAQ;AACjC,aAAO,OAAO,MAAM;AAAA,QAClB,MAAM;AAAA,QACN,SAAS,oDAAoD,SAAS;AAAA,MACxE,CAAC;AAAA,IACH;AACA,UAAM,YAAY,KAAK,MAAM,QAAQ,SAAS,MAAM;AACpD,cAAU;AAGV,cAAU;AAGV,QAAI,cAAc,QAAQ;AACxB,YAAM,SAAS,eAAe,SAAS;AACvC,UAAI,QAAQ;AACV,eAAO,KAAK,MAAM;AAAA,MACpB;AAAA,IACF,WAAW,cAAc,QAAQ;AAC/B,YAAM,SAAS,eAAe,SAAS;AACvC,UAAI,QAAQ;AACV,eAAO,KAAK,MAAM;AAAA,MACpB;AAAA,IACF;AAGA,QAAI,cAAc,QAAQ;AACxB;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,GAAG,MAAM;AACzB;AAUA,SAAS,eAAe,MAAoC;AAE1D,QAAM,YAAY,KAAK,QAAQ,CAAC;AAChC,MAAI,cAAc,IAAI;AACpB,WAAO;AAAA,EACT;AAGA,QAAM,UAAU,aAAa,KAAK,MAAM,GAAG,SAAS,CAAC;AAGrD,QAAM,WAAW,KAAK,MAAM,YAAY,CAAC;AACzC,QAAM,OAAO,cAAc,QAAQ,KAAK,aAAa,QAAQ;AAE7D,SAAO,EAAE,MAAM,QAAQ,SAAS,KAAK;AACvC;AAKA,SAAS,cAAc,MAAiC;AACtD,MAAI;AACF,WAAO,IAAI,YAAY,SAAS,EAAE,OAAO,KAAK,CAAC,EAAE,OAAO,IAAI;AAAA,EAC9D,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,SAAS,eAAe,MAAoC;AAC1D,MAAI,SAAS;AAGb,QAAM,aAAa,SAAS,MAAM,MAAM;AACxC,MAAI,eAAe,GAAI,QAAO;AAC9B,QAAM,UAAU,WAAW,KAAK,MAAM,QAAQ,UAAU,CAAC;AACzD,WAAS,aAAa;AAGtB,MAAI,UAAU,KAAK,OAAQ,QAAO;AAClC,QAAM,kBAAkB,KAAK,MAAM,KAAK;AACxC,YAAU;AAGV,MAAI,UAAU,KAAK,OAAQ,QAAO;AAClC,QAAM,oBAAoB,KAAK,MAAM,KAAK;AAC1C,YAAU;AAGV,QAAM,UAAU,SAAS,MAAM,MAAM;AACrC,MAAI,YAAY,GAAI,QAAO;AAC3B,QAAM,cAAc,WAAW,KAAK,MAAM,QAAQ,OAAO,CAAC;AAC1D,WAAS,UAAU;AAGnB,QAAM,WAAW,SAAS,MAAM,MAAM;AACtC,MAAI,aAAa,GAAI,QAAO;AAC5B,QAAM,oBAAoB,WAAW,KAAK,MAAM,QAAQ,QAAQ,CAAC;AACjE,WAAS,WAAW;AAGpB,MAAI;AACJ,MAAI,oBAAoB,GAAG;AAEzB,UAAM,eAAe,eAAe,KAAK,MAAM,MAAM,CAAC;AACtD,QAAI,CAAC,aAAc,QAAO;AAC1B,WAAO,WAAW,YAAY;AAAA,EAChC,OAAO;AACL,WAAO,WAAW,KAAK,MAAM,MAAM,CAAC;AAAA,EACtC;AAEA,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAKA,SAAS,SAAS,MAAkB,QAAwB;AAC1D,WAAS,IAAI,QAAQ,IAAI,KAAK,QAAQ,KAAK;AACzC,QAAI,KAAK,CAAC,MAAM,GAAG;AACjB,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAKA,SAAS,aAAa,MAA0B;AAC9C,MAAI,SAAS;AACb,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,cAAU,OAAO,aAAa,KAAK,CAAC,KAAK,CAAC;AAAA,EAC5C;AACA,SAAO;AACT;AAKA,SAAS,WAAW,MAA0B;AAC5C,SAAO,IAAI,YAAY,OAAO,EAAE,OAAO,IAAI;AAC7C;AAUA,SAAS,eAAe,OAAsC;AAE5D,SAAO;AACT;;;ACpOA,IAAM,kBAAkB,IAAI,WAAW,CAAC,IAAM,IAAM,IAAM,EAAI,CAAC;AAWxD,SAAS,iBAAiB,MAAsC;AACrE,MAAI,CAAC,OAAO,IAA
I,GAAG;AACjB,WAAO,OAAO,MAAM,EAAE,MAAM,mBAAmB,CAAC;AAAA,EAClD;AAEA,QAAM,YAAY,cAAc,IAAI;AACpC,MAAI,CAAC,WAAW;AACd,WAAO,OAAO,GAAG,CAAC,CAAC;AAAA,EACrB;AAEA,QAAM,WAAW,KAAK;AAAA,IACpB,UAAU;AAAA,IACV,UAAU,SAAS,UAAU;AAAA,EAC/B;AAGA,QAAM,WAAW,0BAA0B,QAAQ;AAEnD,SAAO,OAAO,GAAG,QAAQ;AAC3B;AAWO,SAAS,cACd,MAC2C;AAE3C,MAAI,SAAS;AAEb,SAAO,SAAS,KAAK,SAAS,GAAG;AAE/B,UAAM,YAAY,KAAK,MAAM,QAAQ,SAAS,CAAC;AAG/C,UAAM,YAAY,aAAa,MAAM,SAAS,CAAC;AAG/C,QAAI,YAAY,WAAW,eAAe,GAAG;AAE3C,aAAO;AAAA,QACL,QAAQ,SAAS;AAAA,QACjB,QAAQ;AAAA,MACV;AAAA,IACF;AAIA,UAAM,aAAa,YAAa,YAAY;AAC5C,cAAU,IAAI;AAAA,EAChB;AAEA,SAAO;AACT;;;AChEO,SAAS,mBAAmB,QAAyC;AAC1E,SAAO,OAAO,IAAI,CAAC,WAAW;AAAA,IAC5B,SAAS,MAAM;AAAA,IACf,MAAM,MAAM;AAAA,EACd,EAAE;AACJ;AAiBO,SAAS,kBACd,UACiB;AACjB,QAAM,UAA2B,CAAC;AAElC,aAAW,WAAW,UAAU;AAC9B,UAAM,UAAU,gBAAgB,QAAQ,MAAM;AAC9C,UAAM,OAAO,QAAQ;AAIrB,QAAI,QAAQ,OAAO,SAAS,qBAAqB,KAAK,WAAW,GAAG,GAAG;AACrE,YAAM,WAAW,2BAA2B,IAAI;AAChD,UAAI,UAAU;AACZ,gBAAQ,KAAK,GAAG,QAAQ;AACxB;AAAA,MACF;AAAA,IACF;AAEA,YAAQ,KAAK,EAAE,SAAS,KAAK,CAAC;AAAA,EAChC;AAEA,SAAO;AACT;AAYA,SAAS,2BAA2B,MAAsC;AACxE,QAAM,cAAc,UAAmC,IAAI;AAC3D,MAAI,CAAC,YAAY,IAAI;AACnB,WAAO;AAAA,EACT;AAEA,QAAM,QAAQ,YAAY;AAG1B,MACE,OAAO,UAAU,YACjB,UAAU,QACT,OAAO,MAAM,aAAa,YACzB,CAAC,MAAM,SAAS,WAAW,SAAS,KACtC,OAAO,MAAM,YAAY,UACzB;AACA,WAAO;AAAA,EACT;AAEA,QAAM,UAA2B,CAAC,EAAE,SAAS,YAAY,MAAM,UAAU,CAAC;AAG1E,QAAM,cAAc,UAAmB,MAAM,OAAO;AAEpD,SAAO;AAAA,IACL,GAAG;AAAA,IACH,YAAY,KACR,EAAE,SAAS,WAAW,MAAM,KAAK,UAAU,YAAY,KAAK,EAAE,IAC9D,EAAE,SAAS,WAAW,MAAM,MAAM,QAAQ;AAAA,EAChD;AACF;AAKA,SAAS,gBAAgB,QAAuC;AAC9D,UAAQ,OAAO,MAAM;AAAA,IACnB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO,OAAO,UAAU;AAAA,IAC1B,KAAK;AACH,aAAO,OAAO,UAAU;AAAA,IAC1B,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,EACX;AACF;;;ACrFO,SAAS,KAAK,MAA+B;AAClD,QAAM,SAAS,aAAa,IAAI;AAEhC,MAAI,CAAC,QAAQ;AACX,WAAO,EAAE,QAAQ,WAAW,SAAS,uBAAuB;AAAA,EAC9D;AAGA,QAAM,YAAY,gBAAgB,MAAM,MAAM;AAC9C,MAAI,UAAU,WAAW,WAAW;AAClC,WAAO;AAAA,EACT;AACA,QAAM,MAAM,UAAU;AAGtB,QAAM,UACJ,IAAI,WAAW,QACX,mBAAmB,IAAI,MAAM,IAC7B,kBAAkB,IAAI,QAAQ;AAGpC,QAAM,cAAc,cAAc,OAAO;AACzC,MAAI,CAAC,YAAY,IAAI;AACnB,WAAO,EAAE,QAAQ,gBAAgB,IAAI;AAAA,EACvC;AAEA,QAAM,WAAW,YAAY;AAG7B,MAAI,SAAS,UAAU,KAAK,SAAS,WAAW,GAAG;AACjD,UAAM,OAAO,QAAQ,MAAM,EAAE,eAAe,IAAI;AAEhD,QAAI,MAAM;AACR,eAAS,QAAQ,SAAS,SAAS,KAAK;AACxC,eAAS,SAAS,SAAS,UAAU,KAAK;AAAA,IAC5C;AAAA,EACF;AAEA,SAAO,EAAE,QAAQ,WAAW,UAAU,IAAI;AAC5C;AAOA,IAAM,UAAU;AAAA,EACd,KAAK;AAAA,IACH,cAAc;AAAA,IACd,gBAAgB;AAAA,IAChB,WAAW,CAAC,YAA4B,EAAE,QAAQ,OAAgB,OAAO;AAAA,EAC3E;AAAA,EACA,MAAM;AAAA,IACJ,cAAc;AAAA,IACd,gBAAgB;AAAA,IAChB,WAAW,CAAC,cAAiC;AAAA,MAC3C,QAAQ;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA,EACA,MAAM;AAAA,IACJ,cAAc;AAAA,IACd,gBAAgB;AAAA,IAChB,WAAW,CAAC,cAAiC;AAAA,MAC3C,QAAQ;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACF;AAWA,SAAS,gBAAgB,MAAkB,QAAoC;AAC7E,QAAM,SAAS,QAAQ,MAAM,EAAE,aAAa,IAAI;AAEhD,MAAI,CAAC,OAAO,IAAI;AACd,UAAM,UACJ,OAAO,MAAM,SAAS,qBAClB,WAAW,OAAO,YAAY,CAAC,eAC/B,OAAO,MAAM;AACnB,WAAO,EAAE,QAAQ,WAAW,QAAQ;AAAA,EACtC;AAEA,MAAI,OAAO,MAAM,WAAW,EAAG,QAAO,EAAE,QAAQ,QAAQ;AAGxD,MAAI,WAAW,OAAO;AACpB,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,KAAK,QAAQ,IAAI,UAAU,OAAO,KAAuB;AAAA,IAC3D;AAAA,EACF;AACA,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,KAAK,QAAQ,MAAM,EAAE,UAAU,OAAO,KAA0B;AAAA,EAClE;AACF;AAKA,SAAS,kBACP,MAC0C;AAC1C,QAAMC,wBAAuB;AAC7B,MAAI,KAAK,SAAS,GAAI,QAAO;AAI7B,SAAO;AAAA,IACL,OAAO,aAAa,MAAMA,wBAAuB,CAAC;AAAA,IAClD,QAAQ,aAAa,MAAMA,wBAAuB,EAAE;AAAA,EACtD;AACF;AAKA,SAAS,mBACP,MAC0C;AAE1C,MAAI,SAAS;AACb,SAAO,SAAS,KAAK,SAAS,GAAG;AAE/B,QAAI,KAAK,MAAM,MAAM,KAAM;AAEzB;AACA;AAAA,IACF;AAEA,UAAM,SAAS,KAAK,SAAS,CAAC,KAAK;AACnC,QAAI,WAAW,KAAM;AACnB;AACA;AA
AA,IACF;AAGA,UAAM,UAAW,KAAK,SAAS,CAAC,KAAK,MAAM,KAAM,KAAK,SAAS,CAAC,KAAK;AAGrE,QACE,UAAU,OACV,UAAU,OACV,WAAW,OACX,WAAW,OACX,WAAW,KACX;AAGA,YAAM,UAAW,KAAK,SAAS,CAAC,KAAK,MAAM,KAAM,KAAK,SAAS,CAAC,KAAK;AACrE,YAAM,SAAU,KAAK,SAAS,CAAC,KAAK,MAAM,KAAM,KAAK,SAAS,CAAC,KAAK;AACpE,aAAO,EAAE,OAAO,OAAO;AAAA,IACzB;AAEA,cAAU,IAAI;AACd,QAAI,WAAW,IAAM;AAAA,EACvB;AACA,SAAO;AACT;AAKA,SAAS,mBACP,MAC0C;AAE1C,MAAI,SAAS;AAEb,SAAO,SAAS,KAAK,QAAQ;AAC3B,QAAI,SAAS,IAAI,KAAK,OAAQ;AAE9B,UAAM,YAAY,cAAc,MAAM,MAAM;AAC5C,UAAM,YAAY,aAAa,MAAM,SAAS,CAAC;AAC/C,UAAM,aAAa,YAAa,YAAY;AAE5C,QAAI,cAAc,QAAQ;AAGxB,YAAM,UAAU,aAAa,MAAM,SAAS,EAAE;AAC9C,YAAM,UAAU,aAAa,MAAM,SAAS,EAAE;AAC9C,aAAO,EAAE,OAAO,UAAU,GAAG,QAAQ,UAAU,EAAE;AAAA,IACnD;AAEA,QAAI,cAAc,QAAQ;AAIxB,YAAM,QAAQ,SAAS;AACvB,YAAM,OACH,KAAK,KAAK,KAAK,MACd,KAAK,QAAQ,CAAC,KAAK,MAAM,KACzB,KAAK,QAAQ,CAAC,KAAK,MAAM;AAC7B,YAAM,WAAW,EAAE,MAAM;AAEzB,UAAI,UAAU;AAEZ,YACE,KAAK,QAAQ,CAAC,MAAM,OACpB,KAAK,QAAQ,CAAC,MAAM,KACpB,KAAK,QAAQ,CAAC,MAAM,IACpB;AAGA,gBAAM,QAAQ,KAAK,QAAQ,CAAC,KAAK,MAAO,KAAK,QAAQ,CAAC,KAAK,MAAM;AACjE,gBAAM,QAAQ,KAAK,QAAQ,CAAC,KAAK,MAAO,KAAK,QAAQ,CAAC,KAAK,MAAM;AACjE,iBAAO,EAAE,OAAO,OAAO,OAAQ,QAAQ,OAAO,MAAO;AAAA,QACvD;AAAA,MACF;AAAA,IACF;AAEA,QAAI,cAAc,QAAQ;AAGxB,UAAI,KAAK,SAAS,CAAC,MAAM,IAAM;AAE7B,cAAM,OAAO,aAAa,MAAM,SAAS,CAAC;AAC1C,cAAM,SAAS,OAAO,SAAU;AAChC,cAAM,UAAW,QAAQ,KAAM,SAAU;AACzC,eAAO,EAAE,OAAO,OAAO;AAAA,MACzB;AAAA,IACF;AAEA,cAAU,IAAI;AAAA,EAChB;AACA,SAAO;AACT;;;ACjQO,IAAM,kBAAkB,CAC7B,SACA,SAEA,SAAS,SAAY,CAAC,EAAE,MAAM,QAAQ,SAAS,KAAK,CAAC,IAAI,CAAC;AASrD,IAAM,kBAAkB,CAC7B,SACA,SAEA,SAAS,SACL;AAAA,EACE;AAAA,IACE,MAAM;AAAA,IACN;AAAA,IACA,iBAAiB;AAAA,IACjB,mBAAmB;AAAA,IACnB,aAAa;AAAA,IACb,mBAAmB;AAAA,IACnB;AAAA,EACF;AACF,IACA,CAAC;AASA,IAAM,cAAc,CACzB,UACA,SACgC,SAAS,KAAK,CAAC,MAAM,EAAE,OAAO,SAAS,IAAI;AAQtE,IAAM,YAAY,CAAC,UAAuC;AAC/D,MAAI,UAAU,OAAW,QAAO;AAChC,SAAO,OAAO,UAAU,WAAW,QAAQ,KAAK,UAAU,KAAK;AACjE;;;AC/BO,SAAS,cAAc,MAAsB;AAClD,SAAO,KAAK,QAAQ,oBAAoB,CAAC,SAAS;AAChD,UAAM,OAAO,KAAK,WAAW,CAAC,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG;AAC5D,WAAO,MAAM,IAAI;AAAA,EACnB,CAAC;AACH;AAWA,SAAS,aAAa,MAAuB;AAE3C,SAAO,eAAe,KAAK,IAAI;AACjC;AAUO,SAAS,mBACd,SACA,MACA,UACgB;AAChB,MAAI,SAAS,OAAW,QAAO,CAAC;AAEhC,UAAQ,UAAU;AAAA,IAChB,KAAK,WAAW;AAEd,YAAM,YAAY,aAAa,IAAI,IAAI,SAAS;AAChD,aAAO,cAAc,SACjB,gBAAgB,SAAS,IAAI,IAC7B,gBAAgB,SAAS,IAAI;AAAA,IACnC;AAAA,IAEA,KAAK,uBAAuB;AAE1B,YAAM,UAAU,cAAc,IAAI;AAClC,aAAO,gBAAgB,SAAS,OAAO;AAAA,IACzC;AAAA,IAEA,KAAK,iBAAiB;AAEpB,aAAO,gBAAgB,SAAS,IAAI;AAAA,IACtC;AAAA,EACF;AACF;;;ACvEO,SAAS,0BACd,QACmB;AAEnB,QAAM,aAAa,OAAO,KAAK,CAAC,MAAM,EAAE,YAAY,YAAY;AAChE,MAAI,CAAC,YAAY;AACf,WAAO,CAAC;AAAA,EACV;AAGA,QAAM,WAA8B;AAAA,IAClC;AAAA,MACE,QAAQ,EAAE,MAAM,kBAAkB;AAAA,MAClC,MAAM,WAAW;AAAA,IACnB;AAAA,EACF;AAEA,QAAM,WAAW,OAAO,KAAK,CAAC,MAAM,EAAE,YAAY,UAAU;AAC5D,MAAI,UAAU;AACZ,aAAS,KAAK;AAAA,MACZ,QAAQ,EAAE,MAAM,eAAe;AAAA,MAC/B,MAAM,SAAS;AAAA,IACjB,CAAC;AAAA,EACH;AAEA,QAAM,QAAQ,OAAO,KAAK,CAAC,MAAM,EAAE,YAAY,OAAO;AACtD,MAAI,OAAO;AACT,aAAS,KAAK;AAAA,MACZ,QAAQ,EAAE,MAAM,mBAAmB;AAAA,MACnC,MAAM,MAAM;AAAA,IACd,CAAC;AAAA,EACH;AAEA,QAAM,cAAc,OAAO,KAAK,CAAC,MAAM,EAAE,YAAY,aAAa;AAClE,MAAI,aAAa;AACf,aAAS,KAAK;AAAA,MACZ,QAAQ,EAAE,MAAM,uBAAuB;AAAA,MACvC,MAAM,YAAY;AAAA,IACpB,CAAC;AAAA,EACH;AAEA,QAAM,OAAO,OAAO,KAAK,CAAC,MAAM,EAAE,YAAY,MAAM;AACpD,MAAI,MAAM;AACR,aAAS,KAAK;AAAA,MACZ,QAAQ,EAAE,MAAM,WAAW;AAAA,MAC3B,MAAM,KAAK;AAAA,IACb,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAQO,SAAS,0BACd,UACgB;AAEhB,QAAM,cAAc,SAAS,KAAK,CAAC,MAAM,EAAE,OAAO,SAAS,iBAAiB;AAC5E,MAAI,CAAC,aAAa;AAChB,WAAO,CAAC;AAAA,EACV;AAGA,QAAM,mBAAmB;AAAA,IACvB;AAAA,IACA,YAAY;AAAA,IACZ;AAAA,EACF;AAGA,QAAM,SAAyB,CAAC,GAAG,gBAAgB;AAEn
D,QAAM,WAAW,YAAY,UAAU,cAAc;AACrD,MAAI,UAAU;AACZ,WAAO,KAAK,GAAG,gBAAgB,YAAY,SAAS,IAAI,CAAC;AAAA,EAC3D;AAEA,QAAM,QAAQ,YAAY,UAAU,kBAAkB;AACtD,MAAI,OAAO;AACT,WAAO,KAAK,GAAG,gBAAgB,SAAS,MAAM,IAAI,CAAC;AAAA,EACrD;AAEA,QAAM,cAAc,YAAY,UAAU,sBAAsB;AAChE,MAAI,aAAa;AAEf,WAAO,KAAK,GAAG,gBAAgB,eAAe,YAAY,IAAI,CAAC;AAAA,EACjE;AAEA,QAAM,OAAO,YAAY,UAAU,UAAU;AAC7C,MAAI,MAAM;AACR,WAAO,KAAK,GAAG,gBAAgB,QAAQ,KAAK,IAAI,CAAC;AAAA,EACnD;AAEA,SAAO;AACT;;;AChGO,SAAS,uBACd,QACmB;AACnB,QAAM,OAAgC,CAAC;AAEvC,aAAW,SAAS,QAAQ;AAC1B,UAAM,SAAS,UAAmB,MAAM,IAAI;AAC5C,QAAI,OAAO,IAAI;AACb,WAAK,MAAM,OAAO,IAAI,OAAO;AAAA,IAC/B,OAAO;AACL,WAAK,MAAM,OAAO,IAAI,MAAM;AAAA,IAC9B;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,MACE,QAAQ,EAAE,MAAM,kBAAkB;AAAA,MAClC,MAAM,KAAK,UAAU,IAAI;AAAA,IAC3B;AAAA,EACF;AACF;AASO,SAAS,uBACd,UACA,kBACgB;AAChB,QAAM,cAAc,YAAY,UAAU,iBAAiB;AAC3D,MAAI,CAAC,aAAa;AAChB,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,SAAS,UAAmC,YAAY,IAAI;AAClE,MAAI,CAAC,OAAO,IAAI;AAGd,WAAO,CAAC;AAAA,EACV;AAIA,SAAO,OAAO,QAAQ,OAAO,KAAK,EAAE;AAAA,IAAQ,CAAC,CAAC,SAAS,KAAK,MAC1D,mBAAmB,SAAS,UAAU,KAAK,GAAG,gBAAgB;AAAA,EAChE;AACF;;;AC/CO,SAAS,4BACd,QACmB;AAEnB,SAAO,uBAAuB,MAAM;AACtC;AAOA,IAAM,yBAAyB,CAC7B,aAC0B;AAC1B,QAAM,mBAAmB,YAAY,UAAU,sBAAsB;AACrE,QAAM,OAAO,YAAY,UAAU,UAAU;AAE7C,MAAI,CAAC,oBAAoB,CAAC,MAAM;AAC9B,WAAO;AAAA,EACT;AAEA,SAAO;AAAA,IACL,GAAG,mBAAmB,UAAU,MAAM,MAAM,qBAAqB;AAAA,IACjE,GAAG;AAAA,MACD;AAAA,MACA,kBAAkB;AAAA,MAClB;AAAA,IACF;AAAA,EACF;AACF;AAOA,IAAM,8BAA8B,CAClC,aAC0B;AAC1B,QAAM,SAAS,uBAAuB,UAAU,qBAAqB;AACrE,SAAO,OAAO,SAAS,IAAI,SAAS;AACtC;AAYO,SAAS,4BACd,UACgB;AAEhB,SACE,uBAAuB,QAAQ,KAC/B,4BAA4B,QAAQ,KACpC,CAAC;AAEL;;;AClEO,SAAS,kCACd,QACmB;AACnB,QAAM,OAAO,OAAO;AAAA,IAClB,OAAO,IAAI,CAAC,UAAU,CAAC,MAAM,SAAS,MAAM,IAAI,CAAC;AAAA,EACnD;AAEA,SAAO;AAAA,IACL;AAAA,MACE,QAAQ,EAAE,MAAM,kBAAkB;AAAA,MAClC,MAAM,KAAK,UAAU,IAAI;AAAA,IAC3B;AAAA,EACF;AACF;AAQO,SAAS,kCACd,UACgB;AAChB,QAAM,cAAc,YAAY,UAAU,iBAAiB;AAC3D,MAAI,CAAC,aAAa;AAChB,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,SAAS,UAAmC,YAAY,IAAI;AAClE,MAAI,CAAC,OAAO,IAAI;AACd,WAAO,CAAC;AAAA,EACV;AAGA,SAAO,OAAO,QAAQ,OAAO,KAAK,EAAE;AAAA,IAAQ,CAAC,CAAC,SAAS,KAAK,MAC1D,mBAAmB,SAAS,UAAU,KAAK,GAAG,SAAS;AAAA,EACzD;AACF;;;ACtCO,SAAS,6BACd,QACmB;AAEnB,SAAO,uBAAuB,MAAM;AACtC;AAQO,SAAS,6BACd,UACgB;AAEhB,SAAO,uBAAuB,UAAU,SAAS;AACnD;;;AC5BA,IAAM,mBAAmB;AACzB,IAAM,gBAAgB;AAgBf,SAAS,4BACd,QACmB;AACnB,QAAM,OAAO,qBAAqB,MAAM;AACxC,QAAM,qBAAsC;AAAA,IAC1C,QAAQ,EAAE,MAAM,kBAAkB;AAAA,IAClC,MAAM,KAAK,UAAU,IAAI;AAAA,EAC3B;AAGA,QAAM,cAAc,OAAO,KAAK,CAAC,MAAM,EAAE,YAAY,aAAa;AAClE,QAAM,qBAAkD,eAAe;AAAA,IACrE,QAAQ,EAAE,MAAM,uBAAuB;AAAA,IACvC,MAAM,WAAW,YAAY,IAAI;AAAA,EACnC;AAEA,QAAM,WAAW,OAAO,KAAK,CAAC,MAAM,EAAE,YAAY,UAAU;AAC5D,QAAM,kBAA+C,YAAY;AAAA,IAC/D,QAAQ,EAAE,MAAM,eAAe;AAAA,IAC/B,MAAM,SAAS;AAAA,EACjB;AAEA,QAAM,QAAQ,OAAO,KAAK,CAAC,MAAM,EAAE,YAAY,OAAO;AACtD,QAAM,eAA4C,SAAS;AAAA,IACzD,QAAQ,EAAE,MAAM,mBAAmB;AAAA,IACnC,MAAM,MAAM;AAAA,EACd;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,EAAE,OAAO,CAAC,YAAwC,QAAQ,OAAO,CAAC;AACpE;AAKA,SAAS,qBAAqB,QAAgD;AAC5E,SAAO,kBAAkB,IAAI,CAAC,QAAQ;AACpC,UAAM,QAAQ,OAAO,KAAK,CAAC,MAAM,EAAE,YAAY,GAAG;AAClD,WAAO,QAAQ,EAAE,CAAC,GAAG,GAAG,MAAM,KAAK,IAAI;AAAA,EACzC,CAAC,EACE,OAAO,CAAC,UAA2C,UAAU,IAAI,EACjE;AAAA,IACC,CAAC,KAAK,UAAU,OAAO,OAAO,KAAK,KAAK;AAAA,IACxC,CAAC;AAAA,EACH;AACJ;AAKA,IAAM,oBAAoB;AAAA,EACxB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAQO,SAAS,4BACd,UACgB;AAChB,QAAM,iBAAiB,YAAY,UAAU,iBAAiB;AAC9D,QAAM,iBAAiB,YAAY,UAAU,sBAAsB;AACnE,QAAM,cAAc,YAAY,UAAU,cAAc;AACxD,QAAM,WAAW,YAAY,UAAU,kBAAkB;AAEzD,SAAO,cAAc,gBAAgB,gBAAgB,aAAa,QAAQ;AAC5E;AAKA,SAAS,cACP,gBACA,gBACA,aACA,UACgB;AAChB,MAAI,CAAC
,kBAAkB,CAAC,gBAAgB;AACtC,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,SAAS,UAAmC,eAAe,IAAI;AACrE,MAAI,CAAC,OAAO,IAAI;AAEd,WAAO,gBAAgB,WAAW,eAAe,IAAI;AAAA,EACvD;AAEA,QAAM,WAAW,OAAO;AAGxB,QAAM,kBAAkB;AAAA,IACtB;AAAA,IACA,UAAU,SAAS,WAAW;AAAA,EAChC;AAEA,SAAO;AAAA;AAAA,IAEL;AAAA,MACE;AAAA,MACA,UAAU,QAAQ,UAAU,SAAS,KAAK,KAAK;AAAA,IACjD;AAAA;AAAA,IAEA,mBAAmB,eAAe,iBAAiB,SAAS;AAAA;AAAA,IAE5D;AAAA,MACE;AAAA,MACA,aAAa,QAAQ,UAAU,SAAS,QAAQ,KAAK;AAAA,IACvD;AAAA;AAAA,IAEA,gBAAgB,UAAU,UAAU,SAAS,MAAM,CAAC;AAAA;AAAA,IAEpD,gBAAgB,mBAAmB,UAAU,SAAS,iBAAiB,CAAC,CAAC;AAAA;AAAA,IAEzE,gBAAgB,WAAW,UAAU,SAAS,OAAO,CAAC;AAAA,EACxD,EAAE,KAAK;AACT;AAQA,SAAS,uBACP,gBACA,iBACoB;AAEpB,MAAI,gBAAgB,MAAM;AACxB,UAAM,OAAO,eAAe;AAE5B,WAAO,KAAK,WAAW,UAAU,IAAI,KAAK,MAAM,CAAC,IAAI;AAAA,EACvD;AAGA,MAAI,iBAAiB;AAEnB,WAAO,gBAAgB,WAAW,UAAU,IACxC,gBAAgB,MAAM,CAAC,IACvB;AAAA,EACN;AAEA,SAAO;AACT;;;ACrKO,SAAS,oBACd,SAC+C;AAC/C,SAAO,CAAC,WAAW;AACjB,UAAM,QAAQ,OAAO,KAAK,CAAC,MAAM,EAAE,YAAY,OAAO;AACtD,WAAO,CAAC,QACJ,CAAC,IACD,CAAC,EAAE,QAAQ,EAAE,MAAM,kBAAkB,GAAG,MAAM,MAAM,KAAK,CAAC;AAAA,EAChE;AACF;AASO,SAAS,oBACd,SACA,kBACiD;AACjD,SAAO,CAAC,aAAa;AACnB,UAAM,cAAc,SAAS;AAAA,MAC3B,CAAC,MAAM,EAAE,OAAO,SAAS;AAAA,IAC3B;AACA,QAAI,CAAC,YAAa,QAAO,CAAC;AAE1B,WAAO,mBAAmB,SAAS,YAAY,MAAM,gBAAgB;AAAA,EACvE;AACF;;;ACxBO,SAAS,4BACd,QACmB;AAEnB,QAAM,kBAAkB,OAAO,KAAK,CAAC,MAAM,EAAE,YAAY,YAAY;AACrE,MAAI,CAAC,iBAAiB;AACpB,WAAO,CAAC;AAAA,EACV;AAGA,QAAM,SAAS,UAAmB,gBAAgB,IAAI;AACtD,QAAM,WAA8B;AAAA,IAClC;AAAA,MACE,QAAQ,EAAE,MAAM,kBAAkB;AAAA,MAClC,MAAM,OAAO,KAAK,KAAK,UAAU,OAAO,KAAK,IAAI,gBAAgB;AAAA,IACnE;AAAA,EACF;AAGA,QAAM,cAAc,OAAO,KAAK,CAAC,MAAM,EAAE,YAAY,QAAQ;AAC7D,MAAI,aAAa;AACf,aAAS,KAAK;AAAA,MACZ,QAAQ,EAAE,MAAM,WAAW;AAAA,MAC3B,MAAM,YAAY;AAAA,IACpB,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAcO,SAAS,4BACd,UACgB;AAChB,QAAM,cAAc,YAAY,UAAU,iBAAiB;AAC3D,QAAM,OAAO,YAAY,UAAU,UAAU;AAE7C,QAAM,SAAyB;AAAA;AAAA,IAE7B,mBAAmB,UAAU,MAAM,MAAM,qBAAqB;AAAA;AAAA,IAE9D,mBAAmB,cAAc,aAAa,MAAM,qBAAqB;AAAA,EAC3E,EAAE,KAAK;AAEP,SAAO;AACT;;;AC1BO,SAAS,gBACd,aACA,cACkB;AAElB,MAAI,YAAY,WAAW,SAAS;AAClC,WAAO,OAAO,MAAM,EAAE,MAAM,iBAAiB,CAAC;AAAA,EAChD;AAEA,MAAI,YAAY,WAAW,WAAW;AACpC,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,QAAQ,YAAY;AAAA,IACtB,CAAC;AAAA,EACH;AAIA,MAAI,YAAY,WAAW,gBAAgB;AACzC,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,UAAU;AAAA,IACZ,CAAC;AAAA,EACH;AAEA,QAAM,MAAM,YAAY;AAGxB,MACG,IAAI,WAAW,SAAS,iBAAiB,SACzC,IAAI,WAAW,UAAU,iBAAiB,UAC1C,IAAI,WAAW,UAAU,iBAAiB,QAC3C;AACA,WAAO,OAAO,GAAG,GAAG;AAAA,EACtB;AAEA,QAAM,WAAW,YAAY,SAAS;AAGtC,QAAM,YAAY,mBAAmB,QAAQ;AAC7C,MAAI,CAAC,WAAW;AAEd,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO,UAAU,KAAK,YAAY;AACpC;AAwBA,SAAS,sBACP,eACA,eACa;AACb,SAAO,CAAC,KAAK,iBAAiB;AAC5B,QAAI,IAAI,WAAW,OAAO;AAExB,UAAI,iBAAiB,OAAO;AAC1B,eAAO,OAAO,GAAG,GAAG;AAAA,MACtB;AAEA,YAAM,WAAW,cAAc,IAAI,MAAM;AACzC,aAAO,OAAO,GAAG,EAAE,QAAQ,cAAc,SAAS,CAAC;AAAA,IACrD;AAGA,QAAI,iBAAiB,UAAU,iBAAiB,QAAQ;AACtD,aAAO,OAAO,GAAG,EAAE,QAAQ,cAAc,UAAU,IAAI,SAAS,CAAC;AAAA,IACnE;AAGA,UAAM,SAAS,cAAc,IAAI,QAAQ;AACzC,WAAO,OAAO,GAAG,EAAE,QAAQ,OAAO,OAAO,CAAC;AAAA,EAC5C;AACF;AAGA,IAAM,iBAAiB;AAAA,EACrB;AAAA,EACA;AACF;AAEA,IAAM,eAAe;AAAA,EACnB;AAAA,EACA;AACF;AAEA,IAAM,iBAAiB;AAAA,EACrB;AAAA,EACA;AACF;AAEA,IAAM,uBAAuB;AAAA,EAC3B;AAAA,EACA;AACF;AAEA,IAAM,iBAAiB;AAAA,EACrB,oBAAoB,SAAS;AAAA,EAC7B,oBAAoB,WAAW,qBAAqB;AACtD;AAEA,IAAM,uBAAuB;AAAA,EAC3B,oBAAoB,YAAY;AAAA,EAChC,oBAAoB,cAAc,qBAAqB;AACzD;AAEA,IAAM,iBAAiB;AAAA,EACrB;AAAA,EACA;AACF;AAEA,IAAM,kBAAkB;AAAA,EACtB;AAAA,EACA;AACF;AAEA,IAAM,iBAAiB;AAAA,EACrB,oBAAoB,YAAY;AAAA,EAChC,oBAAoB,cAAc,qBAAqB;AACzD;AAKA,IAAM,qBAAqB;AAAA;AAAA,EAEzB,SAAS;AAAA;AAAA,EAET
,YAAY;AAAA,EACZ,WAAW;AAAA,EACX,OAAO;AAAA,EACP,aAAa;AAAA,EACb,SAAS;AAAA;AAAA,EAET,SAAS;AAAA,EACT,WAAW;AAAA,EACX,oBAAoB;AAAA;AAAA,EAEpB,eAAe;AAAA;AAAA,EAEf,SAAS;AAAA,EACT,kBAAkB;AAAA;AAAA,EAElB,SAAS;AAAA;AAAA,EAET,UAAU;AAAA;AAAA,EAEV,YAAY;AACd;;;ACzMO,SAAS,kBAAkB,UAAyC;AAEzE,QAAM,eAAe,SAAS;AAAA,IAC5B,CAAC,MACC,EAAE,OAAO,SAAS,0BAClB,EAAE,OAAO,SAAS,cAClB,EAAE,OAAO,SAAS,kBAClB,EAAE,OAAO,SAAS;AAAA,EACtB;AACA,QAAM,kBAAkB,SAAS;AAAA,IAC/B,CAAC,MAAM,EAAE,OAAO,SAAS;AAAA,EAC3B;AAGA,MAAI,aAAa,WAAW,KAAK,gBAAgB,WAAW,GAAG;AAC7D,WAAO,IAAI,WAAW,CAAC;AAAA,EACzB;AAEA,QAAM,iBAAiB;AAGvB,QAAM,WAAmE,CAAC;AAC1E,QAAM,WAAmE,CAAC;AAE1E,aAAW,OAAO,cAAc;AAC9B,QAAI,IAAI,OAAO,SAAS,wBAAwB;AAC9C,YAAM,OAAO,eAAe,IAAI,MAAM,IAAI,OAAO,MAAM;AACvD,eAAS,KAAK,EAAE,KAAK,uBAAuB,MAAM,GAAG,KAAK,CAAC;AAAA,IAC7D,WAAW,IAAI,OAAO,SAAS,YAAY;AACzC,YAAM,OAAO,eAAe,IAAI,MAAM,IAAI,OAAO,MAAM;AACvD,eAAS,KAAK,EAAE,KAAK,UAAU,MAAM,GAAG,KAAK,CAAC;AAAA,IAChD,WAAW,IAAI,OAAO,SAAS,gBAAgB;AAC7C,YAAM,OAAO,eAAe,IAAI,IAAI;AACpC,eAAS,KAAK,EAAE,KAAK,cAAc,MAAM,GAAG,KAAK,CAAC;AAAA,IACpD,WAAW,IAAI,OAAO,SAAS,oBAAoB;AACjD,YAAM,OAAO,eAAe,IAAI,IAAI;AACpC,eAAS,KAAK,EAAE,KAAK,mBAAmB,MAAM,GAAG,KAAK,CAAC;AAAA,IACzD;AAAA,EACF;AAEA,aAAW,OAAO,iBAAiB;AACjC,QAAI,IAAI,OAAO,SAAS,mBAAmB;AACzC,YAAM,OAAO,kBAAkB,IAAI,IAAI;AACvC,eAAS,KAAK,EAAE,KAAK,kBAAkB,MAAM,GAAG,KAAK,CAAC;AAAA,IACxD;AAAA,EACF;AAEA,QAAM,aAAa,SAAS,SAAS;AACrC,MAAI,YAAY;AACd,aAAS,KAAK;AAAA,MACZ,KAAK;AAAA,MACL,MAAM;AAAA,MACN,MAAM,IAAI,WAAW,CAAC;AAAA,IACxB,CAAC;AAAA,EACH;AAGA,WAAS,KAAK,CAAC,GAAG,MAAM,EAAE,MAAM,EAAE,GAAG;AACrC,WAAS,KAAK,CAAC,GAAG,MAAM,EAAE,MAAM,EAAE,GAAG;AAGrC,QAAM,aAAa;AACnB,QAAM,iBAAiB,SAAS;AAChC,QAAM,WAAW,IAAI,KAAK,iBAAiB;AAC3C,QAAM,iBAAiB,SAAS;AAChC,QAAM,cAAc,aAAa,IAAI,KAAK,iBAAiB,IAAI;AAE/D,QAAM,aAAa;AACnB,QAAM,gBAAgB,aAAa;AACnC,MAAI,aAAa,gBAAgB;AAGjC,MAAI,YAAY;AACd,UAAM,aAAa,SAAS,KAAK,CAAC,MAAM,EAAE,QAAQ,oBAAoB;AACtE,QAAI,YAAY;AACd,kBAAY,WAAW,MAAM,GAAG,eAAe,cAAc;AAAA,IAC/D;AAAA,EACF;AAGA,QAAM,iBAAiB,oBAAI,IAGzB;AAEF,aAAW,OAAO,CAAC,GAAG,UAAU,GAAG,QAAQ,GAAG;AAC5C,QAAI,IAAI,KAAK,SAAS,GAAG;AACvB,qBAAe,IAAI,KAAK,UAAU;AAClC,oBAAc,IAAI,KAAK;AACvB,UAAI,IAAI,KAAK,SAAS,MAAM,GAAG;AAC7B,sBAAc;AAAA,MAChB;AAAA,IACF;AAAA,EACF;AAGA,QAAM,YAAY;AAClB,QAAM,SAAS,IAAI,WAAW,SAAS;AAGvC,SAAO,CAAC,IAAI;AACZ,SAAO,CAAC,IAAI;AACZ,cAAY,QAAQ,GAAG,IAAI,cAAc;AACzC,cAAY,QAAQ,GAAG,YAAY,cAAc;AAGjD,MAAI,SAAS;AACb,cAAY,QAAQ,QAAQ,gBAAgB,cAAc;AAC1D,YAAU;AAEV,aAAW,OAAO,UAAU;AAC1B,kBAAc,QAAQ,QAAQ,KAAK,eAAe,IAAI,GAAG,GAAG,cAAc;AAC1E,cAAU;AAAA,EACZ;AAEA,cAAY,QAAQ,QAAQ,GAAG,cAAc;AAC7C,YAAU;AAGV,MAAI,YAAY;AACd,gBAAY,QAAQ,QAAQ,gBAAgB,cAAc;AAC1D,cAAU;AAEV,eAAW,OAAO,UAAU;AAC1B;AAAA,QACE;AAAA,QACA;AAAA,QACA;AAAA,QACA,eAAe,IAAI,GAAG;AAAA,QACtB;AAAA,MACF;AACA,gBAAU;AAAA,IACZ;AAEA,gBAAY,QAAQ,QAAQ,GAAG,cAAc;AAAA,EAC/C;AAGA,aAAW,CAAC,KAAK,OAAO,KAAK,gBAAgB;AAC3C,WAAO,IAAI,IAAI,MAAM,OAAO;AAAA,EAC9B;AAEA,SAAO;AACT;AAKA,SAAS,cACP,MACA,QACA,KACA,YACA,gBACM;AACN,cAAY,MAAM,QAAQ,IAAI,KAAK,cAAc;AACjD,cAAY,MAAM,SAAS,GAAG,IAAI,MAAM,cAAc;AACtD,cAAY,MAAM,SAAS,GAAG,IAAI,KAAK,QAAQ,cAAc;AAE7D,MAAI,IAAI,KAAK,UAAU,GAAG;AACxB,SAAK,IAAI,IAAI,MAAM,SAAS,CAAC;AAAA,EAC/B,OAAO;AACL,gBAAY,MAAM,SAAS,GAAG,cAAc,GAAG,cAAc;AAAA,EAC/D;AACF;AAUA,SAAS,kBAAkB,MAA0B;AACnD,QAAM,YAAsB,CAAC;AAC7B,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,UAAM,OAAO,KAAK,WAAW,CAAC;AAC9B,cAAU,KAAK,OAAO,GAAI;AAC1B,cAAU,KAAM,QAAQ,IAAK,GAAI;AAAA,EACnC;AAEA,QAAM,SAAS,IAAI,WAAW,IAAI,UAAU,MAAM;AAGlD,SAAO,CAAC,IAAI;AACZ,SAAO,CAAC,IAAI;AACZ,SAAO,CAAC,IAAI;AACZ,SAAO,CAAC,IAAI;AACZ,SAAO,CAAC,IAAI;AACZ,SAAO,CAAC,IAAI;AACZ,SAAO,CAAC,IAAI;AACZ,SAAO,CAAC,IAAI;AAEZ,SAAO,IAAI,IAAI,WAAW,SAAS,GAAG,CAAC;AACvC,SAAO;AACT;AASA,SAAS,eAA
e,MAAc,QAA6B;AACjE,QAAM,WAAW,SAAS,GAAG,MAAM,KAAK,IAAI,KAAK;AACjD,QAAM,YAAY,IAAI,YAAY,EAAE,OAAO,QAAQ;AACnD,QAAM,SAAS,IAAI,WAAW,UAAU,SAAS,CAAC;AAClD,SAAO,IAAI,WAAW,CAAC;AACvB,SAAO,UAAU,MAAM,IAAI;AAC3B,SAAO;AACT;;;ACvOA,IAAMC,eAAc;AAGpB,IAAMC,cAAa;AAGnB,IAAM,aAAa;AAGnB,IAAM,aAAa;AAGnB,IAAMC,eAAc,IAAI,WAAW,CAAC,IAAM,KAAM,KAAM,KAAM,GAAM,CAAI,CAAC;AAchE,SAAS,kBACd,MACA,UACiB;AAEjB,MAAI,CAAC,OAAO,IAAI,GAAG;AACjB,WAAO,OAAO,MAAM,EAAE,MAAM,mBAAmB,CAAC;AAAA,EAClD;AAGA,QAAM,cAAc,SAAS,OAAO,CAAC,MAAM,EAAE,OAAO,SAAS,SAAS;AACtE,QAAM,eAAe,SAAS;AAAA,IAC5B,CAAC,MACC,EAAE,OAAO,SAAS,qBAClB,EAAE,OAAO,SAAS,0BAClB,EAAE,OAAO,SAAS,cAClB,EAAE,OAAO,SAAS,kBAClB,EAAE,OAAO,SAAS;AAAA,EACtB;AAGA,QAAM,gBAAgB,2BAA2B,IAAI;AACrD,MAAI,CAAC,cAAc,IAAI;AACrB,WAAO;AAAA,EACT;AAEA,QAAM,EAAE,WAAW,SAAS,IAAI,cAAc;AAG9C,QAAM,cACJ,aAAa,SAAS,IAAI,iBAAiB,YAAY,IAAI;AAG7D,QAAM,iBAAiB,YAAY,IAAI,CAAC,MAAM,gBAAgB,EAAE,IAAI,CAAC;AAGrE,MAAI,YAAY;AAChB,MAAI,aAAa;AACf,iBAAa,YAAY;AAAA,EAC3B;AACA,aAAW,OAAO,WAAW;AAC3B,iBAAa,IAAI;AAAA,EACnB;AACA,aAAW,OAAO,gBAAgB;AAChC,iBAAa,IAAI;AAAA,EACnB;AACA,eAAa,SAAS;AAGtB,QAAM,SAAS,IAAI,WAAW,SAAS;AACvC,MAAI,SAAS;AAGb,SAAO,QAAQ,IAAI;AACnB,SAAO,QAAQ,IAAI;AAGnB,MAAI,aAAa;AACf,WAAO,IAAI,aAAa,MAAM;AAC9B,cAAU,YAAY;AAAA,EACxB;AAGA,aAAW,OAAO,WAAW;AAC3B,WAAO,IAAI,KAAK,MAAM;AACtB,cAAU,IAAI;AAAA,EAChB;AAGA,aAAW,OAAO,gBAAgB;AAChC,WAAO,IAAI,KAAK,MAAM;AACtB,cAAU,IAAI;AAAA,EAChB;AAGA,SAAO,IAAI,UAAU,MAAM;AAE3B,SAAO,OAAO,GAAG,MAAM;AACzB;AAOA,SAAS,2BACP,MAIA;AACA,QAAM,YAA0B,CAAC;AACjC,MAAI,SAAS;AAEb,SAAO,SAAS,KAAK,SAAS,GAAG;AAE/B,QAAI,KAAK,MAAM,MAAM,KAAM;AACzB,aAAO,OAAO,MAAM;AAAA,QAClB,MAAM;AAAA,QACN,SAAS,6BAA6B,MAAM;AAAA,MAC9C,CAAC;AAAA,IACH;AAGA,WAAO,KAAK,MAAM,MAAM,OAAQ,SAAS,KAAK,SAAS,GAAG;AACxD;AAAA,IACF;AAEA,UAAM,SAAS,KAAK,MAAM;AAC1B;AAGA,QAAI,WAAW,YAAY;AAEzB,YAAM,WAAW,KAAK,MAAM,SAAS,CAAC;AACtC,aAAO,OAAO,GAAG,EAAE,WAAW,SAAS,CAAC;AAAA,IAC1C;AAGA,QAAI,WAAW,YAAY;AACzB,aAAO,OAAO,GAAG,EAAE,WAAW,UAAU,IAAI,WAAW,CAAC,KAAM,GAAI,CAAC,EAAE,CAAC;AAAA,IACxE;AAGA,QAAI,SAAS,IAAI,KAAK,QAAQ;AAC5B,aAAO,OAAO,MAAM;AAAA,QAClB,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,UAAW,KAAK,MAAM,KAAK,MAAM,KAAM,KAAK,SAAS,CAAC,KAAK;AACjE,UAAM,eAAe,SAAS;AAC9B,UAAM,aAAa,SAAS;AAE5B,QAAI,aAAa,KAAK,QAAQ;AAC5B,aAAO,OAAO,MAAM;AAAA,QAClB,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAGA,UAAM,aACJ,WAAWF,gBACX,SAAS,IAAI,KAAK,KAAK,UACvB,KAAK,SAAS,CAAC,MAAM;AAAA,IACrB,KAAK,SAAS,CAAC,MAAM;AAAA,IACrB,KAAK,SAAS,CAAC,MAAM;AAAA,IACrB,KAAK,SAAS,CAAC,MAAM;AAAA,IACrB,KAAK,SAAS,CAAC,MAAM;AAAA,IACrB,KAAK,SAAS,CAAC,MAAM;AAEvB,UAAM,QAAQ,WAAWC;AAGzB,QAAI,CAAC,cAAc,CAAC,OAAO;AACzB,gBAAU,KAAK,KAAK,MAAM,cAAc,UAAU,CAAC;AAAA,IACrD;AAEA,aAAS;AAAA,EACX;AAGA,SAAO,OAAO,MAAM;AAAA,IAClB,MAAM;AAAA,IACN,SAAS;AAAA,EACX,CAAC;AACH;AAKA,SAAS,iBAAiB,UAAyC;AACjE,QAAM,WAAW,kBAAkB,QAAQ;AAE3C,MAAI,SAAS,WAAW,GAAG;AACzB,WAAO,IAAI,WAAW,CAAC;AAAA,EACzB;AAGA,QAAM,gBAAgB,IAAIC,aAAY,SAAS,SAAS;AACxD,QAAM,UAAU,IAAI,WAAW,IAAI,aAAa;AAEhD,UAAQ,CAAC,IAAI;AACb,UAAQ,CAAC,IAAIF;AACb,UAAQ,CAAC,IAAK,iBAAiB,IAAK;AACpC,UAAQ,CAAC,IAAI,gBAAgB;AAC7B,UAAQ,IAAIE,cAAa,CAAC;AAC1B,UAAQ,IAAI,UAAU,IAAIA,aAAY,MAAM;AAE5C,SAAO;AACT;AAKA,SAAS,gBAAgB,MAA0B;AACjD,QAAM,YAAY,IAAI,YAAY,EAAE,OAAO,IAAI;AAC/C,QAAM,gBAAgB,IAAI,UAAU;AAEpC,QAAM,UAAU,IAAI,WAAW,IAAI,aAAa;AAChD,UAAQ,CAAC,IAAI;AACb,UAAQ,CAAC,IAAID;AACb,UAAQ,CAAC,IAAK,iBAAiB,IAAK;AACpC,UAAQ,CAAC,IAAI,gBAAgB;AAC7B,UAAQ,IAAI,WAAW,CAAC;AAExB,SAAO;AACT;;;ACtOA,IAAM,gBAAgB,IAAI,WAAW,CAAC,KAAK,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,EAAE,CAAC;AAY/D,SAAS,iBACd,MACA,QACgB;AAEhB,MAAI,CAAC,MAAM,IAAI,GAAG;AAChB,WAAO,OAAO,MAAM,EAAE,MAAM,mBAAmB,CAAC;AAAA,EAClD;AAGA,QAAM,UAAU,iBAAiB,IAAI;AACrC,MAAI,YAAY,
IAAI;AAClB,WAAO,OAAO,MAAM,EAAE,MAAM,cAAc,CAAC;AAAA,EAC7C;AAGA,QAAM,iBAAiB,qBAAqB,IAAI;AAGhD,QAAM,uBAAuB,OAAO;AAAA,IAAI,CAAC,UACvC,MAAM,SAAS,SACX,mBAAmB,KAAK,IACxB,mBAAmB,KAAK;AAAA,EAC9B;AAGA,QAAM,YACJ,cAAc,SACd,eAAe,KAAK,SACpB,qBAAqB,OAAO,CAAC,KAAK,UAAU,MAAM,MAAM,QAAQ,CAAC,IACjE,eAAe,OAAO,OAAO,CAAC,KAAK,UAAU,MAAM,MAAM,QAAQ,CAAC;AAGpE,QAAM,SAAS,IAAI,WAAW,SAAS;AACvC,MAAI,SAAS;AAGb,SAAO,IAAI,eAAe,MAAM;AAChC,YAAU,cAAc;AAGxB,SAAO,IAAI,eAAe,MAAM,MAAM;AACtC,YAAU,eAAe,KAAK;AAG9B,aAAW,SAAS,sBAAsB;AACxC,WAAO,IAAI,OAAO,MAAM;AACxB,cAAU,MAAM;AAAA,EAClB;AAGA,aAAW,SAAS,eAAe,QAAQ;AACzC,WAAO,IAAI,OAAO,MAAM;AACxB,cAAU,MAAM;AAAA,EAClB;AAEA,SAAO,OAAO,GAAG,MAAM;AACzB;AAMA,SAAS,iBAAiB,MAA0B;AAClD,QAAM,SAAS,cAAc;AAE7B,MAAI,SAAS,IAAI,KAAK,QAAQ;AAC5B,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,aAAa,MAAM,MAAM;AACxC,QAAM,YAAY,cAAc,MAAM,SAAS,CAAC;AAEhD,MAAI,cAAc,QAAQ;AACxB,WAAO;AAAA,EACT;AAGA,SAAO,SAAS,IAAI,IAAI,SAAS;AACnC;AAKA,SAAS,qBAAqB,MAG5B;AACA,QAAM,SAAuB,CAAC;AAC9B,MAAI,SAAS,cAAc;AAC3B,MAAI,OAAmB,IAAI,WAAW,CAAC;AAEvC,SAAO,SAAS,KAAK,QAAQ;AAC3B,UAAM,aAAa;AAGnB,QAAI,SAAS,IAAI,KAAK,OAAQ;AAC9B,UAAM,SAAS,aAAa,MAAM,MAAM;AACxC,cAAU;AAGV,QAAI,SAAS,IAAI,KAAK,OAAQ;AAC9B,UAAM,YAAY,cAAc,MAAM,MAAM;AAC5C,cAAU;AAGV,cAAU;AAGV,cAAU;AAEV,UAAM,WAAW;AACjB,UAAM,YAAY,KAAK,MAAM,YAAY,QAAQ;AAEjD,QAAI,cAAc,QAAQ;AACxB,aAAO;AAAA,IACT,WAAW,cAAc,UAAU,cAAc,QAAQ;AACvD,aAAO,KAAK,SAAS;AAAA,IACvB;AAEA,QAAI,cAAc,QAAQ;AACxB;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,MAAM,OAAO;AACxB;AAUA,SAAS,mBAAmB,OAA8B;AAExD,QAAM,UAAU,aAAa,MAAM,OAAO;AAE1C,QAAM,OAAO,WAAW,MAAM,IAAI;AAGlC,QAAM,YAAY,IAAI,WAAW,QAAQ,SAAS,IAAI,KAAK,MAAM;AACjE,YAAU,IAAI,SAAS,CAAC;AACxB,YAAU,QAAQ,MAAM,IAAI;AAC5B,YAAU,IAAI,MAAM,QAAQ,SAAS,CAAC;AAEtC,SAAO,WAAW,QAAQ,SAAS;AACrC;AAKA,SAAS,mBAAmB,OAA8B;AAExD,QAAM,UAAU,WAAW,MAAM,OAAO;AACxC,QAAM,cAAc,WAAW,MAAM,WAAW;AAChD,QAAM,oBAAoB,WAAW,MAAM,iBAAiB;AAC5D,QAAM,OAAO,WAAW,MAAM,IAAI;AAGlC,QAAM,WACJ,QAAQ,SACR;AAAA,EACA;AAAA,EACA;AAAA,EACA,YAAY,SACZ;AAAA,EACA,kBAAkB,SAClB;AAAA,EACA,KAAK;AAEP,QAAM,YAAY,IAAI,WAAW,QAAQ;AACzC,MAAI,SAAS;AAGb,YAAU,IAAI,SAAS,MAAM;AAC7B,YAAU,QAAQ;AAClB,YAAU,QAAQ,IAAI;AAGtB,YAAU,QAAQ,IAAI,MAAM;AAC5B,YAAU,QAAQ,IAAI,MAAM;AAG5B,YAAU,IAAI,aAAa,MAAM;AACjC,YAAU,YAAY;AACtB,YAAU,QAAQ,IAAI;AAGtB,YAAU,IAAI,mBAAmB,MAAM;AACvC,YAAU,kBAAkB;AAC5B,YAAU,QAAQ,IAAI;AAGtB,YAAU,IAAI,MAAM,MAAM;AAE1B,SAAO,WAAW,QAAQ,SAAS;AACrC;AAKA,SAAS,WAAW,MAAc,MAA8B;AAC9D,QAAM,QAAQ,IAAI,WAAW,IAAI,IAAI,KAAK,SAAS,CAAC;AAGpD,gBAAc,OAAO,GAAG,KAAK,MAAM;AAGnC,WAAS,IAAI,GAAG,IAAI,GAAG,KAAK;AAC1B,UAAM,IAAI,CAAC,IAAI,KAAK,WAAW,CAAC;AAAA,EAClC;AAGA,QAAM,IAAI,MAAM,CAAC;AAGjB,QAAM,UAAU,MAAM,MAAM,GAAG,IAAI,KAAK,MAAM;AAC9C,QAAM,MAAM,eAAe,OAAO;AAClC,gBAAc,OAAO,IAAI,KAAK,QAAQ,GAAG;AAEzC,SAAO;AACT;AAKA,SAAS,aAAa,KAAyB;AAC7C,QAAM,QAAQ,IAAI,WAAW,IAAI,MAAM;AACvC,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK;AACnC,UAAM,CAAC,IAAI,IAAI,WAAW,CAAC,IAAI;AAAA,EACjC;AACA,SAAO;AACT;AAKA,SAAS,WAAW,KAAyB;AAC3C,SAAO,IAAI,YAAY,EAAE,OAAO,GAAG;AACrC;AAOA,IAAM,YAAY,aAAa;AAK/B,SAAS,eAA4B;AACnC,QAAM,QAAQ,IAAI,YAAY,GAAG;AACjC,WAAS,IAAI,GAAG,IAAI,KAAK,KAAK;AAC5B,QAAI,IAAI;AACR,aAAS,IAAI,GAAG,IAAI,GAAG,KAAK;AAC1B,UAAI,IAAI,GAAG;AACT,YAAI,aAAc,MAAM;AAAA,MAC1B,OAAO;AACL,YAAI,MAAM;AAAA,MACZ;AAAA,IACF;AACA,UAAM,CAAC,IAAI,MAAM;AAAA,EACnB;AACA,SAAO;AACT;AAKA,SAAS,eAAe,MAA0B;AAChD,MAAI,MAAM;AACV,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,WAAO,WAAW,OAAO,KAAK,CAAC,KAAK,MAAM,GAAI,KAAK,KAAM,QAAQ;AAAA,EACnE;AACA,UAAQ,MAAM,gBAAgB;AAChC;;;ACvSA,IAAM,iBAAiB,IAAI,WAAW,CAAC,IAAM,IAAM,IAAM,EAAI,CAAC;AAG9D,IAAM,cAAc,IAAI,WAAW,CAAC,IAAM,IAAM,IAAM,EAAI,CAAC;AAG3D,IAAME,mBAAkB,IAAI,WAAW,CAAC,IAAM,IAAM,IAAM,EAAI,CAAC;AAYxD,SAAS,kBACd,MACA,UACiB;AAEjB,MAAI,CAAC,O
AAO,IAAI,GAAG;AACjB,WAAO,OAAO,MAAM,EAAE,MAAM,mBAAmB,CAAC;AAAA,EAClD;AAGA,QAAM,gBAAgB,qBAAqB,IAAI;AAC/C,MAAI,CAAC,cAAc,IAAI;AACrB,WAAO;AAAA,EACT;AAEA,QAAM,EAAE,OAAO,IAAI,cAAc;AAGjC,QAAM,YAAY,eAAe,QAAQ;AAGzC,MAAI,cAAc;AAClB,aAAW,SAAS,QAAQ;AAC1B,mBAAe,MAAM;AAAA,EACvB;AACA,MAAI,WAAW;AACb,mBAAe,UAAU;AAAA,EAC3B;AAGA,QAAM,SAAS,IAAI,WAAW,IAAI,WAAW;AAC7C,MAAI,SAAS;AAGb,SAAO,IAAI,gBAAgB,MAAM;AACjC,YAAU;AACV,gBAAc,QAAQ,QAAQ,WAAW;AACzC,YAAU;AAGV,SAAO,IAAI,aAAa,MAAM;AAC9B,YAAU;AAIV,MAAI,cAAc;AAElB,aAAW,SAAS,QAAQ;AAE1B,WAAO,IAAI,OAAO,MAAM;AACxB,cAAU,MAAM;AAGhB,QAAI,CAAC,eAAe,aAAa,aAAa,KAAK,GAAG;AACpD,aAAO,IAAI,WAAW,MAAM;AAC5B,gBAAU,UAAU;AACpB,oBAAc;AAAA,IAChB;AAAA,EACF;AAGA,MAAI,CAAC,eAAe,WAAW;AAC7B,WAAO,IAAI,WAAW,MAAM;AAAA,EAC9B;AAEA,SAAO,OAAO,GAAG,MAAM;AACzB;AAKA,SAAS,aAAa,OAA4B;AAChD,MAAI,MAAM,SAAS,EAAG,QAAO;AAC7B,QAAM,OAAO,OAAO;AAAA,IAClB,MAAM,CAAC,KAAK;AAAA,IACZ,MAAM,CAAC,KAAK;AAAA,IACZ,MAAM,CAAC,KAAK;AAAA,IACZ,MAAM,CAAC,KAAK;AAAA,EACd;AACA,SAAO,SAAS,UAAU,SAAS,UAAU,SAAS;AACxD;AAKA,SAAS,qBACP,MAIA;AACA,QAAM,SAAuB,CAAC;AAC9B,MAAI,iBAAiB;AAGrB,MAAI,SAAS;AAEb,SAAO,SAAS,KAAK,SAAS,GAAG;AAE/B,UAAM,YAAY,KAAK,MAAM,QAAQ,SAAS,CAAC;AAC/C,UAAM,UAAU,OAAO;AAAA,MACrB,UAAU,CAAC,KAAK;AAAA,MAChB,UAAU,CAAC,KAAK;AAAA,MAChB,UAAU,CAAC,KAAK;AAAA,MAChB,UAAU,CAAC,KAAK;AAAA,IAClB;AAEA,QAAI,CAAC,gBAAgB;AACnB,uBAAiB;AAAA,IACnB;AAGA,UAAM,aACH,KAAK,SAAS,CAAC,KAAK,MACnB,KAAK,SAAS,CAAC,KAAK,MAAM,KAC1B,KAAK,SAAS,CAAC,KAAK,MAAM,MAC1B,KAAK,SAAS,CAAC,KAAK,MAAM;AAG9B,QAAI,SAAS,IAAI,YAAY,KAAK,QAAQ;AACxC,aAAO,OAAO,MAAM;AAAA,QAClB,MAAM;AAAA,QACN,SAAS,uCAAuC,MAAM;AAAA,MACxD,CAAC;AAAA,IACH;AAGA,QAAI,CAAC,YAAY,WAAWA,gBAAe,GAAG;AAE5C,YAAMC,cAAa,YAAa,YAAY;AAC5C,YAAM,YAAY,KAAK,MAAM,QAAQ,SAAS,IAAIA,WAAU;AAC5D,aAAO,KAAK,SAAS;AAAA,IACvB;AAIA,UAAM,aAAa,YAAa,YAAY;AAC5C,cAAU,IAAI;AAAA,EAChB;AAEA,SAAO,OAAO,GAAG,EAAE,QAAQ,eAAe,CAAC;AAC7C;AAKA,SAAS,eAAe,UAAgD;AAEtE,QAAM,eAAe,SAAS;AAAA,IAC5B,CAAC,MACC,EAAE,OAAO,SAAS,qBAClB,EAAE,OAAO,SAAS,0BAClB,EAAE,OAAO,SAAS,cAClB,EAAE,OAAO,SAAS,kBAClB,EAAE,OAAO,SAAS;AAAA,EACtB;AAEA,MAAI,aAAa,WAAW,GAAG;AAC7B,WAAO;AAAA,EACT;AAEA,QAAM,WAAW,kBAAkB,YAAY;AAE/C,MAAI,SAAS,WAAW,GAAG;AACzB,WAAO;AAAA,EACT;AAGA,QAAM,YAAY,SAAS;AAC3B,QAAM,aAAa,YAAa,YAAY;AAC5C,QAAM,QAAQ,IAAI,WAAW,IAAI,UAAU;AAE3C,QAAM,IAAID,kBAAiB,CAAC;AAC5B,gBAAc,OAAO,GAAG,SAAS;AACjC,QAAM,IAAI,UAAU,CAAC;AAErB,SAAO;AACT;;;ACnJO,SAAS,MAAM,MAAkB,UAAoC;AAC1E,QAAM,eAAe,aAAa,IAAI;AACtC,MAAI,CAAC,cAAc;AACjB,WAAO,EAAE,IAAI,OAAO,OAAO,EAAE,MAAM,oBAAoB,EAAE;AAAA,EAC3D;AAGA,MAAI,SAAS,WAAW,SAAS;AAC/B,UAAM,SAASE,SAAQ,YAAY,EAAE,WAAW,MAAM,CAAC,CAAC;AACxD,QAAI,CAAC,OAAO,IAAI;AACd,aAAO;AAAA,QACL,IAAI;AAAA,QACJ,OAAO,EAAE,MAAM,eAAe,SAAS,OAAO,MAAM,KAAK;AAAA,MAC3D;AAAA,IACF;AACA,WAAO,EAAE,IAAI,MAAM,OAAO,OAAO,MAAM;AAAA,EACzC;AAGA,MAAI,SAAS,WAAW,WAAW;AACjC,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,OAAO,EAAE,MAAM,eAAe,SAAS,gCAAgC;AAAA,IACzE;AAAA,EACF;AAGA,MAAI,SAAS,WAAW,gBAAgB;AACtC,UAAM,eAAe,SAAS,IAAI;AAGlC,QAAI,iBAAiB,cAAc;AACjC,aAAO,SAAS,MAAM,cAAc,SAAS,GAAG;AAAA,IAClD;AAGA,UAAM,SAASA,SAAQ,YAAY,EAAE,WAAW,MAAM,CAAC,CAAC;AACxD,QAAI,CAAC,OAAO,IAAI;AACd,aAAO;AAAA,QACL,IAAI;AAAA,QACJ,OAAO,EAAE,MAAM,eAAe,SAAS,OAAO,MAAM,KAAK;AAAA,MAC3D;AAAA,IACF;AACA,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,OAAO,OAAO;AAAA,MACd,SAAS,EAAE,MAAM,mBAAmB,QAAQ,0BAA0B;AAAA,IACxE;AAAA,EACF;AAGA,QAAM,mBAAmB,gBAAgB,UAAU,YAAY;AAE/D,MAAI,CAAC,iBAAiB,IAAI;AACxB,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,OAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,+BAA+B,iBAAiB,MAAM,IAAI;AAAA,MACrE;AAAA,IACF;AAAA,EACF;AAEA,SAAO,SAAS,MAAM,cAAc,iBAAiB,KAAK;AAC5D;AAKA,SAAS,SACP,MACA,cACA,KACa;AACb,MAAI,iBAAiB,SAAS,IAAI,WAAW,OAAO;AAClD,UAAM,SAAS,iBAAiB,MAAM,IAAI,MAAM;AAChD,QAAI,CAAC,OAAO,IAAI;
AACd,aAAO;AAAA,QACL,IAAI;AAAA,QACJ,OAAO,EAAE,MAAM,eAAe,SAAS,OAAO,MAAM,KAAK;AAAA,MAC3D;AAAA,IACF;AACA,WAAO,EAAE,IAAI,MAAM,OAAO,OAAO,MAAM;AAAA,EACzC;AAEA,MAAI,iBAAiB,UAAU,IAAI,WAAW,QAAQ;AACpD,UAAM,SAAS,kBAAkB,MAAM,IAAI,QAAQ;AACnD,QAAI,CAAC,OAAO,IAAI;AACd,aAAO;AAAA,QACL,IAAI;AAAA,QACJ,OAAO,EAAE,MAAM,eAAe,SAAS,OAAO,MAAM,KAAK;AAAA,MAC3D;AAAA,IACF;AACA,WAAO,EAAE,IAAI,MAAM,OAAO,OAAO,MAAM;AAAA,EACzC;AAEA,MAAI,iBAAiB,UAAU,IAAI,WAAW,QAAQ;AACpD,UAAM,SAAS,kBAAkB,MAAM,IAAI,QAAQ;AACnD,QAAI,CAAC,OAAO,IAAI;AACd,aAAO;AAAA,QACL,IAAI;AAAA,QACJ,OAAO,EAAE,MAAM,eAAe,SAAS,OAAO,MAAM,KAAK;AAAA,MAC3D;AAAA,IACF;AACA,WAAO,EAAE,IAAI,MAAM,OAAO,OAAO,MAAM;AAAA,EACzC;AAEA,SAAO;AAAA,IACL,IAAI;AAAA,IACJ,OAAO;AAAA,MACL,MAAM;AAAA,MACN,SAAS;AAAA,IACX;AAAA,EACF;AACF;AAOA,IAAMA,WAAU;AAAA,EACd,KAAK;AAAA,IACH,YAAY;AAAA,EACd;AAAA,EACA,MAAM;AAAA,IACJ,YAAY;AAAA,EACd;AAAA,EACA,MAAM;AAAA,IACJ,YAAY;AAAA,EACd;AACF;;;ACrKA,SAAS,qBAAqB,MAAsB;AAClD,SAAO,KAAK,QAAQ,SAAS,IAAI,EAAE,QAAQ,OAAO,IAAI;AACxD;AAYA,SAAS,kBACP,OACA,SAC2B;AAE3B,MAAI,OAAO;AACT,WAAO;AAAA,EACT;AAGA,MAAI,SAAS;AACX,WAAO;AAAA,MACL,OAAO,QAAQ;AAAA,MACf,UAAU,QAAQ;AAAA;AAAA,IAEpB;AAAA,EACF;AAEA,SAAO;AACT;AAUA,SAAS,kBAAkB,UAAsC;AAC/D,QAAM,QAAkB,CAAC;AAGzB,MAAI,SAAS,UAAU,UAAU,QAAW;AAC1C,UAAM,KAAK,UAAU,SAAS,SAAS,KAAK,EAAE;AAAA,EAChD;AAEA,MAAI,SAAS,UAAU,SAAS;AAC9B,UAAM,KAAK,YAAY,SAAS,SAAS,OAAO,EAAE;AAAA,EACpD;AAEA,MAAI,SAAS,UAAU,WAAW;AAChC,UAAM,KAAK,kBAAkB,SAAS,SAAS,SAAS,EAAE;AAAA,EAC5D;AAEA,MAAI,SAAS,UAAU,QAAQ,QAAW;AACxC,UAAM,KAAK,cAAc,SAAS,SAAS,GAAG,EAAE;AAAA,EAClD;AAEA,MAAI,SAAS,UAAU,SAAS,QAAW;AACzC,UAAM,KAAK,SAAS,SAAS,SAAS,IAAI,EAAE;AAAA,EAC9C;AAGA,MAAI,SAAS,QAAQ,KAAK,SAAS,SAAS,GAAG;AAC7C,UAAM,KAAK,SAAS,SAAS,KAAK,IAAI,SAAS,MAAM,EAAE;AAAA,EACzD;AAGA,MAAI,SAAS,OAAO,MAAM;AACxB,UAAM,KAAK,eAAe,SAAS,MAAM,IAAI,EAAE;AAAA,EACjD;AAEA,MAAI,SAAS,OAAO,MAAM;AACxB,UAAM,KAAK,UAAU,SAAS,MAAM,IAAI,EAAE;AAAA,EAC5C;AAGA,MAAI,SAAS,UAAU,aAAa,QAAW;AAC7C,UAAM,KAAK,cAAc,SAAS,SAAS,QAAQ,EAAE;AAAA,EACvD;AAGA,QAAM,cAAc,kBAAkB,SAAS,OAAO,SAAS,OAAO;AAEtE,MAAI,aAAa;AACf,QAAI,YAAY,YAAY,QAAW;AACrC,YAAM,KAAK,uBAAuB,YAAY,OAAO,EAAE;AAAA,IACzD;AAEA,QAAI,YAAY,UAAU,QAAW;AACnC,YAAM,KAAK,kBAAkB,YAAY,KAAK,EAAE;AAAA,IAClD;AAEA,QAAI,YAAY,UAAU,QAAW;AACnC,YAAM,KAAK,gBAAgB,YAAY,KAAK,EAAE;AAAA,IAChD;AAEA,QAAI,YAAY,UAAU;AACxB,YAAM,KAAK,mBAAmB,YAAY,QAAQ,EAAE;AAAA,IACtD;AAAA,EACF;AAEA,SAAO,MAAM,KAAK,IAAI;AACxB;AAWA,SAAS,6BAA6B,UAAqC;AACzE,MAAI,CAAC,SAAS,oBAAoB,SAAS,iBAAiB,WAAW,GAAG;AACxE,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,QAAkB,CAAC;AAEzB,aAAW,CAAC,OAAO,EAAE,KAAK,SAAS,iBAAiB,QAAQ,GAAG;AAC7D,UAAM,eAAe,QAAQ;AAC7B,UAAM,SAAS,GAAG,SAAS,KAAK,GAAG,OAAO,CAAC,KAAK,GAAG,OAAO,CAAC,MAAM;AAGjE,UAAM,KAAK,eAAe,YAAY,GAAG,MAAM,GAAG;AAGlD,UAAM,KAAK,qBAAqB,GAAG,MAAM,CAAC;AAAA,EAC5C;AAEA,SAAO;AACT;AAmCO,SAAS,cAAc,UAAsC;AAClE,QAAM,WAAqB,CAAC;AAG5B,WAAS,KAAK,qBAAqB,SAAS,MAAM,CAAC;AAGnD,MAAI,SAAS,aAAa,WAAW;AACnC,UAAM,iBAAiB,6BAA6B,QAAQ;AAC5D,QAAI,eAAe,SAAS,GAAG;AAC7B,eAAS,KAAK,eAAe,KAAK,IAAI,CAAC;AAAA,IACzC;AAAA,EACF;AAGA,MAAI,SAAS,gBAAgB;AAC3B,aAAS;AAAA,MACP,oBAAoB,qBAAqB,SAAS,cAAc,CAAC;AAAA,IACnE;AAAA,EACF;AAGA,QAAM,eAAe,kBAAkB,QAAQ;AAC/C,MAAI,cAAc;AAChB,aAAS,KAAK,YAAY;AAAA,EAC5B;AAGA,SAAO,SAAS,KAAK,IAAI;AAC3B;;;ACpKO,SAAS,aACd,MACA,UACa;AAEb,QAAM,SAAS,aAAa,IAAI;AAChC,MAAI,CAAC,QAAQ;AACX,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAAA,EACnD;AAGA,QAAM,OAAO,cAAc,QAAQ;AAGnC,MAAI;AAKJ,MAAI,WAAW,OAAO;AAEpB,UAAM,SAAS,gBAAgB,IAAI;AACnC,kBAAc,iBAAiB,MAAM,MAAM;AAAA,EAC7C,WAAW,WAAW,QAAQ;AAE5B,UAAM,WAAW,mBAAmB,IAAI;AACxC,kBAAc,kBAAkB,MAAM,QAAQ;AAAA,EAChD,WAAW,WAAW,QAAQ;AAE5B,UAAM,WAAW,mBAAmB,IAAI;AACxC,kBAAc,kBAAkB,MAAM,QAAQ;AAAA,EAChD,OAAO;AAEL,WAAO,OAAO,MAAM,EAAE,MAAM,oBAAoB
,CAAC;AAAA,EACnD;AAGA,MAAI,CAAC,YAAY,IAAI;AACnB,WAAO,OAAO,MAAM;AAAA,MAClB,MAAM;AAAA,MACN,SAAS,YAAY,MAAM;AAAA,IAC7B,CAAC;AAAA,EACH;AAEA,SAAO,OAAO,GAAG,YAAY,KAAK;AACpC;AAUA,SAAS,gBAAgB,MAA8B;AACrD,SAAO,mBAAmB,cAAc,MAAM,SAAS;AACzD;AAQA,SAAS,mBAAmB,MAAiC;AAC3D,SAAO;AAAA,IACL;AAAA,MACE,QAAQ,EAAE,MAAM,kBAAkB;AAAA,MAClC,MAAM;AAAA,IACR;AAAA,EACF;AACF;;;ACrGO,SAAS,UAAU,KAA0B;AAClD,UAAQ,IAAI,QAAQ;AAAA,IAClB,KAAK;AACH,aAAO,IAAI,OAAO,IAAI,CAAC,UAAU,MAAM,IAAI,EAAE,KAAK,MAAM;AAAA,IAE1D,KAAK;AAAA,IACL,KAAK;AACH,aAAO,IAAI,SAAS,IAAI,CAAC,YAAY,QAAQ,IAAI,EAAE,KAAK,MAAM;AAAA,EAClE;AACF;","names":["settingsStart","PNG_SIGNATURE_LENGTH","APP1_MARKER","COM_MARKER","EXIF_HEADER","EXIF_CHUNK_TYPE","paddedSize","HELPERS"]}