@lokascript/semantic 1.2.0 → 1.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/core.d.ts +1246 -0
- package/dist/core.js +3073 -0
- package/dist/core.js.map +1 -0
- package/package.json +5 -1
- package/src/core.ts +155 -0
package/dist/core.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../src/types.ts","../src/parser/utils/type-validation.ts","../src/parser/utils/possessive-keywords.ts","../src/registry.ts","../src/parser/pattern-matcher.ts","../src/tokenizers/index.ts","../src/cache/semantic-cache.ts","../src/core-bridge.ts","../src/ast-builder/expression-parser/tokenizer.ts","../src/ast-builder/expression-parser/parser.ts","../src/ast-builder/value-converters.ts","../src/ast-builder/command-mappers.ts","../src/ast-builder/index.ts"],"sourcesContent":["/**\n * Semantic-First Multilingual Hyperscript Types\n *\n * This module defines the canonical semantic representation that all languages\n * parse to and render from. The semantic layer is language-neutral - it captures\n * the MEANING of hyperscript commands independent of surface syntax.\n */\n\n// Re-export the SemanticRole type from local grammar-types for consistency\nexport type {\n SemanticRole,\n WordOrder,\n AdpositionType,\n MorphologyType,\n GrammaticalMarker,\n LanguageProfile,\n} from './types/grammar-types';\n\nimport type { SemanticRole } from './types/grammar-types';\n\n// =============================================================================\n// Action Types\n// =============================================================================\n\n/**\n * Canonical action names (English-based internally, but not visible to users)\n * These map to hyperscript commands and are used in the semantic AST.\n */\nexport type ActionType =\n // Class/Attribute operations\n | 'toggle'\n | 'add'\n | 'remove'\n // Content operations\n | 'put'\n | 'append'\n | 'prepend'\n | 'take'\n | 'make'\n | 'clone'\n | 'swap'\n | 'morph'\n // Variable operations\n | 'set'\n | 'get'\n | 'increment'\n | 'decrement'\n | 'log'\n // Visibility\n | 'show'\n | 'hide'\n | 'transition'\n // Events\n | 'on'\n | 'trigger'\n | 'send'\n // DOM focus\n | 'focus'\n | 'blur'\n // Navigation\n | 'go'\n // Async\n | 'wait'\n | 'fetch'\n | 'settle'\n // Animation/Measurement\n | 'measure'\n // Behavior system\n | 'install'\n // Control flow\n | 'if'\n | 'unless'\n | 'else'\n | 'repeat'\n | 'for'\n | 'while'\n | 'continue'\n | 'halt'\n | 'throw'\n | 'call'\n | 'return'\n // Advanced\n | 'js'\n | 'async'\n | 'tell'\n | 'default'\n | 'init'\n | 'behavior'\n // Meta (for compound nodes)\n | 'compound';\n\n// =============================================================================\n// Semantic Values\n// =============================================================================\n\n/**\n * A semantic value represents a typed piece of data in a semantic node.\n * Values are language-neutral - they capture what something IS, not how it's written.\n */\nexport type SemanticValue =\n | LiteralValue\n | SelectorValue\n | ReferenceValue\n | PropertyPathValue\n | ExpressionValue;\n\nexport interface LiteralValue {\n readonly type: 'literal';\n readonly value: string | number | boolean;\n readonly dataType?: 'string' | 'number' | 'boolean' | 'duration';\n}\n\nexport interface SelectorValue {\n readonly type: 'selector';\n readonly value: string; // The CSS selector: #id, .class, [attr], etc.\n readonly selectorKind: 'id' | 'class' | 'attribute' | 'element' | 'complex';\n}\n\nexport interface ReferenceValue {\n readonly type: 'reference';\n readonly value: 'me' | 'you' | 'it' | 'result' | 'event' | 'target' | 'body';\n}\n\nexport interface PropertyPathValue {\n readonly type: 'property-path';\n readonly object: SemanticValue;\n readonly property: string;\n}\n\nexport interface ExpressionValue {\n readonly type: 'expression';\n 
/** Raw expression string for complex expressions that need further parsing */\n readonly raw: string;\n}\n\n// =============================================================================\n// Semantic Nodes\n// =============================================================================\n\n/**\n * Base interface for all semantic nodes.\n * Semantic nodes capture the MEANING of hyperscript constructs.\n */\nexport interface SemanticNode {\n readonly kind: 'command' | 'event-handler' | 'conditional' | 'compound' | 'loop';\n readonly action: ActionType;\n readonly roles: ReadonlyMap<SemanticRole, SemanticValue>;\n readonly metadata?: SemanticMetadata;\n}\n\n/**\n * Metadata about the source of a semantic node.\n * Useful for debugging, error messages, and round-trip conversion.\n */\nexport interface SemanticMetadata {\n readonly sourceLanguage?: string;\n readonly sourceText?: string;\n readonly sourcePosition?: SourcePosition;\n readonly patternId?: string;\n /**\n * Confidence score for the parse (0-1).\n * Higher values indicate more certain matches.\n * - 1.0: Exact match with all roles captured\n * - 0.8-0.99: High confidence with minor uncertainty (stem matching, optional roles)\n * - 0.6-0.8: Medium confidence (morphological normalization, defaults applied)\n * - <0.6: Low confidence (may need fallback to traditional parser)\n */\n readonly confidence?: number;\n}\n\nexport interface SourcePosition {\n readonly start: number;\n readonly end: number;\n readonly line?: number;\n readonly column?: number;\n}\n\n/**\n * A command semantic node - represents a single hyperscript command.\n */\nexport interface CommandSemanticNode extends SemanticNode {\n readonly kind: 'command';\n}\n\n/**\n * An event handler semantic node - represents \"on [event] [commands]\".\n */\nexport interface EventHandlerSemanticNode extends SemanticNode {\n readonly kind: 'event-handler';\n readonly action: 'on';\n readonly body: SemanticNode[];\n readonly eventModifiers?: EventModifiers;\n /**\n * Event parameter names for destructuring.\n * E.g., for \"on click(clientX, clientY)\", this would be ['clientX', 'clientY']\n */\n readonly parameterNames?: readonly string[];\n}\n\nexport interface EventModifiers {\n readonly once?: boolean;\n readonly debounce?: number;\n readonly throttle?: number;\n readonly queue?: 'first' | 'last' | 'all' | 'none';\n readonly from?: SemanticValue; // Event source filter\n}\n\n/**\n * A conditional semantic node - represents \"if [condition] then [body] else [body]\".\n */\nexport interface ConditionalSemanticNode extends SemanticNode {\n readonly kind: 'conditional';\n readonly action: 'if';\n readonly thenBranch: SemanticNode[];\n readonly elseBranch?: SemanticNode[];\n}\n\n/**\n * A compound semantic node - represents multiple chained statements.\n */\nexport interface CompoundSemanticNode extends SemanticNode {\n readonly kind: 'compound';\n readonly statements: SemanticNode[];\n readonly chainType: 'then' | 'and' | 'async';\n}\n\n/**\n * Loop variant discriminant for different loop types.\n */\nexport type LoopVariant =\n | 'forever' // repeat forever\n | 'times' // repeat 5 times\n | 'for' // for item in collection\n | 'while' // while condition\n | 'until'; // until condition\n\n/**\n * A loop semantic node - represents repeat/for/while loops.\n */\nexport interface LoopSemanticNode extends SemanticNode {\n readonly kind: 'loop';\n readonly action: 'repeat' | 'for' | 'while';\n /** The type of loop (forever, times, for, while, until) */\n readonly loopVariant: 
LoopVariant;\n /** Commands to execute in each iteration */\n readonly body: SemanticNode[];\n /** Loop variable name for 'for' loops (e.g., 'item' in 'for item in list') */\n readonly loopVariable?: string;\n /** Index variable name if specified (e.g., 'i' in 'for item with index i') */\n readonly indexVariable?: string;\n}\n\n// =============================================================================\n// Language Patterns\n// =============================================================================\n\n/**\n * A pattern defines how a semantic structure appears in a specific language.\n * Patterns enable bidirectional conversion: parse (natural → semantic) and\n * render (semantic → natural).\n */\nexport interface LanguagePattern {\n /** Unique identifier for this pattern */\n readonly id: string;\n\n /** ISO 639-1 language code */\n readonly language: string;\n\n /** Which command this pattern matches */\n readonly command: ActionType;\n\n /** Priority for disambiguation (higher = checked first) */\n readonly priority: number;\n\n /** The pattern template with role placeholders */\n readonly template: PatternTemplate;\n\n /** Rules for extracting semantic roles from matched tokens */\n readonly extraction: ExtractionRules;\n\n /** Optional constraints on when this pattern applies */\n readonly constraints?: PatternConstraints;\n}\n\n/**\n * A pattern template defines the expected token sequence.\n *\n * Template syntax:\n * - Literal tokens: \"toggle\", \"を\", \"على\"\n * - Role placeholders: {patient}, {target}, {destination}\n * - Optional groups: [on {target}]\n * - Alternatives in extraction (not in template string)\n *\n * Example templates:\n * - English: \"toggle {patient} [on {target}]\"\n * - Japanese: \"{target} の {patient} を 切り替え\"\n * - Arabic: \"بدّل {patient} [على {target}]\"\n */\nexport interface PatternTemplate {\n /** Human-readable template string */\n readonly format: string;\n\n /** Parsed token sequence for matching */\n readonly tokens: PatternToken[];\n}\n\nexport type PatternToken = LiteralPatternToken | RolePatternToken | GroupPatternToken;\n\nexport interface LiteralPatternToken {\n readonly type: 'literal';\n readonly value: string;\n /** Alternative spellings/forms that also match */\n readonly alternatives?: string[];\n}\n\nexport interface RolePatternToken {\n readonly type: 'role';\n readonly role: SemanticRole;\n readonly optional?: boolean;\n /** Expected value types (for validation) */\n readonly expectedTypes?: Array<SemanticValue['type']>;\n}\n\nexport interface GroupPatternToken {\n readonly type: 'group';\n readonly tokens: PatternToken[];\n readonly optional?: boolean;\n}\n\n/**\n * Rules for extracting semantic values from matched tokens.\n */\nexport interface ExtractionRules {\n readonly [role: string]: ExtractionRule;\n}\n\nexport interface ExtractionRule {\n /** Position-based extraction (0-indexed from pattern start) */\n readonly position?: number;\n /** Marker-based extraction (find value after this marker) */\n readonly marker?: string;\n /** Alternative markers that also work */\n readonly markerAlternatives?: string[];\n /** Transform the extracted value */\n readonly transform?: (raw: string) => SemanticValue;\n /** Default value if not found (for optional roles) */\n readonly default?: SemanticValue;\n /** Static value extraction (for event handler wrapped commands) */\n readonly value?: string;\n /** Extract value from a pattern role by name */\n readonly fromRole?: string;\n}\n\n/**\n * Additional constraints on pattern 
applicability.\n */\nexport interface PatternConstraints {\n /** Required roles that must be present */\n readonly requiredRoles?: SemanticRole[];\n /** Roles that must NOT be present */\n readonly forbiddenRoles?: SemanticRole[];\n /** Valid selector types for the patient role */\n readonly validPatientTypes?: Array<SelectorValue['selectorKind']>;\n /** Pattern IDs this conflicts with */\n readonly conflictsWith?: string[];\n}\n\n// =============================================================================\n// Token Stream (for pattern matching)\n// =============================================================================\n\n/**\n * A token from language-specific tokenization.\n */\nexport interface LanguageToken {\n readonly value: string;\n readonly kind: TokenKind;\n readonly position: SourcePosition;\n /** Normalized form from explicit keyword map (e.g., 切り替え → toggle) */\n readonly normalized?: string;\n /** Morphologically normalized stem (e.g., 切り替えた → 切り替え) */\n readonly stem?: string;\n /** Confidence in the morphological stem (0.0-1.0) */\n readonly stemConfidence?: number;\n /** Additional metadata for specific token types (e.g., event modifier data) */\n readonly metadata?: Record<string, unknown>;\n}\n\nexport type TokenKind =\n | 'keyword' // Command or modifier keyword\n | 'selector' // CSS selector (#id, .class, [attr])\n | 'literal' // String or number literal\n | 'particle' // Grammatical particle (を, に, من)\n | 'conjunction' // Grammatical conjunction (Arabic و/ف proclitics)\n | 'event-modifier' // Event modifier (.once, .debounce(300), .throttle(100))\n | 'identifier' // Variable or property name\n | 'operator' // Comparison or logical operator\n | 'punctuation' // Brackets, quotes, etc.\n | 'url'; // URL/path (/path, https://...)\n\n/**\n * A stream of tokens with navigation capabilities.\n */\nexport interface TokenStream {\n readonly tokens: readonly LanguageToken[];\n readonly language: string;\n\n /** Look at token at current position + offset without consuming */\n peek(offset?: number): LanguageToken | null;\n\n /** Consume and return current token, advance position */\n advance(): LanguageToken;\n\n /** Check if we've consumed all tokens */\n isAtEnd(): boolean;\n\n /** Save current position for backtracking */\n mark(): StreamMark;\n\n /** Restore to a saved position */\n reset(mark: StreamMark): void;\n\n /** Get current position */\n position(): number;\n}\n\nexport interface StreamMark {\n readonly position: number;\n}\n\n// =============================================================================\n// Pattern Matching Results\n// =============================================================================\n\n/**\n * Result of successfully matching a pattern.\n */\nexport interface PatternMatchResult {\n readonly pattern: LanguagePattern;\n readonly captured: ReadonlyMap<SemanticRole, SemanticValue>;\n readonly consumedTokens: number;\n readonly confidence: number; // 0-1, how well the pattern matched\n}\n\n/**\n * Error when pattern matching fails.\n */\nexport interface PatternMatchError {\n readonly message: string;\n readonly position: SourcePosition;\n readonly expectedPatterns?: string[];\n readonly partialMatch?: Partial<PatternMatchResult>;\n}\n\n// =============================================================================\n// Tokenizer Interface\n// =============================================================================\n\n/**\n * Language-specific tokenizer interface.\n * Each language implements its own tokenizer to handle:\n 
* - Word boundaries (spaces for English, particles for Japanese)\n * - Character sets (ASCII, CJK, Arabic, etc.)\n * - Special markers (particles, prefixes, suffixes)\n */\nexport interface LanguageTokenizer {\n readonly language: string;\n readonly direction: 'ltr' | 'rtl';\n\n /** Convert input string to token stream */\n tokenize(input: string): TokenStream;\n\n /** Classify a single token */\n classifyToken(token: string): TokenKind;\n}\n\n// =============================================================================\n// Semantic Parser Interface\n// =============================================================================\n\n/**\n * Main parser interface - converts natural language to semantic nodes.\n */\nexport interface SemanticParser {\n /** Parse input in specified language to semantic node */\n parse(input: string, language: string): SemanticNode;\n\n /** Check if input can be parsed in the specified language */\n canParse(input: string, language: string): boolean;\n\n /** Get all supported languages */\n supportedLanguages(): string[];\n}\n\n// =============================================================================\n// Semantic Renderer Interface\n// =============================================================================\n\n/**\n * Renderer interface - converts semantic nodes to natural language.\n */\nexport interface SemanticRenderer {\n /** Render semantic node in specified language */\n render(node: SemanticNode, language: string): string;\n\n /** Render semantic node in explicit mode */\n renderExplicit(node: SemanticNode): string;\n\n /** Get all supported languages */\n supportedLanguages(): string[];\n}\n\n// =============================================================================\n// Helper Functions\n// =============================================================================\n\n/**\n * Create a selector semantic value from a CSS selector string.\n */\nexport function createSelector(value: string): SelectorValue {\n let selectorKind: SelectorValue['selectorKind'] = 'complex';\n\n if (value.startsWith('#') && !value.includes(' ')) {\n selectorKind = 'id';\n } else if (value.startsWith('.') && !value.includes(' ')) {\n selectorKind = 'class';\n } else if (value.startsWith('[') && value.endsWith(']')) {\n selectorKind = 'attribute';\n } else if (/^[a-z][a-z0-9]*$/i.test(value)) {\n selectorKind = 'element';\n }\n\n return { type: 'selector', value, selectorKind };\n}\n\n/**\n * Create a literal semantic value.\n */\nexport function createLiteral(\n value: string | number | boolean,\n dataType?: LiteralValue['dataType']\n): LiteralValue {\n const result: LiteralValue = { type: 'literal', value };\n if (dataType !== undefined) {\n return { type: 'literal', value, dataType };\n }\n return result;\n}\n\n/**\n * Create a reference semantic value.\n */\nexport function createReference(value: ReferenceValue['value']): ReferenceValue {\n return { type: 'reference', value };\n}\n\n/**\n * Create a property path semantic value.\n */\nexport function createPropertyPath(object: SemanticValue, property: string): PropertyPathValue {\n return { type: 'property-path', object, property };\n}\n\n/**\n * Create a semantic node with the given action and roles.\n */\nexport function createCommandNode(\n action: ActionType,\n roles: Record<string, SemanticValue>,\n metadata?: SemanticMetadata\n): CommandSemanticNode {\n const node: CommandSemanticNode = {\n kind: 'command',\n action,\n roles: new Map(Object.entries(roles) as [SemanticRole, SemanticValue][]),\n };\n 
if (metadata !== undefined) {\n return { ...node, metadata };\n }\n return node;\n}\n\n/**\n * Create an event handler semantic node.\n */\nexport function createEventHandler(\n event: SemanticValue,\n body: SemanticNode[],\n modifiers?: EventModifiers,\n metadata?: SemanticMetadata,\n parameterNames?: string[]\n): EventHandlerSemanticNode {\n const roles = new Map<SemanticRole, SemanticValue>();\n roles.set('event', event);\n\n const node: EventHandlerSemanticNode = {\n kind: 'event-handler',\n action: 'on',\n roles,\n body,\n };\n\n if (modifiers !== undefined) {\n (node as { eventModifiers?: EventModifiers }).eventModifiers = modifiers;\n }\n if (metadata !== undefined) {\n (node as { metadata?: SemanticMetadata }).metadata = metadata;\n }\n if (parameterNames !== undefined && parameterNames.length > 0) {\n (node as { parameterNames?: readonly string[] }).parameterNames = parameterNames;\n }\n\n return node;\n}\n\n/**\n * Create a compound semantic node (for chained statements).\n */\nexport function createCompoundNode(\n statements: SemanticNode[],\n chainType: 'then' | 'and' | 'async' = 'then',\n metadata?: SemanticMetadata\n): CompoundSemanticNode {\n const node: CompoundSemanticNode = {\n kind: 'compound',\n action: 'compound' as ActionType, // Compound doesn't have a specific action\n roles: new Map(),\n statements,\n chainType,\n };\n if (metadata !== undefined) {\n (node as { metadata?: SemanticMetadata }).metadata = metadata;\n }\n return node;\n}\n\n/**\n * Create a conditional semantic node (if/else).\n */\nexport function createConditionalNode(\n condition: SemanticValue,\n thenBranch: SemanticNode[],\n elseBranch?: SemanticNode[],\n metadata?: SemanticMetadata\n): ConditionalSemanticNode {\n const roles = new Map<SemanticRole, SemanticValue>();\n roles.set('condition' as SemanticRole, condition);\n\n const node: ConditionalSemanticNode = {\n kind: 'conditional',\n action: 'if',\n roles,\n thenBranch,\n };\n if (elseBranch !== undefined) {\n (node as { elseBranch?: SemanticNode[] }).elseBranch = elseBranch;\n }\n if (metadata !== undefined) {\n (node as { metadata?: SemanticMetadata }).metadata = metadata;\n }\n return node;\n}\n\n/**\n * Create a loop semantic node.\n */\nexport function createLoopNode(\n action: 'repeat' | 'for' | 'while',\n loopVariant: LoopVariant,\n roles: Record<string, SemanticValue>,\n body: SemanticNode[],\n options?: {\n loopVariable?: string;\n indexVariable?: string;\n metadata?: SemanticMetadata;\n }\n): LoopSemanticNode {\n const node: LoopSemanticNode = {\n kind: 'loop',\n action,\n loopVariant,\n roles: new Map(Object.entries(roles) as [SemanticRole, SemanticValue][]),\n body,\n };\n\n if (options?.loopVariable) {\n (node as { loopVariable?: string }).loopVariable = options.loopVariable;\n }\n if (options?.indexVariable) {\n (node as { indexVariable?: string }).indexVariable = options.indexVariable;\n }\n if (options?.metadata) {\n (node as { metadata?: SemanticMetadata }).metadata = options.metadata;\n }\n\n return node;\n}\n\n// =============================================================================\n// Semantic Parse Result (for validation)\n// =============================================================================\n\n/**\n * Argument with semantic role attached.\n */\nexport type SemanticArgument = SemanticValue & {\n role?: SemanticRole;\n};\n\n/**\n * Result of semantic parsing (used by command validator).\n */\nexport interface SemanticParseResult {\n /** The action/command type */\n readonly action: ActionType;\n /** 
Confidence score (0-1) */\n readonly confidence: number;\n /** Source language code */\n readonly language: string;\n /** Parsed arguments with roles */\n readonly arguments: SemanticArgument[];\n}\n","/**\n * Type Validation Utility\n *\n * Shared utilities for validating semantic value types against expected types.\n * Used by pattern-matcher to validate captured role values.\n */\n\nimport type { SemanticValue } from '../../types';\n\n/**\n * Semantic value type strings.\n */\nexport type SemanticValueType = SemanticValue['type'];\n\n/**\n * Check if a value type matches any of the expected types.\n *\n * The 'expression' type is treated as a wildcard that matches any value.\n * The 'property-path' type is compatible with selector, reference, and expression.\n *\n * @param actualType The actual type of the value\n * @param expectedTypes Array of expected types\n * @returns True if the type is compatible\n */\nexport function isTypeCompatible(\n actualType: SemanticValueType | string,\n expectedTypes: (SemanticValueType | string)[]\n): boolean {\n // Empty expected types means any type is valid\n if (!expectedTypes || expectedTypes.length === 0) {\n return true;\n }\n\n // Direct match\n if (expectedTypes.includes(actualType)) {\n return true;\n }\n\n // 'expression' is always compatible (wildcard)\n if (expectedTypes.includes('expression')) {\n return true;\n }\n\n // 'property-path' is compatible with selector, reference, and expression\n if (actualType === 'property-path') {\n return expectedTypes.some(t => ['selector', 'reference', 'expression'].includes(t));\n }\n\n return false;\n}\n\n/**\n * Validate a semantic value against expected types.\n *\n * @param value The semantic value to validate\n * @param expectedTypes Array of expected types (empty means any type is valid)\n * @returns True if the value type is valid\n */\nexport function validateValueType(\n value: SemanticValue,\n expectedTypes?: (SemanticValueType | string)[]\n): boolean {\n if (!expectedTypes || expectedTypes.length === 0) {\n return true;\n }\n\n return isTypeCompatible(value.type, expectedTypes);\n}\n\n/**\n * Check if a value is a CSS selector.\n */\nexport function isCSSSelector(value: string): boolean {\n return value.startsWith('.') || value.startsWith('#') || value.startsWith('<');\n}\n\n/**\n * Check if a value is a class name (starts with .).\n */\nexport function isClassName(value: string): boolean {\n return value.startsWith('.');\n}\n\n/**\n * Check if a value is an ID selector (starts with #).\n */\nexport function isIdSelector(value: string): boolean {\n return value.startsWith('#');\n}\n\n/**\n * Check if a value is a CSS property reference (starts with *).\n */\nexport function isCSSPropertyRef(value: string): boolean {\n return value.startsWith('*');\n}\n\n/**\n * Check if a value is a numeric value.\n */\nexport function isNumericValue(value: string): boolean {\n // Handle duration values (e.g., \"100ms\", \"2s\")\n const durationMatch = value.match(/^(\\d+(?:\\.\\d+)?)(ms|s|m|h)?$/);\n if (durationMatch) {\n return true;\n }\n\n // Plain number\n const num = parseFloat(value);\n return !isNaN(num) && isFinite(num);\n}\n\n/**\n * Check if a value is a property name (identifier-like).\n */\nexport function isPropertyName(value: string): boolean {\n // Property names are identifiers (start with letter or _, contain alphanumeric)\n return /^[a-zA-Z_][a-zA-Z0-9_]*$/.test(value);\n}\n\n/**\n * Check if a value is a variable reference (starts with :).\n */\nexport function isVariableRef(value: 
string): boolean {\n return value.startsWith(':');\n}\n\n/**\n * Check if a value is a built-in reference (me, you, it, etc.).\n */\nexport function isBuiltInReference(value: string): boolean {\n const builtIns = new Set(['me', 'you', 'it', 'result', 'event', 'target', 'body']);\n return builtIns.has(value.toLowerCase());\n}\n","/**\n * Possessive Keywords Utility\n *\n * Provides functions to look up possessive keywords from language profiles.\n * Used by pattern-matcher to recognize possessive expressions like \"my value\".\n */\n\nimport type { LanguageProfile } from '../../generators/profiles/types';\n\n/**\n * Get the reference for a possessive keyword from a language profile.\n *\n * @param profile Language profile\n * @param keyword Possessive keyword (e.g., 'my', 'your', 'its')\n * @returns The reference (e.g., 'me', 'you', 'it') or undefined if not found\n */\nexport function getPossessiveReference(\n profile: LanguageProfile,\n keyword: string\n): string | undefined {\n return profile.possessive?.keywords?.[keyword];\n}\n\n/**\n * Check if a keyword is a possessive keyword in the given profile.\n *\n * @param profile Language profile\n * @param keyword Keyword to check\n * @returns True if the keyword is a possessive keyword\n */\nexport function isPossessiveKeyword(profile: LanguageProfile, keyword: string): boolean {\n return profile.possessive?.keywords?.[keyword] !== undefined;\n}\n\n/**\n * Get all possessive keywords from a language profile.\n *\n * @param profile Language profile\n * @returns Record of possessive keywords to references, or empty object\n */\nexport function getAllPossessiveKeywords(profile: LanguageProfile): Record<string, string> {\n return profile.possessive?.keywords ?? {};\n}\n","/**\n * Language Registry\n *\n * Central registration point for language support in the semantic parser.\n * Languages self-register when their modules are imported, enabling\n * tree-shaking for minimal bundles.\n *\n * @example\n * ```typescript\n * // Import only the languages you need\n * import '@lokascript/semantic/languages/en';\n * import '@lokascript/semantic/languages/es';\n *\n * // Now parse works for registered languages\n * import { parse } from '@lokascript/semantic';\n * parse('toggle .active', 'en'); // Works\n * parse('alternar .activo', 'es'); // Works\n * parse('切り替え .active', 'ja'); // Error: Language not registered\n * ```\n */\n\nimport type { LanguageTokenizer, LanguagePattern, TokenStream } from './types';\n\n// Re-export profile types from generators for convenience\nexport type {\n LanguageProfile,\n WordOrder,\n MarkingStrategy,\n RoleMarker,\n VerbConfig,\n PossessiveConfig,\n KeywordTranslation,\n TokenizationConfig,\n} from './generators/language-profiles';\n\nimport type { LanguageProfile } from './generators/language-profiles';\n\n// =============================================================================\n// External Pattern Source Interface\n// =============================================================================\n\n/**\n * Interface for external pattern sources (e.g., @lokascript/patterns-reference database).\n * External sources can provide additional patterns at runtime.\n */\nexport interface ExternalPatternsSource {\n /** Unique identifier for the source */\n id: string;\n /** Human-readable name */\n name: string;\n /** Get patterns for a specific language */\n getPatternsForLanguage(language: string): Promise<ExternalPatternEntry[]>;\n /** Get patterns for a specific command */\n getPatternsForCommand(command: string, 
language?: string): Promise<ExternalPatternEntry[]>;\n /** Check if source has patterns for a language */\n hasPatterns(language: string): Promise<boolean>;\n /** Get all supported languages */\n getSupportedLanguages(): Promise<string[]>;\n}\n\n/**\n * Pattern entry from external source.\n */\nexport interface ExternalPatternEntry {\n id: string;\n code: string;\n command: string | null;\n language: string;\n confidence: number;\n verified: boolean;\n title?: string;\n category?: string;\n}\n\n// =============================================================================\n// Registry State\n// =============================================================================\n\nconst tokenizers = new Map<string, LanguageTokenizer>();\nconst profiles = new Map<string, LanguageProfile>();\nconst patternCache = new Map<string, LanguagePattern[]>();\n\n// External pattern sources (e.g., @lokascript/patterns-reference database)\nconst externalSources = new Map<string, ExternalPatternsSource>();\n\n// Pattern generator function - set by patterns module to avoid circular deps\nlet patternGenerator: ((profile: LanguageProfile) => LanguagePattern[]) | null = null;\n\n// =============================================================================\n// Profile Inheritance\n// =============================================================================\n\n/**\n * Deep merge two objects, with variant values overriding base values.\n * Arrays are replaced, not merged.\n */\nfunction deepMerge<T extends object>(base: T, variant: Partial<T>): T {\n const result = { ...base } as T;\n\n for (const key of Object.keys(variant) as (keyof T)[]) {\n const variantValue = variant[key];\n const baseValue = base[key];\n\n if (variantValue === undefined) {\n continue;\n }\n\n // If both are objects (but not arrays), merge recursively\n if (\n typeof variantValue === 'object' &&\n variantValue !== null &&\n !Array.isArray(variantValue) &&\n typeof baseValue === 'object' &&\n baseValue !== null &&\n !Array.isArray(baseValue)\n ) {\n result[key] = deepMerge(\n baseValue as object,\n variantValue as Partial<typeof baseValue>\n ) as T[keyof T];\n } else {\n // Replace value (including arrays)\n result[key] = variantValue as T[keyof T];\n }\n }\n\n return result;\n}\n\n/**\n * Merge a variant profile with its base profile.\n * The variant's fields override the base, with deep merging for nested objects.\n *\n * @example\n * ```typescript\n * const esMX = mergeProfiles(spanishProfile, {\n * code: 'es-MX',\n * name: 'Spanish (Mexico)',\n * keywords: {\n * toggle: { primary: 'alternar', alternatives: ['dale', 'cambiar'] },\n * },\n * });\n * ```\n */\nexport function mergeProfiles(\n base: LanguageProfile,\n variant: Partial<LanguageProfile>\n): LanguageProfile {\n return deepMerge(base, variant);\n}\n\n/**\n * Resolve a profile, applying inheritance if the profile has an `extends` field.\n * Returns the merged profile with base language properties inherited.\n */\nexport function resolveProfile(profile: LanguageProfile): LanguageProfile {\n if (!profile.extends) {\n return profile;\n }\n\n const baseProfile = profiles.get(profile.extends);\n if (!baseProfile) {\n console.warn(\n `[Registry] Profile '${profile.code}' extends '${profile.extends}' but base is not registered. 
` +\n `Make sure to import the base language before the variant.`\n );\n return profile;\n }\n\n // Recursively resolve base profile (in case it also extends something)\n const resolvedBase = resolveProfile(baseProfile);\n\n // Merge, with variant overriding base\n return mergeProfiles(resolvedBase, profile);\n}\n\n// =============================================================================\n// Registration Functions\n// =============================================================================\n\n/**\n * Register a language with its tokenizer and profile.\n * Called automatically by language modules when imported.\n * If the profile has an `extends` field, it will inherit from the base profile.\n */\nexport function registerLanguage(\n code: string,\n tokenizer: LanguageTokenizer,\n profile: LanguageProfile\n): void {\n tokenizers.set(code, tokenizer);\n // Store the original profile (inheritance is resolved at query time)\n profiles.set(code, profile);\n // Clear pattern cache for this language if it was previously cached\n patternCache.delete(code);\n}\n\n/**\n * Register only a tokenizer (for backwards compatibility).\n */\nexport function registerTokenizer(tokenizer: LanguageTokenizer): void {\n tokenizers.set(tokenizer.language, tokenizer);\n}\n\n/**\n * Register only a profile (for backwards compatibility).\n */\nexport function registerProfile(profile: LanguageProfile): void {\n profiles.set(profile.code, profile);\n patternCache.delete(profile.code);\n}\n\n/**\n * Set the pattern generator function.\n * Called by patterns module to inject the generator without circular deps.\n */\nexport function setPatternGenerator(\n generator: (profile: LanguageProfile) => LanguagePattern[]\n): void {\n patternGenerator = generator;\n}\n\n// Direct pattern registration map (for tree-shaking)\nconst registeredPatterns = new Map<string, LanguagePattern[]>();\n\n/**\n * Register patterns directly for a language.\n * This enables tree-shaking by allowing each language module to register\n * only its own patterns.\n */\nexport function registerPatterns(code: string, patterns: LanguagePattern[]): void {\n registeredPatterns.set(code, patterns);\n // Clear cached patterns if any\n patternCache.delete(code);\n}\n\n/**\n * Check if patterns are directly registered for a language.\n */\nexport function hasRegisteredPatterns(code: string): boolean {\n return registeredPatterns.has(code);\n}\n\n/**\n * Get directly registered patterns for a language.\n */\nexport function getRegisteredPatterns(code: string): LanguagePattern[] | undefined {\n return registeredPatterns.get(code);\n}\n\n// =============================================================================\n// External Pattern Sources\n// =============================================================================\n\n/**\n * Register an external pattern source.\n * External sources (like @lokascript/patterns-reference) can provide\n * additional patterns at runtime.\n *\n * @example\n * ```typescript\n * import { registerPatternsSource } from '@lokascript/semantic';\n * import { createPatternsProvider } from '@lokascript/patterns-reference';\n *\n * const provider = createPatternsProvider();\n * registerPatternsSource(provider);\n * ```\n */\nexport function registerPatternsSource(source: ExternalPatternsSource): void {\n externalSources.set(source.id, source);\n}\n\n/**\n * Unregister an external pattern source.\n */\nexport function unregisterPatternsSource(sourceId: string): boolean {\n return externalSources.delete(sourceId);\n}\n\n/**\n 
* Get a registered external pattern source.\n */\nexport function getPatternsSource(sourceId: string): ExternalPatternsSource | undefined {\n return externalSources.get(sourceId);\n}\n\n/**\n * Get all registered external pattern sources.\n */\nexport function getAllPatternsSources(): ExternalPatternsSource[] {\n return Array.from(externalSources.values());\n}\n\n/**\n * Check if any external pattern sources are registered.\n */\nexport function hasExternalSources(): boolean {\n return externalSources.size > 0;\n}\n\n/**\n * Query patterns from all external sources for a language.\n * Returns patterns sorted by confidence.\n */\nexport async function queryExternalPatterns(language: string): Promise<ExternalPatternEntry[]> {\n if (externalSources.size === 0) {\n return [];\n }\n\n const allPatterns: ExternalPatternEntry[] = [];\n\n for (const source of externalSources.values()) {\n try {\n const patterns = await source.getPatternsForLanguage(language);\n allPatterns.push(...patterns);\n } catch (error) {\n console.warn(\n `[Registry] Failed to query patterns from ${source.name}:`,\n error instanceof Error ? error.message : String(error)\n );\n }\n }\n\n // Sort by confidence (highest first)\n return allPatterns.sort((a, b) => b.confidence - a.confidence);\n}\n\n/**\n * Query patterns from all external sources for a command.\n */\nexport async function queryExternalPatternsForCommand(\n command: string,\n language?: string\n): Promise<ExternalPatternEntry[]> {\n if (externalSources.size === 0) {\n return [];\n }\n\n const allPatterns: ExternalPatternEntry[] = [];\n\n for (const source of externalSources.values()) {\n try {\n const patterns = await source.getPatternsForCommand(command, language);\n allPatterns.push(...patterns);\n } catch (error) {\n console.warn(\n `[Registry] Failed to query patterns from ${source.name}:`,\n error instanceof Error ? error.message : String(error)\n );\n }\n }\n\n return allPatterns.sort((a, b) => b.confidence - a.confidence);\n}\n\n// =============================================================================\n// Language Code Utilities\n// =============================================================================\n\n/**\n * Extract the base language code from a BCP 47 tag.\n * Examples: 'es-MX' → 'es', 'pt-BR' → 'pt', 'en' → 'en'\n */\nexport function getBaseLanguageCode(code: string): string {\n return code.split('-')[0];\n}\n\n/**\n * Check if a code is a language variant (has region subtag).\n * Examples: 'es-MX' → true, 'pt' → false\n */\nexport function isLanguageVariant(code: string): boolean {\n return code.includes('-');\n}\n\n// =============================================================================\n// Query Functions\n// =============================================================================\n\n/**\n * Get a tokenizer for the specified language.\n * Supports fallback: if 'es-MX' is not registered, falls back to 'es'.\n * @throws Error if neither the variant nor base language is registered\n */\nexport function getTokenizer(code: string): LanguageTokenizer {\n // Try exact match first\n let tokenizer = tokenizers.get(code);\n\n // Fallback: es-MX → es\n if (!tokenizer && isLanguageVariant(code)) {\n const baseCode = getBaseLanguageCode(code);\n tokenizer = tokenizers.get(baseCode);\n }\n\n if (!tokenizer) {\n const registered = Array.from(tokenizers.keys()).join(', ');\n throw new Error(\n `Language '${code}' is not registered. ` +\n `Registered languages: ${registered || 'none'}. 
` +\n `Import the language module first: import '@lokascript/semantic/languages/${code}';`\n );\n }\n return tokenizer;\n}\n\n/**\n * Get a profile for the specified language.\n * Supports fallback: if 'es-MX' is not registered, falls back to 'es'.\n * @throws Error if neither the variant nor base language is registered\n */\nexport function getProfile(code: string): LanguageProfile {\n // Try exact match first\n let profile = profiles.get(code);\n\n // Fallback: es-MX → es\n if (!profile && isLanguageVariant(code)) {\n const baseCode = getBaseLanguageCode(code);\n profile = profiles.get(baseCode);\n }\n\n if (!profile) {\n const registered = Array.from(profiles.keys()).join(', ');\n throw new Error(\n `Language profile '${code}' is not registered. ` +\n `Registered languages: ${registered || 'none'}. ` +\n `Import the language module first: import '@lokascript/semantic/languages/${code}';`\n );\n }\n\n // Resolve inheritance if profile extends another\n return resolveProfile(profile);\n}\n\n/**\n * Try to get a tokenizer, returning undefined if not registered.\n * Supports fallback: if 'es-MX' is not registered, falls back to 'es'.\n */\nexport function tryGetTokenizer(code: string): LanguageTokenizer | undefined {\n let tokenizer = tokenizers.get(code);\n if (!tokenizer && isLanguageVariant(code)) {\n tokenizer = tokenizers.get(getBaseLanguageCode(code));\n }\n return tokenizer;\n}\n\n/**\n * Try to get a profile, returning undefined if not registered.\n * Supports fallback: if 'es-MX' is not registered, falls back to 'es'.\n */\nexport function tryGetProfile(code: string): LanguageProfile | undefined {\n let profile = profiles.get(code);\n if (!profile && isLanguageVariant(code)) {\n profile = profiles.get(getBaseLanguageCode(code));\n }\n // Resolve inheritance if profile extends another\n return profile ? 
resolveProfile(profile) : undefined;\n}\n\n/**\n * Get all registered language codes.\n */\nexport function getRegisteredLanguages(): string[] {\n return Array.from(tokenizers.keys());\n}\n\n/**\n * Check if a language is registered (exact match or base language fallback).\n */\nexport function isLanguageRegistered(code: string): boolean {\n if (tokenizers.has(code) && profiles.has(code)) {\n return true;\n }\n // Check fallback for variants\n if (isLanguageVariant(code)) {\n const baseCode = getBaseLanguageCode(code);\n return tokenizers.has(baseCode) && profiles.has(baseCode);\n }\n return false;\n}\n\n/**\n * Check if a language is supported (exact match or base language fallback).\n * For backwards compatibility with tokenizers API.\n */\nexport function isLanguageSupported(code: string): boolean {\n if (tokenizers.has(code)) {\n return true;\n }\n // Check fallback for variants\n if (isLanguageVariant(code)) {\n return tokenizers.has(getBaseLanguageCode(code));\n }\n return false;\n}\n\n// =============================================================================\n// Tokenization\n// =============================================================================\n\n/**\n * Tokenize input in the specified language.\n * @throws Error if language is not registered\n */\nexport function tokenize(input: string, language: string): TokenStream {\n const tokenizer = getTokenizer(language);\n return tokenizer.tokenize(input);\n}\n\n// =============================================================================\n// Pattern Access (Lazy Generation)\n// =============================================================================\n\n/**\n * Get patterns for a specific language.\n * First checks for directly registered patterns (for tree-shaking),\n * then falls back to pattern generator.\n * Supports fallback: if 'es-MX' is not registered, falls back to 'es'.\n * @throws Error if language is not registered\n */\nexport function getPatternsForLanguage(code: string): LanguagePattern[] {\n // Check cache first (try exact, then base language)\n let cached = patternCache.get(code);\n if (!cached && isLanguageVariant(code)) {\n cached = patternCache.get(getBaseLanguageCode(code));\n }\n if (cached) {\n return cached;\n }\n\n // Check for directly registered patterns (tree-shakeable path)\n // Try exact match, then base language fallback\n let registered = registeredPatterns.get(code);\n if (!registered && isLanguageVariant(code)) {\n registered = registeredPatterns.get(getBaseLanguageCode(code));\n }\n if (registered) {\n patternCache.set(code, registered);\n return registered;\n }\n\n // Fall back to pattern generator\n if (!patternGenerator) {\n throw new Error(\n `No patterns registered for language '${code}'. 
` +\n 'Either import the language module or set a pattern generator.'\n );\n }\n\n // Get profile (throws if not registered) - has built-in fallback\n const profile = getProfile(code);\n const patterns = patternGenerator(profile);\n patternCache.set(code, patterns);\n return patterns;\n}\n\n/**\n * Get patterns for a specific language and command.\n */\nexport function getPatternsForLanguageAndCommand(\n language: string,\n command: string\n): LanguagePattern[] {\n return getPatternsForLanguage(language)\n .filter(p => p.command === command)\n .sort((a, b) => b.priority - a.priority);\n}\n\n/**\n * Clear the pattern cache for a language (useful for testing).\n */\nexport function clearPatternCache(code?: string): void {\n if (code) {\n patternCache.delete(code);\n } else {\n patternCache.clear();\n }\n}\n\n// =============================================================================\n// Backwards Compatibility\n// =============================================================================\n\n/**\n * Get all profiles as a record (for backwards compatibility).\n * Note: Only returns registered profiles.\n */\nexport function getAllProfiles(): Record<string, LanguageProfile> {\n const result: Record<string, LanguageProfile> = {};\n for (const [code, profile] of profiles) {\n result[code] = profile;\n }\n return result;\n}\n\n/**\n * Get all tokenizers as a record (for backwards compatibility).\n * Note: Only returns registered tokenizers.\n */\nexport function getAllTokenizers(): Record<string, LanguageTokenizer> {\n const result: Record<string, LanguageTokenizer> = {};\n for (const [code, tokenizer] of tokenizers) {\n result[code] = tokenizer;\n }\n return result;\n}\n","/**\n * Pattern Matcher\n *\n * Matches tokenized input against language patterns to extract semantic roles.\n * This is the core algorithm for multilingual parsing.\n */\n\nimport type {\n LanguagePattern,\n PatternToken,\n PatternMatchResult,\n SemanticRole,\n SemanticValue,\n TokenStream,\n LanguageToken,\n} from '../types';\nimport { createSelector, createLiteral, createReference, createPropertyPath } from '../types';\nimport { isTypeCompatible } from './utils/type-validation';\nimport { getPossessiveReference } from './utils/possessive-keywords';\nimport type { LanguageProfile } from '../generators/profiles/types';\nimport { tryGetProfile } from '../registry';\n\n// =============================================================================\n// Pattern Matcher\n// =============================================================================\n\nexport class PatternMatcher {\n /** Current language profile for the pattern being matched */\n private currentProfile: LanguageProfile | undefined;\n\n /**\n * Try to match a single pattern against the token stream.\n * Returns the match result or null if no match.\n */\n matchPattern(tokens: TokenStream, pattern: LanguagePattern): PatternMatchResult | null {\n const mark = tokens.mark();\n const captured = new Map<SemanticRole, SemanticValue>();\n\n // Get language profile for possessive keyword lookup\n this.currentProfile = tryGetProfile(pattern.language);\n\n // Reset match counters for this pattern\n this.stemMatchCount = 0;\n this.totalKeywordMatches = 0;\n\n const success = this.matchTokenSequence(tokens, pattern.template.tokens, captured);\n\n if (!success) {\n tokens.reset(mark);\n return null;\n }\n\n // Calculate confidence BEFORE applying defaults\n // This ensures defaulted roles don't artificially inflate confidence\n const confidence = 
this.calculateConfidence(pattern, captured);\n\n // Apply extraction rules to fill in any missing roles with defaults\n this.applyExtractionRules(pattern, captured);\n\n return {\n pattern,\n captured,\n consumedTokens: tokens.position() - mark.position,\n confidence,\n };\n }\n\n /**\n * Try to match multiple patterns, return the best match.\n */\n matchBest(tokens: TokenStream, patterns: LanguagePattern[]): PatternMatchResult | null {\n const matches: PatternMatchResult[] = [];\n\n for (const pattern of patterns) {\n const mark = tokens.mark();\n const result = this.matchPattern(tokens, pattern);\n\n if (result) {\n matches.push(result);\n }\n\n tokens.reset(mark);\n }\n\n if (matches.length === 0) {\n return null;\n }\n\n // Sort by confidence and priority\n matches.sort((a, b) => {\n // First by priority\n const priorityDiff = b.pattern.priority - a.pattern.priority;\n if (priorityDiff !== 0) return priorityDiff;\n\n // Then by confidence\n return b.confidence - a.confidence;\n });\n\n // Re-consume tokens for the best match\n const best = matches[0];\n this.matchPattern(tokens, best.pattern);\n\n return best;\n }\n\n /**\n * Match a sequence of pattern tokens against the token stream.\n */\n private matchTokenSequence(\n tokens: TokenStream,\n patternTokens: PatternToken[],\n captured: Map<SemanticRole, SemanticValue>\n ): boolean {\n // Skip leading conjunctions for Arabic (proclitics: و, ف, ول, وب, etc.)\n // BUT NOT if the pattern explicitly expects a conjunction (proclitic patterns)\n const firstPatternToken = patternTokens[0];\n const patternExpectsConjunction =\n firstPatternToken?.type === 'literal' &&\n (firstPatternToken.value === 'and' ||\n firstPatternToken.value === 'then' ||\n firstPatternToken.alternatives?.includes('and') ||\n firstPatternToken.alternatives?.includes('then'));\n\n if (this.currentProfile?.code === 'ar' && !patternExpectsConjunction) {\n while (tokens.peek()?.kind === 'conjunction') {\n tokens.advance();\n }\n }\n\n for (const patternToken of patternTokens) {\n const matched = this.matchPatternToken(tokens, patternToken, captured);\n\n if (!matched) {\n // If token is optional, continue\n if (this.isOptional(patternToken)) {\n continue;\n }\n return false;\n }\n }\n\n return true;\n }\n\n /**\n * Match a single pattern token against the current position in the stream.\n */\n private matchPatternToken(\n tokens: TokenStream,\n patternToken: PatternToken,\n captured: Map<SemanticRole, SemanticValue>\n ): boolean {\n switch (patternToken.type) {\n case 'literal':\n return this.matchLiteralToken(tokens, patternToken);\n\n case 'role':\n return this.matchRoleToken(tokens, patternToken, captured);\n\n case 'group':\n return this.matchGroupToken(tokens, patternToken, captured);\n\n default:\n return false;\n }\n }\n\n /**\n * Match a literal pattern token (keyword or particle).\n */\n private matchLiteralToken(\n tokens: TokenStream,\n patternToken: PatternToken & { type: 'literal' }\n ): boolean {\n const token = tokens.peek();\n if (!token) return false;\n\n // Check main value\n const matchType = this.getMatchType(token, patternToken.value);\n if (matchType !== 'none') {\n this.totalKeywordMatches++;\n if (matchType === 'stem') {\n this.stemMatchCount++;\n }\n tokens.advance();\n return true;\n }\n\n // Check alternatives\n if (patternToken.alternatives) {\n for (const alt of patternToken.alternatives) {\n const altMatchType = this.getMatchType(token, alt);\n if (altMatchType !== 'none') {\n this.totalKeywordMatches++;\n if (altMatchType === 'stem') {\n 
this.stemMatchCount++;\n }\n tokens.advance();\n return true;\n }\n }\n }\n\n return false;\n }\n\n /**\n * Match a role pattern token (captures a semantic value).\n * Handles multi-token expressions like:\n * - 'my value' (possessive keyword + property)\n * - '#dialog.showModal()' (method call)\n * - \"#element's *opacity\" (possessive selector + property)\n */\n private matchRoleToken(\n tokens: TokenStream,\n patternToken: PatternToken & { type: 'role' },\n captured: Map<SemanticRole, SemanticValue>\n ): boolean {\n // Skip noise words like \"the\" before selectors (English idiom support)\n this.skipNoiseWords(tokens);\n\n const token = tokens.peek();\n if (!token) {\n return patternToken.optional || false;\n }\n\n // Check for possessive expression (e.g., 'my value', 'its innerHTML')\n const possessiveValue = this.tryMatchPossessiveExpression(tokens);\n if (possessiveValue) {\n // Validate expected types if specified\n if (patternToken.expectedTypes && patternToken.expectedTypes.length > 0) {\n if (\n !patternToken.expectedTypes.includes(possessiveValue.type) &&\n !patternToken.expectedTypes.includes('expression')\n ) {\n return patternToken.optional || false;\n }\n }\n captured.set(patternToken.role, possessiveValue);\n return true;\n }\n\n // Check for method call expression (e.g., '#dialog.showModal()')\n const methodCallValue = this.tryMatchMethodCallExpression(tokens);\n if (methodCallValue) {\n if (patternToken.expectedTypes && patternToken.expectedTypes.length > 0) {\n if (\n !patternToken.expectedTypes.includes(methodCallValue.type) &&\n !patternToken.expectedTypes.includes('expression')\n ) {\n return patternToken.optional || false;\n }\n }\n captured.set(patternToken.role, methodCallValue);\n return true;\n }\n\n // Check for possessive selector expression (e.g., \"#element's *opacity\")\n const possessiveSelectorValue = this.tryMatchPossessiveSelectorExpression(tokens);\n if (possessiveSelectorValue) {\n if (patternToken.expectedTypes && patternToken.expectedTypes.length > 0) {\n // property-path is compatible with selector, reference, and expression\n if (!isTypeCompatible(possessiveSelectorValue.type, patternToken.expectedTypes)) {\n return patternToken.optional || false;\n }\n }\n captured.set(patternToken.role, possessiveSelectorValue);\n return true;\n }\n\n // Check for property access expression (e.g., 'userData.name', 'it.data')\n const propertyAccessValue = this.tryMatchPropertyAccessExpression(tokens);\n if (propertyAccessValue) {\n if (patternToken.expectedTypes && patternToken.expectedTypes.length > 0) {\n if (\n !patternToken.expectedTypes.includes(propertyAccessValue.type) &&\n !patternToken.expectedTypes.includes('expression')\n ) {\n return patternToken.optional || false;\n }\n }\n captured.set(patternToken.role, propertyAccessValue);\n return true;\n }\n\n // Check for selector + property expression (e.g., '#output.innerText')\n // This handles cases where the tokenizer produces two selector tokens\n const selectorPropertyValue = this.tryMatchSelectorPropertyExpression(tokens);\n if (selectorPropertyValue) {\n if (patternToken.expectedTypes && patternToken.expectedTypes.length > 0) {\n if (!isTypeCompatible(selectorPropertyValue.type, patternToken.expectedTypes)) {\n return patternToken.optional || false;\n }\n }\n captured.set(patternToken.role, selectorPropertyValue);\n return true;\n }\n\n // Try to extract a semantic value from the token\n const value = this.tokenToSemanticValue(token);\n if (!value) {\n return patternToken.optional || false;\n }\n\n // 
Validate expected types if specified\n if (patternToken.expectedTypes && patternToken.expectedTypes.length > 0) {\n if (!patternToken.expectedTypes.includes(value.type)) {\n return patternToken.optional || false;\n }\n }\n\n captured.set(patternToken.role, value);\n tokens.advance();\n return true;\n }\n\n /**\n * Try to match a possessive expression like 'my value' or 'its innerHTML'.\n * Returns the PropertyPathValue if matched, or null if not.\n */\n private tryMatchPossessiveExpression(tokens: TokenStream): SemanticValue | null {\n const token = tokens.peek();\n if (!token) return null;\n\n // Use profile-based possessive keyword lookup\n if (!this.currentProfile) return null;\n\n const tokenLower = (token.normalized || token.value).toLowerCase();\n const baseRef = getPossessiveReference(this.currentProfile, tokenLower);\n\n if (!baseRef) return null;\n\n // We have a possessive keyword, look ahead for property name\n const mark = tokens.mark();\n tokens.advance();\n\n const propertyToken = tokens.peek();\n if (!propertyToken) {\n // Just the possessive keyword, no property - revert\n tokens.reset(mark);\n return null;\n }\n\n // Property should be an identifier, keyword (not structural), or selector (for style props)\n // Examples: \"my value\", \"my innerHTML\", \"my *background\", \"my *opacity\"\n if (\n propertyToken.kind === 'identifier' ||\n (propertyToken.kind === 'keyword' && !this.isStructuralKeyword(propertyToken.value)) ||\n (propertyToken.kind === 'selector' && propertyToken.value.startsWith('*'))\n ) {\n tokens.advance();\n\n // Create property-path: my value -> { object: me, property: 'value' }\n return createPropertyPath(createReference(baseRef as any), propertyToken.value);\n }\n\n // Not a valid property, revert\n tokens.reset(mark);\n return null;\n }\n\n /**\n * Check if a keyword is a structural keyword (preposition, control flow, etc.)\n * that shouldn't be consumed as a property name.\n */\n private isStructuralKeyword(value: string): boolean {\n const structural = new Set([\n // Prepositions\n 'into',\n 'in',\n 'to',\n 'from',\n 'at',\n 'by',\n 'with',\n 'without',\n 'before',\n 'after',\n 'of',\n 'as',\n 'on',\n // Control flow\n 'then',\n 'end',\n 'else',\n 'if',\n 'repeat',\n 'while',\n 'for',\n // Commands (shouldn't be property names)\n 'toggle',\n 'add',\n 'remove',\n 'put',\n 'set',\n 'show',\n 'hide',\n 'increment',\n 'decrement',\n 'send',\n 'trigger',\n 'call',\n ]);\n return structural.has(value.toLowerCase());\n }\n\n /**\n * Try to match a method call expression like '#dialog.showModal()'.\n * Pattern: selector + '.' + identifier + '(' + [args] + ')'\n * Returns an expression value if matched, or null if not.\n */\n private tryMatchMethodCallExpression(tokens: TokenStream): SemanticValue | null {\n const token = tokens.peek();\n if (!token || token.kind !== 'selector') return null;\n\n // Look ahead for: . 
identifier (\n const mark = tokens.mark();\n tokens.advance(); // consume selector\n\n const dotToken = tokens.peek();\n if (!dotToken || dotToken.kind !== 'operator' || dotToken.value !== '.') {\n tokens.reset(mark);\n return null;\n }\n tokens.advance(); // consume .\n\n const methodToken = tokens.peek();\n if (!methodToken || methodToken.kind !== 'identifier') {\n tokens.reset(mark);\n return null;\n }\n tokens.advance(); // consume method name\n\n const openParen = tokens.peek();\n if (!openParen || openParen.kind !== 'punctuation' || openParen.value !== '(') {\n tokens.reset(mark);\n return null;\n }\n tokens.advance(); // consume (\n\n // Consume arguments until we find ) (with depth limit for security)\n const args: string[] = [];\n while (!tokens.isAtEnd() && args.length < PatternMatcher.MAX_METHOD_ARGS) {\n const argToken = tokens.peek();\n if (!argToken) break;\n if (argToken.kind === 'punctuation' && argToken.value === ')') {\n tokens.advance(); // consume )\n break;\n }\n // Skip commas\n if (argToken.kind === 'punctuation' && argToken.value === ',') {\n tokens.advance();\n continue;\n }\n // Collect arg value\n args.push(argToken.value);\n tokens.advance();\n }\n\n // Create expression value: #dialog.showModal()\n const methodCall = `${token.value}.${methodToken.value}(${args.join(', ')})`;\n return {\n type: 'expression',\n raw: methodCall,\n } as SemanticValue;\n }\n\n /**\n * Try to match a property access expression like 'userData.name' or 'it.data'.\n * Pattern: (identifier | keyword) + '.' + identifier [+ '.' + identifier ...]\n * Returns an expression value if matched, or null if not.\n */\n private tryMatchPropertyAccessExpression(tokens: TokenStream): SemanticValue | null {\n const token = tokens.peek();\n if (!token) return null;\n\n // Must start with an identifier or keyword reference\n if (token.kind !== 'identifier' && token.kind !== 'keyword') return null;\n\n // Look ahead for: . 
identifier\n const mark = tokens.mark();\n tokens.advance(); // consume first token\n\n const dotToken = tokens.peek();\n if (!dotToken || dotToken.kind !== 'operator' || dotToken.value !== '.') {\n tokens.reset(mark);\n return null;\n }\n tokens.advance(); // consume .\n\n const propertyToken = tokens.peek();\n if (!propertyToken || propertyToken.kind !== 'identifier') {\n tokens.reset(mark);\n return null;\n }\n tokens.advance(); // consume property name\n\n // Build the property chain\n let chain = `${token.value}.${propertyToken.value}`;\n let depth = 1; // Already have one property access\n\n // Continue for nested property access (e.g., userData.address.city)\n // With depth limit for security\n while (!tokens.isAtEnd() && depth < PatternMatcher.MAX_PROPERTY_DEPTH) {\n const nextDot = tokens.peek();\n if (!nextDot || nextDot.kind !== 'operator' || nextDot.value !== '.') {\n break;\n }\n tokens.advance(); // consume .\n\n const nextProp = tokens.peek();\n if (!nextProp || nextProp.kind !== 'identifier') {\n // Dot without property - put the dot back and stop\n // Can't easily put a single token back, so we'll include it\n break;\n }\n tokens.advance(); // consume property\n chain += `.${nextProp.value}`;\n depth++;\n }\n\n // Check for method call: chain + '(' + args + ')'\n // e.g., me.insertBefore(draggedItem, dropTarget)\n const openParen = tokens.peek();\n if (openParen && openParen.kind === 'punctuation' && openParen.value === '(') {\n tokens.advance(); // consume (\n\n // Collect arguments (comma-separated values)\n const args: string[] = [];\n let argDepth = 0; // Track nested parentheses\n while (!tokens.isAtEnd() && args.length < PatternMatcher.MAX_METHOD_ARGS) {\n const argToken = tokens.peek();\n if (!argToken) break;\n\n // Handle close paren - respecting nesting\n if (argToken.kind === 'punctuation' && argToken.value === ')') {\n if (argDepth === 0) {\n tokens.advance(); // consume )\n break;\n }\n argDepth--;\n }\n // Track nested open parens\n if (argToken.kind === 'punctuation' && argToken.value === '(') {\n argDepth++;\n }\n // Skip commas between arguments\n if (argToken.kind === 'punctuation' && argToken.value === ',') {\n tokens.advance();\n continue;\n }\n // Collect arg value\n args.push(argToken.value);\n tokens.advance();\n }\n\n // Create expression value with method call: me.insertBefore(a, b)\n const methodCall = `${chain}(${args.join(', ')})`;\n return {\n type: 'expression',\n raw: methodCall,\n } as SemanticValue;\n }\n\n // Create expression value: userData.name\n return {\n type: 'expression',\n raw: chain,\n } as SemanticValue;\n }\n\n /**\n * Try to match a possessive selector expression like \"#element's *opacity\".\n * Pattern: selector + \"'s\" + (selector | identifier)\n * Returns a property-path value if matched, or null if not.\n */\n private tryMatchPossessiveSelectorExpression(tokens: TokenStream): SemanticValue | null {\n const token = tokens.peek();\n if (!token || token.kind !== 'selector') return null;\n\n // Look ahead for: 's (possessive marker)\n const mark = tokens.mark();\n tokens.advance(); // consume selector\n\n const possessiveToken = tokens.peek();\n if (\n !possessiveToken ||\n possessiveToken.kind !== 'punctuation' ||\n possessiveToken.value !== \"'s\"\n ) {\n tokens.reset(mark);\n return null;\n }\n tokens.advance(); // consume 's\n\n const propertyToken = tokens.peek();\n if (!propertyToken) {\n tokens.reset(mark);\n return null;\n }\n\n // Property can be a selector (*opacity) or identifier\n if (propertyToken.kind !== 
'selector' && propertyToken.kind !== 'identifier') {\n tokens.reset(mark);\n return null;\n }\n tokens.advance(); // consume property\n\n // Create property-path: #element's *opacity\n return createPropertyPath(createSelector(token.value), propertyToken.value);\n }\n\n /**\n * Try to match a selector + property expression like \"#output.innerText\".\n * This handles cases where the tokenizer produces two selector tokens:\n * - #output (id selector)\n * - .innerText (looks like class selector, but is actually property)\n *\n * Pattern: id-selector + class-selector-that-is-actually-property\n * Returns a property-path value if matched, or null if not.\n */\n private tryMatchSelectorPropertyExpression(tokens: TokenStream): SemanticValue | null {\n const token = tokens.peek();\n if (!token || token.kind !== 'selector') return null;\n\n // Must be an ID selector (starts with #)\n if (!token.value.startsWith('#')) return null;\n\n // Look ahead for: selector that looks like a property (.something)\n const mark = tokens.mark();\n tokens.advance(); // consume first selector\n\n const propertyToken = tokens.peek();\n if (!propertyToken || propertyToken.kind !== 'selector') {\n tokens.reset(mark);\n return null;\n }\n\n // Second token must look like a class selector (starts with .)\n // but we interpret it as a property access\n if (!propertyToken.value.startsWith('.')) {\n tokens.reset(mark);\n return null;\n }\n\n // Verify the next token is not a selector (to avoid consuming too many)\n // This helps distinguish \"#output.innerText\" from \"#box .child\"\n const peek2 = tokens.peek(1);\n if (peek2 && peek2.kind === 'selector') {\n // Could be a compound selector chain - only take first two\n }\n\n tokens.advance(); // consume property selector\n\n // Create property-path: #output.innerText\n // Extract property name without the leading dot\n const propertyName = propertyToken.value.slice(1);\n\n return createPropertyPath(createSelector(token.value), propertyName);\n }\n\n /**\n * Match a group pattern token (optional sequence).\n */\n private matchGroupToken(\n tokens: TokenStream,\n patternToken: PatternToken & { type: 'group' },\n captured: Map<SemanticRole, SemanticValue>\n ): boolean {\n const mark = tokens.mark();\n\n // Track which roles were captured before this group\n const capturedBefore = new Set(captured.keys());\n\n const success = this.matchTokenSequence(tokens, patternToken.tokens, captured);\n\n if (!success) {\n tokens.reset(mark);\n // Clear any roles that were partially captured during the failed group match\n for (const role of captured.keys()) {\n if (!capturedBefore.has(role)) {\n captured.delete(role);\n }\n }\n return patternToken.optional || false;\n }\n\n return true;\n }\n\n /**\n * Get the type of match for a token against a value.\n * Used for confidence calculation.\n */\n private getMatchType(\n token: LanguageToken,\n value: string\n ): 'exact' | 'normalized' | 'stem' | 'case-insensitive' | 'none' {\n // Exact match (highest confidence)\n if (token.value === value) return 'exact';\n\n // Explicit keyword map normalized match (high confidence)\n if (token.normalized === value) return 'normalized';\n\n // Morphologically normalized stem match (medium-high confidence)\n // Only accept if stem confidence is reasonable\n if (token.stem === value && token.stemConfidence !== undefined && token.stemConfidence >= 0.7) {\n return 'stem';\n }\n\n // Case-insensitive match for keywords (medium confidence)\n if (token.kind === 'keyword' && token.value.toLowerCase() === 
value.toLowerCase()) {\n return 'case-insensitive';\n }\n\n return 'none';\n }\n\n /**\n * Track stem matches for confidence calculation.\n * This is set during matching and read during confidence calculation.\n */\n private stemMatchCount: number = 0;\n private totalKeywordMatches: number = 0;\n\n // ==========================================================================\n // Depth Limits for Expression Parsing (security hardening)\n // ==========================================================================\n\n /** Maximum depth for nested property access (e.g., a.b.c.d...) */\n private static readonly MAX_PROPERTY_DEPTH = 10;\n\n /** Maximum number of arguments in method calls */\n private static readonly MAX_METHOD_ARGS = 20;\n\n /**\n * Convert a language token to a semantic value.\n */\n private tokenToSemanticValue(token: LanguageToken): SemanticValue | null {\n switch (token.kind) {\n case 'selector':\n return createSelector(token.value);\n\n case 'literal':\n return this.parseLiteralValue(token.value);\n\n case 'keyword':\n // Keywords might be references or values\n const lower = (token.normalized || token.value).toLowerCase();\n if (['me', 'you', 'it', 'result', 'event', 'target', 'body'].includes(lower)) {\n return createReference(lower as any);\n }\n return createLiteral(token.normalized || token.value);\n\n case 'identifier':\n // Check if it's a variable reference (:varname)\n if (token.value.startsWith(':')) {\n return createReference(token.value as any);\n }\n // Check if it's a built-in reference\n const identLower = token.value.toLowerCase();\n if (['me', 'you', 'it', 'result', 'event', 'target', 'body'].includes(identLower)) {\n return createReference(identLower as any);\n }\n // Regular identifiers are variable references - use 'expression' type\n // which gets converted to 'identifier' AST nodes by semantic-integration.ts\n return { type: 'expression', raw: token.value } as const;\n\n case 'url':\n // URLs are treated as string literals (paths/URLs for navigation/fetch)\n return createLiteral(token.value, 'string');\n\n default:\n return null;\n }\n }\n\n /**\n * Parse a literal value (string, number, boolean).\n */\n private parseLiteralValue(value: string): SemanticValue {\n // String literal\n if (\n value.startsWith('\"') ||\n value.startsWith(\"'\") ||\n value.startsWith('`') ||\n value.startsWith('「')\n ) {\n const inner = value.slice(1, -1);\n return createLiteral(inner, 'string');\n }\n\n // Boolean\n if (value === 'true') return createLiteral(true, 'boolean');\n if (value === 'false') return createLiteral(false, 'boolean');\n\n // Duration (number with suffix)\n const durationMatch = value.match(/^(\\d+(?:\\.\\d+)?)(ms|s|m|h)?$/);\n if (durationMatch) {\n const num = parseFloat(durationMatch[1]);\n const unit = durationMatch[2];\n if (unit) {\n return createLiteral(value, 'duration');\n }\n return createLiteral(num, 'number');\n }\n\n // Plain number\n const num = parseFloat(value);\n if (!isNaN(num)) {\n return createLiteral(num, 'number');\n }\n\n // Default to string\n return createLiteral(value, 'string');\n }\n\n /**\n * Apply extraction rules to fill in default values for missing roles.\n */\n private applyExtractionRules(\n pattern: LanguagePattern,\n captured: Map<SemanticRole, SemanticValue>\n ): void {\n for (const [role, rule] of Object.entries(pattern.extraction)) {\n if (!captured.has(role as SemanticRole) && rule.default) {\n captured.set(role as SemanticRole, rule.default);\n }\n }\n }\n\n /**\n * Check if a pattern token is optional.\n 
*/\n private isOptional(patternToken: PatternToken): boolean {\n return (patternToken as any).optional === true;\n }\n\n /**\n * Calculate confidence score for a match (0-1).\n *\n * Confidence is reduced for:\n * - Stem matches (morphological normalization has inherent uncertainty)\n * - Missing optional roles (but less penalty if role has a default value)\n *\n * Confidence is increased for:\n * - VSO languages (Arabic) when pattern starts with a verb\n */\n private calculateConfidence(\n pattern: LanguagePattern,\n captured: Map<SemanticRole, SemanticValue>\n ): number {\n let score = 0;\n let maxScore = 0;\n\n // Helper to check if a role has a default value in extraction rules\n const hasDefault = (role: SemanticRole): boolean => {\n return pattern.extraction?.[role]?.default !== undefined;\n };\n\n // Score based on captured roles\n for (const token of pattern.template.tokens) {\n if (token.type === 'role') {\n maxScore += 1;\n if (captured.has(token.role)) {\n score += 1;\n }\n } else if (token.type === 'group') {\n // Group tokens are optional - weight depends on whether they have defaults\n for (const subToken of token.tokens) {\n if (subToken.type === 'role') {\n const roleHasDefault = hasDefault(subToken.role);\n const weight = 0.8; // Optional roles: 80% weight\n maxScore += weight;\n\n if (captured.has(subToken.role)) {\n // Role was explicitly provided by user\n score += weight;\n } else if (roleHasDefault) {\n // Role has a default - give 60% partial credit since command is semantically complete\n // This prevents penalizing common patterns like \"toggle .active\" (default: me)\n score += weight * 0.6;\n }\n // If no default and not captured, score += 0 (true penalty for missing info)\n }\n }\n }\n }\n\n let baseConfidence = maxScore > 0 ? 
score / maxScore : 1;\n\n // Apply penalty for stem matches\n // Each stem match reduces confidence slightly (e.g., 5% per stem match)\n // This ensures exact matches are preferred over morphological matches\n if (this.stemMatchCount > 0 && this.totalKeywordMatches > 0) {\n const stemPenalty = (this.stemMatchCount / this.totalKeywordMatches) * 0.15;\n baseConfidence = Math.max(0.5, baseConfidence - stemPenalty);\n }\n\n // Apply VSO confidence boost for Arabic verb-first patterns\n const vsoBoost = this.calculateVSOConfidenceBoost(pattern);\n baseConfidence = Math.min(1.0, baseConfidence + vsoBoost);\n\n // Apply preposition disambiguation adjustment for Arabic\n const prepositionAdjustment = this.arabicPrepositionDisambiguation(pattern, captured);\n baseConfidence = Math.max(0.0, Math.min(1.0, baseConfidence + prepositionAdjustment));\n\n return baseConfidence;\n }\n\n /**\n * Calculate confidence boost for VSO (Verb-Subject-Object) language patterns.\n * Arabic naturally uses VSO word order, so patterns that start with a verb\n * should receive a confidence boost.\n *\n * Returns +0.15 confidence boost if:\n * - Language is Arabic ('ar')\n * - Pattern's first token is a verb keyword\n *\n * @param pattern The language pattern being matched\n * @returns Confidence boost (0 or 0.15)\n */\n private calculateVSOConfidenceBoost(pattern: LanguagePattern): number {\n // Only apply to Arabic\n if (pattern.language !== 'ar') {\n return 0;\n }\n\n // Check if first token in pattern is a literal (keyword)\n const firstToken = pattern.template.tokens[0];\n if (!firstToken || firstToken.type !== 'literal') {\n return 0;\n }\n\n // List of Arabic verb keywords (command verbs)\n const ARABIC_VERBS = new Set([\n 'بدل',\n 'غير',\n 'أضف',\n 'أزل',\n 'ضع',\n 'اجعل',\n 'عين',\n 'زد',\n 'انقص',\n 'سجل',\n 'أظهر',\n 'أخف',\n 'شغل',\n 'أرسل',\n 'ركز',\n 'شوش',\n 'توقف',\n 'انسخ',\n 'احذف',\n 'اصنع',\n 'انتظر',\n 'انتقال',\n 'أو',\n ]);\n\n // Check if first token value is a verb\n if (ARABIC_VERBS.has(firstToken.value)) {\n return 0.15;\n }\n\n // Check alternatives\n if (firstToken.alternatives) {\n for (const alt of firstToken.alternatives) {\n if (ARABIC_VERBS.has(alt)) {\n return 0.15;\n }\n }\n }\n\n return 0;\n }\n\n /**\n * Arabic preposition disambiguation for confidence adjustment.\n *\n * Different Arabic prepositions are more or less natural for different semantic roles:\n * - على (on/upon) is preferred for patient/target roles (element selectors)\n * - إلى (to) is preferred for destination roles\n * - من (from) is preferred for source roles\n * - في (in) is preferred for location roles\n *\n * This method analyzes the prepositions used with captured semantic roles and\n * adjusts confidence based on idiomaticity:\n * - +0.10 for highly idiomatic preposition choices\n * - -0.10 for less natural preposition choices\n *\n * @param pattern The language pattern being matched\n * @param captured The captured semantic values\n * @returns Confidence adjustment (-0.10 to +0.10)\n */\n private arabicPrepositionDisambiguation(\n pattern: LanguagePattern,\n captured: Map<SemanticRole, SemanticValue>\n ): number {\n // Only apply to Arabic\n if (pattern.language !== 'ar') {\n return 0;\n }\n\n let adjustment = 0;\n\n // Preferred prepositions for each semantic role\n // Only including roles that commonly use prepositions in Arabic\n const PREFERRED_PREPOSITIONS: Partial<Record<SemanticRole, string[]>> = {\n patient: ['على'], // element selectors prefer على (on/upon)\n destination: ['إلى', 'الى'], // destination 
prefers إلى (to)\n source: ['من'], // source prefers من (from)\n agent: ['من'], // agent/by prefers من (from/by)\n manner: ['ب'], // manner prefers ب (with/by)\n style: ['ب'], // style prefers ب (with)\n goal: ['إلى', 'الى'], // target state prefers إلى (to)\n method: ['ب'], // method prefers ب (with/by)\n };\n\n // Check each captured role for preposition metadata\n for (const [role, value] of captured.entries()) {\n // Skip if no preferred prepositions defined for this role\n const preferred = PREFERRED_PREPOSITIONS[role];\n if (!preferred || preferred.length === 0) {\n continue;\n }\n\n // Check if the value has preposition metadata (from Arabic tokenizer)\n // This metadata is attached when a preposition particle token is consumed\n const metadata = (value as any).metadata;\n if (metadata && typeof metadata.prepositionValue === 'string') {\n const usedPreposition = metadata.prepositionValue;\n\n // Check if the used preposition is in the preferred list\n if (preferred.includes(usedPreposition)) {\n // Idiomatic choice - boost confidence\n adjustment += 0.1;\n } else {\n // Less natural choice - reduce confidence\n adjustment -= 0.1;\n }\n }\n }\n\n // Cap total adjustment at ±0.10 (even if multiple roles analyzed)\n return Math.max(-0.1, Math.min(0.1, adjustment));\n }\n\n // ===========================================================================\n // English Idiom Support - Noise Word Handling\n // ===========================================================================\n\n /**\n * Noise words that can be skipped in English for more natural syntax.\n * - \"the\" before selectors: \"toggle the .active\" → \"toggle .active\"\n * - \"class\" after class selectors: \"add the .visible class\" → \"add .visible\"\n */\n private static readonly ENGLISH_NOISE_WORDS = new Set(['the', 'a', 'an']);\n\n /**\n * Skip noise words like \"the\" before selectors.\n * This enables more natural English syntax like \"toggle the .active\".\n */\n private skipNoiseWords(tokens: TokenStream): void {\n const token = tokens.peek();\n if (!token) return;\n\n const tokenLower = token.value.toLowerCase();\n\n // Check if current token is a noise word (like \"the\")\n if (PatternMatcher.ENGLISH_NOISE_WORDS.has(tokenLower)) {\n // Look ahead to see if the next token is a selector\n const mark = tokens.mark();\n tokens.advance();\n const nextToken = tokens.peek();\n\n if (nextToken && nextToken.kind === 'selector') {\n // Keep the position after \"the\" - effectively skipping it\n return;\n }\n\n // Not followed by a selector, revert\n tokens.reset(mark);\n }\n\n // Also handle \"class\" after class selectors: \".visible class\" → \".visible\"\n // This is handled when the selector has already been consumed,\n // so we check if current token is \"class\" and skip it\n if (tokenLower === 'class') {\n // Skip \"class\" as it's just noise after a class selector\n tokens.advance();\n }\n }\n\n /**\n * Extract event modifiers from the token stream.\n * Event modifiers are .once, .debounce(N), .throttle(N), .queue(strategy)\n * that can appear after event names.\n *\n * Returns EventModifiers object or undefined if no modifiers found.\n */\n extractEventModifiers(tokens: TokenStream): import('../types').EventModifiers | undefined {\n const modifiers: {\n once?: boolean;\n debounce?: number;\n throttle?: number;\n queue?: 'first' | 'last' | 'all' | 'none';\n from?: SemanticValue;\n } = {};\n\n let foundModifier = false;\n\n // Consume all consecutive event modifier tokens\n while (!tokens.isAtEnd()) {\n const token 
= tokens.peek();\n if (!token || token.kind !== 'event-modifier') {\n break;\n }\n\n const metadata = token.metadata as\n | { modifierName: string; value?: number | string }\n | undefined;\n if (!metadata) {\n break;\n }\n\n foundModifier = true;\n\n switch (metadata.modifierName) {\n case 'once':\n modifiers.once = true;\n break;\n case 'debounce':\n if (typeof metadata.value === 'number') {\n modifiers.debounce = metadata.value;\n }\n break;\n case 'throttle':\n if (typeof metadata.value === 'number') {\n modifiers.throttle = metadata.value;\n }\n break;\n case 'queue':\n if (\n metadata.value === 'first' ||\n metadata.value === 'last' ||\n metadata.value === 'all' ||\n metadata.value === 'none'\n ) {\n modifiers.queue = metadata.value;\n }\n break;\n }\n\n tokens.advance();\n }\n\n return foundModifier ? modifiers : undefined;\n }\n}\n\n// =============================================================================\n// Convenience Functions\n// =============================================================================\n\n/**\n * Singleton pattern matcher instance.\n */\nexport const patternMatcher = new PatternMatcher();\n\n/**\n * Match tokens against a pattern.\n */\nexport function matchPattern(\n tokens: TokenStream,\n pattern: LanguagePattern\n): PatternMatchResult | null {\n return patternMatcher.matchPattern(tokens, pattern);\n}\n\n/**\n * Match tokens against multiple patterns, return best match.\n */\nexport function matchBest(\n tokens: TokenStream,\n patterns: LanguagePattern[]\n): PatternMatchResult | null {\n return patternMatcher.matchBest(tokens, patterns);\n}\n","/**\n * Tokenizer Registry\n *\n * Provides a unified interface for tokenization.\n * Delegates to the central registry for language lookups.\n *\n * For tree-shaking, import specific tokenizers directly:\n * import { englishTokenizer } from './tokenizers/english';\n *\n * To register languages, import the language modules:\n * import '@lokascript/semantic/languages/en';\n */\n\nimport type { LanguageTokenizer, TokenStream } from '../types';\nimport {\n tryGetTokenizer,\n getRegisteredLanguages,\n isLanguageRegistered,\n tokenize as registryTokenize,\n registerTokenizer as registryRegisterTokenizer,\n} from '../registry';\n\n// =============================================================================\n// Registry Delegation\n// =============================================================================\n\n/**\n * Get a tokenizer for the specified language.\n * Returns undefined if language is not registered.\n */\nexport function getTokenizer(language: string): LanguageTokenizer | undefined {\n return tryGetTokenizer(language);\n}\n\n/**\n * Tokenize input in the specified language.\n * @throws Error if language is not registered\n */\nexport function tokenize(input: string, language: string): TokenStream {\n return registryTokenize(input, language);\n}\n\n/**\n * Get all supported languages.\n * Returns only languages that have been registered.\n */\nexport function getSupportedLanguages(): string[] {\n return getRegisteredLanguages();\n}\n\n/**\n * Check if a language is supported.\n */\nexport function isLanguageSupported(language: string): boolean {\n return isLanguageRegistered(language);\n}\n\n/**\n * Register a custom tokenizer.\n * Note: For full language support, use registerLanguage() from registry instead.\n */\nexport function registerTokenizer(tokenizer: LanguageTokenizer): void {\n registryRegisterTokenizer(tokenizer);\n}\n\n// 
=============================================================================\n// Re-exports (tree-shakeable - only included if imported)\n// =============================================================================\n\nexport { englishTokenizer } from './english';\nexport { japaneseTokenizer } from './japanese';\nexport { koreanTokenizer } from './korean';\nexport { arabicTokenizer } from './arabic';\nexport { spanishTokenizer } from './spanish';\nexport { turkishTokenizer } from './turkish';\nexport { chineseTokenizer } from './chinese';\nexport { portugueseTokenizer } from './portuguese';\nexport { frenchTokenizer } from './french';\nexport { germanTokenizer } from './german';\nexport { indonesianTokenizer } from './indonesian';\nexport { quechuaTokenizer } from './quechua';\nexport { swahiliTokenizer } from './swahili';\nexport { italianTokenizer } from './italian';\nexport { vietnameseTokenizer } from './vietnamese';\nexport { polishTokenizer } from './polish';\nexport { russianTokenizer } from './russian';\nexport { ukrainianTokenizer } from './ukrainian';\nexport { hebrewTokenizer } from './he';\nexport { hindiTokenizer } from './hindi';\nexport { bengaliTokenizer } from './bengali';\nexport { thaiTokenizer } from './thai';\n\nexport { malayTokenizer } from './ms';\n\nexport { tagalogTokenizer } from './tl';\n\nexport {\n BaseTokenizer,\n TokenStreamImpl,\n createToken,\n createPosition,\n extractCssSelector,\n extractStringLiteral,\n extractNumber,\n isUrlStart,\n extractUrl,\n // Character classification utilities\n isWhitespace,\n isSelectorStart,\n isQuote,\n isDigit,\n isAsciiLetter,\n isAsciiIdentifierChar,\n // Factory functions for custom tokenizers\n createUnicodeRangeClassifier,\n combineClassifiers,\n createLatinCharClassifiers,\n} from './base';\n\n// Types for profile-derived keyword initialization and custom tokenizers\nexport type {\n KeywordEntry,\n TokenizerProfile,\n TimeUnitMapping,\n UnicodeRange,\n LatinCharClassifiers,\n CreateTokenOptions,\n} from './base';\n","/**\n * Semantic Result Cache\n *\n * LRU cache for semantic analysis results to optimize repeated parsing.\n *\n * Design:\n * - Cache key: `${language}:${input}` for simple, fast lookups\n * - LRU eviction when max size reached\n * - Optional TTL (time-to-live) for cache entries\n * - Statistics for monitoring cache effectiveness\n * - Thread-safe for browser environments (single-threaded)\n */\n\nimport type { SemanticAnalysisResult } from '../core-bridge';\n\n// =============================================================================\n// Types\n// =============================================================================\n\n/**\n * Cache configuration options.\n */\nexport interface SemanticCacheConfig {\n /** Maximum number of entries to cache. Default: 1000 */\n maxSize?: number;\n /** Time-to-live in milliseconds. 0 = no expiration. Default: 0 */\n ttlMs?: number;\n /** Enable/disable caching. 
Default: true */\n enabled?: boolean;\n}\n\n/**\n * Cache entry with metadata.\n */\ninterface CacheEntry {\n /** The cached result */\n result: SemanticAnalysisResult;\n /** Timestamp when entry was created */\n createdAt: number;\n /** Last access timestamp (for LRU) */\n lastAccessed: number;\n}\n\n/**\n * Cache statistics.\n */\nexport interface CacheStats {\n /** Total cache hits */\n hits: number;\n /** Total cache misses */\n misses: number;\n /** Current cache size */\n size: number;\n /** Maximum cache size */\n maxSize: number;\n /** Hit rate (0-1) */\n hitRate: number;\n /** Total evictions due to size limit */\n evictions: number;\n /** Total expirations due to TTL */\n expirations: number;\n /** Whether caching is enabled */\n enabled: boolean;\n}\n\n// =============================================================================\n// LRU Cache Implementation\n// =============================================================================\n\n/**\n * LRU Cache for semantic analysis results.\n *\n * Uses Map's insertion order for LRU eviction - when we access an entry,\n * we delete and re-insert it to move it to the end (most recently used).\n */\nexport class SemanticCache {\n private cache: Map<string, CacheEntry>;\n private config: Required<SemanticCacheConfig>;\n private stats: {\n hits: number;\n misses: number;\n evictions: number;\n expirations: number;\n };\n\n constructor(config: SemanticCacheConfig = {}) {\n this.cache = new Map();\n this.config = {\n maxSize: config.maxSize ?? 1000,\n ttlMs: config.ttlMs ?? 0,\n enabled: config.enabled ?? true,\n };\n this.stats = {\n hits: 0,\n misses: 0,\n evictions: 0,\n expirations: 0,\n };\n }\n\n /**\n * Generate cache key from input and language.\n */\n private makeKey(input: string, language: string): string {\n return `${language}:${input}`;\n }\n\n /**\n * Check if an entry has expired.\n */\n private isExpired(entry: CacheEntry): boolean {\n if (this.config.ttlMs === 0) return false;\n return Date.now() - entry.createdAt > this.config.ttlMs;\n }\n\n /**\n * Evict the least recently used entry.\n */\n private evictLRU(): void {\n // Map preserves insertion order, so first entry is oldest\n const firstKey = this.cache.keys().next().value;\n if (firstKey !== undefined) {\n this.cache.delete(firstKey);\n this.stats.evictions++;\n }\n }\n\n /**\n * Get a cached result.\n *\n * @param input - The input string\n * @param language - The language code\n * @returns The cached result, or undefined if not found/expired\n */\n get(input: string, language: string): SemanticAnalysisResult | undefined {\n if (!this.config.enabled) {\n this.stats.misses++;\n return undefined;\n }\n\n const key = this.makeKey(input, language);\n const entry = this.cache.get(key);\n\n if (!entry) {\n this.stats.misses++;\n return undefined;\n }\n\n // Check expiration\n if (this.isExpired(entry)) {\n this.cache.delete(key);\n this.stats.expirations++;\n this.stats.misses++;\n return undefined;\n }\n\n // Move to end for LRU (delete and re-insert)\n this.cache.delete(key);\n entry.lastAccessed = Date.now();\n this.cache.set(key, entry);\n\n this.stats.hits++;\n return entry.result;\n }\n\n /**\n * Store a result in the cache.\n *\n * @param input - The input string\n * @param language - The language code\n * @param result - The analysis result to cache\n */\n set(input: string, language: string, result: SemanticAnalysisResult): void {\n if (!this.config.enabled) return;\n\n // Don't cache failed results (confidence 0)\n if (result.confidence === 0) 
return;\n\n const key = this.makeKey(input, language);\n const now = Date.now();\n\n // Evict if at max size\n while (this.cache.size >= this.config.maxSize) {\n this.evictLRU();\n }\n\n this.cache.set(key, {\n result,\n createdAt: now,\n lastAccessed: now,\n });\n }\n\n /**\n * Check if a result is cached (without updating LRU).\n */\n has(input: string, language: string): boolean {\n if (!this.config.enabled) return false;\n\n const key = this.makeKey(input, language);\n const entry = this.cache.get(key);\n\n if (!entry) return false;\n if (this.isExpired(entry)) {\n this.cache.delete(key);\n this.stats.expirations++;\n return false;\n }\n\n return true;\n }\n\n /**\n * Remove a specific entry from the cache.\n */\n delete(input: string, language: string): boolean {\n const key = this.makeKey(input, language);\n return this.cache.delete(key);\n }\n\n /**\n * Clear all cached entries.\n */\n clear(): void {\n this.cache.clear();\n }\n\n /**\n * Reset statistics.\n */\n resetStats(): void {\n this.stats = {\n hits: 0,\n misses: 0,\n evictions: 0,\n expirations: 0,\n };\n }\n\n /**\n * Get cache statistics.\n */\n getStats(): CacheStats {\n const total = this.stats.hits + this.stats.misses;\n return {\n hits: this.stats.hits,\n misses: this.stats.misses,\n size: this.cache.size,\n maxSize: this.config.maxSize,\n hitRate: total > 0 ? this.stats.hits / total : 0,\n evictions: this.stats.evictions,\n expirations: this.stats.expirations,\n enabled: this.config.enabled,\n };\n }\n\n /**\n * Update cache configuration.\n */\n configure(config: Partial<SemanticCacheConfig>): void {\n if (config.maxSize !== undefined) {\n this.config.maxSize = config.maxSize;\n // Evict if now over limit\n while (this.cache.size > this.config.maxSize) {\n this.evictLRU();\n }\n }\n if (config.ttlMs !== undefined) {\n this.config.ttlMs = config.ttlMs;\n }\n if (config.enabled !== undefined) {\n this.config.enabled = config.enabled;\n }\n }\n\n /**\n * Enable caching.\n */\n enable(): void {\n this.config.enabled = true;\n }\n\n /**\n * Disable caching.\n */\n disable(): void {\n this.config.enabled = false;\n }\n\n /**\n * Get current configuration.\n */\n getConfig(): Readonly<Required<SemanticCacheConfig>> {\n return { ...this.config };\n }\n}\n\n// =============================================================================\n// Default Instance\n// =============================================================================\n\n/**\n * Default global cache instance.\n */\nexport const semanticCache = new SemanticCache();\n\n// =============================================================================\n// Utility Functions\n// =============================================================================\n\n/**\n * Create a cache with custom configuration.\n */\nexport function createSemanticCache(config?: SemanticCacheConfig): SemanticCache {\n return new SemanticCache(config);\n}\n\n/**\n * Decorator/wrapper for adding caching to an analyze function.\n *\n * @param analyzeFn - The analyze function to wrap\n * @param cache - The cache instance to use\n * @returns Wrapped function with caching\n */\nexport function withCache<T extends (input: string, language: string) => SemanticAnalysisResult>(\n analyzeFn: T,\n cache: SemanticCache = semanticCache\n): T {\n return ((input: string, language: string): SemanticAnalysisResult => {\n // Check cache first\n const cached = cache.get(input, language);\n if (cached) {\n return cached;\n }\n\n // Run analysis\n const result = analyzeFn(input, language);\n\n // Cache 
result\n cache.set(input, language, result);\n\n return result;\n }) as T;\n}\n","/**\n * Core Parser Bridge\n *\n * Provides the SemanticAnalyzer interface that integrates semantic parsing\n * into the core hyperscript parser. This bridge enables confidence-driven\n * fallback between semantic and traditional parsing.\n */\n\nimport type {\n SemanticNode,\n SemanticValue,\n ActionType,\n PatternMatchResult,\n SemanticRole,\n} from './types';\nimport { PatternMatcher } from './parser/pattern-matcher';\nimport { getTokenizer } from './tokenizers';\n// Import from registry for tree-shaking (registry uses directly-registered patterns first)\nimport { getPatternsForLanguage, getRegisteredLanguages } from './registry';\nimport { SemanticCache, semanticCache, type SemanticCacheConfig, type CacheStats } from './cache';\n\n// =============================================================================\n// SemanticAnalyzer Interface\n// =============================================================================\n\n/**\n * Result of semantic analysis.\n */\nexport interface SemanticAnalysisResult {\n /** Confidence score (0-1) for this analysis */\n readonly confidence: number;\n /** The parsed command info (if successful) */\n readonly command?: {\n readonly name: ActionType;\n readonly roles: ReadonlyMap<SemanticRole, SemanticValue>;\n };\n /** The full semantic node (if successful) */\n readonly node?: SemanticNode;\n /** Any errors encountered */\n readonly errors?: string[];\n /** Number of tokens consumed */\n readonly tokensConsumed?: number;\n}\n\n/**\n * Interface for semantic analysis that can be integrated into the core parser.\n * This allows the core parser to optionally use semantic parsing with\n * confidence-based fallback to traditional parsing.\n */\nexport interface SemanticAnalyzer {\n /**\n * Analyze input in the specified language.\n *\n * @param input The input string to analyze\n * @param language ISO 639-1 language code\n * @returns Analysis result with confidence score\n */\n analyze(input: string, language: string): SemanticAnalysisResult;\n\n /**\n * Check if semantic parsing is available for a language.\n */\n supportsLanguage(language: string): boolean;\n\n /**\n * Get the list of supported languages.\n */\n supportedLanguages(): string[];\n\n /**\n * Get cache statistics.\n */\n getCacheStats(): CacheStats;\n\n /**\n * Clear the result cache.\n */\n clearCache(): void;\n\n /**\n * Configure the cache.\n */\n configureCache(config: Partial<SemanticCacheConfig>): void;\n}\n\n// =============================================================================\n// SemanticAnalyzer Implementation\n// =============================================================================\n\n/**\n * Options for creating a SemanticAnalyzer.\n */\nexport interface SemanticAnalyzerOptions {\n /** Cache configuration. Pass false to disable caching. 
*/\n cache?: SemanticCacheConfig | false;\n}\n\n/**\n * Implementation of SemanticAnalyzer that wraps the semantic parser.\n * Includes LRU caching for performance optimization on repeated inputs.\n */\nexport class SemanticAnalyzerImpl implements SemanticAnalyzer {\n private readonly patternMatcher: PatternMatcher;\n private readonly languages: Set<string>;\n private readonly cache: SemanticCache;\n\n constructor(options: SemanticAnalyzerOptions = {}) {\n this.patternMatcher = new PatternMatcher();\n // Get all registered languages from the registry (dynamically updated as languages are loaded)\n this.languages = new Set(getRegisteredLanguages());\n\n // Initialize cache\n if (options.cache === false) {\n this.cache = new SemanticCache({ enabled: false });\n } else {\n this.cache = options.cache ? new SemanticCache(options.cache) : semanticCache;\n }\n }\n\n analyze(input: string, language: string): SemanticAnalysisResult {\n // Check language support\n if (!this.supportsLanguage(language)) {\n return {\n confidence: 0,\n errors: [`Language '${language}' is not supported for semantic parsing`],\n };\n }\n\n // Check cache first\n const cached = this.cache.get(input, language);\n if (cached) {\n return cached;\n }\n\n // Perform analysis\n const result = this.analyzeUncached(input, language);\n\n // Cache successful results\n this.cache.set(input, language, result);\n\n return result;\n }\n\n /**\n * Perform analysis without cache lookup.\n */\n private analyzeUncached(input: string, language: string): SemanticAnalysisResult {\n try {\n // Tokenize\n const tokenizer = getTokenizer(language);\n if (!tokenizer) {\n return {\n confidence: 0,\n errors: [`No tokenizer available for language '${language}'`],\n };\n }\n\n const tokenStream = tokenizer.tokenize(input);\n\n // Get patterns for this language\n const patterns = getPatternsForLanguage(language);\n if (patterns.length === 0) {\n return {\n confidence: 0,\n errors: [`No patterns available for language '${language}'`],\n };\n }\n\n // Try to match patterns\n const match = this.patternMatcher.matchBest(tokenStream, patterns);\n\n if (!match) {\n return {\n confidence: 0,\n errors: ['No pattern matched the input'],\n };\n }\n\n // Build semantic node from match\n const node = this.buildSemanticNode(match);\n\n return {\n confidence: match.confidence,\n command: {\n name: match.pattern.command,\n roles: match.captured,\n },\n node,\n tokensConsumed: match.consumedTokens,\n };\n } catch (error) {\n return {\n confidence: 0,\n errors: [error instanceof Error ? 
error.message : String(error)],\n };\n }\n }\n\n supportsLanguage(language: string): boolean {\n return this.languages.has(language);\n }\n\n supportedLanguages(): string[] {\n return Array.from(this.languages);\n }\n\n getCacheStats(): CacheStats {\n return this.cache.getStats();\n }\n\n clearCache(): void {\n this.cache.clear();\n }\n\n configureCache(config: Partial<SemanticCacheConfig>): void {\n this.cache.configure(config);\n }\n\n private buildSemanticNode(match: PatternMatchResult): SemanticNode {\n return {\n kind: 'command',\n action: match.pattern.command,\n roles: match.captured,\n metadata: {\n patternId: match.pattern.id,\n },\n };\n }\n}\n\n// =============================================================================\n// Factory Function\n// =============================================================================\n\n/**\n * Create a SemanticAnalyzer instance.\n *\n * @param options - Configuration options including cache settings\n * @returns A new SemanticAnalyzer\n *\n * @example\n * // Default: uses shared global cache\n * const analyzer = createSemanticAnalyzer();\n *\n * @example\n * // Custom cache size\n * const analyzer = createSemanticAnalyzer({ cache: { maxSize: 500 } });\n *\n * @example\n * // Disable caching\n * const analyzer = createSemanticAnalyzer({ cache: false });\n */\nexport function createSemanticAnalyzer(options?: SemanticAnalyzerOptions): SemanticAnalyzer {\n return new SemanticAnalyzerImpl(options);\n}\n\n// Re-export cache types for convenience\nexport type { SemanticCacheConfig, CacheStats } from './cache';\n\n// =============================================================================\n// Confidence Thresholds\n// =============================================================================\n\n/**\n * Default confidence threshold for preferring semantic parsing.\n * If confidence is above this, use semantic result; otherwise fallback.\n */\nexport const DEFAULT_CONFIDENCE_THRESHOLD = 0.5;\n\n/**\n * High confidence threshold for very certain matches.\n */\nexport const HIGH_CONFIDENCE_THRESHOLD = 0.8;\n\n// =============================================================================\n// Integration Helpers\n// =============================================================================\n\n/**\n * Determine if semantic analysis should be used based on confidence.\n */\nexport function shouldUseSemanticResult(\n result: SemanticAnalysisResult,\n threshold: number = DEFAULT_CONFIDENCE_THRESHOLD\n): boolean {\n return result.confidence >= threshold && result.command !== undefined;\n}\n\n/**\n * Convert semantic roles to the format expected by core parser commands.\n * This maps semantic roles to the positional/modifier structure used by\n * the core command implementations.\n *\n * Role to preposition mapping:\n * - patient → first positional arg\n * - event → first positional arg\n * - destination → 'into' (put) or 'on' (others)\n * - source → 'from'\n * - quantity → 'by'\n * - duration → 'over' or 'for'\n * - method → 'as'\n * - style → 'with'\n * - condition → 'if'\n */\nexport function rolesToCommandArgs(\n roles: ReadonlyMap<SemanticRole, SemanticValue>,\n command: ActionType\n): {\n args: SemanticValue[];\n modifiers: Record<string, SemanticValue>;\n} {\n const args: SemanticValue[] = [];\n const modifiers: Record<string, SemanticValue> = {};\n\n for (const [role, value] of roles) {\n switch (role) {\n // Primary arguments (positional)\n case 'patient':\n case 'event':\n args.push(value);\n break;\n\n // Destination: 
context-dependent preposition\n case 'destination':\n if (command === 'put') {\n modifiers['into'] = value;\n } else {\n modifiers['on'] = value;\n }\n break;\n\n // Source: always 'from'\n case 'source':\n modifiers['from'] = value;\n break;\n\n // Quantitative roles\n case 'quantity':\n modifiers['by'] = value;\n break;\n\n case 'duration':\n modifiers['over'] = value;\n break;\n\n // Adverbial roles\n case 'method':\n modifiers['as'] = value;\n break;\n\n case 'style':\n modifiers['with'] = value;\n break;\n\n // Conditional\n case 'condition':\n modifiers['if'] = value;\n break;\n\n // Agent (for future multi-actor systems)\n case 'agent':\n modifiers['agent'] = value;\n break;\n\n default:\n // Unknown roles become modifiers using role name as key\n modifiers[role] = value;\n }\n }\n\n return { args, modifiers };\n}\n","/**\n * Expression Tokenizer\n *\n * Tokenizes expression strings into tokens for parsing.\n * Focused on expression-level constructs, not full hyperscript syntax.\n */\n\n// =============================================================================\n// Token Types\n// =============================================================================\n\nexport enum TokenType {\n // Literals\n NUMBER = 'NUMBER',\n STRING = 'STRING',\n BOOLEAN = 'BOOLEAN',\n TEMPLATE_LITERAL = 'TEMPLATE_LITERAL',\n\n // Selectors\n ID_SELECTOR = 'ID_SELECTOR',\n CLASS_SELECTOR = 'CLASS_SELECTOR',\n ATTRIBUTE_SELECTOR = 'ATTRIBUTE_SELECTOR',\n QUERY_SELECTOR = 'QUERY_SELECTOR',\n\n // References\n CONTEXT_VAR = 'CONTEXT_VAR',\n IDENTIFIER = 'IDENTIFIER',\n\n // Operators\n OPERATOR = 'OPERATOR',\n COMPARISON = 'COMPARISON',\n LOGICAL = 'LOGICAL',\n POSSESSIVE = 'POSSESSIVE',\n\n // Punctuation\n LPAREN = 'LPAREN',\n RPAREN = 'RPAREN',\n LBRACKET = 'LBRACKET',\n RBRACKET = 'RBRACKET',\n LBRACE = 'LBRACE',\n RBRACE = 'RBRACE',\n COMMA = 'COMMA',\n DOT = 'DOT',\n COLON = 'COLON',\n\n // Special\n TIME_EXPRESSION = 'TIME_EXPRESSION',\n EOF = 'EOF',\n ERROR = 'ERROR',\n}\n\nexport interface Token {\n type: TokenType;\n value: string;\n start: number;\n end: number;\n line?: number;\n column?: number;\n}\n\n// =============================================================================\n// Constants\n// =============================================================================\n\nconst CONTEXT_VARS = new Set([\n 'me',\n 'my',\n 'myself',\n 'you',\n 'your',\n 'yourself',\n 'it',\n 'its',\n 'result',\n 'event',\n 'target',\n 'body',\n 'detail',\n 'window',\n 'document',\n]);\n\nconst LOGICAL_OPERATORS = new Set(['and', 'or', 'not', 'no']);\n\nconst BOOLEAN_LITERALS = new Set(['true', 'false', 'null', 'undefined']);\n\nconst TIME_UNITS = new Set([\n 'ms',\n 's',\n 'seconds',\n 'second',\n 'milliseconds',\n 'millisecond',\n 'minutes',\n 'minute',\n 'hours',\n 'hour',\n]);\n\n// =============================================================================\n// Tokenizer\n// =============================================================================\n\nexport function tokenize(input: string): Token[] {\n const tokens: Token[] = [];\n let pos = 0;\n let line = 1;\n let column = 1;\n\n // Track if the previous token allows a selector\n // Selectors are only valid at the start or after operators, opening brackets, etc.\n function previousTokenAllowsSelector(): boolean {\n if (tokens.length === 0) return true;\n const prev = tokens[tokens.length - 1];\n // After these token types, a selector is valid\n return [\n TokenType.OPERATOR,\n TokenType.COMPARISON,\n TokenType.LOGICAL,\n TokenType.LPAREN,\n 
TokenType.LBRACKET,\n TokenType.LBRACE,\n TokenType.COMMA,\n TokenType.COLON,\n ].includes(prev.type);\n }\n\n function peek(offset = 0): string {\n return input[pos + offset] ?? '';\n }\n\n function advance(): string {\n const char = input[pos];\n pos++;\n if (char === '\\n') {\n line++;\n column = 1;\n } else {\n column++;\n }\n return char;\n }\n\n function skipWhitespace(): void {\n while (pos < input.length && /\\s/.test(input[pos])) {\n advance();\n }\n }\n\n function readWhile(predicate: (char: string) => boolean): string {\n let result = '';\n while (pos < input.length && predicate(input[pos])) {\n result += advance();\n }\n return result;\n }\n\n function readString(quote: string): string {\n let result = quote;\n advance(); // consume opening quote\n while (pos < input.length && input[pos] !== quote) {\n if (input[pos] === '\\\\' && pos + 1 < input.length) {\n result += advance(); // backslash\n result += advance(); // escaped char\n } else {\n result += advance();\n }\n }\n if (pos < input.length) {\n result += advance(); // closing quote\n }\n return result;\n }\n\n function readTemplateLiteral(): string {\n let result = '`';\n advance(); // consume opening backtick\n while (pos < input.length && input[pos] !== '`') {\n if (input[pos] === '\\\\' && pos + 1 < input.length) {\n result += advance();\n result += advance();\n } else {\n result += advance();\n }\n }\n if (pos < input.length) {\n result += advance(); // closing backtick\n }\n return result;\n }\n\n function readQuerySelector(): string {\n let result = '<';\n advance(); // consume <\n while (pos < input.length) {\n result += advance();\n if (result.endsWith('/>')) {\n break;\n }\n }\n return result;\n }\n\n function makeToken(type: TokenType, value: string, start: number): Token {\n return {\n type,\n value,\n start,\n end: pos,\n line,\n column: column - value.length,\n };\n }\n\n while (pos < input.length) {\n skipWhitespace();\n if (pos >= input.length) break;\n\n const start = pos;\n const char = peek();\n\n // Possessive 's (check before string literals)\n if (char === \"'\" && peek(1) === 's' && !/\\w/.test(peek(2))) {\n advance(); // '\n advance(); // s\n tokens.push(makeToken(TokenType.POSSESSIVE, \"'s\", start));\n continue;\n }\n\n // String literals\n if (char === '\"' || char === \"'\") {\n const value = readString(char);\n tokens.push(makeToken(TokenType.STRING, value, start));\n continue;\n }\n\n // Template literals\n if (char === '`') {\n const value = readTemplateLiteral();\n tokens.push(makeToken(TokenType.TEMPLATE_LITERAL, value, start));\n continue;\n }\n\n // Query selectors <tag/>\n if (char === '<' && /[a-zA-Z.#\\[]/.test(peek(1))) {\n const value = readQuerySelector();\n tokens.push(makeToken(TokenType.QUERY_SELECTOR, value, start));\n continue;\n }\n\n // ID selectors #id (only at start or after operators)\n if (char === '#' && previousTokenAllowsSelector()) {\n advance();\n const name = readWhile(c => /[\\w-]/.test(c));\n tokens.push(makeToken(TokenType.ID_SELECTOR, '#' + name, start));\n continue;\n }\n\n // Class selectors .class (only at start or after operators)\n if (char === '.' && /[a-zA-Z_-]/.test(peek(1)) && previousTokenAllowsSelector()) {\n advance();\n const name = readWhile(c => /[\\w-]/.test(c));\n tokens.push(makeToken(TokenType.CLASS_SELECTOR, '.' 
+ name, start));\n continue;\n }\n\n // Attribute selectors [attr] or [attr=\"value\"] (only at start or after operators)\n if (char === '[' && previousTokenAllowsSelector()) {\n // Check if this looks like an attribute selector (starts with @ or identifier)\n const nextChar = peek(1);\n if (nextChar === '@' || /[a-zA-Z]/.test(nextChar)) {\n let value = '';\n value += advance(); // [\n while (pos < input.length && input[pos] !== ']') {\n if (input[pos] === '\"' || input[pos] === \"'\") {\n value += readString(input[pos]);\n } else {\n value += advance();\n }\n }\n if (pos < input.length) {\n value += advance(); // ]\n }\n tokens.push(makeToken(TokenType.ATTRIBUTE_SELECTOR, value, start));\n continue;\n }\n }\n\n // Array brackets [ and ]\n if (char === '[') {\n advance();\n tokens.push(makeToken(TokenType.LBRACKET, '[', start));\n continue;\n }\n if (char === ']') {\n advance();\n tokens.push(makeToken(TokenType.RBRACKET, ']', start));\n continue;\n }\n\n // Numbers (including time expressions like 2s, 500ms)\n if (/\\d/.test(char)) {\n const num = readWhile(c => /[\\d.]/.test(c));\n const unitStart = pos;\n const unit = readWhile(c => /[a-zA-Z]/.test(c));\n\n if (TIME_UNITS.has(unit)) {\n tokens.push(makeToken(TokenType.TIME_EXPRESSION, num + unit, start));\n } else {\n // Put back the unit if it's not a time unit\n pos = unitStart;\n tokens.push(makeToken(TokenType.NUMBER, num, start));\n }\n continue;\n }\n\n // Punctuation\n if (char === '(') {\n advance();\n tokens.push(makeToken(TokenType.LPAREN, '(', start));\n continue;\n }\n if (char === ')') {\n advance();\n tokens.push(makeToken(TokenType.RPAREN, ')', start));\n continue;\n }\n if (char === '{') {\n advance();\n tokens.push(makeToken(TokenType.LBRACE, '{', start));\n continue;\n }\n if (char === '}') {\n advance();\n tokens.push(makeToken(TokenType.RBRACE, '}', start));\n continue;\n }\n if (char === ',') {\n advance();\n tokens.push(makeToken(TokenType.COMMA, ',', start));\n continue;\n }\n if (char === ':') {\n advance();\n tokens.push(makeToken(TokenType.COLON, ':', start));\n continue;\n }\n if (char === '.') {\n advance();\n tokens.push(makeToken(TokenType.DOT, '.', start));\n continue;\n }\n\n // Operators\n if (char === '+' || char === '-' || char === '*' || char === '/' || char === '%') {\n advance();\n tokens.push(makeToken(TokenType.OPERATOR, char, start));\n continue;\n }\n\n // Comparison operators\n if (char === '=' || char === '!' 
|| char === '<' || char === '>') {\n let op = advance();\n if (peek() === '=') {\n op += advance();\n }\n tokens.push(makeToken(TokenType.COMPARISON, op, start));\n continue;\n }\n\n // Identifiers and keywords\n if (/[a-zA-Z_$]/.test(char)) {\n const word = readWhile(c => /[\\w$]/.test(c));\n const lower = word.toLowerCase();\n\n if (CONTEXT_VARS.has(lower)) {\n tokens.push(makeToken(TokenType.CONTEXT_VAR, word, start));\n } else if (LOGICAL_OPERATORS.has(lower)) {\n tokens.push(makeToken(TokenType.LOGICAL, word, start));\n } else if (BOOLEAN_LITERALS.has(lower)) {\n tokens.push(makeToken(TokenType.BOOLEAN, word, start));\n } else {\n tokens.push(makeToken(TokenType.IDENTIFIER, word, start));\n }\n continue;\n }\n\n // Unknown character - skip it\n advance();\n }\n\n tokens.push(makeToken(TokenType.EOF, '', pos));\n return tokens;\n}\n","/**\n * Expression Parser\n *\n * Parses expression tokens into AST nodes.\n * Uses recursive descent parsing with operator precedence.\n */\n\nimport { tokenize, Token, TokenType } from './tokenizer';\nimport type {\n ExpressionNode,\n LiteralNode,\n TemplateLiteralNode,\n SelectorNode,\n ContextReferenceNode,\n IdentifierNode,\n PropertyAccessNode,\n PossessiveExpressionNode,\n BinaryExpressionNode,\n UnaryExpressionNode,\n CallExpressionNode,\n ArrayLiteralNode,\n ObjectLiteralNode,\n TimeExpressionNode,\n ExpressionParseResult,\n ContextType,\n SelectorKind,\n} from './types';\n\n// =============================================================================\n// Parser Class\n// =============================================================================\n\nexport class ExpressionParser {\n private tokens: Token[] = [];\n private current = 0;\n\n parse(input: string): ExpressionParseResult {\n try {\n this.tokens = tokenize(input);\n this.current = 0;\n\n if (this.isAtEnd()) {\n return { success: false, error: 'Empty expression' };\n }\n\n const node = this.parseExpression();\n return { success: true, node, consumed: this.current };\n } catch (e) {\n return {\n success: false,\n error: e instanceof Error ? e.message : 'Parse error',\n };\n }\n }\n\n // =============================================================================\n // Token Navigation\n // =============================================================================\n\n private peek(): Token {\n return this.tokens[this.current] ?? { type: TokenType.EOF, value: '', start: 0, end: 0 };\n }\n\n private previous(): Token {\n return this.tokens[this.current - 1] ?? 
{ type: TokenType.EOF, value: '', start: 0, end: 0 };\n }\n\n private isAtEnd(): boolean {\n return this.peek().type === TokenType.EOF;\n }\n\n private advance(): Token {\n if (!this.isAtEnd()) {\n this.current++;\n }\n return this.previous();\n }\n\n private check(type: TokenType): boolean {\n return this.peek().type === type;\n }\n\n private checkValue(value: string): boolean {\n return this.peek().value.toLowerCase() === value.toLowerCase();\n }\n\n private match(...types: TokenType[]): boolean {\n for (const type of types) {\n if (this.check(type)) {\n this.advance();\n return true;\n }\n }\n return false;\n }\n\n // =============================================================================\n // Expression Parsing (Precedence Climbing)\n // =============================================================================\n\n private parseExpression(): ExpressionNode {\n return this.parseOr();\n }\n\n private parseOr(): ExpressionNode {\n let left = this.parseAnd();\n\n while (this.checkValue('or')) {\n const operator = this.advance().value;\n const right = this.parseAnd();\n left = this.createBinaryExpression(operator, left, right);\n }\n\n return left;\n }\n\n private parseAnd(): ExpressionNode {\n let left = this.parseEquality();\n\n while (this.checkValue('and')) {\n const operator = this.advance().value;\n const right = this.parseEquality();\n left = this.createBinaryExpression(operator, left, right);\n }\n\n return left;\n }\n\n private parseEquality(): ExpressionNode {\n let left = this.parseComparison();\n\n while (\n this.match(TokenType.COMPARISON) ||\n this.checkValue('is') ||\n this.checkValue('matches') ||\n this.checkValue('contains') ||\n this.checkValue('in')\n ) {\n const operator = this.previous().value;\n const right = this.parseComparison();\n left = this.createBinaryExpression(operator, left, right);\n }\n\n return left;\n }\n\n private parseComparison(): ExpressionNode {\n let left = this.parseAddition();\n\n while (this.check(TokenType.COMPARISON)) {\n const operator = this.advance().value;\n const right = this.parseAddition();\n left = this.createBinaryExpression(operator, left, right);\n }\n\n return left;\n }\n\n private parseAddition(): ExpressionNode {\n let left = this.parseMultiplication();\n\n while (this.peek().value === '+' || this.peek().value === '-') {\n const operator = this.advance().value;\n const right = this.parseMultiplication();\n left = this.createBinaryExpression(operator, left, right);\n }\n\n return left;\n }\n\n private parseMultiplication(): ExpressionNode {\n let left = this.parseUnary();\n\n while (this.peek().value === '*' || this.peek().value === '/' || this.peek().value === '%') {\n const operator = this.advance().value;\n const right = this.parseUnary();\n left = this.createBinaryExpression(operator, left, right);\n }\n\n return left;\n }\n\n private parseUnary(): ExpressionNode {\n if (this.checkValue('not') || this.checkValue('no') || this.peek().value === '-') {\n const operator = this.advance().value;\n const operand = this.parseUnary();\n return this.createUnaryExpression(operator, operand);\n }\n\n return this.parsePostfix();\n }\n\n private parsePostfix(): ExpressionNode {\n let expr = this.parsePrimary();\n\n while (true) {\n // Property access with dot: expr.property\n if (this.match(TokenType.DOT)) {\n // Accept IDENTIFIER or CONTEXT_VAR as property name\n if (this.check(TokenType.IDENTIFIER) || this.check(TokenType.CONTEXT_VAR)) {\n const property = this.advance().value;\n expr = this.createPropertyAccess(expr, 
property);\n } else {\n break;\n }\n }\n // Possessive: expr's property\n else if (this.match(TokenType.POSSESSIVE)) {\n // Accept IDENTIFIER or CONTEXT_VAR as property name\n if (this.check(TokenType.IDENTIFIER) || this.check(TokenType.CONTEXT_VAR)) {\n const property = this.advance().value;\n expr = this.createPossessiveExpression(expr, property);\n } else {\n break;\n }\n }\n // Function call: expr(args)\n else if (this.match(TokenType.LPAREN)) {\n const args = this.parseArguments();\n expr = this.createCallExpression(expr, args);\n }\n // Array access: expr[index]\n else if (this.match(TokenType.LBRACKET)) {\n const index = this.parseExpression();\n if (!this.match(TokenType.RBRACKET)) {\n throw new Error('Expected ] after index');\n }\n expr = this.createPropertyAccess(expr, index);\n } else {\n break;\n }\n }\n\n return expr;\n }\n\n private parsePrimary(): ExpressionNode {\n const token = this.peek();\n\n // Literals\n if (this.match(TokenType.NUMBER)) {\n return this.createLiteral(parseFloat(token.value), 'number', token);\n }\n\n if (this.match(TokenType.STRING)) {\n const value = token.value.slice(1, -1); // Remove quotes\n return this.createLiteral(value, 'string', token);\n }\n\n if (this.match(TokenType.BOOLEAN)) {\n const value =\n token.value === 'true'\n ? true\n : token.value === 'false'\n ? false\n : token.value === 'null'\n ? null\n : undefined;\n return this.createLiteral(value, token.value as any, token);\n }\n\n if (this.match(TokenType.TEMPLATE_LITERAL)) {\n const templateNode: TemplateLiteralNode = {\n type: 'templateLiteral',\n value: token.value,\n start: token.start,\n end: token.end,\n line: token.line,\n column: token.column,\n };\n return templateNode;\n }\n\n if (this.match(TokenType.TIME_EXPRESSION)) {\n return this.parseTimeExpression(token);\n }\n\n // Selectors\n if (this.match(TokenType.ID_SELECTOR)) {\n return this.createSelector(token.value, 'id', token);\n }\n\n if (this.match(TokenType.CLASS_SELECTOR)) {\n return this.createSelector(token.value, 'class', token);\n }\n\n if (this.match(TokenType.ATTRIBUTE_SELECTOR)) {\n return this.createSelector(token.value, 'attribute', token);\n }\n\n if (this.match(TokenType.QUERY_SELECTOR)) {\n // Extract selector from <.../>\n const selector = token.value.slice(1, -2);\n return this.createSelector(selector, 'query', token);\n }\n\n // Context references\n if (this.match(TokenType.CONTEXT_VAR)) {\n return this.createContextReference(token.value as ContextType, token);\n }\n\n // Identifiers\n if (this.match(TokenType.IDENTIFIER)) {\n return this.createIdentifier(token.value, token);\n }\n\n // Parenthesized expression\n if (this.match(TokenType.LPAREN)) {\n const expr = this.parseExpression();\n if (!this.match(TokenType.RPAREN)) {\n throw new Error('Expected ) after expression');\n }\n return expr;\n }\n\n // Array literal\n if (this.match(TokenType.LBRACKET)) {\n return this.parseArrayLiteral();\n }\n\n // Object literal\n if (this.match(TokenType.LBRACE)) {\n return this.parseObjectLiteral();\n }\n\n throw new Error(`Unexpected token: ${token.value}`);\n }\n\n private parseArguments(): ExpressionNode[] {\n const args: ExpressionNode[] = [];\n\n if (!this.check(TokenType.RPAREN)) {\n do {\n args.push(this.parseExpression());\n } while (this.match(TokenType.COMMA));\n }\n\n if (!this.match(TokenType.RPAREN)) {\n throw new Error('Expected ) after arguments');\n }\n\n return args;\n }\n\n private parseArrayLiteral(): ArrayLiteralNode {\n const elements: ExpressionNode[] = [];\n const start = 
this.previous().start;\n\n if (!this.check(TokenType.RBRACKET)) {\n do {\n elements.push(this.parseExpression());\n } while (this.match(TokenType.COMMA));\n }\n\n if (!this.match(TokenType.RBRACKET)) {\n throw new Error('Expected ] after array elements');\n }\n\n return {\n type: 'arrayLiteral',\n elements,\n start,\n end: this.previous().end,\n };\n }\n\n private parseObjectLiteral(): ObjectLiteralNode {\n const properties: Array<{ key: string; value: ExpressionNode }> = [];\n const start = this.previous().start;\n\n if (!this.check(TokenType.RBRACE)) {\n do {\n let key: string;\n if (this.check(TokenType.STRING)) {\n key = this.advance().value.slice(1, -1);\n } else if (this.check(TokenType.IDENTIFIER)) {\n key = this.advance().value;\n } else {\n throw new Error('Expected property name');\n }\n\n if (!this.match(TokenType.COLON)) {\n throw new Error('Expected : after property name');\n }\n\n const value = this.parseExpression();\n properties.push({ key, value });\n } while (this.match(TokenType.COMMA));\n }\n\n if (!this.match(TokenType.RBRACE)) {\n throw new Error('Expected } after object properties');\n }\n\n return {\n type: 'objectLiteral',\n properties: properties.map(p => ({\n type: 'objectProperty' as const,\n key: p.key,\n value: p.value,\n })),\n start,\n end: this.previous().end,\n };\n }\n\n private parseTimeExpression(token: Token): TimeExpressionNode {\n const match = token.value.match(\n /^(\\d+(?:\\.\\d+)?)(ms|s|seconds?|milliseconds?|minutes?|hours?)$/i\n );\n if (!match) {\n throw new Error(`Invalid time expression: ${token.value}`);\n }\n\n const value = parseFloat(match[1]);\n const unit = match[2].toLowerCase() as TimeExpressionNode['unit'];\n\n return {\n type: 'timeExpression',\n value,\n unit,\n raw: token.value,\n start: token.start,\n end: token.end,\n line: token.line,\n column: token.column,\n };\n }\n\n // =============================================================================\n // Node Factories\n // =============================================================================\n\n private createLiteral(\n value: string | number | boolean | null | undefined,\n dataType: LiteralNode['dataType'],\n token: Token\n ): LiteralNode {\n return {\n type: 'literal',\n value,\n dataType,\n raw: token.value,\n start: token.start,\n end: token.end,\n line: token.line,\n column: token.column,\n };\n }\n\n private createSelector(value: string, kind: SelectorKind, token: Token): SelectorNode {\n return {\n type: 'selector',\n value,\n selector: value,\n selectorType: kind,\n start: token.start,\n end: token.end,\n line: token.line,\n column: token.column,\n };\n }\n\n private createContextReference(contextType: ContextType, token: Token): ContextReferenceNode {\n return {\n type: 'contextReference',\n contextType,\n name: token.value,\n start: token.start,\n end: token.end,\n line: token.line,\n column: token.column,\n };\n }\n\n private createIdentifier(name: string, token: Token): IdentifierNode {\n return {\n type: 'identifier',\n name,\n start: token.start,\n end: token.end,\n line: token.line,\n column: token.column,\n };\n }\n\n private createPropertyAccess(\n object: ExpressionNode,\n property: string | ExpressionNode\n ): PropertyAccessNode {\n return {\n type: 'propertyAccess',\n object,\n property: typeof property === 'string' ? property : ((property as any).name ?? 
''),\n start: object.start,\n end: this.previous().end,\n };\n }\n\n private createPossessiveExpression(\n object: ExpressionNode,\n property: string\n ): PossessiveExpressionNode {\n return {\n type: 'possessiveExpression',\n object,\n property,\n start: object.start,\n end: this.previous().end,\n };\n }\n\n private createBinaryExpression(\n operator: string,\n left: ExpressionNode,\n right: ExpressionNode\n ): BinaryExpressionNode {\n return {\n type: 'binaryExpression',\n operator,\n left,\n right,\n start: left.start,\n end: right.end,\n };\n }\n\n private createUnaryExpression(operator: string, operand: ExpressionNode): UnaryExpressionNode {\n return {\n type: 'unaryExpression',\n operator,\n operand,\n prefix: true,\n start: this.previous().start,\n end: operand.end,\n };\n }\n\n private createCallExpression(callee: ExpressionNode, args: ExpressionNode[]): CallExpressionNode {\n return {\n type: 'callExpression',\n callee,\n arguments: args,\n start: callee.start,\n end: this.previous().end,\n };\n }\n}\n\n// =============================================================================\n// Convenience Function\n// =============================================================================\n\n/**\n * Parse an expression string into an AST node.\n *\n * @param input - The expression string to parse\n * @returns The parse result with success status and node or error\n */\nexport function parseExpression(input: string): ExpressionParseResult {\n const parser = new ExpressionParser();\n return parser.parse(input);\n}\n","/**\n * Semantic Value to AST Node Converters\n *\n * Converts SemanticValue types to AST expression nodes.\n * Used by the AST builder to construct expression trees from semantic parsing results.\n */\n\nimport type {\n SemanticValue,\n LiteralValue,\n SelectorValue,\n ReferenceValue,\n PropertyPathValue,\n ExpressionValue,\n} from '../types';\n\nimport {\n parseExpression,\n type ExpressionNode,\n type LiteralNode,\n type SelectorNode,\n type ContextReferenceNode,\n type PropertyAccessNode,\n type IdentifierNode,\n type ContextType,\n type SelectorKind,\n} from './expression-parser';\n\n// =============================================================================\n// Value Converters\n// =============================================================================\n\n/**\n * Convert a SemanticValue to an AST ExpressionNode.\n *\n * @param value - The semantic value to convert\n * @param warnings - Optional array to collect warnings about potentially incorrect type choices\n * @returns The corresponding AST expression node\n */\nexport function convertValue(value: SemanticValue, warnings?: string[]): ExpressionNode {\n switch (value.type) {\n case 'literal':\n return convertLiteral(value);\n case 'selector':\n return convertSelector(value, warnings);\n case 'reference':\n return convertReference(value);\n case 'property-path':\n return convertPropertyPath(value, warnings);\n case 'expression':\n return convertExpression(value);\n default:\n // Exhaustive check\n const _exhaustive: never = value;\n throw new Error(`Unknown semantic value type: ${(_exhaustive as SemanticValue).type}`);\n }\n}\n\n/**\n * Convert a LiteralValue to a LiteralNode.\n */\nexport function convertLiteral(value: LiteralValue): LiteralNode {\n const result: LiteralNode = {\n type: 'literal',\n value: value.value,\n };\n\n // Only add dataType if defined (exactOptionalPropertyTypes)\n if (value.dataType) {\n return { ...result, dataType: value.dataType };\n }\n\n return result;\n}\n\n/**\n * 
Convert a SelectorValue to a SelectorNode.\n *\n * @param value - The selector value to convert\n * @param warnings - Optional array to collect warnings\n */\nexport function convertSelector(value: SelectorValue, warnings?: string[]): SelectorNode {\n // Warn if selector looks like a CSS property (starts with * followed by a letter/hyphen)\n // This catches cases like \"*background-color\" which should likely be a literal string\n if (warnings && value.value.startsWith('*') && /^[a-zA-Z-]/.test(value.value.slice(1))) {\n warnings.push(\n `Converted '${value.value}' to a CSS selector, but it looks like a CSS property name. ` +\n `CSS properties in commands like 'transition' should be literal strings, not selectors. ` +\n `Consider using expectedTypes: ['literal'] instead of ['literal', 'selector'] in the command schema.`\n );\n }\n\n return {\n type: 'selector',\n value: value.value,\n selector: value.value,\n selectorType: value.selectorKind as SelectorKind,\n };\n}\n\n/**\n * Convert a ReferenceValue to a ContextReferenceNode.\n */\nexport function convertReference(value: ReferenceValue): ContextReferenceNode {\n return {\n type: 'contextReference',\n contextType: value.value as ContextType,\n name: value.value,\n };\n}\n\n/**\n * Convert a PropertyPathValue to a PropertyAccessNode.\n * Recursively converts the object part.\n *\n * @param value - The property path value to convert\n * @param warnings - Optional array to collect warnings\n */\nexport function convertPropertyPath(\n value: PropertyPathValue,\n warnings?: string[]\n): PropertyAccessNode {\n return {\n type: 'propertyAccess',\n object: convertValue(value.object, warnings),\n property: value.property,\n };\n}\n\n/**\n * Convert an ExpressionValue (raw string) by parsing it with the expression parser.\n * This is the fallback for complex expressions that couldn't be fully parsed\n * at the semantic level.\n */\nexport function convertExpression(value: ExpressionValue): ExpressionNode {\n const result = parseExpression(value.raw);\n\n if (!result.success || !result.node) {\n // If parsing fails, return an identifier node with the raw value\n const identifier: IdentifierNode = {\n type: 'identifier',\n name: value.raw,\n };\n return identifier;\n }\n\n return result.node;\n}\n\n// =============================================================================\n// Type Guards\n// =============================================================================\n\nexport function isLiteralValue(value: SemanticValue): value is LiteralValue {\n return value.type === 'literal';\n}\n\nexport function isSelectorValue(value: SemanticValue): value is SelectorValue {\n return value.type === 'selector';\n}\n\nexport function isReferenceValue(value: SemanticValue): value is ReferenceValue {\n return value.type === 'reference';\n}\n\nexport function isPropertyPathValue(value: SemanticValue): value is PropertyPathValue {\n return value.type === 'property-path';\n}\n\nexport function isExpressionValue(value: SemanticValue): value is ExpressionValue {\n return value.type === 'expression';\n}\n","/**\n * Command-specific AST Mappers\n *\n * Each command can have a custom mapper that knows how to convert\n * its semantic roles to the appropriate AST structure.\n */\n\nimport type { CommandSemanticNode, ActionType, SemanticValue, SemanticRole } from '../types';\nimport { convertValue } from './value-converters';\nimport type { ASTBuilder, CommandNode } from './index';\nimport type { ExpressionNode } from './expression-parser';\n\n// 
=============================================================================\n// Command Mapper Interface\n// =============================================================================\n\n/**\n * Result from command mapping, including the AST and any warnings.\n */\nexport interface CommandMapperResult {\n ast: CommandNode;\n warnings: string[];\n}\n\n/**\n * Interface for command-specific AST mappers.\n */\nexport interface CommandMapper {\n /**\n * The action type this mapper handles.\n */\n readonly action: ActionType;\n\n /**\n * Convert a CommandSemanticNode to a CommandNode.\n *\n * @param node - The semantic command node\n * @param builder - The AST builder (for recursive building if needed)\n * @returns The AST command node with any warnings, or just the AST node for backward compatibility\n */\n toAST(node: CommandSemanticNode, builder: ASTBuilder): CommandMapperResult | CommandNode;\n}\n\n// =============================================================================\n// Helper Functions\n// =============================================================================\n\n/**\n * Get a semantic value from a node's roles, returning undefined if not present.\n */\nfunction getRole(node: CommandSemanticNode, role: SemanticRole): SemanticValue | undefined {\n return node.roles.get(role);\n}\n\n/**\n * Convert a semantic value to an AST expression, or return undefined.\n *\n * @param node - The semantic node containing roles\n * @param role - The semantic role to extract\n * @param warnings - Optional array to collect warnings\n */\nfunction convertRoleValue(\n node: CommandSemanticNode,\n role: SemanticRole,\n warnings?: string[]\n): ExpressionNode | undefined {\n const value = getRole(node, role);\n return value ? convertValue(value, warnings) : undefined;\n}\n\n/**\n * Create a basic command node with standard structure.\n * Handles exactOptionalPropertyTypes by not including undefined properties.\n */\nfunction createCommandNode(\n name: string,\n args: ExpressionNode[] = [],\n modifiers?: Record<string, ExpressionNode>,\n options: { isBlocking?: boolean; implicitTarget?: ExpressionNode } = {}\n): CommandNode {\n const result: CommandNode = {\n type: 'command',\n name,\n args,\n };\n\n // Only add optional properties if they have values (exactOptionalPropertyTypes)\n if (modifiers && Object.keys(modifiers).length > 0) {\n (result as { modifiers: Record<string, ExpressionNode> }).modifiers = modifiers;\n }\n\n if (options.isBlocking) {\n (result as { isBlocking: boolean }).isBlocking = options.isBlocking;\n }\n\n if (options.implicitTarget) {\n (result as { implicitTarget: ExpressionNode }).implicitTarget = options.implicitTarget;\n }\n\n return result;\n}\n\n// =============================================================================\n// Command Mappers\n// =============================================================================\n\n/**\n * Toggle command mapper.\n *\n * Semantic: toggle patient:.active destination:#button\n * AST: { name: 'toggle', args: ['.active'], modifiers: { on: '#button' } }\n */\nconst toggleMapper: CommandMapper = {\n action: 'toggle',\n toAST(node, _builder) {\n const patient = convertRoleValue(node, 'patient');\n const destination = convertRoleValue(node, 'destination');\n const duration = convertRoleValue(node, 'duration');\n\n const args: ExpressionNode[] = patient ? 
[patient] : [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n if (destination) modifiers['on'] = destination;\n if (duration) modifiers['for'] = duration;\n\n return createCommandNode('toggle', args, modifiers);\n },\n};\n\n/**\n * Add command mapper.\n *\n * Semantic: add patient:.active destination:#button\n * AST: { name: 'add', args: ['.active'], modifiers: { to: '#button' } }\n */\nconst addMapper: CommandMapper = {\n action: 'add',\n toAST(node, _builder) {\n const patient = convertRoleValue(node, 'patient');\n const destination = convertRoleValue(node, 'destination');\n\n const args: ExpressionNode[] = patient ? [patient] : [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n if (destination) modifiers['to'] = destination;\n\n return createCommandNode('add', args, modifiers);\n },\n};\n\n/**\n * Remove command mapper.\n *\n * Semantic: remove patient:.active source:#button\n * AST: { name: 'remove', args: ['.active'], modifiers: { from: '#button' } }\n */\nconst removeMapper: CommandMapper = {\n action: 'remove',\n toAST(node, _builder) {\n const patient = convertRoleValue(node, 'patient');\n const source = convertRoleValue(node, 'source');\n\n const args: ExpressionNode[] = patient ? [patient] : [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n if (source) modifiers['from'] = source;\n\n return createCommandNode('remove', args, modifiers);\n },\n};\n\n/**\n * Set command mapper.\n *\n * Semantic: set destination:#element's value patient:\"hello\"\n * AST: { name: 'set', args: [#element's value], modifiers: { to: \"hello\" } }\n *\n * Note: The destination typically includes the property path (e.g., #element's value)\n * and patient is the value being set.\n */\nconst setMapper: CommandMapper = {\n action: 'set',\n toAST(node, _builder) {\n const destination = convertRoleValue(node, 'destination');\n const patient = convertRoleValue(node, 'patient');\n\n const args: ExpressionNode[] = [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n // The destination is typically the property path to set\n if (destination) {\n args.push(destination);\n }\n\n // The patient is the value being set\n if (patient) modifiers['to'] = patient;\n\n return createCommandNode('set', args, modifiers);\n },\n};\n\n/**\n * Show command mapper.\n */\nconst showMapper: CommandMapper = {\n action: 'show',\n toAST(node, _builder) {\n const destination = convertRoleValue(node, 'destination');\n const patient = convertRoleValue(node, 'patient');\n const duration = convertRoleValue(node, 'duration');\n\n const args: ExpressionNode[] = [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n // Target can be in destination or patient\n const target = destination ?? patient;\n if (target) args.push(target);\n if (duration) modifiers['with'] = duration;\n\n return createCommandNode('show', args, modifiers);\n },\n};\n\n/**\n * Hide command mapper.\n */\nconst hideMapper: CommandMapper = {\n action: 'hide',\n toAST(node, _builder) {\n const destination = convertRoleValue(node, 'destination');\n const patient = convertRoleValue(node, 'patient');\n const duration = convertRoleValue(node, 'duration');\n\n const args: ExpressionNode[] = [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n const target = destination ?? 
patient;\n if (target) args.push(target);\n if (duration) modifiers['with'] = duration;\n\n return createCommandNode('hide', args, modifiers);\n },\n};\n\n/**\n * Increment command mapper.\n *\n * Semantic: increment patient:#count quantity:5\n * AST: { name: 'increment', args: [#count], modifiers: { by: 5 } }\n */\nconst incrementMapper: CommandMapper = {\n action: 'increment',\n toAST(node, _builder) {\n const destination = convertRoleValue(node, 'destination');\n const patient = convertRoleValue(node, 'patient');\n const quantity = convertRoleValue(node, 'quantity'); // Amount\n\n const args: ExpressionNode[] = [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n const target = destination ?? patient;\n if (target) args.push(target);\n if (quantity) modifiers['by'] = quantity;\n\n return createCommandNode('increment', args, modifiers);\n },\n};\n\n/**\n * Decrement command mapper.\n *\n * Semantic: decrement patient:#count quantity:5\n * AST: { name: 'decrement', args: [#count], modifiers: { by: 5 } }\n */\nconst decrementMapper: CommandMapper = {\n action: 'decrement',\n toAST(node, _builder) {\n const destination = convertRoleValue(node, 'destination');\n const patient = convertRoleValue(node, 'patient');\n const quantity = convertRoleValue(node, 'quantity');\n\n const args: ExpressionNode[] = [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n const target = destination ?? patient;\n if (target) args.push(target);\n if (quantity) modifiers['by'] = quantity;\n\n return createCommandNode('decrement', args, modifiers);\n },\n};\n\n/**\n * Wait command mapper.\n */\nconst waitMapper: CommandMapper = {\n action: 'wait',\n toAST(node, _builder) {\n const duration = convertRoleValue(node, 'duration');\n\n const args: ExpressionNode[] = duration ? [duration] : [];\n\n return createCommandNode('wait', args, undefined, { isBlocking: true });\n },\n};\n\n/**\n * Log command mapper.\n *\n * Semantic: log patient:\"hello\"\n * AST: { name: 'log', args: [\"hello\"] }\n */\nconst logMapper: CommandMapper = {\n action: 'log',\n toAST(node, _builder) {\n const patient = convertRoleValue(node, 'patient');\n\n const args: ExpressionNode[] = [];\n if (patient) args.push(patient);\n\n return createCommandNode('log', args);\n },\n};\n\n/**\n * Put command mapper.\n *\n * Semantic: put patient:\"hello\" destination:#output method:into\n * AST: { name: 'put', args: [\"hello\"], modifiers: { into: #output } }\n */\nconst putMapper: CommandMapper = {\n action: 'put',\n toAST(node, _builder) {\n const patient = convertRoleValue(node, 'patient');\n const destination = convertRoleValue(node, 'destination');\n const method = getRole(node, 'method'); // before, after, into, etc.\n\n const args: ExpressionNode[] = patient ? [patient] : [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n if (destination) {\n // Determine the preposition based on method or default to 'into'\n const prep = method?.type === 'literal' ? 
String(method.value) : 'into';\n modifiers[prep] = destination;\n }\n\n return createCommandNode('put', args, modifiers);\n },\n};\n\n/**\n * Fetch command mapper.\n *\n * Semantic: fetch source:\"/api/data\" responseType:json method:GET\n * AST: { name: 'fetch', args: [\"/api/data\"], modifiers: { as: json, with: GET } }\n */\nconst fetchMapper: CommandMapper = {\n action: 'fetch',\n toAST(node, _builder) {\n const source = convertRoleValue(node, 'source'); // URL\n const method = convertRoleValue(node, 'method'); // GET, POST, etc.\n const responseType = convertRoleValue(node, 'responseType'); // json, text, etc.\n const patient = convertRoleValue(node, 'patient'); // Body\n\n const args: ExpressionNode[] = source ? [source] : [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n if (method) modifiers['with'] = method;\n if (responseType) modifiers['as'] = responseType;\n if (patient) modifiers['body'] = patient;\n\n return createCommandNode('fetch', args, modifiers, { isBlocking: true });\n },\n};\n\n/**\n * Append command mapper.\n *\n * Semantic: append patient:\"text\" destination:#output\n * AST: { name: 'append', args: [\"text\"], modifiers: { to: #output } }\n */\nconst appendMapper: CommandMapper = {\n action: 'append',\n toAST(node, _builder) {\n const patient = convertRoleValue(node, 'patient');\n const destination = convertRoleValue(node, 'destination');\n\n const args: ExpressionNode[] = patient ? [patient] : [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n if (destination) modifiers['to'] = destination;\n\n return createCommandNode('append', args, modifiers);\n },\n};\n\n/**\n * Prepend command mapper.\n *\n * Semantic: prepend patient:\"text\" destination:#output\n * AST: { name: 'prepend', args: [\"text\"], modifiers: { to: #output } }\n */\nconst prependMapper: CommandMapper = {\n action: 'prepend',\n toAST(node, _builder) {\n const patient = convertRoleValue(node, 'patient');\n const destination = convertRoleValue(node, 'destination');\n\n const args: ExpressionNode[] = patient ? [patient] : [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n if (destination) modifiers['to'] = destination;\n\n return createCommandNode('prepend', args, modifiers);\n },\n};\n\n/**\n * Trigger command mapper.\n *\n * Semantic: trigger event:click destination:#button\n * AST: { name: 'trigger', args: [click], modifiers: { on: #button } }\n */\nconst triggerMapper: CommandMapper = {\n action: 'trigger',\n toAST(node, _builder) {\n const event = convertRoleValue(node, 'event');\n const destination = convertRoleValue(node, 'destination');\n\n const args: ExpressionNode[] = event ? [event] : [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n if (destination) modifiers['on'] = destination;\n\n return createCommandNode('trigger', args, modifiers);\n },\n};\n\n/**\n * Send command mapper.\n *\n * Semantic: send event:customEvent destination:#target patient:{detail}\n * AST: { name: 'send', args: [customEvent], modifiers: { to: #target, detail: ... } }\n */\nconst sendMapper: CommandMapper = {\n action: 'send',\n toAST(node, _builder) {\n const event = convertRoleValue(node, 'event');\n const destination = convertRoleValue(node, 'destination');\n const patient = convertRoleValue(node, 'patient');\n\n const args: ExpressionNode[] = event ? 
[event] : [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n if (destination) modifiers['to'] = destination;\n if (patient) modifiers['detail'] = patient;\n\n return createCommandNode('send', args, modifiers);\n },\n};\n\n/**\n * Go command mapper (navigation).\n *\n * Semantic: go source:/page destination:url\n * AST: { name: 'go', args: [/page], modifiers: { to: url } }\n */\nconst goMapper: CommandMapper = {\n action: 'go',\n toAST(node, _builder) {\n const source = convertRoleValue(node, 'source');\n const destination = convertRoleValue(node, 'destination');\n\n const args: ExpressionNode[] = [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n // Source is the URL/location to navigate to\n if (source) args.push(source);\n if (destination) modifiers['to'] = destination;\n\n return createCommandNode('go', args, modifiers);\n },\n};\n\n/**\n * Transition command mapper.\n *\n * Semantic: transition patient:*background-color goal:'red' duration:500ms destination:#element\n * AST: { name: 'transition', args: [*background-color], modifiers: { to: 'red', over: 500ms, on: #element } }\n */\nconst transitionMapper: CommandMapper = {\n action: 'transition',\n toAST(node, _builder) {\n const patient = convertRoleValue(node, 'patient');\n const goal = convertRoleValue(node, 'goal');\n const duration = convertRoleValue(node, 'duration');\n const destination = convertRoleValue(node, 'destination');\n\n const args: ExpressionNode[] = patient ? [patient] : [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n if (goal) modifiers['to'] = goal;\n if (duration) modifiers['over'] = duration;\n if (destination) modifiers['on'] = destination;\n\n return createCommandNode('transition', args, modifiers);\n },\n};\n\n/**\n * Focus command mapper.\n *\n * Semantic: focus destination:#input\n * AST: { name: 'focus', args: [], modifiers: { on: #input } }\n */\nconst focusMapper: CommandMapper = {\n action: 'focus',\n toAST(node, _builder) {\n const destination = convertRoleValue(node, 'destination');\n const patient = convertRoleValue(node, 'patient');\n\n const args: ExpressionNode[] = [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n // Target can be in destination or patient\n const target = destination ?? patient;\n if (target) args.push(target);\n\n return createCommandNode('focus', args, modifiers);\n },\n};\n\n/**\n * Blur command mapper.\n *\n * Semantic: blur destination:#input\n * AST: { name: 'blur', args: [#input] }\n */\nconst blurMapper: CommandMapper = {\n action: 'blur',\n toAST(node, _builder) {\n const destination = convertRoleValue(node, 'destination');\n const patient = convertRoleValue(node, 'patient');\n\n const args: ExpressionNode[] = [];\n const target = destination ?? patient;\n if (target) args.push(target);\n\n return createCommandNode('blur', args);\n },\n};\n\n/**\n * Get command mapper.\n *\n * Semantic: get source:myValue\n * AST: { name: 'get', args: [myValue] }\n */\nconst getMapper: CommandMapper = {\n action: 'get',\n toAST(node, _builder) {\n const source = convertRoleValue(node, 'source');\n const patient = convertRoleValue(node, 'patient');\n\n const args: ExpressionNode[] = [];\n const value = source ?? 
patient;\n if (value) args.push(value);\n\n return createCommandNode('get', args);\n },\n};\n\n/**\n * Take command mapper.\n *\n * Semantic: take patient:.active source:#parent\n * AST: { name: 'take', args: [.active], modifiers: { from: #parent } }\n */\nconst takeMapper: CommandMapper = {\n action: 'take',\n toAST(node, _builder) {\n const patient = convertRoleValue(node, 'patient');\n const source = convertRoleValue(node, 'source');\n\n const args: ExpressionNode[] = patient ? [patient] : [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n if (source) modifiers['from'] = source;\n\n return createCommandNode('take', args, modifiers);\n },\n};\n\n/**\n * Call command mapper.\n *\n * Semantic: call patient:functionName\n * AST: { name: 'call', args: [functionName] }\n */\nconst callMapper: CommandMapper = {\n action: 'call',\n toAST(node, _builder) {\n const patient = convertRoleValue(node, 'patient');\n\n const args: ExpressionNode[] = patient ? [patient] : [];\n\n return createCommandNode('call', args);\n },\n};\n\n/**\n * Return command mapper.\n *\n * Semantic: return patient:value\n * AST: { name: 'return', args: [value] }\n */\nconst returnMapper: CommandMapper = {\n action: 'return',\n toAST(node, _builder) {\n const patient = convertRoleValue(node, 'patient');\n\n const args: ExpressionNode[] = patient ? [patient] : [];\n\n return createCommandNode('return', args);\n },\n};\n\n/**\n * Halt command mapper.\n *\n * Semantic: halt\n * AST: { name: 'halt', args: [] }\n */\nconst haltMapper: CommandMapper = {\n action: 'halt',\n toAST(_node, _builder) {\n return createCommandNode('halt', []);\n },\n};\n\n/**\n * Throw command mapper.\n *\n * Semantic: throw patient:\"error message\"\n * AST: { name: 'throw', args: [\"error message\"] }\n */\nconst throwMapper: CommandMapper = {\n action: 'throw',\n toAST(node, _builder) {\n const patient = convertRoleValue(node, 'patient');\n\n const args: ExpressionNode[] = patient ? [patient] : [];\n\n return createCommandNode('throw', args);\n },\n};\n\n/**\n * Settle command mapper.\n *\n * Semantic: settle destination:#element\n * AST: { name: 'settle', args: [#element] }\n */\nconst settleMapper: CommandMapper = {\n action: 'settle',\n toAST(node, _builder) {\n const destination = convertRoleValue(node, 'destination');\n const patient = convertRoleValue(node, 'patient');\n\n const args: ExpressionNode[] = [];\n const target = destination ?? 
patient;\n if (target) args.push(target);\n\n return createCommandNode('settle', args, undefined, { isBlocking: true });\n },\n};\n\n// =============================================================================\n// Tier 3: Advanced Commands\n// =============================================================================\n\n/**\n * Swap command mapper.\n *\n * Semantic: swap patient:innerHTML destination:#element source:\"<p>new</p>\"\n * AST: { name: 'swap', args: [innerHTML, \"<p>new</p>\"], modifiers: { on: #element } }\n */\nconst swapMapper: CommandMapper = {\n action: 'swap',\n toAST(node, _builder) {\n const patient = convertRoleValue(node, 'patient'); // What to swap (e.g., innerHTML)\n const source = convertRoleValue(node, 'source'); // New content\n const destination = convertRoleValue(node, 'destination'); // Target element\n const style = convertRoleValue(node, 'style'); // Swap strategy (innerHTML, outerHTML, etc.)\n\n const args: ExpressionNode[] = [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n if (patient) args.push(patient);\n if (source) args.push(source);\n if (destination) modifiers['on'] = destination;\n if (style) modifiers['with'] = style;\n\n return createCommandNode('swap', args, modifiers);\n },\n};\n\n/**\n * Morph command mapper.\n *\n * Semantic: morph destination:#element source:\"<div>new</div>\"\n * AST: { name: 'morph', args: [\"<div>new</div>\"], modifiers: { on: #element } }\n */\nconst morphMapper: CommandMapper = {\n action: 'morph',\n toAST(node, _builder) {\n const source = convertRoleValue(node, 'source'); // New HTML\n const destination = convertRoleValue(node, 'destination'); // Target element\n const patient = convertRoleValue(node, 'patient');\n\n const args: ExpressionNode[] = [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n const content = source ?? patient;\n if (content) args.push(content);\n if (destination) modifiers['on'] = destination;\n\n return createCommandNode('morph', args, modifiers);\n },\n};\n\n/**\n * Clone command mapper.\n *\n * Semantic: clone source:#template destination:#container\n * AST: { name: 'clone', args: [#template], modifiers: { into: #container } }\n */\nconst cloneMapper: CommandMapper = {\n action: 'clone',\n toAST(node, _builder) {\n const source = convertRoleValue(node, 'source'); // Element to clone\n const destination = convertRoleValue(node, 'destination'); // Where to put clone\n const patient = convertRoleValue(node, 'patient');\n\n const args: ExpressionNode[] = [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n const target = source ?? patient;\n if (target) args.push(target);\n if (destination) modifiers['into'] = destination;\n\n return createCommandNode('clone', args, modifiers);\n },\n};\n\n/**\n * Make command mapper.\n *\n * Semantic: make patient:Date\n * AST: { name: 'make', args: [Date] }\n */\nconst makeMapper: CommandMapper = {\n action: 'make',\n toAST(node, _builder) {\n const patient = convertRoleValue(node, 'patient'); // Constructor/type\n\n const args: ExpressionNode[] = patient ? 
[patient] : [];\n\n return createCommandNode('make', args);\n },\n};\n\n/**\n * Measure command mapper.\n *\n * Semantic: measure destination:#element patient:width\n * AST: { name: 'measure', args: [width], modifiers: { of: #element } }\n */\nconst measureMapper: CommandMapper = {\n action: 'measure',\n toAST(node, _builder) {\n const patient = convertRoleValue(node, 'patient'); // What to measure\n const destination = convertRoleValue(node, 'destination'); // Element\n const source = convertRoleValue(node, 'source');\n\n const args: ExpressionNode[] = [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n if (patient) args.push(patient);\n const element = destination ?? source;\n if (element) modifiers['of'] = element;\n\n return createCommandNode('measure', args, modifiers);\n },\n};\n\n/**\n * Tell command mapper.\n *\n * Semantic: tell destination:#element\n * AST: { name: 'tell', args: [#element] }\n */\nconst tellMapper: CommandMapper = {\n action: 'tell',\n toAST(node, _builder) {\n const destination = convertRoleValue(node, 'destination');\n const patient = convertRoleValue(node, 'patient');\n\n const args: ExpressionNode[] = [];\n const target = destination ?? patient;\n if (target) args.push(target);\n\n return createCommandNode('tell', args);\n },\n};\n\n/**\n * JS command mapper (inline JavaScript).\n *\n * Semantic: js patient:\"console.log('hello')\"\n * AST: { name: 'js', args: [\"console.log('hello')\"] }\n */\nconst jsMapper: CommandMapper = {\n action: 'js',\n toAST(node, _builder) {\n const patient = convertRoleValue(node, 'patient'); // JS code\n\n const args: ExpressionNode[] = patient ? [patient] : [];\n\n return createCommandNode('js', args);\n },\n};\n\n/**\n * Async command mapper.\n *\n * Semantic: async\n * AST: { name: 'async', args: [] }\n */\nconst asyncMapper: CommandMapper = {\n action: 'async',\n toAST(_node, _builder) {\n return createCommandNode('async', []);\n },\n};\n\n/**\n * If command mapper.\n *\n * Semantic: if condition:x > 5\n * AST: { name: 'if', args: [], modifiers: { condition: x > 5 } }\n */\nconst ifMapper: CommandMapper = {\n action: 'if',\n toAST(node, _builder) {\n const condition = convertRoleValue(node, 'condition');\n\n const args: ExpressionNode[] = condition ? [condition] : [];\n\n return createCommandNode('if', args);\n },\n};\n\n/**\n * Unless command mapper.\n *\n * Semantic: unless condition:x < 5\n * AST: { name: 'unless', args: [x < 5] }\n */\nconst unlessMapper: CommandMapper = {\n action: 'unless',\n toAST(node, _builder) {\n const condition = convertRoleValue(node, 'condition');\n\n const args: ExpressionNode[] = condition ? [condition] : [];\n\n return createCommandNode('unless', args);\n },\n};\n\n/**\n * Repeat command mapper.\n *\n * Semantic: repeat quantity:5\n * AST: { name: 'repeat', args: [5] }\n */\nconst repeatMapper: CommandMapper = {\n action: 'repeat',\n toAST(node, _builder) {\n const quantity = convertRoleValue(node, 'quantity');\n const patient = convertRoleValue(node, 'patient');\n\n const args: ExpressionNode[] = [];\n const count = quantity ?? 
patient;\n if (count) args.push(count);\n\n return createCommandNode('repeat', args);\n },\n};\n\n/**\n * For command mapper.\n *\n * Semantic: for patient:item source:items\n * AST: { name: 'for', args: [item], modifiers: { in: items } }\n */\nconst forMapper: CommandMapper = {\n action: 'for',\n toAST(node, _builder) {\n const patient = convertRoleValue(node, 'patient'); // Loop variable\n const source = convertRoleValue(node, 'source'); // Collection\n\n const args: ExpressionNode[] = patient ? [patient] : [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n if (source) modifiers['in'] = source;\n\n return createCommandNode('for', args, modifiers);\n },\n};\n\n/**\n * While command mapper.\n *\n * Semantic: while condition:x < 10\n * AST: { name: 'while', args: [x < 10] }\n */\nconst whileMapper: CommandMapper = {\n action: 'while',\n toAST(node, _builder) {\n const condition = convertRoleValue(node, 'condition');\n\n const args: ExpressionNode[] = condition ? [condition] : [];\n\n return createCommandNode('while', args);\n },\n};\n\n/**\n * Continue command mapper.\n *\n * Semantic: continue\n * AST: { name: 'continue', args: [] }\n */\nconst continueMapper: CommandMapper = {\n action: 'continue',\n toAST(_node, _builder) {\n return createCommandNode('continue', []);\n },\n};\n\n/**\n * Default command mapper.\n *\n * Semantic: default patient:myVar source:0\n * AST: { name: 'default', args: [myVar], modifiers: { to: 0 } }\n */\nconst defaultMapper: CommandMapper = {\n action: 'default',\n toAST(node, _builder) {\n const patient = convertRoleValue(node, 'patient'); // Variable\n const source = convertRoleValue(node, 'source'); // Default value\n\n const args: ExpressionNode[] = patient ? [patient] : [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n if (source) modifiers['to'] = source;\n\n return createCommandNode('default', args, modifiers);\n },\n};\n\n/**\n * Init command mapper.\n *\n * Semantic: init\n * AST: { name: 'init', args: [] }\n */\nconst initMapper: CommandMapper = {\n action: 'init',\n toAST(_node, _builder) {\n return createCommandNode('init', []);\n },\n};\n\n/**\n * Behavior command mapper.\n *\n * Semantic: behavior patient:MyBehavior\n * AST: { name: 'behavior', args: [MyBehavior] }\n */\nconst behaviorMapper: CommandMapper = {\n action: 'behavior',\n toAST(node, _builder) {\n const patient = convertRoleValue(node, 'patient'); // Behavior name\n\n const args: ExpressionNode[] = patient ? [patient] : [];\n\n return createCommandNode('behavior', args);\n },\n};\n\n/**\n * Install command mapper.\n *\n * Semantic: install patient:MyBehavior destination:#element\n * AST: { name: 'install', args: [MyBehavior], modifiers: { on: #element } }\n */\nconst installMapper: CommandMapper = {\n action: 'install',\n toAST(node, _builder) {\n const patient = convertRoleValue(node, 'patient'); // Behavior to install\n const destination = convertRoleValue(node, 'destination'); // Target element\n\n const args: ExpressionNode[] = patient ? 
[patient] : [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n if (destination) modifiers['on'] = destination;\n\n return createCommandNode('install', args, modifiers);\n },\n};\n\n/**\n * On command mapper (event handler declaration).\n *\n * Semantic: on event:click\n * AST: { name: 'on', args: [click] }\n */\nconst onMapper: CommandMapper = {\n action: 'on',\n toAST(node, _builder) {\n const event = convertRoleValue(node, 'event');\n const source = convertRoleValue(node, 'source'); // 'from' clause\n\n const args: ExpressionNode[] = event ? [event] : [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n if (source) modifiers['from'] = source;\n\n return createCommandNode('on', args, modifiers);\n },\n};\n\n// =============================================================================\n// Mapper Registry\n// =============================================================================\n\nconst mappers: Map<ActionType, CommandMapper> = new Map([\n // Tier 1: Core commands\n ['toggle', toggleMapper],\n ['add', addMapper],\n ['remove', removeMapper],\n ['set', setMapper],\n ['show', showMapper],\n ['hide', hideMapper],\n ['increment', incrementMapper],\n ['decrement', decrementMapper],\n ['wait', waitMapper],\n ['log', logMapper],\n ['put', putMapper],\n ['fetch', fetchMapper],\n // Tier 2: Content manipulation\n ['append', appendMapper],\n ['prepend', prependMapper],\n ['get', getMapper],\n ['take', takeMapper],\n // Tier 2: Events\n ['trigger', triggerMapper],\n ['send', sendMapper],\n ['on', onMapper],\n // Tier 2: Navigation & DOM\n ['go', goMapper],\n ['transition', transitionMapper],\n ['focus', focusMapper],\n ['blur', blurMapper],\n // Tier 2: Control flow\n ['call', callMapper],\n ['return', returnMapper],\n ['halt', haltMapper],\n ['throw', throwMapper],\n ['settle', settleMapper],\n // Tier 3: Advanced DOM\n ['swap', swapMapper],\n ['morph', morphMapper],\n ['clone', cloneMapper],\n ['measure', measureMapper],\n // Tier 3: Object/Types\n ['make', makeMapper],\n ['tell', tellMapper],\n ['default', defaultMapper],\n // Tier 3: JavaScript integration\n ['js', jsMapper],\n ['async', asyncMapper],\n // Tier 3: Conditionals\n ['if', ifMapper],\n ['unless', unlessMapper],\n // Tier 3: Loops\n ['repeat', repeatMapper],\n ['for', forMapper],\n ['while', whileMapper],\n ['continue', continueMapper],\n // Tier 3: Behaviors\n ['init', initMapper],\n ['behavior', behaviorMapper],\n ['install', installMapper],\n]);\n\n/**\n * Get the command mapper for an action type.\n *\n * @param action - The action type\n * @returns The mapper, or undefined if no specific mapper exists\n */\nexport function getCommandMapper(action: ActionType): CommandMapper | undefined {\n return mappers.get(action);\n}\n\n/**\n * Register a custom command mapper.\n *\n * @param mapper - The command mapper to register\n */\nexport function registerCommandMapper(mapper: CommandMapper): void {\n mappers.set(mapper.action, mapper);\n}\n\n/**\n * Get all registered command mappers.\n */\nexport function getRegisteredMappers(): Map<ActionType, CommandMapper> {\n return new Map(mappers);\n}\n","/**\n * Semantic to AST Builder\n *\n * Converts SemanticNodes directly to AST nodes, bypassing the English text\n * generation and re-parsing step.\n *\n * Flow:\n * Japanese → Semantic Parser → SemanticNode → AST Builder → AST\n *\n * Instead of:\n * Japanese → Semantic Parser → SemanticNode → English Text → Parser → AST\n */\n\nimport type {\n SemanticNode,\n CommandSemanticNode,\n EventHandlerSemanticNode,\n 
ConditionalSemanticNode,\n CompoundSemanticNode,\n LoopSemanticNode,\n SemanticRole,\n} from '../types';\n\nimport { convertValue } from './value-converters';\nimport { getCommandMapper, type CommandMapperResult } from './command-mappers';\nimport type { ExpressionNode } from './expression-parser';\n\n// =============================================================================\n// AST Types (compatible with @lokascript/core)\n// =============================================================================\n\n/**\n * Base AST node interface\n */\nexport interface ASTNode {\n readonly type: string;\n readonly start?: number;\n readonly end?: number;\n readonly line?: number;\n readonly column?: number;\n [key: string]: unknown;\n}\n\n/**\n * Command AST node\n */\nexport interface CommandNode extends ASTNode {\n readonly type: 'command';\n readonly name: string;\n readonly args: ExpressionNode[];\n readonly modifiers?: Record<string, ExpressionNode>;\n readonly isBlocking?: boolean;\n readonly implicitTarget?: ExpressionNode;\n}\n\n/**\n * Event handler AST node (compatible with @lokascript/core)\n */\nexport interface EventHandlerNode extends ASTNode {\n readonly type: 'eventHandler';\n /** Primary event name */\n readonly event: string;\n /** All event names when using \"on event1 or event2\" syntax */\n readonly events?: string[];\n /** CSS selector for event delegation (\"from\" keyword) */\n readonly selector?: string;\n /** Target for \"from\" clause (as string or expression) */\n readonly target?: string;\n /** Optional event condition (\"[condition]\" syntax) */\n readonly condition?: ASTNode;\n /** Attribute name for mutation events (\"of @attribute\" syntax) */\n readonly attributeName?: string;\n /** Target element to watch for changes (\"in <target>\" syntax) */\n readonly watchTarget?: ExpressionNode;\n /** Event parameter names to destructure (e.g., ['clientX', 'clientY']) */\n readonly args?: string[];\n /** Event parameters (alias for args) */\n readonly params?: string[];\n /** Handler commands */\n readonly commands: ASTNode[];\n}\n\n/**\n * Conditional AST node (if/else)\n *\n * Note: For runtime compatibility, buildConditional() now produces a CommandNode\n * with condition and branches as args, matching what IfCommand expects.\n * This interface is retained for reference but not used as output.\n */\nexport interface ConditionalNode extends ASTNode {\n readonly type: 'if';\n readonly condition: ExpressionNode;\n readonly thenBranch: ASTNode[];\n readonly elseBranch?: ASTNode[];\n}\n\n/**\n * Command sequence node (runtime-compatible format for chained commands)\n */\nexport interface CommandSequenceNode extends ASTNode {\n readonly type: 'CommandSequence';\n /** Commands in the sequence */\n readonly commands: ASTNode[];\n}\n\n/**\n * Block node (for grouping commands)\n */\nexport interface BlockNode extends ASTNode {\n readonly type: 'block';\n readonly commands: ASTNode[];\n}\n\n// =============================================================================\n// AST Builder\n// =============================================================================\n\nexport interface ASTBuilderOptions {\n /**\n * Fallback function to parse complex expressions that can't be handled\n * directly by the AST builder. 
Uses the expression-parser by default.\n */\n parseExpression?: (input: string) => ExpressionNode | null;\n}\n\n/**\n * Builds AST nodes directly from SemanticNodes.\n */\nexport class ASTBuilder {\n /**\n * Warnings collected during AST building (e.g., type inference issues).\n */\n public warnings: string[] = [];\n\n constructor(_options: ASTBuilderOptions = {}) {\n // Options reserved for future use (e.g., custom expression parser)\n }\n\n /**\n * Build an AST from a SemanticNode.\n *\n * @param node - The semantic node to convert\n * @returns The corresponding AST node\n */\n build(node: SemanticNode): ASTNode {\n switch (node.kind) {\n case 'command':\n return this.buildCommand(node as CommandSemanticNode);\n case 'event-handler':\n return this.buildEventHandler(node as EventHandlerSemanticNode);\n case 'conditional':\n return this.buildConditional(node as ConditionalSemanticNode);\n case 'compound':\n return this.buildCompound(node as CompoundSemanticNode);\n case 'loop':\n return this.buildLoop(node as LoopSemanticNode);\n default:\n throw new Error(`Unknown semantic node kind: ${(node as SemanticNode).kind}`);\n }\n }\n\n /**\n * Build a CommandNode from a CommandSemanticNode.\n */\n private buildCommand(node: CommandSemanticNode): CommandNode {\n const mapper = getCommandMapper(node.action);\n\n if (mapper) {\n // Use command-specific mapper\n const result = mapper.toAST(node, this);\n\n // Handle both new CommandMapperResult format and legacy CommandNode format\n if ('ast' in result && 'warnings' in result) {\n // New format with warnings\n const mapperResult = result as CommandMapperResult;\n this.warnings.push(...mapperResult.warnings);\n return mapperResult.ast;\n } else {\n // Legacy format (just CommandNode)\n return result as CommandNode;\n }\n }\n\n // Fallback: generic command mapping\n return this.buildGenericCommand(node);\n }\n\n /**\n * Generic command builder when no specific mapper is available.\n * Maps roles to args in a predictable order.\n */\n private buildGenericCommand(node: CommandSemanticNode): CommandNode {\n const args: ExpressionNode[] = [];\n const modifiers: Record<string, ExpressionNode> = {};\n\n // Standard role-to-position mapping\n // Note: Using only valid SemanticRoles from the type definition\n const argRoles: SemanticRole[] = ['patient', 'source', 'quantity'];\n const modifierRoles: SemanticRole[] = ['destination', 'duration', 'method', 'style'];\n\n // Convert argument roles\n for (const role of argRoles) {\n const value = node.roles.get(role);\n if (value) {\n args.push(convertValue(value));\n }\n }\n\n // Convert modifier roles\n for (const role of modifierRoles) {\n const value = node.roles.get(role);\n if (value) {\n // Map semantic roles to hyperscript modifier keywords\n const modifierKey = this.roleToModifierKey(role);\n modifiers[modifierKey] = convertValue(value);\n }\n }\n\n const result: CommandNode = {\n type: 'command',\n name: node.action,\n args,\n };\n\n // Only add modifiers if there are any (avoid exactOptionalPropertyTypes issue)\n if (Object.keys(modifiers).length > 0) {\n return { ...result, modifiers };\n }\n\n return result;\n }\n\n /**\n * Map semantic roles to hyperscript modifier keywords.\n */\n private roleToModifierKey(role: SemanticRole): string {\n const mapping: Partial<Record<SemanticRole, string>> = {\n destination: 'on',\n duration: 'for',\n source: 'from',\n condition: 'if',\n method: 'via',\n style: 'with',\n };\n return mapping[role] ?? 
role;\n }\n\n /**\n * Build an EventHandlerNode from an EventHandlerSemanticNode.\n */\n private buildEventHandler(node: EventHandlerSemanticNode): EventHandlerNode {\n // Extract event name(s)\n const eventValue = node.roles.get('event');\n let event: string;\n let events: string[] | undefined;\n\n if (eventValue?.type === 'literal') {\n const eventStr = String(eventValue.value);\n // Handle \"click or keydown\" syntax\n if (eventStr.includes('|') || eventStr.includes(' or ')) {\n events = eventStr.split(/\\s+or\\s+|\\|/).map(e => e.trim());\n event = events[0];\n } else {\n event = eventStr;\n }\n } else if (eventValue?.type === 'reference') {\n event = eventValue.value;\n } else {\n event = 'click'; // Default event\n }\n\n // Build body commands recursively\n const commands = node.body.map(child => this.build(child));\n\n // Get selector/target from 'source' role if present\n const fromValue = node.roles.get('source');\n let selector: string | undefined;\n let target: string | undefined;\n\n if (fromValue?.type === 'selector') {\n selector = fromValue.value;\n target = fromValue.value;\n } else if (fromValue?.type === 'reference') {\n target = fromValue.value;\n } else if (fromValue?.type === 'literal') {\n target = String(fromValue.value);\n }\n\n // Get condition from 'condition' role if present\n const conditionValue = node.roles.get('condition');\n const condition = conditionValue ? convertValue(conditionValue) : undefined;\n\n // Get destination (watchTarget) if present\n const destinationValue = node.roles.get('destination');\n const watchTarget = destinationValue ? convertValue(destinationValue) : undefined;\n\n // Extract event modifiers\n const modifiers = node.eventModifiers;\n\n // Handle queue modifier (debounce, throttle, etc. are runtime concerns)\n let finalSelector = selector;\n if (modifiers?.from) {\n const fromMod = modifiers.from;\n if (fromMod.type === 'selector' && !selector) {\n finalSelector = fromMod.value;\n }\n }\n\n // Extract event parameter names for destructuring (e.g., on click(clientX, clientY))\n const args = node.parameterNames ? [...node.parameterNames] : undefined;\n\n // Build result with spread for optional properties (exactOptionalPropertyTypes compliant)\n return {\n type: 'eventHandler' as const,\n event,\n commands,\n ...(events && events.length > 1 ? { events } : {}),\n ...(finalSelector ? { selector: finalSelector } : {}),\n ...(target ? { target } : {}),\n ...(condition ? { condition: condition as ASTNode } : {}),\n ...(watchTarget ? { watchTarget } : {}),\n ...(args && args.length > 0 ? { args, params: args } : {}),\n };\n }\n\n /**\n * Build a CommandNode from a ConditionalSemanticNode.\n *\n * Produces a command node with:\n * - args[0]: condition expression\n * - args[1]: then block (wrapped in { type: 'block', commands: [...] 
})\n * - args[2]: else block (optional, same format)\n *\n * This format matches what IfCommand.parseInput() expects.\n */\n private buildConditional(node: ConditionalSemanticNode): CommandNode {\n const conditionValue = node.roles.get('condition');\n if (!conditionValue) {\n throw new Error('Conditional node missing condition');\n }\n\n const condition = convertValue(conditionValue);\n const thenBranch = node.thenBranch.map(child => this.build(child));\n const elseBranch = node.elseBranch?.map(child => this.build(child));\n\n // Build args array matching IfCommand expected format\n const args: ExpressionNode[] = [\n condition,\n // args[1]: then block wrapped as block node\n {\n type: 'block',\n commands: thenBranch,\n } as unknown as ExpressionNode,\n ];\n\n // args[2]: else block (if present)\n if (elseBranch && elseBranch.length > 0) {\n args.push({\n type: 'block',\n commands: elseBranch,\n } as unknown as ExpressionNode);\n }\n\n return {\n type: 'command',\n name: 'if',\n args,\n };\n }\n\n /**\n * Build AST nodes from a CompoundSemanticNode.\n *\n * Converts to CommandSequence for runtime compatibility.\n * The runtime recognizes 'CommandSequence' type and executes commands in order.\n */\n private buildCompound(node: CompoundSemanticNode): ASTNode {\n // Build all statements recursively\n const statements = node.statements.map(child => this.build(child));\n\n // Single statement: unwrap and return directly\n if (statements.length === 1) {\n return statements[0];\n }\n\n // Empty: return a no-op block\n if (statements.length === 0) {\n return {\n type: 'block',\n commands: [],\n };\n }\n\n // Convert to CommandSequence for runtime compatibility\n // Runtime handles 'CommandSequence' type in executeCommandSequence()\n const result: CommandSequenceNode = {\n type: 'CommandSequence',\n commands: statements,\n };\n\n return result;\n }\n\n /**\n * Build a CommandNode from a LoopSemanticNode.\n *\n * Produces a 'repeat' command with:\n * - args[0]: loop type identifier (forever, times, for, while, until)\n * - args[1]: count/condition/variable depending on loop type\n * - args[2]: collection (for 'for' loops)\n * - args[last]: body block\n *\n * This format matches what the repeat command parser produces.\n */\n private buildLoop(node: LoopSemanticNode): CommandNode {\n // Build body commands recursively\n const bodyCommands = node.body.map(child => this.build(child));\n\n const args: ExpressionNode[] = [\n // args[0]: loop type identifier\n {\n type: 'identifier',\n name: node.loopVariant,\n } as unknown as ExpressionNode,\n ];\n\n // Add loop-specific arguments based on variant\n switch (node.loopVariant) {\n case 'times': {\n // args[1]: count expression\n const quantity = node.roles.get('quantity');\n if (quantity) {\n args.push(convertValue(quantity));\n }\n break;\n }\n case 'for': {\n // args[1]: loop variable name\n if (node.loopVariable) {\n args.push({\n type: 'string',\n value: node.loopVariable,\n } as unknown as ExpressionNode);\n }\n // args[2]: collection/source\n const source = node.roles.get('source');\n if (source) {\n args.push(convertValue(source));\n }\n break;\n }\n case 'while':\n case 'until': {\n // args[1]: condition expression\n const condition = node.roles.get('condition');\n if (condition) {\n args.push(convertValue(condition));\n }\n break;\n }\n case 'forever':\n // No additional args needed for forever loops\n break;\n }\n\n // args[last]: body block\n args.push({\n type: 'block',\n commands: bodyCommands,\n } as unknown as ExpressionNode);\n\n return {\n 
type: 'command',\n name: 'repeat',\n args,\n };\n }\n\n /**\n * Build a BlockNode from an array of semantic nodes.\n * Useful for grouping commands in if/else branches.\n */\n buildBlock(nodes: SemanticNode[]): BlockNode {\n const commands = nodes.map(child => this.build(child));\n return {\n type: 'block',\n commands,\n };\n }\n}\n\n// =============================================================================\n// Convenience Function\n// =============================================================================\n\n/**\n * Result from building an AST, including any warnings.\n */\nexport interface BuildASTResult {\n ast: ASTNode;\n warnings: string[];\n}\n\n/**\n * Build an AST from a SemanticNode using default options.\n *\n * @param node - The semantic node to convert\n * @returns The corresponding AST node and any warnings\n */\nexport function buildAST(node: SemanticNode): BuildASTResult {\n const builder = new ASTBuilder();\n const ast = builder.build(node);\n return {\n ast,\n warnings: builder.warnings,\n };\n}\n\n// Re-exports from value-converters\nexport {\n convertValue,\n convertLiteral,\n convertSelector,\n convertReference,\n convertPropertyPath,\n convertExpression,\n} from './value-converters';\n\n// Re-exports from command-mappers\nexport {\n getCommandMapper,\n registerCommandMapper,\n getRegisteredMappers,\n type CommandMapper,\n type CommandMapperResult,\n} from './command-mappers';\n"],"mappings":";AAugBO,SAAS,eAAe,OAA8B;AAC3D,MAAI,eAA8C;AAElD,MAAI,MAAM,WAAW,GAAG,KAAK,CAAC,MAAM,SAAS,GAAG,GAAG;AACjD,mBAAe;AAAA,EACjB,WAAW,MAAM,WAAW,GAAG,KAAK,CAAC,MAAM,SAAS,GAAG,GAAG;AACxD,mBAAe;AAAA,EACjB,WAAW,MAAM,WAAW,GAAG,KAAK,MAAM,SAAS,GAAG,GAAG;AACvD,mBAAe;AAAA,EACjB,WAAW,oBAAoB,KAAK,KAAK,GAAG;AAC1C,mBAAe;AAAA,EACjB;AAEA,SAAO,EAAE,MAAM,YAAY,OAAO,aAAa;AACjD;AAKO,SAAS,cACd,OACA,UACc;AACd,QAAM,SAAuB,EAAE,MAAM,WAAW,MAAM;AACtD,MAAI,aAAa,QAAW;AAC1B,WAAO,EAAE,MAAM,WAAW,OAAO,SAAS;AAAA,EAC5C;AACA,SAAO;AACT;AAKO,SAAS,gBAAgB,OAAgD;AAC9E,SAAO,EAAE,MAAM,aAAa,MAAM;AACpC;AAKO,SAAS,mBAAmB,QAAuB,UAAqC;AAC7F,SAAO,EAAE,MAAM,iBAAiB,QAAQ,SAAS;AACnD;AAKO,SAAS,kBACd,QACA,OACA,UACqB;AACrB,QAAM,OAA4B;AAAA,IAChC,MAAM;AAAA,IACN;AAAA,IACA,OAAO,IAAI,IAAI,OAAO,QAAQ,KAAK,CAAoC;AAAA,EACzE;AACA,MAAI,aAAa,QAAW;AAC1B,WAAO,EAAE,GAAG,MAAM,SAAS;AAAA,EAC7B;AACA,SAAO;AACT;AAKO,SAAS,mBACd,OACA,MACA,WACA,UACA,gBAC0B;AAC1B,QAAM,QAAQ,oBAAI,IAAiC;AACnD,QAAM,IAAI,SAAS,KAAK;AAExB,QAAM,OAAiC;AAAA,IACrC,MAAM;AAAA,IACN,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,EACF;AAEA,MAAI,cAAc,QAAW;AAC3B,IAAC,KAA6C,iBAAiB;AAAA,EACjE;AACA,MAAI,aAAa,QAAW;AAC1B,IAAC,KAAyC,WAAW;AAAA,EACvD;AACA,MAAI,mBAAmB,UAAa,eAAe,SAAS,GAAG;AAC7D,IAAC,KAAgD,iBAAiB;AAAA,EACpE;AAEA,SAAO;AACT;;;AC7kBO,SAAS,iBACd,YACA,eACS;AAET,MAAI,CAAC,iBAAiB,cAAc,WAAW,GAAG;AAChD,WAAO;AAAA,EACT;AAGA,MAAI,cAAc,SAAS,UAAU,GAAG;AACtC,WAAO;AAAA,EACT;AAGA,MAAI,cAAc,SAAS,YAAY,GAAG;AACxC,WAAO;AAAA,EACT;AAGA,MAAI,eAAe,iBAAiB;AAClC,WAAO,cAAc,KAAK,OAAK,CAAC,YAAY,aAAa,YAAY,EAAE,SAAS,CAAC,CAAC;AAAA,EACpF;AAEA,SAAO;AACT;;;ACjCO,SAAS,uBACd,SACA,SACoB;AACpB,SAAO,QAAQ,YAAY,WAAW,OAAO;AAC/C;;;ACyDA,IAAM,aAAa,oBAAI,IAA+B;AACtD,IAAM,WAAW,oBAAI,IAA6B;AAClD,IAAM,eAAe,oBAAI,IAA+B;AAMxD,IAAI,mBAA6E;AAUjF,SAAS,UAA4B,MAAS,SAAwB;AACpE,QAAM,SAAS,EAAE,GAAG,KAAK;AAEzB,aAAW,OAAO,OAAO,KAAK,OAAO,GAAkB;AACrD,UAAM,eAAe,QAAQ,GAAG;AAChC,UAAM,YAAY,KAAK,GAAG;AAE1B,QAAI,iBAAiB,QAAW;AAC9B;AAAA,IACF;AAGA,QACE,OAAO,iBAAiB,YACxB,iBAAiB,QACjB,CAAC,MAAM,QAAQ,YAAY,KAC3B,OAAO,cAAc,YACrB,cAAc,QACd,CAAC,MAAM,QAAQ,SAAS,GACxB;AACA,aAAO,GAAG,IAAI;AAAA,QACZ;AAAA,QACA;AAAA,MACF;AAAA,IACF,OAAO;AAEL,aAAO,GAAG,IAAI;AAAA,IAChB;AAAA,EACF;AAEA,SAAO;AA
CT;AAiBO,SAAS,cACd,MACA,SACiB;AACjB,SAAO,UAAU,MAAM,OAAO;AAChC;AAMO,SAAS,eAAe,SAA2C;AACxE,MAAI,CAAC,QAAQ,SAAS;AACpB,WAAO;AAAA,EACT;AAEA,QAAM,cAAc,SAAS,IAAI,QAAQ,OAAO;AAChD,MAAI,CAAC,aAAa;AAChB,YAAQ;AAAA,MACN,uBAAuB,QAAQ,IAAI,cAAc,QAAQ,OAAO;AAAA,IAElE;AACA,WAAO;AAAA,EACT;AAGA,QAAM,eAAe,eAAe,WAAW;AAG/C,SAAO,cAAc,cAAc,OAAO;AAC5C;AAWO,SAAS,iBACd,MACA,WACA,SACM;AACN,aAAW,IAAI,MAAM,SAAS;AAE9B,WAAS,IAAI,MAAM,OAAO;AAE1B,eAAa,OAAO,IAAI;AAC1B;AAqBO,SAAS,oBACd,WACM;AACN,qBAAmB;AACrB;AAGA,IAAM,qBAAqB,oBAAI,IAA+B;AAOvD,SAAS,iBAAiB,MAAc,UAAmC;AAChF,qBAAmB,IAAI,MAAM,QAAQ;AAErC,eAAa,OAAO,IAAI;AAC1B;AAiIO,SAAS,oBAAoB,MAAsB;AACxD,SAAO,KAAK,MAAM,GAAG,EAAE,CAAC;AAC1B;AAMO,SAAS,kBAAkB,MAAuB;AACvD,SAAO,KAAK,SAAS,GAAG;AAC1B;AAWO,SAAS,aAAa,MAAiC;AAE5D,MAAI,YAAY,WAAW,IAAI,IAAI;AAGnC,MAAI,CAAC,aAAa,kBAAkB,IAAI,GAAG;AACzC,UAAM,WAAW,oBAAoB,IAAI;AACzC,gBAAY,WAAW,IAAI,QAAQ;AAAA,EACrC;AAEA,MAAI,CAAC,WAAW;AACd,UAAM,aAAa,MAAM,KAAK,WAAW,KAAK,CAAC,EAAE,KAAK,IAAI;AAC1D,UAAM,IAAI;AAAA,MACR,aAAa,IAAI,8CACU,cAAc,MAAM,8EAC+B,IAAI;AAAA,IACpF;AAAA,EACF;AACA,SAAO;AACT;AAOO,SAAS,WAAW,MAA+B;AAExD,MAAI,UAAU,SAAS,IAAI,IAAI;AAG/B,MAAI,CAAC,WAAW,kBAAkB,IAAI,GAAG;AACvC,UAAM,WAAW,oBAAoB,IAAI;AACzC,cAAU,SAAS,IAAI,QAAQ;AAAA,EACjC;AAEA,MAAI,CAAC,SAAS;AACZ,UAAM,aAAa,MAAM,KAAK,SAAS,KAAK,CAAC,EAAE,KAAK,IAAI;AACxD,UAAM,IAAI;AAAA,MACR,qBAAqB,IAAI,8CACE,cAAc,MAAM,8EAC+B,IAAI;AAAA,IACpF;AAAA,EACF;AAGA,SAAO,eAAe,OAAO;AAC/B;AAMO,SAAS,gBAAgB,MAA6C;AAC3E,MAAI,YAAY,WAAW,IAAI,IAAI;AACnC,MAAI,CAAC,aAAa,kBAAkB,IAAI,GAAG;AACzC,gBAAY,WAAW,IAAI,oBAAoB,IAAI,CAAC;AAAA,EACtD;AACA,SAAO;AACT;AAMO,SAAS,cAAc,MAA2C;AACvE,MAAI,UAAU,SAAS,IAAI,IAAI;AAC/B,MAAI,CAAC,WAAW,kBAAkB,IAAI,GAAG;AACvC,cAAU,SAAS,IAAI,oBAAoB,IAAI,CAAC;AAAA,EAClD;AAEA,SAAO,UAAU,eAAe,OAAO,IAAI;AAC7C;AAKO,SAAS,yBAAmC;AACjD,SAAO,MAAM,KAAK,WAAW,KAAK,CAAC;AACrC;AAKO,SAAS,qBAAqB,MAAuB;AAC1D,MAAI,WAAW,IAAI,IAAI,KAAK,SAAS,IAAI,IAAI,GAAG;AAC9C,WAAO;AAAA,EACT;AAEA,MAAI,kBAAkB,IAAI,GAAG;AAC3B,UAAM,WAAW,oBAAoB,IAAI;AACzC,WAAO,WAAW,IAAI,QAAQ,KAAK,SAAS,IAAI,QAAQ;AAAA,EAC1D;AACA,SAAO;AACT;AAMO,SAAS,oBAAoB,MAAuB;AACzD,MAAI,WAAW,IAAI,IAAI,GAAG;AACxB,WAAO;AAAA,EACT;AAEA,MAAI,kBAAkB,IAAI,GAAG;AAC3B,WAAO,WAAW,IAAI,oBAAoB,IAAI,CAAC;AAAA,EACjD;AACA,SAAO;AACT;AA0BO,SAAS,uBAAuB,MAAiC;AAEtE,MAAI,SAAS,aAAa,IAAI,IAAI;AAClC,MAAI,CAAC,UAAU,kBAAkB,IAAI,GAAG;AACtC,aAAS,aAAa,IAAI,oBAAoB,IAAI,CAAC;AAAA,EACrD;AACA,MAAI,QAAQ;AACV,WAAO;AAAA,EACT;AAIA,MAAI,aAAa,mBAAmB,IAAI,IAAI;AAC5C,MAAI,CAAC,cAAc,kBAAkB,IAAI,GAAG;AAC1C,iBAAa,mBAAmB,IAAI,oBAAoB,IAAI,CAAC;AAAA,EAC/D;AACA,MAAI,YAAY;AACd,iBAAa,IAAI,MAAM,UAAU;AACjC,WAAO;AAAA,EACT;AAGA,MAAI,CAAC,kBAAkB;AACrB,UAAM,IAAI;AAAA,MACR,wCAAwC,IAAI;AAAA,IAE9C;AAAA,EACF;AAGA,QAAM,UAAU,WAAW,IAAI;AAC/B,QAAM,WAAW,iBAAiB,OAAO;AACzC,eAAa,IAAI,MAAM,QAAQ;AAC/B,SAAO;AACT;AAKO,SAAS,iCACd,UACA,SACmB;AACnB,SAAO,uBAAuB,QAAQ,EACnC,OAAO,OAAK,EAAE,YAAY,OAAO,EACjC,KAAK,CAAC,GAAG,MAAM,EAAE,WAAW,EAAE,QAAQ;AAC3C;;;AC3hBO,IAAM,kBAAN,MAAM,gBAAe;AAAA,EAArB;AAwrBL;AAAA;AAAA;AAAA;AAAA,SAAQ,iBAAyB;AACjC,SAAQ,sBAA8B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAjrBtC,aAAa,QAAqB,SAAqD;AACrF,UAAM,OAAO,OAAO,KAAK;AACzB,UAAM,WAAW,oBAAI,IAAiC;AAGtD,SAAK,iBAAiB,cAAc,QAAQ,QAAQ;AAGpD,SAAK,iBAAiB;AACtB,SAAK,sBAAsB;AAE3B,UAAM,UAAU,KAAK,mBAAmB,QAAQ,QAAQ,SAAS,QAAQ,QAAQ;AAEjF,QAAI,CAAC,SAAS;AACZ,aAAO,MAAM,IAAI;AACjB,aAAO;AAAA,IACT;AAIA,UAAM,aAAa,KAAK,oBAAoB,SAAS,QAAQ;AAG7D,SAAK,qBAAqB,SAAS,QAAQ;AAE3C,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA,gBAAgB,OAAO,SAAS,IAAI,KAAK;AAAA,MACzC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,QAAqB,UAAwD;AACrF,UAAM,UAAgC,CAAC;AAEvC,eAAW,WAAW,UAAU;AAC9B,YAAM,OAAO,OAAO,KAAK;AACzB,YAAM,SAAS,KAAK,aAAa,QAAQ,OAAO;AAEhD,UAAI,QAAQ;AAC
V,gBAAQ,KAAK,MAAM;AAAA,MACrB;AAEA,aAAO,MAAM,IAAI;AAAA,IACnB;AAEA,QAAI,QAAQ,WAAW,GAAG;AACxB,aAAO;AAAA,IACT;AAGA,YAAQ,KAAK,CAAC,GAAG,MAAM;AAErB,YAAM,eAAe,EAAE,QAAQ,WAAW,EAAE,QAAQ;AACpD,UAAI,iBAAiB,EAAG,QAAO;AAG/B,aAAO,EAAE,aAAa,EAAE;AAAA,IAC1B,CAAC;AAGD,UAAM,OAAO,QAAQ,CAAC;AACtB,SAAK,aAAa,QAAQ,KAAK,OAAO;AAEtC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,mBACN,QACA,eACA,UACS;AAGT,UAAM,oBAAoB,cAAc,CAAC;AACzC,UAAM,4BACJ,mBAAmB,SAAS,cAC3B,kBAAkB,UAAU,SAC3B,kBAAkB,UAAU,UAC5B,kBAAkB,cAAc,SAAS,KAAK,KAC9C,kBAAkB,cAAc,SAAS,MAAM;AAEnD,QAAI,KAAK,gBAAgB,SAAS,QAAQ,CAAC,2BAA2B;AACpE,aAAO,OAAO,KAAK,GAAG,SAAS,eAAe;AAC5C,eAAO,QAAQ;AAAA,MACjB;AAAA,IACF;AAEA,eAAW,gBAAgB,eAAe;AACxC,YAAM,UAAU,KAAK,kBAAkB,QAAQ,cAAc,QAAQ;AAErE,UAAI,CAAC,SAAS;AAEZ,YAAI,KAAK,WAAW,YAAY,GAAG;AACjC;AAAA,QACF;AACA,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,kBACN,QACA,cACA,UACS;AACT,YAAQ,aAAa,MAAM;AAAA,MACzB,KAAK;AACH,eAAO,KAAK,kBAAkB,QAAQ,YAAY;AAAA,MAEpD,KAAK;AACH,eAAO,KAAK,eAAe,QAAQ,cAAc,QAAQ;AAAA,MAE3D,KAAK;AACH,eAAO,KAAK,gBAAgB,QAAQ,cAAc,QAAQ;AAAA,MAE5D;AACE,eAAO;AAAA,IACX;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,kBACN,QACA,cACS;AACT,UAAM,QAAQ,OAAO,KAAK;AAC1B,QAAI,CAAC,MAAO,QAAO;AAGnB,UAAM,YAAY,KAAK,aAAa,OAAO,aAAa,KAAK;AAC7D,QAAI,cAAc,QAAQ;AACxB,WAAK;AACL,UAAI,cAAc,QAAQ;AACxB,aAAK;AAAA,MACP;AACA,aAAO,QAAQ;AACf,aAAO;AAAA,IACT;AAGA,QAAI,aAAa,cAAc;AAC7B,iBAAW,OAAO,aAAa,cAAc;AAC3C,cAAM,eAAe,KAAK,aAAa,OAAO,GAAG;AACjD,YAAI,iBAAiB,QAAQ;AAC3B,eAAK;AACL,cAAI,iBAAiB,QAAQ;AAC3B,iBAAK;AAAA,UACP;AACA,iBAAO,QAAQ;AACf,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASQ,eACN,QACA,cACA,UACS;AAET,SAAK,eAAe,MAAM;AAE1B,UAAM,QAAQ,OAAO,KAAK;AAC1B,QAAI,CAAC,OAAO;AACV,aAAO,aAAa,YAAY;AAAA,IAClC;AAGA,UAAM,kBAAkB,KAAK,6BAA6B,MAAM;AAChE,QAAI,iBAAiB;AAEnB,UAAI,aAAa,iBAAiB,aAAa,cAAc,SAAS,GAAG;AACvE,YACE,CAAC,aAAa,cAAc,SAAS,gBAAgB,IAAI,KACzD,CAAC,aAAa,cAAc,SAAS,YAAY,GACjD;AACA,iBAAO,aAAa,YAAY;AAAA,QAClC;AAAA,MACF;AACA,eAAS,IAAI,aAAa,MAAM,eAAe;AAC/C,aAAO;AAAA,IACT;AAGA,UAAM,kBAAkB,KAAK,6BAA6B,MAAM;AAChE,QAAI,iBAAiB;AACnB,UAAI,aAAa,iBAAiB,aAAa,cAAc,SAAS,GAAG;AACvE,YACE,CAAC,aAAa,cAAc,SAAS,gBAAgB,IAAI,KACzD,CAAC,aAAa,cAAc,SAAS,YAAY,GACjD;AACA,iBAAO,aAAa,YAAY;AAAA,QAClC;AAAA,MACF;AACA,eAAS,IAAI,aAAa,MAAM,eAAe;AAC/C,aAAO;AAAA,IACT;AAGA,UAAM,0BAA0B,KAAK,qCAAqC,MAAM;AAChF,QAAI,yBAAyB;AAC3B,UAAI,aAAa,iBAAiB,aAAa,cAAc,SAAS,GAAG;AAEvE,YAAI,CAAC,iBAAiB,wBAAwB,MAAM,aAAa,aAAa,GAAG;AAC/E,iBAAO,aAAa,YAAY;AAAA,QAClC;AAAA,MACF;AACA,eAAS,IAAI,aAAa,MAAM,uBAAuB;AACvD,aAAO;AAAA,IACT;AAGA,UAAM,sBAAsB,KAAK,iCAAiC,MAAM;AACxE,QAAI,qBAAqB;AACvB,UAAI,aAAa,iBAAiB,aAAa,cAAc,SAAS,GAAG;AACvE,YACE,CAAC,aAAa,cAAc,SAAS,oBAAoB,IAAI,KAC7D,CAAC,aAAa,cAAc,SAAS,YAAY,GACjD;AACA,iBAAO,aAAa,YAAY;AAAA,QAClC;AAAA,MACF;AACA,eAAS,IAAI,aAAa,MAAM,mBAAmB;AACnD,aAAO;AAAA,IACT;AAIA,UAAM,wBAAwB,KAAK,mCAAmC,MAAM;AAC5E,QAAI,uBAAuB;AACzB,UAAI,aAAa,iBAAiB,aAAa,cAAc,SAAS,GAAG;AACvE,YAAI,CAAC,iBAAiB,sBAAsB,MAAM,aAAa,aAAa,GAAG;AAC7E,iBAAO,aAAa,YAAY;AAAA,QAClC;AAAA,MACF;AACA,eAAS,IAAI,aAAa,MAAM,qBAAqB;AACrD,aAAO;AAAA,IACT;AAGA,UAAM,QAAQ,KAAK,qBAAqB,KAAK;AAC7C,QAAI,CAAC,OAAO;AACV,aAAO,aAAa,YAAY;AAAA,IAClC;AAGA,QAAI,aAAa,iBAAiB,aAAa,cAAc,SAAS,GAAG;AACvE,UAAI,CAAC,aAAa,cAAc,SAAS,MAAM,IAAI,GAAG;AACpD,eAAO,aAAa,YAAY;AAAA,MAClC;AAAA,IACF;AAEA,aAAS,IAAI,aAAa,MAAM,KAAK;AACrC,WAAO,QAAQ;AACf,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,6BAA6B,QAA2C;AAC9E,UAAM,QAAQ,OAAO,KAAK;AAC1B,QAAI,CAAC,MAAO,QAAO;AAGnB,QAAI,CAAC,KAAK,eAAgB,QAAO;AAEjC,UAAM,cAAc,MAAM,cAAc,MAAM,OAAO,YAAY;AACjE,UAAM,UAAU,uBAAuB,KAAK,gBAAgB,UAAU;AAEtE,QAAI,CAAC,QAAS,QAAO;AAGrB,UAAM,OAAO,OAAO,KAAK;AACzB,WA
AO,QAAQ;AAEf,UAAM,gBAAgB,OAAO,KAAK;AAClC,QAAI,CAAC,eAAe;AAElB,aAAO,MAAM,IAAI;AACjB,aAAO;AAAA,IACT;AAIA,QACE,cAAc,SAAS,gBACtB,cAAc,SAAS,aAAa,CAAC,KAAK,oBAAoB,cAAc,KAAK,KACjF,cAAc,SAAS,cAAc,cAAc,MAAM,WAAW,GAAG,GACxE;AACA,aAAO,QAAQ;AAGf,aAAO,mBAAmB,gBAAgB,OAAc,GAAG,cAAc,KAAK;AAAA,IAChF;AAGA,WAAO,MAAM,IAAI;AACjB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,oBAAoB,OAAwB;AAClD,UAAM,aAAa,oBAAI,IAAI;AAAA;AAAA,MAEzB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AACD,WAAO,WAAW,IAAI,MAAM,YAAY,CAAC;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,6BAA6B,QAA2C;AAC9E,UAAM,QAAQ,OAAO,KAAK;AAC1B,QAAI,CAAC,SAAS,MAAM,SAAS,WAAY,QAAO;AAGhD,UAAM,OAAO,OAAO,KAAK;AACzB,WAAO,QAAQ;AAEf,UAAM,WAAW,OAAO,KAAK;AAC7B,QAAI,CAAC,YAAY,SAAS,SAAS,cAAc,SAAS,UAAU,KAAK;AACvE,aAAO,MAAM,IAAI;AACjB,aAAO;AAAA,IACT;AACA,WAAO,QAAQ;AAEf,UAAM,cAAc,OAAO,KAAK;AAChC,QAAI,CAAC,eAAe,YAAY,SAAS,cAAc;AACrD,aAAO,MAAM,IAAI;AACjB,aAAO;AAAA,IACT;AACA,WAAO,QAAQ;AAEf,UAAM,YAAY,OAAO,KAAK;AAC9B,QAAI,CAAC,aAAa,UAAU,SAAS,iBAAiB,UAAU,UAAU,KAAK;AAC7E,aAAO,MAAM,IAAI;AACjB,aAAO;AAAA,IACT;AACA,WAAO,QAAQ;AAGf,UAAM,OAAiB,CAAC;AACxB,WAAO,CAAC,OAAO,QAAQ,KAAK,KAAK,SAAS,gBAAe,iBAAiB;AACxE,YAAM,WAAW,OAAO,KAAK;AAC7B,UAAI,CAAC,SAAU;AACf,UAAI,SAAS,SAAS,iBAAiB,SAAS,UAAU,KAAK;AAC7D,eAAO,QAAQ;AACf;AAAA,MACF;AAEA,UAAI,SAAS,SAAS,iBAAiB,SAAS,UAAU,KAAK;AAC7D,eAAO,QAAQ;AACf;AAAA,MACF;AAEA,WAAK,KAAK,SAAS,KAAK;AACxB,aAAO,QAAQ;AAAA,IACjB;AAGA,UAAM,aAAa,GAAG,MAAM,KAAK,IAAI,YAAY,KAAK,IAAI,KAAK,KAAK,IAAI,CAAC;AACzE,WAAO;AAAA,MACL,MAAM;AAAA,MACN,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,iCAAiC,QAA2C;AAClF,UAAM,QAAQ,OAAO,KAAK;AAC1B,QAAI,CAAC,MAAO,QAAO;AAGnB,QAAI,MAAM,SAAS,gBAAgB,MAAM,SAAS,UAAW,QAAO;AAGpE,UAAM,OAAO,OAAO,KAAK;AACzB,WAAO,QAAQ;AAEf,UAAM,WAAW,OAAO,KAAK;AAC7B,QAAI,CAAC,YAAY,SAAS,SAAS,cAAc,SAAS,UAAU,KAAK;AACvE,aAAO,MAAM,IAAI;AACjB,aAAO;AAAA,IACT;AACA,WAAO,QAAQ;AAEf,UAAM,gBAAgB,OAAO,KAAK;AAClC,QAAI,CAAC,iBAAiB,cAAc,SAAS,cAAc;AACzD,aAAO,MAAM,IAAI;AACjB,aAAO;AAAA,IACT;AACA,WAAO,QAAQ;AAGf,QAAI,QAAQ,GAAG,MAAM,KAAK,IAAI,cAAc,KAAK;AACjD,QAAI,QAAQ;AAIZ,WAAO,CAAC,OAAO,QAAQ,KAAK,QAAQ,gBAAe,oBAAoB;AACrE,YAAM,UAAU,OAAO,KAAK;AAC5B,UAAI,CAAC,WAAW,QAAQ,SAAS,cAAc,QAAQ,UAAU,KAAK;AACpE;AAAA,MACF;AACA,aAAO,QAAQ;AAEf,YAAM,WAAW,OAAO,KAAK;AAC7B,UAAI,CAAC,YAAY,SAAS,SAAS,cAAc;AAG/C;AAAA,MACF;AACA,aAAO,QAAQ;AACf,eAAS,IAAI,SAAS,KAAK;AAC3B;AAAA,IACF;AAIA,UAAM,YAAY,OAAO,KAAK;AAC9B,QAAI,aAAa,UAAU,SAAS,iBAAiB,UAAU,UAAU,KAAK;AAC5E,aAAO,QAAQ;AAGf,YAAM,OAAiB,CAAC;AACxB,UAAI,WAAW;AACf,aAAO,CAAC,OAAO,QAAQ,KAAK,KAAK,SAAS,gBAAe,iBAAiB;AACxE,cAAM,WAAW,OAAO,KAAK;AAC7B,YAAI,CAAC,SAAU;AAGf,YAAI,SAAS,SAAS,iBAAiB,SAAS,UAAU,KAAK;AAC7D,cAAI,aAAa,GAAG;AAClB,mBAAO,QAAQ;AACf;AAAA,UACF;AACA;AAAA,QACF;AAEA,YAAI,SAAS,SAAS,iBAAiB,SAAS,UAAU,KAAK;AAC7D;AAAA,QACF;AAEA,YAAI,SAAS,SAAS,iBAAiB,SAAS,UAAU,KAAK;AAC7D,iBAAO,QAAQ;AACf;AAAA,QACF;AAEA,aAAK,KAAK,SAAS,KAAK;AACxB,eAAO,QAAQ;AAAA,MACjB;AAGA,YAAM,aAAa,GAAG,KAAK,IAAI,KAAK,KAAK,IAAI,CAAC;AAC9C,aAAO;AAAA,QACL,MAAM;AAAA,QACN,KAAK;AAAA,MACP;AAAA,IACF;AAGA,WAAO;AAAA,MACL,MAAM;AAAA,MACN,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,qCAAqC,QAA2C;AACtF,UAAM,QAAQ,OAAO,KAAK;AAC1B,QAAI,CAAC,SAAS,MAAM,SAAS,WAAY,QAAO;AAGhD,UAAM,OAAO,OAAO,KAAK;AACzB,WAAO,QAAQ;AAEf,UAAM,kBAAkB,OAAO,KAAK;AACpC,QACE,CAAC,mBACD,gBAAgB,SAAS,iBACzB,gBAAgB,UAAU,MAC1B;AACA,aAAO,MAAM,IAAI;AACjB,aAAO;AAAA,IACT
;AACA,WAAO,QAAQ;AAEf,UAAM,gBAAgB,OAAO,KAAK;AAClC,QAAI,CAAC,eAAe;AAClB,aAAO,MAAM,IAAI;AACjB,aAAO;AAAA,IACT;AAGA,QAAI,cAAc,SAAS,cAAc,cAAc,SAAS,cAAc;AAC5E,aAAO,MAAM,IAAI;AACjB,aAAO;AAAA,IACT;AACA,WAAO,QAAQ;AAGf,WAAO,mBAAmB,eAAe,MAAM,KAAK,GAAG,cAAc,KAAK;AAAA,EAC5E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWQ,mCAAmC,QAA2C;AACpF,UAAM,QAAQ,OAAO,KAAK;AAC1B,QAAI,CAAC,SAAS,MAAM,SAAS,WAAY,QAAO;AAGhD,QAAI,CAAC,MAAM,MAAM,WAAW,GAAG,EAAG,QAAO;AAGzC,UAAM,OAAO,OAAO,KAAK;AACzB,WAAO,QAAQ;AAEf,UAAM,gBAAgB,OAAO,KAAK;AAClC,QAAI,CAAC,iBAAiB,cAAc,SAAS,YAAY;AACvD,aAAO,MAAM,IAAI;AACjB,aAAO;AAAA,IACT;AAIA,QAAI,CAAC,cAAc,MAAM,WAAW,GAAG,GAAG;AACxC,aAAO,MAAM,IAAI;AACjB,aAAO;AAAA,IACT;AAIA,UAAM,QAAQ,OAAO,KAAK,CAAC;AAC3B,QAAI,SAAS,MAAM,SAAS,YAAY;AAAA,IAExC;AAEA,WAAO,QAAQ;AAIf,UAAM,eAAe,cAAc,MAAM,MAAM,CAAC;AAEhD,WAAO,mBAAmB,eAAe,MAAM,KAAK,GAAG,YAAY;AAAA,EACrE;AAAA;AAAA;AAAA;AAAA,EAKQ,gBACN,QACA,cACA,UACS;AACT,UAAM,OAAO,OAAO,KAAK;AAGzB,UAAM,iBAAiB,IAAI,IAAI,SAAS,KAAK,CAAC;AAE9C,UAAM,UAAU,KAAK,mBAAmB,QAAQ,aAAa,QAAQ,QAAQ;AAE7E,QAAI,CAAC,SAAS;AACZ,aAAO,MAAM,IAAI;AAEjB,iBAAW,QAAQ,SAAS,KAAK,GAAG;AAClC,YAAI,CAAC,eAAe,IAAI,IAAI,GAAG;AAC7B,mBAAS,OAAO,IAAI;AAAA,QACtB;AAAA,MACF;AACA,aAAO,aAAa,YAAY;AAAA,IAClC;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,aACN,OACA,OAC+D;AAE/D,QAAI,MAAM,UAAU,MAAO,QAAO;AAGlC,QAAI,MAAM,eAAe,MAAO,QAAO;AAIvC,QAAI,MAAM,SAAS,SAAS,MAAM,mBAAmB,UAAa,MAAM,kBAAkB,KAAK;AAC7F,aAAO;AAAA,IACT;AAGA,QAAI,MAAM,SAAS,aAAa,MAAM,MAAM,YAAY,MAAM,MAAM,YAAY,GAAG;AACjF,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAsBQ,qBAAqB,OAA4C;AACvE,YAAQ,MAAM,MAAM;AAAA,MAClB,KAAK;AACH,eAAO,eAAe,MAAM,KAAK;AAAA,MAEnC,KAAK;AACH,eAAO,KAAK,kBAAkB,MAAM,KAAK;AAAA,MAE3C,KAAK;AAEH,cAAM,SAAS,MAAM,cAAc,MAAM,OAAO,YAAY;AAC5D,YAAI,CAAC,MAAM,OAAO,MAAM,UAAU,SAAS,UAAU,MAAM,EAAE,SAAS,KAAK,GAAG;AAC5E,iBAAO,gBAAgB,KAAY;AAAA,QACrC;AACA,eAAO,cAAc,MAAM,cAAc,MAAM,KAAK;AAAA,MAEtD,KAAK;AAEH,YAAI,MAAM,MAAM,WAAW,GAAG,GAAG;AAC/B,iBAAO,gBAAgB,MAAM,KAAY;AAAA,QAC3C;AAEA,cAAM,aAAa,MAAM,MAAM,YAAY;AAC3C,YAAI,CAAC,MAAM,OAAO,MAAM,UAAU,SAAS,UAAU,MAAM,EAAE,SAAS,UAAU,GAAG;AACjF,iBAAO,gBAAgB,UAAiB;AAAA,QAC1C;AAGA,eAAO,EAAE,MAAM,cAAc,KAAK,MAAM,MAAM;AAAA,MAEhD,KAAK;AAEH,eAAO,cAAc,MAAM,OAAO,QAAQ;AAAA,MAE5C;AACE,eAAO;AAAA,IACX;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,OAA8B;AAEtD,QACE,MAAM,WAAW,GAAG,KACpB,MAAM,WAAW,GAAG,KACpB,MAAM,WAAW,GAAG,KACpB,MAAM,WAAW,QAAG,GACpB;AACA,YAAM,QAAQ,MAAM,MAAM,GAAG,EAAE;AAC/B,aAAO,cAAc,OAAO,QAAQ;AAAA,IACtC;AAGA,QAAI,UAAU,OAAQ,QAAO,cAAc,MAAM,SAAS;AAC1D,QAAI,UAAU,QAAS,QAAO,cAAc,OAAO,SAAS;AAG5D,UAAM,gBAAgB,MAAM,MAAM,8BAA8B;AAChE,QAAI,eAAe;AACjB,YAAMA,OAAM,WAAW,cAAc,CAAC,CAAC;AACvC,YAAM,OAAO,cAAc,CAAC;AAC5B,UAAI,MAAM;AACR,eAAO,cAAc,OAAO,UAAU;AAAA,MACxC;AACA,aAAO,cAAcA,MAAK,QAAQ;AAAA,IACpC;AAGA,UAAM,MAAM,WAAW,KAAK;AAC5B,QAAI,CAAC,MAAM,GAAG,GAAG;AACf,aAAO,cAAc,KAAK,QAAQ;AAAA,IACpC;AAGA,WAAO,cAAc,OAAO,QAAQ;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKQ,qBACN,SACA,UACM;AACN,eAAW,CAAC,MAAM,IAAI,KAAK,OAAO,QAAQ,QAAQ,UAAU,GAAG;AAC7D,UAAI,CAAC,SAAS,IAAI,IAAoB,KAAK,KAAK,SAAS;AACvD,iBAAS,IAAI,MAAsB,KAAK,OAAO;AAAA,MACjD;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,WAAW,cAAqC;AACtD,WAAQ,aAAqB,aAAa;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYQ,oBACN,SACA,UACQ;AACR,QAAI,QAAQ;AACZ,QAAI,WAAW;AAGf,UAAM,aAAa,CAAC,SAAgC;AAClD,aAAO,QAAQ,aAAa,IAAI,GAAG,YAAY;AAAA,IACjD;AAGA,eAAW,SAAS,QAAQ,SAAS,QAAQ;AAC3C,UAAI,MAAM,SAAS,QAAQ;AACzB,oBAAY;AACZ,YAAI,SAAS,IAAI,MAAM,IAAI,GAAG;AAC5B,mBAAS;AAAA,QACX;AAAA,MACF,WAAW,MAAM,SAAS,SAAS;AAEjC,mBAAW,YAAY,MAAM,QAAQ;AACnC,cAAI,SAAS,SAAS,QAAQ;AAC5B,kBAAM,iBAAiB,WAAW,SAAS,IAAI;AAC/C,kBAAM,SAAS;AACf,wBAAY;AAEZ,gBA
AI,SAAS,IAAI,SAAS,IAAI,GAAG;AAE/B,uBAAS;AAAA,YACX,WAAW,gBAAgB;AAGzB,uBAAS,SAAS;AAAA,YACpB;AAAA,UAEF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,QAAI,iBAAiB,WAAW,IAAI,QAAQ,WAAW;AAKvD,QAAI,KAAK,iBAAiB,KAAK,KAAK,sBAAsB,GAAG;AAC3D,YAAM,cAAe,KAAK,iBAAiB,KAAK,sBAAuB;AACvE,uBAAiB,KAAK,IAAI,KAAK,iBAAiB,WAAW;AAAA,IAC7D;AAGA,UAAM,WAAW,KAAK,4BAA4B,OAAO;AACzD,qBAAiB,KAAK,IAAI,GAAK,iBAAiB,QAAQ;AAGxD,UAAM,wBAAwB,KAAK,gCAAgC,SAAS,QAAQ;AACpF,qBAAiB,KAAK,IAAI,GAAK,KAAK,IAAI,GAAK,iBAAiB,qBAAqB,CAAC;AAEpF,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcQ,4BAA4B,SAAkC;AAEpE,QAAI,QAAQ,aAAa,MAAM;AAC7B,aAAO;AAAA,IACT;AAGA,UAAM,aAAa,QAAQ,SAAS,OAAO,CAAC;AAC5C,QAAI,CAAC,cAAc,WAAW,SAAS,WAAW;AAChD,aAAO;AAAA,IACT;AAGA,UAAM,eAAe,oBAAI,IAAI;AAAA,MAC3B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAGD,QAAI,aAAa,IAAI,WAAW,KAAK,GAAG;AACtC,aAAO;AAAA,IACT;AAGA,QAAI,WAAW,cAAc;AAC3B,iBAAW,OAAO,WAAW,cAAc;AACzC,YAAI,aAAa,IAAI,GAAG,GAAG;AACzB,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAoBQ,gCACN,SACA,UACQ;AAER,QAAI,QAAQ,aAAa,MAAM;AAC7B,aAAO;AAAA,IACT;AAEA,QAAI,aAAa;AAIjB,UAAM,yBAAkE;AAAA,MACtE,SAAS,CAAC,oBAAK;AAAA;AAAA,MACf,aAAa,CAAC,sBAAO,oBAAK;AAAA;AAAA,MAC1B,QAAQ,CAAC,cAAI;AAAA;AAAA,MACb,OAAO,CAAC,cAAI;AAAA;AAAA,MACZ,QAAQ,CAAC,QAAG;AAAA;AAAA,MACZ,OAAO,CAAC,QAAG;AAAA;AAAA,MACX,MAAM,CAAC,sBAAO,oBAAK;AAAA;AAAA,MACnB,QAAQ,CAAC,QAAG;AAAA;AAAA,IACd;AAGA,eAAW,CAAC,MAAM,KAAK,KAAK,SAAS,QAAQ,GAAG;AAE9C,YAAM,YAAY,uBAAuB,IAAI;AAC7C,UAAI,CAAC,aAAa,UAAU,WAAW,GAAG;AACxC;AAAA,MACF;AAIA,YAAM,WAAY,MAAc;AAChC,UAAI,YAAY,OAAO,SAAS,qBAAqB,UAAU;AAC7D,cAAM,kBAAkB,SAAS;AAGjC,YAAI,UAAU,SAAS,eAAe,GAAG;AAEvC,wBAAc;AAAA,QAChB,OAAO;AAEL,wBAAc;AAAA,QAChB;AAAA,MACF;AAAA,IACF;AAGA,WAAO,KAAK,IAAI,MAAM,KAAK,IAAI,KAAK,UAAU,CAAC;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA;AAAA,EAiBQ,eAAe,QAA2B;AAChD,UAAM,QAAQ,OAAO,KAAK;AAC1B,QAAI,CAAC,MAAO;AAEZ,UAAM,aAAa,MAAM,MAAM,YAAY;AAG3C,QAAI,gBAAe,oBAAoB,IAAI,UAAU,GAAG;AAEtD,YAAM,OAAO,OAAO,KAAK;AACzB,aAAO,QAAQ;AACf,YAAM,YAAY,OAAO,KAAK;AAE9B,UAAI,aAAa,UAAU,SAAS,YAAY;AAE9C;AAAA,MACF;AAGA,aAAO,MAAM,IAAI;AAAA,IACnB;AAKA,QAAI,eAAe,SAAS;AAE1B,aAAO,QAAQ;AAAA,IACjB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,sBAAsB,QAAoE;AACxF,UAAM,YAMF,CAAC;AAEL,QAAI,gBAAgB;AAGpB,WAAO,CAAC,OAAO,QAAQ,GAAG;AACxB,YAAM,QAAQ,OAAO,KAAK;AAC1B,UAAI,CAAC,SAAS,MAAM,SAAS,kBAAkB;AAC7C;AAAA,MACF;AAEA,YAAM,WAAW,MAAM;AAGvB,UAAI,CAAC,UAAU;AACb;AAAA,MACF;AAEA,sBAAgB;AAEhB,cAAQ,SAAS,cAAc;AAAA,QAC7B,KAAK;AACH,oBAAU,OAAO;AACjB;AAAA,QACF,KAAK;AACH,cAAI,OAAO,SAAS,UAAU,UAAU;AACtC,sBAAU,WAAW,SAAS;AAAA,UAChC;AACA;AAAA,QACF,KAAK;AACH,cAAI,OAAO,SAAS,UAAU,UAAU;AACtC,sBAAU,WAAW,SAAS;AAAA,UAChC;AACA;AAAA,QACF,KAAK;AACH,cACE,SAAS,UAAU,WACnB,SAAS,UAAU,UACnB,SAAS,UAAU,SACnB,SAAS,UAAU,QACnB;AACA,sBAAU,QAAQ,SAAS;AAAA,UAC7B;AACA;AAAA,MACJ;AAEA,aAAO,QAAQ;AAAA,IACjB;AAEA,WAAO,gBAAgB,YAAY;AAAA,EACrC;AACF;AAAA;AAAA;AAAA;AAAA;AA9mCa,gBAgsBa,qBAAqB;AAAA;AAhsBlC,gBAmsBa,kBAAkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAnsB/B,gBAwgCa,sBAAsB,oBAAI,IAAI,CAAC,OAAO,KAAK,IAAI,CAAC;AAxgCnE,IAAM,iBAAN;AAunCA,IAAM,iBAAiB,IAAI,eAAe;;;ACnnC1C,SAASC,cAAa,UAAiD;AAC5E,SAAO,gBAAgB,QAAQ;AACjC;;;AC2CO,IAAM,gBAAN,MAAoB;AAAA,EAUzB,YAAY,SAA8B,CAAC,GAAG;AAC5C,SAAK,QAAQ,oBAAI,IAAI;AACrB,SAAK,SAAS;AAAA,MACZ,SAAS,OAAO,WAAW;AAAA,MAC3B,OAAO,OAAO,SAAS;AAAA,MACvB,SAAS,OAAO,WAAW;AAAA,IAC7B;AACA,SAAK,QAAQ
;AAAA,MACX,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,WAAW;AAAA,MACX,aAAa;AAAA,IACf;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,QAAQ,OAAe,UAA0B;AACvD,WAAO,GAAG,QAAQ,IAAI,KAAK;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,OAA4B;AAC5C,QAAI,KAAK,OAAO,UAAU,EAAG,QAAO;AACpC,WAAO,KAAK,IAAI,IAAI,MAAM,YAAY,KAAK,OAAO;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA,EAKQ,WAAiB;AAEvB,UAAM,WAAW,KAAK,MAAM,KAAK,EAAE,KAAK,EAAE;AAC1C,QAAI,aAAa,QAAW;AAC1B,WAAK,MAAM,OAAO,QAAQ;AAC1B,WAAK,MAAM;AAAA,IACb;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,IAAI,OAAe,UAAsD;AACvE,QAAI,CAAC,KAAK,OAAO,SAAS;AACxB,WAAK,MAAM;AACX,aAAO;AAAA,IACT;AAEA,UAAM,MAAM,KAAK,QAAQ,OAAO,QAAQ;AACxC,UAAM,QAAQ,KAAK,MAAM,IAAI,GAAG;AAEhC,QAAI,CAAC,OAAO;AACV,WAAK,MAAM;AACX,aAAO;AAAA,IACT;AAGA,QAAI,KAAK,UAAU,KAAK,GAAG;AACzB,WAAK,MAAM,OAAO,GAAG;AACrB,WAAK,MAAM;AACX,WAAK,MAAM;AACX,aAAO;AAAA,IACT;AAGA,SAAK,MAAM,OAAO,GAAG;AACrB,UAAM,eAAe,KAAK,IAAI;AAC9B,SAAK,MAAM,IAAI,KAAK,KAAK;AAEzB,SAAK,MAAM;AACX,WAAO,MAAM;AAAA,EACf;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,IAAI,OAAe,UAAkB,QAAsC;AACzE,QAAI,CAAC,KAAK,OAAO,QAAS;AAG1B,QAAI,OAAO,eAAe,EAAG;AAE7B,UAAM,MAAM,KAAK,QAAQ,OAAO,QAAQ;AACxC,UAAM,MAAM,KAAK,IAAI;AAGrB,WAAO,KAAK,MAAM,QAAQ,KAAK,OAAO,SAAS;AAC7C,WAAK,SAAS;AAAA,IAChB;AAEA,SAAK,MAAM,IAAI,KAAK;AAAA,MAClB;AAAA,MACA,WAAW;AAAA,MACX,cAAc;AAAA,IAChB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,OAAe,UAA2B;AAC5C,QAAI,CAAC,KAAK,OAAO,QAAS,QAAO;AAEjC,UAAM,MAAM,KAAK,QAAQ,OAAO,QAAQ;AACxC,UAAM,QAAQ,KAAK,MAAM,IAAI,GAAG;AAEhC,QAAI,CAAC,MAAO,QAAO;AACnB,QAAI,KAAK,UAAU,KAAK,GAAG;AACzB,WAAK,MAAM,OAAO,GAAG;AACrB,WAAK,MAAM;AACX,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,OAAe,UAA2B;AAC/C,UAAM,MAAM,KAAK,QAAQ,OAAO,QAAQ;AACxC,WAAO,KAAK,MAAM,OAAO,GAAG;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,SAAK,MAAM,MAAM;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAKA,aAAmB;AACjB,SAAK,QAAQ;AAAA,MACX,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,WAAW;AAAA,MACX,aAAa;AAAA,IACf;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,WAAuB;AACrB,UAAM,QAAQ,KAAK,MAAM,OAAO,KAAK,MAAM;AAC3C,WAAO;AAAA,MACL,MAAM,KAAK,MAAM;AAAA,MACjB,QAAQ,KAAK,MAAM;AAAA,MACnB,MAAM,KAAK,MAAM;AAAA,MACjB,SAAS,KAAK,OAAO;AAAA,MACrB,SAAS,QAAQ,IAAI,KAAK,MAAM,OAAO,QAAQ;AAAA,MAC/C,WAAW,KAAK,MAAM;AAAA,MACtB,aAAa,KAAK,MAAM;AAAA,MACxB,SAAS,KAAK,OAAO;AAAA,IACvB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,QAA4C;AACpD,QAAI,OAAO,YAAY,QAAW;AAChC,WAAK,OAAO,UAAU,OAAO;AAE7B,aAAO,KAAK,MAAM,OAAO,KAAK,OAAO,SAAS;AAC5C,aAAK,SAAS;AAAA,MAChB;AAAA,IACF;AACA,QAAI,OAAO,UAAU,QAAW;AAC9B,WAAK,OAAO,QAAQ,OAAO;AAAA,IAC7B;AACA,QAAI,OAAO,YAAY,QAAW;AAChC,WAAK,OAAO,UAAU,OAAO;AAAA,IAC/B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,SAAe;AACb,SAAK,OAAO,UAAU;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKA,UAAgB;AACd,SAAK,OAAO,UAAU;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKA,YAAqD;AACnD,WAAO,EAAE,GAAG,KAAK,OAAO;AAAA,EAC1B;AACF;AASO,IAAM,gBAAgB,IAAI,cAAc;AASxC,SAAS,oBAAoB,QAA6C;AAC/E,SAAO,IAAI,cAAc,MAAM;AACjC;AASO,SAAS,UACd,WACA,QAAuB,eACpB;AACH,UAAQ,CAAC,OAAe,aAA6C;AAEnE,UAAM,SAAS,MAAM,IAAI,OAAO,QAAQ;AACxC,QAAI,QAAQ;AACV,aAAO;AAAA,IACT;AAGA,UAAM,SAAS,UAAU,OAAO,QAAQ;AAGxC,UAAM,IAAI,OAAO,UAAU,MAAM;AAEjC,WAAO;AAAA,EACT;AACF;;;AClPO,IAAM,uBAAN,MAAuD;AAAA,EAK5D,YAAY,UAAmC,CAAC,GAAG;AACjD,SAAK,iBAAiB,IAAI,eAAe;AAEzC,SAAK,YAAY,IAAI,IAAI,uBAAuB,CAAC;AAGjD,QAAI,QAAQ,UAAU,OAAO;AAC3B,WAAK,QAAQ,IAAI,cAAc,EAAE,SAAS,MAAM,CAAC;AAAA,IACnD,OAAO;AACL,WAAK,QAAQ,QAAQ,QAAQ,IAAI,cAAc,QAAQ,KAAK,IAAI;AAAA,IAClE;AAAA,EACF;AAAA,EAEA,QAAQ,OAAe,UAA0C;AAE/D,QAAI,CAAC,KAAK,iBAAiB,QAAQ,GAAG;AACpC,aAAO;AAAA,QACL,YAAY;AAAA,QACZ,QAAQ,CAAC,aAAa,QAAQ,yCAAyC;AAAA,MACzE;AAAA,IACF;AAGA,UAAM,SAAS,KAAK,MAAM,IAAI,OAAO,QAAQ;AAC7C,QAAI,QAAQ;AACV,aAAO;AAAA,IACT;AAGA,UAAM,SAAS,KAAK,gBAAgB,OAAO,QAAQ;A
AGnD,SAAK,MAAM,IAAI,OAAO,UAAU,MAAM;AAEtC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,OAAe,UAA0C;AAC/E,QAAI;AAEF,YAAM,YAAYC,cAAa,QAAQ;AACvC,UAAI,CAAC,WAAW;AACd,eAAO;AAAA,UACL,YAAY;AAAA,UACZ,QAAQ,CAAC,wCAAwC,QAAQ,GAAG;AAAA,QAC9D;AAAA,MACF;AAEA,YAAM,cAAc,UAAU,SAAS,KAAK;AAG5C,YAAM,WAAW,uBAAuB,QAAQ;AAChD,UAAI,SAAS,WAAW,GAAG;AACzB,eAAO;AAAA,UACL,YAAY;AAAA,UACZ,QAAQ,CAAC,uCAAuC,QAAQ,GAAG;AAAA,QAC7D;AAAA,MACF;AAGA,YAAM,QAAQ,KAAK,eAAe,UAAU,aAAa,QAAQ;AAEjE,UAAI,CAAC,OAAO;AACV,eAAO;AAAA,UACL,YAAY;AAAA,UACZ,QAAQ,CAAC,8BAA8B;AAAA,QACzC;AAAA,MACF;AAGA,YAAM,OAAO,KAAK,kBAAkB,KAAK;AAEzC,aAAO;AAAA,QACL,YAAY,MAAM;AAAA,QAClB,SAAS;AAAA,UACP,MAAM,MAAM,QAAQ;AAAA,UACpB,OAAO,MAAM;AAAA,QACf;AAAA,QACA;AAAA,QACA,gBAAgB,MAAM;AAAA,MACxB;AAAA,IACF,SAAS,OAAO;AACd,aAAO;AAAA,QACL,YAAY;AAAA,QACZ,QAAQ,CAAC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MACjE;AAAA,IACF;AAAA,EACF;AAAA,EAEA,iBAAiB,UAA2B;AAC1C,WAAO,KAAK,UAAU,IAAI,QAAQ;AAAA,EACpC;AAAA,EAEA,qBAA+B;AAC7B,WAAO,MAAM,KAAK,KAAK,SAAS;AAAA,EAClC;AAAA,EAEA,gBAA4B;AAC1B,WAAO,KAAK,MAAM,SAAS;AAAA,EAC7B;AAAA,EAEA,aAAmB;AACjB,SAAK,MAAM,MAAM;AAAA,EACnB;AAAA,EAEA,eAAe,QAA4C;AACzD,SAAK,MAAM,UAAU,MAAM;AAAA,EAC7B;AAAA,EAEQ,kBAAkB,OAAyC;AACjE,WAAO;AAAA,MACL,MAAM;AAAA,MACN,QAAQ,MAAM,QAAQ;AAAA,MACtB,OAAO,MAAM;AAAA,MACb,UAAU;AAAA,QACR,WAAW,MAAM,QAAQ;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AACF;AAwBO,SAAS,uBAAuB,SAAqD;AAC1F,SAAO,IAAI,qBAAqB,OAAO;AACzC;AAaO,IAAM,+BAA+B;AAKrC,IAAM,4BAA4B;AASlC,SAAS,wBACd,QACA,YAAoB,8BACX;AACT,SAAO,OAAO,cAAc,aAAa,OAAO,YAAY;AAC9D;AAkBO,SAAS,mBACd,OACA,SAIA;AACA,QAAM,OAAwB,CAAC;AAC/B,QAAM,YAA2C,CAAC;AAElD,aAAW,CAAC,MAAM,KAAK,KAAK,OAAO;AACjC,YAAQ,MAAM;AAAA;AAAA,MAEZ,KAAK;AAAA,MACL,KAAK;AACH,aAAK,KAAK,KAAK;AACf;AAAA;AAAA,MAGF,KAAK;AACH,YAAI,YAAY,OAAO;AACrB,oBAAU,MAAM,IAAI;AAAA,QACtB,OAAO;AACL,oBAAU,IAAI,IAAI;AAAA,QACpB;AACA;AAAA;AAAA,MAGF,KAAK;AACH,kBAAU,MAAM,IAAI;AACpB;AAAA;AAAA,MAGF,KAAK;AACH,kBAAU,IAAI,IAAI;AAClB;AAAA,MAEF,KAAK;AACH,kBAAU,MAAM,IAAI;AACpB;AAAA;AAAA,MAGF,KAAK;AACH,kBAAU,IAAI,IAAI;AAClB;AAAA,MAEF,KAAK;AACH,kBAAU,MAAM,IAAI;AACpB;AAAA;AAAA,MAGF,KAAK;AACH,kBAAU,IAAI,IAAI;AAClB;AAAA;AAAA,MAGF,KAAK;AACH,kBAAU,OAAO,IAAI;AACrB;AAAA,MAEF;AAEE,kBAAU,IAAI,IAAI;AAAA,IACtB;AAAA,EACF;AAEA,SAAO,EAAE,MAAM,UAAU;AAC3B;;;ACnTA,IAAM,eAAe,oBAAI,IAAI;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAED,IAAM,oBAAoB,oBAAI,IAAI,CAAC,OAAO,MAAM,OAAO,IAAI,CAAC;AAE5D,IAAM,mBAAmB,oBAAI,IAAI,CAAC,QAAQ,SAAS,QAAQ,WAAW,CAAC;AAEvE,IAAM,aAAa,oBAAI,IAAI;AAAA,EACzB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAMM,SAASC,UAAS,OAAwB;AAC/C,QAAM,SAAkB,CAAC;AACzB,MAAI,MAAM;AACV,MAAI,OAAO;AACX,MAAI,SAAS;AAIb,WAAS,8BAAuC;AAC9C,QAAI,OAAO,WAAW,EAAG,QAAO;AAChC,UAAM,OAAO,OAAO,OAAO,SAAS,CAAC;AAErC,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,EAAE,SAAS,KAAK,IAAI;AAAA,EACtB;AAEA,WAAS,KAAK,SAAS,GAAW;AAChC,WAAO,MAAM,MAAM,MAAM,KAAK;AAAA,EAChC;AAEA,WAAS,UAAkB;AACzB,UAAM,OAAO,MAAM,GAAG;AACtB;AACA,QAAI,SAAS,MAAM;AACjB;AACA,eAAS;AAAA,IACX,OAAO;AACL;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,WAAS,iBAAuB;AAC9B,WAAO,MAAM,MAAM,UAAU,KAAK,KAAK,MAAM,GAAG,CAAC,GAAG;AAClD,cAAQ;AAAA,IACV;AAAA,EACF;AAEA,WAAS,UAAU,WAA8C;AAC/D,QAAI,SAAS;AACb,WAAO,MAAM,MAAM,UAAU,UAAU,MAAM,GAAG,CAAC,GAAG;AAClD,gBAAU,QAAQ;AAAA,IACpB;AACA,WAAO;AAAA,EACT;AAEA,WAAS,WAAW,OAAuB;AACzC,QAAI,SAAS;AACb,YAAQ;AACR,WAAO,MAAM,MAAM,UAAU,MAAM,GAAG,MAAM,OAAO;AACjD,UAAI,MAAM,GAAG,MAAM,QAAQ,MAAM,IAAI,MAAM,QAAQ;AACjD,kBAAU,QAAQ;AAClB,kBAAU,QAAQ;AAAA,MACpB,O
AAO;AACL,kBAAU,QAAQ;AAAA,MACpB;AAAA,IACF;AACA,QAAI,MAAM,MAAM,QAAQ;AACtB,gBAAU,QAAQ;AAAA,IACpB;AACA,WAAO;AAAA,EACT;AAEA,WAAS,sBAA8B;AACrC,QAAI,SAAS;AACb,YAAQ;AACR,WAAO,MAAM,MAAM,UAAU,MAAM,GAAG,MAAM,KAAK;AAC/C,UAAI,MAAM,GAAG,MAAM,QAAQ,MAAM,IAAI,MAAM,QAAQ;AACjD,kBAAU,QAAQ;AAClB,kBAAU,QAAQ;AAAA,MACpB,OAAO;AACL,kBAAU,QAAQ;AAAA,MACpB;AAAA,IACF;AACA,QAAI,MAAM,MAAM,QAAQ;AACtB,gBAAU,QAAQ;AAAA,IACpB;AACA,WAAO;AAAA,EACT;AAEA,WAAS,oBAA4B;AACnC,QAAI,SAAS;AACb,YAAQ;AACR,WAAO,MAAM,MAAM,QAAQ;AACzB,gBAAU,QAAQ;AAClB,UAAI,OAAO,SAAS,IAAI,GAAG;AACzB;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,WAAS,UAAU,MAAiB,OAAe,OAAsB;AACvE,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA,QAAQ,SAAS,MAAM;AAAA,IACzB;AAAA,EACF;AAEA,SAAO,MAAM,MAAM,QAAQ;AACzB,mBAAe;AACf,QAAI,OAAO,MAAM,OAAQ;AAEzB,UAAM,QAAQ;AACd,UAAM,OAAO,KAAK;AAGlB,QAAI,SAAS,OAAO,KAAK,CAAC,MAAM,OAAO,CAAC,KAAK,KAAK,KAAK,CAAC,CAAC,GAAG;AAC1D,cAAQ;AACR,cAAQ;AACR,aAAO,KAAK,UAAU,+BAAsB,MAAM,KAAK,CAAC;AACxD;AAAA,IACF;AAGA,QAAI,SAAS,OAAO,SAAS,KAAK;AAChC,YAAM,QAAQ,WAAW,IAAI;AAC7B,aAAO,KAAK,UAAU,uBAAkB,OAAO,KAAK,CAAC;AACrD;AAAA,IACF;AAGA,QAAI,SAAS,KAAK;AAChB,YAAM,QAAQ,oBAAoB;AAClC,aAAO,KAAK,UAAU,2CAA4B,OAAO,KAAK,CAAC;AAC/D;AAAA,IACF;AAGA,QAAI,SAAS,OAAO,eAAe,KAAK,KAAK,CAAC,CAAC,GAAG;AAChD,YAAM,QAAQ,kBAAkB;AAChC,aAAO,KAAK,UAAU,uCAA0B,OAAO,KAAK,CAAC;AAC7D;AAAA,IACF;AAGA,QAAI,SAAS,OAAO,4BAA4B,GAAG;AACjD,cAAQ;AACR,YAAM,OAAO,UAAU,OAAK,QAAQ,KAAK,CAAC,CAAC;AAC3C,aAAO,KAAK,UAAU,iCAAuB,MAAM,MAAM,KAAK,CAAC;AAC/D;AAAA,IACF;AAGA,QAAI,SAAS,OAAO,aAAa,KAAK,KAAK,CAAC,CAAC,KAAK,4BAA4B,GAAG;AAC/E,cAAQ;AACR,YAAM,OAAO,UAAU,OAAK,QAAQ,KAAK,CAAC,CAAC;AAC3C,aAAO,KAAK,UAAU,uCAA0B,MAAM,MAAM,KAAK,CAAC;AAClE;AAAA,IACF;AAGA,QAAI,SAAS,OAAO,4BAA4B,GAAG;AAEjD,YAAM,WAAW,KAAK,CAAC;AACvB,UAAI,aAAa,OAAO,WAAW,KAAK,QAAQ,GAAG;AACjD,YAAI,QAAQ;AACZ,iBAAS,QAAQ;AACjB,eAAO,MAAM,MAAM,UAAU,MAAM,GAAG,MAAM,KAAK;AAC/C,cAAI,MAAM,GAAG,MAAM,OAAO,MAAM,GAAG,MAAM,KAAK;AAC5C,qBAAS,WAAW,MAAM,GAAG,CAAC;AAAA,UAChC,OAAO;AACL,qBAAS,QAAQ;AAAA,UACnB;AAAA,QACF;AACA,YAAI,MAAM,MAAM,QAAQ;AACtB,mBAAS,QAAQ;AAAA,QACnB;AACA,eAAO,KAAK,UAAU,+CAA8B,OAAO,KAAK,CAAC;AACjE;AAAA,MACF;AAAA,IACF;AAGA,QAAI,SAAS,KAAK;AAChB,cAAQ;AACR,aAAO,KAAK,UAAU,2BAAoB,KAAK,KAAK,CAAC;AACrD;AAAA,IACF;AACA,QAAI,SAAS,KAAK;AAChB,cAAQ;AACR,aAAO,KAAK,UAAU,2BAAoB,KAAK,KAAK,CAAC;AACrD;AAAA,IACF;AAGA,QAAI,KAAK,KAAK,IAAI,GAAG;AACnB,YAAM,MAAM,UAAU,OAAK,QAAQ,KAAK,CAAC,CAAC;AAC1C,YAAM,YAAY;AAClB,YAAM,OAAO,UAAU,OAAK,WAAW,KAAK,CAAC,CAAC;AAE9C,UAAI,WAAW,IAAI,IAAI,GAAG;AACxB,eAAO,KAAK,UAAU,yCAA2B,MAAM,MAAM,KAAK,CAAC;AAAA,MACrE,OAAO;AAEL,cAAM;AACN,eAAO,KAAK,UAAU,uBAAkB,KAAK,KAAK,CAAC;AAAA,MACrD;AACA;AAAA,IACF;AAGA,QAAI,SAAS,KAAK;AAChB,cAAQ;AACR,aAAO,KAAK,UAAU,uBAAkB,KAAK,KAAK,CAAC;AACnD;AAAA,IACF;AACA,QAAI,SAAS,KAAK;AAChB,cAAQ;AACR,aAAO,KAAK,UAAU,uBAAkB,KAAK,KAAK,CAAC;AACnD;AAAA,IACF;AACA,QAAI,SAAS,KAAK;AAChB,cAAQ;AACR,aAAO,KAAK,UAAU,uBAAkB,KAAK,KAAK,CAAC;AACnD;AAAA,IACF;AACA,QAAI,SAAS,KAAK;AAChB,cAAQ;AACR,aAAO,KAAK,UAAU,uBAAkB,KAAK,KAAK,CAAC;AACnD;AAAA,IACF;AACA,QAAI,SAAS,KAAK;AAChB,cAAQ;AACR,aAAO,KAAK,UAAU,qBAAiB,KAAK,KAAK,CAAC;AAClD;AAAA,IACF;AACA,QAAI,SAAS,KAAK;AAChB,cAAQ;AACR,aAAO,KAAK,UAAU,qBAAiB,KAAK,KAAK,CAAC;AAClD;AAAA,IACF;AACA,QAAI,SAAS,KAAK;AAChB,cAAQ;AACR,aAAO,KAAK,UAAU,iBAAe,KAAK,KAAK,CAAC;AAChD;AAAA,IACF;AAGA,QAAI,SAAS,OAAO,SAAS,OAAO,SAAS,OAAO,SAAS,OAAO,SAAS,KAAK;AAChF,cAAQ;AACR,aAAO,KAAK,UAAU,2BAAoB,MAAM,KAAK,CAAC;AACtD;AAAA,IACF;AAGA,QAAI,SAAS,OAAO,SAAS,OAAO,SAAS,OAAO,SAAS,KAAK;AAChE,UAAI,KAAK,QAAQ;AACjB,UAAI,KAAK,MAAM,KAAK;AAClB,cAAM,QAAQ;AAAA,MAChB;AACA,aAAO,KAAK,UAAU,+BAAsB,IAAI,KAAK,CAAC;AACtD;AAAA,IACF;AAGA,QAAI,aAAa,KAAK,IAAI,GAAG;AAC3B,YAAM,OAA
O,UAAU,OAAK,QAAQ,KAAK,CAAC,CAAC;AAC3C,YAAM,QAAQ,KAAK,YAAY;AAE/B,UAAI,aAAa,IAAI,KAAK,GAAG;AAC3B,eAAO,KAAK,UAAU,iCAAuB,MAAM,KAAK,CAAC;AAAA,MAC3D,WAAW,kBAAkB,IAAI,KAAK,GAAG;AACvC,eAAO,KAAK,UAAU,yBAAmB,MAAM,KAAK,CAAC;AAAA,MACvD,WAAW,iBAAiB,IAAI,KAAK,GAAG;AACtC,eAAO,KAAK,UAAU,yBAAmB,MAAM,KAAK,CAAC;AAAA,MACvD,OAAO;AACL,eAAO,KAAK,UAAU,+BAAsB,MAAM,KAAK,CAAC;AAAA,MAC1D;AACA;AAAA,IACF;AAGA,YAAQ;AAAA,EACV;AAEA,SAAO,KAAK,UAAU,iBAAe,IAAI,GAAG,CAAC;AAC7C,SAAO;AACT;;;ACzWO,IAAM,mBAAN,MAAuB;AAAA,EAAvB;AACL,SAAQ,SAAkB,CAAC;AAC3B,SAAQ,UAAU;AAAA;AAAA,EAElB,MAAM,OAAsC;AAC1C,QAAI;AACF,WAAK,SAASC,UAAS,KAAK;AAC5B,WAAK,UAAU;AAEf,UAAI,KAAK,QAAQ,GAAG;AAClB,eAAO,EAAE,SAAS,OAAO,OAAO,mBAAmB;AAAA,MACrD;AAEA,YAAM,OAAO,KAAK,gBAAgB;AAClC,aAAO,EAAE,SAAS,MAAM,MAAM,UAAU,KAAK,QAAQ;AAAA,IACvD,SAAS,GAAG;AACV,aAAO;AAAA,QACL,SAAS;AAAA,QACT,OAAO,aAAa,QAAQ,EAAE,UAAU;AAAA,MAC1C;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMQ,OAAc;AACpB,WAAO,KAAK,OAAO,KAAK,OAAO,KAAK,EAAE,uBAAqB,OAAO,IAAI,OAAO,GAAG,KAAK,EAAE;AAAA,EACzF;AAAA,EAEQ,WAAkB;AACxB,WAAO,KAAK,OAAO,KAAK,UAAU,CAAC,KAAK,EAAE,uBAAqB,OAAO,IAAI,OAAO,GAAG,KAAK,EAAE;AAAA,EAC7F;AAAA,EAEQ,UAAmB;AACzB,WAAO,KAAK,KAAK,EAAE;AAAA,EACrB;AAAA,EAEQ,UAAiB;AACvB,QAAI,CAAC,KAAK,QAAQ,GAAG;AACnB,WAAK;AAAA,IACP;AACA,WAAO,KAAK,SAAS;AAAA,EACvB;AAAA,EAEQ,MAAM,MAA0B;AACtC,WAAO,KAAK,KAAK,EAAE,SAAS;AAAA,EAC9B;AAAA,EAEQ,WAAW,OAAwB;AACzC,WAAO,KAAK,KAAK,EAAE,MAAM,YAAY,MAAM,MAAM,YAAY;AAAA,EAC/D;AAAA,EAEQ,SAAS,OAA6B;AAC5C,eAAW,QAAQ,OAAO;AACxB,UAAI,KAAK,MAAM,IAAI,GAAG;AACpB,aAAK,QAAQ;AACb,eAAO;AAAA,MACT;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAMQ,kBAAkC;AACxC,WAAO,KAAK,QAAQ;AAAA,EACtB;AAAA,EAEQ,UAA0B;AAChC,QAAI,OAAO,KAAK,SAAS;AAEzB,WAAO,KAAK,WAAW,IAAI,GAAG;AAC5B,YAAM,WAAW,KAAK,QAAQ,EAAE;AAChC,YAAM,QAAQ,KAAK,SAAS;AAC5B,aAAO,KAAK,uBAAuB,UAAU,MAAM,KAAK;AAAA,IAC1D;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,WAA2B;AACjC,QAAI,OAAO,KAAK,cAAc;AAE9B,WAAO,KAAK,WAAW,KAAK,GAAG;AAC7B,YAAM,WAAW,KAAK,QAAQ,EAAE;AAChC,YAAM,QAAQ,KAAK,cAAc;AACjC,aAAO,KAAK,uBAAuB,UAAU,MAAM,KAAK;AAAA,IAC1D;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,gBAAgC;AACtC,QAAI,OAAO,KAAK,gBAAgB;AAEhC,WACE,KAAK,mCAA0B,KAC/B,KAAK,WAAW,IAAI,KACpB,KAAK,WAAW,SAAS,KACzB,KAAK,WAAW,UAAU,KAC1B,KAAK,WAAW,IAAI,GACpB;AACA,YAAM,WAAW,KAAK,SAAS,EAAE;AACjC,YAAM,QAAQ,KAAK,gBAAgB;AACnC,aAAO,KAAK,uBAAuB,UAAU,MAAM,KAAK;AAAA,IAC1D;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,kBAAkC;AACxC,QAAI,OAAO,KAAK,cAAc;AAE9B,WAAO,KAAK,mCAA0B,GAAG;AACvC,YAAM,WAAW,KAAK,QAAQ,EAAE;AAChC,YAAM,QAAQ,KAAK,cAAc;AACjC,aAAO,KAAK,uBAAuB,UAAU,MAAM,KAAK;AAAA,IAC1D;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,gBAAgC;AACtC,QAAI,OAAO,KAAK,oBAAoB;AAEpC,WAAO,KAAK,KAAK,EAAE,UAAU,OAAO,KAAK,KAAK,EAAE,UAAU,KAAK;AAC7D,YAAM,WAAW,KAAK,QAAQ,EAAE;AAChC,YAAM,QAAQ,KAAK,oBAAoB;AACvC,aAAO,KAAK,uBAAuB,UAAU,MAAM,KAAK;AAAA,IAC1D;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,sBAAsC;AAC5C,QAAI,OAAO,KAAK,WAAW;AAE3B,WAAO,KAAK,KAAK,EAAE,UAAU,OAAO,KAAK,KAAK,EAAE,UAAU,OAAO,KAAK,KAAK,EAAE,UAAU,KAAK;AAC1F,YAAM,WAAW,KAAK,QAAQ,EAAE;AAChC,YAAM,QAAQ,KAAK,WAAW;AAC9B,aAAO,KAAK,uBAAuB,UAAU,MAAM,KAAK;AAAA,IAC1D;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,aAA6B;AACnC,QAAI,KAAK,WAAW,KAAK,KAAK,KAAK,WAAW,IAAI,KAAK,KAAK,KAAK,EAAE,UAAU,KAAK;AAChF,YAAM,WAAW,KAAK,QAAQ,EAAE;AAChC,YAAM,UAAU,KAAK,WAAW;AAChC,aAAO,KAAK,sBAAsB,UAAU,OAAO;AAAA,IACrD;AAEA,WAAO,KAAK,aAAa;AAAA,EAC3B;AAAA,EAEQ,eAA+B;AACrC,QAAI,OAAO,KAAK,aAAa;AAE7B,WAAO,MAAM;AAEX,UAAI,KAAK,qBAAmB,GAAG;AAE7B,YAAI,KAAK,mCAA0B,KAAK,KAAK,qCAA2B,GAAG;AACzE,gBAAM,WAAW,KAAK,QAAQ,EAAE;AAChC,iBAAO,KAAK,qBAAqB,MAAM,QAAQ;AAAA,QACjD,OAAO;AACL;AAAA,QACF;AAAA,MACF,WAES,KAAK,mCAA0B,GAAG;AAEzC,YAAI,KAAK,mCAA0B,KAAK,KAAK,qCAA2B,GAAG;AACzE,gBAAM,WAAW,KAAK,QAAQ,EAAE;AAChC,iBAAO,KAAK,2BAA2B,MAAM,
QAAQ;AAAA,QACvD,OAAO;AACL;AAAA,QACF;AAAA,MACF,WAES,KAAK,2BAAsB,GAAG;AACrC,cAAM,OAAO,KAAK,eAAe;AACjC,eAAO,KAAK,qBAAqB,MAAM,IAAI;AAAA,MAC7C,WAES,KAAK,+BAAwB,GAAG;AACvC,cAAM,QAAQ,KAAK,gBAAgB;AACnC,YAAI,CAAC,KAAK,+BAAwB,GAAG;AACnC,gBAAM,IAAI,MAAM,wBAAwB;AAAA,QAC1C;AACA,eAAO,KAAK,qBAAqB,MAAM,KAAK;AAAA,MAC9C,OAAO;AACL;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,eAA+B;AACrC,UAAM,QAAQ,KAAK,KAAK;AAGxB,QAAI,KAAK,2BAAsB,GAAG;AAChC,aAAO,KAAK,cAAc,WAAW,MAAM,KAAK,GAAG,UAAU,KAAK;AAAA,IACpE;AAEA,QAAI,KAAK,2BAAsB,GAAG;AAChC,YAAM,QAAQ,MAAM,MAAM,MAAM,GAAG,EAAE;AACrC,aAAO,KAAK,cAAc,OAAO,UAAU,KAAK;AAAA,IAClD;AAEA,QAAI,KAAK,6BAAuB,GAAG;AACjC,YAAM,QACJ,MAAM,UAAU,SACZ,OACA,MAAM,UAAU,UACd,QACA,MAAM,UAAU,SACd,OACA;AACV,aAAO,KAAK,cAAc,OAAO,MAAM,OAAc,KAAK;AAAA,IAC5D;AAEA,QAAI,KAAK,+CAAgC,GAAG;AAC1C,YAAM,eAAoC;AAAA,QACxC,MAAM;AAAA,QACN,OAAO,MAAM;AAAA,QACb,OAAO,MAAM;AAAA,QACb,KAAK,MAAM;AAAA,QACX,MAAM,MAAM;AAAA,QACZ,QAAQ,MAAM;AAAA,MAChB;AACA,aAAO;AAAA,IACT;AAEA,QAAI,KAAK,6CAA+B,GAAG;AACzC,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAGA,QAAI,KAAK,qCAA2B,GAAG;AACrC,aAAO,KAAK,eAAe,MAAM,OAAO,MAAM,KAAK;AAAA,IACrD;AAEA,QAAI,KAAK,2CAA8B,GAAG;AACxC,aAAO,KAAK,eAAe,MAAM,OAAO,SAAS,KAAK;AAAA,IACxD;AAEA,QAAI,KAAK,mDAAkC,GAAG;AAC5C,aAAO,KAAK,eAAe,MAAM,OAAO,aAAa,KAAK;AAAA,IAC5D;AAEA,QAAI,KAAK,2CAA8B,GAAG;AAExC,YAAM,WAAW,MAAM,MAAM,MAAM,GAAG,EAAE;AACxC,aAAO,KAAK,eAAe,UAAU,SAAS,KAAK;AAAA,IACrD;AAGA,QAAI,KAAK,qCAA2B,GAAG;AACrC,aAAO,KAAK,uBAAuB,MAAM,OAAsB,KAAK;AAAA,IACtE;AAGA,QAAI,KAAK,mCAA0B,GAAG;AACpC,aAAO,KAAK,iBAAiB,MAAM,OAAO,KAAK;AAAA,IACjD;AAGA,QAAI,KAAK,2BAAsB,GAAG;AAChC,YAAM,OAAO,KAAK,gBAAgB;AAClC,UAAI,CAAC,KAAK,2BAAsB,GAAG;AACjC,cAAM,IAAI,MAAM,6BAA6B;AAAA,MAC/C;AACA,aAAO;AAAA,IACT;AAGA,QAAI,KAAK,+BAAwB,GAAG;AAClC,aAAO,KAAK,kBAAkB;AAAA,IAChC;AAGA,QAAI,KAAK,2BAAsB,GAAG;AAChC,aAAO,KAAK,mBAAmB;AAAA,IACjC;AAEA,UAAM,IAAI,MAAM,qBAAqB,MAAM,KAAK,EAAE;AAAA,EACpD;AAAA,EAEQ,iBAAmC;AACzC,UAAM,OAAyB,CAAC;AAEhC,QAAI,CAAC,KAAK,2BAAsB,GAAG;AACjC,SAAG;AACD,aAAK,KAAK,KAAK,gBAAgB,CAAC;AAAA,MAClC,SAAS,KAAK,yBAAqB;AAAA,IACrC;AAEA,QAAI,CAAC,KAAK,2BAAsB,GAAG;AACjC,YAAM,IAAI,MAAM,4BAA4B;AAAA,IAC9C;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,oBAAsC;AAC5C,UAAM,WAA6B,CAAC;AACpC,UAAM,QAAQ,KAAK,SAAS,EAAE;AAE9B,QAAI,CAAC,KAAK,+BAAwB,GAAG;AACnC,SAAG;AACD,iBAAS,KAAK,KAAK,gBAAgB,CAAC;AAAA,MACtC,SAAS,KAAK,yBAAqB;AAAA,IACrC;AAEA,QAAI,CAAC,KAAK,+BAAwB,GAAG;AACnC,YAAM,IAAI,MAAM,iCAAiC;AAAA,IACnD;AAEA,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA,KAAK,KAAK,SAAS,EAAE;AAAA,IACvB;AAAA,EACF;AAAA,EAEQ,qBAAwC;AAC9C,UAAM,aAA4D,CAAC;AACnE,UAAM,QAAQ,KAAK,SAAS,EAAE;AAE9B,QAAI,CAAC,KAAK,2BAAsB,GAAG;AACjC,SAAG;AACD,YAAI;AACJ,YAAI,KAAK,2BAAsB,GAAG;AAChC,gBAAM,KAAK,QAAQ,EAAE,MAAM,MAAM,GAAG,EAAE;AAAA,QACxC,WAAW,KAAK,mCAA0B,GAAG;AAC3C,gBAAM,KAAK,QAAQ,EAAE;AAAA,QACvB,OAAO;AACL,gBAAM,IAAI,MAAM,wBAAwB;AAAA,QAC1C;AAEA,YAAI,CAAC,KAAK,yBAAqB,GAAG;AAChC,gBAAM,IAAI,MAAM,gCAAgC;AAAA,QAClD;AAEA,cAAM,QAAQ,KAAK,gBAAgB;AACnC,mBAAW,KAAK,EAAE,KAAK,MAAM,CAAC;AAAA,MAChC,SAAS,KAAK,yBAAqB;AAAA,IACrC;AAEA,QAAI,CAAC,KAAK,2BAAsB,GAAG;AACjC,YAAM,IAAI,MAAM,oCAAoC;AAAA,IACtD;AAEA,WAAO;AAAA,MACL,MAAM;AAAA,MACN,YAAY,WAAW,IAAI,QAAM;AAAA,QAC/B,MAAM;AAAA,QACN,KAAK,EAAE;AAAA,QACP,OAAO,EAAE;AAAA,MACX,EAAE;AAAA,MACF;AAAA,MACA,KAAK,KAAK,SAAS,EAAE;AAAA,IACvB;AAAA,EACF;AAAA,EAEQ,oBAAoB,OAAkC;AAC5D,UAAM,QAAQ,MAAM,MAAM;AAAA,MACxB;AAAA,IACF;AACA,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,MAAM,4BAA4B,MAAM,KAAK,EAAE;AAAA,IAC3D;AAEA,UAAM,QAAQ,WAAW,MAAM,CAAC,CAAC;AACjC,UAAM,OAAO,MAAM,CAAC,EAAE,YAAY;AAElC,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA,KAAK,MAAM;AAAA,MACX,OAAO,MAAM;AAAA,MACb,KAAK,MAAM;AAAA,MACX,MAAM,MAAM;AAAA,MACZ,QAAQ,MAAM;AAAA,IAChB;A
AAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMQ,cACN,OACA,UACA,OACa;AACb,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA,KAAK,MAAM;AAAA,MACX,OAAO,MAAM;AAAA,MACb,KAAK,MAAM;AAAA,MACX,MAAM,MAAM;AAAA,MACZ,QAAQ,MAAM;AAAA,IAChB;AAAA,EACF;AAAA,EAEQ,eAAe,OAAe,MAAoB,OAA4B;AACpF,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA,UAAU;AAAA,MACV,cAAc;AAAA,MACd,OAAO,MAAM;AAAA,MACb,KAAK,MAAM;AAAA,MACX,MAAM,MAAM;AAAA,MACZ,QAAQ,MAAM;AAAA,IAChB;AAAA,EACF;AAAA,EAEQ,uBAAuB,aAA0B,OAAoC;AAC3F,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA,MAAM,MAAM;AAAA,MACZ,OAAO,MAAM;AAAA,MACb,KAAK,MAAM;AAAA,MACX,MAAM,MAAM;AAAA,MACZ,QAAQ,MAAM;AAAA,IAChB;AAAA,EACF;AAAA,EAEQ,iBAAiB,MAAc,OAA8B;AACnE,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA,OAAO,MAAM;AAAA,MACb,KAAK,MAAM;AAAA,MACX,MAAM,MAAM;AAAA,MACZ,QAAQ,MAAM;AAAA,IAChB;AAAA,EACF;AAAA,EAEQ,qBACN,QACA,UACoB;AACpB,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA,UAAU,OAAO,aAAa,WAAW,WAAa,SAAiB,QAAQ;AAAA,MAC/E,OAAO,OAAO;AAAA,MACd,KAAK,KAAK,SAAS,EAAE;AAAA,IACvB;AAAA,EACF;AAAA,EAEQ,2BACN,QACA,UAC0B;AAC1B,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA,OAAO,OAAO;AAAA,MACd,KAAK,KAAK,SAAS,EAAE;AAAA,IACvB;AAAA,EACF;AAAA,EAEQ,uBACN,UACA,MACA,OACsB;AACtB,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA,OAAO,KAAK;AAAA,MACZ,KAAK,MAAM;AAAA,IACb;AAAA,EACF;AAAA,EAEQ,sBAAsB,UAAkB,SAA8C;AAC5F,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,MACR,OAAO,KAAK,SAAS,EAAE;AAAA,MACvB,KAAK,QAAQ;AAAA,IACf;AAAA,EACF;AAAA,EAEQ,qBAAqB,QAAwB,MAA4C;AAC/F,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA,WAAW;AAAA,MACX,OAAO,OAAO;AAAA,MACd,KAAK,KAAK,SAAS,EAAE;AAAA,IACvB;AAAA,EACF;AACF;AAYO,SAAS,gBAAgB,OAAsC;AACpE,QAAM,SAAS,IAAI,iBAAiB;AACpC,SAAO,OAAO,MAAM,KAAK;AAC3B;;;AC3gBO,SAAS,aAAa,OAAsB,UAAqC;AACtF,UAAQ,MAAM,MAAM;AAAA,IAClB,KAAK;AACH,aAAO,eAAe,KAAK;AAAA,IAC7B,KAAK;AACH,aAAO,gBAAgB,OAAO,QAAQ;AAAA,IACxC,KAAK;AACH,aAAO,iBAAiB,KAAK;AAAA,IAC/B,KAAK;AACH,aAAO,oBAAoB,OAAO,QAAQ;AAAA,IAC5C,KAAK;AACH,aAAO,kBAAkB,KAAK;AAAA,IAChC;AAEE,YAAM,cAAqB;AAC3B,YAAM,IAAI,MAAM,gCAAiC,YAA8B,IAAI,EAAE;AAAA,EACzF;AACF;AAKO,SAAS,eAAe,OAAkC;AAC/D,QAAM,SAAsB;AAAA,IAC1B,MAAM;AAAA,IACN,OAAO,MAAM;AAAA,EACf;AAGA,MAAI,MAAM,UAAU;AAClB,WAAO,EAAE,GAAG,QAAQ,UAAU,MAAM,SAAS;AAAA,EAC/C;AAEA,SAAO;AACT;AAQO,SAAS,gBAAgB,OAAsB,UAAmC;AAGvF,MAAI,YAAY,MAAM,MAAM,WAAW,GAAG,KAAK,aAAa,KAAK,MAAM,MAAM,MAAM,CAAC,CAAC,GAAG;AACtF,aAAS;AAAA,MACP,cAAc,MAAM,KAAK;AAAA,IAG3B;AAAA,EACF;AAEA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,OAAO,MAAM;AAAA,IACb,UAAU,MAAM;AAAA,IAChB,cAAc,MAAM;AAAA,EACtB;AACF;AAKO,SAAS,iBAAiB,OAA6C;AAC5E,SAAO;AAAA,IACL,MAAM;AAAA,IACN,aAAa,MAAM;AAAA,IACnB,MAAM,MAAM;AAAA,EACd;AACF;AASO,SAAS,oBACd,OACA,UACoB;AACpB,SAAO;AAAA,IACL,MAAM;AAAA,IACN,QAAQ,aAAa,MAAM,QAAQ,QAAQ;AAAA,IAC3C,UAAU,MAAM;AAAA,EAClB;AACF;AAOO,SAAS,kBAAkB,OAAwC;AACxE,QAAM,SAAS,gBAAgB,MAAM,GAAG;AAExC,MAAI,CAAC,OAAO,WAAW,CAAC,OAAO,MAAM;AAEnC,UAAM,aAA6B;AAAA,MACjC,MAAM;AAAA,MACN,MAAM,MAAM;AAAA,IACd;AACA,WAAO;AAAA,EACT;AAEA,SAAO,OAAO;AAChB;;;ACjGA,SAAS,QAAQ,MAA2B,MAA+C;AACzF,SAAO,KAAK,MAAM,IAAI,IAAI;AAC5B;AASA,SAAS,iBACP,MACA,MACA,UAC4B;AAC5B,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,SAAO,QAAQ,aAAa,OAAO,QAAQ,IAAI;AACjD;AAMA,SAASC,mBACP,MACA,OAAyB,CAAC,GAC1B,WACA,UAAqE,CAAC,GACzD;AACb,QAAM,SAAsB;AAAA,IAC1B,MAAM;AAAA,IACN;AAAA,IACA;AAAA,EACF;AAGA,MAAI,aAAa,OAAO,KAAK,SAAS,EAAE,SAAS,GAAG;AAClD,IAAC,OAAyD,YAAY;AAAA,EACxE;AAEA,MAAI,QAAQ,YAAY;AACtB,IAAC,OAAmC,aAAa,QAAQ;AAAA,EAC3D;AAEA,MAAI,QAAQ,gBAAgB;AAC1B,IAAC,OAA8C,iBAAiB,QAAQ;AAAA,EAC1E;AAEA,SAAO;AACT;AAYA,IAAM,eAA8B;AAAA,EAClC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAChD,UAAM,cAAc,iBAAiB,MAAM,aAAa;AACxD,UAAM,WAAW,iBAAiB,MAAM,UAAU;AAElD,UAAM,OAAyB,UAAU,CAAC,OAAO,IAAI,CAA
C;AACtD,UAAM,YAA4C,CAAC;AAEnD,QAAI,YAAa,WAAU,IAAI,IAAI;AACnC,QAAI,SAAU,WAAU,KAAK,IAAI;AAEjC,WAAOA,mBAAkB,UAAU,MAAM,SAAS;AAAA,EACpD;AACF;AAQA,IAAM,YAA2B;AAAA,EAC/B,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAChD,UAAM,cAAc,iBAAiB,MAAM,aAAa;AAExD,UAAM,OAAyB,UAAU,CAAC,OAAO,IAAI,CAAC;AACtD,UAAM,YAA4C,CAAC;AAEnD,QAAI,YAAa,WAAU,IAAI,IAAI;AAEnC,WAAOA,mBAAkB,OAAO,MAAM,SAAS;AAAA,EACjD;AACF;AAQA,IAAM,eAA8B;AAAA,EAClC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAChD,UAAM,SAAS,iBAAiB,MAAM,QAAQ;AAE9C,UAAM,OAAyB,UAAU,CAAC,OAAO,IAAI,CAAC;AACtD,UAAM,YAA4C,CAAC;AAEnD,QAAI,OAAQ,WAAU,MAAM,IAAI;AAEhC,WAAOA,mBAAkB,UAAU,MAAM,SAAS;AAAA,EACpD;AACF;AAWA,IAAM,YAA2B;AAAA,EAC/B,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,cAAc,iBAAiB,MAAM,aAAa;AACxD,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAEhD,UAAM,OAAyB,CAAC;AAChC,UAAM,YAA4C,CAAC;AAGnD,QAAI,aAAa;AACf,WAAK,KAAK,WAAW;AAAA,IACvB;AAGA,QAAI,QAAS,WAAU,IAAI,IAAI;AAE/B,WAAOA,mBAAkB,OAAO,MAAM,SAAS;AAAA,EACjD;AACF;AAKA,IAAM,aAA4B;AAAA,EAChC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,cAAc,iBAAiB,MAAM,aAAa;AACxD,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAChD,UAAM,WAAW,iBAAiB,MAAM,UAAU;AAElD,UAAM,OAAyB,CAAC;AAChC,UAAM,YAA4C,CAAC;AAGnD,UAAM,SAAS,eAAe;AAC9B,QAAI,OAAQ,MAAK,KAAK,MAAM;AAC5B,QAAI,SAAU,WAAU,MAAM,IAAI;AAElC,WAAOA,mBAAkB,QAAQ,MAAM,SAAS;AAAA,EAClD;AACF;AAKA,IAAM,aAA4B;AAAA,EAChC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,cAAc,iBAAiB,MAAM,aAAa;AACxD,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAChD,UAAM,WAAW,iBAAiB,MAAM,UAAU;AAElD,UAAM,OAAyB,CAAC;AAChC,UAAM,YAA4C,CAAC;AAEnD,UAAM,SAAS,eAAe;AAC9B,QAAI,OAAQ,MAAK,KAAK,MAAM;AAC5B,QAAI,SAAU,WAAU,MAAM,IAAI;AAElC,WAAOA,mBAAkB,QAAQ,MAAM,SAAS;AAAA,EAClD;AACF;AAQA,IAAM,kBAAiC;AAAA,EACrC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,cAAc,iBAAiB,MAAM,aAAa;AACxD,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAChD,UAAM,WAAW,iBAAiB,MAAM,UAAU;AAElD,UAAM,OAAyB,CAAC;AAChC,UAAM,YAA4C,CAAC;AAEnD,UAAM,SAAS,eAAe;AAC9B,QAAI,OAAQ,MAAK,KAAK,MAAM;AAC5B,QAAI,SAAU,WAAU,IAAI,IAAI;AAEhC,WAAOA,mBAAkB,aAAa,MAAM,SAAS;AAAA,EACvD;AACF;AAQA,IAAM,kBAAiC;AAAA,EACrC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,cAAc,iBAAiB,MAAM,aAAa;AACxD,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAChD,UAAM,WAAW,iBAAiB,MAAM,UAAU;AAElD,UAAM,OAAyB,CAAC;AAChC,UAAM,YAA4C,CAAC;AAEnD,UAAM,SAAS,eAAe;AAC9B,QAAI,OAAQ,MAAK,KAAK,MAAM;AAC5B,QAAI,SAAU,WAAU,IAAI,IAAI;AAEhC,WAAOA,mBAAkB,aAAa,MAAM,SAAS;AAAA,EACvD;AACF;AAKA,IAAM,aAA4B;AAAA,EAChC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,WAAW,iBAAiB,MAAM,UAAU;AAElD,UAAM,OAAyB,WAAW,CAAC,QAAQ,IAAI,CAAC;AAExD,WAAOA,mBAAkB,QAAQ,MAAM,QAAW,EAAE,YAAY,KAAK,CAAC;AAAA,EACxE;AACF;AAQA,IAAM,YAA2B;AAAA,EAC/B,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAEhD,UAAM,OAAyB,CAAC;AAChC,QAAI,QAAS,MAAK,KAAK,OAAO;AAE9B,WAAOA,mBAAkB,OAAO,IAAI;AAAA,EACtC;AACF;AAQA,IAAM,YAA2B;AAAA,EAC/B,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAChD,UAAM,cAAc,iBAAiB,MAAM,aAAa;AACxD,UAAM,SAAS,QAAQ,MAAM,QAAQ;AAErC,UAAM,OAAyB,UAAU,CAAC,OAAO,IAAI,CAAC;AACtD,UAAM,YAA4C,CAAC;AAEnD,QAAI,aAAa;AAEf,YAAM,OAAO,QAAQ,SAAS,YAAY,OAAO,OAAO,KAAK,IAAI;AACjE,gBAAU,IAAI,IAAI;AAAA,IACpB;AAEA,WAAOA,mBAAkB,OAAO,MAAM,SAAS;AAAA,EACjD;AACF;AAQA,IAAM,cAA6B;AAAA,EACjC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,SAAS,iBAAiB,MAAM,QAAQ;AAC9C,UAAM,SAAS,iBAAiB,MAAM,QAAQ;AAC9C,UAAM,eAAe,iBAAiB,MAAM,cAAc;AAC1D,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAEhD,UAAM,OAAyB,SAAS,CAAC,MAAM,IAAI,CAAC;AACpD,UAAM,YAA4C,CAAC;AAEnD,QAAI,OAAQ,WAAU,MAAM,IAAI;AAChC,QAAI,aAAc,WAAU,IAAI,IAAI;AACpC,QAAI,QAAS,WAAU,MAAM,IAAI;AAEjC,WAAOA,mBAAkB,SAAS,MAAM,WAAW,EAAE,YAAY,KAAK,CAAC;AAAA,EACzE;AACF;AAQA,IAAM,eAA8B;AAAA,EAClC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,UAAU,iBAAiB,MAA
M,SAAS;AAChD,UAAM,cAAc,iBAAiB,MAAM,aAAa;AAExD,UAAM,OAAyB,UAAU,CAAC,OAAO,IAAI,CAAC;AACtD,UAAM,YAA4C,CAAC;AAEnD,QAAI,YAAa,WAAU,IAAI,IAAI;AAEnC,WAAOA,mBAAkB,UAAU,MAAM,SAAS;AAAA,EACpD;AACF;AAQA,IAAM,gBAA+B;AAAA,EACnC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAChD,UAAM,cAAc,iBAAiB,MAAM,aAAa;AAExD,UAAM,OAAyB,UAAU,CAAC,OAAO,IAAI,CAAC;AACtD,UAAM,YAA4C,CAAC;AAEnD,QAAI,YAAa,WAAU,IAAI,IAAI;AAEnC,WAAOA,mBAAkB,WAAW,MAAM,SAAS;AAAA,EACrD;AACF;AAQA,IAAM,gBAA+B;AAAA,EACnC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,QAAQ,iBAAiB,MAAM,OAAO;AAC5C,UAAM,cAAc,iBAAiB,MAAM,aAAa;AAExD,UAAM,OAAyB,QAAQ,CAAC,KAAK,IAAI,CAAC;AAClD,UAAM,YAA4C,CAAC;AAEnD,QAAI,YAAa,WAAU,IAAI,IAAI;AAEnC,WAAOA,mBAAkB,WAAW,MAAM,SAAS;AAAA,EACrD;AACF;AAQA,IAAM,aAA4B;AAAA,EAChC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,QAAQ,iBAAiB,MAAM,OAAO;AAC5C,UAAM,cAAc,iBAAiB,MAAM,aAAa;AACxD,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAEhD,UAAM,OAAyB,QAAQ,CAAC,KAAK,IAAI,CAAC;AAClD,UAAM,YAA4C,CAAC;AAEnD,QAAI,YAAa,WAAU,IAAI,IAAI;AACnC,QAAI,QAAS,WAAU,QAAQ,IAAI;AAEnC,WAAOA,mBAAkB,QAAQ,MAAM,SAAS;AAAA,EAClD;AACF;AAQA,IAAM,WAA0B;AAAA,EAC9B,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,SAAS,iBAAiB,MAAM,QAAQ;AAC9C,UAAM,cAAc,iBAAiB,MAAM,aAAa;AAExD,UAAM,OAAyB,CAAC;AAChC,UAAM,YAA4C,CAAC;AAGnD,QAAI,OAAQ,MAAK,KAAK,MAAM;AAC5B,QAAI,YAAa,WAAU,IAAI,IAAI;AAEnC,WAAOA,mBAAkB,MAAM,MAAM,SAAS;AAAA,EAChD;AACF;AAQA,IAAM,mBAAkC;AAAA,EACtC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAChD,UAAM,OAAO,iBAAiB,MAAM,MAAM;AAC1C,UAAM,WAAW,iBAAiB,MAAM,UAAU;AAClD,UAAM,cAAc,iBAAiB,MAAM,aAAa;AAExD,UAAM,OAAyB,UAAU,CAAC,OAAO,IAAI,CAAC;AACtD,UAAM,YAA4C,CAAC;AAEnD,QAAI,KAAM,WAAU,IAAI,IAAI;AAC5B,QAAI,SAAU,WAAU,MAAM,IAAI;AAClC,QAAI,YAAa,WAAU,IAAI,IAAI;AAEnC,WAAOA,mBAAkB,cAAc,MAAM,SAAS;AAAA,EACxD;AACF;AAQA,IAAM,cAA6B;AAAA,EACjC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,cAAc,iBAAiB,MAAM,aAAa;AACxD,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAEhD,UAAM,OAAyB,CAAC;AAChC,UAAM,YAA4C,CAAC;AAGnD,UAAM,SAAS,eAAe;AAC9B,QAAI,OAAQ,MAAK,KAAK,MAAM;AAE5B,WAAOA,mBAAkB,SAAS,MAAM,SAAS;AAAA,EACnD;AACF;AAQA,IAAM,aAA4B;AAAA,EAChC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,cAAc,iBAAiB,MAAM,aAAa;AACxD,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAEhD,UAAM,OAAyB,CAAC;AAChC,UAAM,SAAS,eAAe;AAC9B,QAAI,OAAQ,MAAK,KAAK,MAAM;AAE5B,WAAOA,mBAAkB,QAAQ,IAAI;AAAA,EACvC;AACF;AAQA,IAAM,YAA2B;AAAA,EAC/B,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,SAAS,iBAAiB,MAAM,QAAQ;AAC9C,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAEhD,UAAM,OAAyB,CAAC;AAChC,UAAM,QAAQ,UAAU;AACxB,QAAI,MAAO,MAAK,KAAK,KAAK;AAE1B,WAAOA,mBAAkB,OAAO,IAAI;AAAA,EACtC;AACF;AAQA,IAAM,aAA4B;AAAA,EAChC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAChD,UAAM,SAAS,iBAAiB,MAAM,QAAQ;AAE9C,UAAM,OAAyB,UAAU,CAAC,OAAO,IAAI,CAAC;AACtD,UAAM,YAA4C,CAAC;AAEnD,QAAI,OAAQ,WAAU,MAAM,IAAI;AAEhC,WAAOA,mBAAkB,QAAQ,MAAM,SAAS;AAAA,EAClD;AACF;AAQA,IAAM,aAA4B;AAAA,EAChC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAEhD,UAAM,OAAyB,UAAU,CAAC,OAAO,IAAI,CAAC;AAEtD,WAAOA,mBAAkB,QAAQ,IAAI;AAAA,EACvC;AACF;AAQA,IAAM,eAA8B;AAAA,EAClC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAEhD,UAAM,OAAyB,UAAU,CAAC,OAAO,IAAI,CAAC;AAEtD,WAAOA,mBAAkB,UAAU,IAAI;AAAA,EACzC;AACF;AAQA,IAAM,aAA4B;AAAA,EAChC,QAAQ;AAAA,EACR,MAAM,OAAO,UAAU;AACrB,WAAOA,mBAAkB,QAAQ,CAAC,CAAC;AAAA,EACrC;AACF;AAQA,IAAM,cAA6B;AAAA,EACjC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAEhD,UAAM,OAAyB,UAAU,CAAC,OAAO,IAAI,CAAC;AAEtD,WAAOA,mBAAkB,SAAS,IAAI;AAAA,EACxC;AACF;AAQA,IAAM,eAA8B;AAAA,EAClC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,cAAc,iBAAiB,MAAM,aAAa;AACxD,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAEhD,UAAM,OAAyB,CAAC;AAChC,UAAM,SAAS,eAAe
;AAC9B,QAAI,OAAQ,MAAK,KAAK,MAAM;AAE5B,WAAOA,mBAAkB,UAAU,MAAM,QAAW,EAAE,YAAY,KAAK,CAAC;AAAA,EAC1E;AACF;AAYA,IAAM,aAA4B;AAAA,EAChC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAChD,UAAM,SAAS,iBAAiB,MAAM,QAAQ;AAC9C,UAAM,cAAc,iBAAiB,MAAM,aAAa;AACxD,UAAM,QAAQ,iBAAiB,MAAM,OAAO;AAE5C,UAAM,OAAyB,CAAC;AAChC,UAAM,YAA4C,CAAC;AAEnD,QAAI,QAAS,MAAK,KAAK,OAAO;AAC9B,QAAI,OAAQ,MAAK,KAAK,MAAM;AAC5B,QAAI,YAAa,WAAU,IAAI,IAAI;AACnC,QAAI,MAAO,WAAU,MAAM,IAAI;AAE/B,WAAOA,mBAAkB,QAAQ,MAAM,SAAS;AAAA,EAClD;AACF;AAQA,IAAM,cAA6B;AAAA,EACjC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,SAAS,iBAAiB,MAAM,QAAQ;AAC9C,UAAM,cAAc,iBAAiB,MAAM,aAAa;AACxD,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAEhD,UAAM,OAAyB,CAAC;AAChC,UAAM,YAA4C,CAAC;AAEnD,UAAM,UAAU,UAAU;AAC1B,QAAI,QAAS,MAAK,KAAK,OAAO;AAC9B,QAAI,YAAa,WAAU,IAAI,IAAI;AAEnC,WAAOA,mBAAkB,SAAS,MAAM,SAAS;AAAA,EACnD;AACF;AAQA,IAAM,cAA6B;AAAA,EACjC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,SAAS,iBAAiB,MAAM,QAAQ;AAC9C,UAAM,cAAc,iBAAiB,MAAM,aAAa;AACxD,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAEhD,UAAM,OAAyB,CAAC;AAChC,UAAM,YAA4C,CAAC;AAEnD,UAAM,SAAS,UAAU;AACzB,QAAI,OAAQ,MAAK,KAAK,MAAM;AAC5B,QAAI,YAAa,WAAU,MAAM,IAAI;AAErC,WAAOA,mBAAkB,SAAS,MAAM,SAAS;AAAA,EACnD;AACF;AAQA,IAAM,aAA4B;AAAA,EAChC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAEhD,UAAM,OAAyB,UAAU,CAAC,OAAO,IAAI,CAAC;AAEtD,WAAOA,mBAAkB,QAAQ,IAAI;AAAA,EACvC;AACF;AAQA,IAAM,gBAA+B;AAAA,EACnC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAChD,UAAM,cAAc,iBAAiB,MAAM,aAAa;AACxD,UAAM,SAAS,iBAAiB,MAAM,QAAQ;AAE9C,UAAM,OAAyB,CAAC;AAChC,UAAM,YAA4C,CAAC;AAEnD,QAAI,QAAS,MAAK,KAAK,OAAO;AAC9B,UAAM,UAAU,eAAe;AAC/B,QAAI,QAAS,WAAU,IAAI,IAAI;AAE/B,WAAOA,mBAAkB,WAAW,MAAM,SAAS;AAAA,EACrD;AACF;AAQA,IAAM,aAA4B;AAAA,EAChC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,cAAc,iBAAiB,MAAM,aAAa;AACxD,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAEhD,UAAM,OAAyB,CAAC;AAChC,UAAM,SAAS,eAAe;AAC9B,QAAI,OAAQ,MAAK,KAAK,MAAM;AAE5B,WAAOA,mBAAkB,QAAQ,IAAI;AAAA,EACvC;AACF;AAQA,IAAM,WAA0B;AAAA,EAC9B,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAEhD,UAAM,OAAyB,UAAU,CAAC,OAAO,IAAI,CAAC;AAEtD,WAAOA,mBAAkB,MAAM,IAAI;AAAA,EACrC;AACF;AAQA,IAAM,cAA6B;AAAA,EACjC,QAAQ;AAAA,EACR,MAAM,OAAO,UAAU;AACrB,WAAOA,mBAAkB,SAAS,CAAC,CAAC;AAAA,EACtC;AACF;AAQA,IAAM,WAA0B;AAAA,EAC9B,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,YAAY,iBAAiB,MAAM,WAAW;AAEpD,UAAM,OAAyB,YAAY,CAAC,SAAS,IAAI,CAAC;AAE1D,WAAOA,mBAAkB,MAAM,IAAI;AAAA,EACrC;AACF;AAQA,IAAM,eAA8B;AAAA,EAClC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,YAAY,iBAAiB,MAAM,WAAW;AAEpD,UAAM,OAAyB,YAAY,CAAC,SAAS,IAAI,CAAC;AAE1D,WAAOA,mBAAkB,UAAU,IAAI;AAAA,EACzC;AACF;AAQA,IAAM,eAA8B;AAAA,EAClC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,WAAW,iBAAiB,MAAM,UAAU;AAClD,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAEhD,UAAM,OAAyB,CAAC;AAChC,UAAM,QAAQ,YAAY;AAC1B,QAAI,MAAO,MAAK,KAAK,KAAK;AAE1B,WAAOA,mBAAkB,UAAU,IAAI;AAAA,EACzC;AACF;AAQA,IAAM,YAA2B;AAAA,EAC/B,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAChD,UAAM,SAAS,iBAAiB,MAAM,QAAQ;AAE9C,UAAM,OAAyB,UAAU,CAAC,OAAO,IAAI,CAAC;AACtD,UAAM,YAA4C,CAAC;AAEnD,QAAI,OAAQ,WAAU,IAAI,IAAI;AAE9B,WAAOA,mBAAkB,OAAO,MAAM,SAAS;AAAA,EACjD;AACF;AAQA,IAAM,cAA6B;AAAA,EACjC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,YAAY,iBAAiB,MAAM,WAAW;AAEpD,UAAM,OAAyB,YAAY,CAAC,SAAS,IAAI,CAAC;AAE1D,WAAOA,mBAAkB,SAAS,IAAI;AAAA,EACxC;AACF;AAQA,IAAM,iBAAgC;AAAA,EACpC,QAAQ;AAAA,EACR,MAAM,OAAO,UAAU;AACrB,WAAOA,mBAAkB,YAAY,CAAC,CAAC;AAAA,EACzC;AACF;AAQA,IAAM,gBAA+B;AAAA,EACnC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAChD,UAAM,SAAS,iBAAiB,MAAM,QAAQ;AAE9C,UAAM,OAAyB,UAAU,CAAC,OAAO,IAAI,CAAC;AACtD,UAAM,YAA4C,CAAC;AAEnD,
QAAI,OAAQ,WAAU,IAAI,IAAI;AAE9B,WAAOA,mBAAkB,WAAW,MAAM,SAAS;AAAA,EACrD;AACF;AAQA,IAAM,aAA4B;AAAA,EAChC,QAAQ;AAAA,EACR,MAAM,OAAO,UAAU;AACrB,WAAOA,mBAAkB,QAAQ,CAAC,CAAC;AAAA,EACrC;AACF;AAQA,IAAM,iBAAgC;AAAA,EACpC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAEhD,UAAM,OAAyB,UAAU,CAAC,OAAO,IAAI,CAAC;AAEtD,WAAOA,mBAAkB,YAAY,IAAI;AAAA,EAC3C;AACF;AAQA,IAAM,gBAA+B;AAAA,EACnC,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,UAAU,iBAAiB,MAAM,SAAS;AAChD,UAAM,cAAc,iBAAiB,MAAM,aAAa;AAExD,UAAM,OAAyB,UAAU,CAAC,OAAO,IAAI,CAAC;AACtD,UAAM,YAA4C,CAAC;AAEnD,QAAI,YAAa,WAAU,IAAI,IAAI;AAEnC,WAAOA,mBAAkB,WAAW,MAAM,SAAS;AAAA,EACrD;AACF;AAQA,IAAM,WAA0B;AAAA,EAC9B,QAAQ;AAAA,EACR,MAAM,MAAM,UAAU;AACpB,UAAM,QAAQ,iBAAiB,MAAM,OAAO;AAC5C,UAAM,SAAS,iBAAiB,MAAM,QAAQ;AAE9C,UAAM,OAAyB,QAAQ,CAAC,KAAK,IAAI,CAAC;AAClD,UAAM,YAA4C,CAAC;AAEnD,QAAI,OAAQ,WAAU,MAAM,IAAI;AAEhC,WAAOA,mBAAkB,MAAM,MAAM,SAAS;AAAA,EAChD;AACF;AAMA,IAAM,UAA0C,oBAAI,IAAI;AAAA;AAAA,EAEtD,CAAC,UAAU,YAAY;AAAA,EACvB,CAAC,OAAO,SAAS;AAAA,EACjB,CAAC,UAAU,YAAY;AAAA,EACvB,CAAC,OAAO,SAAS;AAAA,EACjB,CAAC,QAAQ,UAAU;AAAA,EACnB,CAAC,QAAQ,UAAU;AAAA,EACnB,CAAC,aAAa,eAAe;AAAA,EAC7B,CAAC,aAAa,eAAe;AAAA,EAC7B,CAAC,QAAQ,UAAU;AAAA,EACnB,CAAC,OAAO,SAAS;AAAA,EACjB,CAAC,OAAO,SAAS;AAAA,EACjB,CAAC,SAAS,WAAW;AAAA;AAAA,EAErB,CAAC,UAAU,YAAY;AAAA,EACvB,CAAC,WAAW,aAAa;AAAA,EACzB,CAAC,OAAO,SAAS;AAAA,EACjB,CAAC,QAAQ,UAAU;AAAA;AAAA,EAEnB,CAAC,WAAW,aAAa;AAAA,EACzB,CAAC,QAAQ,UAAU;AAAA,EACnB,CAAC,MAAM,QAAQ;AAAA;AAAA,EAEf,CAAC,MAAM,QAAQ;AAAA,EACf,CAAC,cAAc,gBAAgB;AAAA,EAC/B,CAAC,SAAS,WAAW;AAAA,EACrB,CAAC,QAAQ,UAAU;AAAA;AAAA,EAEnB,CAAC,QAAQ,UAAU;AAAA,EACnB,CAAC,UAAU,YAAY;AAAA,EACvB,CAAC,QAAQ,UAAU;AAAA,EACnB,CAAC,SAAS,WAAW;AAAA,EACrB,CAAC,UAAU,YAAY;AAAA;AAAA,EAEvB,CAAC,QAAQ,UAAU;AAAA,EACnB,CAAC,SAAS,WAAW;AAAA,EACrB,CAAC,SAAS,WAAW;AAAA,EACrB,CAAC,WAAW,aAAa;AAAA;AAAA,EAEzB,CAAC,QAAQ,UAAU;AAAA,EACnB,CAAC,QAAQ,UAAU;AAAA,EACnB,CAAC,WAAW,aAAa;AAAA;AAAA,EAEzB,CAAC,MAAM,QAAQ;AAAA,EACf,CAAC,SAAS,WAAW;AAAA;AAAA,EAErB,CAAC,MAAM,QAAQ;AAAA,EACf,CAAC,UAAU,YAAY;AAAA;AAAA,EAEvB,CAAC,UAAU,YAAY;AAAA,EACvB,CAAC,OAAO,SAAS;AAAA,EACjB,CAAC,SAAS,WAAW;AAAA,EACrB,CAAC,YAAY,cAAc;AAAA;AAAA,EAE3B,CAAC,QAAQ,UAAU;AAAA,EACnB,CAAC,YAAY,cAAc;AAAA,EAC3B,CAAC,WAAW,aAAa;AAC3B,CAAC;AAQM,SAAS,iBAAiB,QAA+C;AAC9E,SAAO,QAAQ,IAAI,MAAM;AAC3B;AAOO,SAAS,sBAAsB,QAA6B;AACjE,UAAQ,IAAI,OAAO,QAAQ,MAAM;AACnC;AAKO,SAAS,uBAAuD;AACrE,SAAO,IAAI,IAAI,OAAO;AACxB;;;AC5+BO,IAAM,aAAN,MAAiB;AAAA,EAMtB,YAAY,WAA8B,CAAC,GAAG;AAF9C;AAAA;AAAA;AAAA,SAAO,WAAqB,CAAC;AAAA,EAI7B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,MAA6B;AACjC,YAAQ,KAAK,MAAM;AAAA,MACjB,KAAK;AACH,eAAO,KAAK,aAAa,IAA2B;AAAA,MACtD,KAAK;AACH,eAAO,KAAK,kBAAkB,IAAgC;AAAA,MAChE,KAAK;AACH,eAAO,KAAK,iBAAiB,IAA+B;AAAA,MAC9D,KAAK;AACH,eAAO,KAAK,cAAc,IAA4B;AAAA,MACxD,KAAK;AACH,eAAO,KAAK,UAAU,IAAwB;AAAA,MAChD;AACE,cAAM,IAAI,MAAM,+BAAgC,KAAsB,IAAI,EAAE;AAAA,IAChF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,MAAwC;AAC3D,UAAM,SAAS,iBAAiB,KAAK,MAAM;AAE3C,QAAI,QAAQ;AAEV,YAAM,SAAS,OAAO,MAAM,MAAM,IAAI;AAGtC,UAAI,SAAS,UAAU,cAAc,QAAQ;AAE3C,cAAM,eAAe;AACrB,aAAK,SAAS,KAAK,GAAG,aAAa,QAAQ;AAC3C,eAAO,aAAa;AAAA,MACtB,OAAO;AAEL,eAAO;AAAA,MACT;AAAA,IACF;AAGA,WAAO,KAAK,oBAAoB,IAAI;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,oBAAoB,MAAwC;AAClE,UAAM,OAAyB,CAAC;AAChC,UAAM,YAA4C,CAAC;AAInD,UAAM,WAA2B,CAAC,WAAW,UAAU,UAAU;AACjE,UAAM,gBAAgC,CAAC,eAAe,YAAY,UAAU,OAAO;AAGnF,eAAW,QAAQ,UAAU;AAC3B,YAAM,QAAQ,KAAK,MAAM,IAAI,IAAI;AACjC,UAAI,OAAO;AACT,aAAK,KAAK,aAAa,KAAK,CAAC;AAAA,MAC/B;AAAA,IACF;AAGA,eAAW,QAAQ,eAAe;AAChC,YAAM,QAAQ,KAAK,MAAM,IAAI,IAAI;AACjC,UAAI,OAAO;AAET,cAAM,cAAc,KAAK,kBAAkB,IAAI;AAC/C,kBAAU,WAAW,IAAI,aAAa,KAAK;AAAA,MAC7C;
AAAA,IACF;AAEA,UAAM,SAAsB;AAAA,MAC1B,MAAM;AAAA,MACN,MAAM,KAAK;AAAA,MACX;AAAA,IACF;AAGA,QAAI,OAAO,KAAK,SAAS,EAAE,SAAS,GAAG;AACrC,aAAO,EAAE,GAAG,QAAQ,UAAU;AAAA,IAChC;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,MAA4B;AACpD,UAAM,UAAiD;AAAA,MACrD,aAAa;AAAA,MACb,UAAU;AAAA,MACV,QAAQ;AAAA,MACR,WAAW;AAAA,MACX,QAAQ;AAAA,MACR,OAAO;AAAA,IACT;AACA,WAAO,QAAQ,IAAI,KAAK;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,MAAkD;AAE1E,UAAM,aAAa,KAAK,MAAM,IAAI,OAAO;AACzC,QAAI;AACJ,QAAI;AAEJ,QAAI,YAAY,SAAS,WAAW;AAClC,YAAM,WAAW,OAAO,WAAW,KAAK;AAExC,UAAI,SAAS,SAAS,GAAG,KAAK,SAAS,SAAS,MAAM,GAAG;AACvD,iBAAS,SAAS,MAAM,aAAa,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC;AACxD,gBAAQ,OAAO,CAAC;AAAA,MAClB,OAAO;AACL,gBAAQ;AAAA,MACV;AAAA,IACF,WAAW,YAAY,SAAS,aAAa;AAC3C,cAAQ,WAAW;AAAA,IACrB,OAAO;AACL,cAAQ;AAAA,IACV;AAGA,UAAM,WAAW,KAAK,KAAK,IAAI,WAAS,KAAK,MAAM,KAAK,CAAC;AAGzD,UAAM,YAAY,KAAK,MAAM,IAAI,QAAQ;AACzC,QAAI;AACJ,QAAI;AAEJ,QAAI,WAAW,SAAS,YAAY;AAClC,iBAAW,UAAU;AACrB,eAAS,UAAU;AAAA,IACrB,WAAW,WAAW,SAAS,aAAa;AAC1C,eAAS,UAAU;AAAA,IACrB,WAAW,WAAW,SAAS,WAAW;AACxC,eAAS,OAAO,UAAU,KAAK;AAAA,IACjC;AAGA,UAAM,iBAAiB,KAAK,MAAM,IAAI,WAAW;AACjD,UAAM,YAAY,iBAAiB,aAAa,cAAc,IAAI;AAGlE,UAAM,mBAAmB,KAAK,MAAM,IAAI,aAAa;AACrD,UAAM,cAAc,mBAAmB,aAAa,gBAAgB,IAAI;AAGxE,UAAM,YAAY,KAAK;AAGvB,QAAI,gBAAgB;AACpB,QAAI,WAAW,MAAM;AACnB,YAAM,UAAU,UAAU;AAC1B,UAAI,QAAQ,SAAS,cAAc,CAAC,UAAU;AAC5C,wBAAgB,QAAQ;AAAA,MAC1B;AAAA,IACF;AAGA,UAAM,OAAO,KAAK,iBAAiB,CAAC,GAAG,KAAK,cAAc,IAAI;AAG9D,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA,GAAI,UAAU,OAAO,SAAS,IAAI,EAAE,OAAO,IAAI,CAAC;AAAA,MAChD,GAAI,gBAAgB,EAAE,UAAU,cAAc,IAAI,CAAC;AAAA,MACnD,GAAI,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,MAC3B,GAAI,YAAY,EAAE,UAAgC,IAAI,CAAC;AAAA,MACvD,GAAI,cAAc,EAAE,YAAY,IAAI,CAAC;AAAA,MACrC,GAAI,QAAQ,KAAK,SAAS,IAAI,EAAE,MAAM,QAAQ,KAAK,IAAI,CAAC;AAAA,IAC1D;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYQ,iBAAiB,MAA4C;AACnE,UAAM,iBAAiB,KAAK,MAAM,IAAI,WAAW;AACjD,QAAI,CAAC,gBAAgB;AACnB,YAAM,IAAI,MAAM,oCAAoC;AAAA,IACtD;AAEA,UAAM,YAAY,aAAa,cAAc;AAC7C,UAAM,aAAa,KAAK,WAAW,IAAI,WAAS,KAAK,MAAM,KAAK,CAAC;AACjE,UAAM,aAAa,KAAK,YAAY,IAAI,WAAS,KAAK,MAAM,KAAK,CAAC;AAGlE,UAAM,OAAyB;AAAA,MAC7B;AAAA;AAAA,MAEA;AAAA,QACE,MAAM;AAAA,QACN,UAAU;AAAA,MACZ;AAAA,IACF;AAGA,QAAI,cAAc,WAAW,SAAS,GAAG;AACvC,WAAK,KAAK;AAAA,QACR,MAAM;AAAA,QACN,UAAU;AAAA,MACZ,CAA8B;AAAA,IAChC;AAEA,WAAO;AAAA,MACL,MAAM;AAAA,MACN,MAAM;AAAA,MACN;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,cAAc,MAAqC;AAEzD,UAAM,aAAa,KAAK,WAAW,IAAI,WAAS,KAAK,MAAM,KAAK,CAAC;AAGjE,QAAI,WAAW,WAAW,GAAG;AAC3B,aAAO,WAAW,CAAC;AAAA,IACrB;AAGA,QAAI,WAAW,WAAW,GAAG;AAC3B,aAAO;AAAA,QACL,MAAM;AAAA,QACN,UAAU,CAAC;AAAA,MACb;AAAA,IACF;AAIA,UAAM,SAA8B;AAAA,MAClC,MAAM;AAAA,MACN,UAAU;AAAA,IACZ;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaQ,UAAU,MAAqC;AAErD,UAAM,eAAe,KAAK,KAAK,IAAI,WAAS,KAAK,MAAM,KAAK,CAAC;AAE7D,UAAM,OAAyB;AAAA;AAAA,MAE7B;AAAA,QACE,MAAM;AAAA,QACN,MAAM,KAAK;AAAA,MACb;AAAA,IACF;AAGA,YAAQ,KAAK,aAAa;AAAA,MACxB,KAAK,SAAS;AAEZ,cAAM,WAAW,KAAK,MAAM,IAAI,UAAU;AAC1C,YAAI,UAAU;AACZ,eAAK,KAAK,aAAa,QAAQ,CAAC;AAAA,QAClC;AACA;AAAA,MACF;AAAA,MACA,KAAK,OAAO;AAEV,YAAI,KAAK,cAAc;AACrB,eAAK,KAAK;AAAA,YACR,MAAM;AAAA,YACN,OAAO,KAAK;AAAA,UACd,CAA8B;AAAA,QAChC;AAEA,cAAM,SAAS,KAAK,MAAM,IAAI,QAAQ;AACtC,YAAI,QAAQ;AACV,eAAK,KAAK,aAAa,MAAM,CAAC;AAAA,QAChC;AACA;AAAA,MACF;AAAA,MACA,KAAK;AAAA,MACL,KAAK,SAAS;AAEZ,cAAM,YAAY,KAAK,MAAM,IAAI,WAAW;AAC5C,YAAI,WAAW;AACb,eAAK,KAAK,aAAa,SAAS,CAAC;AAAA,QACnC;AACA;AAAA,MACF;AAAA,MACA,KAAK;AAEH;AAAA,IACJ;AAGA,SAAK,KAAK;AAAA,MACR,MAAM;AAAA,MACN,UAAU;AAAA,IACZ,CAA8B;AAE9B,WAAO;AAAA,MACL,MAAM;AAAA,MACN,MA
AM;AAAA,MACN;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,WAAW,OAAkC;AAC3C,UAAM,WAAW,MAAM,IAAI,WAAS,KAAK,MAAM,KAAK,CAAC;AACrD,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,IACF;AAAA,EACF;AACF;AAoBO,SAAS,SAAS,MAAoC;AAC3D,QAAM,UAAU,IAAI,WAAW;AAC/B,QAAM,MAAM,QAAQ,MAAM,IAAI;AAC9B,SAAO;AAAA,IACL;AAAA,IACA,UAAU,QAAQ;AAAA,EACpB;AACF;","names":["num","getTokenizer","getTokenizer","tokenize","tokenize","createCommandNode"]}