@ai-sdk-tool/parser 3.2.1 → 3.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +24 -116
- package/dist/{chunk-DCK5APVO.js → chunk-7E6UFDFQ.js} +15 -12
- package/dist/chunk-7E6UFDFQ.js.map +1 -0
- package/dist/chunk-EW3A6Y7O.js +2216 -0
- package/dist/chunk-EW3A6Y7O.js.map +1 -0
- package/dist/chunk-IX4FJELL.js +671 -0
- package/dist/chunk-IX4FJELL.js.map +1 -0
- package/dist/chunk-OUGMLYAW.js +389 -0
- package/dist/chunk-OUGMLYAW.js.map +1 -0
- package/dist/community.cjs +3442 -276
- package/dist/community.cjs.map +1 -1
- package/dist/community.js +4 -1
- package/dist/community.js.map +1 -1
- package/dist/index.cjs +3505 -254
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +11 -1
- package/dist/rjson.cjs +697 -0
- package/dist/rjson.cjs.map +1 -0
- package/dist/rjson.d.cts +108 -0
- package/dist/rjson.d.ts +108 -0
- package/dist/rjson.js +11 -0
- package/dist/rjson.js.map +1 -0
- package/dist/rxml.cjs +2620 -0
- package/dist/rxml.cjs.map +1 -0
- package/dist/rxml.d.cts +90 -0
- package/dist/rxml.d.ts +90 -0
- package/dist/rxml.js +10 -0
- package/dist/rxml.js.map +1 -0
- package/dist/schema-coerce.cjs +415 -0
- package/dist/schema-coerce.cjs.map +1 -0
- package/dist/schema-coerce.d.cts +5 -0
- package/dist/schema-coerce.d.ts +5 -0
- package/dist/schema-coerce.js +11 -0
- package/dist/schema-coerce.js.map +1 -0
- package/package.json +46 -15
- package/dist/chunk-DCK5APVO.js.map +0 -1
@@ -0,0 +1 @@
+
{"version":3,"sources":["../src/rjson/index.ts"],"sourcesContent":["/*\n Copyright (c) 2013, Oleg Grenrus\n All rights reserved.\n\n Redistribution and use in source and binary forms, with or without\n modification, are permitted provided that the following conditions are met:\n * Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in the\n documentation and/or other materials provided with the distribution.\n * Neither the name of the Oleg Grenrus nor the\n names of its contributors may be used to endorse or promote products\n derived from this software without specific prior written permission.\n\n THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND\n ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED\n WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n DISCLAIMED. IN NO EVENT SHALL OLEG GRENRUS BE LIABLE FOR ANY\n DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES\n (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;\n LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND\n ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\n SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n*/\n\n/*\n https://github.com/phadej/relaxed-json\n TypeScript porting based on the original code.\n Follows the license of the original code.\n*/\n\n// --- Regex Constants (for performance) ---\nconst WHITESPACE_TEST_REGEX = /\\s/;\nconst WHITESPACE_REGEX = /^\\s+/;\nconst OBJECT_START_REGEX = /^\\{/;\nconst OBJECT_END_REGEX = /^\\}/;\nconst ARRAY_START_REGEX = /^\\[/;\nconst ARRAY_END_REGEX = /^\\]/;\nconst COMMA_REGEX = /^,/;\nconst COLON_REGEX = /^:/;\nconst KEYWORD_REGEX = /^(?:true|false|null)/;\nconst NUMBER_REGEX = /^-?\\d+(?:\\.\\d+)?(?:[eE][+-]?\\d+)?/;\nconst STRING_DOUBLE_REGEX = /^\"(?:[^\"\\\\]|\\\\[\"bnrtf\\\\/]|\\\\u[0-9a-fA-F]{4})*\"/;\nconst STRING_SINGLE_REGEX = /^'((?:[^'\\\\]|\\\\['bnrtf\\\\/]|\\\\u[0-9a-fA-F]{4})*)'/;\nconst COMMENT_SINGLE_REGEX = /^\\/\\/.*?(?:\\r\\n|\\r|\\n)/;\nconst COMMENT_MULTI_REGEX = /^\\/\\*[\\s\\S]*?\\*\\//;\nconst IDENTIFIER_REGEX = /^[$a-zA-Z0-9_\\-+.*?!|&%^/#\\\\]+/;\n\n// Custom 'some' function definition (slightly different from ES5, returns the truthy value directly)\n// :: array -> fn -> *\nfunction some<T, R>(\n array: T[],\n f: (item: T, index: number, arr: T[]) => R | undefined | false\n): R | false {\n let acc: R | false = false;\n for (let i = 0; i < array.length; i += 1) {\n // We assume R is a truthy type if the condition is met, or undefined/false otherwise.\n const result = f(array[i], i, array);\n acc = result === undefined ? 
false : result;\n if (acc) {\n return acc; // Return the actual truthy value found\n }\n }\n return acc; // Returns false if no truthy value was returned by f\n}\n\n// --- Type Definitions ---\n\n// Type for the specification of a single token type\ninterface TokenSpec {\n re: RegExp;\n // Function to process the regex match and return a RawToken\n f: (match: RegExpExecArray) => RawToken;\n}\n\n// Literal types for possible token types\ntype TokenType =\n | \"atom\" // null, true, false\n | \"number\"\n | \"string\"\n | \"[\"\n | \"]\"\n | \"{\"\n | \"}\"\n | \":\"\n | \",\"\n | \" \" // Whitespace / Comments\n | \"eof\"; // End of file\n\n// Type for a token right after regex matching, before line number is added\n// Value is optional as punctuation/whitespace tokens might not have a semantic value\ninterface RawToken {\n type: TokenType;\n match: string; // The raw matched text\n value?: unknown; // The parsed value (for strings, numbers, atoms)\n}\n\n// Type for a token including line number information\ntype Token = RawToken & {\n line: number;\n};\n\n// Type for parse warnings\ninterface ParseWarning {\n message: string;\n line: number;\n}\n\n// Type for the state object used during parsing\ninterface ParseState {\n pos: number; // Current position in the token array\n warnings: ParseWarning[];\n // Options passed to the parser\n tolerant: boolean;\n duplicate: boolean; // true = allow duplicate keys (use last value), false = reject duplicate keys with error\n reviver?: (key: string, value: unknown) => unknown; // Optional JSON reviver function\n}\n\n/**\n * Options for configuring JSON parsing behavior\n */\ninterface ParseOptions {\n /**\n * Enable relaxed JSON syntax parsing (unquoted keys, single quotes, trailing commas, comments)\n * @default true\n */\n relaxed?: boolean;\n\n /**\n * Collect parsing warnings instead of throwing immediately. 
Implies tolerant mode.\n * At the end of parsing, if warnings exist, throws with warning details.\n * @default false\n */\n warnings?: boolean;\n\n /**\n * Continue parsing when encountering recoverable errors, collecting warnings.\n * In strict mode (false), throws immediately on first error.\n * @default false\n */\n tolerant?: boolean;\n\n /**\n * Allow duplicate object keys in JSON.\n * - true: Allow duplicates (uses last value, like native JSON.parse)\n * - false: Reject duplicates with error (enforces JSON specification)\n * @default false\n */\n duplicate?: boolean;\n\n /**\n * Optional reviver function to transform parsed values (same as JSON.parse reviver)\n * @param key - The object key or array index\n * @param value - The parsed value\n * @returns The transformed value\n */\n reviver?: (key: string, value: unknown) => unknown;\n}\n\n// Type for options specific to the parseMany function\ninterface ParseManyOpts<T> {\n skip: TokenType[]; // Token types to skip initially\n elementParser: (tokens: Token[], state: ParseState, obj: T) => void; // Function to parse an element/pair\n elementName: string; // Name of the expected element for error messages\n endSymbol: TokenType; // The token type that marks the end of the structure (']' or '}')\n}\n\n// --- Lexer Implementation ---\n\n// Factory function to create a lexer\n// :: array tokenSpec -> fn\nfunction makeLexer(tokenSpecs: TokenSpec[]): (contents: string) => Token[] {\n // The returned lexer function\n // :: string -> array token\n return (contents: string): Token[] => {\n const tokens: Token[] = [];\n let line = 1; // Start at line 1\n let remainingContents = contents;\n\n // Helper function to find the next token in the input string\n // :: -> { raw: string, matched: RawToken } | undefined\n function findToken(): { raw: string; matched: RawToken } | undefined {\n // Use the custom 'some' function to iterate through token specifications\n const result = some(tokenSpecs, (tokenSpec) => {\n const m = tokenSpec.re.exec(remainingContents); // Try to match the regex at the current position\n if (m) {\n const raw = m[0]; // The matched raw string\n remainingContents = remainingContents.slice(raw.length); // Consume the matched part from the input\n return {\n raw,\n matched: tokenSpec.f(m), // Process the match using the spec's function\n };\n }\n return; // No match for this spec\n });\n return result === false ? 
undefined : result;\n }\n\n // Main lexing loop\n while (remainingContents !== \"\") {\n const matched = findToken(); // Find the next token\n\n if (!matched) {\n // If no token spec matches, it's a syntax error\n const err = new SyntaxError(\n `Unexpected character: ${remainingContents[0]}; input: ${remainingContents.substr(\n 0,\n 100\n )}`\n );\n // Attach line number to the error object (standard Error doesn't have it by default)\n (err as { line?: number }).line = line;\n throw err;\n }\n\n // Add line number information to the matched token\n // We need type assertion because 'matched.matched' is initially RawToken\n const tokenWithLine = matched.matched as Token;\n tokenWithLine.line = line;\n\n // Update line number count based on newlines in the matched raw string\n line += matched.raw.replace(/[^\\n]/g, \"\").length;\n\n tokens.push(tokenWithLine); // Add the finalized token to the list\n }\n\n // Add an EOF token (useful for the parser) - Optional, depends on parser needs.\n // The current parser handles end-of-input via state.pos check, so EOF token isn't strictly needed here\n // tokens.push({ type: 'eof', match: '', value: undefined, line: line });\n\n return tokens;\n };\n}\n\n// --- Token Creation Helper Functions ---\n\n// :: tuple string string -> rawToken\nfunction fStringSingle(m: RegExpExecArray): RawToken {\n // Handles strings in single quotes, converting them to standard JSON double-quoted strings\n const content = m[1].replace(\n /([^'\\\\]|\\\\['bnrtf\\\\]|\\\\u[0-9a-fA-F]{4})/g,\n (mm) => {\n if (mm === '\"') {\n return '\\\\\"'; // Escape double quotes inside\n }\n if (mm === \"\\\\'\") {\n return \"'\"; // Unescape escaped single quotes\n }\n return mm;\n }\n );\n\n const match = `\"${content}\"`;\n return {\n type: \"string\",\n match, // The transformed, double-quoted string representation\n // Use JSON.parse on the transformed string to handle escape sequences correctly\n value: JSON.parse(match),\n };\n}\n\n// :: tuple string -> rawToken\nfunction fStringDouble(m: RegExpExecArray): RawToken {\n // Handles standard JSON double-quoted strings\n return {\n type: \"string\",\n match: m[0], // The raw matched string (including quotes)\n value: JSON.parse(m[0]), // Use JSON.parse to handle escapes and get the value\n };\n}\n\n// :: tuple string -> rawToken\nfunction fIdentifier(m: RegExpExecArray): RawToken {\n // Transforms unquoted identifiers into JSON strings\n const value = m[0];\n const match =\n '\"' +\n value.replace(/\\\\/g, \"\\\\\\\\\").replace(/\"/g, '\\\\\"') + // Escape backslashes and quotes\n '\"';\n return {\n type: \"string\", // Treat identifiers as strings\n value, // The original identifier name\n match, // The double-quoted string representation\n };\n}\n\n// :: tuple string -> rawToken\nfunction fComment(m: RegExpExecArray): RawToken {\n // Treats comments as whitespace, preserving only newlines\n const match = m[0].replace(/./g, (c) =>\n WHITESPACE_TEST_REGEX.test(c) ? 
c : \" \"\n );\n return {\n type: \" \", // Represent comments as whitespace tokens\n match, // String containing original newlines and spaces for other chars\n value: undefined, // Comments don't have a semantic value\n };\n}\n\n// :: tuple string -> rawToken\nfunction fNumber(m: RegExpExecArray): RawToken {\n // Handles numbers (integers, floats, exponents)\n return {\n type: \"number\",\n match: m[0], // The raw matched number string\n value: Number.parseFloat(m[0]), // Convert string to number\n };\n}\n\n// :: tuple (\"null\" | \"true\" | \"false\") -> rawToken\nfunction fKeyword(m: RegExpExecArray): RawToken {\n // Handles JSON keywords: null, true, false\n let value: null | boolean;\n switch (m[0]) {\n case \"null\":\n value = null;\n break;\n case \"true\":\n value = true;\n break;\n case \"false\":\n value = false;\n break;\n default:\n // Should be unreachable due to regex, but satisfies TypeScript exhaustiveness check\n throw new Error(`Unexpected keyword: ${m[0]}`);\n }\n return {\n type: \"atom\", // Use 'atom' type for these literals\n match: m[0], // The raw matched keyword\n value, // The corresponding JavaScript value\n };\n}\n\n// --- Token Specification Creation ---\n\n// :: boolean -> array tokenSpec\nfunction makeTokenSpecs(relaxed: boolean): TokenSpec[] {\n // Helper to create a simple token spec function\n // :: string -> fn\n function f(type: TokenType): (m: RegExpExecArray) => RawToken {\n // :: tuple string -> rawToken\n return (m: RegExpExecArray): RawToken => {\n // For simple tokens like punctuation, value is not needed\n return { type, match: m[0], value: undefined };\n };\n }\n\n // Base JSON token specifications (strict)\n let tokenSpecs: TokenSpec[] = [\n { re: WHITESPACE_REGEX, f: f(\" \") }, // Whitespace\n { re: OBJECT_START_REGEX, f: f(\"{\") }, // Object start\n { re: OBJECT_END_REGEX, f: f(\"}\") }, // Object end\n { re: ARRAY_START_REGEX, f: f(\"[\") }, // Array start\n { re: ARRAY_END_REGEX, f: f(\"]\") }, // Array end\n { re: COMMA_REGEX, f: f(\",\") }, // Comma separator\n { re: COLON_REGEX, f: f(\":\") }, // Key-value separator\n { re: KEYWORD_REGEX, f: fKeyword }, // Keywords\n // Number: optional sign, digits, optional decimal part, optional exponent\n { re: NUMBER_REGEX, f: fNumber },\n // String: double-quoted, handles escapes\n { re: STRING_DOUBLE_REGEX, f: fStringDouble },\n ];\n\n // Add relaxed syntax rules if requested\n if (relaxed) {\n tokenSpecs = tokenSpecs.concat([\n // Single-quoted strings\n {\n re: STRING_SINGLE_REGEX,\n f: fStringSingle,\n },\n // Single-line comments (// ...)\n { re: COMMENT_SINGLE_REGEX, f: fComment },\n // Multi-line comments (/* ... */)\n { re: COMMENT_MULTI_REGEX, f: fComment },\n // Unquoted identifiers (treated as strings)\n // Allows letters, numbers, _, -, +, ., *, ?, !, |, &, %, ^, /, #, \\\n { re: IDENTIFIER_REGEX, f: fIdentifier },\n // Note: The order matters here. 
Identifiers are checked after keywords/numbers.\n ]);\n }\n\n return tokenSpecs;\n}\n\n// Create lexer instances\nconst lexer = makeLexer(makeTokenSpecs(true)); // Relaxed syntax lexer\nconst strictLexer = makeLexer(makeTokenSpecs(false)); // Strict JSON lexer\n\n// --- Parser Helper Functions ---\n\n// Find the index of the previous non-whitespace token\n// :: array token -> nat -> nat?\nfunction previousNWSToken(tokens: Token[], index: number): number | undefined {\n let currentIndex = index;\n for (; currentIndex >= 0; currentIndex -= 1) {\n if (tokens[currentIndex].type !== \" \") {\n return currentIndex; // Return index of the non-whitespace token\n }\n }\n return; // Not found\n}\n\n// Removes trailing commas from arrays and objects in a token stream\n// :: array token -> array token\nfunction stripTrailingComma(tokens: Token[]): Token[] {\n const res: Token[] = [];\n\n tokens.forEach((token, index) => {\n // Check if the current token is a closing bracket or brace\n if (index > 0 && (token.type === \"]\" || token.type === \"}\")) {\n // Find the last non-whitespace token *before* this closing token in the result array 'res'\n const prevNWSTokenIndex = previousNWSToken(res, res.length - 1); // Look in `res`, not `tokens`!\n\n // Check if it's a comma\n if (\n prevNWSTokenIndex !== undefined &&\n res[prevNWSTokenIndex].type === \",\"\n ) {\n // Find the token *before* the comma\n const preCommaIndex = previousNWSToken(res, prevNWSTokenIndex - 1);\n\n // Ensure there *was* a token before the comma, and it wasn't an opening bracket/brace\n // This prevents removing the comma in `[,1]` or `{, \"a\":1}` which is invalid anyway\n if (\n preCommaIndex !== undefined &&\n res[preCommaIndex].type !== \"[\" &&\n res[preCommaIndex].type !== \"{\"\n ) {\n // Replace the trailing comma with a whitespace token\n res[prevNWSTokenIndex] = {\n type: \" \",\n match: \" \", // Represent as a single space\n value: undefined, // Whitespace has no value\n line: res[prevNWSTokenIndex].line, // Preserve original line number\n };\n }\n }\n }\n\n res.push(token); // Add the current token (or the original closing bracket/brace)\n });\n\n return res;\n}\n\n/**\n * Transform relaxed JSON syntax to standard JSON string\n *\n * Converts relaxed JSON features (unquoted keys, single quotes, trailing commas, comments)\n * into valid standard JSON syntax that can be parsed by native JSON.parse().\n *\n * @param text - The relaxed JSON string to transform\n * @returns A standard JSON string\n *\n * @example\n * ```typescript\n * transform('{key: \"value\", trailing: \"comma\",}')\n * // Returns: '{\"key\": \"value\", \"trailing\": \"comma\"}'\n *\n * transform(\"{'single': 'quotes'}\")\n * // Returns: '{\"single\": \"quotes\"}'\n * ```\n */\nfunction transform(text: string): string {\n // Tokenize contents using the relaxed lexer\n let tokens = lexer(text);\n\n // Remove trailing commas if present\n tokens = stripTrailingComma(tokens);\n\n // Concatenate the 'match' part of each token back into a single string\n return tokens.reduce((str, token) => str + token.match, \"\");\n}\n\n// --- Parsing Core Functions ---\n\n// Get the next token from the stream and advance the position\n// :: array parseToken -> parseState -> *\nfunction popToken(tokens: Token[], state: ParseState): Token {\n const token = tokens[state.pos];\n state.pos += 1;\n\n if (!token) {\n // If we are past the end of the token array, return an EOF token\n const lastLine = tokens.length !== 0 ? (tokens.at(-1)?.line ?? 
1) : 1;\n return { type: \"eof\", match: \"\", value: undefined, line: lastLine };\n }\n\n return token;\n}\n\n// Get a string representation of a token for error messages\n// :: token -> string\nfunction strToken(token: Token): string {\n switch (token.type) {\n case \"atom\":\n case \"string\":\n case \"number\":\n // Show type and the matched text (or value, match is usually better for context)\n return `${token.type} ${token.match}`;\n case \"eof\":\n return \"end-of-file\";\n default:\n // For punctuation, just show the symbol itself in quotes\n return `'${token.type}'`;\n }\n}\n\n// Expects and consumes a colon token, raises error/warning otherwise\n// :: array token -> parseState -> undefined\nfunction skipColon(tokens: Token[], state: ParseState): void {\n const colon = popToken(tokens, state);\n if (colon.type !== \":\") {\n const message = `Unexpected token: ${strToken(colon)}, expected ':'`;\n if (state.tolerant) {\n state.warnings.push({\n message,\n line: colon.line,\n });\n // If tolerant, put the unexpected token back by decrementing pos\n // This allows the parser to potentially recover\n state.pos -= 1;\n } else {\n const err = new SyntaxError(message);\n (err as { line?: number }).line = colon.line;\n throw err;\n }\n }\n}\n\n// Skips over any punctuation tokens until a valid data token or EOF is found.\n// Used to recover in tolerant mode or find the start of the next value.\n// :: array token -> parseState -> (array string)? -> token\nfunction skipPunctuation(\n tokens: Token[],\n state: ParseState,\n valid?: TokenType[]\n): Token {\n // Define common punctuation tokens that might appear unexpectedly\n const punctuation: TokenType[] = [\",\", \":\", \"]\", \"}\"];\n let token = popToken(tokens, state);\n\n while (true) {\n // If the token is one of the valid types we're looking for, return it\n if (valid?.includes(token.type)) {\n return token;\n }\n if (token.type === \"eof\") {\n // If we hit EOF, return it\n return token;\n }\n if (punctuation.includes(token.type)) {\n // If it's unexpected punctuation...\n const message = `Unexpected token: ${strToken(\n token\n )}, expected '[', '{', number, string or atom`;\n if (state.tolerant) {\n // In tolerant mode, record a warning and get the next token\n state.warnings.push({\n message,\n line: token.line,\n });\n token = popToken(tokens, state); // Continue skipping\n } else {\n // In strict mode, throw an error\n const err = new SyntaxError(message);\n (err as { line?: number }).line = token.line;\n throw err;\n }\n } else {\n // If it's not punctuation, EOF, or a specifically valid token,\n // it must be the start of a value/object/array, so return it.\n return token;\n }\n }\n}\n\n// Helper to raise an error or add a warning based on tolerant mode\n// :: parseState -> token -> string -> undefined\nfunction raiseError(state: ParseState, token: Token, message: string): void {\n if (state.tolerant) {\n state.warnings.push({\n message,\n line: token.line,\n });\n } else {\n const err = new SyntaxError(message);\n (err as { line?: number }).line = token.line;\n throw err;\n }\n}\n\n// Helper for common \"Unexpected token X, expected Y\" errors\n// :: parseState -> token -> string -> undefined\nfunction raiseUnexpected(\n state: ParseState,\n token: Token,\n expected: string\n): void {\n raiseError(\n state,\n token,\n `Unexpected token: ${strToken(token)}, expected ${expected}`\n );\n}\n\n// Checks for duplicate keys in objects when duplicate checking is enabled (state.duplicate = false).\n// If a duplicate key is found, 
raises an error (respecting tolerant mode).\n// This enforces JSON specification compliance for duplicate key handling.\n// :: parseState -> {} -> parseToken -> undefined\nfunction checkDuplicates(\n state: ParseState,\n obj: { [key: string]: unknown },\n token: Token\n): void {\n // We assume token.type is 'string' here based on where it's called in parsePair\n // If other types could be keys, this check needs adjustment.\n const key = String(token.value); // Ensure key is string for lookup\n\n // Only check for duplicates when duplicate checking is enabled\n // state.duplicate = false means \"reject duplicates\", so we check when !state.duplicate\n if (!state.duplicate && Object.hasOwn(obj, key)) {\n raiseError(state, token, `Duplicate key: ${key}`);\n // Note: In tolerant mode, this adds a warning and continues parsing.\n // In strict mode, this throws immediately. Either way, last value wins for the duplicate key.\n }\n}\n\n// Appends a key-value pair to an object, applying the reviver function if present\n// :: parseState -> any -> any -> any -> undefined\nfunction appendPair(\n state: ParseState,\n obj: { [objKey: string]: unknown },\n key: string,\n value: unknown\n): void {\n // Apply reviver function if it exists\n const finalValue = state.reviver ? state.reviver(key, value) : value;\n // The reviver can return undefined to omit the key/value pair\n if (finalValue !== undefined) {\n obj[key] = finalValue;\n }\n}\n\n// Parses a key-value pair within an object\n// :: array parseToken -> parseState -> map -> undefined\nfunction parsePair(\n tokens: Token[],\n state: ParseState,\n obj: { [key: string]: unknown }\n): void {\n // Skip leading punctuation, expecting a string key (or ':' in tolerant mode)\n let token = skipPunctuation(tokens, state, [\":\", \"string\", \"number\", \"atom\"]); // Allow recovery\n let value: unknown;\n\n // --- Key Parsing ---\n if (token.type !== \"string\") {\n // Handle unexpected token where a string key was expected\n raiseUnexpected(state, token, \"string key\");\n\n // Attempt recovery in tolerant mode\n if (state.tolerant) {\n switch (token.type) {\n case \":\": // If colon found directly, assume missing key, use \"null\"\n token = {\n type: \"string\",\n value: \"null\",\n match: '\"null\"',\n line: token.line,\n };\n state.pos -= 1; // Put the colon back for skipColon\n break;\n case \"number\": // Use number as string key\n case \"atom\": // Use atom value as string key\n token = {\n type: \"string\",\n value: String(token.value),\n match: `\"${token.value}\"`,\n line: token.line,\n };\n break;\n case \"[\": // Assume missing key before an array\n case \"{\": // Assume missing key before an object\n state.pos -= 1; // Put back the bracket/brace\n value = parseAny(tokens, state); // Parse the value directly\n checkDuplicates(state, obj, {\n type: \"string\",\n value: \"null\",\n match: '\"null\"',\n line: token.line,\n }); // Check duplicate for \"null\" key\n appendPair(state, obj, \"null\", value); // Append with \"null\" key\n return; // Finished parsing this \"pair\"\n case \"eof\": // Reached end unexpectedly\n return; // Cannot recover\n default: // Other unexpected token (like comma, closing brace)\n // raiseUnexpected already issued a warning/error. 
Try to advance.\n // This might lead to cascading errors, but it's tolerant mode.\n return;\n }\n } else {\n // In non-tolerant mode, raiseUnexpected already threw.\n return; // Should be unreachable\n }\n }\n\n // Now we have a string token (potentially recovered)\n checkDuplicates(state, obj, token);\n const key = String(token.value); // Ensure key is string\n\n // --- Colon and Value Parsing ---\n skipColon(tokens, state); // Expect and consume ':'\n value = parseAny(tokens, state); // Parse the value recursively\n\n // --- Appending Pair ---\n appendPair(state, obj, key, value);\n}\n\n// Parses an element within an array\n// :: array parseToken -> parseState -> array -> undefined\nfunction parseElement(\n tokens: Token[],\n state: ParseState,\n arr: unknown[]\n): void {\n const key = arr.length; // Key is the current array index\n // Skip potential leading punctuation (like extra commas in tolerant mode)\n // skipPunctuation used inside parseAny handles this implicitly\n const value = parseAny(tokens, state); // Recursively parse the element value\n // Apply reviver using the index as a string key\n arr[key] = state.reviver ? state.reviver(String(key), value) : value;\n}\n\n// Parses a JSON object structure: '{' key:value, ... '}'\n// :: array parseToken -> parseState -> {}\nfunction parseObject(\n tokens: Token[],\n state: ParseState\n): { [key: string]: unknown } {\n const obj = {};\n // Call parseMany to handle the structure { pair1, pair2, ... }\n return parseMany<{ [key: string]: unknown }>(tokens, state, obj, {\n skip: [\":\", \"}\"], // Initially skip over colon or closing brace (for empty/tolerant cases)\n elementParser: parsePair, // Use parsePair to parse each key-value element\n elementName: \"string key\", // Expected element type for errors\n endSymbol: \"}\", // The closing token for an object\n });\n}\n\n// Parses a JSON array structure: '[' element, ... ']'\n// :: array parseToken -> parseState -> array\nfunction parseArray(tokens: Token[], state: ParseState): unknown[] {\n const arr: unknown[] = [];\n // Call parseMany to handle the structure [ element1, element2, ... 
]\n return parseMany<unknown[]>(tokens, state, arr, {\n skip: [\"]\"], // Initially skip over closing bracket (for empty/tolerant cases)\n elementParser: parseElement, // Use parseElement to parse each array item\n elementName: \"json value\", // Expected element type for errors\n endSymbol: \"]\", // The closing token for an array\n });\n}\n\n// Helper to handle invalid tokens in parseMany\nfunction handleInvalidToken<T>(\n token: Token,\n state: ParseState,\n opts: ParseManyOpts<T>,\n result: T\n): T | null {\n raiseUnexpected(state, token, `',' or '${opts.endSymbol}'`);\n\n if (state.tolerant) {\n if (token.type === \"eof\") {\n return result;\n }\n // Assume a comma was missing and put the token back\n state.pos -= 1;\n return null; // Signal to continue parsing\n }\n return result; // Should be unreachable in strict mode\n}\n\n// Helper to handle comma tokens in parseMany\ninterface HandleCommaTokenParams<T> {\n token: Token;\n tokens: Token[];\n state: ParseState;\n opts: ParseManyOpts<T>;\n result: T;\n}\n\nfunction handleCommaToken<T>(params: HandleCommaTokenParams<T>): T | null {\n const { token, tokens, state, opts, result } = params;\n const nextToken = tokens[state.pos];\n if (state.tolerant && nextToken && nextToken.type === opts.endSymbol) {\n raiseError(state, token, `Trailing comma before '${opts.endSymbol}'`);\n popToken(tokens, state);\n return result;\n }\n opts.elementParser(tokens, state, result);\n return null; // Signal to continue parsing\n}\n\n// Helper to handle the initial element in parseMany\nfunction parseManyInitialElement<T>(\n tokens: Token[],\n state: ParseState,\n result: T,\n opts: ParseManyOpts<T>\n): T | undefined {\n const token = skipPunctuation(tokens, state, opts.skip);\n\n if (token.type === \"eof\") {\n raiseUnexpected(state, token, `'${opts.endSymbol}' or ${opts.elementName}`);\n return result;\n }\n\n if (token.type === opts.endSymbol) {\n return result;\n }\n\n state.pos -= 1;\n opts.elementParser(tokens, state, result);\n return; // Signal to continue parsing\n}\n\n// Helper to process a token in parseMany loop\nfunction parseManyProcessToken<T>(params: {\n token: Token;\n tokens: Token[];\n state: ParseState;\n opts: ParseManyOpts<T>;\n result: T;\n}): T | undefined {\n const { token, tokens, state, opts, result } = params;\n if (token.type !== opts.endSymbol && token.type !== \",\") {\n const handledResult = handleInvalidToken(token, state, opts, result);\n if (handledResult !== null) {\n return handledResult;\n }\n }\n\n if (token.type === opts.endSymbol) {\n return result;\n }\n\n if (token.type === \",\") {\n const handledResult = handleCommaToken({\n token,\n tokens,\n state,\n opts,\n result,\n });\n if (handledResult !== null) {\n return handledResult;\n }\n return; // Continue loop\n }\n\n opts.elementParser(tokens, state, result);\n return; // Continue loop\n}\n\n// Generic function to parse comma-separated elements within enclosing symbols (like objects or arrays)\n// :: t : array | {} => array parseToken -> parseState -> t -> parseManyOpts -> t\nfunction parseMany<T>(\n tokens: Token[],\n state: ParseState,\n result: T,\n opts: ParseManyOpts<T>\n): T {\n const initialResult = parseManyInitialElement(tokens, state, result, opts);\n if (initialResult !== undefined) {\n return initialResult;\n }\n\n while (true) {\n const token = popToken(tokens, state);\n const processedResult = parseManyProcessToken({\n token,\n tokens,\n state,\n opts,\n result,\n });\n if (processedResult !== undefined) {\n return processedResult;\n }\n 
}\n}\n\n// Perform final checks after parsing the main value\n// :: array parseToken -> parseState -> any -> undefined\nfunction endChecks(tokens: Token[], state: ParseState, ret: unknown): void {\n // Check if there are unparsed tokens remaining\n if (state.pos < tokens.length) {\n // In tolerant mode, skip trailing whitespace/punctuation before declaring error\n if (state.tolerant) {\n skipPunctuation(tokens, state); // Try skipping junk\n }\n // If still not at the end, raise error/warning\n if (state.pos < tokens.length) {\n raiseError(\n state,\n tokens[state.pos],\n `Unexpected token: ${strToken(tokens[state.pos])}, expected end-of-input`\n );\n }\n }\n\n // If in tolerant mode and warnings were generated, throw a summary error at the end\n if (state.tolerant && state.warnings.length > 0) {\n const message =\n state.warnings.length === 1\n ? state.warnings[0].message // Single warning message\n : `${state.warnings.length} parse warnings`; // Multiple warnings summary\n const err = new SyntaxError(message);\n // Attach details to the error object\n (err as { line?: number; warnings?: ParseWarning[]; obj?: unknown }).line =\n state.warnings[0].line; // Line of the first warning\n (\n err as { line?: number; warnings?: ParseWarning[]; obj?: unknown }\n ).warnings = state.warnings; // Array of all warnings\n (err as { line?: number; warnings?: ParseWarning[]; obj?: unknown }).obj =\n ret; // The partially parsed object (might be useful)\n throw err;\n }\n}\n\n// Main recursive parsing function for any JSON value type\n// :: array parseToken -> parseState -> boolean? -> any\nfunction parseAny(tokens: Token[], state: ParseState, end = false): unknown {\n // Skip any leading punctuation (useful for recovery in tolerant mode)\n const token = skipPunctuation(tokens, state);\n let ret: unknown; // Variable to hold the parsed result\n\n // Check for premature end of file\n if (token.type === \"eof\") {\n // Only raise error if we expected a value (not called recursively within a structure)\n // If 'end' is true, we are at the top level.\n if (end) {\n raiseUnexpected(state, token, \"json value\");\n }\n // If called recursively (e.g., after a comma), returning undefined might be handled\n // by the caller (like parseElement/parsePair). However, hitting EOF here usually\n // means an incomplete structure. Let's raise an error/warning.\n raiseUnexpected(state, token, \"json value\");\n return; // Return undefined in tolerant mode after warning\n }\n\n // Parse based on the token type\n switch (token.type) {\n case \"{\": // Start of an object\n ret = parseObject(tokens, state);\n break;\n case \"[\": // Start of an array\n ret = parseArray(tokens, state);\n break;\n case \"string\": // String literal\n case \"number\": // Number literal\n case \"atom\": // Keyword literal (true, false, null)\n ret = token.value;\n break;\n default:\n // Unexpected token type to start a value\n raiseUnexpected(state, token, \"json value\");\n // Attempt recovery in tolerant mode by returning null\n if (state.tolerant) {\n ret = null;\n } else {\n // Error already thrown\n return; // Should be unreachable\n }\n }\n\n // If this is the top-level call (end === true)\n if (end) {\n // Apply the top-level reviver function (key is empty string)\n ret = state.reviver ? 
state.reviver(\"\", ret) : ret;\n // Perform final checks for trailing tokens or accumulated warnings\n endChecks(tokens, state, ret);\n }\n\n return ret;\n}\n\n// Helper to normalize parse options\nfunction normalizeParseOptions(\n optsOrReviver?: ParseOptions | ((key: string, value: unknown) => unknown)\n): ParseOptions {\n let options: ParseOptions = {};\n\n if (typeof optsOrReviver === \"function\") {\n options.reviver = optsOrReviver;\n } else if (optsOrReviver !== null && typeof optsOrReviver === \"object\") {\n options = { ...optsOrReviver };\n } else if (optsOrReviver !== undefined) {\n throw new TypeError(\n \"Second argument must be a reviver function or an options object.\"\n );\n }\n\n // Set default for relaxed mode\n if (options.relaxed === undefined) {\n if (options.warnings === true || options.tolerant === true) {\n options.relaxed = true;\n } else if (options.warnings === false && options.tolerant === false) {\n options.relaxed = false;\n } else {\n options.relaxed = true;\n }\n }\n\n options.tolerant = options.tolerant || options.warnings;\n options.duplicate = options.duplicate ?? false;\n\n return options;\n}\n\n// Helper to create parser state\nfunction createParseState(options: ParseOptions): ParseState {\n return {\n pos: 0,\n reviver: options.reviver,\n tolerant: options.tolerant ?? false,\n duplicate: options.duplicate ?? false,\n warnings: [],\n };\n}\n\n// Helper to use custom parser with tokens\nfunction parseWithCustomParser(text: string, options: ParseOptions): unknown {\n const lexerToUse = options.relaxed ? lexer : strictLexer;\n let tokens = lexerToUse(text);\n\n if (options.relaxed) {\n tokens = stripTrailingComma(tokens);\n }\n\n tokens = tokens.filter((token) => token.type !== \" \");\n const state = createParseState(options);\n return parseAny(tokens, state, true);\n}\n\n// Helper to use native JSON.parse with transformation\nfunction parseWithTransform(text: string, options: ParseOptions): unknown {\n let tokens = lexer(text);\n tokens = stripTrailingComma(tokens);\n const newtext = tokens.reduce((str, token) => str + token.match, \"\");\n return JSON.parse(\n newtext,\n options.reviver as (key: string, value: unknown) => unknown\n );\n}\n\n// --- Main Parse Function ---\n\n/**\n * Parse a JSON string with enhanced features beyond standard JSON.parse()\n *\n * Supports both strict JSON and relaxed JSON syntax with configurable error handling\n * and duplicate key validation.\n *\n * @param text - The JSON string to parse\n * @param optsOrReviver - Either a ParseOptions object for configuration, or a reviver function (like JSON.parse)\n *\n * @returns The parsed JavaScript value\n *\n * @throws {SyntaxError} When parsing fails in strict mode, or when warnings are collected in tolerant mode\n *\n * @example\n * ```typescript\n * // Standard JSON parsing\n * parse('{\"key\": \"value\"}')\n *\n * // Relaxed JSON with unquoted keys and trailing commas\n * parse('{key: \"value\", trailing: \"comma\",}', { relaxed: true })\n *\n * // Strict duplicate key validation\n * parse('{\"key\": 1, \"key\": 2}', { duplicate: false }) // throws error\n *\n * // Allow duplicates (uses last value)\n * parse('{\"key\": 1, \"key\": 2}', { duplicate: true }) // returns {key: 2}\n *\n * // Tolerant mode with warning collection\n * parse('malformed json', { tolerant: true, warnings: true })\n * ```\n */\nfunction parse(\n text: string,\n optsOrReviver?: ParseOptions | ((key: string, value: unknown) => unknown)\n): unknown {\n const options = 
normalizeParseOptions(optsOrReviver);\n\n // Strategy 1: Strict JSON with duplicate allowance -> use native JSON.parse\n if (\n !(options.relaxed || options.warnings || options.tolerant) &&\n options.duplicate\n ) {\n return JSON.parse(\n text,\n options.reviver as (key: string, value: unknown) => unknown\n );\n }\n\n // Strategy 2: Need custom parser (warnings, tolerant, or duplicate checking)\n if (options.warnings || options.tolerant || !options.duplicate) {\n return parseWithCustomParser(text, options);\n }\n\n // Strategy 3: Relaxed syntax without warnings/tolerance -> transform and use native\n return parseWithTransform(text, options);\n}\n\n// --- Stringify Function (Basic Implementation) ---\n// Note: This is a basic, non-configurable stringifier, mainly for potential internal use or testing.\n// It doesn't handle replacer/space arguments like JSON.stringify.\n\n// Helper for stringifying object pairs\n// :: any -> string -> ... -> string\nfunction stringifyPair(\n obj: { [objKey: string]: unknown },\n key: string\n): string {\n // Stringify key and value, then join with colon\n // Recursively calls stringify for the value\n return `${JSON.stringify(key)}:${stringify(obj[key])}`;\n}\n\n/**\n * Convert JavaScript value to JSON string with sorted object keys\n *\n * Similar to JSON.stringify but with consistent key ordering (sorted alphabetically).\n * Handles undefined values by converting them to null.\n *\n * @param obj - The value to convert to JSON string\n * @returns A JSON string representation\n *\n * @example\n * ```typescript\n * stringify({z: 1, a: 2, m: 3})\n * // Returns: '{\"a\":2,\"m\":3,\"z\":1}' (keys sorted)\n *\n * stringify({key: undefined})\n * // Returns: '{\"key\":null}' (undefined becomes null)\n * ```\n */\nfunction stringify(obj: unknown): string {\n const type = typeof obj;\n\n // Handle primitives directly using JSON.stringify (handles escaping etc.)\n if (\n type === \"string\" ||\n type === \"number\" ||\n type === \"boolean\" ||\n obj === null\n ) {\n return JSON.stringify(obj);\n }\n\n // Handle undefined (represented as null in this basic version, JSON.stringify omits in objects/returns undefined at top level)\n if (type === \"undefined\") {\n return \"null\";\n }\n\n // Handle arrays\n if (Array.isArray(obj)) {\n // Recursively stringify each element and join with commas\n const elements = obj.map(stringify).join(\",\");\n return `[${elements}]`;\n }\n\n // Handle objects\n // Check if it's a non-null object (using constructor check is less robust than typeof + null check)\n if (type === \"object\") {\n // Already checked for null and Array above\n // Get keys, sort them for consistent output (optional, but good practice)\n const keys = Object.keys(obj as object);\n keys.sort();\n // Stringify each key-value pair and join with commas\n const pairs = keys\n .map((key) => stringifyPair(obj as { [objKey: string]: unknown }, key))\n .join(\",\");\n return `{${pairs}}`;\n }\n\n // Fallback for unsupported types (like functions, symbols) - represent as null\n return \"null\";\n}\n\nexport { parse, stringify, transform };\nexport type { ParseOptions 
};\n"],"mappings":";AAkCA,IAAM,wBAAwB;AAC9B,IAAM,mBAAmB;AACzB,IAAM,qBAAqB;AAC3B,IAAM,mBAAmB;AACzB,IAAM,oBAAoB;AAC1B,IAAM,kBAAkB;AACxB,IAAM,cAAc;AACpB,IAAM,cAAc;AACpB,IAAM,gBAAgB;AACtB,IAAM,eAAe;AACrB,IAAM,sBAAsB;AAC5B,IAAM,sBAAsB;AAC5B,IAAM,uBAAuB;AAC7B,IAAM,sBAAsB;AAC5B,IAAM,mBAAmB;AAIzB,SAAS,KACP,OACA,GACW;AACX,MAAI,MAAiB;AACrB,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,GAAG;AAExC,UAAM,SAAS,EAAE,MAAM,CAAC,GAAG,GAAG,KAAK;AACnC,UAAM,WAAW,SAAY,QAAQ;AACrC,QAAI,KAAK;AACP,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AA2GA,SAAS,UAAU,YAAwD;AAGzE,SAAO,CAAC,aAA8B;AACpC,UAAM,SAAkB,CAAC;AACzB,QAAI,OAAO;AACX,QAAI,oBAAoB;AAIxB,aAAS,YAA4D;AAEnE,YAAM,SAAS,KAAK,YAAY,CAAC,cAAc;AAC7C,cAAM,IAAI,UAAU,GAAG,KAAK,iBAAiB;AAC7C,YAAI,GAAG;AACL,gBAAM,MAAM,EAAE,CAAC;AACf,8BAAoB,kBAAkB,MAAM,IAAI,MAAM;AACtD,iBAAO;AAAA,YACL;AAAA,YACA,SAAS,UAAU,EAAE,CAAC;AAAA;AAAA,UACxB;AAAA,QACF;AACA;AAAA,MACF,CAAC;AACD,aAAO,WAAW,QAAQ,SAAY;AAAA,IACxC;AAGA,WAAO,sBAAsB,IAAI;AAC/B,YAAM,UAAU,UAAU;AAE1B,UAAI,CAAC,SAAS;AAEZ,cAAM,MAAM,IAAI;AAAA,UACd,yBAAyB,kBAAkB,CAAC,CAAC,YAAY,kBAAkB;AAAA,YACzE;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAEA,QAAC,IAA0B,OAAO;AAClC,cAAM;AAAA,MACR;AAIA,YAAM,gBAAgB,QAAQ;AAC9B,oBAAc,OAAO;AAGrB,cAAQ,QAAQ,IAAI,QAAQ,UAAU,EAAE,EAAE;AAE1C,aAAO,KAAK,aAAa;AAAA,IAC3B;AAMA,WAAO;AAAA,EACT;AACF;AAKA,SAAS,cAAc,GAA8B;AAEnD,QAAM,UAAU,EAAE,CAAC,EAAE;AAAA,IACnB;AAAA,IACA,CAAC,OAAO;AACN,UAAI,OAAO,KAAK;AACd,eAAO;AAAA,MACT;AACA,UAAI,OAAO,OAAO;AAChB,eAAO;AAAA,MACT;AACA,aAAO;AAAA,IACT;AAAA,EACF;AAEA,QAAM,QAAQ,IAAI,OAAO;AACzB,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA;AAAA;AAAA,IAEA,OAAO,KAAK,MAAM,KAAK;AAAA,EACzB;AACF;AAGA,SAAS,cAAc,GAA8B;AAEnD,SAAO;AAAA,IACL,MAAM;AAAA,IACN,OAAO,EAAE,CAAC;AAAA;AAAA,IACV,OAAO,KAAK,MAAM,EAAE,CAAC,CAAC;AAAA;AAAA,EACxB;AACF;AAGA,SAAS,YAAY,GAA8B;AAEjD,QAAM,QAAQ,EAAE,CAAC;AACjB,QAAM,QACJ,MACA,MAAM,QAAQ,OAAO,MAAM,EAAE,QAAQ,MAAM,KAAK;AAAA,EAChD;AACF,SAAO;AAAA,IACL,MAAM;AAAA;AAAA,IACN;AAAA;AAAA,IACA;AAAA;AAAA,EACF;AACF;AAGA,SAAS,SAAS,GAA8B;AAE9C,QAAM,QAAQ,EAAE,CAAC,EAAE;AAAA,IAAQ;AAAA,IAAM,CAAC,MAChC,sBAAsB,KAAK,CAAC,IAAI,IAAI;AAAA,EACtC;AACA,SAAO;AAAA,IACL,MAAM;AAAA;AAAA,IACN;AAAA;AAAA,IACA,OAAO;AAAA;AAAA,EACT;AACF;AAGA,SAAS,QAAQ,GAA8B;AAE7C,SAAO;AAAA,IACL,MAAM;AAAA,IACN,OAAO,EAAE,CAAC;AAAA;AAAA,IACV,OAAO,OAAO,WAAW,EAAE,CAAC,CAAC;AAAA;AAAA,EAC/B;AACF;AAGA,SAAS,SAAS,GAA8B;AAE9C,MAAI;AACJ,UAAQ,EAAE,CAAC,GAAG;AAAA,IACZ,KAAK;AACH,cAAQ;AACR;AAAA,IACF,KAAK;AACH,cAAQ;AACR;AAAA,IACF,KAAK;AACH,cAAQ;AACR;AAAA,IACF;AAEE,YAAM,IAAI,MAAM,uBAAuB,EAAE,CAAC,CAAC,EAAE;AAAA,EACjD;AACA,SAAO;AAAA,IACL,MAAM;AAAA;AAAA,IACN,OAAO,EAAE,CAAC;AAAA;AAAA,IACV;AAAA;AAAA,EACF;AACF;AAKA,SAAS,eAAe,SAA+B;AAGrD,WAAS,EAAE,MAAmD;AAE5D,WAAO,CAAC,MAAiC;AAEvC,aAAO,EAAE,MAAM,OAAO,EAAE,CAAC,GAAG,OAAO,OAAU;AAAA,IAC/C;AAAA,EACF;AAGA,MAAI,aAA0B;AAAA,IAC5B,EAAE,IAAI,kBAAkB,GAAG,EAAE,GAAG,EAAE;AAAA;AAAA,IAClC,EAAE,IAAI,oBAAoB,GAAG,EAAE,GAAG,EAAE;AAAA;AAAA,IACpC,EAAE,IAAI,kBAAkB,GAAG,EAAE,GAAG,EAAE;AAAA;AAAA,IAClC,EAAE,IAAI,mBAAmB,GAAG,EAAE,GAAG,EAAE;AAAA;AAAA,IACnC,EAAE,IAAI,iBAAiB,GAAG,EAAE,GAAG,EAAE;AAAA;AAAA,IACjC,EAAE,IAAI,aAAa,GAAG,EAAE,GAAG,EAAE;AAAA;AAAA,IAC7B,EAAE,IAAI,aAAa,GAAG,EAAE,GAAG,EAAE;AAAA;AAAA,IAC7B,EAAE,IAAI,eAAe,GAAG,SAAS;AAAA;AAAA;AAAA,IAEjC,EAAE,IAAI,cAAc,GAAG,QAAQ;AAAA;AAAA,IAE/B,EAAE,IAAI,qBAAqB,GAAG,cAAc;AAAA,EAC9C;AAGA,MAAI,SAAS;AACX,iBAAa,WAAW,OAAO;AAAA;AAAA,MAE7B;AAAA,QACE,IAAI;AAAA,QACJ,GAAG;AAAA,MACL;AAAA;AAAA,MAEA,EAAE,IAAI,sBAAsB,GAAG,SAAS;AAAA;AAAA,MAExC,EAAE,IAAI,qBAAqB,GAAG,SAAS;AAAA;AAAA;AAAA,MAGvC,EAAE,IAAI,kBAAkB,GAAG,YAAY;AAAA;AAAA,IAEzC,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAGA,IAAM,QAAQ,UAAU,eAAe,IAAI,CAAC;AAC5C,IAAM,cAAc,UAAU,eAAe,KAAK,CAAC;AAMnD,SAAS
,iBAAiB,QAAiB,OAAmC;AAC5E,MAAI,eAAe;AACnB,SAAO,gBAAgB,GAAG,gBAAgB,GAAG;AAC3C,QAAI,OAAO,YAAY,EAAE,SAAS,KAAK;AACrC,aAAO;AAAA,IACT;AAAA,EACF;AACA;AACF;AAIA,SAAS,mBAAmB,QAA0B;AACpD,QAAM,MAAe,CAAC;AAEtB,SAAO,QAAQ,CAAC,OAAO,UAAU;AAE/B,QAAI,QAAQ,MAAM,MAAM,SAAS,OAAO,MAAM,SAAS,MAAM;AAE3D,YAAM,oBAAoB,iBAAiB,KAAK,IAAI,SAAS,CAAC;AAG9D,UACE,sBAAsB,UACtB,IAAI,iBAAiB,EAAE,SAAS,KAChC;AAEA,cAAM,gBAAgB,iBAAiB,KAAK,oBAAoB,CAAC;AAIjE,YACE,kBAAkB,UAClB,IAAI,aAAa,EAAE,SAAS,OAC5B,IAAI,aAAa,EAAE,SAAS,KAC5B;AAEA,cAAI,iBAAiB,IAAI;AAAA,YACvB,MAAM;AAAA,YACN,OAAO;AAAA;AAAA,YACP,OAAO;AAAA;AAAA,YACP,MAAM,IAAI,iBAAiB,EAAE;AAAA;AAAA,UAC/B;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,QAAI,KAAK,KAAK;AAAA,EAChB,CAAC;AAED,SAAO;AACT;AAoBA,SAAS,UAAU,MAAsB;AAEvC,MAAI,SAAS,MAAM,IAAI;AAGvB,WAAS,mBAAmB,MAAM;AAGlC,SAAO,OAAO,OAAO,CAAC,KAAK,UAAU,MAAM,MAAM,OAAO,EAAE;AAC5D;AAMA,SAAS,SAAS,QAAiB,OAA0B;AAle7D;AAmeE,QAAM,QAAQ,OAAO,MAAM,GAAG;AAC9B,QAAM,OAAO;AAEb,MAAI,CAAC,OAAO;AAEV,UAAM,WAAW,OAAO,WAAW,KAAK,kBAAO,GAAG,EAAE,MAAZ,mBAAe,SAAf,YAAuB,IAAK;AACpE,WAAO,EAAE,MAAM,OAAO,OAAO,IAAI,OAAO,QAAW,MAAM,SAAS;AAAA,EACpE;AAEA,SAAO;AACT;AAIA,SAAS,SAAS,OAAsB;AACtC,UAAQ,MAAM,MAAM;AAAA,IAClB,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAEH,aAAO,GAAG,MAAM,IAAI,IAAI,MAAM,KAAK;AAAA,IACrC,KAAK;AACH,aAAO;AAAA,IACT;AAEE,aAAO,IAAI,MAAM,IAAI;AAAA,EACzB;AACF;AAIA,SAAS,UAAU,QAAiB,OAAyB;AAC3D,QAAM,QAAQ,SAAS,QAAQ,KAAK;AACpC,MAAI,MAAM,SAAS,KAAK;AACtB,UAAM,UAAU,qBAAqB,SAAS,KAAK,CAAC;AACpD,QAAI,MAAM,UAAU;AAClB,YAAM,SAAS,KAAK;AAAA,QAClB;AAAA,QACA,MAAM,MAAM;AAAA,MACd,CAAC;AAGD,YAAM,OAAO;AAAA,IACf,OAAO;AACL,YAAM,MAAM,IAAI,YAAY,OAAO;AACnC,MAAC,IAA0B,OAAO,MAAM;AACxC,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAKA,SAAS,gBACP,QACA,OACA,OACO;AAEP,QAAM,cAA2B,CAAC,KAAK,KAAK,KAAK,GAAG;AACpD,MAAI,QAAQ,SAAS,QAAQ,KAAK;AAElC,SAAO,MAAM;AAEX,QAAI,+BAAO,SAAS,MAAM,OAAO;AAC/B,aAAO;AAAA,IACT;AACA,QAAI,MAAM,SAAS,OAAO;AAExB,aAAO;AAAA,IACT;AACA,QAAI,YAAY,SAAS,MAAM,IAAI,GAAG;AAEpC,YAAM,UAAU,qBAAqB;AAAA,QACnC;AAAA,MACF,CAAC;AACD,UAAI,MAAM,UAAU;AAElB,cAAM,SAAS,KAAK;AAAA,UAClB;AAAA,UACA,MAAM,MAAM;AAAA,QACd,CAAC;AACD,gBAAQ,SAAS,QAAQ,KAAK;AAAA,MAChC,OAAO;AAEL,cAAM,MAAM,IAAI,YAAY,OAAO;AACnC,QAAC,IAA0B,OAAO,MAAM;AACxC,cAAM;AAAA,MACR;AAAA,IACF,OAAO;AAGL,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAIA,SAAS,WAAW,OAAmB,OAAc,SAAuB;AAC1E,MAAI,MAAM,UAAU;AAClB,UAAM,SAAS,KAAK;AAAA,MAClB;AAAA,MACA,MAAM,MAAM;AAAA,IACd,CAAC;AAAA,EACH,OAAO;AACL,UAAM,MAAM,IAAI,YAAY,OAAO;AACnC,IAAC,IAA0B,OAAO,MAAM;AACxC,UAAM;AAAA,EACR;AACF;AAIA,SAAS,gBACP,OACA,OACA,UACM;AACN;AAAA,IACE;AAAA,IACA;AAAA,IACA,qBAAqB,SAAS,KAAK,CAAC,cAAc,QAAQ;AAAA,EAC5D;AACF;AAMA,SAAS,gBACP,OACA,KACA,OACM;AAGN,QAAM,MAAM,OAAO,MAAM,KAAK;AAI9B,MAAI,CAAC,MAAM,aAAa,OAAO,OAAO,KAAK,GAAG,GAAG;AAC/C,eAAW,OAAO,OAAO,kBAAkB,GAAG,EAAE;AAAA,EAGlD;AACF;AAIA,SAAS,WACP,OACA,KACA,KACA,OACM;AAEN,QAAM,aAAa,MAAM,UAAU,MAAM,QAAQ,KAAK,KAAK,IAAI;AAE/D,MAAI,eAAe,QAAW;AAC5B,QAAI,GAAG,IAAI;AAAA,EACb;AACF;AAIA,SAAS,UACP,QACA,OACA,KACM;AAEN,MAAI,QAAQ,gBAAgB,QAAQ,OAAO,CAAC,KAAK,UAAU,UAAU,MAAM,CAAC;AAC5E,MAAI;AAGJ,MAAI,MAAM,SAAS,UAAU;AAE3B,oBAAgB,OAAO,OAAO,YAAY;AAG1C,QAAI,MAAM,UAAU;AAClB,cAAQ,MAAM,MAAM;AAAA,QAClB,KAAK;AACH,kBAAQ;AAAA,YACN,MAAM;AAAA,YACN,OAAO;AAAA,YACP,OAAO;AAAA,YACP,MAAM,MAAM;AAAA,UACd;AACA,gBAAM,OAAO;AACb;AAAA,QACF,KAAK;AAAA;AAAA,QACL,KAAK;AACH,kBAAQ;AAAA,YACN,MAAM;AAAA,YACN,OAAO,OAAO,MAAM,KAAK;AAAA,YACzB,OAAO,IAAI,MAAM,KAAK;AAAA,YACtB,MAAM,MAAM;AAAA,UACd;AACA;AAAA,QACF,KAAK;AAAA;AAAA,QACL,KAAK;AACH,gBAAM,OAAO;AACb,kBAAQ,SAAS,QAAQ,KAAK;AAC9B,0BAAgB,OAAO,KAAK;AAAA,YAC1B,MAAM;AAAA,YACN,OAAO;AAAA,YACP,OAAO;AAAA,YACP,MAAM,MAAM;AAAA,UACd,CAAC;AACD,qBAAW,OAAO,KAAK,QAAQ,KAAK;AACpC;AAAA;AAAA,QACF,KAAK;AACH;AAAA;AAAA,QAC
F;AAGE;AAAA,MACJ;AAAA,IACF,OAAO;AAEL;AAAA,IACF;AAAA,EACF;AAGA,kBAAgB,OAAO,KAAK,KAAK;AACjC,QAAM,MAAM,OAAO,MAAM,KAAK;AAG9B,YAAU,QAAQ,KAAK;AACvB,UAAQ,SAAS,QAAQ,KAAK;AAG9B,aAAW,OAAO,KAAK,KAAK,KAAK;AACnC;AAIA,SAAS,aACP,QACA,OACA,KACM;AACN,QAAM,MAAM,IAAI;AAGhB,QAAM,QAAQ,SAAS,QAAQ,KAAK;AAEpC,MAAI,GAAG,IAAI,MAAM,UAAU,MAAM,QAAQ,OAAO,GAAG,GAAG,KAAK,IAAI;AACjE;AAIA,SAAS,YACP,QACA,OAC4B;AAC5B,QAAM,MAAM,CAAC;AAEb,SAAO,UAAsC,QAAQ,OAAO,KAAK;AAAA,IAC/D,MAAM,CAAC,KAAK,GAAG;AAAA;AAAA,IACf,eAAe;AAAA;AAAA,IACf,aAAa;AAAA;AAAA,IACb,WAAW;AAAA;AAAA,EACb,CAAC;AACH;AAIA,SAAS,WAAW,QAAiB,OAA8B;AACjE,QAAM,MAAiB,CAAC;AAExB,SAAO,UAAqB,QAAQ,OAAO,KAAK;AAAA,IAC9C,MAAM,CAAC,GAAG;AAAA;AAAA,IACV,eAAe;AAAA;AAAA,IACf,aAAa;AAAA;AAAA,IACb,WAAW;AAAA;AAAA,EACb,CAAC;AACH;AAGA,SAAS,mBACP,OACA,OACA,MACA,QACU;AACV,kBAAgB,OAAO,OAAO,WAAW,KAAK,SAAS,GAAG;AAE1D,MAAI,MAAM,UAAU;AAClB,QAAI,MAAM,SAAS,OAAO;AACxB,aAAO;AAAA,IACT;AAEA,UAAM,OAAO;AACb,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAWA,SAAS,iBAAoB,QAA6C;AACxE,QAAM,EAAE,OAAO,QAAQ,OAAO,MAAM,OAAO,IAAI;AAC/C,QAAM,YAAY,OAAO,MAAM,GAAG;AAClC,MAAI,MAAM,YAAY,aAAa,UAAU,SAAS,KAAK,WAAW;AACpE,eAAW,OAAO,OAAO,0BAA0B,KAAK,SAAS,GAAG;AACpE,aAAS,QAAQ,KAAK;AACtB,WAAO;AAAA,EACT;AACA,OAAK,cAAc,QAAQ,OAAO,MAAM;AACxC,SAAO;AACT;AAGA,SAAS,wBACP,QACA,OACA,QACA,MACe;AACf,QAAM,QAAQ,gBAAgB,QAAQ,OAAO,KAAK,IAAI;AAEtD,MAAI,MAAM,SAAS,OAAO;AACxB,oBAAgB,OAAO,OAAO,IAAI,KAAK,SAAS,QAAQ,KAAK,WAAW,EAAE;AAC1E,WAAO;AAAA,EACT;AAEA,MAAI,MAAM,SAAS,KAAK,WAAW;AACjC,WAAO;AAAA,EACT;AAEA,QAAM,OAAO;AACb,OAAK,cAAc,QAAQ,OAAO,MAAM;AACxC;AACF;AAGA,SAAS,sBAAyB,QAMhB;AAChB,QAAM,EAAE,OAAO,QAAQ,OAAO,MAAM,OAAO,IAAI;AAC/C,MAAI,MAAM,SAAS,KAAK,aAAa,MAAM,SAAS,KAAK;AACvD,UAAM,gBAAgB,mBAAmB,OAAO,OAAO,MAAM,MAAM;AACnE,QAAI,kBAAkB,MAAM;AAC1B,aAAO;AAAA,IACT;AAAA,EACF;AAEA,MAAI,MAAM,SAAS,KAAK,WAAW;AACjC,WAAO;AAAA,EACT;AAEA,MAAI,MAAM,SAAS,KAAK;AACtB,UAAM,gBAAgB,iBAAiB;AAAA,MACrC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AACD,QAAI,kBAAkB,MAAM;AAC1B,aAAO;AAAA,IACT;AACA;AAAA,EACF;AAEA,OAAK,cAAc,QAAQ,OAAO,MAAM;AACxC;AACF;AAIA,SAAS,UACP,QACA,OACA,QACA,MACG;AACH,QAAM,gBAAgB,wBAAwB,QAAQ,OAAO,QAAQ,IAAI;AACzE,MAAI,kBAAkB,QAAW;AAC/B,WAAO;AAAA,EACT;AAEA,SAAO,MAAM;AACX,UAAM,QAAQ,SAAS,QAAQ,KAAK;AACpC,UAAM,kBAAkB,sBAAsB;AAAA,MAC5C;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AACD,QAAI,oBAAoB,QAAW;AACjC,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAIA,SAAS,UAAU,QAAiB,OAAmB,KAAoB;AAEzE,MAAI,MAAM,MAAM,OAAO,QAAQ;AAE7B,QAAI,MAAM,UAAU;AAClB,sBAAgB,QAAQ,KAAK;AAAA,IAC/B;AAEA,QAAI,MAAM,MAAM,OAAO,QAAQ;AAC7B;AAAA,QACE;AAAA,QACA,OAAO,MAAM,GAAG;AAAA,QAChB,qBAAqB,SAAS,OAAO,MAAM,GAAG,CAAC,CAAC;AAAA,MAClD;AAAA,IACF;AAAA,EACF;AAGA,MAAI,MAAM,YAAY,MAAM,SAAS,SAAS,GAAG;AAC/C,UAAM,UACJ,MAAM,SAAS,WAAW,IACtB,MAAM,SAAS,CAAC,EAAE,UAClB,GAAG,MAAM,SAAS,MAAM;AAC9B,UAAM,MAAM,IAAI,YAAY,OAAO;AAEnC,IAAC,IAAoE,OACnE,MAAM,SAAS,CAAC,EAAE;AACpB,IACE,IACA,WAAW,MAAM;AACnB,IAAC,IAAoE,MACnE;AACF,UAAM;AAAA,EACR;AACF;AAIA,SAAS,SAAS,QAAiB,OAAmB,MAAM,OAAgB;AAE1E,QAAM,QAAQ,gBAAgB,QAAQ,KAAK;AAC3C,MAAI;AAGJ,MAAI,MAAM,SAAS,OAAO;AAGxB,QAAI,KAAK;AACP,sBAAgB,OAAO,OAAO,YAAY;AAAA,IAC5C;AAIA,oBAAgB,OAAO,OAAO,YAAY;AAC1C;AAAA,EACF;AAGA,UAAQ,MAAM,MAAM;AAAA,IAClB,KAAK;AACH,YAAM,YAAY,QAAQ,KAAK;AAC/B;AAAA,IACF,KAAK;AACH,YAAM,WAAW,QAAQ,KAAK;AAC9B;AAAA,IACF,KAAK;AAAA;AAAA,IACL,KAAK;AAAA;AAAA,IACL,KAAK;AACH,YAAM,MAAM;AACZ;AAAA,IACF;AAEE,sBAAgB,OAAO,OAAO,YAAY;AAE1C,UAAI,MAAM,UAAU;AAClB,cAAM;AAAA,MACR,OAAO;AAEL;AAAA,MACF;AAAA,EACJ;AAGA,MAAI,KAAK;AAEP,UAAM,MAAM,UAAU,MAAM,QAAQ,IAAI,GAAG,IAAI;AAE/C,cAAU,QAAQ,OAAO,GAAG;AAAA,EAC9B;AAEA,SAAO;AACT;AAGA,SAAS,sBACP,eACc;AAl+BhB;AAm+BE,MAAI,UAAwB,CAAC;AAE7B,MAAI,OAAO,kBAAkB,YAAY;AACvC,YAAQ,UAAU;AAAA,EAC
pB,WAAW,kBAAkB,QAAQ,OAAO,kBAAkB,UAAU;AACtE,cAAU,EAAE,GAAG,cAAc;AAAA,EAC/B,WAAW,kBAAkB,QAAW;AACtC,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAGA,MAAI,QAAQ,YAAY,QAAW;AACjC,QAAI,QAAQ,aAAa,QAAQ,QAAQ,aAAa,MAAM;AAC1D,cAAQ,UAAU;AAAA,IACpB,WAAW,QAAQ,aAAa,SAAS,QAAQ,aAAa,OAAO;AACnE,cAAQ,UAAU;AAAA,IACpB,OAAO;AACL,cAAQ,UAAU;AAAA,IACpB;AAAA,EACF;AAEA,UAAQ,WAAW,QAAQ,YAAY,QAAQ;AAC/C,UAAQ,aAAY,aAAQ,cAAR,YAAqB;AAEzC,SAAO;AACT;AAGA,SAAS,iBAAiB,SAAmC;AAjgC7D;AAkgCE,SAAO;AAAA,IACL,KAAK;AAAA,IACL,SAAS,QAAQ;AAAA,IACjB,WAAU,aAAQ,aAAR,YAAoB;AAAA,IAC9B,YAAW,aAAQ,cAAR,YAAqB;AAAA,IAChC,UAAU,CAAC;AAAA,EACb;AACF;AAGA,SAAS,sBAAsB,MAAc,SAAgC;AAC3E,QAAM,aAAa,QAAQ,UAAU,QAAQ;AAC7C,MAAI,SAAS,WAAW,IAAI;AAE5B,MAAI,QAAQ,SAAS;AACnB,aAAS,mBAAmB,MAAM;AAAA,EACpC;AAEA,WAAS,OAAO,OAAO,CAAC,UAAU,MAAM,SAAS,GAAG;AACpD,QAAM,QAAQ,iBAAiB,OAAO;AACtC,SAAO,SAAS,QAAQ,OAAO,IAAI;AACrC;AAGA,SAAS,mBAAmB,MAAc,SAAgC;AACxE,MAAI,SAAS,MAAM,IAAI;AACvB,WAAS,mBAAmB,MAAM;AAClC,QAAM,UAAU,OAAO,OAAO,CAAC,KAAK,UAAU,MAAM,MAAM,OAAO,EAAE;AACnE,SAAO,KAAK;AAAA,IACV;AAAA,IACA,QAAQ;AAAA,EACV;AACF;AAmCA,SAAS,MACP,MACA,eACS;AACT,QAAM,UAAU,sBAAsB,aAAa;AAGnD,MACE,EAAE,QAAQ,WAAW,QAAQ,YAAY,QAAQ,aACjD,QAAQ,WACR;AACA,WAAO,KAAK;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV;AAAA,EACF;AAGA,MAAI,QAAQ,YAAY,QAAQ,YAAY,CAAC,QAAQ,WAAW;AAC9D,WAAO,sBAAsB,MAAM,OAAO;AAAA,EAC5C;AAGA,SAAO,mBAAmB,MAAM,OAAO;AACzC;AAQA,SAAS,cACP,KACA,KACQ;AAGR,SAAO,GAAG,KAAK,UAAU,GAAG,CAAC,IAAI,UAAU,IAAI,GAAG,CAAC,CAAC;AACtD;AAoBA,SAAS,UAAU,KAAsB;AACvC,QAAM,OAAO,OAAO;AAGpB,MACE,SAAS,YACT,SAAS,YACT,SAAS,aACT,QAAQ,MACR;AACA,WAAO,KAAK,UAAU,GAAG;AAAA,EAC3B;AAGA,MAAI,SAAS,aAAa;AACxB,WAAO;AAAA,EACT;AAGA,MAAI,MAAM,QAAQ,GAAG,GAAG;AAEtB,UAAM,WAAW,IAAI,IAAI,SAAS,EAAE,KAAK,GAAG;AAC5C,WAAO,IAAI,QAAQ;AAAA,EACrB;AAIA,MAAI,SAAS,UAAU;AAGrB,UAAM,OAAO,OAAO,KAAK,GAAa;AACtC,SAAK,KAAK;AAEV,UAAM,QAAQ,KACX,IAAI,CAAC,QAAQ,cAAc,KAAsC,GAAG,CAAC,EACrE,KAAK,GAAG;AACX,WAAO,IAAI,KAAK;AAAA,EAClB;AAGA,SAAO;AACT;","names":[]}
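The source map above embeds the full `src/rjson/index.ts` source, whose TSDoc documents the relaxed-JSON `parse` options (`relaxed`, `tolerant`, `warnings`, `duplicate`, `reviver`) plus the `transform` and `stringify` helpers. A minimal usage sketch of that documented behavior follows; the subpath import is an assumption inferred from the new `dist/rjson.*` files, not a confirmed export path.

```ts
// Sketch only: "@ai-sdk-tool/parser/rjson" is an assumed entry point based on
// the new dist/rjson.* files in this release; check package.json "exports".
import { parse, transform } from "@ai-sdk-tool/parser/rjson";

// Relaxed syntax (default): unquoted keys, single quotes, trailing commas.
const value = parse("{key: 'value', trailing: 'comma',}");
// -> { key: "value", trailing: "comma" }

// Duplicate keys are rejected unless explicitly allowed (per the embedded TSDoc):
// parse('{"a": 1, "a": 2}');                             // throws SyntaxError: Duplicate key: a
const last = parse('{"a": 1, "a": 2}', { duplicate: true }); // { a: 2 }, like native JSON.parse

// transform() rewrites relaxed JSON into standard JSON that JSON.parse accepts.
const strict = transform('{key: "value", trailing: "comma",}');

console.log(value, last, strict);
```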
|
|
@@ -0,0 +1,389 @@
+// src/schema-coerce/index.ts
+var NUMERIC_REGEX = /^-?\d+(?:\.\d+)?(?:[eE][+-]?\d+)?$/;
+var EMPTY_OBJECT_REGEX = /^\{\s*\}$/s;
+var NEWLINE_SPLIT_REGEX = /\n+/;
+var COMMA_SPLIT_REGEX = /,\s*/;
+var DIGIT_KEY_REGEX = /^\d+$/;
+function unwrapJsonSchema(schema) {
+  if (!schema || typeof schema !== "object") {
+    return schema;
+  }
+  const s = schema;
+  if (s.jsonSchema && typeof s.jsonSchema === "object") {
+    return unwrapJsonSchema(s.jsonSchema);
+  }
+  return schema;
+}
+function getSchemaType(schema) {
+  const unwrapped = unwrapJsonSchema(schema);
+  if (!unwrapped || typeof unwrapped !== "object") {
+    return;
+  }
+  const t = unwrapped.type;
+  if (typeof t === "string") {
+    return t;
+  }
+  if (Array.isArray(t)) {
+    const preferred = [
+      "object",
+      "array",
+      "boolean",
+      "number",
+      "integer",
+      "string"
+    ];
+    for (const p of preferred) {
+      if (t.includes(p)) {
+        return p;
+      }
+    }
+  }
+  const s = unwrapped;
+  if (s && typeof s === "object" && (s.properties || s.additionalProperties)) {
+    return "object";
+  }
+  if (s && typeof s === "object" && (s.items || s.prefixItems)) {
+    return "array";
+  }
+  return;
+}
+function schemaAllowsPropertyViaCombinators(s, key, depth) {
+  const anyOfValues = s.anyOf;
+  const oneOfValues = s.oneOf;
+  const allOfValues = s.allOf;
+  let hasCombinator = false;
+  let anyOfAllows = true;
+  let oneOfAllows = true;
+  let allOfAllows = true;
+  if (Array.isArray(anyOfValues)) {
+    hasCombinator = true;
+    anyOfAllows = anyOfValues.some(
+      (sub) => schemaHasProperty(sub, key, depth + 1)
+    );
+  }
+  if (Array.isArray(oneOfValues)) {
+    hasCombinator = true;
+    oneOfAllows = oneOfValues.some(
+      (sub) => schemaHasProperty(sub, key, depth + 1)
+    );
+  }
+  if (Array.isArray(allOfValues)) {
+    hasCombinator = true;
+    allOfAllows = allOfValues.every(
+      (sub) => schemaHasProperty(sub, key, depth + 1)
+    );
+  }
+  if (!hasCombinator) {
+    return false;
+  }
+  return anyOfAllows && oneOfAllows && allOfAllows;
+}
+function schemaHasPropertyDirectly(s, key) {
+  const props = s.properties;
+  if (props && typeof props === "object" && !Array.isArray(props) && Object.hasOwn(props, key) && props[key] !== false) {
+    return true;
+  }
+  const required = s.required;
+  if (Array.isArray(required) && required.includes(key)) {
+    return true;
+  }
+  const patternSchemas = getPatternSchemasForKey(s.patternProperties, key);
+  return patternSchemas.some((schema) => schema !== false);
+}
+function schemaHasPropertyViaAdditional(s) {
+  const additional = s.additionalProperties;
+  if (additional === true || additional && typeof additional === "object" && !Array.isArray(additional)) {
+    return true;
+  }
+  if (Object.hasOwn(s, "additionalProperties")) {
+    return false;
+  }
+  const type = s.type;
+  const isObjectType = type === "object" || Array.isArray(type) && type.includes("object");
+  const hasObjectKeywords = s.properties && typeof s.properties === "object" && !Array.isArray(s.properties) || s.patternProperties && typeof s.patternProperties === "object" && !Array.isArray(s.patternProperties) || Array.isArray(s.required) && s.required.length > 0;
+  return !!(isObjectType || hasObjectKeywords);
+}
+function schemaDisallowsPropertyDirectly(s, key) {
+  const props = s.properties;
+  if (props && typeof props === "object" && !Array.isArray(props) && Object.hasOwn(props, key) && props[key] === false) {
+    return true;
+  }
+  const patternSchemas = getPatternSchemasForKey(s.patternProperties, key);
+  return patternSchemas.some((schema) => schema === false);
+}
+function schemaHasProperty(schema, key, depth = 0) {
+  if (depth > 5) {
+    return true;
+  }
+  const unwrapped = unwrapJsonSchema(schema);
+  if (schemaIsUnconstrained(unwrapped)) {
+    return true;
+  }
+  if (!unwrapped || typeof unwrapped !== "object") {
+    return false;
+  }
+  const s = unwrapped;
+  if (schemaDisallowsPropertyDirectly(s, key)) {
+    return false;
+  }
+  if (schemaHasPropertyDirectly(s, key)) {
+    return true;
+  }
+  if (schemaHasPropertyViaAdditional(s)) {
+    return true;
+  }
+  return schemaAllowsPropertyViaCombinators(s, key, depth);
+}
+function schemaIsUnconstrained(schema) {
+  const unwrapped = unwrapJsonSchema(schema);
+  if (unwrapped == null || unwrapped === true) {
+    return true;
+  }
+  if (typeof unwrapped !== "object" || Array.isArray(unwrapped)) {
+    return false;
+  }
+  return Object.keys(unwrapped).length === 0;
+}
+function getPatternSchemasForKey(patternProperties, key) {
+  if (!patternProperties || typeof patternProperties !== "object" || Array.isArray(patternProperties)) {
+    return [];
+  }
+  const schemas = [];
+  for (const [pattern, schema] of Object.entries(
+    patternProperties
+  )) {
+    try {
+      const regex = new RegExp(pattern);
+      if (regex.test(key)) {
+        schemas.push(schema);
+      }
+    } catch (e) {
+    }
+  }
+  return schemas;
+}
+function coerceValueForKey(value, key, unwrapped) {
+  const schemas = [];
+  const props = unwrapped.properties;
+  if (props && Object.hasOwn(props, key)) {
+    schemas.push(props[key]);
+  }
+  const patternSchemas = getPatternSchemasForKey(
+    unwrapped.patternProperties,
+    key
+  );
+  if (patternSchemas.length > 0) {
+    schemas.push(...patternSchemas);
+  }
+  if (schemas.length > 0) {
+    let out = value;
+    for (const schema of schemas) {
+      if (typeof schema === "boolean") {
+        continue;
+      }
+      out = coerceBySchema(out, schema);
+    }
+    return out;
+  }
+  const additional = unwrapped.additionalProperties;
+  if (additional && typeof additional === "object" && !Array.isArray(additional)) {
+    return coerceBySchema(value, additional);
+  }
+  if (additional === true || additional === false) {
+    return value;
+  }
+  return coerceBySchema(value, void 0);
+}
+function coerceStringWithoutSchema(value) {
+  const s = value.trim();
+  const lower = s.toLowerCase();
+  if (lower === "true") {
+    return true;
+  }
+  if (lower === "false") {
+    return false;
+  }
+  if (NUMERIC_REGEX.test(s)) {
+    const num = Number(s);
+    if (Number.isFinite(num)) {
+      return num;
+    }
+  }
+  if (s.startsWith("{") && s.endsWith("}") || s.startsWith("[") && s.endsWith("]")) {
+    try {
+      const parsed = JSON.parse(s);
+      return coerceBySchema(parsed, void 0);
+    } catch (e) {
+    }
+  }
+  return value;
+}
+function coerceStringToObject(s, unwrapped) {
+  try {
+    let normalized = s.replace(/'/g, '"');
+    normalized = normalized.replace(EMPTY_OBJECT_REGEX, "{}");
+    const obj = JSON.parse(normalized);
+    if (obj && typeof obj === "object" && !Array.isArray(obj)) {
+      return coerceObjectToObject(obj, unwrapped);
+    }
+  } catch (e) {
+  }
+  return null;
+}
+function coerceStringToArray(s, unwrapped) {
+  const prefixItems = Array.isArray(unwrapped.prefixItems) ? unwrapped.prefixItems : void 0;
+  const itemsSchema = unwrapped.items;
+  try {
+    const normalized = s.replace(/'/g, '"');
+    const arr = JSON.parse(normalized);
+    if (Array.isArray(arr)) {
+      if (prefixItems && arr.length === prefixItems.length) {
+        return arr.map((v, i) => coerceBySchema(v, prefixItems[i]));
+      }
+      return arr.map((v) => coerceBySchema(v, itemsSchema));
+    }
+  } catch (e) {
+    const csv = s.includes("\n") ? s.split(NEWLINE_SPLIT_REGEX) : s.split(COMMA_SPLIT_REGEX);
+    const trimmed = csv.map((x) => x.trim()).filter((x) => x.length > 0);
+    if (prefixItems && trimmed.length === prefixItems.length) {
+      return trimmed.map((x, i) => coerceBySchema(x, prefixItems[i]));
+    }
+    return trimmed.map((x) => coerceBySchema(x, itemsSchema));
+  }
+  return null;
+}
+function coerceObjectToObject(value, unwrapped) {
+  const out = {};
+  for (const [k, v] of Object.entries(value)) {
+    out[k] = coerceValueForKey(v, k, unwrapped);
+  }
+  return out;
+}
+function coerceArrayToArray(value, prefixItems, itemsSchema) {
+  if (prefixItems && value.length === prefixItems.length) {
+    return value.map((v, i) => coerceBySchema(v, prefixItems[i]));
+  }
+  return value.map((v) => coerceBySchema(v, itemsSchema));
+}
+function coerceObjectToArray(maybe, prefixItems, itemsSchema) {
+  if (Object.hasOwn(maybe, "item")) {
+    const items = maybe.item;
+    const arr = Array.isArray(items) ? items : [items];
+    return coerceArrayToArray(arr, prefixItems, itemsSchema);
+  }
+  const keys = Object.keys(maybe);
+  if (keys.length > 0 && keys.every((k) => DIGIT_KEY_REGEX.test(k))) {
+    const arr = keys.sort((a, b) => Number(a) - Number(b)).map((k) => maybe[k]);
+    return coerceArrayToArray(arr, prefixItems, itemsSchema);
+  }
+  if (keys.length === 1) {
+    const singleKey = keys[0];
+    if (!(schemaIsUnconstrained(itemsSchema) || schemaHasProperty(itemsSchema, singleKey))) {
+      const singleValue = maybe[singleKey];
+      if (Array.isArray(singleValue)) {
+        return singleValue.map((v) => coerceBySchema(v, itemsSchema));
+      }
+      if (singleValue && typeof singleValue === "object") {
+        return [coerceBySchema(singleValue, itemsSchema)];
+      }
+    }
+  }
+  return null;
+}
+function coercePrimitiveToArray(value, prefixItems, itemsSchema) {
+  if (prefixItems && prefixItems.length > 0) {
+    return [coerceBySchema(value, prefixItems[0])];
+  }
+  return [coerceBySchema(value, itemsSchema)];
+}
+function coerceStringToPrimitive(s, schemaType) {
+  if (schemaType === "boolean") {
+    const lower = s.toLowerCase();
+    if (lower === "true") {
+      return true;
+    }
+    if (lower === "false") {
+      return false;
+    }
+  }
+  if ((schemaType === "number" || schemaType === "integer") && NUMERIC_REGEX.test(s)) {
+    const num = Number(s);
+    if (Number.isFinite(num)) {
+      return num;
+    }
+  }
+  return null;
+}
+function coerceStringValue(value, schemaType, u) {
+  const s = value.trim();
+  if (schemaType === "object") {
+    const result = coerceStringToObject(s, u);
+    if (result !== null) {
+      return result;
+    }
+  }
+  if (schemaType === "array") {
+    const result = coerceStringToArray(s, u);
+    if (result !== null) {
+      return result;
+    }
+  }
+  const primitiveResult = coerceStringToPrimitive(s, schemaType);
+  if (primitiveResult !== null) {
+    return primitiveResult;
+  }
+  return value;
+}
+function coerceArrayValue(value, prefixItems, itemsSchema) {
+  if (Array.isArray(value)) {
+    return coerceArrayToArray(value, prefixItems, itemsSchema);
+  }
+  if (value && typeof value === "object") {
+    const result = coerceObjectToArray(
+      value,
+      prefixItems,
+      itemsSchema
+    );
+    if (result !== null) {
+      return result;
+    }
+    if (getSchemaType(itemsSchema) === "array") {
+      return [value];
+    }
+    return [coerceBySchema(value, itemsSchema)];
+  }
+  if (value == null || typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
+    return coercePrimitiveToArray(value, prefixItems, itemsSchema);
+  }
+  return [value];
+}
+function coerceBySchema(value, schema) {
+  const unwrapped = unwrapJsonSchema(schema);
+  if (!unwrapped || typeof unwrapped !== "object") {
+    if (typeof value === "string") {
+      return coerceStringWithoutSchema(value);
+    }
+    return value;
+  }
+  const schemaType = getSchemaType(unwrapped);
+  const u = unwrapped;
+  if (typeof value === "string") {
+    return coerceStringValue(value, schemaType, u);
+  }
+  if (schemaType === "object" && value && typeof value === "object" && !Array.isArray(value)) {
+    return coerceObjectToObject(value, u);
+  }
+  if (schemaType === "array") {
+    const prefixItems = Array.isArray(u.prefixItems) ? u.prefixItems : void 0;
+    const itemsSchema = u.items;
+    return coerceArrayValue(value, prefixItems, itemsSchema);
+  }
+  return value;
+}
+
+export {
+  unwrapJsonSchema,
+  getSchemaType,
+  coerceBySchema
+};
+//# sourceMappingURL=chunk-OUGMLYAW.js.map
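For orientation, a minimal usage sketch of the three functions this chunk exports. The import path below is an assumption (the actual subpath export is not shown in this hunk), and the expected result is inferred from the chunk's coercion logic above.

```ts
// Hypothetical import path -- adjust to however the package exposes schema-coerce.
import {
  coerceBySchema,
  getSchemaType,
  unwrapJsonSchema,
} from "@ai-sdk-tool/parser/schema-coerce";

// A plain JSON Schema describing the target shape.
const schema = {
  type: "object",
  properties: {
    count: { type: "integer" },
    tags: { type: "array", items: { type: "string" } },
    active: { type: "boolean" },
  },
};

// String values are narrowed to the schema's types; a comma-separated
// string is split and coerced item by item when the schema expects an array.
const coerced = coerceBySchema(
  { count: "3", tags: "a, b, c", active: "true" },
  schema
);
// coerced -> { count: 3, tags: ["a", "b", "c"], active: true }

console.log(getSchemaType(unwrapJsonSchema(schema))); // "object"
console.log(coerced);
```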
@@ -0,0 +1 @@
+
{"version":3,"sources":["../src/schema-coerce/index.ts"],"sourcesContent":["// Regex constants for performance\nconst NUMERIC_REGEX = /^-?\\d+(?:\\.\\d+)?(?:[eE][+-]?\\d+)?$/;\nconst EMPTY_OBJECT_REGEX = /^\\{\\s*\\}$/s;\nconst NEWLINE_SPLIT_REGEX = /\\n+/;\nconst COMMA_SPLIT_REGEX = /,\\s*/;\nconst DIGIT_KEY_REGEX = /^\\d+$/;\n\nexport function unwrapJsonSchema(schema: unknown): unknown {\n if (!schema || typeof schema !== \"object\") {\n return schema;\n }\n const s = schema as Record<string, unknown>;\n if (s.jsonSchema && typeof s.jsonSchema === \"object\") {\n return unwrapJsonSchema(s.jsonSchema);\n }\n return schema;\n}\n\nexport function getSchemaType(schema: unknown): string | undefined {\n const unwrapped = unwrapJsonSchema(schema);\n if (!unwrapped || typeof unwrapped !== \"object\") {\n return;\n }\n const t: unknown = (unwrapped as Record<string, unknown>).type;\n if (typeof t === \"string\") {\n return t;\n }\n if (Array.isArray(t)) {\n const preferred = [\n \"object\",\n \"array\",\n \"boolean\",\n \"number\",\n \"integer\",\n \"string\",\n ];\n for (const p of preferred) {\n if (t.includes(p)) {\n return p;\n }\n }\n }\n const s = unwrapped as Record<string, unknown>;\n if (s && typeof s === \"object\" && (s.properties || s.additionalProperties)) {\n return \"object\";\n }\n if (\n s &&\n typeof s === \"object\" &&\n (s.items || (s as Record<string, unknown>).prefixItems)\n ) {\n return \"array\";\n }\n return;\n}\n\n/**\n * Checks if a property is allowed through schema combinators (anyOf, oneOf, allOf).\n *\n * @param s - The schema object to check\n * @param key - The property key to look for\n * @param depth - Current recursion depth\n * @returns `true` if at least one combinator exists AND allows the property;\n * `false` if no combinators exist OR none allow the property.\n * When no combinators are present, returns `false` so the caller can\n * fall back to other property-checking methods.\n *\n * **oneOf semantics**: JSON Schema's `oneOf` requires exactly one schema to match,\n * but for coercion heuristics we treat it like `anyOf` (at least one allows).\n * This is intentional because:\n * 1. We're determining if a property CAN exist, not validating exact matches\n * 2. Coercion should be permissive - if any branch allows the property, we allow it\n * 3. 
Strict oneOf validation would require runtime value inspection, not just schema analysis\n */\nfunction schemaAllowsPropertyViaCombinators(\n s: Record<string, unknown>,\n key: string,\n depth: number\n): boolean {\n const anyOfValues = s.anyOf;\n const oneOfValues = s.oneOf;\n const allOfValues = s.allOf;\n\n let hasCombinator = false;\n let anyOfAllows = true;\n let oneOfAllows = true;\n let allOfAllows = true;\n\n if (Array.isArray(anyOfValues)) {\n hasCombinator = true;\n anyOfAllows = anyOfValues.some((sub) =>\n schemaHasProperty(sub, key, depth + 1)\n );\n }\n\n if (Array.isArray(oneOfValues)) {\n hasCombinator = true;\n oneOfAllows = oneOfValues.some((sub) =>\n schemaHasProperty(sub, key, depth + 1)\n );\n }\n\n if (Array.isArray(allOfValues)) {\n hasCombinator = true;\n allOfAllows = allOfValues.every((sub) =>\n schemaHasProperty(sub, key, depth + 1)\n );\n }\n\n if (!hasCombinator) {\n return false;\n }\n\n return anyOfAllows && oneOfAllows && allOfAllows;\n}\n\nfunction schemaHasPropertyDirectly(\n s: Record<string, unknown>,\n key: string\n): boolean {\n const props = s.properties;\n if (\n props &&\n typeof props === \"object\" &&\n !Array.isArray(props) &&\n Object.hasOwn(props, key) &&\n (props as Record<string, unknown>)[key] !== false\n ) {\n return true;\n }\n const required = s.required;\n if (Array.isArray(required) && required.includes(key)) {\n return true;\n }\n const patternSchemas = getPatternSchemasForKey(s.patternProperties, key);\n return patternSchemas.some((schema) => schema !== false);\n}\n\n/**\n * Checks if a schema allows additional properties beyond those explicitly defined.\n *\n * JSON Schema behavior for additionalProperties:\n * - `additionalProperties: true` or `additionalProperties: { schema }`: Explicitly allows additional properties\n * - `additionalProperties: false`: Explicitly disallows additional properties\n * - `additionalProperties` not specified: Defaults to allowing additional properties (JSON Schema spec)\n *\n * When `additionalProperties` is not explicitly set, this function returns `true` if the schema\n * appears to be an object schema (has `type: \"object\"`, `properties`, `patternProperties`, or `required`).\n * This follows the JSON Schema specification where omitting `additionalProperties` is equivalent to `true`.\n *\n * **Important**: This means schemas like `{ type: \"object\", properties: { foo: ... 
} }` without\n * `additionalProperties: false` will be treated as allowing any additional property, which affects\n * single-key object unwrapping behavior in array coercion.\n *\n * @param s - The schema object to check\n * @returns `true` if the schema allows additional properties, `false` otherwise\n */\nfunction schemaHasPropertyViaAdditional(s: Record<string, unknown>): boolean {\n const additional = s.additionalProperties;\n if (\n additional === true ||\n (additional && typeof additional === \"object\" && !Array.isArray(additional))\n ) {\n return true;\n }\n if (Object.hasOwn(s, \"additionalProperties\")) {\n return false;\n }\n const type = s.type;\n const isObjectType =\n type === \"object\" || (Array.isArray(type) && type.includes(\"object\"));\n const hasObjectKeywords =\n (s.properties &&\n typeof s.properties === \"object\" &&\n !Array.isArray(s.properties)) ||\n (s.patternProperties &&\n typeof s.patternProperties === \"object\" &&\n !Array.isArray(s.patternProperties)) ||\n (Array.isArray(s.required) && s.required.length > 0);\n return !!(isObjectType || hasObjectKeywords);\n}\n\nfunction schemaDisallowsPropertyDirectly(\n s: Record<string, unknown>,\n key: string\n): boolean {\n const props = s.properties;\n if (\n props &&\n typeof props === \"object\" &&\n !Array.isArray(props) &&\n Object.hasOwn(props, key) &&\n (props as Record<string, unknown>)[key] === false\n ) {\n return true;\n }\n const patternSchemas = getPatternSchemasForKey(s.patternProperties, key);\n return patternSchemas.some((schema) => schema === false);\n}\n\n/**\n * Checks if a schema allows a specific property key.\n *\n * Recursively checks through schema combinators (allOf, anyOf, oneOf) to determine\n * if the given key is allowed by the schema.\n *\n * @param schema - The JSON Schema to check\n * @param key - The property key to check for\n * @param depth - Current recursion depth (default: 0)\n * @returns `true` if the schema allows the property, `false` otherwise\n *\n * @remarks\n * The depth limit of 5 prevents infinite recursion in deeply nested or circular\n * schema references. This limit is sufficient for most real-world schemas while\n * protecting against pathological cases. 
When the limit is exceeded, the function\n * conservatively returns `true` to prevent unwrapping - it's safer to keep a\n * wrapper key than to incorrectly remove it and lose data.\n */\nfunction schemaHasProperty(schema: unknown, key: string, depth = 0): boolean {\n if (depth > 5) {\n return true;\n }\n const unwrapped = unwrapJsonSchema(schema);\n // Unconstrained schemas (true, null, {}) allow any property\n if (schemaIsUnconstrained(unwrapped)) {\n return true;\n }\n if (!unwrapped || typeof unwrapped !== \"object\") {\n return false;\n }\n const s = unwrapped as Record<string, unknown>;\n\n if (schemaDisallowsPropertyDirectly(s, key)) {\n return false;\n }\n if (schemaHasPropertyDirectly(s, key)) {\n return true;\n }\n if (schemaHasPropertyViaAdditional(s)) {\n return true;\n }\n return schemaAllowsPropertyViaCombinators(s, key, depth);\n}\n\nfunction schemaIsUnconstrained(schema: unknown): boolean {\n const unwrapped = unwrapJsonSchema(schema);\n if (unwrapped == null || unwrapped === true) {\n return true;\n }\n if (typeof unwrapped !== \"object\" || Array.isArray(unwrapped)) {\n return false;\n }\n return Object.keys(unwrapped).length === 0;\n}\n\n/**\n * Gets all schemas from patternProperties that match the given key.\n *\n * @param patternProperties - The patternProperties object from a JSON Schema\n * @param key - The property key to match against patterns\n * @returns Array of schemas whose patterns match the key\n *\n * @remarks\n * **Security consideration**: This function executes regex patterns from the schema.\n * In typical usage (AI SDK tool parsing), schemas come from trusted application code.\n * However, if schemas can originate from untrusted sources, be aware of potential\n * ReDoS (Regular Expression Denial of Service) with malicious patterns like `(a+)+$`.\n * Consider adding regex timeout or safe-regex validation if processing untrusted schemas.\n */\nfunction getPatternSchemasForKey(\n patternProperties: unknown,\n key: string\n): unknown[] {\n if (\n !patternProperties ||\n typeof patternProperties !== \"object\" ||\n Array.isArray(patternProperties)\n ) {\n return [];\n }\n const schemas: unknown[] = [];\n for (const [pattern, schema] of Object.entries(\n patternProperties as Record<string, unknown>\n )) {\n try {\n const regex = new RegExp(pattern);\n if (regex.test(key)) {\n schemas.push(schema);\n }\n } catch {\n // Ignore invalid regex patterns.\n }\n }\n return schemas;\n}\n\nfunction coerceValueForKey(\n value: unknown,\n key: string,\n unwrapped: Record<string, unknown>\n): unknown {\n const schemas: unknown[] = [];\n const props = unwrapped.properties as Record<string, unknown> | undefined;\n if (props && Object.hasOwn(props, key)) {\n schemas.push(props[key]);\n }\n const patternSchemas = getPatternSchemasForKey(\n unwrapped.patternProperties,\n key\n );\n if (patternSchemas.length > 0) {\n schemas.push(...patternSchemas);\n }\n\n if (schemas.length > 0) {\n let out = value;\n for (const schema of schemas) {\n if (typeof schema === \"boolean\") {\n continue;\n }\n out = coerceBySchema(out, schema);\n }\n return out;\n }\n\n const additional = unwrapped.additionalProperties;\n if (\n additional &&\n typeof additional === \"object\" &&\n !Array.isArray(additional)\n ) {\n return coerceBySchema(value, additional);\n }\n if (additional === true || additional === false) {\n return value;\n }\n\n return coerceBySchema(value, undefined);\n}\n\n/**\n * Coerce string value without schema information\n */\nfunction coerceStringWithoutSchema(value: string): 
unknown {\n const s = value.trim();\n const lower = s.toLowerCase();\n if (lower === \"true\") {\n return true;\n }\n if (lower === \"false\") {\n return false;\n }\n if (NUMERIC_REGEX.test(s)) {\n const num = Number(s);\n if (Number.isFinite(num)) {\n return num;\n }\n }\n\n // Fallback: try parsing JSON-like strings when no schema info\n if (\n (s.startsWith(\"{\") && s.endsWith(\"}\")) ||\n (s.startsWith(\"[\") && s.endsWith(\"]\"))\n ) {\n try {\n const parsed = JSON.parse(s);\n return coerceBySchema(parsed, undefined);\n } catch {\n // If parsing fails, return original value\n }\n }\n return value;\n}\n\n/**\n * Coerce string to object using schema\n */\nfunction coerceStringToObject(\n s: string,\n unwrapped: Record<string, unknown>\n): unknown {\n try {\n let normalized = s.replace(/'/g, '\"');\n normalized = normalized.replace(EMPTY_OBJECT_REGEX, \"{}\");\n\n const obj = JSON.parse(normalized);\n if (obj && typeof obj === \"object\" && !Array.isArray(obj)) {\n return coerceObjectToObject(obj as Record<string, unknown>, unwrapped);\n }\n } catch {\n // fallthrough\n }\n return null;\n}\n\n/**\n * Coerce string to array using schema\n */\nfunction coerceStringToArray(\n s: string,\n unwrapped: Record<string, unknown>\n): unknown {\n const prefixItems = Array.isArray(unwrapped.prefixItems)\n ? (unwrapped.prefixItems as unknown[])\n : undefined;\n const itemsSchema = unwrapped.items as unknown;\n\n try {\n const normalized = s.replace(/'/g, '\"');\n const arr = JSON.parse(normalized);\n if (Array.isArray(arr)) {\n if (prefixItems && arr.length === prefixItems.length) {\n return arr.map((v, i) => coerceBySchema(v, prefixItems[i]));\n }\n return arr.map((v) => coerceBySchema(v, itemsSchema));\n }\n } catch {\n const csv = s.includes(\"\\n\")\n ? s.split(NEWLINE_SPLIT_REGEX)\n : s.split(COMMA_SPLIT_REGEX);\n const trimmed = csv.map((x) => x.trim()).filter((x) => x.length > 0);\n if (prefixItems && trimmed.length === prefixItems.length) {\n return trimmed.map((x, i) => coerceBySchema(x, prefixItems[i]));\n }\n return trimmed.map((x) => coerceBySchema(x, itemsSchema));\n }\n return null;\n}\n\n/**\n * Coerce object to object using schema\n */\nfunction coerceObjectToObject(\n value: Record<string, unknown>,\n unwrapped: Record<string, unknown>\n): Record<string, unknown> {\n const out: Record<string, unknown> = {};\n for (const [k, v] of Object.entries(value)) {\n out[k] = coerceValueForKey(v, k, unwrapped);\n }\n return out;\n}\n\n/**\n * Coerce array to array using schema\n */\nfunction coerceArrayToArray(\n value: unknown[],\n prefixItems: unknown[] | undefined,\n itemsSchema: unknown\n): unknown[] {\n if (prefixItems && value.length === prefixItems.length) {\n return value.map((v, i) => coerceBySchema(v, prefixItems[i]));\n }\n return value.map((v) => coerceBySchema(v, itemsSchema));\n}\n\n/**\n * Coerce object to array using schema\n */\nfunction coerceObjectToArray(\n maybe: Record<string, unknown>,\n prefixItems: unknown[] | undefined,\n itemsSchema: unknown\n): unknown {\n if (Object.hasOwn(maybe, \"item\")) {\n const items = maybe.item as unknown;\n const arr = Array.isArray(items) ? 
items : [items];\n return coerceArrayToArray(arr, prefixItems, itemsSchema);\n }\n\n const keys = Object.keys(maybe);\n\n // Check for numeric keys (traditional tuple handling)\n if (keys.length > 0 && keys.every((k) => DIGIT_KEY_REGEX.test(k))) {\n const arr = keys.sort((a, b) => Number(a) - Number(b)).map((k) => maybe[k]);\n return coerceArrayToArray(arr, prefixItems, itemsSchema);\n }\n\n // Check for single field that contains an array or object (common XML pattern)\n // This handles both: { user: [{ name: \"A\" }, { name: \"B\" }] } and { user: { name: \"A\" } }\n if (keys.length === 1) {\n const singleKey = keys[0];\n if (\n !(\n schemaIsUnconstrained(itemsSchema) ||\n schemaHasProperty(itemsSchema, singleKey)\n )\n ) {\n const singleValue = maybe[singleKey];\n if (Array.isArray(singleValue)) {\n return singleValue.map((v) => coerceBySchema(v, itemsSchema));\n }\n // Also extract when single key's value is an object and wrap in array (single/multiple element consistency)\n if (singleValue && typeof singleValue === \"object\") {\n return [coerceBySchema(singleValue, itemsSchema)];\n }\n }\n }\n\n return null;\n}\n\n/**\n * Coerce primitive to array using schema\n */\nfunction coercePrimitiveToArray(\n value: unknown,\n prefixItems: unknown[] | undefined,\n itemsSchema: unknown\n): unknown[] {\n if (prefixItems && prefixItems.length > 0) {\n return [coerceBySchema(value, prefixItems[0])];\n }\n return [coerceBySchema(value, itemsSchema)];\n}\n\n/**\n * Coerce string to primitive type using schema\n */\nfunction coerceStringToPrimitive(\n s: string,\n schemaType: string | undefined\n): unknown {\n if (schemaType === \"boolean\") {\n const lower = s.toLowerCase();\n if (lower === \"true\") {\n return true;\n }\n if (lower === \"false\") {\n return false;\n }\n }\n if (\n (schemaType === \"number\" || schemaType === \"integer\") &&\n NUMERIC_REGEX.test(s)\n ) {\n const num = Number(s);\n if (Number.isFinite(num)) {\n return num;\n }\n }\n return null;\n}\n\nfunction coerceStringValue(\n value: string,\n schemaType: string | undefined,\n u: Record<string, unknown>\n): unknown {\n const s = value.trim();\n\n if (schemaType === \"object\") {\n const result = coerceStringToObject(s, u);\n if (result !== null) {\n return result;\n }\n }\n\n if (schemaType === \"array\") {\n const result = coerceStringToArray(s, u);\n if (result !== null) {\n return result;\n }\n }\n\n const primitiveResult = coerceStringToPrimitive(s, schemaType);\n if (primitiveResult !== null) {\n return primitiveResult;\n }\n\n return value;\n}\n\nfunction coerceArrayValue(\n value: unknown,\n prefixItems: unknown[] | undefined,\n itemsSchema: unknown\n): unknown {\n if (Array.isArray(value)) {\n return coerceArrayToArray(value, prefixItems, itemsSchema);\n }\n\n if (value && typeof value === \"object\") {\n const result = coerceObjectToArray(\n value as Record<string, unknown>,\n prefixItems,\n itemsSchema\n );\n if (result !== null) {\n return result;\n }\n // To prevent infinite recursion, check if the itemsSchema is also for an array.\n // If so, just wrap the object. 
Otherwise, coerce it against the itemsSchema.\n if (getSchemaType(itemsSchema) === \"array\") {\n return [value];\n }\n return [coerceBySchema(value, itemsSchema)];\n }\n\n if (\n value == null ||\n typeof value === \"string\" ||\n typeof value === \"number\" ||\n typeof value === \"boolean\"\n ) {\n return coercePrimitiveToArray(value, prefixItems, itemsSchema);\n }\n\n return [value];\n}\n\nexport function coerceBySchema(value: unknown, schema?: unknown): unknown {\n const unwrapped = unwrapJsonSchema(schema);\n if (!unwrapped || typeof unwrapped !== \"object\") {\n if (typeof value === \"string\") {\n return coerceStringWithoutSchema(value);\n }\n return value;\n }\n\n const schemaType = getSchemaType(unwrapped);\n const u = unwrapped as Record<string, unknown>;\n\n // Handle string values\n if (typeof value === \"string\") {\n return coerceStringValue(value, schemaType, u);\n }\n\n // Handle object to object coercion\n if (\n schemaType === \"object\" &&\n value &&\n typeof value === \"object\" &&\n !Array.isArray(value)\n ) {\n return coerceObjectToObject(value as Record<string, unknown>, u);\n }\n\n // Handle array coercion\n if (schemaType === \"array\") {\n const prefixItems = Array.isArray(u.prefixItems)\n ? (u.prefixItems as unknown[])\n : undefined;\n const itemsSchema = u.items as unknown;\n\n return coerceArrayValue(value, prefixItems, itemsSchema);\n }\n\n return value;\n}\n"],"mappings":";AACA,IAAM,gBAAgB;AACtB,IAAM,qBAAqB;AAC3B,IAAM,sBAAsB;AAC5B,IAAM,oBAAoB;AAC1B,IAAM,kBAAkB;AAEjB,SAAS,iBAAiB,QAA0B;AACzD,MAAI,CAAC,UAAU,OAAO,WAAW,UAAU;AACzC,WAAO;AAAA,EACT;AACA,QAAM,IAAI;AACV,MAAI,EAAE,cAAc,OAAO,EAAE,eAAe,UAAU;AACpD,WAAO,iBAAiB,EAAE,UAAU;AAAA,EACtC;AACA,SAAO;AACT;AAEO,SAAS,cAAc,QAAqC;AACjE,QAAM,YAAY,iBAAiB,MAAM;AACzC,MAAI,CAAC,aAAa,OAAO,cAAc,UAAU;AAC/C;AAAA,EACF;AACA,QAAM,IAAc,UAAsC;AAC1D,MAAI,OAAO,MAAM,UAAU;AACzB,WAAO;AAAA,EACT;AACA,MAAI,MAAM,QAAQ,CAAC,GAAG;AACpB,UAAM,YAAY;AAAA,MAChB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,eAAW,KAAK,WAAW;AACzB,UAAI,EAAE,SAAS,CAAC,GAAG;AACjB,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AACA,QAAM,IAAI;AACV,MAAI,KAAK,OAAO,MAAM,aAAa,EAAE,cAAc,EAAE,uBAAuB;AAC1E,WAAO;AAAA,EACT;AACA,MACE,KACA,OAAO,MAAM,aACZ,EAAE,SAAU,EAA8B,cAC3C;AACA,WAAO;AAAA,EACT;AACA;AACF;AAoBA,SAAS,mCACP,GACA,KACA,OACS;AACT,QAAM,cAAc,EAAE;AACtB,QAAM,cAAc,EAAE;AACtB,QAAM,cAAc,EAAE;AAEtB,MAAI,gBAAgB;AACpB,MAAI,cAAc;AAClB,MAAI,cAAc;AAClB,MAAI,cAAc;AAElB,MAAI,MAAM,QAAQ,WAAW,GAAG;AAC9B,oBAAgB;AAChB,kBAAc,YAAY;AAAA,MAAK,CAAC,QAC9B,kBAAkB,KAAK,KAAK,QAAQ,CAAC;AAAA,IACvC;AAAA,EACF;AAEA,MAAI,MAAM,QAAQ,WAAW,GAAG;AAC9B,oBAAgB;AAChB,kBAAc,YAAY;AAAA,MAAK,CAAC,QAC9B,kBAAkB,KAAK,KAAK,QAAQ,CAAC;AAAA,IACvC;AAAA,EACF;AAEA,MAAI,MAAM,QAAQ,WAAW,GAAG;AAC9B,oBAAgB;AAChB,kBAAc,YAAY;AAAA,MAAM,CAAC,QAC/B,kBAAkB,KAAK,KAAK,QAAQ,CAAC;AAAA,IACvC;AAAA,EACF;AAEA,MAAI,CAAC,eAAe;AAClB,WAAO;AAAA,EACT;AAEA,SAAO,eAAe,eAAe;AACvC;AAEA,SAAS,0BACP,GACA,KACS;AACT,QAAM,QAAQ,EAAE;AAChB,MACE,SACA,OAAO,UAAU,YACjB,CAAC,MAAM,QAAQ,KAAK,KACpB,OAAO,OAAO,OAAO,GAAG,KACvB,MAAkC,GAAG,MAAM,OAC5C;AACA,WAAO;AAAA,EACT;AACA,QAAM,WAAW,EAAE;AACnB,MAAI,MAAM,QAAQ,QAAQ,KAAK,SAAS,SAAS,GAAG,GAAG;AACrD,WAAO;AAAA,EACT;AACA,QAAM,iBAAiB,wBAAwB,EAAE,mBAAmB,GAAG;AACvE,SAAO,eAAe,KAAK,CAAC,WAAW,WAAW,KAAK;AACzD;AAqBA,SAAS,+BAA+B,GAAqC;AAC3E,QAAM,aAAa,EAAE;AACrB,MACE,eAAe,QACd,cAAc,OAAO,eAAe,YAAY,CAAC,MAAM,QAAQ,UAAU,GAC1E;AACA,WAAO;AAAA,EACT;AACA,MAAI,OAAO,OAAO,GAAG,sBAAsB,GAAG;AAC5C,WAAO;AAAA,EACT;AACA,QAAM,OAAO,EAAE;AACf,QAAM,eACJ,SAAS,YAAa,MAAM,QAAQ,IAAI,KAAK,KAAK,SAAS,QAAQ;AACrE,QAAM,oBACH,EAAE,cACD,OAAO,EAAE,eAAe,YACxB,CAAC,MAAM,QAAQ,E
AAE,UAAU,KAC5B,EAAE,qBACD,OAAO,EAAE,sBAAsB,YAC/B,CAAC,MAAM,QAAQ,EAAE,iBAAiB,KACnC,MAAM,QAAQ,EAAE,QAAQ,KAAK,EAAE,SAAS,SAAS;AACpD,SAAO,CAAC,EAAE,gBAAgB;AAC5B;AAEA,SAAS,gCACP,GACA,KACS;AACT,QAAM,QAAQ,EAAE;AAChB,MACE,SACA,OAAO,UAAU,YACjB,CAAC,MAAM,QAAQ,KAAK,KACpB,OAAO,OAAO,OAAO,GAAG,KACvB,MAAkC,GAAG,MAAM,OAC5C;AACA,WAAO;AAAA,EACT;AACA,QAAM,iBAAiB,wBAAwB,EAAE,mBAAmB,GAAG;AACvE,SAAO,eAAe,KAAK,CAAC,WAAW,WAAW,KAAK;AACzD;AAoBA,SAAS,kBAAkB,QAAiB,KAAa,QAAQ,GAAY;AAC3E,MAAI,QAAQ,GAAG;AACb,WAAO;AAAA,EACT;AACA,QAAM,YAAY,iBAAiB,MAAM;AAEzC,MAAI,sBAAsB,SAAS,GAAG;AACpC,WAAO;AAAA,EACT;AACA,MAAI,CAAC,aAAa,OAAO,cAAc,UAAU;AAC/C,WAAO;AAAA,EACT;AACA,QAAM,IAAI;AAEV,MAAI,gCAAgC,GAAG,GAAG,GAAG;AAC3C,WAAO;AAAA,EACT;AACA,MAAI,0BAA0B,GAAG,GAAG,GAAG;AACrC,WAAO;AAAA,EACT;AACA,MAAI,+BAA+B,CAAC,GAAG;AACrC,WAAO;AAAA,EACT;AACA,SAAO,mCAAmC,GAAG,KAAK,KAAK;AACzD;AAEA,SAAS,sBAAsB,QAA0B;AACvD,QAAM,YAAY,iBAAiB,MAAM;AACzC,MAAI,aAAa,QAAQ,cAAc,MAAM;AAC3C,WAAO;AAAA,EACT;AACA,MAAI,OAAO,cAAc,YAAY,MAAM,QAAQ,SAAS,GAAG;AAC7D,WAAO;AAAA,EACT;AACA,SAAO,OAAO,KAAK,SAAS,EAAE,WAAW;AAC3C;AAgBA,SAAS,wBACP,mBACA,KACW;AACX,MACE,CAAC,qBACD,OAAO,sBAAsB,YAC7B,MAAM,QAAQ,iBAAiB,GAC/B;AACA,WAAO,CAAC;AAAA,EACV;AACA,QAAM,UAAqB,CAAC;AAC5B,aAAW,CAAC,SAAS,MAAM,KAAK,OAAO;AAAA,IACrC;AAAA,EACF,GAAG;AACD,QAAI;AACF,YAAM,QAAQ,IAAI,OAAO,OAAO;AAChC,UAAI,MAAM,KAAK,GAAG,GAAG;AACnB,gBAAQ,KAAK,MAAM;AAAA,MACrB;AAAA,IACF,SAAQ;AAAA,IAER;AAAA,EACF;AACA,SAAO;AACT;AAEA,SAAS,kBACP,OACA,KACA,WACS;AACT,QAAM,UAAqB,CAAC;AAC5B,QAAM,QAAQ,UAAU;AACxB,MAAI,SAAS,OAAO,OAAO,OAAO,GAAG,GAAG;AACtC,YAAQ,KAAK,MAAM,GAAG,CAAC;AAAA,EACzB;AACA,QAAM,iBAAiB;AAAA,IACrB,UAAU;AAAA,IACV;AAAA,EACF;AACA,MAAI,eAAe,SAAS,GAAG;AAC7B,YAAQ,KAAK,GAAG,cAAc;AAAA,EAChC;AAEA,MAAI,QAAQ,SAAS,GAAG;AACtB,QAAI,MAAM;AACV,eAAW,UAAU,SAAS;AAC5B,UAAI,OAAO,WAAW,WAAW;AAC/B;AAAA,MACF;AACA,YAAM,eAAe,KAAK,MAAM;AAAA,IAClC;AACA,WAAO;AAAA,EACT;AAEA,QAAM,aAAa,UAAU;AAC7B,MACE,cACA,OAAO,eAAe,YACtB,CAAC,MAAM,QAAQ,UAAU,GACzB;AACA,WAAO,eAAe,OAAO,UAAU;AAAA,EACzC;AACA,MAAI,eAAe,QAAQ,eAAe,OAAO;AAC/C,WAAO;AAAA,EACT;AAEA,SAAO,eAAe,OAAO,MAAS;AACxC;AAKA,SAAS,0BAA0B,OAAwB;AACzD,QAAM,IAAI,MAAM,KAAK;AACrB,QAAM,QAAQ,EAAE,YAAY;AAC5B,MAAI,UAAU,QAAQ;AACpB,WAAO;AAAA,EACT;AACA,MAAI,UAAU,SAAS;AACrB,WAAO;AAAA,EACT;AACA,MAAI,cAAc,KAAK,CAAC,GAAG;AACzB,UAAM,MAAM,OAAO,CAAC;AACpB,QAAI,OAAO,SAAS,GAAG,GAAG;AACxB,aAAO;AAAA,IACT;AAAA,EACF;AAGA,MACG,EAAE,WAAW,GAAG,KAAK,EAAE,SAAS,GAAG,KACnC,EAAE,WAAW,GAAG,KAAK,EAAE,SAAS,GAAG,GACpC;AACA,QAAI;AACF,YAAM,SAAS,KAAK,MAAM,CAAC;AAC3B,aAAO,eAAe,QAAQ,MAAS;AAAA,IACzC,SAAQ;AAAA,IAER;AAAA,EACF;AACA,SAAO;AACT;AAKA,SAAS,qBACP,GACA,WACS;AACT,MAAI;AACF,QAAI,aAAa,EAAE,QAAQ,MAAM,GAAG;AACpC,iBAAa,WAAW,QAAQ,oBAAoB,IAAI;AAExD,UAAM,MAAM,KAAK,MAAM,UAAU;AACjC,QAAI,OAAO,OAAO,QAAQ,YAAY,CAAC,MAAM,QAAQ,GAAG,GAAG;AACzD,aAAO,qBAAqB,KAAgC,SAAS;AAAA,IACvE;AAAA,EACF,SAAQ;AAAA,EAER;AACA,SAAO;AACT;AAKA,SAAS,oBACP,GACA,WACS;AACT,QAAM,cAAc,MAAM,QAAQ,UAAU,WAAW,IAClD,UAAU,cACX;AACJ,QAAM,cAAc,UAAU;AAE9B,MAAI;AACF,UAAM,aAAa,EAAE,QAAQ,MAAM,GAAG;AACtC,UAAM,MAAM,KAAK,MAAM,UAAU;AACjC,QAAI,MAAM,QAAQ,GAAG,GAAG;AACtB,UAAI,eAAe,IAAI,WAAW,YAAY,QAAQ;AACpD,eAAO,IAAI,IAAI,CAAC,GAAG,MAAM,eAAe,GAAG,YAAY,CAAC,CAAC,CAAC;AAAA,MAC5D;AACA,aAAO,IAAI,IAAI,CAAC,MAAM,eAAe,GAAG,WAAW,CAAC;AAAA,IACtD;AAAA,EACF,SAAQ;AACN,UAAM,MAAM,EAAE,SAAS,IAAI,IACvB,EAAE,MAAM,mBAAmB,IAC3B,EAAE,MAAM,iBAAiB;AAC7B,UAAM,UAAU,IAAI,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC,EAAE,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AACnE,QAAI,eAAe,QAAQ,WAAW,YAAY,QAAQ;AACxD,aAAO,QAAQ,IAAI,CAAC,GAAG,MAAM,eAAe,GAAG,YAAY,CAAC,CAAC,CAAC;AAAA,IAChE;AACA,WAAO,QAAQ,IAAI,CAAC,MAAM,eAAe,GAAG,WAAW,CAAC;AAAA,EAC1D;AACA,SAAO;AACT;AAKA,SAAS,qBACP,OACA,WACyB;AAC
zB,QAAM,MAA+B,CAAC;AACtC,aAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,KAAK,GAAG;AAC1C,QAAI,CAAC,IAAI,kBAAkB,GAAG,GAAG,SAAS;AAAA,EAC5C;AACA,SAAO;AACT;AAKA,SAAS,mBACP,OACA,aACA,aACW;AACX,MAAI,eAAe,MAAM,WAAW,YAAY,QAAQ;AACtD,WAAO,MAAM,IAAI,CAAC,GAAG,MAAM,eAAe,GAAG,YAAY,CAAC,CAAC,CAAC;AAAA,EAC9D;AACA,SAAO,MAAM,IAAI,CAAC,MAAM,eAAe,GAAG,WAAW,CAAC;AACxD;AAKA,SAAS,oBACP,OACA,aACA,aACS;AACT,MAAI,OAAO,OAAO,OAAO,MAAM,GAAG;AAChC,UAAM,QAAQ,MAAM;AACpB,UAAM,MAAM,MAAM,QAAQ,KAAK,IAAI,QAAQ,CAAC,KAAK;AACjD,WAAO,mBAAmB,KAAK,aAAa,WAAW;AAAA,EACzD;AAEA,QAAM,OAAO,OAAO,KAAK,KAAK;AAG9B,MAAI,KAAK,SAAS,KAAK,KAAK,MAAM,CAAC,MAAM,gBAAgB,KAAK,CAAC,CAAC,GAAG;AACjE,UAAM,MAAM,KAAK,KAAK,CAAC,GAAG,MAAM,OAAO,CAAC,IAAI,OAAO,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,MAAM,CAAC,CAAC;AAC1E,WAAO,mBAAmB,KAAK,aAAa,WAAW;AAAA,EACzD;AAIA,MAAI,KAAK,WAAW,GAAG;AACrB,UAAM,YAAY,KAAK,CAAC;AACxB,QACE,EACE,sBAAsB,WAAW,KACjC,kBAAkB,aAAa,SAAS,IAE1C;AACA,YAAM,cAAc,MAAM,SAAS;AACnC,UAAI,MAAM,QAAQ,WAAW,GAAG;AAC9B,eAAO,YAAY,IAAI,CAAC,MAAM,eAAe,GAAG,WAAW,CAAC;AAAA,MAC9D;AAEA,UAAI,eAAe,OAAO,gBAAgB,UAAU;AAClD,eAAO,CAAC,eAAe,aAAa,WAAW,CAAC;AAAA,MAClD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,uBACP,OACA,aACA,aACW;AACX,MAAI,eAAe,YAAY,SAAS,GAAG;AACzC,WAAO,CAAC,eAAe,OAAO,YAAY,CAAC,CAAC,CAAC;AAAA,EAC/C;AACA,SAAO,CAAC,eAAe,OAAO,WAAW,CAAC;AAC5C;AAKA,SAAS,wBACP,GACA,YACS;AACT,MAAI,eAAe,WAAW;AAC5B,UAAM,QAAQ,EAAE,YAAY;AAC5B,QAAI,UAAU,QAAQ;AACpB,aAAO;AAAA,IACT;AACA,QAAI,UAAU,SAAS;AACrB,aAAO;AAAA,IACT;AAAA,EACF;AACA,OACG,eAAe,YAAY,eAAe,cAC3C,cAAc,KAAK,CAAC,GACpB;AACA,UAAM,MAAM,OAAO,CAAC;AACpB,QAAI,OAAO,SAAS,GAAG,GAAG;AACxB,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAEA,SAAS,kBACP,OACA,YACA,GACS;AACT,QAAM,IAAI,MAAM,KAAK;AAErB,MAAI,eAAe,UAAU;AAC3B,UAAM,SAAS,qBAAqB,GAAG,CAAC;AACxC,QAAI,WAAW,MAAM;AACnB,aAAO;AAAA,IACT;AAAA,EACF;AAEA,MAAI,eAAe,SAAS;AAC1B,UAAM,SAAS,oBAAoB,GAAG,CAAC;AACvC,QAAI,WAAW,MAAM;AACnB,aAAO;AAAA,IACT;AAAA,EACF;AAEA,QAAM,kBAAkB,wBAAwB,GAAG,UAAU;AAC7D,MAAI,oBAAoB,MAAM;AAC5B,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAEA,SAAS,iBACP,OACA,aACA,aACS;AACT,MAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,WAAO,mBAAmB,OAAO,aAAa,WAAW;AAAA,EAC3D;AAEA,MAAI,SAAS,OAAO,UAAU,UAAU;AACtC,UAAM,SAAS;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,QAAI,WAAW,MAAM;AACnB,aAAO;AAAA,IACT;AAGA,QAAI,cAAc,WAAW,MAAM,SAAS;AAC1C,aAAO,CAAC,KAAK;AAAA,IACf;AACA,WAAO,CAAC,eAAe,OAAO,WAAW,CAAC;AAAA,EAC5C;AAEA,MACE,SAAS,QACT,OAAO,UAAU,YACjB,OAAO,UAAU,YACjB,OAAO,UAAU,WACjB;AACA,WAAO,uBAAuB,OAAO,aAAa,WAAW;AAAA,EAC/D;AAEA,SAAO,CAAC,KAAK;AACf;AAEO,SAAS,eAAe,OAAgB,QAA2B;AACxE,QAAM,YAAY,iBAAiB,MAAM;AACzC,MAAI,CAAC,aAAa,OAAO,cAAc,UAAU;AAC/C,QAAI,OAAO,UAAU,UAAU;AAC7B,aAAO,0BAA0B,KAAK;AAAA,IACxC;AACA,WAAO;AAAA,EACT;AAEA,QAAM,aAAa,cAAc,SAAS;AAC1C,QAAM,IAAI;AAGV,MAAI,OAAO,UAAU,UAAU;AAC7B,WAAO,kBAAkB,OAAO,YAAY,CAAC;AAAA,EAC/C;AAGA,MACE,eAAe,YACf,SACA,OAAO,UAAU,YACjB,CAAC,MAAM,QAAQ,KAAK,GACpB;AACA,WAAO,qBAAqB,OAAkC,CAAC;AAAA,EACjE;AAGA,MAAI,eAAe,SAAS;AAC1B,UAAM,cAAc,MAAM,QAAQ,EAAE,WAAW,IAC1C,EAAE,cACH;AACJ,UAAM,cAAc,EAAE;AAEtB,WAAO,iBAAiB,OAAO,aAAa,WAAW;AAAA,EACzD;AAEA,SAAO;AACT;","names":[]}