@hey-api/json-schema-ref-parser 1.2.3 → 1.3.0
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in the public registry.
- package/README.md +9 -84
- package/dist/index.d.mts +629 -0
- package/dist/index.d.mts.map +1 -0
- package/dist/index.mjs +1887 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +42 -78
- package/src/__tests__/bundle.test.ts +59 -0
- package/src/__tests__/index.test.ts +43 -0
- package/src/__tests__/pointer.test.ts +34 -0
- package/src/__tests__/utils.ts +3 -0
- package/{lib → src}/bundle.ts +143 -229
- package/{lib → src}/dereference.ts +20 -43
- package/{lib → src}/index.ts +103 -125
- package/{lib → src}/options.ts +13 -9
- package/{lib → src}/parse.ts +19 -15
- package/src/parsers/binary.ts +13 -0
- package/{lib → src}/parsers/json.ts +5 -6
- package/src/parsers/text.ts +21 -0
- package/{lib → src}/parsers/yaml.ts +9 -9
- package/{lib → src}/pointer.ts +42 -23
- package/{lib → src}/ref.ts +25 -21
- package/{lib → src}/refs.ts +23 -26
- package/{lib → src}/resolve-external.ts +91 -60
- package/{lib → src}/resolvers/file.ts +7 -10
- package/{lib → src}/resolvers/url.ts +12 -8
- package/{lib → src}/types/index.ts +9 -2
- package/src/util/convert-path-to-posix.ts +8 -0
- package/{lib → src}/util/errors.ts +38 -36
- package/{lib → src}/util/is-windows.ts +1 -1
- package/{lib → src}/util/plugins.ts +7 -8
- package/{lib → src}/util/url.ts +41 -42
- package/dist/lib/__tests__/bundle.test.d.ts +0 -1
- package/dist/lib/__tests__/bundle.test.js +0 -50
- package/dist/lib/__tests__/index.test.d.ts +0 -1
- package/dist/lib/__tests__/index.test.js +0 -43
- package/dist/lib/__tests__/pointer.test.d.ts +0 -1
- package/dist/lib/__tests__/pointer.test.js +0 -27
- package/dist/lib/bundle.d.ts +0 -26
- package/dist/lib/bundle.js +0 -600
- package/dist/lib/dereference.d.ts +0 -11
- package/dist/lib/dereference.js +0 -226
- package/dist/lib/index.d.ts +0 -92
- package/dist/lib/index.js +0 -525
- package/dist/lib/options.d.ts +0 -61
- package/dist/lib/options.js +0 -45
- package/dist/lib/parse.d.ts +0 -13
- package/dist/lib/parse.js +0 -87
- package/dist/lib/parsers/binary.d.ts +0 -2
- package/dist/lib/parsers/binary.js +0 -12
- package/dist/lib/parsers/json.d.ts +0 -2
- package/dist/lib/parsers/json.js +0 -38
- package/dist/lib/parsers/text.d.ts +0 -2
- package/dist/lib/parsers/text.js +0 -18
- package/dist/lib/parsers/yaml.d.ts +0 -2
- package/dist/lib/parsers/yaml.js +0 -28
- package/dist/lib/pointer.d.ts +0 -88
- package/dist/lib/pointer.js +0 -297
- package/dist/lib/ref.d.ts +0 -180
- package/dist/lib/ref.js +0 -226
- package/dist/lib/refs.d.ts +0 -127
- package/dist/lib/refs.js +0 -232
- package/dist/lib/resolve-external.d.ts +0 -13
- package/dist/lib/resolve-external.js +0 -151
- package/dist/lib/resolvers/file.d.ts +0 -6
- package/dist/lib/resolvers/file.js +0 -61
- package/dist/lib/resolvers/url.d.ts +0 -17
- package/dist/lib/resolvers/url.js +0 -62
- package/dist/lib/types/index.d.ts +0 -43
- package/dist/lib/types/index.js +0 -2
- package/dist/lib/util/convert-path-to-posix.d.ts +0 -1
- package/dist/lib/util/convert-path-to-posix.js +0 -14
- package/dist/lib/util/errors.d.ts +0 -56
- package/dist/lib/util/errors.js +0 -112
- package/dist/lib/util/is-windows.d.ts +0 -1
- package/dist/lib/util/is-windows.js +0 -6
- package/dist/lib/util/plugins.d.ts +0 -16
- package/dist/lib/util/plugins.js +0 -45
- package/dist/lib/util/url.d.ts +0 -79
- package/dist/lib/util/url.js +0 -285
- package/dist/vite.config.d.ts +0 -2
- package/dist/vite.config.js +0 -19
- package/lib/__tests__/bundle.test.ts +0 -52
- package/lib/__tests__/index.test.ts +0 -45
- package/lib/__tests__/pointer.test.ts +0 -26
- package/lib/__tests__/spec/circular-ref-with-description.json +0 -11
- package/lib/__tests__/spec/multiple-refs.json +0 -34
- package/lib/__tests__/spec/openapi-paths-ref.json +0 -46
- package/lib/__tests__/spec/path-parameter.json +0 -16
- package/lib/parsers/binary.ts +0 -13
- package/lib/parsers/text.ts +0 -21
- package/lib/util/convert-path-to-posix.ts +0 -11
- package/{LICENSE → LICENSE.md} +0 -0
package/dist/index.mjs.map
@@ -0,0 +1 @@
+
{"version":3,"file":"index.mjs","names":["Pointer","errors: MissingPointerError[]","url.resolve","url.getHash","$Ref","Pointer","url.resolve","url.stripHash","url.getHash","$Ref","newEntry: InventoryEntry","crawl","key","parserAny: any","inventory: Array<InventoryEntry>","binaryParser: Plugin","jsonParser: Plugin","error: any","error","textParser: Plugin","yamlParser: Plugin","error: any","lastError: PluginResult","plugin: Pick<Plugin, 'handler'>","resolve","plugins.run","error: any","url.resolve","url.stripHash","$Ref","url.toFileSystemPath","path: string | undefined","url.toFileSystemPath","error: any","error: any","promises: Array<Promise<unknown>>","$Ref","Pointer","url.resolve","url.stripHash","promises: ReadonlyArray<Promise<unknown>>","resolvedInput: ResolvedInput","url.isFileSystemPath","url.fromFileSystemPath","url.resolve","url.cwd","merged: any","chosenOpenapi: string | undefined","chosenSwagger: string | undefined","infoAccumulator: any","servers: any[]","tags: any[]","out: any","url.getProtocol","schema: any","url.stripHash","srcTags: any[]"],"sources":["../src/util/convert-path-to-posix.ts","../src/util/is-windows.ts","../src/util/url.ts","../src/util/errors.ts","../src/ref.ts","../src/pointer.ts","../src/bundle.ts","../src/parsers/binary.ts","../src/parsers/json.ts","../src/parsers/text.ts","../src/parsers/yaml.ts","../src/options.ts","../src/util/plugins.ts","../src/parse.ts","../src/refs.ts","../src/resolvers/file.ts","../src/resolvers/url.ts","../src/resolve-external.ts","../src/index.ts"],"sourcesContent":["export default function convertPathToPosix(filePath: string): string {\n // Extended-length paths on Windows should not be converted\n if (filePath.startsWith('\\\\\\\\?\\\\')) {\n return filePath;\n }\n\n return filePath.replaceAll('\\\\', '/');\n}\n","const isWindowsConst = /^win/.test(globalThis.process ? 
globalThis.process.platform : '');\nexport const isWindows = () => isWindowsConst;\n","import path, { join, win32 } from 'node:path';\n\nimport convertPathToPosix from './convert-path-to-posix';\nimport { isWindows } from './is-windows';\n\nconst forwardSlashPattern = /\\//g;\nconst protocolPattern = /^(\\w{2,}):\\/\\//i;\n\n// RegExp patterns to URL-encode special characters in local filesystem paths\nconst urlEncodePatterns = [\n [/\\?/g, '%3F'],\n [/#/g, '%23'],\n] as [RegExp, string][];\n\n// RegExp patterns to URL-decode special characters for local filesystem paths\nconst urlDecodePatterns = [/%23/g, '#', /%24/g, '$', /%26/g, '&', /%2C/g, ',', /%40/g, '@'];\n\n/**\n * Returns resolved target URL relative to a base URL in a manner similar to that of a Web browser resolving an anchor tag HREF.\n *\n * @returns\n */\nexport function resolve(from: string, to: string) {\n const fromUrl = new URL(convertPathToPosix(from), 'resolve://');\n const resolvedUrl = new URL(convertPathToPosix(to), fromUrl);\n const endSpaces = to.match(/(\\s*)$/)?.[1] || '';\n if (resolvedUrl.protocol === 'resolve:') {\n // `from` is a relative URL.\n const { hash, pathname, search } = resolvedUrl;\n return pathname + search + hash + endSpaces;\n }\n return resolvedUrl.toString() + endSpaces;\n}\n\n/**\n * Returns the current working directory (in Node) or the current page URL (in browsers).\n *\n * @returns\n */\nexport function cwd() {\n if (typeof window !== 'undefined') {\n return location.href;\n }\n\n const path = process.cwd();\n\n const lastChar = path.slice(-1);\n if (lastChar === '/' || lastChar === '\\\\') {\n return path;\n } else {\n return path + '/';\n }\n}\n\n/**\n * Returns the protocol of the given URL, or `undefined` if it has no protocol.\n *\n * @param path\n * @returns\n */\nexport function getProtocol(path: string | undefined) {\n const match = protocolPattern.exec(path || '');\n if (match) {\n return match[1]!.toLowerCase();\n }\n return undefined;\n}\n\n/**\n * Returns the lowercased file extension of the given URL,\n * or an empty string if it has no extension.\n *\n * @param path\n * @returns\n */\nexport function getExtension(path: any) {\n const lastDot = path.lastIndexOf('.');\n if (lastDot > -1) {\n return stripQuery(path.substr(lastDot).toLowerCase());\n }\n return '';\n}\n\n/**\n * Removes the query, if any, from the given path.\n *\n * @param path\n * @returns\n */\nexport function stripQuery(path: any) {\n const queryIndex = path.indexOf('?');\n if (queryIndex > -1) {\n path = path.substr(0, queryIndex);\n }\n return path;\n}\n\n/**\n * Returns the hash (URL fragment), of the given path.\n * If there is no hash, then the root hash (\"#\") is returned.\n *\n * @param path\n * @returns\n */\nexport function getHash(path: undefined | string) {\n if (!path) {\n return '#';\n }\n const hashIndex = path.indexOf('#');\n if (hashIndex > -1) {\n return path.substring(hashIndex);\n }\n return '#';\n}\n\n/**\n * Removes the hash (URL fragment), if any, from the given path.\n *\n * @param path\n * @returns\n */\nexport function stripHash(path?: string | undefined) {\n if (!path) {\n return '';\n }\n const hashIndex = path.indexOf('#');\n if (hashIndex > -1) {\n path = path.substring(0, hashIndex);\n }\n return path;\n}\n\n/**\n * Determines whether the given path is a filesystem path.\n * This includes \"file://\" URLs.\n *\n * @param path\n * @returns\n */\nexport function isFileSystemPath(path: string | undefined) {\n // @ts-ignore\n if (typeof window !== 'undefined' || (typeof process !== 
'undefined' && process.browser)) {\n // We're running in a browser, so assume that all paths are URLs.\n // This way, even relative paths will be treated as URLs rather than as filesystem paths\n return false;\n }\n\n const protocol = getProtocol(path);\n return protocol === undefined || protocol === 'file';\n}\n\n/**\n * Converts a filesystem path to a properly-encoded URL.\n *\n * This is intended to handle situations where JSON Schema $Ref Parser is called\n * with a filesystem path that contains characters which are not allowed in URLs.\n *\n * @example\n * The following filesystem paths would be converted to the following URLs:\n *\n * <\"!@#$%^&*+=?'>.json ==> %3C%22!@%23$%25%5E&*+=%3F\\'%3E.json\n * C:\\\\My Documents\\\\File (1).json ==> C:/My%20Documents/File%20(1).json\n * file://Project #42/file.json ==> file://Project%20%2342/file.json\n *\n * @param path\n * @returns\n */\nexport function fromFileSystemPath(path: string) {\n // Step 1: On Windows, replace backslashes with forward slashes,\n // rather than encoding them as \"%5C\"\n if (isWindows()) {\n const projectDir = cwd();\n const upperPath = path.toUpperCase();\n const projectDirPosixPath = convertPathToPosix(projectDir);\n const posixUpper = projectDirPosixPath.toUpperCase();\n const hasProjectDir = upperPath.includes(posixUpper);\n const hasProjectUri = upperPath.includes(posixUpper);\n const isAbsolutePath =\n win32.isAbsolute(path) ||\n path.startsWith('http://') ||\n path.startsWith('https://') ||\n path.startsWith('file://');\n\n if (!(hasProjectDir || hasProjectUri || isAbsolutePath) && !projectDir.startsWith('http')) {\n path = join(projectDir, path);\n }\n path = convertPathToPosix(path);\n }\n\n // Step 2: `encodeURI` will take care of MOST characters\n path = encodeURI(path);\n\n // Step 3: Manually encode characters that are not encoded by `encodeURI`.\n // This includes characters such as \"#\" and \"?\", which have special meaning in URLs,\n // but are just normal characters in a filesystem path.\n for (const pattern of urlEncodePatterns) {\n path = path.replace(pattern[0], pattern[1]);\n }\n\n return path;\n}\n\n/**\n * Converts a URL to a local filesystem path.\n */\nexport function toFileSystemPath(path: string | undefined, keepFileProtocol?: boolean): string {\n // Step 1: `decodeURI` will decode characters such as Cyrillic characters, spaces, etc.\n path = decodeURI(path!);\n\n // Step 2: Manually decode characters that are not decoded by `decodeURI`.\n // This includes characters such as \"#\" and \"?\", which have special meaning in URLs,\n // but are just normal characters in a filesystem path.\n for (let i = 0; i < urlDecodePatterns.length; i += 2) {\n path = path.replace(urlDecodePatterns[i]!, urlDecodePatterns[i + 1] as string);\n }\n\n // Step 3: If it's a \"file://\" URL, then format it consistently\n // or convert it to a local filesystem path\n let isFileUrl = path.substr(0, 7).toLowerCase() === 'file://';\n if (isFileUrl) {\n // Strip-off the protocol, and the initial \"/\", if there is one\n path = path[7] === '/' ? 
path.substr(8) : path.substr(7);\n\n // insert a colon (\":\") after the drive letter on Windows\n if (isWindows() && path[1] === '/') {\n path = path[0] + ':' + path.substr(1);\n }\n\n if (keepFileProtocol) {\n // Return the consistently-formatted \"file://\" URL\n path = 'file:///' + path;\n } else {\n // Convert the \"file://\" URL to a local filesystem path.\n // On Windows, it will start with something like \"C:/\".\n // On Posix, it will start with \"/\"\n isFileUrl = false;\n path = isWindows() ? path : '/' + path;\n }\n }\n\n // Step 4: Normalize Windows paths (unless it's a \"file://\" URL)\n if (isWindows() && !isFileUrl) {\n // Replace forward slashes with backslashes\n path = path.replace(forwardSlashPattern, '\\\\');\n\n // Capitalize the drive letter\n if (path.substr(1, 2) === ':\\\\') {\n path = path[0]!.toUpperCase() + path.substr(1);\n }\n }\n\n return path;\n}\n\nexport function relative(from: string, to: string) {\n if (!isFileSystemPath(from) || !isFileSystemPath(to)) {\n return resolve(from, to);\n }\n\n const fromDir = path.dirname(stripHash(from));\n const toPath = stripHash(to);\n\n const result = path.relative(fromDir, toPath);\n return result + getHash(to);\n}\n","import { Ono } from '@jsdevtools/ono';\n\nimport type { $RefParser } from '..';\nimport type $Ref from '../ref';\nimport type { JSONSchema } from '../types';\nimport { getHash, stripHash, toFileSystemPath } from './url';\n\nexport type JSONParserErrorType =\n | 'EUNKNOWN'\n | 'EPARSER'\n | 'EUNMATCHEDPARSER'\n | 'ETIMEOUT'\n | 'ERESOLVER'\n | 'EUNMATCHEDRESOLVER'\n | 'EMISSINGPOINTER'\n | 'EINVALIDPOINTER';\n\nexport class JSONParserError extends Error {\n public readonly name: string;\n public readonly message: string;\n public source: string | undefined;\n public path: Array<string | number> | null;\n public readonly code: JSONParserErrorType;\n public constructor(message: string, source?: string) {\n super();\n\n this.code = 'EUNKNOWN';\n this.name = 'JSONParserError';\n this.message = message;\n this.source = source;\n this.path = null;\n\n Ono.extend(this);\n }\n\n get footprint() {\n return `${this.path}+${this.source}+${this.code}+${this.message}`;\n }\n}\n\nexport class JSONParserErrorGroup<S extends object = JSONSchema> extends Error {\n files: $RefParser;\n\n constructor(parser: $RefParser) {\n super();\n\n this.files = parser;\n this.name = 'JSONParserErrorGroup';\n this.message = `${this.errors.length} error${\n this.errors.length > 1 ? 
's' : ''\n } occurred while reading '${toFileSystemPath(parser.$refs._root$Ref!.path)}'`;\n\n Ono.extend(this);\n }\n\n static getParserErrors<S extends object = JSONSchema>(parser: $RefParser) {\n const errors = [];\n\n for (const $ref of Object.values(parser.$refs._$refs) as $Ref<S>[]) {\n if ($ref.errors) {\n errors.push(...$ref.errors);\n }\n }\n\n return errors;\n }\n\n get errors(): Array<\n | JSONParserError\n | InvalidPointerError\n | ResolverError\n | ParserError\n | MissingPointerError\n | UnmatchedParserError\n | UnmatchedResolverError\n > {\n return JSONParserErrorGroup.getParserErrors<S>(this.files);\n }\n}\n\nexport class ParserError extends JSONParserError {\n code = 'EPARSER' as JSONParserErrorType;\n name = 'ParserError';\n constructor(message: any, source: any) {\n super(`Error parsing ${source}: ${message}`, source);\n }\n}\n\nexport class UnmatchedParserError extends JSONParserError {\n code = 'EUNMATCHEDPARSER' as JSONParserErrorType;\n name = 'UnmatchedParserError';\n\n constructor(source: string) {\n super(`Could not find parser for \"${source}\"`, source);\n }\n}\n\nexport class ResolverError extends JSONParserError {\n code = 'ERESOLVER' as JSONParserErrorType;\n name = 'ResolverError';\n ioErrorCode?: string;\n constructor(ex: Error | any, source?: string) {\n super(ex.message || `Error reading file \"${source}\"`, source);\n if ('code' in ex) {\n this.ioErrorCode = String(ex.code);\n }\n }\n}\n\nexport class UnmatchedResolverError extends JSONParserError {\n code = 'EUNMATCHEDRESOLVER' as JSONParserErrorType;\n name = 'UnmatchedResolverError';\n constructor(source: any) {\n super(`Could not find resolver for \"${source}\"`, source);\n }\n}\n\nexport class MissingPointerError extends JSONParserError {\n code = 'EMISSINGPOINTER' as JSONParserErrorType;\n name = 'MissingPointerError';\n constructor(token: string, path: string) {\n super(\n `Missing $ref pointer \"${getHash(path)}\". Token \"${token}\" does not exist.`,\n stripHash(path),\n );\n }\n}\n\nexport class TimeoutError extends JSONParserError {\n code = 'ETIMEOUT' as JSONParserErrorType;\n name = 'TimeoutError';\n constructor(timeout: number) {\n super(`Dereferencing timeout reached: ${timeout}ms`);\n }\n}\n\nexport class InvalidPointerError extends JSONParserError {\n code = 'EUNMATCHEDRESOLVER' as JSONParserErrorType;\n name = 'InvalidPointerError';\n constructor(pointer: string, path: string) {\n super(`Invalid $ref pointer \"${pointer}\". 
Pointers must begin with \"#/\"`, stripHash(path));\n }\n}\n\nexport function isHandledError(err: any): err is JSONParserError {\n return err instanceof JSONParserError || err instanceof JSONParserErrorGroup;\n}\n\nexport function normalizeError(err: any) {\n if (err.path === null) {\n err.path = [];\n }\n\n return err;\n}\n","import type { ParserOptions } from './options';\nimport Pointer from './pointer';\nimport type $Refs from './refs';\nimport type { JSONSchema } from './types';\nimport type {\n JSONParserError,\n MissingPointerError,\n ParserError,\n ResolverError,\n} from './util/errors';\nimport { normalizeError } from './util/errors';\n\nexport type $RefError = JSONParserError | ResolverError | ParserError | MissingPointerError;\n\n/**\n * This class represents a single JSON reference and its resolved value.\n *\n * @class\n */\nclass $Ref<S extends object = JSONSchema> {\n /**\n * The file path or URL of the referenced file.\n * This path is relative to the path of the main JSON schema file.\n *\n * This path does NOT contain document fragments (JSON pointers). It always references an ENTIRE file.\n * Use methods such as {@link $Ref#get}, {@link $Ref#resolve}, and {@link $Ref#exists} to get\n * specific JSON pointers within the file.\n *\n * @type {string}\n */\n path: undefined | string;\n\n /**\n * The resolved value of the JSON reference.\n * Can be any JSON type, not just objects. Unknown file types are represented as Buffers (byte arrays).\n *\n * @type {?*}\n */\n value: any;\n\n /**\n * The {@link $Refs} object that contains this {@link $Ref} object.\n *\n * @type {$Refs}\n */\n $refs: $Refs<S>;\n\n /**\n * Indicates the type of {@link $Ref#path} (e.g. \"file\", \"http\", etc.)\n */\n pathType: string | unknown;\n\n /**\n * List of all errors. 
Undefined if no errors.\n */\n errors: Array<$RefError> = [];\n\n constructor($refs: $Refs<S>) {\n this.$refs = $refs;\n }\n\n /**\n * Pushes an error to errors array.\n *\n * @param err - The error to be pushed\n * @returns\n */\n addError(err: $RefError) {\n if (this.errors === undefined) {\n this.errors = [];\n }\n\n const existingErrors = this.errors.map(({ footprint }: any) => footprint);\n\n // the path has been almost certainly set at this point,\n // but just in case something went wrong, normalizeError injects path if necessary\n // moreover, certain errors might point at the same spot, so filter them out to reduce noise\n if ('errors' in err && Array.isArray(err.errors)) {\n this.errors.push(\n ...err.errors\n .map(normalizeError)\n .filter(({ footprint }: any) => !existingErrors.includes(footprint)),\n );\n } else if (!('footprint' in err) || !existingErrors.includes(err.footprint)) {\n this.errors.push(normalizeError(err));\n }\n }\n\n /**\n * Determines whether the given JSON reference exists within this {@link $Ref#value}.\n *\n * @param path - The full path being resolved, optionally with a JSON pointer in the hash\n * @param options\n * @returns\n */\n exists(path: string, options?: ParserOptions) {\n try {\n this.resolve(path, options);\n return true;\n } catch {\n return false;\n }\n }\n\n /**\n * Resolves the given JSON reference within this {@link $Ref#value} and returns the resolved value.\n *\n * @param path - The full path being resolved, optionally with a JSON pointer in the hash\n * @param options\n * @returns - Returns the resolved value\n */\n get(path: string, options?: ParserOptions) {\n return this.resolve(path, options)?.value;\n }\n\n /**\n * Resolves the given JSON reference within this {@link $Ref#value}.\n *\n * @param path - The full path being resolved, optionally with a JSON pointer in the hash\n * @param options\n * @param friendlyPath - The original user-specified path (used for error messages)\n * @param pathFromRoot - The path of `obj` from the schema root\n * @returns\n */\n resolve(path: string, options?: ParserOptions, friendlyPath?: string, pathFromRoot?: string) {\n const pointer = new Pointer<S>(this, path, friendlyPath);\n return pointer.resolve(this.value, options, pathFromRoot);\n }\n\n /**\n * Sets the value of a nested property within this {@link $Ref#value}.\n * If the property, or any of its parents don't exist, they will be created.\n *\n * @param path - The full path of the property to set, optionally with a JSON pointer in the hash\n * @param value - The value to assign\n */\n set(path: string, value: any) {\n const pointer = new Pointer(this, path);\n this.value = pointer.set(this.value, value);\n }\n\n /**\n * Determines whether the given value is a JSON reference.\n *\n * @param value - The value to inspect\n * @returns\n */\n static is$Ref(value: unknown): value is { $ref: string; length?: number } {\n return (\n Boolean(value) &&\n typeof value === 'object' &&\n value !== null &&\n '$ref' in value &&\n typeof value.$ref === 'string' &&\n value.$ref.length > 0\n );\n }\n\n /**\n * Determines whether the given value is an external JSON reference.\n *\n * @param value - The value to inspect\n * @returns\n */\n static isExternal$Ref(value: unknown): boolean {\n return $Ref.is$Ref(value) && value.$ref![0] !== '#';\n }\n\n /**\n * Determines whether the given value is a JSON reference, and whether it is allowed by the options.\n *\n * @param value - The value to inspect\n * @param options\n * @returns\n */\n static 
isAllowed$Ref(value: unknown) {\n if (this.is$Ref(value)) {\n if (value.$ref.substring(0, 2) === '#/' || value.$ref === '#') {\n // It's a JSON Pointer reference, which is always allowed\n return true;\n } else if (value.$ref[0] !== '#') {\n // It's an external reference, which is allowed by the options\n return true;\n }\n }\n return undefined;\n }\n\n /**\n * Determines whether the given value is a JSON reference that \"extends\" its resolved value.\n * That is, it has extra properties (in addition to \"$ref\"), so rather than simply pointing to\n * an existing value, this $ref actually creates a NEW value that is a shallow copy of the resolved\n * value, plus the extra properties.\n *\n * @example: {\n person: {\n properties: {\n firstName: { type: string }\n lastName: { type: string }\n }\n }\n employee: {\n properties: {\n $ref: #/person/properties\n salary: { type: number }\n }\n }\n }\n * In this example, \"employee\" is an extended $ref, since it extends \"person\" with an additional\n * property (salary). The result is a NEW value that looks like this:\n *\n * {\n * properties: {\n * firstName: { type: string }\n * lastName: { type: string }\n * salary: { type: number }\n * }\n * }\n *\n * @param value - The value to inspect\n * @returns\n */\n static isExtended$Ref(value: unknown) {\n return $Ref.is$Ref(value) && Object.keys(value).length > 1;\n }\n\n /**\n * Returns the resolved value of a JSON Reference.\n * If necessary, the resolved value is merged with the JSON Reference to create a new object\n *\n * @example: {\n person: {\n properties: {\n firstName: { type: string }\n lastName: { type: string }\n }\n }\n employee: {\n properties: {\n $ref: #/person/properties\n salary: { type: number }\n }\n }\n } When \"person\" and \"employee\" are merged, you end up with the following object:\n *\n * {\n * properties: {\n * firstName: { type: string }\n * lastName: { type: string }\n * salary: { type: number }\n * }\n * }\n *\n * @param $ref - The JSON reference object (the one with the \"$ref\" property)\n * @param resolvedValue - The resolved value, which can be any type\n * @returns - Returns the dereferenced value\n */\n static dereference<S extends object = JSONSchema>($ref: $Ref<S>, resolvedValue: S): S {\n if (resolvedValue && typeof resolvedValue === 'object' && $Ref.isExtended$Ref($ref)) {\n const merged = {};\n for (const key of Object.keys($ref)) {\n if (key !== '$ref') {\n // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... Remove this comment to see the full error message\n merged[key] = $ref[key];\n }\n }\n\n for (const key of Object.keys(resolvedValue)) {\n if (!(key in merged)) {\n // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... 
Remove this comment to see the full error message\n merged[key] = resolvedValue[key];\n }\n }\n\n return merged as S;\n } else {\n // Completely replace the original reference with the resolved value\n return resolvedValue;\n }\n }\n}\n\nexport default $Ref;\n","import type { ParserOptions } from './options';\nimport $Ref from './ref';\nimport type { JSONSchema } from './types';\nimport {\n InvalidPointerError,\n isHandledError,\n JSONParserError,\n MissingPointerError,\n} from './util/errors';\nimport * as url from './util/url';\n\nconst slashes = /\\//g;\nconst tildes = /~/g;\nconst escapedSlash = /~1/g;\nconst escapedTilde = /~0/g;\n\nconst safeDecodeURIComponent = (encodedURIComponent: string): string => {\n try {\n return decodeURIComponent(encodedURIComponent);\n } catch {\n return encodedURIComponent;\n }\n};\n\n/**\n * This class represents a single JSON pointer and its resolved value.\n *\n * @param $ref\n * @param path\n * @param [friendlyPath] - The original user-specified path (used for error messages)\n * @class\n */\nclass Pointer<S extends object = JSONSchema> {\n /**\n * The {@link $Ref} object that contains this {@link Pointer} object.\n */\n $ref: $Ref<S>;\n\n /**\n * The file path or URL, containing the JSON pointer in the hash.\n * This path is relative to the path of the main JSON schema file.\n */\n path: string;\n\n /**\n * The original path or URL, used for error messages.\n */\n originalPath: string;\n\n /**\n * The value of the JSON pointer.\n * Can be any JSON type, not just objects. Unknown file types are represented as Buffers (byte arrays).\n */\n\n value: any;\n /**\n * Indicates whether the pointer references itself.\n */\n circular: boolean;\n /**\n * The number of indirect references that were traversed to resolve the value.\n * Resolving a single pointer may require resolving multiple $Refs.\n */\n indirections: number;\n\n constructor($ref: $Ref<S>, path: string, friendlyPath?: string) {\n this.$ref = $ref;\n\n this.path = path;\n\n this.originalPath = friendlyPath || path;\n\n this.value = undefined;\n\n this.circular = false;\n\n this.indirections = 0;\n }\n\n /**\n * Resolves the value of a nested property within the given object.\n *\n * @param obj - The object that will be crawled\n * @param options\n * @param pathFromRoot - the path of place that initiated resolving\n *\n * @returns\n * Returns a JSON pointer whose {@link Pointer#value} is the resolved value.\n * If resolving this value required resolving other JSON references, then\n * the {@link Pointer#$ref} and {@link Pointer#path} will reflect the resolution path\n * of the resolved value.\n */\n resolve(obj: S, options?: ParserOptions, pathFromRoot?: string) {\n const tokens = Pointer.parse(this.path, this.originalPath);\n\n // Crawl the object, one token at a time\n this.value = unwrapOrThrow(obj);\n\n const errors: MissingPointerError[] = [];\n\n for (let i = 0; i < tokens.length; i++) {\n if (resolveIf$Ref(this, options, pathFromRoot)) {\n // The $ref path has changed, so append the remaining tokens to the path\n this.path = Pointer.join(this.path, tokens.slice(i));\n }\n\n if (\n typeof this.value === 'object' &&\n this.value !== null &&\n !isRootPath(pathFromRoot) &&\n '$ref' in this.value\n ) {\n return this;\n }\n\n const token = tokens[i]!;\n if (\n this.value[token] === undefined ||\n (this.value[token] === null && i === tokens.length - 1)\n ) {\n // one final case is if the entry itself includes slashes, and was parsed out as a token - we can join the remaining tokens and try 
again\n let didFindSubstringSlashMatch = false;\n for (let j = tokens.length - 1; j > i; j--) {\n const joinedToken = tokens.slice(i, j + 1).join('/');\n if (this.value[joinedToken] !== undefined) {\n this.value = this.value[joinedToken];\n i = j;\n didFindSubstringSlashMatch = true;\n break;\n }\n }\n if (didFindSubstringSlashMatch) {\n continue;\n }\n\n this.value = null;\n errors.push(new MissingPointerError(token, decodeURI(this.originalPath)));\n } else {\n this.value = this.value[token];\n }\n }\n\n if (errors.length > 0) {\n throw errors.length === 1\n ? errors[0]\n : new AggregateError(errors, 'Multiple missing pointer errors');\n }\n\n // Resolve the final value\n if (\n !this.value ||\n (this.value.$ref && url.resolve(this.path, this.value.$ref) !== pathFromRoot)\n ) {\n resolveIf$Ref(this, options, pathFromRoot);\n }\n\n return this;\n }\n\n /**\n * Sets the value of a nested property within the given object.\n *\n * @param obj - The object that will be crawled\n * @param value - the value to assign\n * @param options\n *\n * @returns\n * Returns the modified object, or an entirely new object if the entire object is overwritten.\n */\n set(obj: S, value: any, options?: ParserOptions) {\n const tokens = Pointer.parse(this.path);\n let token;\n\n if (tokens.length === 0) {\n // There are no tokens, replace the entire object with the new value\n this.value = value;\n return value;\n }\n\n // Crawl the object, one token at a time\n this.value = unwrapOrThrow(obj);\n\n for (let i = 0; i < tokens.length - 1; i++) {\n resolveIf$Ref(this, options);\n\n token = tokens[i]!;\n if (this.value && this.value[token] !== undefined) {\n // The token exists\n this.value = this.value[token];\n } else {\n // The token doesn't exist, so create it\n this.value = setValue(this, token, {});\n }\n }\n\n // Set the value of the final token\n resolveIf$Ref(this, options);\n token = tokens[tokens.length - 1];\n setValue(this, token, value);\n\n // Return the updated object\n return obj;\n }\n\n /**\n * Parses a JSON pointer (or a path containing a JSON pointer in the hash)\n * and returns an array of the pointer's tokens.\n * (e.g. \"schema.json#/definitions/person/name\" => [\"definitions\", \"person\", \"name\"])\n *\n * The pointer is parsed according to RFC 6901\n * {@link https://tools.ietf.org/html/rfc6901#section-3}\n *\n * @param path\n * @param [originalPath]\n * @returns\n */\n static parse(path: string, originalPath?: string): string[] {\n // Get the JSON pointer from the path's hash\n const pointer = url.getHash(path).substring(1);\n\n // If there's no pointer, then there are no tokens,\n // so return an empty array\n if (!pointer) {\n return [];\n }\n\n // Split into an array\n const split = pointer.split('/');\n\n // Decode each part, according to RFC 6901\n for (let i = 0; i < split.length; i++) {\n split[i] = safeDecodeURIComponent(\n split[i]!.replace(escapedSlash, '/').replace(escapedTilde, '~'),\n );\n }\n\n if (split[0] !== '') {\n throw new InvalidPointerError(pointer, originalPath === undefined ? path : originalPath);\n }\n\n return split.slice(1);\n }\n\n /**\n * Creates a JSON pointer path, by joining one or more tokens to a base path.\n *\n * @param base - The base path (e.g. \"schema.json#/definitions/person\")\n * @param tokens - The token(s) to append (e.g. 
[\"name\", \"first\"])\n * @returns\n */\n static join(base: string, tokens: string | string[]) {\n // Ensure that the base path contains a hash\n if (base.indexOf('#') === -1) {\n base += '#';\n }\n\n // Append each token to the base path\n tokens = Array.isArray(tokens) ? tokens : [tokens];\n for (let i = 0; i < tokens.length; i++) {\n const token = tokens[i]!;\n // Encode the token, according to RFC 6901\n base += '/' + encodeURIComponent(token.replace(tildes, '~0').replace(slashes, '~1'));\n }\n\n return base;\n }\n}\n\n/**\n * If the given pointer's {@link Pointer#value} is a JSON reference,\n * then the reference is resolved and {@link Pointer#value} is replaced with the resolved value.\n * In addition, {@link Pointer#path} and {@link Pointer#$ref} are updated to reflect the\n * resolution path of the new value.\n *\n * @param pointer\n * @param options\n * @param [pathFromRoot] - the path of place that initiated resolving\n * @returns - Returns `true` if the resolution path changed\n */\nfunction resolveIf$Ref(pointer: any, options: any, pathFromRoot?: any) {\n // Is the value a JSON reference? (and allowed?)\n\n if ($Ref.isAllowed$Ref(pointer.value)) {\n const $refPath = url.resolve(pointer.path, pointer.value.$ref);\n\n if ($refPath === pointer.path && !isRootPath(pathFromRoot)) {\n // The value is a reference to itself, so there's nothing to do.\n pointer.circular = true;\n } else {\n const resolved = pointer.$ref.$refs._resolve($refPath, pointer.path, options);\n if (resolved === null) {\n return false;\n }\n\n pointer.indirections += resolved.indirections + 1;\n\n if ($Ref.isExtended$Ref(pointer.value)) {\n // This JSON reference \"extends\" the resolved value, rather than simply pointing to it.\n // So the resolved path does NOT change. Just the value does.\n pointer.value = $Ref.dereference(pointer.value, resolved.value);\n return false;\n } else {\n // Resolve the reference\n pointer.$ref = resolved.$ref;\n pointer.path = resolved.path;\n pointer.value = resolved.value;\n }\n\n return true;\n }\n }\n return undefined;\n}\nexport default Pointer;\n\n/**\n * Sets the specified token value of the {@link Pointer#value}.\n *\n * The token is evaluated according to RFC 6901.\n * {@link https://tools.ietf.org/html/rfc6901#section-4}\n *\n * @param pointer - The JSON Pointer whose value will be modified\n * @param token - A JSON Pointer token that indicates how to modify `obj`\n * @param value - The value to assign\n * @returns - Returns the assigned value\n */\nfunction setValue(pointer: any, token: any, value: any) {\n if (pointer.value && typeof pointer.value === 'object') {\n if (token === '-' && Array.isArray(pointer.value)) {\n pointer.value.push(value);\n } else {\n pointer.value[token] = value;\n }\n } else {\n throw new JSONParserError(\n `Error assigning $ref pointer \"${pointer.path}\". 
\\nCannot set \"${token}\" of a non-object.`,\n );\n }\n return value;\n}\n\nfunction unwrapOrThrow(value: any) {\n if (isHandledError(value)) {\n throw value;\n }\n\n return value;\n}\n\nfunction isRootPath(pathFromRoot: any): boolean {\n return typeof pathFromRoot == 'string' && Pointer.parse(pathFromRoot).length == 0;\n}\n","import type { $RefParser } from '.';\nimport type { ParserOptions } from './options';\nimport Pointer from './pointer';\nimport $Ref from './ref';\nimport type $Refs from './refs';\nimport type { JSONSchema } from './types';\nimport { MissingPointerError } from './util/errors';\nimport * as url from './util/url';\n\nexport interface InventoryEntry {\n $ref: any;\n circular: any;\n depth: any;\n extended: any;\n external: any;\n file: any;\n hash: any;\n indirections: any;\n key: any;\n originalContainerType?: 'schemas' | 'parameters' | 'requestBodies' | 'responses' | 'headers';\n parent: any;\n pathFromRoot: any;\n value: any;\n}\n\n/**\n * Fast lookup using Map instead of linear search with deep equality\n */\nconst createInventoryLookup = () => {\n const lookup = new Map<string, InventoryEntry>();\n const objectIds = new WeakMap<object, string>(); // Use WeakMap to avoid polluting objects\n let idCounter = 0;\n\n const getObjectId = (obj: any) => {\n if (!objectIds.has(obj)) {\n objectIds.set(obj, `obj_${++idCounter}`);\n }\n return objectIds.get(obj)!;\n };\n\n const createInventoryKey = ($refParent: any, $refKey: any) =>\n // Use WeakMap-based lookup to avoid polluting the actual schema objects\n `${getObjectId($refParent)}_${$refKey}`;\n\n return {\n add: (entry: InventoryEntry) => {\n const key = createInventoryKey(entry.parent, entry.key);\n lookup.set(key, entry);\n },\n find: ($refParent: any, $refKey: any) => {\n const key = createInventoryKey($refParent, $refKey);\n const result = lookup.get(key);\n return result;\n },\n remove: (entry: InventoryEntry) => {\n const key = createInventoryKey(entry.parent, entry.key);\n lookup.delete(key);\n },\n };\n};\n\n/**\n * Determine the container type from a JSON Pointer path.\n * Analyzes the path tokens to identify the appropriate OpenAPI component container.\n *\n * @param path - The JSON Pointer path to analyze\n * @returns The container type: \"schemas\", \"parameters\", \"requestBodies\", \"responses\", or \"headers\"\n */\nconst getContainerTypeFromPath = (\n path: string,\n): 'schemas' | 'parameters' | 'requestBodies' | 'responses' | 'headers' => {\n const tokens = Pointer.parse(path);\n const has = (t: string) => tokens.includes(t);\n // Prefer more specific containers first\n if (has('parameters')) {\n return 'parameters';\n }\n if (has('requestBody')) {\n return 'requestBodies';\n }\n if (has('headers')) {\n return 'headers';\n }\n if (has('responses')) {\n return 'responses';\n }\n if (has('schema')) {\n return 'schemas';\n }\n // default: treat as schema-like\n return 'schemas';\n};\n\n/**\n * Inventories the given JSON Reference (i.e. 
records detailed information about it so we can\n * optimize all $refs in the schema), and then crawls the resolved value.\n */\nconst inventory$Ref = <S extends object = JSONSchema>({\n $refKey,\n $refParent,\n $refs,\n indirections,\n inventory,\n inventoryLookup,\n options,\n path,\n pathFromRoot,\n resolvedRefs = new Map(),\n visitedObjects = new WeakSet(),\n}: {\n /**\n * The key in `$refParent` that is a JSON Reference\n */\n $refKey: string | null;\n /**\n * The object that contains a JSON Reference as one of its keys\n */\n $refParent: any;\n $refs: $Refs<S>;\n /**\n * unknown\n */\n indirections: number;\n /**\n * An array of already-inventoried $ref pointers\n */\n inventory: Array<InventoryEntry>;\n /**\n * Fast lookup for inventory entries\n */\n inventoryLookup: ReturnType<typeof createInventoryLookup>;\n options: ParserOptions;\n /**\n * The full path of the JSON Reference at `$refKey`, possibly with a JSON Pointer in the hash\n */\n path: string;\n /**\n * The path of the JSON Reference at `$refKey`, from the schema root\n */\n pathFromRoot: string;\n /**\n * Cache for resolved $ref targets to avoid redundant resolution\n */\n resolvedRefs?: Map<string, any>;\n /**\n * Set of already visited objects to avoid infinite loops and redundant processing\n */\n visitedObjects?: WeakSet<object>;\n}) => {\n const $ref = $refKey === null ? $refParent : $refParent[$refKey];\n const $refPath = url.resolve(path, $ref.$ref);\n\n // Check cache first to avoid redundant resolution\n let pointer = resolvedRefs.get($refPath);\n if (!pointer) {\n try {\n pointer = $refs._resolve($refPath, pathFromRoot, options);\n } catch (error) {\n if (error instanceof MissingPointerError) {\n // Log warning but continue - common in complex schema ecosystems\n console.warn(`Skipping unresolvable $ref: ${$refPath}`);\n return;\n }\n throw error; // Re-throw unexpected errors\n }\n\n if (pointer) {\n resolvedRefs.set($refPath, pointer);\n }\n }\n\n if (pointer === null) return;\n\n const parsed = Pointer.parse(pathFromRoot);\n const depth = parsed.length;\n const file = url.stripHash(pointer.path);\n const hash = url.getHash(pointer.path);\n const external = file !== $refs._root$Ref.path;\n const extended = $Ref.isExtended$Ref($ref);\n indirections += pointer.indirections;\n\n // Check if this exact location (parent + key + pathFromRoot) has already been inventoried\n const existingEntry = inventoryLookup.find($refParent, $refKey);\n\n if (existingEntry && existingEntry.pathFromRoot === pathFromRoot) {\n // This exact location has already been inventoried, so we don't need to process it again\n if (depth < existingEntry.depth || indirections < existingEntry.indirections) {\n removeFromInventory(inventory, existingEntry);\n inventoryLookup.remove(existingEntry);\n } else {\n return;\n }\n }\n\n const newEntry: InventoryEntry = {\n $ref, // The JSON Reference (e.g. {$ref: string})\n circular: pointer.circular, // Is this $ref pointer DIRECTLY circular? (i.e. it references itself)\n depth, // How far from the JSON Schema root is this $ref pointer?\n extended, // Does this $ref extend its resolved value? (i.e. 
it has extra properties, in addition to \"$ref\")\n external, // Does this $ref pointer point to a file other than the main JSON Schema file?\n file, // The file that the $ref pointer resolves to\n hash, // The hash within `file` that the $ref pointer resolves to\n indirections, // The number of indirect references that were traversed to resolve the value\n key: $refKey,\n // The resolved value of the $ref pointer\n originalContainerType: external ? getContainerTypeFromPath(pointer.path) : undefined,\n\n // The key in `parent` that is the $ref pointer\n parent: $refParent,\n\n // The object that contains this $ref pointer\n pathFromRoot,\n // The path to the $ref pointer, from the JSON Schema root\n value: pointer.value, // The original container type in the external file\n };\n\n inventory.push(newEntry);\n inventoryLookup.add(newEntry);\n\n // Recursively crawl the resolved value\n if (!existingEntry || external) {\n crawl({\n $refs,\n indirections: indirections + 1,\n inventory,\n inventoryLookup,\n key: null,\n options,\n parent: pointer.value,\n path: pointer.path,\n pathFromRoot,\n resolvedRefs,\n visitedObjects,\n });\n }\n};\n\n/**\n * Recursively crawls the given value, and inventories all JSON references.\n */\nconst crawl = <S extends object = JSONSchema>({\n $refs,\n indirections,\n inventory,\n inventoryLookup,\n key,\n options,\n parent,\n path,\n pathFromRoot,\n resolvedRefs = new Map(),\n visitedObjects = new WeakSet(),\n}: {\n $refs: $Refs<S>;\n indirections: number;\n /**\n * An array of already-inventoried $ref pointers\n */\n inventory: Array<InventoryEntry>;\n /**\n * Fast lookup for inventory entries\n */\n inventoryLookup: ReturnType<typeof createInventoryLookup>;\n /**\n * The property key of `parent` to be crawled\n */\n key: string | null;\n options: ParserOptions;\n /**\n * The object containing the value to crawl. If the value is not an object or array, it will be ignored.\n */\n parent: object | $RefParser;\n /**\n * The full path of the property being crawled, possibly with a JSON Pointer in the hash\n */\n path: string;\n /**\n * The path of the property being crawled, from the schema root\n */\n pathFromRoot: string;\n /**\n * Cache for resolved $ref targets to avoid redundant resolution\n */\n resolvedRefs?: Map<string, any>;\n /**\n * Set of already visited objects to avoid infinite loops and redundant processing\n */\n visitedObjects?: WeakSet<object>;\n}) => {\n const obj = key === null ? 
parent : parent[key as keyof typeof parent];\n\n if (obj && typeof obj === 'object' && !ArrayBuffer.isView(obj)) {\n // Early exit if we've already processed this exact object\n if (visitedObjects.has(obj)) return;\n\n if ($Ref.isAllowed$Ref(obj)) {\n inventory$Ref({\n $refKey: key,\n $refParent: parent,\n $refs,\n indirections,\n inventory,\n inventoryLookup,\n options,\n path,\n pathFromRoot,\n resolvedRefs,\n visitedObjects,\n });\n } else {\n // Mark this object as visited BEFORE processing its children\n visitedObjects.add(obj);\n\n // Crawl the object in a specific order that's optimized for bundling.\n // This is important because it determines how `pathFromRoot` gets built,\n // which later determines which keys get dereferenced and which ones get remapped\n const keys = Object.keys(obj).sort((a, b) => {\n // Most people will expect references to be bundled into the \"definitions\" property,\n // so we always crawl that property first, if it exists.\n if (a === 'definitions') {\n return -1;\n } else if (b === 'definitions') {\n return 1;\n } else {\n // Otherwise, crawl the keys based on their length.\n // This produces the shortest possible bundled references\n return a.length - b.length;\n }\n }) as Array<keyof typeof obj>;\n\n for (const key of keys) {\n const keyPath = Pointer.join(path, key);\n const keyPathFromRoot = Pointer.join(pathFromRoot, key);\n const value = obj[key];\n\n if ($Ref.isAllowed$Ref(value)) {\n inventory$Ref({\n $refKey: key,\n $refParent: obj,\n $refs,\n indirections,\n inventory,\n inventoryLookup,\n options,\n path,\n pathFromRoot: keyPathFromRoot,\n resolvedRefs,\n visitedObjects,\n });\n } else {\n crawl({\n $refs,\n indirections,\n inventory,\n inventoryLookup,\n key,\n options,\n parent: obj,\n path: keyPath,\n pathFromRoot: keyPathFromRoot,\n resolvedRefs,\n visitedObjects,\n });\n }\n }\n }\n }\n};\n\n/**\n * Remap external refs by hoisting resolved values into a shared container in the root schema\n * and pointing all occurrences to those internal definitions. Internal refs remain internal.\n */\nfunction remap(parser: $RefParser, inventory: Array<InventoryEntry>) {\n const root = parser.schema as any;\n\n // Group & sort all the $ref pointers, so they're in the order that we need to dereference/remap them\n inventory.sort((a: InventoryEntry, b: InventoryEntry) => {\n if (a.file !== b.file) {\n // Group all the $refs that point to the same file\n return a.file < b.file ? -1 : +1;\n } else if (a.hash !== b.hash) {\n // Group all the $refs that point to the same part of the file\n return a.hash < b.hash ? -1 : +1;\n } else if (a.circular !== b.circular) {\n // If the $ref points to itself, then sort it higher than other $refs that point to this $ref\n return a.circular ? -1 : +1;\n } else if (a.extended !== b.extended) {\n // If the $ref extends the resolved value, then sort it lower than other $refs that don't extend the value\n return a.extended ? 
+1 : -1;\n } else if (a.indirections !== b.indirections) {\n // Sort direct references higher than indirect references\n return a.indirections - b.indirections;\n } else if (a.depth !== b.depth) {\n // Sort $refs by how close they are to the JSON Schema root\n return a.depth - b.depth;\n } else {\n // Determine how far each $ref is from the \"definitions\" property.\n // Most people will expect references to be bundled into the the \"definitions\" property if possible.\n const aDefinitionsIndex = a.pathFromRoot.lastIndexOf('/definitions');\n const bDefinitionsIndex = b.pathFromRoot.lastIndexOf('/definitions');\n if (aDefinitionsIndex !== bDefinitionsIndex) {\n // Give higher priority to the $ref that's closer to the \"definitions\" property\n return bDefinitionsIndex - aDefinitionsIndex;\n } else {\n // All else is equal, so use the shorter path, which will produce the shortest possible reference\n return a.pathFromRoot.length - b.pathFromRoot.length;\n }\n }\n });\n\n // Ensure or return a container by component type. Prefer OpenAPI-aware placement;\n // otherwise use existing root containers; otherwise create components/*.\n const ensureContainer = (\n type: 'schemas' | 'parameters' | 'requestBodies' | 'responses' | 'headers',\n ) => {\n const isOas3 = !!(root && typeof root === 'object' && typeof root.openapi === 'string');\n const isOas2 = !!(root && typeof root === 'object' && typeof root.swagger === 'string');\n\n if (isOas3) {\n if (!root.components || typeof root.components !== 'object') {\n root.components = {};\n }\n if (!root.components[type] || typeof root.components[type] !== 'object') {\n root.components[type] = {};\n }\n return { obj: root.components[type], prefix: `#/components/${type}` } as const;\n }\n\n if (isOas2) {\n if (type === 'schemas') {\n if (!root.definitions || typeof root.definitions !== 'object') {\n root.definitions = {};\n }\n return { obj: root.definitions, prefix: '#/definitions' } as const;\n }\n if (type === 'parameters') {\n if (!root.parameters || typeof root.parameters !== 'object') {\n root.parameters = {};\n }\n return { obj: root.parameters, prefix: '#/parameters' } as const;\n }\n if (type === 'responses') {\n if (!root.responses || typeof root.responses !== 'object') {\n root.responses = {};\n }\n return { obj: root.responses, prefix: '#/responses' } as const;\n }\n // requestBodies/headers don't exist as reusable containers in OAS2; fallback to definitions\n if (!root.definitions || typeof root.definitions !== 'object') {\n root.definitions = {};\n }\n return { obj: root.definitions, prefix: '#/definitions' } as const;\n }\n\n // No explicit version: prefer existing containers\n if (root && typeof root === 'object') {\n if (root.components && typeof root.components === 'object') {\n if (!root.components[type] || typeof root.components[type] !== 'object') {\n root.components[type] = {};\n }\n return { obj: root.components[type], prefix: `#/components/${type}` } as const;\n }\n if (root.definitions && typeof root.definitions === 'object') {\n return { obj: root.definitions, prefix: '#/definitions' } as const;\n }\n // Create components/* by default if nothing exists\n if (!root.components || typeof root.components !== 'object') {\n root.components = {};\n }\n if (!root.components[type] || typeof root.components[type] !== 'object') {\n root.components[type] = {};\n }\n return { obj: root.components[type], prefix: `#/components/${type}` } as const;\n }\n\n // Fallback\n root.definitions = root.definitions || {};\n return { obj: root.definitions, 
prefix: '#/definitions' } as const;\n };\n\n /**\n * Choose the appropriate component container for bundling.\n * Prioritizes the original container type from external files over usage location.\n *\n * @param entry - The inventory entry containing reference information\n * @returns The container type to use for bundling\n */\n const chooseComponent = (entry: InventoryEntry) => {\n // If we have the original container type from the external file, use it\n if (entry.originalContainerType) {\n return entry.originalContainerType;\n }\n\n // Fallback to usage path for internal references or when original type is not available\n return getContainerTypeFromPath(entry.pathFromRoot);\n };\n\n // Track names per (container prefix) and per target\n const targetToNameByPrefix = new Map<string, Map<string, string>>();\n const usedNamesByObj = new Map<any, Set<string>>();\n\n const sanitize = (name: string) => name.replace(/[^A-Za-z0-9_-]/g, '_');\n const baseName = (filePath: string) => {\n try {\n const withoutHash = filePath.split('#')[0]!;\n const parts = withoutHash.split('/');\n const filename = parts[parts.length - 1] || 'schema';\n const dot = filename.lastIndexOf('.');\n return sanitize(dot > 0 ? filename.substring(0, dot) : filename);\n } catch {\n return 'schema';\n }\n };\n const lastToken = (hash: string) => {\n if (!hash || hash === '#') {\n return 'root';\n }\n const tokens = hash.replace(/^#\\//, '').split('/');\n return sanitize(tokens[tokens.length - 1] || 'root');\n };\n const uniqueName = (containerObj: any, proposed: string) => {\n if (!usedNamesByObj.has(containerObj)) {\n usedNamesByObj.set(containerObj, new Set<string>(Object.keys(containerObj || {})));\n }\n const used = usedNamesByObj.get(containerObj)!;\n let name = proposed;\n let i = 2;\n while (used.has(name)) {\n name = `${proposed}_${i++}`;\n }\n used.add(name);\n return name;\n };\n for (const entry of inventory) {\n // Safety check: ensure entry and entry.$ref are valid objects\n if (!entry || !entry.$ref || typeof entry.$ref !== 'object') {\n continue;\n }\n\n // Keep internal refs internal. However, if the $ref extends the resolved value\n // (i.e. 
it has additional properties in addition to \"$ref\"), then we must\n // preserve the original $ref rather than rewriting it to the resolved hash.\n if (!entry.external) {\n if (!entry.extended && entry.$ref && typeof entry.$ref === 'object') {\n entry.$ref.$ref = entry.hash;\n }\n continue;\n }\n\n // Avoid changing direct self-references; keep them internal\n if (entry.circular) {\n if (entry.$ref && typeof entry.$ref === 'object') {\n entry.$ref.$ref = entry.pathFromRoot;\n }\n continue;\n }\n\n // Choose appropriate container based on original location in external file\n const component = chooseComponent(entry);\n const { obj: container, prefix } = ensureContainer(component);\n\n const targetKey = `${entry.file}::${entry.hash}`;\n if (!targetToNameByPrefix.has(prefix)) {\n targetToNameByPrefix.set(prefix, new Map<string, string>());\n }\n const namesForPrefix = targetToNameByPrefix.get(prefix)!;\n\n let defName = namesForPrefix.get(targetKey);\n if (!defName) {\n // If the external file is one of the original input sources, prefer its assigned prefix\n let proposedBase = baseName(entry.file);\n try {\n const parserAny: any = parser as any;\n if (\n parserAny &&\n parserAny.sourcePathToPrefix &&\n typeof parserAny.sourcePathToPrefix.get === 'function'\n ) {\n const withoutHash = (entry.file || '').split('#')[0];\n const mapped = parserAny.sourcePathToPrefix.get(withoutHash);\n if (mapped && typeof mapped === 'string') {\n proposedBase = mapped;\n }\n }\n } catch {\n // Ignore errors\n }\n const proposed = `${proposedBase}_${lastToken(entry.hash)}`;\n defName = uniqueName(container, proposed);\n namesForPrefix.set(targetKey, defName);\n // Store the resolved value under the container\n container[defName] = entry.value;\n }\n\n // Point the occurrence to the internal definition, preserving extensions\n const refPath = `${prefix}/${defName}`;\n if (entry.extended && entry.$ref && typeof entry.$ref === 'object') {\n entry.$ref.$ref = refPath;\n } else {\n entry.parent[entry.key] = { $ref: refPath };\n }\n }\n}\n\nfunction removeFromInventory(inventory: Array<InventoryEntry>, entry: any) {\n const index = inventory.indexOf(entry);\n inventory.splice(index, 1);\n}\n\n/**\n * Bundles all external JSON references into the main JSON schema, thus resulting in a schema that\n * only has *internal* references, not any *external* references.\n * This method mutates the JSON schema object, adding new references and re-mapping existing ones.\n *\n * @param parser\n * @param options\n */\nexport function bundle(parser: $RefParser, options: ParserOptions): void {\n const inventory: Array<InventoryEntry> = [];\n const inventoryLookup = createInventoryLookup();\n\n const visitedObjects = new WeakSet<object>();\n const resolvedRefs = new Map<string, any>();\n\n crawl<JSONSchema>({\n $refs: parser.$refs,\n indirections: 0,\n inventory,\n inventoryLookup,\n key: 'schema',\n options,\n parent: parser,\n path: parser.$refs._root$Ref.path + '#',\n pathFromRoot: '#',\n resolvedRefs,\n visitedObjects,\n });\n\n remap(parser, inventory);\n}\n","import type { FileInfo, Plugin } from '../types';\n\nconst BINARY_REGEXP = /\\.(jpeg|jpg|gif|png|bmp|ico)$/i;\n\nexport const binaryParser: Plugin = {\n canHandle: (file: FileInfo) => Buffer.isBuffer(file.data) && BINARY_REGEXP.test(file.url),\n handler: (file: FileInfo): Buffer =>\n Buffer.isBuffer(file.data)\n ? 
file.data\n : // This will reject if data is anything other than a string or typed array\n Buffer.from(file.data),\n name: 'binary',\n};\n","import type { FileInfo, Plugin } from '../types';\nimport { ParserError } from '../util/errors';\n\nexport const jsonParser: Plugin = {\n canHandle: (file: FileInfo) => file.extension === '.json',\n async handler(file: FileInfo): Promise<object | undefined> {\n let data = file.data;\n if (Buffer.isBuffer(data)) {\n data = data.toString();\n }\n\n if (typeof data !== 'string') {\n // data is already a JavaScript value (object, array, number, null, NaN, etc.)\n return data as object;\n }\n\n if (!data.trim().length) {\n // this mirrors the YAML behavior\n return;\n }\n\n try {\n return JSON.parse(data);\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n } catch (error: any) {\n try {\n // find the first curly brace\n const firstCurlyBrace = data.indexOf('{');\n // remove any characters before the first curly brace\n data = data.slice(firstCurlyBrace);\n return JSON.parse(data);\n } catch (error: any) {\n throw new ParserError(error.message, file.url);\n }\n }\n },\n name: 'json',\n};\n","import type { FileInfo, Plugin } from '../types';\nimport { ParserError } from '../util/errors';\n\nconst TEXT_REGEXP = /\\.(txt|htm|html|md|xml|js|min|map|css|scss|less|svg)$/i;\n\nexport const textParser: Plugin = {\n canHandle: (file: FileInfo) =>\n (typeof file.data === 'string' || Buffer.isBuffer(file.data)) && TEXT_REGEXP.test(file.url),\n handler(file: FileInfo): string {\n if (typeof file.data === 'string') {\n return file.data;\n }\n\n if (!Buffer.isBuffer(file.data)) {\n throw new ParserError('data is not text', file.url);\n }\n\n return file.data.toString('utf-8');\n },\n name: 'text',\n};\n","import yaml from 'js-yaml';\nimport { JSON_SCHEMA } from 'js-yaml';\n\nimport type { FileInfo, JSONSchema, Plugin } from '../types';\nimport { ParserError } from '../util/errors';\n\nexport const yamlParser: Plugin = {\n // JSON is valid YAML\n canHandle: (file: FileInfo) => ['.yaml', '.yml', '.json'].includes(file.extension),\n handler: async (file: FileInfo): Promise<JSONSchema> => {\n const data = Buffer.isBuffer(file.data) ? file.data.toString() : file.data;\n\n if (typeof data !== 'string') {\n // data is already a JavaScript value (object, array, number, null, NaN, etc.)\n return data;\n }\n\n try {\n const yamlSchema = yaml.load(data, { schema: JSON_SCHEMA }) as JSONSchema;\n return yamlSchema;\n } catch (error: any) {\n throw new ParserError(error?.message || 'Parser Error', file.url);\n }\n },\n name: 'yaml',\n};\n","import { binaryParser } from './parsers/binary';\nimport { jsonParser } from './parsers/json';\nimport { textParser } from './parsers/text';\nimport { yamlParser } from './parsers/yaml';\nimport type { JSONSchemaObject, Plugin } from './types';\n\nexport interface DereferenceOptions {\n /**\n * Determines whether circular `$ref` pointers are handled.\n *\n * If set to `false`, then a `ReferenceError` will be thrown if the schema contains any circular references.\n *\n * If set to `\"ignore\"`, then circular references will simply be ignored. No error will be thrown, but the `$Refs.circular` property will still be set to `true`.\n */\n circular?: boolean | 'ignore';\n /**\n * A function, called for each path, which can return true to stop this path and all\n * subpaths from being dereferenced further. 
This is useful in schemas where some\n * subpaths contain literal $ref keys that should not be dereferenced.\n */\n excludedPathMatcher?(path: string): boolean;\n /**\n * Callback invoked during dereferencing.\n *\n * @argument {string} path - The path being dereferenced (ie. the `$ref` string)\n * @argument {JSONSchemaObject} value - The JSON-Schema that the `$ref` resolved to\n * @argument {JSONSchemaObject} parent - The parent of the dereferenced object\n * @argument {string} parentPropName - The prop name of the parent object whose value was dereferenced\n */\n onDereference?(\n path: string,\n value: JSONSchemaObject,\n parent?: JSONSchemaObject,\n parentPropName?: string,\n ): void;\n}\n\n/**\n * Options that determine how JSON schemas are parsed, resolved, and dereferenced.\n *\n * @param [options] - Overridden options\n * @class\n */\nexport interface $RefParserOptions {\n /**\n * The `dereference` options control how JSON Schema `$Ref` Parser will dereference `$ref` pointers within the JSON schema.\n */\n dereference: DereferenceOptions;\n /**\n * The `parse` options determine how different types of files will be parsed.\n *\n * JSON Schema `$Ref` Parser comes with built-in JSON, YAML, plain-text, and binary parsers, any of which you can configure or disable. You can also add your own custom parsers if you want.\n */\n parse: {\n binary: Plugin;\n json: Plugin;\n text: Plugin;\n yaml: Plugin;\n };\n /**\n * The maximum amount of time (in milliseconds) that JSON Schema $Ref Parser will spend dereferencing a single schema.\n * It will throw a timeout error if the operation takes longer than this.\n */\n timeoutMs?: number;\n}\n\nexport const getJsonSchemaRefParserDefaultOptions = (): $RefParserOptions => ({\n /**\n * Determines the types of JSON references that are allowed.\n */\n dereference: {\n /**\n * Dereference circular (recursive) JSON references?\n * If false, then a {@link ReferenceError} will be thrown if a circular reference is found.\n * If \"ignore\", then circular references will not be dereferenced.\n *\n * @type {boolean|string}\n */\n circular: true,\n /**\n * A function, called for each path, which can return true to stop this path and all\n * subpaths from being dereferenced further. This is useful in schemas where some\n * subpaths contain literal $ref keys that should not be dereferenced.\n *\n * @type {function}\n */\n excludedPathMatcher: () => false,\n // @ts-expect-error\n referenceResolution: 'relative',\n },\n /**\n * Determines how different types of files will be parsed.\n *\n * You can add additional parsers of your own, replace an existing one with\n * your own implementation, or disable any parser by setting it to false.\n */\n parse: {\n binary: { ...binaryParser },\n json: { ...jsonParser },\n text: { ...textParser },\n yaml: { ...yamlParser },\n },\n});\n\nexport type Options = $RefParserOptions;\n\ntype DeepPartial<T> = T extends object\n ? 
{\n [P in keyof T]?: DeepPartial<T[P]>;\n }\n : T;\nexport type ParserOptions = DeepPartial<$RefParserOptions>;\n","import type { FileInfo, JSONSchema, Plugin } from '../types';\n\nexport interface PluginResult {\n error?: any;\n plugin: Pick<Plugin, 'handler'>;\n result?: string | Buffer | JSONSchema;\n}\n\n/**\n * Runs the specified method of the given plugins, in order, until one of them returns a successful result.\n * Each method can return a synchronous value, a Promise, or call an error-first callback.\n * If the promise resolves successfully, or the callback is called without an error, then the result\n * is immediately returned and no further plugins are called.\n * If the promise rejects, or the callback is called with an error, then the next plugin is called.\n * If ALL plugins fail, then the last error is thrown.\n */\nexport async function run(plugins: Pick<Plugin, 'handler'>[], file: FileInfo) {\n let index = 0;\n let lastError: PluginResult;\n let plugin: Pick<Plugin, 'handler'>;\n\n return new Promise<PluginResult>((resolve, reject) => {\n const runNextPlugin = async () => {\n plugin = plugins[index++]!;\n\n if (!plugin) {\n // there are no more functions, re-throw the last error\n return reject(lastError);\n }\n\n try {\n const result = await plugin.handler(file);\n\n if (result !== undefined) {\n return resolve({\n plugin,\n result,\n });\n }\n\n if (index === plugins.length) {\n throw new Error('No promise has been returned.');\n }\n } catch (error) {\n lastError = {\n error,\n plugin,\n };\n runNextPlugin();\n }\n };\n\n runNextPlugin();\n });\n}\n","import { ono } from '@jsdevtools/ono';\n\nimport type { $RefParserOptions } from './options';\nimport type { FileInfo } from './types';\nimport { ParserError } from './util/errors';\nimport type { PluginResult } from './util/plugins';\nimport * as plugins from './util/plugins';\nimport { getExtension } from './util/url';\n\n/**\n * Prepares the file object so we can populate it with data and other values\n * when it's read and parsed. This \"file object\" will be passed to all\n * resolvers and parsers.\n */\nexport function newFile(path: string): FileInfo {\n let url = path;\n // Remove the URL fragment, if any\n const hashIndex = url.indexOf('#');\n let hash = '';\n if (hashIndex > -1) {\n hash = url.substring(hashIndex);\n url = url.substring(0, hashIndex);\n }\n return {\n extension: getExtension(url),\n hash,\n url,\n } as FileInfo;\n}\n\n/**\n * Parses the given file's contents, using the configured parser plugins.\n */\nexport async function parseFile(\n file: FileInfo,\n options: $RefParserOptions['parse'],\n): Promise<PluginResult> {\n try {\n // If none of the parsers are a match for this file, try all of them. This\n // handles situations where the file is a supported type, just with an\n // unknown extension.\n const parsers = [options.json, options.yaml, options.text, options.binary];\n const filtered = parsers.filter((plugin) => plugin.canHandle(file));\n return await plugins.run(filtered.length ? 
filtered : parsers, file);\n } catch (error: any) {\n if (error && error.message && error.message.startsWith('Error parsing')) {\n throw error;\n }\n\n if (!error || !('error' in error)) {\n throw ono.syntax(`Unable to parse ${file.url}`);\n }\n\n if (error.error instanceof ParserError) {\n throw error.error;\n }\n\n throw new ParserError(error.error.message, file.url);\n }\n}\n","import { ono } from '@jsdevtools/ono';\nimport type { JSONSchema4Type, JSONSchema6Type, JSONSchema7Type } from 'json-schema';\n\nimport type { ParserOptions } from './options';\nimport $Ref from './ref';\nimport type { JSONSchema } from './types';\nimport convertPathToPosix from './util/convert-path-to-posix';\nimport * as url from './util/url';\n\ninterface $RefsMap<S extends object = JSONSchema> {\n [url: string]: $Ref<S>;\n}\n/**\n * When you call the resolve method, the value that gets passed to the callback function (or Promise) is a $Refs object. This same object is accessible via the parser.$refs property of $RefParser objects.\n *\n * This object is a map of JSON References and their resolved values. It also has several convenient helper methods that make it easy for you to navigate and manipulate the JSON References.\n *\n * See https://apitools.dev/json-schema-ref-parser/docs/refs.html\n */\nexport default class $Refs<S extends object = JSONSchema> {\n /**\n * This property is true if the schema contains any circular references. You may want to check this property before serializing the dereferenced schema as JSON, since JSON.stringify() does not support circular references by default.\n *\n * See https://apitools.dev/json-schema-ref-parser/docs/refs.html#circular\n */\n public circular: boolean;\n\n /**\n * Returns the paths/URLs of all the files in your schema (including the main schema file).\n *\n * See https://apitools.dev/json-schema-ref-parser/docs/refs.html#pathstypes\n *\n * @param types (optional) Optionally only return certain types of paths (\"file\", \"http\", etc.)\n */\n paths(...types: (string | string[])[]): string[] {\n const paths = getPaths(this._$refs, types.flat());\n return paths.map((path) => convertPathToPosix(path.decoded));\n }\n\n /**\n * Returns a map of paths/URLs and their correspond values.\n *\n * See https://apitools.dev/json-schema-ref-parser/docs/refs.html#valuestypes\n *\n * @param types (optional) Optionally only return values from certain locations (\"file\", \"http\", etc.)\n */\n values(...types: (string | string[])[]): S {\n const $refs = this._$refs;\n const paths = getPaths($refs, types.flat());\n return paths.reduce<Record<string, any>>((obj, path) => {\n obj[convertPathToPosix(path.decoded)] = $refs[path.encoded]!.value;\n return obj;\n }, {}) as S;\n }\n\n /**\n * Returns `true` if the given path exists in the schema; otherwise, returns `false`\n *\n * See https://apitools.dev/json-schema-ref-parser/docs/refs.html#existsref\n *\n * @param $ref The JSON Reference path, optionally with a JSON Pointer in the hash\n */\n /**\n * Determines whether the given JSON reference exists.\n *\n * @param path - The path being resolved, optionally with a JSON pointer in the hash\n * @param [options]\n * @returns\n */\n exists(path: string, options: any) {\n try {\n this._resolve(path, '', options);\n return true;\n } catch {\n return false;\n }\n }\n\n /**\n * Resolves the given JSON reference and returns the resolved value.\n *\n * @param path - The path being resolved, with a JSON pointer in the hash\n * @param [options]\n * @returns - Returns the resolved value\n */\n 
get(path: string, options?: ParserOptions): JSONSchema4Type | JSONSchema6Type | JSONSchema7Type {\n return this._resolve(path, '', options)!.value;\n }\n\n /**\n * Sets the value at the given path in the schema. If the property, or any of its parents, don't exist, they will be created.\n *\n * @param path The JSON Reference path, optionally with a JSON Pointer in the hash\n * @param value The value to assign. Can be anything (object, string, number, etc.)\n */\n set(path: string, value: JSONSchema4Type | JSONSchema6Type | JSONSchema7Type) {\n const absPath = url.resolve(this._root$Ref.path!, path);\n const withoutHash = url.stripHash(absPath);\n const $ref = this._$refs[withoutHash];\n\n if (!$ref) {\n throw ono(`Error resolving $ref pointer \"${path}\". \\n\"${withoutHash}\" not found.`);\n }\n\n $ref.set(absPath, value);\n }\n /**\n * Returns the specified {@link $Ref} object, or undefined.\n *\n * @param path - The path being resolved, optionally with a JSON pointer in the hash\n * @returns\n * @protected\n */\n _get$Ref(path: string) {\n path = url.resolve(this._root$Ref.path!, path);\n const withoutHash = url.stripHash(path);\n return this._$refs[withoutHash];\n }\n\n /**\n * Creates a new {@link $Ref} object and adds it to this {@link $Refs} object.\n *\n * @param path - The file path or URL of the referenced file\n */\n _add(path: string) {\n const withoutHash = url.stripHash(path);\n\n const $ref = new $Ref<S>(this);\n $ref.path = withoutHash;\n\n this._$refs[withoutHash] = $ref;\n this._root$Ref = this._root$Ref || $ref;\n\n return $ref;\n }\n\n /**\n * Resolves the given JSON reference.\n *\n * @param path - The path being resolved, optionally with a JSON pointer in the hash\n * @param pathFromRoot - The path of `obj` from the schema root\n * @param [options]\n * @returns\n * @protected\n */\n _resolve(path: string, pathFromRoot: string, options?: ParserOptions) {\n const absPath = url.resolve(this._root$Ref.path!, path);\n const withoutHash = url.stripHash(absPath);\n const $ref = this._$refs[withoutHash];\n\n if (!$ref) {\n throw ono(`Error resolving $ref pointer \"${path}\". 
\\n\"${withoutHash}\" not found.`);\n }\n\n if ($ref.value === undefined) {\n console.warn(`$ref entry exists but value is undefined: ${withoutHash}`);\n return null; // Treat as unresolved\n }\n\n return $ref.resolve(absPath, options, path, pathFromRoot);\n }\n\n /**\n * A map of paths/urls to {@link $Ref} objects\n *\n * @type {object}\n * @protected\n */\n _$refs: $RefsMap<S> = {};\n\n /**\n * The {@link $Ref} object that is the root of the JSON schema.\n *\n * @type {$Ref}\n * @protected\n */\n _root$Ref: $Ref<S>;\n\n constructor() {\n /**\n * Indicates whether the schema contains any circular references.\n *\n * @type {boolean}\n */\n this.circular = false;\n\n this._$refs = {};\n\n // @ts-ignore\n this._root$Ref = null;\n }\n\n /**\n * Returns the paths of all the files/URLs that are referenced by the JSON schema,\n * including the schema itself.\n *\n * @param [types] - Only return paths of the given types (\"file\", \"http\", etc.)\n * @returns\n */\n /**\n * Returns the map of JSON references and their resolved values.\n *\n * @param [types] - Only return references of the given types (\"file\", \"http\", etc.)\n * @returns\n */\n\n /**\n * Returns a POJO (plain old JavaScript object) for serialization as JSON.\n *\n * @returns {object}\n */\n toJSON = this.values;\n}\n\n/**\n * Returns the encoded and decoded paths keys of the given object.\n *\n * @param $refs - The object whose keys are URL-encoded paths\n * @param [types] - Only return paths of the given types (\"file\", \"http\", etc.)\n * @returns\n */\nfunction getPaths<S extends object = JSONSchema>($refs: $RefsMap<S>, types: string[]) {\n let paths = Object.keys($refs);\n\n // Filter the paths by type\n types = Array.isArray(types[0]) ? types[0] : Array.prototype.slice.call(types);\n if (types.length > 0 && types[0]) {\n paths = paths.filter((key) => types.includes($refs[key]!.pathType as string));\n }\n\n // Decode local filesystem paths\n return paths.map((path) => ({\n decoded: $refs[path]!.pathType === 'file' ? 
url.toFileSystemPath(path, true) : path,\n encoded: path,\n }));\n}\n","import { ono } from '@jsdevtools/ono';\nimport fs from 'fs';\n\nimport type { FileInfo } from '../types';\nimport { ResolverError } from '../util/errors';\nimport * as url from '../util/url';\n\nexport const fileResolver = {\n handler: async ({ file }: { file: FileInfo }): Promise<void> => {\n let path: string | undefined;\n\n try {\n path = url.toFileSystemPath(file.url);\n } catch (error: any) {\n throw new ResolverError(ono.uri(error, `Malformed URI: ${file.url}`), file.url);\n }\n\n try {\n const data = await fs.promises.readFile(path);\n file.data = data;\n } catch (error: any) {\n throw new ResolverError(ono(error, `Error opening file \"${path}\"`), path);\n }\n },\n};\n","import { ono } from '@jsdevtools/ono';\n\nimport type { FileInfo } from '../types';\nimport { ResolverError } from '../util/errors';\nimport { resolve } from '../util/url';\n\nexport const sendRequest = async ({\n fetchOptions,\n redirects = [],\n timeout = 60_000,\n url,\n}: {\n fetchOptions?: RequestInit;\n redirects?: string[];\n timeout?: number;\n url: URL | string;\n}): Promise<{\n fetchOptions?: RequestInit;\n response: Response;\n}> => {\n url = new URL(url);\n redirects.push(url.href);\n\n const controller = new AbortController();\n const timeoutId = setTimeout(() => {\n controller.abort();\n }, timeout);\n const response = await fetch(url, {\n signal: controller.signal,\n ...fetchOptions,\n });\n clearTimeout(timeoutId);\n\n if (response.status >= 300 && response.status <= 399) {\n if (redirects.length > 5) {\n throw new ResolverError(\n ono(\n { status: response.status },\n `Error requesting ${redirects[0]}. \\nToo many redirects: \\n ${redirects.join(' \\n ')}`,\n ),\n );\n }\n\n if (!('location' in response.headers) || !response.headers.location) {\n throw ono(\n { status: response.status },\n `HTTP ${response.status} redirect with no location header`,\n );\n }\n\n return sendRequest({\n fetchOptions,\n redirects,\n timeout,\n url: resolve(url.href, response.headers.location as string),\n });\n }\n\n return { fetchOptions, response };\n};\n\nexport const urlResolver = {\n handler: async ({\n arrayBuffer,\n fetch: _fetch,\n file,\n }: {\n arrayBuffer?: ArrayBuffer;\n fetch?: RequestInit;\n file: FileInfo;\n }): Promise<void> => {\n let data = arrayBuffer;\n\n if (!data) {\n try {\n const { fetchOptions, response } = await sendRequest({\n fetchOptions: {\n method: 'GET',\n ..._fetch,\n },\n url: file.url,\n });\n\n if (response.status >= 400) {\n // gracefully handle HEAD method not allowed\n if (response.status !== 405 || fetchOptions?.method !== 'HEAD') {\n throw ono({ status: response.status }, `HTTP ERROR ${response.status}`);\n }\n }\n\n data = response.body ? 
await response.arrayBuffer() : new ArrayBuffer(0);\n } catch (error: any) {\n throw new ResolverError(ono(error, `Error requesting ${file.url}`), file.url);\n }\n }\n\n file.data = Buffer.from(data!);\n },\n};\n","import type { $RefParser } from '.';\nimport { getResolvedInput } from '.';\nimport type { $RefParserOptions } from './options';\nimport { newFile, parseFile } from './parse';\nimport Pointer from './pointer';\nimport $Ref from './ref';\nimport type $Refs from './refs';\nimport { fileResolver } from './resolvers/file';\nimport { urlResolver } from './resolvers/url';\nimport type { JSONSchema } from './types';\nimport { isHandledError } from './util/errors';\nimport * as url from './util/url';\n\n/**\n * Crawls the JSON schema, finds all external JSON references, and resolves their values.\n * This method does not mutate the JSON schema. The resolved values are added to {@link $RefParser#$refs}.\n *\n * NOTE: We only care about EXTERNAL references here. INTERNAL references are only relevant when dereferencing.\n *\n * @returns\n * The promise resolves once all JSON references in the schema have been resolved,\n * including nested references that are contained in externally-referenced files.\n */\nexport async function resolveExternal(parser: $RefParser, options: $RefParserOptions) {\n const promises = crawl(parser.schema, {\n $refs: parser.$refs,\n options: options.parse,\n path: `${parser.$refs._root$Ref.path}#`,\n });\n await Promise.all(promises);\n}\n\n/**\n * Recursively crawls the given value, and resolves any external JSON references.\n *\n * @param obj - The value to crawl. If it's not an object or array, it will be ignored.\n * @returns An array of promises. There will be one promise for each JSON reference in `obj`.\n * If `obj` does not contain any JSON references, then the array will be empty.\n * If any of the JSON references point to files that contain additional JSON references,\n * then the corresponding promise will internally reference an array of promises.\n */\nfunction crawl<S extends object = JSONSchema>(\n obj: string | Buffer | S | undefined | null,\n {\n $refs,\n external = false,\n options,\n path,\n seen = new Set(),\n }: {\n $refs: $Refs<S>;\n /** Whether `obj` was found in an external document. */\n external?: boolean;\n options: $RefParserOptions['parse'];\n /** The full path of `obj`, possibly with a JSON Pointer in the hash. 
*/\n path: string;\n seen?: Set<unknown>;\n },\n): ReadonlyArray<Promise<unknown>> {\n let promises: Array<Promise<unknown>> = [];\n\n if (obj && typeof obj === 'object' && !ArrayBuffer.isView(obj) && !seen.has(obj)) {\n seen.add(obj);\n\n if ($Ref.isExternal$Ref(obj)) {\n promises.push(\n resolve$Ref<S>(obj, {\n $refs,\n options,\n path,\n seen,\n }),\n );\n }\n\n for (const [key, value] of Object.entries(obj)) {\n promises = promises.concat(\n crawl(value, {\n $refs,\n external,\n options,\n path: Pointer.join(path, key),\n seen,\n }),\n );\n }\n }\n\n return promises;\n}\n\n/**\n * Resolves the given JSON Reference, and then crawls the resulting value.\n *\n * @param $ref - The JSON Reference to resolve\n * @param path - The full path of `$ref`, possibly with a JSON Pointer in the hash\n * @param $refs\n * @param options\n *\n * @returns\n * The promise resolves once all JSON references in the object have been resolved,\n * including nested references that are contained in externally-referenced files.\n */\nasync function resolve$Ref<S extends object = JSONSchema>(\n $ref: S,\n {\n $refs,\n options,\n path,\n seen,\n }: {\n $refs: $Refs<S>;\n options: $RefParserOptions['parse'];\n path: string;\n seen: Set<unknown>;\n },\n): Promise<unknown> {\n const resolvedPath = url.resolve(path, ($ref as JSONSchema).$ref!);\n const withoutHash = url.stripHash(resolvedPath);\n\n // If this ref points back to an input source we've already merged, avoid re-importing\n // by checking if the path (without hash) matches a known source in parser and we can serve it internally later.\n // We keep normal flow but ensure cache hit if already added.\n // Do we already have this $ref?\n const ref = $refs._$refs[withoutHash];\n if (ref) {\n // We've already parsed this $ref, so crawl it to resolve its own externals\n const promises = crawl(ref.value as S, {\n $refs,\n external: true,\n options,\n path: `${withoutHash}#`,\n seen,\n });\n return Promise.all(promises);\n }\n\n // Parse the $referenced file/url\n const file = newFile(resolvedPath);\n\n // Add a new $Ref for this file, even though we don't have the value yet.\n // This ensures that we don't simultaneously read & parse the same file multiple times\n const $refAdded = $refs._add(file.url);\n\n try {\n const resolvedInput = getResolvedInput({ pathOrUrlOrSchema: resolvedPath });\n\n $refAdded.pathType = resolvedInput.type;\n\n let promises: ReadonlyArray<Promise<unknown>> = [];\n\n if (resolvedInput.type !== 'json') {\n const resolver = resolvedInput.type === 'file' ? 
fileResolver : urlResolver;\n await resolver.handler({ file });\n const parseResult = await parseFile(file, options);\n $refAdded.value = parseResult.result;\n promises = crawl(parseResult.result, {\n $refs,\n external: true,\n options,\n path: `${withoutHash}#`,\n seen,\n });\n }\n\n return Promise.all(promises);\n } catch (error) {\n if (isHandledError(error)) {\n $refAdded.value = error;\n }\n throw error;\n }\n}\n","import { ono } from '@jsdevtools/ono';\n\nimport { bundle as _bundle } from './bundle';\nimport { getJsonSchemaRefParserDefaultOptions } from './options';\nimport { newFile, parseFile } from './parse';\nimport $Refs from './refs';\nimport { resolveExternal } from './resolve-external';\nimport { fileResolver } from './resolvers/file';\nimport { urlResolver } from './resolvers/url';\nimport type { JSONSchema } from './types';\nimport { isHandledError, JSONParserErrorGroup } from './util/errors';\nimport * as url from './util/url';\n\ninterface ResolvedInput {\n path: string;\n schema: string | JSONSchema | Buffer | Awaited<JSONSchema> | undefined;\n type: 'file' | 'json' | 'url';\n}\n\nexport function getResolvedInput({\n pathOrUrlOrSchema,\n}: {\n pathOrUrlOrSchema: JSONSchema | string | unknown;\n}): ResolvedInput {\n if (!pathOrUrlOrSchema) {\n throw ono(`Expected a file path, URL, or object. Got ${pathOrUrlOrSchema}`);\n }\n\n const resolvedInput: ResolvedInput = {\n path: typeof pathOrUrlOrSchema === 'string' ? pathOrUrlOrSchema : '',\n schema: undefined,\n type: 'url',\n };\n\n // If the path is a filesystem path, then convert it to a URL.\n // NOTE: According to the JSON Reference spec, these should already be URLs,\n // but, in practice, many people use local filesystem paths instead.\n // So we're being generous here and doing the conversion automatically.\n // This is not intended to be a 100% bulletproof solution.\n // If it doesn't work for your use-case, then use a URL instead.\n if (resolvedInput.path && url.isFileSystemPath(resolvedInput.path)) {\n resolvedInput.path = url.fromFileSystemPath(resolvedInput.path);\n resolvedInput.type = 'file';\n } else if (!resolvedInput.path && pathOrUrlOrSchema && typeof pathOrUrlOrSchema === 'object') {\n if ('$id' in pathOrUrlOrSchema && pathOrUrlOrSchema.$id) {\n // when schema id has defined an URL should use that hostname to request the references,\n // instead of using the current page URL\n const { hostname, protocol } = new URL(pathOrUrlOrSchema.$id as string);\n resolvedInput.path = `${protocol}//${hostname}:${protocol === 'https:' ? 
443 : 80}`;\n resolvedInput.type = 'url';\n } else {\n resolvedInput.schema = pathOrUrlOrSchema;\n resolvedInput.type = 'json';\n }\n }\n\n if (resolvedInput.type !== 'json') {\n // resolve the absolute path of the schema\n resolvedInput.path = url.resolve(url.cwd(), resolvedInput.path);\n }\n\n return resolvedInput;\n}\n\n// NOTE: previously used helper removed as unused\n\n/**\n * This class parses a JSON schema, builds a map of its JSON references and their resolved values,\n * and provides methods for traversing, manipulating, and dereferencing those references.\n */\nexport class $RefParser {\n /**\n * The resolved JSON references\n *\n * @type {$Refs}\n * @readonly\n */\n $refs = new $Refs<JSONSchema>();\n public options = getJsonSchemaRefParserDefaultOptions();\n /**\n * The parsed (and possibly dereferenced) JSON schema object\n *\n * @type {object}\n * @readonly\n */\n public schema: JSONSchema | null = null;\n public schemaMany: JSONSchema[] = [];\n public schemaManySources: string[] = [];\n public sourcePathToPrefix: Map<string, string> = new Map();\n\n /**\n * Bundles all referenced files/URLs into a single schema that only has internal `$ref` pointers. This lets you split-up your schema however you want while you're building it, but easily combine all those files together when it's time to package or distribute the schema to other people. The resulting schema size will be small, since it will still contain internal JSON references rather than being fully-dereferenced.\n *\n * This also eliminates the risk of circular references, so the schema can be safely serialized using `JSON.stringify()`.\n *\n * See https://apitools.dev/json-schema-ref-parser/docs/ref-parser.html#bundleschema-options-callback\n *\n * @param pathOrUrlOrSchema A JSON Schema object, or the file path or URL of a JSON Schema file.\n */\n public async bundle({\n arrayBuffer,\n fetch,\n pathOrUrlOrSchema,\n resolvedInput,\n }: {\n arrayBuffer?: ArrayBuffer;\n fetch?: RequestInit;\n pathOrUrlOrSchema: JSONSchema | string | unknown;\n resolvedInput?: ResolvedInput;\n }): Promise<JSONSchema> {\n await this.parse({\n arrayBuffer,\n fetch,\n pathOrUrlOrSchema,\n resolvedInput,\n });\n\n await resolveExternal(this, this.options);\n const errors = JSONParserErrorGroup.getParserErrors(this);\n if (errors.length > 0) {\n throw new JSONParserErrorGroup(this);\n }\n _bundle(this, this.options);\n const errors2 = JSONParserErrorGroup.getParserErrors(this);\n if (errors2.length > 0) {\n throw new JSONParserErrorGroup(this);\n }\n return this.schema!;\n }\n\n /**\n * Bundles multiple roots (files/URLs/objects) into a single schema by creating a synthetic root\n * that references each input, resolving all externals, and then hoisting via the existing bundler.\n */\n public async bundleMany({\n arrayBuffer,\n fetch,\n pathOrUrlOrSchemas,\n resolvedInputs,\n }: {\n arrayBuffer?: ArrayBuffer[];\n fetch?: RequestInit;\n pathOrUrlOrSchemas: Array<JSONSchema | string | unknown>;\n resolvedInputs?: ResolvedInput[];\n }): Promise<JSONSchema> {\n await this.parseMany({ arrayBuffer, fetch, pathOrUrlOrSchemas, resolvedInputs });\n this.mergeMany();\n\n await resolveExternal(this, this.options);\n const errors = JSONParserErrorGroup.getParserErrors(this);\n if (errors.length > 0) {\n throw new JSONParserErrorGroup(this);\n }\n _bundle(this, this.options);\n // Merged root is ready for bundling\n\n const errors2 = JSONParserErrorGroup.getParserErrors(this);\n if (errors2.length > 0) {\n throw new JSONParserErrorGroup(this);\n }\n return 
this.schema!;\n }\n\n /**\n * Parses the given JSON schema.\n * This method does not resolve any JSON references.\n * It just reads a single file in JSON or YAML format, and parse it as a JavaScript object.\n *\n * @param pathOrUrlOrSchema A JSON Schema object, or the file path or URL of a JSON Schema file.\n * @returns - The returned promise resolves with the parsed JSON schema object.\n */\n public async parse({\n arrayBuffer,\n fetch,\n pathOrUrlOrSchema,\n resolvedInput: _resolvedInput,\n }: {\n arrayBuffer?: ArrayBuffer;\n fetch?: RequestInit;\n pathOrUrlOrSchema: JSONSchema | string | unknown;\n resolvedInput?: ResolvedInput;\n }): Promise<{ schema: JSONSchema }> {\n const resolvedInput = _resolvedInput || getResolvedInput({ pathOrUrlOrSchema });\n const { path, type } = resolvedInput;\n let { schema } = resolvedInput;\n\n // reset everything\n this.schema = null;\n this.$refs = new $Refs();\n\n if (schema) {\n // immediately add a new $Ref with the schema object as value\n const $ref = this.$refs._add(path);\n $ref.pathType = url.isFileSystemPath(path) ? 'file' : 'http';\n $ref.value = schema;\n } else if (type !== 'json') {\n const file = newFile(path);\n\n // Add a new $Ref for this file, even though we don't have the value yet.\n // This ensures that we don't simultaneously read & parse the same file multiple times\n const $refAdded = this.$refs._add(file.url);\n $refAdded.pathType = type;\n try {\n const resolver = type === 'file' ? fileResolver : urlResolver;\n await resolver.handler({\n arrayBuffer,\n fetch,\n file,\n });\n const parseResult = await parseFile(file, this.options.parse);\n $refAdded.value = parseResult.result;\n schema = parseResult.result;\n } catch (error) {\n if (isHandledError(error)) {\n $refAdded.value = error;\n }\n throw error;\n }\n }\n\n if (schema === null || typeof schema !== 'object' || Buffer.isBuffer(schema)) {\n throw ono.syntax(`\"${this.$refs._root$Ref.path || schema}\" is not a valid JSON Schema`);\n }\n\n this.schema = schema;\n\n return {\n schema,\n };\n }\n\n private async parseMany({\n arrayBuffer,\n fetch,\n pathOrUrlOrSchemas,\n resolvedInputs: _resolvedInputs,\n }: {\n arrayBuffer?: ArrayBuffer[];\n fetch?: RequestInit;\n pathOrUrlOrSchemas: Array<JSONSchema | string | unknown>;\n resolvedInputs?: ResolvedInput[];\n }): Promise<{ schemaMany: JSONSchema[] }> {\n const resolvedInputs = [...(_resolvedInputs || [])];\n resolvedInputs.push(\n ...(pathOrUrlOrSchemas.map((schema) => getResolvedInput({ pathOrUrlOrSchema: schema })) ||\n []),\n );\n\n this.schemaMany = [];\n this.schemaManySources = [];\n this.sourcePathToPrefix = new Map();\n\n for (let i = 0; i < resolvedInputs.length; i++) {\n const resolvedInput = resolvedInputs[i]!;\n const { path, type } = resolvedInput;\n let { schema } = resolvedInput;\n\n if (schema) {\n // keep schema as-is\n } else if (type !== 'json') {\n const file = newFile(path);\n\n // Add a new $Ref for this file, even though we don't have the value yet.\n // This ensures that we don't simultaneously read & parse the same file multiple times\n const $refAdded = this.$refs._add(file.url);\n $refAdded.pathType = type;\n try {\n const resolver = type === 'file' ? 
fileResolver : urlResolver;\n await resolver.handler({\n arrayBuffer: arrayBuffer?.[i],\n fetch,\n file,\n });\n const parseResult = await parseFile(file, this.options.parse);\n $refAdded.value = parseResult.result;\n schema = parseResult.result;\n } catch (error) {\n if (isHandledError(error)) {\n $refAdded.value = error;\n }\n throw error;\n }\n }\n\n if (schema === null || typeof schema !== 'object' || Buffer.isBuffer(schema)) {\n throw ono.syntax(`\"${this.$refs._root$Ref.path || schema}\" is not a valid JSON Schema`);\n }\n\n this.schemaMany.push(schema);\n this.schemaManySources.push(path && path.length ? path : url.cwd());\n }\n\n return {\n schemaMany: this.schemaMany,\n };\n }\n\n public mergeMany(): JSONSchema {\n const schemas = this.schemaMany || [];\n if (schemas.length === 0) {\n throw ono('mergeMany called with no schemas. Did you run parseMany?');\n }\n\n const merged: any = {};\n\n // Determine spec version: prefer first occurrence of openapi, else swagger\n let chosenOpenapi: string | undefined;\n let chosenSwagger: string | undefined;\n for (const s of schemas) {\n if (!chosenOpenapi && s && typeof (s as any).openapi === 'string') {\n chosenOpenapi = (s as any).openapi;\n }\n if (!chosenSwagger && s && typeof (s as any).swagger === 'string') {\n chosenSwagger = (s as any).swagger;\n }\n if (chosenOpenapi && chosenSwagger) {\n break;\n }\n }\n if (typeof chosenOpenapi === 'string') {\n merged.openapi = chosenOpenapi;\n } else if (typeof chosenSwagger === 'string') {\n merged.swagger = chosenSwagger;\n }\n\n // Merge info: take first non-empty per-field across inputs\n const infoAccumulator: any = {};\n for (const s of schemas) {\n const info = (s as any)?.info;\n if (info && typeof info === 'object') {\n for (const [k, v] of Object.entries(info)) {\n if (infoAccumulator[k] === undefined && v !== undefined) {\n infoAccumulator[k] = JSON.parse(JSON.stringify(v));\n }\n }\n }\n }\n if (Object.keys(infoAccumulator).length > 0) {\n merged.info = infoAccumulator;\n }\n\n // Merge servers: union by url+description\n const servers: any[] = [];\n const seenServers = new Set<string>();\n for (const s of schemas) {\n const arr = (s as any)?.servers;\n if (Array.isArray(arr)) {\n for (const srv of arr) {\n if (srv && typeof srv === 'object') {\n const key = `${srv.url || ''}|${srv.description || ''}`;\n if (!seenServers.has(key)) {\n seenServers.add(key);\n servers.push(JSON.parse(JSON.stringify(srv)));\n }\n }\n }\n }\n }\n if (servers.length > 0) {\n merged.servers = servers;\n }\n\n merged.paths = {};\n merged.components = {};\n\n const componentSections = [\n 'schemas',\n 'parameters',\n 'requestBodies',\n 'responses',\n 'headers',\n 'securitySchemes',\n 'examples',\n 'links',\n 'callbacks',\n ];\n for (const sec of componentSections) {\n merged.components[sec] = {};\n }\n\n const tagNameSet = new Set<string>();\n const tags: any[] = [];\n const usedOpIds = new Set<string>();\n\n const baseName = (p: string) => {\n try {\n const withoutHash = p.split('#')[0]!;\n const parts = withoutHash.split('/');\n const filename = parts[parts.length - 1] || 'schema';\n const dot = filename.lastIndexOf('.');\n const raw = dot > 0 ? 
filename.substring(0, dot) : filename;\n return raw.replace(/[^A-Za-z0-9_-]/g, '_');\n } catch {\n return 'schema';\n }\n };\n const unique = (set: Set<string>, proposed: string) => {\n let name = proposed;\n let i = 2;\n while (set.has(name)) {\n name = `${proposed}_${i++}`;\n }\n set.add(name);\n return name;\n };\n\n const rewriteRef = (ref: string, refMap: Map<string, string>): string => {\n // OAS3: #/components/{section}/{name}...\n let m = ref.match(/^#\\/components\\/([^/]+)\\/([^/]+)(.*)$/);\n if (m) {\n const base = `#/components/${m[1]}/${m[2]}`;\n const mapped = refMap.get(base);\n if (mapped) {\n return mapped + (m[3] || '');\n }\n }\n // OAS2: #/definitions/{name}...\n m = ref.match(/^#\\/definitions\\/([^/]+)(.*)$/);\n if (m) {\n const base = `#/components/schemas/${m[1]}`;\n const mapped = refMap.get(base);\n if (mapped) {\n // map definitions -> components/schemas\n return mapped + (m[2] || '');\n }\n }\n return ref;\n };\n\n const cloneAndRewrite = (\n obj: any,\n refMap: Map<string, string>,\n tagMap: Map<string, string>,\n opIdPrefix: string,\n basePath: string,\n ): any => {\n if (obj === null || obj === undefined) {\n return obj;\n }\n if (Array.isArray(obj)) {\n return obj.map((v) => cloneAndRewrite(v, refMap, tagMap, opIdPrefix, basePath));\n }\n if (typeof obj !== 'object') {\n return obj;\n }\n\n const out: any = {};\n for (const [k, v] of Object.entries(obj)) {\n if (k === '$ref' && typeof v === 'string') {\n const s = v as string;\n if (s.startsWith('#')) {\n out[k] = rewriteRef(s, refMap);\n } else {\n const proto = url.getProtocol(s);\n if (proto === undefined) {\n // relative external ref -> absolutize against source base path\n out[k] = url.resolve(basePath + '#', s);\n } else {\n out[k] = s;\n }\n }\n } else if (k === 'tags' && Array.isArray(v) && v.every((x) => typeof x === 'string')) {\n out[k] = v.map((t) => tagMap.get(t) || t);\n } else if (k === 'operationId' && typeof v === 'string') {\n out[k] = unique(usedOpIds, `${opIdPrefix}_${v}`);\n } else {\n out[k] = cloneAndRewrite(v as any, refMap, tagMap, opIdPrefix, basePath);\n }\n }\n return out;\n };\n\n for (let i = 0; i < schemas.length; i++) {\n const schema: any = schemas[i] || {};\n const sourcePath = this.schemaManySources[i] || `multi://input/${i + 1}`;\n const prefix = baseName(sourcePath);\n\n // Track prefix for this source path (strip hash). Only map real file/http paths\n const withoutHash = url.stripHash(sourcePath);\n const protocol = url.getProtocol(withoutHash);\n if (\n protocol === undefined ||\n protocol === 'file' ||\n protocol === 'http' ||\n protocol === 'https'\n ) {\n this.sourcePathToPrefix.set(withoutHash, prefix);\n }\n\n const refMap = new Map<string, string>();\n const tagMap = new Map<string, string>();\n\n const srcComponents = (schema.components || {}) as any;\n for (const sec of componentSections) {\n const group = srcComponents[sec] || {};\n for (const [name] of Object.entries(group)) {\n const newName = `${prefix}_${name}`;\n refMap.set(`#/components/${sec}/${name}`, `#/components/${sec}/${newName}`);\n }\n }\n\n const srcTags: any[] = Array.isArray(schema.tags) ? schema.tags : [];\n for (const t of srcTags) {\n if (!t || typeof t !== 'object' || typeof t.name !== 'string') {\n continue;\n }\n const desired = t.name;\n const finalName = tagNameSet.has(desired) ? 
`${prefix}_${desired}` : desired;\n tagNameSet.add(finalName);\n tagMap.set(desired, finalName);\n if (!tags.find((x) => x && x.name === finalName)) {\n tags.push({ ...t, name: finalName });\n }\n }\n\n for (const sec of componentSections) {\n const group = (schema.components && schema.components[sec]) || {};\n for (const [name, val] of Object.entries(group)) {\n const newName = `${prefix}_${name}`;\n merged.components[sec][newName] = cloneAndRewrite(\n val,\n refMap,\n tagMap,\n prefix,\n url.stripHash(sourcePath),\n );\n }\n }\n\n const srcPaths = (schema.paths || {}) as Record<string, any>;\n for (const [p, item] of Object.entries(srcPaths)) {\n let targetPath = p;\n if (merged.paths[p]) {\n const trimmed = p.startsWith('/') ? p.substring(1) : p;\n targetPath = `/${prefix}/${trimmed}`;\n }\n merged.paths[targetPath] = cloneAndRewrite(\n item,\n refMap,\n tagMap,\n prefix,\n url.stripHash(sourcePath),\n );\n }\n }\n\n if (tags.length > 0) {\n merged.tags = tags;\n }\n\n // Rebuild $refs root using the first input's path to preserve external resolution semantics\n const rootPath = this.schemaManySources[0] || url.cwd();\n this.$refs = new $Refs();\n const rootRef = this.$refs._add(rootPath);\n rootRef.pathType = url.isFileSystemPath(rootPath) ? 'file' : 'http';\n rootRef.value = merged;\n this.schema = merged;\n return merged as JSONSchema;\n }\n}\n\nexport { sendRequest } from './resolvers/url';\nexport type { JSONSchema } from './types';\n"],"mappings":";;;;;;AAAA,SAAwB,mBAAmB,UAA0B;AAEnE,KAAI,SAAS,WAAW,UAAU,CAChC,QAAO;AAGT,QAAO,SAAS,WAAW,MAAM,IAAI;;;;;ACNvC,MAAM,iBAAiB,OAAO,KAAK,WAAW,UAAU,WAAW,QAAQ,WAAW,GAAG;AACzF,MAAa,kBAAkB;;;;ACI/B,MAAM,sBAAsB;AAC5B,MAAM,kBAAkB;AAGxB,MAAM,oBAAoB,CACxB,CAAC,OAAO,MAAM,EACd,CAAC,MAAM,MAAM,CACd;AAGD,MAAM,oBAAoB;CAAC;CAAQ;CAAK;CAAQ;CAAK;CAAQ;CAAK;CAAQ;CAAK;CAAQ;CAAI;;;;;;AAO3F,SAAgB,QAAQ,MAAc,IAAY;CAChD,MAAM,UAAU,IAAI,IAAI,mBAAmB,KAAK,EAAE,aAAa;CAC/D,MAAM,cAAc,IAAI,IAAI,mBAAmB,GAAG,EAAE,QAAQ;CAC5D,MAAM,YAAY,GAAG,MAAM,SAAS,GAAG,MAAM;AAC7C,KAAI,YAAY,aAAa,YAAY;EAEvC,MAAM,EAAE,MAAM,UAAU,WAAW;AACnC,SAAO,WAAW,SAAS,OAAO;;AAEpC,QAAO,YAAY,UAAU,GAAG;;;;;;;AAQlC,SAAgB,MAAM;AACpB,KAAI,OAAO,WAAW,YACpB,QAAO,SAAS;CAGlB,MAAM,OAAO,QAAQ,KAAK;CAE1B,MAAM,WAAW,KAAK,MAAM,GAAG;AAC/B,KAAI,aAAa,OAAO,aAAa,KACnC,QAAO;KAEP,QAAO,OAAO;;;;;;;;AAUlB,SAAgB,YAAY,MAA0B;CACpD,MAAM,QAAQ,gBAAgB,KAAK,QAAQ,GAAG;AAC9C,KAAI,MACF,QAAO,MAAM,GAAI,aAAa;;;;;;;;;AAYlC,SAAgB,aAAa,MAAW;CACtC,MAAM,UAAU,KAAK,YAAY,IAAI;AACrC,KAAI,UAAU,GACZ,QAAO,WAAW,KAAK,OAAO,QAAQ,CAAC,aAAa,CAAC;AAEvD,QAAO;;;;;;;;AAST,SAAgB,WAAW,MAAW;CACpC,MAAM,aAAa,KAAK,QAAQ,IAAI;AACpC,KAAI,aAAa,GACf,QAAO,KAAK,OAAO,GAAG,WAAW;AAEnC,QAAO;;;;;;;;;AAUT,SAAgB,QAAQ,MAA0B;AAChD,KAAI,CAAC,KACH,QAAO;CAET,MAAM,YAAY,KAAK,QAAQ,IAAI;AACnC,KAAI,YAAY,GACd,QAAO,KAAK,UAAU,UAAU;AAElC,QAAO;;;;;;;;AAST,SAAgB,UAAU,MAA2B;AACnD,KAAI,CAAC,KACH,QAAO;CAET,MAAM,YAAY,KAAK,QAAQ,IAAI;AACnC,KAAI,YAAY,GACd,QAAO,KAAK,UAAU,GAAG,UAAU;AAErC,QAAO;;;;;;;;;AAUT,SAAgB,iBAAiB,MAA0B;AAEzD,KAAI,OAAO,WAAW,eAAgB,OAAO,YAAY,eAAe,QAAQ,QAG9E,QAAO;CAGT,MAAM,WAAW,YAAY,KAAK;AAClC,QAAO,aAAa,UAAa,aAAa;;;;;;;;;;;;;;;;;;AAmBhD,SAAgB,mBAAmB,MAAc;AAG/C,KAAI,WAAW,EAAE;EACf,MAAM,aAAa,KAAK;EACxB,MAAM,YAAY,KAAK,aAAa;EAEpC,MAAM,aADsB,mBAAmB,WAAW,CACnB,aAAa;EACpD,MAAM,gBAAgB,UAAU,SAAS,WAAW;EACpD,MAAM,gBAAgB,UAAU,SAAS,WAAW;EACpD,MAAM,iBACJ,MAAM,WAAW,KAAK,IACtB,KAAK,WAAW,UAAU,IAC1B,KAAK,WAAW,WAAW,IAC3B,KAAK,WAAW,UAAU;AAE5B,MAAI,EAAE,iBAAiB,iBAAiB,mBAAmB,CAAC,WAAW,WAAW,OAAO,CACvF,QAAO,KAAK,YAAY,KAAK;AAE/B,SAAO,mBAAmB,KAAK;;AAIjC,QAAO,UAAU,KAAK;AAKtB,MAAK,MAAM,WAAW,kBACpB,QAAO,KAAK,QAAQ,QAAQ,IAAI,QAAQ,GAAG
CL,gBAAc,SAAS;AACvB,gBAAc,OAAO;;AAIzB,KAAI,cAAc,SAAS,OAEzB,eAAc,OAAOC,QAAYC,KAAS,EAAE,cAAc,KAAK;AAGjE,QAAO;;;;;;AAST,IAAa,aAAb,MAAwB;;;;;;;CAOtB,QAAQ,IAAI,OAAmB;CAC/B,AAAO,UAAU,sCAAsC;;;;;;;CAOvD,AAAO,SAA4B;CACnC,AAAO,aAA2B,EAAE;CACpC,AAAO,oBAA8B,EAAE;CACvC,AAAO,qCAA0C,IAAI,KAAK;;;;;;;;;;CAW1D,MAAa,OAAO,EAClB,aACA,gBACA,mBACA,iBAMsB;AACtB,QAAM,KAAK,MAAM;GACf;GACA;GACA;GACA;GACD,CAAC;AAEF,QAAM,gBAAgB,MAAM,KAAK,QAAQ;AAEzC,MADe,qBAAqB,gBAAgB,KAAK,CAC9C,SAAS,EAClB,OAAM,IAAI,qBAAqB,KAAK;AAEtC,SAAQ,MAAM,KAAK,QAAQ;AAE3B,MADgB,qBAAqB,gBAAgB,KAAK,CAC9C,SAAS,EACnB,OAAM,IAAI,qBAAqB,KAAK;AAEtC,SAAO,KAAK;;;;;;CAOd,MAAa,WAAW,EACtB,aACA,gBACA,oBACA,kBAMsB;AACtB,QAAM,KAAK,UAAU;GAAE;GAAa;GAAO;GAAoB;GAAgB,CAAC;AAChF,OAAK,WAAW;AAEhB,QAAM,gBAAgB,MAAM,KAAK,QAAQ;AAEzC,MADe,qBAAqB,gBAAgB,KAAK,CAC9C,SAAS,EAClB,OAAM,IAAI,qBAAqB,KAAK;AAEtC,SAAQ,MAAM,KAAK,QAAQ;AAI3B,MADgB,qBAAqB,gBAAgB,KAAK,CAC9C,SAAS,EACnB,OAAM,IAAI,qBAAqB,KAAK;AAEtC,SAAO,KAAK;;;;;;;;;;CAWd,MAAa,MAAM,EACjB,aACA,gBACA,mBACA,eAAe,kBAMmB;EAClC,MAAM,gBAAgB,kBAAkB,iBAAiB,EAAE,mBAAmB,CAAC;EAC/E,MAAM,EAAE,MAAM,SAAS;EACvB,IAAI,EAAE,WAAW;AAGjB,OAAK,SAAS;AACd,OAAK,QAAQ,IAAI,OAAO;AAExB,MAAI,QAAQ;GAEV,MAAM,OAAO,KAAK,MAAM,KAAK,KAAK;AAClC,QAAK,WAAWH,iBAAqB,KAAK,GAAG,SAAS;AACtD,QAAK,QAAQ;aACJ,SAAS,QAAQ;GAC1B,MAAM,OAAO,QAAQ,KAAK;GAI1B,MAAM,YAAY,KAAK,MAAM,KAAK,KAAK,IAAI;AAC3C,aAAU,WAAW;AACrB,OAAI;AAEF,WADiB,SAAS,SAAS,eAAe,aACnC,QAAQ;KACrB;KACA;KACA;KACD,CAAC;IACF,MAAM,cAAc,MAAM,UAAU,MAAM,KAAK,QAAQ,MAAM;AAC7D,cAAU,QAAQ,YAAY;AAC9B,aAAS,YAAY;YACd,OAAO;AACd,QAAI,eAAe,MAAM,CACvB,WAAU,QAAQ;AAEpB,UAAM;;;AAIV,MAAI,WAAW,QAAQ,OAAO,WAAW,YAAY,OAAO,SAAS,OAAO,CAC1E,OAAM,IAAI,OAAO,IAAI,KAAK,MAAM,UAAU,QAAQ,OAAO,8BAA8B;AAGzF,OAAK,SAAS;AAEd,SAAO,EACL,QACD;;CAGH,MAAc,UAAU,EACtB,aACA,gBACA,oBACA,gBAAgB,mBAMwB;EACxC,MAAM,iBAAiB,CAAC,GAAI,mBAAmB,EAAE,CAAE;AACnD,iBAAe,KACb,GAAI,mBAAmB,KAAK,WAAW,iBAAiB,EAAE,mBAAmB,QAAQ,CAAC,CAAC,IACrF,EAAE,CACL;AAED,OAAK,aAAa,EAAE;AACpB,OAAK,oBAAoB,EAAE;AAC3B,OAAK,qCAAqB,IAAI,KAAK;AAEnC,OAAK,IAAI,IAAI,GAAG,IAAI,eAAe,QAAQ,KAAK;GAC9C,MAAM,gBAAgB,eAAe;GACrC,MAAM,EAAE,MAAM,SAAS;GACvB,IAAI,EAAE,WAAW;AAEjB,OAAI,QAAQ,YAED,SAAS,QAAQ;IAC1B,MAAM,OAAO,QAAQ,KAAK;IAI1B,MAAM,YAAY,KAAK,MAAM,KAAK,KAAK,IAAI;AAC3C,cAAU,WAAW;AACrB,QAAI;AAEF,YADiB,SAAS,SAAS,eAAe,aACnC,QAAQ;MACrB,aAAa,cAAc;MAC3B;MACA;MACD,CAAC;KACF,MAAM,cAAc,MAAM,UAAU,MAAM,KAAK,QAAQ,MAAM;AAC7D,eAAU,QAAQ,YAAY;AAC9B,cAAS,YAAY;aACd,OAAO;AACd,SAAI,eAAe,MAAM,CACvB,WAAU,QAAQ;AAEpB,WAAM;;;AAIV,OAAI,WAAW,QAAQ,OAAO,WAAW,YAAY,OAAO,SAAS,OAAO,CAC1E,OAAM,IAAI,OAAO,IAAI,KAAK,MAAM,UAAU,QAAQ,OAAO,8BAA8B;AAGzF,QAAK,WAAW,KAAK,OAAO;AAC5B,QAAK,kBAAkB,KAAK,QAAQ,KAAK,SAAS,OAAOG,KAAS,CAAC;;AAGrE,SAAO,EACL,YAAY,KAAK,YAClB;;CAGH,AAAO,YAAwB;EAC7B,MAAM,UAAU,KAAK,cAAc,EAAE;AACrC,MAAI,QAAQ,WAAW,EACrB,OAAM,IAAI,2DAA2D;EAGvE,MAAMC,SAAc,EAAE;EAGtB,IAAIC;EACJ,IAAIC;AACJ,OAAK,MAAM,KAAK,SAAS;AACvB,OAAI,CAAC,iBAAiB,KAAK,OAAQ,EAAU,YAAY,SACvD,iBAAiB,EAAU;AAE7B,OAAI,CAAC,iBAAiB,KAAK,OAAQ,EAAU,YAAY,SACvD,iBAAiB,EAAU;AAE7B,OAAI,iBAAiB,cACnB;;AAGJ,MAAI,OAAO,kBAAkB,SAC3B,QAAO,UAAU;WACR,OAAO,kBAAkB,SAClC,QAAO,UAAU;EAInB,MAAMC,kBAAuB,EAAE;AAC/B,OAAK,MAAM,KAAK,SAAS;GACvB,MAAM,OAAQ,GAAW;AACzB,OAAI,QAAQ,OAAO,SAAS,UAC1B;SAAK,MAAM,CAAC,GAAG,MAAM,OAAO,QAAQ,KAAK,CACvC,KAAI,gBAAgB,OAAO,UAAa,MAAM,OAC5C,iBAAgB,KAAK,KAAK,MAAM,KAAK,UAAU,EAAE,CAAC;;;AAK1D,MAAI,OAAO,KAAK,gBAAgB,CAAC,SAAS,EACxC,QAAO,OAAO;EAIhB,MAAMC,UAAiB,EAAE;EACzB,MAAM,8BAAc,IAAI,KAAa;AACrC,OAAK,MAAM,KAAK,SAAS;GACvB,MAAM,MAAO,GAAW;AACxB,OAAI,MAAM,QAAQ,IAAI,EACpB;SAAK,MAAM,OAAO,IAChB,KAAI,OAAO,OAAO,QAAQ,UAAU;KAClC,MAAM,MAAM,GAAG,IAAI,OAAO,GAAG,GAAG,IAAI,eAAe;AACnD,SAAI,CAAC,YAAY,IAAI,IAAI,EAAE;AACzB,k
BAAY,IAAI,IAAI;AACpB,cAAQ,KAAK,KAAK,MAAM,KAAK,UAAU,IAAI,CAAC,CAAC;;;;;AAMvD,MAAI,QAAQ,SAAS,EACnB,QAAO,UAAU;AAGnB,SAAO,QAAQ,EAAE;AACjB,SAAO,aAAa,EAAE;EAEtB,MAAM,oBAAoB;GACxB;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACD;AACD,OAAK,MAAM,OAAO,kBAChB,QAAO,WAAW,OAAO,EAAE;EAG7B,MAAM,6BAAa,IAAI,KAAa;EACpC,MAAMC,OAAc,EAAE;EACtB,MAAM,4BAAY,IAAI,KAAa;EAEnC,MAAM,YAAY,MAAc;AAC9B,OAAI;IAEF,MAAM,QADc,EAAE,MAAM,IAAI,CAAC,GACP,MAAM,IAAI;IACpC,MAAM,WAAW,MAAM,MAAM,SAAS,MAAM;IAC5C,MAAM,MAAM,SAAS,YAAY,IAAI;AAErC,YADY,MAAM,IAAI,SAAS,UAAU,GAAG,IAAI,GAAG,UACxC,QAAQ,mBAAmB,IAAI;WACpC;AACN,WAAO;;;EAGX,MAAM,UAAU,KAAkB,aAAqB;GACrD,IAAI,OAAO;GACX,IAAI,IAAI;AACR,UAAO,IAAI,IAAI,KAAK,CAClB,QAAO,GAAG,SAAS,GAAG;AAExB,OAAI,IAAI,KAAK;AACb,UAAO;;EAGT,MAAM,cAAc,KAAa,WAAwC;GAEvE,IAAI,IAAI,IAAI,MAAM,wCAAwC;AAC1D,OAAI,GAAG;IACL,MAAM,OAAO,gBAAgB,EAAE,GAAG,GAAG,EAAE;IACvC,MAAM,SAAS,OAAO,IAAI,KAAK;AAC/B,QAAI,OACF,QAAO,UAAU,EAAE,MAAM;;AAI7B,OAAI,IAAI,MAAM,gCAAgC;AAC9C,OAAI,GAAG;IACL,MAAM,OAAO,wBAAwB,EAAE;IACvC,MAAM,SAAS,OAAO,IAAI,KAAK;AAC/B,QAAI,OAEF,QAAO,UAAU,EAAE,MAAM;;AAG7B,UAAO;;EAGT,MAAM,mBACJ,KACA,QACA,QACA,YACA,aACQ;AACR,OAAI,QAAQ,QAAQ,QAAQ,OAC1B,QAAO;AAET,OAAI,MAAM,QAAQ,IAAI,CACpB,QAAO,IAAI,KAAK,MAAM,gBAAgB,GAAG,QAAQ,QAAQ,YAAY,SAAS,CAAC;AAEjF,OAAI,OAAO,QAAQ,SACjB,QAAO;GAGT,MAAMC,MAAW,EAAE;AACnB,QAAK,MAAM,CAAC,GAAG,MAAM,OAAO,QAAQ,IAAI,CACtC,KAAI,MAAM,UAAU,OAAO,MAAM,UAAU;IACzC,MAAM,IAAI;AACV,QAAI,EAAE,WAAW,IAAI,CACnB,KAAI,KAAK,WAAW,GAAG,OAAO;aAEhBC,YAAgB,EAAE,KAClB,OAEZ,KAAI,KAAKT,QAAY,WAAW,KAAK,EAAE;QAEvC,KAAI,KAAK;cAGJ,MAAM,UAAU,MAAM,QAAQ,EAAE,IAAI,EAAE,OAAO,MAAM,OAAO,MAAM,SAAS,CAClF,KAAI,KAAK,EAAE,KAAK,MAAM,OAAO,IAAI,EAAE,IAAI,EAAE;YAChC,MAAM,iBAAiB,OAAO,MAAM,SAC7C,KAAI,KAAK,OAAO,WAAW,GAAG,WAAW,GAAG,IAAI;OAEhD,KAAI,KAAK,gBAAgB,GAAU,QAAQ,QAAQ,YAAY,SAAS;AAG5E,UAAO;;AAGT,OAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;GACvC,MAAMU,SAAc,QAAQ,MAAM,EAAE;GACpC,MAAM,aAAa,KAAK,kBAAkB,MAAM,iBAAiB,IAAI;GACrE,MAAM,SAAS,SAAS,WAAW;GAGnC,MAAM,cAAcC,UAAc,WAAW;GAC7C,MAAM,WAAWF,YAAgB,YAAY;AAC7C,OACE,aAAa,UACb,aAAa,UACb,aAAa,UACb,aAAa,QAEb,MAAK,mBAAmB,IAAI,aAAa,OAAO;GAGlD,MAAM,yBAAS,IAAI,KAAqB;GACxC,MAAM,yBAAS,IAAI,KAAqB;GAExC,MAAM,gBAAiB,OAAO,cAAc,EAAE;AAC9C,QAAK,MAAM,OAAO,mBAAmB;IACnC,MAAM,QAAQ,cAAc,QAAQ,EAAE;AACtC,SAAK,MAAM,CAAC,SAAS,OAAO,QAAQ,MAAM,EAAE;KAC1C,MAAM,UAAU,GAAG,OAAO,GAAG;AAC7B,YAAO,IAAI,gBAAgB,IAAI,GAAG,QAAQ,gBAAgB,IAAI,GAAG,UAAU;;;GAI/E,MAAMG,UAAiB,MAAM,QAAQ,OAAO,KAAK,GAAG,OAAO,OAAO,EAAE;AACpE,QAAK,MAAM,KAAK,SAAS;AACvB,QAAI,CAAC,KAAK,OAAO,MAAM,YAAY,OAAO,EAAE,SAAS,SACnD;IAEF,MAAM,UAAU,EAAE;IAClB,MAAM,YAAY,WAAW,IAAI,QAAQ,GAAG,GAAG,OAAO,GAAG,YAAY;AACrE,eAAW,IAAI,UAAU;AACzB,WAAO,IAAI,SAAS,UAAU;AAC9B,QAAI,CAAC,KAAK,MAAM,MAAM,KAAK,EAAE,SAAS,UAAU,CAC9C,MAAK,KAAK;KAAE,GAAG;KAAG,MAAM;KAAW,CAAC;;AAIxC,QAAK,MAAM,OAAO,mBAAmB;IACnC,MAAM,QAAS,OAAO,cAAc,OAAO,WAAW,QAAS,EAAE;AACjE,SAAK,MAAM,CAAC,MAAM,QAAQ,OAAO,QAAQ,MAAM,EAAE;KAC/C,MAAM,UAAU,GAAG,OAAO,GAAG;AAC7B,YAAO,WAAW,KAAK,WAAW,gBAChC,KACA,QACA,QACA,QACAD,UAAc,WAAW,CAC1B;;;GAIL,MAAM,WAAY,OAAO,SAAS,EAAE;AACpC,QAAK,MAAM,CAAC,GAAG,SAAS,OAAO,QAAQ,SAAS,EAAE;IAChD,IAAI,aAAa;AACjB,QAAI,OAAO,MAAM,GAEf,cAAa,IAAI,OAAO,GADR,EAAE,WAAW,IAAI,GAAG,EAAE,UAAU,EAAE,GAAG;AAGvD,WAAO,MAAM,cAAc,gBACzB,MACA,QACA,QACA,QACAA,UAAc,WAAW,CAC1B;;;AAIL,MAAI,KAAK,SAAS,EAChB,QAAO,OAAO;EAIhB,MAAM,WAAW,KAAK,kBAAkB,MAAMV,KAAS;AACvD,OAAK,QAAQ,IAAI,OAAO;EACxB,MAAM,UAAU,KAAK,MAAM,KAAK,SAAS;AACzC,UAAQ,WAAWH,iBAAqB,SAAS,GAAG,SAAS;AAC7D,UAAQ,QAAQ;AAChB,OAAK,SAAS;AACd,SAAO"}

package/package.json
CHANGED
@@ -1,99 +1,63 @@
 {
   "name": "@hey-api/json-schema-ref-parser",
-  "version": "1.
+  "version": "1.3.0",
   "description": "Parse, Resolve, and Dereference JSON Schema $ref pointers",
+  "keywords": [
+    "$ref",
+    "dereference",
+    "json",
+    "json-pointer",
+    "json-schema",
+    "jsonschema",
+    "resolve",
+    "schema"
+  ],
   "homepage": "https://heyapi.dev/",
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/hey-api/json-schema-ref-parser.git"
-  },
   "bugs": {
-    "url": "https://github.com/hey-api/
+    "url": "https://github.com/hey-api/openapi-ts/issues"
   },
   "license": "MIT",
   "author": {
-    "email": "lubos@heyapi.dev",
     "name": "Hey API",
+    "email": "lubos@heyapi.dev",
     "url": "https://heyapi.dev"
   },
-  "
-
-  "
-    "schema",
-    "jsonschema",
-    "json-schema",
-    "json-pointer",
-    "$ref",
-    "dereference",
-    "resolve"
-  ],
-  "types": "dist/lib/index.d.ts",
-  "main": "dist/lib/index.js",
-  "browser": {
-    "fs": false
-  },
-  "engines": {
-    "node": ">= 16"
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/hey-api/openapi-ts.git"
   },
+  "funding": "https://github.com/sponsors/hey-api",
   "files": [
-    "
+    "src",
     "dist",
     "cjs"
   ],
-  "
-
-
-
-  "
-
-
-
-  "
-
-
-  "
+  "type": "module",
+  "main": "./dist/index.mjs",
+  "types": "./dist/index.d.mts",
+  "exports": {
+    ".": {
+      "types": "./dist/index.d.mts",
+      "import": "./dist/index.mjs"
+    },
+    "./package.json": "./package.json"
+  },
+  "dependencies": {
+    "@jsdevtools/ono": "7.1.3",
+    "@types/json-schema": "7.0.15",
+    "js-yaml": "4.1.1"
   },
   "devDependencies": {
-    "@
-    "
-    "@types/eslint": "9.6.1",
-    "@types/js-yaml": "^4.0.9",
-    "@types/lodash": "^4",
-    "@types/node": "^22",
-    "@typescript-eslint/eslint-plugin": "^8.17.0",
-    "@typescript-eslint/parser": "^8.17.0",
-    "@vitest/coverage-v8": "^2.1.8",
-    "cross-env": "^7.0.3",
-    "eslint": "^9.16.0",
-    "eslint-config-prettier": "^9.1.0",
-    "eslint-config-standard": "^17.1.0",
-    "eslint-plugin-import": "^2.31.0",
-    "eslint-plugin-prettier": "^5.2.1",
-    "eslint-plugin-promise": "^7.2.1",
-    "eslint-plugin-unused-imports": "^4.1.4",
-    "globals": "^15.13.0",
-    "jsdom": "^25.0.1",
-    "prettier": "^3.4.2",
-    "rimraf": "^6.0.1",
-    "typescript": "^5.7.2",
-    "typescript-eslint": "^8.17.0",
-    "vitest": "^2.1.8"
+    "@types/js-yaml": "4.0.9",
+    "typescript": "5.9.3"
   },
-  "
-  "
-    "@types/json-schema": "^7.0.15",
-    "js-yaml": "^4.1.1",
-    "lodash": "^4.17.21"
+  "engines": {
+    "node": ">=20.19.0"
   },
-  "
-  "
-
-
-  "
-    "@semantic-release/commit-analyzer",
-    "@semantic-release/release-notes-generator",
-    "@semantic-release/npm",
-    "@semantic-release/github"
-  ]
+  "scripts": {
+    "build": "tsdown && pnpm check-exports",
+    "check-exports": "attw --pack . --profile esm-only --ignore-rules cjs-resolves-to-esm",
+    "dev": "tsdown --watch",
+    "typecheck": "tsc --noEmit"
   }
-}
+}
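
With "type": "module" and the "exports" map above, 1.3.0 resolves only to the ESM build ("./dist/index.mjs"); the "check-exports" script verifies that layout with attw's esm-only profile. A minimal consumer sketch (hypothetical file, assuming the package is installed from npm; the $RefParser API is the one exercised by the new tests below):

// consume.mts — ESM-only entry resolved via "exports" -> "./dist/index.mjs"
import { $RefParser } from '@hey-api/json-schema-ref-parser';

const refParser = new $RefParser();
// bundle() takes a local path, a URL, or a raw schema object (see the tests below)
const schema = await refParser.bundle({ pathOrUrlOrSchema: './openapi.json' });
console.log(JSON.stringify(schema, null, 2));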

package/src/__tests__/bundle.test.ts
ADDED
@@ -0,0 +1,59 @@
+import path from 'node:path';
+
+import { $RefParser } from '..';
+import { getSpecsPath } from './utils';
+
+describe('bundle', () => {
+  it('handles circular reference with description', async () => {
+    const refParser = new $RefParser();
+    const pathOrUrlOrSchema = path.join(
+      getSpecsPath(),
+      'json-schema-ref-parser',
+      'circular-ref-with-description.json',
+    );
+    const schema = await refParser.bundle({ pathOrUrlOrSchema });
+    expect(schema).toEqual({
+      schemas: {
+        Bar: {
+          $ref: '#/schemas/Foo',
+          description: 'ok',
+        },
+        Foo: {
+          $ref: '#/schemas/Bar',
+        },
+      },
+    });
+  });
+
+  it('bundles multiple references to the same file correctly', async () => {
+    const refParser = new $RefParser();
+    const pathOrUrlOrSchema = path.join(
+      getSpecsPath(),
+      'json-schema-ref-parser',
+      'multiple-refs.json',
+    );
+    const schema = (await refParser.bundle({ pathOrUrlOrSchema })) as any;
+
+    // Both parameters should now be $ref to the same internal definition
+    const firstParam = schema.paths['/test1/{pathId}'].get.parameters[0];
+    const secondParam = schema.paths['/test2/{pathId}'].get.parameters[0];
+
+    // The $ref should match the output structure in file_context_0
+    expect(firstParam.$ref).toBe('#/components/parameters/path-parameter_pathId');
+    expect(secondParam.$ref).toBe('#/components/parameters/path-parameter_pathId');
+
+    // The referenced parameter should exist and match the expected structure
+    expect(schema.components).toBeDefined();
+    expect(schema.components.parameters).toBeDefined();
+    expect(schema.components.parameters['path-parameter_pathId']).toEqual({
+      in: 'path',
+      name: 'pathId',
+      required: true,
+      schema: {
+        description: 'Unique identifier for the path',
+        format: 'uuid',
+        type: 'string',
+      },
+    });
+  });
+});
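
The bundle() option is named pathOrUrlOrSchema, and the getResolvedInput tests below show that an in-memory schema object is an accepted input, so the circular case from the first test can plausibly be reproduced without a fixture file. The object shape here is inferred from that test's expected output, not from the actual circular-ref-with-description.json contents:

import { $RefParser } from '@hey-api/json-schema-ref-parser';

// Inferred in-memory equivalent of the circular fixture: two schemas referencing each other
const pathOrUrlOrSchema = {
  schemas: {
    Bar: { $ref: '#/schemas/Foo', description: 'ok' },
    Foo: { $ref: '#/schemas/Bar' },
  },
};

const schema = await new $RefParser().bundle({ pathOrUrlOrSchema });
// As in the test, circular $ref pointers are preserved rather than expanded infinitely
console.log(schema);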

package/src/__tests__/index.test.ts
ADDED
@@ -0,0 +1,43 @@
+import path from 'node:path';
+
+import { getResolvedInput } from '../index';
+
+describe('getResolvedInput', () => {
+  it('handles url', async () => {
+    const pathOrUrlOrSchema = 'https://foo.com';
+    const resolvedInput = await getResolvedInput({ pathOrUrlOrSchema });
+    expect(resolvedInput.type).toBe('url');
+    expect(resolvedInput.schema).toBeUndefined();
+    expect(resolvedInput.path).toBe('https://foo.com/');
+  });
+
+  it('handles file', async () => {
+    const pathOrUrlOrSchema = './path/to/openapi.json';
+    const resolvedInput = await getResolvedInput({ pathOrUrlOrSchema });
+    expect(resolvedInput.type).toBe('file');
+    expect(resolvedInput.schema).toBeUndefined();
+    expect(path.normalize(resolvedInput.path).toLowerCase()).toBe(
+      path.normalize(path.resolve('./path/to/openapi.json')).toLowerCase(),
+    );
+  });
+
+  it('handles raw spec', async () => {
+    const pathOrUrlOrSchema = {
+      info: {
+        version: '1.0.0',
+      },
+      openapi: '3.1.0',
+      paths: {},
+    };
+    const resolvedInput = await getResolvedInput({ pathOrUrlOrSchema });
+    expect(resolvedInput.type).toBe('json');
+    expect(resolvedInput.schema).toEqual({
+      info: {
+        version: '1.0.0',
+      },
+      openapi: '3.1.0',
+      paths: {},
+    });
+    expect(resolvedInput.path).toBe('');
+  });
+});
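
These tests pin down the getResolvedInput contract: a string is classified as 'url' or 'file' (schema stays undefined, path is normalized), while an object is classified as 'json' with an empty path. A small sketch built on that contract, assuming getResolvedInput is re-exported from the package entry point as it is from src/index.ts here:

import { getResolvedInput } from '@hey-api/json-schema-ref-parser';

const input = process.argv[2] ?? 'https://foo.com';
const resolved = await getResolvedInput({ pathOrUrlOrSchema: input });

if (resolved.type === 'url') {
  console.log('will fetch over HTTP:', resolved.path);
} else if (resolved.type === 'file') {
  console.log('will read from disk:', resolved.path);
} else {
  // resolved.type === 'json': the schema object is already in memory
  console.log('nothing to fetch, schema provided inline');
}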

package/src/__tests__/pointer.test.ts
ADDED
@@ -0,0 +1,34 @@
+import path from 'node:path';
+
+import { $RefParser } from '..';
+import { getSpecsPath } from './utils';
+
+describe('pointer', () => {
+  it('inlines internal JSON Pointer refs under #/paths/ for OpenAPI bundling', async () => {
+    const refParser = new $RefParser();
+    const pathOrUrlOrSchema = path.join(
+      getSpecsPath(),
+      'json-schema-ref-parser',
+      'openapi-paths-ref.json',
+    );
+    const schema = (await refParser.bundle({ pathOrUrlOrSchema })) as any;
+
+    // The GET endpoint should have its schema defined inline
+    const getSchema = schema.paths['/foo'].get.responses['200'].content['application/json'].schema;
+    expect(getSchema.$ref).toBeUndefined();
+    expect(getSchema.type).toBe('object');
+    expect(getSchema.properties.bar.type).toBe('string');
+
+    // The POST endpoint should have its schema inlined (copied) instead of a $ref
+    const postSchema =
+      schema.paths['/foo'].post.responses['200'].content['application/json'].schema;
+    expect(postSchema.$ref).toBe(
+      '#/paths/~1foo/get/responses/200/content/application~1json/schema',
+    );
+    expect(postSchema.type).toBeUndefined();
+    expect(postSchema.properties?.bar?.type).toBeUndefined();
+
+    // Both schemas should be identical objects
+    expect(postSchema).not.toBe(getSchema);
+  });
+});
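
The expected $ref '#/paths/~1foo/get/responses/200/content/application~1json/schema' uses standard JSON Pointer escaping (RFC 6901): '~1' encodes '/' and '~0' encodes '~'. A self-contained decoding helper (not part of this package's API) showing how such a pointer maps back to path segments:

// Decode a JSON Pointer into its segments; per RFC 6901, '~1' is replaced before '~0'
const parsePointer = (pointer: string): string[] =>
  pointer
    .replace(/^#?\//, '')
    .split('/')
    .map((segment) => segment.replace(/~1/g, '/').replace(/~0/g, '~'));

// ['paths', '/foo', 'get', 'responses', '200', 'content', 'application/json', 'schema']
console.log(
  parsePointer('#/paths/~1foo/get/responses/200/content/application~1json/schema'),
);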