@soda-gql/swc-transformer 0.2.0
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +79 -0
- package/dist/index.cjs +915 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +91 -0
- package/dist/index.d.cts.map +1 -0
- package/dist/index.d.mts +91 -0
- package/dist/index.d.mts.map +1 -0
- package/dist/index.mjs +913 -0
- package/dist/index.mjs.map +1 -0
- package/dist/native.cjs +254 -0
- package/dist/native.cjs.map +1 -0
- package/dist/native.d.cts +46 -0
- package/dist/native.d.cts.map +1 -0
- package/dist/native.d.mts +46 -0
- package/dist/native.d.mts.map +1 -0
- package/dist/native.mjs +256 -0
- package/dist/native.mjs.map +1 -0
- package/package.json +81 -0
- package/src/index.ts +290 -0
- package/src/lib.rs +87 -0
- package/src/native/index.d.ts +42 -0
- package/src/native/index.js +316 -0
- package/src/native/swc-transformer.linux-x64-gnu.node +0 -0
- package/src/transform/analysis.rs +240 -0
- package/src/transform/imports.rs +285 -0
- package/src/transform/metadata.rs +371 -0
- package/src/transform/mod.rs +7 -0
- package/src/transform/runtime.rs +197 -0
- package/src/transform/transformer.rs +438 -0
- package/src/types/artifact.rs +107 -0
- package/src/types/config.rs +72 -0
- package/src/types/error.rs +132 -0
- package/src/types/mod.rs +12 -0
package/dist/index.cjs.map
@@ -0,0 +1 @@
+
{"version":3,"file":"index.cjs","names":["sortComparator","url","normalizePath","resolve","resolveUri","COLUMN","resolve","cast","source","nativeModule: NativeModule | null","filteredElements: BuilderArtifact[\"elements\"]","result: TransformResult","finalSourceMap: string | undefined"],"sources":["../../../node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs","../../../node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs","../../../node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs","../../../node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs","../../../node_modules/@ampproject/remapping/dist/remapping.mjs","../src/index.ts"],"sourcesContent":["// src/vlq.ts\nvar comma = \",\".charCodeAt(0);\nvar semicolon = \";\".charCodeAt(0);\nvar chars = \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/\";\nvar intToChar = new Uint8Array(64);\nvar charToInt = new Uint8Array(128);\nfor (let i = 0; i < chars.length; i++) {\n const c = chars.charCodeAt(i);\n intToChar[i] = c;\n charToInt[c] = i;\n}\nfunction decodeInteger(reader, relative) {\n let value = 0;\n let shift = 0;\n let integer = 0;\n do {\n const c = reader.next();\n integer = charToInt[c];\n value |= (integer & 31) << shift;\n shift += 5;\n } while (integer & 32);\n const shouldNegate = value & 1;\n value >>>= 1;\n if (shouldNegate) {\n value = -2147483648 | -value;\n }\n return relative + value;\n}\nfunction encodeInteger(builder, num, relative) {\n let delta = num - relative;\n delta = delta < 0 ? -delta << 1 | 1 : delta << 1;\n do {\n let clamped = delta & 31;\n delta >>>= 5;\n if (delta > 0) clamped |= 32;\n builder.write(intToChar[clamped]);\n } while (delta > 0);\n return num;\n}\nfunction hasMoreVlq(reader, max) {\n if (reader.pos >= max) return false;\n return reader.peek() !== comma;\n}\n\n// src/strings.ts\nvar bufLength = 1024 * 16;\nvar td = typeof TextDecoder !== \"undefined\" ? /* @__PURE__ */ new TextDecoder() : typeof Buffer !== \"undefined\" ? {\n decode(buf) {\n const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);\n return out.toString();\n }\n} : {\n decode(buf) {\n let out = \"\";\n for (let i = 0; i < buf.length; i++) {\n out += String.fromCharCode(buf[i]);\n }\n return out;\n }\n};\nvar StringWriter = class {\n constructor() {\n this.pos = 0;\n this.out = \"\";\n this.buffer = new Uint8Array(bufLength);\n }\n write(v) {\n const { buffer } = this;\n buffer[this.pos++] = v;\n if (this.pos === bufLength) {\n this.out += td.decode(buffer);\n this.pos = 0;\n }\n }\n flush() {\n const { buffer, out, pos } = this;\n return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;\n }\n};\nvar StringReader = class {\n constructor(buffer) {\n this.pos = 0;\n this.buffer = buffer;\n }\n next() {\n return this.buffer.charCodeAt(this.pos++);\n }\n peek() {\n return this.buffer.charCodeAt(this.pos);\n }\n indexOf(char) {\n const { buffer, pos } = this;\n const idx = buffer.indexOf(char, pos);\n return idx === -1 ? 
buffer.length : idx;\n }\n};\n\n// src/scopes.ts\nvar EMPTY = [];\nfunction decodeOriginalScopes(input) {\n const { length } = input;\n const reader = new StringReader(input);\n const scopes = [];\n const stack = [];\n let line = 0;\n for (; reader.pos < length; reader.pos++) {\n line = decodeInteger(reader, line);\n const column = decodeInteger(reader, 0);\n if (!hasMoreVlq(reader, length)) {\n const last = stack.pop();\n last[2] = line;\n last[3] = column;\n continue;\n }\n const kind = decodeInteger(reader, 0);\n const fields = decodeInteger(reader, 0);\n const hasName = fields & 1;\n const scope = hasName ? [line, column, 0, 0, kind, decodeInteger(reader, 0)] : [line, column, 0, 0, kind];\n let vars = EMPTY;\n if (hasMoreVlq(reader, length)) {\n vars = [];\n do {\n const varsIndex = decodeInteger(reader, 0);\n vars.push(varsIndex);\n } while (hasMoreVlq(reader, length));\n }\n scope.vars = vars;\n scopes.push(scope);\n stack.push(scope);\n }\n return scopes;\n}\nfunction encodeOriginalScopes(scopes) {\n const writer = new StringWriter();\n for (let i = 0; i < scopes.length; ) {\n i = _encodeOriginalScopes(scopes, i, writer, [0]);\n }\n return writer.flush();\n}\nfunction _encodeOriginalScopes(scopes, index, writer, state) {\n const scope = scopes[index];\n const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, 4: kind, vars } = scope;\n if (index > 0) writer.write(comma);\n state[0] = encodeInteger(writer, startLine, state[0]);\n encodeInteger(writer, startColumn, 0);\n encodeInteger(writer, kind, 0);\n const fields = scope.length === 6 ? 1 : 0;\n encodeInteger(writer, fields, 0);\n if (scope.length === 6) encodeInteger(writer, scope[5], 0);\n for (const v of vars) {\n encodeInteger(writer, v, 0);\n }\n for (index++; index < scopes.length; ) {\n const next = scopes[index];\n const { 0: l, 1: c } = next;\n if (l > endLine || l === endLine && c >= endColumn) {\n break;\n }\n index = _encodeOriginalScopes(scopes, index, writer, state);\n }\n writer.write(comma);\n state[0] = encodeInteger(writer, endLine, state[0]);\n encodeInteger(writer, endColumn, 0);\n return index;\n}\nfunction decodeGeneratedRanges(input) {\n const { length } = input;\n const reader = new StringReader(input);\n const ranges = [];\n const stack = [];\n let genLine = 0;\n let definitionSourcesIndex = 0;\n let definitionScopeIndex = 0;\n let callsiteSourcesIndex = 0;\n let callsiteLine = 0;\n let callsiteColumn = 0;\n let bindingLine = 0;\n let bindingColumn = 0;\n do {\n const semi = reader.indexOf(\";\");\n let genColumn = 0;\n for (; reader.pos < semi; reader.pos++) {\n genColumn = decodeInteger(reader, genColumn);\n if (!hasMoreVlq(reader, semi)) {\n const last = stack.pop();\n last[2] = genLine;\n last[3] = genColumn;\n continue;\n }\n const fields = decodeInteger(reader, 0);\n const hasDefinition = fields & 1;\n const hasCallsite = fields & 2;\n const hasScope = fields & 4;\n let callsite = null;\n let bindings = EMPTY;\n let range;\n if (hasDefinition) {\n const defSourcesIndex = decodeInteger(reader, definitionSourcesIndex);\n definitionScopeIndex = decodeInteger(\n reader,\n definitionSourcesIndex === defSourcesIndex ? 
definitionScopeIndex : 0\n );\n definitionSourcesIndex = defSourcesIndex;\n range = [genLine, genColumn, 0, 0, defSourcesIndex, definitionScopeIndex];\n } else {\n range = [genLine, genColumn, 0, 0];\n }\n range.isScope = !!hasScope;\n if (hasCallsite) {\n const prevCsi = callsiteSourcesIndex;\n const prevLine = callsiteLine;\n callsiteSourcesIndex = decodeInteger(reader, callsiteSourcesIndex);\n const sameSource = prevCsi === callsiteSourcesIndex;\n callsiteLine = decodeInteger(reader, sameSource ? callsiteLine : 0);\n callsiteColumn = decodeInteger(\n reader,\n sameSource && prevLine === callsiteLine ? callsiteColumn : 0\n );\n callsite = [callsiteSourcesIndex, callsiteLine, callsiteColumn];\n }\n range.callsite = callsite;\n if (hasMoreVlq(reader, semi)) {\n bindings = [];\n do {\n bindingLine = genLine;\n bindingColumn = genColumn;\n const expressionsCount = decodeInteger(reader, 0);\n let expressionRanges;\n if (expressionsCount < -1) {\n expressionRanges = [[decodeInteger(reader, 0)]];\n for (let i = -1; i > expressionsCount; i--) {\n const prevBl = bindingLine;\n bindingLine = decodeInteger(reader, bindingLine);\n bindingColumn = decodeInteger(reader, bindingLine === prevBl ? bindingColumn : 0);\n const expression = decodeInteger(reader, 0);\n expressionRanges.push([expression, bindingLine, bindingColumn]);\n }\n } else {\n expressionRanges = [[expressionsCount]];\n }\n bindings.push(expressionRanges);\n } while (hasMoreVlq(reader, semi));\n }\n range.bindings = bindings;\n ranges.push(range);\n stack.push(range);\n }\n genLine++;\n reader.pos = semi + 1;\n } while (reader.pos < length);\n return ranges;\n}\nfunction encodeGeneratedRanges(ranges) {\n if (ranges.length === 0) return \"\";\n const writer = new StringWriter();\n for (let i = 0; i < ranges.length; ) {\n i = _encodeGeneratedRanges(ranges, i, writer, [0, 0, 0, 0, 0, 0, 0]);\n }\n return writer.flush();\n}\nfunction _encodeGeneratedRanges(ranges, index, writer, state) {\n const range = ranges[index];\n const {\n 0: startLine,\n 1: startColumn,\n 2: endLine,\n 3: endColumn,\n isScope,\n callsite,\n bindings\n } = range;\n if (state[0] < startLine) {\n catchupLine(writer, state[0], startLine);\n state[0] = startLine;\n state[1] = 0;\n } else if (index > 0) {\n writer.write(comma);\n }\n state[1] = encodeInteger(writer, range[1], state[1]);\n const fields = (range.length === 6 ? 1 : 0) | (callsite ? 2 : 0) | (isScope ? 
4 : 0);\n encodeInteger(writer, fields, 0);\n if (range.length === 6) {\n const { 4: sourcesIndex, 5: scopesIndex } = range;\n if (sourcesIndex !== state[2]) {\n state[3] = 0;\n }\n state[2] = encodeInteger(writer, sourcesIndex, state[2]);\n state[3] = encodeInteger(writer, scopesIndex, state[3]);\n }\n if (callsite) {\n const { 0: sourcesIndex, 1: callLine, 2: callColumn } = range.callsite;\n if (sourcesIndex !== state[4]) {\n state[5] = 0;\n state[6] = 0;\n } else if (callLine !== state[5]) {\n state[6] = 0;\n }\n state[4] = encodeInteger(writer, sourcesIndex, state[4]);\n state[5] = encodeInteger(writer, callLine, state[5]);\n state[6] = encodeInteger(writer, callColumn, state[6]);\n }\n if (bindings) {\n for (const binding of bindings) {\n if (binding.length > 1) encodeInteger(writer, -binding.length, 0);\n const expression = binding[0][0];\n encodeInteger(writer, expression, 0);\n let bindingStartLine = startLine;\n let bindingStartColumn = startColumn;\n for (let i = 1; i < binding.length; i++) {\n const expRange = binding[i];\n bindingStartLine = encodeInteger(writer, expRange[1], bindingStartLine);\n bindingStartColumn = encodeInteger(writer, expRange[2], bindingStartColumn);\n encodeInteger(writer, expRange[0], 0);\n }\n }\n }\n for (index++; index < ranges.length; ) {\n const next = ranges[index];\n const { 0: l, 1: c } = next;\n if (l > endLine || l === endLine && c >= endColumn) {\n break;\n }\n index = _encodeGeneratedRanges(ranges, index, writer, state);\n }\n if (state[0] < endLine) {\n catchupLine(writer, state[0], endLine);\n state[0] = endLine;\n state[1] = 0;\n } else {\n writer.write(comma);\n }\n state[1] = encodeInteger(writer, endColumn, state[1]);\n return index;\n}\nfunction catchupLine(writer, lastLine, line) {\n do {\n writer.write(semicolon);\n } while (++lastLine < line);\n}\n\n// src/sourcemap-codec.ts\nfunction decode(mappings) {\n const { length } = mappings;\n const reader = new StringReader(mappings);\n const decoded = [];\n let genColumn = 0;\n let sourcesIndex = 0;\n let sourceLine = 0;\n let sourceColumn = 0;\n let namesIndex = 0;\n do {\n const semi = reader.indexOf(\";\");\n const line = [];\n let sorted = true;\n let lastCol = 0;\n genColumn = 0;\n while (reader.pos < semi) {\n let seg;\n genColumn = decodeInteger(reader, genColumn);\n if (genColumn < lastCol) sorted = false;\n lastCol = genColumn;\n if (hasMoreVlq(reader, semi)) {\n sourcesIndex = decodeInteger(reader, sourcesIndex);\n sourceLine = decodeInteger(reader, sourceLine);\n sourceColumn = decodeInteger(reader, sourceColumn);\n if (hasMoreVlq(reader, semi)) {\n namesIndex = decodeInteger(reader, namesIndex);\n seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];\n } else {\n seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];\n }\n } else {\n seg = [genColumn];\n }\n line.push(seg);\n reader.pos++;\n }\n if (!sorted) sort(line);\n decoded.push(line);\n reader.pos = semi + 1;\n } while (reader.pos <= length);\n return decoded;\n}\nfunction sort(line) {\n line.sort(sortComparator);\n}\nfunction sortComparator(a, b) {\n return a[0] - b[0];\n}\nfunction encode(decoded) {\n const writer = new StringWriter();\n let sourcesIndex = 0;\n let sourceLine = 0;\n let sourceColumn = 0;\n let namesIndex = 0;\n for (let i = 0; i < decoded.length; i++) {\n const line = decoded[i];\n if (i > 0) writer.write(semicolon);\n if (line.length === 0) continue;\n let genColumn = 0;\n for (let j = 0; j < line.length; j++) {\n const segment = line[j];\n if (j > 0) writer.write(comma);\n 
genColumn = encodeInteger(writer, segment[0], genColumn);\n if (segment.length === 1) continue;\n sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);\n sourceLine = encodeInteger(writer, segment[2], sourceLine);\n sourceColumn = encodeInteger(writer, segment[3], sourceColumn);\n if (segment.length === 4) continue;\n namesIndex = encodeInteger(writer, segment[4], namesIndex);\n }\n }\n return writer.flush();\n}\nexport {\n decode,\n decodeGeneratedRanges,\n decodeOriginalScopes,\n encode,\n encodeGeneratedRanges,\n encodeOriginalScopes\n};\n//# sourceMappingURL=sourcemap-codec.mjs.map\n","// Matches the scheme of a URL, eg \"http://\"\nconst schemeRegex = /^[\\w+.-]+:\\/\\//;\n/**\n * Matches the parts of a URL:\n * 1. Scheme, including \":\", guaranteed.\n * 2. User/password, including \"@\", optional.\n * 3. Host, guaranteed.\n * 4. Port, including \":\", optional.\n * 5. Path, including \"/\", optional.\n * 6. Query, including \"?\", optional.\n * 7. Hash, including \"#\", optional.\n */\nconst urlRegex = /^([\\w+.-]+:)\\/\\/([^@/#?]*@)?([^:/#?]*)(:\\d+)?(\\/[^#?]*)?(\\?[^#]*)?(#.*)?/;\n/**\n * File URLs are weird. They dont' need the regular `//` in the scheme, they may or may not start\n * with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).\n *\n * 1. Host, optional.\n * 2. Path, which may include \"/\", guaranteed.\n * 3. Query, including \"?\", optional.\n * 4. Hash, including \"#\", optional.\n */\nconst fileRegex = /^file:(?:\\/\\/((?![a-z]:)[^/#?]*)?)?(\\/?[^#?]*)(\\?[^#]*)?(#.*)?/i;\nfunction isAbsoluteUrl(input) {\n return schemeRegex.test(input);\n}\nfunction isSchemeRelativeUrl(input) {\n return input.startsWith('//');\n}\nfunction isAbsolutePath(input) {\n return input.startsWith('/');\n}\nfunction isFileUrl(input) {\n return input.startsWith('file:');\n}\nfunction isRelative(input) {\n return /^[.?#]/.test(input);\n}\nfunction parseAbsoluteUrl(input) {\n const match = urlRegex.exec(input);\n return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');\n}\nfunction parseFileUrl(input) {\n const match = fileRegex.exec(input);\n const path = match[2];\n return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');\n}\nfunction makeUrl(scheme, user, host, port, path, query, hash) {\n return {\n scheme,\n user,\n host,\n port,\n path,\n query,\n hash,\n type: 7 /* Absolute */,\n };\n}\nfunction parseUrl(input) {\n if (isSchemeRelativeUrl(input)) {\n const url = parseAbsoluteUrl('http:' + input);\n url.scheme = '';\n url.type = 6 /* SchemeRelative */;\n return url;\n }\n if (isAbsolutePath(input)) {\n const url = parseAbsoluteUrl('http://foo.com' + input);\n url.scheme = '';\n url.host = '';\n url.type = 5 /* AbsolutePath */;\n return url;\n }\n if (isFileUrl(input))\n return parseFileUrl(input);\n if (isAbsoluteUrl(input))\n return parseAbsoluteUrl(input);\n const url = parseAbsoluteUrl('http://foo.com/' + input);\n url.scheme = '';\n url.host = '';\n url.type = input\n ? input.startsWith('?')\n ? 3 /* Query */\n : input.startsWith('#')\n ? 2 /* Hash */\n : 4 /* RelativePath */\n : 1 /* Empty */;\n return url;\n}\nfunction stripPathFilename(path) {\n // If a path ends with a parent directory \"..\", then it's a relative path with excess parent\n // paths. 
It's not a file, so we can't strip it.\n if (path.endsWith('/..'))\n return path;\n const index = path.lastIndexOf('/');\n return path.slice(0, index + 1);\n}\nfunction mergePaths(url, base) {\n normalizePath(base, base.type);\n // If the path is just a \"/\", then it was an empty path to begin with (remember, we're a relative\n // path).\n if (url.path === '/') {\n url.path = base.path;\n }\n else {\n // Resolution happens relative to the base path's directory, not the file.\n url.path = stripPathFilename(base.path) + url.path;\n }\n}\n/**\n * The path can have empty directories \"//\", unneeded parents \"foo/..\", or current directory\n * \"foo/.\". We need to normalize to a standard representation.\n */\nfunction normalizePath(url, type) {\n const rel = type <= 4 /* RelativePath */;\n const pieces = url.path.split('/');\n // We need to preserve the first piece always, so that we output a leading slash. The item at\n // pieces[0] is an empty string.\n let pointer = 1;\n // Positive is the number of real directories we've output, used for popping a parent directory.\n // Eg, \"foo/bar/..\" will have a positive 2, and we can decrement to be left with just \"foo\".\n let positive = 0;\n // We need to keep a trailing slash if we encounter an empty directory (eg, splitting \"foo/\" will\n // generate `[\"foo\", \"\"]` pieces). And, if we pop a parent directory. But once we encounter a\n // real directory, we won't need to append, unless the other conditions happen again.\n let addTrailingSlash = false;\n for (let i = 1; i < pieces.length; i++) {\n const piece = pieces[i];\n // An empty directory, could be a trailing slash, or just a double \"//\" in the path.\n if (!piece) {\n addTrailingSlash = true;\n continue;\n }\n // If we encounter a real directory, then we don't need to append anymore.\n addTrailingSlash = false;\n // A current directory, which we can always drop.\n if (piece === '.')\n continue;\n // A parent directory, we need to see if there are any real directories we can pop. Else, we\n // have an excess of parents, and we'll need to keep the \"..\".\n if (piece === '..') {\n if (positive) {\n addTrailingSlash = true;\n positive--;\n pointer--;\n }\n else if (rel) {\n // If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute\n // URL, protocol relative URL, or an absolute path, we don't need to keep excess.\n pieces[pointer++] = piece;\n }\n continue;\n }\n // We've encountered a real directory. 
Move it to the next insertion pointer, which accounts for\n // any popped or dropped directories.\n pieces[pointer++] = piece;\n positive++;\n }\n let path = '';\n for (let i = 1; i < pointer; i++) {\n path += '/' + pieces[i];\n }\n if (!path || (addTrailingSlash && !path.endsWith('/..'))) {\n path += '/';\n }\n url.path = path;\n}\n/**\n * Attempts to resolve `input` URL/path relative to `base`.\n */\nfunction resolve(input, base) {\n if (!input && !base)\n return '';\n const url = parseUrl(input);\n let inputType = url.type;\n if (base && inputType !== 7 /* Absolute */) {\n const baseUrl = parseUrl(base);\n const baseType = baseUrl.type;\n switch (inputType) {\n case 1 /* Empty */:\n url.hash = baseUrl.hash;\n // fall through\n case 2 /* Hash */:\n url.query = baseUrl.query;\n // fall through\n case 3 /* Query */:\n case 4 /* RelativePath */:\n mergePaths(url, baseUrl);\n // fall through\n case 5 /* AbsolutePath */:\n // The host, user, and port are joined, you can't copy one without the others.\n url.user = baseUrl.user;\n url.host = baseUrl.host;\n url.port = baseUrl.port;\n // fall through\n case 6 /* SchemeRelative */:\n // The input doesn't have a schema at least, so we need to copy at least that over.\n url.scheme = baseUrl.scheme;\n }\n if (baseType > inputType)\n inputType = baseType;\n }\n normalizePath(url, inputType);\n const queryHash = url.query + url.hash;\n switch (inputType) {\n // This is impossible, because of the empty checks at the start of the function.\n // case UrlType.Empty:\n case 2 /* Hash */:\n case 3 /* Query */:\n return queryHash;\n case 4 /* RelativePath */: {\n // The first char is always a \"/\", and we need it to be relative.\n const path = url.path.slice(1);\n if (!path)\n return queryHash || '.';\n if (isRelative(base || input) && !isRelative(path)) {\n // If base started with a leading \".\", or there is no base and input started with a \".\",\n // then we need to ensure that the relative path starts with a \".\". We don't know if\n // relative starts with a \"..\", though, so check before prepending.\n return './' + path + queryHash;\n }\n return path + queryHash;\n }\n case 5 /* AbsolutePath */:\n return url.path + queryHash;\n default:\n return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;\n }\n}\n\nexport { resolve as default };\n//# sourceMappingURL=resolve-uri.mjs.map\n","// src/trace-mapping.ts\nimport { encode, decode } from \"@jridgewell/sourcemap-codec\";\n\n// src/resolve.ts\nimport resolveUri from \"@jridgewell/resolve-uri\";\n\n// src/strip-filename.ts\nfunction stripFilename(path) {\n if (!path) return \"\";\n const index = path.lastIndexOf(\"/\");\n return path.slice(0, index + 1);\n}\n\n// src/resolve.ts\nfunction resolver(mapUrl, sourceRoot) {\n const from = stripFilename(mapUrl);\n const prefix = sourceRoot ? 
sourceRoot + \"/\" : \"\";\n return (source) => resolveUri(prefix + (source || \"\"), from);\n}\n\n// src/sourcemap-segment.ts\nvar COLUMN = 0;\nvar SOURCES_INDEX = 1;\nvar SOURCE_LINE = 2;\nvar SOURCE_COLUMN = 3;\nvar NAMES_INDEX = 4;\nvar REV_GENERATED_LINE = 1;\nvar REV_GENERATED_COLUMN = 2;\n\n// src/sort.ts\nfunction maybeSort(mappings, owned) {\n const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);\n if (unsortedIndex === mappings.length) return mappings;\n if (!owned) mappings = mappings.slice();\n for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {\n mappings[i] = sortSegments(mappings[i], owned);\n }\n return mappings;\n}\nfunction nextUnsortedSegmentLine(mappings, start) {\n for (let i = start; i < mappings.length; i++) {\n if (!isSorted(mappings[i])) return i;\n }\n return mappings.length;\n}\nfunction isSorted(line) {\n for (let j = 1; j < line.length; j++) {\n if (line[j][COLUMN] < line[j - 1][COLUMN]) {\n return false;\n }\n }\n return true;\n}\nfunction sortSegments(line, owned) {\n if (!owned) line = line.slice();\n return line.sort(sortComparator);\n}\nfunction sortComparator(a, b) {\n return a[COLUMN] - b[COLUMN];\n}\n\n// src/binary-search.ts\nvar found = false;\nfunction binarySearch(haystack, needle, low, high) {\n while (low <= high) {\n const mid = low + (high - low >> 1);\n const cmp = haystack[mid][COLUMN] - needle;\n if (cmp === 0) {\n found = true;\n return mid;\n }\n if (cmp < 0) {\n low = mid + 1;\n } else {\n high = mid - 1;\n }\n }\n found = false;\n return low - 1;\n}\nfunction upperBound(haystack, needle, index) {\n for (let i = index + 1; i < haystack.length; index = i++) {\n if (haystack[i][COLUMN] !== needle) break;\n }\n return index;\n}\nfunction lowerBound(haystack, needle, index) {\n for (let i = index - 1; i >= 0; index = i--) {\n if (haystack[i][COLUMN] !== needle) break;\n }\n return index;\n}\nfunction memoizedState() {\n return {\n lastKey: -1,\n lastNeedle: -1,\n lastIndex: -1\n };\n}\nfunction memoizedBinarySearch(haystack, needle, state, key) {\n const { lastKey, lastNeedle, lastIndex } = state;\n let low = 0;\n let high = haystack.length - 1;\n if (key === lastKey) {\n if (needle === lastNeedle) {\n found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;\n return lastIndex;\n }\n if (needle >= lastNeedle) {\n low = lastIndex === -1 ? 
0 : lastIndex;\n } else {\n high = lastIndex;\n }\n }\n state.lastKey = key;\n state.lastNeedle = needle;\n return state.lastIndex = binarySearch(haystack, needle, low, high);\n}\n\n// src/by-source.ts\nfunction buildBySources(decoded, memos) {\n const sources = memos.map(buildNullArray);\n for (let i = 0; i < decoded.length; i++) {\n const line = decoded[i];\n for (let j = 0; j < line.length; j++) {\n const seg = line[j];\n if (seg.length === 1) continue;\n const sourceIndex2 = seg[SOURCES_INDEX];\n const sourceLine = seg[SOURCE_LINE];\n const sourceColumn = seg[SOURCE_COLUMN];\n const originalSource = sources[sourceIndex2];\n const originalLine = originalSource[sourceLine] || (originalSource[sourceLine] = []);\n const memo = memos[sourceIndex2];\n let index = upperBound(\n originalLine,\n sourceColumn,\n memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine)\n );\n memo.lastIndex = ++index;\n insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);\n }\n }\n return sources;\n}\nfunction insert(array, index, value) {\n for (let i = array.length; i > index; i--) {\n array[i] = array[i - 1];\n }\n array[index] = value;\n}\nfunction buildNullArray() {\n return { __proto__: null };\n}\n\n// src/types.ts\nfunction parse(map) {\n return typeof map === \"string\" ? JSON.parse(map) : map;\n}\n\n// src/flatten-map.ts\nvar FlattenMap = function(map, mapUrl) {\n const parsed = parse(map);\n if (!(\"sections\" in parsed)) {\n return new TraceMap(parsed, mapUrl);\n }\n const mappings = [];\n const sources = [];\n const sourcesContent = [];\n const names = [];\n const ignoreList = [];\n recurse(\n parsed,\n mapUrl,\n mappings,\n sources,\n sourcesContent,\n names,\n ignoreList,\n 0,\n 0,\n Infinity,\n Infinity\n );\n const joined = {\n version: 3,\n file: parsed.file,\n names,\n sources,\n sourcesContent,\n mappings,\n ignoreList\n };\n return presortedDecodedMap(joined);\n};\nfunction recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {\n const { sections } = input;\n for (let i = 0; i < sections.length; i++) {\n const { map, offset } = sections[i];\n let sl = stopLine;\n let sc = stopColumn;\n if (i + 1 < sections.length) {\n const nextOffset = sections[i + 1].offset;\n sl = Math.min(stopLine, lineOffset + nextOffset.line);\n if (sl === stopLine) {\n sc = Math.min(stopColumn, columnOffset + nextOffset.column);\n } else if (sl < stopLine) {\n sc = columnOffset + nextOffset.column;\n }\n }\n addSection(\n map,\n mapUrl,\n mappings,\n sources,\n sourcesContent,\n names,\n ignoreList,\n lineOffset + offset.line,\n columnOffset + offset.column,\n sl,\n sc\n );\n }\n}\nfunction addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {\n const parsed = parse(input);\n if (\"sections\" in parsed) return recurse(...arguments);\n const map = new TraceMap(parsed, mapUrl);\n const sourcesOffset = sources.length;\n const namesOffset = names.length;\n const decoded = decodedMappings(map);\n const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;\n append(sources, resolvedSources);\n append(names, map.names);\n if (contents) append(sourcesContent, contents);\n else for (let i = 0; i < resolvedSources.length; i++) sourcesContent.push(null);\n if (ignores) for (let i = 0; i < ignores.length; i++) ignoreList.push(ignores[i] + sourcesOffset);\n for (let i = 0; i < decoded.length; i++) {\n const lineI = lineOffset + i;\n if (lineI > 
stopLine) return;\n const out = getLine(mappings, lineI);\n const cOffset = i === 0 ? columnOffset : 0;\n const line = decoded[i];\n for (let j = 0; j < line.length; j++) {\n const seg = line[j];\n const column = cOffset + seg[COLUMN];\n if (lineI === stopLine && column >= stopColumn) return;\n if (seg.length === 1) {\n out.push([column]);\n continue;\n }\n const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];\n const sourceLine = seg[SOURCE_LINE];\n const sourceColumn = seg[SOURCE_COLUMN];\n out.push(\n seg.length === 4 ? [column, sourcesIndex, sourceLine, sourceColumn] : [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]\n );\n }\n }\n}\nfunction append(arr, other) {\n for (let i = 0; i < other.length; i++) arr.push(other[i]);\n}\nfunction getLine(arr, index) {\n for (let i = arr.length; i <= index; i++) arr[i] = [];\n return arr[index];\n}\n\n// src/trace-mapping.ts\nvar LINE_GTR_ZERO = \"`line` must be greater than 0 (lines start at line 1)\";\nvar COL_GTR_EQ_ZERO = \"`column` must be greater than or equal to 0 (columns start at column 0)\";\nvar LEAST_UPPER_BOUND = -1;\nvar GREATEST_LOWER_BOUND = 1;\nvar TraceMap = class {\n constructor(map, mapUrl) {\n const isString = typeof map === \"string\";\n if (!isString && map._decodedMemo) return map;\n const parsed = parse(map);\n const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;\n this.version = version;\n this.file = file;\n this.names = names || [];\n this.sourceRoot = sourceRoot;\n this.sources = sources;\n this.sourcesContent = sourcesContent;\n this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || void 0;\n const resolve = resolver(mapUrl, sourceRoot);\n this.resolvedSources = sources.map(resolve);\n const { mappings } = parsed;\n if (typeof mappings === \"string\") {\n this._encoded = mappings;\n this._decoded = void 0;\n } else if (Array.isArray(mappings)) {\n this._encoded = void 0;\n this._decoded = maybeSort(mappings, isString);\n } else if (parsed.sections) {\n throw new Error(`TraceMap passed sectioned source map, please use FlattenMap export instead`);\n } else {\n throw new Error(`invalid source map: ${JSON.stringify(parsed)}`);\n }\n this._decodedMemo = memoizedState();\n this._bySources = void 0;\n this._bySourceMemos = void 0;\n }\n};\nfunction cast(map) {\n return map;\n}\nfunction encodedMappings(map) {\n var _a, _b;\n return (_b = (_a = cast(map))._encoded) != null ? _b : _a._encoded = encode(cast(map)._decoded);\n}\nfunction decodedMappings(map) {\n var _a;\n return (_a = cast(map))._decoded || (_a._decoded = decode(cast(map)._encoded));\n}\nfunction traceSegment(map, line, column) {\n const decoded = decodedMappings(map);\n if (line >= decoded.length) return null;\n const segments = decoded[line];\n const index = traceSegmentInternal(\n segments,\n cast(map)._decodedMemo,\n line,\n column,\n GREATEST_LOWER_BOUND\n );\n return index === -1 ? 
null : segments[index];\n}\nfunction originalPositionFor(map, needle) {\n let { line, column, bias } = needle;\n line--;\n if (line < 0) throw new Error(LINE_GTR_ZERO);\n if (column < 0) throw new Error(COL_GTR_EQ_ZERO);\n const decoded = decodedMappings(map);\n if (line >= decoded.length) return OMapping(null, null, null, null);\n const segments = decoded[line];\n const index = traceSegmentInternal(\n segments,\n cast(map)._decodedMemo,\n line,\n column,\n bias || GREATEST_LOWER_BOUND\n );\n if (index === -1) return OMapping(null, null, null, null);\n const segment = segments[index];\n if (segment.length === 1) return OMapping(null, null, null, null);\n const { names, resolvedSources } = map;\n return OMapping(\n resolvedSources[segment[SOURCES_INDEX]],\n segment[SOURCE_LINE] + 1,\n segment[SOURCE_COLUMN],\n segment.length === 5 ? names[segment[NAMES_INDEX]] : null\n );\n}\nfunction generatedPositionFor(map, needle) {\n const { source, line, column, bias } = needle;\n return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);\n}\nfunction allGeneratedPositionsFor(map, needle) {\n const { source, line, column, bias } = needle;\n return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);\n}\nfunction eachMapping(map, cb) {\n const decoded = decodedMappings(map);\n const { names, resolvedSources } = map;\n for (let i = 0; i < decoded.length; i++) {\n const line = decoded[i];\n for (let j = 0; j < line.length; j++) {\n const seg = line[j];\n const generatedLine = i + 1;\n const generatedColumn = seg[0];\n let source = null;\n let originalLine = null;\n let originalColumn = null;\n let name = null;\n if (seg.length !== 1) {\n source = resolvedSources[seg[1]];\n originalLine = seg[2] + 1;\n originalColumn = seg[3];\n }\n if (seg.length === 5) name = names[seg[4]];\n cb({\n generatedLine,\n generatedColumn,\n source,\n originalLine,\n originalColumn,\n name\n });\n }\n }\n}\nfunction sourceIndex(map, source) {\n const { sources, resolvedSources } = map;\n let index = sources.indexOf(source);\n if (index === -1) index = resolvedSources.indexOf(source);\n return index;\n}\nfunction sourceContentFor(map, source) {\n const { sourcesContent } = map;\n if (sourcesContent == null) return null;\n const index = sourceIndex(map, source);\n return index === -1 ? null : sourcesContent[index];\n}\nfunction isIgnored(map, source) {\n const { ignoreList } = map;\n if (ignoreList == null) return false;\n const index = sourceIndex(map, source);\n return index === -1 ? false : ignoreList.includes(index);\n}\nfunction presortedDecodedMap(map, mapUrl) {\n const tracer = new TraceMap(clone(map, []), mapUrl);\n cast(tracer)._decoded = map.mappings;\n return tracer;\n}\nfunction decodedMap(map) {\n return clone(map, decodedMappings(map));\n}\nfunction encodedMap(map) {\n return clone(map, encodedMappings(map));\n}\nfunction clone(map, mappings) {\n return {\n version: map.version,\n file: map.file,\n names: map.names,\n sourceRoot: map.sourceRoot,\n sources: map.sources,\n sourcesContent: map.sourcesContent,\n mappings,\n ignoreList: map.ignoreList || map.x_google_ignoreList\n };\n}\nfunction OMapping(source, line, column, name) {\n return { source, line, column, name };\n}\nfunction GMapping(line, column) {\n return { line, column };\n}\nfunction traceSegmentInternal(segments, memo, line, column, bias) {\n let index = memoizedBinarySearch(segments, column, memo, line);\n if (found) {\n index = (bias === LEAST_UPPER_BOUND ? 
upperBound : lowerBound)(segments, column, index);\n } else if (bias === LEAST_UPPER_BOUND) index++;\n if (index === -1 || index === segments.length) return -1;\n return index;\n}\nfunction sliceGeneratedPositions(segments, memo, line, column, bias) {\n let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);\n if (!found && bias === LEAST_UPPER_BOUND) min++;\n if (min === -1 || min === segments.length) return [];\n const matchedColumn = found ? column : segments[min][COLUMN];\n if (!found) min = lowerBound(segments, matchedColumn, min);\n const max = upperBound(segments, matchedColumn, min);\n const result = [];\n for (; min <= max; min++) {\n const segment = segments[min];\n result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));\n }\n return result;\n}\nfunction generatedPosition(map, source, line, column, bias, all) {\n var _a;\n line--;\n if (line < 0) throw new Error(LINE_GTR_ZERO);\n if (column < 0) throw new Error(COL_GTR_EQ_ZERO);\n const { sources, resolvedSources } = map;\n let sourceIndex2 = sources.indexOf(source);\n if (sourceIndex2 === -1) sourceIndex2 = resolvedSources.indexOf(source);\n if (sourceIndex2 === -1) return all ? [] : GMapping(null, null);\n const generated = (_a = cast(map))._bySources || (_a._bySources = buildBySources(\n decodedMappings(map),\n cast(map)._bySourceMemos = sources.map(memoizedState)\n ));\n const segments = generated[sourceIndex2][line];\n if (segments == null) return all ? [] : GMapping(null, null);\n const memo = cast(map)._bySourceMemos[sourceIndex2];\n if (all) return sliceGeneratedPositions(segments, memo, line, column, bias);\n const index = traceSegmentInternal(segments, memo, line, column, bias);\n if (index === -1) return GMapping(null, null);\n const segment = segments[index];\n return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);\n}\nexport {\n FlattenMap as AnyMap,\n FlattenMap,\n GREATEST_LOWER_BOUND,\n LEAST_UPPER_BOUND,\n TraceMap,\n allGeneratedPositionsFor,\n decodedMap,\n decodedMappings,\n eachMapping,\n encodedMap,\n encodedMappings,\n generatedPositionFor,\n isIgnored,\n originalPositionFor,\n presortedDecodedMap,\n sourceContentFor,\n traceSegment\n};\n//# sourceMappingURL=trace-mapping.mjs.map\n","// src/set-array.ts\nvar SetArray = class {\n constructor() {\n this._indexes = { __proto__: null };\n this.array = [];\n }\n};\nfunction cast(set) {\n return set;\n}\nfunction get(setarr, key) {\n return cast(setarr)._indexes[key];\n}\nfunction put(setarr, key) {\n const index = get(setarr, key);\n if (index !== void 0) return index;\n const { array, _indexes: indexes } = cast(setarr);\n const length = array.push(key);\n return indexes[key] = length - 1;\n}\nfunction remove(setarr, key) {\n const index = get(setarr, key);\n if (index === void 0) return;\n const { array, _indexes: indexes } = cast(setarr);\n for (let i = index + 1; i < array.length; i++) {\n const k = array[i];\n array[i - 1] = k;\n indexes[k]--;\n }\n indexes[key] = void 0;\n array.pop();\n}\n\n// src/gen-mapping.ts\nimport {\n encode\n} from \"@jridgewell/sourcemap-codec\";\nimport { TraceMap, decodedMappings } from \"@jridgewell/trace-mapping\";\n\n// src/sourcemap-segment.ts\nvar COLUMN = 0;\nvar SOURCES_INDEX = 1;\nvar SOURCE_LINE = 2;\nvar SOURCE_COLUMN = 3;\nvar NAMES_INDEX = 4;\n\n// src/gen-mapping.ts\nvar NO_NAME = -1;\nvar GenMapping = class {\n constructor({ file, sourceRoot } = {}) {\n this._names = new SetArray();\n this._sources = new SetArray();\n 
this._sourcesContent = [];\n this._mappings = [];\n this.file = file;\n this.sourceRoot = sourceRoot;\n this._ignoreList = new SetArray();\n }\n};\nfunction cast2(map) {\n return map;\n}\nfunction addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {\n return addSegmentInternal(\n false,\n map,\n genLine,\n genColumn,\n source,\n sourceLine,\n sourceColumn,\n name,\n content\n );\n}\nfunction addMapping(map, mapping) {\n return addMappingInternal(false, map, mapping);\n}\nvar maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {\n return addSegmentInternal(\n true,\n map,\n genLine,\n genColumn,\n source,\n sourceLine,\n sourceColumn,\n name,\n content\n );\n};\nvar maybeAddMapping = (map, mapping) => {\n return addMappingInternal(true, map, mapping);\n};\nfunction setSourceContent(map, source, content) {\n const {\n _sources: sources,\n _sourcesContent: sourcesContent\n // _originalScopes: originalScopes,\n } = cast2(map);\n const index = put(sources, source);\n sourcesContent[index] = content;\n}\nfunction setIgnore(map, source, ignore = true) {\n const {\n _sources: sources,\n _sourcesContent: sourcesContent,\n _ignoreList: ignoreList\n // _originalScopes: originalScopes,\n } = cast2(map);\n const index = put(sources, source);\n if (index === sourcesContent.length) sourcesContent[index] = null;\n if (ignore) put(ignoreList, index);\n else remove(ignoreList, index);\n}\nfunction toDecodedMap(map) {\n const {\n _mappings: mappings,\n _sources: sources,\n _sourcesContent: sourcesContent,\n _names: names,\n _ignoreList: ignoreList\n // _originalScopes: originalScopes,\n // _generatedRanges: generatedRanges,\n } = cast2(map);\n removeEmptyFinalLines(mappings);\n return {\n version: 3,\n file: map.file || void 0,\n names: names.array,\n sourceRoot: map.sourceRoot || void 0,\n sources: sources.array,\n sourcesContent,\n mappings,\n // originalScopes,\n // generatedRanges,\n ignoreList: ignoreList.array\n };\n}\nfunction toEncodedMap(map) {\n const decoded = toDecodedMap(map);\n return Object.assign({}, decoded, {\n // originalScopes: decoded.originalScopes.map((os) => encodeOriginalScopes(os)),\n // generatedRanges: encodeGeneratedRanges(decoded.generatedRanges as GeneratedRange[]),\n mappings: encode(decoded.mappings)\n });\n}\nfunction fromMap(input) {\n const map = new TraceMap(input);\n const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });\n putAll(cast2(gen)._names, map.names);\n putAll(cast2(gen)._sources, map.sources);\n cast2(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);\n cast2(gen)._mappings = decodedMappings(map);\n if (map.ignoreList) putAll(cast2(gen)._ignoreList, map.ignoreList);\n return gen;\n}\nfunction allMappings(map) {\n const out = [];\n const { _mappings: mappings, _sources: sources, _names: names } = cast2(map);\n for (let i = 0; i < mappings.length; i++) {\n const line = mappings[i];\n for (let j = 0; j < line.length; j++) {\n const seg = line[j];\n const generated = { line: i + 1, column: seg[COLUMN] };\n let source = void 0;\n let original = void 0;\n let name = void 0;\n if (seg.length !== 1) {\n source = sources.array[seg[SOURCES_INDEX]];\n original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };\n if (seg.length === 5) name = names.array[seg[NAMES_INDEX]];\n }\n out.push({ generated, source, original, name });\n }\n }\n return out;\n}\nfunction addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, 
sourceColumn, name, content) {\n const {\n _mappings: mappings,\n _sources: sources,\n _sourcesContent: sourcesContent,\n _names: names\n // _originalScopes: originalScopes,\n } = cast2(map);\n const line = getIndex(mappings, genLine);\n const index = getColumnIndex(line, genColumn);\n if (!source) {\n if (skipable && skipSourceless(line, index)) return;\n return insert(line, index, [genColumn]);\n }\n assert(sourceLine);\n assert(sourceColumn);\n const sourcesIndex = put(sources, source);\n const namesIndex = name ? put(names, name) : NO_NAME;\n if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content != null ? content : null;\n if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {\n return;\n }\n return insert(\n line,\n index,\n name ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex] : [genColumn, sourcesIndex, sourceLine, sourceColumn]\n );\n}\nfunction assert(_val) {\n}\nfunction getIndex(arr, index) {\n for (let i = arr.length; i <= index; i++) {\n arr[i] = [];\n }\n return arr[index];\n}\nfunction getColumnIndex(line, genColumn) {\n let index = line.length;\n for (let i = index - 1; i >= 0; index = i--) {\n const current = line[i];\n if (genColumn >= current[COLUMN]) break;\n }\n return index;\n}\nfunction insert(array, index, value) {\n for (let i = array.length; i > index; i--) {\n array[i] = array[i - 1];\n }\n array[index] = value;\n}\nfunction removeEmptyFinalLines(mappings) {\n const { length } = mappings;\n let len = length;\n for (let i = len - 1; i >= 0; len = i, i--) {\n if (mappings[i].length > 0) break;\n }\n if (len < length) mappings.length = len;\n}\nfunction putAll(setarr, array) {\n for (let i = 0; i < array.length; i++) put(setarr, array[i]);\n}\nfunction skipSourceless(line, index) {\n if (index === 0) return true;\n const prev = line[index - 1];\n return prev.length === 1;\n}\nfunction skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {\n if (index === 0) return false;\n const prev = line[index - 1];\n if (prev.length === 1) return false;\n return sourcesIndex === prev[SOURCES_INDEX] && sourceLine === prev[SOURCE_LINE] && sourceColumn === prev[SOURCE_COLUMN] && namesIndex === (prev.length === 5 ? 
prev[NAMES_INDEX] : NO_NAME);\n}\nfunction addMappingInternal(skipable, map, mapping) {\n const { generated, source, original, name, content } = mapping;\n if (!source) {\n return addSegmentInternal(\n skipable,\n map,\n generated.line - 1,\n generated.column,\n null,\n null,\n null,\n null,\n null\n );\n }\n assert(original);\n return addSegmentInternal(\n skipable,\n map,\n generated.line - 1,\n generated.column,\n source,\n original.line - 1,\n original.column,\n name,\n content\n );\n}\nexport {\n GenMapping,\n addMapping,\n addSegment,\n allMappings,\n fromMap,\n maybeAddMapping,\n maybeAddSegment,\n setIgnore,\n setSourceContent,\n toDecodedMap,\n toEncodedMap\n};\n//# sourceMappingURL=gen-mapping.mjs.map\n","import { decodedMappings, traceSegment, TraceMap } from '@jridgewell/trace-mapping';\nimport { GenMapping, maybeAddSegment, setSourceContent, setIgnore, toDecodedMap, toEncodedMap } from '@jridgewell/gen-mapping';\n\nconst SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null, false);\nconst EMPTY_SOURCES = [];\nfunction SegmentObject(source, line, column, name, content, ignore) {\n return { source, line, column, name, content, ignore };\n}\nfunction Source(map, sources, source, content, ignore) {\n return {\n map,\n sources,\n source,\n content,\n ignore,\n };\n}\n/**\n * MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes\n * (which may themselves be SourceMapTrees).\n */\nfunction MapSource(map, sources) {\n return Source(map, sources, '', null, false);\n}\n/**\n * A \"leaf\" node in the sourcemap tree, representing an original, unmodified source file. Recursive\n * segment tracing ends at the `OriginalSource`.\n */\nfunction OriginalSource(source, content, ignore) {\n return Source(null, EMPTY_SOURCES, source, content, ignore);\n}\n/**\n * traceMappings is only called on the root level SourceMapTree, and begins the process of\n * resolving each mapping in terms of the original source files.\n */\nfunction traceMappings(tree) {\n // TODO: Eventually support sourceRoot, which has to be removed because the sources are already\n // fully resolved. We'll need to make sources relative to the sourceRoot before adding them.\n const gen = new GenMapping({ file: tree.map.file });\n const { sources: rootSources, map } = tree;\n const rootNames = map.names;\n const rootMappings = decodedMappings(map);\n for (let i = 0; i < rootMappings.length; i++) {\n const segments = rootMappings[i];\n for (let j = 0; j < segments.length; j++) {\n const segment = segments[j];\n const genCol = segment[0];\n let traced = SOURCELESS_MAPPING;\n // 1-length segments only move the current generated column, there's no source information\n // to gather from it.\n if (segment.length !== 1) {\n const source = rootSources[segment[1]];\n traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');\n // If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a\n // respective segment into an original source.\n if (traced == null)\n continue;\n }\n const { column, line, name, content, source, ignore } = traced;\n maybeAddSegment(gen, i, genCol, source, line, column, name);\n if (source && content != null)\n setSourceContent(gen, source, content);\n if (ignore)\n setIgnore(gen, source, true);\n }\n }\n return gen;\n}\n/**\n * originalPositionFor is only called on children SourceMapTrees. 
It recurses down into its own\n * child SourceMapTrees, until we find the original source map.\n */\nfunction originalPositionFor(source, line, column, name) {\n if (!source.map) {\n return SegmentObject(source.source, line, column, name, source.content, source.ignore);\n }\n const segment = traceSegment(source.map, line, column);\n // If we couldn't find a segment, then this doesn't exist in the sourcemap.\n if (segment == null)\n return null;\n // 1-length segments only move the current generated column, there's no source information\n // to gather from it.\n if (segment.length === 1)\n return SOURCELESS_MAPPING;\n return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);\n}\n\nfunction asArray(value) {\n if (Array.isArray(value))\n return value;\n return [value];\n}\n/**\n * Recursively builds a tree structure out of sourcemap files, with each node\n * being either an `OriginalSource` \"leaf\" or a `SourceMapTree` composed of\n * `OriginalSource`s and `SourceMapTree`s.\n *\n * Every sourcemap is composed of a collection of source files and mappings\n * into locations of those source files. When we generate a `SourceMapTree` for\n * the sourcemap, we attempt to load each source file's own sourcemap. If it\n * does not have an associated sourcemap, it is considered an original,\n * unmodified source file.\n */\nfunction buildSourceMapTree(input, loader) {\n const maps = asArray(input).map((m) => new TraceMap(m, ''));\n const map = maps.pop();\n for (let i = 0; i < maps.length; i++) {\n if (maps[i].sources.length > 1) {\n throw new Error(`Transformation map ${i} must have exactly one source file.\\n` +\n 'Did you specify these with the most recent transformation maps first?');\n }\n }\n let tree = build(map, loader, '', 0);\n for (let i = maps.length - 1; i >= 0; i--) {\n tree = MapSource(maps[i], [tree]);\n }\n return tree;\n}\nfunction build(map, loader, importer, importerDepth) {\n const { resolvedSources, sourcesContent, ignoreList } = map;\n const depth = importerDepth + 1;\n const children = resolvedSources.map((sourceFile, i) => {\n // The loading context gives the loader more information about why this file is being loaded\n // (eg, from which importer). It also allows the loader to override the location of the loaded\n // sourcemap/original source, or to override the content in the sourcesContent field if it's\n // an unmodified source file.\n const ctx = {\n importer,\n depth,\n source: sourceFile || '',\n content: undefined,\n ignore: undefined,\n };\n // Use the provided loader callback to retrieve the file's sourcemap.\n // TODO: We should eventually support async loading of sourcemap files.\n const sourceMap = loader(ctx.source, ctx);\n const { source, content, ignore } = ctx;\n // If there is a sourcemap, then we need to recurse into it to load its source files.\n if (sourceMap)\n return build(new TraceMap(sourceMap, source), loader, source, depth);\n // Else, it's an unmodified source file.\n // The contents of this unmodified source file can be overridden via the loader context,\n // allowing it to be explicitly null or a string. If it remains undefined, we fall back to\n // the importing sourcemap's `sourcesContent` field.\n const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;\n const ignored = ignore !== undefined ? ignore : ignoreList ? 
ignoreList.includes(i) : false;\n return OriginalSource(source, sourceContent, ignored);\n });\n return MapSource(map, children);\n}\n\n/**\n * A SourceMap v3 compatible sourcemap, which only includes fields that were\n * provided to it.\n */\nclass SourceMap {\n constructor(map, options) {\n const out = options.decodedMappings ? toDecodedMap(map) : toEncodedMap(map);\n this.version = out.version; // SourceMap spec says this should be first.\n this.file = out.file;\n this.mappings = out.mappings;\n this.names = out.names;\n this.ignoreList = out.ignoreList;\n this.sourceRoot = out.sourceRoot;\n this.sources = out.sources;\n if (!options.excludeContent) {\n this.sourcesContent = out.sourcesContent;\n }\n }\n toString() {\n return JSON.stringify(this);\n }\n}\n\n/**\n * Traces through all the mappings in the root sourcemap, through the sources\n * (and their sourcemaps), all the way back to the original source location.\n *\n * `loader` will be called every time we encounter a source file. If it returns\n * a sourcemap, we will recurse into that sourcemap to continue the trace. If\n * it returns a falsey value, that source file is treated as an original,\n * unmodified source file.\n *\n * Pass `excludeContent` to exclude any self-containing source file content\n * from the output sourcemap.\n *\n * Pass `decodedMappings` to receive a SourceMap with decoded (instead of\n * VLQ encoded) mappings.\n */\nfunction remapping(input, loader, options) {\n const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };\n const tree = buildSourceMapTree(input, loader);\n return new SourceMap(traceMappings(tree), opts);\n}\n\nexport { remapping as default };\n//# sourceMappingURL=remapping.mjs.map\n","/**\n * SWC-based transformer for soda-gql GraphQL code generation.\n *\n * This module provides a TypeScript wrapper around the native Rust transformer.\n */\n\nimport { realpathSync } from \"node:fs\";\nimport { resolve } from \"node:path\";\nimport remapping from \"@ampproject/remapping\";\nimport type { BuilderArtifact } from \"@soda-gql/builder\";\nimport type { ResolvedSodaGqlConfig } from \"@soda-gql/config\";\n\n// The native module will be loaded at runtime via the napi-rs generated loader\nlet nativeModule: NativeModule | null = null;\n\ninterface NativeModule {\n transform(inputJson: string): string;\n SwcTransformer: new (artifactJson: string, configJson: string) => NativeTransformer;\n}\n\ninterface NativeTransformer {\n transform(sourceCode: string, sourcePath: string): string;\n}\n\n/**\n * Plugin error from the SWC transformer.\n * This matches the Rust PluginError structure for consistent error reporting.\n */\nexport type SwcPluginError = {\n /** Always \"PluginError\" for type discrimination */\n readonly type: \"PluginError\";\n /** Error code for programmatic handling (e.g., \"SODA_GQL_METADATA_NOT_FOUND\") */\n readonly code: string;\n /** Human-readable error message */\n readonly message: string;\n /** Stage where the error occurred */\n readonly stage: \"analysis\" | \"transform\";\n /** Source filename if applicable */\n readonly filename?: string;\n /** Canonical ID if applicable */\n readonly canonicalId?: string;\n /** Artifact type if applicable */\n readonly artifactType?: string;\n /** Builder type if applicable */\n readonly builderType?: string;\n /** Argument name if applicable */\n readonly argName?: string;\n};\n\ninterface TransformResult {\n outputCode: string;\n transformed: boolean;\n sourceMap?: string;\n errors?: 
SwcPluginError[];\n}\n\n/**\n * Load the native module.\n * Uses the napi-rs generated loader which handles platform detection.\n */\nconst loadNativeModule = async (): Promise<NativeModule> => {\n if (nativeModule) {\n return nativeModule;\n }\n\n try {\n // Use require() for the napi-rs generated loader (CommonJS)\n const { createRequire } = await import(\"node:module\");\n const require = createRequire(import.meta.url);\n nativeModule = require(\"./native/index.js\") as NativeModule;\n return nativeModule;\n } catch (error) {\n throw new Error(\n \"Failed to load @soda-gql/swc-transformer native module. \" +\n \"Make sure the native module is built for your platform. \" +\n `Run 'bun run build' in the packages/swc-transformer directory. (${error})`,\n );\n }\n};\n\nexport type ModuleFormat = \"esm\" | \"cjs\";\n\nexport type TransformOptions = {\n /** Compiler options for output format */\n compilerOptions?: {\n /** Module format: CommonJS or ESNext */\n module?: \"CommonJS\" | \"ESNext\";\n };\n /** Resolved soda-gql configuration */\n config: ResolvedSodaGqlConfig;\n /** Pre-built artifact from the builder */\n artifact: BuilderArtifact;\n /** Whether to generate source maps */\n sourceMap?: boolean;\n};\n\nexport type TransformInput = {\n /** Source code to transform */\n sourceCode: string;\n /** Path to the source file */\n sourcePath: string;\n /** Input source map from previous transformer (JSON string) */\n inputSourceMap?: string;\n};\n\n/**\n * Normalize path separators to forward slashes (cross-platform).\n * This matches the behavior of @soda-gql/common normalizePath.\n */\nconst normalizePath = (value: string): string => value.replace(/\\\\/g, \"/\");\n\n/**\n * Filter artifact to only include elements for the given source file.\n * This significantly reduces JSON serialization overhead for large codebases.\n *\n * Canonical IDs have the format: \"filepath::astPath\"\n * We filter by matching the filepath prefix.\n */\nconst filterArtifactForFile = (artifact: BuilderArtifact, sourcePath: string): BuilderArtifact => {\n const prefix = `${sourcePath}::`;\n\n const filteredElements: BuilderArtifact[\"elements\"] = {};\n for (const [id, element] of Object.entries(artifact.elements)) {\n if (id.startsWith(prefix)) {\n (filteredElements as Record<string, typeof element>)[id] = element;\n }\n }\n\n return {\n elements: filteredElements,\n report: { stats: { hits: 0, misses: 0, skips: 0 }, durationMs: 0, warnings: [] },\n };\n};\n\n/**\n * Resolve the canonical path to the graphql-system file.\n * Uses realpath to resolve symlinks for accurate comparison.\n */\nconst resolveGraphqlSystemPath = (config: ResolvedSodaGqlConfig): string => {\n const graphqlSystemPath = resolve(config.outdir, \"index.ts\");\n try {\n return normalizePath(realpathSync(graphqlSystemPath));\n } catch {\n // If realpath fails (file doesn't exist yet), fall back to resolved path\n return normalizePath(resolve(graphqlSystemPath));\n }\n};\n\nexport type TransformOutput = {\n /** Whether any transformation was performed */\n transformed: boolean;\n /** The transformed source code (or original if no transformation) */\n sourceCode: string;\n /** Source map JSON, if source map generation was enabled */\n sourceMap?: string;\n /** Errors encountered during transformation (non-fatal) */\n errors: SwcPluginError[];\n};\n\n/**\n * Transformer interface.\n */\nexport interface Transformer {\n transform(input: TransformInput): TransformOutput;\n}\n\n/**\n * Create a transformer instance.\n *\n * @param options - 
Transform options including config and artifact\n * @returns A transformer that can transform source files\n */\nexport const createTransformer = async (options: TransformOptions): Promise<Transformer> => {\n const native = await loadNativeModule();\n\n const isCJS = options.compilerOptions?.module === \"CommonJS\";\n\n // Resolve the graphql-system file path for stubbing\n const graphqlSystemPath = resolveGraphqlSystemPath(options.config);\n\n const configJson = JSON.stringify({\n graphqlSystemAliases: options.config.graphqlSystemAliases,\n isCjs: isCJS,\n graphqlSystemPath,\n sourceMap: options.sourceMap ?? false,\n });\n\n // Store full artifact for per-file filtering\n const fullArtifact = options.artifact;\n\n return {\n transform: ({ sourceCode, sourcePath, inputSourceMap }: TransformInput): TransformOutput => {\n // Resolve to absolute path and normalize for canonical ID consistency\n // This ensures bundlers can pass relative paths safely\n const normalizedPath = normalizePath(resolve(sourcePath));\n\n // Filter artifact to only include elements for this file\n // This significantly reduces JSON serialization overhead for large codebases\n const filteredArtifact = filterArtifactForFile(fullArtifact, normalizedPath);\n const filteredArtifactJson = JSON.stringify(filteredArtifact);\n\n // Create per-file transformer with filtered artifact\n const fileTransformer = new native.SwcTransformer(filteredArtifactJson, configJson);\n const resultJson = fileTransformer.transform(sourceCode, normalizedPath);\n const result: TransformResult = JSON.parse(resultJson);\n\n // Handle source map chaining\n let finalSourceMap: string | undefined;\n if (result.sourceMap) {\n if (inputSourceMap) {\n // Chain source maps: our map -> input map -> original source\n const merged = remapping([JSON.parse(result.sourceMap), JSON.parse(inputSourceMap)], () => null);\n finalSourceMap = JSON.stringify(merged);\n } else {\n finalSourceMap = result.sourceMap;\n }\n }\n\n return {\n transformed: result.transformed,\n sourceCode: result.outputCode,\n sourceMap: finalSourceMap,\n errors: result.errors ?? [],\n };\n },\n };\n};\n\n/**\n * Transform a single source file (one-shot).\n *\n * For transforming multiple files, use createTransformer() to reuse the artifact.\n *\n * @param input - Transform input including source, path, artifact, and config\n * @returns Transform output\n */\nexport const transform = async (\n input: TransformInput & {\n artifact: BuilderArtifact;\n config: ResolvedSodaGqlConfig;\n isCjs?: boolean;\n sourceMap?: boolean;\n },\n): Promise<TransformOutput> => {\n const native = await loadNativeModule();\n\n // Resolve to absolute path and normalize for canonical ID consistency\n // This ensures bundlers can pass relative paths safely\n const normalizedPath = normalizePath(resolve(input.sourcePath));\n\n // Filter artifact to only include elements for this file\n const filteredArtifact = filterArtifactForFile(input.artifact, normalizedPath);\n\n // Resolve the graphql-system file path for stubbing\n const graphqlSystemPath = resolveGraphqlSystemPath(input.config);\n\n const inputJson = JSON.stringify({\n sourceCode: input.sourceCode,\n sourcePath: normalizedPath,\n artifactJson: JSON.stringify(filteredArtifact),\n config: {\n graphqlSystemAliases: input.config.graphqlSystemAliases,\n isCjs: input.isCjs ?? false,\n graphqlSystemPath,\n sourceMap: input.sourceMap ?? 
false,\n },\n });\n\n const resultJson = native.transform(inputJson);\n const result: TransformResult = JSON.parse(resultJson);\n\n // Handle source map chaining\n let finalSourceMap: string | undefined;\n if (result.sourceMap) {\n if (input.inputSourceMap) {\n // Chain source maps: our map -> input map -> original source\n const merged = remapping([JSON.parse(result.sourceMap), JSON.parse(input.inputSourceMap)], () => null);\n finalSourceMap = JSON.stringify(merged);\n } else {\n finalSourceMap = result.sourceMap;\n }\n }\n\n return {\n transformed: result.transformed,\n sourceCode: result.outputCode,\n sourceMap: finalSourceMap,\n errors: result.errors ?? [],\n };\n};\n"],"x_google_ignoreList":[0,1,2,3,4],"mappings":";;;;;AACA,IAAI,QAAQ,IAAI,WAAW,EAAE;AAC7B,IAAI,YAAY,IAAI,WAAW,EAAE;AACjC,IAAI,QAAQ;AACZ,IAAI,YAAY,IAAI,WAAW,GAAG;AAClC,IAAI,YAAY,IAAI,WAAW,IAAI;AACnC,KAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;CACrC,MAAM,IAAI,MAAM,WAAW,EAAE;AAC7B,WAAU,KAAK;AACf,WAAU,KAAK;;AAEjB,SAAS,cAAc,QAAQ,UAAU;CACvC,IAAI,QAAQ;CACZ,IAAI,QAAQ;CACZ,IAAI,UAAU;AACd,IAAG;AAED,YAAU,UADA,OAAO,MAAM;AAEvB,YAAU,UAAU,OAAO;AAC3B,WAAS;UACF,UAAU;CACnB,MAAM,eAAe,QAAQ;AAC7B,YAAW;AACX,KAAI,aACF,SAAQ,cAAc,CAAC;AAEzB,QAAO,WAAW;;AAEpB,SAAS,cAAc,SAAS,KAAK,UAAU;CAC7C,IAAI,QAAQ,MAAM;AAClB,SAAQ,QAAQ,IAAI,CAAC,SAAS,IAAI,IAAI,SAAS;AAC/C,IAAG;EACD,IAAI,UAAU,QAAQ;AACtB,aAAW;AACX,MAAI,QAAQ,EAAG,YAAW;AAC1B,UAAQ,MAAM,UAAU,SAAS;UAC1B,QAAQ;AACjB,QAAO;;AAET,SAAS,WAAW,QAAQ,KAAK;AAC/B,KAAI,OAAO,OAAO,IAAK,QAAO;AAC9B,QAAO,OAAO,MAAM,KAAK;;AAI3B,IAAI,YAAY,OAAO;AACvB,IAAI,KAAK,OAAO,gBAAgB,8BAA8B,IAAI,aAAa,GAAG,OAAO,WAAW,cAAc,EAChH,OAAO,KAAK;AAEV,QADY,OAAO,KAAK,IAAI,QAAQ,IAAI,YAAY,IAAI,WAAW,CACxD,UAAU;GAExB,GAAG,EACF,OAAO,KAAK;CACV,IAAI,MAAM;AACV,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI,QAAQ,IAC9B,QAAO,OAAO,aAAa,IAAI,GAAG;AAEpC,QAAO;GAEV;AACD,IAAI,eAAe,MAAM;CACvB,cAAc;AACZ,OAAK,MAAM;AACX,OAAK,MAAM;AACX,OAAK,SAAS,IAAI,WAAW,UAAU;;CAEzC,MAAM,GAAG;EACP,MAAM,EAAE,WAAW;AACnB,SAAO,KAAK,SAAS;AACrB,MAAI,KAAK,QAAQ,WAAW;AAC1B,QAAK,OAAO,GAAG,OAAO,OAAO;AAC7B,QAAK,MAAM;;;CAGf,QAAQ;EACN,MAAM,EAAE,QAAQ,KAAK,QAAQ;AAC7B,SAAO,MAAM,IAAI,MAAM,GAAG,OAAO,OAAO,SAAS,GAAG,IAAI,CAAC,GAAG;;;AAGhE,IAAI,eAAe,MAAM;CACvB,YAAY,QAAQ;AAClB,OAAK,MAAM;AACX,OAAK,SAAS;;CAEhB,OAAO;AACL,SAAO,KAAK,OAAO,WAAW,KAAK,MAAM;;CAE3C,OAAO;AACL,SAAO,KAAK,OAAO,WAAW,KAAK,IAAI;;CAEzC,QAAQ,MAAM;EACZ,MAAM,EAAE,QAAQ,QAAQ;EACxB,MAAM,MAAM,OAAO,QAAQ,MAAM,IAAI;AACrC,SAAO,QAAQ,KAAK,OAAO,SAAS;;;AAwPxC,SAAS,OAAO,UAAU;CACxB,MAAM,EAAE,WAAW;CACnB,MAAM,SAAS,IAAI,aAAa,SAAS;CACzC,MAAM,UAAU,EAAE;CAClB,IAAI,YAAY;CAChB,IAAI,eAAe;CACnB,IAAI,aAAa;CACjB,IAAI,eAAe;CACnB,IAAI,aAAa;AACjB,IAAG;EACD,MAAM,OAAO,OAAO,QAAQ,IAAI;EAChC,MAAM,OAAO,EAAE;EACf,IAAI,SAAS;EACb,IAAI,UAAU;AACd,cAAY;AACZ,SAAO,OAAO,MAAM,MAAM;GACxB,IAAI;AACJ,eAAY,cAAc,QAAQ,UAAU;AAC5C,OAAI,YAAY,QAAS,UAAS;AAClC,aAAU;AACV,OAAI,WAAW,QAAQ,KAAK,EAAE;AAC5B,mBAAe,cAAc,QAAQ,aAAa;AAClD,iBAAa,cAAc,QAAQ,WAAW;AAC9C,mBAAe,cAAc,QAAQ,aAAa;AAClD,QAAI,WAAW,QAAQ,KAAK,EAAE;AAC5B,kBAAa,cAAc,QAAQ,WAAW;AAC9C,WAAM;MAAC;MAAW;MAAc;MAAY;MAAc;MAAW;UAErE,OAAM;KAAC;KAAW;KAAc;KAAY;KAAa;SAG3D,OAAM,CAAC,UAAU;AAEnB,QAAK,KAAK,IAAI;AACd,UAAO;;AAET,MAAI,CAAC,OAAQ,MAAK,KAAK;AACvB,UAAQ,KAAK,KAAK;AAClB,SAAO,MAAM,OAAO;UACb,OAAO,OAAO;AACvB,QAAO;;AAET,SAAS,KAAK,MAAM;AAClB,MAAK,KAAKA,iBAAe;;AAE3B,SAASA,iBAAe,GAAG,GAAG;AAC5B,QAAO,EAAE,KAAK,EAAE;;AAElB,SAAS,OAAO,SAAS;CACvB,MAAM,SAAS,IAAI,cAAc;CACjC,IAAI,eAAe;CACnB,IAAI,aAAa;CACjB,IAAI,eAAe;CACnB,IAAI,aAAa;AACjB,MAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;EACvC,MAAM,OAAO,QAAQ;AACrB,MAAI,IAAI,EAAG,QAAO,MAAM,UAAU;AAClC,MAAI,KAAK,WAAW,EAAG;EACvB,IAAI,YAAY;AAChB,OAAK,IAAI,IAAI,GAAG
,IAAI,KAAK,QAAQ,KAAK;GACpC,MAAM,UAAU,KAAK;AACrB,OAAI,IAAI,EAAG,QAAO,MAAM,MAAM;AAC9B,eAAY,cAAc,QAAQ,QAAQ,IAAI,UAAU;AACxD,OAAI,QAAQ,WAAW,EAAG;AAC1B,kBAAe,cAAc,QAAQ,QAAQ,IAAI,aAAa;AAC9D,gBAAa,cAAc,QAAQ,QAAQ,IAAI,WAAW;AAC1D,kBAAe,cAAc,QAAQ,QAAQ,IAAI,aAAa;AAC9D,OAAI,QAAQ,WAAW,EAAG;AAC1B,gBAAa,cAAc,QAAQ,QAAQ,IAAI,WAAW;;;AAG9D,QAAO,OAAO,OAAO;;;;;AC3ZvB,MAAM,cAAc;;;;;;;;;;;AAWpB,MAAM,WAAW;;;;;;;;;;AAUjB,MAAM,YAAY;AAClB,SAAS,cAAc,OAAO;AAC1B,QAAO,YAAY,KAAK,MAAM;;AAElC,SAAS,oBAAoB,OAAO;AAChC,QAAO,MAAM,WAAW,KAAK;;AAEjC,SAAS,eAAe,OAAO;AAC3B,QAAO,MAAM,WAAW,IAAI;;AAEhC,SAAS,UAAU,OAAO;AACtB,QAAO,MAAM,WAAW,QAAQ;;AAEpC,SAAS,WAAW,OAAO;AACvB,QAAO,SAAS,KAAK,MAAM;;AAE/B,SAAS,iBAAiB,OAAO;CAC7B,MAAM,QAAQ,SAAS,KAAK,MAAM;AAClC,QAAO,QAAQ,MAAM,IAAI,MAAM,MAAM,IAAI,MAAM,IAAI,MAAM,MAAM,IAAI,MAAM,MAAM,KAAK,MAAM,MAAM,IAAI,MAAM,MAAM,GAAG;;AAEvH,SAAS,aAAa,OAAO;CACzB,MAAM,QAAQ,UAAU,KAAK,MAAM;CACnC,MAAM,OAAO,MAAM;AACnB,QAAO,QAAQ,SAAS,IAAI,MAAM,MAAM,IAAI,IAAI,eAAe,KAAK,GAAG,OAAO,MAAM,MAAM,MAAM,MAAM,IAAI,MAAM,MAAM,GAAG;;AAE7H,SAAS,QAAQ,QAAQ,MAAM,MAAM,MAAM,MAAM,OAAO,MAAM;AAC1D,QAAO;EACH;EACA;EACA;EACA;EACA;EACA;EACA;EACA,MAAM;EACT;;AAEL,SAAS,SAAS,OAAO;AACrB,KAAI,oBAAoB,MAAM,EAAE;EAC5B,MAAMC,QAAM,iBAAiB,UAAU,MAAM;AAC7C,QAAI,SAAS;AACb,QAAI,OAAO;AACX,SAAOA;;AAEX,KAAI,eAAe,MAAM,EAAE;EACvB,MAAMA,QAAM,iBAAiB,mBAAmB,MAAM;AACtD,QAAI,SAAS;AACb,QAAI,OAAO;AACX,QAAI,OAAO;AACX,SAAOA;;AAEX,KAAI,UAAU,MAAM,CAChB,QAAO,aAAa,MAAM;AAC9B,KAAI,cAAc,MAAM,CACpB,QAAO,iBAAiB,MAAM;CAClC,MAAM,MAAM,iBAAiB,oBAAoB,MAAM;AACvD,KAAI,SAAS;AACb,KAAI,OAAO;AACX,KAAI,OAAO,QACL,MAAM,WAAW,IAAI,GACjB,IACA,MAAM,WAAW,IAAI,GACjB,IACA,IACR;AACN,QAAO;;AAEX,SAAS,kBAAkB,MAAM;AAG7B,KAAI,KAAK,SAAS,MAAM,CACpB,QAAO;CACX,MAAM,QAAQ,KAAK,YAAY,IAAI;AACnC,QAAO,KAAK,MAAM,GAAG,QAAQ,EAAE;;AAEnC,SAAS,WAAW,KAAK,MAAM;AAC3B,iBAAc,MAAM,KAAK,KAAK;AAG9B,KAAI,IAAI,SAAS,IACb,KAAI,OAAO,KAAK;KAIhB,KAAI,OAAO,kBAAkB,KAAK,KAAK,GAAG,IAAI;;;;;;AAOtD,SAASC,gBAAc,KAAK,MAAM;CAC9B,MAAM,MAAM,QAAQ;CACpB,MAAM,SAAS,IAAI,KAAK,MAAM,IAAI;CAGlC,IAAI,UAAU;CAGd,IAAI,WAAW;CAIf,IAAI,mBAAmB;AACvB,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;EACpC,MAAM,QAAQ,OAAO;AAErB,MAAI,CAAC,OAAO;AACR,sBAAmB;AACnB;;AAGJ,qBAAmB;AAEnB,MAAI,UAAU,IACV;AAGJ,MAAI,UAAU,MAAM;AAChB,OAAI,UAAU;AACV,uBAAmB;AACnB;AACA;cAEK,IAGL,QAAO,aAAa;AAExB;;AAIJ,SAAO,aAAa;AACpB;;CAEJ,IAAI,OAAO;AACX,MAAK,IAAI,IAAI,GAAG,IAAI,SAAS,IACzB,SAAQ,MAAM,OAAO;AAEzB,KAAI,CAAC,QAAS,oBAAoB,CAAC,KAAK,SAAS,MAAM,CACnD,SAAQ;AAEZ,KAAI,OAAO;;;;;AAKf,SAASC,UAAQ,OAAO,MAAM;AAC1B,KAAI,CAAC,SAAS,CAAC,KACX,QAAO;CACX,MAAM,MAAM,SAAS,MAAM;CAC3B,IAAI,YAAY,IAAI;AACpB,KAAI,QAAQ,cAAc,GAAkB;EACxC,MAAM,UAAU,SAAS,KAAK;EAC9B,MAAM,WAAW,QAAQ;AACzB,UAAQ,WAAR;GACI,KAAK,EACD,KAAI,OAAO,QAAQ;GAEvB,KAAK,EACD,KAAI,QAAQ,QAAQ;GAExB,KAAK;GACL,KAAK,EACD,YAAW,KAAK,QAAQ;GAE5B,KAAK;AAED,QAAI,OAAO,QAAQ;AACnB,QAAI,OAAO,QAAQ;AACnB,QAAI,OAAO,QAAQ;GAEvB,KAAK,EAED,KAAI,SAAS,QAAQ;;AAE7B,MAAI,WAAW,UACX,aAAY;;AAEpB,iBAAc,KAAK,UAAU;CAC7B,MAAM,YAAY,IAAI,QAAQ,IAAI;AAClC,SAAQ,WAAR;EAGI,KAAK;EACL,KAAK,EACD,QAAO;EACX,KAAK,GAAsB;GAEvB,MAAM,OAAO,IAAI,KAAK,MAAM,EAAE;AAC9B,OAAI,CAAC,KACD,QAAO,aAAa;AACxB,OAAI,WAAW,QAAQ,MAAM,IAAI,CAAC,WAAW,KAAK,CAI9C,QAAO,OAAO,OAAO;AAEzB,UAAO,OAAO;;EAElB,KAAK,EACD,QAAO,IAAI,OAAO;EACtB,QACI,QAAO,IAAI,SAAS,OAAO,IAAI,OAAO,IAAI,OAAO,IAAI,OAAO,IAAI,OAAO;;;;;;AC3NnF,SAAS,cAAc,MAAM;AAC3B,KAAI,CAAC,KAAM,QAAO;CAClB,MAAM,QAAQ,KAAK,YAAY,IAAI;AACnC,QAAO,KAAK,MAAM,GAAG,QAAQ,EAAE;;AAIjC,SAAS,SAAS,QAAQ,YAAY;CACpC,MAAM,OAAO,cAAc,OAAO;CAClC,MAAM,SAAS,aAAa,aAAa,MAAM;AAC/C,SAAQ,WAAWC,UAAW,UAAU,UAAU,KAAK,KAAK;;AAI9D,IAAIC,WAAS;AASb,SAAS,UAAU,UAAU,OAAO;CAClC,MAAM,gBAAgB,wBAAwB,UAAU,EAAE;AAC1D,KAAI,kBAAkB,SAAS,OAAQ,QAAO;AAC9C,K
AAI,CAAC,MAAO,YAAW,SAAS,OAAO;AACvC,MAAK,IAAI,IAAI,eAAe,IAAI,SAAS,QAAQ,IAAI,wBAAwB,UAAU,IAAI,EAAE,CAC3F,UAAS,KAAK,aAAa,SAAS,IAAI,MAAM;AAEhD,QAAO;;AAET,SAAS,wBAAwB,UAAU,OAAO;AAChD,MAAK,IAAI,IAAI,OAAO,IAAI,SAAS,QAAQ,IACvC,KAAI,CAAC,SAAS,SAAS,GAAG,CAAE,QAAO;AAErC,QAAO,SAAS;;AAElB,SAAS,SAAS,MAAM;AACtB,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,IAC/B,KAAI,KAAK,GAAGA,YAAU,KAAK,IAAI,GAAGA,UAChC,QAAO;AAGX,QAAO;;AAET,SAAS,aAAa,MAAM,OAAO;AACjC,KAAI,CAAC,MAAO,QAAO,KAAK,OAAO;AAC/B,QAAO,KAAK,KAAK,eAAe;;AAElC,SAAS,eAAe,GAAG,GAAG;AAC5B,QAAO,EAAEA,YAAU,EAAEA;;AAIvB,IAAI,QAAQ;AACZ,SAAS,aAAa,UAAU,QAAQ,KAAK,MAAM;AACjD,QAAO,OAAO,MAAM;EAClB,MAAM,MAAM,OAAO,OAAO,OAAO;EACjC,MAAM,MAAM,SAAS,KAAKA,YAAU;AACpC,MAAI,QAAQ,GAAG;AACb,WAAQ;AACR,UAAO;;AAET,MAAI,MAAM,EACR,OAAM,MAAM;MAEZ,QAAO,MAAM;;AAGjB,SAAQ;AACR,QAAO,MAAM;;AAEf,SAAS,WAAW,UAAU,QAAQ,OAAO;AAC3C,MAAK,IAAI,IAAI,QAAQ,GAAG,IAAI,SAAS,QAAQ,QAAQ,IACnD,KAAI,SAAS,GAAGA,cAAY,OAAQ;AAEtC,QAAO;;AAET,SAAS,WAAW,UAAU,QAAQ,OAAO;AAC3C,MAAK,IAAI,IAAI,QAAQ,GAAG,KAAK,GAAG,QAAQ,IACtC,KAAI,SAAS,GAAGA,cAAY,OAAQ;AAEtC,QAAO;;AAET,SAAS,gBAAgB;AACvB,QAAO;EACL,SAAS;EACT,YAAY;EACZ,WAAW;EACZ;;AAEH,SAAS,qBAAqB,UAAU,QAAQ,OAAO,KAAK;CAC1D,MAAM,EAAE,SAAS,YAAY,cAAc;CAC3C,IAAI,MAAM;CACV,IAAI,OAAO,SAAS,SAAS;AAC7B,KAAI,QAAQ,SAAS;AACnB,MAAI,WAAW,YAAY;AACzB,WAAQ,cAAc,MAAM,SAAS,WAAWA,cAAY;AAC5D,UAAO;;AAET,MAAI,UAAU,WACZ,OAAM,cAAc,KAAK,IAAI;MAE7B,QAAO;;AAGX,OAAM,UAAU;AAChB,OAAM,aAAa;AACnB,QAAO,MAAM,YAAY,aAAa,UAAU,QAAQ,KAAK,KAAK;;AAuCpE,SAAS,MAAM,KAAK;AAClB,QAAO,OAAO,QAAQ,WAAW,KAAK,MAAM,IAAI,GAAG;;AAmHrD,IAAI,oBAAoB;AACxB,IAAI,uBAAuB;AAC3B,IAAI,WAAW,MAAM;CACnB,YAAY,KAAK,QAAQ;EACvB,MAAM,WAAW,OAAO,QAAQ;AAChC,MAAI,CAAC,YAAY,IAAI,aAAc,QAAO;EAC1C,MAAM,SAAS,MAAM,IAAI;EACzB,MAAM,EAAE,SAAS,MAAM,OAAO,YAAY,SAAS,mBAAmB;AACtE,OAAK,UAAU;AACf,OAAK,OAAO;AACZ,OAAK,QAAQ,SAAS,EAAE;AACxB,OAAK,aAAa;AAClB,OAAK,UAAU;AACf,OAAK,iBAAiB;AACtB,OAAK,aAAa,OAAO,cAAc,OAAO,uBAAuB,KAAK;EAC1E,MAAMC,YAAU,SAAS,QAAQ,WAAW;AAC5C,OAAK,kBAAkB,QAAQ,IAAIA,UAAQ;EAC3C,MAAM,EAAE,aAAa;AACrB,MAAI,OAAO,aAAa,UAAU;AAChC,QAAK,WAAW;AAChB,QAAK,WAAW,KAAK;aACZ,MAAM,QAAQ,SAAS,EAAE;AAClC,QAAK,WAAW,KAAK;AACrB,QAAK,WAAW,UAAU,UAAU,SAAS;aACpC,OAAO,SAChB,OAAM,IAAI,MAAM,6EAA6E;MAE7F,OAAM,IAAI,MAAM,uBAAuB,KAAK,UAAU,OAAO,GAAG;AAElE,OAAK,eAAe,eAAe;AACnC,OAAK,aAAa,KAAK;AACvB,OAAK,iBAAiB,KAAK;;;AAG/B,SAASC,OAAK,KAAK;AACjB,QAAO;;AAMT,SAAS,gBAAgB,KAAK;CAC5B,IAAI;AACJ,SAAQ,KAAKA,OAAK,IAAI,EAAE,aAAa,GAAG,WAAW,OAAOA,OAAK,IAAI,CAAC,SAAS;;AAE/E,SAAS,aAAa,KAAK,MAAM,QAAQ;CACvC,MAAM,UAAU,gBAAgB,IAAI;AACpC,KAAI,QAAQ,QAAQ,OAAQ,QAAO;CACnC,MAAM,WAAW,QAAQ;CACzB,MAAM,QAAQ,qBACZ,UACAA,OAAK,IAAI,CAAC,cACV,MACA,QACA,qBACD;AACD,QAAO,UAAU,KAAK,OAAO,SAAS;;AAiHxC,SAAS,qBAAqB,UAAU,MAAM,MAAM,QAAQ,MAAM;CAChE,IAAI,QAAQ,qBAAqB,UAAU,QAAQ,MAAM,KAAK;AAC9D,KAAI,MACF,UAAS,SAAS,oBAAoB,aAAa,YAAY,UAAU,QAAQ,MAAM;UAC9E,SAAS,kBAAmB;AACvC,KAAI,UAAU,MAAM,UAAU,SAAS,OAAQ,QAAO;AACtD,QAAO;;;;;AC7bT,IAAI,WAAW,MAAM;CACnB,cAAc;AACZ,OAAK,WAAW,EAAE,WAAW,MAAM;AACnC,OAAK,QAAQ,EAAE;;;AAGnB,SAAS,KAAK,KAAK;AACjB,QAAO;;AAET,SAAS,IAAI,QAAQ,KAAK;AACxB,QAAO,KAAK,OAAO,CAAC,SAAS;;AAE/B,SAAS,IAAI,QAAQ,KAAK;CACxB,MAAM,QAAQ,IAAI,QAAQ,IAAI;AAC9B,KAAI,UAAU,KAAK,EAAG,QAAO;CAC7B,MAAM,EAAE,OAAO,UAAU,YAAY,KAAK,OAAO;AAEjD,QAAO,QAAQ,OADA,MAAM,KAAK,IAAI,GACC;;AAEjC,SAAS,OAAO,QAAQ,KAAK;CAC3B,MAAM,QAAQ,IAAI,QAAQ,IAAI;AAC9B,KAAI,UAAU,KAAK,EAAG;CACtB,MAAM,EAAE,OAAO,UAAU,YAAY,KAAK,OAAO;AACjD,MAAK,IAAI,IAAI,QAAQ,GAAG,IAAI,MAAM,QAAQ,KAAK;EAC7C,MAAM,IAAI,MAAM;AAChB,QAAM,IAAI,KAAK;AACf,UAAQ;;AAEV,SAAQ,OAAO,KAAK;AACpB,OAAM,KAAK;;AAUb,IAAI,SAAS;AACb,IAAI,gBAAgB;AACpB,IAAI,cAAc;AAClB,IAAI,gBAAgB;AACpB,IAAI,cAAc;AAGlB,IAAI,UAAU;AACd,IAAI,aAAa,MAAM;CACrB,YAAY,EAAE,MA
AM,eAAe,EAAE,EAAE;AACrC,OAAK,SAAS,IAAI,UAAU;AAC5B,OAAK,WAAW,IAAI,UAAU;AAC9B,OAAK,kBAAkB,EAAE;AACzB,OAAK,YAAY,EAAE;AACnB,OAAK,OAAO;AACZ,OAAK,aAAa;AAClB,OAAK,cAAc,IAAI,UAAU;;;AAGrC,SAAS,MAAM,KAAK;AAClB,QAAO;;AAkBT,IAAI,mBAAmB,KAAK,SAAS,WAAW,QAAQ,YAAY,cAAc,MAAM,YAAY;AAClG,QAAO,mBACL,MACA,KACA,SACA,WACA,QACA,YACA,cACA,MACA,QACD;;AAKH,SAAS,iBAAiB,KAAK,QAAQ,SAAS;CAC9C,MAAM,EACJ,UAAU,SACV,iBAAiB,mBAEf,MAAM,IAAI;CACd,MAAM,QAAQ,IAAI,SAAS,OAAO;AAClC,gBAAe,SAAS;;AAE1B,SAAS,UAAU,KAAK,QAAQ,SAAS,MAAM;CAC7C,MAAM,EACJ,UAAU,SACV,iBAAiB,gBACjB,aAAa,eAEX,MAAM,IAAI;CACd,MAAM,QAAQ,IAAI,SAAS,OAAO;AAClC,KAAI,UAAU,eAAe,OAAQ,gBAAe,SAAS;AAC7D,KAAI,OAAQ,KAAI,YAAY,MAAM;KAC7B,QAAO,YAAY,MAAM;;AAEhC,SAAS,aAAa,KAAK;CACzB,MAAM,EACJ,WAAW,UACX,UAAU,SACV,iBAAiB,gBACjB,QAAQ,OACR,aAAa,eAGX,MAAM,IAAI;AACd,uBAAsB,SAAS;AAC/B,QAAO;EACL,SAAS;EACT,MAAM,IAAI,QAAQ,KAAK;EACvB,OAAO,MAAM;EACb,YAAY,IAAI,cAAc,KAAK;EACnC,SAAS,QAAQ;EACjB;EACA;EAGA,YAAY,WAAW;EACxB;;AAEH,SAAS,aAAa,KAAK;CACzB,MAAM,UAAU,aAAa,IAAI;AACjC,QAAO,OAAO,OAAO,EAAE,EAAE,SAAS,EAGhC,UAAU,OAAO,QAAQ,SAAS,EACnC,CAAC;;AAiCJ,SAAS,mBAAmB,UAAU,KAAK,SAAS,WAAW,QAAQ,YAAY,cAAc,MAAM,SAAS;CAC9G,MAAM,EACJ,WAAW,UACX,UAAU,SACV,iBAAiB,gBACjB,QAAQ,UAEN,MAAM,IAAI;CACd,MAAM,OAAO,SAAS,UAAU,QAAQ;CACxC,MAAM,QAAQ,eAAe,MAAM,UAAU;AAC7C,KAAI,CAAC,QAAQ;AACX,MAAI,YAAY,eAAe,MAAM,MAAM,CAAE;AAC7C,SAAO,OAAO,MAAM,OAAO,CAAC,UAAU,CAAC;;AAEzC,QAAO,WAAW;AAClB,QAAO,aAAa;CACpB,MAAM,eAAe,IAAI,SAAS,OAAO;CACzC,MAAM,aAAa,OAAO,IAAI,OAAO,KAAK,GAAG;AAC7C,KAAI,iBAAiB,eAAe,OAAQ,gBAAe,gBAAgB,WAAW,OAAO,UAAU;AACvG,KAAI,YAAY,WAAW,MAAM,OAAO,cAAc,YAAY,cAAc,WAAW,CACzF;AAEF,QAAO,OACL,MACA,OACA,OAAO;EAAC;EAAW;EAAc;EAAY;EAAc;EAAW,GAAG;EAAC;EAAW;EAAc;EAAY;EAAa,CAC7H;;AAEH,SAAS,OAAO,MAAM;AAEtB,SAAS,SAAS,KAAK,OAAO;AAC5B,MAAK,IAAI,IAAI,IAAI,QAAQ,KAAK,OAAO,IACnC,KAAI,KAAK,EAAE;AAEb,QAAO,IAAI;;AAEb,SAAS,eAAe,MAAM,WAAW;CACvC,IAAI,QAAQ,KAAK;AACjB,MAAK,IAAI,IAAI,QAAQ,GAAG,KAAK,GAAG,QAAQ,IAEtC,KAAI,aADY,KAAK,GACI,QAAS;AAEpC,QAAO;;AAET,SAAS,OAAO,OAAO,OAAO,OAAO;AACnC,MAAK,IAAI,IAAI,MAAM,QAAQ,IAAI,OAAO,IACpC,OAAM,KAAK,MAAM,IAAI;AAEvB,OAAM,SAAS;;AAEjB,SAAS,sBAAsB,UAAU;CACvC,MAAM,EAAE,WAAW;CACnB,IAAI,MAAM;AACV,MAAK,IAAI,IAAI,MAAM,GAAG,KAAK,GAAG,MAAM,GAAG,IACrC,KAAI,SAAS,GAAG,SAAS,EAAG;AAE9B,KAAI,MAAM,OAAQ,UAAS,SAAS;;AAKtC,SAAS,eAAe,MAAM,OAAO;AACnC,KAAI,UAAU,EAAG,QAAO;AAExB,QADa,KAAK,QAAQ,GACd,WAAW;;AAEzB,SAAS,WAAW,MAAM,OAAO,cAAc,YAAY,cAAc,YAAY;AACnF,KAAI,UAAU,EAAG,QAAO;CACxB,MAAM,OAAO,KAAK,QAAQ;AAC1B,KAAI,KAAK,WAAW,EAAG,QAAO;AAC9B,QAAO,iBAAiB,KAAK,kBAAkB,eAAe,KAAK,gBAAgB,iBAAiB,KAAK,kBAAkB,gBAAgB,KAAK,WAAW,IAAI,KAAK,eAAe;;;;;ACrPrL,MAAM,qBAAqC,8BAAc,IAAI,IAAI,IAAI,IAAI,MAAM,MAAM;AACrF,MAAM,gBAAgB,EAAE;AACxB,SAAS,cAAc,QAAQ,MAAM,QAAQ,MAAM,SAAS,QAAQ;AAChE,QAAO;EAAE;EAAQ;EAAM;EAAQ;EAAM;EAAS;EAAQ;;AAE1D,SAAS,OAAO,KAAK,SAAS,QAAQ,SAAS,QAAQ;AACnD,QAAO;EACH;EACA;EACA;EACA;EACA;EACH;;;;;;AAML,SAAS,UAAU,KAAK,SAAS;AAC7B,QAAO,OAAO,KAAK,SAAS,IAAI,MAAM,MAAM;;;;;;AAMhD,SAAS,eAAe,QAAQ,SAAS,QAAQ;AAC7C,QAAO,OAAO,MAAM,eAAe,QAAQ,SAAS,OAAO;;;;;;AAM/D,SAAS,cAAc,MAAM;CAGzB,MAAM,MAAM,IAAI,WAAW,EAAE,MAAM,KAAK,IAAI,MAAM,CAAC;CACnD,MAAM,EAAE,SAAS,aAAa,QAAQ;CACtC,MAAM,YAAY,IAAI;CACtB,MAAM,eAAe,gBAAgB,IAAI;AACzC,MAAK,IAAI,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK;EAC1C,MAAM,WAAW,aAAa;AAC9B,OAAK,IAAI,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;GACtC,MAAM,UAAU,SAAS;GACzB,MAAM,SAAS,QAAQ;GACvB,IAAI,SAAS;AAGb,OAAI,QAAQ,WAAW,GAAG;IACtB,MAAMC,WAAS,YAAY,QAAQ;AACnC,aAAS,oBAAoBA,UAAQ,QAAQ,IAAI,QAAQ,IAAI,QAAQ,WAAW,IAAI,UAAU,QAAQ,MAAM,GAAG;AAG/G,QAAI,UAAU,KACV;;GAER,MAAM,EAAE,QAAQ,MAAM,MAAM,SAAS,QAAQ,WAAW;AACxD,mBAAgB,KAAK,GAAG,QAAQ,QAAQ,MAAM,QAAQ,KAAK;AAC3D,OAAI,UAAU,WAAW,KACrB,kBAAiB,KAAK,QAAQ,QAAQ;AAC1C,OAAI,OA
CA,WAAU,KAAK,QAAQ,KAAK;;;AAGxC,QAAO;;;;;;AAMX,SAAS,oBAAoB,QAAQ,MAAM,QAAQ,MAAM;AACrD,KAAI,CAAC,OAAO,IACR,QAAO,cAAc,OAAO,QAAQ,MAAM,QAAQ,MAAM,OAAO,SAAS,OAAO,OAAO;CAE1F,MAAM,UAAU,aAAa,OAAO,KAAK,MAAM,OAAO;AAEtD,KAAI,WAAW,KACX,QAAO;AAGX,KAAI,QAAQ,WAAW,EACnB,QAAO;AACX,QAAO,oBAAoB,OAAO,QAAQ,QAAQ,KAAK,QAAQ,IAAI,QAAQ,IAAI,QAAQ,WAAW,IAAI,OAAO,IAAI,MAAM,QAAQ,MAAM,KAAK;;AAG9I,SAAS,QAAQ,OAAO;AACpB,KAAI,MAAM,QAAQ,MAAM,CACpB,QAAO;AACX,QAAO,CAAC,MAAM;;;;;;;;;;;;;AAalB,SAAS,mBAAmB,OAAO,QAAQ;CACvC,MAAM,OAAO,QAAQ,MAAM,CAAC,KAAK,MAAM,IAAI,SAAS,GAAG,GAAG,CAAC;CAC3D,MAAM,MAAM,KAAK,KAAK;AACtB,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,IAC7B,KAAI,KAAK,GAAG,QAAQ,SAAS,EACzB,OAAM,IAAI,MAAM,sBAAsB,EAAE,4GACoC;CAGpF,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,EAAE;AACpC,MAAK,IAAI,IAAI,KAAK,SAAS,GAAG,KAAK,GAAG,IAClC,QAAO,UAAU,KAAK,IAAI,CAAC,KAAK,CAAC;AAErC,QAAO;;AAEX,SAAS,MAAM,KAAK,QAAQ,UAAU,eAAe;CACjD,MAAM,EAAE,iBAAiB,gBAAgB,eAAe;CACxD,MAAM,QAAQ,gBAAgB;AA4B9B,QAAO,UAAU,KA3BA,gBAAgB,KAAK,YAAY,MAAM;EAKpD,MAAM,MAAM;GACR;GACA;GACA,QAAQ,cAAc;GACtB,SAAS;GACT,QAAQ;GACX;EAGD,MAAM,YAAY,OAAO,IAAI,QAAQ,IAAI;EACzC,MAAM,EAAE,QAAQ,SAAS,WAAW;AAEpC,MAAI,UACA,QAAO,MAAM,IAAI,SAAS,WAAW,OAAO,EAAE,QAAQ,QAAQ,MAAM;AAOxE,SAAO,eAAe,QAFA,YAAY,SAAY,UAAU,iBAAiB,eAAe,KAAK,MAC7E,WAAW,SAAY,SAAS,aAAa,WAAW,SAAS,EAAE,GAAG,MACjC;GACvD,CAC6B;;;;;;AAOnC,IAAM,YAAN,MAAgB;CACZ,YAAY,KAAK,SAAS;EACtB,MAAM,MAAM,QAAQ,kBAAkB,aAAa,IAAI,GAAG,aAAa,IAAI;AAC3E,OAAK,UAAU,IAAI;AACnB,OAAK,OAAO,IAAI;AAChB,OAAK,WAAW,IAAI;AACpB,OAAK,QAAQ,IAAI;AACjB,OAAK,aAAa,IAAI;AACtB,OAAK,aAAa,IAAI;AACtB,OAAK,UAAU,IAAI;AACnB,MAAI,CAAC,QAAQ,eACT,MAAK,iBAAiB,IAAI;;CAGlC,WAAW;AACP,SAAO,KAAK,UAAU,KAAK;;;;;;;;;;;;;;;;;;AAmBnC,SAAS,UAAU,OAAO,QAAQ,SAAS;CACvC,MAAM,OAAO,OAAO,YAAY,WAAW,UAAU;EAAE,gBAAgB,CAAC,CAAC;EAAS,iBAAiB;EAAO;AAE1G,QAAO,IAAI,UAAU,cADR,mBAAmB,OAAO,OAAO,CACN,EAAE,KAAK;;;;;;;;;;ACnLnD,IAAIC,eAAoC;;;;;AA+CxC,MAAM,mBAAmB,YAAmC;AAC1D,KAAI,aACF,QAAO;AAGT,KAAI;EAEF,MAAM,EAAE,kBAAkB,MAAM,OAAO;AAEvC,iBADgB,4DAA8B,CACvB,oBAAoB;AAC3C,SAAO;UACA,OAAO;AACd,QAAM,IAAI,MACR,mLAEqE,MAAM,GAC5E;;;;;;;AAiCL,MAAM,iBAAiB,UAA0B,MAAM,QAAQ,OAAO,IAAI;;;;;;;;AAS1E,MAAM,yBAAyB,UAA2B,eAAwC;CAChG,MAAM,SAAS,GAAG,WAAW;CAE7B,MAAMC,mBAAgD,EAAE;AACxD,MAAK,MAAM,CAAC,IAAI,YAAY,OAAO,QAAQ,SAAS,SAAS,CAC3D,KAAI,GAAG,WAAW,OAAO,CACvB,CAAC,iBAAoD,MAAM;AAI/D,QAAO;EACL,UAAU;EACV,QAAQ;GAAE,OAAO;IAAE,MAAM;IAAG,QAAQ;IAAG,OAAO;IAAG;GAAE,YAAY;GAAG,UAAU,EAAE;GAAE;EACjF;;;;;;AAOH,MAAM,4BAA4B,WAA0C;CAC1E,MAAM,2CAA4B,OAAO,QAAQ,WAAW;AAC5D,KAAI;AACF,SAAO,wCAA2B,kBAAkB,CAAC;SAC/C;AAEN,SAAO,qCAAsB,kBAAkB,CAAC;;;;;;;;;AA4BpD,MAAa,oBAAoB,OAAO,YAAoD;CAC1F,MAAM,SAAS,MAAM,kBAAkB;CAEvC,MAAM,QAAQ,QAAQ,iBAAiB,WAAW;CAGlD,MAAM,oBAAoB,yBAAyB,QAAQ,OAAO;CAElE,MAAM,aAAa,KAAK,UAAU;EAChC,sBAAsB,QAAQ,OAAO;EACrC,OAAO;EACP;EACA,WAAW,QAAQ,aAAa;EACjC,CAAC;CAGF,MAAM,eAAe,QAAQ;AAE7B,QAAO,EACL,YAAY,EAAE,YAAY,YAAY,qBAAsD;EAG1F,MAAM,iBAAiB,qCAAsB,WAAW,CAAC;EAIzD,MAAM,mBAAmB,sBAAsB,cAAc,eAAe;EAC5E,MAAM,uBAAuB,KAAK,UAAU,iBAAiB;EAI7D,MAAM,aADkB,IAAI,OAAO,eAAe,sBAAsB,WAAW,CAChD,UAAU,YAAY,eAAe;EACxE,MAAMC,SAA0B,KAAK,MAAM,WAAW;EAGtD,IAAIC;AACJ,MAAI,OAAO,UACT,KAAI,gBAAgB;GAElB,MAAM,SAAS,UAAU,CAAC,KAAK,MAAM,OAAO,UAAU,EAAE,KAAK,MAAM,eAAe,CAAC,QAAQ,KAAK;AAChG,oBAAiB,KAAK,UAAU,OAAO;QAEvC,kBAAiB,OAAO;AAI5B,SAAO;GACL,aAAa,OAAO;GACpB,YAAY,OAAO;GACnB,WAAW;GACX,QAAQ,OAAO,UAAU,EAAE;GAC5B;IAEJ;;;;;;;;;;AAWH,MAAa,YAAY,OACvB,UAM6B;CAC7B,MAAM,SAAS,MAAM,kBAAkB;CAIvC,MAAM,iBAAiB,qCAAsB,MAAM,WAAW,CAAC;CAG/D,MAAM,mBAAmB,sBAAsB,MAAM,UAAU,eAAe;CAG9E,MAAM,oBAAoB,yBAAyB,MAAM,OAAO;CAEhE,MAAM,YAAY,KAAK,UAAU;EAC/B,YAAY,MAAM;EAClB,YAAY;EACZ,cAAc,KAAK,UAAU,iBAAiB;EAC9C,QAAQ;GACN,sBAAsB,MAAM,OAAO;GACnC,OAAO,MAAM,SAAS;GACt
B;GACA,WAAW,MAAM,aAAa;GAC/B;EACF,CAAC;CAEF,MAAM,aAAa,OAAO,UAAU,UAAU;CAC9C,MAAMD,SAA0B,KAAK,MAAM,WAAW;CAGtD,IAAIC;AACJ,KAAI,OAAO,UACT,KAAI,MAAM,gBAAgB;EAExB,MAAM,SAAS,UAAU,CAAC,KAAK,MAAM,OAAO,UAAU,EAAE,KAAK,MAAM,MAAM,eAAe,CAAC,QAAQ,KAAK;AACtG,mBAAiB,KAAK,UAAU,OAAO;OAEvC,kBAAiB,OAAO;AAI5B,QAAO;EACL,aAAa,OAAO;EACpB,YAAY,OAAO;EACnB,WAAW;EACX,QAAQ,OAAO,UAAU,EAAE;EAC5B"}
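The `sourcesContent` embedded in the map above includes the TypeScript wrapper itself, which shows how the artifact is trimmed before each native call: canonical IDs take the form `filepath::astPath`, so the wrapper keeps only the elements whose ID starts with the normalized source path. A minimal sketch of that filtering step; the artifact shape is simplified here, since the real `BuilderArtifact` type lives in `@soda-gql/builder`:

```ts
// Sketch of the per-file artifact filter from src/index.ts.
// `Artifact` is a simplified stand-in for @soda-gql/builder's BuilderArtifact.
type Artifact = {
  elements: Record<string, unknown>;
  report: {
    stats: { hits: number; misses: number; skips: number };
    durationMs: number;
    warnings: string[];
  };
};

const filterArtifactForFile = (artifact: Artifact, sourcePath: string): Artifact => {
  // Canonical IDs have the form "<normalized file path>::<astPath>".
  const prefix = `${sourcePath}::`;
  const elements: Record<string, unknown> = {};
  for (const [id, element] of Object.entries(artifact.elements)) {
    if (id.startsWith(prefix)) elements[id] = element;
  }
  // Only the matching elements cross the JSON boundary; the report is reset,
  // which keeps serialization cheap on large codebases.
  return {
    elements,
    report: { stats: { hits: 0, misses: 0, skips: 0 }, durationMs: 0, warnings: [] },
  };
};
```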
package/dist/index.d.cts
ADDED
@@ -0,0 +1,91 @@
+import { BuilderArtifact } from "@soda-gql/builder";
+import { ResolvedSodaGqlConfig } from "@soda-gql/config";
+
+//#region packages/swc-transformer/src/index.d.ts
+
+/**
+ * Plugin error from the SWC transformer.
+ * This matches the Rust PluginError structure for consistent error reporting.
+ */
+type SwcPluginError = {
+  /** Always "PluginError" for type discrimination */
+  readonly type: "PluginError";
+  /** Error code for programmatic handling (e.g., "SODA_GQL_METADATA_NOT_FOUND") */
+  readonly code: string;
+  /** Human-readable error message */
+  readonly message: string;
+  /** Stage where the error occurred */
+  readonly stage: "analysis" | "transform";
+  /** Source filename if applicable */
+  readonly filename?: string;
+  /** Canonical ID if applicable */
+  readonly canonicalId?: string;
+  /** Artifact type if applicable */
+  readonly artifactType?: string;
+  /** Builder type if applicable */
+  readonly builderType?: string;
+  /** Argument name if applicable */
+  readonly argName?: string;
+};
+type ModuleFormat = "esm" | "cjs";
+type TransformOptions = {
+  /** Compiler options for output format */
+  compilerOptions?: {
+    /** Module format: CommonJS or ESNext */
+    module?: "CommonJS" | "ESNext";
+  };
+  /** Resolved soda-gql configuration */
+  config: ResolvedSodaGqlConfig;
+  /** Pre-built artifact from the builder */
+  artifact: BuilderArtifact;
+  /** Whether to generate source maps */
+  sourceMap?: boolean;
+};
+type TransformInput = {
+  /** Source code to transform */
+  sourceCode: string;
+  /** Path to the source file */
+  sourcePath: string;
+  /** Input source map from previous transformer (JSON string) */
+  inputSourceMap?: string;
+};
+type TransformOutput = {
+  /** Whether any transformation was performed */
+  transformed: boolean;
+  /** The transformed source code (or original if no transformation) */
+  sourceCode: string;
+  /** Source map JSON, if source map generation was enabled */
+  sourceMap?: string;
+  /** Errors encountered during transformation (non-fatal) */
+  errors: SwcPluginError[];
+};
+/**
+ * Transformer interface.
+ */
+interface Transformer {
+  transform(input: TransformInput): TransformOutput;
+}
+/**
+ * Create a transformer instance.
+ *
+ * @param options - Transform options including config and artifact
+ * @returns A transformer that can transform source files
+ */
+declare const createTransformer: (options: TransformOptions) => Promise<Transformer>;
+/**
+ * Transform a single source file (one-shot).
+ *
+ * For transforming multiple files, use createTransformer() to reuse the artifact.
+ *
+ * @param input - Transform input including source, path, artifact, and config
+ * @returns Transform output
+ */
+declare const transform: (input: TransformInput & {
+  artifact: BuilderArtifact;
+  config: ResolvedSodaGqlConfig;
+  isCjs?: boolean;
+  sourceMap?: boolean;
+}) => Promise<TransformOutput>;
+//#endregion
+export { ModuleFormat, SwcPluginError, TransformInput, TransformOptions, TransformOutput, Transformer, createTransformer, transform };
+//# sourceMappingURL=index.d.cts.map
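These declarations are the package's public surface. A minimal usage sketch for the batch path, assuming a `ResolvedSodaGqlConfig` and `BuilderArtifact` have already been produced by the companion `@soda-gql/config` and `@soda-gql/builder` packages (how they are produced is out of scope here; the driver function below is hypothetical):

```ts
import { readFileSync } from "node:fs";
import type { BuilderArtifact } from "@soda-gql/builder";
import type { ResolvedSodaGqlConfig } from "@soda-gql/config";
import { createTransformer } from "@soda-gql/swc-transformer";

// Hypothetical driver: transform a list of files with one shared artifact.
async function transformAll(
  config: ResolvedSodaGqlConfig,
  artifact: BuilderArtifact,
  paths: string[],
): Promise<Map<string, string>> {
  // One transformer instance reuses the config/artifact across files.
  const transformer = await createTransformer({
    config,
    artifact,
    sourceMap: true,
    compilerOptions: { module: "ESNext" },
  });

  const out = new Map<string, string>();
  for (const sourcePath of paths) {
    const result = transformer.transform({
      sourceCode: readFileSync(sourcePath, "utf8"),
      sourcePath, // relative paths are fine; the wrapper resolves and normalizes
    });
    for (const err of result.errors) {
      // Errors are non-fatal; the (un)transformed code is still returned.
      console.warn(`[${err.code}] ${err.stage}: ${err.message}`);
    }
    out.set(sourcePath, result.sourceCode);
  }
  return out;
}
```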
package/dist/index.d.cts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.d.cts","names":[],"sources":["../src/index.ts"],"sourcesContent":[],"mappings":";;;;;AAgGA;AAoDA;AAcA;AAUA;AAAiD,KAhJrC,cAAA,GAgJqC;EAA2B;EAAR,SAAA,IAAA,EAAA,aAAA;EAAO;EAgE9D,SAAA,IAqDZ,EAAA,MAAA;EApDQ;EACK,SAAA,OAAA,EAAA,MAAA;EACF;EAID,SAAA,KAAA,EAAA,UAAA,GAAA,WAAA;EAAR;EAAO,SAAA,QAAA,CAAA,EAAA,MAAA;;;;;;;;;;KAnKE,YAAA;KAEA,gBAAA;;;;;;;UAOF;;YAEE;;;;KAKA,cAAA;;;;;;;;KAoDA,eAAA;;;;;;;;UAQF;;;;;UAMO,WAAA;mBACE,iBAAiB;;;;;;;;cASvB,6BAAoC,qBAAmB,QAAQ;;;;;;;;;cAgE/D,mBACJ;YACK;UACF;;;MAIT,QAAQ"}
package/dist/index.d.mts
ADDED
@@ -0,0 +1,91 @@
+import { BuilderArtifact } from "@soda-gql/builder";
+import { ResolvedSodaGqlConfig } from "@soda-gql/config";
+
+//#region packages/swc-transformer/src/index.d.ts
+
+/**
+ * Plugin error from the SWC transformer.
+ * This matches the Rust PluginError structure for consistent error reporting.
+ */
+type SwcPluginError = {
+  /** Always "PluginError" for type discrimination */
+  readonly type: "PluginError";
+  /** Error code for programmatic handling (e.g., "SODA_GQL_METADATA_NOT_FOUND") */
+  readonly code: string;
+  /** Human-readable error message */
+  readonly message: string;
+  /** Stage where the error occurred */
+  readonly stage: "analysis" | "transform";
+  /** Source filename if applicable */
+  readonly filename?: string;
+  /** Canonical ID if applicable */
+  readonly canonicalId?: string;
+  /** Artifact type if applicable */
+  readonly artifactType?: string;
+  /** Builder type if applicable */
+  readonly builderType?: string;
+  /** Argument name if applicable */
+  readonly argName?: string;
+};
+type ModuleFormat = "esm" | "cjs";
+type TransformOptions = {
+  /** Compiler options for output format */
+  compilerOptions?: {
+    /** Module format: CommonJS or ESNext */
+    module?: "CommonJS" | "ESNext";
+  };
+  /** Resolved soda-gql configuration */
+  config: ResolvedSodaGqlConfig;
+  /** Pre-built artifact from the builder */
+  artifact: BuilderArtifact;
+  /** Whether to generate source maps */
+  sourceMap?: boolean;
+};
+type TransformInput = {
+  /** Source code to transform */
+  sourceCode: string;
+  /** Path to the source file */
+  sourcePath: string;
+  /** Input source map from previous transformer (JSON string) */
+  inputSourceMap?: string;
+};
+type TransformOutput = {
+  /** Whether any transformation was performed */
+  transformed: boolean;
+  /** The transformed source code (or original if no transformation) */
+  sourceCode: string;
+  /** Source map JSON, if source map generation was enabled */
+  sourceMap?: string;
+  /** Errors encountered during transformation (non-fatal) */
+  errors: SwcPluginError[];
+};
+/**
+ * Transformer interface.
+ */
+interface Transformer {
+  transform(input: TransformInput): TransformOutput;
+}
+/**
+ * Create a transformer instance.
+ *
+ * @param options - Transform options including config and artifact
+ * @returns A transformer that can transform source files
+ */
+declare const createTransformer: (options: TransformOptions) => Promise<Transformer>;
+/**
+ * Transform a single source file (one-shot).
+ *
+ * For transforming multiple files, use createTransformer() to reuse the artifact.
+ *
+ * @param input - Transform input including source, path, artifact, and config
+ * @returns Transform output
+ */
+declare const transform: (input: TransformInput & {
+  artifact: BuilderArtifact;
+  config: ResolvedSodaGqlConfig;
+  isCjs?: boolean;
+  sourceMap?: boolean;
+}) => Promise<TransformOutput>;
+//#endregion
+export { ModuleFormat, SwcPluginError, TransformInput, TransformOptions, TransformOutput, Transformer, createTransformer, transform };
+//# sourceMappingURL=index.d.mts.map
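For a single file there is also the one-shot path, which takes the artifact and config inline instead of holding a transformer instance. A sketch against the declarations above (the wrapper function and flag values below are placeholders):

```ts
import type { BuilderArtifact } from "@soda-gql/builder";
import type { ResolvedSodaGqlConfig } from "@soda-gql/config";
import { transform } from "@soda-gql/swc-transformer";

// One-shot transform of a single module. Prefer createTransformer()
// when processing many files, so the artifact is reused across calls.
async function transformOnce(
  config: ResolvedSodaGqlConfig,
  artifact: BuilderArtifact,
  sourcePath: string,
  sourceCode: string,
): Promise<string> {
  const result = await transform({
    sourceCode,
    sourcePath,
    artifact,
    config,
    isCjs: false,    // emit ESM-shaped output; set true for CommonJS
    sourceMap: true, // also return a (possibly chained) source map
  });
  return result.transformed ? result.sourceCode : sourceCode;
}
```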
package/dist/index.d.mts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.d.mts","names":[],"sources":["../src/index.ts"],"sourcesContent":[],"mappings":";;;;;AAgGA;AAoDA;AAcA;AAUA;AAAiD,KAhJrC,cAAA,GAgJqC;EAA2B;EAAR,SAAA,IAAA,EAAA,aAAA;EAAO;EAgE9D,SAAA,IAqDZ,EAAA,MAAA;EApDQ;EACK,SAAA,OAAA,EAAA,MAAA;EACF;EAID,SAAA,KAAA,EAAA,UAAA,GAAA,WAAA;EAAR;EAAO,SAAA,QAAA,CAAA,EAAA,MAAA;;;;;;;;;;KAnKE,YAAA;KAEA,gBAAA;;;;;;;UAOF;;YAEE;;;;KAKA,cAAA;;;;;;;;KAoDA,eAAA;;;;;;;;UAQF;;;;;UAMO,WAAA;mBACE,iBAAiB;;;;;;;;cASvB,6BAAoC,qBAAmB,QAAQ;;;;;;;;;cAgE/D,mBACJ;YACK;UACF;;;MAIT,QAAQ"}