@lark-apaas/devtool-kits 1.2.10-alpha.0 → 1.2.11-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +356 -205
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +352 -201
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/utils/index.ts","../src/helpers/gen-dbschema/postprocess.ts","../src/helpers/gen-dbschema/helper/header-format.ts","../src/helpers/gen-dbschema/helper/schema-conversion.ts","../src/helpers/gen-dbschema/helper/table-rename.ts","../src/helpers/gen-dbschema/helper/custom-types.ts","../src/helpers/gen-dbschema/helper/imports.ts","../src/helpers/gen-dbschema/helper/system-fields.ts","../src/helpers/gen-dbschema/helper/patch-helper.ts","../src/helpers/gen-dbschema/helper/timestamp-replacement.ts","../src/helpers/gen-dbschema/helper/appendTableAliases.ts","../src/helpers/gen-nest-resource/generator.ts","../src/helpers/gen-nest-resource/utils.ts","../src/helpers/gen-nest-resource/schema-parser.ts","../src/helpers/gen-nest-resource/index.ts","../src/helpers/proxy-error/index.ts","../src/middlewares/index.ts","../src/middlewares/openapi/router.ts","../src/middlewares/openapi/controller.ts","../src/middlewares/openapi/services.ts","../src/middlewares/openapi/utils.ts","../src/middlewares/openapi/index.ts","../src/middlewares/dev-logs/router.ts","../src/middlewares/dev-logs/utils.ts","../src/middlewares/dev-logs/helper/path-matcher.ts","../src/middlewares/dev-logs/controller.ts","../src/middlewares/dev-logs/services.ts","../src/middlewares/dev-logs/health.controller.ts","../src/middlewares/dev-logs/index.ts","../src/middlewares/collect-logs/router.ts","../src/middlewares/collect-logs/controller.ts","../src/middlewares/collect-logs/utils.ts","../src/middlewares/collect-logs/index.ts"],"sourcesContent":[/* inlined TypeScript source for each entry in "sources" */], ...
` +\n `Using createRouter and ignoring createHandler.`\n );\n }\n\n if (isRouteMiddleware(middleware)) {\n // Route-based middleware\n await registerRouteMiddleware(server, middleware, context);\n } else if (isGlobalMiddleware(middleware)) {\n // Global middleware\n await registerGlobalMiddleware(server, middleware, context);\n } else {\n // Type-unsafe middleware that has neither createRouter nor createHandler\n console.error(\n `[Middleware] ${(middleware as any).name || 'unknown'}: Must provide either createRouter or createHandler.`\n );\n }\n } catch (error) {\n console.error(`[Middleware] Failed to register ${middleware.name}:`, error);\n }\n }\n}\n\n// Re-export types and middleware creators\nexport type {\n Middleware,\n RouteMiddleware,\n GlobalMiddleware,\n MiddlewareContext,\n RouteInfo,\n} from './types';\nexport { createOpenapiMiddleware } from './openapi';\nexport { createDevLogsMiddleware } from './dev-logs';\nexport { createCollectLogsMiddleware } from './collect-logs';","import express, { Router } from 'express';\nimport type { MiddlewareContext } from '../types';\nimport { createOpenapiHandler } from './controller';\n\n/**\n * Options for creating OpenAPI router\n */\nexport interface OpenapiRouterOptions {\n /** Path to the openapi.json file */\n openapiFilePath: string;\n /** Enable source code enhancement */\n enableEnhancement: boolean;\n /** Server directory for source code scanning */\n serverDir?: string;\n}\n\n/**\n * Create OpenAPI router\n */\nexport function createOpenapiRouter(\n options: OpenapiRouterOptions,\n context: MiddlewareContext,\n): Router {\n const { openapiFilePath, enableEnhancement, serverDir } = options;\n const router = express.Router();\n const handler = createOpenapiHandler(openapiFilePath, enableEnhancement, serverDir);\n\n // GET /openapi.json - Serve OpenAPI specification\n router.get('/openapi.json', (req, res) => handler(req, res, context));\n\n return router;\n}\n","import fs from 'node:fs/promises';\nimport crypto from 'node:crypto';\n\nimport { enhanceOpenApiWithSourceInfo } from './services';\nimport { transformOpenapiPaths } from './utils';\n\nimport type { Request, Response } from 'express';\nimport type { OpenapiCache } from './types';\nimport type { MiddlewareContext } from '../types';\n\n/**\n * Create OpenAPI request handler with caching\n */\nexport function createOpenapiHandler(\n openapiFilePath: string,\n enableEnhancement: boolean,\n serverDir?: string,\n) {\n // In-memory cache\n let cache: OpenapiCache | null = null;\n\n return async (_req: Request, res: Response, context: MiddlewareContext) => {\n try {\n // Read OpenAPI file\n const fileBuffer = await fs.readFile(openapiFilePath, 'utf-8');\n\n // Calculate file hash for cache invalidation\n const currentHash = crypto.createHash('md5').update(fileBuffer).digest('hex');\n\n // Use cache if file content hasn't changed\n if (cache && cache.fileHash === currentHash) {\n return res.json(cache.data);\n }\n\n // Parse OpenAPI\n let payload = JSON.parse(fileBuffer);\n\n // Enhance with x-source if enabled\n if (enableEnhancement && context.isDev) {\n const { openapi: enhancedPayload, stats } = await enhanceOpenApiWithSourceInfo({\n openapiData: payload,\n writeFile: false, // Don't write to file, keep in memory\n serverDir: serverDir || context.rootDir, // Use provided serverDir or fallback to context.rootDir\n });\n payload = enhancedPayload;\n\n // Log performance\n console.log(`[OpenAPI] Enhanced in ${stats.duration}ms (${stats.endpointsEnhanced} endpoints)`);\n }\n\n // 
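registerMiddlewares accepts anything that satisfies the Middleware contract, so project-specific tooling can sit next to the built-in creators. A minimal sketch of a custom route middleware (the `ping` name and the `/dev/ping` route are made up for illustration; the field names come from the RouteMiddleware and RouteInfo types re-exported above):

```typescript
import express, { Router } from 'express';
import type { RouteMiddleware, MiddlewareContext } from '@lark-apaas/devtool-kits';

// Hypothetical example middleware: exposes GET <basePath>/dev/ping in dev builds only.
const pingMiddleware: RouteMiddleware = {
  name: 'ping',
  mountPath: '/dev',
  routes: [{ method: 'GET', path: '/ping', description: 'Liveness probe for the dev server' }],

  // Same gating pattern as the built-in middlewares: dev only.
  enabled: (context: MiddlewareContext) => context.isDev,

  createRouter: (context: MiddlewareContext): Router => {
    const router = express.Router();
    router.get('/ping', (_req, res) => {
      res.json({ ok: true, basePath: context.basePath, time: new Date().toISOString() });
    });
    return router;
  },
};

export default pingMiddleware;
```

Passing it to registerMiddlewares alongside the built-in creators then logs it as `[Middleware] Registered: ping at <basePath>/dev`, like any other route middleware.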
Transform paths: remove basePath prefix from all API paths\n const result = transformOpenapiPaths(payload, context.basePath);\n\n // Update cache\n cache = {\n data: result,\n fileHash: currentHash,\n };\n\n res.json(result);\n } catch (error) {\n const message = error instanceof Error ? error.message : 'Unknown error';\n res.status(500).json({\n error: 'Failed to load OpenAPI spec',\n message,\n });\n }\n };\n}\n","import { promises as fs } from 'node:fs';\nimport path from 'node:path';\nimport ts from 'typescript';\nimport type { SourceInfo, EnhanceOptions, EnhanceResult } from './types';\nimport { findControllerFiles, buildSourceMap, enhanceOpenApiPaths } from './utils';\n\n/**\n * Enhances OpenAPI JSON with source file location metadata\n * Can be called programmatically or run as a script\n */\nexport async function enhanceOpenApiWithSourceInfo(options: EnhanceOptions = {}): Promise<EnhanceResult> {\n const startTime = Date.now();\n\n const openapiPath = options.openapiPath || path.resolve(__dirname, '../client/src/api/gen/openapi.json');\n const serverDir = options.serverDir || path.resolve(__dirname, '../server');\n const writeFile = options.writeFile !== false;\n\n let openapi: any;\n if (options.openapiData) {\n // Use provided data (for in-memory enhancement)\n openapi = JSON.parse(JSON.stringify(options.openapiData)); // Deep clone\n } else {\n // Read from file\n const openapiContent = await fs.readFile(openapiPath, 'utf-8');\n openapi = JSON.parse(openapiContent);\n }\n\n const controllerFiles = await findControllerFiles(serverDir);\n const sourceMap = await buildSourceMap(controllerFiles, processControllerFile);\n const enhanced = enhanceOpenApiPaths(openapi, sourceMap);\n\n if (writeFile) {\n await fs.writeFile(openapiPath, JSON.stringify(openapi, null, 2) + '\\n', 'utf-8');\n }\n\n const duration = Date.now() - startTime;\n\n return {\n openapi,\n stats: {\n duration,\n controllersFound: controllerFiles.length,\n endpointsExtracted: sourceMap.size,\n endpointsEnhanced: enhanced,\n },\n };\n}\n\n/**\n * Process a single controller file\n */\nasync function processControllerFile(filePath: string): Promise<Map<string, SourceInfo>> {\n const relativePath = path.relative(process.cwd(), filePath);\n\n // Parse file\n const content = await fs.readFile(filePath, 'utf-8');\n const sourceFile = ts.createSourceFile(filePath, content, ts.ScriptTarget.Latest, true);\n\n return extractControllerMetadata(sourceFile, relativePath);\n}\n\n/**\n * Extract controller metadata from TypeScript source file\n */\nfunction extractControllerMetadata(sourceFile: ts.SourceFile, filePath: string): Map<string, SourceInfo> {\n const metadata = new Map<string, SourceInfo>();\n let controllerPath = '';\n let className = '';\n\n // Helper function to get decorators from both old and new TypeScript APIs\n function getDecorators(node: ts.Node): readonly ts.Decorator[] {\n // TypeScript 5.x: decorators are in modifiers array\n if ('modifiers' in node && Array.isArray(node.modifiers)) {\n return (node.modifiers as ts.ModifierLike[]).filter(\n (mod): mod is ts.Decorator => mod.kind === ts.SyntaxKind.Decorator,\n );\n }\n // TypeScript 4.x: decorators are in decorators array\n if ('decorators' in node && Array.isArray(node.decorators)) {\n return node.decorators as readonly ts.Decorator[];\n }\n return [];\n }\n\n function visit(node: ts.Node): void {\n // Extract @Controller decorator and its path\n if (ts.isClassDeclaration(node)) {\n const decorators = getDecorators(node);\n\n // Extract class name\n if 
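createOpenapiHandler avoids re-parsing and re-enhancing openapi.json on every request by keying an in-memory cache on the md5 hash of the raw file. The same pattern as a small generic helper, with names invented here:

```typescript
import { createHash } from 'node:crypto';
import { readFile } from 'node:fs/promises';

// Hypothetical helper: recompute `derive(content)` only when the file's content hash changes.
function createFileDerivedCache<T>(filePath: string, derive: (content: string) => Promise<T> | T) {
  let cache: { fileHash: string; data: T } | null = null;

  return async (): Promise<T> => {
    const content = await readFile(filePath, 'utf-8');
    const fileHash = createHash('md5').update(content).digest('hex');

    // Same invalidation rule as the openapi handler: identical hash, reuse cached result.
    if (cache && cache.fileHash === fileHash) {
      return cache.data;
    }

    const data = await derive(content);
    cache = { fileHash, data };
    return data;
  };
}

// Usage sketch: parse openapi.json at most once per content change.
const getOpenapi = createFileDerivedCache('./openapi.json', (content) => JSON.parse(content));
```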
(node.name) {\n className = node.name.getText(sourceFile);\n }\n\n for (const decorator of decorators) {\n if (ts.isCallExpression(decorator.expression)) {\n const expression = decorator.expression;\n const decoratorName = expression.expression.getText(sourceFile);\n\n if (decoratorName === 'Controller') {\n if (expression.arguments.length > 0) {\n const arg = expression.arguments[0];\n if (ts.isStringLiteral(arg)) {\n controllerPath = arg.text;\n }\n }\n }\n }\n }\n }\n\n // Extract methods with HTTP decorators\n if (ts.isMethodDeclaration(node) && node.name) {\n const methodName = node.name.getText(sourceFile);\n let httpMethod = '';\n let routePath = '';\n const { line } = sourceFile.getLineAndCharacterOfPosition(node.getStart(sourceFile));\n\n const decorators = getDecorators(node);\n\n for (const decorator of decorators) {\n if (ts.isCallExpression(decorator.expression)) {\n const decoratorName = decorator.expression.expression.getText(sourceFile);\n if (['Get', 'Post', 'Put', 'Delete', 'Patch', 'Options', 'Head', 'All'].includes(decoratorName)) {\n httpMethod = decoratorName.toLowerCase();\n if (decorator.expression.arguments.length > 0) {\n const arg = decorator.expression.arguments[0];\n if (ts.isStringLiteral(arg)) {\n routePath = arg.text;\n }\n }\n }\n }\n }\n\n if (httpMethod && methodName && className) {\n const operationId = `${className}_${methodName}`;\n metadata.set(operationId, {\n file: filePath,\n line: line + 1,\n method: httpMethod,\n controllerPath,\n routePath,\n });\n }\n }\n\n ts.forEachChild(node, visit);\n }\n\n visit(sourceFile);\n return metadata;\n}\n","import path from 'node:path';\nimport { promises as fs } from 'node:fs';\nimport type { SourceInfo } from './types';\n\n/**\n * Find all controller files in a directory\n */\nexport async function findControllerFiles(dir: string): Promise<string[]> {\n const files: string[] = [];\n\n async function scan(currentDir: string): Promise<void> {\n const entries = await fs.readdir(currentDir, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = path.join(currentDir, entry.name);\n\n if (entry.isDirectory()) {\n await scan(fullPath);\n } else if (entry.isFile() && entry.name.endsWith('.controller.ts')) {\n files.push(fullPath);\n }\n }\n }\n\n await scan(dir);\n return files;\n}\n\n/**\n * Build source map from controller files\n */\nexport async function buildSourceMap(\n controllerFiles: string[],\n processFile: (filePath: string) => Promise<Map<string, SourceInfo>>,\n): Promise<Map<string, SourceInfo>> {\n const sourceMap = new Map<string, SourceInfo>();\n\n // Process files in parallel with a concurrency limit\n const concurrency = 10;\n const results: Map<string, SourceInfo>[] = [];\n\n for (let i = 0; i < controllerFiles.length; i += concurrency) {\n const batch = controllerFiles.slice(i, i + concurrency);\n const batchResults = await Promise.all(batch.map((filePath) => processFile(filePath)));\n results.push(...batchResults);\n }\n\n // Merge results\n for (const metadata of results) {\n for (const [operationId, info] of metadata.entries()) {\n sourceMap.set(operationId, info);\n }\n }\n\n return sourceMap;\n}\n\n/**\n * Try to match operationId with different formats\n * Supports:\n * - Direct match: ClassName_methodName\n * - Camel case: classNameMethodName\n * - Method only: methodName\n */\nfunction findSourceInfo(operationId: string, sourceMap: Map<string, SourceInfo>): SourceInfo | undefined {\n // Try direct match first\n const directMatch = sourceMap.get(operationId);\n if 
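Outside the middleware, enhanceOpenApiWithSourceInfo can also be called directly, either against a file on disk or against an in-memory document via openapiData. A usage sketch under the assumption that the package root re-exports it the way the openapi index above does (adjust the import path otherwise; the document and directory below are placeholders):

```typescript
import { enhanceOpenApiWithSourceInfo } from '@lark-apaas/devtool-kits'; // assumed root export

async function main() {
  // In-memory enhancement, mirroring how the openapi controller calls it:
  // pass openapiData and keep writeFile: false so nothing is written back to disk.
  const { openapi, stats } = await enhanceOpenApiWithSourceInfo({
    openapiData: { openapi: '3.0.0', paths: {} }, // placeholder document
    serverDir: './server',                        // directory scanned for *.controller.ts files
    writeFile: false,
  });

  console.log(
    `enhanced ${stats.endpointsEnhanced}/${stats.endpointsExtracted} endpoints ` +
      `from ${stats.controllersFound} controllers in ${stats.duration}ms`,
  );
  return openapi; // matched operations now carry an x-source { file, line } block
}

main().catch(console.error);
```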
(directMatch) {\n return directMatch;\n }\n\n // Try matching with different formats\n for (const [key, value] of sourceMap.entries()) {\n // key format: ClassName_methodName\n const [className, methodName] = key.split('_');\n if (!className || !methodName) continue;\n\n // Try camelCase format: classNameMethodName\n const camelCaseId = className.charAt(0).toLowerCase() + className.slice(1) + methodName.charAt(0).toUpperCase() + methodName.slice(1);\n if (operationId === camelCaseId) {\n return value;\n }\n\n // Try method name only\n if (operationId === methodName) {\n return value;\n }\n }\n\n return undefined;\n}\n\n/**\n * Enhance OpenAPI paths with source information\n */\nexport function enhanceOpenApiPaths(openapi: any, sourceMap: Map<string, SourceInfo>): number {\n let enhancedCount = 0;\n\n if (!openapi.paths) {\n return enhancedCount;\n }\n\n for (const pathItem of Object.values(openapi.paths)) {\n if (!pathItem || typeof pathItem !== 'object') continue;\n\n for (const operation of Object.values(pathItem)) {\n if (operation && typeof operation === 'object' && 'operationId' in operation) {\n const sourceInfo = findSourceInfo(operation.operationId as string, sourceMap);\n if (sourceInfo) {\n operation['x-source'] = {\n file: sourceInfo.file,\n line: sourceInfo.line,\n };\n enhancedCount++;\n }\n }\n }\n }\n\n return enhancedCount;\n}\n\n/**\n * Transform OpenAPI paths by removing basePath prefix\n */\nexport function transformOpenapiPaths(openapi: any, basePath: string): any {\n if (basePath === '/' || !openapi.paths) {\n return openapi;\n }\n\n const newPaths: any = {};\n Object.keys(openapi.paths).forEach((key) => {\n const staticApiKey = key.startsWith(basePath) ? key.slice(basePath.length) : key;\n newPaths[staticApiKey] = openapi.paths[key];\n });\n\n return {\n ...openapi,\n paths: newPaths,\n basePath,\n };\n}\n","import type { RouteMiddleware, MiddlewareContext, RouteInfo } from '../types';\nimport type { OpenapiMiddlewareOptions } from './types';\nimport { createOpenapiRouter } from './router';\n\n/**\n * Routes provided by OpenAPI middleware\n */\nconst OPENAPI_ROUTES: RouteInfo[] = [\n {\n method: 'GET',\n path: '/openapi.json',\n description: 'Serve enhanced OpenAPI specification with source code references',\n }\n];\n\n/**\n * Creates OpenAPI middleware that serves enhanced openapi.json\n * Supports both rspack/webpack and Vite dev servers\n */\nexport function createOpenapiMiddleware(options: OpenapiMiddlewareOptions): RouteMiddleware {\n const { openapiFilePath, enableEnhancement = true, serverDir } = options;\n\n return {\n name: 'openapi',\n mountPath: '/dev',\n routes: OPENAPI_ROUTES,\n\n enabled: (context: MiddlewareContext) => context.isDev,\n\n createRouter: (context: MiddlewareContext) => {\n return createOpenapiRouter(\n {\n openapiFilePath,\n enableEnhancement,\n serverDir,\n },\n context,\n );\n },\n };\n}\n\n// Re-export types and utilities\nexport type { OpenapiMiddlewareOptions, SourceInfo, EnhanceOptions, EnhanceResult } from './types';\nexport { enhanceOpenApiWithSourceInfo } from './services';\n","import express, { Router } from 'express';\nimport type { RouterOptions } from './types';\nimport { resolveLogDir } from './utils';\nimport {\n createGetTraceEntriesHandler,\n createGetRecentTracesHandler,\n createGetLogFileHandler,\n createGetServerLogsHandler,\n createGetTriggerListHandler,\n createGetTriggerDetailHandler,\n} from './controller';\nimport { createHealthCheckHandler } from './health.controller';\n/**\n * Create dev log router with all 
routes registered\n */\nexport function createDevLogRouter(options: RouterOptions = {}): Router {\n const logDir = resolveLogDir(options.logDir);\n const router = express.Router();\n\n // GET /app/trace/:traceId - Get log entries by trace ID\n router.get('/app/trace/:traceId', createGetTraceEntriesHandler(logDir));\n\n // GET /trace/recent - Get recent trace calls with pagination\n router.get('/trace/recent', createGetRecentTracesHandler(logDir));\n\n // GET /files/:fileName - Get paginated log file content\n router.get('/files/:fileName', createGetLogFileHandler(logDir));\n\n // GET /server-logs - Get server logs in ServerLog format (compatible with frontend)\n router.get('/server-logs', createGetServerLogsHandler(logDir));\n \n // GET /trace/trigger/list - get all automation trigger list in trace.log\n router.get('/trace/trigger/list', createGetTriggerListHandler(logDir));\n\n // GET /trace/trigger/:instanceID - get automation trigger detail by instanceID in server.log\n router.get('/trace/trigger/:instanceID', createGetTriggerDetailHandler(logDir));\n\n router.get('/health', createHealthCheckHandler());\n\n return router;\n}\n","import { promises as fs } from 'node:fs';\nimport { isAbsolute, join, relative } from 'node:path';\nimport type { LogEntry } from './types';\nimport { matchesPathPattern } from './helper/path-matcher';\n\n/**\n * Resolve log directory path\n */\nexport function resolveLogDir(provided?: string): string {\n if (!provided) {\n return join(process.cwd(), 'logs');\n }\n return isAbsolute(provided) ? provided : join(process.cwd(), provided);\n}\n\n/**\n * Get relative path from current working directory\n */\nexport function getRelativePath(filePath: string): string {\n return relative(process.cwd(), filePath);\n}\n\n/**\n * Check if file exists\n */\nexport async function fileExists(filePath: string): Promise<boolean> {\n try {\n await fs.access(filePath);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Parse a log line as JSON\n */\nexport function parseLogLine(line: string): LogEntry | undefined {\n const trimmed = line.trim();\n if (!trimmed) return undefined;\n\n try {\n return JSON.parse(trimmed) as LogEntry;\n } catch {\n return undefined;\n }\n}\n\n/**\n * Extract number from message using regex pattern\n */\nexport function extractNumber(message: string, pattern: RegExp): number | undefined {\n if (typeof message !== 'string') return undefined;\n\n const match = message.match(pattern);\n if (!match) return undefined;\n\n const value = Number(match[1]);\n return Number.isFinite(value) ? value : undefined;\n}\n\n/**\n * Parse limit parameter with default and max value\n */\nexport function parseLimit(value: string | undefined, defaultValue: number, maxValue: number): number {\n if (typeof value !== 'string' || !value.trim()) {\n return defaultValue;\n }\n\n const parsed = Number(value);\n if (Number.isFinite(parsed) && parsed > 0) {\n return Math.min(Math.floor(parsed), maxValue);\n }\n\n return defaultValue;\n}\n\n/**\n * Parse positive integer with fallback\n */\nexport function parsePositiveInt(value: string | undefined, fallback: number): number {\n if (typeof value !== 'string' || !value.trim()) {\n return fallback;\n }\n\n const parsed = Number(value);\n return Number.isFinite(parsed) && parsed > 0 ? 
Math.floor(parsed) : fallback;\n}\n\n/**\n * Resolve log file path with security checks\n */\nexport function resolveLogFilePath(baseDir: string, fileName: string): string {\n const sanitized = fileName.replace(/\\\\/g, '/');\n const segments = sanitized.split('/').filter(Boolean);\n\n if (segments.some((segment) => segment === '..')) {\n throw new Error('Invalid log file path');\n }\n\n const resolved = join(baseDir, segments.join('/'));\n const rel = relative(baseDir, resolved);\n\n if (rel.startsWith('..')) {\n throw new Error('Access to the specified log file is denied');\n }\n\n return resolved;\n}\n\n/**\n * Check if actual request path matches expected OpenAPI/Swagger path pattern\n *\n * @param actualPath - The actual request path from logs (e.g., /api/users/123)\n * @param pattern - The OpenAPI path pattern to match against (e.g., /api/users/{id})\n * @returns true if the path matches the pattern\n *\n * @example\n * matchesPath('/api/users/123', '/api/users/{id}') // true\n * matchesPath('/api/users/123', '/api/users') // true (prefix match)\n * matchesPath('/api/posts/456', '/api/users/{id}') // false\n */\nexport function matchesPath(actualPath: string | undefined, pattern: string): boolean {\n return matchesPathPattern(actualPath, pattern);\n}\n\n/**\n * Check if actual request method matches expected HTTP method\n *\n * @param actualMethod - The actual request method from logs (e.g., 'GET', 'POST')\n * @param expectedMethod - The expected HTTP method to match against (e.g., 'GET')\n * @returns true if the method matches (case-insensitive)\n *\n * @example\n * matchesMethod('GET', 'GET') // true\n * matchesMethod('get', 'GET') // true\n * matchesMethod('POST', 'GET') // false\n * matchesMethod(undefined, 'GET') // false\n */\nexport function matchesMethod(actualMethod: string | undefined, expectedMethod: string): boolean {\n if (!actualMethod || !expectedMethod) {\n return false;\n }\n return actualMethod.toUpperCase() === expectedMethod.toUpperCase();\n}\n\n\n/**\n * Serialize error for JSON response\n */\nexport function serializeError(error: unknown): { name?: string; message: string } {\n return error instanceof Error\n ? { name: error.name, message: error.message }\n : { message: String(error) };\n}\n","/**\n * Path matcher utility for matching request paths against OpenAPI/Swagger/NestJS path patterns\n *\n * Note: Input paths should be clean request paths without query strings or hash fragments\n *\n * Supports:\n * - Exact matching: /api/users === /api/users (only)\n * - Path parameters: /api/users/{id} matches /api/users/123\n * - Nested parameters: /api/users/{userId}/posts/{postId}\n * - Wildcard (asterisk): /api/star/users matches /api/v1/users (use * for single segment)\n * - Recursive wildcard: /files/star-star matches /files/a/b/c (use two asterisks for multiple segments)\n *\n * Note: Prefix matching has been removed. 
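resolveLogFilePath is the only gate between the :fileName route parameter and the file system: backslashes are normalized, '..' segments are rejected, and the resolved path must stay under the log directory. A quick illustration of what gets through (resolveLogFilePath is an internal util rather than a public export, so the relative import and the base directory are assumptions):

```typescript
import { resolveLogFilePath } from './utils'; // internal module shown above

const baseDir = '/srv/app/logs'; // placeholder log directory

for (const name of ['server.log', 'sub/dir/app.log', 'sub\\dir\\app.log', '../etc/passwd']) {
  try {
    // Plain and nested names resolve under baseDir; backslashes are treated as '/'.
    console.log(name, '->', resolveLogFilePath(baseDir, name));
  } catch (error) {
    // '..' segments (and anything escaping baseDir) are rejected before any file I/O.
    console.log(name, '-> rejected:', (error as Error).message);
  }
}
```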
Use slash-two-asterisks for matching sub-paths:\n * - /api/users - matches /api/users only\n * - /api/users/two-asterisks - matches /api/users, /api/users/123, /api/users/123/posts, etc.\n */\n\n/**\n * Convert path pattern to regex, supporting wildcards and path parameters\n *\n * Examples:\n * /api/users/{id} -> /^\\/api\\/users\\/[^/]+$/\n * /api/star/users -> /^\\/api\\/[^/]+\\/users$/\n * /files/double-star -> /^\\/files\\/.+$/\n * /api/v{version}/users -> /^\\/api\\/v[^/]+\\/users$/\n */\nfunction pathPatternToRegex(pattern: string): RegExp {\n // Escape special regex characters except {}, *, /\n let regexPattern = pattern.replace(/[.+?^$|()[\\]\\\\]/g, '\\\\$&');\n\n // Handle recursive wildcard /** (must be at the end or followed by /)\n // /files/** -> /files/.+ (matches one or more path segments)\n // This follows NestJS routing convention where ** requires at least one segment\n regexPattern = regexPattern.replace(/\\/\\*\\*$/, '/.+');\n regexPattern = regexPattern.replace(/\\/\\*\\*\\//g, '/(?:.+/)?');\n\n // Handle single wildcard * (matches exactly one path segment)\n // /api/*/users -> /api/[^/]+/users\n regexPattern = regexPattern.replace(/\\*/g, '[^/]+');\n\n // Replace path parameters {param} with regex group that matches anything except /\n regexPattern = regexPattern.replace(/\\{[^}]+\\}/g, '[^/]+');\n\n // Anchor to start and end\n return new RegExp(`^${regexPattern}$`);\n}\n\n/**\n * Check if an actual path matches an OpenAPI/Swagger/NestJS path pattern\n *\n * @param actualPath - The actual request path (e.g., /api/users/123)\n * @param pattern - The path pattern (e.g., /api/users/{id})\n * @returns true if the actual path matches the pattern\n *\n * @example\n * // Exact match\n * matchesPathPattern('/api/users', '/api/users') // true\n * matchesPathPattern('/api/users/123', '/api/users') // false\n *\n * // Path parameters\n * matchesPathPattern('/api/users/123', '/api/users/{id}') // true\n *\n * // Wildcards\n * matchesPathPattern('/api/v1/users', '/api/star/users') // true (use * not star)\n * matchesPathPattern('/api/v2/users', '/api/star/users') // true\n *\n * // Recursive wildcards (use two asterisks, not /**!)\n * matchesPathPattern('/files/a/b/c', '/files/starstar') // true (use ** not starstar)\n * matchesPathPattern('/files/x', '/files/starstar') // true\n */\nexport function matchesPathPattern(actualPath: string | undefined, pattern: string): boolean {\n if (!actualPath || !pattern) {\n return false;\n }\n\n // Normalize paths: remove trailing slashes and query strings\n const normalizedActual = normalizePathForMatching(actualPath);\n const normalizedPattern = normalizePathForMatching(pattern);\n\n // Exact match (no parameters or wildcards)\n if (normalizedActual === normalizedPattern) {\n return true;\n }\n\n // Check if pattern contains special characters (parameters, wildcards)\n if (hasSpecialPatterns(normalizedPattern)) {\n const regex = pathPatternToRegex(normalizedPattern);\n return regex.test(normalizedActual);\n }\n\n // No match - exact paths that are different\n return false;\n}\n\n/**\n * Extract path parameters from actual path using pattern\n *\n * Note: This only extracts named parameters {param}, not wildcards\n *\n * @param actualPath - The actual request path\n * @param pattern - The path pattern with parameters\n * @returns Object with parameter names and values, or null if no match\n *\n * @example\n * extractPathParams('/api/users/123', '/api/users/{id}')\n * // { id: '123' }\n *\n * extractPathParams('/api/users/123/posts/456', 
'/api/users/{userId}/posts/{postId}')\n * // { userId: '123', postId: '456' }\n *\n * // Wildcards are not captured (use asterisk)\n * extractPathParams('/api/v1/users/123', '/api/asterisk/users/{id}')\n * // { id: '123' }\n */\nexport function extractPathParams(\n actualPath: string,\n pattern: string,\n): Record<string, string> | null {\n const normalizedActual = normalizePathForMatching(actualPath);\n const normalizedPattern = normalizePathForMatching(pattern);\n\n // Extract parameter names from pattern (only {param} style)\n const paramNames: string[] = [];\n const paramRegex = /\\{([^}]+)\\}/g;\n let match: RegExpExecArray | null;\n\n while ((match = paramRegex.exec(normalizedPattern)) !== null) {\n paramNames.push(match[1]);\n }\n\n if (paramNames.length === 0) {\n // No named parameters in pattern\n return normalizedActual === normalizedPattern ? {} : null;\n }\n\n // Build regex with capturing groups for named parameters\n let regexPattern = normalizedPattern.replace(/[.+?^$|()[\\]\\\\]/g, '\\\\$&');\n\n // Replace wildcards with non-capturing groups\n regexPattern = regexPattern.replace(/\\/\\*\\*$/, '/.+');\n regexPattern = regexPattern.replace(/\\/\\*\\*\\//g, '/(?:.+/)?');\n regexPattern = regexPattern.replace(/\\*/g, '[^/]+');\n\n // Replace named parameters with capturing groups\n regexPattern = regexPattern.replace(/\\{[^}]+\\}/g, '([^/]+)');\n\n const regex = new RegExp(`^${regexPattern}$`);\n\n const result = regex.exec(normalizedActual);\n if (!result) {\n return null;\n }\n\n // Map parameter names to captured values\n const params: Record<string, string> = {};\n paramNames.forEach((name, index) => {\n params[name] = result[index + 1]; // index + 1 because result[0] is the full match\n });\n\n return params;\n}\n\n/**\n * Check if a pattern contains special matching characters\n *\n * @param pattern - The path pattern to check\n * @returns true if the pattern contains parameters, wildcards, etc.\n *\n * @example\n * hasSpecialPatterns('/api/users/{id}') // true\n * hasSpecialPatterns('/api/asterisk/users') // true (with actual asterisk character)\n * hasSpecialPatterns('/files/double-asterisk') // true (with actual double asterisk)\n * hasSpecialPatterns('/api/users') // false\n */\nexport function hasSpecialPatterns(pattern: string): boolean {\n return /[{*]/.test(pattern);\n}\n\n/**\n * Check if a pattern contains path parameters (not wildcards)\n *\n * @param pattern - The path pattern to check\n * @returns true if the pattern contains {param} style parameters\n *\n * @example\n * hasPathParameters('/api/users/{id}') // true\n * hasPathParameters('/api/asterisk/users') // false (with actual asterisk character)\n * hasPathParameters('/api/users') // false\n */\nexport function hasPathParameters(pattern: string): boolean {\n return /\\{[^}]+\\}/.test(pattern);\n}\n\n/**\n * Check if a pattern contains wildcards\n *\n * @param pattern - The path pattern to check\n * @returns true if the pattern contains asterisk or double asterisk\n *\n * @example\n * hasWildcards('/api/asterisk/users') // true (with actual asterisk character)\n * hasWildcards('/files/double-asterisk') // true (with actual double asterisk)\n * hasWildcards('/api/users/{id}') // false\n */\nexport function hasWildcards(pattern: string): boolean {\n return pattern.includes('*');\n}\n\n/**\n * Normalize a path for matching by:\n * - Removing trailing slashes\n * - Removing duplicate slashes\n *\n * @param path - The path to normalize\n * @returns Normalized path\n *\n * @example\n * 
normalizePathForMatching('/api/users/') // '/api/users'\n * normalizePathForMatching('/api//users') // '/api/users'\n */\nexport function normalizePathForMatching(path: string): string {\n return path\n .replace(/\\/+/g, '/') // Replace multiple slashes with single slash\n .replace(/\\/+$/, ''); // Remove trailing slash\n}\n\n/**\n * Normalize a path (deprecated, use normalizePathForMatching)\n * @deprecated Use normalizePathForMatching instead\n */\nexport function normalizePath(path: string): string {\n return normalizePathForMatching(path);\n}\n","import { join } from 'node:path';\nimport type { Request, Response } from 'express';\n\nimport { readLogEntriesByTrace, readRecentTraceCalls, readLogFilePage, readServerLogs, readTriggerList, readTriggerDetail } from './services';\nimport {\n getRelativePath,\n parseLimit,\n parsePositiveInt,\n resolveLogFilePath,\n serializeError,\n} from './utils';\n\n/**\n * Handle not found error\n */\nfunction handleNotFound(res: Response, filePath: string, message = 'Log file not found'): void {\n res.status(404).json({ message: `${message}: ${getRelativePath(filePath)}` });\n}\n\n/**\n * Handle generic error\n */\nfunction handleError(res: Response, error: unknown, message = 'Failed to read log file'): void {\n res.status(500).json({ message, error: serializeError(error) });\n}\n\n/**\n * Create handler for getting log entries by trace ID\n */\nexport function createGetTraceEntriesHandler(logDir: string) {\n const appLogPath = join(logDir, 'server.log');\n\n return async (req: Request, res: Response) => {\n const traceId = (req.params.traceId || '').trim();\n if (!traceId) {\n return res.status(400).json({ message: 'traceId is required' });\n }\n\n const limit = parseLimit(req.query.limit as string | undefined, 200, 1000);\n\n try {\n const entries = await readLogEntriesByTrace(appLogPath, traceId, limit);\n if (!entries) {\n return handleNotFound(res, appLogPath);\n }\n res.json({\n file: getRelativePath(appLogPath),\n traceId,\n count: entries.length,\n entries,\n });\n } catch (error) {\n handleError(res, error);\n }\n };\n}\n\n/**\n * Create handler for getting recent trace calls\n */\nexport function createGetRecentTracesHandler(logDir: string) {\n const traceLogPath = join(logDir, 'trace.log');\n\n return async (req: Request, res: Response) => {\n const page = parsePositiveInt(req.query.page as string | undefined, 1);\n const pageSize = parseLimit(req.query.pageSize as string | undefined, 10, 100);\n const pathFilter = typeof req.query.path === 'string' ? req.query.path.trim() : undefined;\n const methodFilter = typeof req.query.method === 'string' ? 
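matchesPathPattern and extractPathParams are what the path filter in /trace/recent runs on, and they can be exercised directly. The calls below follow the semantics documented above, written with literal * and ** wildcards (the doc comments spell them out as "star" and "double-star" only because a literal */ would terminate the block comment); the relative import targets the internal helper module:

```typescript
import { matchesPathPattern, extractPathParams } from './helper/path-matcher';

// Exact and parameterised matches
matchesPathPattern('/api/users', '/api/users');           // true
matchesPathPattern('/api/users/123', '/api/users');       // false (prefix matching was removed)
matchesPathPattern('/api/users/123', '/api/users/{id}');  // true

// Wildcards: * matches exactly one segment, ** matches one or more segments
matchesPathPattern('/api/v1/users', '/api/*/users');      // true
matchesPathPattern('/files/a/b/c', '/files/**');          // true

// Parameter extraction only captures {param} placeholders
extractPathParams('/api/users/123/posts/456', '/api/users/{userId}/posts/{postId}');
// -> { userId: '123', postId: '456' }
```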
req.query.method.trim().toUpperCase() : undefined;\n\n try {\n const result = await readRecentTraceCalls(traceLogPath, page, pageSize, pathFilter, methodFilter);\n if (!result) {\n return handleNotFound(res, traceLogPath);\n }\n res.json({\n file: getRelativePath(traceLogPath),\n ...result,\n path: pathFilter || null,\n method: methodFilter || null,\n count: result.calls.length,\n });\n } catch (error) {\n handleError(res, error, 'Failed to read trace log');\n }\n };\n}\n\n/**\n * Create handler for getting log file by name\n */\nexport function createGetLogFileHandler(logDir: string) {\n return async (req: Request, res: Response) => {\n const fileName = (req.params.fileName || '').trim();\n if (!fileName) {\n return res.status(400).json({ message: 'fileName is required' });\n }\n\n const page = parsePositiveInt(req.query.page as string | undefined, 1);\n const pageSize = parseLimit(req.query.pageSize as string | undefined, 200, 2000);\n\n try {\n const filePath = resolveLogFilePath(logDir, fileName);\n const result = await readLogFilePage(filePath, page, pageSize);\n if (!result) {\n return handleNotFound(res, filePath);\n }\n\n res.json({\n file: getRelativePath(filePath),\n ...result,\n });\n } catch (error) {\n handleError(res, error, 'Failed to read specified log file');\n }\n };\n}\n\n/**\n * Create handler for getting server logs\n *\n * Reads from 4 log sources:\n * - server.log (Pino JSON)\n * - trace.log (Pino JSON)\n * - server.std.log (plain text)\n * - client.std.log (plain text)\n *\n * Query parameters:\n * - limit: number of logs to return (default: 100, max: 1000)\n * - offset: offset for pagination (default: 0)\n * - levels: comma-separated log levels (e.g., \"error,warn\")\n * - sources: comma-separated sources (e.g., \"server,trace\")\n */\nexport function createGetServerLogsHandler(logDir: string) {\n return async (req: Request, res: Response) => {\n const limit = parseLimit(req.query.limit as string | undefined, 100, 1000);\n const offset = parsePositiveInt(req.query.offset as string | undefined, 0);\n\n // Parse levels filter\n const levels = req.query.levels\n ? String(req.query.levels).split(',').map(l => l.trim()).filter(Boolean)\n : undefined;\n\n // Parse sources filter\n const sources = req.query.sources\n ? String(req.query.sources).split(',').map(s => s.trim()).filter(Boolean)\n : undefined;\n\n try {\n const result = await readServerLogs(logDir, {\n limit,\n offset,\n levels,\n sources,\n });\n\n if (!result) {\n return res.status(404).json({\n message: 'No server log files found',\n hint: 'Expected files: server.log, trace.log, server.std.log, client.std.log',\n });\n }\n\n res.json(result);\n } catch (error) {\n handleError(res, error, 'Failed to read server logs');\n }\n }\n}\n/**\n * Create handler for getting all automation trigger list in trace.log\n * filter by request_body.trigger & limit & path=/__innerapi__/automation/invoke\n */\nexport function createGetTriggerListHandler(logDir: string) {\n const traceLogPath = join(logDir, 'trace.log');\n\n return async (req: Request, res: Response) => {\n const trigger = typeof req.query.trigger === 'string' ? req.query.trigger.trim() : undefined;\n if (!trigger) {\n return res.status(400).json({ message: 'trigger is required' });\n }\n\n const triggerID = typeof req.query.triggerID === 'string' ? req.query.triggerID.trim() : undefined;\n const path = typeof req.query.path === 'string' ? 
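Because the dev-logs router is mounted at computeMountPath(basePath, '/dev/logs'), its endpoints end up under e.g. /api/dev/logs/... when basePath is '/api'. A browser-side sketch of querying them, where the origin and basePath are assumptions about the local dev setup:

```typescript
// Assumed dev setup: basePath '/api', dev server origin http://localhost:8080.
const base = 'http://localhost:8080/api/dev/logs';

async function inspectRecentErrors(): Promise<void> {
  // Last 50 error/warn entries across the four log sources.
  const serverLogs = await fetch(`${base}/server-logs?limit=50&levels=error,warn`)
    .then((r) => r.json());
  console.log(serverLogs.total, 'matching entries');

  // Recent traced HTTP calls, filtered by an OpenAPI-style path pattern and method.
  const pattern = encodeURIComponent('/api/users/{id}');
  const recent = await fetch(`${base}/trace/recent?page=1&pageSize=10&path=${pattern}&method=GET`)
    .then((r) => r.json());

  // Full entry list for the newest matching trace, if any.
  const first = recent.calls[0];
  if (first) {
    const detail = await fetch(`${base}/app/trace/${first.traceId}`).then((r) => r.json());
    console.log(detail.count, 'entries for trace', first.traceId);
  }
}
```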
req.query.path.trim() : '/__innerapi__/automation/invoke';\n const limit = parseLimit(req.query.limit as string | undefined, 10, 200);\n\n try {\n const result = await readTriggerList(traceLogPath, trigger, path, limit, triggerID);\n if (!result) {\n return handleNotFound(res, traceLogPath);\n }\n res.json({\n file: getRelativePath(traceLogPath),\n path,\n ...result,\n });\n } catch (error) {\n handleError(res, error, 'Failed to read trace log');\n }\n };\n}\n\nexport function createGetTriggerDetailHandler(logDir: string) {\n const traceLogPath = join(logDir, 'server.log');\n\n return async (req: Request, res: Response) => {\n const instanceID = (req.params.instanceID || '').trim();\n if (!instanceID) {\n return res.status(400).json({ message: 'instanceID is required' });\n }\n\n const path = typeof req.query.path === 'string' ? req.query.path.trim() : '/__innerapi__/automation/invoke';\n\n try {\n const result = await readTriggerDetail(traceLogPath, path, instanceID);\n if (!result) {\n return handleNotFound(res, traceLogPath);\n }\n res.json({\n file: getRelativePath(traceLogPath),\n ...result,\n });\n } catch (error) {\n handleError(res, error, 'Failed to read trace log');\n }\n };\n}","import { createReadStream, promises as fs } from 'node:fs';\nimport { createInterface } from 'node:readline';\nimport type {\n LogEntry,\n TraceBuilder,\n RecentTraceCallsResponse,\n LogFilePageResponse,\n ServerLog,\n ServerLogResponse,\n} from './types';\nimport { fileExists, parseLogLine, extractNumber, matchesPath, matchesMethod } from './utils';\n\n/**\n * Read log entries by trace ID\n */\nexport async function readLogEntriesByTrace(\n filePath: string,\n traceId: string,\n limit: number,\n): Promise<LogEntry[] | undefined> {\n const exists = await fileExists(filePath);\n if (!exists) {\n return undefined;\n }\n\n const matches: LogEntry[] = [];\n const stream = createReadStream(filePath, { encoding: 'utf8' });\n const rl = createInterface({ input: stream, crlfDelay: Infinity });\n\n for await (const line of rl) {\n const entry = parseLogLine(line);\n if (!entry) continue;\n if (entry.trace_id !== traceId) continue;\n\n matches.push(entry);\n if (limit > 0 && matches.length > limit) {\n matches.shift();\n }\n }\n\n rl.close();\n stream.close();\n\n return matches;\n}\n\n/**\n * Read recent trace calls with pagination\n */\nexport async function readRecentTraceCalls(\n filePath: string,\n page: number,\n pageSize: number,\n pathFilter?: string,\n methodFilter?: string,\n): Promise<RecentTraceCallsResponse | undefined> {\n if (!(await fileExists(filePath))) {\n return undefined;\n }\n\n const config = {\n maxEntriesPerTrace: 10,\n chunkSize: 64 * 1024,\n };\n\n const builders = new Map<string, TraceBuilder>();\n const completedCalls: TraceBuilder[] = [];\n\n const createTraceBuilder = (traceId: string): TraceBuilder => ({\n traceId,\n entries: [],\n method: undefined,\n path: undefined,\n startTime: undefined,\n endTime: undefined,\n statusCode: undefined,\n durationMs: undefined,\n hasCompleted: false,\n });\n\n const updateBuilderMetadata = (builder: TraceBuilder, entry: LogEntry): void => {\n if (entry.method && !builder.method) builder.method = String(entry.method);\n if (entry.path && !builder.path) builder.path = String(entry.path);\n\n builder.entries.push(entry);\n if (builder.entries.length > config.maxEntriesPerTrace) {\n builder.entries.shift();\n }\n };\n\n const handleRequestCompleted = (builder: TraceBuilder, entry: LogEntry, message: string): void => {\n builder.hasCompleted = true;\n 
builder.endTime = entry.time;\n builder.statusCode = extractNumber(message, /status_code:\\s*(\\d+)/);\n builder.durationMs = extractNumber(message, /duration_ms:\\s*(\\d+)/);\n if (!builder.path && entry.path) {\n builder.path = String(entry.path);\n }\n\n const pathMatches = !pathFilter || matchesPath(builder.path, pathFilter);\n const methodMatches = !methodFilter || matchesMethod(builder.method, methodFilter);\n const shouldInclude = pathMatches && methodMatches;\n\n if (shouldInclude) {\n completedCalls.push(builder);\n }\n };\n\n const processLogEntry = (entry: LogEntry): void => {\n const { trace_id: traceId, message = '' } = entry;\n if (!traceId) return;\n\n let builder = builders.get(traceId);\n if (!builder) {\n builder = createTraceBuilder(traceId);\n builders.set(traceId, builder);\n }\n\n updateBuilderMetadata(builder, entry);\n\n if (!builder.hasCompleted && (message.includes('HTTP request completed') || message.includes('HTTP request failed'))) {\n handleRequestCompleted(builder, entry, message);\n }\n\n if (message.includes('HTTP request started') && !builder.startTime) {\n builder.startTime = entry.time;\n }\n };\n\n const processLine = (line: string): void => {\n const entry = parseLogLine(line);\n if (entry?.trace_id) {\n processLogEntry(entry);\n }\n };\n\n await readFileReverse(filePath, config.chunkSize, processLine);\n\n return buildPaginatedResponse(completedCalls, page, pageSize);\n}\n\n/**\n * Read file in reverse order (from end to beginning)\n */\nasync function readFileReverse(\n filePath: string,\n chunkSize: number,\n processLine: (line: string) => void,\n): Promise<void> {\n const handle = await fs.open(filePath, 'r');\n\n try {\n const stats = await handle.stat();\n let position = stats.size;\n let remainder = '';\n\n while (position > 0) {\n const length = Math.min(chunkSize, position);\n position -= length;\n\n const buffer = Buffer.alloc(length);\n await handle.read(buffer, 0, length, position);\n\n let chunk = buffer.toString('utf8');\n if (remainder) {\n chunk += remainder;\n remainder = '';\n }\n\n const lines = chunk.split('\\n');\n remainder = lines.shift() ?? '';\n\n for (let i = lines.length - 1; i >= 0; i -= 1) {\n if (lines[i]) {\n processLine(lines[i]);\n }\n }\n }\n\n if (remainder) {\n processLine(remainder);\n }\n } finally {\n await handle.close();\n }\n}\n\n/**\n * Build paginated response from trace builders\n */\nfunction buildPaginatedResponse(\n items: TraceBuilder[],\n page: number,\n pageSize: number,\n): RecentTraceCallsResponse {\n const totalItems = items.length;\n const totalPages = totalItems === 0 ? 
0 : Math.ceil(totalItems / pageSize);\n const startIndex = (page - 1) * pageSize;\n const endIndex = Math.min(startIndex + pageSize, totalItems);\n\n const pagedItems = items.slice(startIndex, endIndex).map((builder) => ({\n traceId: builder.traceId,\n method: builder.method,\n path: builder.path,\n startTime: builder.startTime,\n endTime: builder.endTime,\n statusCode: builder.statusCode,\n durationMs: builder.durationMs,\n entries: builder.entries.slice().reverse(),\n }));\n\n return {\n page,\n pageSize,\n totalCalls: totalItems,\n totalPages,\n calls: pagedItems,\n };\n}\n\n/**\n * Read log file page with pagination\n */\nexport async function readLogFilePage(\n filePath: string,\n page: number,\n pageSize: number,\n): Promise<LogFilePageResponse | undefined> {\n if (!(await fileExists(filePath))) {\n return undefined;\n }\n\n const capacity = page * pageSize;\n const buffer: string[] = [];\n let totalLines = 0;\n\n const stream = createReadStream(filePath, { encoding: 'utf8' });\n const rl = createInterface({ input: stream, crlfDelay: Infinity });\n\n try {\n for await (const line of rl) {\n buffer.push(line);\n if (buffer.length > capacity) {\n buffer.shift();\n }\n totalLines += 1;\n }\n } finally {\n rl.close();\n stream.close();\n }\n\n const totalPages = totalLines === 0 ? 0 : Math.ceil(totalLines / pageSize);\n\n if (buffer.length === 0) {\n return { page, pageSize, totalLines, totalPages, lines: [] };\n }\n\n const startIndex = Math.max(totalLines - page * pageSize, 0);\n const endIndex = Math.max(totalLines - (page - 1) * pageSize, 0);\n const bufferStartIndex = totalLines - buffer.length;\n\n const lines: string[] = [];\n for (let i = buffer.length - 1; i >= 0; i -= 1) {\n const lineIndex = bufferStartIndex + i;\n if (lineIndex >= startIndex && lineIndex < endIndex) {\n lines.push(buffer[i]);\n }\n }\n\n return {\n page,\n pageSize,\n totalLines,\n totalPages,\n lines: lines.reverse(),\n };\n}\n\n/**\n * Read ServerLog logs from multiple sources\n *\n * Reads from 4 log files:\n * - server.log (Pino JSON)\n * - trace.log (Pino JSON)\n * - server.std.log (plain text)\n * - client.std.log (plain text)\n *\n * @param logDir - Log directory path\n * @param options - Query options (limit, offset, levels, sources)\n * @returns ServerLogResponse or undefined if no logs found\n */\nexport async function readServerLogs(\n logDir: string,\n options: {\n limit?: number;\n offset?: number;\n levels?: string[];\n sources?: string[];\n } = {}\n): Promise<ServerLogResponse | undefined> {\n const limit = options.limit || 100;\n const offset = options.offset || 0;\n const sources = options.sources || ['server', 'trace', 'server-std', 'client-std'];\n\n const allLogs: ServerLog[] = [];\n const errors: string[] = [];\n\n // Read logs from all sources\n for (const source of sources) {\n try {\n const logs = await readLogsBySource(logDir, source);\n allLogs.push(...logs);\n } catch (error) {\n const errorMsg = `Failed to read ${source}: ${error instanceof Error ? error.message : String(error)}`;\n errors.push(errorMsg);\n console.warn(`[readServerLogs] ${errorMsg}`);\n // Continue reading other sources even if one fails\n }\n }\n\n // If no logs at all, return undefined\n if (allLogs.length === 0) {\n if (errors.length > 0) {\n console.warn(`[readServerLogs] No logs found. 
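readFileReverse is what keeps /trace/recent fast on large logs: it walks the file from the end in fixed-size chunks and carries the partial first line of each chunk over to the next read. The same idea as a small standalone helper (name and signature invented here; like the original, it does not try to handle multi-byte characters split across chunk boundaries):

```typescript
import { open } from 'node:fs/promises';

// Hypothetical helper: return the last `maxLines` non-empty lines of a file
// without reading it front-to-back, using the same chunked reverse scan as above.
async function tailLines(filePath: string, maxLines: number, chunkSize = 64 * 1024): Promise<string[]> {
  const handle = await open(filePath, 'r');
  try {
    const { size } = await handle.stat();
    let position = size;
    let remainder = '';
    const lines: string[] = [];

    while (position > 0 && lines.length < maxLines) {
      const length = Math.min(chunkSize, position);
      position -= length;
      const buffer = Buffer.alloc(length);
      await handle.read(buffer, 0, length, position);

      // Prepend the carried-over partial line from the previously read (later) chunk.
      const chunk = buffer.toString('utf8') + remainder;
      const parts = chunk.split('\n');
      remainder = parts.shift() ?? ''; // may still be a partial line from earlier bytes

      for (let i = parts.length - 1; i >= 0 && lines.length < maxLines; i -= 1) {
        if (parts[i]) lines.push(parts[i]);
      }
    }

    if (remainder && lines.length < maxLines) lines.push(remainder); // first line of the file
    return lines.reverse(); // restore chronological order
  } finally {
    await handle.close();
  }
}
```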
Errors: ${errors.join(', ')}`);\n }\n return undefined;\n }\n\n // Filter by levels\n let filteredLogs = allLogs;\n if (options.levels && options.levels.length > 0) {\n filteredLogs = allLogs.filter(log => options.levels!.includes(log.level));\n }\n\n // Sort by timestamp descending (newest first)\n filteredLogs.sort((a, b) => b.timestamp - a.timestamp);\n\n const total = filteredLogs.length;\n const paginatedLogs = filteredLogs.slice(offset, offset + limit);\n\n return {\n logs: paginatedLogs,\n total,\n hasMore: offset + limit < total,\n };\n}\n\n/**\n * Read logs from a specific source\n */\nasync function readLogsBySource(\n logDir: string,\n source: string\n): Promise<ServerLog[]> {\n const { join } = await import('node:path');\n\n let filePath: string;\n let parser: (line: string) => ServerLog | null;\n\n // Determine file path and parser\n if (source === 'server') {\n filePath = join(logDir, 'server.log');\n parser = (line) => parsePinoLog(line, 'server');\n } else if (source === 'trace') {\n filePath = join(logDir, 'trace.log');\n parser = (line) => parsePinoLog(line, 'trace');\n } else if (source === 'server-std') {\n filePath = join(logDir, 'server.std.log');\n parser = (line) => parseStdLog(line, 'server-std');\n } else if (source === 'client-std') {\n filePath = join(logDir, 'client.std.log');\n parser = (line) => parseStdLog(line, 'client-std');\n } else {\n console.warn(`[readLogsBySource] Unknown source: ${source}`);\n return [];\n }\n\n // Check if file exists\n if (!(await fileExists(filePath))) {\n console.warn(`[readLogsBySource] File not found: ${filePath}`);\n return [];\n }\n\n const logs: ServerLog[] = [];\n let stream: ReturnType<typeof createReadStream> | null = null;\n let rl: ReturnType<typeof createInterface> | null = null;\n\n try {\n stream = createReadStream(filePath, { encoding: 'utf8' });\n rl = createInterface({ input: stream, crlfDelay: Infinity });\n\n for await (const line of rl) {\n if (!line.trim()) continue;\n\n try {\n const log = parser(line);\n if (log) {\n logs.push(log);\n }\n } catch (parseError) {\n // Ignore individual line parse errors\n // This is expected for malformed lines\n }\n }\n } catch (error) {\n console.error(`[readLogsBySource] Error reading ${filePath}:`, error);\n throw error;\n } finally {\n // Ensure streams are closed\n if (rl) {\n rl.close();\n }\n if (stream) {\n stream.close();\n }\n }\n\n return logs;\n}\n\n/**\n * Parse Pino JSON log to ServerLog format\n */\nfunction parsePinoLog(line: string, source: 'server' | 'trace'): ServerLog | null {\n try {\n const pinoLog = JSON.parse(line);\n\n // Generate UUID (simple implementation)\n const id = generateUUID();\n\n return {\n id,\n level: mapPinoLevelToServerLogLevel(pinoLog.level),\n timestamp: new Date(pinoLog.time).getTime(),\n message: pinoLog.message || pinoLog.msg || '',\n context: pinoLog.context || null,\n traceId: pinoLog.trace_id || null,\n userId: pinoLog.user_id || null,\n appId: pinoLog.app_id || null,\n tenantId: pinoLog.tenant_id || null,\n stack: pinoLog.stack || null,\n meta: {\n pid: pinoLog.pid,\n hostname: pinoLog.hostname,\n path: pinoLog.path,\n method: pinoLog.method,\n statusCode: pinoLog.status_code,\n durationMs: pinoLog.duration_ms,\n ip: pinoLog.ip,\n requestBody: pinoLog.request_body,\n responseBody: pinoLog.response_body,\n },\n tags: [source],\n };\n } catch (error) {\n // Return null for invalid JSON\n return null;\n }\n}\n\n/**\n * Parse plain text log to ServerLog format\n * Format: [2025-11-20 21:48:42] [server] content\n */\nfunction 
parseStdLog(line: string, source: 'server-std' | 'client-std'): ServerLog | null {\n const id = generateUUID();\n\n // Try to extract timestamp and content\n // Format: [YYYY-MM-DD HH:MM:SS] [tag] content\n const match = line.match(/^\\[(\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2})\\] \\[(server|client)\\] (.*)$/);\n\n if (!match) {\n // If format doesn't match, use current time and full line as message\n return {\n id,\n level: 'log',\n timestamp: Date.now(),\n message: line,\n context: null,\n traceId: null,\n userId: null,\n appId: null,\n tenantId: null,\n stack: null,\n meta: null,\n tags: [source],\n };\n }\n\n const [, timeStr, , content] = match;\n\n // Parse timestamp (convert to UTC)\n let timestamp: number;\n try {\n // Assume local time, convert to ISO format\n const isoStr = timeStr.replace(' ', 'T');\n timestamp = new Date(isoStr).getTime();\n\n // If invalid timestamp, use current time\n if (isNaN(timestamp)) {\n timestamp = Date.now();\n }\n } catch (error) {\n timestamp = Date.now();\n }\n\n // Extract log level from content\n const level = extractLogLevel(content);\n\n return {\n id,\n level,\n timestamp,\n message: content,\n context: null,\n traceId: null,\n userId: null,\n appId: null,\n tenantId: null,\n stack: null,\n meta: null,\n tags: [source],\n };\n}\n\n/**\n * Map Pino log level to ServerLogLevel\n */\nfunction mapPinoLevelToServerLogLevel(\n pinoLevel: number | string\n): 'fatal' | 'error' | 'warn' | 'log' | 'debug' | 'verbose' {\n if (typeof pinoLevel === 'string') {\n const lower = pinoLevel.toLowerCase();\n if (lower === 'fatal') return 'fatal';\n if (lower === 'error') return 'error';\n if (lower === 'warn' || lower === 'warning') return 'warn';\n if (lower === 'info' || lower === 'log') return 'log';\n if (lower === 'debug') return 'debug';\n if (lower === 'trace' || lower === 'verbose') return 'verbose';\n return 'log';\n }\n\n // Pino numeric levels\n if (pinoLevel >= 60) return 'fatal';\n if (pinoLevel >= 50) return 'error';\n if (pinoLevel >= 40) return 'warn';\n if (pinoLevel >= 30) return 'log';\n if (pinoLevel >= 20) return 'debug';\n return 'verbose';\n}\n\n/**\n * Extract log level from text content\n */\nfunction extractLogLevel(text: string): 'fatal' | 'error' | 'warn' | 'log' | 'debug' | 'verbose' {\n const lower = text.toLowerCase();\n\n // Check for common error keywords\n if (lower.includes('fatal') || lower.includes('critical')) return 'fatal';\n if (lower.includes('error') || lower.includes('<e>') || lower.includes('✖')) return 'error';\n if (lower.includes('warn') || lower.includes('warning') || lower.includes('<w>') || lower.includes('⚠')) return 'warn';\n if (lower.includes('debug') || lower.includes('<d>')) return 'debug';\n if (lower.includes('verbose') || lower.includes('trace')) return 'verbose';\n\n // Default to log\n return 'log';\n}\n\n/**\n * Generate a simple UUID v4\n * Note: This is a simplified implementation for performance\n */\nfunction generateUUID(): string {\n return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => {\n const r = (Math.random() * 16) | 0;\n const v = c === 'x' ? 
r : (r & 0x3) | 0x8;\n return v.toString(16);\n });\n}\n/**\n * Read all automation trigger list in trace.log\n * filter by request_body.trigger & limit & path=/__innerapi__/automation/invoke\n */\nexport async function readTriggerList(\n filePath: string,\n trigger: string,\n path: string,\n limit?: number,\n triggerID?: string,\n): Promise<RecentTraceCallsResponse | undefined> {\n if (!(await fileExists(filePath))) {\n return undefined;\n }\n\n const config = {\n maxEntriesPerTrace: 10,\n chunkSize: 64 * 1024,\n };\n\n const builders = new Map<string, TraceBuilder>();\n const completedCalls: TraceBuilder[] = [];\n\n const createTraceBuilder = (traceId: string): TraceBuilder => ({\n traceId,\n entries: [],\n method: undefined,\n path: undefined,\n startTime: undefined,\n endTime: undefined,\n statusCode: undefined,\n durationMs: undefined,\n hasCompleted: false,\n });\n\n const shouldIncludeInCompletedCalls = (builder: TraceBuilder): boolean => {\n // Check if already in completedCalls to avoid duplicates\n const alreadyAdded = completedCalls.some(call => call.traceId === builder.traceId);\n if (alreadyAdded) {\n return false;\n }\n\n // Check if this is an automation trigger request\n const isAutomationTrigger = builder.path?.endsWith(path);\n if (!isAutomationTrigger) {\n return false;\n }\n\n // Check trigger filter if provided\n if (trigger && builder.entries.length > 0) {\n // Look for request_body.trigger in the entries\n const requestEntry = builder.entries.find(e => e.request_body?.trigger);\n if (requestEntry?.request_body?.trigger) {\n return String(requestEntry.request_body.trigger) === trigger\n && (triggerID ? requestEntry?.request_body?.triggerID === triggerID : true);\n }\n return false;\n }\n\n return true;\n };\n\n const updateBuilderMetadata = (builder: TraceBuilder, entry: LogEntry): void => {\n if (entry.method && !builder.method) builder.method = String(entry.method);\n if (entry.path && !builder.path) builder.path = String(entry.path);\n\n builder.entries.push(entry);\n if (builder.entries.length > config.maxEntriesPerTrace) {\n builder.entries.shift();\n }\n\n // Check if should be included in completedCalls after each entry update\n // This is important because request_body may be logged after request completion\n if (shouldIncludeInCompletedCalls(builder)) {\n completedCalls.push(builder);\n // Apply limit if specified\n if (limit && completedCalls.length > limit) {\n completedCalls.pop();\n }\n }\n };\n\n const handleRequestCompleted = (builder: TraceBuilder, entry: LogEntry, message: string): void => {\n builder.hasCompleted = true;\n builder.endTime = entry.time;\n builder.statusCode = extractNumber(message, /status_code:\\s*(\\d+)/);\n builder.durationMs = extractNumber(message, /duration_ms:\\s*(\\d+)/);\n if (!builder.path && entry.path) {\n builder.path = String(entry.path);\n }\n\n // Check if should be included in completedCalls after request completion\n // This ensures we catch cases where all conditions are met at completion time\n if (shouldIncludeInCompletedCalls(builder)) {\n completedCalls.push(builder);\n // Apply limit if specified\n if (limit && completedCalls.length > limit) {\n completedCalls.pop();\n }\n }\n };\n\n const processLogEntry = (entry: LogEntry): void => {\n const { trace_id: traceId, message = '' } = entry;\n if (!traceId) return;\n\n let builder = builders.get(traceId);\n if (!builder) {\n builder = createTraceBuilder(traceId);\n builders.set(traceId, builder);\n }\n\n updateBuilderMetadata(builder, entry);\n\n if 
(!builder.hasCompleted && (message.includes('HTTP request completed') || message.includes('HTTP request failed'))) {\n handleRequestCompleted(builder, entry, message);\n }\n\n if (message.includes('HTTP request started') && !builder.startTime) {\n builder.startTime = entry.time;\n }\n };\n\n const processLine = (line: string): void => {\n const entry = parseLogLine(line);\n if (entry?.trace_id) {\n processLogEntry(entry);\n }\n };\n\n await readFileReverse(filePath, config.chunkSize, processLine);\n\n // Return as page 1 with all results (no pagination for trigger list)\n return {\n page: 1,\n pageSize: completedCalls.length,\n totalCalls: completedCalls.length,\n totalPages: 1,\n calls: completedCalls.map((builder) => ({\n traceId: builder.traceId,\n method: builder.method,\n path: builder.path,\n startTime: builder.startTime,\n endTime: builder.endTime,\n statusCode: builder.statusCode,\n durationMs: builder.durationMs,\n entries: builder.entries.slice().reverse(),\n })),\n };\n}\n\n/**\n * Read trigger detail entries by instance ID\n */\nexport async function readTriggerDetail(\n filePath: string,\n path: string,\n instanceID: string,\n): Promise<{ instanceID: string; entries: LogEntry[] } | undefined> {\n const exists = await fileExists(filePath);\n if (!exists) {\n return undefined;\n }\n\n const matches: LogEntry[] = [];\n const stream = createReadStream(filePath, { encoding: 'utf8' });\n const rl = createInterface({ input: stream, crlfDelay: Infinity });\n\n for await (const line of rl) {\n const entry = parseLogLine(line);\n if (!entry) continue;\n\n const isAutomationTrigger = entry.path?.endsWith(path);\n // Check if instanceID is in the message field\n const hasInstanceID = entry.instance_id === instanceID && entry.trigger;\n if (!isAutomationTrigger || !hasInstanceID) continue;\n\n matches.push(entry);\n }\n\n rl.close();\n stream.close();\n\n return {\n instanceID,\n entries: matches,\n };\n}\n","import http from 'node:http';\n\ninterface HealthCheckRouterOptions {\n /** 目标服务端口,默认 3000 */\n targetPort?: number;\n /** 目标服务主机,默认 localhost */\n targetHost?: string;\n /** 健康检查超时时间(毫秒),默认 2000ms */\n timeout?: number;\n}\n\n/**\n * 检查目标服务是否可用\n */\nfunction checkServiceHealth(\n host: string,\n port: number,\n timeout: number\n): Promise<{ available: boolean; responseTime?: number; error?: string }> {\n return new Promise((resolve) => {\n const startTime = Date.now();\n\n const req = http.request(\n {\n hostname: host,\n port,\n path: '/',\n method: 'HEAD',\n timeout,\n },\n (_res) => {\n const responseTime = Date.now() - startTime;\n // 任何响应都表示服务可用(包括错误状态码)\n resolve({\n available: true,\n responseTime,\n });\n }\n );\n\n req.on('timeout', () => {\n req.destroy();\n resolve({\n available: false,\n error: 'Request timeout',\n });\n });\n\n req.on('error', (err) => {\n resolve({\n available: false,\n error: err.message,\n });\n });\n\n req.end();\n });\n}\n\n/**\n * 创建健康检查路由\n */\nexport function createHealthCheckHandler(options: HealthCheckRouterOptions = {}) {\n const {\n targetPort = Number(process.env.SERVER_PORT) || 3000,\n targetHost = 'localhost',\n timeout = 2000,\n } = options;\n return async (_req, res) => {\n try {\n const result = await checkServiceHealth(targetHost, targetPort, timeout);\n\n if (result.available) {\n res.status(200).json({\n status: 'healthy',\n service: `${targetHost}:${targetPort}`,\n responseTime: result.responseTime,\n timestamp: new Date().toISOString(),\n });\n } else {\n res.status(503).json({\n status: 'unhealthy',\n service: 
`${targetHost}:${targetPort}`,\n error: result.error,\n timestamp: new Date().toISOString(),\n });\n }\n } catch (error) {\n res.status(500).json({\n status: 'error',\n service: `${targetHost}:${targetPort}`,\n error: error instanceof Error ? error.message : 'Unknown error',\n timestamp: new Date().toISOString(),\n });\n }\n };\n}\n","import type { RouteMiddleware, MiddlewareContext, RouteInfo } from '../types';\nimport { createDevLogRouter } from './router';\n\ninterface DevLogsMiddlewareOptions {\n /** Directory containing log files */\n logDir?: string;\n}\n\n/**\n * Routes provided by dev logs middleware\n */\nconst DEV_LOGS_ROUTES: RouteInfo[] = [\n {\n method: 'GET',\n path: '/app/trace/:traceId',\n description: 'Get log entries by trace ID',\n },\n {\n method: 'GET',\n path: '/trace/recent',\n description: 'Get recent trace calls with pagination and optional path/method filters',\n },\n {\n method: 'GET',\n path: '/files/:fileName',\n description: 'Get paginated log file content by file name',\n },\n {\n method: 'GET',\n path: '/server-logs',\n description: 'Get server logs in ServerLog format (compatible with frontend)',\n },\n {\n method: 'GET',\n path: '/trace/trigger/list',\n description: 'Get trigger list (automation trigger) in trace.log',\n },\n {\n method: 'GET',\n path: '/trace/trigger/:instanceID',\n description: 'Get trigger detail (automation trigger) in trace.log by instanceID',\n },\n];\n\n/**\n * Creates dev logs middleware for viewing application logs\n * Supports both rspack/webpack and Vite dev servers\n */\nexport function createDevLogsMiddleware(options: DevLogsMiddlewareOptions = {}): RouteMiddleware {\n const { logDir } = options;\n\n return {\n name: 'dev-logs',\n mountPath: '/dev/logs',\n routes: DEV_LOGS_ROUTES,\n\n enabled: (context: MiddlewareContext) => context.isDev,\n\n createRouter: (context: MiddlewareContext) => {\n return createDevLogRouter({ logDir: logDir || context.logDir });\n },\n };\n}\n","import express, { Router } from 'express';\nimport {\n collectLogsHandler,\n collectLogsBatchHandler,\n} from './controller';\nimport { resolveLogDir } from './utils';\n\ninterface RouterOptions {\n logDir?: string;\n fileName?: string;\n}\n\n/**\n * Create dev log router with all routes registered\n */\nexport function createDevLogRouter(options: RouterOptions = {}): Router {\n const logDir = resolveLogDir(options.logDir);\n const router = express.Router();\n\n // POST /collect - Collect logs from client\n // Use express.json() middleware only for this route\n router.post('/collect', express.json(), collectLogsHandler(logDir, options.fileName || 'client.log'));\n router.post('/collect-batch', express.json(), collectLogsBatchHandler(logDir, options.fileName || 'client.log'));\n\n return router;\n}\n","import { Request, Response } from 'express';\nimport { join } from 'path';\nimport fs from 'fs';\n\nimport { serializeError, ensureDir } from './utils';\n\ninterface CollectLogRequest {\n level: string;\n message: string;\n time: string; // ISO String\n source?: string; // Log Source string\n user_id: string; // User ID\n tenant_id: string; // Tenant ID\n app_id: string; // App ID\n}\n\nexport function collectLogsHandler(logDir: string, fileName: string) {\n const filePath = join(logDir, fileName);\n // Ensure log directory exists\n ensureDir(logDir);\n\n return async (req: Request, res: Response) => {\n try {\n const logContent = req.body as CollectLogRequest;\n if(!logContent.message){\n return res.status(400).json({ message: 'message is required' });\n }\n 
const logLine = JSON.stringify({\n ...logContent,\n server_time: new Date().toISOString(),\n }) + '\\n';\n await fs.promises.appendFile(filePath, logLine);\n res.json({\n success: true,\n });\n } catch (error) {\n handleError(res, error, 'Failed to collect logs');\n }\n };\n}\nexport function collectLogsBatchHandler(logDir: string, fileName: string) {\n const filePath = join(logDir, fileName);\n // Ensure log directory exists\n ensureDir(logDir);\n\n return async (req: Request, res: Response) => {\n try {\n const logContents = req.body as Array<CollectLogRequest>;\n if(!Array.isArray(logContents)){\n return res.status(400).json({ message: 'logContents must be an array' });\n }\n const logLines = [];\n for (const logContent of logContents) {\n logLines.push(JSON.stringify({\n ...logContent,\n server_time: new Date().toISOString(),\n }) + '\\n');\n }\n await fs.promises.appendFile(filePath, logLines.join(''));\n res.json({\n success: true,\n });\n } catch (error) {\n handleError(res, error, 'Failed to collect logs');\n }\n };\n}\n\nfunction handleError(res: Response, error: unknown, message = 'Failed to collect logs'): void {\n res.status(500).json({ message, error: serializeError(error) });\n}","import { isAbsolute, join } from 'node:path';\nimport fs from 'node:fs';\n\n/**\n * Resolve log directory path\n */\nexport function resolveLogDir(provided?: string): string {\n if (!provided) {\n return join(process.cwd(), 'logs');\n }\n return isAbsolute(provided) ? provided : join(process.cwd(), provided);\n}\n\n/**\n * Ensure directory exists, create if not\n */\nexport function ensureDir(dir: string): void {\n if (!fs.existsSync(dir)) {\n fs.mkdirSync(dir, { recursive: true });\n }\n}\n\n/**\n * Serialize error for JSON response\n */\nexport function serializeError(error: unknown): { name?: string; message: string } {\n return error instanceof Error\n ? 
{ name: error.name, message: error.message }\n : { message: String(error) };\n}\n","import type { RouteMiddleware, MiddlewareContext, RouteInfo } from '../types';\nimport { createDevLogRouter } from './router';\n\ninterface DevLogsMiddlewareOptions {\n logDir?: string; // log directory\n fileName?: string; // client.log by default\n}\n\n/**\n * Routes provided by dev logs middleware\n */\nconst DEV_LOGS_ROUTES: RouteInfo[] = [\n {\n method: 'POST',\n path: '/collect',\n description: 'Collect logs from client.',\n }\n];\n\n/**\n * Creates dev logs middleware for viewing application logs\n * Supports both rspack/webpack and Vite dev servers\n */\nexport function createCollectLogsMiddleware(options: DevLogsMiddlewareOptions = {}): RouteMiddleware {\n const { logDir } = options;\n\n return {\n name: 'collect-logs',\n mountPath: '/dev/logs',\n routes: DEV_LOGS_ROUTES,\n enabled: (context: MiddlewareContext) => context.isDev,\n createRouter: (context: MiddlewareContext) => {\n return createDevLogRouter({\n logDir: logDir || context.logDir,\n fileName: options.fileName || 'client.log',\n });\n },\n };\n}\n"],"mappings":";;;;AAKO,SAASA,kBAAkBC,UAAgB;AAChD,QAAMC,qBAAqBD,SAASE,WAAW,GAAA,IAC3CF,WACA,IAAIA,QAAAA;AACR,QAAMG,+BAA+BF,mBAAmBG,SAAS,GAAA,IAC7DH,mBAAmBI,MAAM,GAAG,EAAC,IAC7BJ;AACJ,SAAOE;AACT;AARgBJ;;;ACLhB,OAAOO,SAAQ;AACf,OAAOC,WAAU;;;ACDjB,IAAMC,iBAAiB;AAIhB,SAASC,oBAAoBC,QAAc;AAChD,MAAIC,OAAOD,OAAOE,WAAW,QAAA,IAAYF,OAAOG,MAAM,CAAA,IAAKH;AAE3D,SAAOC,KAAKC,WAAWE,cAAAA,GAAiB;AACtCH,WAAOA,KAAKE,MAAMC,eAAeC,MAAM;AACvCJ,WAAOK,qBAAqBL,IAAAA;EAC9B;AAEA,QAAMM,UAAUD,qBAAqBL,IAAAA;AACrC,MAAIM,QAAQF,WAAW,GAAG;AACxB,WAAO,GAAGD,cAAAA;;EACZ;AAEA,SAAO,GAAGA,cAAAA;EAAmBG,OAAAA;AAC/B;AAdgBR;AAgBT,SAASO,qBAAqBE,OAAa;AAChD,MAAIC,UAAUD;AACd,SAAOC,QAAQP,WAAW,MAAA,KAAWO,QAAQP,WAAW,IAAA,GAAO;AAC7DO,cAAUA,QAAQP,WAAW,MAAA,IAAUO,QAAQN,MAAM,CAAA,IAAKM,QAAQN,MAAM,CAAA;EAC1E;AACA,SAAOM;AACT;AANgBH;AAQT,SAASI,wBAAwBT,MAAY;AAClD,SAAOA,KAAKU,QAAQ,WAAW,MAAA;AACjC;AAFgBD;;;AC5BT,SAASE,2BAA2BC,QAAc;AACvD,SAAOA,OAAOC,QAAQ,gDAAgD,EAAA;AACxE;AAFgBF;AAIT,SAASG,8BAA8BF,QAAc;AAC1D,MAAIG,YAAY;AAChB,MAAIC,OAAOJ,OAAOC,QAAQ,6BAA6B,MAAA;AACrDE,iBAAa;AACb,WAAO;EACT,CAAA;AACAC,SAAOA,KAAKH,QAAQ,4BAA4B,MAAA;AAC9CE,iBAAa;AACb,WAAO;EACT,CAAA;AACAC,SAAOA,KAAKH,QAAQ,wCAAwC,MAAA;AAC1DE,iBAAa;AACb,WAAO;EACT,CAAA;AACAC,SAAOA,KAAKH,QAAQ,4BAA4B,MAAA;AAC9CE,iBAAa;AACb,WAAO;EACT,CAAA;AACAC,SAAOA,KAAKH,QAAQ,gCAAgC,MAAA;AAClDE,iBAAa;AACb,WAAO;EACT,CAAA;AACA,SAAO;IAAEC;IAAMD;EAAU;AAC3B;AAvBgBD;;;ACJhB,SAASG,cAAc;AAOhB,SAASC,uBAAuBC,QAAc;AACnD,QAAMC,eAAe;AACrB,QAAMC,UAAyB,CAAA;AAE/B,QAAMC,UAAUH,OAAOI,QAAQH,cAAc,CAACI,OAAOC,aAAqBC,SAAiBC,cAAAA;AACzF,UAAMC,YAAYC,mBAAmBF,SAAAA;AACrC,QAAIC,cAAcH,aAAa;AAC7B,aAAOD;IACT;AACAH,YAAQS,KAAK;MAAEC,MAAMN;MAAaO,IAAIJ;IAAU,CAAA;AAChD,UAAMK,cAAcT,MAAMU,QAAQ,GAAA;AAClC,UAAMC,SAASF,eAAe,IAAIT,MAAMY,MAAMH,WAAAA,IAAe,MAAMP,OAAAA,KAAYC,SAAAA;AAC/E,UAAMU,mBAAmBF,OAAOG,UAAS;AACzC,WAAO,gBAAgBV,SAAAA,IAAaS,gBAAAA;EACtC,CAAA;AAEA,SAAO;IAAEE,MAAMjB;IAASD;EAAQ;AAClC;AAjBgBH;AAmBT,SAASsB,gCAAgCrB,QAAgBE,SAAsB;AACpF,MAAIA,QAAQoB,WAAW,GAAG;AACxB,WAAOtB;EACT;AAEA,SAAOE,QAAQqB,OAAO,CAACC,KAAKC,WAAAA;AAC1B,QAAI,CAACA,OAAOb,QAAQa,OAAOb,SAASa,OAAOZ,IAAI;AAC7C,aAAOW;IACT;AACA,UAAME,UAAU,IAAIC,OAAO,MAAMC,aAAaH,OAAOb,IAAI,CAAA,aAAc,GAAA;AACvE,WAAOY,IAAIpB,QAAQsB,SAAS,GAAGD,OAAOZ,EAAE,IAAI;EAC9C,GAAGb,MAAAA;AACL;AAZgBqB;AAcT,SAASO,aAAaC,OAAa;AACxC,SAAOA,MAAMzB,QAAQ,uBAAuB,MAAA,EAAQA,QAAQ,OAAO,KAAA;AACrE;AAFgBwB;AAQhB,SAASE,YAAYC,KAAW;AAE9B,QAAMC,QAAQD,IAAIE,MAAM,UAAA,EAAYC,OAAOC,OAAAA;AAE3C,MAAIH,MAAMV,WAAW,GAAG;AACtB,WAAO;EACT;AAGA,SAAOU,MACJI,
IAAI,CAACC,MAAMC,UAAAA;AACV,QAAIA,UAAU,GAAG;AACf,aAAOD,KAAKE,YAAW;IACzB;AACA,WAAOF,KAAKG,OAAO,CAAA,EAAGC,YAAW,IAAKJ,KAAKpB,MAAM,CAAA,EAAGsB,YAAW;EACjE,CAAA,EACCG,KAAK,EAAA;AACV;AAjBSZ;AAmBF,SAASpB,mBAAmBiC,MAAY;AAC7C,QAAMC,YAAYC,YAAYF,IAAAA;AAE9B,MAAIlC,YAAYmC,UAAUxC,QAAQ,kBAAkB,GAAA;AAEpDK,cAAYA,UAAUL,QAAQ,OAAO,GAAA;AAErCK,cAAYA,UAAUL,QAAQ,UAAU,EAAA;AAGxCK,cAAYqB,YAAYrB,SAAAA;AAExB,MAAI,CAACA,WAAW;AACdA,gBAAY;EACd;AACA,MAAI,CAAC,aAAaqC,KAAKrC,SAAAA,GAAY;AACjCA,gBAAY,IAAIA,SAAAA;EAClB;AACA,SAAOA;AACT;AAnBgBC;AAqBT,SAASmC,YAAYF,MAAY;AACtC,MAAI,CAAC,eAAeG,KAAKH,IAAAA,GAAO;AAC9B,WAAOA;EACT;AAEA,MAAI;AACF,UAAMI,iBAAiBC,OAAOL,MAAM;MAAEM,UAAU;MAAQC,MAAM;IAAQ,CAAA,EAAGR,KAAK,GAAA;AAC9E,WAAOK,kBAAkBJ;EAC3B,SAASQ,OAAO;AACd,WAAOR;EACT;AACF;AAXgBE;;;ACvFhB,IAAMO,sBAAsB;AAUrB,SAASC,sBAAsBC,QAAc;AAClD,QAAMC,QAAQD,OAAOE,MAAM,IAAA;AAC3B,QAAMC,SAAmB,CAAA;AACzB,MAAIC,WAAW;AACf,QAAMC,YAAsB,CAAA;AAE5B,WAASC,IAAI,GAAGA,IAAIL,MAAMM,QAAQD,KAAK,GAAG;AACxC,UAAME,OAAOP,MAAMK,CAAAA;AAEnB,UAAMG,QAAQD,KAAKC,MAAMC,mBAAAA;AACzB,QAAID,OAAO;AACT,YAAME,WAAWF,MAAM,CAAA;AACvB,YAAMG,UAAUD,aAAa,iBAAiB,gBAAgB;AAE9D,YAAME,eAAeC,wBAAwBb,MAAMK,IAAI,CAAA,GAAIM,OAAAA;AAC3D,UAAIC,cAAc;AAEhBV,eAAOY,KAAKF,YAAAA;AACZT,oBAAY;AACZE,aAAK;MACP,OAAO;AAELD,kBAAUU,KAAKP,KAAKQ,KAAI,CAAA;AACxBb,eAAOY,KAAKP,IAAAA;MACd;AACA;IACF;AAEA,QAAIA,KAAKS,SAAS,UAAA,GAAa;AAC7BZ,gBAAUU,KAAKP,KAAKQ,KAAI,CAAA;IAC1B;AAEAb,WAAOY,KAAKP,IAAAA;EACd;AAEA,SAAO;IACLU,MAAMf,OAAOgB,KAAK,IAAA;IAClBf;IACAC;EACF;AACF;AAxCgBN;AA0CT,SAASe,wBAAwBM,UAA8BR,SAAyC;AAC7G,MAAI,CAACQ,YAAY,CAACA,SAASH,SAAS,UAAA,GAAa;AAC/C,WAAOI;EACT;AAEA,SAAOD,SAASE,QAAQ,YAAY,GAAGV,OAAAA,GAAU;AACnD;AANgBE;;;ACrDhB,OAAOS,QAAQ;AACf,OAAOC,UAAU;AAGV,SAASC,aAAaC,QAAc;AACzC,QAAMC,cAAc;AACpB,QAAMC,QAAQF,OAAOE,MAAMD,WAAAA;AAC3B,MAAI,CAACC,OAAO;AACV,WAAOF;EACT;AAEA,QAAMG,cAAcD,MAAM,CAAA,EACvBE,MAAM,GAAA,EACNC,IAAI,CAACC,OAAOA,GAAGC,KAAI,CAAA,EACnBC,OAAOC,OAAAA,EACPD,OAAO,CAACF,OAAOA,OAAO,cAAcA,OAAO,YAAA;AAI9C,QAAMI,sBAAsBP,YAAYK,OAAO,CAACF,OAAAA;AAC9C,QAAIA,OAAO,aAAa;AAEtB,YAAMK,sBAAsB;AAC5B,aAAOA,oBAAoBC,KAAKZ,MAAAA;IAClC;AACA,WAAO;EACT,CAAA;AAEA,MAAIA,OAAOa,SAAS,UAAA,KAAe,CAACH,oBAAoBG,SAAS,SAAA,GAAY;AAC3EH,wBAAoBI,KAAK,SAAA;EAC3B;AACA,MAAId,OAAOa,SAAS,SAAA,KAAc,CAACH,oBAAoBG,SAAS,QAAA,GAAW;AACzEH,wBAAoBI,KAAK,QAAA;EAC3B;AACA,MAAId,OAAOa,SAAS,qBAAA,KAA0B,CAACH,oBAAoBG,SAAS,oBAAA,GAAuB;AACjGH,wBAAoBI,KAAK,oBAAA;EAC3B;AACA,MAAId,OAAOa,SAAS,SAAA,KAAc,CAACH,oBAAoBG,SAAS,QAAA,GAAW;AACzEH,wBAAoBI,KAAK,QAAA;EAC3B;AACA,MAAId,OAAOa,SAAS,aAAA,KAAkB,CAACH,oBAAoBG,SAAS,YAAA,GAAe;AACjFH,wBAAoBI,KAAK,YAAA;EAC3B;AAEA,QAAMC,SAASC,MAAMC,KAAK,IAAIC,IAAIR,mBAAAA,CAAAA;AAClC,QAAMS,cAAc,YAAYJ,OAAOK,KAAK,IAAA,CAAA;AAC5C,SAAOpB,OAAOqB,QAAQpB,aAAakB,WAAAA;AACrC;AA3CgBpB;AAwET,SAASuB,kBAAkBC,QAAc;AAE9C,MAAIC,OAAOD,OAAOE,QAAQ,iDAAiD,EAAA;AAG3E,QAAMC,gBAAgB;;IAEpBC,KAAKC,QAAQC,WAAW,YAAY,UAAA;;IAEpCF,KAAKC,QAAQC,WAAW,eAAe,UAAA;;IAEvCF,KAAKC,QAAQC,WAAW,kBAAkB,UAAA;IAC1CF,KAAKC,QAAQC,WAAW,qBAAqB,UAAA;;AAG/C,MAAIC;AACJ,aAAWC,gBAAgBL,eAAe;AACxC,QAAIM,GAAGC,WAAWF,YAAAA,GAAe;AAC/BD,qBAAeC;AACf;IACF;EACF;AAEA,MAAI,CAACD,cAAc;AACjBI,YAAQC,KAAK,4EAA4ET,aAAAA;AACzF,WAAOF;EACT;AAEA,SAAOY,mBAAmBZ,MAAMM,YAAAA;AAClC;AA7BgBR;AA+BT,SAASc,mBAAmBZ,MAAcM,cAAoB;AACnE,QAAMO,kBAAkBL,GAAGM,aAAaR,cAAc,MAAA;AAGtD,QAAMS,kBAAkBF,gBACrBZ,QAAQ,yBAAyB,EAAA,EACjCe,KAAI;AAGP,QAAMC,WAAWF,gBAAgBG,SAAS,MAAA,KAAW,CAAClB,KAAKkB,SAAS,oBAAA,KAAyB,CAAClB,KAAKkB,SAAS,oBAAA;AAC5G,QAAMC,kBAAkBJ,gBAAgBG,SAAS,aAAA,KAAkB,CAAClB,KAAKkB,SAAS,YAAA;AAGlF,MAAIC,iBAAiB;AACnBnB,WAAOoB,uBAAuBpB,MAAM,uBAAuB,YAAA;EAC7D;AAGA,MAAIiB,YAAY,CAACjB,KAAKkB,SAAS,oBAAA,KAAyB,CAAClB,KAAKkB,SAAS,oBAAA
,GAAuB;AAC5F,UAAMG,cAAcrB,KAAKsB,MAAM,wDAAA;AAC/B,QAAID,aAAa;AACf,YAAME,cAAcvB,KAAKwB,QAAQH,YAAY,CAAA,CAAE,IAAIA,YAAY,CAAA,EAAGI;AAClEzB,aAAOA,KAAK0B,MAAM,GAAGH,WAAAA,IAAe,yCAAyCvB,KAAK0B,MAAMH,WAAAA;IAC1F;EACF;AAGA,QAAMI,eAAe,GAAGC,cAAAA;;AACxB,MAAIC,iBAAiB;AAErB,MAAI7B,KAAK8B,WAAWH,YAAAA,GAAe;AACjCE,qBAAiBF,aAAaF;EAChC;AAEA,QAAMM,qBAAqB/B,KAAK0B,MAAMG,cAAAA,EAAgBP,MAAM,uBAAA;AAC5D,MAAIS,oBAAoB;AACtBF,sBAAkBE,mBAAmB,CAAA,EAAGN;EAC1C;AAGA,QAAMO,YAAY;EAAKjB,eAAAA;;;AACvB,SAAOf,KAAK0B,MAAM,GAAGG,cAAAA,IAAkBG,YAAYhC,KAAK0B,MAAMG,cAAAA;AAChE;AA1CgBjB;AA4CT,SAASQ,uBAAuBrB,QAAgBkC,aAAqBC,YAAkB;AAC5F,QAAMC,iBAAiBF,YAAYhC,QAAQ,OAAO,KAAA;AAClD,QAAMmC,cAAc,IAAIC,OAAO,iCAAiCF,cAAAA,QAAsB;AACtF,QAAMb,QAAQvB,OAAOuB,MAAMc,WAAAA;AAE3B,MAAI,CAACd,OAAO;AAEV,WAAOvB;EACT;AAEA,QAAMuC,cAAchB,MAAM,CAAA,EACvBiB,MAAM,GAAA,EACNC,IAAI,CAACC,OAAOA,GAAGzB,KAAI,CAAA,EACnB0B,OAAOC,OAAAA;AAEV,MAAIL,YAAYpB,SAASgB,UAAAA,GAAa;AACpC,WAAOnC;EACT;AAEAuC,cAAYM,KAAKV,UAAAA;AACjB,QAAMW,SAASC,MAAMC,KAAK,IAAIC,IAAIV,WAAAA,CAAAA;AAClC,QAAMW,cAAc,YAAYJ,OAAOK,KAAK,IAAA,CAAA,YAAiBjB,WAAAA;AAC7D,SAAOlC,OAAOE,QAAQmC,aAAaa,WAAAA;AACrC;AAvBgB7B;;;AClJT,SAAS+B,uBAAuBC,QAAc;AACnD,QAAMC,aAAqC;IACzC,eAAe;IACf,eAAe;IACf,eAAe;IACf,eAAe;EACjB;AAEA,QAAMC,QAAQF,OAAOG,MAAM,IAAA;AAE3B,WAASC,IAAI,GAAGA,IAAIF,MAAMG,QAAQD,KAAK,GAAG;AACxC,UAAME,OAAOJ,MAAME,CAAAA;AACnB,UAAMG,QAAQC,OAAOC,QAAQR,UAAAA,EAAYS,KAAK,CAAC,CAACC,GAAAA,MAASL,KAAKM,SAAS,IAAID,GAAAA,GAAM,CAAA;AACjF,QAAI,CAACJ,OAAO;AACV;IACF;AAEA,UAAM,CAAA,EAAGM,WAAAA,IAAeN;AACxB,UAAMO,eAAeZ,MAAME,IAAI,CAAA,GAAIW,KAAAA,KAAU;AAC7C,QAAID,aAAaE,WAAW,IAAA,KAASF,aAAaF,SAAS,cAAA,GAAiB;AAC1E;IACF;AAEA,UAAMK,cAAcX,KAAKY,MAAM,MAAA;AAC/B,UAAMC,SAASF,cAAcA,YAAY,CAAA,IAAK;AAC9C,UAAMG,UAAU,GAAGD,MAAAA,oBAA0BN,WAAAA;AAC7CX,UAAMmB,OAAOjB,GAAG,GAAGgB,OAAAA;AACnBhB,SAAK;EACP;AAEA,SAAOF,MAAMoB,KAAK,IAAA;AACpB;AA/BgBvB;AAsCT,SAASwB,8BAA8BvB,QAAc;AAC1D,QAAMwB,iBAAyC;IAC7C,eAAe;IACf,eAAe;IACf,eAAe;IACf,eAAe;EACjB;AAEA,QAAMtB,QAAQF,OAAOG,MAAM,IAAA;AAC3B,QAAMsB,SAA+B,CAAA;AAGrC,MAAIC,UAAU;AACd,MAAIC,iBAAiB;AACrB,QAAMC,sBAAsB,oBAAIC,IAAAA;AAChC,MAAIC,eAAe;AAEnB,WAAS1B,IAAI,GAAGA,IAAIF,MAAMG,QAAQD,KAAK,GAAG;AACxC,UAAME,OAAOJ,MAAME,CAAAA;AAGnB,QAAI,CAACsB,WAAW,+CAA+CK,KAAKzB,IAAAA,GAAO;AACzEoB,gBAAU;AACVC,uBAAiBF,OAAOpB;AACxBuB,0BAAoBI,MAAK;AACzBF,qBAAe;IACjB;AAEA,QAAIJ,SAAS;AAEX,iBAAWO,QAAQ3B,MAAM;AACvB,YAAI2B,SAAS,IAAKH;AAClB,YAAIG,SAAS,IAAKH;MACpB;AAGA,iBAAWI,iBAAiB1B,OAAO2B,OAAOX,cAAAA,GAAiB;AACzD,YAAIlB,KAAKM,SAAS,IAAIsB,aAAAA,GAAgB,KAAK5B,KAAKM,SAAS,IAAIsB,aAAAA,GAAgB,GAAG;AAC9EN,8BAAoBQ,IAAIF,aAAAA;QAC1B;MACF;AAGA,UAAIJ,iBAAiB,KAAKxB,KAAKM,SAAS,IAAA,GAAO;AAC7Cc,kBAAU;AAGV,cAAMW,eAAeZ,OAAOpB;AAC5B,iBAASiC,IAAIX,gBAAgBW,KAAKD,cAAcC,KAAK;AACnD,gBAAMC,YAAYd,OAAOa,CAAAA,KAAM;AAC/B,cAAIE,eAAe;AAEnB,qBAAW,CAACC,aAAaP,aAAAA,KAAkB1B,OAAOC,QAAQe,cAAAA,GAAiB;AACzE,gBAAII,oBAAoBc,IAAIR,aAAAA,GAAgB;AAC1C,kBAAIK,UAAU3B,SAAS,IAAI6B,WAAAA,GAAc,KAAKF,UAAU3B,SAAS,IAAI6B,WAAAA,GAAc,GAAG;AACpFD,+BAAe;AAEf,oBAAIF,IAAI,KAAKb,OAAOa,IAAI,CAAA,GAAI1B,SAAS,kBAAA,GAAqB;AACxDa,yBAAOa,IAAI,CAAA,IAAK;gBAClB;AACA;cACF;YACF;UACF;AAEA,cAAIE,cAAc;AAChBf,mBAAOa,CAAAA,IAAK;UACd;QACF;MACF;IACF;AAEAb,WAAOkB,KAAKrC,IAAAA;EACd;AAGA,SAAOmB,OAAOmB,OAAOtC,CAAAA,SAAQA,SAAS,IAAA,EAAMgB,KAAK,IAAA;AACnD;AA7EgBC;;;AC9BT,SAASsB,uBAAuBC,QAAc;AACnD,MAAIC,QAAQ;AAGZ,QAAMC,OAAOF,OAAOG,QAAQ,mBAAmB,MAAA;AAC7CF,aAAS;AACT,WAAO;EACT,CAAA;AAEA,SAAO;IAAEC;IAAMD;EAAM;AACvB;AAVgBF;;;ACAT,SAASK,gCAAgCC,QAAc;AAC5D,MAAIC,WAAW;AAGf,QAAMC,UAAU;AAEhB,QAAMC,OAAOH,OAAOI,QAAQF,SAAS,CAACG,OAAOC,OAAOC,WAAWC,YAAAA;AAE7D,UAAMC,kBAAkB,uBAAuBC,KAAKF,OAAAA;AACpD,UAAMG,gBAAgB,yBAAyBD,KAAKF,OAAAA;AA
EpD,QAAIC,mBAAmBE,eAAe;AACpCV,kBAAY;AACZ,aAAO,qBAAqBK,KAAAA,GAAQC,SAAAA,GAAYD,KAAAA;IAClD;AAGA,WAAOD;EACT,CAAA;AAEA,SAAO;IAAEF;IAAMF;EAAS;AAC1B;AArBgBF;AA8BT,SAASa,yBAAyBZ,QAAc;AACrD,MAAIC,WAAW;AAGf,QAAMC,UAAU;AAEhB,QAAMC,OAAOH,OAAOI,QAAQF,SAAS,MAAA;AACnCD,gBAAY;AACZ,WAAO;EACT,CAAA;AAEA,SAAO;IAAEE;IAAMF;EAAS;AAC1B;AAZgBW;;;AC3ChB,IAAMC,qBAAqB;AAEpB,SAASC,mBAAmBC,QAAc;AAC/C,QAAMC,cAAcD,OAAOE,QAAQ;EAAKJ,kBAAAA,EAAoB;AAC5D,QAAMK,OAAOF,gBAAgB,KAAKD,SAASA,OAAOI,MAAM,GAAGH,WAAAA;AAE3D,QAAMI,cAAc;AACpB,QAAMC,eAAe,oBAAIC,IAAAA;AAEzB,aAAWC,SAASL,KAAKM,SAASJ,WAAAA,GAAc;AAC9C,UAAMK,OAAOF,MAAM,CAAA;AACnBF,iBAAaK,IAAID,IAAAA;EACnB;AAEA,MAAIJ,aAAaM,SAAS,GAAG;AAE3B,WAAOT;EAET;AAEA,QAAMU,aAAaC,MAAMC,KAAKT,YAAAA,EAC3BU,KAAI,EACJC,IAAI,CAACP,SAAS,gBAAgBA,IAAAA,WAAeA,IAAAA,GAAO,EACpDQ,KAAK,IAAA;AAER,QAAMC,SAAShB,KAAKiB,QAAO;AAC3B,SAAO,GAAGD,MAAAA;;EAAarB,kBAAAA;EAAuBe,UAAAA;;AAChD;AAzBgBd;;;AT+ET,SAASsB,yBAAyBC,YAAkB;AACzD,QAAMC,eAAeC,MAAKC,QAAQH,UAAAA;AAClC,MAAI,CAACI,IAAGC,WAAWJ,YAAAA,GAAe;AAChCK,YAAQC,KAAK,gDAAgDN,YAAAA,EAAc;AAC3E,WAAOO;EACT;AAEA,MAAIC,OAAOL,IAAGM,aAAaT,cAAc,MAAA;AAGzCQ,SAAOE,oBAAoBF,IAAAA;AAG3B,QAAMG,cAAcC,uBAAuBJ,IAAAA;AAC3CA,SAAOG,YAAYH;AAEnBA,SAAOK,2BAA2BL,IAAAA;AAClC,QAAMM,kBAAkBC,8BAA8BP,IAAAA;AACtDA,SAAOM,gBAAgBN;AACvB,QAAMQ,eAAeC,uBAAuBT,IAAAA;AAC5CA,SAAOQ,aAAaR;AACpBA,SAAOU,gCAAgCV,MAAMQ,aAAaG,OAAO;AAEjE,QAAMC,cAAcC,sBAAsBb,IAAAA;AAC1CA,SAAOY,YAAYZ;AAGnB,QAAMc,uBAAuBC,gCAAgCf,IAAAA;AAC7DA,SAAOc,qBAAqBd;AAG5B,QAAMgB,wBAAwBC,yBAAyBjB,IAAAA;AACvDA,SAAOgB,sBAAsBhB;AAG7BA,SAAOkB,8BAA8BlB,IAAAA;AAErCA,SAAOmB,uBAAuBnB,IAAAA;AAE9BA,SAAOoB,aAAapB,IAAAA;AAEpBA,SAAOqB,kBAAkBrB,IAAAA;AAEzBA,SAAOsB,mBAAmBtB,IAAAA;AAE1BA,SAAOA,KAAKuB,QAAQ,UAAU,IAAA;AAC9BvB,SAAOwB,wBAAwBxB,IAAAA;AAE/BL,EAAAA,IAAG8B,cAAcjC,cAAcQ,MAAM,MAAA;AAErC,MAAIG,YAAYuB,QAAQ,GAAG;AACzB7B,YAAQ8B,KAAK,wCAAwCxB,YAAYuB,KAAK,oDAAoD;EAC5H;AACA,MAAId,YAAYgB,WAAW,GAAG;AAC5B/B,YAAQ8B,KAAK,yCAAyCf,YAAYgB,QAAQ,kBAAkB;EAC9F;AACA,MAAIhB,YAAYiB,UAAUC,SAAS,GAAG;AACpCjC,YAAQC,KAAK,wDAAwDc,YAAYiB,UAAUC,MAAM;AACjGlB,gBAAYiB,UAAUE,QAAQ,CAACC,SAASnC,QAAQC,KAAK,KAAKkC,IAAAA,EAAM,CAAA;EAClE;AACA,MAAI1B,gBAAgB2B,YAAY,GAAG;AACjCpC,YAAQ8B,KAAK,0CAA0CrB,gBAAgB2B,SAAS,sCAAsC;EACxH;AACA,MAAInB,qBAAqBc,WAAW,GAAG;AACrC/B,YAAQ8B,KAAK,yCAAyCb,qBAAqBc,QAAQ,0CAA0C;EAC/H;AACA,MAAIZ,sBAAsBY,WAAW,GAAG;AACtC/B,YAAQ8B,KAAK,yCAAyCX,sBAAsBY,QAAQ,wDAAwD;EAC9I;AAEA,SAAO;IACLM,iBAAiBtB,YAAYgB;IAC7BO,kBAAkBvB,YAAYiB;IAC9BO,gBAAgBjC,YAAYuB;IAC5BW,oBAAoBvB,qBAAqBc;IACzCU,oBAAoBtB,sBAAsBY;EAC5C;AACF;AA7EgBtC;;;AUjFhB,SAASiD,iBAAiB;;;ACGnB,SAASC,mBAAmBC,OAAgB;AACjD,QAAMC,UAAkC;;IAEtCC,MAAM;IACNC,SAAS;IACTC,MAAM;;IAGNC,UAAU;IACVC,SAAS;IACTC,KAAK;IACLC,QAAQ;IACRC,QAAQ;IACRC,aAAa;IACbC,WAAW;;IAGXC,SAAS;IACTC,SAAS;IACTC,MAAM;IACNC,iBAAiB;;IAGjBC,SAAS;;IAGTC,WAAW;IACXC,aAAa;IACbC,MAAM;IACNC,MAAM;IACNC,QAAQ;IACRC,UAAU;;IAGVC,MAAM;;IAGNC,MAAM;IACNC,OAAO;;IAGPC,OAAO;;IAGPC,MAAM;IACNC,MAAM;IACNC,SAAS;IACTC,UAAU;;IAGVC,OAAO;IACPC,MAAM;IACNC,MAAM;IACNC,KAAK;IACLC,MAAM;IACNC,SAAS;IACTC,QAAQ;;IAGRC,OAAO;;IAGPC,YAAY;IACZC,mBAAmB;IACnBC,aAAa;IACbC,gBAAgB;;IAGhBC,QAAQ;EACV;AAEA,MAAIC,WAAW3C,QAAQD,MAAM6C,IAAI,KAAK;AAGtC,MAAI7C,MAAM8C,SAAS;AACjBF,eAAWA,SAASG,SAAS,IAAA,IAAQH,WAAW,GAAGA,QAAAA;EACrD;AAGA,MAAI5C,MAAMgD,cAAchD,MAAMgD,WAAWC,SAAS,GAAG;AACnDL,eAAW5C,MAAMgD,WAAWE,IAAI,CAACC,MAAM,IAAIA,CAAAA,GAAI,EAAEC,KAAK,KAAA;EACxD;AAEA,SAAOR;AACT;AApFgB7C;AAuFT,SAASsD,aAAaC,KAAW;AACtC,SAAOA,IACJC,QAAQ,mBAAmB,OAAA,EAC3BC,MAAM,QAAA,EACNN,IAAI,CAACO,SAASA,KAAKC,OAAO,CAAA,EAAGC,YAAW,IAAKF,KAAKG,MAAM,CAAA,CAAA,EACxDR,KAAK,EAAA;AACV;AANgBC;AAQT,SAASQ,YAAYP,KAAW;AACrC,SAAOA,IACJC,QAAQ,mBAAmB,OAAA,EAC3BO,
YAAW,EACXP,QAAQ,UAAU,GAAA;AACvB;AALgBM;AAOT,SAASE,YAAYT,KAAW;AACrC,SAAOA,IACJC,QAAQ,mBAAmB,OAAA,EAC3BO,YAAW,EACXP,QAAQ,UAAU,GAAA;AACvB;AALgBQ;;;ADrGT,SAASC,YAAYC,OAAgB;AAC1C,QAAMC,YAAYC,aAAaF,MAAMG,YAAY;AAEjD,MAAIC,MAAM;;;;;;;AAOVA,SAAO,sBAAsBH,SAAAA;;AAE7B,aAAWI,SAASL,MAAMM,QAAQ;AAEhC,QACED,MAAME,gBAAgBF,MAAMG,SAAS,QACrCH,MAAMG,KAAKC,WAAW,GAAA,KACtBJ,MAAMG,KAAKC,WAAW,SAAA,KACtBJ,MAAMG,KAAKC,WAAW,SAAA,GACtB;AACA;IACF;AAMA,UAAMC,SAASC,mBAAmBN,KAAAA;AAClC,UAAMO,WAAWP,MAAMQ,YAAYR,MAAMS,aAAa,MAAM;AAG5D,UAAMC,aAAaC,6BAA6BX,KAAAA;AAChD,QAAIU,YAAY;AACdX,aAAOW;IACT;AAEAX,WAAO,KAAKC,MAAMG,IAAI,GAAGI,QAAAA,KAAaF,MAAAA;;;EACxC;AAEAN,SAAO;AAGPA,SAAO,sBAAsBH,SAAAA;;AAE7B,aAAWI,SAASL,MAAMM,QAAQ;AAEhC,QACED,MAAMG,KAAKC,WAAW,GAAA,KACtBJ,MAAMG,KAAKC,WAAW,SAAA,KACtBJ,MAAMG,KAAKC,WAAW,SAAA,KACtBJ,MAAME,gBAAgBF,MAAMG,SAAS,MACrC;AACA;IACF;AAOA,UAAME,SAASC,mBAAmBN,KAAAA;AAElC,UAAMU,aAAaC,6BAA6BX,OAAO;MACrDY,UAAU;IACZ,CAAA;AACA,QAAIF,YAAY;AACdX,aAAOW;IACT;AAEAX,WAAO,KAAKC,MAAMG,IAAI,MAAME,MAAAA;;;EAC9B;AAEAN,SAAO;AAGPA,SAAO,gBAAgBH,SAAAA;;AAEvB,aAAWI,SAASL,MAAMM,QAAQ;AAKhC,UAAMI,SAASC,mBAAmBN,KAAAA;AAClC,UAAMO,WAAWP,MAAMQ,WAAW,MAAM;AAExC,UAAME,aAAaC,6BAA6BX,OAAO;MACrDa,YAAY;IACd,CAAA;AACA,QAAIH,YAAY;AACdX,aAAOW;IACT;AAEAX,WAAO,KAAKC,MAAMG,IAAI,GAAGI,QAAAA,KAAaF,MAAAA;;;EACxC;AAEAN,SAAO;AAEP,SAAOA;AACT;AAlGgBL;AAqGT,SAASiB,6BAA6BX,OAAkB,EAC7DY,WAAW,OACXC,aAAa,MAAK,IAC8B,CAAC,GAAC;AAClD,MAAIH,aAAa;AAEjB,MAAIV,MAAMQ,YAAa,CAACK,cAAcb,MAAMS,cAAeG,UAAU;AACnEF,kBAAc,0CAA0CV,MAAMc,WAAWd,MAAMG,IAAI;;AACnF,QAAIU,YAAY;AACd,aAAOH;IACT;AACAA,kBAAc;EAChB,OAAO;AACLA,kBAAc,kCAAkCV,MAAMc,WAAWd,MAAMG,IAAI;;AAC3E,QAAIU,YAAY;AACd,aAAOH;IACT;AACAA,kBAAc;EAChB;AAGA,UAAQV,MAAMe,MAAI;IAChB,KAAK;IACL,KAAK;IACL,KAAK;AACHL,oBAAc;AACd,UAAIV,MAAMgB,QAAQ;AAChBN,sBAAc,gBAAgBV,MAAMgB,MAAM;;MAC5C;AACA;IAEF,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK;AACHN,oBAAc;AACd;IAEF,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK;AACHA,oBAAc;AACd;IAEF,KAAK;AACHA,oBAAc;AACd;IAEF,KAAK;AACHA,oBAAc;AACd;IAEF,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK;AACHA,oBAAc;AACd;IAEF,KAAK;IACL,KAAK;AACHA,oBAAc;AACd;EAMJ;AAEA,MAAIV,MAAMiB,SAAS;AACjBP,kBAAc;EAChB;AAUA,SAAOA;AACT;AApFgBC;AAuFT,SAASO,mBAAmBvB,OAAgB;AACjD,QAAMC,YAAYC,aAAaF,MAAMG,YAAY;AACjD,QAAMqB,YAAYC,YAAYC,UAAU1B,MAAMG,YAAY,CAAA;AAC1D,QAAMwB,WAAWC,YAAY5B,MAAMG,YAAY;AAG/C,QAAM0B,UAAU7B,MAAMM,OAAOwB,KAAK,CAACC,MAAMA,EAAExB,YAAY;AACvD,QAAMyB,SAASH,UAAUlB,mBAAmBkB,OAAAA,IAAW;AACvD,QAAMI,SAASJ,UAAUA,QAAQrB,OAAO;AAExC,QAAM0B,aAAa;;;;;;;;;;;;;;;;;;;UAmBXjC,SAAAA;UACAA,SAAAA;IACNA,SAAAA;iBACa0B,QAAAA;WACN1B,SAAAA,qBAA8B0B,QAAAA;;YAE7BzB,aAAaF,MAAMG,YAAY,CAAA;mBACxBqB,SAAAA;eACJvB,SAAAA;iCACkBD,MAAMG,YAAY,YAAYF,SAAAA;;;;;;;;;YASnDA,SAAAA;;;+BAGmBA,SAAAA;eAChBA,SAAAA;kBACGD,MAAMG,YAAY;;;;;;;;;YASxBF,SAAAA;;WAEDgC,MAAAA;;cAEGA,MAAAA,MAAYA,MAAAA,KAAWD,MAAAA;eACtB/B,SAAAA;kBACGD,MAAMG,YAAY,mBAAmB8B,MAAAA;;;;;;;;;YAS3ChC,SAAAA;;WAEDgC,MAAAA;;cAEGA,MAAAA,MAAYA,MAAAA,KAAWD,MAAAA;+BACN/B,SAAAA;eAChBA,SAAAA;kBACGD,MAAMG,YAAY,kBAAkB8B,MAAAA;;;;;;;;;;cAUxCA,MAAAA;;cAEAA,MAAAA,MAAYA,MAAAA,KAAWD,MAAAA;;kBAEnBhC,MAAMG,YAAY,kBAAkB8B,MAAAA;;;;AAKpD,SAAOC;AACT;AAvGgBX;AA0GT,SAASY,gBAAgBnC,OAAgB;AAC9C,QAAMC,YAAYC,aAAaF,MAAMG,YAAY;AACjD,QAAMwB,WAAWC,YAAY5B,MAAMG,YAAY;AAE/C,QAAM0B,UAAU7B,MAAMM,OAAOwB,KAAK,CAACC,MAAMA,EAAExB,YAAY;AACvD,QAAMyB,SAASH,UAAUlB,mBAAmBkB,OAAAA,IAAW;AACvD,QAAMI,SAASJ,UAAUA,QAAQrB,OAAO;AAExC,QAAM4B,UAAU;;;;;WAKPpC,MAAMG,YAAY;;UAEnBF,SAAAA;UACAA,SAAAA;IACNA,SAAAA;iBACa0B,QAAAA;;;eAGF1B,SAAAA;yCAC0BA,SAAAA;;;;kCAIPA,SAAAA,iBAA0BA,SAAAA;;gBAE5CD,MAAMG,YAAY;;;;gCAIFF,SAAAA,SAAkBgC,MAAAA,cAAoBA,MAAAA;;;;;wEAKEhC,SAAAA;;;;;;cAM1DD,MAAMG,YAAY;;;;;kBA
Kd8B,MAAAA,KAAWD,MAAAA,cAAoB/B,SAAAA;;;cAGnCD,MAAMG,YAAY;kBACdH,MAAMG,YAAY,IAAI8B,MAAAA,KAAWA,MAAAA;;;;sCAIbhC,SAAAA,SAAkBgC,MAAAA,OAAaA,MAAAA;;;;;;iBAMpDA,MAAAA,KAAWD,MAAAA,sBAA4B/B,SAAAA,iBAA0BA,SAAAA;;gBAElED,MAAMG,YAAY;;kBAEhBH,MAAMG,YAAY,IAAI8B,MAAAA,KAAWA,MAAAA;;;;sCAIbhC,SAAAA,SAAkBgC,MAAAA,OAAaA,MAAAA;;;;;;iBAMpDA,MAAAA,KAAWD,MAAAA;;gBAEZhC,MAAMG,YAAY;kBAChBH,MAAMG,YAAY,IAAI8B,MAAAA,KAAWA,MAAAA;;;;sCAIbhC,SAAAA,SAAkBgC,MAAAA,OAAaA,MAAAA;;;gCAGrChC,SAAAA,SAAkBgC,MAAAA,OAAaA,MAAAA;;;;AAK7D,SAAOG;AACT;AA5FgBD;AA8FT,SAASE,eAAerC,OAAgB;AAC7C,QAAMC,YAAYC,aAAaF,MAAMG,YAAY;AACjD,QAAMwB,WAAWC,YAAY5B,MAAMG,YAAY;AAE/C,QAAMmC,SAAS;;WAENrC,SAAAA,wBAAiC0B,QAAAA;WACjC1B,SAAAA,qBAA8B0B,QAAAA;;;kBAGvB1B,SAAAA;gBACFA,SAAAA;;eAEDA,SAAAA;;AAGb,SAAOqC;AACT;AAjBgBD;;;AExYhB,SAASE,SAAyBC,YAA4B;AAgC9D,IAAMC,sBAAN,MAAMA,qBAAAA;EAhCN,OAgCMA;;;EACIC;EAER,YAAYC,gBAAgC;AAC1C,SAAKD,UAAU,IAAIE,QAAQD,cAAAA;EAC7B;EAEAE,gBAAgBC,UAA+B;AAC7C,UAAMC,aAAa,KAAKL,QAAQM,oBAAoBF,QAAAA;AACpD,UAAMG,SAAsB,CAAA;AAG5B,UAAMC,qBAAqBH,WAAWI,sBAAqB;AAE3D,eAAWC,aAAaF,oBAAoB;AAC1C,YAAMG,eAAeD,UAAUE,gBAAe;AAE9C,iBAAWC,eAAeF,cAAc;AACtC,cAAMG,cAAcD,YAAYE,eAAc;AAE9C,YAAID,eAAeE,KAAKC,iBAAiBH,WAAAA,GAAc;AACrD,gBAAMI,aAAaJ,YAAYK,cAAa;AAG5C,cAAID,WAAWE,QAAO,MAAO,WAAW;AACtC,kBAAMC,YAAY,KAAKC,aACrBT,YAAYU,QAAO,GACnBT,WAAAA;AAEF,gBAAIO,WAAW;AACbd,qBAAOiB,KAAKH,SAAAA;YACd;UACF;QACF;MACF;IACF;AAEA,WAAOd;EACT;EAEQe,aACNG,cACAC,UACkB;AAClB,UAAMC,OAAOD,SAASE,aAAY;AAElC,QAAID,KAAKE,SAAS,GAAG;AACnB,aAAO;IACT;AAGA,UAAMC,YAAYH,KAAK,CAAA,EAAGP,QAAO,EAAGW,QAAQ,SAAS,EAAA;AAGrD,UAAMC,YAAYL,KAAK,CAAA;AAEvB,QAAI,CAACX,KAAKiB,0BAA0BD,SAAAA,GAAY;AAC9C,aAAO;IACT;AAEA,UAAME,SAAsB,CAAA;AAG5B,UAAMC,aAAaH,UAAUI,cAAa;AAE1C,eAAWC,QAAQF,YAAY;AAC7B,UAAInB,KAAKsB,qBAAqBD,IAAAA,GAAO;AACnC,cAAME,YAAYF,KAAKd,QAAO;AAC9B,cAAMT,cAAcuB,KAAKtB,eAAc;AAGvC,cAAMyB,kBAAkBH,KAAKI,wBAAuB;AACpD,YAAIC;AAEJ,YAAIF,gBAAgBX,SAAS,GAAG;AAC9Ba,oBAAUF,gBACPG,IAAI,CAACC,MAAMA,EAAExB,QAAO,CAAA,EACpByB,KAAK,IAAA,EACLd,QAAQ,SAAS,EAAA,EACjBe,KAAI;QACT;AAEA,YAAIhC,eAAeE,KAAKC,iBAAiBH,WAAAA,GAAc;AACrD,gBAAMiC,YAAY,KAAKC,WAAWT,WAAWzB,aAAa4B,OAAAA;AAC1DR,iBAAOV,KAAKuB,SAAAA;QACd;MACF;IACF;AAEA,WAAO;MACLjB;MACAL;MACAS;IACF;EACF;EAEQc,WACNT,WACAb,UACAgB,SACW;AACX,UAAMK,YAAuB;MAC3BE,MAAMV;MACNW,YAAYX;MACZY,MAAM;MACNC,UAAU;MACVC,YAAY;MACZC,SAAS;MACTC,cAAc;MACdC,UAAU;MACVC,SAAS;MACTf;IACF;AAGA,SAAKgB,cAAchC,UAAUqB,SAAAA;AAG7B,SAAKY,eAAejC,UAAUqB,SAAAA;AAE9B,WAAOA;EACT;EAEQW,cAAchC,UAA0BqB,WAA4B;AAE1E,QAAIa,UAAgBlC;AACpB,QAAImC,WAAkC;AAEtC,WAAO7C,KAAKC,iBAAiB2C,OAAAA,GAAU;AACrCC,iBAAWD;AACX,YAAM1C,cAAa0C,QAAQzC,cAAa;AAExC,UAAIH,KAAK8C,2BAA2B5C,WAAAA,GAAa;AAC/C0C,kBAAU1C,YAAWC,cAAa;MACpC,OAAO;AACL;MACF;IACF;AAEA,QAAI,CAAC0C,UAAU;AACb;IACF;AAEA,UAAM3C,aAAa2C,SAAS1C,cAAa;AACzC,QAAI4C,WAAW;AAEf,QAAI/C,KAAK8C,2BAA2B5C,UAAAA,GAAa;AAC/C6C,iBAAW7C,WAAWK,QAAO;IAC/B,OAAO;AACLwC,iBAAW7C,WAAWE,QAAO;IAC/B;AAEA2B,cAAUI,OAAOY;AAGjB,UAAMpC,OAAOkC,SAASjC,aAAY;AAElC,QAAID,KAAKE,SAAS,GAAG;AACnB,YAAMmC,WAAWrC,KAAK,CAAA;AAGtB,UAAIX,KAAKiD,gBAAgBD,QAAAA,GAAW;AAClCjB,kBAAUG,aAAac,SAASE,eAAc;MAChD,WAESlD,KAAKiB,0BAA0B+B,QAAAA,GAAW;AACjD,aAAKG,gBAAgBH,UAAUjB,SAAAA;MACjC,WAES/B,KAAKoD,yBAAyBJ,QAAAA,GAAW;AAChDjB,kBAAUsB,aAAaL,SACpBM,YAAW,EACX3B,IAAI,CAAC4B,OAAOA,GAAGnD,QAAO,EAAGW,QAAQ,SAAS,EAAA,CAAA;MAC/C;IACF;AAGA,QAAIJ,KAAKE,SAAS,KAAKb,KAAKiB,0BAA0BN,KAAK,CAAA,CAAE,GAAG;AAC9D,WAAKwC,gBAAgBxC,KAAK,CAAA,GAAIoB,SAAAA;IAChC;EACF;EAEQoB,gBAAgBK,YAAkBzB,WAA4B;AACpE,QAAI,CAAC/B,KAAKiB,0BAA0BuC,UAAAA,GAAa;AAC/C;IACF;AAEA,UAAMrC,aAAaqC,WAAWpC,cAAa;AAE3C,eAAWC,QAAQF,YAAY;AAC7B,UAAInB,KAAKsB,qBAAqBD,IAAAA,GAAO;AACnC,cAAMoC,WAAWpC,KAAKd,QAAO;AAC7
B,cAAMmD,QAAQrC,KAAKtB,eAAc,GAAIK,QAAAA;AAErC,gBAAQqD,UAAAA;UACN,KAAK;AACH1B,sBAAUlB,SAAS6C,QAAQC,SAASD,KAAAA,IAASE;AAC7C;UACF,KAAK;AACH7B,sBAAU8B,YAAYH,QAAQC,SAASD,KAAAA,IAASE;AAChD;UACF,KAAK;AACH7B,sBAAU+B,QAAQJ,QAAQC,SAASD,KAAAA,IAASE;AAC5C;UACF,KAAK;AACH7B,sBAAUM,aAAa;AACvBN,sBAAUgC,eAAeL;AACzB;;UAEF,KAAK;AACH3B,sBAAUiC,eAAeN,UAAU;AACnC;UACF,KAAK;AAGH3B,sBAAUkC,OAAOP,OAAO3C,QAAQ,SAAS,EAAA;AAOzC;UACF;AACE,kBAAM,IAAImD,MAAM,yBAAyBT,QAAAA,EAAU;QACvD;MACF;IACF;EACF;EAEQd,eAAejC,UAA0BqB,WAA4B;AAC3E,QAAIa,UAAgBlC;AAEpB,WAAOV,KAAKC,iBAAiB2C,OAAAA,GAAU;AACrC,YAAM1C,aAAa0C,QAAQzC,cAAa;AAExC,UAAIH,KAAK8C,2BAA2B5C,UAAAA,GAAa;AAC/C,cAAMiE,aAAajE,WAAWK,QAAO;AACrC,cAAMI,OAAOiC,QAAQhC,aAAY;AAEjC,gBAAQuD,YAAAA;UACN,KAAK;AACHpC,sBAAUO,UAAU;AACpBP,sBAAUK,WAAW;AACrB;UAEF,KAAK;AACHL,sBAAUM,aAAa;AACvB,gBAAI1B,KAAKE,SAAS,GAAG;AACnBkB,wBAAUgC,eAAepD,KAAK,CAAA,EAAGP,QAAO;YAC1C;AACA;UAEF,KAAK;AACH2B,sBAAUM,aAAa;AACvBN,sBAAUgC,eAAe;AACzB;UAEF,KAAK;AACHhC,sBAAUQ,eAAe;AACzBR,sBAAUO,UAAU;AACpBP,sBAAUK,WAAW;AACrB;UAEF,KAAK;AACHL,sBAAUS,WAAW;AACrB;UAEF,KAAK;AACHT,sBAAUU,UAAU;AACpB;UAEF,KAAK;AACH,gBAAI9B,KAAKE,SAAS,GAAG;AACnB,oBAAMuD,SAASzD,KAAK,CAAA,EAAGP,QAAO;AAE9B,oBAAMiE,QAAQD,OAAOC,MAAM,mBAAA;AAC3B,kBAAIA,OAAO;AACTtC,0BAAUuC,aAAa;kBACrBC,OAAOF,MAAM,CAAA;kBACbG,QAAQH,MAAM,CAAA;gBAChB;cACF;YACF;AACA;UACF;AACE,kBAAM,IAAIH,MAAM,uBAAuBC,UAAAA,EAAY;QACvD;AAEAvB,kBAAU1C,WAAWC,cAAa;MACpC,OAAO;AACL;MACF;IACF;EACF;AACF;;;ACrUA,SAASsE,YAAY;AAErB,SAASC,OAAOC,IAAIC,iBAAiB;AACrC,SAASC,kBAAkB;AAQ3B,eAAsBC,qCAAqCC,SAAgB;AACzE,QAAMC,SAAS,IAAIC,oBAAoB;IACrCC,kBAAkBH,QAAQG;EAC5B,CAAA;AACA,QAAMC,SAASH,OAAOI,gBAAgBL,QAAQM,cAAc;AAE5D,MAAIF,OAAOG,WAAW,GAAG;AACvBC,YAAQC,KAAK,6HAAA;AACb;EACF;AAGAL,SAAOM,KAAK,CAACC,GAAGC,MAAMA,EAAEC,aAAaN,SAASI,EAAEE,aAAaN,MAAM;AAEnE,QAAMO,QAAQV,OAAO,CAAA;AAGrBI,UAAQO,KAAK,wBAAcD,MAAMD,YAAY,eAAK;AAClD,QAAMG,WAAWC,YAAYH,MAAMD,YAAY;AAC/C,QAAMK,YAAYC,KAAKnB,QAAQoB,iBAAiBJ,QAAAA;AAChD,MAAIK,WAAWH,SAAAA,GAAY;AACzBV,YAAQO,KAAK,wBAAcC,QAAAA,+DAAqB;AAChD;EACF;AACA,QAAMM,MAAMC,YAAYT,KAAAA;AACxB,QAAMU,aAAaC,mBAAmBX,KAAAA;AACtC,QAAMY,UAAUC,gBAAgBb,KAAAA;AAChC,QAAMc,iBAAiBT,KAAKD,WAAW,GAAGF,QAAAA,YAAoB;AAC9D,QAAMa,SAASC,eAAehB,KAAAA;AAE9B,MAAI;AACF,UAAMiB,MAAMb,WAAW;MAAEc,WAAW;IAAK,CAAA;AACzC,UAAMD,MAAMZ,KAAKD,WAAW,MAAA,GAAS;MAAEc,WAAW;IAAK,CAAA;AACvD,UAAMC,UAAUd,KAAKD,WAAW,QAAQ,GAAGF,QAAAA,SAAiB,GAAGM,GAAAA;AAC/D,UAAMW,UAAUd,KAAKD,WAAW,GAAGF,QAAAA,gBAAwB,GAAGQ,UAAAA;AAC9D,UAAMS,UAAUd,KAAKD,WAAW,GAAGF,QAAAA,aAAqB,GAAGU,OAAAA;AAC3D,UAAMO,UAAUL,gBAAgBC,MAAAA;EAClC,SAASK,KAAK;AACZ1B,YAAQ2B,MAAM,wBAAcnB,QAAAA,8BAAmBkB,IAAcE,OAAO,EAAE;AACtE,UAAMC,GAAGnB,WAAW;MAAEc,WAAW;IAAK,CAAA;EACxC;AACF;AAzCsBjC;;;ACbtB,OAAOuC,SAAQ;AACf,OAAOC,WAAU;AACjB,OAAOC,UAAU;AACjB,OAAOC,WAAW;AAKlB,IAAIC,oBAAmC;AAmCvC,SAASC,kBAAkBC,KAAU;AACnC,QAAMC,OAAQD,IAA8BC;AAC5C,QAAMC,uBAAuB;IAAC;IAAgB;IAAc;IAAa;IAAa;;AACtF,SAAOA,qBAAqBC,SAASF,QAAQ,EAAA;AAC/C;AAJSF;AAUT,SAASK,sBAAsBC,WAAmBC,UAAU,KAAI;AAC9D,SAAO,IAAIC,QAAQ,CAACC,YAAAA;AAClB,QAAI;AACF,YAAMC,MAAM,IAAIC,IAAIL,SAAAA;AACpB,YAAMM,UAAUF,IAAIG,aAAa;AACjC,YAAMC,aAAaF,UAAUG,QAAQC;AAErC,YAAMC,MAAMH,WAAWI,QACrB;QACEC,UAAUT,IAAIS;QACdC,MAAMV,IAAIU,SAASR,UAAU,MAAM;QACnCS,MAAM;QACNC,QAAQ;QACRf;MACF,GACA,CAACgB,QAAAA;AAIC,cAAMC,YAAYD,IAAIE,eAAe,OAAO,CAACF,IAAIG,QAAQ,oBAAA;AACzDjB,gBAAQe,SAAAA;MACV,CAAA;AAGFP,UAAIU,GAAG,WAAW,MAAA;AAChBV,YAAIW,QAAO;AACXnB,gBAAQ,KAAA;MACV,CAAA;AAEAQ,UAAIU,GAAG,SAAS,MAAA;AACdlB,gBAAQ,KAAA;MACV,CAAA;AAEAQ,UAAIY,IAAG;IACT,SAASC,GAAG;AACVrB,cAAQ,KAAA;IACV;EACF,CAAA;AACF;AAtCSJ;AA2CT,eAAe0B,uBACbzB,WACAC,SACAyB,UAAgB;AAEhB,QAAMC,YAAYC,KAAKC,IAAG;AAE1B,SAAOD,KAAKC,IAAG,IAAKF,YAAY1B,SA
AS;AACvC,UAAM6B,cAAc,MAAM/B,sBAAsBC,WAAW,GAAA;AAC3D,QAAI8B,aAAa;AACf,aAAO;IACT;AAEA,UAAM,IAAI5B,QAAQC,CAAAA,YAAW4B,WAAW5B,SAASuB,QAAAA,CAAAA;EACnD;AAEA,SAAO;AACT;AAjBeD;AAuBf,SAASO,aAAAA;AACP,SAAOC;AACT;AAFSD;AAOT,SAASE,uBAAAA;AACP,MAAI,CAACzC,mBAAmB;AACtB,UAAM0C,UAAUH,WAAAA;AAChB,UAAMI,WAAWrB,MAAKsB,KAAKF,SAAS,YAAA;AACpC1C,wBAAoB6C,IAAGC,aAAaH,UAAU,OAAA;EAChD;AACA,SAAO3C;AACT;AAPSyC;AAaT,SAASM,aAAaC,MAAY;AAChC,QAAMC,UAAUD,KAAKE,KAAI;AACzB,MAAI,CAACD,QAAS,QAAO;AAIrB,QAAME,QAAQF,QAAQE,MAAM,iEAAA;AAC5B,MAAIA,OAAO;AACT,UAAMC,UAAUD,MAAM,CAAA,EAAGD,KAAI;AAE7B,WAAOE,WAAW;EACpB;AAGA,SAAO;AACT;AAfSL;AA+BT,eAAeM,oBACbC,QACAC,SACAC,UAAgB;AAEhB,QAAMC,cAAcnC,MAAKsB,KAAKU,QAAQE,QAAAA;AAGtC,MAAIE;AACJ,MAAI;AACFA,gBAAY,MAAMb,IAAGc,SAASC,KAAKH,WAAAA;EACrC,QAAQ;AACN,WAAO;MAAEI,MAAM,CAAA;MAAIC,iBAAiB;IAAM;EAC5C;AAEA,QAAMC,WAAWL,UAAUM;AAG3B,QAAMC,cAAc,OAAO;AAC3B,QAAMC,WAAWC,KAAKC,IAAIL,UAAUE,WAAAA;AACpC,QAAMI,gBAAgBF,KAAKG,IAAI,GAAGP,WAAWG,QAAAA;AAG7C,QAAMK,SAASC,OAAOC,YAAYP,QAAAA;AAClC,MAAIQ;AAEJ,MAAI;AACFA,iBAAa,MAAM7B,IAAGc,SAASgB,KAAKlB,aAAa,GAAA;AACjD,UAAMiB,WAAWE,KAAKL,QAAQ,GAAGL,UAAUG,aAAAA;EAC7C,SAASQ,OAAO;AACdC,YAAQD,MAAM,2CAA2CA,KAAAA;AACzD,WAAO;MAAEhB,MAAM,CAAA;MAAIC,iBAAiB;IAAM;EAC5C,UAAA;AACE,QAAIY,YAAY;AACd,YAAMA,WAAWK,MAAK;IACxB;EACF;AAGA,QAAM3B,UAAUmB,OAAOS,SAAS,MAAA;AAChC,QAAMC,QAAQ7B,QAAQ8B,MAAM,IAAA;AAG5B,MAAIb,gBAAgB,KAAKY,MAAME,SAAS,GAAG;AACzCF,UAAMG,MAAK;EACb;AAGA,QAAMC,WAAqB,CAAA;AAC3B,aAAWrC,QAAQiC,OAAO;AACxB,UAAMK,SAASvC,aAAaC,IAAAA;AAC5B,QAAIsC,WAAW,MAAM;AACnBD,eAASE,KAAKD,MAAAA;IAChB;EACF;AAGA,MAAIE,aAAa;AACjB,WAASC,IAAIJ,SAASF,SAAS,GAAGM,KAAK,GAAGA,KAAK;AAC7C,UAAMzC,OAAOqC,SAASI,CAAAA;AAItB,QACEzC,KAAK3C,SAAS,oCAAA,KACd2C,KAAK3C,SAAS,wDAAA,GACd;AACAmF,mBAAaC;AACb;IACF;EACF;AAGA,MAAID,eAAe,IAAI;AACrBV,YAAQY,IAAI,uEAAA;AACZ,UAAMC,eAAeN,SAASO,MAAM,CAACrC,OAAAA;AACrC,UAAMO,mBAAkB+B,eAAeF,YAAAA;AACvC,WAAO;MAAE9B,MAAM8B;MAAc7B,iBAAAA;IAAgB;EAC/C;AAGA,MAAIgC,WAAWT,SAASF;AAExB,WAASM,IAAID,aAAa,GAAGC,IAAIJ,SAASF,QAAQM,KAAK;AACrD,UAAMzC,OAAOqC,SAASI,CAAAA;AAEtB,QACEzC,KAAK3C,SAAS,oCAAA,KACd2C,KAAK3C,SAAS,wDAAA,GACd;AACAyF,iBAAWL;AACX;IACF;EACF;AAGA,QAAMM,eAAeV,SAASO,MAAMJ,YAAYM,QAAAA;AAGhD,QAAMhC,kBAAkB+B,eAAeE,YAAAA;AAGvC,QAAMlC,OAAOkC,aAAaZ,SAAS5B,UAC/BwC,aAAaH,MAAM,CAACrC,OAAAA,IACpBwC;AAEJ,SAAO;IAAElC;IAAMC;EAAgB;AACjC;AA3GeT;AAiHf,SAASwC,eAAehC,MAAc;AACpC,aAAWb,QAAQa,MAAM;AAEvB,UAAMmC,oBAAoBhD,KAAKG,MAAM,iDAAA;AACrC,QAAI6C,mBAAmB;AACrB,YAAMC,aAAaC,SAASF,kBAAkB,CAAA,GAAI,EAAA;AAClD,UAAIC,aAAa,GAAG;AAClBnB,gBAAQY,IAAI,wBAAwBO,UAAAA,uBAAiC;AACrE,eAAO;MACT;IACF;EACF;AAEA,SAAO;AACT;AAdSJ;AAmBT,SAASM,mBACPC,UACAC,gBAAsB;AAGtB,SAAOD,SAASE,QAAQ,uBAAuBD,cAAAA;AACjD;AANSF;AAoCF,SAASI,oBACdrG,KACAgB,KACAM,KACAgF,SAA2B;AAE3B,QAAM,EACJlD,SAAShC,MAAKsB,KAAK6D,QAAQC,IAAG,GAAI,MAAA,GAClCC,eAAe,KACfC,cAAc,cACdC,eAAe,KACfC,gBAAgB,KAChBC,SAAS,oBAAoBN,QAAQO,IAAIC,eAAe,GAAA,IACxDZ,iBAAiBI,QAAQO,IAAIE,oBAAoB,IAAG,IAClDV,WAAW,CAAC;AAEhB,QAAMW,6BAA6BC,kBAAkBf,cAAAA;AACrDvB,UAAQD,MAAM,kBAAkB3E,IAAImH,SAASF,0BAAAA;AAG7C,MAAI3F,IAAI8F,aAAa;AACnBxC,YAAQD,MAAM,6DAAA;AACd;EACF;AAGC,GAAA,YAAA;AACC,QAAI;AAEF,YAAM0C,cAActH,kBAAkBC,GAAAA;AAGtC,YAAM,EAAE4D,gBAAe,IAAK,MAAMT,oBAChCC,QACAqD,cACAC,WAAAA;AAKF,UAAIW,eAAe,CAACzD,iBAAiB;AACnCgB,gBAAQY,IAAI,uFAAA;AAGZ,YAAI;AACF,cAAI9E,IAAImG,MAAAA;QACV,SAAShF,GAAG;AACV+C,kBAAQD,MAAM,sCAAsCkC,MAAAA;AAEpDjC,kBAAQY,IAAI,yDAAA;QACd;AAGAZ,gBAAQY,IAAI,kDAAkDqB,MAAAA,cAAoBF,YAAAA,QAAoB;AACtG,cAAMW,YAAY,MAAMxF,uBAAuB+E,QAAQF,cAAcC,aAAAA;AAErE,YAAIU,WAAW;AACb1C,kBAAQY,IAAI,uEAAA;AACZ+B,6BAAmBvG,KAAKM,GAAAA;AACxB;QACF;AAEAsD,gBAAQY,IAAI,sFAAA;MACd;AAIA,UAAI6B,eAAe,CAACzD,iBAAiB;AACnCgB,gBA
AQY,IAAI,2DAAA;MACd,OAAO;AACLZ,gBAAQY,IAAI,0EAAA;MACd;AAGA,YAAMU,WAAW3D,qBAAAA;AAGjB,YAAMiF,OAAOvB,mBAAmBC,UAAUe,0BAAAA;AAG1C3F,UAAImG,UAAU,KAAK;QACjB,gBAAgB;QAChB,iBAAiB;QACjB,sBAAsB;MACxB,CAAA;AAGAnG,UAAIM,IAAI4F,IAAAA;IACV,SAAS7C,OAAO;AACdC,cAAQD,MAAM,0CAA0CA,KAAAA;AAGxD,UAAI,CAACrD,IAAI8F,aAAa;AACpB9F,YAAImG,UAAU,KAAK;UAAE,gBAAgB;QAA4B,CAAA;AACjEnG,YAAIM,IAAI,mHAAyB;MACnC;IACF;EACF,GAAA;AACF;AAlGgByE;AAuGhB,SAASkB,mBAAmBvG,KAAsBM,KAAmB;AACnE,MAAIA,IAAI8F,YAAa;AAGrB,QAAMM,cAAc1G,IAAIP,OAAO;AAE/BmE,UAAQY,IAAI,0CAA0CkC,WAAAA;AAEtDpG,MAAImG,UAAU,KAAK;IACjB,YAAYC;IACZ,iBAAiB;EACnB,CAAA;AACApG,MAAIM,IAAG;AACT;AAbS2F;;;ACzbT,OAAOI,WAAU;;;ACAjB,OAAOC,aAAyB;;;ACAhC,OAAOC,SAAQ;AACf,OAAOC,YAAY;;;ACDnB,SAASC,YAAYC,WAAU;AAC/B,OAAOC,WAAU;AACjB,OAAOC,QAAQ;;;ACFf,OAAOC,WAAU;AACjB,SAASC,YAAYC,WAAU;AAM/B,eAAsBC,oBAAoBC,KAAW;AACnD,QAAMC,QAAkB,CAAA;AAExB,iBAAeC,KAAKC,YAAkB;AACpC,UAAMC,UAAU,MAAMC,IAAGC,QAAQH,YAAY;MAAEI,eAAe;IAAK,CAAA;AAEnE,eAAWC,SAASJ,SAAS;AAC3B,YAAMK,WAAWC,MAAKC,KAAKR,YAAYK,MAAMI,IAAI;AAEjD,UAAIJ,MAAMK,YAAW,GAAI;AACvB,cAAMX,KAAKO,QAAAA;MACb,WAAWD,MAAMM,OAAM,KAAMN,MAAMI,KAAKG,SAAS,gBAAA,GAAmB;AAClEd,cAAMe,KAAKP,QAAAA;MACb;IACF;EACF;AAZeP;AAcf,QAAMA,KAAKF,GAAAA;AACX,SAAOC;AACT;AAnBsBF;AAwBtB,eAAsBkB,eACpBC,iBACAC,aAAmE;AAEnE,QAAMC,YAAY,oBAAIC,IAAAA;AAGtB,QAAMC,cAAc;AACpB,QAAMC,UAAqC,CAAA;AAE3C,WAASC,IAAI,GAAGA,IAAIN,gBAAgBO,QAAQD,KAAKF,aAAa;AAC5D,UAAMI,QAAQR,gBAAgBS,MAAMH,GAAGA,IAAIF,WAAAA;AAC3C,UAAMM,eAAe,MAAMC,QAAQC,IAAIJ,MAAMK,IAAI,CAACC,aAAab,YAAYa,QAAAA,CAAAA,CAAAA;AAC3ET,YAAQP,KAAI,GAAIY,YAAAA;EAClB;AAGA,aAAWK,YAAYV,SAAS;AAC9B,eAAW,CAACW,aAAaC,IAAAA,KAASF,SAAS7B,QAAO,GAAI;AACpDgB,gBAAUgB,IAAIF,aAAaC,IAAAA;IAC7B;EACF;AAEA,SAAOf;AACT;AAxBsBH;AAiCtB,SAASoB,eAAeH,aAAqBd,WAAkC;AAE7E,QAAMkB,cAAclB,UAAUmB,IAAIL,WAAAA;AAClC,MAAII,aAAa;AACf,WAAOA;EACT;AAGA,aAAW,CAACE,KAAKC,KAAAA,KAAUrB,UAAUhB,QAAO,GAAI;AAE9C,UAAM,CAACsC,WAAWC,UAAAA,IAAcH,IAAII,MAAM,GAAA;AAC1C,QAAI,CAACF,aAAa,CAACC,WAAY;AAG/B,UAAME,cAAcH,UAAUI,OAAO,CAAA,EAAGC,YAAW,IAAKL,UAAUf,MAAM,CAAA,IAAKgB,WAAWG,OAAO,CAAA,EAAGE,YAAW,IAAKL,WAAWhB,MAAM,CAAA;AACnI,QAAIO,gBAAgBW,aAAa;AAC/B,aAAOJ;IACT;AAGA,QAAIP,gBAAgBS,YAAY;AAC9B,aAAOF;IACT;EACF;AAEA,SAAOQ;AACT;AA1BSZ;AA+BF,SAASa,oBAAoBC,SAAc/B,WAAkC;AAClF,MAAIgC,gBAAgB;AAEpB,MAAI,CAACD,QAAQE,OAAO;AAClB,WAAOD;EACT;AAEA,aAAWE,YAAYC,OAAOC,OAAOL,QAAQE,KAAK,GAAG;AACnD,QAAI,CAACC,YAAY,OAAOA,aAAa,SAAU;AAE/C,eAAWG,aAAaF,OAAOC,OAAOF,QAAAA,GAAW;AAC/C,UAAIG,aAAa,OAAOA,cAAc,YAAY,iBAAiBA,WAAW;AAC5E,cAAMC,aAAarB,eAAeoB,UAAUvB,aAAuBd,SAAAA;AACnE,YAAIsC,YAAY;AACdD,oBAAU,UAAA,IAAc;YACtBE,MAAMD,WAAWC;YACjBC,MAAMF,WAAWE;UACnB;AACAR;QACF;MACF;IACF;EACF;AAEA,SAAOA;AACT;AAzBgBF;AA8BT,SAASW,sBAAsBV,SAAcW,UAAgB;AAClE,MAAIA,aAAa,OAAO,CAACX,QAAQE,OAAO;AACtC,WAAOF;EACT;AAEA,QAAMY,WAAgB,CAAC;AACvBR,SAAOS,KAAKb,QAAQE,KAAK,EAAEY,QAAQ,CAACzB,QAAAA;AAClC,UAAM0B,eAAe1B,IAAI2B,WAAWL,QAAAA,IAAYtB,IAAIb,MAAMmC,SAASrC,MAAM,IAAIe;AAC7EuB,aAASG,YAAAA,IAAgBf,QAAQE,MAAMb,GAAAA;EACzC,CAAA;AAEA,SAAO;IACL,GAAGW;IACHE,OAAOU;IACPD;EACF;AACF;AAhBgBD;;;ADnHhB,eAAsBO,6BAA6BC,UAA0B,CAAC,GAAC;AAC7E,QAAMC,YAAYC,KAAKC,IAAG;AAE1B,QAAMC,cAAcJ,QAAQI,eAAeC,MAAKC,QAAQC,WAAW,oCAAA;AACnE,QAAMC,YAAYR,QAAQQ,aAAaH,MAAKC,QAAQC,WAAW,WAAA;AAC/D,QAAME,aAAYT,QAAQS,cAAc;AAExC,MAAIC;AACJ,MAAIV,QAAQW,aAAa;AAEvBD,cAAUE,KAAKC,MAAMD,KAAKE,UAAUd,QAAQW,WAAW,CAAA;EACzD,OAAO;AAEL,UAAMI,iBAAiB,MAAMC,IAAGC,SAASb,aAAa,OAAA;AACtDM,cAAUE,KAAKC,MAAME,cAAAA;EACvB;AAEA,QAAMG,kBAAkB,MAAMC,oBAAoBX,SAAAA;AAClD,QAAMY,YAAY,MAAMC,eAAeH,iBAAiBI,qBAAAA;AACxD,QAAMC,WAAWC,oBAAoBd,SAASU,SAAAA;AAE9C,MAAIX,YAAW;AACb,UAAMO,IAAGP,UAAUL,aAAaQ,KAAKE,UAAUJ,SAAS,MAAM,CAAA,IAAK,MAAM,OAAA;EAC3E;AAEA,
QAAMe,WAAWvB,KAAKC,IAAG,IAAKF;AAE9B,SAAO;IACLS;IACAgB,OAAO;MACLD;MACAE,kBAAkBT,gBAAgBU;MAClCC,oBAAoBT,UAAUU;MAC9BC,mBAAmBR;IACrB;EACF;AACF;AApCsBxB;AAyCtB,eAAeuB,sBAAsBU,UAAgB;AACnD,QAAMC,eAAe5B,MAAK6B,SAASC,QAAQC,IAAG,GAAIJ,QAAAA;AAGlD,QAAMK,UAAU,MAAMrB,IAAGC,SAASe,UAAU,OAAA;AAC5C,QAAMM,aAAaC,GAAGC,iBAAiBR,UAAUK,SAASE,GAAGE,aAAaC,QAAQ,IAAA;AAElF,SAAOC,0BAA0BL,YAAYL,YAAAA;AAC/C;AAReX;AAaf,SAASqB,0BAA0BL,YAA2BN,UAAgB;AAC5E,QAAMY,WAAW,oBAAIC,IAAAA;AACrB,MAAIC,iBAAiB;AACrB,MAAIC,YAAY;AAGhB,WAASC,cAAcC,MAAa;AAElC,QAAI,eAAeA,QAAQC,MAAMC,QAAQF,KAAKG,SAAS,GAAG;AACxD,aAAQH,KAAKG,UAAgCC,OAC3C,CAACC,QAA6BA,IAAIC,SAAShB,GAAGiB,WAAWC,SAAS;IAEtE;AAEA,QAAI,gBAAgBR,QAAQC,MAAMC,QAAQF,KAAKS,UAAU,GAAG;AAC1D,aAAOT,KAAKS;IACd;AACA,WAAO,CAAA;EACT;AAZSV;AAcT,WAASW,MAAMV,MAAa;AAE1B,QAAIV,GAAGqB,mBAAmBX,IAAAA,GAAO;AAC/B,YAAMS,aAAaV,cAAcC,IAAAA;AAGjC,UAAIA,KAAKY,MAAM;AACbd,oBAAYE,KAAKY,KAAKC,QAAQxB,UAAAA;MAChC;AAEA,iBAAWyB,aAAaL,YAAY;AAClC,YAAInB,GAAGyB,iBAAiBD,UAAUE,UAAU,GAAG;AAC7C,gBAAMA,aAAaF,UAAUE;AAC7B,gBAAMC,gBAAgBD,WAAWA,WAAWH,QAAQxB,UAAAA;AAEpD,cAAI4B,kBAAkB,cAAc;AAClC,gBAAID,WAAWE,UAAUvC,SAAS,GAAG;AACnC,oBAAMwC,MAAMH,WAAWE,UAAU,CAAA;AACjC,kBAAI5B,GAAG8B,gBAAgBD,GAAAA,GAAM;AAC3BtB,iCAAiBsB,IAAIE;cACvB;YACF;UACF;QACF;MACF;IACF;AAGA,QAAI/B,GAAGgC,oBAAoBtB,IAAAA,KAASA,KAAKY,MAAM;AAC7C,YAAMW,aAAavB,KAAKY,KAAKC,QAAQxB,UAAAA;AACrC,UAAImC,aAAa;AACjB,UAAIC,YAAY;AAChB,YAAM,EAAEC,KAAI,IAAKrC,WAAWsC,8BAA8B3B,KAAK4B,SAASvC,UAAAA,CAAAA;AAExE,YAAMoB,aAAaV,cAAcC,IAAAA;AAEjC,iBAAWc,aAAaL,YAAY;AAClC,YAAInB,GAAGyB,iBAAiBD,UAAUE,UAAU,GAAG;AAC7C,gBAAMC,gBAAgBH,UAAUE,WAAWA,WAAWH,QAAQxB,UAAAA;AAC9D,cAAI;YAAC;YAAO;YAAQ;YAAO;YAAU;YAAS;YAAW;YAAQ;YAAOwC,SAASZ,aAAAA,GAAgB;AAC/FO,yBAAaP,cAAca,YAAW;AACtC,gBAAIhB,UAAUE,WAAWE,UAAUvC,SAAS,GAAG;AAC7C,oBAAMwC,MAAML,UAAUE,WAAWE,UAAU,CAAA;AAC3C,kBAAI5B,GAAG8B,gBAAgBD,GAAAA,GAAM;AAC3BM,4BAAYN,IAAIE;cAClB;YACF;UACF;QACF;MACF;AAEA,UAAIG,cAAcD,cAAczB,WAAW;AACzC,cAAMiC,cAAc,GAAGjC,SAAAA,IAAayB,UAAAA;AACpC5B,iBAASqC,IAAID,aAAa;UACxBE,MAAMlD;UACN2C,MAAMA,OAAO;UACbQ,QAAQV;UACR3B;UACA4B;QACF,CAAA;MACF;IACF;AAEAnC,OAAG6C,aAAanC,MAAMU,KAAAA;EACxB;AAhESA;AAkETA,QAAMrB,UAAAA;AACN,SAAOM;AACT;AAxFSD;;;ADnDF,SAAS0C,qBACdC,iBACAC,mBACAC,WAAkB;AAGlB,MAAIC,QAA6B;AAEjC,SAAO,OAAOC,MAAeC,KAAeC,YAAAA;AAC1C,QAAI;AAEF,YAAMC,aAAa,MAAMC,IAAGC,SAAST,iBAAiB,OAAA;AAGtD,YAAMU,cAAcC,OAAOC,WAAW,KAAA,EAAOC,OAAON,UAAAA,EAAYO,OAAO,KAAA;AAGvE,UAAIX,SAASA,MAAMY,aAAaL,aAAa;AAC3C,eAAOL,IAAIW,KAAKb,MAAMc,IAAI;MAC5B;AAGA,UAAIC,UAAUC,KAAKC,MAAMb,UAAAA;AAGzB,UAAIN,qBAAqBK,QAAQe,OAAO;AACtC,cAAM,EAAEC,SAASC,iBAAiBC,MAAK,IAAK,MAAMC,6BAA6B;UAC7EC,aAAaR;UACbS,WAAW;UACXzB,WAAWA,aAAaI,QAAQsB;QAClC,CAAA;AACAV,kBAAUK;AAGVM,gBAAQC,IAAI,yBAAyBN,MAAMO,QAAQ,OAAOP,MAAMQ,iBAAiB,aAAa;MAChG;AAGA,YAAMC,SAASC,sBAAsBhB,SAASZ,QAAQ6B,QAAQ;AAG9DhC,cAAQ;QACNc,MAAMgB;QACNlB,UAAUL;MACZ;AAEAL,UAAIW,KAAKiB,MAAAA;IACX,SAASG,OAAO;AACd,YAAMC,UAAUD,iBAAiBE,QAAQF,MAAMC,UAAU;AACzDhC,UAAIkC,OAAO,GAAA,EAAKvB,KAAK;QACnBoB,OAAO;QACPC;MACF,CAAA;IACF;EACF;AACF;AAvDgBtC;;;ADMT,SAASyC,oBACdC,SACAC,SAA0B;AAE1B,QAAM,EAAEC,iBAAiBC,mBAAmBC,UAAS,IAAKJ;AAC1D,QAAMK,SAASC,QAAQC,OAAM;AAC7B,QAAMC,UAAUC,qBAAqBP,iBAAiBC,mBAAmBC,SAAAA;AAGzEC,SAAOK,IAAI,iBAAiB,CAACC,KAAKC,QAAQJ,QAAQG,KAAKC,KAAKX,OAAAA,CAAAA;AAE5D,SAAOI;AACT;AAZgBN;;;AIZhB,IAAMc,iBAA8B;EAClC;IACEC,QAAQ;IACRC,MAAM;IACNC,aAAa;EACf;;AAOK,SAASC,wBAAwBC,SAAiC;AACvE,QAAM,EAAEC,iBAAiBC,oBAAoB,MAAMC,UAAS,IAAKH;AAEjE,SAAO;IACLI,MAAM;IACNC,WAAW;IACXC,QAAQX;IAERY,SAAS,wBAACC,YAA+BA,QAAQC,OAAxC;IAETC,cAAc,wBAACF,YAAAA;AACb,aAAOG,oBACL;QACEV;QACAC;QACAC;MACF,GACAK,OAAAA;IAEJ,GATc;EAUhB;AACF;AArBgBT;;;ACnBhB,OAAOa,cAAyB;;;ACAhC,SAASC,YAAY
C,WAAU;AAC/B,SAASC,YAAYC,QAAAA,OAAMC,gBAAgB;;;ACyB3C,SAASC,mBAAmBC,SAAe;AAEzC,MAAIC,eAAeD,QAAQE,QAAQ,oBAAoB,MAAA;AAKvDD,iBAAeA,aAAaC,QAAQ,WAAW,KAAA;AAC/CD,iBAAeA,aAAaC,QAAQ,aAAa,WAAA;AAIjDD,iBAAeA,aAAaC,QAAQ,OAAO,OAAA;AAG3CD,iBAAeA,aAAaC,QAAQ,cAAc,OAAA;AAGlD,SAAO,IAAIC,OAAO,IAAIF,YAAAA,GAAe;AACvC;AAnBSF;AA4CF,SAASK,mBAAmBC,YAAgCL,SAAe;AAChF,MAAI,CAACK,cAAc,CAACL,SAAS;AAC3B,WAAO;EACT;AAGA,QAAMM,mBAAmBC,yBAAyBF,UAAAA;AAClD,QAAMG,oBAAoBD,yBAAyBP,OAAAA;AAGnD,MAAIM,qBAAqBE,mBAAmB;AAC1C,WAAO;EACT;AAGA,MAAIC,mBAAmBD,iBAAAA,GAAoB;AACzC,UAAME,QAAQX,mBAAmBS,iBAAAA;AACjC,WAAOE,MAAMC,KAAKL,gBAAAA;EACpB;AAGA,SAAO;AACT;AAtBgBF;AAwGT,SAASQ,mBAAmBC,SAAe;AAChD,SAAO,OAAOC,KAAKD,OAAAA;AACrB;AAFgBD;AA8CT,SAASG,yBAAyBC,OAAY;AACnD,SAAOA,MACJC,QAAQ,QAAQ,GAAA,EAChBA,QAAQ,QAAQ,EAAA;AACrB;AAJgBF;;;ADpNT,SAASG,cAAcC,UAAiB;AAC7C,MAAI,CAACA,UAAU;AACb,WAAOC,MAAKC,QAAQC,IAAG,GAAI,MAAA;EAC7B;AACA,SAAOC,WAAWJ,QAAAA,IAAYA,WAAWC,MAAKC,QAAQC,IAAG,GAAIH,QAAAA;AAC/D;AALgBD;AAUT,SAASM,gBAAgBC,UAAgB;AAC9C,SAAOC,SAASL,QAAQC,IAAG,GAAIG,QAAAA;AACjC;AAFgBD;AAOhB,eAAsBG,WAAWF,UAAgB;AAC/C,MAAI;AACF,UAAMG,IAAGC,OAAOJ,QAAAA;AAChB,WAAO;EACT,QAAQ;AACN,WAAO;EACT;AACF;AAPsBE;AAYf,SAASG,cAAaC,MAAY;AACvC,QAAMC,UAAUD,KAAKE,KAAI;AACzB,MAAI,CAACD,QAAS,QAAOE;AAErB,MAAI;AACF,WAAOC,KAAKC,MAAMJ,OAAAA;EACpB,QAAQ;AACN,WAAOE;EACT;AACF;AATgBJ,OAAAA,eAAAA;AAcT,SAASO,cAAcC,SAAiBC,SAAe;AAC5D,MAAI,OAAOD,YAAY,SAAU,QAAOJ;AAExC,QAAMM,QAAQF,QAAQE,MAAMD,OAAAA;AAC5B,MAAI,CAACC,MAAO,QAAON;AAEnB,QAAMO,QAAQC,OAAOF,MAAM,CAAA,CAAE;AAC7B,SAAOE,OAAOC,SAASF,KAAAA,IAASA,QAAQP;AAC1C;AARgBG;AAaT,SAASO,WAAWH,OAA2BI,cAAsBC,UAAgB;AAC1F,MAAI,OAAOL,UAAU,YAAY,CAACA,MAAMR,KAAI,GAAI;AAC9C,WAAOY;EACT;AAEA,QAAME,SAASL,OAAOD,KAAAA;AACtB,MAAIC,OAAOC,SAASI,MAAAA,KAAWA,SAAS,GAAG;AACzC,WAAOC,KAAKC,IAAID,KAAKE,MAAMH,MAAAA,GAASD,QAAAA;EACtC;AAEA,SAAOD;AACT;AAXgBD;AAgBT,SAASO,iBAAiBV,OAA2BW,UAAgB;AAC1E,MAAI,OAAOX,UAAU,YAAY,CAACA,MAAMR,KAAI,GAAI;AAC9C,WAAOmB;EACT;AAEA,QAAML,SAASL,OAAOD,KAAAA;AACtB,SAAOC,OAAOC,SAASI,MAAAA,KAAWA,SAAS,IAAIC,KAAKE,MAAMH,MAAAA,IAAUK;AACtE;AAPgBD;AAYT,SAASE,mBAAmBC,SAAiBC,UAAgB;AAClE,QAAMC,YAAYD,SAASE,QAAQ,OAAO,GAAA;AAC1C,QAAMC,WAAWF,UAAUG,MAAM,GAAA,EAAKC,OAAOC,OAAAA;AAE7C,MAAIH,SAASI,KAAK,CAACC,YAAYA,YAAY,IAAA,GAAO;AAChD,UAAM,IAAIC,MAAM,uBAAA;EAClB;AAEA,QAAMC,WAAW7C,MAAKkC,SAASI,SAAStC,KAAK,GAAA,CAAA;AAC7C,QAAM8C,MAAMxC,SAAS4B,SAASW,QAAAA;AAE9B,MAAIC,IAAIC,WAAW,IAAA,GAAO;AACxB,UAAM,IAAIH,MAAM,4CAAA;EAClB;AAEA,SAAOC;AACT;AAhBgBZ;AA8BT,SAASe,YAAYC,YAAgC9B,SAAe;AACzE,SAAO+B,mBAAmBD,YAAY9B,OAAAA;AACxC;AAFgB6B;AAiBT,SAASG,cAAcC,cAAkCC,gBAAsB;AACpF,MAAI,CAACD,gBAAgB,CAACC,gBAAgB;AACpC,WAAO;EACT;AACA,SAAOD,aAAaE,YAAW,MAAOD,eAAeC,YAAW;AAClE;AALgBH;AAWT,SAASI,eAAeC,OAAc;AAC3C,SAAOA,iBAAiBZ,QACpB;IAAEa,MAAMD,MAAMC;IAAMvC,SAASsC,MAAMtC;EAAQ,IAC3C;IAAEA,SAASwC,OAAOF,KAAAA;EAAO;AAC/B;AAJgBD;;;AEtJhB,SAASI,QAAAA,aAAY;;;ACArB,SAASC,kBAAkBC,YAAYC,WAAU;AACjD,SAASC,uBAAuB;AAchC,eAAsBC,sBACpBC,UACAC,SACAC,OAAa;AAEb,QAAMC,SAAS,MAAMC,WAAWJ,QAAAA;AAChC,MAAI,CAACG,QAAQ;AACX,WAAOE;EACT;AAEA,QAAMC,UAAsB,CAAA;AAC5B,QAAMC,SAASC,iBAAiBR,UAAU;IAAES,UAAU;EAAO,CAAA;AAC7D,QAAMC,KAAKC,gBAAgB;IAAEC,OAAOL;IAAQM,WAAWC;EAAS,CAAA;AAEhE,mBAAiBC,QAAQL,IAAI;AAC3B,UAAMM,QAAQC,cAAaF,IAAAA;AAC3B,QAAI,CAACC,MAAO;AACZ,QAAIA,MAAME,aAAajB,QAAS;AAEhCK,YAAQa,KAAKH,KAAAA;AACb,QAAId,QAAQ,KAAKI,QAAQc,SAASlB,OAAO;AACvCI,cAAQe,MAAK;IACf;EACF;AAEAX,KAAGY,MAAK;AACRf,SAAOe,MAAK;AAEZ,SAAOhB;AACT;AA7BsBP;AAkCtB,eAAsBwB,qBACpBvB,UACAwB,MACAC,UACAC,YACAC,cAAqB;AAErB,MAAI,CAAE,MAAMvB,WAAWJ,QAAAA,GAAY;AACjC,WAAOK;EACT;AAEA,QAAMuB,SAAS;IACbC,oBAAoB;IACpBC,WAAW,KAAK;EAClB;AAEA,QAAMC,WAAW,oBAAIC,IAAAA;AACrB,QAAMC,iBAAiC,CAAA;AAEvC
,QAAMC,qBAAqB,wBAACjC,aAAmC;IAC7DA;IACAkC,SAAS,CAAA;IACTC,QAAQ/B;IACRgC,MAAMhC;IACNiC,WAAWjC;IACXkC,SAASlC;IACTmC,YAAYnC;IACZoC,YAAYpC;IACZqC,cAAc;EAChB,IAV2B;AAY3B,QAAMC,wBAAwB,wBAACC,SAAuB5B,UAAAA;AACpD,QAAIA,MAAMoB,UAAU,CAACQ,QAAQR,OAAQQ,SAAQR,SAASS,OAAO7B,MAAMoB,MAAM;AACzE,QAAIpB,MAAMqB,QAAQ,CAACO,QAAQP,KAAMO,SAAQP,OAAOQ,OAAO7B,MAAMqB,IAAI;AAEjEO,YAAQT,QAAQhB,KAAKH,KAAAA;AACrB,QAAI4B,QAAQT,QAAQf,SAASQ,OAAOC,oBAAoB;AACtDe,cAAQT,QAAQd,MAAK;IACvB;EACF,GAR8B;AAU9B,QAAMyB,yBAAyB,wBAACF,SAAuB5B,OAAiB+B,YAAAA;AACtEH,YAAQF,eAAe;AACvBE,YAAQL,UAAUvB,MAAMgC;AACxBJ,YAAQJ,aAAaS,cAAcF,SAAS,sBAAA;AAC5CH,YAAQH,aAAaQ,cAAcF,SAAS,sBAAA;AAC5C,QAAI,CAACH,QAAQP,QAAQrB,MAAMqB,MAAM;AAC/BO,cAAQP,OAAOQ,OAAO7B,MAAMqB,IAAI;IAClC;AAEA,UAAMa,cAAc,CAACxB,cAAcyB,YAAYP,QAAQP,MAAMX,UAAAA;AAC7D,UAAM0B,gBAAgB,CAACzB,gBAAgB0B,cAAcT,QAAQR,QAAQT,YAAAA;AACrE,UAAM2B,gBAAgBJ,eAAeE;AAErC,QAAIE,eAAe;AACjBrB,qBAAed,KAAKyB,OAAAA;IACtB;EACF,GAhB+B;AAkB/B,QAAMW,kBAAkB,wBAACvC,UAAAA;AACvB,UAAM,EAAEE,UAAUjB,SAAS8C,UAAU,GAAE,IAAK/B;AAC5C,QAAI,CAACf,QAAS;AAEd,QAAI2C,UAAUb,SAASyB,IAAIvD,OAAAA;AAC3B,QAAI,CAAC2C,SAAS;AACZA,gBAAUV,mBAAmBjC,OAAAA;AAC7B8B,eAAS0B,IAAIxD,SAAS2C,OAAAA;IACxB;AAEAD,0BAAsBC,SAAS5B,KAAAA;AAE/B,QAAI,CAAC4B,QAAQF,iBAAiBK,QAAQW,SAAS,wBAAA,KAA6BX,QAAQW,SAAS,qBAAA,IAAyB;AACpHZ,6BAAuBF,SAAS5B,OAAO+B,OAAAA;IACzC;AAEA,QAAIA,QAAQW,SAAS,sBAAA,KAA2B,CAACd,QAAQN,WAAW;AAClEM,cAAQN,YAAYtB,MAAMgC;IAC5B;EACF,GAnBwB;AAqBxB,QAAMW,cAAc,wBAAC5C,SAAAA;AACnB,UAAMC,QAAQC,cAAaF,IAAAA;AAC3B,QAAIC,OAAOE,UAAU;AACnBqC,sBAAgBvC,KAAAA;IAClB;EACF,GALoB;AAOpB,QAAM4C,gBAAgB5D,UAAU4B,OAAOE,WAAW6B,WAAAA;AAElD,SAAOE,uBAAuB5B,gBAAgBT,MAAMC,QAAAA;AACtD;AA1FsBF;AA+FtB,eAAeqC,gBACb5D,UACA8B,WACA6B,aAAmC;AAEnC,QAAMG,SAAS,MAAMC,IAAGC,KAAKhE,UAAU,GAAA;AAEvC,MAAI;AACF,UAAMiE,QAAQ,MAAMH,OAAOI,KAAI;AAC/B,QAAIC,WAAWF,MAAMG;AACrB,QAAIC,YAAY;AAEhB,WAAOF,WAAW,GAAG;AACnB,YAAM/C,SAASkD,KAAKC,IAAIzC,WAAWqC,QAAAA;AACnCA,kBAAY/C;AAEZ,YAAMoD,SAASC,OAAOC,MAAMtD,MAAAA;AAC5B,YAAM0C,OAAOa,KAAKH,QAAQ,GAAGpD,QAAQ+C,QAAAA;AAErC,UAAIS,QAAQJ,OAAOK,SAAS,MAAA;AAC5B,UAAIR,WAAW;AACbO,iBAASP;AACTA,oBAAY;MACd;AAEA,YAAMS,QAAQF,MAAMG,MAAM,IAAA;AAC1BV,kBAAYS,MAAMzD,MAAK,KAAM;AAE7B,eAAS2D,IAAIF,MAAM1D,SAAS,GAAG4D,KAAK,GAAGA,KAAK,GAAG;AAC7C,YAAIF,MAAME,CAAAA,GAAI;AACZrB,sBAAYmB,MAAME,CAAAA,CAAE;QACtB;MACF;IACF;AAEA,QAAIX,WAAW;AACbV,kBAAYU,SAAAA;IACd;EACF,UAAA;AACE,UAAMP,OAAOxC,MAAK;EACpB;AACF;AAzCesC;AA8Cf,SAASC,uBACPoB,OACAzD,MACAC,UAAgB;AAEhB,QAAMyD,aAAaD,MAAM7D;AACzB,QAAM+D,aAAaD,eAAe,IAAI,IAAIZ,KAAKc,KAAKF,aAAazD,QAAAA;AACjE,QAAM4D,cAAc7D,OAAO,KAAKC;AAChC,QAAM6D,WAAWhB,KAAKC,IAAIc,aAAa5D,UAAUyD,UAAAA;AAEjD,QAAMK,aAAaN,MAAMO,MAAMH,YAAYC,QAAAA,EAAUG,IAAI,CAAC7C,aAAa;IACrE3C,SAAS2C,QAAQ3C;IACjBmC,QAAQQ,QAAQR;IAChBC,MAAMO,QAAQP;IACdC,WAAWM,QAAQN;IACnBC,SAASK,QAAQL;IACjBC,YAAYI,QAAQJ;IACpBC,YAAYG,QAAQH;IACpBN,SAASS,QAAQT,QAAQqD,MAAK,EAAGE,QAAO;EAC1C,EAAA;AAEA,SAAO;IACLlE;IACAC;IACAkE,YAAYT;IACZC;IACAS,OAAOL;EACT;AACF;AA5BS1B;AAiCT,eAAsBgC,gBACpB7F,UACAwB,MACAC,UAAgB;AAEhB,MAAI,CAAE,MAAMrB,WAAWJ,QAAAA,GAAY;AACjC,WAAOK;EACT;AAEA,QAAMyF,WAAWtE,OAAOC;AACxB,QAAM+C,SAAmB,CAAA;AACzB,MAAIuB,aAAa;AAEjB,QAAMxF,SAASC,iBAAiBR,UAAU;IAAES,UAAU;EAAO,CAAA;AAC7D,QAAMC,KAAKC,gBAAgB;IAAEC,OAAOL;IAAQM,WAAWC;EAAS,CAAA;AAEhE,MAAI;AACF,qBAAiBC,QAAQL,IAAI;AAC3B8D,aAAOrD,KAAKJ,IAAAA;AACZ,UAAIyD,OAAOpD,SAAS0E,UAAU;AAC5BtB,eAAOnD,MAAK;MACd;AACA0E,oBAAc;IAChB;EACF,UAAA;AACErF,OAAGY,MAAK;AACRf,WAAOe,MAAK;EACd;AAEA,QAAM6D,aAAaY,eAAe,IAAI,IAAIzB,KAAKc,KAAKW,aAAatE,QAAAA;AAEjE,MAAI+C,OAAOpD,WAAW,GAAG;AACvB,WAAO;MAAEI;MAAMC;MAAUsE;MAAYZ;MAAYL,OAAO,CAAA;IAAG;EAC7D;AAEA,QAAMO,aAAaf,KAAK0B,IAAID,aAAavE,OAAOC,UAAU,CAAA;AAC1D,QAA
M6D,WAAWhB,KAAK0B,IAAID,cAAcvE,OAAO,KAAKC,UAAU,CAAA;AAC9D,QAAMwE,mBAAmBF,aAAavB,OAAOpD;AAE7C,QAAM0D,QAAkB,CAAA;AACxB,WAASE,IAAIR,OAAOpD,SAAS,GAAG4D,KAAK,GAAGA,KAAK,GAAG;AAC9C,UAAMkB,YAAYD,mBAAmBjB;AACrC,QAAIkB,aAAab,cAAca,YAAYZ,UAAU;AACnDR,YAAM3D,KAAKqD,OAAOQ,CAAAA,CAAE;IACtB;EACF;AAEA,SAAO;IACLxD;IACAC;IACAsE;IACAZ;IACAL,OAAOA,MAAMY,QAAO;EACtB;AACF;AAtDsBG;AAqEtB,eAAsBM,eACpBC,QACAC,UAKI,CAAC,GAAC;AAEN,QAAMnG,QAAQmG,QAAQnG,SAAS;AAC/B,QAAMoG,SAASD,QAAQC,UAAU;AACjC,QAAMC,UAAUF,QAAQE,WAAW;IAAC;IAAU;IAAS;IAAc;;AAErE,QAAMC,UAAuB,CAAA;AAC7B,QAAMC,SAAmB,CAAA;AAGzB,aAAWC,UAAUH,SAAS;AAC5B,QAAI;AACF,YAAMI,OAAO,MAAMC,iBAAiBR,QAAQM,MAAAA;AAC5CF,cAAQrF,KAAI,GAAIwF,IAAAA;IAClB,SAASE,OAAO;AACd,YAAMC,WAAW,kBAAkBJ,MAAAA,KAAWG,iBAAiBE,QAAQF,MAAM9D,UAAUF,OAAOgE,KAAAA,CAAAA;AAC9FJ,aAAOtF,KAAK2F,QAAAA;AACZE,cAAQC,KAAK,oBAAoBH,QAAAA,EAAU;IAE7C;EACF;AAGA,MAAIN,QAAQpF,WAAW,GAAG;AACxB,QAAIqF,OAAOrF,SAAS,GAAG;AACrB4F,cAAQC,KAAK,2CAA2CR,OAAOS,KAAK,IAAA,CAAA,EAAO;IAC7E;AACA,WAAO7G;EACT;AAGA,MAAI8G,eAAeX;AACnB,MAAIH,QAAQe,UAAUf,QAAQe,OAAOhG,SAAS,GAAG;AAC/C+F,mBAAeX,QAAQa,OAAOC,CAAAA,QAAOjB,QAAQe,OAAQ1D,SAAS4D,IAAIC,KAAK,CAAA;EACzE;AAGAJ,eAAaK,KAAK,CAACC,GAAGC,MAAMA,EAAEC,YAAYF,EAAEE,SAAS;AAErD,QAAMC,QAAQT,aAAa/F;AAC3B,QAAMyG,gBAAgBV,aAAa3B,MAAMc,QAAQA,SAASpG,KAAAA;AAE1D,SAAO;IACLyG,MAAMkB;IACND;IACAE,SAASxB,SAASpG,QAAQ0H;EAC5B;AACF;AAtDsBzB;AA2DtB,eAAeS,iBACbR,QACAM,QAAc;AAEd,QAAM,EAAEQ,MAAAA,MAAI,IAAK,MAAM,OAAO,MAAA;AAE9B,MAAIlH;AACJ,MAAI+H;AAGJ,MAAIrB,WAAW,UAAU;AACvB1G,eAAWkH,MAAKd,QAAQ,YAAA;AACxB2B,aAAS,wBAAChH,SAASiH,aAAajH,MAAM,QAAA,GAA7B;EACX,WAAW2F,WAAW,SAAS;AAC7B1G,eAAWkH,MAAKd,QAAQ,WAAA;AACxB2B,aAAS,wBAAChH,SAASiH,aAAajH,MAAM,OAAA,GAA7B;EACX,WAAW2F,WAAW,cAAc;AAClC1G,eAAWkH,MAAKd,QAAQ,gBAAA;AACxB2B,aAAS,wBAAChH,SAASkH,YAAYlH,MAAM,YAAA,GAA5B;EACX,WAAW2F,WAAW,cAAc;AAClC1G,eAAWkH,MAAKd,QAAQ,gBAAA;AACxB2B,aAAS,wBAAChH,SAASkH,YAAYlH,MAAM,YAAA,GAA5B;EACX,OAAO;AACLiG,YAAQC,KAAK,sCAAsCP,MAAAA,EAAQ;AAC3D,WAAO,CAAA;EACT;AAGA,MAAI,CAAE,MAAMtG,WAAWJ,QAAAA,GAAY;AACjCgH,YAAQC,KAAK,sCAAsCjH,QAAAA,EAAU;AAC7D,WAAO,CAAA;EACT;AAEA,QAAM2G,OAAoB,CAAA;AAC1B,MAAIpG,SAAqD;AACzD,MAAIG,KAAgD;AAEpD,MAAI;AACFH,aAASC,iBAAiBR,UAAU;MAAES,UAAU;IAAO,CAAA;AACvDC,SAAKC,gBAAgB;MAAEC,OAAOL;MAAQM,WAAWC;IAAS,CAAA;AAE1D,qBAAiBC,QAAQL,IAAI;AAC3B,UAAI,CAACK,KAAKmH,KAAI,EAAI;AAElB,UAAI;AACF,cAAMZ,MAAMS,OAAOhH,IAAAA;AACnB,YAAIuG,KAAK;AACPX,eAAKxF,KAAKmG,GAAAA;QACZ;MACF,SAASa,YAAY;MAGrB;IACF;EACF,SAAStB,OAAO;AACdG,YAAQH,MAAM,oCAAoC7G,QAAAA,KAAa6G,KAAAA;AAC/D,UAAMA;EACR,UAAA;AAEE,QAAInG,IAAI;AACNA,SAAGY,MAAK;IACV;AACA,QAAIf,QAAQ;AACVA,aAAOe,MAAK;IACd;EACF;AAEA,SAAOqF;AACT;AApEeC;AAyEf,SAASoB,aAAajH,MAAc2F,QAA0B;AAC5D,MAAI;AACF,UAAM0B,UAAUC,KAAKC,MAAMvH,IAAAA;AAG3B,UAAMwH,KAAKC,aAAAA;AAEX,WAAO;MACLD;MACAhB,OAAOkB,6BAA6BL,QAAQb,KAAK;MACjDI,WAAW,IAAIe,KAAKN,QAAQpF,IAAI,EAAE2F,QAAO;MACzC5F,SAASqF,QAAQrF,WAAWqF,QAAQQ,OAAO;MAC3CC,SAAST,QAAQS,WAAW;MAC5B5I,SAASmI,QAAQlH,YAAY;MAC7B4H,QAAQV,QAAQW,WAAW;MAC3BC,OAAOZ,QAAQa,UAAU;MACzBC,UAAUd,QAAQe,aAAa;MAC/BC,OAAOhB,QAAQgB,SAAS;MACxBC,MAAM;QACJC,KAAKlB,QAAQkB;QACbC,UAAUnB,QAAQmB;QAClBlH,MAAM+F,QAAQ/F;QACdD,QAAQgG,QAAQhG;QAChBI,YAAY4F,QAAQoB;QACpB/G,YAAY2F,QAAQqB;QACpBC,IAAItB,QAAQsB;QACZC,aAAavB,QAAQwB;QACrBC,cAAczB,QAAQ0B;MACxB;MACAC,MAAM;QAACrD;;IACT;EACF,SAASG,OAAO;AAEd,WAAO;EACT;AACF;AAnCSmB;AAyCT,SAASC,YAAYlH,MAAc2F,QAAmC;AACpE,QAAM6B,KAAKC,aAAAA;AAIX,QAAMwB,QAAQjJ,KAAKiJ,MAAM,sEAAA;AAEzB,MAAI,CAACA,OAAO;AAEV,WAAO;MACLzB;MACAhB,OAAO;MACPI,WAAWe,KAAKuB,IAAG;MACnBlH,SAAShC;MACT8H,SAAS;MACT5I,SAAS;MACT6I,QAAQ;MACRE,OAAO;MACPE,UAAU;MACVE,OAAO;MACPC,MAAM;MACNU,MAAM;QAACrD;;IACT;EACF;AAEA,QAAM,CAAA,EAAGwD,SAAAA,EAAWC,OA
AAA,IAAWH;AAG/B,MAAIrC;AACJ,MAAI;AAEF,UAAMyC,SAASF,QAAQG,QAAQ,KAAK,GAAA;AACpC1C,gBAAY,IAAIe,KAAK0B,MAAAA,EAAQzB,QAAO;AAGpC,QAAI2B,MAAM3C,SAAAA,GAAY;AACpBA,kBAAYe,KAAKuB,IAAG;IACtB;EACF,SAASpD,OAAO;AACdc,gBAAYe,KAAKuB,IAAG;EACtB;AAGA,QAAM1C,QAAQgD,gBAAgBJ,OAAAA;AAE9B,SAAO;IACL5B;IACAhB;IACAI;IACA5E,SAASoH;IACTtB,SAAS;IACT5I,SAAS;IACT6I,QAAQ;IACRE,OAAO;IACPE,UAAU;IACVE,OAAO;IACPC,MAAM;IACNU,MAAM;MAACrD;;EACT;AACF;AA3DSuB;AAgET,SAASQ,6BACP+B,WAA0B;AAE1B,MAAI,OAAOA,cAAc,UAAU;AACjC,UAAMC,QAAQD,UAAUE,YAAW;AACnC,QAAID,UAAU,QAAS,QAAO;AAC9B,QAAIA,UAAU,QAAS,QAAO;AAC9B,QAAIA,UAAU,UAAUA,UAAU,UAAW,QAAO;AACpD,QAAIA,UAAU,UAAUA,UAAU,MAAO,QAAO;AAChD,QAAIA,UAAU,QAAS,QAAO;AAC9B,QAAIA,UAAU,WAAWA,UAAU,UAAW,QAAO;AACrD,WAAO;EACT;AAGA,MAAID,aAAa,GAAI,QAAO;AAC5B,MAAIA,aAAa,GAAI,QAAO;AAC5B,MAAIA,aAAa,GAAI,QAAO;AAC5B,MAAIA,aAAa,GAAI,QAAO;AAC5B,MAAIA,aAAa,GAAI,QAAO;AAC5B,SAAO;AACT;AArBS/B;AA0BT,SAAS8B,gBAAgBI,MAAY;AACnC,QAAMF,QAAQE,KAAKD,YAAW;AAG9B,MAAID,MAAM/G,SAAS,OAAA,KAAY+G,MAAM/G,SAAS,UAAA,EAAa,QAAO;AAClE,MAAI+G,MAAM/G,SAAS,OAAA,KAAY+G,MAAM/G,SAAS,KAAA,KAAU+G,MAAM/G,SAAS,QAAA,EAAM,QAAO;AACpF,MAAI+G,MAAM/G,SAAS,MAAA,KAAW+G,MAAM/G,SAAS,SAAA,KAAc+G,MAAM/G,SAAS,KAAA,KAAU+G,MAAM/G,SAAS,QAAA,EAAM,QAAO;AAChH,MAAI+G,MAAM/G,SAAS,OAAA,KAAY+G,MAAM/G,SAAS,KAAA,EAAQ,QAAO;AAC7D,MAAI+G,MAAM/G,SAAS,SAAA,KAAc+G,MAAM/G,SAAS,OAAA,EAAU,QAAO;AAGjE,SAAO;AACT;AAZS6G;AAkBT,SAAS/B,eAAAA;AACP,SAAO,uCAAuC6B,QAAQ,SAAS,CAACO,MAAAA;AAC9D,UAAMC,IAAKvG,KAAKwG,OAAM,IAAK,KAAM;AACjC,UAAMC,IAAIH,MAAM,MAAMC,IAAKA,IAAI,IAAO;AACtC,WAAOE,EAAElG,SAAS,EAAA;EACpB,CAAA;AACF;AANS2D;AAWT,eAAsBwC,gBACpBhL,UACAiL,SACA5I,OACAnC,OACAgL,WAAkB;AAElB,MAAI,CAAE,MAAM9K,WAAWJ,QAAAA,GAAY;AACjC,WAAOK;EACT;AAEA,QAAMuB,SAAS;IACbC,oBAAoB;IACpBC,WAAW,KAAK;EAClB;AAEA,QAAMC,WAAW,oBAAIC,IAAAA;AACrB,QAAMC,iBAAiC,CAAA;AAEvC,QAAMC,qBAAqB,wBAACjC,aAAmC;IAC7DA;IACAkC,SAAS,CAAA;IACTC,QAAQ/B;IACRgC,MAAMhC;IACNiC,WAAWjC;IACXkC,SAASlC;IACTmC,YAAYnC;IACZoC,YAAYpC;IACZqC,cAAc;EAChB,IAV2B;AAY3B,QAAMyI,gCAAgC,wBAACvI,YAAAA;AAErC,UAAMwI,eAAenJ,eAAeoJ,KAAKC,CAAAA,SAAQA,KAAKrL,YAAY2C,QAAQ3C,OAAO;AACjF,QAAImL,cAAc;AAChB,aAAO;IACT;AAGA,UAAMG,sBAAsB3I,QAAQP,MAAMmJ,SAASnJ,KAAAA;AACnD,QAAI,CAACkJ,qBAAqB;AACxB,aAAO;IACT;AAGA,QAAIN,WAAWrI,QAAQT,QAAQf,SAAS,GAAG;AAEzC,YAAMqK,eAAe7I,QAAQT,QAAQuJ,KAAKC,CAAAA,MAAKA,EAAE/B,cAAcqB,OAAAA;AAC/D,UAAIQ,cAAc7B,cAAcqB,SAAS;AACvC,eAAOpI,OAAO4I,aAAa7B,aAAaqB,OAAO,MAAMA,YAC/CC,YAAYO,cAAc7B,cAAcsB,cAAcA,YAAY;MAC1E;AACA,aAAO;IACT;AAEA,WAAO;EACT,GAzBsC;AA2BtC,QAAMvI,wBAAwB,wBAACC,SAAuB5B,UAAAA;AACpD,QAAIA,MAAMoB,UAAU,CAACQ,QAAQR,OAAQQ,SAAQR,SAASS,OAAO7B,MAAMoB,MAAM;AACzE,QAAIpB,MAAMqB,QAAQ,CAACO,QAAQP,KAAMO,SAAQP,OAAOQ,OAAO7B,MAAMqB,IAAI;AAEjEO,YAAQT,QAAQhB,KAAKH,KAAAA;AACrB,QAAI4B,QAAQT,QAAQf,SAASQ,OAAOC,oBAAoB;AACtDe,cAAQT,QAAQd,MAAK;IACvB;AAIA,QAAI8J,8BAA8BvI,OAAAA,GAAU;AAC1CX,qBAAed,KAAKyB,OAAAA;AAEpB,UAAI1C,SAAS+B,eAAeb,SAASlB,OAAO;AAC1C+B,uBAAe2J,IAAG;MACpB;IACF;EACF,GAlB8B;AAoB9B,QAAM9I,yBAAyB,wBAACF,SAAuB5B,OAAiB+B,YAAAA;AACtEH,YAAQF,eAAe;AACvBE,YAAQL,UAAUvB,MAAMgC;AACxBJ,YAAQJ,aAAaS,cAAcF,SAAS,sBAAA;AAC5CH,YAAQH,aAAaQ,cAAcF,SAAS,sBAAA;AAC5C,QAAI,CAACH,QAAQP,QAAQrB,MAAMqB,MAAM;AAC/BO,cAAQP,OAAOQ,OAAO7B,MAAMqB,IAAI;IAClC;AAIA,QAAI8I,8BAA8BvI,OAAAA,GAAU;AAC1CX,qBAAed,KAAKyB,OAAAA;AAEpB,UAAI1C,SAAS+B,eAAeb,SAASlB,OAAO;AAC1C+B,uBAAe2J,IAAG;MACpB;IACF;EACF,GAlB+B;AAoB/B,QAAMrI,kBAAkB,wBAACvC,UAAAA;AACvB,UAAM,EAAEE,UAAUjB,SAAS8C,UAAU,GAAE,IAAK/B;AAC5C,QAAI,CAACf,QAAS;AAEd,QAAI2C,UAAUb,SAASyB,IAAIvD,OAAAA;AAC3B,QAAI,CAAC2C,SAAS;AACZA,gBAAUV,mBAAmBjC,OAAAA;AAC7B8B,eAAS0B,IAAIxD,SAAS2C,OAAAA;IACxB;AAEAD,0BAAsBC,SAAS5B,KAAAA;AAE/B,QAAI,CAAC4B,QAAQF,iBAAiBK,QAAQW,SAAS,wB
AAA,KAA6BX,QAAQW,SAAS,qBAAA,IAAyB;AACpHZ,6BAAuBF,SAAS5B,OAAO+B,OAAAA;IACzC;AAEA,QAAIA,QAAQW,SAAS,sBAAA,KAA2B,CAACd,QAAQN,WAAW;AAClEM,cAAQN,YAAYtB,MAAMgC;IAC5B;EACF,GAnBwB;AAqBxB,QAAMW,cAAc,wBAAC5C,SAAAA;AACnB,UAAMC,QAAQC,cAAaF,IAAAA;AAC3B,QAAIC,OAAOE,UAAU;AACnBqC,sBAAgBvC,KAAAA;IAClB;EACF,GALoB;AAOpB,QAAM4C,gBAAgB5D,UAAU4B,OAAOE,WAAW6B,WAAAA;AAGlD,SAAO;IACLnC,MAAM;IACNC,UAAUQ,eAAeb;IACzBuE,YAAY1D,eAAeb;IAC3B+D,YAAY;IACZS,OAAO3D,eAAewD,IAAI,CAAC7C,aAAa;MACtC3C,SAAS2C,QAAQ3C;MACjBmC,QAAQQ,QAAQR;MAChBC,MAAMO,QAAQP;MACdC,WAAWM,QAAQN;MACnBC,SAASK,QAAQL;MACjBC,YAAYI,QAAQJ;MACpBC,YAAYG,QAAQH;MACpBN,SAASS,QAAQT,QAAQqD,MAAK,EAAGE,QAAO;IAC1C,EAAA;EACF;AACF;AAjJsBsF;AAsJtB,eAAsBa,kBACpB7L,UACAqC,OACAyJ,YAAkB;AAElB,QAAM3L,SAAS,MAAMC,WAAWJ,QAAAA;AAChC,MAAI,CAACG,QAAQ;AACX,WAAOE;EACT;AAEA,QAAMC,UAAsB,CAAA;AAC5B,QAAMC,SAASC,iBAAiBR,UAAU;IAAES,UAAU;EAAO,CAAA;AAC7D,QAAMC,KAAKC,gBAAgB;IAAEC,OAAOL;IAAQM,WAAWC;EAAS,CAAA;AAEhE,mBAAiBC,QAAQL,IAAI;AAC3B,UAAMM,QAAQC,cAAaF,IAAAA;AAC3B,QAAI,CAACC,MAAO;AAEZ,UAAMuK,sBAAsBvK,MAAMqB,MAAMmJ,SAASnJ,KAAAA;AAEjD,UAAM0J,gBAAgB/K,MAAMgL,gBAAgBF,cAAc9K,MAAMiK;AAChE,QAAI,CAACM,uBAAuB,CAACQ,cAAe;AAE5CzL,YAAQa,KAAKH,KAAAA;EACf;AAEAN,KAAGY,MAAK;AACRf,SAAOe,MAAK;AAEZ,SAAO;IACLwK;IACA3J,SAAS7B;EACX;AACF;AAjCsBuL;;;AD/sBtB,SAASI,eAAeC,KAAeC,UAAkBC,UAAU,sBAAoB;AACrFF,MAAIG,OAAO,GAAA,EAAKC,KAAK;IAAEF,SAAS,GAAGA,OAAAA,KAAYG,gBAAgBJ,QAAAA,CAAAA;EAAY,CAAA;AAC7E;AAFSF;AAOT,SAASO,YAAYN,KAAeO,OAAgBL,UAAU,2BAAyB;AACrFF,MAAIG,OAAO,GAAA,EAAKC,KAAK;IAAEF;IAASK,OAAOC,eAAeD,KAAAA;EAAO,CAAA;AAC/D;AAFSD;AAOF,SAASG,6BAA6BC,QAAc;AACzD,QAAMC,aAAaC,MAAKF,QAAQ,YAAA;AAEhC,SAAO,OAAOG,KAAcb,QAAAA;AAC1B,UAAMc,WAAWD,IAAIE,OAAOD,WAAW,IAAIE,KAAI;AAC/C,QAAI,CAACF,SAAS;AACZ,aAAOd,IAAIG,OAAO,GAAA,EAAKC,KAAK;QAAEF,SAAS;MAAsB,CAAA;IAC/D;AAEA,UAAMe,QAAQC,WAAWL,IAAIM,MAAMF,OAA6B,KAAK,GAAA;AAErE,QAAI;AACF,YAAMG,UAAU,MAAMC,sBAAsBV,YAAYG,SAASG,KAAAA;AACjE,UAAI,CAACG,SAAS;AACZ,eAAOrB,eAAeC,KAAKW,UAAAA;MAC7B;AACAX,UAAII,KAAK;QACPkB,MAAMjB,gBAAgBM,UAAAA;QACtBG;QACAS,OAAOH,QAAQI;QACfJ;MACF,CAAA;IACF,SAASb,OAAO;AACdD,kBAAYN,KAAKO,KAAAA;IACnB;EACF;AACF;AA1BgBE;AA+BT,SAASgB,6BAA6Bf,QAAc;AACzD,QAAMgB,eAAed,MAAKF,QAAQ,WAAA;AAElC,SAAO,OAAOG,KAAcb,QAAAA;AAC1B,UAAM2B,OAAOC,iBAAiBf,IAAIM,MAAMQ,MAA4B,CAAA;AACpE,UAAME,WAAWX,WAAWL,IAAIM,MAAMU,UAAgC,IAAI,GAAA;AAC1E,UAAMC,aAAa,OAAOjB,IAAIM,MAAMY,SAAS,WAAWlB,IAAIM,MAAMY,KAAKf,KAAI,IAAKgB;AAChF,UAAMC,eAAe,OAAOpB,IAAIM,MAAMe,WAAW,WAAWrB,IAAIM,MAAMe,OAAOlB,KAAI,EAAGmB,YAAW,IAAKH;AAEpG,QAAI;AACF,YAAMI,SAAS,MAAMC,qBAAqBX,cAAcC,MAAME,UAAUC,YAAYG,YAAAA;AACpF,UAAI,CAACG,QAAQ;AACX,eAAOrC,eAAeC,KAAK0B,YAAAA;MAC7B;AACA1B,UAAII,KAAK;QACPkB,MAAMjB,gBAAgBqB,YAAAA;QACtB,GAAGU;QACHL,MAAMD,cAAc;QACpBI,QAAQD,gBAAgB;QACxBV,OAAOa,OAAOE,MAAMd;MACtB,CAAA;IACF,SAASjB,OAAO;AACdD,kBAAYN,KAAKO,OAAO,0BAAA;IAC1B;EACF;AACF;AAzBgBkB;AA8BT,SAASc,wBAAwB7B,QAAc;AACpD,SAAO,OAAOG,KAAcb,QAAAA;AAC1B,UAAMwC,YAAY3B,IAAIE,OAAOyB,YAAY,IAAIxB,KAAI;AACjD,QAAI,CAACwB,UAAU;AACb,aAAOxC,IAAIG,OAAO,GAAA,EAAKC,KAAK;QAAEF,SAAS;MAAuB,CAAA;IAChE;AAEA,UAAMyB,OAAOC,iBAAiBf,IAAIM,MAAMQ,MAA4B,CAAA;AACpE,UAAME,WAAWX,WAAWL,IAAIM,MAAMU,UAAgC,KAAK,GAAA;AAE3E,QAAI;AACF,YAAM5B,WAAWwC,mBAAmB/B,QAAQ8B,QAAAA;AAC5C,YAAMJ,SAAS,MAAMM,gBAAgBzC,UAAU0B,MAAME,QAAAA;AACrD,UAAI,CAACO,QAAQ;AACX,eAAOrC,eAAeC,KAAKC,QAAAA;MAC7B;AAEAD,UAAII,KAAK;QACPkB,MAAMjB,gBAAgBJ,QAAAA;QACtB,GAAGmC;MACL,CAAA;IACF,SAAS7B,OAAO;AACdD,kBAAYN,KAAKO,OAAO,mCAAA;IAC1B;EACF;AACF;AAzBgBgC;AA0CT,SAASI,2BAA2BjC,QAAc;AACvD,SAAO,OAAOG,KAAcb,QAAAA;AAC1B,UAAMiB,QAAQC,WAAWL,IAAIM,MAAMF,OAA6B,KAAK,GAAA;AACrE,UAAM2B,SAAShB,iBAAiBf,IAAIM,MAAMyB,QAA8B,CAAA;AAGxE,UAAMC,SAAShC,IAAIM,MAAM0B,SACrBC,OAAOj
C,IAAIM,MAAM0B,MAAM,EAAEE,MAAM,GAAA,EAAKC,IAAIC,CAAAA,MAAKA,EAAEjC,KAAI,CAAA,EAAIkC,OAAOC,OAAAA,IAC9DnB;AAGJ,UAAMoB,UAAUvC,IAAIM,MAAMiC,UACtBN,OAAOjC,IAAIM,MAAMiC,OAAO,EAAEL,MAAM,GAAA,EAAKC,IAAIK,CAAAA,MAAKA,EAAErC,KAAI,CAAA,EAAIkC,OAAOC,OAAAA,IAC/DnB;AAEJ,QAAI;AACF,YAAMI,SAAS,MAAMkB,eAAe5C,QAAQ;QAC1CO;QACA2B;QACAC;QACAO;MACF,CAAA;AAEA,UAAI,CAAChB,QAAQ;AACX,eAAOpC,IAAIG,OAAO,GAAA,EAAKC,KAAK;UAC1BF,SAAS;UACTqD,MAAM;QACR,CAAA;MACF;AAEAvD,UAAII,KAAKgC,MAAAA;IACX,SAAS7B,OAAO;AACdD,kBAAYN,KAAKO,OAAO,4BAAA;IAC1B;EACF;AACF;AAnCgBoC;AAwCT,SAASa,4BAA4B9C,QAAc;AACxD,QAAMgB,eAAed,MAAKF,QAAQ,WAAA;AAElC,SAAO,OAAOG,KAAcb,QAAAA;AAC1B,UAAMyD,UAAU,OAAO5C,IAAIM,MAAMsC,YAAY,WAAW5C,IAAIM,MAAMsC,QAAQzC,KAAI,IAAKgB;AACnF,QAAI,CAACyB,SAAS;AACZ,aAAOzD,IAAIG,OAAO,GAAA,EAAKC,KAAK;QAAEF,SAAS;MAAsB,CAAA;IAC/D;AAEA,UAAMwD,YAAY,OAAO7C,IAAIM,MAAMuC,cAAc,WAAW7C,IAAIM,MAAMuC,UAAU1C,KAAI,IAAKgB;AACzF,UAAMD,QAAO,OAAOlB,IAAIM,MAAMY,SAAS,WAAWlB,IAAIM,MAAMY,KAAKf,KAAI,IAAK;AAC1E,UAAMC,QAAQC,WAAWL,IAAIM,MAAMF,OAA6B,IAAI,GAAA;AAEpE,QAAI;AACF,YAAMmB,SAAS,MAAMuB,gBAAgBjC,cAAc+B,SAAS1B,OAAMd,OAAOyC,SAAAA;AACzE,UAAI,CAACtB,QAAQ;AACX,eAAOrC,eAAeC,KAAK0B,YAAAA;MAC7B;AACA1B,UAAII,KAAK;QACPkB,MAAMjB,gBAAgBqB,YAAAA;QACtBK,MAAAA;QACA,GAAGK;MACL,CAAA;IACF,SAAS7B,OAAO;AACdD,kBAAYN,KAAKO,OAAO,0BAAA;IAC1B;EACF;AACF;AA3BgBiD;AA6BT,SAASI,8BAA8BlD,QAAc;AAC1D,QAAMgB,eAAed,MAAKF,QAAQ,YAAA;AAElC,SAAO,OAAOG,KAAcb,QAAAA;AAC1B,UAAM6D,cAAchD,IAAIE,OAAO8C,cAAc,IAAI7C,KAAI;AACrD,QAAI,CAAC6C,YAAY;AACf,aAAO7D,IAAIG,OAAO,GAAA,EAAKC,KAAK;QAAEF,SAAS;MAAyB,CAAA;IAClE;AAEA,UAAM6B,QAAO,OAAOlB,IAAIM,MAAMY,SAAS,WAAWlB,IAAIM,MAAMY,KAAKf,KAAI,IAAK;AAE1E,QAAI;AACF,YAAMoB,SAAS,MAAM0B,kBAAkBpC,cAAcK,OAAM8B,UAAAA;AAC3D,UAAI,CAACzB,QAAQ;AACX,eAAOrC,eAAeC,KAAK0B,YAAAA;MAC7B;AACA1B,UAAII,KAAK;QACPkB,MAAMjB,gBAAgBqB,YAAAA;QACtB,GAAGU;MACL,CAAA;IACF,SAAS7B,OAAO;AACdD,kBAAYN,KAAKO,OAAO,0BAAA;IAC1B;EACF;AACF;AAxBgBqD;;;AEzMhB,OAAOG,WAAU;AAcjB,SAASC,mBACPC,MACAC,MACAC,SAAe;AAEf,SAAO,IAAIC,QAAQ,CAACC,YAAAA;AAClB,UAAMC,YAAYC,KAAKC,IAAG;AAE1B,UAAMC,MAAMC,MAAKC,QACf;MACEC,UAAUX;MACVC;MACAW,MAAM;MACNC,QAAQ;MACRX;IACF,GACA,CAACY,SAAAA;AACC,YAAMC,eAAeT,KAAKC,IAAG,IAAKF;AAElCD,cAAQ;QACNY,WAAW;QACXD;MACF,CAAA;IACF,CAAA;AAGFP,QAAIS,GAAG,WAAW,MAAA;AAChBT,UAAIU,QAAO;AACXd,cAAQ;QACNY,WAAW;QACXG,OAAO;MACT,CAAA;IACF,CAAA;AAEAX,QAAIS,GAAG,SAAS,CAACG,QAAAA;AACfhB,cAAQ;QACNY,WAAW;QACXG,OAAOC,IAAIC;MACb,CAAA;IACF,CAAA;AAEAb,QAAIc,IAAG;EACT,CAAA;AACF;AA3CSvB;AAgDF,SAASwB,yBAAyBC,UAAoC,CAAC,GAAC;AAC7E,QAAM,EACJC,aAAaC,OAAOC,QAAQC,IAAIC,WAAW,KAAK,KAChDC,aAAa,aACb5B,UAAU,IAAI,IACZsB;AACJ,SAAQ,OAAOO,MAAMC,QAAAA;AACnB,QAAI;AACF,YAAMC,SAAS,MAAMlC,mBAAmB+B,YAAYL,YAAYvB,OAAAA;AAEhE,UAAI+B,OAAOjB,WAAW;AACpBgB,YAAIE,OAAO,GAAA,EAAKC,KAAK;UACnBD,QAAQ;UACRE,SAAS,GAAGN,UAAAA,IAAcL,UAAAA;UAC1BV,cAAckB,OAAOlB;UACrBsB,YAAW,oBAAI/B,KAAAA,GAAOgC,YAAW;QACnC,CAAA;MACF,OAAO;AACLN,YAAIE,OAAO,GAAA,EAAKC,KAAK;UACnBD,QAAQ;UACRE,SAAS,GAAGN,UAAAA,IAAcL,UAAAA;UAC1BN,OAAOc,OAAOd;UACdkB,YAAW,oBAAI/B,KAAAA,GAAOgC,YAAW;QACnC,CAAA;MACF;IACF,SAASnB,OAAO;AACda,UAAIE,OAAO,GAAA,EAAKC,KAAK;QACnBD,QAAQ;QACRE,SAAS,GAAGN,UAAAA,IAAcL,UAAAA;QAC1BN,OAAOA,iBAAiBoB,QAAQpB,MAAME,UAAU;QAChDgB,YAAW,oBAAI/B,KAAAA,GAAOgC,YAAW;MACnC,CAAA;IACF;EACF;AACF;AAlCgBf;;;AL/CT,SAASiB,mBAAmBC,UAAyB,CAAC,GAAC;AAC5D,QAAMC,SAASC,cAAcF,QAAQC,MAAM;AAC3C,QAAME,SAASC,SAAQC,OAAM;AAG7BF,SAAOG,IAAI,uBAAuBC,6BAA6BN,MAAAA,CAAAA;AAG/DE,SAAOG,IAAI,iBAAiBE,6BAA6BP,MAAAA,CAAAA;AAGzDE,SAAOG,IAAI,oBAAoBG,wBAAwBR,MAAAA,CAAAA;AAGvDE,SAAOG,IAAI,gBAAgBI,2BAA2BT,MAAAA,CAAAA;AAGtDE,SAAOG,IAAI,uBAAuBK,4BAA4BV,MAAAA,CAAAA;AAG9DE,SAAOG,IAAI,8BAA8BM,8BAA8BX,MAAAA,CAAAA;AAEvEE,SAAOG,IAAI,
WAAWO,yBAAAA,CAAAA;AAEtB,SAAOV;AACT;AAzBgBJ;;;AMJhB,IAAMe,kBAA+B;EACnC;IACEC,QAAQ;IACRC,MAAM;IACNC,aAAa;EACf;EACA;IACEF,QAAQ;IACRC,MAAM;IACNC,aAAa;EACf;EACA;IACEF,QAAQ;IACRC,MAAM;IACNC,aAAa;EACf;EACA;IACEF,QAAQ;IACRC,MAAM;IACNC,aAAa;EACf;EACA;IACEF,QAAQ;IACRC,MAAM;IACNC,aAAa;EACf;EACA;IACEF,QAAQ;IACRC,MAAM;IACNC,aAAa;EACf;;AAOK,SAASC,wBAAwBC,UAAoC,CAAC,GAAC;AAC5E,QAAM,EAAEC,OAAM,IAAKD;AAEnB,SAAO;IACLE,MAAM;IACNC,WAAW;IACXC,QAAQT;IAERU,SAAS,wBAACC,YAA+BA,QAAQC,OAAxC;IAETC,cAAc,wBAACF,YAAAA;AACb,aAAOG,mBAAmB;QAAER,QAAQA,UAAUK,QAAQL;MAAO,CAAA;IAC/D,GAFc;EAGhB;AACF;AAdgBF;;;AChDhB,OAAOW,cAAyB;;;ACChC,SAASC,QAAAA,aAAY;AACrB,OAAOC,UAAQ;;;ACFf,SAASC,cAAAA,aAAYC,QAAAA,aAAY;AACjC,OAAOC,SAAQ;AAKR,SAASC,eAAcC,UAAiB;AAC7C,MAAI,CAACA,UAAU;AACb,WAAOC,MAAKC,QAAQC,IAAG,GAAI,MAAA;EAC7B;AACA,SAAOC,YAAWJ,QAAAA,IAAYA,WAAWC,MAAKC,QAAQC,IAAG,GAAIH,QAAAA;AAC/D;AALgBD,OAAAA,gBAAAA;AAUT,SAASM,UAAUC,KAAW;AACnC,MAAI,CAACC,IAAGC,WAAWF,GAAAA,GAAM;AACvBC,IAAAA,IAAGE,UAAUH,KAAK;MAAEI,WAAW;IAAK,CAAA;EACtC;AACF;AAJgBL;AAST,SAASM,gBAAeC,OAAc;AAC3C,SAAOA,iBAAiBC,QACpB;IAAEC,MAAMF,MAAME;IAAMC,SAASH,MAAMG;EAAQ,IAC3C;IAAEA,SAASC,OAAOJ,KAAAA;EAAO;AAC/B;AAJgBD,OAAAA,iBAAAA;;;ADTT,SAASM,mBAAmBC,QAAgBC,UAAgB;AACjE,QAAMC,WAAWC,MAAKH,QAAQC,QAAAA;AAE9BG,YAAUJ,MAAAA;AAEV,SAAO,OAAOK,KAAcC,QAAAA;AAC1B,QAAI;AACF,YAAMC,aAAaF,IAAIG;AACvB,UAAG,CAACD,WAAWE,SAAQ;AACrB,eAAOH,IAAII,OAAO,GAAA,EAAKC,KAAK;UAAEF,SAAS;QAAsB,CAAA;MAC/D;AACA,YAAMG,UAAUC,KAAKC,UAAU;QAC7B,GAAGP;QACHQ,cAAa,oBAAIC,KAAAA,GAAOC,YAAW;MACrC,CAAA,IAAK;AACL,YAAMC,KAAGC,SAASC,WAAWlB,UAAUU,OAAAA;AACvCN,UAAIK,KAAK;QACPU,SAAS;MACX,CAAA;IACF,SAASC,OAAO;AACdC,MAAAA,aAAYjB,KAAKgB,OAAO,wBAAA;IAC1B;EACF;AACF;AAvBgBvB;AAwBT,SAASyB,wBAAwBxB,QAAgBC,UAAgB;AACtE,QAAMC,WAAWC,MAAKH,QAAQC,QAAAA;AAE9BG,YAAUJ,MAAAA;AAEV,SAAO,OAAOK,KAAcC,QAAAA;AAC1B,QAAI;AACF,YAAMmB,cAAcpB,IAAIG;AACxB,UAAG,CAACkB,MAAMC,QAAQF,WAAAA,GAAa;AAC7B,eAAOnB,IAAII,OAAO,GAAA,EAAKC,KAAK;UAAEF,SAAS;QAA+B,CAAA;MACxE;AACA,YAAMmB,WAAW,CAAA;AACjB,iBAAWrB,cAAckB,aAAa;AACpCG,iBAASC,KAAKhB,KAAKC,UAAU;UAC3B,GAAGP;UACHQ,cAAa,oBAAIC,KAAAA,GAAOC,YAAW;QACrC,CAAA,IAAK,IAAA;MACP;AACA,YAAMC,KAAGC,SAASC,WAAWlB,UAAU0B,SAASzB,KAAK,EAAA,CAAA;AACrDG,UAAIK,KAAK;QACPU,SAAS;MACX,CAAA;IACF,SAASC,OAAO;AACdC,MAAAA,aAAYjB,KAAKgB,OAAO,wBAAA;IAC1B;EACF;AACF;AA1BgBE;AA4BhB,SAASD,aAAYjB,KAAegB,OAAgBb,UAAU,0BAAwB;AACpFH,MAAII,OAAO,GAAA,EAAKC,KAAK;IAAEF;IAASa,OAAOQ,gBAAeR,KAAAA;EAAO,CAAA;AAC/D;AAFSC,OAAAA,cAAAA;;;ADrDF,SAASQ,oBAAmBC,UAAyB,CAAC,GAAC;AAC5D,QAAMC,SAASC,eAAcF,QAAQC,MAAM;AAC3C,QAAME,SAASC,SAAQC,OAAM;AAI7BF,SAAOG,KAAK,YAAYF,SAAQG,KAAI,GAAIC,mBAAmBP,QAAQD,QAAQS,YAAY,YAAA,CAAA;AACvFN,SAAOG,KAAK,kBAAkBF,SAAQG,KAAI,GAAIG,wBAAwBT,QAAQD,QAAQS,YAAY,YAAA,CAAA;AAElG,SAAON;AACT;AAVgBJ,OAAAA,qBAAAA;;;AGJhB,IAAMY,mBAA+B;EACnC;IACEC,QAAQ;IACRC,MAAM;IACNC,aAAa;EACf;;AAOK,SAASC,4BAA4BC,UAAoC,CAAC,GAAC;AAChF,QAAM,EAAEC,OAAM,IAAKD;AAEnB,SAAO;IACLE,MAAM;IACNC,WAAW;IACXC,QAAQT;IACRU,SAAS,wBAACC,YAA+BA,QAAQC,OAAxC;IACTC,cAAc,wBAACF,YAAAA;AACb,aAAOG,oBAAmB;QACxBR,QAAQA,UAAUK,QAAQL;QAC1BS,UAAUV,QAAQU,YAAY;MAChC,CAAA;IACF,GALc;EAMhB;AACF;AAfgBX;;;AhBXhB,SAASY,kBAAkBC,YAAsB;AAC/C,SAAO,kBAAkBA,cAAcA,WAAWC,iBAAiBC;AACrE;AAFSH;AAOT,SAASI,mBAAmBH,YAAsB;AAChD,SAAO,mBAAmBA,cAAcA,WAAWI,kBAAkBF;AACvE;AAFSC;AAOT,SAASE,iBAAiBC,UAAkBC,WAAiB;AAC3D,QAAMC,YAAYC,MAAKC,MAAMC,KAAKL,UAAUC,SAAAA;AAC5C,SAAOC,UAAUI,WAAW,GAAA,IAAOJ,YAAY,IAAIA,SAAAA;AACrD;AAHSH;AAQT,SAASQ,0BACPb,YACAc,eAAqB;AAErB,MAAId,WAAWe,UAAUf,WAAWe,OAAOC,SAAS,GAAG;AACrDC,YAAQC,IAAI,4BAA4BlB,WAAWmB,IAAI,OAAOL,aAAAA,EAAe;AAC7Ed,eAAWe,OAAOK,QAAQ,CAACC,UAAAA;AACzB,YAAMb,YAAYa,MAAMZ,SAAS,MAAMK,gBAAgBL,MAAKC,MAAMC,KAAKG,eAAeO,MAA
MZ,IAAI;AAChGQ,cAAQC,IAAI,KAAKG,MAAMC,MAAM,IAAId,SAAAA,MAAea,MAAME,WAAW,EAAE;IACrE,CAAA;EACF,OAAO;AACLN,YAAQC,IAAI,4BAA4BlB,WAAWmB,IAAI,OAAOL,aAAAA,EAAe;EAC/E;AACF;AAbSD;AAkBT,eAAeW,wBACbC,QACAzB,YACA0B,SAA0B;AAE1B,MAAI,CAAC1B,WAAWO,WAAW;AACzBU,YAAQU,MACN,gBAAgB3B,WAAWmB,IAAI,mDAAmD;AAEpF;EACF;AAEA,QAAMS,SAAS5B,WAAWC,aAAayB,OAAAA;AACvC,QAAMZ,gBAAgBT,iBAAiBqB,QAAQpB,UAAUN,WAAWO,SAAS;AAE7EkB,SAAOI,IAAIf,eAAec,MAAAA;AAC1Bf,4BAA0Bb,YAAYc,aAAAA;AACxC;AAjBeU;AAsBf,eAAeM,yBACbL,QACAzB,YACA0B,SAA0B;AAE1B,MAAI1B,WAAWO,WAAW;AACxBU,YAAQc,KACN,gBAAgB/B,WAAWmB,IAAI,sEACRnB,WAAWO,SAAS,IAAI;EAEnD;AAEA,QAAMyB,UAAUhC,WAAWI,cAAcsB,OAAAA;AACzCD,SAAOI,IAAIG,OAAAA;AACXf,UAAQC,IAAI,mCAAmClB,WAAWmB,IAAI,EAAE;AAClE;AAfeW;AAsDf,eAAsBG,oBACpBR,QACAS,aACAC,SAAoC;AAGpC,QAAMT,UAA6B;IACjCpB,UAAU;IACV8B,OAAOC,QAAQC,IAAIC,aAAa;IAChCC,SAASH,QAAQI,IAAG;IACpB,GAAGN;EACL;AAEA,QAAMO,iBAAiB;OAAIR;;AAE3B,aAAWlC,cAAc0C,gBAAgB;AAEvC,QAAI1C,WAAW2C,WAAW,CAAC3C,WAAW2C,QAAQjB,OAAAA,GAAU;AACtD;IACF;AAEA,QAAI;AAEF,YAAMkB,kBAAkB,kBAAkB5C,cAAc,OAAQA,WAAmBC,iBAAiB;AACpG,YAAM4C,mBAAmB,mBAAmB7C,cAAc,OAAQA,WAAmBI,kBAAkB;AAEvG,UAAIwC,mBAAmBC,kBAAkB;AACvC5B,gBAAQc,KACN,gBAAgB/B,WAAWmB,IAAI,gGACiB;MAEpD;AAEA,UAAIpB,kBAAkBC,UAAAA,GAAa;AAEjC,cAAMwB,wBAAwBC,QAAQzB,YAAY0B,OAAAA;MACpD,WAAWvB,mBAAmBH,UAAAA,GAAa;AAEzC,cAAM8B,yBAAyBL,QAAQzB,YAAY0B,OAAAA;MACrD,OAAO;AAELT,gBAAQU,MACN,gBAAiB3B,WAAmBmB,QAAQ,SAAA,sDAA+D;MAE/G;IACF,SAASQ,OAAO;AACdV,cAAQU,MAAM,mCAAmC3B,WAAWmB,IAAI,KAAKQ,KAAAA;IACvE;EACF;AACF;AAjDsBM;","names":["normalizeBasePath","basePath","normalizedBasePath","startsWith","basePathWithoutTrailingSlash","endsWith","slice","fs","path","HEADER_COMMENT","ensureHeaderComment","source","text","startsWith","slice","HEADER_COMMENT","length","stripLeadingNewlines","trimmed","value","current","collapseExtraBlankLines","replace","removePgSchemaDeclarations","source","replace","convertSchemaTableInvocations","converted","text","pinyin","renamePgTableConstants","source","pgTableRegex","renames","updated","replace","match","currentName","factory","tableName","sanitized","sanitizeIdentifier","push","from","to","equalsIndex","indexOf","suffix","slice","normalizedSuffix","trimStart","text","updateTableReferenceIdentifiers","length","reduce","acc","rename","pattern","RegExp","escapeRegExp","value","toCamelCase","str","words","split","filter","Boolean","map","word","index","toLowerCase","charAt","toUpperCase","join","name","asciiName","toAsciiName","test","transliterated","pinyin","toneType","type","error","CUSTOM_TYPE_PATTERN","replaceUnknownColumns","source","lines","split","result","replaced","unmatched","i","length","line","match","CUSTOM_TYPE_PATTERN","typeName","factory","replacedLine","replaceFollowingUnknown","push","trim","includes","text","join","nextLine","undefined","replace","fs","path","tweakImports","source","importRegex","match","identifiers","split","map","id","trim","filter","Boolean","filteredIdentifiers","timestampUsageRegex","test","includes","push","unique","Array","from","Set","replacement","join","replace","inlineCustomTypes","source","text","replace","possiblePaths","path","resolve","__dirname","templatePath","possiblePath","fs","existsSync","console","warn","inlineFromTemplate","templateContent","readFileSync","typeDefinitions","trim","needsSql","includes","needsCustomType","ensureImportIdentifier","importMatch","match","insertPoint","indexOf","length","slice","headerPrefix","HEADER_COMMENT","insertionPoint","startsWith","importSectionMatch","typeBlock","packageName","identifier","escapedPackage","importRegex","RegExp","identifiers","
split","map","id","filter","Boolean","push","unique","Array","from","Set","replacement","join","addSystemFieldComments","source","commentMap","lines","split","i","length","line","entry","Object","entries","find","key","includes","description","previousLine","trim","startsWith","indentMatch","match","indent","comment","splice","join","removeConflictingSystemFields","systemFieldMap","result","inTable","tableStartLine","tableBusinessFields","Set","bracketDepth","test","clear","char","businessField","values","add","tableEndLine","j","tableLine","shouldRemove","systemField","has","push","filter","patchDrizzleKitDefects","source","fixed","text","replace","replaceTimestampWithCustomTypes","source","replaced","pattern","text","replace","match","quote","fieldName","options","hasWithTimezone","test","hasModeString","replaceDefaultNowWithSql","TABLE_ALIAS_MARKER","appendTableAliases","source","markerIndex","indexOf","base","slice","exportRegex","tableExports","Set","match","matchAll","name","add","size","aliasLines","Array","from","sort","map","join","prefix","trimEnd","postprocessDrizzleSchema","targetPath","resolvedPath","path","resolve","fs","existsSync","console","warn","undefined","text","readFileSync","ensureHeaderComment","patchResult","patchDrizzleKitDefects","removePgSchemaDeclarations","tableConversion","convertSchemaTableInvocations","renameResult","renamePgTableConstants","updateTableReferenceIdentifiers","renames","replacement","replaceUnknownColumns","timestampReplacement","replaceTimestampWithCustomTypes","defaultNowReplacement","replaceDefaultNowWithSql","removeConflictingSystemFields","addSystemFieldComments","tweakImports","inlineCustomTypes","appendTableAliases","replace","collapseExtraBlankLines","writeFileSync","fixed","info","replaced","unmatched","length","forEach","line","converted","replacedUnknown","unmatchedUnknown","patchedDefects","replacedTimestamps","replacedDefaultNow","pluralize","mapDrizzleTypeToTS","field","typeMap","char","varchar","text","smallint","integer","int","bigint","serial","smallserial","bigserial","decimal","numeric","real","doublePrecision","boolean","timestamp","timestamptz","date","time","timetz","interval","uuid","json","jsonb","bytea","inet","cidr","macaddr","macaddr8","point","line","lseg","box","path","polygon","circle","array","customType","customTimestamptz","userProfile","fileAttachment","pgEnum","baseType","type","isArray","endsWith","enumValues","length","map","v","join","toPascalCase","str","replace","split","word","charAt","toUpperCase","slice","toKebabCase","toLowerCase","toSnakeCase","generateDTO","table","className","toPascalCase","variableName","dto","field","fields","isPrimaryKey","name","startsWith","tsType","mapDrizzleTypeToTS","optional","nullable","hasDefault","decorators","generateValidationDecorators","isUpdate","isResponse","comment","type","length","isArray","generateController","routePath","toKebabCase","pluralize","filePath","toSnakeCase","pkField","find","f","pkType","pkName","controller","generateService","service","generateModule","module","Project","Node","DrizzleSchemaParser","project","projectOptions","Project","parseSchemaFile","filePath","sourceFile","addSourceFileAtPath","tables","variableStatements","getVariableStatements","statement","declarations","getDeclarations","declaration","initializer","getInitializer","Node","isCallExpression","expression","getExpression","getText","tableInfo","parsePgTable","getName","push","variableName","callExpr","args","getArguments","length","tableName","replace","fieldsArg","isObject
LiteralExpression","fields","properties","getProperties","prop","isPropertyAssignment","fieldName","leadingComments","getLeadingCommentRanges","comment","map","c","join","trim","fieldInfo","parseField","name","columnName","type","nullable","hasDefault","notNull","isPrimaryKey","isUnique","isArray","parseBaseType","parseCallChain","current","baseCall","isPropertyAccessExpression","typeName","firstArg","isStringLiteral","getLiteralText","parseTypeConfig","isArrayLiteralExpression","enumValues","getElements","el","objLiteral","propName","value","parseInt","undefined","precision","scale","defaultValue","withTimezone","mode","Error","methodName","refArg","match","references","table","column","join","mkdir","rm","writeFile","existsSync","parseAndGenerateNestResourceTemplate","options","parser","DrizzleSchemaParser","tsConfigFilePath","tables","parseSchemaFile","schemaFilePath","length","console","warn","sort","a","b","variableName","table","info","filePath","toSnakeCase","moduleDir","join","moduleOutputDir","existsSync","dto","generateDTO","controller","generateController","service","generateService","moduleFilePath","module","generateModule","mkdir","recursive","writeFile","err","error","message","rm","fs","path","http","https","errorHtmlTemplate","isConnectionError","err","code","connectionErrorCodes","includes","checkServiceAvailable","targetUrl","timeout","Promise","resolve","url","URL","isHttps","protocol","httpModule","https","http","req","request","hostname","port","path","method","res","available","statusCode","headers","on","destroy","end","e","waitForServiceRecovery","interval","startTime","Date","now","isAvailable","setTimeout","getDirname","__dirname","getErrorHtmlTemplate","dirname","htmlPath","join","fs","readFileSync","parseLogLine","line","trimmed","trim","match","content","readRecentErrorLogs","logDir","maxLogs","fileName","logFilePath","fileStats","promises","stat","logs","hasCompileError","fileSize","size","maxReadSize","readSize","Math","min","startPosition","max","buffer","Buffer","allocUnsafe","fileHandle","open","read","error","console","close","toString","lines","split","length","shift","allLines","parsed","push","startIndex","i","log","fallbackLogs","slice","checkForErrors","endIndex","errorSection","compileErrorMatch","errorCount","parseInt","injectTemplateData","template","clientBasePath","replace","handleDevProxyError","options","process","cwd","maxErrorLogs","logFileName","retryTimeout","retryInterval","target","env","SERVER_PORT","CLIENT_BASE_PATH","clientBasePathWithoutSlash","normalizeBasePath","message","headersSent","isConnError","recovered","sendSimpleRedirect","html","writeHead","originalUrl","path","express","fs","crypto","promises","fs","path","ts","path","promises","fs","findControllerFiles","dir","files","scan","currentDir","entries","fs","readdir","withFileTypes","entry","fullPath","path","join","name","isDirectory","isFile","endsWith","push","buildSourceMap","controllerFiles","processFile","sourceMap","Map","concurrency","results","i","length","batch","slice","batchResults","Promise","all","map","filePath","metadata","operationId","info","set","findSourceInfo","directMatch","get","key","value","className","methodName","split","camelCaseId","charAt","toLowerCase","toUpperCase","undefined","enhanceOpenApiPaths","openapi","enhancedCount","paths","pathItem","Object","values","operation","sourceInfo","file","line","transformOpenapiPaths","basePath","newPaths","keys","forEach","staticApiKey","startsWith","enhanceOpenApiWithSourceInfo","options","startTime","Dat
e","now","openapiPath","path","resolve","__dirname","serverDir","writeFile","openapi","openapiData","JSON","parse","stringify","openapiContent","fs","readFile","controllerFiles","findControllerFiles","sourceMap","buildSourceMap","processControllerFile","enhanced","enhanceOpenApiPaths","duration","stats","controllersFound","length","endpointsExtracted","size","endpointsEnhanced","filePath","relativePath","relative","process","cwd","content","sourceFile","ts","createSourceFile","ScriptTarget","Latest","extractControllerMetadata","metadata","Map","controllerPath","className","getDecorators","node","Array","isArray","modifiers","filter","mod","kind","SyntaxKind","Decorator","decorators","visit","isClassDeclaration","name","getText","decorator","isCallExpression","expression","decoratorName","arguments","arg","isStringLiteral","text","isMethodDeclaration","methodName","httpMethod","routePath","line","getLineAndCharacterOfPosition","getStart","includes","toLowerCase","operationId","set","file","method","forEachChild","createOpenapiHandler","openapiFilePath","enableEnhancement","serverDir","cache","_req","res","context","fileBuffer","fs","readFile","currentHash","crypto","createHash","update","digest","fileHash","json","data","payload","JSON","parse","isDev","openapi","enhancedPayload","stats","enhanceOpenApiWithSourceInfo","openapiData","writeFile","rootDir","console","log","duration","endpointsEnhanced","result","transformOpenapiPaths","basePath","error","message","Error","status","createOpenapiRouter","options","context","openapiFilePath","enableEnhancement","serverDir","router","express","Router","handler","createOpenapiHandler","get","req","res","OPENAPI_ROUTES","method","path","description","createOpenapiMiddleware","options","openapiFilePath","enableEnhancement","serverDir","name","mountPath","routes","enabled","context","isDev","createRouter","createOpenapiRouter","express","promises","fs","isAbsolute","join","relative","pathPatternToRegex","pattern","regexPattern","replace","RegExp","matchesPathPattern","actualPath","normalizedActual","normalizePathForMatching","normalizedPattern","hasSpecialPatterns","regex","test","hasSpecialPatterns","pattern","test","normalizePathForMatching","path","replace","resolveLogDir","provided","join","process","cwd","isAbsolute","getRelativePath","filePath","relative","fileExists","fs","access","parseLogLine","line","trimmed","trim","undefined","JSON","parse","extractNumber","message","pattern","match","value","Number","isFinite","parseLimit","defaultValue","maxValue","parsed","Math","min","floor","parsePositiveInt","fallback","resolveLogFilePath","baseDir","fileName","sanitized","replace","segments","split","filter","Boolean","some","segment","Error","resolved","rel","startsWith","matchesPath","actualPath","matchesPathPattern","matchesMethod","actualMethod","expectedMethod","toUpperCase","serializeError","error","name","String","join","createReadStream","promises","fs","createInterface","readLogEntriesByTrace","filePath","traceId","limit","exists","fileExists","undefined","matches","stream","createReadStream","encoding","rl","createInterface","input","crlfDelay","Infinity","line","entry","parseLogLine","trace_id","push","length","shift","close","readRecentTraceCalls","page","pageSize","pathFilter","methodFilter","config","maxEntriesPerTrace","chunkSize","builders","Map","completedCalls","createTraceBuilder","entries","method","path","startTime","endTime","statusCode","durationMs","hasCompleted","updateBuilderMetadata","builder","String","handleRequestComplet
ed","message","time","extractNumber","pathMatches","matchesPath","methodMatches","matchesMethod","shouldInclude","processLogEntry","get","set","includes","processLine","readFileReverse","buildPaginatedResponse","handle","fs","open","stats","stat","position","size","remainder","Math","min","buffer","Buffer","alloc","read","chunk","toString","lines","split","i","items","totalItems","totalPages","ceil","startIndex","endIndex","pagedItems","slice","map","reverse","totalCalls","calls","readLogFilePage","capacity","totalLines","max","bufferStartIndex","lineIndex","readServerLogs","logDir","options","offset","sources","allLogs","errors","source","logs","readLogsBySource","error","errorMsg","Error","console","warn","join","filteredLogs","levels","filter","log","level","sort","a","b","timestamp","total","paginatedLogs","hasMore","parser","parsePinoLog","parseStdLog","trim","parseError","pinoLog","JSON","parse","id","generateUUID","mapPinoLevelToServerLogLevel","Date","getTime","msg","context","userId","user_id","appId","app_id","tenantId","tenant_id","stack","meta","pid","hostname","status_code","duration_ms","ip","requestBody","request_body","responseBody","response_body","tags","match","now","timeStr","content","isoStr","replace","isNaN","extractLogLevel","pinoLevel","lower","toLowerCase","text","c","r","random","v","readTriggerList","trigger","triggerID","shouldIncludeInCompletedCalls","alreadyAdded","some","call","isAutomationTrigger","endsWith","requestEntry","find","e","pop","readTriggerDetail","instanceID","hasInstanceID","instance_id","handleNotFound","res","filePath","message","status","json","getRelativePath","handleError","error","serializeError","createGetTraceEntriesHandler","logDir","appLogPath","join","req","traceId","params","trim","limit","parseLimit","query","entries","readLogEntriesByTrace","file","count","length","createGetRecentTracesHandler","traceLogPath","page","parsePositiveInt","pageSize","pathFilter","path","undefined","methodFilter","method","toUpperCase","result","readRecentTraceCalls","calls","createGetLogFileHandler","fileName","resolveLogFilePath","readLogFilePage","createGetServerLogsHandler","offset","levels","String","split","map","l","filter","Boolean","sources","s","readServerLogs","hint","createGetTriggerListHandler","trigger","triggerID","readTriggerList","createGetTriggerDetailHandler","instanceID","readTriggerDetail","http","checkServiceHealth","host","port","timeout","Promise","resolve","startTime","Date","now","req","http","request","hostname","path","method","_res","responseTime","available","on","destroy","error","err","message","end","createHealthCheckHandler","options","targetPort","Number","process","env","SERVER_PORT","targetHost","_req","res","result","status","json","service","timestamp","toISOString","Error","createDevLogRouter","options","logDir","resolveLogDir","router","express","Router","get","createGetTraceEntriesHandler","createGetRecentTracesHandler","createGetLogFileHandler","createGetServerLogsHandler","createGetTriggerListHandler","createGetTriggerDetailHandler","createHealthCheckHandler","DEV_LOGS_ROUTES","method","path","description","createDevLogsMiddleware","options","logDir","name","mountPath","routes","enabled","context","isDev","createRouter","createDevLogRouter","express","join","fs","isAbsolute","join","fs","resolveLogDir","provided","join","process","cwd","isAbsolute","ensureDir","dir","fs","existsSync","mkdirSync","recursive","serializeError","error","Error","name","message","String","collectLogsHandler","logDir","fileName","fi
lePath","join","ensureDir","req","res","logContent","body","message","status","json","logLine","JSON","stringify","server_time","Date","toISOString","fs","promises","appendFile","success","error","handleError","collectLogsBatchHandler","logContents","Array","isArray","logLines","push","serializeError","createDevLogRouter","options","logDir","resolveLogDir","router","express","Router","post","json","collectLogsHandler","fileName","collectLogsBatchHandler","DEV_LOGS_ROUTES","method","path","description","createCollectLogsMiddleware","options","logDir","name","mountPath","routes","enabled","context","isDev","createRouter","createDevLogRouter","fileName","isRouteMiddleware","middleware","createRouter","undefined","isGlobalMiddleware","createHandler","computeMountPath","basePath","mountPath","routePath","path","posix","join","startsWith","logMiddlewareRegistration","fullMountPath","routes","length","console","log","name","forEach","route","method","description","registerRouteMiddleware","server","context","error","router","use","registerGlobalMiddleware","warn","handler","registerMiddlewares","middlewares","options","isDev","process","env","NODE_ENV","rootDir","cwd","allMiddlewares","enabled","hasCreateRouter","hasCreateHandler"]}
+
{"version":3,"sources":["../src/utils/index.ts","../src/helpers/gen-dbschema/postprocess.ts","../src/helpers/gen-dbschema/helper/header-format.ts","../src/helpers/gen-dbschema/helper/schema-conversion.ts","../src/helpers/gen-dbschema/helper/table-rename.ts","../src/helpers/gen-dbschema/helper/custom-types.ts","../src/helpers/gen-dbschema/helper/imports.ts","../src/helpers/gen-dbschema/helper/system-fields.ts","../src/helpers/gen-dbschema/helper/patch-helper.ts","../src/helpers/gen-dbschema/helper/timestamp-replacement.ts","../src/helpers/gen-dbschema/helper/appendTableAliases.ts","../src/helpers/gen-nest-resource/generator.ts","../src/helpers/gen-nest-resource/utils.ts","../src/helpers/gen-nest-resource/schema-parser.ts","../src/helpers/gen-nest-resource/index.ts","../src/helpers/proxy-error/index.ts","../src/middlewares/index.ts","../src/middlewares/openapi/router.ts","../src/middlewares/openapi/controller.ts","../src/middlewares/openapi/services.ts","../src/middlewares/openapi/utils.ts","../src/middlewares/openapi/index.ts","../src/middlewares/dev-logs/router.ts","../src/middlewares/dev-logs/utils.ts","../src/middlewares/dev-logs/helper/path-matcher.ts","../src/middlewares/dev-logs/controller.ts","../src/middlewares/dev-logs/services/file-reader.ts","../src/middlewares/dev-logs/services/parsers.ts","../src/middlewares/dev-logs/services/trace.service.ts","../src/middlewares/dev-logs/services/server-log.service.ts","../src/middlewares/dev-logs/services/trigger.service.ts","../src/middlewares/dev-logs/services/capability.service.ts","../src/middlewares/dev-logs/health.controller.ts","../src/middlewares/dev-logs/index.ts","../src/middlewares/collect-logs/router.ts","../src/middlewares/collect-logs/controller.ts","../src/middlewares/collect-logs/utils.ts","../src/middlewares/collect-logs/index.ts"],"sourcesContent":["/**\n * 标准化基础路径,确保以 '/' 开头且不包含 trailing slash\n * @param basePath 原始基础路径\n * @returns 标准化后的基础路径\n */\nexport function normalizeBasePath(basePath: string): string {\n const normalizedBasePath = basePath.startsWith('/')\n ? basePath\n : `/${basePath}`;\n const basePathWithoutTrailingSlash = normalizedBasePath.endsWith('/')\n ? 
normalizedBasePath.slice(0, -1)\n : normalizedBasePath;\n return basePathWithoutTrailingSlash;\n}\n","import fs from 'node:fs';\nimport path from 'node:path';\n\n// Import all module functions\nimport {\n // Constants\n HEADER_COMMENT,\n CUSTOM_TYPE_PATTERN,\n // Header & Format\n ensureHeaderComment,\n stripLeadingNewlines,\n collapseExtraBlankLines,\n // Schema Conversion\n removePgSchemaDeclarations,\n convertSchemaTableInvocations,\n // Table Rename\n renamePgTableConstants,\n updateTableReferenceIdentifiers,\n escapeRegExp,\n sanitizeIdentifier,\n toAsciiName,\n // Custom Types\n replaceUnknownColumns,\n replaceFollowingUnknown,\n // Imports\n tweakImports,\n ensureTypesImport,\n removeInlineTypeHelpers,\n inlineCustomTypes,\n inlineFromTemplate,\n ensureImportIdentifier,\n // System Fields\n addSystemFieldComments,\n removeConflictingSystemFields,\n // Patch\n patchDrizzleKitDefects,\n // Timestamp Replacement\n replaceTimestampWithCustomTypes,\n replaceDefaultNowWithSql,\n // Table Aliases\n appendTableAliases,\n} from './helper';\n\ntype PostprocessStats = {\n replacedUnknown: number;\n unmatchedUnknown: string[];\n patchedDefects: number;\n replacedTimestamps: number;\n replacedDefaultNow: number;\n};\n\n// Internal functions (exported for testing)\nexport const internal = {\n CUSTOM_TYPE_PATTERN,\n HEADER_COMMENT,\n ensureHeaderComment,\n stripLeadingNewlines,\n collapseExtraBlankLines,\n removePgSchemaDeclarations,\n convertSchemaTableInvocations,\n sanitizeIdentifier,\n toAsciiName,\n renamePgTableConstants,\n updateTableReferenceIdentifiers,\n escapeRegExp,\n replaceUnknownColumns,\n replaceFollowingUnknown,\n addSystemFieldComments,\n tweakImports,\n ensureTypesImport,\n removeInlineTypeHelpers,\n inlineCustomTypes,\n inlineFromTemplate,\n ensureImportIdentifier,\n removeConflictingSystemFields,\n patchDrizzleKitDefects,\n replaceTimestampWithCustomTypes,\n replaceDefaultNowWithSql,\n appendTableAliases,\n};\n\nexport function postprocessDrizzleSchema(targetPath: string): PostprocessStats | undefined {\n const resolvedPath = path.resolve(targetPath);\n if (!fs.existsSync(resolvedPath)) {\n console.warn(`[postprocess-drizzle-schema] File not found: ${resolvedPath}`);\n return undefined;\n }\n\n let text = fs.readFileSync(resolvedPath, 'utf8');\n\n // Processing pipeline - maintain exact order\n text = ensureHeaderComment(text);\n\n // Patch drizzle-kit defects first\n const patchResult = patchDrizzleKitDefects(text);\n text = patchResult.text;\n\n text = removePgSchemaDeclarations(text);\n const tableConversion = convertSchemaTableInvocations(text);\n text = tableConversion.text;\n const renameResult = renamePgTableConstants(text);\n text = renameResult.text;\n text = updateTableReferenceIdentifiers(text, renameResult.renames);\n\n const replacement = replaceUnknownColumns(text);\n text = replacement.text;\n\n // Replace timestamp with specific options to custom types\n const timestampReplacement = replaceTimestampWithCustomTypes(text);\n text = timestampReplacement.text;\n\n // Replace .defaultNow() with .default(sql`CURRENT_TIMESTAMP`)\n const defaultNowReplacement = replaceDefaultNowWithSql(text);\n text = defaultNowReplacement.text;\n\n // Remove conflicting system fields before adding comments\n text = removeConflictingSystemFields(text);\n\n text = addSystemFieldComments(text);\n\n text = tweakImports(text);\n // Inline custom types instead of importing from external file\n text = inlineCustomTypes(text);\n\n text = appendTableAliases(text);\n // Note: 
removeInlineTypeHelpers is no longer needed since we intentionally inline types now\n text = text.replace(/\\r?\\n/g, '\\n');\n text = collapseExtraBlankLines(text);\n\n fs.writeFileSync(resolvedPath, text, 'utf8');\n\n if (patchResult.fixed > 0) {\n console.info(`[postprocess-drizzle-schema] Patched ${patchResult.fixed} drizzle-kit defects (.default(') -> .default(''))`);\n }\n if (replacement.replaced > 0) {\n console.info(`[postprocess-drizzle-schema] Replaced ${replacement.replaced} unknown columns`);\n }\n if (replacement.unmatched.length > 0) {\n console.warn('[postprocess-drizzle-schema] Unmatched custom types:', replacement.unmatched.length);\n replacement.unmatched.forEach((line) => console.warn(` ${line}`));\n }\n if (tableConversion.converted > 0) {\n console.info(`[postprocess-drizzle-schema] Converted ${tableConversion.converted} schema.table invocations to pgTable`);\n }\n if (timestampReplacement.replaced > 0) {\n console.info(`[postprocess-drizzle-schema] Replaced ${timestampReplacement.replaced} timestamp fields with customTimestamptz`);\n }\n if (defaultNowReplacement.replaced > 0) {\n console.info(`[postprocess-drizzle-schema] Replaced ${defaultNowReplacement.replaced} .defaultNow() with .default(sql\\`CURRENT_TIMESTAMP\\`)`);\n }\n\n return {\n replacedUnknown: replacement.replaced,\n unmatchedUnknown: replacement.unmatched,\n patchedDefects: patchResult.fixed,\n replacedTimestamps: timestampReplacement.replaced,\n replacedDefaultNow: defaultNowReplacement.replaced,\n };\n}\n","const HEADER_COMMENT = '/** auto generated, do not edit */';\n\nexport { HEADER_COMMENT };\n\nexport function ensureHeaderComment(source: string): string {\n let text = source.startsWith('\\uFEFF') ? source.slice(1) : source;\n\n while (text.startsWith(HEADER_COMMENT)) {\n text = text.slice(HEADER_COMMENT.length);\n text = stripLeadingNewlines(text);\n }\n\n const trimmed = stripLeadingNewlines(text);\n if (trimmed.length === 0) {\n return `${HEADER_COMMENT}\\n`;\n }\n\n return `${HEADER_COMMENT}\\n${trimmed}`;\n}\n\nexport function stripLeadingNewlines(value: string): string {\n let current = value;\n while (current.startsWith('\\r\\n') || current.startsWith('\\n')) {\n current = current.startsWith('\\r\\n') ? 
current.slice(2) : current.slice(1);\n }\n return current;\n}\n\nexport function collapseExtraBlankLines(text: string): string {\n return text.replace(/\\n{3,}/g, '\\n\\n');\n}\n","export function removePgSchemaDeclarations(source: string): string {\n return source.replace(/export const \\w+ = pgSchema\\([\\s\\S]*?\\);\\n*/g, '');\n}\n\nexport function convertSchemaTableInvocations(source: string): { text: string; converted: number } {\n let converted = 0;\n let text = source.replace(/([A-Za-z0-9_]+)\\.table\\(/g, () => {\n converted += 1;\n return 'pgTable(';\n });\n text = text.replace(/([A-Za-z0-9_]+)\\.view\\(/g, () => {\n converted += 1;\n return 'pgView(';\n });\n text = text.replace(/([A-Za-z0-9_]+)\\.materializedView\\(/g, () => {\n converted += 1;\n return 'pgMaterializedView(';\n });\n text = text.replace(/([A-Za-z0-9_]+)\\.enum\\(/g, () => {\n converted += 1;\n return 'pgEnum(';\n });\n text = text.replace(/([A-Za-z0-9_]+)\\.sequence\\(/g, () => {\n converted += 1;\n return 'pgSequence(';\n });\n return { text, converted };\n}\n","import { pinyin } from 'pinyin-pro';\n\nexport type TableRename = {\n from: string;\n to: string;\n};\n\nexport function renamePgTableConstants(source: string): { text: string; renames: TableRename[] } {\n const pgTableRegex = /export const\\s+([^\\s=]+)\\s*=\\s*(pgTable|pgView|pgMaterializedView)\\(\\s*[\"'`]([^\"'`]+)[\"'`]/gu;\n const renames: TableRename[] = [];\n\n const updated = source.replace(pgTableRegex, (match, currentName: string, factory: string, tableName: string) => {\n const sanitized = sanitizeIdentifier(tableName);\n if (sanitized === currentName) {\n return match;\n }\n renames.push({ from: currentName, to: sanitized });\n const equalsIndex = match.indexOf('=');\n const suffix = equalsIndex >= 0 ? 
match.slice(equalsIndex) : ` = ${factory}(\"${tableName}\"`;\n const normalizedSuffix = suffix.trimStart();\n return `export const ${sanitized} ${normalizedSuffix}`;\n });\n\n return { text: updated, renames };\n}\n\nexport function updateTableReferenceIdentifiers(source: string, renames: TableRename[]): string {\n if (renames.length === 0) {\n return source;\n }\n\n return renames.reduce((acc, rename) => {\n if (!rename.from || rename.from === rename.to) {\n return acc;\n }\n const pattern = new RegExp(`\\\\b${escapeRegExp(rename.from)}(\\\\s*\\\\.)`, 'g');\n return acc.replace(pattern, `${rename.to}$1`);\n }, source);\n}\n\nexport function escapeRegExp(value: string): string {\n return value.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&').replace(/\\//g, '\\\\/');\n}\n\n/**\n * Convert a string to camelCase\n * Example: \"test_exam\" -> \"testExam\", \"user_profile_data\" -> \"userProfileData\"\n */\nfunction toCamelCase(str: string): string {\n // Split by underscores, hyphens, or spaces\n const words = str.split(/[_\\-\\s]+/).filter(Boolean);\n\n if (words.length === 0) {\n return '';\n }\n\n // First word lowercase, capitalize first letter of subsequent words\n return words\n .map((word, index) => {\n if (index === 0) {\n return word.toLowerCase();\n }\n return word.charAt(0).toUpperCase() + word.slice(1).toLowerCase();\n })\n .join('');\n}\n\nexport function sanitizeIdentifier(name: string): string {\n const asciiName = toAsciiName(name);\n // Replace non-alphanumeric characters with underscores\n let sanitized = asciiName.replace(/[^A-Za-z0-9_]/g, '_');\n // Collapse multiple underscores into one\n sanitized = sanitized.replace(/_+/g, '_');\n // Remove leading/trailing underscores\n sanitized = sanitized.replace(/^_|_$/g, '');\n\n // Convert to camelCase\n sanitized = toCamelCase(sanitized);\n\n if (!sanitized) {\n sanitized = 'table';\n }\n if (!/^[A-Za-z_]/.test(sanitized)) {\n sanitized = `_${sanitized}`;\n }\n return sanitized;\n}\n\nexport function toAsciiName(name: string): string {\n if (!/[^\\x00-\\x7F]/.test(name)) {\n return name;\n }\n\n try {\n const transliterated = pinyin(name, { toneType: 'none', type: 'array' }).join('_');\n return transliterated || name;\n } catch (error) {\n return name;\n }\n}\n","// Match patterns like: 'user_profile', 'workspace_xxx.user_profile', 'user_profile[]', 'workspace_xxx.user_profile[]','file_attachment[]', 'workspace_xxx.file_attachment[]'\nconst CUSTOM_TYPE_PATTERN = /\\/\\/ TODO: failed to parse database type '(?:\\w+\\.)?(user_profile|file_attachment)(\\[\\])?'/;\n\nexport { CUSTOM_TYPE_PATTERN };\n\nexport type ReplacementOutcome = {\n text: string;\n replaced: number;\n unmatched: string[];\n};\n\nexport function replaceUnknownColumns(source: string): ReplacementOutcome {\n const lines = source.split('\\n');\n const result: string[] = [];\n let replaced = 0;\n const unmatched: string[] = [];\n\n for (let i = 0; i < lines.length; i += 1) {\n const line = lines[i];\n\n const match = line.match(CUSTOM_TYPE_PATTERN);\n if (match) {\n const typeName = match[1]; // 'user_profile' or 'file_attachment'\n const factory = typeName === 'user_profile' ? 
'userProfile' : 'fileAttachment';\n\n const replacedLine = replaceFollowingUnknown(lines[i + 1], factory);\n if (replacedLine) {\n // Successfully replaced, add the replaced line and skip both the comment and original line\n result.push(replacedLine);\n replaced += 1;\n i += 1; // Skip the next line (original unknown line)\n } else {\n // Failed to replace, keep the comment and continue\n unmatched.push(line.trim());\n result.push(line);\n }\n continue;\n }\n\n if (line.includes('unknown(')) {\n unmatched.push(line.trim());\n }\n\n result.push(line);\n }\n\n return {\n text: result.join('\\n'),\n replaced,\n unmatched,\n };\n}\n\nexport function replaceFollowingUnknown(nextLine: string | undefined, factory: 'userProfile' | 'fileAttachment'): string | undefined {\n if (!nextLine || !nextLine.includes('unknown(')) {\n return undefined;\n }\n // Simply replace 'unknown' with the appropriate factory, preserving everything else (like .array())\n return nextLine.replace('unknown(', `${factory}(`);\n}\n","import fs from 'node:fs';\nimport path from 'node:path';\nimport { HEADER_COMMENT } from './header-format';\n\nexport function tweakImports(source: string): string {\n const importRegex = /import \\{([^}]*)\\} from \"drizzle-orm\\/pg-core\";?/;\n const match = source.match(importRegex);\n if (!match) {\n return source;\n }\n\n const identifiers = match[1]\n .split(',')\n .map((id) => id.trim())\n .filter(Boolean)\n .filter((id) => id !== 'pgSchema' && id !== 'customType');\n\n // Remove timestamp if it's no longer used in the source\n // (after timestamp -> customTimestamptz replacement)\n const filteredIdentifiers = identifiers.filter((id) => {\n if (id === 'timestamp') {\n // Check if timestamp() is still used anywhere\n const timestampUsageRegex = /timestamp\\s*\\(/;\n return timestampUsageRegex.test(source);\n }\n return true;\n });\n\n if (source.includes('pgTable(') && !filteredIdentifiers.includes('pgTable')) {\n filteredIdentifiers.push('pgTable');\n }\n if (source.includes('pgView(') && !filteredIdentifiers.includes('pgView')) {\n filteredIdentifiers.push('pgView');\n }\n if (source.includes('pgMaterializedView(') && !filteredIdentifiers.includes('pgMaterializedView')) {\n filteredIdentifiers.push('pgMaterializedView');\n }\n if (source.includes('pgEnum(') && !filteredIdentifiers.includes('pgEnum')) {\n filteredIdentifiers.push('pgEnum');\n }\n if (source.includes('pgSequence(') && !filteredIdentifiers.includes('pgSequence')) {\n filteredIdentifiers.push('pgSequence');\n }\n\n const unique = Array.from(new Set(filteredIdentifiers));\n const replacement = `import { ${unique.join(', ')} } from \"drizzle-orm/pg-core\"`;\n return source.replace(importRegex, replacement);\n}\n\nexport function ensureTypesImport(source: string): string {\n if (source.includes('from \"./types\"')) {\n return source;\n }\n\n const typesImport = 'import { userProfile, fileAttachment } from \"./types\"\\n';\n const headerPrefix = `${HEADER_COMMENT}\\n`;\n if (source.startsWith(headerPrefix)) {\n const insertionPoint = headerPrefix.length;\n return `${source.slice(0, insertionPoint)}${typesImport}${source.slice(insertionPoint)}`;\n }\n\n const importSectionMatch = source.match(/^(?:import [^\\n]+\\n)+/);\n if (importSectionMatch) {\n const insertionPoint = importSectionMatch[0].length;\n return `${source.slice(0, insertionPoint)}${typesImport}${source.slice(insertionPoint)}`;\n }\n\n return `${typesImport}${source}`;\n}\n\nexport function removeInlineTypeHelpers(source: string): string {\n return source\n 
.replace(/type UserProfile = [\\s\\S]*?\\}\\);\\n*/g, '')\n .replace(/type FileAttachment = [\\s\\S]*?\\}\\);\\n*/g, '');\n}\n\nexport function inlineCustomTypes(source: string): string {\n // Remove any existing import from \"./types\"\n let text = source.replace(/import \\{[^}]*\\} from [\"']\\.\\/types[\"'];?\\n*/g, '');\n\n // Try multiple possible paths for the template file\n const possiblePaths = [\n // When bundled by tsup - __dirname points to dist/\n path.resolve(__dirname, 'template', 'types.ts'),\n // When running from source (development) - relative to helper/imports.ts\n path.resolve(__dirname, '../template', 'types.ts'),\n // Alternative paths\n path.resolve(__dirname, '../../template', 'types.ts'),\n path.resolve(__dirname, '../../../template', 'types.ts'),\n ];\n\n let templatePath: string | undefined;\n for (const possiblePath of possiblePaths) {\n if (fs.existsSync(possiblePath)) {\n templatePath = possiblePath;\n break;\n }\n }\n\n if (!templatePath) {\n console.warn('[postprocess-drizzle-schema] Template types file not found. Tried paths:', possiblePaths);\n return text;\n }\n\n return inlineFromTemplate(text, templatePath);\n}\n\nexport function inlineFromTemplate(text: string, templatePath: string): string {\n const templateContent = fs.readFileSync(templatePath, 'utf8');\n\n // Extract the type definitions (everything except the imports)\n const typeDefinitions = templateContent\n .replace(/^import\\s+.*;\\r?\\n*/gm, '') // Remove all import statements\n .trim();\n\n // Check if we need to add imports for sql and customType\n const needsSql = typeDefinitions.includes('sql`') && !text.includes(\"from 'drizzle-orm'\") && !text.includes('from \"drizzle-orm\"');\n const needsCustomType = typeDefinitions.includes('customType<') && !text.includes('customType');\n\n // Add necessary imports to drizzle-orm/pg-core import\n if (needsCustomType) {\n text = ensureImportIdentifier(text, 'drizzle-orm/pg-core', 'customType');\n }\n\n // Add sql import from drizzle-orm if needed\n if (needsSql && !text.includes(\"from 'drizzle-orm'\") && !text.includes('from \"drizzle-orm\"')) {\n const importMatch = text.match(/^import [\\s\\S]*?from [\"']drizzle-orm\\/pg-core[\"'];?\\n/m);\n if (importMatch) {\n const insertPoint = text.indexOf(importMatch[0]) + importMatch[0].length;\n text = text.slice(0, insertPoint) + \"import { sql } from 'drizzle-orm';\\n\" + text.slice(insertPoint);\n }\n }\n\n // Find insertion point: after the imports section\n const headerPrefix = `${HEADER_COMMENT}\\n`;\n let insertionPoint = 0;\n\n if (text.startsWith(headerPrefix)) {\n insertionPoint = headerPrefix.length;\n }\n\n const importSectionMatch = text.slice(insertionPoint).match(/^(?:import [^\\n]+\\n)+/);\n if (importSectionMatch) {\n insertionPoint += importSectionMatch[0].length;\n }\n\n // Insert type definitions\n const typeBlock = `\\n${typeDefinitions}\\n\\n`;\n return text.slice(0, insertionPoint) + typeBlock + text.slice(insertionPoint);\n}\n\nexport function ensureImportIdentifier(source: string, packageName: string, identifier: string): string {\n const escapedPackage = packageName.replace(/\\//g, '\\\\/');\n const importRegex = new RegExp(`import \\\\{([^}]*)\\\\} from [\"']${escapedPackage}[\"'];?`);\n const match = source.match(importRegex);\n\n if (!match) {\n // No import from this package, can't add it\n return source;\n }\n\n const identifiers = match[1]\n .split(',')\n .map((id) => id.trim())\n .filter(Boolean);\n\n if (identifiers.includes(identifier)) {\n return source; // Already 
imported\n }\n\n identifiers.push(identifier);\n const unique = Array.from(new Set(identifiers));\n const replacement = `import { ${unique.join(', ')} } from \"${packageName}\"`;\n return source.replace(importRegex, replacement);\n}\n","/**\n * Add comments for system fields in the Drizzle schema source code.\n * @param source The source code string to process\n * @returns The processed source code with system field comments added\n */\nexport function addSystemFieldComments(source: string): string {\n const commentMap: Record<string, string> = {\n '_created_at': 'Creation time',\n '_created_by': 'Creator',\n '_updated_at': 'Update time',\n '_updated_by': 'Updater',\n };\n\n const lines = source.split('\\n');\n\n for (let i = 0; i < lines.length; i += 1) {\n const line = lines[i];\n const entry = Object.entries(commentMap).find(([key]) => line.includes(`\"${key}\"`));\n if (!entry) {\n continue;\n }\n\n const [, description] = entry;\n const previousLine = lines[i - 1]?.trim() ?? '';\n if (previousLine.startsWith('//') && previousLine.includes('System field')) {\n continue;\n }\n\n const indentMatch = line.match(/^\\s*/);\n const indent = indentMatch ? indentMatch[0] : '';\n const comment = `${indent}// System field: ${description} (auto-filled, do not modify)`;\n lines.splice(i, 0, comment);\n i += 1;\n }\n\n return lines.join('\\n');\n}\n\n/**\n * Remove conflicting system fields from the Drizzle schema source code.\n * @param source The source code string to process\n * @returns The processed source code with conflicting system fields removed\n */\nexport function removeConflictingSystemFields(source: string): string {\n const systemFieldMap: Record<string, string> = {\n '_created_at': 'created_at',\n '_created_by': 'created_by',\n '_updated_at': 'updated_at',\n '_updated_by': 'updated_by',\n };\n\n const lines = source.split('\\n');\n const result: Array<string | null> = [];\n\n // Track table-level state\n let inTable = false;\n let tableStartLine = -1;\n const tableBusinessFields = new Set<string>();\n let bracketDepth = 0;\n\n for (let i = 0; i < lines.length; i += 1) {\n const line = lines[i];\n\n // Check if we're starting a new table definition\n if (!inTable && /=\\s*(pgTable|pgView|pgMaterializedView)\\s*\\(/.test(line)) {\n inTable = true;\n tableStartLine = result.length;\n tableBusinessFields.clear();\n bracketDepth = 0;\n }\n\n if (inTable) {\n // Track bracket depth to find table end\n for (const char of line) {\n if (char === '{') bracketDepth++;\n if (char === '}') bracketDepth--;\n }\n\n // Collect business fields in this table\n for (const businessField of Object.values(systemFieldMap)) {\n if (line.includes(`\"${businessField}\"`) || line.includes(`'${businessField}'`)) {\n tableBusinessFields.add(businessField);\n }\n }\n\n // Check if table definition ends\n if (bracketDepth === 0 && line.includes(');')) {\n inTable = false;\n\n // Now process this table's lines to remove conflicting system fields\n const tableEndLine = result.length;\n for (let j = tableStartLine; j <= tableEndLine; j++) {\n const tableLine = result[j] || '';\n let shouldRemove = false;\n\n for (const [systemField, businessField] of Object.entries(systemFieldMap)) {\n if (tableBusinessFields.has(businessField)) {\n if (tableLine.includes(`\"${systemField}\"`) || tableLine.includes(`'${systemField}'`)) {\n shouldRemove = true;\n // Also remove previous line if it's a system field comment\n if (j > 0 && result[j - 1]?.includes('// System field:')) {\n result[j - 1] = null; // Mark for removal\n }\n 
break;\n }\n }\n }\n\n if (shouldRemove) {\n result[j] = null; // Mark for removal\n }\n }\n }\n }\n\n result.push(line);\n }\n\n // Filter out null entries (marked for removal)\n return result.filter(line => line !== null).join('\\n');\n}\n","/**\n * Patch for drizzle-kit defects.\n *\n * Fix syntax error: .default(') -> .default('')\n * This is a drizzle-kit bug that generates invalid TypeScript code.\n */\n\n/**\n * Fix drizzle-kit syntax error where .default(') is generated.\n *\n * @param source - The schema source code\n * @returns Object with fixed text and count of fixes applied\n */\nexport function patchDrizzleKitDefects(source: string): { text: string; fixed: number } {\n let fixed = 0;\n\n // Fix syntax error: .default(') -> .default('')\n const text = source.replace(/\\.default\\('\\)/g, () => {\n fixed += 1;\n return `.default('')`;\n });\n\n return { text, fixed };\n}\n","/**\n * Replace timestamp with specific options to custom types.\n *\n * Replaces timestamp(\"field\", { withTimezone: true, mode: 'string' })\n * with customTimestamptz(\"field\")\n */\n\n/**\n * Replace timestamp with timezone and string mode to customTimestamptz.\n *\n * @param source - The schema source code\n * @returns Object with replaced text and count of replacements\n */\nexport function replaceTimestampWithCustomTypes(source: string): { text: string; replaced: number } {\n let replaced = 0;\n\n // Match timestamp with any options: timestamp(\"field\", { ... })\n const pattern = /timestamp\\((['\"])(.*?)\\1,\\s*(\\{[^}]*\\})\\)/g;\n\n const text = source.replace(pattern, (match, quote, fieldName, options) => {\n // Check if options contain both withTimezone: true and mode: 'string'\n const hasWithTimezone = /withTimezone:\\s*true/.test(options);\n const hasModeString = /mode:\\s*['\"]string['\"]/.test(options);\n\n if (hasWithTimezone && hasModeString) {\n replaced += 1;\n return `customTimestamptz(${quote}${fieldName}${quote})`;\n }\n\n // Return original if conditions not met\n return match;\n });\n\n return { text, replaced };\n}\n\n/**\n * Replace .defaultNow() with .default(sql`CURRENT_TIMESTAMP`)\n * for custom timestamp types that don't support defaultNow().\n *\n * @param source - The schema source code\n * @returns Object with replaced text and count of replacements\n */\nexport function replaceDefaultNowWithSql(source: string): { text: string; replaced: number } {\n let replaced = 0;\n\n // Match .defaultNow() and replace with .default(sql`CURRENT_TIMESTAMP`)\n const pattern = /\\.defaultNow\\(\\)/g;\n\n const text = source.replace(pattern, () => {\n replaced += 1;\n return '.default(sql`CURRENT_TIMESTAMP`)';\n });\n\n return { text, replaced };\n}\n","const TABLE_ALIAS_MARKER = '// table aliases';\n\nexport function appendTableAliases(source: string): string {\n const markerIndex = source.indexOf(`\\n${TABLE_ALIAS_MARKER}`);\n const base = markerIndex === -1 ? 
source : source.slice(0, markerIndex);\n\n const exportRegex = /export const\\s+([A-Za-z_$][\\w$]*)\\s*=\\s*pgTable\\s*\\(/g;\n const tableExports = new Set<string>();\n\n for (const match of base.matchAll(exportRegex)) {\n const name = match[1];\n tableExports.add(name);\n }\n\n if (tableExports.size === 0) {\n\n return base;\n\n }\n\n const aliasLines = Array.from(tableExports)\n .sort()\n .map((name) => `export const ${name}Table = ${name};`)\n .join('\\n');\n\n const prefix = base.trimEnd();\n return `${prefix}\\n\\n${TABLE_ALIAS_MARKER}\\n${aliasLines}\\n`;\n}\n","import { pluralize } from 'inflection';\nimport { FieldInfo, TableInfo } from './schema-parser';\nimport { mapDrizzleTypeToTS, toKebabCase, toPascalCase, toSnakeCase } from './utils';\n\nexport function generateDTO(table: TableInfo): string {\n const className = toPascalCase(table.variableName);\n\n let dto = `// 请修改该文件代码以满足需求\nimport { ApiProperty, ApiPropertyOptional } from '@nestjs/swagger';\nimport { IsDefined, IsNumber, IsOptional, IsString, MaxLength, IsInt, IsBoolean, IsUUID, IsDate, IsObject, IsArray } from 'class-validator';\nimport { Type } from 'class-transformer';\nimport { FileAttachment } from '../../../database/schema';\n\n`;\n dto += `export class Create${className}Dto {\\n`;\n\n for (const field of table.fields) {\n // 跳过主键\n if (\n field.isPrimaryKey || field.name === 'id' ||\n field.name.startsWith('_') ||\n field.name.startsWith('created') ||\n field.name.startsWith('updated')\n ) {\n continue;\n }\n\n // if (field.comment) {\n // dto += ` /** ${field.comment} */\\n`;\n // }\n\n const tsType = mapDrizzleTypeToTS(field);\n const optional = field.nullable || field.hasDefault ? '?' : '';\n\n // 添加验证装饰器注释\n const decorators = generateValidationDecorators(field);\n if (decorators) {\n dto += decorators;\n }\n\n dto += ` ${field.name}${optional}: ${tsType};\\n\\n`;\n }\n\n dto += '}\\n\\n';\n\n // Update DTO\n dto += `export class Update${className}Dto {\\n`;\n\n for (const field of table.fields) {\n // 跳过系统字段和主键\n if (\n field.name.startsWith('_') ||\n field.name.startsWith('created') ||\n field.name.startsWith('updated') ||\n field.isPrimaryKey || field.name === 'id'\n ) {\n continue;\n }\n\n // 先不插入注释\n // if (field.comment) {\n // dto += ` /** ${field.comment} */\\n`;\n // }\n\n const tsType = mapDrizzleTypeToTS(field);\n\n const decorators = generateValidationDecorators(field, {\n isUpdate: true,\n });\n if (decorators) {\n dto += decorators;\n }\n\n dto += ` ${field.name}?: ${tsType};\\n\\n`;\n }\n\n dto += '}\\n\\n';\n\n // Response DTO\n dto += `export class ${className}ResponseDto {\\n`;\n\n for (const field of table.fields) {\n // if (field.comment) {\n // dto += ` /** ${field.comment} */\\n`;\n // }\n\n const tsType = mapDrizzleTypeToTS(field);\n const optional = field.nullable ? '?' 
: '';\n\n const decorators = generateValidationDecorators(field, {\n isResponse: true,\n });\n if (decorators) {\n dto += decorators;\n }\n\n dto += ` ${field.name}${optional}: ${tsType};\\n\\n`;\n }\n\n dto += '}\\n';\n\n return dto;\n}\n\n// 生成验证装饰器提示\nexport function generateValidationDecorators(field: FieldInfo, {\n isUpdate = false,\n isResponse = false,\n}: { isUpdate?: boolean, isResponse?: boolean } = {}): string {\n let decorators = ' // 请按用户需求修改以下装饰器注释\\n';\n\n if (field.nullable || (!isResponse && field.hasDefault) || isUpdate) {\n decorators += ` @ApiPropertyOptional({ description: '${field.comment || field.name}' })\\n`;\n if (isResponse) {\n return decorators;\n }\n decorators += ' @IsOptional()\\n';\n } else {\n decorators += ` @ApiProperty({ description: '${field.comment || field.name}' })\\n`;\n if (isResponse) {\n return decorators;\n }\n decorators += ' @IsDefined()\\n';\n }\n\n // 根据字段类型和约束生成装饰器注释\n switch (field.type) {\n case 'varchar':\n case 'char':\n case 'text':\n decorators += ' @IsString()\\n';\n if (field.length) {\n decorators += ` @MaxLength(${field.length})\\n`;\n }\n break;\n\n case 'integer':\n case 'smallint':\n case 'serial':\n case 'smallserial':\n decorators += ' @IsInt()\\n';\n break;\n\n case 'decimal':\n case 'numeric':\n case 'real':\n case 'doublePrecision':\n decorators += ' @IsNumber()\\n';\n break;\n\n case 'boolean':\n decorators += ' @IsBoolean()\\n';\n break;\n\n case 'uuid':\n decorators += ' @IsUUID()\\n';\n break;\n\n case 'timestamp':\n case 'timestamptz':\n case 'date':\n case 'customTimestamptz':\n decorators += ' @IsDate()\\n';\n break;\n\n case 'json':\n case 'jsonb':\n decorators += ' @IsObject()\\n';\n break;\n\n // case 'inet':\n // case 'cidr':\n // decorators += ' // @IsIP()\\n';\n // break;\n }\n\n if (field.isArray) {\n decorators += ' @IsArray()\\n';\n }\n\n // if (field.isUnique) {\n // decorators += ' // @IsUnique() // Custom validator needed\\n';\n // }\n\n // if (field.enumValues) {\n // decorators += ` // @IsEnum([${field.enumValues.map((v) => `'${v}'`).join(', ')}])\\n`;\n // }\n\n return decorators;\n}\n\n// Controller 生成器\nexport function generateController(table: TableInfo): string {\n const className = toPascalCase(table.variableName);\n const routePath = toKebabCase(pluralize(table.variableName));\n const filePath = toSnakeCase(table.variableName);\n\n // 找到主键字段\n const pkField = table.fields.find((f) => f.isPrimaryKey);\n const pkType = pkField ? mapDrizzleTypeToTS(pkField) : 'string';\n const pkName = pkField ? 
pkField.name : 'id';\n\n const controller = `\n// 请修改该文件代码以满足需求\nimport { \n Controller, \n Get, \n Post, \n Put, \n Delete, \n Body, \n Param, \n Query,\n} from '@nestjs/common';\n import {\n ApiTags,\n ApiOperation,\n ApiOkResponse,\n ApiCreatedResponse,\n} from '@nestjs/swagger';\nimport { \n Create${className}Dto, \n Update${className}Dto,\n ${className}ResponseDto\n} from './dtos/${filePath}.dto';\nimport { ${className}Service } from './${filePath}.service';\n\n@ApiTags('${toPascalCase(table.variableName)}')\n@Controller('api/${routePath}')\nexport class ${className}Controller {\n constructor(private readonly ${table.variableName}Service: ${className}Service) {}\n\n @Post()\n @ApiOperation({\n summary: '创建一条记录(模板内容,请修改我)',\n description: '创建一条记录(模板内容,请修改我)',\n })\n @ApiCreatedResponse({\n description: '成功创建一条记录',\n type: ${className}ResponseDto,\n })\n async create(\n @Body() createDto: Create${className}Dto\n ): Promise<${className}ResponseDto> {\n return this.${table.variableName}Service.create(createDto);\n }\n\n @ApiOperation({\n summary: '根据主键查询一条记录(模板内容,请修改我)',\n description: '根据主键查询一条记录(模板内容,请修改我)',\n })\n @ApiOkResponse({\n description: '成功查询一条记录',\n type: ${className}ResponseDto,\n })\n @Get(':${pkName}')\n async findOne(\n @Param('${pkName}') ${pkName}: ${pkType}\n ): Promise<${className}ResponseDto> {\n return this.${table.variableName}Service.findOne(${pkName});\n }\n\n @ApiOperation({\n summary: '根据主键更新一条记录(模板内容,请修改我)',\n description: '根据主键更新一条记录(模板内容,请修改我)',\n })\n @ApiOkResponse({\n description: '成功更新一条记录',\n type: ${className}ResponseDto,\n })\n @Put(':${pkName}')\n async update(\n @Param('${pkName}') ${pkName}: ${pkType},\n @Body() updateDto: Update${className}Dto\n ): Promise<${className}ResponseDto> {\n return this.${table.variableName}Service.update(${pkName}, updateDto);\n }\n\n @ApiOperation({\n summary: '根据主键删除一条记录(模板内容,请修改我)',\n description: '根据主键删除一条记录(模板内容,请修改我)',\n })\n @ApiOkResponse({\n description: '成功删除一条记录',\n })\n @Delete(':${pkName}')\n async remove(\n @Param('${pkName}') ${pkName}: ${pkType}\n ): Promise<void> {\n return this.${table.variableName}Service.remove(${pkName});\n }\n}\n`;\n\n return controller;\n}\n\n// Service 生成器\nexport function generateService(table: TableInfo): string {\n const className = toPascalCase(table.variableName);\n const filePath = toSnakeCase(table.variableName);\n\n const pkField = table.fields.find((f) => f.isPrimaryKey);\n const pkType = pkField ? mapDrizzleTypeToTS(pkField) : 'string';\n const pkName = pkField ? 
pkField.name : 'id';\n\n const service = `\n// 请修改该文件代码以满足需求\nimport { Injectable, Inject, Logger, NotFoundException } from '@nestjs/common';\nimport { eq } from 'drizzle-orm';\nimport { DRIZZLE_DATABASE, type PostgresJsDatabase } from '@lark-apaas/fullstack-nestjs-core';\nimport { ${table.variableName} } from '../../database/schema';\nimport { \n Create${className}Dto, \n Update${className}Dto,\n ${className}ResponseDto \n} from './dtos/${filePath}.dto';\n\n@Injectable()\nexport class ${className}Service {\n private readonly logger = new Logger(${className}Service.name);\n\n constructor(@Inject(DRIZZLE_DATABASE) private readonly db: PostgresJsDatabase) {}\n\n async create(createDto: Create${className}Dto): Promise<${className}ResponseDto> {\n const [result] = await this.db\n .insert(${table.variableName})\n .values(createDto)\n .returning();\n\n this.logger.log(\\`Created ${className} with ${pkName} \\${result.${pkName}}\\`);\n \n return result;\n }\n\n async findAll(options?: { page?: number; limit?: number }): Promise<${className}ResponseDto[]> {\n const { page = 1, limit = 10 } = options || {};\n const offset = (page - 1) * limit;\n\n return this.db\n .select()\n .from(${table.variableName})\n .limit(limit)\n .offset(offset);\n }\n\n async findOne(${pkName}: ${pkType}): Promise<${className}ResponseDto> {\n const [result] = await this.db\n .select()\n .from(${table.variableName})\n .where(eq(${table.variableName}.${pkName}, ${pkName}))\n .limit(1);\n\n if (!result) {\n throw new NotFoundException(\\`${className} with ${pkName} \\${${pkName}} not found\\`);\n }\n\n return result;\n }\n\n async update(${pkName}: ${pkType}, updateDto: Update${className}Dto): Promise<${className}ResponseDto> {\n const [result] = await this.db\n .update(${table.variableName})\n .set(updateDto)\n .where(eq(${table.variableName}.${pkName}, ${pkName}))\n .returning();\n\n if (!result) {\n throw new NotFoundException(\\`${className} with ${pkName} \\${${pkName}} not found\\`);\n }\n\n return result;\n }\n\n async remove(${pkName}: ${pkType}): Promise<void> {\n const result = await this.db\n .delete(${table.variableName})\n .where(eq(${table.variableName}.${pkName}, ${pkName}))\n .returning();\n\n if (result.length === 0) {\n throw new NotFoundException(\\`${className} with ${pkName} \\${${pkName}} not found\\`);\n }\n\n this.logger.log(\\`Deleted ${className} with ${pkName} \\${${pkName}}\\`);\n }\n}\n`;\n\n return service;\n}\n\nexport function generateModule(table: TableInfo): string {\n const className = toPascalCase(table.variableName);\n const filePath = toSnakeCase(table.variableName);\n\n const module = `\nimport { Module } from '@nestjs/common';\nimport { ${className}Controller } from './${filePath}.controller';\nimport { ${className}Service } from './${filePath}.service';\n\n@Module({\n controllers: [${className}Controller],\n providers: [${className}Service],\n})\nexport class ${className}Module {}\n`;\n\n return module;\n}\n","import { FieldInfo } from './schema-parser';\n\n// 完整的类型映射\nexport function mapDrizzleTypeToTS(field: FieldInfo): string {\n const typeMap: Record<string, string> = {\n // String types\n char: 'string',\n varchar: 'string',\n text: 'string',\n\n // Numeric types\n smallint: 'number',\n integer: 'number',\n int: 'number',\n bigint: 'string', // bigint 在 JS 中通常作为 string 处理\n serial: 'number',\n smallserial: 'number',\n bigserial: 'string',\n\n // Decimal types\n decimal: 'string', // 精确数值通常用 string\n numeric: 'string',\n real: 'number',\n doublePrecision: 'number',\n\n // 
Boolean\n boolean: 'boolean',\n\n // Date/Time types\n timestamp: 'Date',\n timestamptz: 'Date',\n date: 'Date',\n time: 'string',\n timetz: 'string',\n interval: 'string',\n\n // UUID\n uuid: 'string',\n\n // JSON types\n json: 'any',\n jsonb: 'any',\n\n // Binary\n bytea: 'Buffer',\n\n // Network types\n inet: 'string',\n cidr: 'string',\n macaddr: 'string',\n macaddr8: 'string',\n\n // Geometric types\n point: '{ x: number; y: number }',\n line: 'string',\n lseg: 'string',\n box: 'string',\n path: 'string',\n polygon: 'string',\n circle: 'string',\n\n // Array types (handled by isArray flag)\n array: 'any[]',\n\n // Custom types\n customType: 'any',\n customTimestamptz: 'Date',\n userProfile: 'string',\n fileAttachment: 'FileAttachment',\n\n // Enum (handled separately)\n pgEnum: 'string',\n };\n\n let baseType = typeMap[field.type] || 'any';\n\n // 处理数组类型\n if (field.isArray) {\n baseType = baseType.endsWith('[]') ? baseType : `${baseType}[]`;\n }\n\n // 处理枚举\n if (field.enumValues && field.enumValues.length > 0) {\n baseType = field.enumValues.map((v) => `'${v}'`).join(' | ');\n }\n\n return baseType;\n}\n\n// 辅助函数\nexport function toPascalCase(str: string): string {\n return str\n .replace(/([a-z])([A-Z])/g, '$1 $2')\n .split(/[-_\\s]/)\n .map((word) => word.charAt(0).toUpperCase() + word.slice(1))\n .join('');\n}\n\nexport function toKebabCase(str: string): string {\n return str\n .replace(/([a-z])([A-Z])/g, '$1-$2')\n .toLowerCase()\n .replace(/[_\\s]/g, '-');\n}\n\nexport function toSnakeCase(str: string): string {\n return str\n .replace(/([a-z])([A-Z])/g, '$1_$2')\n .toLowerCase()\n .replace(/[-\\s]/g, '_');\n}\n","import { Project, CallExpression, Node, ProjectOptions } from 'ts-morph';\n\ninterface FieldInfo {\n name: string;\n columnName: string;\n type: string;\n length?: number;\n precision?: number;\n scale?: number;\n nullable: boolean;\n hasDefault: boolean;\n defaultValue?: string;\n notNull: boolean;\n mode?: 'string' | 'number' | 'bigint' | 'boolean' | 'date';\n isPrimaryKey: boolean;\n isUnique: boolean;\n isArray: boolean;\n comment?: string;\n enumValues?: string[];\n withTimezone?: boolean;\n references?: {\n table: string;\n column: string;\n };\n}\n\ninterface TableInfo {\n tableName: string;\n variableName: string;\n fields: FieldInfo[];\n}\n\nclass DrizzleSchemaParser {\n private project: Project;\n\n constructor(projectOptions: ProjectOptions) {\n this.project = new Project(projectOptions);\n }\n\n parseSchemaFile(filePath: string): TableInfo[] {\n const sourceFile = this.project.addSourceFileAtPath(filePath);\n const tables: TableInfo[] = [];\n\n // 查找所有变量声明\n const variableStatements = sourceFile.getVariableStatements();\n\n for (const statement of variableStatements) {\n const declarations = statement.getDeclarations();\n\n for (const declaration of declarations) {\n const initializer = declaration.getInitializer();\n\n if (initializer && Node.isCallExpression(initializer)) {\n const expression = initializer.getExpression();\n\n // 检查是否是 pgTable 调用\n if (expression.getText() === 'pgTable') {\n const tableInfo = this.parsePgTable(\n declaration.getName(),\n initializer,\n );\n if (tableInfo) {\n tables.push(tableInfo);\n }\n }\n }\n }\n }\n\n return tables;\n }\n\n private parsePgTable(\n variableName: string,\n callExpr: CallExpression,\n ): TableInfo | null {\n const args = callExpr.getArguments();\n\n if (args.length < 2) {\n return null;\n }\n\n // 表名\n const tableName = args[0].getText().replace(/['\"]/g, '');\n\n // 字段定义对象\n const fieldsArg = 
args[1];\n\n if (!Node.isObjectLiteralExpression(fieldsArg)) {\n return null;\n }\n\n const fields: FieldInfo[] = [];\n\n // 解析字段\n const properties = fieldsArg.getProperties();\n\n for (const prop of properties) {\n if (Node.isPropertyAssignment(prop)) {\n const fieldName = prop.getName();\n const initializer = prop.getInitializer();\n\n // 获取字段上方的注释\n const leadingComments = prop.getLeadingCommentRanges();\n let comment: string | undefined;\n\n if (leadingComments.length > 0) {\n comment = leadingComments\n .map((c) => c.getText())\n .join('\\n')\n .replace(/\\/\\//g, '')\n .trim();\n }\n\n if (initializer && Node.isCallExpression(initializer)) {\n const fieldInfo = this.parseField(fieldName, initializer, comment);\n fields.push(fieldInfo);\n }\n }\n }\n\n return {\n tableName,\n variableName,\n fields,\n };\n }\n\n private parseField(\n fieldName: string,\n callExpr: CallExpression,\n comment?: string,\n ): FieldInfo {\n const fieldInfo: FieldInfo = {\n name: fieldName,\n columnName: fieldName,\n type: '',\n nullable: true,\n hasDefault: false,\n notNull: false,\n isPrimaryKey: false,\n isUnique: false,\n isArray: false,\n comment,\n };\n\n // 获取字段类型和基础参数\n this.parseBaseType(callExpr, fieldInfo);\n\n // 解析整个调用链\n this.parseCallChain(callExpr, fieldInfo);\n\n return fieldInfo;\n }\n\n private parseBaseType(callExpr: CallExpression, fieldInfo: FieldInfo): void {\n // 找到最底层的类型调用\n let current: Node = callExpr;\n let baseCall: CallExpression | null = null;\n\n while (Node.isCallExpression(current)) {\n baseCall = current;\n const expression = current.getExpression();\n\n if (Node.isPropertyAccessExpression(expression)) {\n current = expression.getExpression();\n } else {\n break;\n }\n }\n\n if (!baseCall) {\n return;\n }\n\n const expression = baseCall.getExpression();\n let typeName = '';\n\n if (Node.isPropertyAccessExpression(expression)) {\n typeName = expression.getName();\n } else {\n typeName = expression.getText();\n }\n\n fieldInfo.type = typeName;\n\n // 解析类型参数\n const args = baseCall.getArguments();\n\n if (args.length > 0) {\n const firstArg = args[0];\n\n // 字符串参数 - 列名\n if (Node.isStringLiteral(firstArg)) {\n fieldInfo.columnName = firstArg.getLiteralText();\n }\n // 对象参数 - 配置\n else if (Node.isObjectLiteralExpression(firstArg)) {\n this.parseTypeConfig(firstArg, fieldInfo);\n }\n // 数组参数 - enum 值\n else if (Node.isArrayLiteralExpression(firstArg)) {\n fieldInfo.enumValues = firstArg\n .getElements()\n .map((el) => el.getText().replace(/['\"]/g, ''));\n }\n }\n\n // 第二个参数可能是配置对象\n if (args.length > 1 && Node.isObjectLiteralExpression(args[1])) {\n this.parseTypeConfig(args[1], fieldInfo);\n }\n }\n\n private parseTypeConfig(objLiteral: Node, fieldInfo: FieldInfo): void {\n if (!Node.isObjectLiteralExpression(objLiteral)) {\n return;\n }\n\n const properties = objLiteral.getProperties();\n\n for (const prop of properties) {\n if (Node.isPropertyAssignment(prop)) {\n const propName = prop.getName();\n const value = prop.getInitializer()?.getText();\n\n switch (propName) {\n case 'length':\n fieldInfo.length = value ? parseInt(value) : undefined;\n break;\n case 'precision':\n fieldInfo.precision = value ? parseInt(value) : undefined;\n break;\n case 'scale':\n fieldInfo.scale = value ? 
parseInt(value) : undefined;\n break;\n case 'default':\n fieldInfo.hasDefault = true;\n fieldInfo.defaultValue = value;\n break;\n // 时间精度(用于 timestamp, time 等)\n case 'withTimezone':\n fieldInfo.withTimezone = value === 'true';\n break;\n case 'mode':\n // mode 可以是 'string' | 'number' | 'bigint' | 'boolean' | 'date'\n // 用于指定 JS 中的数据类型\n fieldInfo.mode = value?.replace(/['\"]/g, '') as\n | 'string'\n | 'number'\n | 'bigint'\n | 'boolean'\n | 'date'\n | undefined;\n break;\n default:\n throw new Error(`Unsupported property: ${propName}`);\n }\n }\n }\n }\n\n private parseCallChain(callExpr: CallExpression, fieldInfo: FieldInfo): void {\n let current: Node = callExpr;\n\n while (Node.isCallExpression(current)) {\n const expression = current.getExpression();\n\n if (Node.isPropertyAccessExpression(expression)) {\n const methodName = expression.getName();\n const args = current.getArguments();\n\n switch (methodName) {\n case 'notNull':\n fieldInfo.notNull = true;\n fieldInfo.nullable = false;\n break;\n\n case 'default':\n fieldInfo.hasDefault = true;\n if (args.length > 0) {\n fieldInfo.defaultValue = args[0].getText();\n }\n break;\n\n case 'defaultRandom':\n fieldInfo.hasDefault = true;\n fieldInfo.defaultValue = 'random';\n break;\n\n case 'primaryKey':\n fieldInfo.isPrimaryKey = true;\n fieldInfo.notNull = true;\n fieldInfo.nullable = false;\n break;\n\n case 'unique':\n fieldInfo.isUnique = true;\n break;\n\n case 'array':\n fieldInfo.isArray = true;\n break;\n\n case 'references':\n if (args.length > 0) {\n const refArg = args[0].getText();\n // 解析引用,如 () => users.id\n const match = refArg.match(/=>\\s*(\\w+)\\.(\\w+)/);\n if (match) {\n fieldInfo.references = {\n table: match[1],\n column: match[2],\n };\n }\n }\n break;\n default:\n throw new Error(`Unsupported method: ${methodName}`);\n }\n\n current = expression.getExpression();\n } else {\n break;\n }\n }\n }\n}\n\nexport { DrizzleSchemaParser, TableInfo, FieldInfo };\n\n","import { generateController, generateDTO, generateModule, generateService } from './generator';\nimport { DrizzleSchemaParser } from './schema-parser';\nimport { join } from 'path';\nimport { toSnakeCase } from './utils';\nimport { mkdir, rm, writeFile } from 'fs/promises';\nimport { existsSync } from 'fs';\n\nexport interface Options {\n tsConfigFilePath: string;\n schemaFilePath: string;\n moduleOutputDir: string;\n}\n\nexport async function parseAndGenerateNestResourceTemplate(options: Options) {\n const parser = new DrizzleSchemaParser({\n tsConfigFilePath: options.tsConfigFilePath,\n });\n const tables = parser.parseSchemaFile(options.schemaFilePath);\n\n if (tables.length === 0) {\n console.warn('未解析到任何数据库表,无需生成 Nest.js 模块模板');\n return;\n }\n\n // 按照变量名长度排序,降序\n tables.sort((a, b) => b.variableName.length - a.variableName.length);\n\n const table = tables[0];\n\n // 生成代码,只生成一个模块的\n console.info(`生成 Nest.js ${table.variableName} 模块`);\n const filePath = toSnakeCase(table.variableName);\n const moduleDir = join(options.moduleOutputDir, filePath);\n if (existsSync(moduleDir)) {\n console.info(`Nest.js 模块 ${filePath} 已存在,跳过生成代码`);\n return;\n }\n const dto = generateDTO(table);\n const controller = generateController(table);\n const service = generateService(table);\n const moduleFilePath = join(moduleDir, `${filePath}.module.ts`);\n const module = generateModule(table);\n\n try {\n await mkdir(moduleDir, { recursive: true });\n await mkdir(join(moduleDir, 'dtos'), { recursive: true });\n await writeFile(join(moduleDir, 'dtos', `${filePath}.dto.ts`), 
dto);\n await writeFile(join(moduleDir, `${filePath}.controller.ts`), controller);\n await writeFile(join(moduleDir, `${filePath}.service.ts`), service);\n await writeFile(moduleFilePath, module);\n } catch (err) {\n console.error(`生成 Nest.js ${filePath} 模块失败: ${(err as Error).message}`);\n await rm(moduleDir, { recursive: true });\n }\n}\n\n// const projectDir = '/Users/bytedance/Workspace/nestjs-react-fullstack-template';\n// const tsConfigFilePath = join(projectDir, './tsconfig.json');\n// const schemaFilePath = join(projectDir, './server/database/schema.ts');\n\n// const tmp = mkdtempSync(join(tmpdir(), 'tmp-'));\n// console.log('生成到 ', tmp);\n\n// parseAndGenerateNestResourceTemplate({\n// tsConfigFilePath,\n// schemaFilePath,\n// moduleOutputDir: tmp,\n// }).finally(() => {\n// console.log('NestJS 资源模板生成完成');\n// });\n","import fs from 'node:fs';\nimport path from 'node:path';\nimport http from 'node:http';\nimport https from 'node:https';\nimport type { IncomingMessage, ServerResponse } from 'node:http';\nimport { normalizeBasePath } from '../../utils/index';\n\n// 缓存 HTML 模板\nlet errorHtmlTemplate: string | null = null;\n\n/**\n * Proxy Error Handler 选项\n */\nexport interface ProxyErrorOptions {\n /** 日志目录路径 */\n logDir?: string;\n /** 读取最近多少条错误日志 */\n maxErrorLogs?: number;\n /** 日志文件名(默认为 server.log) */\n logFileName?: string;\n /** 等待服务重启的超时时间(毫秒),默认 5000ms */\n retryTimeout?: number;\n /** 轮询检查服务的间隔时间(毫秒),默认 500ms */\n retryInterval?: number;\n /** 目标服务器地址,用于检查服务是否恢复,格式:http://localhost:3000 */\n target?: string;\n /** 客户端基础路径,默认 '/' */\n clientBasePath?: string;\n}\n\n/**\n * 日志读取结果\n */\ninterface LogReadResult {\n /** 日志行 */\n logs: string[];\n /** 是否包含编译错误 */\n hasCompileError: boolean;\n}\n\n/**\n * 判断是否是连接错误(服务未启动或重启中)\n */\nfunction isConnectionError(err: Error): boolean {\n const code = (err as NodeJS.ErrnoException).code;\n const connectionErrorCodes = ['ECONNREFUSED', 'ECONNRESET', 'ETIMEDOUT', 'ENOTFOUND', 'ENETUNREACH'];\n return connectionErrorCodes.includes(code || '');\n}\n\n/**\n * 检查服务是否可用(通过HTTP请求)\n * 发送一个真实的HTTP请求来验证服务是否真正可用,而不仅仅是检查TCP端口\n */\nfunction checkServiceAvailable(targetUrl: string, timeout = 1000): Promise<boolean> {\n return new Promise((resolve) => {\n try {\n const url = new URL(targetUrl);\n const isHttps = url.protocol === 'https:';\n const httpModule = isHttps ? https : http;\n\n const req = httpModule.request(\n {\n hostname: url.hostname,\n port: url.port || (isHttps ? 
443 : 80),\n path: '/',\n method: 'HEAD',\n timeout,\n },\n (res) => {\n // 任何响应都表示服务可用(包括错误状态码)\n // 因为如果返回502+错误页面,说明代理正常但后端未ready\n // 如果返回200或其他状态码,说明服务已恢复\n const available = res.statusCode !== 502 && !res.headers['x-proxy-error-page'];\n resolve(available);\n }\n );\n\n req.on('timeout', () => {\n req.destroy();\n resolve(false);\n });\n\n req.on('error', () => {\n resolve(false);\n });\n\n req.end();\n } catch (e) {\n resolve(false);\n }\n });\n}\n\n/**\n * 等待服务恢复,带超时和轮询检查\n */\nasync function waitForServiceRecovery(\n targetUrl: string,\n timeout: number,\n interval: number\n): Promise<boolean> {\n const startTime = Date.now();\n\n while (Date.now() - startTime < timeout) {\n const isAvailable = await checkServiceAvailable(targetUrl, 2000);\n if (isAvailable) {\n return true;\n }\n // 等待一段时间后再次检查\n await new Promise(resolve => setTimeout(resolve, interval));\n }\n\n return false;\n}\n\n/**\n * 获取当前文件所在目录\n * 兼容 ESM 和 CJS\n */\nfunction getDirname(): string {\n return __dirname;\n}\n\n/**\n * 读取错误页面 HTML 模板\n */\nfunction getErrorHtmlTemplate(): string {\n if (!errorHtmlTemplate) {\n const dirname = getDirname();\n const htmlPath = path.join(dirname, 'error.html');\n errorHtmlTemplate = fs.readFileSync(htmlPath, 'utf-8');\n }\n return errorHtmlTemplate;\n}\n\n/**\n * 解析日志行,去掉时间戳和 [server] 前缀\n * 只处理 [server] 日志,过滤掉 [client] 日志\n */\nfunction parseLogLine(line: string): string | null {\n const trimmed = line.trim();\n if (!trimmed) return null;\n\n // 匹配格式:[2025-10-24 19:43:25] [server] 实际内容\n // 只处理 [server] 日志,过滤掉 [client] 日志\n const match = trimmed.match(/^\\[\\d{4}-\\d{2}-\\d{2}\\s+\\d{2}:\\d{2}:\\d{2}\\]\\s+\\[server\\]\\s+(.*)$/);\n if (match) {\n const content = match[1].trim();\n // 如果内容为空,返回 null(过滤掉空行)\n return content || null;\n }\n\n // 如果不匹配(可能是 [client] 或其他格式),返回 null(只关注 [server] 日志)\n return null;\n}\n\n/**\n * 读取最近的错误日志段落\n * 从最后一次编译开始标记,到下一次编译开始或文件末尾\n * 起始标记:\n * - \"Starting compilation in watch mode\"\n * - \"File change detected. Starting incremental compilation\"\n * 结束标记:\n * - 下一个编译开始标记\n * - 或文件末尾\n * 错误判断:\n * - 编译错误:Found x errors. 
Watching for file changes (x > 0)\n * - 运行时错误:Error:, Exception:, EADDRINUSE 等\n * 优化:只读取文件末尾的内容,避免加载整个大文件到内存\n */\nasync function readRecentErrorLogs(\n logDir: string,\n maxLogs: number,\n fileName: string\n): Promise<LogReadResult> {\n const logFilePath = path.join(logDir, fileName);\n\n // 检查文件是否存在\n let fileStats;\n try {\n fileStats = await fs.promises.stat(logFilePath);\n } catch {\n return { logs: [], hasCompileError: false };\n }\n\n const fileSize = fileStats.size;\n\n // 定义读取的最大字节数(1MB),避免读取过大的文件\n const maxReadSize = 1024 * 1024; // 1MB\n const readSize = Math.min(fileSize, maxReadSize);\n const startPosition = Math.max(0, fileSize - readSize);\n\n // 从文件末尾读取固定大小的内容\n const buffer = Buffer.allocUnsafe(readSize);\n let fileHandle;\n\n try {\n fileHandle = await fs.promises.open(logFilePath, 'r');\n await fileHandle.read(buffer, 0, readSize, startPosition);\n } catch (error) {\n console.error('[Proxy Error]: Failed to read log file:', error);\n return { logs: [], hasCompileError: false };\n } finally {\n if (fileHandle) {\n await fileHandle.close();\n }\n }\n\n // 将 buffer 转换为字符串并按行分割\n const content = buffer.toString('utf8');\n const lines = content.split('\\n');\n\n // 如果不是从文件开头读取的,第一行可能不完整,丢弃它\n if (startPosition > 0 && lines.length > 0) {\n lines.shift();\n }\n\n // 解析所有行\n const allLines: string[] = [];\n for (const line of lines) {\n const parsed = parseLogLine(line);\n if (parsed !== null) {\n allLines.push(parsed);\n }\n }\n\n // 从后往前找最后一次出现编译开始标记的位置\n let startIndex = -1;\n for (let i = allLines.length - 1; i >= 0; i--) {\n const line = allLines[i];\n // 匹配编译开始标记\n // 示例:[22:13:48] Starting compilation in watch mode...\n // [22:13:48] File change detected. Starting incremental compilation...\n if (\n line.includes('Starting compilation in watch mode') ||\n line.includes('File change detected. Starting incremental compilation')\n ) {\n startIndex = i;\n break;\n }\n }\n\n // 如果没找到编译开始标记,使用降级策略:返回最后 maxLogs 行\n if (startIndex === -1) {\n console.log('[Proxy Error]: No compilation start marker found, returning last logs');\n const fallbackLogs = allLines.slice(-maxLogs);\n const hasCompileError = checkForErrors(fallbackLogs);\n return { logs: fallbackLogs, hasCompileError };\n }\n\n // 从 startIndex 开始,找到下一个编译开始标记或文件末尾\n let endIndex = allLines.length;\n\n for (let i = startIndex + 1; i < allLines.length; i++) {\n const line = allLines[i];\n // 如果遇到下一个编译开始标记,停止\n if (\n line.includes('Starting compilation in watch mode') ||\n line.includes('File change detected. Starting incremental compilation')\n ) {\n endIndex = i;\n break;\n }\n }\n\n // 提取这段日志(从编译开始到下一次编译开始或文件末尾)\n const errorSection = allLines.slice(startIndex, endIndex);\n\n // 检查是否有错误(编译错误或运行时错误)\n const hasCompileError = checkForErrors(errorSection);\n\n // 限制最大行数\n const logs = errorSection.length > maxLogs\n ? errorSection.slice(-maxLogs)\n : errorSection;\n\n return { logs, hasCompileError };\n}\n\n/**\n * 检查日志中是否有编译错误\n * 只检测:Found x errors (x > 0)\n */\nfunction checkForErrors(logs: string[]): boolean {\n for (const line of logs) {\n // 检查编译错误:Found X errors. Watching for file changes\n const compileErrorMatch = line.match(/Found (\\d+) errors?\\. 
Watching for file changes/);\n if (compileErrorMatch) {\n const errorCount = parseInt(compileErrorMatch[1], 10);\n if (errorCount > 0) {\n console.log(`[Proxy Error]: Found ${errorCount} compilation error(s)`);\n return true;\n }\n }\n }\n\n return false;\n}\n\n/**\n * 将错误信息和日志注入到 HTML 模板中\n */\nfunction injectTemplateData(\n template: string,\n clientBasePath: string\n): string {\n // 替换模板中的占位符\n return template.replace('{{.clientBasePath}}', clientBasePath);\n}\n\n/**\n * HTTP Proxy 错误处理器\n * 用于 http-proxy 的 onError 回调\n *\n * @example\n * ```typescript\n * import { createProxyMiddleware } from 'http-proxy-middleware';\n * import { handleDevProxyError } from '@lark-apaas/devtool-kits';\n *\n * // 基础用法\n * const proxy = createProxyMiddleware({\n * target: 'http://localhost:3000',\n * onError: handleDevProxyError\n * });\n *\n * // 自定义配置\n * const proxy = createProxyMiddleware({\n * target: 'http://localhost:3000',\n * onError: (err, req, res) => {\n * handleDevProxyError(err, req, res, {\n * logDir: './logs',\n * maxErrorLogs: 50,\n * logFileName: 'server.log'\n * });\n * }\n * });\n * ```\n */\nexport function handleDevProxyError(\n err: Error,\n req: IncomingMessage,\n res: ServerResponse,\n options?: ProxyErrorOptions\n): void {\n const {\n logDir = path.join(process.cwd(), 'logs'),\n maxErrorLogs = 100,\n logFileName = 'server.log',\n retryTimeout = 5000,\n retryInterval = 500,\n target = `http://localhost:${process.env.SERVER_PORT || 3000}`,\n clientBasePath = process.env.CLIENT_BASE_PATH || '/',\n } = options || {};\n\n const clientBasePathWithoutSlash = normalizeBasePath(clientBasePath);\n console.error('[Proxy Error]:', err.message, clientBasePathWithoutSlash);\n\n // 检查响应是否已发送\n if (res.headersSent) {\n console.error('[Proxy Error]: Headers already sent, cannot send error page');\n return;\n }\n\n // 异步处理错误\n (async () => {\n try {\n // 判断是否是连接错误\n const isConnError = isConnectionError(err);\n\n // 读取最近的错误日志,检查是否有编译错误\n const { hasCompileError } = await readRecentErrorLogs(\n logDir,\n maxErrorLogs,\n logFileName\n );\n\n // 场景1: 连接错误 + 无编译错误 = 可能是服务重启中\n // 尝试等待服务恢复,如果5秒内恢复则302重定向,用户无感知\n if (isConnError && !hasCompileError) {\n console.log('[Proxy Error]: Connection error without compile errors, possibly server restarting...');\n\n // 验证 target URL\n try {\n new URL(target); // 验证URL格式\n } catch (e) {\n console.error('[Proxy Error]: Invalid target URL:', target);\n // 如果target无效,直接显示错误页面\n console.log('[Proxy Error]: Showing error page due to invalid target');\n }\n\n // 等待服务恢复(通过HTTP健康检查)\n console.log(`[Proxy Error]: Waiting for service recovery at ${target} (timeout: ${retryTimeout}ms)...`);\n const recovered = await waitForServiceRecovery(target, retryTimeout, retryInterval);\n\n if (recovered) {\n console.log('[Proxy Error]: Service recovered within timeout, sending 302 redirect');\n sendSimpleRedirect(req, res);\n return;\n }\n\n console.log('[Proxy Error]: Service did not recover within timeout, showing error page with probe');\n }\n\n // 场景2: 有编译错误 或 服务未在5秒内恢复\n // 显示错误页面,由前端探针继续检测服务恢复\n if (isConnError && !hasCompileError) {\n console.log('[Proxy Error]: Showing error page with auto-refresh probe');\n } else {\n console.log('[Proxy Error]: Compile error or non-connection error, showing error page');\n }\n\n // 获取 HTML 模板\n const template = getErrorHtmlTemplate();\n\n // 注入错误信息和日志\n const html = injectTemplateData(template, clientBasePathWithoutSlash);\n\n // 设置响应头\n res.writeHead(200, {\n 'Content-Type': 'text/html; charset=utf-8',\n 'Cache-Control': 'no-cache, 
no-store, must-revalidate',\n 'X-Proxy-Error-Page': 'true', // 标识这是错误页面\n });\n\n // 发送 HTML\n res.end(html);\n } catch (error) {\n console.error('[Proxy Error]: Failed to handle error:', error);\n\n // 降级方案:发送纯文本错误信息\n if (!res.headersSent) {\n res.writeHead(502, { 'Content-Type': 'text/plain; charset=utf-8' });\n res.end(`Node 服务启动异常,请根据日志修复相关问题`);\n }\n }\n })();\n}\n\n/**\n * 发送 302 重定向响应,强制用户刷新页面\n */\nfunction sendSimpleRedirect(req: IncomingMessage, res: ServerResponse): void {\n if (res.headersSent) return;\n\n // 获取原始请求的 URL\n const originalUrl = req.url || '/';\n\n console.log('[Proxy Error]: Sending 302 redirect to', originalUrl);\n\n res.writeHead(302, {\n 'Location': originalUrl,\n 'Cache-Control': 'no-cache, no-store, must-revalidate',\n });\n res.end();\n}","import path from 'node:path';\nimport type {\n Middleware,\n RouteMiddleware,\n GlobalMiddleware,\n MiddlewareContext,\n ExpressApp,\n ViteMiddleware,\n} from './types';\n/**\n * Type guard to check if middleware is route-based\n */\nfunction isRouteMiddleware(middleware: Middleware): middleware is RouteMiddleware {\n return 'createRouter' in middleware && middleware.createRouter !== undefined;\n}\n\n/**\n * Type guard to check if middleware is global\n */\nfunction isGlobalMiddleware(middleware: Middleware): middleware is GlobalMiddleware {\n return 'createHandler' in middleware && middleware.createHandler !== undefined;\n}\n\n/**\n * Compute the full mount path by combining basePath and middleware mountPath\n */\nfunction computeMountPath(basePath: string, mountPath: string): string {\n const routePath = path.posix.join(basePath, mountPath);\n return routePath.startsWith('/') ? routePath : `/${routePath}`;\n}\n\n/**\n * Log middleware registration with routes info\n */\nfunction logMiddlewareRegistration(\n middleware: RouteMiddleware,\n fullMountPath: string,\n): void {\n if (middleware.routes && middleware.routes.length > 0) {\n console.log(`[Middleware] Registered: ${middleware.name} at ${fullMountPath}`);\n middleware.routes.forEach((route) => {\n const routePath = route.path === '/' ? fullMountPath : path.posix.join(fullMountPath, route.path);\n console.log(` ${route.method} ${routePath} - ${route.description}`);\n });\n } else {\n console.log(`[Middleware] Registered: ${middleware.name} at ${fullMountPath}`);\n }\n}\n\n/**\n * Register a single route middleware\n */\nasync function registerRouteMiddleware(\n server: ExpressApp | ViteMiddleware,\n middleware: RouteMiddleware,\n context: MiddlewareContext,\n): Promise<void> {\n if (!middleware.mountPath) {\n console.error(\n `[Middleware] ${middleware.name}: Route middleware must have mountPath. Skipping.`\n );\n return;\n }\n\n const router = middleware.createRouter(context);\n const fullMountPath = computeMountPath(context.basePath, middleware.mountPath);\n\n server.use(fullMountPath, router);\n logMiddlewareRegistration(middleware, fullMountPath);\n}\n\n/**\n * Register a single global middleware\n */\nasync function registerGlobalMiddleware(\n server: ExpressApp | ViteMiddleware,\n middleware: GlobalMiddleware,\n context: MiddlewareContext,\n): Promise<void> {\n if (middleware.mountPath) {\n console.warn(\n `[Middleware] ${middleware.name}: Global middleware should not have mountPath. 
` +\n `Ignoring mountPath \"${middleware.mountPath}\".`\n );\n }\n\n const handler = middleware.createHandler(context);\n server.use(handler);\n console.log(`[Middleware] Registered global: ${middleware.name}`);\n}\n\n/**\n * Register middlewares for Express-compatible servers or Vite\n * @param server - Express app or Vite middleware instance\n * @param middlewares - List of middlewares to register\n * @param options - Optional context configuration\n * @returns Promise that resolves when all middlewares are registered\n *\n * @example\n * ```typescript\n * // In rspack/webpack setupMiddlewares\n * setupMiddlewares: (middlewares, devServer) => {\n * if (devServer.app) {\n * registerMiddlewares(devServer.app, [\n * // Global middlewares (execute first)\n * createCorsMiddleware({ origin: '*' }),\n * createAuthMiddleware(),\n *\n * // Route middlewares\n * createDevLogsMiddleware({ logDir: './logs' }),\n * createOpenapiMiddleware({ openapiFilePath: './openapi.json' })\n * ], {\n * basePath: '/api',\n * isDev: true,\n * rootDir: __dirname\n * });\n * }\n * return middlewares;\n * }\n *\n * // In Vite configureServer\n * configureServer: (server) => {\n * registerMiddlewares(server.middlewares, [\n * createDevLogsMiddleware({ logDir: './logs' })\n * ], { basePath: '/', isDev: true, rootDir: __dirname });\n * }\n * ```\n */\nexport async function registerMiddlewares(\n server: ExpressApp | ViteMiddleware,\n middlewares: Middleware[],\n options?: Partial<MiddlewareContext>,\n): Promise<void> {\n // Build context with defaults\n const context: MiddlewareContext = {\n basePath: '/',\n isDev: process.env.NODE_ENV !== 'production',\n rootDir: process.cwd(),\n ...options,\n };\n // 合并默认中间件和用户提供的中间件\n const allMiddlewares = [...middlewares];\n\n for (const middleware of allMiddlewares) {\n // Check if middleware should be enabled\n if (middleware.enabled && !middleware.enabled(context)) {\n continue;\n }\n\n try {\n // Check if both createRouter and createHandler are provided (type-unsafe case)\n const hasCreateRouter = 'createRouter' in middleware && typeof (middleware as any).createRouter === 'function';\n const hasCreateHandler = 'createHandler' in middleware && typeof (middleware as any).createHandler === 'function';\n\n if (hasCreateRouter && hasCreateHandler) {\n console.warn(\n `[Middleware] ${middleware.name}: Both createRouter and createHandler provided. 
` +\n `Using createRouter and ignoring createHandler.`\n );\n }\n\n if (isRouteMiddleware(middleware)) {\n // Route-based middleware\n await registerRouteMiddleware(server, middleware, context);\n } else if (isGlobalMiddleware(middleware)) {\n // Global middleware\n await registerGlobalMiddleware(server, middleware, context);\n } else {\n // Type-unsafe middleware that has neither createRouter nor createHandler\n console.error(\n `[Middleware] ${(middleware as any).name || 'unknown'}: Must provide either createRouter or createHandler.`\n );\n }\n } catch (error) {\n console.error(`[Middleware] Failed to register ${middleware.name}:`, error);\n }\n }\n}\n\n// Re-export types and middleware creators\nexport type {\n Middleware,\n RouteMiddleware,\n GlobalMiddleware,\n MiddlewareContext,\n RouteInfo,\n} from './types';\nexport { createOpenapiMiddleware } from './openapi';\nexport { createDevLogsMiddleware } from './dev-logs';\nexport { createCollectLogsMiddleware } from './collect-logs';","import express, { Router } from 'express';\nimport type { MiddlewareContext } from '../types';\nimport { createOpenapiHandler } from './controller';\n\n/**\n * Options for creating OpenAPI router\n */\nexport interface OpenapiRouterOptions {\n /** Path to the openapi.json file */\n openapiFilePath: string;\n /** Enable source code enhancement */\n enableEnhancement: boolean;\n /** Server directory for source code scanning */\n serverDir?: string;\n}\n\n/**\n * Create OpenAPI router\n */\nexport function createOpenapiRouter(\n options: OpenapiRouterOptions,\n context: MiddlewareContext,\n): Router {\n const { openapiFilePath, enableEnhancement, serverDir } = options;\n const router = express.Router();\n const handler = createOpenapiHandler(openapiFilePath, enableEnhancement, serverDir);\n\n // GET /openapi.json - Serve OpenAPI specification\n router.get('/openapi.json', (req, res) => handler(req, res, context));\n\n return router;\n}\n","import fs from 'node:fs/promises';\nimport crypto from 'node:crypto';\n\nimport { enhanceOpenApiWithSourceInfo } from './services';\nimport { transformOpenapiPaths } from './utils';\n\nimport type { Request, Response } from 'express';\nimport type { OpenapiCache } from './types';\nimport type { MiddlewareContext } from '../types';\n\n/**\n * Create OpenAPI request handler with caching\n */\nexport function createOpenapiHandler(\n openapiFilePath: string,\n enableEnhancement: boolean,\n serverDir?: string,\n) {\n // In-memory cache\n let cache: OpenapiCache | null = null;\n\n return async (_req: Request, res: Response, context: MiddlewareContext) => {\n try {\n // Read OpenAPI file\n const fileBuffer = await fs.readFile(openapiFilePath, 'utf-8');\n\n // Calculate file hash for cache invalidation\n const currentHash = crypto.createHash('md5').update(fileBuffer).digest('hex');\n\n // Use cache if file content hasn't changed\n if (cache && cache.fileHash === currentHash) {\n return res.json(cache.data);\n }\n\n // Parse OpenAPI\n let payload = JSON.parse(fileBuffer);\n\n // Enhance with x-source if enabled\n if (enableEnhancement && context.isDev) {\n const { openapi: enhancedPayload, stats } = await enhanceOpenApiWithSourceInfo({\n openapiData: payload,\n writeFile: false, // Don't write to file, keep in memory\n serverDir: serverDir || context.rootDir, // Use provided serverDir or fallback to context.rootDir\n });\n payload = enhancedPayload;\n\n // Log performance\n console.log(`[OpenAPI] Enhanced in ${stats.duration}ms (${stats.endpointsEnhanced} endpoints)`);\n }\n\n // 
Transform paths: remove basePath prefix from all API paths\n const result = transformOpenapiPaths(payload, context.basePath);\n\n // Update cache\n cache = {\n data: result,\n fileHash: currentHash,\n };\n\n res.json(result);\n } catch (error) {\n const message = error instanceof Error ? error.message : 'Unknown error';\n res.status(500).json({\n error: 'Failed to load OpenAPI spec',\n message,\n });\n }\n };\n}\n","import { promises as fs } from 'node:fs';\nimport path from 'node:path';\nimport ts from 'typescript';\nimport type { SourceInfo, EnhanceOptions, EnhanceResult } from './types';\nimport { findControllerFiles, buildSourceMap, enhanceOpenApiPaths } from './utils';\n\n/**\n * Enhances OpenAPI JSON with source file location metadata\n * Can be called programmatically or run as a script\n */\nexport async function enhanceOpenApiWithSourceInfo(options: EnhanceOptions = {}): Promise<EnhanceResult> {\n const startTime = Date.now();\n\n const openapiPath = options.openapiPath || path.resolve(__dirname, '../client/src/api/gen/openapi.json');\n const serverDir = options.serverDir || path.resolve(__dirname, '../server');\n const writeFile = options.writeFile !== false;\n\n let openapi: any;\n if (options.openapiData) {\n // Use provided data (for in-memory enhancement)\n openapi = JSON.parse(JSON.stringify(options.openapiData)); // Deep clone\n } else {\n // Read from file\n const openapiContent = await fs.readFile(openapiPath, 'utf-8');\n openapi = JSON.parse(openapiContent);\n }\n\n const controllerFiles = await findControllerFiles(serverDir);\n const sourceMap = await buildSourceMap(controllerFiles, processControllerFile);\n const enhanced = enhanceOpenApiPaths(openapi, sourceMap);\n\n if (writeFile) {\n await fs.writeFile(openapiPath, JSON.stringify(openapi, null, 2) + '\\n', 'utf-8');\n }\n\n const duration = Date.now() - startTime;\n\n return {\n openapi,\n stats: {\n duration,\n controllersFound: controllerFiles.length,\n endpointsExtracted: sourceMap.size,\n endpointsEnhanced: enhanced,\n },\n };\n}\n\n/**\n * Process a single controller file\n */\nasync function processControllerFile(filePath: string): Promise<Map<string, SourceInfo>> {\n const relativePath = path.relative(process.cwd(), filePath);\n\n // Parse file\n const content = await fs.readFile(filePath, 'utf-8');\n const sourceFile = ts.createSourceFile(filePath, content, ts.ScriptTarget.Latest, true);\n\n return extractControllerMetadata(sourceFile, relativePath);\n}\n\n/**\n * Extract controller metadata from TypeScript source file\n */\nfunction extractControllerMetadata(sourceFile: ts.SourceFile, filePath: string): Map<string, SourceInfo> {\n const metadata = new Map<string, SourceInfo>();\n let controllerPath = '';\n let className = '';\n\n // Helper function to get decorators from both old and new TypeScript APIs\n function getDecorators(node: ts.Node): readonly ts.Decorator[] {\n // TypeScript 5.x: decorators are in modifiers array\n if ('modifiers' in node && Array.isArray(node.modifiers)) {\n return (node.modifiers as ts.ModifierLike[]).filter(\n (mod): mod is ts.Decorator => mod.kind === ts.SyntaxKind.Decorator,\n );\n }\n // TypeScript 4.x: decorators are in decorators array\n if ('decorators' in node && Array.isArray(node.decorators)) {\n return node.decorators as readonly ts.Decorator[];\n }\n return [];\n }\n\n function visit(node: ts.Node): void {\n // Extract @Controller decorator and its path\n if (ts.isClassDeclaration(node)) {\n const decorators = getDecorators(node);\n\n // Extract class name\n if 
(node.name) {\n className = node.name.getText(sourceFile);\n }\n\n for (const decorator of decorators) {\n if (ts.isCallExpression(decorator.expression)) {\n const expression = decorator.expression;\n const decoratorName = expression.expression.getText(sourceFile);\n\n if (decoratorName === 'Controller') {\n if (expression.arguments.length > 0) {\n const arg = expression.arguments[0];\n if (ts.isStringLiteral(arg)) {\n controllerPath = arg.text;\n }\n }\n }\n }\n }\n }\n\n // Extract methods with HTTP decorators\n if (ts.isMethodDeclaration(node) && node.name) {\n const methodName = node.name.getText(sourceFile);\n let httpMethod = '';\n let routePath = '';\n const { line } = sourceFile.getLineAndCharacterOfPosition(node.getStart(sourceFile));\n\n const decorators = getDecorators(node);\n\n for (const decorator of decorators) {\n if (ts.isCallExpression(decorator.expression)) {\n const decoratorName = decorator.expression.expression.getText(sourceFile);\n if (['Get', 'Post', 'Put', 'Delete', 'Patch', 'Options', 'Head', 'All'].includes(decoratorName)) {\n httpMethod = decoratorName.toLowerCase();\n if (decorator.expression.arguments.length > 0) {\n const arg = decorator.expression.arguments[0];\n if (ts.isStringLiteral(arg)) {\n routePath = arg.text;\n }\n }\n }\n }\n }\n\n if (httpMethod && methodName && className) {\n const operationId = `${className}_${methodName}`;\n metadata.set(operationId, {\n file: filePath,\n line: line + 1,\n method: httpMethod,\n controllerPath,\n routePath,\n });\n }\n }\n\n ts.forEachChild(node, visit);\n }\n\n visit(sourceFile);\n return metadata;\n}\n","import path from 'node:path';\nimport { promises as fs } from 'node:fs';\nimport type { SourceInfo } from './types';\n\n/**\n * Find all controller files in a directory\n */\nexport async function findControllerFiles(dir: string): Promise<string[]> {\n const files: string[] = [];\n\n async function scan(currentDir: string): Promise<void> {\n const entries = await fs.readdir(currentDir, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = path.join(currentDir, entry.name);\n\n if (entry.isDirectory()) {\n await scan(fullPath);\n } else if (entry.isFile() && entry.name.endsWith('.controller.ts')) {\n files.push(fullPath);\n }\n }\n }\n\n await scan(dir);\n return files;\n}\n\n/**\n * Build source map from controller files\n */\nexport async function buildSourceMap(\n controllerFiles: string[],\n processFile: (filePath: string) => Promise<Map<string, SourceInfo>>,\n): Promise<Map<string, SourceInfo>> {\n const sourceMap = new Map<string, SourceInfo>();\n\n // Process files in parallel with a concurrency limit\n const concurrency = 10;\n const results: Map<string, SourceInfo>[] = [];\n\n for (let i = 0; i < controllerFiles.length; i += concurrency) {\n const batch = controllerFiles.slice(i, i + concurrency);\n const batchResults = await Promise.all(batch.map((filePath) => processFile(filePath)));\n results.push(...batchResults);\n }\n\n // Merge results\n for (const metadata of results) {\n for (const [operationId, info] of metadata.entries()) {\n sourceMap.set(operationId, info);\n }\n }\n\n return sourceMap;\n}\n\n/**\n * Try to match operationId with different formats\n * Supports:\n * - Direct match: ClassName_methodName\n * - Camel case: classNameMethodName\n * - Method only: methodName\n */\nfunction findSourceInfo(operationId: string, sourceMap: Map<string, SourceInfo>): SourceInfo | undefined {\n // Try direct match first\n const directMatch = sourceMap.get(operationId);\n if 
(directMatch) {\n return directMatch;\n }\n\n // Try matching with different formats\n for (const [key, value] of sourceMap.entries()) {\n // key format: ClassName_methodName\n const [className, methodName] = key.split('_');\n if (!className || !methodName) continue;\n\n // Try camelCase format: classNameMethodName\n const camelCaseId = className.charAt(0).toLowerCase() + className.slice(1) + methodName.charAt(0).toUpperCase() + methodName.slice(1);\n if (operationId === camelCaseId) {\n return value;\n }\n\n // Try method name only\n if (operationId === methodName) {\n return value;\n }\n }\n\n return undefined;\n}\n\n/**\n * Enhance OpenAPI paths with source information\n */\nexport function enhanceOpenApiPaths(openapi: any, sourceMap: Map<string, SourceInfo>): number {\n let enhancedCount = 0;\n\n if (!openapi.paths) {\n return enhancedCount;\n }\n\n for (const pathItem of Object.values(openapi.paths)) {\n if (!pathItem || typeof pathItem !== 'object') continue;\n\n for (const operation of Object.values(pathItem)) {\n if (operation && typeof operation === 'object' && 'operationId' in operation) {\n const sourceInfo = findSourceInfo(operation.operationId as string, sourceMap);\n if (sourceInfo) {\n operation['x-source'] = {\n file: sourceInfo.file,\n line: sourceInfo.line,\n };\n enhancedCount++;\n }\n }\n }\n }\n\n return enhancedCount;\n}\n\n/**\n * Transform OpenAPI paths by removing basePath prefix\n */\nexport function transformOpenapiPaths(openapi: any, basePath: string): any {\n if (basePath === '/' || !openapi.paths) {\n return openapi;\n }\n\n const newPaths: any = {};\n Object.keys(openapi.paths).forEach((key) => {\n const staticApiKey = key.startsWith(basePath) ? key.slice(basePath.length) : key;\n newPaths[staticApiKey] = openapi.paths[key];\n });\n\n return {\n ...openapi,\n paths: newPaths,\n basePath,\n };\n}\n","import type { RouteMiddleware, MiddlewareContext, RouteInfo } from '../types';\nimport type { OpenapiMiddlewareOptions } from './types';\nimport { createOpenapiRouter } from './router';\n\n/**\n * Routes provided by OpenAPI middleware\n */\nconst OPENAPI_ROUTES: RouteInfo[] = [\n {\n method: 'GET',\n path: '/openapi.json',\n description: 'Serve enhanced OpenAPI specification with source code references',\n }\n];\n\n/**\n * Creates OpenAPI middleware that serves enhanced openapi.json\n * Supports both rspack/webpack and Vite dev servers\n */\nexport function createOpenapiMiddleware(options: OpenapiMiddlewareOptions): RouteMiddleware {\n const { openapiFilePath, enableEnhancement = true, serverDir } = options;\n\n return {\n name: 'openapi',\n mountPath: '/dev',\n routes: OPENAPI_ROUTES,\n\n enabled: (context: MiddlewareContext) => context.isDev,\n\n createRouter: (context: MiddlewareContext) => {\n return createOpenapiRouter(\n {\n openapiFilePath,\n enableEnhancement,\n serverDir,\n },\n context,\n );\n },\n };\n}\n\n// Re-export types and utilities\nexport type { OpenapiMiddlewareOptions, SourceInfo, EnhanceOptions, EnhanceResult } from './types';\nexport { enhanceOpenApiWithSourceInfo } from './services';\n","import express, { Router } from 'express';\nimport type { RouterOptions } from './types';\nimport { resolveLogDir } from './utils';\nimport {\n createGetTraceEntriesHandler,\n createGetRecentTracesHandler,\n createGetLogFileHandler,\n createGetServerLogsHandler,\n createGetTriggerListHandler,\n createGetTriggerDetailHandler,\n createGetCapabilityTraceListHandler,\n} from './controller';\nimport { createHealthCheckHandler } from './health.controller';\n/**\n 
* Create dev log router with all routes registered\n */\nexport function createDevLogRouter(options: RouterOptions = {}): Router {\n const logDir = resolveLogDir(options.logDir);\n const router = express.Router();\n\n // GET /app/trace/:traceId - Get log entries by trace ID\n router.get('/app/trace/:traceId', createGetTraceEntriesHandler(logDir));\n\n // GET /trace/recent - Get recent trace calls with pagination\n router.get('/trace/recent', createGetRecentTracesHandler(logDir));\n\n // GET /files/:fileName - Get paginated log file content\n router.get('/files/:fileName', createGetLogFileHandler(logDir));\n\n // GET /server-logs - Get server logs in ServerLog format (compatible with frontend)\n router.get('/server-logs', createGetServerLogsHandler(logDir));\n \n // GET /trace/trigger/list - get all automation trigger list in trace.log\n router.get('/trace/trigger/list', createGetTriggerListHandler(logDir));\n\n // GET /trace/trigger/:instanceID - get automation trigger detail by instanceID in server.log\n router.get('/trace/trigger/:instanceID', createGetTriggerDetailHandler(logDir));\n\n // GET /trace/capability/list - get capability trace list by capability_id in server.log\n router.get('/trace/capability/list', createGetCapabilityTraceListHandler(logDir));\n\n router.get('/health', createHealthCheckHandler());\n\n return router;\n}\n","import { promises as fs } from 'node:fs';\nimport { isAbsolute, join, relative } from 'node:path';\nimport type { LogEntry } from './types';\nimport { matchesPathPattern } from './helper/path-matcher';\n\n/**\n * Resolve log directory path\n */\nexport function resolveLogDir(provided?: string): string {\n if (!provided) {\n return join(process.cwd(), 'logs');\n }\n return isAbsolute(provided) ? provided : join(process.cwd(), provided);\n}\n\n/**\n * Get relative path from current working directory\n */\nexport function getRelativePath(filePath: string): string {\n return relative(process.cwd(), filePath);\n}\n\n/**\n * Check if file exists\n */\nexport async function fileExists(filePath: string): Promise<boolean> {\n try {\n await fs.access(filePath);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Parse a log line as JSON\n */\nexport function parseLogLine(line: string): LogEntry | undefined {\n const trimmed = line.trim();\n if (!trimmed) return undefined;\n\n try {\n return JSON.parse(trimmed) as LogEntry;\n } catch {\n return undefined;\n }\n}\n\n/**\n * Extract number from message using regex pattern\n */\nexport function extractNumber(message: string, pattern: RegExp): number | undefined {\n if (typeof message !== 'string') return undefined;\n\n const match = message.match(pattern);\n if (!match) return undefined;\n\n const value = Number(match[1]);\n return Number.isFinite(value) ? value : undefined;\n}\n\n/**\n * Parse limit parameter with default and max value\n */\nexport function parseLimit(value: string | undefined, defaultValue: number, maxValue: number): number {\n if (typeof value !== 'string' || !value.trim()) {\n return defaultValue;\n }\n\n const parsed = Number(value);\n if (Number.isFinite(parsed) && parsed > 0) {\n return Math.min(Math.floor(parsed), maxValue);\n }\n\n return defaultValue;\n}\n\n/**\n * Parse positive integer with fallback\n */\nexport function parsePositiveInt(value: string | undefined, fallback: number): number {\n if (typeof value !== 'string' || !value.trim()) {\n return fallback;\n }\n\n const parsed = Number(value);\n return Number.isFinite(parsed) && parsed > 0 ? 
Math.floor(parsed) : fallback;\n}\n\n/**\n * Resolve log file path with security checks\n */\nexport function resolveLogFilePath(baseDir: string, fileName: string): string {\n const sanitized = fileName.replace(/\\\\/g, '/');\n const segments = sanitized.split('/').filter(Boolean);\n\n if (segments.some((segment) => segment === '..')) {\n throw new Error('Invalid log file path');\n }\n\n const resolved = join(baseDir, segments.join('/'));\n const rel = relative(baseDir, resolved);\n\n if (rel.startsWith('..')) {\n throw new Error('Access to the specified log file is denied');\n }\n\n return resolved;\n}\n\n/**\n * Check if actual request path matches expected OpenAPI/Swagger path pattern\n *\n * @param actualPath - The actual request path from logs (e.g., /api/users/123)\n * @param pattern - The OpenAPI path pattern to match against (e.g., /api/users/{id})\n * @returns true if the path matches the pattern\n *\n * @example\n * matchesPath('/api/users/123', '/api/users/{id}') // true\n * matchesPath('/api/users/123', '/api/users') // true (prefix match)\n * matchesPath('/api/posts/456', '/api/users/{id}') // false\n */\nexport function matchesPath(actualPath: string | undefined, pattern: string): boolean {\n return matchesPathPattern(actualPath, pattern);\n}\n\n/**\n * Check if actual request method matches expected HTTP method\n *\n * @param actualMethod - The actual request method from logs (e.g., 'GET', 'POST')\n * @param expectedMethod - The expected HTTP method to match against (e.g., 'GET')\n * @returns true if the method matches (case-insensitive)\n *\n * @example\n * matchesMethod('GET', 'GET') // true\n * matchesMethod('get', 'GET') // true\n * matchesMethod('POST', 'GET') // false\n * matchesMethod(undefined, 'GET') // false\n */\nexport function matchesMethod(actualMethod: string | undefined, expectedMethod: string): boolean {\n if (!actualMethod || !expectedMethod) {\n return false;\n }\n return actualMethod.toUpperCase() === expectedMethod.toUpperCase();\n}\n\n\n/**\n * Serialize error for JSON response\n */\nexport function serializeError(error: unknown): { name?: string; message: string } {\n return error instanceof Error\n ? { name: error.name, message: error.message }\n : { message: String(error) };\n}\n","/**\n * Path matcher utility for matching request paths against OpenAPI/Swagger/NestJS path patterns\n *\n * Note: Input paths should be clean request paths without query strings or hash fragments\n *\n * Supports:\n * - Exact matching: /api/users === /api/users (only)\n * - Path parameters: /api/users/{id} matches /api/users/123\n * - Nested parameters: /api/users/{userId}/posts/{postId}\n * - Wildcard (asterisk): /api/star/users matches /api/v1/users (use * for single segment)\n * - Recursive wildcard: /files/star-star matches /files/a/b/c (use two asterisks for multiple segments)\n *\n * Note: Prefix matching has been removed. 
Use slash-two-asterisks for matching sub-paths:\n * - /api/users - matches /api/users only\n * - /api/users/two-asterisks - matches /api/users, /api/users/123, /api/users/123/posts, etc.\n */\n\n/**\n * Convert path pattern to regex, supporting wildcards and path parameters\n *\n * Examples:\n * /api/users/{id} -> /^\\/api\\/users\\/[^/]+$/\n * /api/star/users -> /^\\/api\\/[^/]+\\/users$/\n * /files/double-star -> /^\\/files\\/.+$/\n * /api/v{version}/users -> /^\\/api\\/v[^/]+\\/users$/\n */\nfunction pathPatternToRegex(pattern: string): RegExp {\n // Escape special regex characters except {}, *, /\n let regexPattern = pattern.replace(/[.+?^$|()[\\]\\\\]/g, '\\\\$&');\n\n // Handle recursive wildcard /** (must be at the end or followed by /)\n // /files/** -> /files/.+ (matches one or more path segments)\n // This follows NestJS routing convention where ** requires at least one segment\n regexPattern = regexPattern.replace(/\\/\\*\\*$/, '/.+');\n regexPattern = regexPattern.replace(/\\/\\*\\*\\//g, '/(?:.+/)?');\n\n // Handle single wildcard * (matches exactly one path segment)\n // /api/*/users -> /api/[^/]+/users\n regexPattern = regexPattern.replace(/\\*/g, '[^/]+');\n\n // Replace path parameters {param} with regex group that matches anything except /\n regexPattern = regexPattern.replace(/\\{[^}]+\\}/g, '[^/]+');\n\n // Anchor to start and end\n return new RegExp(`^${regexPattern}$`);\n}\n\n/**\n * Check if an actual path matches an OpenAPI/Swagger/NestJS path pattern\n *\n * @param actualPath - The actual request path (e.g., /api/users/123)\n * @param pattern - The path pattern (e.g., /api/users/{id})\n * @returns true if the actual path matches the pattern\n *\n * @example\n * // Exact match\n * matchesPathPattern('/api/users', '/api/users') // true\n * matchesPathPattern('/api/users/123', '/api/users') // false\n *\n * // Path parameters\n * matchesPathPattern('/api/users/123', '/api/users/{id}') // true\n *\n * // Wildcards\n * matchesPathPattern('/api/v1/users', '/api/star/users') // true (use * not star)\n * matchesPathPattern('/api/v2/users', '/api/star/users') // true\n *\n * // Recursive wildcards (use two asterisks, not /**!)\n * matchesPathPattern('/files/a/b/c', '/files/starstar') // true (use ** not starstar)\n * matchesPathPattern('/files/x', '/files/starstar') // true\n */\nexport function matchesPathPattern(actualPath: string | undefined, pattern: string): boolean {\n if (!actualPath || !pattern) {\n return false;\n }\n\n // Normalize paths: remove trailing slashes and query strings\n const normalizedActual = normalizePathForMatching(actualPath);\n const normalizedPattern = normalizePathForMatching(pattern);\n\n // Exact match (no parameters or wildcards)\n if (normalizedActual === normalizedPattern) {\n return true;\n }\n\n // Check if pattern contains special characters (parameters, wildcards)\n if (hasSpecialPatterns(normalizedPattern)) {\n const regex = pathPatternToRegex(normalizedPattern);\n return regex.test(normalizedActual);\n }\n\n // No match - exact paths that are different\n return false;\n}\n\n/**\n * Extract path parameters from actual path using pattern\n *\n * Note: This only extracts named parameters {param}, not wildcards\n *\n * @param actualPath - The actual request path\n * @param pattern - The path pattern with parameters\n * @returns Object with parameter names and values, or null if no match\n *\n * @example\n * extractPathParams('/api/users/123', '/api/users/{id}')\n * // { id: '123' }\n *\n * extractPathParams('/api/users/123/posts/456', 
'/api/users/{userId}/posts/{postId}')\n * // { userId: '123', postId: '456' }\n *\n * // Wildcards are not captured (use asterisk)\n * extractPathParams('/api/v1/users/123', '/api/asterisk/users/{id}')\n * // { id: '123' }\n */\nexport function extractPathParams(\n actualPath: string,\n pattern: string,\n): Record<string, string> | null {\n const normalizedActual = normalizePathForMatching(actualPath);\n const normalizedPattern = normalizePathForMatching(pattern);\n\n // Extract parameter names from pattern (only {param} style)\n const paramNames: string[] = [];\n const paramRegex = /\\{([^}]+)\\}/g;\n let match: RegExpExecArray | null;\n\n while ((match = paramRegex.exec(normalizedPattern)) !== null) {\n paramNames.push(match[1]);\n }\n\n if (paramNames.length === 0) {\n // No named parameters in pattern\n return normalizedActual === normalizedPattern ? {} : null;\n }\n\n // Build regex with capturing groups for named parameters\n let regexPattern = normalizedPattern.replace(/[.+?^$|()[\\]\\\\]/g, '\\\\$&');\n\n // Replace wildcards with non-capturing groups\n regexPattern = regexPattern.replace(/\\/\\*\\*$/, '/.+');\n regexPattern = regexPattern.replace(/\\/\\*\\*\\//g, '/(?:.+/)?');\n regexPattern = regexPattern.replace(/\\*/g, '[^/]+');\n\n // Replace named parameters with capturing groups\n regexPattern = regexPattern.replace(/\\{[^}]+\\}/g, '([^/]+)');\n\n const regex = new RegExp(`^${regexPattern}$`);\n\n const result = regex.exec(normalizedActual);\n if (!result) {\n return null;\n }\n\n // Map parameter names to captured values\n const params: Record<string, string> = {};\n paramNames.forEach((name, index) => {\n params[name] = result[index + 1]; // index + 1 because result[0] is the full match\n });\n\n return params;\n}\n\n/**\n * Check if a pattern contains special matching characters\n *\n * @param pattern - The path pattern to check\n * @returns true if the pattern contains parameters, wildcards, etc.\n *\n * @example\n * hasSpecialPatterns('/api/users/{id}') // true\n * hasSpecialPatterns('/api/asterisk/users') // true (with actual asterisk character)\n * hasSpecialPatterns('/files/double-asterisk') // true (with actual double asterisk)\n * hasSpecialPatterns('/api/users') // false\n */\nexport function hasSpecialPatterns(pattern: string): boolean {\n return /[{*]/.test(pattern);\n}\n\n/**\n * Check if a pattern contains path parameters (not wildcards)\n *\n * @param pattern - The path pattern to check\n * @returns true if the pattern contains {param} style parameters\n *\n * @example\n * hasPathParameters('/api/users/{id}') // true\n * hasPathParameters('/api/asterisk/users') // false (with actual asterisk character)\n * hasPathParameters('/api/users') // false\n */\nexport function hasPathParameters(pattern: string): boolean {\n return /\\{[^}]+\\}/.test(pattern);\n}\n\n/**\n * Check if a pattern contains wildcards\n *\n * @param pattern - The path pattern to check\n * @returns true if the pattern contains asterisk or double asterisk\n *\n * @example\n * hasWildcards('/api/asterisk/users') // true (with actual asterisk character)\n * hasWildcards('/files/double-asterisk') // true (with actual double asterisk)\n * hasWildcards('/api/users/{id}') // false\n */\nexport function hasWildcards(pattern: string): boolean {\n return pattern.includes('*');\n}\n\n/**\n * Normalize a path for matching by:\n * - Removing trailing slashes\n * - Removing duplicate slashes\n *\n * @param path - The path to normalize\n * @returns Normalized path\n *\n * @example\n * 
normalizePathForMatching('/api/users/') // '/api/users'\n * normalizePathForMatching('/api//users') // '/api/users'\n */\nexport function normalizePathForMatching(path: string): string {\n return path\n .replace(/\\/+/g, '/') // Replace multiple slashes with single slash\n .replace(/\\/+$/, ''); // Remove trailing slash\n}\n\n/**\n * Normalize a path (deprecated, use normalizePathForMatching)\n * @deprecated Use normalizePathForMatching instead\n */\nexport function normalizePath(path: string): string {\n return normalizePathForMatching(path);\n}\n","import { join } from 'node:path';\nimport type { Request, Response } from 'express';\n\nimport { readLogEntriesByTrace, readRecentTraceCalls, readLogFilePage, readServerLogs, readTriggerList, readTriggerDetail, readCapabilityTraceList } from './services';\nimport {\n getRelativePath,\n parseLimit,\n parsePositiveInt,\n resolveLogFilePath,\n serializeError,\n} from './utils';\n\n/**\n * Handle not found error\n */\nfunction handleNotFound(res: Response, filePath: string, message = 'Log file not found'): void {\n res.status(404).json({ message: `${message}: ${getRelativePath(filePath)}` });\n}\n\n/**\n * Handle generic error\n */\nfunction handleError(res: Response, error: unknown, message = 'Failed to read log file'): void {\n res.status(500).json({ message, error: serializeError(error) });\n}\n\n/**\n * Create handler for getting log entries by trace ID\n */\nexport function createGetTraceEntriesHandler(logDir: string) {\n const appLogPath = join(logDir, 'server.log');\n\n return async (req: Request, res: Response) => {\n const traceId = (req.params.traceId || '').trim();\n if (!traceId) {\n return res.status(400).json({ message: 'traceId is required' });\n }\n\n const limit = parseLimit(req.query.limit as string | undefined, 200, 1000);\n\n try {\n const entries = await readLogEntriesByTrace(appLogPath, traceId, limit);\n if (!entries) {\n return handleNotFound(res, appLogPath);\n }\n res.json({\n file: getRelativePath(appLogPath),\n traceId,\n count: entries.length,\n entries,\n });\n } catch (error) {\n handleError(res, error);\n }\n };\n}\n\n/**\n * Create handler for getting recent trace calls\n */\nexport function createGetRecentTracesHandler(logDir: string) {\n const traceLogPath = join(logDir, 'trace.log');\n\n return async (req: Request, res: Response) => {\n const page = parsePositiveInt(req.query.page as string | undefined, 1);\n const pageSize = parseLimit(req.query.pageSize as string | undefined, 10, 100);\n const pathFilter = typeof req.query.path === 'string' ? req.query.path.trim() : undefined;\n const methodFilter = typeof req.query.method === 'string' ? 
req.query.method.trim().toUpperCase() : undefined;\n\n try {\n const result = await readRecentTraceCalls(traceLogPath, page, pageSize, pathFilter, methodFilter);\n if (!result) {\n return handleNotFound(res, traceLogPath);\n }\n res.json({\n file: getRelativePath(traceLogPath),\n ...result,\n path: pathFilter || null,\n method: methodFilter || null,\n count: result.calls.length,\n });\n } catch (error) {\n handleError(res, error, 'Failed to read trace log');\n }\n };\n}\n\n/**\n * Create handler for getting log file by name\n */\nexport function createGetLogFileHandler(logDir: string) {\n return async (req: Request, res: Response) => {\n const fileName = (req.params.fileName || '').trim();\n if (!fileName) {\n return res.status(400).json({ message: 'fileName is required' });\n }\n\n const page = parsePositiveInt(req.query.page as string | undefined, 1);\n const pageSize = parseLimit(req.query.pageSize as string | undefined, 200, 2000);\n\n try {\n const filePath = resolveLogFilePath(logDir, fileName);\n const result = await readLogFilePage(filePath, page, pageSize);\n if (!result) {\n return handleNotFound(res, filePath);\n }\n\n res.json({\n file: getRelativePath(filePath),\n ...result,\n });\n } catch (error) {\n handleError(res, error, 'Failed to read specified log file');\n }\n };\n}\n\n/**\n * Create handler for getting server logs\n *\n * Reads from 4 log sources:\n * - server.log (Pino JSON)\n * - trace.log (Pino JSON)\n * - server.std.log (plain text)\n * - client.std.log (plain text)\n *\n * Query parameters:\n * - limit: number of logs to return (default: 100, max: 1000)\n * - offset: offset for pagination (default: 0)\n * - levels: comma-separated log levels (e.g., \"error,warn\")\n * - sources: comma-separated sources (e.g., \"server,trace\")\n */\nexport function createGetServerLogsHandler(logDir: string) {\n return async (req: Request, res: Response) => {\n const limit = parseLimit(req.query.limit as string | undefined, 100, 1000);\n const offset = parsePositiveInt(req.query.offset as string | undefined, 0);\n\n // Parse levels filter\n const levels = req.query.levels\n ? String(req.query.levels).split(',').map(l => l.trim()).filter(Boolean)\n : undefined;\n\n // Parse sources filter\n const sources = req.query.sources\n ? String(req.query.sources).split(',').map(s => s.trim()).filter(Boolean)\n : undefined;\n\n try {\n const result = await readServerLogs(logDir, {\n limit,\n offset,\n levels,\n sources,\n });\n\n if (!result) {\n return res.status(404).json({\n message: 'No server log files found',\n hint: 'Expected files: server.log, trace.log, server.std.log, client.std.log',\n });\n }\n\n res.json(result);\n } catch (error) {\n handleError(res, error, 'Failed to read server logs');\n }\n }\n}\n/**\n * Create handler for getting all automation trigger list in trace.log\n * filter by request_body.trigger & limit & path=/__innerapi__/automation/invoke\n */\nexport function createGetTriggerListHandler(logDir: string) {\n const traceLogPath = join(logDir, 'trace.log');\n\n return async (req: Request, res: Response) => {\n const trigger = typeof req.query.trigger === 'string' ? req.query.trigger.trim() : undefined;\n if (!trigger) {\n return res.status(400).json({ message: 'trigger is required' });\n }\n\n const triggerID = typeof req.query.triggerID === 'string' ? req.query.triggerID.trim() : undefined;\n const path = typeof req.query.path === 'string' ? 
req.query.path.trim() : '/__innerapi__/automation/invoke';\n const limit = parseLimit(req.query.limit as string | undefined, 10, 200);\n\n try {\n const result = await readTriggerList(traceLogPath, trigger, path, limit, triggerID);\n if (!result) {\n return handleNotFound(res, traceLogPath);\n }\n res.json({\n file: getRelativePath(traceLogPath),\n path,\n ...result,\n });\n } catch (error) {\n handleError(res, error, 'Failed to read trace log');\n }\n };\n}\n\nexport function createGetTriggerDetailHandler(logDir: string) {\n const traceLogPath = join(logDir, 'server.log');\n\n return async (req: Request, res: Response) => {\n const instanceID = (req.params.instanceID || '').trim();\n if (!instanceID) {\n return res.status(400).json({ message: 'instanceID is required' });\n }\n\n const path = typeof req.query.path === 'string' ? req.query.path.trim() : '/__innerapi__/automation/invoke';\n\n try {\n const result = await readTriggerDetail(traceLogPath, path, instanceID);\n if (!result) {\n return handleNotFound(res, traceLogPath);\n }\n res.json({\n file: getRelativePath(traceLogPath),\n ...result,\n });\n } catch (error) {\n handleError(res, error, 'Failed to read trace log');\n }\n };\n}\n\n/**\n * Create handler for getting capability trace list\n * Filter by capability_id in server.log\n *\n * Query parameters:\n * - capability_id: required, the capability ID to filter\n * - limit: number of traces to return (default: 10, max: 200)\n */\nexport function createGetCapabilityTraceListHandler(logDir: string) {\n const serverLogPath = join(logDir, 'server.log');\n\n return async (req: Request, res: Response) => {\n const capabilityId = typeof req.query.capability_id === 'string' ? req.query.capability_id.trim() : undefined;\n if (!capabilityId) {\n return res.status(400).json({ message: 'capability_id is required' });\n }\n\n const limit = parseLimit(req.query.limit as string | undefined, 10, 200);\n\n try {\n const result = await readCapabilityTraceList(serverLogPath, capabilityId, limit);\n if (!result) {\n return handleNotFound(res, serverLogPath);\n }\n res.json({\n file: getRelativePath(serverLogPath),\n ...result,\n });\n } catch (error) {\n handleError(res, error, 'Failed to read server log');\n }\n };\n}","import { promises as fs } from 'node:fs';\nimport type { TraceBuilder, RecentTraceCallsResponse } from '../types';\n\n/**\n * Read file in reverse order (from end to beginning)\n */\nexport async function readFileReverse(\n filePath: string,\n chunkSize: number,\n processLine: (line: string) => void,\n): Promise<void> {\n const handle = await fs.open(filePath, 'r');\n\n try {\n const stats = await handle.stat();\n let position = stats.size;\n let remainder = '';\n\n while (position > 0) {\n const length = Math.min(chunkSize, position);\n position -= length;\n\n const buffer = Buffer.alloc(length);\n await handle.read(buffer, 0, length, position);\n\n let chunk = buffer.toString('utf8');\n if (remainder) {\n chunk += remainder;\n remainder = '';\n }\n\n const lines = chunk.split('\\n');\n remainder = lines.shift() ?? 
'';\n\n for (let i = lines.length - 1; i >= 0; i -= 1) {\n if (lines[i]) {\n processLine(lines[i]);\n }\n }\n }\n\n if (remainder) {\n processLine(remainder);\n }\n } finally {\n await handle.close();\n }\n}\n\n/**\n * Build paginated response from trace builders\n */\nexport function buildPaginatedResponse(\n items: TraceBuilder[],\n page: number,\n pageSize: number,\n): RecentTraceCallsResponse {\n const totalItems = items.length;\n const totalPages = totalItems === 0 ? 0 : Math.ceil(totalItems / pageSize);\n const startIndex = (page - 1) * pageSize;\n const endIndex = Math.min(startIndex + pageSize, totalItems);\n\n const pagedItems = items.slice(startIndex, endIndex).map((builder) => ({\n traceId: builder.traceId,\n method: builder.method,\n path: builder.path,\n startTime: builder.startTime,\n endTime: builder.endTime,\n statusCode: builder.statusCode,\n durationMs: builder.durationMs,\n entries: builder.entries.slice().reverse(),\n }));\n\n return {\n page,\n pageSize,\n totalCalls: totalItems,\n totalPages,\n calls: pagedItems,\n };\n}\n","import type { ServerLog } from '../types';\n\n/**\n * Generate a simple UUID v4\n */\nexport function generateUUID(): string {\n return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => {\n const r = (Math.random() * 16) | 0;\n const v = c === 'x' ? r : (r & 0x3) | 0x8;\n return v.toString(16);\n });\n}\n\n/**\n * Map Pino log level to ServerLogLevel\n */\nexport function mapPinoLevelToServerLogLevel(\n pinoLevel: number | string\n): 'fatal' | 'error' | 'warn' | 'log' | 'debug' | 'verbose' {\n if (typeof pinoLevel === 'string') {\n const lower = pinoLevel.toLowerCase();\n if (lower === 'fatal') return 'fatal';\n if (lower === 'error') return 'error';\n if (lower === 'warn' || lower === 'warning') return 'warn';\n if (lower === 'info' || lower === 'log') return 'log';\n if (lower === 'debug') return 'debug';\n if (lower === 'trace' || lower === 'verbose') return 'verbose';\n return 'log';\n }\n\n // Pino numeric levels\n if (pinoLevel >= 60) return 'fatal';\n if (pinoLevel >= 50) return 'error';\n if (pinoLevel >= 40) return 'warn';\n if (pinoLevel >= 30) return 'log';\n if (pinoLevel >= 20) return 'debug';\n return 'verbose';\n}\n\n/**\n * Extract log level from text content\n */\nexport function extractLogLevel(text: string): 'fatal' | 'error' | 'warn' | 'log' | 'debug' | 'verbose' {\n const lower = text.toLowerCase();\n\n if (lower.includes('fatal') || lower.includes('critical')) return 'fatal';\n if (lower.includes('error') || lower.includes('<e>') || lower.includes('✖')) return 'error';\n if (lower.includes('warn') || lower.includes('warning') || lower.includes('<w>') || lower.includes('⚠')) return 'warn';\n if (lower.includes('debug') || lower.includes('<d>')) return 'debug';\n if (lower.includes('verbose') || lower.includes('trace')) return 'verbose';\n\n return 'log';\n}\n\n/**\n * Parse Pino JSON log to ServerLog format\n */\nexport function parsePinoLog(line: string, source: 'server' | 'trace'): ServerLog | null {\n try {\n const pinoLog = JSON.parse(line);\n const id = generateUUID();\n\n return {\n id,\n level: mapPinoLevelToServerLogLevel(pinoLog.level),\n timestamp: new Date(pinoLog.time).getTime(),\n message: pinoLog.message || pinoLog.msg || '',\n context: pinoLog.context || null,\n traceId: pinoLog.trace_id || null,\n userId: pinoLog.user_id || null,\n appId: pinoLog.app_id || null,\n tenantId: pinoLog.tenant_id || null,\n stack: pinoLog.stack || null,\n meta: {\n pid: pinoLog.pid,\n hostname: pinoLog.hostname,\n path: 
pinoLog.path,\n method: pinoLog.method,\n statusCode: pinoLog.status_code,\n durationMs: pinoLog.duration_ms,\n ip: pinoLog.ip,\n requestBody: pinoLog.request_body,\n responseBody: pinoLog.response_body,\n },\n tags: [source],\n };\n } catch (error) {\n return null;\n }\n}\n\n/**\n * Parse plain text log to ServerLog format\n * Format: [2025-11-20 21:48:42] [server] content\n */\nexport function parseStdLog(line: string, source: 'server-std' | 'client-std'): ServerLog | null {\n const id = generateUUID();\n const match = line.match(/^\\[(\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2})\\] \\[(server|client)\\] (.*)$/);\n\n if (!match) {\n return {\n id,\n level: 'log',\n timestamp: Date.now(),\n message: line,\n context: null,\n traceId: null,\n userId: null,\n appId: null,\n tenantId: null,\n stack: null,\n meta: null,\n tags: [source],\n };\n }\n\n const [, timeStr, , content] = match;\n\n let timestamp: number;\n try {\n const isoStr = timeStr.replace(' ', 'T');\n timestamp = new Date(isoStr).getTime();\n if (isNaN(timestamp)) {\n timestamp = Date.now();\n }\n } catch (error) {\n timestamp = Date.now();\n }\n\n const level = extractLogLevel(content);\n\n return {\n id,\n level,\n timestamp,\n message: content,\n context: null,\n traceId: null,\n userId: null,\n appId: null,\n tenantId: null,\n stack: null,\n meta: null,\n tags: [source],\n };\n}\n","import { createReadStream } from 'node:fs';\nimport { createInterface } from 'node:readline';\nimport type {\n LogEntry,\n TraceBuilder,\n RecentTraceCallsResponse,\n LogFilePageResponse,\n} from '../types';\nimport { fileExists, parseLogLine, extractNumber, matchesPath, matchesMethod } from '../utils';\nimport { readFileReverse, buildPaginatedResponse } from './file-reader';\n\n/**\n * Read log entries by trace ID\n */\nexport async function readLogEntriesByTrace(\n filePath: string,\n traceId: string,\n limit: number,\n): Promise<LogEntry[] | undefined> {\n const exists = await fileExists(filePath);\n if (!exists) {\n return undefined;\n }\n\n const matches: LogEntry[] = [];\n const stream = createReadStream(filePath, { encoding: 'utf8' });\n const rl = createInterface({ input: stream, crlfDelay: Infinity });\n\n for await (const line of rl) {\n const entry = parseLogLine(line);\n if (!entry) continue;\n if (entry.trace_id !== traceId) continue;\n\n matches.push(entry);\n if (limit > 0 && matches.length > limit) {\n matches.shift();\n }\n }\n\n rl.close();\n stream.close();\n\n return matches;\n}\n\n/**\n * Read recent trace calls with pagination\n */\nexport async function readRecentTraceCalls(\n filePath: string,\n page: number,\n pageSize: number,\n pathFilter?: string,\n methodFilter?: string,\n): Promise<RecentTraceCallsResponse | undefined> {\n if (!(await fileExists(filePath))) {\n return undefined;\n }\n\n const config = {\n maxEntriesPerTrace: 10,\n chunkSize: 64 * 1024,\n };\n\n const builders = new Map<string, TraceBuilder>();\n const completedCalls: TraceBuilder[] = [];\n\n const createTraceBuilder = (traceId: string): TraceBuilder => ({\n traceId,\n entries: [],\n method: undefined,\n path: undefined,\n startTime: undefined,\n endTime: undefined,\n statusCode: undefined,\n durationMs: undefined,\n hasCompleted: false,\n });\n\n const updateBuilderMetadata = (builder: TraceBuilder, entry: LogEntry): void => {\n if (entry.method && !builder.method) builder.method = String(entry.method);\n if (entry.path && !builder.path) builder.path = String(entry.path);\n\n builder.entries.push(entry);\n if (builder.entries.length > 
config.maxEntriesPerTrace) {\n builder.entries.shift();\n }\n };\n\n const handleRequestCompleted = (builder: TraceBuilder, entry: LogEntry, message: string): void => {\n builder.hasCompleted = true;\n builder.endTime = entry.time;\n builder.statusCode = extractNumber(message, /status_code:\\s*(\\d+)/);\n builder.durationMs = extractNumber(message, /duration_ms:\\s*(\\d+)/);\n if (!builder.path && entry.path) {\n builder.path = String(entry.path);\n }\n\n const pathMatches = !pathFilter || matchesPath(builder.path, pathFilter);\n const methodMatches = !methodFilter || matchesMethod(builder.method, methodFilter);\n const shouldInclude = pathMatches && methodMatches;\n\n if (shouldInclude) {\n completedCalls.push(builder);\n }\n };\n\n const processLogEntry = (entry: LogEntry): void => {\n const { trace_id: traceId, message = '' } = entry;\n if (!traceId) return;\n\n let builder = builders.get(traceId);\n if (!builder) {\n builder = createTraceBuilder(traceId);\n builders.set(traceId, builder);\n }\n\n updateBuilderMetadata(builder, entry);\n\n if (!builder.hasCompleted && (message.includes('HTTP request completed') || message.includes('HTTP request failed'))) {\n handleRequestCompleted(builder, entry, message);\n }\n\n if (message.includes('HTTP request started') && !builder.startTime) {\n builder.startTime = entry.time;\n }\n };\n\n const processLine = (line: string): void => {\n const entry = parseLogLine(line);\n if (entry?.trace_id) {\n processLogEntry(entry);\n }\n };\n\n await readFileReverse(filePath, config.chunkSize, processLine);\n\n return buildPaginatedResponse(completedCalls, page, pageSize);\n}\n\n/**\n * Read log file page with pagination\n */\nexport async function readLogFilePage(\n filePath: string,\n page: number,\n pageSize: number,\n): Promise<LogFilePageResponse | undefined> {\n if (!(await fileExists(filePath))) {\n return undefined;\n }\n\n const capacity = page * pageSize;\n const buffer: string[] = [];\n let totalLines = 0;\n\n const stream = createReadStream(filePath, { encoding: 'utf8' });\n const rl = createInterface({ input: stream, crlfDelay: Infinity });\n\n try {\n for await (const line of rl) {\n buffer.push(line);\n if (buffer.length > capacity) {\n buffer.shift();\n }\n totalLines += 1;\n }\n } finally {\n rl.close();\n stream.close();\n }\n\n const totalPages = totalLines === 0 ? 
0 : Math.ceil(totalLines / pageSize);\n\n if (buffer.length === 0) {\n return { page, pageSize, totalLines, totalPages, lines: [] };\n }\n\n const startIndex = Math.max(totalLines - page * pageSize, 0);\n const endIndex = Math.max(totalLines - (page - 1) * pageSize, 0);\n const bufferStartIndex = totalLines - buffer.length;\n\n const lines: string[] = [];\n for (let i = buffer.length - 1; i >= 0; i -= 1) {\n const lineIndex = bufferStartIndex + i;\n if (lineIndex >= startIndex && lineIndex < endIndex) {\n lines.push(buffer[i]);\n }\n }\n\n return {\n page,\n pageSize,\n totalLines,\n totalPages,\n lines: lines.reverse(),\n };\n}\n","import { createReadStream } from 'node:fs';\nimport { createInterface } from 'node:readline';\nimport { join } from 'node:path';\nimport type { ServerLog, ServerLogResponse } from '../types';\nimport { fileExists } from '../utils';\nimport { parsePinoLog, parseStdLog } from './parsers';\n\n/**\n * Read ServerLog logs from multiple sources\n *\n * Reads from 4 log files:\n * - server.log (Pino JSON)\n * - trace.log (Pino JSON)\n * - server.std.log (plain text)\n * - client.std.log (plain text)\n */\nexport async function readServerLogs(\n logDir: string,\n options: {\n limit?: number;\n offset?: number;\n levels?: string[];\n sources?: string[];\n } = {}\n): Promise<ServerLogResponse | undefined> {\n const limit = options.limit || 100;\n const offset = options.offset || 0;\n const sources = options.sources || ['server', 'trace', 'server-std', 'client-std'];\n\n const allLogs: ServerLog[] = [];\n const errors: string[] = [];\n\n for (const source of sources) {\n try {\n const logs = await readLogsBySource(logDir, source);\n allLogs.push(...logs);\n } catch (error) {\n const errorMsg = `Failed to read ${source}: ${error instanceof Error ? error.message : String(error)}`;\n errors.push(errorMsg);\n console.warn(`[readServerLogs] ${errorMsg}`);\n }\n }\n\n if (allLogs.length === 0) {\n if (errors.length > 0) {\n console.warn(`[readServerLogs] No logs found. 
Errors: ${errors.join(', ')}`);\n }\n return undefined;\n }\n\n let filteredLogs = allLogs;\n if (options.levels && options.levels.length > 0) {\n filteredLogs = allLogs.filter(log => options.levels!.includes(log.level));\n }\n\n // Sort by timestamp descending (newest first)\n filteredLogs.sort((a, b) => b.timestamp - a.timestamp);\n\n const total = filteredLogs.length;\n const paginatedLogs = filteredLogs.slice(offset, offset + limit);\n\n return {\n logs: paginatedLogs,\n total,\n hasMore: offset + limit < total,\n };\n}\n\n/**\n * Read logs from a specific source\n */\nasync function readLogsBySource(\n logDir: string,\n source: string\n): Promise<ServerLog[]> {\n let filePath: string;\n let parser: (line: string) => ServerLog | null;\n\n if (source === 'server') {\n filePath = join(logDir, 'server.log');\n parser = (line) => parsePinoLog(line, 'server');\n } else if (source === 'trace') {\n filePath = join(logDir, 'trace.log');\n parser = (line) => parsePinoLog(line, 'trace');\n } else if (source === 'server-std') {\n filePath = join(logDir, 'server.std.log');\n parser = (line) => parseStdLog(line, 'server-std');\n } else if (source === 'client-std') {\n filePath = join(logDir, 'client.std.log');\n parser = (line) => parseStdLog(line, 'client-std');\n } else {\n console.warn(`[readLogsBySource] Unknown source: ${source}`);\n return [];\n }\n\n if (!(await fileExists(filePath))) {\n console.warn(`[readLogsBySource] File not found: ${filePath}`);\n return [];\n }\n\n const logs: ServerLog[] = [];\n let stream: ReturnType<typeof createReadStream> | null = null;\n let rl: ReturnType<typeof createInterface> | null = null;\n\n try {\n stream = createReadStream(filePath, { encoding: 'utf8' });\n rl = createInterface({ input: stream, crlfDelay: Infinity });\n\n for await (const line of rl) {\n if (!line.trim()) continue;\n\n try {\n const log = parser(line);\n if (log) {\n logs.push(log);\n }\n } catch (parseError) {\n // Ignore individual line parse errors\n }\n }\n } catch (error) {\n console.error(`[readLogsBySource] Error reading ${filePath}:`, error);\n throw error;\n } finally {\n if (rl) {\n rl.close();\n }\n if (stream) {\n stream.close();\n }\n }\n\n return logs;\n}\n","import { createReadStream } from 'node:fs';\nimport { createInterface } from 'node:readline';\nimport type { LogEntry, TraceBuilder, RecentTraceCallsResponse } from '../types';\nimport { fileExists, parseLogLine, extractNumber } from '../utils';\nimport { readFileReverse } from './file-reader';\n\n/**\n * Read all automation trigger list in trace.log\n * filter by request_body.trigger & limit & path=/__innerapi__/automation/invoke\n */\nexport async function readTriggerList(\n filePath: string,\n trigger: string,\n path: string,\n limit?: number,\n triggerID?: string,\n): Promise<RecentTraceCallsResponse | undefined> {\n if (!(await fileExists(filePath))) {\n return undefined;\n }\n\n const config = {\n maxEntriesPerTrace: 10,\n chunkSize: 64 * 1024,\n };\n\n const builders = new Map<string, TraceBuilder>();\n const completedCalls: TraceBuilder[] = [];\n\n const createTraceBuilder = (traceId: string): TraceBuilder => ({\n traceId,\n entries: [],\n method: undefined,\n path: undefined,\n startTime: undefined,\n endTime: undefined,\n statusCode: undefined,\n durationMs: undefined,\n hasCompleted: false,\n });\n\n const shouldIncludeInCompletedCalls = (builder: TraceBuilder): boolean => {\n const alreadyAdded = completedCalls.some(call => call.traceId === builder.traceId);\n if (alreadyAdded) {\n return false;\n }\n\n 
const isAutomationTrigger = builder.path?.endsWith(path);\n if (!isAutomationTrigger) {\n return false;\n }\n\n if (trigger && builder.entries.length > 0) {\n const requestEntry = builder.entries.find(e => e.request_body?.trigger);\n if (requestEntry?.request_body?.trigger) {\n return String(requestEntry.request_body.trigger) === trigger\n && (triggerID ? requestEntry?.request_body?.triggerID === triggerID : true);\n }\n return false;\n }\n\n return true;\n };\n\n const updateBuilderMetadata = (builder: TraceBuilder, entry: LogEntry): void => {\n if (entry.method && !builder.method) builder.method = String(entry.method);\n if (entry.path && !builder.path) builder.path = String(entry.path);\n\n builder.entries.push(entry);\n if (builder.entries.length > config.maxEntriesPerTrace) {\n builder.entries.shift();\n }\n\n if (shouldIncludeInCompletedCalls(builder)) {\n completedCalls.push(builder);\n if (limit && completedCalls.length > limit) {\n completedCalls.pop();\n }\n }\n };\n\n const handleRequestCompleted = (builder: TraceBuilder, entry: LogEntry, message: string): void => {\n builder.hasCompleted = true;\n builder.endTime = entry.time;\n builder.statusCode = extractNumber(message, /status_code:\\s*(\\d+)/);\n builder.durationMs = extractNumber(message, /duration_ms:\\s*(\\d+)/);\n if (!builder.path && entry.path) {\n builder.path = String(entry.path);\n }\n\n if (shouldIncludeInCompletedCalls(builder)) {\n completedCalls.push(builder);\n if (limit && completedCalls.length > limit) {\n completedCalls.pop();\n }\n }\n };\n\n const processLogEntry = (entry: LogEntry): void => {\n const { trace_id: traceId, message = '' } = entry;\n if (!traceId) return;\n\n let builder = builders.get(traceId);\n if (!builder) {\n builder = createTraceBuilder(traceId);\n builders.set(traceId, builder);\n }\n\n updateBuilderMetadata(builder, entry);\n\n if (!builder.hasCompleted && (message.includes('HTTP request completed') || message.includes('HTTP request failed'))) {\n handleRequestCompleted(builder, entry, message);\n }\n\n if (message.includes('HTTP request started') && !builder.startTime) {\n builder.startTime = entry.time;\n }\n };\n\n const processLine = (line: string): void => {\n const entry = parseLogLine(line);\n if (entry?.trace_id) {\n processLogEntry(entry);\n }\n };\n\n await readFileReverse(filePath, config.chunkSize, processLine);\n\n return {\n page: 1,\n pageSize: completedCalls.length,\n totalCalls: completedCalls.length,\n totalPages: 1,\n calls: completedCalls.map((builder) => ({\n traceId: builder.traceId,\n method: builder.method,\n path: builder.path,\n startTime: builder.startTime,\n endTime: builder.endTime,\n statusCode: builder.statusCode,\n durationMs: builder.durationMs,\n entries: builder.entries.slice().reverse(),\n })),\n };\n}\n\n/**\n * Read trigger detail entries by instance ID\n */\nexport async function readTriggerDetail(\n filePath: string,\n path: string,\n instanceID: string,\n): Promise<{ instanceID: string; entries: LogEntry[] } | undefined> {\n const exists = await fileExists(filePath);\n if (!exists) {\n return undefined;\n }\n\n const matches: LogEntry[] = [];\n const stream = createReadStream(filePath, { encoding: 'utf8' });\n const rl = createInterface({ input: stream, crlfDelay: Infinity });\n\n for await (const line of rl) {\n const entry = parseLogLine(line);\n if (!entry) continue;\n\n const isAutomationTrigger = entry.path?.endsWith(path);\n const hasInstanceID = entry.instance_id === instanceID && entry.trigger;\n if (!isAutomationTrigger || 
!hasInstanceID) continue;\n\n matches.push(entry);\n }\n\n rl.close();\n stream.close();\n\n return {\n instanceID,\n entries: matches,\n };\n}\n","import type { LogEntry } from '../types';\nimport { fileExists, parseLogLine } from '../utils';\nimport { readFileReverse } from './file-reader';\n\n/**\n * Capability trace builder for aggregating capability execution logs\n */\ninterface CapabilityTraceBuilder {\n traceId: string;\n capabilityId: string;\n pluginKey?: string;\n action?: string;\n startTime?: string;\n endTime?: string;\n durationMs?: number;\n status?: 'success' | 'failed';\n input?: string;\n output?: string;\n error?: { message: string };\n hasCompleted: boolean;\n hasStartEntry: boolean;\n}\n\n/**\n * Capability trace item\n */\nexport interface CapabilityTraceItem {\n traceId: string;\n capabilityId: string;\n pluginKey?: string;\n action?: string;\n startTime?: string;\n endTime?: string;\n durationMs?: number;\n status: 'success' | 'failed';\n input?: string;\n output?: string;\n error?: { message: string };\n}\n\n/**\n * Response for capability trace list\n */\nexport interface CapabilityTraceListResponse {\n capabilityId: string;\n totalTraces: number;\n traces: CapabilityTraceItem[];\n}\n\n/**\n * Read capability trace list from server.log\n * Filter by capability_id field in log entries\n * Returns only input and output entries for each trace\n */\nexport async function readCapabilityTraceList(\n filePath: string,\n capabilityId: string,\n limit?: number,\n): Promise<CapabilityTraceListResponse | undefined> {\n if (!(await fileExists(filePath))) {\n return undefined;\n }\n\n const config = {\n chunkSize: 64 * 1024,\n };\n\n const builders = new Map<string, CapabilityTraceBuilder>();\n const completedTraces: CapabilityTraceBuilder[] = [];\n\n const createCapabilityTraceBuilder = (traceId: string, capId: string): CapabilityTraceBuilder => ({\n traceId,\n capabilityId: capId,\n hasCompleted: false,\n hasStartEntry: false,\n });\n\n const shouldIncludeInCompletedTraces = (builder: CapabilityTraceBuilder): boolean => {\n const alreadyAdded = completedTraces.some(trace => trace.traceId === builder.traceId);\n if (alreadyAdded) {\n return false;\n }\n return builder.capabilityId === capabilityId;\n };\n\n const updateBuilderMetadata = (builder: CapabilityTraceBuilder, entry: LogEntry): void => {\n if (entry.plugin_key && !builder.pluginKey) {\n builder.pluginKey = String(entry.plugin_key);\n }\n if (entry.action && !builder.action) {\n builder.action = String(entry.action);\n }\n\n const message = entry.message || '';\n\n // Check if this is the start of capability execution (入参日志)\n // 从独立字段读取 input\n if (message.includes('Executing capability') && !builder.hasStartEntry) {\n builder.hasStartEntry = true;\n builder.startTime = entry.time;\n if (entry.input) {\n builder.input = String(entry.input);\n }\n }\n\n // Check if this is successful completion (结果日志)\n // 从独立字段读取 output, duration_ms\n if (message.includes('executed successfully')) {\n builder.hasCompleted = true;\n builder.endTime = entry.time;\n builder.status = 'success';\n if (entry.output) {\n builder.output = String(entry.output);\n }\n if (entry.duration_ms) {\n builder.durationMs = Number(entry.duration_ms);\n }\n\n if (shouldIncludeInCompletedTraces(builder)) {\n completedTraces.push(builder);\n }\n }\n\n // Check if this is failed execution (失败日志)\n // 从独立字段读取 error, duration_ms\n if (message.includes('execution failed')) {\n builder.hasCompleted = true;\n builder.endTime = entry.time;\n builder.status = 
'failed';\n if (entry.error) {\n builder.error = { message: String(entry.error) };\n }\n if (entry.duration_ms) {\n builder.durationMs = Number(entry.duration_ms);\n }\n\n if (shouldIncludeInCompletedTraces(builder)) {\n completedTraces.push(builder);\n }\n }\n };\n\n const processLogEntry = (entry: LogEntry): void => {\n const { trace_id: traceId, capability_id: capId } = entry;\n\n if (!traceId || !capId || capId !== capabilityId) return;\n\n let builder = builders.get(traceId);\n if (!builder) {\n builder = createCapabilityTraceBuilder(traceId, capId);\n builders.set(traceId, builder);\n }\n\n updateBuilderMetadata(builder, entry);\n };\n\n const processLine = (line: string): void => {\n const entry = parseLogLine(line);\n if (entry?.capability_id) {\n processLogEntry(entry);\n }\n };\n\n await readFileReverse(filePath, config.chunkSize, processLine);\n\n // Sort by endTime descending (newest first)\n completedTraces.sort((a, b) => {\n const timeA = a.endTime ? new Date(a.endTime).getTime() : 0;\n const timeB = b.endTime ? new Date(b.endTime).getTime() : 0;\n return timeB - timeA;\n });\n\n // Apply limit after sorting\n const limitedTraces = limit ? completedTraces.slice(0, limit) : completedTraces;\n\n return {\n capabilityId,\n totalTraces: limitedTraces.length,\n traces: limitedTraces.map((builder): CapabilityTraceItem => ({\n traceId: builder.traceId,\n capabilityId: builder.capabilityId,\n pluginKey: builder.pluginKey,\n action: builder.action,\n startTime: builder.startTime,\n endTime: builder.endTime,\n durationMs: builder.durationMs,\n status: builder.status || 'failed',\n input: builder.input,\n output: builder.output,\n error: builder.error,\n })),\n };\n}\n","import http from 'node:http';\n\ninterface HealthCheckRouterOptions {\n /** 目标服务端口,默认 3000 */\n targetPort?: number;\n /** 目标服务主机,默认 localhost */\n targetHost?: string;\n /** 健康检查超时时间(毫秒),默认 2000ms */\n timeout?: number;\n}\n\n/**\n * 检查目标服务是否可用\n */\nfunction checkServiceHealth(\n host: string,\n port: number,\n timeout: number\n): Promise<{ available: boolean; responseTime?: number; error?: string }> {\n return new Promise((resolve) => {\n const startTime = Date.now();\n\n const req = http.request(\n {\n hostname: host,\n port,\n path: '/',\n method: 'HEAD',\n timeout,\n },\n (_res) => {\n const responseTime = Date.now() - startTime;\n // 任何响应都表示服务可用(包括错误状态码)\n resolve({\n available: true,\n responseTime,\n });\n }\n );\n\n req.on('timeout', () => {\n req.destroy();\n resolve({\n available: false,\n error: 'Request timeout',\n });\n });\n\n req.on('error', (err) => {\n resolve({\n available: false,\n error: err.message,\n });\n });\n\n req.end();\n });\n}\n\n/**\n * 创建健康检查路由\n */\nexport function createHealthCheckHandler(options: HealthCheckRouterOptions = {}) {\n const {\n targetPort = Number(process.env.SERVER_PORT) || 3000,\n targetHost = 'localhost',\n timeout = 2000,\n } = options;\n return async (_req, res) => {\n try {\n const result = await checkServiceHealth(targetHost, targetPort, timeout);\n\n if (result.available) {\n res.status(200).json({\n status: 'healthy',\n service: `${targetHost}:${targetPort}`,\n responseTime: result.responseTime,\n timestamp: new Date().toISOString(),\n });\n } else {\n res.status(503).json({\n status: 'unhealthy',\n service: `${targetHost}:${targetPort}`,\n error: result.error,\n timestamp: new Date().toISOString(),\n });\n }\n } catch (error) {\n res.status(500).json({\n status: 'error',\n service: `${targetHost}:${targetPort}`,\n error: error instanceof Error ? 
error.message : 'Unknown error',\n timestamp: new Date().toISOString(),\n });\n }\n };\n}\n","import type { RouteMiddleware, MiddlewareContext, RouteInfo } from '../types';\nimport { createDevLogRouter } from './router';\n\ninterface DevLogsMiddlewareOptions {\n /** Directory containing log files */\n logDir?: string;\n}\n\n/**\n * Routes provided by dev logs middleware\n */\nconst DEV_LOGS_ROUTES: RouteInfo[] = [\n {\n method: 'GET',\n path: '/app/trace/:traceId',\n description: 'Get log entries by trace ID',\n },\n {\n method: 'GET',\n path: '/trace/recent',\n description: 'Get recent trace calls with pagination and optional path/method filters',\n },\n {\n method: 'GET',\n path: '/files/:fileName',\n description: 'Get paginated log file content by file name',\n },\n {\n method: 'GET',\n path: '/server-logs',\n description: 'Get server logs in ServerLog format (compatible with frontend)',\n },\n {\n method: 'GET',\n path: '/trace/trigger/list',\n description: 'Get trigger list (automation trigger) in trace.log',\n },\n {\n method: 'GET',\n path: '/trace/trigger/:instanceID',\n description: 'Get trigger detail (automation trigger) in trace.log by instanceID',\n },\n];\n\n/**\n * Creates dev logs middleware for viewing application logs\n * Supports both rspack/webpack and Vite dev servers\n */\nexport function createDevLogsMiddleware(options: DevLogsMiddlewareOptions = {}): RouteMiddleware {\n const { logDir } = options;\n\n return {\n name: 'dev-logs',\n mountPath: '/dev/logs',\n routes: DEV_LOGS_ROUTES,\n\n enabled: (context: MiddlewareContext) => context.isDev,\n\n createRouter: (context: MiddlewareContext) => {\n return createDevLogRouter({ logDir: logDir || context.logDir });\n },\n };\n}\n","import express, { Router } from 'express';\nimport {\n collectLogsHandler,\n collectLogsBatchHandler,\n} from './controller';\nimport { resolveLogDir } from './utils';\n\ninterface RouterOptions {\n logDir?: string;\n fileName?: string;\n}\n\n/**\n * Create dev log router with all routes registered\n */\nexport function createDevLogRouter(options: RouterOptions = {}): Router {\n const logDir = resolveLogDir(options.logDir);\n const router = express.Router();\n\n // POST /collect - Collect logs from client\n // Use express.json() middleware only for this route\n router.post('/collect', express.json(), collectLogsHandler(logDir, options.fileName || 'client.log'));\n router.post('/collect-batch', express.json(), collectLogsBatchHandler(logDir, options.fileName || 'client.log'));\n\n return router;\n}\n","import { Request, Response } from 'express';\nimport { join } from 'path';\nimport fs from 'fs';\n\nimport { serializeError, ensureDir } from './utils';\n\ninterface CollectLogRequest {\n level: string;\n message: string;\n time: string; // ISO String\n source?: string; // Log Source string\n user_id: string; // User ID\n tenant_id: string; // Tenant ID\n app_id: string; // App ID\n}\n\nexport function collectLogsHandler(logDir: string, fileName: string) {\n const filePath = join(logDir, fileName);\n // Ensure log directory exists\n ensureDir(logDir);\n\n return async (req: Request, res: Response) => {\n try {\n const logContent = req.body as CollectLogRequest;\n if(!logContent.message){\n return res.status(400).json({ message: 'message is required' });\n }\n const logLine = JSON.stringify({\n ...logContent,\n server_time: new Date().toISOString(),\n }) + '\\n';\n await fs.promises.appendFile(filePath, logLine);\n res.json({\n success: true,\n });\n } catch (error) {\n handleError(res, error, 
'Failed to collect logs');\n }\n };\n}\nexport function collectLogsBatchHandler(logDir: string, fileName: string) {\n const filePath = join(logDir, fileName);\n // Ensure log directory exists\n ensureDir(logDir);\n\n return async (req: Request, res: Response) => {\n try {\n const logContents = req.body as Array<CollectLogRequest>;\n if(!Array.isArray(logContents)){\n return res.status(400).json({ message: 'logContents must be an array' });\n }\n const logLines = [];\n for (const logContent of logContents) {\n logLines.push(JSON.stringify({\n ...logContent,\n server_time: new Date().toISOString(),\n }) + '\\n');\n }\n await fs.promises.appendFile(filePath, logLines.join(''));\n res.json({\n success: true,\n });\n } catch (error) {\n handleError(res, error, 'Failed to collect logs');\n }\n };\n}\n\nfunction handleError(res: Response, error: unknown, message = 'Failed to collect logs'): void {\n res.status(500).json({ message, error: serializeError(error) });\n}","import { isAbsolute, join } from 'node:path';\nimport fs from 'node:fs';\n\n/**\n * Resolve log directory path\n */\nexport function resolveLogDir(provided?: string): string {\n if (!provided) {\n return join(process.cwd(), 'logs');\n }\n return isAbsolute(provided) ? provided : join(process.cwd(), provided);\n}\n\n/**\n * Ensure directory exists, create if not\n */\nexport function ensureDir(dir: string): void {\n if (!fs.existsSync(dir)) {\n fs.mkdirSync(dir, { recursive: true });\n }\n}\n\n/**\n * Serialize error for JSON response\n */\nexport function serializeError(error: unknown): { name?: string; message: string } {\n return error instanceof Error\n ? { name: error.name, message: error.message }\n : { message: String(error) };\n}\n","import type { RouteMiddleware, MiddlewareContext, RouteInfo } from '../types';\nimport { createDevLogRouter } from './router';\n\ninterface DevLogsMiddlewareOptions {\n logDir?: string; // log directory\n fileName?: string; // client.log by default\n}\n\n/**\n * Routes provided by dev logs middleware\n */\nconst DEV_LOGS_ROUTES: RouteInfo[] = [\n {\n method: 'POST',\n path: '/collect',\n description: 'Collect logs from client.',\n }\n];\n\n/**\n * Creates dev logs middleware for viewing application logs\n * Supports both rspack/webpack and Vite dev servers\n */\nexport function createCollectLogsMiddleware(options: DevLogsMiddlewareOptions = {}): RouteMiddleware {\n const { logDir } = options;\n\n return {\n name: 'collect-logs',\n mountPath: '/dev/logs',\n routes: DEV_LOGS_ROUTES,\n enabled: (context: MiddlewareContext) => context.isDev,\n createRouter: (context: MiddlewareContext) => {\n return createDevLogRouter({\n logDir: logDir || context.logDir,\n fileName: options.fileName || 'client.log',\n });\n },\n 
};\n}\n"],"mappings":";;;;AAKO,SAASA,kBAAkBC,UAAgB;AAChD,QAAMC,qBAAqBD,SAASE,WAAW,GAAA,IAC3CF,WACA,IAAIA,QAAAA;AACR,QAAMG,+BAA+BF,mBAAmBG,SAAS,GAAA,IAC7DH,mBAAmBI,MAAM,GAAG,EAAC,IAC7BJ;AACJ,SAAOE;AACT;AARgBJ;;;ACLhB,OAAOO,SAAQ;AACf,OAAOC,WAAU;;;ACDjB,IAAMC,iBAAiB;AAIhB,SAASC,oBAAoBC,QAAc;AAChD,MAAIC,OAAOD,OAAOE,WAAW,QAAA,IAAYF,OAAOG,MAAM,CAAA,IAAKH;AAE3D,SAAOC,KAAKC,WAAWE,cAAAA,GAAiB;AACtCH,WAAOA,KAAKE,MAAMC,eAAeC,MAAM;AACvCJ,WAAOK,qBAAqBL,IAAAA;EAC9B;AAEA,QAAMM,UAAUD,qBAAqBL,IAAAA;AACrC,MAAIM,QAAQF,WAAW,GAAG;AACxB,WAAO,GAAGD,cAAAA;;EACZ;AAEA,SAAO,GAAGA,cAAAA;EAAmBG,OAAAA;AAC/B;AAdgBR;AAgBT,SAASO,qBAAqBE,OAAa;AAChD,MAAIC,UAAUD;AACd,SAAOC,QAAQP,WAAW,MAAA,KAAWO,QAAQP,WAAW,IAAA,GAAO;AAC7DO,cAAUA,QAAQP,WAAW,MAAA,IAAUO,QAAQN,MAAM,CAAA,IAAKM,QAAQN,MAAM,CAAA;EAC1E;AACA,SAAOM;AACT;AANgBH;AAQT,SAASI,wBAAwBT,MAAY;AAClD,SAAOA,KAAKU,QAAQ,WAAW,MAAA;AACjC;AAFgBD;;;AC5BT,SAASE,2BAA2BC,QAAc;AACvD,SAAOA,OAAOC,QAAQ,gDAAgD,EAAA;AACxE;AAFgBF;AAIT,SAASG,8BAA8BF,QAAc;AAC1D,MAAIG,YAAY;AAChB,MAAIC,OAAOJ,OAAOC,QAAQ,6BAA6B,MAAA;AACrDE,iBAAa;AACb,WAAO;EACT,CAAA;AACAC,SAAOA,KAAKH,QAAQ,4BAA4B,MAAA;AAC9CE,iBAAa;AACb,WAAO;EACT,CAAA;AACAC,SAAOA,KAAKH,QAAQ,wCAAwC,MAAA;AAC1DE,iBAAa;AACb,WAAO;EACT,CAAA;AACAC,SAAOA,KAAKH,QAAQ,4BAA4B,MAAA;AAC9CE,iBAAa;AACb,WAAO;EACT,CAAA;AACAC,SAAOA,KAAKH,QAAQ,gCAAgC,MAAA;AAClDE,iBAAa;AACb,WAAO;EACT,CAAA;AACA,SAAO;IAAEC;IAAMD;EAAU;AAC3B;AAvBgBD;;;ACJhB,SAASG,cAAc;AAOhB,SAASC,uBAAuBC,QAAc;AACnD,QAAMC,eAAe;AACrB,QAAMC,UAAyB,CAAA;AAE/B,QAAMC,UAAUH,OAAOI,QAAQH,cAAc,CAACI,OAAOC,aAAqBC,SAAiBC,cAAAA;AACzF,UAAMC,YAAYC,mBAAmBF,SAAAA;AACrC,QAAIC,cAAcH,aAAa;AAC7B,aAAOD;IACT;AACAH,YAAQS,KAAK;MAAEC,MAAMN;MAAaO,IAAIJ;IAAU,CAAA;AAChD,UAAMK,cAAcT,MAAMU,QAAQ,GAAA;AAClC,UAAMC,SAASF,eAAe,IAAIT,MAAMY,MAAMH,WAAAA,IAAe,MAAMP,OAAAA,KAAYC,SAAAA;AAC/E,UAAMU,mBAAmBF,OAAOG,UAAS;AACzC,WAAO,gBAAgBV,SAAAA,IAAaS,gBAAAA;EACtC,CAAA;AAEA,SAAO;IAAEE,MAAMjB;IAASD;EAAQ;AAClC;AAjBgBH;AAmBT,SAASsB,gCAAgCrB,QAAgBE,SAAsB;AACpF,MAAIA,QAAQoB,WAAW,GAAG;AACxB,WAAOtB;EACT;AAEA,SAAOE,QAAQqB,OAAO,CAACC,KAAKC,WAAAA;AAC1B,QAAI,CAACA,OAAOb,QAAQa,OAAOb,SAASa,OAAOZ,IAAI;AAC7C,aAAOW;IACT;AACA,UAAME,UAAU,IAAIC,OAAO,MAAMC,aAAaH,OAAOb,IAAI,CAAA,aAAc,GAAA;AACvE,WAAOY,IAAIpB,QAAQsB,SAAS,GAAGD,OAAOZ,EAAE,IAAI;EAC9C,GAAGb,MAAAA;AACL;AAZgBqB;AAcT,SAASO,aAAaC,OAAa;AACxC,SAAOA,MAAMzB,QAAQ,uBAAuB,MAAA,EAAQA,QAAQ,OAAO,KAAA;AACrE;AAFgBwB;AAQhB,SAASE,YAAYC,KAAW;AAE9B,QAAMC,QAAQD,IAAIE,MAAM,UAAA,EAAYC,OAAOC,OAAAA;AAE3C,MAAIH,MAAMV,WAAW,GAAG;AACtB,WAAO;EACT;AAGA,SAAOU,MACJI,IAAI,CAACC,MAAMC,UAAAA;AACV,QAAIA,UAAU,GAAG;AACf,aAAOD,KAAKE,YAAW;IACzB;AACA,WAAOF,KAAKG,OAAO,CAAA,EAAGC,YAAW,IAAKJ,KAAKpB,MAAM,CAAA,EAAGsB,YAAW;EACjE,CAAA,EACCG,KAAK,EAAA;AACV;AAjBSZ;AAmBF,SAASpB,mBAAmBiC,MAAY;AAC7C,QAAMC,YAAYC,YAAYF,IAAAA;AAE9B,MAAIlC,YAAYmC,UAAUxC,QAAQ,kBAAkB,GAAA;AAEpDK,cAAYA,UAAUL,QAAQ,OAAO,GAAA;AAErCK,cAAYA,UAAUL,QAAQ,UAAU,EAAA;AAGxCK,cAAYqB,YAAYrB,SAAAA;AAExB,MAAI,CAACA,WAAW;AACdA,gBAAY;EACd;AACA,MAAI,CAAC,aAAaqC,KAAKrC,SAAAA,GAAY;AACjCA,gBAAY,IAAIA,SAAAA;EAClB;AACA,SAAOA;AACT;AAnBgBC;AAqBT,SAASmC,YAAYF,MAAY;AACtC,MAAI,CAAC,eAAeG,KAAKH,IAAAA,GAAO;AAC9B,WAAOA;EACT;AAEA,MAAI;AACF,UAAMI,iBAAiBC,OAAOL,MAAM;MAAEM,UAAU;MAAQC,MAAM;IAAQ,CAAA,EAAGR,KAAK,GAAA;AAC9E,WAAOK,kBAAkBJ;EAC3B,SAASQ,OAAO;AACd,WAAOR;EACT;AACF;AAXgBE;;;ACvFhB,IAAMO,sBAAsB;AAUrB,SAASC,sBAAsBC,QAAc;AAClD,QAAMC,QAAQD,OAAOE,MAAM,IAAA;AAC3B,QAAMC,SAAmB,CAAA;AACzB,MAAIC,WAAW;AACf,QAAMC,YAAsB,CAAA;AAE5B,WAASC,IAAI,GAAGA,IAAIL,MAAMM,QAAQD,KAAK,GAAG;AACxC,UAAME,OAAOP,MAAMK,CAAAA;AAEnB,UAAMG,QAAQD,KAAKC,MAAMC,mBAAAA;AACzB,QAAID,OAAO;AACT,YAAME,WAAWF,MAAM,CAAA;AACvB,YAAMG,UAAUD,aAAa,iBAAiB,gBAAgB;AAE9D,YAAME,eAAeC,wB
AAwBb,MAAMK,IAAI,CAAA,GAAIM,OAAAA;AAC3D,UAAIC,cAAc;AAEhBV,eAAOY,KAAKF,YAAAA;AACZT,oBAAY;AACZE,aAAK;MACP,OAAO;AAELD,kBAAUU,KAAKP,KAAKQ,KAAI,CAAA;AACxBb,eAAOY,KAAKP,IAAAA;MACd;AACA;IACF;AAEA,QAAIA,KAAKS,SAAS,UAAA,GAAa;AAC7BZ,gBAAUU,KAAKP,KAAKQ,KAAI,CAAA;IAC1B;AAEAb,WAAOY,KAAKP,IAAAA;EACd;AAEA,SAAO;IACLU,MAAMf,OAAOgB,KAAK,IAAA;IAClBf;IACAC;EACF;AACF;AAxCgBN;AA0CT,SAASe,wBAAwBM,UAA8BR,SAAyC;AAC7G,MAAI,CAACQ,YAAY,CAACA,SAASH,SAAS,UAAA,GAAa;AAC/C,WAAOI;EACT;AAEA,SAAOD,SAASE,QAAQ,YAAY,GAAGV,OAAAA,GAAU;AACnD;AANgBE;;;ACrDhB,OAAOS,QAAQ;AACf,OAAOC,UAAU;AAGV,SAASC,aAAaC,QAAc;AACzC,QAAMC,cAAc;AACpB,QAAMC,QAAQF,OAAOE,MAAMD,WAAAA;AAC3B,MAAI,CAACC,OAAO;AACV,WAAOF;EACT;AAEA,QAAMG,cAAcD,MAAM,CAAA,EACvBE,MAAM,GAAA,EACNC,IAAI,CAACC,OAAOA,GAAGC,KAAI,CAAA,EACnBC,OAAOC,OAAAA,EACPD,OAAO,CAACF,OAAOA,OAAO,cAAcA,OAAO,YAAA;AAI9C,QAAMI,sBAAsBP,YAAYK,OAAO,CAACF,OAAAA;AAC9C,QAAIA,OAAO,aAAa;AAEtB,YAAMK,sBAAsB;AAC5B,aAAOA,oBAAoBC,KAAKZ,MAAAA;IAClC;AACA,WAAO;EACT,CAAA;AAEA,MAAIA,OAAOa,SAAS,UAAA,KAAe,CAACH,oBAAoBG,SAAS,SAAA,GAAY;AAC3EH,wBAAoBI,KAAK,SAAA;EAC3B;AACA,MAAId,OAAOa,SAAS,SAAA,KAAc,CAACH,oBAAoBG,SAAS,QAAA,GAAW;AACzEH,wBAAoBI,KAAK,QAAA;EAC3B;AACA,MAAId,OAAOa,SAAS,qBAAA,KAA0B,CAACH,oBAAoBG,SAAS,oBAAA,GAAuB;AACjGH,wBAAoBI,KAAK,oBAAA;EAC3B;AACA,MAAId,OAAOa,SAAS,SAAA,KAAc,CAACH,oBAAoBG,SAAS,QAAA,GAAW;AACzEH,wBAAoBI,KAAK,QAAA;EAC3B;AACA,MAAId,OAAOa,SAAS,aAAA,KAAkB,CAACH,oBAAoBG,SAAS,YAAA,GAAe;AACjFH,wBAAoBI,KAAK,YAAA;EAC3B;AAEA,QAAMC,SAASC,MAAMC,KAAK,IAAIC,IAAIR,mBAAAA,CAAAA;AAClC,QAAMS,cAAc,YAAYJ,OAAOK,KAAK,IAAA,CAAA;AAC5C,SAAOpB,OAAOqB,QAAQpB,aAAakB,WAAAA;AACrC;AA3CgBpB;AAwET,SAASuB,kBAAkBC,QAAc;AAE9C,MAAIC,OAAOD,OAAOE,QAAQ,iDAAiD,EAAA;AAG3E,QAAMC,gBAAgB;;IAEpBC,KAAKC,QAAQC,WAAW,YAAY,UAAA;;IAEpCF,KAAKC,QAAQC,WAAW,eAAe,UAAA;;IAEvCF,KAAKC,QAAQC,WAAW,kBAAkB,UAAA;IAC1CF,KAAKC,QAAQC,WAAW,qBAAqB,UAAA;;AAG/C,MAAIC;AACJ,aAAWC,gBAAgBL,eAAe;AACxC,QAAIM,GAAGC,WAAWF,YAAAA,GAAe;AAC/BD,qBAAeC;AACf;IACF;EACF;AAEA,MAAI,CAACD,cAAc;AACjBI,YAAQC,KAAK,4EAA4ET,aAAAA;AACzF,WAAOF;EACT;AAEA,SAAOY,mBAAmBZ,MAAMM,YAAAA;AAClC;AA7BgBR;AA+BT,SAASc,mBAAmBZ,MAAcM,cAAoB;AACnE,QAAMO,kBAAkBL,GAAGM,aAAaR,cAAc,MAAA;AAGtD,QAAMS,kBAAkBF,gBACrBZ,QAAQ,yBAAyB,EAAA,EACjCe,KAAI;AAGP,QAAMC,WAAWF,gBAAgBG,SAAS,MAAA,KAAW,CAAClB,KAAKkB,SAAS,oBAAA,KAAyB,CAAClB,KAAKkB,SAAS,oBAAA;AAC5G,QAAMC,kBAAkBJ,gBAAgBG,SAAS,aAAA,KAAkB,CAAClB,KAAKkB,SAAS,YAAA;AAGlF,MAAIC,iBAAiB;AACnBnB,WAAOoB,uBAAuBpB,MAAM,uBAAuB,YAAA;EAC7D;AAGA,MAAIiB,YAAY,CAACjB,KAAKkB,SAAS,oBAAA,KAAyB,CAAClB,KAAKkB,SAAS,oBAAA,GAAuB;AAC5F,UAAMG,cAAcrB,KAAKsB,MAAM,wDAAA;AAC/B,QAAID,aAAa;AACf,YAAME,cAAcvB,KAAKwB,QAAQH,YAAY,CAAA,CAAE,IAAIA,YAAY,CAAA,EAAGI;AAClEzB,aAAOA,KAAK0B,MAAM,GAAGH,WAAAA,IAAe,yCAAyCvB,KAAK0B,MAAMH,WAAAA;IAC1F;EACF;AAGA,QAAMI,eAAe,GAAGC,cAAAA;;AACxB,MAAIC,iBAAiB;AAErB,MAAI7B,KAAK8B,WAAWH,YAAAA,GAAe;AACjCE,qBAAiBF,aAAaF;EAChC;AAEA,QAAMM,qBAAqB/B,KAAK0B,MAAMG,cAAAA,EAAgBP,MAAM,uBAAA;AAC5D,MAAIS,oBAAoB;AACtBF,sBAAkBE,mBAAmB,CAAA,EAAGN;EAC1C;AAGA,QAAMO,YAAY;EAAKjB,eAAAA;;;AACvB,SAAOf,KAAK0B,MAAM,GAAGG,cAAAA,IAAkBG,YAAYhC,KAAK0B,MAAMG,cAAAA;AAChE;AA1CgBjB;AA4CT,SAASQ,uBAAuBrB,QAAgBkC,aAAqBC,YAAkB;AAC5F,QAAMC,iBAAiBF,YAAYhC,QAAQ,OAAO,KAAA;AAClD,QAAMmC,cAAc,IAAIC,OAAO,iCAAiCF,cAAAA,QAAsB;AACtF,QAAMb,QAAQvB,OAAOuB,MAAMc,WAAAA;AAE3B,MAAI,CAACd,OAAO;AAEV,WAAOvB;EACT;AAEA,QAAMuC,cAAchB,MAAM,CAAA,EACvBiB,MAAM,GAAA,EACNC,IAAI,CAACC,OAAOA,GAAGzB,KAAI,CAAA,EACnB0B,OAAOC,OAAAA;AAEV,MAAIL,YAAYpB,SAASgB,UAAAA,GAAa;AACpC,WAAOnC;EACT;AAEAuC,cAAYM,KAAKV,UAAAA;AACjB,QAAMW,SAASC,MAAMC,KAAK,IAAIC,IAAIV,WAAAA,CAAAA;AAClC,QAAMW,cAAc,YAAYJ,OAAOK,KAAK,IAAA,CAAA,YAAiBjB,WAAAA;AAC7D,SAAOlC,OAAOE,QAAQmC,aAAaa,WAAAA;AACrC;AAvBg
B7B;;;AClJT,SAAS+B,uBAAuBC,QAAc;AACnD,QAAMC,aAAqC;IACzC,eAAe;IACf,eAAe;IACf,eAAe;IACf,eAAe;EACjB;AAEA,QAAMC,QAAQF,OAAOG,MAAM,IAAA;AAE3B,WAASC,IAAI,GAAGA,IAAIF,MAAMG,QAAQD,KAAK,GAAG;AACxC,UAAME,OAAOJ,MAAME,CAAAA;AACnB,UAAMG,QAAQC,OAAOC,QAAQR,UAAAA,EAAYS,KAAK,CAAC,CAACC,GAAAA,MAASL,KAAKM,SAAS,IAAID,GAAAA,GAAM,CAAA;AACjF,QAAI,CAACJ,OAAO;AACV;IACF;AAEA,UAAM,CAAA,EAAGM,WAAAA,IAAeN;AACxB,UAAMO,eAAeZ,MAAME,IAAI,CAAA,GAAIW,KAAAA,KAAU;AAC7C,QAAID,aAAaE,WAAW,IAAA,KAASF,aAAaF,SAAS,cAAA,GAAiB;AAC1E;IACF;AAEA,UAAMK,cAAcX,KAAKY,MAAM,MAAA;AAC/B,UAAMC,SAASF,cAAcA,YAAY,CAAA,IAAK;AAC9C,UAAMG,UAAU,GAAGD,MAAAA,oBAA0BN,WAAAA;AAC7CX,UAAMmB,OAAOjB,GAAG,GAAGgB,OAAAA;AACnBhB,SAAK;EACP;AAEA,SAAOF,MAAMoB,KAAK,IAAA;AACpB;AA/BgBvB;AAsCT,SAASwB,8BAA8BvB,QAAc;AAC1D,QAAMwB,iBAAyC;IAC7C,eAAe;IACf,eAAe;IACf,eAAe;IACf,eAAe;EACjB;AAEA,QAAMtB,QAAQF,OAAOG,MAAM,IAAA;AAC3B,QAAMsB,SAA+B,CAAA;AAGrC,MAAIC,UAAU;AACd,MAAIC,iBAAiB;AACrB,QAAMC,sBAAsB,oBAAIC,IAAAA;AAChC,MAAIC,eAAe;AAEnB,WAAS1B,IAAI,GAAGA,IAAIF,MAAMG,QAAQD,KAAK,GAAG;AACxC,UAAME,OAAOJ,MAAME,CAAAA;AAGnB,QAAI,CAACsB,WAAW,+CAA+CK,KAAKzB,IAAAA,GAAO;AACzEoB,gBAAU;AACVC,uBAAiBF,OAAOpB;AACxBuB,0BAAoBI,MAAK;AACzBF,qBAAe;IACjB;AAEA,QAAIJ,SAAS;AAEX,iBAAWO,QAAQ3B,MAAM;AACvB,YAAI2B,SAAS,IAAKH;AAClB,YAAIG,SAAS,IAAKH;MACpB;AAGA,iBAAWI,iBAAiB1B,OAAO2B,OAAOX,cAAAA,GAAiB;AACzD,YAAIlB,KAAKM,SAAS,IAAIsB,aAAAA,GAAgB,KAAK5B,KAAKM,SAAS,IAAIsB,aAAAA,GAAgB,GAAG;AAC9EN,8BAAoBQ,IAAIF,aAAAA;QAC1B;MACF;AAGA,UAAIJ,iBAAiB,KAAKxB,KAAKM,SAAS,IAAA,GAAO;AAC7Cc,kBAAU;AAGV,cAAMW,eAAeZ,OAAOpB;AAC5B,iBAASiC,IAAIX,gBAAgBW,KAAKD,cAAcC,KAAK;AACnD,gBAAMC,YAAYd,OAAOa,CAAAA,KAAM;AAC/B,cAAIE,eAAe;AAEnB,qBAAW,CAACC,aAAaP,aAAAA,KAAkB1B,OAAOC,QAAQe,cAAAA,GAAiB;AACzE,gBAAII,oBAAoBc,IAAIR,aAAAA,GAAgB;AAC1C,kBAAIK,UAAU3B,SAAS,IAAI6B,WAAAA,GAAc,KAAKF,UAAU3B,SAAS,IAAI6B,WAAAA,GAAc,GAAG;AACpFD,+BAAe;AAEf,oBAAIF,IAAI,KAAKb,OAAOa,IAAI,CAAA,GAAI1B,SAAS,kBAAA,GAAqB;AACxDa,yBAAOa,IAAI,CAAA,IAAK;gBAClB;AACA;cACF;YACF;UACF;AAEA,cAAIE,cAAc;AAChBf,mBAAOa,CAAAA,IAAK;UACd;QACF;MACF;IACF;AAEAb,WAAOkB,KAAKrC,IAAAA;EACd;AAGA,SAAOmB,OAAOmB,OAAOtC,CAAAA,SAAQA,SAAS,IAAA,EAAMgB,KAAK,IAAA;AACnD;AA7EgBC;;;AC9BT,SAASsB,uBAAuBC,QAAc;AACnD,MAAIC,QAAQ;AAGZ,QAAMC,OAAOF,OAAOG,QAAQ,mBAAmB,MAAA;AAC7CF,aAAS;AACT,WAAO;EACT,CAAA;AAEA,SAAO;IAAEC;IAAMD;EAAM;AACvB;AAVgBF;;;ACAT,SAASK,gCAAgCC,QAAc;AAC5D,MAAIC,WAAW;AAGf,QAAMC,UAAU;AAEhB,QAAMC,OAAOH,OAAOI,QAAQF,SAAS,CAACG,OAAOC,OAAOC,WAAWC,YAAAA;AAE7D,UAAMC,kBAAkB,uBAAuBC,KAAKF,OAAAA;AACpD,UAAMG,gBAAgB,yBAAyBD,KAAKF,OAAAA;AAEpD,QAAIC,mBAAmBE,eAAe;AACpCV,kBAAY;AACZ,aAAO,qBAAqBK,KAAAA,GAAQC,SAAAA,GAAYD,KAAAA;IAClD;AAGA,WAAOD;EACT,CAAA;AAEA,SAAO;IAAEF;IAAMF;EAAS;AAC1B;AArBgBF;AA8BT,SAASa,yBAAyBZ,QAAc;AACrD,MAAIC,WAAW;AAGf,QAAMC,UAAU;AAEhB,QAAMC,OAAOH,OAAOI,QAAQF,SAAS,MAAA;AACnCD,gBAAY;AACZ,WAAO;EACT,CAAA;AAEA,SAAO;IAAEE;IAAMF;EAAS;AAC1B;AAZgBW;;;AC3ChB,IAAMC,qBAAqB;AAEpB,SAASC,mBAAmBC,QAAc;AAC/C,QAAMC,cAAcD,OAAOE,QAAQ;EAAKJ,kBAAAA,EAAoB;AAC5D,QAAMK,OAAOF,gBAAgB,KAAKD,SAASA,OAAOI,MAAM,GAAGH,WAAAA;AAE3D,QAAMI,cAAc;AACpB,QAAMC,eAAe,oBAAIC,IAAAA;AAEzB,aAAWC,SAASL,KAAKM,SAASJ,WAAAA,GAAc;AAC9C,UAAMK,OAAOF,MAAM,CAAA;AACnBF,iBAAaK,IAAID,IAAAA;EACnB;AAEA,MAAIJ,aAAaM,SAAS,GAAG;AAE3B,WAAOT;EAET;AAEA,QAAMU,aAAaC,MAAMC,KAAKT,YAAAA,EAC3BU,KAAI,EACJC,IAAI,CAACP,SAAS,gBAAgBA,IAAAA,WAAeA,IAAAA,GAAO,EACpDQ,KAAK,IAAA;AAER,QAAMC,SAAShB,KAAKiB,QAAO;AAC3B,SAAO,GAAGD,MAAAA;;EAAarB,kBAAAA;EAAuBe,UAAAA;;AAChD;AAzBgBd;;;AT+ET,SAASsB,yBAAyBC,YAAkB;AACzD,QAAMC,eAAeC,MAAKC,QAAQH,UAAAA;AAClC,MAAI,CAACI,IAAGC,WAAWJ,YAAAA,GAAe;AAChCK,YAAQC,KAAK,gDAAgDN,YAAAA,EAAc;AAC3E,WAAOO;EACT;AAEA,MAAIC,OAAOL,IAAGM,aAAaT,cAAc,MAAA;AAGzCQ,SAAOE,oBAAoBF,IAAAA;A
AG3B,QAAMG,cAAcC,uBAAuBJ,IAAAA;AAC3CA,SAAOG,YAAYH;AAEnBA,SAAOK,2BAA2BL,IAAAA;AAClC,QAAMM,kBAAkBC,8BAA8BP,IAAAA;AACtDA,SAAOM,gBAAgBN;AACvB,QAAMQ,eAAeC,uBAAuBT,IAAAA;AAC5CA,SAAOQ,aAAaR;AACpBA,SAAOU,gCAAgCV,MAAMQ,aAAaG,OAAO;AAEjE,QAAMC,cAAcC,sBAAsBb,IAAAA;AAC1CA,SAAOY,YAAYZ;AAGnB,QAAMc,uBAAuBC,gCAAgCf,IAAAA;AAC7DA,SAAOc,qBAAqBd;AAG5B,QAAMgB,wBAAwBC,yBAAyBjB,IAAAA;AACvDA,SAAOgB,sBAAsBhB;AAG7BA,SAAOkB,8BAA8BlB,IAAAA;AAErCA,SAAOmB,uBAAuBnB,IAAAA;AAE9BA,SAAOoB,aAAapB,IAAAA;AAEpBA,SAAOqB,kBAAkBrB,IAAAA;AAEzBA,SAAOsB,mBAAmBtB,IAAAA;AAE1BA,SAAOA,KAAKuB,QAAQ,UAAU,IAAA;AAC9BvB,SAAOwB,wBAAwBxB,IAAAA;AAE/BL,EAAAA,IAAG8B,cAAcjC,cAAcQ,MAAM,MAAA;AAErC,MAAIG,YAAYuB,QAAQ,GAAG;AACzB7B,YAAQ8B,KAAK,wCAAwCxB,YAAYuB,KAAK,oDAAoD;EAC5H;AACA,MAAId,YAAYgB,WAAW,GAAG;AAC5B/B,YAAQ8B,KAAK,yCAAyCf,YAAYgB,QAAQ,kBAAkB;EAC9F;AACA,MAAIhB,YAAYiB,UAAUC,SAAS,GAAG;AACpCjC,YAAQC,KAAK,wDAAwDc,YAAYiB,UAAUC,MAAM;AACjGlB,gBAAYiB,UAAUE,QAAQ,CAACC,SAASnC,QAAQC,KAAK,KAAKkC,IAAAA,EAAM,CAAA;EAClE;AACA,MAAI1B,gBAAgB2B,YAAY,GAAG;AACjCpC,YAAQ8B,KAAK,0CAA0CrB,gBAAgB2B,SAAS,sCAAsC;EACxH;AACA,MAAInB,qBAAqBc,WAAW,GAAG;AACrC/B,YAAQ8B,KAAK,yCAAyCb,qBAAqBc,QAAQ,0CAA0C;EAC/H;AACA,MAAIZ,sBAAsBY,WAAW,GAAG;AACtC/B,YAAQ8B,KAAK,yCAAyCX,sBAAsBY,QAAQ,wDAAwD;EAC9I;AAEA,SAAO;IACLM,iBAAiBtB,YAAYgB;IAC7BO,kBAAkBvB,YAAYiB;IAC9BO,gBAAgBjC,YAAYuB;IAC5BW,oBAAoBvB,qBAAqBc;IACzCU,oBAAoBtB,sBAAsBY;EAC5C;AACF;AA7EgBtC;;;AUjFhB,SAASiD,iBAAiB;;;ACGnB,SAASC,mBAAmBC,OAAgB;AACjD,QAAMC,UAAkC;;IAEtCC,MAAM;IACNC,SAAS;IACTC,MAAM;;IAGNC,UAAU;IACVC,SAAS;IACTC,KAAK;IACLC,QAAQ;IACRC,QAAQ;IACRC,aAAa;IACbC,WAAW;;IAGXC,SAAS;IACTC,SAAS;IACTC,MAAM;IACNC,iBAAiB;;IAGjBC,SAAS;;IAGTC,WAAW;IACXC,aAAa;IACbC,MAAM;IACNC,MAAM;IACNC,QAAQ;IACRC,UAAU;;IAGVC,MAAM;;IAGNC,MAAM;IACNC,OAAO;;IAGPC,OAAO;;IAGPC,MAAM;IACNC,MAAM;IACNC,SAAS;IACTC,UAAU;;IAGVC,OAAO;IACPC,MAAM;IACNC,MAAM;IACNC,KAAK;IACLC,MAAM;IACNC,SAAS;IACTC,QAAQ;;IAGRC,OAAO;;IAGPC,YAAY;IACZC,mBAAmB;IACnBC,aAAa;IACbC,gBAAgB;;IAGhBC,QAAQ;EACV;AAEA,MAAIC,WAAW3C,QAAQD,MAAM6C,IAAI,KAAK;AAGtC,MAAI7C,MAAM8C,SAAS;AACjBF,eAAWA,SAASG,SAAS,IAAA,IAAQH,WAAW,GAAGA,QAAAA;EACrD;AAGA,MAAI5C,MAAMgD,cAAchD,MAAMgD,WAAWC,SAAS,GAAG;AACnDL,eAAW5C,MAAMgD,WAAWE,IAAI,CAACC,MAAM,IAAIA,CAAAA,GAAI,EAAEC,KAAK,KAAA;EACxD;AAEA,SAAOR;AACT;AApFgB7C;AAuFT,SAASsD,aAAaC,KAAW;AACtC,SAAOA,IACJC,QAAQ,mBAAmB,OAAA,EAC3BC,MAAM,QAAA,EACNN,IAAI,CAACO,SAASA,KAAKC,OAAO,CAAA,EAAGC,YAAW,IAAKF,KAAKG,MAAM,CAAA,CAAA,EACxDR,KAAK,EAAA;AACV;AANgBC;AAQT,SAASQ,YAAYP,KAAW;AACrC,SAAOA,IACJC,QAAQ,mBAAmB,OAAA,EAC3BO,YAAW,EACXP,QAAQ,UAAU,GAAA;AACvB;AALgBM;AAOT,SAASE,YAAYT,KAAW;AACrC,SAAOA,IACJC,QAAQ,mBAAmB,OAAA,EAC3BO,YAAW,EACXP,QAAQ,UAAU,GAAA;AACvB;AALgBQ;;;ADrGT,SAASC,YAAYC,OAAgB;AAC1C,QAAMC,YAAYC,aAAaF,MAAMG,YAAY;AAEjD,MAAIC,MAAM;;;;;;;AAOVA,SAAO,sBAAsBH,SAAAA;;AAE7B,aAAWI,SAASL,MAAMM,QAAQ;AAEhC,QACED,MAAME,gBAAgBF,MAAMG,SAAS,QACrCH,MAAMG,KAAKC,WAAW,GAAA,KACtBJ,MAAMG,KAAKC,WAAW,SAAA,KACtBJ,MAAMG,KAAKC,WAAW,SAAA,GACtB;AACA;IACF;AAMA,UAAMC,SAASC,mBAAmBN,KAAAA;AAClC,UAAMO,WAAWP,MAAMQ,YAAYR,MAAMS,aAAa,MAAM;AAG5D,UAAMC,aAAaC,6BAA6BX,KAAAA;AAChD,QAAIU,YAAY;AACdX,aAAOW;IACT;AAEAX,WAAO,KAAKC,MAAMG,IAAI,GAAGI,QAAAA,KAAaF,MAAAA;;;EACxC;AAEAN,SAAO;AAGPA,SAAO,sBAAsBH,SAAAA;;AAE7B,aAAWI,SAASL,MAAMM,QAAQ;AAEhC,QACED,MAAMG,KAAKC,WAAW,GAAA,KACtBJ,MAAMG,KAAKC,WAAW,SAAA,KACtBJ,MAAMG,KAAKC,WAAW,SAAA,KACtBJ,MAAME,gBAAgBF,MAAMG,SAAS,MACrC;AACA;IACF;AAOA,UAAME,SAASC,mBAAmBN,KAAAA;AAElC,UAAMU,aAAaC,6BAA6BX,OAAO;MACrDY,UAAU;IACZ,CAAA;AACA,QAAIF,YAAY;AACdX,aAAOW;IACT;AAEAX,WAAO,KAAKC,MAAMG,IAAI,MAAME,MAAAA;;;EAC9B;AAEAN,SAAO;AAGPA,SAAO,gBAAgBH,SAAAA;;AAEvB,aAAWI,SAASL,MAAMM,QAAQ;AAKhC,UAAMI,SAASC,mBAAmBN,KAAAA;AAClC,UAAMO,WAAW
P,MAAMQ,WAAW,MAAM;AAExC,UAAME,aAAaC,6BAA6BX,OAAO;MACrDa,YAAY;IACd,CAAA;AACA,QAAIH,YAAY;AACdX,aAAOW;IACT;AAEAX,WAAO,KAAKC,MAAMG,IAAI,GAAGI,QAAAA,KAAaF,MAAAA;;;EACxC;AAEAN,SAAO;AAEP,SAAOA;AACT;AAlGgBL;AAqGT,SAASiB,6BAA6BX,OAAkB,EAC7DY,WAAW,OACXC,aAAa,MAAK,IAC8B,CAAC,GAAC;AAClD,MAAIH,aAAa;AAEjB,MAAIV,MAAMQ,YAAa,CAACK,cAAcb,MAAMS,cAAeG,UAAU;AACnEF,kBAAc,0CAA0CV,MAAMc,WAAWd,MAAMG,IAAI;;AACnF,QAAIU,YAAY;AACd,aAAOH;IACT;AACAA,kBAAc;EAChB,OAAO;AACLA,kBAAc,kCAAkCV,MAAMc,WAAWd,MAAMG,IAAI;;AAC3E,QAAIU,YAAY;AACd,aAAOH;IACT;AACAA,kBAAc;EAChB;AAGA,UAAQV,MAAMe,MAAI;IAChB,KAAK;IACL,KAAK;IACL,KAAK;AACHL,oBAAc;AACd,UAAIV,MAAMgB,QAAQ;AAChBN,sBAAc,gBAAgBV,MAAMgB,MAAM;;MAC5C;AACA;IAEF,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK;AACHN,oBAAc;AACd;IAEF,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK;AACHA,oBAAc;AACd;IAEF,KAAK;AACHA,oBAAc;AACd;IAEF,KAAK;AACHA,oBAAc;AACd;IAEF,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK;AACHA,oBAAc;AACd;IAEF,KAAK;IACL,KAAK;AACHA,oBAAc;AACd;EAMJ;AAEA,MAAIV,MAAMiB,SAAS;AACjBP,kBAAc;EAChB;AAUA,SAAOA;AACT;AApFgBC;AAuFT,SAASO,mBAAmBvB,OAAgB;AACjD,QAAMC,YAAYC,aAAaF,MAAMG,YAAY;AACjD,QAAMqB,YAAYC,YAAYC,UAAU1B,MAAMG,YAAY,CAAA;AAC1D,QAAMwB,WAAWC,YAAY5B,MAAMG,YAAY;AAG/C,QAAM0B,UAAU7B,MAAMM,OAAOwB,KAAK,CAACC,MAAMA,EAAExB,YAAY;AACvD,QAAMyB,SAASH,UAAUlB,mBAAmBkB,OAAAA,IAAW;AACvD,QAAMI,SAASJ,UAAUA,QAAQrB,OAAO;AAExC,QAAM0B,aAAa;;;;;;;;;;;;;;;;;;;UAmBXjC,SAAAA;UACAA,SAAAA;IACNA,SAAAA;iBACa0B,QAAAA;WACN1B,SAAAA,qBAA8B0B,QAAAA;;YAE7BzB,aAAaF,MAAMG,YAAY,CAAA;mBACxBqB,SAAAA;eACJvB,SAAAA;iCACkBD,MAAMG,YAAY,YAAYF,SAAAA;;;;;;;;;YASnDA,SAAAA;;;+BAGmBA,SAAAA;eAChBA,SAAAA;kBACGD,MAAMG,YAAY;;;;;;;;;YASxBF,SAAAA;;WAEDgC,MAAAA;;cAEGA,MAAAA,MAAYA,MAAAA,KAAWD,MAAAA;eACtB/B,SAAAA;kBACGD,MAAMG,YAAY,mBAAmB8B,MAAAA;;;;;;;;;YAS3ChC,SAAAA;;WAEDgC,MAAAA;;cAEGA,MAAAA,MAAYA,MAAAA,KAAWD,MAAAA;+BACN/B,SAAAA;eAChBA,SAAAA;kBACGD,MAAMG,YAAY,kBAAkB8B,MAAAA;;;;;;;;;;cAUxCA,MAAAA;;cAEAA,MAAAA,MAAYA,MAAAA,KAAWD,MAAAA;;kBAEnBhC,MAAMG,YAAY,kBAAkB8B,MAAAA;;;;AAKpD,SAAOC;AACT;AAvGgBX;AA0GT,SAASY,gBAAgBnC,OAAgB;AAC9C,QAAMC,YAAYC,aAAaF,MAAMG,YAAY;AACjD,QAAMwB,WAAWC,YAAY5B,MAAMG,YAAY;AAE/C,QAAM0B,UAAU7B,MAAMM,OAAOwB,KAAK,CAACC,MAAMA,EAAExB,YAAY;AACvD,QAAMyB,SAASH,UAAUlB,mBAAmBkB,OAAAA,IAAW;AACvD,QAAMI,SAASJ,UAAUA,QAAQrB,OAAO;AAExC,QAAM4B,UAAU;;;;;WAKPpC,MAAMG,YAAY;;UAEnBF,SAAAA;UACAA,SAAAA;IACNA,SAAAA;iBACa0B,QAAAA;;;eAGF1B,SAAAA;yCAC0BA,SAAAA;;;;kCAIPA,SAAAA,iBAA0BA,SAAAA;;gBAE5CD,MAAMG,YAAY;;;;gCAIFF,SAAAA,SAAkBgC,MAAAA,cAAoBA,MAAAA;;;;;wEAKEhC,SAAAA;;;;;;cAM1DD,MAAMG,YAAY;;;;;kBAKd8B,MAAAA,KAAWD,MAAAA,cAAoB/B,SAAAA;;;cAGnCD,MAAMG,YAAY;kBACdH,MAAMG,YAAY,IAAI8B,MAAAA,KAAWA,MAAAA;;;;sCAIbhC,SAAAA,SAAkBgC,MAAAA,OAAaA,MAAAA;;;;;;iBAMpDA,MAAAA,KAAWD,MAAAA,sBAA4B/B,SAAAA,iBAA0BA,SAAAA;;gBAElED,MAAMG,YAAY;;kBAEhBH,MAAMG,YAAY,IAAI8B,MAAAA,KAAWA,MAAAA;;;;sCAIbhC,SAAAA,SAAkBgC,MAAAA,OAAaA,MAAAA;;;;;;iBAMpDA,MAAAA,KAAWD,MAAAA;;gBAEZhC,MAAMG,YAAY;kBAChBH,MAAMG,YAAY,IAAI8B,MAAAA,KAAWA,MAAAA;;;;sCAIbhC,SAAAA,SAAkBgC,MAAAA,OAAaA,MAAAA;;;gCAGrChC,SAAAA,SAAkBgC,MAAAA,OAAaA,MAAAA;;;;AAK7D,SAAOG;AACT;AA5FgBD;AA8FT,SAASE,eAAerC,OAAgB;AAC7C,QAAMC,YAAYC,aAAaF,MAAMG,YAAY;AACjD,QAAMwB,WAAWC,YAAY5B,MAAMG,YAAY;AAE/C,QAAMmC,SAAS;;WAENrC,SAAAA,wBAAiC0B,QAAAA;WACjC1B,SAAAA,qBAA8B0B,QAAAA;;;kBAGvB1B,SAAAA;gBACFA,SAAAA;;eAEDA,SAAAA;;AAGb,SAAOqC;AACT;AAjBgBD;;;AExYhB,SAASE,SAAyBC,YAA4B;AAgC9D,IAAMC,sBAAN,MAAMA,qBAAAA;EAhCN,OAgCMA;;;EACIC;EAER,YAAYC,gBAAgC;AAC1C,SAAKD,UAAU,IAAIE,QAAQD,cAAAA;EAC7B;EAEAE,gBAAgBC,UAA+B;AAC7C,UAAMC,aAAa,KAAKL,QAAQM,oBAAoBF,QAAAA;AACpD,UAAMG,SAAsB,CAAA;AAG5B,UAAMC,qBAAqBH,WAAWI,sBAAqB;AAE3D,eAAWC,aAAaF,oBAAoB;AAC1C,YAAMG,eAAeD,UAAUE,gBAAe;AAE9C,iBAAWC,eAAeF,cAAc;AACtC,cAA
MG,cAAcD,YAAYE,eAAc;AAE9C,YAAID,eAAeE,KAAKC,iBAAiBH,WAAAA,GAAc;AACrD,gBAAMI,aAAaJ,YAAYK,cAAa;AAG5C,cAAID,WAAWE,QAAO,MAAO,WAAW;AACtC,kBAAMC,YAAY,KAAKC,aACrBT,YAAYU,QAAO,GACnBT,WAAAA;AAEF,gBAAIO,WAAW;AACbd,qBAAOiB,KAAKH,SAAAA;YACd;UACF;QACF;MACF;IACF;AAEA,WAAOd;EACT;EAEQe,aACNG,cACAC,UACkB;AAClB,UAAMC,OAAOD,SAASE,aAAY;AAElC,QAAID,KAAKE,SAAS,GAAG;AACnB,aAAO;IACT;AAGA,UAAMC,YAAYH,KAAK,CAAA,EAAGP,QAAO,EAAGW,QAAQ,SAAS,EAAA;AAGrD,UAAMC,YAAYL,KAAK,CAAA;AAEvB,QAAI,CAACX,KAAKiB,0BAA0BD,SAAAA,GAAY;AAC9C,aAAO;IACT;AAEA,UAAME,SAAsB,CAAA;AAG5B,UAAMC,aAAaH,UAAUI,cAAa;AAE1C,eAAWC,QAAQF,YAAY;AAC7B,UAAInB,KAAKsB,qBAAqBD,IAAAA,GAAO;AACnC,cAAME,YAAYF,KAAKd,QAAO;AAC9B,cAAMT,cAAcuB,KAAKtB,eAAc;AAGvC,cAAMyB,kBAAkBH,KAAKI,wBAAuB;AACpD,YAAIC;AAEJ,YAAIF,gBAAgBX,SAAS,GAAG;AAC9Ba,oBAAUF,gBACPG,IAAI,CAACC,MAAMA,EAAExB,QAAO,CAAA,EACpByB,KAAK,IAAA,EACLd,QAAQ,SAAS,EAAA,EACjBe,KAAI;QACT;AAEA,YAAIhC,eAAeE,KAAKC,iBAAiBH,WAAAA,GAAc;AACrD,gBAAMiC,YAAY,KAAKC,WAAWT,WAAWzB,aAAa4B,OAAAA;AAC1DR,iBAAOV,KAAKuB,SAAAA;QACd;MACF;IACF;AAEA,WAAO;MACLjB;MACAL;MACAS;IACF;EACF;EAEQc,WACNT,WACAb,UACAgB,SACW;AACX,UAAMK,YAAuB;MAC3BE,MAAMV;MACNW,YAAYX;MACZY,MAAM;MACNC,UAAU;MACVC,YAAY;MACZC,SAAS;MACTC,cAAc;MACdC,UAAU;MACVC,SAAS;MACTf;IACF;AAGA,SAAKgB,cAAchC,UAAUqB,SAAAA;AAG7B,SAAKY,eAAejC,UAAUqB,SAAAA;AAE9B,WAAOA;EACT;EAEQW,cAAchC,UAA0BqB,WAA4B;AAE1E,QAAIa,UAAgBlC;AACpB,QAAImC,WAAkC;AAEtC,WAAO7C,KAAKC,iBAAiB2C,OAAAA,GAAU;AACrCC,iBAAWD;AACX,YAAM1C,cAAa0C,QAAQzC,cAAa;AAExC,UAAIH,KAAK8C,2BAA2B5C,WAAAA,GAAa;AAC/C0C,kBAAU1C,YAAWC,cAAa;MACpC,OAAO;AACL;MACF;IACF;AAEA,QAAI,CAAC0C,UAAU;AACb;IACF;AAEA,UAAM3C,aAAa2C,SAAS1C,cAAa;AACzC,QAAI4C,WAAW;AAEf,QAAI/C,KAAK8C,2BAA2B5C,UAAAA,GAAa;AAC/C6C,iBAAW7C,WAAWK,QAAO;IAC/B,OAAO;AACLwC,iBAAW7C,WAAWE,QAAO;IAC/B;AAEA2B,cAAUI,OAAOY;AAGjB,UAAMpC,OAAOkC,SAASjC,aAAY;AAElC,QAAID,KAAKE,SAAS,GAAG;AACnB,YAAMmC,WAAWrC,KAAK,CAAA;AAGtB,UAAIX,KAAKiD,gBAAgBD,QAAAA,GAAW;AAClCjB,kBAAUG,aAAac,SAASE,eAAc;MAChD,WAESlD,KAAKiB,0BAA0B+B,QAAAA,GAAW;AACjD,aAAKG,gBAAgBH,UAAUjB,SAAAA;MACjC,WAES/B,KAAKoD,yBAAyBJ,QAAAA,GAAW;AAChDjB,kBAAUsB,aAAaL,SACpBM,YAAW,EACX3B,IAAI,CAAC4B,OAAOA,GAAGnD,QAAO,EAAGW,QAAQ,SAAS,EAAA,CAAA;MAC/C;IACF;AAGA,QAAIJ,KAAKE,SAAS,KAAKb,KAAKiB,0BAA0BN,KAAK,CAAA,CAAE,GAAG;AAC9D,WAAKwC,gBAAgBxC,KAAK,CAAA,GAAIoB,SAAAA;IAChC;EACF;EAEQoB,gBAAgBK,YAAkBzB,WAA4B;AACpE,QAAI,CAAC/B,KAAKiB,0BAA0BuC,UAAAA,GAAa;AAC/C;IACF;AAEA,UAAMrC,aAAaqC,WAAWpC,cAAa;AAE3C,eAAWC,QAAQF,YAAY;AAC7B,UAAInB,KAAKsB,qBAAqBD,IAAAA,GAAO;AACnC,cAAMoC,WAAWpC,KAAKd,QAAO;AAC7B,cAAMmD,QAAQrC,KAAKtB,eAAc,GAAIK,QAAAA;AAErC,gBAAQqD,UAAAA;UACN,KAAK;AACH1B,sBAAUlB,SAAS6C,QAAQC,SAASD,KAAAA,IAASE;AAC7C;UACF,KAAK;AACH7B,sBAAU8B,YAAYH,QAAQC,SAASD,KAAAA,IAASE;AAChD;UACF,KAAK;AACH7B,sBAAU+B,QAAQJ,QAAQC,SAASD,KAAAA,IAASE;AAC5C;UACF,KAAK;AACH7B,sBAAUM,aAAa;AACvBN,sBAAUgC,eAAeL;AACzB;;UAEF,KAAK;AACH3B,sBAAUiC,eAAeN,UAAU;AACnC;UACF,KAAK;AAGH3B,sBAAUkC,OAAOP,OAAO3C,QAAQ,SAAS,EAAA;AAOzC;UACF;AACE,kBAAM,IAAImD,MAAM,yBAAyBT,QAAAA,EAAU;QACvD;MACF;IACF;EACF;EAEQd,eAAejC,UAA0BqB,WAA4B;AAC3E,QAAIa,UAAgBlC;AAEpB,WAAOV,KAAKC,iBAAiB2C,OAAAA,GAAU;AACrC,YAAM1C,aAAa0C,QAAQzC,cAAa;AAExC,UAAIH,KAAK8C,2BAA2B5C,UAAAA,GAAa;AAC/C,cAAMiE,aAAajE,WAAWK,QAAO;AACrC,cAAMI,OAAOiC,QAAQhC,aAAY;AAEjC,gBAAQuD,YAAAA;UACN,KAAK;AACHpC,sBAAUO,UAAU;AACpBP,sBAAUK,WAAW;AACrB;UAEF,KAAK;AACHL,sBAAUM,aAAa;AACvB,gBAAI1B,KAAKE,SAAS,GAAG;AACnBkB,wBAAUgC,eAAepD,KAAK,CAAA,EAAGP,QAAO;YAC1C;AACA;UAEF,KAAK;AACH2B,sBAAUM,aAAa;AACvBN,sBAAUgC,eAAe;AACzB;UAEF,KAAK;AACHhC,sBAAUQ,eAAe;AACzBR,sBAAUO,UAAU;AACpBP,sBAAUK,WAAW;AACrB;UAEF,KAAK;AACHL,sBAAUS,WAAW;AACrB;UAEF,KAAK;AACHT,sBAAUU,UAAU;AACpB;UAEF,KAAK;AACH,gBAAI9B,KAAKE,SAAS,GAAG
;AACnB,oBAAMuD,SAASzD,KAAK,CAAA,EAAGP,QAAO;AAE9B,oBAAMiE,QAAQD,OAAOC,MAAM,mBAAA;AAC3B,kBAAIA,OAAO;AACTtC,0BAAUuC,aAAa;kBACrBC,OAAOF,MAAM,CAAA;kBACbG,QAAQH,MAAM,CAAA;gBAChB;cACF;YACF;AACA;UACF;AACE,kBAAM,IAAIH,MAAM,uBAAuBC,UAAAA,EAAY;QACvD;AAEAvB,kBAAU1C,WAAWC,cAAa;MACpC,OAAO;AACL;MACF;IACF;EACF;AACF;;;ACrUA,SAASsE,YAAY;AAErB,SAASC,OAAOC,IAAIC,iBAAiB;AACrC,SAASC,kBAAkB;AAQ3B,eAAsBC,qCAAqCC,SAAgB;AACzE,QAAMC,SAAS,IAAIC,oBAAoB;IACrCC,kBAAkBH,QAAQG;EAC5B,CAAA;AACA,QAAMC,SAASH,OAAOI,gBAAgBL,QAAQM,cAAc;AAE5D,MAAIF,OAAOG,WAAW,GAAG;AACvBC,YAAQC,KAAK,6HAAA;AACb;EACF;AAGAL,SAAOM,KAAK,CAACC,GAAGC,MAAMA,EAAEC,aAAaN,SAASI,EAAEE,aAAaN,MAAM;AAEnE,QAAMO,QAAQV,OAAO,CAAA;AAGrBI,UAAQO,KAAK,wBAAcD,MAAMD,YAAY,eAAK;AAClD,QAAMG,WAAWC,YAAYH,MAAMD,YAAY;AAC/C,QAAMK,YAAYC,KAAKnB,QAAQoB,iBAAiBJ,QAAAA;AAChD,MAAIK,WAAWH,SAAAA,GAAY;AACzBV,YAAQO,KAAK,wBAAcC,QAAAA,+DAAqB;AAChD;EACF;AACA,QAAMM,MAAMC,YAAYT,KAAAA;AACxB,QAAMU,aAAaC,mBAAmBX,KAAAA;AACtC,QAAMY,UAAUC,gBAAgBb,KAAAA;AAChC,QAAMc,iBAAiBT,KAAKD,WAAW,GAAGF,QAAAA,YAAoB;AAC9D,QAAMa,SAASC,eAAehB,KAAAA;AAE9B,MAAI;AACF,UAAMiB,MAAMb,WAAW;MAAEc,WAAW;IAAK,CAAA;AACzC,UAAMD,MAAMZ,KAAKD,WAAW,MAAA,GAAS;MAAEc,WAAW;IAAK,CAAA;AACvD,UAAMC,UAAUd,KAAKD,WAAW,QAAQ,GAAGF,QAAAA,SAAiB,GAAGM,GAAAA;AAC/D,UAAMW,UAAUd,KAAKD,WAAW,GAAGF,QAAAA,gBAAwB,GAAGQ,UAAAA;AAC9D,UAAMS,UAAUd,KAAKD,WAAW,GAAGF,QAAAA,aAAqB,GAAGU,OAAAA;AAC3D,UAAMO,UAAUL,gBAAgBC,MAAAA;EAClC,SAASK,KAAK;AACZ1B,YAAQ2B,MAAM,wBAAcnB,QAAAA,8BAAmBkB,IAAcE,OAAO,EAAE;AACtE,UAAMC,GAAGnB,WAAW;MAAEc,WAAW;IAAK,CAAA;EACxC;AACF;AAzCsBjC;;;ACbtB,OAAOuC,SAAQ;AACf,OAAOC,WAAU;AACjB,OAAOC,UAAU;AACjB,OAAOC,WAAW;AAKlB,IAAIC,oBAAmC;AAmCvC,SAASC,kBAAkBC,KAAU;AACnC,QAAMC,OAAQD,IAA8BC;AAC5C,QAAMC,uBAAuB;IAAC;IAAgB;IAAc;IAAa;IAAa;;AACtF,SAAOA,qBAAqBC,SAASF,QAAQ,EAAA;AAC/C;AAJSF;AAUT,SAASK,sBAAsBC,WAAmBC,UAAU,KAAI;AAC9D,SAAO,IAAIC,QAAQ,CAACC,YAAAA;AAClB,QAAI;AACF,YAAMC,MAAM,IAAIC,IAAIL,SAAAA;AACpB,YAAMM,UAAUF,IAAIG,aAAa;AACjC,YAAMC,aAAaF,UAAUG,QAAQC;AAErC,YAAMC,MAAMH,WAAWI,QACrB;QACEC,UAAUT,IAAIS;QACdC,MAAMV,IAAIU,SAASR,UAAU,MAAM;QACnCS,MAAM;QACNC,QAAQ;QACRf;MACF,GACA,CAACgB,QAAAA;AAIC,cAAMC,YAAYD,IAAIE,eAAe,OAAO,CAACF,IAAIG,QAAQ,oBAAA;AACzDjB,gBAAQe,SAAAA;MACV,CAAA;AAGFP,UAAIU,GAAG,WAAW,MAAA;AAChBV,YAAIW,QAAO;AACXnB,gBAAQ,KAAA;MACV,CAAA;AAEAQ,UAAIU,GAAG,SAAS,MAAA;AACdlB,gBAAQ,KAAA;MACV,CAAA;AAEAQ,UAAIY,IAAG;IACT,SAASC,GAAG;AACVrB,cAAQ,KAAA;IACV;EACF,CAAA;AACF;AAtCSJ;AA2CT,eAAe0B,uBACbzB,WACAC,SACAyB,UAAgB;AAEhB,QAAMC,YAAYC,KAAKC,IAAG;AAE1B,SAAOD,KAAKC,IAAG,IAAKF,YAAY1B,SAAS;AACvC,UAAM6B,cAAc,MAAM/B,sBAAsBC,WAAW,GAAA;AAC3D,QAAI8B,aAAa;AACf,aAAO;IACT;AAEA,UAAM,IAAI5B,QAAQC,CAAAA,YAAW4B,WAAW5B,SAASuB,QAAAA,CAAAA;EACnD;AAEA,SAAO;AACT;AAjBeD;AAuBf,SAASO,aAAAA;AACP,SAAOC;AACT;AAFSD;AAOT,SAASE,uBAAAA;AACP,MAAI,CAACzC,mBAAmB;AACtB,UAAM0C,UAAUH,WAAAA;AAChB,UAAMI,WAAWrB,MAAKsB,KAAKF,SAAS,YAAA;AACpC1C,wBAAoB6C,IAAGC,aAAaH,UAAU,OAAA;EAChD;AACA,SAAO3C;AACT;AAPSyC;AAaT,SAASM,aAAaC,MAAY;AAChC,QAAMC,UAAUD,KAAKE,KAAI;AACzB,MAAI,CAACD,QAAS,QAAO;AAIrB,QAAME,QAAQF,QAAQE,MAAM,iEAAA;AAC5B,MAAIA,OAAO;AACT,UAAMC,UAAUD,MAAM,CAAA,EAAGD,KAAI;AAE7B,WAAOE,WAAW;EACpB;AAGA,SAAO;AACT;AAfSL;AA+BT,eAAeM,oBACbC,QACAC,SACAC,UAAgB;AAEhB,QAAMC,cAAcnC,MAAKsB,KAAKU,QAAQE,QAAAA;AAGtC,MAAIE;AACJ,MAAI;AACFA,gBAAY,MAAMb,IAAGc,SAASC,KAAKH,WAAAA;EACrC,QAAQ;AACN,WAAO;MAAEI,MAAM,CAAA;MAAIC,iBAAiB;IAAM;EAC5C;AAEA,QAAMC,WAAWL,UAAUM;AAG3B,QAAMC,cAAc,OAAO;AAC3B,QAAMC,WAAWC,KAAKC,IAAIL,UAAUE,WAAAA;AACpC,QAAMI,gBAAgBF,KAAKG,IAAI,GAAGP,WAAWG,QAAAA;AAG7C,QAAMK,SAASC,OAAOC,YAAYP,QAAAA;AAClC,MAAIQ;AAEJ,MAAI;AACFA,iBAAa,MAAM7B,IAAGc,SAASgB,KAAKlB,aAAa,GAAA;AACjD,UAAMiB,WAAWE,KAAKL,QAAQ,GAAGL,UAAUG,aAAAA;EAC7C,SAASQ,O
AAO;AACdC,YAAQD,MAAM,2CAA2CA,KAAAA;AACzD,WAAO;MAAEhB,MAAM,CAAA;MAAIC,iBAAiB;IAAM;EAC5C,UAAA;AACE,QAAIY,YAAY;AACd,YAAMA,WAAWK,MAAK;IACxB;EACF;AAGA,QAAM3B,UAAUmB,OAAOS,SAAS,MAAA;AAChC,QAAMC,QAAQ7B,QAAQ8B,MAAM,IAAA;AAG5B,MAAIb,gBAAgB,KAAKY,MAAME,SAAS,GAAG;AACzCF,UAAMG,MAAK;EACb;AAGA,QAAMC,WAAqB,CAAA;AAC3B,aAAWrC,QAAQiC,OAAO;AACxB,UAAMK,SAASvC,aAAaC,IAAAA;AAC5B,QAAIsC,WAAW,MAAM;AACnBD,eAASE,KAAKD,MAAAA;IAChB;EACF;AAGA,MAAIE,aAAa;AACjB,WAASC,IAAIJ,SAASF,SAAS,GAAGM,KAAK,GAAGA,KAAK;AAC7C,UAAMzC,OAAOqC,SAASI,CAAAA;AAItB,QACEzC,KAAK3C,SAAS,oCAAA,KACd2C,KAAK3C,SAAS,wDAAA,GACd;AACAmF,mBAAaC;AACb;IACF;EACF;AAGA,MAAID,eAAe,IAAI;AACrBV,YAAQY,IAAI,uEAAA;AACZ,UAAMC,eAAeN,SAASO,MAAM,CAACrC,OAAAA;AACrC,UAAMO,mBAAkB+B,eAAeF,YAAAA;AACvC,WAAO;MAAE9B,MAAM8B;MAAc7B,iBAAAA;IAAgB;EAC/C;AAGA,MAAIgC,WAAWT,SAASF;AAExB,WAASM,IAAID,aAAa,GAAGC,IAAIJ,SAASF,QAAQM,KAAK;AACrD,UAAMzC,OAAOqC,SAASI,CAAAA;AAEtB,QACEzC,KAAK3C,SAAS,oCAAA,KACd2C,KAAK3C,SAAS,wDAAA,GACd;AACAyF,iBAAWL;AACX;IACF;EACF;AAGA,QAAMM,eAAeV,SAASO,MAAMJ,YAAYM,QAAAA;AAGhD,QAAMhC,kBAAkB+B,eAAeE,YAAAA;AAGvC,QAAMlC,OAAOkC,aAAaZ,SAAS5B,UAC/BwC,aAAaH,MAAM,CAACrC,OAAAA,IACpBwC;AAEJ,SAAO;IAAElC;IAAMC;EAAgB;AACjC;AA3GeT;AAiHf,SAASwC,eAAehC,MAAc;AACpC,aAAWb,QAAQa,MAAM;AAEvB,UAAMmC,oBAAoBhD,KAAKG,MAAM,iDAAA;AACrC,QAAI6C,mBAAmB;AACrB,YAAMC,aAAaC,SAASF,kBAAkB,CAAA,GAAI,EAAA;AAClD,UAAIC,aAAa,GAAG;AAClBnB,gBAAQY,IAAI,wBAAwBO,UAAAA,uBAAiC;AACrE,eAAO;MACT;IACF;EACF;AAEA,SAAO;AACT;AAdSJ;AAmBT,SAASM,mBACPC,UACAC,gBAAsB;AAGtB,SAAOD,SAASE,QAAQ,uBAAuBD,cAAAA;AACjD;AANSF;AAoCF,SAASI,oBACdrG,KACAgB,KACAM,KACAgF,SAA2B;AAE3B,QAAM,EACJlD,SAAShC,MAAKsB,KAAK6D,QAAQC,IAAG,GAAI,MAAA,GAClCC,eAAe,KACfC,cAAc,cACdC,eAAe,KACfC,gBAAgB,KAChBC,SAAS,oBAAoBN,QAAQO,IAAIC,eAAe,GAAA,IACxDZ,iBAAiBI,QAAQO,IAAIE,oBAAoB,IAAG,IAClDV,WAAW,CAAC;AAEhB,QAAMW,6BAA6BC,kBAAkBf,cAAAA;AACrDvB,UAAQD,MAAM,kBAAkB3E,IAAImH,SAASF,0BAAAA;AAG7C,MAAI3F,IAAI8F,aAAa;AACnBxC,YAAQD,MAAM,6DAAA;AACd;EACF;AAGC,GAAA,YAAA;AACC,QAAI;AAEF,YAAM0C,cAActH,kBAAkBC,GAAAA;AAGtC,YAAM,EAAE4D,gBAAe,IAAK,MAAMT,oBAChCC,QACAqD,cACAC,WAAAA;AAKF,UAAIW,eAAe,CAACzD,iBAAiB;AACnCgB,gBAAQY,IAAI,uFAAA;AAGZ,YAAI;AACF,cAAI9E,IAAImG,MAAAA;QACV,SAAShF,GAAG;AACV+C,kBAAQD,MAAM,sCAAsCkC,MAAAA;AAEpDjC,kBAAQY,IAAI,yDAAA;QACd;AAGAZ,gBAAQY,IAAI,kDAAkDqB,MAAAA,cAAoBF,YAAAA,QAAoB;AACtG,cAAMW,YAAY,MAAMxF,uBAAuB+E,QAAQF,cAAcC,aAAAA;AAErE,YAAIU,WAAW;AACb1C,kBAAQY,IAAI,uEAAA;AACZ+B,6BAAmBvG,KAAKM,GAAAA;AACxB;QACF;AAEAsD,gBAAQY,IAAI,sFAAA;MACd;AAIA,UAAI6B,eAAe,CAACzD,iBAAiB;AACnCgB,gBAAQY,IAAI,2DAAA;MACd,OAAO;AACLZ,gBAAQY,IAAI,0EAAA;MACd;AAGA,YAAMU,WAAW3D,qBAAAA;AAGjB,YAAMiF,OAAOvB,mBAAmBC,UAAUe,0BAAAA;AAG1C3F,UAAImG,UAAU,KAAK;QACjB,gBAAgB;QAChB,iBAAiB;QACjB,sBAAsB;MACxB,CAAA;AAGAnG,UAAIM,IAAI4F,IAAAA;IACV,SAAS7C,OAAO;AACdC,cAAQD,MAAM,0CAA0CA,KAAAA;AAGxD,UAAI,CAACrD,IAAI8F,aAAa;AACpB9F,YAAImG,UAAU,KAAK;UAAE,gBAAgB;QAA4B,CAAA;AACjEnG,YAAIM,IAAI,mHAAyB;MACnC;IACF;EACF,GAAA;AACF;AAlGgByE;AAuGhB,SAASkB,mBAAmBvG,KAAsBM,KAAmB;AACnE,MAAIA,IAAI8F,YAAa;AAGrB,QAAMM,cAAc1G,IAAIP,OAAO;AAE/BmE,UAAQY,IAAI,0CAA0CkC,WAAAA;AAEtDpG,MAAImG,UAAU,KAAK;IACjB,YAAYC;IACZ,iBAAiB;EACnB,CAAA;AACApG,MAAIM,IAAG;AACT;AAbS2F;;;ACzbT,OAAOI,WAAU;;;ACAjB,OAAOC,aAAyB;;;ACAhC,OAAOC,SAAQ;AACf,OAAOC,YAAY;;;ACDnB,SAASC,YAAYC,WAAU;AAC/B,OAAOC,WAAU;AACjB,OAAOC,QAAQ;;;ACFf,OAAOC,WAAU;AACjB,SAASC,YAAYC,WAAU;AAM/B,eAAsBC,oBAAoBC,KAAW;AACnD,QAAMC,QAAkB,CAAA;AAExB,iBAAeC,KAAKC,YAAkB;AACpC,UAAMC,UAAU,MAAMC,IAAGC,QAAQH,YAAY;MAAEI,eAAe;IAAK,CAAA;AAEnE,eAAWC,SAASJ,SAAS;AAC3B,YAAMK,WAAWC,MAAKC,KAAKR,YAAYK,MAAMI,IAAI;AAEjD,UAAIJ,MAAMK,YAAW,GAAI;AACvB,cAAMX,KAAKO,QAAAA;MACb,WAAWD,MAAMM,OAAM,KAAMN,MAAMI,KAAKG,SAAS,gBAAA,GAAmB
;AAClEd,cAAMe,KAAKP,QAAAA;MACb;IACF;EACF;AAZeP;AAcf,QAAMA,KAAKF,GAAAA;AACX,SAAOC;AACT;AAnBsBF;AAwBtB,eAAsBkB,eACpBC,iBACAC,aAAmE;AAEnE,QAAMC,YAAY,oBAAIC,IAAAA;AAGtB,QAAMC,cAAc;AACpB,QAAMC,UAAqC,CAAA;AAE3C,WAASC,IAAI,GAAGA,IAAIN,gBAAgBO,QAAQD,KAAKF,aAAa;AAC5D,UAAMI,QAAQR,gBAAgBS,MAAMH,GAAGA,IAAIF,WAAAA;AAC3C,UAAMM,eAAe,MAAMC,QAAQC,IAAIJ,MAAMK,IAAI,CAACC,aAAab,YAAYa,QAAAA,CAAAA,CAAAA;AAC3ET,YAAQP,KAAI,GAAIY,YAAAA;EAClB;AAGA,aAAWK,YAAYV,SAAS;AAC9B,eAAW,CAACW,aAAaC,IAAAA,KAASF,SAAS7B,QAAO,GAAI;AACpDgB,gBAAUgB,IAAIF,aAAaC,IAAAA;IAC7B;EACF;AAEA,SAAOf;AACT;AAxBsBH;AAiCtB,SAASoB,eAAeH,aAAqBd,WAAkC;AAE7E,QAAMkB,cAAclB,UAAUmB,IAAIL,WAAAA;AAClC,MAAII,aAAa;AACf,WAAOA;EACT;AAGA,aAAW,CAACE,KAAKC,KAAAA,KAAUrB,UAAUhB,QAAO,GAAI;AAE9C,UAAM,CAACsC,WAAWC,UAAAA,IAAcH,IAAII,MAAM,GAAA;AAC1C,QAAI,CAACF,aAAa,CAACC,WAAY;AAG/B,UAAME,cAAcH,UAAUI,OAAO,CAAA,EAAGC,YAAW,IAAKL,UAAUf,MAAM,CAAA,IAAKgB,WAAWG,OAAO,CAAA,EAAGE,YAAW,IAAKL,WAAWhB,MAAM,CAAA;AACnI,QAAIO,gBAAgBW,aAAa;AAC/B,aAAOJ;IACT;AAGA,QAAIP,gBAAgBS,YAAY;AAC9B,aAAOF;IACT;EACF;AAEA,SAAOQ;AACT;AA1BSZ;AA+BF,SAASa,oBAAoBC,SAAc/B,WAAkC;AAClF,MAAIgC,gBAAgB;AAEpB,MAAI,CAACD,QAAQE,OAAO;AAClB,WAAOD;EACT;AAEA,aAAWE,YAAYC,OAAOC,OAAOL,QAAQE,KAAK,GAAG;AACnD,QAAI,CAACC,YAAY,OAAOA,aAAa,SAAU;AAE/C,eAAWG,aAAaF,OAAOC,OAAOF,QAAAA,GAAW;AAC/C,UAAIG,aAAa,OAAOA,cAAc,YAAY,iBAAiBA,WAAW;AAC5E,cAAMC,aAAarB,eAAeoB,UAAUvB,aAAuBd,SAAAA;AACnE,YAAIsC,YAAY;AACdD,oBAAU,UAAA,IAAc;YACtBE,MAAMD,WAAWC;YACjBC,MAAMF,WAAWE;UACnB;AACAR;QACF;MACF;IACF;EACF;AAEA,SAAOA;AACT;AAzBgBF;AA8BT,SAASW,sBAAsBV,SAAcW,UAAgB;AAClE,MAAIA,aAAa,OAAO,CAACX,QAAQE,OAAO;AACtC,WAAOF;EACT;AAEA,QAAMY,WAAgB,CAAC;AACvBR,SAAOS,KAAKb,QAAQE,KAAK,EAAEY,QAAQ,CAACzB,QAAAA;AAClC,UAAM0B,eAAe1B,IAAI2B,WAAWL,QAAAA,IAAYtB,IAAIb,MAAMmC,SAASrC,MAAM,IAAIe;AAC7EuB,aAASG,YAAAA,IAAgBf,QAAQE,MAAMb,GAAAA;EACzC,CAAA;AAEA,SAAO;IACL,GAAGW;IACHE,OAAOU;IACPD;EACF;AACF;AAhBgBD;;;ADnHhB,eAAsBO,6BAA6BC,UAA0B,CAAC,GAAC;AAC7E,QAAMC,YAAYC,KAAKC,IAAG;AAE1B,QAAMC,cAAcJ,QAAQI,eAAeC,MAAKC,QAAQC,WAAW,oCAAA;AACnE,QAAMC,YAAYR,QAAQQ,aAAaH,MAAKC,QAAQC,WAAW,WAAA;AAC/D,QAAME,aAAYT,QAAQS,cAAc;AAExC,MAAIC;AACJ,MAAIV,QAAQW,aAAa;AAEvBD,cAAUE,KAAKC,MAAMD,KAAKE,UAAUd,QAAQW,WAAW,CAAA;EACzD,OAAO;AAEL,UAAMI,iBAAiB,MAAMC,IAAGC,SAASb,aAAa,OAAA;AACtDM,cAAUE,KAAKC,MAAME,cAAAA;EACvB;AAEA,QAAMG,kBAAkB,MAAMC,oBAAoBX,SAAAA;AAClD,QAAMY,YAAY,MAAMC,eAAeH,iBAAiBI,qBAAAA;AACxD,QAAMC,WAAWC,oBAAoBd,SAASU,SAAAA;AAE9C,MAAIX,YAAW;AACb,UAAMO,IAAGP,UAAUL,aAAaQ,KAAKE,UAAUJ,SAAS,MAAM,CAAA,IAAK,MAAM,OAAA;EAC3E;AAEA,QAAMe,WAAWvB,KAAKC,IAAG,IAAKF;AAE9B,SAAO;IACLS;IACAgB,OAAO;MACLD;MACAE,kBAAkBT,gBAAgBU;MAClCC,oBAAoBT,UAAUU;MAC9BC,mBAAmBR;IACrB;EACF;AACF;AApCsBxB;AAyCtB,eAAeuB,sBAAsBU,UAAgB;AACnD,QAAMC,eAAe5B,MAAK6B,SAASC,QAAQC,IAAG,GAAIJ,QAAAA;AAGlD,QAAMK,UAAU,MAAMrB,IAAGC,SAASe,UAAU,OAAA;AAC5C,QAAMM,aAAaC,GAAGC,iBAAiBR,UAAUK,SAASE,GAAGE,aAAaC,QAAQ,IAAA;AAElF,SAAOC,0BAA0BL,YAAYL,YAAAA;AAC/C;AAReX;AAaf,SAASqB,0BAA0BL,YAA2BN,UAAgB;AAC5E,QAAMY,WAAW,oBAAIC,IAAAA;AACrB,MAAIC,iBAAiB;AACrB,MAAIC,YAAY;AAGhB,WAASC,cAAcC,MAAa;AAElC,QAAI,eAAeA,QAAQC,MAAMC,QAAQF,KAAKG,SAAS,GAAG;AACxD,aAAQH,KAAKG,UAAgCC,OAC3C,CAACC,QAA6BA,IAAIC,SAAShB,GAAGiB,WAAWC,SAAS;IAEtE;AAEA,QAAI,gBAAgBR,QAAQC,MAAMC,QAAQF,KAAKS,UAAU,GAAG;AAC1D,aAAOT,KAAKS;IACd;AACA,WAAO,CAAA;EACT;AAZSV;AAcT,WAASW,MAAMV,MAAa;AAE1B,QAAIV,GAAGqB,mBAAmBX,IAAAA,GAAO;AAC/B,YAAMS,aAAaV,cAAcC,IAAAA;AAGjC,UAAIA,KAAKY,MAAM;AACbd,oBAAYE,KAAKY,KAAKC,QAAQxB,UAAAA;MAChC;AAEA,iBAAWyB,aAAaL,YAAY;AAClC,YAAInB,GAAGyB,iBAAiBD,UAAUE,UAAU,GAAG;AAC7C,gBAAMA,aAAaF,UAAUE;AAC7B,gBAAMC,gBAAgBD,WAAWA,WAAWH,QAAQxB,UAAAA;AAEpD,cAAI4B,kBAAkB,cAAc;AAClC,gBAAID,WAAWE,UAAUvC,SAAS,GAAG;AACnC,oBAA
MwC,MAAMH,WAAWE,UAAU,CAAA;AACjC,kBAAI5B,GAAG8B,gBAAgBD,GAAAA,GAAM;AAC3BtB,iCAAiBsB,IAAIE;cACvB;YACF;UACF;QACF;MACF;IACF;AAGA,QAAI/B,GAAGgC,oBAAoBtB,IAAAA,KAASA,KAAKY,MAAM;AAC7C,YAAMW,aAAavB,KAAKY,KAAKC,QAAQxB,UAAAA;AACrC,UAAImC,aAAa;AACjB,UAAIC,YAAY;AAChB,YAAM,EAAEC,KAAI,IAAKrC,WAAWsC,8BAA8B3B,KAAK4B,SAASvC,UAAAA,CAAAA;AAExE,YAAMoB,aAAaV,cAAcC,IAAAA;AAEjC,iBAAWc,aAAaL,YAAY;AAClC,YAAInB,GAAGyB,iBAAiBD,UAAUE,UAAU,GAAG;AAC7C,gBAAMC,gBAAgBH,UAAUE,WAAWA,WAAWH,QAAQxB,UAAAA;AAC9D,cAAI;YAAC;YAAO;YAAQ;YAAO;YAAU;YAAS;YAAW;YAAQ;YAAOwC,SAASZ,aAAAA,GAAgB;AAC/FO,yBAAaP,cAAca,YAAW;AACtC,gBAAIhB,UAAUE,WAAWE,UAAUvC,SAAS,GAAG;AAC7C,oBAAMwC,MAAML,UAAUE,WAAWE,UAAU,CAAA;AAC3C,kBAAI5B,GAAG8B,gBAAgBD,GAAAA,GAAM;AAC3BM,4BAAYN,IAAIE;cAClB;YACF;UACF;QACF;MACF;AAEA,UAAIG,cAAcD,cAAczB,WAAW;AACzC,cAAMiC,cAAc,GAAGjC,SAAAA,IAAayB,UAAAA;AACpC5B,iBAASqC,IAAID,aAAa;UACxBE,MAAMlD;UACN2C,MAAMA,OAAO;UACbQ,QAAQV;UACR3B;UACA4B;QACF,CAAA;MACF;IACF;AAEAnC,OAAG6C,aAAanC,MAAMU,KAAAA;EACxB;AAhESA;AAkETA,QAAMrB,UAAAA;AACN,SAAOM;AACT;AAxFSD;;;ADnDF,SAAS0C,qBACdC,iBACAC,mBACAC,WAAkB;AAGlB,MAAIC,QAA6B;AAEjC,SAAO,OAAOC,MAAeC,KAAeC,YAAAA;AAC1C,QAAI;AAEF,YAAMC,aAAa,MAAMC,IAAGC,SAAST,iBAAiB,OAAA;AAGtD,YAAMU,cAAcC,OAAOC,WAAW,KAAA,EAAOC,OAAON,UAAAA,EAAYO,OAAO,KAAA;AAGvE,UAAIX,SAASA,MAAMY,aAAaL,aAAa;AAC3C,eAAOL,IAAIW,KAAKb,MAAMc,IAAI;MAC5B;AAGA,UAAIC,UAAUC,KAAKC,MAAMb,UAAAA;AAGzB,UAAIN,qBAAqBK,QAAQe,OAAO;AACtC,cAAM,EAAEC,SAASC,iBAAiBC,MAAK,IAAK,MAAMC,6BAA6B;UAC7EC,aAAaR;UACbS,WAAW;UACXzB,WAAWA,aAAaI,QAAQsB;QAClC,CAAA;AACAV,kBAAUK;AAGVM,gBAAQC,IAAI,yBAAyBN,MAAMO,QAAQ,OAAOP,MAAMQ,iBAAiB,aAAa;MAChG;AAGA,YAAMC,SAASC,sBAAsBhB,SAASZ,QAAQ6B,QAAQ;AAG9DhC,cAAQ;QACNc,MAAMgB;QACNlB,UAAUL;MACZ;AAEAL,UAAIW,KAAKiB,MAAAA;IACX,SAASG,OAAO;AACd,YAAMC,UAAUD,iBAAiBE,QAAQF,MAAMC,UAAU;AACzDhC,UAAIkC,OAAO,GAAA,EAAKvB,KAAK;QACnBoB,OAAO;QACPC;MACF,CAAA;IACF;EACF;AACF;AAvDgBtC;;;ADMT,SAASyC,oBACdC,SACAC,SAA0B;AAE1B,QAAM,EAAEC,iBAAiBC,mBAAmBC,UAAS,IAAKJ;AAC1D,QAAMK,SAASC,QAAQC,OAAM;AAC7B,QAAMC,UAAUC,qBAAqBP,iBAAiBC,mBAAmBC,SAAAA;AAGzEC,SAAOK,IAAI,iBAAiB,CAACC,KAAKC,QAAQJ,QAAQG,KAAKC,KAAKX,OAAAA,CAAAA;AAE5D,SAAOI;AACT;AAZgBN;;;AIZhB,IAAMc,iBAA8B;EAClC;IACEC,QAAQ;IACRC,MAAM;IACNC,aAAa;EACf;;AAOK,SAASC,wBAAwBC,SAAiC;AACvE,QAAM,EAAEC,iBAAiBC,oBAAoB,MAAMC,UAAS,IAAKH;AAEjE,SAAO;IACLI,MAAM;IACNC,WAAW;IACXC,QAAQX;IAERY,SAAS,wBAACC,YAA+BA,QAAQC,OAAxC;IAETC,cAAc,wBAACF,YAAAA;AACb,aAAOG,oBACL;QACEV;QACAC;QACAC;MACF,GACAK,OAAAA;IAEJ,GATc;EAUhB;AACF;AArBgBT;;;ACnBhB,OAAOa,cAAyB;;;ACAhC,SAASC,YAAYC,WAAU;AAC/B,SAASC,YAAYC,QAAAA,OAAMC,gBAAgB;;;ACyB3C,SAASC,mBAAmBC,SAAe;AAEzC,MAAIC,eAAeD,QAAQE,QAAQ,oBAAoB,MAAA;AAKvDD,iBAAeA,aAAaC,QAAQ,WAAW,KAAA;AAC/CD,iBAAeA,aAAaC,QAAQ,aAAa,WAAA;AAIjDD,iBAAeA,aAAaC,QAAQ,OAAO,OAAA;AAG3CD,iBAAeA,aAAaC,QAAQ,cAAc,OAAA;AAGlD,SAAO,IAAIC,OAAO,IAAIF,YAAAA,GAAe;AACvC;AAnBSF;AA4CF,SAASK,mBAAmBC,YAAgCL,SAAe;AAChF,MAAI,CAACK,cAAc,CAACL,SAAS;AAC3B,WAAO;EACT;AAGA,QAAMM,mBAAmBC,yBAAyBF,UAAAA;AAClD,QAAMG,oBAAoBD,yBAAyBP,OAAAA;AAGnD,MAAIM,qBAAqBE,mBAAmB;AAC1C,WAAO;EACT;AAGA,MAAIC,mBAAmBD,iBAAAA,GAAoB;AACzC,UAAME,QAAQX,mBAAmBS,iBAAAA;AACjC,WAAOE,MAAMC,KAAKL,gBAAAA;EACpB;AAGA,SAAO;AACT;AAtBgBF;AAwGT,SAASQ,mBAAmBC,SAAe;AAChD,SAAO,OAAOC,KAAKD,OAAAA;AACrB;AAFgBD;AA8CT,SAASG,yBAAyBC,OAAY;AACnD,SAAOA,MACJC,QAAQ,QAAQ,GAAA,EAChBA,QAAQ,QAAQ,EAAA;AACrB;AAJgBF;;;ADpNT,SAASG,cAAcC,UAAiB;AAC7C,MAAI,CAACA,UAAU;AACb,WAAOC,MAAKC,QAAQC,IAAG,GAAI,MAAA;EAC7B;AACA,SAAOC,WAAWJ,QAAAA,IAAYA,WAAWC,MAAKC,QAAQC,IAAG,GAAIH,QAAAA;AAC/D;AALgBD;AAUT,SAASM,gBAAgBC,UAAgB;AAC9C,SAAOC,SAASL,QAAQC,IAAG,GAAIG,QAAAA;AACjC;AAFgBD;AAOhB,eAAsBG,WAAWF,UAAgB;AAC/C,MAAI;AACF,UAAMG,IAAGC,OAAOJ,QAAAA;AAChB,WAAO;EACT
,QAAQ;AACN,WAAO;EACT;AACF;AAPsBE;AAYf,SAASG,cAAaC,MAAY;AACvC,QAAMC,UAAUD,KAAKE,KAAI;AACzB,MAAI,CAACD,QAAS,QAAOE;AAErB,MAAI;AACF,WAAOC,KAAKC,MAAMJ,OAAAA;EACpB,QAAQ;AACN,WAAOE;EACT;AACF;AATgBJ,OAAAA,eAAAA;AAcT,SAASO,cAAcC,SAAiBC,SAAe;AAC5D,MAAI,OAAOD,YAAY,SAAU,QAAOJ;AAExC,QAAMM,QAAQF,QAAQE,MAAMD,OAAAA;AAC5B,MAAI,CAACC,MAAO,QAAON;AAEnB,QAAMO,QAAQC,OAAOF,MAAM,CAAA,CAAE;AAC7B,SAAOE,OAAOC,SAASF,KAAAA,IAASA,QAAQP;AAC1C;AARgBG;AAaT,SAASO,WAAWH,OAA2BI,cAAsBC,UAAgB;AAC1F,MAAI,OAAOL,UAAU,YAAY,CAACA,MAAMR,KAAI,GAAI;AAC9C,WAAOY;EACT;AAEA,QAAME,SAASL,OAAOD,KAAAA;AACtB,MAAIC,OAAOC,SAASI,MAAAA,KAAWA,SAAS,GAAG;AACzC,WAAOC,KAAKC,IAAID,KAAKE,MAAMH,MAAAA,GAASD,QAAAA;EACtC;AAEA,SAAOD;AACT;AAXgBD;AAgBT,SAASO,iBAAiBV,OAA2BW,UAAgB;AAC1E,MAAI,OAAOX,UAAU,YAAY,CAACA,MAAMR,KAAI,GAAI;AAC9C,WAAOmB;EACT;AAEA,QAAML,SAASL,OAAOD,KAAAA;AACtB,SAAOC,OAAOC,SAASI,MAAAA,KAAWA,SAAS,IAAIC,KAAKE,MAAMH,MAAAA,IAAUK;AACtE;AAPgBD;AAYT,SAASE,mBAAmBC,SAAiBC,UAAgB;AAClE,QAAMC,YAAYD,SAASE,QAAQ,OAAO,GAAA;AAC1C,QAAMC,WAAWF,UAAUG,MAAM,GAAA,EAAKC,OAAOC,OAAAA;AAE7C,MAAIH,SAASI,KAAK,CAACC,YAAYA,YAAY,IAAA,GAAO;AAChD,UAAM,IAAIC,MAAM,uBAAA;EAClB;AAEA,QAAMC,WAAW7C,MAAKkC,SAASI,SAAStC,KAAK,GAAA,CAAA;AAC7C,QAAM8C,MAAMxC,SAAS4B,SAASW,QAAAA;AAE9B,MAAIC,IAAIC,WAAW,IAAA,GAAO;AACxB,UAAM,IAAIH,MAAM,4CAAA;EAClB;AAEA,SAAOC;AACT;AAhBgBZ;AA8BT,SAASe,YAAYC,YAAgC9B,SAAe;AACzE,SAAO+B,mBAAmBD,YAAY9B,OAAAA;AACxC;AAFgB6B;AAiBT,SAASG,cAAcC,cAAkCC,gBAAsB;AACpF,MAAI,CAACD,gBAAgB,CAACC,gBAAgB;AACpC,WAAO;EACT;AACA,SAAOD,aAAaE,YAAW,MAAOD,eAAeC,YAAW;AAClE;AALgBH;AAWT,SAASI,eAAeC,OAAc;AAC3C,SAAOA,iBAAiBZ,QACpB;IAAEa,MAAMD,MAAMC;IAAMvC,SAASsC,MAAMtC;EAAQ,IAC3C;IAAEA,SAASwC,OAAOF,KAAAA;EAAO;AAC/B;AAJgBD;;;AEtJhB,SAASI,QAAAA,aAAY;;;ACArB,SAASC,YAAYC,WAAU;AAM/B,eAAsBC,gBACpBC,UACAC,WACAC,aAAmC;AAEnC,QAAMC,SAAS,MAAMC,IAAGC,KAAKL,UAAU,GAAA;AAEvC,MAAI;AACF,UAAMM,QAAQ,MAAMH,OAAOI,KAAI;AAC/B,QAAIC,WAAWF,MAAMG;AACrB,QAAIC,YAAY;AAEhB,WAAOF,WAAW,GAAG;AACnB,YAAMG,SAASC,KAAKC,IAAIZ,WAAWO,QAAAA;AACnCA,kBAAYG;AAEZ,YAAMG,SAASC,OAAOC,MAAML,MAAAA;AAC5B,YAAMR,OAAOc,KAAKH,QAAQ,GAAGH,QAAQH,QAAAA;AAErC,UAAIU,QAAQJ,OAAOK,SAAS,MAAA;AAC5B,UAAIT,WAAW;AACbQ,iBAASR;AACTA,oBAAY;MACd;AAEA,YAAMU,QAAQF,MAAMG,MAAM,IAAA;AAC1BX,kBAAYU,MAAME,MAAK,KAAM;AAE7B,eAASC,IAAIH,MAAMT,SAAS,GAAGY,KAAK,GAAGA,KAAK,GAAG;AAC7C,YAAIH,MAAMG,CAAAA,GAAI;AACZrB,sBAAYkB,MAAMG,CAAAA,CAAE;QACtB;MACF;IACF;AAEA,QAAIb,WAAW;AACbR,kBAAYQ,SAAAA;IACd;EACF,UAAA;AACE,UAAMP,OAAOqB,MAAK;EACpB;AACF;AAzCsBzB;AA8Cf,SAAS0B,uBACdC,OACAC,MACAC,UAAgB;AAEhB,QAAMC,aAAaH,MAAMf;AACzB,QAAMmB,aAAaD,eAAe,IAAI,IAAIjB,KAAKmB,KAAKF,aAAaD,QAAAA;AACjE,QAAMI,cAAcL,OAAO,KAAKC;AAChC,QAAMK,WAAWrB,KAAKC,IAAImB,aAAaJ,UAAUC,UAAAA;AAEjD,QAAMK,aAAaR,MAAMS,MAAMH,YAAYC,QAAAA,EAAUG,IAAI,CAACC,aAAa;IACrEC,SAASD,QAAQC;IACjBC,QAAQF,QAAQE;IAChBC,MAAMH,QAAQG;IACdC,WAAWJ,QAAQI;IACnBC,SAASL,QAAQK;IACjBC,YAAYN,QAAQM;IACpBC,YAAYP,QAAQO;IACpBC,SAASR,QAAQQ,QAAQV,MAAK,EAAGW,QAAO;EAC1C,EAAA;AAEA,SAAO;IACLnB;IACAC;IACAmB,YAAYlB;IACZC;IACAkB,OAAOd;EACT;AACF;AA5BgBT;;;AC/CT,SAASwB,eAAAA;AACd,SAAO,uCAAuCC,QAAQ,SAAS,CAACC,MAAAA;AAC9D,UAAMC,IAAKC,KAAKC,OAAM,IAAK,KAAM;AACjC,UAAMC,IAAIJ,MAAM,MAAMC,IAAKA,IAAI,IAAO;AACtC,WAAOG,EAAEC,SAAS,EAAA;EACpB,CAAA;AACF;AANgBP;AAWT,SAASQ,6BACdC,WAA0B;AAE1B,MAAI,OAAOA,cAAc,UAAU;AACjC,UAAMC,QAAQD,UAAUE,YAAW;AACnC,QAAID,UAAU,QAAS,QAAO;AAC9B,QAAIA,UAAU,QAAS,QAAO;AAC9B,QAAIA,UAAU,UAAUA,UAAU,UAAW,QAAO;AACpD,QAAIA,UAAU,UAAUA,UAAU,MAAO,QAAO;AAChD,QAAIA,UAAU,QAAS,QAAO;AAC9B,QAAIA,UAAU,WAAWA,UAAU,UAAW,QAAO;AACrD,WAAO;EACT;AAGA,MAAID,aAAa,GAAI,QAAO;AAC5B,MAAIA,aAAa,GAAI,QAAO;AAC5B,MAAIA,aAAa,GAAI,QAAO;AAC5B,MAAIA,aAAa,GAAI,QAAO;AAC5B,MAAIA,aAAa,GAAI,QAAO;AAC5B,SAAO;AACT;
AArBgBD;AA0BT,SAASI,gBAAgBC,MAAY;AAC1C,QAAMH,QAAQG,KAAKF,YAAW;AAE9B,MAAID,MAAMI,SAAS,OAAA,KAAYJ,MAAMI,SAAS,UAAA,EAAa,QAAO;AAClE,MAAIJ,MAAMI,SAAS,OAAA,KAAYJ,MAAMI,SAAS,KAAA,KAAUJ,MAAMI,SAAS,QAAA,EAAM,QAAO;AACpF,MAAIJ,MAAMI,SAAS,MAAA,KAAWJ,MAAMI,SAAS,SAAA,KAAcJ,MAAMI,SAAS,KAAA,KAAUJ,MAAMI,SAAS,QAAA,EAAM,QAAO;AAChH,MAAIJ,MAAMI,SAAS,OAAA,KAAYJ,MAAMI,SAAS,KAAA,EAAQ,QAAO;AAC7D,MAAIJ,MAAMI,SAAS,SAAA,KAAcJ,MAAMI,SAAS,OAAA,EAAU,QAAO;AAEjE,SAAO;AACT;AAVgBF;AAeT,SAASG,aAAaC,MAAcC,QAA0B;AACnE,MAAI;AACF,UAAMC,UAAUC,KAAKC,MAAMJ,IAAAA;AAC3B,UAAMK,KAAKrB,aAAAA;AAEX,WAAO;MACLqB;MACAC,OAAOd,6BAA6BU,QAAQI,KAAK;MACjDC,WAAW,IAAIC,KAAKN,QAAQO,IAAI,EAAEC,QAAO;MACzCC,SAAST,QAAQS,WAAWT,QAAQU,OAAO;MAC3CC,SAASX,QAAQW,WAAW;MAC5BC,SAASZ,QAAQa,YAAY;MAC7BC,QAAQd,QAAQe,WAAW;MAC3BC,OAAOhB,QAAQiB,UAAU;MACzBC,UAAUlB,QAAQmB,aAAa;MAC/BC,OAAOpB,QAAQoB,SAAS;MACxBC,MAAM;QACJC,KAAKtB,QAAQsB;QACbC,UAAUvB,QAAQuB;QAClBC,MAAMxB,QAAQwB;QACdC,QAAQzB,QAAQyB;QAChBC,YAAY1B,QAAQ2B;QACpBC,YAAY5B,QAAQ6B;QACpBC,IAAI9B,QAAQ8B;QACZC,aAAa/B,QAAQgC;QACrBC,cAAcjC,QAAQkC;MACxB;MACAC,MAAM;QAACpC;;IACT;EACF,SAASqC,OAAO;AACd,WAAO;EACT;AACF;AAhCgBvC;AAsCT,SAASwC,YAAYvC,MAAcC,QAAmC;AAC3E,QAAMI,KAAKrB,aAAAA;AACX,QAAMwD,QAAQxC,KAAKwC,MAAM,sEAAA;AAEzB,MAAI,CAACA,OAAO;AACV,WAAO;MACLnC;MACAC,OAAO;MACPC,WAAWC,KAAKiC,IAAG;MACnB9B,SAASX;MACTa,SAAS;MACTC,SAAS;MACTE,QAAQ;MACRE,OAAO;MACPE,UAAU;MACVE,OAAO;MACPC,MAAM;MACNc,MAAM;QAACpC;;IACT;EACF;AAEA,QAAM,CAAA,EAAGyC,SAAAA,EAAWC,OAAAA,IAAWH;AAE/B,MAAIjC;AACJ,MAAI;AACF,UAAMqC,SAASF,QAAQzD,QAAQ,KAAK,GAAA;AACpCsB,gBAAY,IAAIC,KAAKoC,MAAAA,EAAQlC,QAAO;AACpC,QAAImC,MAAMtC,SAAAA,GAAY;AACpBA,kBAAYC,KAAKiC,IAAG;IACtB;EACF,SAASH,OAAO;AACd/B,gBAAYC,KAAKiC,IAAG;EACtB;AAEA,QAAMnC,QAAQV,gBAAgB+C,OAAAA;AAE9B,SAAO;IACLtC;IACAC;IACAC;IACAI,SAASgC;IACT9B,SAAS;IACTC,SAAS;IACTE,QAAQ;IACRE,OAAO;IACPE,UAAU;IACVE,OAAO;IACPC,MAAM;IACNc,MAAM;MAACpC;;EACT;AACF;AAlDgBsC;;;AC/FhB,SAASO,wBAAwB;AACjC,SAASC,uBAAuB;AAahC,eAAsBC,sBACpBC,UACAC,SACAC,OAAa;AAEb,QAAMC,SAAS,MAAMC,WAAWJ,QAAAA;AAChC,MAAI,CAACG,QAAQ;AACX,WAAOE;EACT;AAEA,QAAMC,UAAsB,CAAA;AAC5B,QAAMC,SAASC,iBAAiBR,UAAU;IAAES,UAAU;EAAO,CAAA;AAC7D,QAAMC,KAAKC,gBAAgB;IAAEC,OAAOL;IAAQM,WAAWC;EAAS,CAAA;AAEhE,mBAAiBC,QAAQL,IAAI;AAC3B,UAAMM,QAAQC,cAAaF,IAAAA;AAC3B,QAAI,CAACC,MAAO;AACZ,QAAIA,MAAME,aAAajB,QAAS;AAEhCK,YAAQa,KAAKH,KAAAA;AACb,QAAId,QAAQ,KAAKI,QAAQc,SAASlB,OAAO;AACvCI,cAAQe,MAAK;IACf;EACF;AAEAX,KAAGY,MAAK;AACRf,SAAOe,MAAK;AAEZ,SAAOhB;AACT;AA7BsBP;AAkCtB,eAAsBwB,qBACpBvB,UACAwB,MACAC,UACAC,YACAC,cAAqB;AAErB,MAAI,CAAE,MAAMvB,WAAWJ,QAAAA,GAAY;AACjC,WAAOK;EACT;AAEA,QAAMuB,SAAS;IACbC,oBAAoB;IACpBC,WAAW,KAAK;EAClB;AAEA,QAAMC,WAAW,oBAAIC,IAAAA;AACrB,QAAMC,iBAAiC,CAAA;AAEvC,QAAMC,qBAAqB,wBAACjC,aAAmC;IAC7DA;IACAkC,SAAS,CAAA;IACTC,QAAQ/B;IACRgC,MAAMhC;IACNiC,WAAWjC;IACXkC,SAASlC;IACTmC,YAAYnC;IACZoC,YAAYpC;IACZqC,cAAc;EAChB,IAV2B;AAY3B,QAAMC,wBAAwB,wBAACC,SAAuB5B,UAAAA;AACpD,QAAIA,MAAMoB,UAAU,CAACQ,QAAQR,OAAQQ,SAAQR,SAASS,OAAO7B,MAAMoB,MAAM;AACzE,QAAIpB,MAAMqB,QAAQ,CAACO,QAAQP,KAAMO,SAAQP,OAAOQ,OAAO7B,MAAMqB,IAAI;AAEjEO,YAAQT,QAAQhB,KAAKH,KAAAA;AACrB,QAAI4B,QAAQT,QAAQf,SAASQ,OAAOC,oBAAoB;AACtDe,cAAQT,QAAQd,MAAK;IACvB;EACF,GAR8B;AAU9B,QAAMyB,yBAAyB,wBAACF,SAAuB5B,OAAiB+B,YAAAA;AACtEH,YAAQF,eAAe;AACvBE,YAAQL,UAAUvB,MAAMgC;AACxBJ,YAAQJ,aAAaS,cAAcF,SAAS,sBAAA;AAC5CH,YAAQH,aAAaQ,cAAcF,SAAS,sBAAA;AAC5C,QAAI,CAACH,QAAQP,QAAQrB,MAAMqB,MAAM;AAC/BO,cAAQP,OAAOQ,OAAO7B,MAAMqB,IAAI;IAClC;AAEA,UAAMa,cAAc,CAACxB,cAAcyB,YAAYP,QAAQP,MAAMX,UAAAA;AAC7D,UAAM0B,gBAAgB,CAACzB,gBAAgB0B,cAAcT,QAAQR,QAAQT,YAAAA;AACrE,UAAM2B,gBAAgBJ,eAAeE;AAErC,QAAIE,eAAe;AACjBrB,qBAAed,KAAKyB,OAAAA;IACtB;EACF,GAhB+B;AAkB/B,QAAMW,kBAAkB,wB
AACvC,UAAAA;AACvB,UAAM,EAAEE,UAAUjB,SAAS8C,UAAU,GAAE,IAAK/B;AAC5C,QAAI,CAACf,QAAS;AAEd,QAAI2C,UAAUb,SAASyB,IAAIvD,OAAAA;AAC3B,QAAI,CAAC2C,SAAS;AACZA,gBAAUV,mBAAmBjC,OAAAA;AAC7B8B,eAAS0B,IAAIxD,SAAS2C,OAAAA;IACxB;AAEAD,0BAAsBC,SAAS5B,KAAAA;AAE/B,QAAI,CAAC4B,QAAQF,iBAAiBK,QAAQW,SAAS,wBAAA,KAA6BX,QAAQW,SAAS,qBAAA,IAAyB;AACpHZ,6BAAuBF,SAAS5B,OAAO+B,OAAAA;IACzC;AAEA,QAAIA,QAAQW,SAAS,sBAAA,KAA2B,CAACd,QAAQN,WAAW;AAClEM,cAAQN,YAAYtB,MAAMgC;IAC5B;EACF,GAnBwB;AAqBxB,QAAMW,cAAc,wBAAC5C,SAAAA;AACnB,UAAMC,QAAQC,cAAaF,IAAAA;AAC3B,QAAIC,OAAOE,UAAU;AACnBqC,sBAAgBvC,KAAAA;IAClB;EACF,GALoB;AAOpB,QAAM4C,gBAAgB5D,UAAU4B,OAAOE,WAAW6B,WAAAA;AAElD,SAAOE,uBAAuB5B,gBAAgBT,MAAMC,QAAAA;AACtD;AA1FsBF;AA+FtB,eAAsBuC,gBACpB9D,UACAwB,MACAC,UAAgB;AAEhB,MAAI,CAAE,MAAMrB,WAAWJ,QAAAA,GAAY;AACjC,WAAOK;EACT;AAEA,QAAM0D,WAAWvC,OAAOC;AACxB,QAAMuC,SAAmB,CAAA;AACzB,MAAIC,aAAa;AAEjB,QAAM1D,SAASC,iBAAiBR,UAAU;IAAES,UAAU;EAAO,CAAA;AAC7D,QAAMC,KAAKC,gBAAgB;IAAEC,OAAOL;IAAQM,WAAWC;EAAS,CAAA;AAEhE,MAAI;AACF,qBAAiBC,QAAQL,IAAI;AAC3BsD,aAAO7C,KAAKJ,IAAAA;AACZ,UAAIiD,OAAO5C,SAAS2C,UAAU;AAC5BC,eAAO3C,MAAK;MACd;AACA4C,oBAAc;IAChB;EACF,UAAA;AACEvD,OAAGY,MAAK;AACRf,WAAOe,MAAK;EACd;AAEA,QAAM4C,aAAaD,eAAe,IAAI,IAAIE,KAAKC,KAAKH,aAAaxC,QAAAA;AAEjE,MAAIuC,OAAO5C,WAAW,GAAG;AACvB,WAAO;MAAEI;MAAMC;MAAUwC;MAAYC;MAAYG,OAAO,CAAA;IAAG;EAC7D;AAEA,QAAMC,aAAaH,KAAKI,IAAIN,aAAazC,OAAOC,UAAU,CAAA;AAC1D,QAAM+C,WAAWL,KAAKI,IAAIN,cAAczC,OAAO,KAAKC,UAAU,CAAA;AAC9D,QAAMgD,mBAAmBR,aAAaD,OAAO5C;AAE7C,QAAMiD,QAAkB,CAAA;AACxB,WAASK,IAAIV,OAAO5C,SAAS,GAAGsD,KAAK,GAAGA,KAAK,GAAG;AAC9C,UAAMC,YAAYF,mBAAmBC;AACrC,QAAIC,aAAaL,cAAcK,YAAYH,UAAU;AACnDH,YAAMlD,KAAK6C,OAAOU,CAAAA,CAAE;IACtB;EACF;AAEA,SAAO;IACLlD;IACAC;IACAwC;IACAC;IACAG,OAAOA,MAAMO,QAAO;EACtB;AACF;AAtDsBd;;;AC/ItB,SAASe,oBAAAA,yBAAwB;AACjC,SAASC,mBAAAA,wBAAuB;AAChC,SAASC,QAAAA,aAAY;AAcrB,eAAsBC,eACpBC,QACAC,UAKI,CAAC,GAAC;AAEN,QAAMC,QAAQD,QAAQC,SAAS;AAC/B,QAAMC,SAASF,QAAQE,UAAU;AACjC,QAAMC,UAAUH,QAAQG,WAAW;IAAC;IAAU;IAAS;IAAc;;AAErE,QAAMC,UAAuB,CAAA;AAC7B,QAAMC,SAAmB,CAAA;AAEzB,aAAWC,UAAUH,SAAS;AAC5B,QAAI;AACF,YAAMI,OAAO,MAAMC,iBAAiBT,QAAQO,MAAAA;AAC5CF,cAAQK,KAAI,GAAIF,IAAAA;IAClB,SAASG,OAAO;AACd,YAAMC,WAAW,kBAAkBL,MAAAA,KAAWI,iBAAiBE,QAAQF,MAAMG,UAAUC,OAAOJ,KAAAA,CAAAA;AAC9FL,aAAOI,KAAKE,QAAAA;AACZI,cAAQC,KAAK,oBAAoBL,QAAAA,EAAU;IAC7C;EACF;AAEA,MAAIP,QAAQa,WAAW,GAAG;AACxB,QAAIZ,OAAOY,SAAS,GAAG;AACrBF,cAAQC,KAAK,2CAA2CX,OAAOa,KAAK,IAAA,CAAA,EAAO;IAC7E;AACA,WAAOC;EACT;AAEA,MAAIC,eAAehB;AACnB,MAAIJ,QAAQqB,UAAUrB,QAAQqB,OAAOJ,SAAS,GAAG;AAC/CG,mBAAehB,QAAQkB,OAAOC,CAAAA,QAAOvB,QAAQqB,OAAQG,SAASD,IAAIE,KAAK,CAAA;EACzE;AAGAL,eAAaM,KAAK,CAACC,GAAGC,MAAMA,EAAEC,YAAYF,EAAEE,SAAS;AAErD,QAAMC,QAAQV,aAAaH;AAC3B,QAAMc,gBAAgBX,aAAaY,MAAM9B,QAAQA,SAASD,KAAAA;AAE1D,SAAO;IACLM,MAAMwB;IACND;IACAG,SAAS/B,SAASD,QAAQ6B;EAC5B;AACF;AAlDsBhC;AAuDtB,eAAeU,iBACbT,QACAO,QAAc;AAEd,MAAI4B;AACJ,MAAIC;AAEJ,MAAI7B,WAAW,UAAU;AACvB4B,eAAWhB,MAAKnB,QAAQ,YAAA;AACxBoC,aAAS,wBAACC,SAASC,aAAaD,MAAM,QAAA,GAA7B;EACX,WAAW9B,WAAW,SAAS;AAC7B4B,eAAWhB,MAAKnB,QAAQ,WAAA;AACxBoC,aAAS,wBAACC,SAASC,aAAaD,MAAM,OAAA,GAA7B;EACX,WAAW9B,WAAW,cAAc;AAClC4B,eAAWhB,MAAKnB,QAAQ,gBAAA;AACxBoC,aAAS,wBAACC,SAASE,YAAYF,MAAM,YAAA,GAA5B;EACX,WAAW9B,WAAW,cAAc;AAClC4B,eAAWhB,MAAKnB,QAAQ,gBAAA;AACxBoC,aAAS,wBAACC,SAASE,YAAYF,MAAM,YAAA,GAA5B;EACX,OAAO;AACLrB,YAAQC,KAAK,sCAAsCV,MAAAA,EAAQ;AAC3D,WAAO,CAAA;EACT;AAEA,MAAI,CAAE,MAAMiC,WAAWL,QAAAA,GAAY;AACjCnB,YAAQC,KAAK,sCAAsCkB,QAAAA,EAAU;AAC7D,WAAO,CAAA;EACT;AAEA,QAAM3B,OAAoB,CAAA;AAC1B,MAAIiC,SAAqD;AACzD,MAAIC,KAAgD;AAEpD,MAAI;AACFD,aAASE,kBAAiBR,UAAU;MAAES,UAAU;IAAO,CAAA;AACvDF,SAAKG,iBAAgB;MAAEC,OAAOL;MAAQM,WAAWC;IAAS,CAAA;AAE1D
,qBAAiBX,QAAQK,IAAI;AAC3B,UAAI,CAACL,KAAKY,KAAI,EAAI;AAElB,UAAI;AACF,cAAMzB,MAAMY,OAAOC,IAAAA;AACnB,YAAIb,KAAK;AACPhB,eAAKE,KAAKc,GAAAA;QACZ;MACF,SAAS0B,YAAY;MAErB;IACF;EACF,SAASvC,OAAO;AACdK,YAAQL,MAAM,oCAAoCwB,QAAAA,KAAaxB,KAAAA;AAC/D,UAAMA;EACR,UAAA;AACE,QAAI+B,IAAI;AACNA,SAAGS,MAAK;IACV;AACA,QAAIV,QAAQ;AACVA,aAAOU,MAAK;IACd;EACF;AAEA,SAAO3C;AACT;AA9DeC;;;ACvEf,SAAS2C,oBAAAA,yBAAwB;AACjC,SAASC,mBAAAA,wBAAuB;AAShC,eAAsBC,gBACpBC,UACAC,SACAC,OACAC,OACAC,WAAkB;AAElB,MAAI,CAAE,MAAMC,WAAWL,QAAAA,GAAY;AACjC,WAAOM;EACT;AAEA,QAAMC,SAAS;IACbC,oBAAoB;IACpBC,WAAW,KAAK;EAClB;AAEA,QAAMC,WAAW,oBAAIC,IAAAA;AACrB,QAAMC,iBAAiC,CAAA;AAEvC,QAAMC,qBAAqB,wBAACC,aAAmC;IAC7DA;IACAC,SAAS,CAAA;IACTC,QAAQV;IACRJ,MAAMI;IACNW,WAAWX;IACXY,SAASZ;IACTa,YAAYb;IACZc,YAAYd;IACZe,cAAc;EAChB,IAV2B;AAY3B,QAAMC,gCAAgC,wBAACC,YAAAA;AACrC,UAAMC,eAAeZ,eAAea,KAAKC,CAAAA,SAAQA,KAAKZ,YAAYS,QAAQT,OAAO;AACjF,QAAIU,cAAc;AAChB,aAAO;IACT;AAEA,UAAMG,sBAAsBJ,QAAQrB,MAAM0B,SAAS1B,KAAAA;AACnD,QAAI,CAACyB,qBAAqB;AACxB,aAAO;IACT;AAEA,QAAI1B,WAAWsB,QAAQR,QAAQc,SAAS,GAAG;AACzC,YAAMC,eAAeP,QAAQR,QAAQgB,KAAKC,CAAAA,MAAKA,EAAEC,cAAchC,OAAAA;AAC/D,UAAI6B,cAAcG,cAAchC,SAAS;AACvC,eAAOiC,OAAOJ,aAAaG,aAAahC,OAAO,MAAMA,YAC/CG,YAAY0B,cAAcG,cAAc7B,cAAcA,YAAY;MAC1E;AACA,aAAO;IACT;AAEA,WAAO;EACT,GArBsC;AAuBtC,QAAM+B,wBAAwB,wBAACZ,SAAuBa,UAAAA;AACpD,QAAIA,MAAMpB,UAAU,CAACO,QAAQP,OAAQO,SAAQP,SAASkB,OAAOE,MAAMpB,MAAM;AACzE,QAAIoB,MAAMlC,QAAQ,CAACqB,QAAQrB,KAAMqB,SAAQrB,OAAOgC,OAAOE,MAAMlC,IAAI;AAEjEqB,YAAQR,QAAQsB,KAAKD,KAAAA;AACrB,QAAIb,QAAQR,QAAQc,SAAStB,OAAOC,oBAAoB;AACtDe,cAAQR,QAAQuB,MAAK;IACvB;AAEA,QAAIhB,8BAA8BC,OAAAA,GAAU;AAC1CX,qBAAeyB,KAAKd,OAAAA;AACpB,UAAIpB,SAASS,eAAeiB,SAAS1B,OAAO;AAC1CS,uBAAe2B,IAAG;MACpB;IACF;EACF,GAf8B;AAiB9B,QAAMC,yBAAyB,wBAACjB,SAAuBa,OAAiBK,YAAAA;AACtElB,YAAQF,eAAe;AACvBE,YAAQL,UAAUkB,MAAMM;AACxBnB,YAAQJ,aAAawB,cAAcF,SAAS,sBAAA;AAC5ClB,YAAQH,aAAauB,cAAcF,SAAS,sBAAA;AAC5C,QAAI,CAAClB,QAAQrB,QAAQkC,MAAMlC,MAAM;AAC/BqB,cAAQrB,OAAOgC,OAAOE,MAAMlC,IAAI;IAClC;AAEA,QAAIoB,8BAA8BC,OAAAA,GAAU;AAC1CX,qBAAeyB,KAAKd,OAAAA;AACpB,UAAIpB,SAASS,eAAeiB,SAAS1B,OAAO;AAC1CS,uBAAe2B,IAAG;MACpB;IACF;EACF,GAf+B;AAiB/B,QAAMK,kBAAkB,wBAACR,UAAAA;AACvB,UAAM,EAAES,UAAU/B,SAAS2B,UAAU,GAAE,IAAKL;AAC5C,QAAI,CAACtB,QAAS;AAEd,QAAIS,UAAUb,SAASoC,IAAIhC,OAAAA;AAC3B,QAAI,CAACS,SAAS;AACZA,gBAAUV,mBAAmBC,OAAAA;AAC7BJ,eAASqC,IAAIjC,SAASS,OAAAA;IACxB;AAEAY,0BAAsBZ,SAASa,KAAAA;AAE/B,QAAI,CAACb,QAAQF,iBAAiBoB,QAAQO,SAAS,wBAAA,KAA6BP,QAAQO,SAAS,qBAAA,IAAyB;AACpHR,6BAAuBjB,SAASa,OAAOK,OAAAA;IACzC;AAEA,QAAIA,QAAQO,SAAS,sBAAA,KAA2B,CAACzB,QAAQN,WAAW;AAClEM,cAAQN,YAAYmB,MAAMM;IAC5B;EACF,GAnBwB;AAqBxB,QAAMO,cAAc,wBAACC,SAAAA;AACnB,UAAMd,QAAQe,cAAaD,IAAAA;AAC3B,QAAId,OAAOS,UAAU;AACnBD,sBAAgBR,KAAAA;IAClB;EACF,GALoB;AAOpB,QAAMgB,gBAAgBpD,UAAUO,OAAOE,WAAWwC,WAAAA;AAElD,SAAO;IACLI,MAAM;IACNC,UAAU1C,eAAeiB;IACzB0B,YAAY3C,eAAeiB;IAC3B2B,YAAY;IACZC,OAAO7C,eAAe8C,IAAI,CAACnC,aAAa;MACtCT,SAASS,QAAQT;MACjBE,QAAQO,QAAQP;MAChBd,MAAMqB,QAAQrB;MACde,WAAWM,QAAQN;MACnBC,SAASK,QAAQL;MACjBC,YAAYI,QAAQJ;MACpBC,YAAYG,QAAQH;MACpBL,SAASQ,QAAQR,QAAQ4C,MAAK,EAAGC,QAAO;IAC1C,EAAA;EACF;AACF;AAtIsB7D;AA2ItB,eAAsB8D,kBACpB7D,UACAE,OACA4D,YAAkB;AAElB,QAAMC,SAAS,MAAM1D,WAAWL,QAAAA;AAChC,MAAI,CAAC+D,QAAQ;AACX,WAAOzD;EACT;AAEA,QAAM0D,UAAsB,CAAA;AAC5B,QAAMC,SAASC,kBAAiBlE,UAAU;IAAEmE,UAAU;EAAO,CAAA;AAC7D,QAAMC,KAAKC,iBAAgB;IAAEC,OAAOL;IAAQM,WAAWC;EAAS,CAAA;AAEhE,mBAAiBtB,QAAQkB,IAAI;AAC3B,UAAMhC,QAAQe,cAAaD,IAAAA;AAC3B,QAAI,CAACd,MAAO;AAEZ,UAAMT,sBAAsBS,MAAMlC,MAAM0B,SAAS1B,KAAAA;AACjD,UAAMuE,gBAAgBrC,MAAMsC,gBAAgBZ,cAAc1B,MAAMnC;AAChE,QAAI,CAAC0B,uBAAuB,CAAC8C,cAAe;AAE5CT,YAAQ3B,KAAKD,KAAAA;EACf;AAEAg
C,KAAGO,MAAK;AACRV,SAAOU,MAAK;AAEZ,SAAO;IACLb;IACA/C,SAASiD;EACX;AACF;AAhCsBH;;;AC/FtB,eAAsBe,wBACpBC,UACAC,cACAC,OAAc;AAEd,MAAI,CAAE,MAAMC,WAAWH,QAAAA,GAAY;AACjC,WAAOI;EACT;AAEA,QAAMC,SAAS;IACbC,WAAW,KAAK;EAClB;AAEA,QAAMC,WAAW,oBAAIC,IAAAA;AACrB,QAAMC,kBAA4C,CAAA;AAElD,QAAMC,+BAA+B,wBAACC,SAAiBC,WAA2C;IAChGD;IACAV,cAAcW;IACdC,cAAc;IACdC,eAAe;EACjB,IALqC;AAOrC,QAAMC,iCAAiC,wBAACC,YAAAA;AACtC,UAAMC,eAAeR,gBAAgBS,KAAKC,CAAAA,UAASA,MAAMR,YAAYK,QAAQL,OAAO;AACpF,QAAIM,cAAc;AAChB,aAAO;IACT;AACA,WAAOD,QAAQf,iBAAiBA;EAClC,GANuC;AAQvC,QAAMmB,wBAAwB,wBAACJ,SAAiCK,UAAAA;AAC9D,QAAIA,MAAMC,cAAc,CAACN,QAAQO,WAAW;AAC1CP,cAAQO,YAAYC,OAAOH,MAAMC,UAAU;IAC7C;AACA,QAAID,MAAMI,UAAU,CAACT,QAAQS,QAAQ;AACnCT,cAAQS,SAASD,OAAOH,MAAMI,MAAM;IACtC;AAEA,UAAMC,UAAUL,MAAMK,WAAW;AAIjC,QAAIA,QAAQC,SAAS,sBAAA,KAA2B,CAACX,QAAQF,eAAe;AACtEE,cAAQF,gBAAgB;AACxBE,cAAQY,YAAYP,MAAMQ;AAC1B,UAAIR,MAAMS,OAAO;AACfd,gBAAQc,QAAQN,OAAOH,MAAMS,KAAK;MACpC;IACF;AAIA,QAAIJ,QAAQC,SAAS,uBAAA,GAA0B;AAC7CX,cAAQH,eAAe;AACvBG,cAAQe,UAAUV,MAAMQ;AACxBb,cAAQgB,SAAS;AACjB,UAAIX,MAAMY,QAAQ;AAChBjB,gBAAQiB,SAAST,OAAOH,MAAMY,MAAM;MACtC;AACA,UAAIZ,MAAMa,aAAa;AACrBlB,gBAAQmB,aAAaC,OAAOf,MAAMa,WAAW;MAC/C;AAEA,UAAInB,+BAA+BC,OAAAA,GAAU;AAC3CP,wBAAgB4B,KAAKrB,OAAAA;MACvB;IACF;AAIA,QAAIU,QAAQC,SAAS,kBAAA,GAAqB;AACxCX,cAAQH,eAAe;AACvBG,cAAQe,UAAUV,MAAMQ;AACxBb,cAAQgB,SAAS;AACjB,UAAIX,MAAMiB,OAAO;AACftB,gBAAQsB,QAAQ;UAAEZ,SAASF,OAAOH,MAAMiB,KAAK;QAAE;MACjD;AACA,UAAIjB,MAAMa,aAAa;AACrBlB,gBAAQmB,aAAaC,OAAOf,MAAMa,WAAW;MAC/C;AAEA,UAAInB,+BAA+BC,OAAAA,GAAU;AAC3CP,wBAAgB4B,KAAKrB,OAAAA;MACvB;IACF;EACF,GAvD8B;AAyD9B,QAAMuB,kBAAkB,wBAAClB,UAAAA;AACvB,UAAM,EAAEmB,UAAU7B,SAAS8B,eAAe7B,MAAK,IAAKS;AAEpD,QAAI,CAACV,WAAW,CAACC,SAASA,UAAUX,aAAc;AAElD,QAAIe,UAAUT,SAASmC,IAAI/B,OAAAA;AAC3B,QAAI,CAACK,SAAS;AACZA,gBAAUN,6BAA6BC,SAASC,KAAAA;AAChDL,eAASoC,IAAIhC,SAASK,OAAAA;IACxB;AAEAI,0BAAsBJ,SAASK,KAAAA;EACjC,GAZwB;AAcxB,QAAMuB,cAAc,wBAACC,SAAAA;AACnB,UAAMxB,QAAQyB,cAAaD,IAAAA;AAC3B,QAAIxB,OAAOoB,eAAe;AACxBF,sBAAgBlB,KAAAA;IAClB;EACF,GALoB;AAOpB,QAAM0B,gBAAgB/C,UAAUK,OAAOC,WAAWsC,WAAAA;AAGlDnC,kBAAgBuC,KAAK,CAACC,GAAGC,MAAAA;AACvB,UAAMC,QAAQF,EAAElB,UAAU,IAAIqB,KAAKH,EAAElB,OAAO,EAAEsB,QAAO,IAAK;AAC1D,UAAMC,QAAQJ,EAAEnB,UAAU,IAAIqB,KAAKF,EAAEnB,OAAO,EAAEsB,QAAO,IAAK;AAC1D,WAAOC,QAAQH;EACjB,CAAA;AAGA,QAAMI,gBAAgBrD,QAAQO,gBAAgB+C,MAAM,GAAGtD,KAAAA,IAASO;AAEhE,SAAO;IACLR;IACAwD,aAAaF,cAAcG;IAC3BC,QAAQJ,cAAcK,IAAI,CAAC5C,aAAkC;MAC3DL,SAASK,QAAQL;MACjBV,cAAce,QAAQf;MACtBsB,WAAWP,QAAQO;MACnBE,QAAQT,QAAQS;MAChBG,WAAWZ,QAAQY;MACnBG,SAASf,QAAQe;MACjBI,YAAYnB,QAAQmB;MACpBH,QAAQhB,QAAQgB,UAAU;MAC1BF,OAAOd,QAAQc;MACfG,QAAQjB,QAAQiB;MAChBK,OAAOtB,QAAQsB;IACjB,EAAA;EACF;AACF;AA1IsBvC;;;ANvCtB,SAAS8D,eAAeC,KAAeC,UAAkBC,UAAU,sBAAoB;AACrFF,MAAIG,OAAO,GAAA,EAAKC,KAAK;IAAEF,SAAS,GAAGA,OAAAA,KAAYG,gBAAgBJ,QAAAA,CAAAA;EAAY,CAAA;AAC7E;AAFSF;AAOT,SAASO,YAAYN,KAAeO,OAAgBL,UAAU,2BAAyB;AACrFF,MAAIG,OAAO,GAAA,EAAKC,KAAK;IAAEF;IAASK,OAAOC,eAAeD,KAAAA;EAAO,CAAA;AAC/D;AAFSD;AAOF,SAASG,6BAA6BC,QAAc;AACzD,QAAMC,aAAaC,MAAKF,QAAQ,YAAA;AAEhC,SAAO,OAAOG,KAAcb,QAAAA;AAC1B,UAAMc,WAAWD,IAAIE,OAAOD,WAAW,IAAIE,KAAI;AAC/C,QAAI,CAACF,SAAS;AACZ,aAAOd,IAAIG,OAAO,GAAA,EAAKC,KAAK;QAAEF,SAAS;MAAsB,CAAA;IAC/D;AAEA,UAAMe,QAAQC,WAAWL,IAAIM,MAAMF,OAA6B,KAAK,GAAA;AAErE,QAAI;AACF,YAAMG,UAAU,MAAMC,sBAAsBV,YAAYG,SAASG,KAAAA;AACjE,UAAI,CAACG,SAAS;AACZ,eAAOrB,eAAeC,KAAKW,UAAAA;MAC7B;AACAX,UAAII,KAAK;QACPkB,MAAMjB,gBAAgBM,UAAAA;QACtBG;QACAS,OAAOH,QAAQI;QACfJ;MACF,CAAA;IACF,SAASb,OAAO;AACdD,kBAAYN,KAAKO,KAAAA;IACnB;EACF;AACF;AA1BgBE;AA+BT,SAASgB,6BAA6Bf,QAAc;AACzD,QAAMgB,eAAed,MAAKF,QAAQ,WAAA;AAElC,SAAO,OAAOG,KAAcb,QAAAA;AAC1B,UAAM2B,OAA
OC,iBAAiBf,IAAIM,MAAMQ,MAA4B,CAAA;AACpE,UAAME,WAAWX,WAAWL,IAAIM,MAAMU,UAAgC,IAAI,GAAA;AAC1E,UAAMC,aAAa,OAAOjB,IAAIM,MAAMY,SAAS,WAAWlB,IAAIM,MAAMY,KAAKf,KAAI,IAAKgB;AAChF,UAAMC,eAAe,OAAOpB,IAAIM,MAAMe,WAAW,WAAWrB,IAAIM,MAAMe,OAAOlB,KAAI,EAAGmB,YAAW,IAAKH;AAEpG,QAAI;AACF,YAAMI,SAAS,MAAMC,qBAAqBX,cAAcC,MAAME,UAAUC,YAAYG,YAAAA;AACpF,UAAI,CAACG,QAAQ;AACX,eAAOrC,eAAeC,KAAK0B,YAAAA;MAC7B;AACA1B,UAAII,KAAK;QACPkB,MAAMjB,gBAAgBqB,YAAAA;QACtB,GAAGU;QACHL,MAAMD,cAAc;QACpBI,QAAQD,gBAAgB;QACxBV,OAAOa,OAAOE,MAAMd;MACtB,CAAA;IACF,SAASjB,OAAO;AACdD,kBAAYN,KAAKO,OAAO,0BAAA;IAC1B;EACF;AACF;AAzBgBkB;AA8BT,SAASc,wBAAwB7B,QAAc;AACpD,SAAO,OAAOG,KAAcb,QAAAA;AAC1B,UAAMwC,YAAY3B,IAAIE,OAAOyB,YAAY,IAAIxB,KAAI;AACjD,QAAI,CAACwB,UAAU;AACb,aAAOxC,IAAIG,OAAO,GAAA,EAAKC,KAAK;QAAEF,SAAS;MAAuB,CAAA;IAChE;AAEA,UAAMyB,OAAOC,iBAAiBf,IAAIM,MAAMQ,MAA4B,CAAA;AACpE,UAAME,WAAWX,WAAWL,IAAIM,MAAMU,UAAgC,KAAK,GAAA;AAE3E,QAAI;AACF,YAAM5B,WAAWwC,mBAAmB/B,QAAQ8B,QAAAA;AAC5C,YAAMJ,SAAS,MAAMM,gBAAgBzC,UAAU0B,MAAME,QAAAA;AACrD,UAAI,CAACO,QAAQ;AACX,eAAOrC,eAAeC,KAAKC,QAAAA;MAC7B;AAEAD,UAAII,KAAK;QACPkB,MAAMjB,gBAAgBJ,QAAAA;QACtB,GAAGmC;MACL,CAAA;IACF,SAAS7B,OAAO;AACdD,kBAAYN,KAAKO,OAAO,mCAAA;IAC1B;EACF;AACF;AAzBgBgC;AA0CT,SAASI,2BAA2BjC,QAAc;AACvD,SAAO,OAAOG,KAAcb,QAAAA;AAC1B,UAAMiB,QAAQC,WAAWL,IAAIM,MAAMF,OAA6B,KAAK,GAAA;AACrE,UAAM2B,SAAShB,iBAAiBf,IAAIM,MAAMyB,QAA8B,CAAA;AAGxE,UAAMC,SAAShC,IAAIM,MAAM0B,SACrBC,OAAOjC,IAAIM,MAAM0B,MAAM,EAAEE,MAAM,GAAA,EAAKC,IAAIC,CAAAA,MAAKA,EAAEjC,KAAI,CAAA,EAAIkC,OAAOC,OAAAA,IAC9DnB;AAGJ,UAAMoB,UAAUvC,IAAIM,MAAMiC,UACtBN,OAAOjC,IAAIM,MAAMiC,OAAO,EAAEL,MAAM,GAAA,EAAKC,IAAIK,CAAAA,MAAKA,EAAErC,KAAI,CAAA,EAAIkC,OAAOC,OAAAA,IAC/DnB;AAEJ,QAAI;AACF,YAAMI,SAAS,MAAMkB,eAAe5C,QAAQ;QAC1CO;QACA2B;QACAC;QACAO;MACF,CAAA;AAEA,UAAI,CAAChB,QAAQ;AACX,eAAOpC,IAAIG,OAAO,GAAA,EAAKC,KAAK;UAC1BF,SAAS;UACTqD,MAAM;QACR,CAAA;MACF;AAEAvD,UAAII,KAAKgC,MAAAA;IACX,SAAS7B,OAAO;AACdD,kBAAYN,KAAKO,OAAO,4BAAA;IAC1B;EACF;AACF;AAnCgBoC;AAwCT,SAASa,4BAA4B9C,QAAc;AACxD,QAAMgB,eAAed,MAAKF,QAAQ,WAAA;AAElC,SAAO,OAAOG,KAAcb,QAAAA;AAC1B,UAAMyD,UAAU,OAAO5C,IAAIM,MAAMsC,YAAY,WAAW5C,IAAIM,MAAMsC,QAAQzC,KAAI,IAAKgB;AACnF,QAAI,CAACyB,SAAS;AACZ,aAAOzD,IAAIG,OAAO,GAAA,EAAKC,KAAK;QAAEF,SAAS;MAAsB,CAAA;IAC/D;AAEA,UAAMwD,YAAY,OAAO7C,IAAIM,MAAMuC,cAAc,WAAW7C,IAAIM,MAAMuC,UAAU1C,KAAI,IAAKgB;AACzF,UAAMD,QAAO,OAAOlB,IAAIM,MAAMY,SAAS,WAAWlB,IAAIM,MAAMY,KAAKf,KAAI,IAAK;AAC1E,UAAMC,QAAQC,WAAWL,IAAIM,MAAMF,OAA6B,IAAI,GAAA;AAEpE,QAAI;AACF,YAAMmB,SAAS,MAAMuB,gBAAgBjC,cAAc+B,SAAS1B,OAAMd,OAAOyC,SAAAA;AACzE,UAAI,CAACtB,QAAQ;AACX,eAAOrC,eAAeC,KAAK0B,YAAAA;MAC7B;AACA1B,UAAII,KAAK;QACPkB,MAAMjB,gBAAgBqB,YAAAA;QACtBK,MAAAA;QACA,GAAGK;MACL,CAAA;IACF,SAAS7B,OAAO;AACdD,kBAAYN,KAAKO,OAAO,0BAAA;IAC1B;EACF;AACF;AA3BgBiD;AA6BT,SAASI,8BAA8BlD,QAAc;AAC1D,QAAMgB,eAAed,MAAKF,QAAQ,YAAA;AAElC,SAAO,OAAOG,KAAcb,QAAAA;AAC1B,UAAM6D,cAAchD,IAAIE,OAAO8C,cAAc,IAAI7C,KAAI;AACrD,QAAI,CAAC6C,YAAY;AACf,aAAO7D,IAAIG,OAAO,GAAA,EAAKC,KAAK;QAAEF,SAAS;MAAyB,CAAA;IAClE;AAEA,UAAM6B,QAAO,OAAOlB,IAAIM,MAAMY,SAAS,WAAWlB,IAAIM,MAAMY,KAAKf,KAAI,IAAK;AAE1E,QAAI;AACF,YAAMoB,SAAS,MAAM0B,kBAAkBpC,cAAcK,OAAM8B,UAAAA;AAC3D,UAAI,CAACzB,QAAQ;AACX,eAAOrC,eAAeC,KAAK0B,YAAAA;MAC7B;AACA1B,UAAII,KAAK;QACPkB,MAAMjB,gBAAgBqB,YAAAA;QACtB,GAAGU;MACL,CAAA;IACF,SAAS7B,OAAO;AACdD,kBAAYN,KAAKO,OAAO,0BAAA;IAC1B;EACF;AACF;AAxBgBqD;AAkCT,SAASG,oCAAoCrD,QAAc;AAChE,QAAMsD,gBAAgBpD,MAAKF,QAAQ,YAAA;AAEnC,SAAO,OAAOG,KAAcb,QAAAA;AAC1B,UAAMiE,eAAe,OAAOpD,IAAIM,MAAM+C,kBAAkB,WAAWrD,IAAIM,MAAM+C,cAAclD,KAAI,IAAKgB;AACpG,QAAI,CAACiC,cAAc;AACjB,aAAOjE,IAAIG,OAAO,GAAA,EAAKC,KAAK;QAAEF,SAAS;MAA4B,CAAA;IACrE;AAEA,UAAMe,QAAQC,WAAWL,IAAIM,MAAMF,O
AA6B,IAAI,GAAA;AAEpE,QAAI;AACF,YAAMmB,SAAS,MAAM+B,wBAAwBH,eAAeC,cAAchD,KAAAA;AAC1E,UAAI,CAACmB,QAAQ;AACX,eAAOrC,eAAeC,KAAKgE,aAAAA;MAC7B;AACAhE,UAAII,KAAK;QACPkB,MAAMjB,gBAAgB2D,aAAAA;QACtB,GAAG5B;MACL,CAAA;IACF,SAAS7B,OAAO;AACdD,kBAAYN,KAAKO,OAAO,2BAAA;IAC1B;EACF;AACF;AAxBgBwD;;;AO3OhB,OAAOK,WAAU;AAcjB,SAASC,mBACPC,MACAC,MACAC,SAAe;AAEf,SAAO,IAAIC,QAAQ,CAACC,YAAAA;AAClB,UAAMC,YAAYC,KAAKC,IAAG;AAE1B,UAAMC,MAAMC,MAAKC,QACf;MACEC,UAAUX;MACVC;MACAW,MAAM;MACNC,QAAQ;MACRX;IACF,GACA,CAACY,SAAAA;AACC,YAAMC,eAAeT,KAAKC,IAAG,IAAKF;AAElCD,cAAQ;QACNY,WAAW;QACXD;MACF,CAAA;IACF,CAAA;AAGFP,QAAIS,GAAG,WAAW,MAAA;AAChBT,UAAIU,QAAO;AACXd,cAAQ;QACNY,WAAW;QACXG,OAAO;MACT,CAAA;IACF,CAAA;AAEAX,QAAIS,GAAG,SAAS,CAACG,QAAAA;AACfhB,cAAQ;QACNY,WAAW;QACXG,OAAOC,IAAIC;MACb,CAAA;IACF,CAAA;AAEAb,QAAIc,IAAG;EACT,CAAA;AACF;AA3CSvB;AAgDF,SAASwB,yBAAyBC,UAAoC,CAAC,GAAC;AAC7E,QAAM,EACJC,aAAaC,OAAOC,QAAQC,IAAIC,WAAW,KAAK,KAChDC,aAAa,aACb5B,UAAU,IAAI,IACZsB;AACJ,SAAQ,OAAOO,MAAMC,QAAAA;AACnB,QAAI;AACF,YAAMC,SAAS,MAAMlC,mBAAmB+B,YAAYL,YAAYvB,OAAAA;AAEhE,UAAI+B,OAAOjB,WAAW;AACpBgB,YAAIE,OAAO,GAAA,EAAKC,KAAK;UACnBD,QAAQ;UACRE,SAAS,GAAGN,UAAAA,IAAcL,UAAAA;UAC1BV,cAAckB,OAAOlB;UACrBsB,YAAW,oBAAI/B,KAAAA,GAAOgC,YAAW;QACnC,CAAA;MACF,OAAO;AACLN,YAAIE,OAAO,GAAA,EAAKC,KAAK;UACnBD,QAAQ;UACRE,SAAS,GAAGN,UAAAA,IAAcL,UAAAA;UAC1BN,OAAOc,OAAOd;UACdkB,YAAW,oBAAI/B,KAAAA,GAAOgC,YAAW;QACnC,CAAA;MACF;IACF,SAASnB,OAAO;AACda,UAAIE,OAAO,GAAA,EAAKC,KAAK;QACnBD,QAAQ;QACRE,SAAS,GAAGN,UAAAA,IAAcL,UAAAA;QAC1BN,OAAOA,iBAAiBoB,QAAQpB,MAAME,UAAU;QAChDgB,YAAW,oBAAI/B,KAAAA,GAAOgC,YAAW;MACnC,CAAA;IACF;EACF;AACF;AAlCgBf;;;AV9CT,SAASiB,mBAAmBC,UAAyB,CAAC,GAAC;AAC5D,QAAMC,SAASC,cAAcF,QAAQC,MAAM;AAC3C,QAAME,SAASC,SAAQC,OAAM;AAG7BF,SAAOG,IAAI,uBAAuBC,6BAA6BN,MAAAA,CAAAA;AAG/DE,SAAOG,IAAI,iBAAiBE,6BAA6BP,MAAAA,CAAAA;AAGzDE,SAAOG,IAAI,oBAAoBG,wBAAwBR,MAAAA,CAAAA;AAGvDE,SAAOG,IAAI,gBAAgBI,2BAA2BT,MAAAA,CAAAA;AAGtDE,SAAOG,IAAI,uBAAuBK,4BAA4BV,MAAAA,CAAAA;AAG9DE,SAAOG,IAAI,8BAA8BM,8BAA8BX,MAAAA,CAAAA;AAGvEE,SAAOG,IAAI,0BAA0BO,oCAAoCZ,MAAAA,CAAAA;AAEzEE,SAAOG,IAAI,WAAWQ,yBAAAA,CAAAA;AAEtB,SAAOX;AACT;AA5BgBJ;;;AWLhB,IAAMgB,kBAA+B;EACnC;IACEC,QAAQ;IACRC,MAAM;IACNC,aAAa;EACf;EACA;IACEF,QAAQ;IACRC,MAAM;IACNC,aAAa;EACf;EACA;IACEF,QAAQ;IACRC,MAAM;IACNC,aAAa;EACf;EACA;IACEF,QAAQ;IACRC,MAAM;IACNC,aAAa;EACf;EACA;IACEF,QAAQ;IACRC,MAAM;IACNC,aAAa;EACf;EACA;IACEF,QAAQ;IACRC,MAAM;IACNC,aAAa;EACf;;AAOK,SAASC,wBAAwBC,UAAoC,CAAC,GAAC;AAC5E,QAAM,EAAEC,OAAM,IAAKD;AAEnB,SAAO;IACLE,MAAM;IACNC,WAAW;IACXC,QAAQT;IAERU,SAAS,wBAACC,YAA+BA,QAAQC,OAAxC;IAETC,cAAc,wBAACF,YAAAA;AACb,aAAOG,mBAAmB;QAAER,QAAQA,UAAUK,QAAQL;MAAO,CAAA;IAC/D,GAFc;EAGhB;AACF;AAdgBF;;;AChDhB,OAAOW,cAAyB;;;ACChC,SAASC,QAAAA,aAAY;AACrB,OAAOC,UAAQ;;;ACFf,SAASC,cAAAA,aAAYC,QAAAA,aAAY;AACjC,OAAOC,SAAQ;AAKR,SAASC,eAAcC,UAAiB;AAC7C,MAAI,CAACA,UAAU;AACb,WAAOC,MAAKC,QAAQC,IAAG,GAAI,MAAA;EAC7B;AACA,SAAOC,YAAWJ,QAAAA,IAAYA,WAAWC,MAAKC,QAAQC,IAAG,GAAIH,QAAAA;AAC/D;AALgBD,OAAAA,gBAAAA;AAUT,SAASM,UAAUC,KAAW;AACnC,MAAI,CAACC,IAAGC,WAAWF,GAAAA,GAAM;AACvBC,IAAAA,IAAGE,UAAUH,KAAK;MAAEI,WAAW;IAAK,CAAA;EACtC;AACF;AAJgBL;AAST,SAASM,gBAAeC,OAAc;AAC3C,SAAOA,iBAAiBC,QACpB;IAAEC,MAAMF,MAAME;IAAMC,SAASH,MAAMG;EAAQ,IAC3C;IAAEA,SAASC,OAAOJ,KAAAA;EAAO;AAC/B;AAJgBD,OAAAA,iBAAAA;;;ADTT,SAASM,mBAAmBC,QAAgBC,UAAgB;AACjE,QAAMC,WAAWC,MAAKH,QAAQC,QAAAA;AAE9BG,YAAUJ,MAAAA;AAEV,SAAO,OAAOK,KAAcC,QAAAA;AAC1B,QAAI;AACF,YAAMC,aAAaF,IAAIG;AACvB,UAAG,CAACD,WAAWE,SAAQ;AACrB,eAAOH,IAAII,OAAO,GAAA,EAAKC,KAAK;UAAEF,SAAS;QAAsB,CAAA;MAC/D;AACA,YAAMG,UAAUC,KAAKC,UAAU;QAC7B,GAAGP;QACHQ,cAAa,oBAAIC,KAAAA,GAAOC,YAAW;MACrC,CAAA,IAAK;AACL,YAAMC,KAAGC,SAASC,WAAWlB,UAAUU,OAAAA;AACvC
N,UAAIK,KAAK;QACPU,SAAS;MACX,CAAA;IACF,SAASC,OAAO;AACdC,MAAAA,aAAYjB,KAAKgB,OAAO,wBAAA;IAC1B;EACF;AACF;AAvBgBvB;AAwBT,SAASyB,wBAAwBxB,QAAgBC,UAAgB;AACtE,QAAMC,WAAWC,MAAKH,QAAQC,QAAAA;AAE9BG,YAAUJ,MAAAA;AAEV,SAAO,OAAOK,KAAcC,QAAAA;AAC1B,QAAI;AACF,YAAMmB,cAAcpB,IAAIG;AACxB,UAAG,CAACkB,MAAMC,QAAQF,WAAAA,GAAa;AAC7B,eAAOnB,IAAII,OAAO,GAAA,EAAKC,KAAK;UAAEF,SAAS;QAA+B,CAAA;MACxE;AACA,YAAMmB,WAAW,CAAA;AACjB,iBAAWrB,cAAckB,aAAa;AACpCG,iBAASC,KAAKhB,KAAKC,UAAU;UAC3B,GAAGP;UACHQ,cAAa,oBAAIC,KAAAA,GAAOC,YAAW;QACrC,CAAA,IAAK,IAAA;MACP;AACA,YAAMC,KAAGC,SAASC,WAAWlB,UAAU0B,SAASzB,KAAK,EAAA,CAAA;AACrDG,UAAIK,KAAK;QACPU,SAAS;MACX,CAAA;IACF,SAASC,OAAO;AACdC,MAAAA,aAAYjB,KAAKgB,OAAO,wBAAA;IAC1B;EACF;AACF;AA1BgBE;AA4BhB,SAASD,aAAYjB,KAAegB,OAAgBb,UAAU,0BAAwB;AACpFH,MAAII,OAAO,GAAA,EAAKC,KAAK;IAAEF;IAASa,OAAOQ,gBAAeR,KAAAA;EAAO,CAAA;AAC/D;AAFSC,OAAAA,cAAAA;;;ADrDF,SAASQ,oBAAmBC,UAAyB,CAAC,GAAC;AAC5D,QAAMC,SAASC,eAAcF,QAAQC,MAAM;AAC3C,QAAME,SAASC,SAAQC,OAAM;AAI7BF,SAAOG,KAAK,YAAYF,SAAQG,KAAI,GAAIC,mBAAmBP,QAAQD,QAAQS,YAAY,YAAA,CAAA;AACvFN,SAAOG,KAAK,kBAAkBF,SAAQG,KAAI,GAAIG,wBAAwBT,QAAQD,QAAQS,YAAY,YAAA,CAAA;AAElG,SAAON;AACT;AAVgBJ,OAAAA,qBAAAA;;;AGJhB,IAAMY,mBAA+B;EACnC;IACEC,QAAQ;IACRC,MAAM;IACNC,aAAa;EACf;;AAOK,SAASC,4BAA4BC,UAAoC,CAAC,GAAC;AAChF,QAAM,EAAEC,OAAM,IAAKD;AAEnB,SAAO;IACLE,MAAM;IACNC,WAAW;IACXC,QAAQT;IACRU,SAAS,wBAACC,YAA+BA,QAAQC,OAAxC;IACTC,cAAc,wBAACF,YAAAA;AACb,aAAOG,oBAAmB;QACxBR,QAAQA,UAAUK,QAAQL;QAC1BS,UAAUV,QAAQU,YAAY;MAChC,CAAA;IACF,GALc;EAMhB;AACF;AAfgBX;;;ArBXhB,SAASY,kBAAkBC,YAAsB;AAC/C,SAAO,kBAAkBA,cAAcA,WAAWC,iBAAiBC;AACrE;AAFSH;AAOT,SAASI,mBAAmBH,YAAsB;AAChD,SAAO,mBAAmBA,cAAcA,WAAWI,kBAAkBF;AACvE;AAFSC;AAOT,SAASE,iBAAiBC,UAAkBC,WAAiB;AAC3D,QAAMC,YAAYC,MAAKC,MAAMC,KAAKL,UAAUC,SAAAA;AAC5C,SAAOC,UAAUI,WAAW,GAAA,IAAOJ,YAAY,IAAIA,SAAAA;AACrD;AAHSH;AAQT,SAASQ,0BACPb,YACAc,eAAqB;AAErB,MAAId,WAAWe,UAAUf,WAAWe,OAAOC,SAAS,GAAG;AACrDC,YAAQC,IAAI,4BAA4BlB,WAAWmB,IAAI,OAAOL,aAAAA,EAAe;AAC7Ed,eAAWe,OAAOK,QAAQ,CAACC,UAAAA;AACzB,YAAMb,YAAYa,MAAMZ,SAAS,MAAMK,gBAAgBL,MAAKC,MAAMC,KAAKG,eAAeO,MAAMZ,IAAI;AAChGQ,cAAQC,IAAI,KAAKG,MAAMC,MAAM,IAAId,SAAAA,MAAea,MAAME,WAAW,EAAE;IACrE,CAAA;EACF,OAAO;AACLN,YAAQC,IAAI,4BAA4BlB,WAAWmB,IAAI,OAAOL,aAAAA,EAAe;EAC/E;AACF;AAbSD;AAkBT,eAAeW,wBACbC,QACAzB,YACA0B,SAA0B;AAE1B,MAAI,CAAC1B,WAAWO,WAAW;AACzBU,YAAQU,MACN,gBAAgB3B,WAAWmB,IAAI,mDAAmD;AAEpF;EACF;AAEA,QAAMS,SAAS5B,WAAWC,aAAayB,OAAAA;AACvC,QAAMZ,gBAAgBT,iBAAiBqB,QAAQpB,UAAUN,WAAWO,SAAS;AAE7EkB,SAAOI,IAAIf,eAAec,MAAAA;AAC1Bf,4BAA0Bb,YAAYc,aAAAA;AACxC;AAjBeU;AAsBf,eAAeM,yBACbL,QACAzB,YACA0B,SAA0B;AAE1B,MAAI1B,WAAWO,WAAW;AACxBU,YAAQc,KACN,gBAAgB/B,WAAWmB,IAAI,sEACRnB,WAAWO,SAAS,IAAI;EAEnD;AAEA,QAAMyB,UAAUhC,WAAWI,cAAcsB,OAAAA;AACzCD,SAAOI,IAAIG,OAAAA;AACXf,UAAQC,IAAI,mCAAmClB,WAAWmB,IAAI,EAAE;AAClE;AAfeW;AAsDf,eAAsBG,oBACpBR,QACAS,aACAC,SAAoC;AAGpC,QAAMT,UAA6B;IACjCpB,UAAU;IACV8B,OAAOC,QAAQC,IAAIC,aAAa;IAChCC,SAASH,QAAQI,IAAG;IACpB,GAAGN;EACL;AAEA,QAAMO,iBAAiB;OAAIR;;AAE3B,aAAWlC,cAAc0C,gBAAgB;AAEvC,QAAI1C,WAAW2C,WAAW,CAAC3C,WAAW2C,QAAQjB,OAAAA,GAAU;AACtD;IACF;AAEA,QAAI;AAEF,YAAMkB,kBAAkB,kBAAkB5C,cAAc,OAAQA,WAAmBC,iBAAiB;AACpG,YAAM4C,mBAAmB,mBAAmB7C,cAAc,OAAQA,WAAmBI,kBAAkB;AAEvG,UAAIwC,mBAAmBC,kBAAkB;AACvC5B,gBAAQc,KACN,gBAAgB/B,WAAWmB,IAAI,gGACiB;MAEpD;AAEA,UAAIpB,kBAAkBC,UAAAA,GAAa;AAEjC,cAAMwB,wBAAwBC,QAAQzB,YAAY0B,OAAAA;MACpD,WAAWvB,mBAAmBH,UAAAA,GAAa;AAEzC,cAAM8B,yBAAyBL,QAAQzB,YAAY0B,OAAAA;MACrD,OAAO;AAELT,gBAAQU,MACN,gBAAiB3B,WAAmBmB,QAAQ,SAAA,sDAA+D;MAE/G;IACF,SAASQ,OAAO;AACdV,cAAQU,MAAM,mCAAmC3B,WAAWmB,IAAI,KAAKQ,KAAAA;IACvE;EACF;AACF;AAjDsBM;","names":["normalizeBasePath","basePath","normalizedBasePath","s
tartsWith","basePathWithoutTrailingSlash","endsWith","slice","fs","path","HEADER_COMMENT","ensureHeaderComment","source","text","startsWith","slice","HEADER_COMMENT","length","stripLeadingNewlines","trimmed","value","current","collapseExtraBlankLines","replace","removePgSchemaDeclarations","source","replace","convertSchemaTableInvocations","converted","text","pinyin","renamePgTableConstants","source","pgTableRegex","renames","updated","replace","match","currentName","factory","tableName","sanitized","sanitizeIdentifier","push","from","to","equalsIndex","indexOf","suffix","slice","normalizedSuffix","trimStart","text","updateTableReferenceIdentifiers","length","reduce","acc","rename","pattern","RegExp","escapeRegExp","value","toCamelCase","str","words","split","filter","Boolean","map","word","index","toLowerCase","charAt","toUpperCase","join","name","asciiName","toAsciiName","test","transliterated","pinyin","toneType","type","error","CUSTOM_TYPE_PATTERN","replaceUnknownColumns","source","lines","split","result","replaced","unmatched","i","length","line","match","CUSTOM_TYPE_PATTERN","typeName","factory","replacedLine","replaceFollowingUnknown","push","trim","includes","text","join","nextLine","undefined","replace","fs","path","tweakImports","source","importRegex","match","identifiers","split","map","id","trim","filter","Boolean","filteredIdentifiers","timestampUsageRegex","test","includes","push","unique","Array","from","Set","replacement","join","replace","inlineCustomTypes","source","text","replace","possiblePaths","path","resolve","__dirname","templatePath","possiblePath","fs","existsSync","console","warn","inlineFromTemplate","templateContent","readFileSync","typeDefinitions","trim","needsSql","includes","needsCustomType","ensureImportIdentifier","importMatch","match","insertPoint","indexOf","length","slice","headerPrefix","HEADER_COMMENT","insertionPoint","startsWith","importSectionMatch","typeBlock","packageName","identifier","escapedPackage","importRegex","RegExp","identifiers","split","map","id","filter","Boolean","push","unique","Array","from","Set","replacement","join","addSystemFieldComments","source","commentMap","lines","split","i","length","line","entry","Object","entries","find","key","includes","description","previousLine","trim","startsWith","indentMatch","match","indent","comment","splice","join","removeConflictingSystemFields","systemFieldMap","result","inTable","tableStartLine","tableBusinessFields","Set","bracketDepth","test","clear","char","businessField","values","add","tableEndLine","j","tableLine","shouldRemove","systemField","has","push","filter","patchDrizzleKitDefects","source","fixed","text","replace","replaceTimestampWithCustomTypes","source","replaced","pattern","text","replace","match","quote","fieldName","options","hasWithTimezone","test","hasModeString","replaceDefaultNowWithSql","TABLE_ALIAS_MARKER","appendTableAliases","source","markerIndex","indexOf","base","slice","exportRegex","tableExports","Set","match","matchAll","name","add","size","aliasLines","Array","from","sort","map","join","prefix","trimEnd","postprocessDrizzleSchema","targetPath","resolvedPath","path","resolve","fs","existsSync","console","warn","undefined","text","readFileSync","ensureHeaderComment","patchResult","patchDrizzleKitDefects","removePgSchemaDeclarations","tableConversion","convertSchemaTableInvocations","renameResult","renamePgTableConstants","updateTableReferenceIdentifiers","renames","replacement","replaceUnknownColumns","timestampReplacement","replaceTimestampWithCustomTypes","
defaultNowReplacement","replaceDefaultNowWithSql","removeConflictingSystemFields","addSystemFieldComments","tweakImports","inlineCustomTypes","appendTableAliases","replace","collapseExtraBlankLines","writeFileSync","fixed","info","replaced","unmatched","length","forEach","line","converted","replacedUnknown","unmatchedUnknown","patchedDefects","replacedTimestamps","replacedDefaultNow","pluralize","mapDrizzleTypeToTS","field","typeMap","char","varchar","text","smallint","integer","int","bigint","serial","smallserial","bigserial","decimal","numeric","real","doublePrecision","boolean","timestamp","timestamptz","date","time","timetz","interval","uuid","json","jsonb","bytea","inet","cidr","macaddr","macaddr8","point","line","lseg","box","path","polygon","circle","array","customType","customTimestamptz","userProfile","fileAttachment","pgEnum","baseType","type","isArray","endsWith","enumValues","length","map","v","join","toPascalCase","str","replace","split","word","charAt","toUpperCase","slice","toKebabCase","toLowerCase","toSnakeCase","generateDTO","table","className","toPascalCase","variableName","dto","field","fields","isPrimaryKey","name","startsWith","tsType","mapDrizzleTypeToTS","optional","nullable","hasDefault","decorators","generateValidationDecorators","isUpdate","isResponse","comment","type","length","isArray","generateController","routePath","toKebabCase","pluralize","filePath","toSnakeCase","pkField","find","f","pkType","pkName","controller","generateService","service","generateModule","module","Project","Node","DrizzleSchemaParser","project","projectOptions","Project","parseSchemaFile","filePath","sourceFile","addSourceFileAtPath","tables","variableStatements","getVariableStatements","statement","declarations","getDeclarations","declaration","initializer","getInitializer","Node","isCallExpression","expression","getExpression","getText","tableInfo","parsePgTable","getName","push","variableName","callExpr","args","getArguments","length","tableName","replace","fieldsArg","isObjectLiteralExpression","fields","properties","getProperties","prop","isPropertyAssignment","fieldName","leadingComments","getLeadingCommentRanges","comment","map","c","join","trim","fieldInfo","parseField","name","columnName","type","nullable","hasDefault","notNull","isPrimaryKey","isUnique","isArray","parseBaseType","parseCallChain","current","baseCall","isPropertyAccessExpression","typeName","firstArg","isStringLiteral","getLiteralText","parseTypeConfig","isArrayLiteralExpression","enumValues","getElements","el","objLiteral","propName","value","parseInt","undefined","precision","scale","defaultValue","withTimezone","mode","Error","methodName","refArg","match","references","table","column","join","mkdir","rm","writeFile","existsSync","parseAndGenerateNestResourceTemplate","options","parser","DrizzleSchemaParser","tsConfigFilePath","tables","parseSchemaFile","schemaFilePath","length","console","warn","sort","a","b","variableName","table","info","filePath","toSnakeCase","moduleDir","join","moduleOutputDir","existsSync","dto","generateDTO","controller","generateController","service","generateService","moduleFilePath","module","generateModule","mkdir","recursive","writeFile","err","error","message","rm","fs","path","http","https","errorHtmlTemplate","isConnectionError","err","code","connectionErrorCodes","includes","checkServiceAvailable","targetUrl","timeout","Promise","resolve","url","URL","isHttps","protocol","httpModule","https","http","req","request","hostname","port","path","method","res","available","statusCode
","headers","on","destroy","end","e","waitForServiceRecovery","interval","startTime","Date","now","isAvailable","setTimeout","getDirname","__dirname","getErrorHtmlTemplate","dirname","htmlPath","join","fs","readFileSync","parseLogLine","line","trimmed","trim","match","content","readRecentErrorLogs","logDir","maxLogs","fileName","logFilePath","fileStats","promises","stat","logs","hasCompileError","fileSize","size","maxReadSize","readSize","Math","min","startPosition","max","buffer","Buffer","allocUnsafe","fileHandle","open","read","error","console","close","toString","lines","split","length","shift","allLines","parsed","push","startIndex","i","log","fallbackLogs","slice","checkForErrors","endIndex","errorSection","compileErrorMatch","errorCount","parseInt","injectTemplateData","template","clientBasePath","replace","handleDevProxyError","options","process","cwd","maxErrorLogs","logFileName","retryTimeout","retryInterval","target","env","SERVER_PORT","CLIENT_BASE_PATH","clientBasePathWithoutSlash","normalizeBasePath","message","headersSent","isConnError","recovered","sendSimpleRedirect","html","writeHead","originalUrl","path","express","fs","crypto","promises","fs","path","ts","path","promises","fs","findControllerFiles","dir","files","scan","currentDir","entries","fs","readdir","withFileTypes","entry","fullPath","path","join","name","isDirectory","isFile","endsWith","push","buildSourceMap","controllerFiles","processFile","sourceMap","Map","concurrency","results","i","length","batch","slice","batchResults","Promise","all","map","filePath","metadata","operationId","info","set","findSourceInfo","directMatch","get","key","value","className","methodName","split","camelCaseId","charAt","toLowerCase","toUpperCase","undefined","enhanceOpenApiPaths","openapi","enhancedCount","paths","pathItem","Object","values","operation","sourceInfo","file","line","transformOpenapiPaths","basePath","newPaths","keys","forEach","staticApiKey","startsWith","enhanceOpenApiWithSourceInfo","options","startTime","Date","now","openapiPath","path","resolve","__dirname","serverDir","writeFile","openapi","openapiData","JSON","parse","stringify","openapiContent","fs","readFile","controllerFiles","findControllerFiles","sourceMap","buildSourceMap","processControllerFile","enhanced","enhanceOpenApiPaths","duration","stats","controllersFound","length","endpointsExtracted","size","endpointsEnhanced","filePath","relativePath","relative","process","cwd","content","sourceFile","ts","createSourceFile","ScriptTarget","Latest","extractControllerMetadata","metadata","Map","controllerPath","className","getDecorators","node","Array","isArray","modifiers","filter","mod","kind","SyntaxKind","Decorator","decorators","visit","isClassDeclaration","name","getText","decorator","isCallExpression","expression","decoratorName","arguments","arg","isStringLiteral","text","isMethodDeclaration","methodName","httpMethod","routePath","line","getLineAndCharacterOfPosition","getStart","includes","toLowerCase","operationId","set","file","method","forEachChild","createOpenapiHandler","openapiFilePath","enableEnhancement","serverDir","cache","_req","res","context","fileBuffer","fs","readFile","currentHash","crypto","createHash","update","digest","fileHash","json","data","payload","JSON","parse","isDev","openapi","enhancedPayload","stats","enhanceOpenApiWithSourceInfo","openapiData","writeFile","rootDir","console","log","duration","endpointsEnhanced","result","transformOpenapiPaths","basePath","error","message","Error","status","createOpenapiRouter","options","co
ntext","openapiFilePath","enableEnhancement","serverDir","router","express","Router","handler","createOpenapiHandler","get","req","res","OPENAPI_ROUTES","method","path","description","createOpenapiMiddleware","options","openapiFilePath","enableEnhancement","serverDir","name","mountPath","routes","enabled","context","isDev","createRouter","createOpenapiRouter","express","promises","fs","isAbsolute","join","relative","pathPatternToRegex","pattern","regexPattern","replace","RegExp","matchesPathPattern","actualPath","normalizedActual","normalizePathForMatching","normalizedPattern","hasSpecialPatterns","regex","test","hasSpecialPatterns","pattern","test","normalizePathForMatching","path","replace","resolveLogDir","provided","join","process","cwd","isAbsolute","getRelativePath","filePath","relative","fileExists","fs","access","parseLogLine","line","trimmed","trim","undefined","JSON","parse","extractNumber","message","pattern","match","value","Number","isFinite","parseLimit","defaultValue","maxValue","parsed","Math","min","floor","parsePositiveInt","fallback","resolveLogFilePath","baseDir","fileName","sanitized","replace","segments","split","filter","Boolean","some","segment","Error","resolved","rel","startsWith","matchesPath","actualPath","matchesPathPattern","matchesMethod","actualMethod","expectedMethod","toUpperCase","serializeError","error","name","String","join","promises","fs","readFileReverse","filePath","chunkSize","processLine","handle","fs","open","stats","stat","position","size","remainder","length","Math","min","buffer","Buffer","alloc","read","chunk","toString","lines","split","shift","i","close","buildPaginatedResponse","items","page","pageSize","totalItems","totalPages","ceil","startIndex","endIndex","pagedItems","slice","map","builder","traceId","method","path","startTime","endTime","statusCode","durationMs","entries","reverse","totalCalls","calls","generateUUID","replace","c","r","Math","random","v","toString","mapPinoLevelToServerLogLevel","pinoLevel","lower","toLowerCase","extractLogLevel","text","includes","parsePinoLog","line","source","pinoLog","JSON","parse","id","level","timestamp","Date","time","getTime","message","msg","context","traceId","trace_id","userId","user_id","appId","app_id","tenantId","tenant_id","stack","meta","pid","hostname","path","method","statusCode","status_code","durationMs","duration_ms","ip","requestBody","request_body","responseBody","response_body","tags","error","parseStdLog","match","now","timeStr","content","isoStr","isNaN","createReadStream","createInterface","readLogEntriesByTrace","filePath","traceId","limit","exists","fileExists","undefined","matches","stream","createReadStream","encoding","rl","createInterface","input","crlfDelay","Infinity","line","entry","parseLogLine","trace_id","push","length","shift","close","readRecentTraceCalls","page","pageSize","pathFilter","methodFilter","config","maxEntriesPerTrace","chunkSize","builders","Map","completedCalls","createTraceBuilder","entries","method","path","startTime","endTime","statusCode","durationMs","hasCompleted","updateBuilderMetadata","builder","String","handleRequestCompleted","message","time","extractNumber","pathMatches","matchesPath","methodMatches","matchesMethod","shouldInclude","processLogEntry","get","set","includes","processLine","readFileReverse","buildPaginatedResponse","readLogFilePage","capacity","buffer","totalLines","totalPages","Math","ceil","lines","startIndex","max","endIndex","bufferStartIndex","i","lineIndex","reverse","createReadStream","createInterface","join","readS
erverLogs","logDir","options","limit","offset","sources","allLogs","errors","source","logs","readLogsBySource","push","error","errorMsg","Error","message","String","console","warn","length","join","undefined","filteredLogs","levels","filter","log","includes","level","sort","a","b","timestamp","total","paginatedLogs","slice","hasMore","filePath","parser","line","parsePinoLog","parseStdLog","fileExists","stream","rl","createReadStream","encoding","createInterface","input","crlfDelay","Infinity","trim","parseError","close","createReadStream","createInterface","readTriggerList","filePath","trigger","path","limit","triggerID","fileExists","undefined","config","maxEntriesPerTrace","chunkSize","builders","Map","completedCalls","createTraceBuilder","traceId","entries","method","startTime","endTime","statusCode","durationMs","hasCompleted","shouldIncludeInCompletedCalls","builder","alreadyAdded","some","call","isAutomationTrigger","endsWith","length","requestEntry","find","e","request_body","String","updateBuilderMetadata","entry","push","shift","pop","handleRequestCompleted","message","time","extractNumber","processLogEntry","trace_id","get","set","includes","processLine","line","parseLogLine","readFileReverse","page","pageSize","totalCalls","totalPages","calls","map","slice","reverse","readTriggerDetail","instanceID","exists","matches","stream","createReadStream","encoding","rl","createInterface","input","crlfDelay","Infinity","hasInstanceID","instance_id","close","readCapabilityTraceList","filePath","capabilityId","limit","fileExists","undefined","config","chunkSize","builders","Map","completedTraces","createCapabilityTraceBuilder","traceId","capId","hasCompleted","hasStartEntry","shouldIncludeInCompletedTraces","builder","alreadyAdded","some","trace","updateBuilderMetadata","entry","plugin_key","pluginKey","String","action","message","includes","startTime","time","input","endTime","status","output","duration_ms","durationMs","Number","push","error","processLogEntry","trace_id","capability_id","get","set","processLine","line","parseLogLine","readFileReverse","sort","a","b","timeA","Date","getTime","timeB","limitedTraces","slice","totalTraces","length","traces","map","handleNotFound","res","filePath","message","status","json","getRelativePath","handleError","error","serializeError","createGetTraceEntriesHandler","logDir","appLogPath","join","req","traceId","params","trim","limit","parseLimit","query","entries","readLogEntriesByTrace","file","count","length","createGetRecentTracesHandler","traceLogPath","page","parsePositiveInt","pageSize","pathFilter","path","undefined","methodFilter","method","toUpperCase","result","readRecentTraceCalls","calls","createGetLogFileHandler","fileName","resolveLogFilePath","readLogFilePage","createGetServerLogsHandler","offset","levels","String","split","map","l","filter","Boolean","sources","s","readServerLogs","hint","createGetTriggerListHandler","trigger","triggerID","readTriggerList","createGetTriggerDetailHandler","instanceID","readTriggerDetail","createGetCapabilityTraceListHandler","serverLogPath","capabilityId","capability_id","readCapabilityTraceList","http","checkServiceHealth","host","port","timeout","Promise","resolve","startTime","Date","now","req","http","request","hostname","path","method","_res","responseTime","available","on","destroy","error","err","message","end","createHealthCheckHandler","options","targetPort","Number","process","env","SERVER_PORT","targetHost","_req","res","result","status","json","service","timestamp","toISOString","Error","cre
ateDevLogRouter","options","logDir","resolveLogDir","router","express","Router","get","createGetTraceEntriesHandler","createGetRecentTracesHandler","createGetLogFileHandler","createGetServerLogsHandler","createGetTriggerListHandler","createGetTriggerDetailHandler","createGetCapabilityTraceListHandler","createHealthCheckHandler","DEV_LOGS_ROUTES","method","path","description","createDevLogsMiddleware","options","logDir","name","mountPath","routes","enabled","context","isDev","createRouter","createDevLogRouter","express","join","fs","isAbsolute","join","fs","resolveLogDir","provided","join","process","cwd","isAbsolute","ensureDir","dir","fs","existsSync","mkdirSync","recursive","serializeError","error","Error","name","message","String","collectLogsHandler","logDir","fileName","filePath","join","ensureDir","req","res","logContent","body","message","status","json","logLine","JSON","stringify","server_time","Date","toISOString","fs","promises","appendFile","success","error","handleError","collectLogsBatchHandler","logContents","Array","isArray","logLines","push","serializeError","createDevLogRouter","options","logDir","resolveLogDir","router","express","Router","post","json","collectLogsHandler","fileName","collectLogsBatchHandler","DEV_LOGS_ROUTES","method","path","description","createCollectLogsMiddleware","options","logDir","name","mountPath","routes","enabled","context","isDev","createRouter","createDevLogRouter","fileName","isRouteMiddleware","middleware","createRouter","undefined","isGlobalMiddleware","createHandler","computeMountPath","basePath","mountPath","routePath","path","posix","join","startsWith","logMiddlewareRegistration","fullMountPath","routes","length","console","log","name","forEach","route","method","description","registerRouteMiddleware","server","context","error","router","use","registerGlobalMiddleware","warn","handler","registerMiddlewares","middlewares","options","isDev","process","env","NODE_ENV","rootDir","cwd","allMiddlewares","enabled","hasCreateRouter","hasCreateHandler"]}